Bug 1885769 - Update harfbuzz to 8.3.1 r=jfkthame
Differential Revision: https://phabricator.services.mozilla.com/D204859
Parent: 1b81ac01ef
Commit: f0e50cc358
@@ -1,3 +1,25 @@
Overview of changes leading to 8.3.1
Saturday, March 16, 2024
====================================
- hb_blob_create_from_file_or_fail() on Windows will now try to interpret the
  file name as UTF-8 first, and as system code page if it is not valid UTF-8.
- Fix hb_style_get_value() in fonts with “STAT” table.
- Properly handle negative offsets in CFF table.
- Update IANA Language Subtag Registry to 2024-03-07.
- Subsetter now supports subsetting “BASE” table.
- Subsetter will update “hhea” font metrics in sync with “OS/2” ones.
- “--variations” option of “hb-subset” now supports leaving out values that
  should be unchanged, e.g. “wght=:500:” will change the default and keep max
  and min unchanged. It also supports “*=drop” to pin all axes to default
  location.
- Fix hb_ot_math_get_glyph_kerning() to match updated “MATH” table spec.
- Support legacy MacRoman encoding in “cmap” table.
- Various build fixes.
- Various subsetting and instancing fixes.

- New API:
  hb_subset_input_pin_all_axes_to_default()

Overview of changes leading to 8.3.0
Saturday, November 11, 2023
====================================

@@ -191,7 +213,7 @@ Saturday, February 11, 2023
====================================
- New hb-paint API that is designed mainly to paint “COLRv1” glyphs, but can be
  also used as a unified API to paint any of the glyph representations
  supported by HarfBuzz (B/W outlines, color layers, or color bitmaps).
  (Behdad Esfahbod, Matthias Clasen)
- New hb-cairo API for integrating with cairo graphics library. This is provided
  as a separate harfbuzz-cairo library. (Behdad Esfahbod, Matthias Clasen)

@@ -202,7 +224,7 @@ Saturday, February 11, 2023
  https://github.com/harfbuzz/boring-expansion-spec/blob/main/glyf1-cubicOutlines.md
  for spec. (Behdad Esfahbod)
- Various subsetter improvements. (Garret Rieger, Qunxin Liu, Behdad Esfahbod)
- Various documentation improvements.
  (Behdad Esfahbod, Matthias Clasen, Khaled Hosny)
- Significantly reduced memory use during shaping. (Behdad Esfahbod)
- Greatly reduced memory use during subsetting “CFF” table. (Behdad Esfahbod)

@@ -946,7 +968,7 @@ Tuesday, March 16, 2021
  Previously these were shaped using the generalized Arabic shaper. (David Corbett)
- Fix regression in shaping of U+0B55 ORIYA SIGN OVERLINE. (David Corbett)
- Update language tags. (David Corbett)
- Variations: reduce error: do not round each interpolated delta. (Just van Rossum)
- Documentation improvements. (Khaled Hosny, Nathan Willis)
- Subsetter improvements: subsets most, if not all, lookup types now. (Garret Rieger, Qunxin Liu)
- Fuzzer-found fixes and other improvements when memory failures happen. (Behdad)

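The two instancing items in the 8.3.1 notes above (the extended “--variations” syntax and the new hb_subset_input_pin_all_axes_to_default() API) can be exercised as in the following sketch. This is illustrative only: the exact signature of hb_subset_input_pin_all_axes_to_default() is assumed here to be (input, face) returning hb_bool_t, mirroring the existing hb_subset_input_pin_axis_to_default(); check hb-subset.h in this release for the real prototype.

/* Sketch: build a static instance of a variable font with every axis pinned
 * to its default location, using the subset API.  Error handling kept minimal. */
#include <hb-subset.h>

static hb_face_t *
pin_all_axes_to_default (const char *path)
{
  hb_blob_t *blob = hb_blob_create_from_file_or_fail (path);  /* NULL on failure */
  if (!blob) return NULL;

  hb_face_t *face = hb_face_create (blob, 0);
  hb_blob_destroy (blob);

  hb_subset_input_t *input = hb_subset_input_create_or_fail ();
  if (!input) { hb_face_destroy (face); return NULL; }

  hb_subset_input_keep_everything (input);                 /* retain all glyphs and tables */
  hb_subset_input_pin_all_axes_to_default (input, face);   /* new in 8.3.1; assumed signature */

  hb_face_t *instance = hb_subset_or_fail (face, input);   /* NULL on failure */
  hb_subset_input_destroy (input);
  hb_face_destroy (face);
  return instance;
}

The command-line equivalent, per the entry above, is hb-subset --variations="*=drop" font.ttf -o font-static.ttf, while --variations="wght=:500:" moves only the wght default and leaves its min and max untouched.
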
@@ -2,7 +2,7 @@
[![CircleCI Build Status](https://circleci.com/gh/harfbuzz/harfbuzz/tree/main.svg?style=svg)](https://circleci.com/gh/harfbuzz/harfbuzz/tree/main)
[![OSS-Fuzz Status](https://oss-fuzz-build-logs.storage.googleapis.com/badges/harfbuzz.svg)](https://oss-fuzz-build-logs.storage.googleapis.com/index.html)
[![Coverity Scan Build Status](https://scan.coverity.com/projects/15166/badge.svg)](https://scan.coverity.com/projects/harfbuzz)
-[![Codacy Badge](https://app.codacy.com/project/badge/Grade/89c872f5ce1c42af802602bfcd15d90a)](https://www.codacy.com/gh/harfbuzz/harfbuzz/dashboard?utm_source=github.com&utm_medium=referral&utm_content=harfbuzz/harfbuzz&utm_campaign=Badge_Grade)
+[![Codacy Badge](https://app.codacy.com/project/badge/Grade/89c872f5ce1c42af802602bfcd15d90a)](https://app.codacy.com/gh/harfbuzz/harfbuzz/dashboard?utm_source=gh&utm_medium=referral&utm_content=&utm_campaign=Badge_grade)
[![Codecov Code Coverage](https://codecov.io/gh/harfbuzz/harfbuzz/branch/main/graph/badge.svg)](https://codecov.io/gh/harfbuzz/harfbuzz)
[![Packaging status](https://repology.org/badge/tiny-repos/harfbuzz.svg)](https://repology.org/project/harfbuzz/versions)
[![OpenSSF Scorecard](https://api.securityscorecards.dev/projects/github.com/harfbuzz/harfbuzz/badge)](https://securityscorecards.dev/viewer/?uri=github.com/harfbuzz/harfbuzz)

@@ -1,6 +1,6 @@
AC_PREREQ([2.64])
AC_INIT([HarfBuzz],
-[8.3.0],
+[8.3.1],
[https://github.com/harfbuzz/harfbuzz/issues/new],
[harfbuzz],
[http://harfbuzz.org/])

@@ -20,11 +20,11 @@ origin:

# Human-readable identifier for this version/release
# Generally "version NNN", "tag SSS", "bookmark SSS"
-release: 8.3.0 (2023-11-11T16:07:57+02:00).
+release: 8.3.1 (2024-03-17T07:50:59+02:00).

# Revision to pull in
# Must be a long or short commit SHA (long preferred)
-revision: 8.3.0
+revision: 8.3.1

# The package's license, where possible using the mnemonic from
# https://spdx.org/licenses/

@@ -174,7 +174,7 @@ libharfbuzz_la_CPPFLAGS = $(HBCFLAGS) $(CODE_COVERAGE_CFLAGS)
libharfbuzz_la_LDFLAGS = $(base_link_flags) $(export_symbols) $(CODE_COVERAGE_LDFLAGS)
libharfbuzz_la_LIBADD = $(HBLIBS)
EXTRA_libharfbuzz_la_DEPENDENCIES = $(harfbuzz_def_dependency)
-pkginclude_HEADERS = $(HBHEADERS)
+pkginclude_HEADERS = $(HBHEADERS) hb-features.h
nodist_pkginclude_HEADERS =
pkgconfigdir = $(libdir)/pkgconfig
pkgconfig_DATA = harfbuzz.pc

@@ -533,11 +533,11 @@ test_instancer_solver_SOURCES = test-subset-instancer-solver.cc hb-subset-instan
test_instancer_solver_CPPFLAGS = $(COMPILED_TESTS_CPPFLAGS)
test_instancer_solver_LDADD = $(COMPILED_TESTS_LDADD)

-test_tuple_varstore_SOURCES = test-tuple-varstore.cc hb-subset-instancer-solver.cc hb-static.cc
+test_tuple_varstore_SOURCES = test-tuple-varstore.cc hb-subset-instancer-solver.cc hb-subset-instancer-iup.cc hb-static.cc
test_tuple_varstore_CPPFLAGS = $(COMPILED_TESTS_CPPFLAGS)
test_tuple_varstore_LDADD = $(COMPILED_TESTS_LDADD)

-test_item_varstore_SOURCES = test-item-varstore.cc hb-subset-instancer-solver.cc hb-static.cc
+test_item_varstore_SOURCES = test-item-varstore.cc hb-subset-instancer-solver.cc hb-subset-instancer-iup.cc hb-static.cc
test_item_varstore_CPPFLAGS = $(COMPILED_TESTS_CPPFLAGS)
test_item_varstore_LDADD = $(COMPILED_TESTS_LDADD)

@@ -368,6 +368,8 @@ HB_SUBSET_sources = \
hb-subset-cff2.cc \
hb-subset-input.cc \
hb-subset-input.hh \
+hb-subset-instancer-iup.hh \
+hb-subset-instancer-iup.cc \
hb-subset-instancer-solver.hh \
hb-subset-instancer-solver.cc \
hb-subset-accelerator.hh \

@@ -68,7 +68,7 @@ public:
hb_font_t *font;
unsigned int palette_index;
hb_color_t foreground;
-VarStoreInstancer &instancer;
+ItemVarStoreInstancer &instancer;
hb_map_t current_glyphs;
hb_map_t current_layers;
int depth_left = HB_MAX_NESTING_LEVEL;

@@ -80,7 +80,7 @@ public:
hb_font_t *font_,
unsigned int palette_,
hb_color_t foreground_,
-VarStoreInstancer &instancer_) :
+ItemVarStoreInstancer &instancer_) :
base (base_),
funcs (funcs_),
data (data_),

@@ -245,7 +245,7 @@ struct Variable
{ value.closurev1 (c); }

bool subset (hb_subset_context_t *c,
-const VarStoreInstancer &instancer) const
+const ItemVarStoreInstancer &instancer) const
{
TRACE_SUBSET (this);
if (!value.subset (c, instancer, varIdxBase)) return_trace (false);

@@ -270,7 +270,7 @@ struct Variable

void get_color_stop (hb_paint_context_t *c,
hb_color_stop_t *stop,
-const VarStoreInstancer &instancer) const
+const ItemVarStoreInstancer &instancer) const
{
value.get_color_stop (c, stop, varIdxBase, instancer);
}

@@ -305,7 +305,7 @@ struct NoVariable
{ value.closurev1 (c); }

bool subset (hb_subset_context_t *c,
-const VarStoreInstancer &instancer) const
+const ItemVarStoreInstancer &instancer) const
{
TRACE_SUBSET (this);
return_trace (value.subset (c, instancer, varIdxBase));

@@ -325,7 +325,7 @@ struct NoVariable

void get_color_stop (hb_paint_context_t *c,
hb_color_stop_t *stop,
-const VarStoreInstancer &instancer) const
+const ItemVarStoreInstancer &instancer) const
{
value.get_color_stop (c, stop, VarIdx::NO_VARIATION, instancer);
}

@@ -348,7 +348,7 @@ struct ColorStop
{ c->add_palette_index (paletteIndex); }

bool subset (hb_subset_context_t *c,
-const VarStoreInstancer &instancer,
+const ItemVarStoreInstancer &instancer,
uint32_t varIdxBase) const
{
TRACE_SUBSET (this);

@@ -374,7 +374,7 @@ struct ColorStop
void get_color_stop (hb_paint_context_t *c,
hb_color_stop_t *out,
uint32_t varIdx,
-const VarStoreInstancer &instancer) const
+const ItemVarStoreInstancer &instancer) const
{
out->offset = stopOffset.to_float(instancer (varIdx, 0));
out->color = c->get_color (paletteIndex,

@@ -410,7 +410,7 @@ struct ColorLine
}

bool subset (hb_subset_context_t *c,
-const VarStoreInstancer &instancer) const
+const ItemVarStoreInstancer &instancer) const
{
TRACE_SUBSET (this);
auto *out = c->serializer->start_embed (this);

@@ -439,7 +439,7 @@ struct ColorLine
unsigned int start,
unsigned int *count,
hb_color_stop_t *color_stops,
-const VarStoreInstancer &instancer) const
+const ItemVarStoreInstancer &instancer) const
{
unsigned int len = stops.len;

@@ -543,7 +543,7 @@ struct Affine2x3
}

bool subset (hb_subset_context_t *c,
-const VarStoreInstancer &instancer,
+const ItemVarStoreInstancer &instancer,
uint32_t varIdxBase) const
{
TRACE_SUBSET (this);

@@ -588,7 +588,7 @@ struct PaintColrLayers
void closurev1 (hb_colrv1_closure_context_t* c) const;

bool subset (hb_subset_context_t *c,
-const VarStoreInstancer &instancer HB_UNUSED) const
+const ItemVarStoreInstancer &instancer HB_UNUSED) const
{
TRACE_SUBSET (this);
auto *out = c->serializer->embed (this);

@@ -620,7 +620,7 @@ struct PaintSolid
{ c->add_palette_index (paletteIndex); }

bool subset (hb_subset_context_t *c,
-const VarStoreInstancer &instancer,
+const ItemVarStoreInstancer &instancer,
uint32_t varIdxBase) const
{
TRACE_SUBSET (this);

@@ -669,7 +669,7 @@ struct PaintLinearGradient
{ (this+colorLine).closurev1 (c); }

bool subset (hb_subset_context_t *c,
-const VarStoreInstancer &instancer,
+const ItemVarStoreInstancer &instancer,
uint32_t varIdxBase) const
{
TRACE_SUBSET (this);

@@ -736,7 +736,7 @@ struct PaintRadialGradient
{ (this+colorLine).closurev1 (c); }

bool subset (hb_subset_context_t *c,
-const VarStoreInstancer &instancer,
+const ItemVarStoreInstancer &instancer,
uint32_t varIdxBase) const
{
TRACE_SUBSET (this);

@@ -803,7 +803,7 @@ struct PaintSweepGradient
{ (this+colorLine).closurev1 (c); }

bool subset (hb_subset_context_t *c,
-const VarStoreInstancer &instancer,
+const ItemVarStoreInstancer &instancer,
uint32_t varIdxBase) const
{
TRACE_SUBSET (this);

@@ -863,7 +863,7 @@ struct PaintGlyph
void closurev1 (hb_colrv1_closure_context_t* c) const;

bool subset (hb_subset_context_t *c,
-const VarStoreInstancer &instancer) const
+const ItemVarStoreInstancer &instancer) const
{
TRACE_SUBSET (this);
auto *out = c->serializer->embed (this);

@@ -906,7 +906,7 @@ struct PaintColrGlyph
void closurev1 (hb_colrv1_closure_context_t* c) const;

bool subset (hb_subset_context_t *c,
-const VarStoreInstancer &instancer HB_UNUSED) const
+const ItemVarStoreInstancer &instancer HB_UNUSED) const
{
TRACE_SUBSET (this);
auto *out = c->serializer->embed (this);

@@ -936,7 +936,7 @@ struct PaintTransform
HB_INTERNAL void closurev1 (hb_colrv1_closure_context_t* c) const;

bool subset (hb_subset_context_t *c,
-const VarStoreInstancer &instancer) const
+const ItemVarStoreInstancer &instancer) const
{
TRACE_SUBSET (this);
auto *out = c->serializer->embed (this);

@@ -975,7 +975,7 @@ struct PaintTranslate
HB_INTERNAL void closurev1 (hb_colrv1_closure_context_t* c) const;

bool subset (hb_subset_context_t *c,
-const VarStoreInstancer &instancer,
+const ItemVarStoreInstancer &instancer,
uint32_t varIdxBase) const
{
TRACE_SUBSET (this);

@@ -1024,7 +1024,7 @@ struct PaintScale
HB_INTERNAL void closurev1 (hb_colrv1_closure_context_t* c) const;

bool subset (hb_subset_context_t *c,
-const VarStoreInstancer &instancer,
+const ItemVarStoreInstancer &instancer,
uint32_t varIdxBase) const
{
TRACE_SUBSET (this);

@@ -1073,7 +1073,7 @@ struct PaintScaleAroundCenter
HB_INTERNAL void closurev1 (hb_colrv1_closure_context_t* c) const;

bool subset (hb_subset_context_t *c,
-const VarStoreInstancer &instancer,
+const ItemVarStoreInstancer &instancer,
uint32_t varIdxBase) const
{
TRACE_SUBSET (this);

@@ -1132,7 +1132,7 @@ struct PaintScaleUniform
HB_INTERNAL void closurev1 (hb_colrv1_closure_context_t* c) const;

bool subset (hb_subset_context_t *c,
-const VarStoreInstancer &instancer,
+const ItemVarStoreInstancer &instancer,
uint32_t varIdxBase) const
{
TRACE_SUBSET (this);

@@ -1176,7 +1176,7 @@ struct PaintScaleUniformAroundCenter
HB_INTERNAL void closurev1 (hb_colrv1_closure_context_t* c) const;

bool subset (hb_subset_context_t *c,
-const VarStoreInstancer &instancer,
+const ItemVarStoreInstancer &instancer,
uint32_t varIdxBase) const
{
TRACE_SUBSET (this);

@@ -1232,7 +1232,7 @@ struct PaintRotate
HB_INTERNAL void closurev1 (hb_colrv1_closure_context_t* c) const;

bool subset (hb_subset_context_t *c,
-const VarStoreInstancer &instancer,
+const ItemVarStoreInstancer &instancer,
uint32_t varIdxBase) const
{
TRACE_SUBSET (this);

@@ -1276,7 +1276,7 @@ struct PaintRotateAroundCenter
HB_INTERNAL void closurev1 (hb_colrv1_closure_context_t* c) const;

bool subset (hb_subset_context_t *c,
-const VarStoreInstancer &instancer,
+const ItemVarStoreInstancer &instancer,
uint32_t varIdxBase) const
{
TRACE_SUBSET (this);

@@ -1332,7 +1332,7 @@ struct PaintSkew
HB_INTERNAL void closurev1 (hb_colrv1_closure_context_t* c) const;

bool subset (hb_subset_context_t *c,
-const VarStoreInstancer &instancer,
+const ItemVarStoreInstancer &instancer,
uint32_t varIdxBase) const
{
TRACE_SUBSET (this);

@@ -1381,7 +1381,7 @@ struct PaintSkewAroundCenter
HB_INTERNAL void closurev1 (hb_colrv1_closure_context_t* c) const;

bool subset (hb_subset_context_t *c,
-const VarStoreInstancer &instancer,
+const ItemVarStoreInstancer &instancer,
uint32_t varIdxBase) const
{
TRACE_SUBSET (this);

@@ -1440,7 +1440,7 @@ struct PaintComposite
void closurev1 (hb_colrv1_closure_context_t* c) const;

bool subset (hb_subset_context_t *c,
-const VarStoreInstancer &instancer) const
+const ItemVarStoreInstancer &instancer) const
{
TRACE_SUBSET (this);
auto *out = c->serializer->embed (this);

@@ -1491,7 +1491,7 @@ struct ClipBoxFormat1
return_trace (c->check_struct (this));
}

-void get_clip_box (ClipBoxData &clip_box, const VarStoreInstancer &instancer HB_UNUSED) const
+void get_clip_box (ClipBoxData &clip_box, const ItemVarStoreInstancer &instancer HB_UNUSED) const
{
clip_box.xMin = xMin;
clip_box.yMin = yMin;

@@ -1500,7 +1500,7 @@ struct ClipBoxFormat1
}

bool subset (hb_subset_context_t *c,
-const VarStoreInstancer &instancer,
+const ItemVarStoreInstancer &instancer,
uint32_t varIdxBase) const
{
TRACE_SUBSET (this);

@@ -1533,7 +1533,7 @@ struct ClipBoxFormat1

struct ClipBoxFormat2 : Variable<ClipBoxFormat1>
{
-void get_clip_box (ClipBoxData &clip_box, const VarStoreInstancer &instancer) const
+void get_clip_box (ClipBoxData &clip_box, const ItemVarStoreInstancer &instancer) const
{
value.get_clip_box(clip_box, instancer);
if (instancer)

@@ -1549,7 +1549,7 @@ struct ClipBoxFormat2 : Variable<ClipBoxFormat1>
struct ClipBox
{
bool subset (hb_subset_context_t *c,
-const VarStoreInstancer &instancer) const
+const ItemVarStoreInstancer &instancer) const
{
TRACE_SUBSET (this);
switch (u.format) {

@@ -1572,7 +1572,7 @@ struct ClipBox
}

bool get_extents (hb_glyph_extents_t *extents,
-const VarStoreInstancer &instancer) const
+const ItemVarStoreInstancer &instancer) const
{
ClipBoxData clip_box;
switch (u.format) {

@@ -1608,7 +1608,7 @@ struct ClipRecord

bool subset (hb_subset_context_t *c,
const void *base,
-const VarStoreInstancer &instancer) const
+const ItemVarStoreInstancer &instancer) const
{
TRACE_SUBSET (this);
auto *out = c->serializer->embed (*this);

@@ -1625,7 +1625,7 @@ struct ClipRecord

bool get_extents (hb_glyph_extents_t *extents,
const void *base,
-const VarStoreInstancer &instancer) const
+const ItemVarStoreInstancer &instancer) const
{
return (base+clipBox).get_extents (extents, instancer);
}

@@ -1642,7 +1642,7 @@ DECLARE_NULL_NAMESPACE_BYTES (OT, ClipRecord);
struct ClipList
{
unsigned serialize_clip_records (hb_subset_context_t *c,
-const VarStoreInstancer &instancer,
+const ItemVarStoreInstancer &instancer,
const hb_set_t& gids,
const hb_map_t& gid_offset_map) const
{

@@ -1695,7 +1695,7 @@ struct ClipList
}

bool subset (hb_subset_context_t *c,
-const VarStoreInstancer &instancer) const
+const ItemVarStoreInstancer &instancer) const
{
TRACE_SUBSET (this);
auto *out = c->serializer->start_embed (*this);

@@ -1735,7 +1735,7 @@ struct ClipList
bool
get_extents (hb_codepoint_t gid,
hb_glyph_extents_t *extents,
-const VarStoreInstancer &instancer) const
+const ItemVarStoreInstancer &instancer) const
{
auto *rec = clips.as_array ().bsearch (gid);
if (rec)

@@ -1855,7 +1855,7 @@ struct BaseGlyphPaintRecord

bool serialize (hb_serialize_context_t *s, const hb_map_t* glyph_map,
const void* src_base, hb_subset_context_t *c,
-const VarStoreInstancer &instancer) const
+const ItemVarStoreInstancer &instancer) const
{
TRACE_SERIALIZE (this);
auto *out = s->embed (this);

@@ -1884,7 +1884,7 @@ struct BaseGlyphPaintRecord
struct BaseGlyphList : SortedArray32Of<BaseGlyphPaintRecord>
{
bool subset (hb_subset_context_t *c,
-const VarStoreInstancer &instancer) const
+const ItemVarStoreInstancer &instancer) const
{
TRACE_SUBSET (this);
auto *out = c->serializer->start_embed (this);

@@ -1916,7 +1916,7 @@ struct LayerList : Array32OfOffset32To<Paint>
{ return this+(*this)[i]; }

bool subset (hb_subset_context_t *c,
-const VarStoreInstancer &instancer) const
+const ItemVarStoreInstancer &instancer) const
{
TRACE_SUBSET (this);
auto *out = c->serializer->start_embed (this);

@@ -2206,7 +2206,7 @@ struct COLR
auto snap = c->serializer->snapshot ();
if (!c->serializer->allocate_size<void> (5 * HBUINT32::static_size)) return_trace (false);

-VarStoreInstancer instancer (varStore ? &(this+varStore) : nullptr,
+ItemVarStoreInstancer instancer (varStore ? &(this+varStore) : nullptr,
varIdxMap ? &(this+varIdxMap) : nullptr,
c->plan->normalized_coords.as_array ());

@@ -2250,7 +2250,7 @@ struct COLR
if (version != 1)
return false;

-VarStoreInstancer instancer (&(this+varStore),
+ItemVarStoreInstancer instancer (&(this+varStore),
&(this+varIdxMap),
hb_array (font->coords, font->num_coords));

@@ -2301,7 +2301,7 @@ struct COLR

bool get_clip (hb_codepoint_t glyph,
hb_glyph_extents_t *extents,
-const VarStoreInstancer instancer) const
+const ItemVarStoreInstancer instancer) const
{
return (this+clipList).get_extents (glyph,
extents,

@@ -2312,7 +2312,7 @@ struct COLR
bool
paint_glyph (hb_font_t *font, hb_codepoint_t glyph, hb_paint_funcs_t *funcs, void *data, unsigned int palette_index, hb_color_t foreground, bool clip = true) const
{
-VarStoreInstancer instancer (&(this+varStore),
+ItemVarStoreInstancer instancer (&(this+varStore),
&(this+varIdxMap),
hb_array (font->coords, font->num_coords));
hb_paint_context_t c (this, funcs, data, font, palette_index, foreground, instancer);

@@ -2327,7 +2327,7 @@ struct COLR
{
// COLRv1 glyph

-VarStoreInstancer instancer (&(this+varStore),
+ItemVarStoreInstancer instancer (&(this+varStore),
&(this+varIdxMap),
hb_array (font->coords, font->num_coords));

@@ -2413,7 +2413,7 @@ struct COLR
Offset32To<LayerList> layerList;
Offset32To<ClipList> clipList; // Offset to ClipList table (may be NULL)
Offset32To<DeltaSetIndexMap> varIdxMap; // Offset to DeltaSetIndexMap table (may be NULL)
-Offset32To<VariationStore> varStore;
+Offset32To<ItemVariationStore> varStore;
public:
DEFINE_SIZE_MIN (14);
};

@@ -189,7 +189,7 @@ struct CaretValueFormat3
friend struct CaretValue;

hb_position_t get_caret_value (hb_font_t *font, hb_direction_t direction,
-const VariationStore &var_store) const
+const ItemVariationStore &var_store) const
{
return HB_DIRECTION_IS_HORIZONTAL (direction) ?
font->em_scale_x (coordinate) + (this+deviceTable).get_x_delta (font, var_store) :

@@ -251,7 +251,7 @@ struct CaretValue
hb_position_t get_caret_value (hb_font_t *font,
hb_direction_t direction,
hb_codepoint_t glyph_id,
-const VariationStore &var_store) const
+const ItemVariationStore &var_store) const
{
switch (u.format) {
case 1: return u.format1.get_caret_value (font, direction);

@@ -316,7 +316,7 @@ struct LigGlyph
unsigned get_lig_carets (hb_font_t *font,
hb_direction_t direction,
hb_codepoint_t glyph_id,
-const VariationStore &var_store,
+const ItemVariationStore &var_store,
unsigned start_offset,
unsigned *caret_count /* IN/OUT */,
hb_position_t *caret_array /* OUT */) const

@@ -372,7 +372,7 @@ struct LigCaretList
unsigned int get_lig_carets (hb_font_t *font,
hb_direction_t direction,
hb_codepoint_t glyph_id,
-const VariationStore &var_store,
+const ItemVariationStore &var_store,
unsigned int start_offset,
unsigned int *caret_count /* IN/OUT */,
hb_position_t *caret_array /* OUT */) const

@@ -609,7 +609,7 @@ struct GDEFVersion1_2
* definitions--from beginning of GDEF
* header (may be NULL). Introduced
* in version 0x00010002. */
-Offset32To<VariationStore>
+Offset32To<ItemVariationStore>
varStore; /* Offset to the table of Item Variation
* Store--from beginning of GDEF
* header (may be NULL). Introduced

@@ -884,14 +884,14 @@ struct GDEF
default: return false;
}
}
-const VariationStore &get_var_store () const
+const ItemVariationStore &get_var_store () const
{
switch (u.version.major) {
-case 1: return u.version.to_int () >= 0x00010003u ? this+u.version1.varStore : Null(VariationStore);
+case 1: return u.version.to_int () >= 0x00010003u ? this+u.version1.varStore : Null(ItemVariationStore);
#ifndef HB_NO_BEYOND_64K
case 2: return this+u.version2.varStore;
#endif
-default: return Null(VariationStore);
+default: return Null(ItemVariationStore);
}
}

@@ -1011,7 +1011,7 @@ struct GDEF
hb_hashmap_t<unsigned, hb_pair_t<unsigned, int>> *layout_variation_idx_delta_map /* OUT */) const
{
if (!has_var_store ()) return;
-const VariationStore &var_store = get_var_store ();
+const ItemVariationStore &var_store = get_var_store ();
float *store_cache = var_store.create_cache ();

unsigned new_major = 0, new_minor = 0;

@@ -324,17 +324,8 @@ struct PairPosFormat2_4 : ValueBase
}
}

-const hb_set_t &glyphset = *c->plan->glyphset_gsub ();
-const hb_map_t &glyph_map = *c->plan->glyph_map;
-
-auto it =
-+ hb_iter (this+coverage)
-| hb_filter (glyphset)
-| hb_map_retains_sorting (glyph_map)
-;
-
-out->coverage.serialize_serialize (c->serializer, it);
-return_trace (out->class1Count && out->class2Count && bool (it));
+bool ret = out->coverage.serialize_subset(c, coverage, this);
+return_trace (out->class1Count && out->class2Count && ret);
}

@@ -116,7 +116,7 @@ struct ValueFormat : HBUINT16

if (!use_x_device && !use_y_device) return ret;

-const VariationStore &store = c->var_store;
+const ItemVariationStore &store = c->var_store;
auto *cache = c->var_store_cache;

/* pixel -> fractional pixel */

@@ -240,7 +240,8 @@ struct CompositeGlyphRecord
}
if (is_anchored ()) tx = ty = 0;

-trans.init ((float) tx, (float) ty);
+/* set is_end_point flag to true, used by IUP delta optimization */
+trans.init ((float) tx, (float) ty, true);

{
const F2DOT14 *points = (const F2DOT14 *) p;

@@ -103,6 +103,9 @@ struct Glyph
}
}

+bool is_composite () const
+{ return type == COMPOSITE; }

bool get_all_points_without_var (const hb_face_t *face,
contour_point_vector_t &points /* OUT */) const
{

@@ -38,7 +38,7 @@ _write_loca (IteratorIn&& it,

unsigned padded_size = *it++;
offset += padded_size;
-DEBUG_MSG (SUBSET, nullptr, "loca entry gid %u offset %u padded-size %u", gid, offset, padded_size);
+DEBUG_MSG (SUBSET, nullptr, "loca entry gid %" PRIu32 " offset %u padded-size %u", gid, offset, padded_size);
value = offset >> right_shift;
*dest++ = value;

@@ -23,6 +23,7 @@ if '--experimental-api' not in sys.argv:
hb_subset_repack_or_fail
hb_subset_input_override_name_table
hb_subset_input_set_axis_range
hb_subset_input_get_axis_range
""".splitlines ()
symbols = [x for x in symbols if x not in experimental_symbols]
symbols = "\n".join (symbols)

@@ -584,7 +584,7 @@ class BCP47Parser (object):
self.grandfathered.add (subtag.lower ())
elif line.startswith ('Description: '):
description = line.split (' ', 1)[1].replace (' (individual language)', '')
-description = re.sub (' (\(family\)|\((individual |macro)language\)|languages)$', '',
+description = re.sub (r' (\(family\)|\((individual |macro)language\)|languages)$', '',
description)
if subtag in self.names:
self.names[subtag] += '\n' + description

@@ -134,20 +134,23 @@ struct ClassDef : public OT::ClassDef

struct class_def_size_estimator_t
{
// TODO(garretrieger): update to support beyond64k coverage/classdef tables.
constexpr static unsigned class_def_format1_base_size = 6;
constexpr static unsigned class_def_format2_base_size = 4;
constexpr static unsigned coverage_base_size = 4;
constexpr static unsigned bytes_per_range = 6;
constexpr static unsigned bytes_per_glyph = 2;

template<typename It>
class_def_size_estimator_t (It glyph_and_class)
-: gids_consecutive (true), num_ranges_per_class (), glyphs_per_class ()
+: num_ranges_per_class (), glyphs_per_class ()
{
-unsigned last_gid = (unsigned) -1;
+reset();
for (auto p : + glyph_and_class)
{
unsigned gid = p.first;
unsigned klass = p.second;

if (last_gid != (unsigned) -1 && gid != last_gid + 1)
gids_consecutive = false;
last_gid = gid;

hb_set_t* glyphs;
if (glyphs_per_class.has (klass, &glyphs) && glyphs) {
glyphs->add (gid);

@@ -177,28 +180,54 @@ struct class_def_size_estimator_t
}
}

// Incremental increase in the Coverage and ClassDef table size
// (worst case) if all glyphs associated with 'klass' were added.
unsigned incremental_coverage_size (unsigned klass) const
{
// Coverage takes 2 bytes per glyph worst case,
return 2 * glyphs_per_class.get (klass).get_population ();
void reset() {
class_def_1_size = class_def_format1_base_size;
class_def_2_size = class_def_format2_base_size;
included_glyphs.clear();
included_classes.clear();
}

// Incremental increase in the Coverage and ClassDef table size
// (worst case) if all glyphs associated with 'klass' were added.
-unsigned incremental_class_def_size (unsigned klass) const
+// Compute the size of coverage for all glyphs added via 'add_class_def_size'.
+unsigned coverage_size () const
{
// ClassDef takes 6 bytes per range
unsigned class_def_2_size = 6 * num_ranges_per_class.get (klass);
if (gids_consecutive)
{
// ClassDef1 takes 2 bytes per glyph, but only can be used
// when gids are consecutive.
return hb_min (2 * glyphs_per_class.get (klass).get_population (), class_def_2_size);
unsigned format1_size = coverage_base_size + bytes_per_glyph * included_glyphs.get_population();
unsigned format2_size = coverage_base_size + bytes_per_range * num_glyph_ranges();
return hb_min(format1_size, format2_size);
}

// Compute the new size of the ClassDef table if all glyphs associated with 'klass' were added.
unsigned add_class_def_size (unsigned klass)
{
if (!included_classes.has(klass)) {
hb_set_t* glyphs = nullptr;
if (glyphs_per_class.has(klass, &glyphs)) {
included_glyphs.union_(*glyphs);
}

class_def_1_size = class_def_format1_base_size;
if (!included_glyphs.is_empty()) {
unsigned min_glyph = included_glyphs.get_min();
unsigned max_glyph = included_glyphs.get_max();
class_def_1_size += bytes_per_glyph * (max_glyph - min_glyph + 1);
}

class_def_2_size += bytes_per_range * num_ranges_per_class.get (klass);

included_classes.add(klass);
}

-return class_def_2_size;
+return hb_min (class_def_1_size, class_def_2_size);
}

unsigned num_glyph_ranges() const {
hb_codepoint_t start = HB_SET_VALUE_INVALID;
hb_codepoint_t end = HB_SET_VALUE_INVALID;

unsigned count = 0;
while (included_glyphs.next_range (&start, &end)) {
count++;
}
return count;
}

bool in_error ()

@@ -214,9 +243,12 @@ struct class_def_size_estimator_t
}

private:
-bool gids_consecutive;
hb_hashmap_t<unsigned, unsigned> num_ranges_per_class;
hb_hashmap_t<unsigned, hb_set_t> glyphs_per_class;
+hb_set_t included_classes;
+hb_set_t included_glyphs;
+unsigned class_def_1_size;
+unsigned class_def_2_size;
};

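A quick worked reading of the size model above, using the constants introduced in this hunk (class_def_format1_base_size = 6, class_def_format2_base_size = 4, coverage_base_size = 4, bytes_per_glyph = 2, bytes_per_range = 6): if a single class covers the consecutive glyphs 1–4, ClassDefFormat1 would cost 6 + 2 × 4 = 14 bytes while ClassDefFormat2 would cost 4 + 6 × 1 = 10 bytes, so add_class_def_size() reports the minimum, 10; the matching coverage_size() is min(4 + 2 × 4, 4 + 6 × 1) = 10 bytes. The test data later in this patch (the "range 1-4 (f1: 8 bytes), (f2: 6 bytes)" comments) quotes the same per-range payloads without the table headers.
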
@@ -195,6 +195,15 @@ struct graph_t
return incoming_edges_;
}

unsigned incoming_edges_from_parent (unsigned parent_index) const {
if (single_parent != (unsigned) -1) {
return single_parent == parent_index ? 1 : 0;
}

unsigned* count;
return parents.has(parent_index, &count) ? *count : 0;
}

void reset_parents ()
{
incoming_edges_ = 0;

@@ -334,6 +343,16 @@ struct graph_t
return true;
}

bool give_max_priority ()
{
bool result = false;
while (!has_max_priority()) {
result = true;
priority++;
}
return result;
}

bool has_max_priority () const {
return priority >= 3;
}

@@ -1023,6 +1042,11 @@ struct graph_t
* Creates a copy of child and re-assigns the link from
* parent to the clone. The copy is a shallow copy, objects
* linked from child are not duplicated.
*
* Returns the index of the newly created duplicate.
*
* If the child_idx only has incoming edges from parent_idx, this
* will do nothing and return the original child_idx.
*/
unsigned duplicate_if_shared (unsigned parent_idx, unsigned child_idx)
{

@@ -1036,18 +1060,20 @@ struct graph_t
* Creates a copy of child and re-assigns the link from
* parent to the clone. The copy is a shallow copy, objects
* linked from child are not duplicated.
*
* Returns the index of the newly created duplicate.
*
* If the child_idx only has incoming edges from parent_idx,
* duplication isn't possible and this will return -1.
*/
unsigned duplicate (unsigned parent_idx, unsigned child_idx)
{
update_parents ();

-unsigned links_to_child = 0;
-for (const auto& l : vertices_[parent_idx].obj.all_links ())
-{
-if (l.objidx == child_idx) links_to_child++;
-}
+const auto& child = vertices_[child_idx];
+unsigned links_to_child = child.incoming_edges_from_parent(parent_idx);

-if (vertices_[child_idx].incoming_edges () <= links_to_child)
+if (child.incoming_edges () <= links_to_child)
{
// Can't duplicate this node, doing so would orphan the original one as all remaining links
// to child are from parent.

@@ -1060,7 +1086,7 @@ struct graph_t
parent_idx, child_idx);

unsigned clone_idx = duplicate (child_idx);
-if (clone_idx == (unsigned) -1) return false;
+if (clone_idx == (unsigned) -1) return -1;
// duplicate shifts the root node idx, so if parent_idx was root update it.
if (parent_idx == clone_idx) parent_idx++;

@@ -1076,6 +1102,62 @@ struct graph_t
return clone_idx;
}

/*
* Creates a copy of child and re-assigns the links from
* parents to the clone. The copy is a shallow copy, objects
* linked from child are not duplicated.
*
* Returns the index of the newly created duplicate.
*
* If the child_idx only has incoming edges from parents,
* duplication isn't possible or duplication fails and this will
* return -1.
*/
unsigned duplicate (const hb_set_t* parents, unsigned child_idx)
{
if (parents->is_empty()) {
return -1;
}

update_parents ();

const auto& child = vertices_[child_idx];
unsigned links_to_child = 0;
unsigned last_parent = parents->get_max();
unsigned first_parent = parents->get_min();
for (unsigned parent_idx : *parents) {
links_to_child += child.incoming_edges_from_parent(parent_idx);
}

if (child.incoming_edges () <= links_to_child)
{
// Can't duplicate this node, doing so would orphan the original one as all remaining links
// to child are from parent.
DEBUG_MSG (SUBSET_REPACK, nullptr, "  Not duplicating %u, ..., %u => %u", first_parent, last_parent, child_idx);
return -1;
}

DEBUG_MSG (SUBSET_REPACK, nullptr, "  Duplicating %u, ..., %u => %u", first_parent, last_parent, child_idx);

unsigned clone_idx = duplicate (child_idx);
if (clone_idx == (unsigned) -1) return false;

for (unsigned parent_idx : *parents) {
// duplicate shifts the root node idx, so if parent_idx was root update it.
if (parent_idx == clone_idx) parent_idx++;
auto& parent = vertices_[parent_idx];
for (auto& l : parent.obj.all_links_writer ())
{
if (l.objidx != child_idx)
continue;

reassign_link (l, parent_idx, clone_idx);
}
}

return clone_idx;
}


/*
* Adds a new node to the graph, not connected to anything.

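To make the duplicate() return conventions above concrete: if a node C has three incoming links, two from parent A and one from parent B, then duplicate(parents = {A}, C) clones C, repoints A's two links at the clone, and leaves the original C still reachable through B. Calling duplicate(parents = {A, B}, C) instead would leave the original C with no remaining parents, so the method refuses and returns -1, just as the single-parent duplicate(parent_idx, child_idx) above returns -1 when every incoming edge of the child comes from that one parent.
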
@@ -247,8 +247,8 @@ struct PairPosFormat2 : public OT::Layout::GPOS_impl::PairPosFormat2_4<SmallType
for (unsigned i = 0; i < class1_count; i++)
{
unsigned accumulated_delta = class1_record_size;
-coverage_size += estimator.incremental_coverage_size (i);
-class_def_1_size += estimator.incremental_class_def_size (i);
+class_def_1_size = estimator.add_class_def_size (i);
+coverage_size = estimator.coverage_size ();
max_coverage_size = hb_max (max_coverage_size, coverage_size);
max_class_def_1_size = hb_max (max_class_def_1_size, class_def_1_size);

@@ -280,8 +280,10 @@ struct PairPosFormat2 : public OT::Layout::GPOS_impl::PairPosFormat2_4<SmallType
split_points.push (i);
// split does not include i, so add the size for i when we reset the size counters.
accumulated = base_size + accumulated_delta;
-coverage_size = 4 + estimator.incremental_coverage_size (i);
-class_def_1_size = 4 + estimator.incremental_class_def_size (i);
+
+estimator.reset();
+class_def_1_size = estimator.add_class_def_size(i);
+coverage_size = estimator.coverage_size();
visited.clear (); // node sharing isn't allowed between splits.
}
}

@@ -26,27 +26,119 @@

#include "gsubgpos-context.hh"
#include "classdef-graph.hh"
#include "hb-iter.hh"
#include "hb-serialize.hh"

typedef hb_codepoint_pair_t gid_and_class_t;
typedef hb_vector_t<gid_and_class_t> gid_and_class_list_t;

template<typename It>
static unsigned actual_class_def_size(It glyph_and_class) {
char buffer[100];
hb_serialize_context_t serializer(buffer, 100);
OT::ClassDef_serialize (&serializer, glyph_and_class);
serializer.end_serialize ();
assert(!serializer.in_error());

-static bool incremental_size_is (const gid_and_class_list_t& list, unsigned klass,
-unsigned cov_expected, unsigned class_def_expected)
hb_blob_t* blob = serializer.copy_blob();
unsigned size = hb_blob_get_length(blob);
hb_blob_destroy(blob);
return size;
}

static unsigned actual_class_def_size(gid_and_class_list_t consecutive_map, hb_vector_t<unsigned> classes) {
auto filtered_it =
+ consecutive_map.as_sorted_array().iter()
| hb_filter([&] (unsigned c) {
for (unsigned klass : classes) {
if (c == klass) {
return true;
}
}
return false;
}, hb_second);
return actual_class_def_size(+ filtered_it);
}

template<typename It>
static unsigned actual_coverage_size(It glyphs) {
char buffer[100];
hb_serialize_context_t serializer(buffer, 100);
OT::Layout::Common::Coverage_serialize (&serializer, glyphs);
serializer.end_serialize ();
assert(!serializer.in_error());

hb_blob_t* blob = serializer.copy_blob();
unsigned size = hb_blob_get_length(blob);
hb_blob_destroy(blob);
return size;
}

static unsigned actual_coverage_size(gid_and_class_list_t consecutive_map, hb_vector_t<unsigned> classes) {
auto filtered_it =
+ consecutive_map.as_sorted_array().iter()
| hb_filter([&] (unsigned c) {
for (unsigned klass : classes) {
if (c == klass) {
return true;
}
}
return false;
}, hb_second);
return actual_coverage_size(+ filtered_it | hb_map_retains_sorting(hb_first));
}

static bool check_coverage_size(graph::class_def_size_estimator_t& estimator,
const gid_and_class_list_t& map,
hb_vector_t<unsigned> klasses)
{
graph::class_def_size_estimator_t estimator (list.iter ());
unsigned result = estimator.coverage_size();
unsigned expected = actual_coverage_size(map, klasses);
if (result != expected) {
printf ("FAIL: estimated coverage expected size %u but was %u\n", expected, result);
return false;
}
return true;
}

-unsigned result = estimator.incremental_coverage_size (klass);
-if (result != cov_expected)
{
-printf ("FAIL: coverage expected size %u but was %u\n", cov_expected, result);
static bool check_add_class_def_size(graph::class_def_size_estimator_t& estimator,
const gid_and_class_list_t& map,
unsigned klass, hb_vector_t<unsigned> klasses)
{
unsigned result = estimator.add_class_def_size(klass);
unsigned expected = actual_class_def_size(map, klasses);
if (result != expected) {
printf ("FAIL: estimated class def expected size %u but was %u\n", expected, result);
return false;
}

-result = estimator.incremental_class_def_size (klass);
-if (result != class_def_expected)
return check_coverage_size(estimator, map, klasses);
}

static bool check_add_class_def_size (const gid_and_class_list_t& list, unsigned klass)
{
graph::class_def_size_estimator_t estimator (list.iter ());

unsigned result = estimator.add_class_def_size (klass);
auto filtered_it =
+ list.as_sorted_array().iter()
| hb_filter([&] (unsigned c) {
return c == klass;
}, hb_second);

unsigned expected = actual_class_def_size(filtered_it);
if (result != expected)
{
-printf ("FAIL: class def expected size %u but was %u\n", class_def_expected, result);
+printf ("FAIL: class def expected size %u but was %u\n", expected, result);
return false;
}

auto cov_it = + filtered_it | hb_map_retains_sorting(hb_first);
result = estimator.coverage_size ();
expected = actual_coverage_size(cov_it);
if (result != expected)
{
printf ("FAIL: coverage expected size %u but was %u\n", expected, result);
return false;
}

@@ -57,43 +149,45 @@ static void test_class_and_coverage_size_estimates ()
{
gid_and_class_list_t empty = {
};
-assert (incremental_size_is (empty, 0, 0, 0));
-assert (incremental_size_is (empty, 1, 0, 0));
+assert (check_add_class_def_size (empty, 0));
+assert (check_add_class_def_size (empty, 1));

gid_and_class_list_t class_zero = {
{5, 0},
};
-assert (incremental_size_is (class_zero, 0, 2, 0));
+assert (check_add_class_def_size (class_zero, 0));

gid_and_class_list_t consecutive = {
{4, 0},
{5, 0},

{6, 1},
{7, 1},

{8, 2},
{9, 2},
{10, 2},
{11, 2},
};
-assert (incremental_size_is (consecutive, 0, 4, 0));
-assert (incremental_size_is (consecutive, 1, 4, 4));
-assert (incremental_size_is (consecutive, 2, 8, 6));
+assert (check_add_class_def_size (consecutive, 0));
+assert (check_add_class_def_size (consecutive, 1));
+assert (check_add_class_def_size (consecutive, 2));

gid_and_class_list_t non_consecutive = {
{4, 0},
{5, 0},
{6, 0},

{6, 1},
{7, 1},
{8, 1},
{10, 1},

{9, 2},
{10, 2},
{11, 2},
{12, 2},
{13, 2},
};
-assert (incremental_size_is (non_consecutive, 0, 4, 0));
-assert (incremental_size_is (non_consecutive, 1, 4, 6));
-assert (incremental_size_is (non_consecutive, 2, 8, 6));
+assert (check_add_class_def_size (non_consecutive, 0));
+assert (check_add_class_def_size (non_consecutive, 1));
+assert (check_add_class_def_size (non_consecutive, 2));

gid_and_class_list_t multiple_ranges = {
{4, 0},

@@ -108,12 +202,95 @@ static void test_class_and_coverage_size_estimates ()
{12, 1},
{13, 1},
};
-assert (incremental_size_is (multiple_ranges, 0, 4, 0));
-assert (incremental_size_is (multiple_ranges, 1, 2 * 6, 3 * 6));
+assert (check_add_class_def_size (multiple_ranges, 0));
+assert (check_add_class_def_size (multiple_ranges, 1));
}

static void test_running_class_and_coverage_size_estimates () {
// #### With consecutive gids: switches formats ###
gid_and_class_list_t consecutive_map = {
// range 1-4 (f1: 8 bytes), (f2: 6 bytes)
{1, 1},
{2, 1},
{3, 1},
{4, 1},

// (f1: 2 bytes), (f2: 6 bytes)
{5, 2},

// (f1: 14 bytes), (f2: 6 bytes)
{6, 3},
{7, 3},
{8, 3},
{9, 3},
{10, 3},
{11, 3},
{12, 3},
};

graph::class_def_size_estimator_t estimator1(consecutive_map.iter());
assert(check_add_class_def_size(estimator1, consecutive_map, 1, {1}));
assert(check_add_class_def_size(estimator1, consecutive_map, 2, {1, 2}));
assert(check_add_class_def_size(estimator1, consecutive_map, 2, {1, 2})); // check that adding the same class again works
assert(check_add_class_def_size(estimator1, consecutive_map, 3, {1, 2, 3}));

estimator1.reset();
assert(check_add_class_def_size(estimator1, consecutive_map, 2, {2}));
assert(check_add_class_def_size(estimator1, consecutive_map, 3, {2, 3}));

// #### With non-consecutive gids: always uses format 2 ###
gid_and_class_list_t non_consecutive_map = {
// range 1-4 (f1: 8 bytes), (f2: 6 bytes)
{1, 1},
{2, 1},
{3, 1},
{4, 1},

// (f1: 2 bytes), (f2: 12 bytes)
{6, 2},
{8, 2},

// (f1: 14 bytes), (f2: 6 bytes)
{9, 3},
{10, 3},
{11, 3},
{12, 3},
{13, 3},
{14, 3},
{15, 3},
};

graph::class_def_size_estimator_t estimator2(non_consecutive_map.iter());
assert(check_add_class_def_size(estimator2, non_consecutive_map, 1, {1}));
assert(check_add_class_def_size(estimator2, non_consecutive_map, 2, {1, 2}));
assert(check_add_class_def_size(estimator2, non_consecutive_map, 3, {1, 2, 3}));

estimator2.reset();
assert(check_add_class_def_size(estimator2, non_consecutive_map, 2, {2}));
assert(check_add_class_def_size(estimator2, non_consecutive_map, 3, {2, 3}));
}

static void test_running_class_size_estimates_with_locally_consecutive_glyphs () {
gid_and_class_list_t map = {
{1, 1},
{6, 2},
{7, 3},
};

graph::class_def_size_estimator_t estimator(map.iter());
assert(check_add_class_def_size(estimator, map, 1, {1}));
assert(check_add_class_def_size(estimator, map, 2, {1, 2}));
assert(check_add_class_def_size(estimator, map, 3, {1, 2, 3}));

estimator.reset();
assert(check_add_class_def_size(estimator, map, 2, {2}));
assert(check_add_class_def_size(estimator, map, 3, {2, 3}));
}

int
main (int argc, char **argv)
{
test_class_and_coverage_size_estimates ();
test_running_class_and_coverage_size_estimates ();
test_running_class_size_estimates_with_locally_consecutive_glyphs ();
}

@@ -8,5 +8,6 @@ Description: HarfBuzz cairo integration
Version: %VERSION%

Requires: harfbuzz = %VERSION%
Requires.private: cairo
Libs: -L${libdir} -lharfbuzz-cairo
Cflags: -I${includedir}/harfbuzz

@@ -54,6 +54,7 @@
#include "hb-subset-cff1.cc"
#include "hb-subset-cff2.cc"
#include "hb-subset-input.cc"
+#include "hb-subset-instancer-iup.cc"
#include "hb-subset-instancer-solver.cc"
#include "hb-subset-plan.cc"
#include "hb-subset-repacker.cc"

@@ -552,6 +552,7 @@ struct LigatureSubtable
{
DEBUG_MSG (APPLY, nullptr, "Skipping ligature component");
if (unlikely (!buffer->move_to (match_positions[--match_length % ARRAY_LENGTH (match_positions)]))) return;
+buffer->cur().unicode_props() |= UPROPS_MASK_IGNORABLE;
if (unlikely (!buffer->replace_glyph (DELETED_GLYPH))) return;
}

@@ -671,7 +671,7 @@ struct hb_pair_t
return 0;
}

-friend void swap (hb_pair_t& a, hb_pair_t& b)
+friend void swap (hb_pair_t& a, hb_pair_t& b) noexcept
{
hb_swap (a.first, b.first);
hb_swap (a.second, b.second);

@@ -1053,6 +1053,18 @@ _hb_cmp_method (const void *pkey, const void *pval, Ts... ds)
return val.cmp (key, ds...);
}

template <typename K, typename V>
static int
_hb_cmp_operator (const void *pkey, const void *pval)
{
const K& key = * (const K*) pkey;
const V& val = * (const V*) pval;

if (key < val) return -1;
if (key > val) return 1;
return 0;
}

template <typename V, typename K, typename ...Ts>
static inline bool
hb_bsearch_impl (unsigned *pos, /* Out */

@@ -39,10 +39,10 @@ struct hb_bit_set_invertible_t

hb_bit_set_invertible_t () = default;
hb_bit_set_invertible_t (const hb_bit_set_invertible_t& o) = default;
-hb_bit_set_invertible_t (hb_bit_set_invertible_t&& other) : hb_bit_set_invertible_t () { hb_swap (*this, other); }
+hb_bit_set_invertible_t (hb_bit_set_invertible_t&& other) noexcept : hb_bit_set_invertible_t () { hb_swap (*this, other); }
hb_bit_set_invertible_t& operator= (const hb_bit_set_invertible_t& o) = default;
-hb_bit_set_invertible_t& operator= (hb_bit_set_invertible_t&& other) { hb_swap (*this, other); return *this; }
-friend void swap (hb_bit_set_invertible_t &a, hb_bit_set_invertible_t &b)
+hb_bit_set_invertible_t& operator= (hb_bit_set_invertible_t&& other) noexcept { hb_swap (*this, other); return *this; }
+friend void swap (hb_bit_set_invertible_t &a, hb_bit_set_invertible_t &b) noexcept
{
if (likely (!a.s.successful || !b.s.successful))
return;

@@ -38,10 +38,10 @@ struct hb_bit_set_t
~hb_bit_set_t () = default;

hb_bit_set_t (const hb_bit_set_t& other) : hb_bit_set_t () { set (other, true); }
-hb_bit_set_t ( hb_bit_set_t&& other) : hb_bit_set_t () { hb_swap (*this, other); }
+hb_bit_set_t ( hb_bit_set_t&& other) noexcept : hb_bit_set_t () { hb_swap (*this, other); }
hb_bit_set_t& operator= (const hb_bit_set_t& other) { set (other); return *this; }
-hb_bit_set_t& operator= (hb_bit_set_t&& other) { hb_swap (*this, other); return *this; }
-friend void swap (hb_bit_set_t &a, hb_bit_set_t &b)
+hb_bit_set_t& operator= (hb_bit_set_t&& other) noexcept { hb_swap (*this, other); return *this; }
+friend void swap (hb_bit_set_t &a, hb_bit_set_t &b) noexcept
{
if (likely (!a.successful || !b.successful))
return;

@@ -598,6 +598,11 @@ _open_resource_fork (const char *file_name, hb_mapped_file_t *file)
* Creates a new blob containing the data from the
* specified binary font file.
*
* The filename is passed directly to the system on all platforms,
* except on Windows, where the filename is interpreted as UTF-8.
* Only if the filename is not valid UTF-8, it will be interpreted
* according to the system codepage.
*
* Returns: An #hb_blob_t pointer with the content of the file,
* or hb_blob_get_empty() if failed.
*

@@ -617,6 +622,11 @@ hb_blob_create_from_file (const char *file_name)
* Creates a new blob containing the data from the
* specified binary font file.
*
* The filename is passed directly to the system on all platforms,
* except on Windows, where the filename is interpreted as UTF-8.
* Only if the filename is not valid UTF-8, it will be interpreted
* according to the system codepage.
*
* Returns: An #hb_blob_t pointer with the content of the file,
* or `NULL` if failed.
*

@@ -672,10 +682,19 @@ fail_without_close:
if (unlikely (!file)) return nullptr;

HANDLE fd;
int conversion;
unsigned int size = strlen (file_name) + 1;
wchar_t * wchar_file_name = (wchar_t *) hb_malloc (sizeof (wchar_t) * size);
if (unlikely (!wchar_file_name)) goto fail_without_close;
-mbstowcs (wchar_file_name, file_name, size);

/* Assume file name is given in UTF-8 encoding */
conversion = MultiByteToWideChar(CP_UTF8, MB_ERR_INVALID_CHARS, file_name, -1, wchar_file_name, size);
if (conversion <= 0)
{
/* Conversion failed due to invalid UTF-8 characters,
Repeat conversion based on system code page */
mbstowcs(wchar_file_name, file_name, size);
}
#if !WINAPI_FAMILY_PARTITION(WINAPI_PARTITION_DESKTOP) && WINAPI_FAMILY_PARTITION(WINAPI_PARTITION_APP)
{
CREATEFILE2_EXTENDED_PARAMETERS ceparams = { 0 };

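A concrete illustration of the fallback added above: a file name such as "C:\Fonts\Café.ttf" passed as UTF-8 converts cleanly through MultiByteToWideChar (CP_UTF8, MB_ERR_INVALID_CHARS, ...); if the same name had been encoded in the active ANSI code page (for example CP-1252, where "é" is the single byte 0xE9), those bytes are not valid UTF-8, the strict conversion fails with a non-positive return value, and the code falls back to mbstowcs(), i.e. the system code page, exactly as the documentation added to hb_blob_create_from_file() and hb_blob_create_from_file_or_fail() describes.
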
@@ -149,7 +149,7 @@ buffer_verify_unsafe_to_break (hb_buffer_t *buffer,
}
assert (text_start < text_end);

-if (0)
+if (false)
printf("start %u end %u text start %u end %u\n", start, end, text_start, text_end);

hb_buffer_clear_contents (fragment);

@@ -288,7 +288,7 @@ buffer_verify_unsafe_to_concat (hb_buffer_t *buffer,
}
assert (text_start < text_end);

-if (0)
+if (false)
printf("start %u end %u text start %u end %u\n", start, end, text_start, text_end);

#if 0

@@ -54,8 +54,8 @@ struct top_dict_values_t : dict_values_t<OPSTR>
}
void fini () { dict_values_t<OPSTR>::fini (); }

-unsigned int charStringsOffset;
-unsigned int FDArrayOffset;
+int charStringsOffset;
+int FDArrayOffset;
};

struct dict_opset_t : opset_t<number_t>

@@ -157,11 +157,11 @@ struct top_dict_opset_t : dict_opset_t
{
switch (op) {
case OpCode_CharStrings:
-dictval.charStringsOffset = env.argStack.pop_uint ();
+dictval.charStringsOffset = env.argStack.pop_int ();
env.clear_args ();
break;
case OpCode_FDArray:
-dictval.FDArrayOffset = env.argStack.pop_uint ();
+dictval.FDArrayOffset = env.argStack.pop_int ();
env.clear_args ();
break;
case OpCode_FontMatrix:

@ -168,7 +168,7 @@ struct cff2_cs_interp_env_t : cs_interp_env_t<ELEM, CFF2Subrs>
|
||||
protected:
|
||||
const int *coords;
|
||||
unsigned int num_coords;
|
||||
const CFF2VariationStore *varStore;
|
||||
const CFF2ItemVariationStore *varStore;
|
||||
unsigned int region_count;
|
||||
unsigned int ivs;
|
||||
hb_vector_t<float> scalars;
|
||||
|
@ -996,7 +996,7 @@ hb_feature_to_string (hb_feature_t *feature,
|
||||
if (feature->value > 1)
|
||||
{
|
||||
s[len++] = '=';
|
||||
len += hb_max (0, snprintf (s + len, ARRAY_LENGTH (s) - len, "%u", feature->value));
|
||||
len += hb_max (0, snprintf (s + len, ARRAY_LENGTH (s) - len, "%" PRIu32, feature->value));
|
||||
}
|
||||
assert (len < ARRAY_LENGTH (s));
|
||||
len = hb_min (len, size - 1);
|
||||
|
@ -47,14 +47,10 @@
# endif /* !__cplusplus */
#endif

#if defined (_SVR4) || defined (SVR4) || defined (__OpenBSD__) || \
defined (_sgi) || defined (__sun) || defined (sun) || \
defined (__digital__) || defined (__HP_cc)
# include <inttypes.h>
#elif defined (_AIX)
#if defined (_AIX)
# include <sys/inttypes.h>
#elif defined (_MSC_VER) && _MSC_VER < 1600
/* VS 2010 (_MSC_VER 1600) has stdint.h */
#elif defined (_MSC_VER) && _MSC_VER < 1800
/* VS 2013 (_MSC_VER 1800) has inttypes.h */
typedef __int8 int8_t;
typedef unsigned __int8 uint8_t;
typedef __int16 int16_t;

@ -63,10 +59,8 @@ typedef __int32 int32_t;
typedef unsigned __int32 uint32_t;
typedef __int64 int64_t;
typedef unsigned __int64 uint64_t;
#elif defined (__KERNEL__)
# include <linux/types.h>
#else
# include <stdint.h>
# include <inttypes.h>
#endif

#if defined(__GNUC__) && ((__GNUC__ > 3) || (__GNUC__ == 3 && __GNUC_MINOR__ >= 1))
@ -56,15 +56,15 @@ struct shared_ptr

explicit shared_ptr (T *p = nullptr) : p (p) {}
shared_ptr (const shared_ptr &o) : p (v::reference (o.p)) {}
shared_ptr (shared_ptr &&o) : p (o.p) { o.p = nullptr; }
shared_ptr (shared_ptr &&o) noexcept : p (o.p) { o.p = nullptr; }
shared_ptr& operator = (const shared_ptr &o) { if (p != o.p) { destroy (); p = o.p; reference (); } return *this; }
shared_ptr& operator = (shared_ptr &&o) { v::destroy (p); p = o.p; o.p = nullptr; return *this; }
shared_ptr& operator = (shared_ptr &&o) noexcept { v::destroy (p); p = o.p; o.p = nullptr; return *this; }
~shared_ptr () { v::destroy (p); p = nullptr; }

T* get() const { return p; }

void swap (shared_ptr &o) { std::swap (p, o.p); }
friend void swap (shared_ptr &a, shared_ptr &b) { std::swap (a.p, b.p); }
void swap (shared_ptr &o) noexcept { std::swap (p, o.p); }
friend void swap (shared_ptr &a, shared_ptr &b) noexcept { std::swap (a.p, b.p); }

operator T * () const { return p; }
T& operator * () const { return *get (); }

@ -98,16 +98,16 @@ struct unique_ptr

explicit unique_ptr (T *p = nullptr) : p (p) {}
unique_ptr (const unique_ptr &o) = delete;
unique_ptr (unique_ptr &&o) : p (o.p) { o.p = nullptr; }
unique_ptr (unique_ptr &&o) noexcept : p (o.p) { o.p = nullptr; }
unique_ptr& operator = (const unique_ptr &o) = delete;
unique_ptr& operator = (unique_ptr &&o) { v::destroy (p); p = o.p; o.p = nullptr; return *this; }
unique_ptr& operator = (unique_ptr &&o) noexcept { v::destroy (p); p = o.p; o.p = nullptr; return *this; }
~unique_ptr () { v::destroy (p); p = nullptr; }

T* get() const { return p; }
T* release () { T* v = p; p = nullptr; return v; }

void swap (unique_ptr &o) { std::swap (p, o.p); }
friend void swap (unique_ptr &a, unique_ptr &b) { std::swap (a.p, b.p); }
void swap (unique_ptr &o) noexcept { std::swap (p, o.p); }
friend void swap (unique_ptr &a, unique_ptr &b) noexcept { std::swap (a.p, b.p); }

operator T * () const { return p; }
T& operator * () const { return *get (); }
@ -173,7 +173,7 @@ _hb_directwrite_shaper_face_data_create (hb_face_t *face)

t_DWriteCreateFactory p_DWriteCreateFactory;

#if defined(__GNUC__)
#if defined(__GNUC__) || defined(__clang__)
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wcast-function-type"
#endif

@ -181,7 +181,7 @@ _hb_directwrite_shaper_face_data_create (hb_face_t *face)
p_DWriteCreateFactory = (t_DWriteCreateFactory)
GetProcAddress (data->dwrite_dll, "DWriteCreateFactory");

#if defined(__GNUC__)
#if defined(__GNUC__) || defined(__clang__)
#pragma GCC diagnostic pop
#endif

@ -651,7 +651,7 @@ struct hb_font_t
{
if (get_glyph_name (glyph, s, size)) return;

if (size && snprintf (s, size, "gid%u", glyph) < 0)
if (size && snprintf (s, size, "gid%" PRIu32, glyph) < 0)
*s = '\0';
}
@ -224,7 +224,7 @@ _hb_ft_hb_font_check_changed (hb_font_t *font,
*
* Sets the FT_Load_Glyph load flags for the specified #hb_font_t.
*
* For more information, see
* For more information, see
* <https://freetype.org/freetype2/docs/reference/ft2-glyph_retrieval.html#ft_load_xxx>
*
* This function works with #hb_font_t objects created by

@ -252,7 +252,7 @@ hb_ft_font_set_load_flags (hb_font_t *font, int load_flags)
*
* Fetches the FT_Load_Glyph load flags of the specified #hb_font_t.
*
* For more information, see
* For more information, see
* <https://freetype.org/freetype2/docs/reference/ft2-glyph_retrieval.html#ft_load_xxx>
*
* This function works with #hb_font_t objects created by

@ -1118,10 +1118,10 @@ _hb_ft_reference_table (hb_face_t *face HB_UNUSED, hb_tag_t tag, void *user_data
* This variant of the function does not provide any life-cycle management.
*
* Most client programs should use hb_ft_face_create_referenced()
* (or, perhaps, hb_ft_face_create_cached()) instead.
* (or, perhaps, hb_ft_face_create_cached()) instead.
*
* If you know you have valid reasons not to use hb_ft_face_create_referenced(),
* then it is the client program's responsibility to destroy @ft_face
* then it is the client program's responsibility to destroy @ft_face
* after the #hb_face_t face object has been destroyed.
*
* Return value: (transfer full): the new #hb_face_t face object

@ -1215,7 +1215,7 @@ hb_ft_face_finalize (void *arg)
hb_face_t *
hb_ft_face_create_cached (FT_Face ft_face)
{
if (unlikely (!ft_face->generic.data || ft_face->generic.finalizer != (FT_Generic_Finalizer) hb_ft_face_finalize))
if (unlikely (!ft_face->generic.data || ft_face->generic.finalizer != hb_ft_face_finalize))
{
if (ft_face->generic.finalizer)
ft_face->generic.finalizer (ft_face);

@ -1241,13 +1241,13 @@ hb_ft_face_create_cached (FT_Face ft_face)
* This variant of the function does not provide any life-cycle management.
*
* Most client programs should use hb_ft_font_create_referenced()
* instead.
* instead.
*
* If you know you have valid reasons not to use hb_ft_font_create_referenced(),
* then it is the client program's responsibility to destroy @ft_face
* then it is the client program's responsibility to destroy @ft_face
* after the #hb_font_t font object has been destroyed.
*
* HarfBuzz will use the @destroy callback on the #hb_font_t font object
* HarfBuzz will use the @destroy callback on the #hb_font_t font object
* if it is supplied when you use this function. However, even if @destroy
* is provided, it is the client program's responsibility to destroy @ft_face,
* and it is the client program's responsibility to ensure that @ft_face is
@ -93,15 +93,16 @@ hb_icu_script_to_script (UScriptCode script)
UScriptCode
hb_icu_script_from_script (hb_script_t script)
{
UScriptCode out = USCRIPT_INVALID_CODE;

if (unlikely (script == HB_SCRIPT_INVALID))
return USCRIPT_INVALID_CODE;
return out;

unsigned int numScriptCode = 1 + u_getIntPropertyMaxValue (UCHAR_SCRIPT);
for (unsigned int i = 0; i < numScriptCode; i++)
if (unlikely (hb_icu_script_to_script ((UScriptCode) i) == script))
return (UScriptCode) i;
UErrorCode icu_err = U_ZERO_ERROR;
const unsigned char buf[5] = {HB_UNTAG (script), 0};
uscript_getCode ((const char *) buf, &out, 1, &icu_err);

return USCRIPT_UNKNOWN;
return out;
}

@ -106,7 +106,7 @@
#endif

#ifndef HB_COLRV1_MAX_EDGE_COUNT
#define HB_COLRV1_MAX_EDGE_COUNT 65536
#define HB_COLRV1_MAX_EDGE_COUNT 2048
#endif
@ -70,9 +70,9 @@ struct hb_hashmap_t

alloc (o.population); hb_copy (o, *this);
}
hb_hashmap_t (hb_hashmap_t&& o) : hb_hashmap_t () { hb_swap (*this, o); }
hb_hashmap_t (hb_hashmap_t&& o) noexcept : hb_hashmap_t () { hb_swap (*this, o); }
hb_hashmap_t& operator= (const hb_hashmap_t& o) { reset (); alloc (o.population); hb_copy (o, *this); return *this; }
hb_hashmap_t& operator= (hb_hashmap_t&& o) { hb_swap (*this, o); return *this; }
hb_hashmap_t& operator= (hb_hashmap_t&& o) noexcept { hb_swap (*this, o); return *this; }

hb_hashmap_t (std::initializer_list<hb_pair_t<K, V>> lst) : hb_hashmap_t ()
{

@ -137,26 +137,23 @@ struct hb_hashmap_t
};

hb_object_header_t header;
unsigned int successful : 1; /* Allocations successful */
unsigned int population : 31; /* Not including tombstones. */
bool successful; /* Allocations successful */
unsigned short max_chain_length;
unsigned int population; /* Not including tombstones. */
unsigned int occupancy; /* Including tombstones. */
unsigned int mask;
unsigned int prime;
unsigned int max_chain_length;
item_t *items;

friend void swap (hb_hashmap_t& a, hb_hashmap_t& b)
friend void swap (hb_hashmap_t& a, hb_hashmap_t& b) noexcept
{
if (unlikely (!a.successful || !b.successful))
return;
unsigned tmp = a.population;
a.population = b.population;
b.population = tmp;
//hb_swap (a.population, b.population);
hb_swap (a.max_chain_length, b.max_chain_length);
hb_swap (a.population, b.population);
hb_swap (a.occupancy, b.occupancy);
hb_swap (a.mask, b.mask);
hb_swap (a.prime, b.prime);
hb_swap (a.max_chain_length, b.max_chain_length);
hb_swap (a.items, b.items);
}
void init ()

@ -164,10 +161,10 @@ struct hb_hashmap_t
hb_object_init (this);

successful = true;
max_chain_length = 0;
population = occupancy = 0;
mask = 0;
prime = 0;
max_chain_length = 0;
items = nullptr;
}
void fini ()

@ -558,7 +555,7 @@ struct hb_map_t : hb_hashmap_t<hb_codepoint_t,
~hb_map_t () = default;
hb_map_t () : hashmap () {}
hb_map_t (const hb_map_t &o) : hashmap ((hashmap &) o) {}
hb_map_t (hb_map_t &&o) : hashmap (std::move ((hashmap &) o)) {}
hb_map_t (hb_map_t &&o) noexcept : hashmap (std::move ((hashmap &) o)) {}
hb_map_t& operator= (const hb_map_t&) = default;
hb_map_t& operator= (hb_map_t&&) = default;
hb_map_t (std::initializer_list<hb_codepoint_pair_t> lst) : hashmap (lst) {}
@ -325,7 +325,7 @@ retry:
hb_user_data_array_t *user_data = obj->header.user_data.get_acquire ();
if (unlikely (!user_data))
{
user_data = (hb_user_data_array_t *) hb_calloc (sizeof (hb_user_data_array_t), 1);
user_data = (hb_user_data_array_t *) hb_calloc (1, sizeof (hb_user_data_array_t));
if (unlikely (!user_data))
return false;
user_data->init ();

@ -985,6 +985,13 @@ struct SortedArrayOf : ArrayOf<Type, LenType>
return_trace (ret);
}

SortedArrayOf* copy (hb_serialize_context_t *c) const
{
TRACE_SERIALIZE (this);
SortedArrayOf* out = reinterpret_cast<SortedArrayOf *> (ArrayOf<Type, LenType>::copy (c));
return_trace (out);
}

template <typename T>
Type &bsearch (const T &x, Type &not_found = Crap (Type))
{ return *as_array ().bsearch (x, &not_found); }

@ -41,10 +41,21 @@ using namespace OT;
using objidx_t = hb_serialize_context_t::objidx_t;
using whence_t = hb_serialize_context_t::whence_t;

/* utility macro */
template<typename Type>
static inline const Type& StructAtOffsetOrNull (const void *P, unsigned int offset)
{ return offset ? StructAtOffset<Type> (P, offset) : Null (Type); }
/* CFF offsets can technically be negative */
template<typename Type, typename ...Ts>
static inline const Type& StructAtOffsetOrNull (const void *P, int offset, hb_sanitize_context_t &sc, Ts&&... ds)
{
if (!offset) return Null (Type);

const char *p = (const char *) P + offset;
if (!sc.check_point (p)) return Null (Type);

const Type &obj = *reinterpret_cast<const Type *> (p);
if (!obj.sanitize (&sc, std::forward<Ts> (ds)...)) return Null (Type);

return obj;
}

struct code_pair_t
{

@ -763,9 +763,9 @@ struct cff1_top_dict_values_t : top_dict_values_t<cff1_top_dict_val_t>
|
||||
unsigned int ros_supplement;
|
||||
unsigned int cidCount;
|
||||
|
||||
unsigned int EncodingOffset;
|
||||
unsigned int CharsetOffset;
|
||||
unsigned int FDSelectOffset;
|
||||
int EncodingOffset;
|
||||
int CharsetOffset;
|
||||
int FDSelectOffset;
|
||||
table_info_t privateDictInfo;
|
||||
};
|
||||
|
||||
@ -821,24 +821,24 @@ struct cff1_top_dict_opset_t : top_dict_opset_t<cff1_top_dict_val_t>
|
||||
break;
|
||||
|
||||
case OpCode_Encoding:
|
||||
dictval.EncodingOffset = env.argStack.pop_uint ();
|
||||
dictval.EncodingOffset = env.argStack.pop_int ();
|
||||
env.clear_args ();
|
||||
if (unlikely (dictval.EncodingOffset == 0)) return;
|
||||
break;
|
||||
|
||||
case OpCode_charset:
|
||||
dictval.CharsetOffset = env.argStack.pop_uint ();
|
||||
dictval.CharsetOffset = env.argStack.pop_int ();
|
||||
env.clear_args ();
|
||||
if (unlikely (dictval.CharsetOffset == 0)) return;
|
||||
break;
|
||||
|
||||
case OpCode_FDSelect:
|
||||
dictval.FDSelectOffset = env.argStack.pop_uint ();
|
||||
dictval.FDSelectOffset = env.argStack.pop_int ();
|
||||
env.clear_args ();
|
||||
break;
|
||||
|
||||
case OpCode_Private:
|
||||
dictval.privateDictInfo.offset = env.argStack.pop_uint ();
|
||||
dictval.privateDictInfo.offset = env.argStack.pop_int ();
|
||||
dictval.privateDictInfo.size = env.argStack.pop_uint ();
|
||||
env.clear_args ();
|
||||
break;
|
||||
@ -913,7 +913,7 @@ struct cff1_private_dict_values_base_t : dict_values_t<VAL>
|
||||
}
|
||||
void fini () { dict_values_t<VAL>::fini (); }
|
||||
|
||||
unsigned int subrsOffset;
|
||||
int subrsOffset;
|
||||
const CFF1Subrs *localSubrs;
|
||||
};
|
||||
|
||||
@ -948,7 +948,7 @@ struct cff1_private_dict_opset_t : dict_opset_t
|
||||
env.clear_args ();
|
||||
break;
|
||||
case OpCode_Subrs:
|
||||
dictval.subrsOffset = env.argStack.pop_uint ();
|
||||
dictval.subrsOffset = env.argStack.pop_int ();
|
||||
env.clear_args ();
|
||||
break;
|
||||
|
||||
@ -990,7 +990,7 @@ struct cff1_private_dict_opset_subset_t : dict_opset_t
|
||||
break;
|
||||
|
||||
case OpCode_Subrs:
|
||||
dictval.subrsOffset = env.argStack.pop_uint ();
|
||||
dictval.subrsOffset = env.argStack.pop_int ();
|
||||
env.clear_args ();
|
||||
break;
|
||||
|
||||
@ -1090,8 +1090,8 @@ struct cff1
|
||||
goto fail;
|
||||
hb_barrier ();
|
||||
|
||||
topDictIndex = &StructAtOffset<CFF1TopDictIndex> (nameIndex, nameIndex->get_size ());
|
||||
if ((topDictIndex == &Null (CFF1TopDictIndex)) || !topDictIndex->sanitize (&sc) || (topDictIndex->count == 0))
|
||||
topDictIndex = &StructAtOffsetOrNull<CFF1TopDictIndex> (nameIndex, nameIndex->get_size (), sc);
|
||||
if (topDictIndex == &Null (CFF1TopDictIndex) || (topDictIndex->count == 0))
|
||||
goto fail;
|
||||
hb_barrier ();
|
||||
|
||||
@ -1108,20 +1108,18 @@ struct cff1
|
||||
charset = &Null (Charset);
|
||||
else
|
||||
{
|
||||
charset = &StructAtOffsetOrNull<Charset> (cff, topDict.CharsetOffset);
|
||||
if (unlikely ((charset == &Null (Charset)) || !charset->sanitize (&sc, &num_charset_entries))) goto fail;
|
||||
hb_barrier ();
|
||||
charset = &StructAtOffsetOrNull<Charset> (cff, topDict.CharsetOffset, sc, &num_charset_entries);
|
||||
if (unlikely (charset == &Null (Charset))) goto fail;
|
||||
}
|
||||
|
||||
fdCount = 1;
|
||||
if (is_CID ())
|
||||
{
|
||||
fdArray = &StructAtOffsetOrNull<CFF1FDArray> (cff, topDict.FDArrayOffset);
|
||||
fdSelect = &StructAtOffsetOrNull<CFF1FDSelect> (cff, topDict.FDSelectOffset);
|
||||
if (unlikely ((fdArray == &Null (CFF1FDArray)) || !fdArray->sanitize (&sc) ||
|
||||
(fdSelect == &Null (CFF1FDSelect)) || !fdSelect->sanitize (&sc, fdArray->count)))
|
||||
fdArray = &StructAtOffsetOrNull<CFF1FDArray> (cff, topDict.FDArrayOffset, sc);
|
||||
fdSelect = &StructAtOffsetOrNull<CFF1FDSelect> (cff, topDict.FDSelectOffset, sc, fdArray->count);
|
||||
if (unlikely (fdArray == &Null (CFF1FDArray) ||
|
||||
fdSelect == &Null (CFF1FDSelect)))
|
||||
goto fail;
|
||||
hb_barrier ();
|
||||
|
||||
fdCount = fdArray->count;
|
||||
}
|
||||
@ -1140,27 +1138,19 @@ struct cff1
|
||||
{
|
||||
if (!is_predef_encoding ())
|
||||
{
|
||||
encoding = &StructAtOffsetOrNull<Encoding> (cff, topDict.EncodingOffset);
|
||||
if (unlikely ((encoding == &Null (Encoding)) || !encoding->sanitize (&sc))) goto fail;
|
||||
hb_barrier ();
|
||||
encoding = &StructAtOffsetOrNull<Encoding> (cff, topDict.EncodingOffset, sc);
|
||||
if (unlikely (encoding == &Null (Encoding))) goto fail;
|
||||
}
|
||||
}
|
||||
|
||||
stringIndex = &StructAtOffset<CFF1StringIndex> (topDictIndex, topDictIndex->get_size ());
|
||||
if ((stringIndex == &Null (CFF1StringIndex)) || !stringIndex->sanitize (&sc))
|
||||
stringIndex = &StructAtOffsetOrNull<CFF1StringIndex> (topDictIndex, topDictIndex->get_size (), sc);
|
||||
if (stringIndex == &Null (CFF1StringIndex))
|
||||
goto fail;
|
||||
hb_barrier ();
|
||||
|
||||
globalSubrs = &StructAtOffset<CFF1Subrs> (stringIndex, stringIndex->get_size ());
|
||||
if ((globalSubrs != &Null (CFF1Subrs)) && !globalSubrs->sanitize (&sc))
|
||||
globalSubrs = &StructAtOffsetOrNull<CFF1Subrs> (stringIndex, stringIndex->get_size (), sc);
|
||||
charStrings = &StructAtOffsetOrNull<CFF1CharStrings> (cff, topDict.charStringsOffset, sc);
|
||||
if (charStrings == &Null (CFF1CharStrings))
|
||||
goto fail;
|
||||
hb_barrier ();
|
||||
|
||||
charStrings = &StructAtOffsetOrNull<CFF1CharStrings> (cff, topDict.charStringsOffset);
|
||||
|
||||
if ((charStrings == &Null (CFF1CharStrings)) || unlikely (!charStrings->sanitize (&sc)))
|
||||
goto fail;
|
||||
hb_barrier ();
|
||||
|
||||
num_glyphs = charStrings->count;
|
||||
if (num_glyphs != sc.get_num_glyphs ())
|
||||
@ -1188,19 +1178,13 @@ struct cff1
|
||||
font->init ();
|
||||
if (unlikely (!font_interp.interpret (*font))) goto fail;
|
||||
PRIVDICTVAL *priv = &privateDicts[i];
|
||||
const hb_ubytes_t privDictStr = StructAtOffset<UnsizedByteStr> (cff, font->privateDictInfo.offset).as_ubytes (font->privateDictInfo.size);
|
||||
if (unlikely (!privDictStr.sanitize (&sc))) goto fail;
|
||||
hb_barrier ();
|
||||
const hb_ubytes_t privDictStr = StructAtOffsetOrNull<UnsizedByteStr> (cff, font->privateDictInfo.offset, sc, font->privateDictInfo.size).as_ubytes (font->privateDictInfo.size);
|
||||
num_interp_env_t env2 (privDictStr);
|
||||
dict_interpreter_t<PRIVOPSET, PRIVDICTVAL> priv_interp (env2);
|
||||
priv->init ();
|
||||
if (unlikely (!priv_interp.interpret (*priv))) goto fail;
|
||||
|
||||
priv->localSubrs = &StructAtOffsetOrNull<CFF1Subrs> (&privDictStr, priv->subrsOffset);
|
||||
if (priv->localSubrs != &Null (CFF1Subrs) &&
|
||||
unlikely (!priv->localSubrs->sanitize (&sc)))
|
||||
goto fail;
|
||||
hb_barrier ();
|
||||
priv->localSubrs = &StructAtOffsetOrNull<CFF1Subrs> (&privDictStr, priv->subrsOffset, sc);
|
||||
}
|
||||
}
|
||||
else /* non-CID */
|
||||
@ -1208,18 +1192,13 @@ struct cff1
|
||||
cff1_top_dict_values_t *font = &topDict;
|
||||
PRIVDICTVAL *priv = &privateDicts[0];
|
||||
|
||||
const hb_ubytes_t privDictStr = StructAtOffset<UnsizedByteStr> (cff, font->privateDictInfo.offset).as_ubytes (font->privateDictInfo.size);
|
||||
if (unlikely (!privDictStr.sanitize (&sc))) goto fail;
|
||||
hb_barrier ();
|
||||
const hb_ubytes_t privDictStr = StructAtOffsetOrNull<UnsizedByteStr> (cff, font->privateDictInfo.offset, sc, font->privateDictInfo.size).as_ubytes (font->privateDictInfo.size);
|
||||
num_interp_env_t env (privDictStr);
|
||||
dict_interpreter_t<PRIVOPSET, PRIVDICTVAL> priv_interp (env);
|
||||
priv->init ();
|
||||
if (unlikely (!priv_interp.interpret (*priv))) goto fail;
|
||||
|
||||
priv->localSubrs = &StructAtOffsetOrNull<CFF1Subrs> (&privDictStr, priv->subrsOffset);
|
||||
if (priv->localSubrs != &Null (CFF1Subrs) &&
|
||||
unlikely (!priv->localSubrs->sanitize (&sc)))
|
||||
goto fail;
|
||||
priv->localSubrs = &StructAtOffsetOrNull<CFF1Subrs> (&privDictStr, priv->subrsOffset, sc);
|
||||
hb_barrier ();
|
||||
}
|
||||
|
||||
@ -1437,7 +1416,7 @@ struct cff1
|
||||
hb_sorted_vector_t<gname_t> *names = glyph_names.get_acquire ();
|
||||
if (unlikely (!names))
|
||||
{
|
||||
names = (hb_sorted_vector_t<gname_t> *) hb_calloc (sizeof (hb_sorted_vector_t<gname_t>), 1);
|
||||
names = (hb_sorted_vector_t<gname_t> *) hb_calloc (1, sizeof (hb_sorted_vector_t<gname_t>));
|
||||
if (likely (names))
|
||||
{
|
||||
names->init ();
|
||||
|
@ -111,7 +111,7 @@ struct CFF2FDSelect
|
||||
DEFINE_SIZE_MIN (2);
|
||||
};
|
||||
|
||||
struct CFF2VariationStore
|
||||
struct CFF2ItemVariationStore
|
||||
{
|
||||
bool sanitize (hb_sanitize_context_t *c) const
|
||||
{
|
||||
@ -122,11 +122,11 @@ struct CFF2VariationStore
|
||||
varStore.sanitize (c));
|
||||
}
|
||||
|
||||
bool serialize (hb_serialize_context_t *c, const CFF2VariationStore *varStore)
|
||||
bool serialize (hb_serialize_context_t *c, const CFF2ItemVariationStore *varStore)
|
||||
{
|
||||
TRACE_SERIALIZE (this);
|
||||
unsigned int size_ = varStore->get_size ();
|
||||
CFF2VariationStore *dest = c->allocate_size<CFF2VariationStore> (size_);
|
||||
CFF2ItemVariationStore *dest = c->allocate_size<CFF2ItemVariationStore> (size_);
|
||||
if (unlikely (!dest)) return_trace (false);
|
||||
hb_memcpy (dest, varStore, size_);
|
||||
return_trace (true);
|
||||
@ -135,9 +135,9 @@ struct CFF2VariationStore
|
||||
unsigned int get_size () const { return HBUINT16::static_size + size; }
|
||||
|
||||
HBUINT16 size;
|
||||
VariationStore varStore;
|
||||
ItemVariationStore varStore;
|
||||
|
||||
DEFINE_SIZE_MIN (2 + VariationStore::min_size);
|
||||
DEFINE_SIZE_MIN (2 + ItemVariationStore::min_size);
|
||||
};
|
||||
|
||||
struct cff2_top_dict_values_t : top_dict_values_t<>
|
||||
@ -150,8 +150,8 @@ struct cff2_top_dict_values_t : top_dict_values_t<>
|
||||
}
|
||||
void fini () { top_dict_values_t<>::fini (); }
|
||||
|
||||
unsigned int vstoreOffset;
|
||||
unsigned int FDSelectOffset;
|
||||
int vstoreOffset;
|
||||
int FDSelectOffset;
|
||||
};
|
||||
|
||||
struct cff2_top_dict_opset_t : top_dict_opset_t<>
|
||||
@ -169,11 +169,11 @@ struct cff2_top_dict_opset_t : top_dict_opset_t<>
|
||||
break;
|
||||
|
||||
case OpCode_vstore:
|
||||
dictval.vstoreOffset = env.argStack.pop_uint ();
|
||||
dictval.vstoreOffset = env.argStack.pop_int ();
|
||||
env.clear_args ();
|
||||
break;
|
||||
case OpCode_FDSelect:
|
||||
dictval.FDSelectOffset = env.argStack.pop_uint ();
|
||||
dictval.FDSelectOffset = env.argStack.pop_int ();
|
||||
env.clear_args ();
|
||||
break;
|
||||
|
||||
@ -241,7 +241,7 @@ struct cff2_private_dict_values_base_t : dict_values_t<VAL>
|
||||
}
|
||||
void fini () { dict_values_t<VAL>::fini (); }
|
||||
|
||||
unsigned int subrsOffset;
|
||||
int subrsOffset;
|
||||
const CFF2Subrs *localSubrs;
|
||||
unsigned int ivs;
|
||||
};
|
||||
@ -295,7 +295,7 @@ struct cff2_private_dict_opset_t : dict_opset_t
|
||||
env.clear_args ();
|
||||
break;
|
||||
case OpCode_Subrs:
|
||||
dictval.subrsOffset = env.argStack.pop_uint ();
|
||||
dictval.subrsOffset = env.argStack.pop_int ();
|
||||
env.clear_args ();
|
||||
break;
|
||||
case OpCode_vsindexdict:
|
||||
@ -344,7 +344,7 @@ struct cff2_private_dict_opset_subset_t : dict_opset_t
|
||||
return;
|
||||
|
||||
case OpCode_Subrs:
|
||||
dictval.subrsOffset = env.argStack.pop_uint ();
|
||||
dictval.subrsOffset = env.argStack.pop_int ();
|
||||
env.clear_args ();
|
||||
break;
|
||||
|
||||
@ -426,18 +426,15 @@ struct cff2
|
||||
if (unlikely (!top_interp.interpret (topDict))) goto fail;
|
||||
}
|
||||
|
||||
globalSubrs = &StructAtOffset<CFF2Subrs> (cff2, cff2->topDict + cff2->topDictSize);
|
||||
varStore = &StructAtOffsetOrNull<CFF2VariationStore> (cff2, topDict.vstoreOffset);
|
||||
charStrings = &StructAtOffsetOrNull<CFF2CharStrings> (cff2, topDict.charStringsOffset);
|
||||
fdArray = &StructAtOffsetOrNull<CFF2FDArray> (cff2, topDict.FDArrayOffset);
|
||||
fdSelect = &StructAtOffsetOrNull<CFF2FDSelect> (cff2, topDict.FDSelectOffset);
|
||||
globalSubrs = &StructAtOffsetOrNull<CFF2Subrs> (cff2, cff2->topDict + cff2->topDictSize, sc);
|
||||
varStore = &StructAtOffsetOrNull<CFF2ItemVariationStore> (cff2, topDict.vstoreOffset, sc);
|
||||
charStrings = &StructAtOffsetOrNull<CFF2CharStrings> (cff2, topDict.charStringsOffset, sc);
|
||||
fdArray = &StructAtOffsetOrNull<CFF2FDArray> (cff2, topDict.FDArrayOffset, sc);
|
||||
fdSelect = &StructAtOffsetOrNull<CFF2FDSelect> (cff2, topDict.FDSelectOffset, sc, fdArray->count);
|
||||
|
||||
if (((varStore != &Null (CFF2VariationStore)) && unlikely (!varStore->sanitize (&sc))) ||
|
||||
(charStrings == &Null (CFF2CharStrings)) || unlikely (!charStrings->sanitize (&sc)) ||
|
||||
(globalSubrs == &Null (CFF2Subrs)) || unlikely (!globalSubrs->sanitize (&sc)) ||
|
||||
(fdArray == &Null (CFF2FDArray)) || unlikely (!fdArray->sanitize (&sc)) ||
|
||||
!hb_barrier () ||
|
||||
(((fdSelect != &Null (CFF2FDSelect)) && unlikely (!fdSelect->sanitize (&sc, fdArray->count)))))
|
||||
if (charStrings == &Null (CFF2CharStrings) ||
|
||||
globalSubrs == &Null (CFF2Subrs) ||
|
||||
fdArray == &Null (CFF2FDArray))
|
||||
goto fail;
|
||||
|
||||
num_glyphs = charStrings->count;
|
||||
@ -462,19 +459,13 @@ struct cff2
|
||||
font->init ();
|
||||
if (unlikely (!font_interp.interpret (*font))) goto fail;
|
||||
|
||||
const hb_ubytes_t privDictStr = StructAtOffsetOrNull<UnsizedByteStr> (cff2, font->privateDictInfo.offset).as_ubytes (font->privateDictInfo.size);
|
||||
if (unlikely (!privDictStr.sanitize (&sc))) goto fail;
|
||||
hb_barrier ();
|
||||
const hb_ubytes_t privDictStr = StructAtOffsetOrNull<UnsizedByteStr> (cff2, font->privateDictInfo.offset, sc, font->privateDictInfo.size).as_ubytes (font->privateDictInfo.size);
|
||||
cff2_priv_dict_interp_env_t env2 (privDictStr);
|
||||
dict_interpreter_t<PRIVOPSET, PRIVDICTVAL, cff2_priv_dict_interp_env_t> priv_interp (env2);
|
||||
privateDicts[i].init ();
|
||||
if (unlikely (!priv_interp.interpret (privateDicts[i]))) goto fail;
|
||||
|
||||
privateDicts[i].localSubrs = &StructAtOffsetOrNull<CFF2Subrs> (&privDictStr[0], privateDicts[i].subrsOffset);
|
||||
if (privateDicts[i].localSubrs != &Null (CFF2Subrs) &&
|
||||
unlikely (!privateDicts[i].localSubrs->sanitize (&sc)))
|
||||
goto fail;
|
||||
hb_barrier ();
|
||||
privateDicts[i].localSubrs = &StructAtOffsetOrNull<CFF2Subrs> (&privDictStr[0], privateDicts[i].subrsOffset, sc);
|
||||
}
|
||||
|
||||
return;
|
||||
@ -509,7 +500,7 @@ struct cff2
|
||||
hb_blob_t *blob = nullptr;
|
||||
cff2_top_dict_values_t topDict;
|
||||
const CFF2Subrs *globalSubrs = nullptr;
|
||||
const CFF2VariationStore *varStore = nullptr;
|
||||
const CFF2ItemVariationStore *varStore = nullptr;
|
||||
const CFF2CharStrings *charStrings = nullptr;
|
||||
const CFF2FDArray *fdArray = nullptr;
|
||||
const CFF2FDSelect *fdSelect = nullptr;
|
||||
|
@ -41,6 +41,30 @@
|
||||
|
||||
namespace OT {
|
||||
|
||||
static inline uint8_t unicode_to_macroman (hb_codepoint_t u)
|
||||
{
|
||||
uint16_t mapping[] = {
|
||||
0x00C4, 0x00C5, 0x00C7, 0x00C9, 0x00D1, 0x00D6, 0x00DC, 0x00E1,
|
||||
0x00E0, 0x00E2, 0x00E4, 0x00E3, 0x00E5, 0x00E7, 0x00E9, 0x00E8,
|
||||
0x00EA, 0x00EB, 0x00ED, 0x00EC, 0x00EE, 0x00EF, 0x00F1, 0x00F3,
|
||||
0x00F2, 0x00F4, 0x00F6, 0x00F5, 0x00FA, 0x00F9, 0x00FB, 0x00FC,
|
||||
0x2020, 0x00B0, 0x00A2, 0x00A3, 0x00A7, 0x2022, 0x00B6, 0x00DF,
|
||||
0x00AE, 0x00A9, 0x2122, 0x00B4, 0x00A8, 0x2260, 0x00C6, 0x00D8,
|
||||
0x221E, 0x00B1, 0x2264, 0x2265, 0x00A5, 0x00B5, 0x2202, 0x2211,
|
||||
0x220F, 0x03C0, 0x222B, 0x00AA, 0x00BA, 0x03A9, 0x00E6, 0x00F8,
|
||||
0x00BF, 0x00A1, 0x00AC, 0x221A, 0x0192, 0x2248, 0x2206, 0x00AB,
|
||||
0x00BB, 0x2026, 0x00A0, 0x00C0, 0x00C3, 0x00D5, 0x0152, 0x0153,
|
||||
0x2013, 0x2014, 0x201C, 0x201D, 0x2018, 0x2019, 0x00F7, 0x25CA,
|
||||
0x00FF, 0x0178, 0x2044, 0x20AC, 0x2039, 0x203A, 0xFB01, 0xFB02,
|
||||
0x2021, 0x00B7, 0x201A, 0x201E, 0x2030, 0x00C2, 0x00CA, 0x00C1,
|
||||
0x00CB, 0x00C8, 0x00CD, 0x00CE, 0x00CF, 0x00CC, 0x00D3, 0x00D4,
|
||||
0xF8FF, 0x00D2, 0x00DA, 0x00DB, 0x00D9, 0x0131, 0x02C6, 0x02DC,
|
||||
0x00AF, 0x02D8, 0x02D9, 0x02DA, 0x00B8, 0x02DD, 0x02DB, 0x02C7
|
||||
};
|
||||
uint16_t *c = hb_bsearch (u, mapping, ARRAY_LENGTH (mapping), sizeof (mapping[0]),
|
||||
_hb_cmp_operator<uint16_t, uint16_t>);
|
||||
return c ? (c - mapping) + 0x7F : 0;
|
||||
}
|
||||
|
||||
struct CmapSubtableFormat0
|
||||
{
|
||||
@ -1465,8 +1489,11 @@ struct EncodingRecord
|
||||
int ret;
|
||||
ret = platformID.cmp (other.platformID);
|
||||
if (ret) return ret;
|
||||
ret = encodingID.cmp (other.encodingID);
|
||||
if (ret) return ret;
|
||||
if (other.encodingID != 0xFFFF)
|
||||
{
|
||||
ret = encodingID.cmp (other.encodingID);
|
||||
if (ret) return ret;
|
||||
}
|
||||
return 0;
|
||||
}
|
||||
|
||||
@ -1814,9 +1841,13 @@ struct cmap
|
||||
c->plan));
|
||||
}
|
||||
|
||||
const CmapSubtable *find_best_subtable (bool *symbol = nullptr) const
|
||||
const CmapSubtable *find_best_subtable (bool *symbol = nullptr,
|
||||
bool *mac = nullptr,
|
||||
bool *macroman = nullptr) const
|
||||
{
|
||||
if (symbol) *symbol = false;
|
||||
if (mac) *mac = false;
|
||||
if (macroman) *macroman = false;
|
||||
|
||||
const CmapSubtable *subtable;
|
||||
|
||||
@ -1841,6 +1872,20 @@ struct cmap
|
||||
if ((subtable = this->find_subtable (0, 1))) return subtable;
|
||||
if ((subtable = this->find_subtable (0, 0))) return subtable;
|
||||
|
||||
/* MacRoman subtable. */
|
||||
if ((subtable = this->find_subtable (1, 0)))
|
||||
{
|
||||
if (mac) *mac = true;
|
||||
if (macroman) *macroman = true;
|
||||
return subtable;
|
||||
}
|
||||
/* Any other Mac subtable; we just map ASCII for these. */
|
||||
if ((subtable = this->find_subtable (1, 0xFFFF)))
|
||||
{
|
||||
if (mac) *mac = true;
|
||||
return subtable;
|
||||
}
|
||||
|
||||
/* Meh. */
|
||||
return &Null (CmapSubtable);
|
||||
}
|
||||
@ -1852,8 +1897,8 @@ struct cmap
|
||||
accelerator_t (hb_face_t *face)
|
||||
{
|
||||
this->table = hb_sanitize_context_t ().reference_table<cmap> (face);
|
||||
bool symbol;
|
||||
this->subtable = table->find_best_subtable (&symbol);
|
||||
bool symbol, mac, macroman;
|
||||
this->subtable = table->find_best_subtable (&symbol, &mac, ¯oman);
|
||||
this->subtable_uvs = &Null (CmapSubtableFormat14);
|
||||
{
|
||||
const CmapSubtable *st = table->find_subtable (0, 5);
|
||||
@ -1862,6 +1907,7 @@ struct cmap
|
||||
}
|
||||
|
||||
this->get_glyph_data = subtable;
|
||||
#ifndef HB_NO_CMAP_LEGACY_SUBTABLES
|
||||
if (unlikely (symbol))
|
||||
{
|
||||
switch ((unsigned) face->table.OS2->get_font_page ()) {
|
||||
@ -1881,7 +1927,16 @@ struct cmap
|
||||
break;
|
||||
}
|
||||
}
|
||||
else if (unlikely (macroman))
|
||||
{
|
||||
this->get_glyph_funcZ = get_glyph_from_macroman<CmapSubtable>;
|
||||
}
|
||||
else if (unlikely (mac))
|
||||
{
|
||||
this->get_glyph_funcZ = get_glyph_from_ascii<CmapSubtable>;
|
||||
}
|
||||
else
|
||||
#endif
|
||||
{
|
||||
switch (subtable->u.format) {
|
||||
/* Accelerate format 4 and format 12. */
|
||||
@ -1924,7 +1979,7 @@ struct cmap
|
||||
hb_codepoint_t *glyph,
|
||||
cache_t *cache = nullptr) const
|
||||
{
|
||||
if (unlikely (!this->get_glyph_funcZ)) return 0;
|
||||
if (unlikely (!this->get_glyph_funcZ)) return false;
|
||||
return _cached_get (unicode, glyph, cache);
|
||||
}
|
||||
|
||||
@ -2006,6 +2061,28 @@ struct cmap
|
||||
return false;
|
||||
}
|
||||
|
||||
template <typename Type>
|
||||
HB_INTERNAL static bool get_glyph_from_ascii (const void *obj,
|
||||
hb_codepoint_t codepoint,
|
||||
hb_codepoint_t *glyph)
|
||||
{
|
||||
const Type *typed_obj = (const Type *) obj;
|
||||
return codepoint < 0x80 && typed_obj->get_glyph (codepoint, glyph);
|
||||
}
|
||||
|
||||
template <typename Type>
|
||||
HB_INTERNAL static bool get_glyph_from_macroman (const void *obj,
|
||||
hb_codepoint_t codepoint,
|
||||
hb_codepoint_t *glyph)
|
||||
{
|
||||
if (get_glyph_from_ascii<Type> (obj, codepoint, glyph))
|
||||
return true;
|
||||
|
||||
const Type *typed_obj = (const Type *) obj;
|
||||
unsigned c = unicode_to_macroman (codepoint);
|
||||
return c && typed_obj->get_glyph (c, glyph);
|
||||
}
|
||||
|
||||
private:
|
||||
hb_nonnull_ptr_t<const CmapSubtable> subtable;
|
||||
hb_nonnull_ptr_t<const CmapSubtableFormat14> subtable_uvs;
|
||||
@ -2035,28 +2112,6 @@ struct cmap
|
||||
return &(this+result.subtable);
|
||||
}
|
||||
|
||||
const EncodingRecord *find_encodingrec (unsigned int platform_id,
|
||||
unsigned int encoding_id) const
|
||||
{
|
||||
EncodingRecord key;
|
||||
key.platformID = platform_id;
|
||||
key.encodingID = encoding_id;
|
||||
|
||||
return encodingRecord.as_array ().bsearch (key);
|
||||
}
|
||||
|
||||
bool find_subtable (unsigned format) const
|
||||
{
|
||||
auto it =
|
||||
+ hb_iter (encodingRecord)
|
||||
| hb_map (&EncodingRecord::subtable)
|
||||
| hb_map (hb_add (this))
|
||||
| hb_filter ([&] (const CmapSubtable& _) { return _.u.format == format; })
|
||||
;
|
||||
|
||||
return it.len ();
|
||||
}
|
||||
|
||||
public:
|
||||
|
||||
bool sanitize (hb_sanitize_context_t *c) const
|
||||
|
@ -208,12 +208,12 @@ hb_ot_get_glyph_h_advances (hb_font_t* font, void* font_data,
|
||||
|
||||
#if !defined(HB_NO_VAR) && !defined(HB_NO_OT_FONT_ADVANCE_CACHE)
|
||||
const OT::HVAR &HVAR = *hmtx.var_table;
|
||||
const OT::VariationStore &varStore = &HVAR + HVAR.varStore;
|
||||
OT::VariationStore::cache_t *varStore_cache = font->num_coords * count >= 128 ? varStore.create_cache () : nullptr;
|
||||
const OT::ItemVariationStore &varStore = &HVAR + HVAR.varStore;
|
||||
OT::ItemVariationStore::cache_t *varStore_cache = font->num_coords * count >= 128 ? varStore.create_cache () : nullptr;
|
||||
|
||||
bool use_cache = font->num_coords;
|
||||
#else
|
||||
OT::VariationStore::cache_t *varStore_cache = nullptr;
|
||||
OT::ItemVariationStore::cache_t *varStore_cache = nullptr;
|
||||
bool use_cache = false;
|
||||
#endif
|
||||
|
||||
@ -277,7 +277,7 @@ hb_ot_get_glyph_h_advances (hb_font_t* font, void* font_data,
|
||||
}
|
||||
|
||||
#if !defined(HB_NO_VAR) && !defined(HB_NO_OT_FONT_ADVANCE_CACHE)
|
||||
OT::VariationStore::destroy_cache (varStore_cache);
|
||||
OT::ItemVariationStore::destroy_cache (varStore_cache);
|
||||
#endif
|
||||
|
||||
if (font->x_strength && !font->embolden_in_place)
|
||||
@ -313,10 +313,10 @@ hb_ot_get_glyph_v_advances (hb_font_t* font, void* font_data,
|
||||
{
|
||||
#if !defined(HB_NO_VAR) && !defined(HB_NO_OT_FONT_ADVANCE_CACHE)
|
||||
const OT::VVAR &VVAR = *vmtx.var_table;
|
||||
const OT::VariationStore &varStore = &VVAR + VVAR.varStore;
|
||||
OT::VariationStore::cache_t *varStore_cache = font->num_coords ? varStore.create_cache () : nullptr;
|
||||
const OT::ItemVariationStore &varStore = &VVAR + VVAR.varStore;
|
||||
OT::ItemVariationStore::cache_t *varStore_cache = font->num_coords ? varStore.create_cache () : nullptr;
|
||||
#else
|
||||
OT::VariationStore::cache_t *varStore_cache = nullptr;
|
||||
OT::ItemVariationStore::cache_t *varStore_cache = nullptr;
|
||||
#endif
|
||||
|
||||
for (unsigned int i = 0; i < count; i++)
|
||||
@ -327,7 +327,7 @@ hb_ot_get_glyph_v_advances (hb_font_t* font, void* font_data,
|
||||
}
|
||||
|
||||
#if !defined(HB_NO_VAR) && !defined(HB_NO_OT_FONT_ADVANCE_CACHE)
|
||||
OT::VariationStore::destroy_cache (varStore_cache);
|
||||
OT::ItemVariationStore::destroy_cache (varStore_cache);
|
||||
#endif
|
||||
}
|
||||
else
|
||||
|
@ -145,6 +145,29 @@ struct hmtxvmtx
|
||||
table->minTrailingBearing = min_rsb;
|
||||
table->maxExtent = max_extent;
|
||||
}
|
||||
|
||||
if (T::is_horizontal)
|
||||
{
|
||||
const auto &OS2 = *c->plan->source->table.OS2;
|
||||
if (OS2.has_data () &&
|
||||
table->ascender == OS2.sTypoAscender &&
|
||||
table->descender == OS2.sTypoDescender &&
|
||||
table->lineGap == OS2.sTypoLineGap)
|
||||
{
|
||||
table->ascender = static_cast<int> (roundf (OS2.sTypoAscender +
|
||||
MVAR.get_var (HB_OT_METRICS_TAG_HORIZONTAL_ASCENDER,
|
||||
c->plan->normalized_coords.arrayZ,
|
||||
c->plan->normalized_coords.length)));
|
||||
table->descender = static_cast<int> (roundf (OS2.sTypoDescender +
|
||||
MVAR.get_var (HB_OT_METRICS_TAG_HORIZONTAL_DESCENDER,
|
||||
c->plan->normalized_coords.arrayZ,
|
||||
c->plan->normalized_coords.length)));
|
||||
table->lineGap = static_cast<int> (roundf (OS2.sTypoLineGap +
|
||||
MVAR.get_var (HB_OT_METRICS_TAG_HORIZONTAL_LINE_GAP,
|
||||
c->plan->normalized_coords.arrayZ,
|
||||
c->plan->normalized_coords.length)));
|
||||
}
|
||||
}
|
||||
}
|
||||
#endif
|
||||
|
||||
@ -374,7 +397,7 @@ struct hmtxvmtx
|
||||
|
||||
unsigned get_advance_with_var_unscaled (hb_codepoint_t glyph,
|
||||
hb_font_t *font,
|
||||
VariationStore::cache_t *store_cache = nullptr) const
|
||||
ItemVariationStore::cache_t *store_cache = nullptr) const
|
||||
{
|
||||
unsigned int advance = get_advance_without_var_unscaled (glyph);
|
||||
|
||||
|
@ -46,6 +46,12 @@ struct BaseCoordFormat1
|
||||
return HB_DIRECTION_IS_HORIZONTAL (direction) ? font->em_scale_y (coordinate) : font->em_scale_x (coordinate);
|
||||
}
|
||||
|
||||
bool subset (hb_subset_context_t *c) const
|
||||
{
|
||||
TRACE_SUBSET (this);
|
||||
return_trace ((bool) c->serializer->embed (*this));
|
||||
}
|
||||
|
||||
bool sanitize (hb_sanitize_context_t *c) const
|
||||
{
|
||||
TRACE_SANITIZE (this);
|
||||
@ -67,6 +73,17 @@ struct BaseCoordFormat2
|
||||
return HB_DIRECTION_IS_HORIZONTAL (direction) ? font->em_scale_y (coordinate) : font->em_scale_x (coordinate);
|
||||
}
|
||||
|
||||
bool subset (hb_subset_context_t *c) const
|
||||
{
|
||||
TRACE_SUBSET (this);
|
||||
auto *out = c->serializer->embed (*this);
|
||||
if (unlikely (!out)) return_trace (false);
|
||||
|
||||
return_trace (c->serializer->check_assign (out->referenceGlyph,
|
||||
c->plan->glyph_map->get (referenceGlyph),
|
||||
HB_SERIALIZE_ERROR_INT_OVERFLOW));
|
||||
}
|
||||
|
||||
bool sanitize (hb_sanitize_context_t *c) const
|
||||
{
|
||||
TRACE_SANITIZE (this);
|
||||
@ -86,7 +103,7 @@ struct BaseCoordFormat2
|
||||
struct BaseCoordFormat3
|
||||
{
|
||||
hb_position_t get_coord (hb_font_t *font,
|
||||
const VariationStore &var_store,
|
||||
const ItemVariationStore &var_store,
|
||||
hb_direction_t direction) const
|
||||
{
|
||||
const Device &device = this+deviceTable;
|
||||
@ -96,6 +113,23 @@ struct BaseCoordFormat3
|
||||
: font->em_scale_x (coordinate) + device.get_x_delta (font, var_store);
|
||||
}
|
||||
|
||||
void collect_variation_indices (hb_set_t& varidx_set /* OUT */) const
|
||||
{
|
||||
unsigned varidx = (this+deviceTable).get_variation_index ();
|
||||
varidx_set.add (varidx);
|
||||
}
|
||||
|
||||
bool subset (hb_subset_context_t *c) const
|
||||
{
|
||||
TRACE_SUBSET (this);
|
||||
auto *out = c->serializer->embed (*this);
|
||||
if (unlikely (!out)) return_trace (false);
|
||||
|
||||
return_trace (out->deviceTable.serialize_copy (c->serializer, deviceTable,
|
||||
this, 0,
|
||||
hb_serialize_context_t::Head,
|
||||
&c->plan->base_variation_idx_map));
|
||||
}
|
||||
|
||||
bool sanitize (hb_sanitize_context_t *c) const
|
||||
{
|
||||
@ -120,7 +154,7 @@ struct BaseCoord
|
||||
bool has_data () const { return u.format; }
|
||||
|
||||
hb_position_t get_coord (hb_font_t *font,
|
||||
const VariationStore &var_store,
|
||||
const ItemVariationStore &var_store,
|
||||
hb_direction_t direction) const
|
||||
{
|
||||
switch (u.format) {
|
||||
@ -131,6 +165,27 @@ struct BaseCoord
|
||||
}
|
||||
}
|
||||
|
||||
void collect_variation_indices (hb_set_t& varidx_set /* OUT */) const
|
||||
{
|
||||
switch (u.format) {
|
||||
case 3: u.format3.collect_variation_indices (varidx_set);
|
||||
default:return;
|
||||
}
|
||||
}
|
||||
|
||||
template <typename context_t, typename ...Ts>
|
||||
typename context_t::return_t dispatch (context_t *c, Ts&&... ds) const
|
||||
{
|
||||
if (unlikely (!c->may_dispatch (this, &u.format))) return c->no_dispatch_return_value ();
|
||||
TRACE_DISPATCH (this, u.format);
|
||||
switch (u.format) {
|
||||
case 1: return_trace (c->dispatch (u.format1, std::forward<Ts> (ds)...));
|
||||
case 2: return_trace (c->dispatch (u.format2, std::forward<Ts> (ds)...));
|
||||
case 3: return_trace (c->dispatch (u.format3, std::forward<Ts> (ds)...));
|
||||
default:return_trace (c->default_return_value ());
|
||||
}
|
||||
}
|
||||
|
||||
bool sanitize (hb_sanitize_context_t *c) const
|
||||
{
|
||||
TRACE_SANITIZE (this);
|
||||
@ -161,12 +216,37 @@ struct FeatMinMaxRecord
|
||||
|
||||
bool has_data () const { return tag; }
|
||||
|
||||
hb_tag_t get_feature_tag () const { return tag; }
|
||||
|
||||
void get_min_max (const BaseCoord **min, const BaseCoord **max) const
|
||||
{
|
||||
if (likely (min)) *min = &(this+minCoord);
|
||||
if (likely (max)) *max = &(this+maxCoord);
|
||||
}
|
||||
|
||||
void collect_variation_indices (const hb_subset_plan_t* plan,
|
||||
const void *base,
|
||||
hb_set_t& varidx_set /* OUT */) const
|
||||
{
|
||||
if (!plan->layout_features.has (tag))
|
||||
return;
|
||||
|
||||
(base+minCoord).collect_variation_indices (varidx_set);
|
||||
(base+maxCoord).collect_variation_indices (varidx_set);
|
||||
}
|
||||
|
||||
bool subset (hb_subset_context_t *c,
|
||||
const void *base) const
|
||||
{
|
||||
TRACE_SUBSET (this);
|
||||
auto *out = c->serializer->embed (*this);
|
||||
if (unlikely (!out)) return_trace (false);
|
||||
if (!(out->minCoord.serialize_subset (c, minCoord, base)))
|
||||
return_trace (false);
|
||||
|
||||
return_trace (out->maxCoord.serialize_subset (c, maxCoord, base));
|
||||
}
|
||||
|
||||
bool sanitize (hb_sanitize_context_t *c, const void *base) const
|
||||
{
|
||||
TRACE_SANITIZE (this);
|
||||
@ -206,6 +286,39 @@ struct MinMax
|
||||
}
|
||||
}
|
||||
|
||||
void collect_variation_indices (const hb_subset_plan_t* plan,
|
||||
hb_set_t& varidx_set /* OUT */) const
|
||||
{
|
||||
(this+minCoord).collect_variation_indices (varidx_set);
|
||||
(this+maxCoord).collect_variation_indices (varidx_set);
|
||||
for (const FeatMinMaxRecord& record : featMinMaxRecords)
|
||||
record.collect_variation_indices (plan, this, varidx_set);
|
||||
}
|
||||
|
||||
bool subset (hb_subset_context_t *c) const
|
||||
{
|
||||
TRACE_SUBSET (this);
|
||||
auto *out = c->serializer->start_embed (*this);
|
||||
if (unlikely (!out || !c->serializer->extend_min (out))) return_trace (false);
|
||||
|
||||
if (!(out->minCoord.serialize_subset (c, minCoord, this)) ||
|
||||
!(out->maxCoord.serialize_subset (c, maxCoord, this)))
|
||||
return_trace (false);
|
||||
|
||||
unsigned len = 0;
|
||||
for (const FeatMinMaxRecord& _ : featMinMaxRecords)
|
||||
{
|
||||
hb_tag_t feature_tag = _.get_feature_tag ();
|
||||
if (!c->plan->layout_features.has (feature_tag))
|
||||
continue;
|
||||
|
||||
if (!_.subset (c, this)) return false;
|
||||
len++;
|
||||
}
|
||||
return_trace (c->serializer->check_assign (out->featMinMaxRecords.len, len,
|
||||
HB_SERIALIZE_ERROR_INT_OVERFLOW));
|
||||
}
|
||||
|
||||
bool sanitize (hb_sanitize_context_t *c) const
|
||||
{
|
||||
TRACE_SANITIZE (this);
|
||||
@ -240,6 +353,26 @@ struct BaseValues
|
||||
return this+baseCoords[baseline_tag_index];
|
||||
}
|
||||
|
||||
void collect_variation_indices (hb_set_t& varidx_set /* OUT */) const
|
||||
{
|
||||
for (const auto& _ : baseCoords)
|
||||
(this+_).collect_variation_indices (varidx_set);
|
||||
}
|
||||
|
||||
bool subset (hb_subset_context_t *c) const
|
||||
{
|
||||
TRACE_SUBSET (this);
|
||||
auto *out = c->serializer->start_embed (*this);
|
||||
if (unlikely (!out || !c->serializer->extend_min (out))) return_trace (false);
|
||||
out->defaultIndex = defaultIndex;
|
||||
|
||||
for (const auto& _ : baseCoords)
|
||||
if (!subset_offset_array (c, out->baseCoords, this) (_))
|
||||
return_trace (false);
|
||||
|
||||
return_trace (bool (out->baseCoords));
|
||||
}
|
||||
|
||||
bool sanitize (hb_sanitize_context_t *c) const
|
||||
{
|
||||
TRACE_SANITIZE (this);
|
||||
@ -270,6 +403,20 @@ struct BaseLangSysRecord
|
||||
|
||||
const MinMax &get_min_max () const { return this+minMax; }
|
||||
|
||||
void collect_variation_indices (const hb_subset_plan_t* plan,
|
||||
hb_set_t& varidx_set /* OUT */) const
|
||||
{ (this+minMax).collect_variation_indices (plan, varidx_set); }
|
||||
|
||||
bool subset (hb_subset_context_t *c,
|
||||
const void *base) const
|
||||
{
|
||||
TRACE_SUBSET (this);
|
||||
auto *out = c->serializer->embed (*this);
|
||||
if (unlikely (!out)) return_trace (false);
|
||||
|
||||
return_trace (out->minMax.serialize_subset (c, minMax, base));
|
||||
}
|
||||
|
||||
bool sanitize (hb_sanitize_context_t *c, const void *base) const
|
||||
{
|
||||
TRACE_SANITIZE (this);
|
||||
@ -300,6 +447,35 @@ struct BaseScript
|
||||
bool has_values () const { return baseValues; }
|
||||
bool has_min_max () const { return defaultMinMax; /* TODO What if only per-language is present? */ }
|
||||
|
||||
void collect_variation_indices (const hb_subset_plan_t* plan,
|
||||
hb_set_t& varidx_set /* OUT */) const
|
||||
{
|
||||
(this+baseValues).collect_variation_indices (varidx_set);
|
||||
(this+defaultMinMax).collect_variation_indices (plan, varidx_set);
|
||||
|
||||
for (const BaseLangSysRecord& _ : baseLangSysRecords)
|
||||
_.collect_variation_indices (plan, varidx_set);
|
||||
}
|
||||
|
||||
bool subset (hb_subset_context_t *c) const
|
||||
{
|
||||
TRACE_SUBSET (this);
|
||||
auto *out = c->serializer->start_embed (*this);
|
||||
if (unlikely (!out || !c->serializer->extend_min (out))) return_trace (false);
|
||||
|
||||
if (baseValues && !out->baseValues.serialize_subset (c, baseValues, this))
|
||||
return_trace (false);
|
||||
|
||||
if (defaultMinMax && !out->defaultMinMax.serialize_subset (c, defaultMinMax, this))
|
||||
return_trace (false);
|
||||
|
||||
for (const auto& _ : baseLangSysRecords)
|
||||
if (!_.subset (c, this)) return_trace (false);
|
||||
|
||||
return_trace (c->serializer->check_assign (out->baseLangSysRecords.len, baseLangSysRecords.len,
|
||||
HB_SERIALIZE_ERROR_INT_OVERFLOW));
|
||||
}
|
||||
|
||||
bool sanitize (hb_sanitize_context_t *c) const
|
||||
{
|
||||
TRACE_SANITIZE (this);
|
||||
@ -332,9 +508,31 @@ struct BaseScriptRecord
|
||||
|
||||
bool has_data () const { return baseScriptTag; }
|
||||
|
||||
hb_tag_t get_script_tag () const { return baseScriptTag; }
|
||||
|
||||
const BaseScript &get_base_script (const BaseScriptList *list) const
|
||||
{ return list+baseScript; }
|
||||
|
||||
void collect_variation_indices (const hb_subset_plan_t* plan,
|
||||
const void* list,
|
||||
hb_set_t& varidx_set /* OUT */) const
|
||||
{
|
||||
if (!plan->layout_scripts.has (baseScriptTag))
|
||||
return;
|
||||
|
||||
(list+baseScript).collect_variation_indices (plan, varidx_set);
|
||||
}
|
||||
|
||||
bool subset (hb_subset_context_t *c,
|
||||
const void *base) const
|
||||
{
|
||||
TRACE_SUBSET (this);
|
||||
auto *out = c->serializer->embed (*this);
|
||||
if (unlikely (!out)) return_trace (false);
|
||||
|
||||
return_trace (out->baseScript.serialize_subset (c, baseScript, base));
|
||||
}
|
||||
|
||||
bool sanitize (hb_sanitize_context_t *c, const void *base) const
|
||||
{
|
||||
TRACE_SANITIZE (this);
|
||||
@ -361,6 +559,33 @@ struct BaseScriptList
|
||||
return record->has_data () ? record->get_base_script (this) : Null (BaseScript);
|
||||
}
|
||||
|
||||
void collect_variation_indices (const hb_subset_plan_t* plan,
|
||||
hb_set_t& varidx_set /* OUT */) const
|
||||
{
|
||||
for (const BaseScriptRecord& _ : baseScriptRecords)
|
||||
_.collect_variation_indices (plan, this, varidx_set);
|
||||
}
|
||||
|
||||
bool subset (hb_subset_context_t *c) const
|
||||
{
|
||||
TRACE_SUBSET (this);
|
||||
auto *out = c->serializer->start_embed (*this);
|
||||
if (unlikely (!out || !c->serializer->extend_min (out))) return_trace (false);
|
||||
|
||||
unsigned len = 0;
|
||||
for (const BaseScriptRecord& _ : baseScriptRecords)
|
||||
{
|
||||
hb_tag_t script_tag = _.get_script_tag ();
|
||||
if (!c->plan->layout_scripts.has (script_tag))
|
||||
continue;
|
||||
|
||||
if (!_.subset (c, this)) return false;
|
||||
len++;
|
||||
}
|
||||
return_trace (c->serializer->check_assign (out->baseScriptRecords.len, len,
|
||||
HB_SERIALIZE_ERROR_INT_OVERFLOW));
|
||||
}
|
||||
|
||||
bool sanitize (hb_sanitize_context_t *c) const
|
||||
{
|
||||
TRACE_SANITIZE (this);
|
||||
@ -422,6 +647,20 @@ struct Axis
|
||||
return true;
|
||||
}
|
||||
|
||||
void collect_variation_indices (const hb_subset_plan_t* plan,
|
||||
hb_set_t& varidx_set /* OUT */) const
|
||||
{ (this+baseScriptList).collect_variation_indices (plan, varidx_set); }
|
||||
|
||||
bool subset (hb_subset_context_t *c) const
|
||||
{
|
||||
TRACE_SUBSET (this);
|
||||
auto *out = c->serializer->embed (*this);
|
||||
if (unlikely (!out)) return_trace (false);
|
||||
|
||||
out->baseTagList.serialize_copy (c->serializer, baseTagList, this);
|
||||
return_trace (out->baseScriptList.serialize_subset (c, baseScriptList, this));
|
||||
}
|
||||
|
||||
bool sanitize (hb_sanitize_context_t *c) const
|
||||
{
|
||||
TRACE_SANITIZE (this);
|
||||
@ -453,8 +692,41 @@ struct BASE
|
||||
const Axis &get_axis (hb_direction_t direction) const
|
||||
{ return HB_DIRECTION_IS_VERTICAL (direction) ? this+vAxis : this+hAxis; }
|
||||
|
||||
const VariationStore &get_var_store () const
|
||||
{ return version.to_int () < 0x00010001u ? Null (VariationStore) : this+varStore; }
|
||||
bool has_var_store () const
|
||||
{ return version.to_int () >= 0x00010001u && varStore != 0; }
|
||||
|
||||
const ItemVariationStore &get_var_store () const
|
||||
{ return version.to_int () < 0x00010001u ? Null (ItemVariationStore) : this+varStore; }
|
||||
|
||||
void collect_variation_indices (const hb_subset_plan_t* plan,
|
||||
hb_set_t& varidx_set /* OUT */) const
|
||||
{
|
||||
(this+hAxis).collect_variation_indices (plan, varidx_set);
|
||||
(this+vAxis).collect_variation_indices (plan, varidx_set);
|
||||
}
|
||||
|
||||
bool subset (hb_subset_context_t *c) const
|
||||
{
|
||||
TRACE_SUBSET (this);
|
||||
auto *out = c->serializer->start_embed (*this);
|
||||
if (unlikely (!out || !c->serializer->extend_min (out))) return_trace (false);
|
||||
|
||||
out->version = version;
|
||||
if (hAxis && !out->hAxis.serialize_subset (c, hAxis, this))
|
||||
return_trace (false);
|
||||
|
||||
if (vAxis && !out->vAxis.serialize_subset (c, vAxis, this))
|
||||
return_trace (false);
|
||||
|
||||
if (has_var_store ())
|
||||
{
|
||||
if (!c->serializer->allocate_size<Offset32To<ItemVariationStore>> (Offset32To<ItemVariationStore>::static_size))
|
||||
return_trace (false);
|
||||
return_trace (out->varStore.serialize_subset (c, varStore, this, c->plan->base_varstore_inner_maps.as_array ()));
|
||||
}
|
||||
|
||||
return_trace (true);
|
||||
}
|
||||
|
||||
bool get_baseline (hb_font_t *font,
|
||||
hb_tag_t baseline_tag,
|
||||
@ -487,7 +759,7 @@ struct BASE
|
||||
&min_coord, &max_coord))
|
||||
return false;
|
||||
|
||||
const VariationStore &var_store = get_var_store ();
|
||||
const ItemVariationStore &var_store = get_var_store ();
|
||||
if (likely (min && min_coord)) *min = min_coord->get_coord (font, var_store, direction);
|
||||
if (likely (max && max_coord)) *max = max_coord->get_coord (font, var_store, direction);
|
||||
return true;
|
||||
@ -510,7 +782,7 @@ struct BASE
|
||||
* of BASE table (may be NULL) */
|
||||
Offset16To<Axis>vAxis; /* Offset to vertical Axis table, from beginning
|
||||
* of BASE table (may be NULL) */
|
||||
Offset32To<VariationStore>
|
||||
Offset32To<ItemVariationStore>
|
||||
varStore; /* Offset to the table of Item Variation
|
||||
* Store--from beginning of BASE
|
||||
* header (may be NULL). Introduced
|
||||
|
@ -188,7 +188,7 @@ struct hb_subset_layout_context_t :
|
||||
unsigned lookup_index_count;
|
||||
};
|
||||
|
||||
struct VariationStore;
|
||||
struct ItemVariationStore;
|
||||
struct hb_collect_variation_indices_context_t :
|
||||
hb_dispatch_context_t<hb_collect_variation_indices_context_t>
|
||||
{
|
||||
@ -3036,7 +3036,7 @@ struct VarData
|
||||
DEFINE_SIZE_ARRAY (6, regionIndices);
|
||||
};
|
||||
|
||||
struct VariationStore
|
||||
struct ItemVariationStore
|
||||
{
|
||||
friend struct item_variations_t;
|
||||
using cache_t = VarRegionList::cache_t;
|
||||
@ -3141,7 +3141,7 @@ struct VariationStore
|
||||
}
|
||||
|
||||
bool serialize (hb_serialize_context_t *c,
|
||||
const VariationStore *src,
|
||||
const ItemVariationStore *src,
|
||||
const hb_array_t <const hb_inc_bimap_t> &inner_maps)
|
||||
{
|
||||
TRACE_SERIALIZE (this);
|
||||
@ -3197,7 +3197,7 @@ struct VariationStore
|
||||
return_trace (true);
|
||||
}
|
||||
|
||||
VariationStore *copy (hb_serialize_context_t *c) const
|
||||
ItemVariationStore *copy (hb_serialize_context_t *c) const
|
||||
{
|
||||
TRACE_SERIALIZE (this);
|
||||
auto *out = c->start_embed (this);
|
||||
@ -3227,7 +3227,7 @@ struct VariationStore
|
||||
return_trace (false);
|
||||
#endif
|
||||
|
||||
VariationStore *varstore_prime = c->serializer->start_embed<VariationStore> ();
|
||||
ItemVariationStore *varstore_prime = c->serializer->start_embed<ItemVariationStore> ();
|
||||
if (unlikely (!varstore_prime)) return_trace (false);
|
||||
|
||||
varstore_prime->serialize (c->serializer, this, inner_maps);
|
||||
@ -4030,13 +4030,13 @@ struct VariationDevice
|
||||
private:
|
||||
|
||||
hb_position_t get_x_delta (hb_font_t *font,
|
||||
const VariationStore &store,
|
||||
VariationStore::cache_t *store_cache = nullptr) const
|
||||
const ItemVariationStore &store,
|
||||
ItemVariationStore::cache_t *store_cache = nullptr) const
|
||||
{ return font->em_scalef_x (get_delta (font, store, store_cache)); }
|
||||
|
||||
hb_position_t get_y_delta (hb_font_t *font,
|
||||
const VariationStore &store,
|
||||
VariationStore::cache_t *store_cache = nullptr) const
|
||||
const ItemVariationStore &store,
|
||||
ItemVariationStore::cache_t *store_cache = nullptr) const
|
||||
{ return font->em_scalef_y (get_delta (font, store, store_cache)); }
|
||||
|
||||
VariationDevice* copy (hb_serialize_context_t *c,
|
||||
@ -4070,10 +4070,10 @@ struct VariationDevice
|
||||
private:
|
||||
|
||||
float get_delta (hb_font_t *font,
|
||||
const VariationStore &store,
|
||||
VariationStore::cache_t *store_cache = nullptr) const
|
||||
const ItemVariationStore &store,
|
||||
ItemVariationStore::cache_t *store_cache = nullptr) const
|
||||
{
|
||||
return store.get_delta (varIdx, font->coords, font->num_coords, (VariationStore::cache_t *) store_cache);
|
||||
return store.get_delta (varIdx, font->coords, font->num_coords, (ItemVariationStore::cache_t *) store_cache);
|
||||
}
|
||||
|
||||
protected:
|
||||
@ -4097,8 +4097,8 @@ struct DeviceHeader
|
||||
struct Device
|
||||
{
|
||||
hb_position_t get_x_delta (hb_font_t *font,
|
||||
const VariationStore &store=Null (VariationStore),
|
||||
VariationStore::cache_t *store_cache = nullptr) const
|
||||
const ItemVariationStore &store=Null (ItemVariationStore),
|
||||
ItemVariationStore::cache_t *store_cache = nullptr) const
|
||||
{
|
||||
switch (u.b.format)
|
||||
{
|
||||
@ -4115,8 +4115,8 @@ struct Device
|
||||
}
|
||||
}
|
||||
hb_position_t get_y_delta (hb_font_t *font,
|
||||
const VariationStore &store=Null (VariationStore),
|
||||
VariationStore::cache_t *store_cache = nullptr) const
|
||||
const ItemVariationStore &store=Null (ItemVariationStore),
|
||||
ItemVariationStore::cache_t *store_cache = nullptr) const
|
||||
{
|
||||
switch (u.b.format)
|
||||
{
|
||||
|
@ -708,8 +708,8 @@ struct hb_ot_apply_context_t :
|
||||
recurse_func_t recurse_func = nullptr;
|
||||
const GDEF &gdef;
|
||||
const GDEF::accelerator_t &gdef_accel;
|
||||
const VariationStore &var_store;
|
||||
VariationStore::cache_t *var_store_cache;
|
||||
const ItemVariationStore &var_store;
|
||||
ItemVariationStore::cache_t *var_store_cache;
|
||||
hb_set_digest_t digest;
|
||||
|
||||
hb_direction_t direction;
|
||||
@ -766,7 +766,7 @@ struct hb_ot_apply_context_t :
|
||||
~hb_ot_apply_context_t ()
|
||||
{
|
||||
#ifndef HB_NO_VAR
|
||||
VariationStore::destroy_cache (var_store_cache);
|
||||
ItemVariationStore::destroy_cache (var_store_cache);
|
||||
#endif
|
||||
}
|
||||
|
||||
|
@ -2127,7 +2127,7 @@ hb_ot_layout_get_font_extents (hb_font_t *font,
|
||||
hb_tag_t language_tag,
|
||||
hb_font_extents_t *extents)
|
||||
{
|
||||
hb_position_t min, max;
|
||||
hb_position_t min = 0, max = 0;
|
||||
if (font->face->table.BASE->get_min_max (font, direction, script_tag, language_tag, HB_TAG_NONE,
|
||||
&min, &max))
|
||||
{
|
||||
|
@ -344,27 +344,20 @@ struct MathKern
const MathValueRecord* kernValue = mathValueRecordsZ.arrayZ + heightCount;
int sign = font->y_scale < 0 ? -1 : +1;

/* The description of the MathKern table is a ambiguous, but interpreting
* "between the two heights found at those indexes" for 0 < i < len as
*
* correctionHeight[i-1] < correction_height <= correctionHeight[i]
*
* makes the result consistent with the limit cases and we can just use the
* binary search algorithm of std::upper_bound:
/* According to OpenType spec (v1.9), except for the boundary cases, the index
* chosen for kern value should be i such that
* correctionHeight[i-1] <= correction_height < correctionHeight[i]
* We can use the binary search algorithm of std::upper_bound(). Or, we can
* use the internal hb_bsearch_impl.
*/
unsigned int i = 0;
unsigned int count = heightCount;
while (count > 0)
{
unsigned int half = count / 2;
hb_position_t height = correctionHeight[i + half].get_y_value (font, this);
if (sign * height < sign * correction_height)
{
i += half + 1;
count -= half + 1;
} else
count = half;
}
unsigned int pos;
auto cmp = +[](const void* key, const void* p,
int sign, hb_font_t* font, const MathKern* mathKern) -> int {
return sign * *(hb_position_t*)key - sign * ((MathValueRecord*)p)->get_y_value(font, mathKern);
};
unsigned int i = hb_bsearch_impl(&pos, correction_height, correctionHeight,
heightCount, MathValueRecord::static_size,
cmp, sign, font, this) ? pos + 1 : pos;
return kernValue[i].get_x_value (font, this);
}
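
Note: the rewritten lookup above is equivalent to an upper-bound search. A minimal standalone sketch of the index-selection rule (plain C++, ignoring the y_scale sign handling; the height and kern arrays are made-up values, not data from any real font):

#include <algorithm>
#include <cstdio>
#include <vector>

// Sketch of the updated MATH kerning rule: pick index i such that
// correctionHeight[i-1] <= h < correctionHeight[i]; the kern-value array
// has one more entry than the correction-height array.
int main ()
{
  std::vector<int> correction_heights = {100, 250, 400};   // hypothetical values
  std::vector<int> kern_values        = {10, 20, 30, 40};  // heightCount + 1 entries

  for (int h : {50, 100, 300, 400, 500})
  {
    // std::upper_bound returns the first height strictly greater than h,
    // which matches correctionHeight[i-1] <= h < correctionHeight[i].
    auto it = std::upper_bound (correction_heights.begin (), correction_heights.end (), h);
    unsigned i = (unsigned) (it - correction_heights.begin ());
    std::printf ("height %d -> kern index %u -> kern %d\n", h, i, kern_values[i]);
  }
  return 0;
}
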
@ -560,9 +560,9 @@ apply_stch (const hb_ot_shape_plan_t *plan HB_UNUSED,
|
||||
|
||||
DEBUG_MSG (ARABIC, nullptr, "%s stretch at (%u,%u,%u)",
|
||||
step == MEASURE ? "measuring" : "cutting", context, start, end);
|
||||
DEBUG_MSG (ARABIC, nullptr, "rest of word: count=%u width %d", start - context, w_total);
|
||||
DEBUG_MSG (ARABIC, nullptr, "fixed tiles: count=%d width=%d", n_fixed, w_fixed);
|
||||
DEBUG_MSG (ARABIC, nullptr, "repeating tiles: count=%d width=%d", n_repeating, w_repeating);
|
||||
DEBUG_MSG (ARABIC, nullptr, "rest of word: count=%u width %" PRId32, start - context, w_total);
|
||||
DEBUG_MSG (ARABIC, nullptr, "fixed tiles: count=%d width=%" PRId32, n_fixed, w_fixed);
|
||||
DEBUG_MSG (ARABIC, nullptr, "repeating tiles: count=%d width=%" PRId32, n_repeating, w_repeating);
|
||||
|
||||
/* Number of additional times to repeat each repeating tile. */
|
||||
int n_copies = 0;
|
||||
@ -602,7 +602,7 @@ apply_stch (const hb_ot_shape_plan_t *plan HB_UNUSED,
|
||||
if (info[k - 1].arabic_shaping_action() == STCH_REPEATING)
|
||||
repeat += n_copies;
|
||||
|
||||
DEBUG_MSG (ARABIC, nullptr, "appending %u copies of glyph %u; j=%u",
|
||||
DEBUG_MSG (ARABIC, nullptr, "appending %u copies of glyph %" PRIu32 "; j=%u",
|
||||
repeat, info[k - 1].codepoint, j);
|
||||
pos[k - 1].x_advance = 0;
|
||||
for (unsigned int n = 0; n < repeat; n++)
|
||||
|
@ -349,7 +349,7 @@ struct AxisValueFormat4
|
||||
|
||||
struct AxisValue
|
||||
{
|
||||
bool get_value (unsigned int axis_index) const
|
||||
float get_value (unsigned int axis_index) const
|
||||
{
|
||||
switch (u.format)
|
||||
{
|
||||
@ -357,7 +357,7 @@ struct AxisValue
|
||||
case 2: return u.format2.get_value ();
|
||||
case 3: return u.format3.get_value ();
|
||||
case 4: return u.format4.get_axis_record (axis_index).get_value ();
|
||||
default:return 0;
|
||||
default:return 0.f;
|
||||
}
|
||||
}
|
||||
|
||||
@ -485,7 +485,7 @@ struct STAT
|
||||
hb_array_t<const Offset16To<AxisValue>> axis_values = get_axis_value_offsets ();
|
||||
for (unsigned int i = 0; i < axis_values.length; i++)
|
||||
{
|
||||
const AxisValue& axis_value = this+axis_values[i];
|
||||
const AxisValue& axis_value = this+offsetToAxisValueOffsets+axis_values[i];
|
||||
if (axis_value.get_axis_index () == axis_index)
|
||||
{
|
||||
if (value)
|
||||
|
@ -6,8 +6,8 @@
|
||||
*
|
||||
* on files with these headers:
|
||||
*
|
||||
* <meta name="updated_at" content="2022-09-30 11:47 PM" />
|
||||
* File-Date: 2023-08-02
|
||||
* <meta name="updated_at" content="2023-09-30 01:21 AM" />
|
||||
* File-Date: 2024-03-07
|
||||
*/
|
||||
|
||||
#ifndef HB_OT_TAG_TABLE_HH
|
||||
@ -31,7 +31,7 @@ static const LangTag ot_languages2[] = {
|
||||
{HB_TAG('b','i',' ',' '), HB_TAG('B','I','S',' ')}, /* Bislama */
|
||||
{HB_TAG('b','i',' ',' '), HB_TAG('C','P','P',' ')}, /* Bislama -> Creoles */
|
||||
{HB_TAG('b','m',' ',' '), HB_TAG('B','M','B',' ')}, /* Bambara (Bamanankan) */
|
||||
{HB_TAG('b','n',' ',' '), HB_TAG('B','E','N',' ')}, /* Bengali */
|
||||
{HB_TAG('b','n',' ',' '), HB_TAG('B','E','N',' ')}, /* Bangla */
|
||||
{HB_TAG('b','o',' ',' '), HB_TAG('T','I','B',' ')}, /* Tibetan */
|
||||
{HB_TAG('b','r',' ',' '), HB_TAG('B','R','E',' ')}, /* Breton */
|
||||
{HB_TAG('b','s',' ',' '), HB_TAG('B','O','S',' ')}, /* Bosnian */
|
||||
@ -64,7 +64,7 @@ static const LangTag ot_languages2[] = {
|
||||
{HB_TAG('f','r',' ',' '), HB_TAG('F','R','A',' ')}, /* French */
|
||||
{HB_TAG('f','y',' ',' '), HB_TAG('F','R','I',' ')}, /* Western Frisian -> Frisian */
|
||||
{HB_TAG('g','a',' ',' '), HB_TAG('I','R','I',' ')}, /* Irish */
|
||||
{HB_TAG('g','d',' ',' '), HB_TAG('G','A','E',' ')}, /* Scottish Gaelic (Gaelic) */
|
||||
{HB_TAG('g','d',' ',' '), HB_TAG('G','A','E',' ')}, /* Scottish Gaelic */
|
||||
{HB_TAG('g','l',' ',' '), HB_TAG('G','A','L',' ')}, /* Galician */
|
||||
{HB_TAG('g','n',' ',' '), HB_TAG('G','U','A',' ')}, /* Guarani [macrolanguage] */
|
||||
{HB_TAG('g','u',' ',' '), HB_TAG('G','U','J',' ')}, /* Gujarati */
|
||||
@ -132,7 +132,7 @@ static const LangTag ot_languages2[] = {
|
||||
{HB_TAG('m','l',' ',' '), HB_TAG('M','A','L',' ')}, /* Malayalam -> Malayalam Traditional */
|
||||
{HB_TAG('m','l',' ',' '), HB_TAG('M','L','R',' ')}, /* Malayalam -> Malayalam Reformed */
|
||||
{HB_TAG('m','n',' ',' '), HB_TAG('M','N','G',' ')}, /* Mongolian [macrolanguage] */
|
||||
{HB_TAG('m','o',' ',' '), HB_TAG('M','O','L',' ')}, /* Moldavian (retired code) */
|
||||
{HB_TAG('m','o',' ',' '), HB_TAG('M','O','L',' ')}, /* Moldavian (retired code) -> Romanian (Moldova) */
|
||||
{HB_TAG('m','o',' ',' '), HB_TAG('R','O','M',' ')}, /* Moldavian (retired code) -> Romanian */
|
||||
{HB_TAG('m','r',' ',' '), HB_TAG('M','A','R',' ')}, /* Marathi */
|
||||
{HB_TAG('m','s',' ',' '), HB_TAG('M','L','Y',' ')}, /* Malay [macrolanguage] */
|
||||
@ -153,7 +153,7 @@ static const LangTag ot_languages2[] = {
|
||||
{HB_TAG('o','c',' ',' '), HB_TAG('O','C','I',' ')}, /* Occitan (post 1500) */
|
||||
{HB_TAG('o','j',' ',' '), HB_TAG('O','J','B',' ')}, /* Ojibwa [macrolanguage] -> Ojibway */
|
||||
{HB_TAG('o','m',' ',' '), HB_TAG('O','R','O',' ')}, /* Oromo [macrolanguage] */
|
||||
{HB_TAG('o','r',' ',' '), HB_TAG('O','R','I',' ')}, /* Odia (formerly Oriya) [macrolanguage] */
|
||||
{HB_TAG('o','r',' ',' '), HB_TAG('O','R','I',' ')}, /* Odia [macrolanguage] */
|
||||
{HB_TAG('o','s',' ',' '), HB_TAG('O','S','S',' ')}, /* Ossetian */
|
||||
{HB_TAG('p','a',' ',' '), HB_TAG('P','A','N',' ')}, /* Punjabi */
|
||||
{HB_TAG('p','i',' ',' '), HB_TAG('P','A','L',' ')}, /* Pali */
|
||||
@ -166,7 +166,7 @@ static const LangTag ot_languages2[] = {
|
||||
{HB_TAG('r','o',' ',' '), HB_TAG('R','O','M',' ')}, /* Romanian */
|
||||
{HB_TAG('r','u',' ',' '), HB_TAG('R','U','S',' ')}, /* Russian */
|
||||
{HB_TAG('r','w',' ',' '), HB_TAG('R','U','A',' ')}, /* Kinyarwanda */
|
||||
{HB_TAG('s','a',' ',' '), HB_TAG('S','A','N',' ')}, /* Sanskrit */
|
||||
{HB_TAG('s','a',' ',' '), HB_TAG('S','A','N',' ')}, /* Sanskrit [macrolanguage] */
|
||||
{HB_TAG('s','c',' ',' '), HB_TAG('S','R','D',' ')}, /* Sardinian [macrolanguage] */
|
||||
{HB_TAG('s','d',' ',' '), HB_TAG('S','N','D',' ')}, /* Sindhi */
|
||||
{HB_TAG('s','e',' ',' '), HB_TAG('N','S','M',' ')}, /* Northern Sami */
|
||||
@ -465,6 +465,7 @@ static const LangTag ot_languages3[] = {
|
||||
{HB_TAG('c','l','d',' '), HB_TAG('S','Y','R',' ')}, /* Chaldean Neo-Aramaic -> Syriac */
|
||||
{HB_TAG('c','l','e',' '), HB_TAG('C','C','H','N')}, /* Lealao Chinantec -> Chinantec */
|
||||
{HB_TAG('c','l','j',' '), HB_TAG('Q','I','N',' ')}, /* Laitu Chin -> Chin */
|
||||
{HB_TAG('c','l','s',' '), HB_TAG('S','A','N',' ')}, /* Classical Sanskrit -> Sanskrit */
|
||||
{HB_TAG('c','l','t',' '), HB_TAG('Q','I','N',' ')}, /* Lautu Chin -> Chin */
|
||||
{HB_TAG('c','m','n',' '), HB_TAG('Z','H','S',' ')}, /* Mandarin Chinese -> Chinese, Simplified */
|
||||
{HB_TAG('c','m','r',' '), HB_TAG('Q','I','N',' ')}, /* Mro-Khimi Chin -> Chin */
|
||||
@ -637,7 +638,7 @@ static const LangTag ot_languages3[] = {
|
||||
{HB_TAG('g','a','a',' '), HB_TAG('G','A','D',' ')}, /* Ga */
|
||||
{HB_TAG('g','a','c',' '), HB_TAG('C','P','P',' ')}, /* Mixed Great Andamanese -> Creoles */
|
||||
{HB_TAG('g','a','d',' '), HB_TAG_NONE }, /* Gaddang != Ga */
|
||||
{HB_TAG('g','a','e',' '), HB_TAG_NONE }, /* Guarequena != Scottish Gaelic (Gaelic) */
|
||||
{HB_TAG('g','a','e',' '), HB_TAG_NONE }, /* Guarequena != Scottish Gaelic */
|
||||
/*{HB_TAG('g','a','g',' '), HB_TAG('G','A','G',' ')},*/ /* Gagauz */
|
||||
{HB_TAG('g','a','l',' '), HB_TAG_NONE }, /* Galolen != Galician */
|
||||
{HB_TAG('g','a','n',' '), HB_TAG('Z','H','S',' ')}, /* Gan Chinese -> Chinese, Simplified */
|
||||
@ -1160,7 +1161,7 @@ static const LangTag ot_languages3[] = {
|
||||
{HB_TAG('o','r','o',' '), HB_TAG_NONE }, /* Orokolo != Oromo */
|
||||
{HB_TAG('o','r','r',' '), HB_TAG('I','J','O',' ')}, /* Oruma -> Ijo */
|
||||
{HB_TAG('o','r','s',' '), HB_TAG('M','L','Y',' ')}, /* Orang Seletar -> Malay */
|
||||
{HB_TAG('o','r','y',' '), HB_TAG('O','R','I',' ')}, /* Odia (formerly Oriya) */
|
||||
{HB_TAG('o','r','y',' '), HB_TAG('O','R','I',' ')}, /* Odia */
|
||||
{HB_TAG('o','t','w',' '), HB_TAG('O','J','B',' ')}, /* Ottawa -> Ojibway */
|
||||
{HB_TAG('o','u','a',' '), HB_TAG('B','B','R',' ')}, /* Tagargrent -> Berber */
|
||||
{HB_TAG('p','a','a',' '), HB_TAG_NONE }, /* Papuan [collection] != Palestinian Aramaic */
|
||||
@ -1395,7 +1396,7 @@ static const LangTag ot_languages3[] = {
|
||||
/*{HB_TAG('s','n','k',' '), HB_TAG('S','N','K',' ')},*/ /* Soninke */
|
||||
{HB_TAG('s','o','g',' '), HB_TAG_NONE }, /* Sogdian != Sodo Gurage */
|
||||
/*{HB_TAG('s','o','p',' '), HB_TAG('S','O','P',' ')},*/ /* Songe */
|
||||
{HB_TAG('s','p','v',' '), HB_TAG('O','R','I',' ')}, /* Sambalpuri -> Odia (formerly Oriya) */
|
||||
{HB_TAG('s','p','v',' '), HB_TAG('O','R','I',' ')}, /* Sambalpuri -> Odia */
|
||||
{HB_TAG('s','p','y',' '), HB_TAG('K','A','L',' ')}, /* Sabaot -> Kalenjin */
|
||||
{HB_TAG('s','r','b',' '), HB_TAG_NONE }, /* Sora != Serbian */
|
||||
{HB_TAG('s','r','c',' '), HB_TAG('S','R','D',' ')}, /* Logudorese Sardinian -> Sardinian */
|
||||
@ -1533,6 +1534,7 @@ static const LangTag ot_languages3[] = {
|
||||
{HB_TAG('v','l','s',' '), HB_TAG('F','L','E',' ')}, /* Vlaams -> Dutch (Flemish) */
|
||||
{HB_TAG('v','m','w',' '), HB_TAG('M','A','K',' ')}, /* Makhuwa */
|
||||
/*{HB_TAG('v','r','o',' '), HB_TAG('V','R','O',' ')},*/ /* Võro */
|
||||
{HB_TAG('v','s','n',' '), HB_TAG('S','A','N',' ')}, /* Vedic Sanskrit -> Sanskrit */
|
||||
{HB_TAG('w','a','g',' '), HB_TAG_NONE }, /* Wa'ema != Wagdi */
|
||||
/*{HB_TAG('w','a','r',' '), HB_TAG('W','A','R',' ')},*/ /* Waray (Philippines) -> Waray-Waray */
|
||||
{HB_TAG('w','b','m',' '), HB_TAG('W','A',' ',' ')}, /* Wa */
|
||||
@ -2643,7 +2645,7 @@ out:
|
||||
/* Romanian; Moldova */
|
||||
unsigned int i;
|
||||
hb_tag_t possible_tags[] = {
|
||||
HB_TAG('M','O','L',' '), /* Moldavian */
|
||||
HB_TAG('M','O','L',' '), /* Romanian (Moldova) */
|
||||
HB_TAG('R','O','M',' '), /* Romanian */
|
||||
};
|
||||
for (i = 0; i < 2 && i < *count; i++)
|
||||
@ -2920,7 +2922,7 @@ hb_ot_ambiguous_tag_to_language (hb_tag_t tag)
|
||||
return hb_language_from_string ("mn", -1); /* Mongolian [macrolanguage] */
|
||||
case HB_TAG('M','N','K',' '): /* Maninka */
|
||||
return hb_language_from_string ("man", -1); /* Mandingo [macrolanguage] */
|
||||
case HB_TAG('M','O','L',' '): /* Moldavian */
|
||||
case HB_TAG('M','O','L',' '): /* Romanian (Moldova) */
|
||||
return hb_language_from_string ("ro-MD", -1); /* Romanian; Moldova */
|
||||
case HB_TAG('M','O','N','T'): /* Thailand Mon */
|
||||
return hb_language_from_string ("mnw-TH", -1); /* Mon; Thailand */
|
||||
@ -2958,6 +2960,8 @@ hb_ot_ambiguous_tag_to_language (hb_tag_t tag)
|
||||
return hb_language_from_string ("ro", -1); /* Romanian */
|
||||
case HB_TAG('R','O','Y',' '): /* Romany */
|
||||
return hb_language_from_string ("rom", -1); /* Romany [macrolanguage] */
|
||||
case HB_TAG('S','A','N',' '): /* Sanskrit */
|
||||
return hb_language_from_string ("sa", -1); /* Sanskrit [macrolanguage] */
|
||||
case HB_TAG('S','Q','I',' '): /* Albanian */
|
||||
return hb_language_from_string ("sq", -1); /* Albanian [macrolanguage] */
|
||||
case HB_TAG('S','R','B',' '): /* Serbian */
|
||||
|
@ -547,7 +547,7 @@ hb_ot_tag_to_language (hb_tag_t tag)
buf[3] = '-';
str += 4;
}
snprintf (str, 16, "x-hbot-%08x", tag);
snprintf (str, 16, "x-hbot-%08" PRIx32, tag);
return hb_language_from_string (&*buf, -1);
}
}
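
For reference, a small standalone sketch of the private-use fallback string produced by the patched call (plain C++, not HarfBuzz code; the tag value is made up):

#include <cinttypes>
#include <cstdint>
#include <cstdio>

int main ()
{
  uint32_t tag = ('A' << 24) | ('B' << 16) | ('C' << 8) | 'D';  // hypothetical unmapped tag
  char buf[16];
  // Same idea as the patched call: PRIx32 keeps the format string correct
  // for a uint32_t on every platform.
  std::snprintf (buf, sizeof (buf), "x-hbot-%08" PRIx32, tag);
  std::printf ("%s\n", buf);  // prints "x-hbot-41424344"
  return 0;
}
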
@ -57,7 +57,7 @@ struct avarV2Tail
|
||||
|
||||
protected:
|
||||
Offset32To<DeltaSetIndexMap> varIdxMap; /* Offset from the beginning of 'avar' table. */
|
||||
Offset32To<VariationStore> varStore; /* Offset from the beginning of 'avar' table. */
|
||||
Offset32To<ItemVariationStore> varStore; /* Offset from the beginning of 'avar' table. */
|
||||
|
||||
public:
|
||||
DEFINE_SIZE_STATIC (8);
|
||||
@ -230,7 +230,7 @@ struct SegmentMaps : Array16Of<AxisValueMap>
|
||||
* duplicates here */
|
||||
if (mapping.must_include ())
|
||||
continue;
|
||||
value_mappings.push (std::move (mapping));
|
||||
value_mappings.push (mapping);
|
||||
}
|
||||
|
||||
AxisValueMap m;
|
||||
@ -343,7 +343,7 @@ struct avar
|
||||
for (unsigned i = 0; i < coords_length; i++)
|
||||
coords[i] = out[i];
|
||||
|
||||
OT::VariationStore::destroy_cache (var_store_cache);
|
||||
OT::ItemVariationStore::destroy_cache (var_store_cache);
|
||||
#endif
|
||||
}
|
||||
|
||||
|
@ -28,6 +28,7 @@
|
||||
|
||||
#include "hb-ot-layout-common.hh"
|
||||
#include "hb-priority-queue.hh"
|
||||
#include "hb-subset-instancer-iup.hh"
|
||||
|
||||
|
||||
namespace OT {
|
||||
@ -221,9 +222,9 @@ struct DeltaSetIndexMap
|
||||
};
|
||||
|
||||
|
||||
struct VarStoreInstancer
|
||||
struct ItemVarStoreInstancer
|
||||
{
|
||||
VarStoreInstancer (const VariationStore *varStore,
|
||||
ItemVarStoreInstancer (const ItemVariationStore *varStore,
|
||||
const DeltaSetIndexMap *varIdxMap,
|
||||
hb_array_t<int> coords) :
|
||||
varStore (varStore), varIdxMap (varIdxMap), coords (coords) {}
|
||||
@ -235,7 +236,7 @@ struct VarStoreInstancer
|
||||
float operator() (uint32_t varIdx, unsigned short offset = 0) const
|
||||
{ return coords ? varStore->get_delta (varIdxMap ? varIdxMap->map (VarIdx::add (varIdx, offset)) : varIdx + offset, coords) : 0; }
|
||||
|
||||
const VariationStore *varStore;
|
||||
const ItemVariationStore *varStore;
|
||||
const DeltaSetIndexMap *varIdxMap;
|
||||
hb_array_t<int> coords;
|
||||
};
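
The instancer above resolves a delta by optionally remapping the 32-bit variation index through a DeltaSetIndexMap and then querying the ItemVariationStore. A rough standalone sketch of the outer/inner split such a VarIdx encodes (the helper is illustrative, not HarfBuzz API):

#include <cstdint>
#include <cstdio>

// A VarIdx packs an outer index (which ItemVariationData subtable) in the
// high 16 bits and an inner index (row within that subtable) in the low 16 bits.
struct var_idx_parts { uint16_t outer; uint16_t inner; };

static var_idx_parts split_var_idx (uint32_t var_idx)
{
  return { (uint16_t) (var_idx >> 16), (uint16_t) (var_idx & 0xFFFFu) };
}

int main ()
{
  uint32_t var_idx = 0x00030007u;  // hypothetical value
  var_idx_parts p = split_var_idx (var_idx);
  std::printf ("outer=%u inner=%u\n", p.outer, p.inner);  // outer=3 inner=7
  return 0;
}
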
@ -460,7 +461,7 @@ struct tuple_delta_t
tuple_delta_t () = default;
tuple_delta_t (const tuple_delta_t& o) = default;

friend void swap (tuple_delta_t& a, tuple_delta_t& b)
friend void swap (tuple_delta_t& a, tuple_delta_t& b) noexcept
{
hb_swap (a.axis_tuples, b.axis_tuples);
hb_swap (a.indices, b.indices);
@ -471,10 +472,10 @@ struct tuple_delta_t
hb_swap (a.compiled_peak_coords, b.compiled_peak_coords);
}

tuple_delta_t (tuple_delta_t&& o) : tuple_delta_t ()
tuple_delta_t (tuple_delta_t&& o) noexcept : tuple_delta_t ()
{ hb_swap (*this, o); }

tuple_delta_t& operator = (tuple_delta_t&& o)
tuple_delta_t& operator = (tuple_delta_t&& o) noexcept
{
hb_swap (*this, o);
return *this;
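
Marking swap and the move operations noexcept is what lets vector-like containers move elements on reallocation instead of copying them. A small generic illustration of the same pattern, outside HarfBuzz:

#include <type_traits>
#include <utility>
#include <vector>

struct payload
{
  std::vector<int> data;

  payload () = default;
  payload (payload&& o) noexcept : payload () { swap (*this, o); }
  payload& operator= (payload&& o) noexcept { swap (*this, o); return *this; }

  // Same shape as the tuple_delta_t change: a noexcept friend swap the moves build on.
  friend void swap (payload& a, payload& b) noexcept { std::swap (a.data, b.data); }
};

int main ()
{
  // Because the move constructor is noexcept, growth moves elements instead of copying.
  static_assert (std::is_nothrow_move_constructible<payload>::value, "moves are noexcept");
  std::vector<payload> v;
  v.resize (3);
  return 0;
}
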
@ -609,7 +610,9 @@ struct tuple_delta_t
|
||||
const hb_map_t& axes_old_index_tag_map,
|
||||
const hb_hashmap_t<const hb_vector_t<char>*, unsigned>* shared_tuples_idx_map)
|
||||
{
|
||||
if (!compiled_deltas) return false;
|
||||
/* compiled_deltas could be empty after iup delta optimization, we can skip
|
||||
* compiling this tuple and return true */
|
||||
if (!compiled_deltas) return true;
|
||||
|
||||
unsigned cur_axis_count = axes_index_map.get_population ();
|
||||
/* allocate enough memory: 1 peak + 2 intermediate coords + fixed header size */
|
||||
@ -723,22 +726,28 @@ struct tuple_delta_t
|
||||
}
|
||||
|
||||
bool compile_deltas ()
|
||||
{ return compile_deltas (indices, deltas_x, deltas_y, compiled_deltas); }
|
||||
|
||||
bool compile_deltas (const hb_vector_t<bool> &point_indices,
|
||||
const hb_vector_t<float> &x_deltas,
|
||||
const hb_vector_t<float> &y_deltas,
|
||||
hb_vector_t<char> &compiled_deltas /* OUT */)
|
||||
{
|
||||
hb_vector_t<int> rounded_deltas;
|
||||
if (unlikely (!rounded_deltas.alloc (indices.length)))
|
||||
if (unlikely (!rounded_deltas.alloc (point_indices.length)))
|
||||
return false;
|
||||
|
||||
for (unsigned i = 0; i < indices.length; i++)
|
||||
for (unsigned i = 0; i < point_indices.length; i++)
|
||||
{
|
||||
if (!indices[i]) continue;
|
||||
int rounded_delta = (int) roundf (deltas_x[i]);
|
||||
if (!point_indices[i]) continue;
|
||||
int rounded_delta = (int) roundf (x_deltas.arrayZ[i]);
|
||||
rounded_deltas.push (rounded_delta);
|
||||
}
|
||||
|
||||
if (!rounded_deltas) return false;
|
||||
if (!rounded_deltas) return true;
|
||||
/* allocate enough memory: 3 * num_deltas */
|
||||
unsigned alloc_len = 3 * rounded_deltas.length;
|
||||
if (deltas_y)
|
||||
if (y_deltas)
|
||||
alloc_len *= 2;
|
||||
|
||||
if (unlikely (!compiled_deltas.resize (alloc_len))) return false;
|
||||
@ -746,14 +755,14 @@ struct tuple_delta_t
|
||||
unsigned i = 0;
|
||||
unsigned encoded_len = encode_delta_run (i, compiled_deltas.as_array (), rounded_deltas);
|
||||
|
||||
if (deltas_y)
|
||||
if (y_deltas)
|
||||
{
|
||||
/* reuse the rounded_deltas vector, check that deltas_y have the same num of deltas as deltas_x */
|
||||
/* reuse the rounded_deltas vector, check that y_deltas have the same num of deltas as x_deltas */
|
||||
unsigned j = 0;
|
||||
for (unsigned idx = 0; idx < indices.length; idx++)
|
||||
for (unsigned idx = 0; idx < point_indices.length; idx++)
|
||||
{
|
||||
if (!indices[idx]) continue;
|
||||
int rounded_delta = (int) roundf (deltas_y[idx]);
|
||||
if (!point_indices[idx]) continue;
|
||||
int rounded_delta = (int) roundf (y_deltas.arrayZ[idx]);
|
||||
|
||||
if (j >= rounded_deltas.length) return false;
|
||||
|
||||
@ -761,7 +770,7 @@ struct tuple_delta_t
|
||||
}
|
||||
|
||||
if (j != rounded_deltas.length) return false;
|
||||
/* reset i because we reuse rounded_deltas for deltas_y */
|
||||
/* reset i because we reuse rounded_deltas for y_deltas */
|
||||
i = 0;
|
||||
encoded_len += encode_delta_run (i, compiled_deltas.as_array ().sub_array (encoded_len), rounded_deltas);
|
||||
}
|
||||
@ -1020,6 +1029,171 @@ struct tuple_delta_t
|
||||
return true;
|
||||
}
|
||||
|
||||
bool optimize (const contour_point_vector_t& contour_points,
|
||||
bool is_composite,
|
||||
float tolerance = 0.5f)
|
||||
{
|
||||
unsigned count = contour_points.length;
|
||||
if (deltas_x.length != count ||
|
||||
deltas_y.length != count)
|
||||
return false;
|
||||
|
||||
hb_vector_t<bool> opt_indices;
|
||||
hb_vector_t<int> rounded_x_deltas, rounded_y_deltas;
|
||||
|
||||
if (unlikely (!rounded_x_deltas.alloc (count) ||
|
||||
!rounded_y_deltas.alloc (count)))
|
||||
return false;
|
||||
|
||||
for (unsigned i = 0; i < count; i++)
|
||||
{
|
||||
int rounded_x_delta = (int) roundf (deltas_x.arrayZ[i]);
|
||||
int rounded_y_delta = (int) roundf (deltas_y.arrayZ[i]);
|
||||
rounded_x_deltas.push (rounded_x_delta);
|
||||
rounded_y_deltas.push (rounded_y_delta);
|
||||
}
|
||||
|
||||
if (!iup_delta_optimize (contour_points, rounded_x_deltas, rounded_y_deltas, opt_indices, tolerance))
|
||||
return false;
|
||||
|
||||
unsigned ref_count = 0;
|
||||
for (bool ref_flag : opt_indices)
|
||||
ref_count += ref_flag;
|
||||
|
||||
if (ref_count == count) return true;
|
||||
|
||||
hb_vector_t<float> opt_deltas_x, opt_deltas_y;
|
||||
bool is_comp_glyph_wo_deltas = (is_composite && ref_count == 0);
|
||||
if (is_comp_glyph_wo_deltas)
|
||||
{
|
||||
if (unlikely (!opt_deltas_x.resize (count) ||
|
||||
!opt_deltas_y.resize (count)))
|
||||
return false;
|
||||
|
||||
opt_indices.arrayZ[0] = true;
|
||||
for (unsigned i = 1; i < count; i++)
|
||||
opt_indices.arrayZ[i] = false;
|
||||
}
|
||||
|
||||
hb_vector_t<char> opt_point_data;
|
||||
if (!compile_point_set (opt_indices, opt_point_data))
|
||||
return false;
|
||||
hb_vector_t<char> opt_deltas_data;
|
||||
if (!compile_deltas (opt_indices,
|
||||
is_comp_glyph_wo_deltas ? opt_deltas_x : deltas_x,
|
||||
is_comp_glyph_wo_deltas ? opt_deltas_y : deltas_y,
|
||||
opt_deltas_data))
|
||||
return false;
|
||||
|
||||
hb_vector_t<char> point_data;
|
||||
if (!compile_point_set (indices, point_data))
|
||||
return false;
|
||||
hb_vector_t<char> deltas_data;
|
||||
if (!compile_deltas (indices, deltas_x, deltas_y, deltas_data))
|
||||
return false;
|
||||
|
||||
if (opt_point_data.length + opt_deltas_data.length < point_data.length + deltas_data.length)
|
||||
{
|
||||
indices.fini ();
|
||||
indices = std::move (opt_indices);
|
||||
|
||||
if (is_comp_glyph_wo_deltas)
|
||||
{
|
||||
deltas_x.fini ();
|
||||
deltas_x = std::move (opt_deltas_x);
|
||||
|
||||
deltas_y.fini ();
|
||||
deltas_y = std::move (opt_deltas_y);
|
||||
}
|
||||
}
|
||||
return !indices.in_error () && !deltas_x.in_error () && !deltas_y.in_error ();
|
||||
}
|
||||
|
||||
static bool compile_point_set (const hb_vector_t<bool> &point_indices,
|
||||
hb_vector_t<char>& compiled_points /* OUT */)
|
||||
{
|
||||
unsigned num_points = 0;
|
||||
for (bool i : point_indices)
|
||||
if (i) num_points++;
|
||||
|
||||
/* when iup optimization is enabled, num of referenced points could be 0 */
|
||||
if (!num_points) return true;
|
||||
|
||||
unsigned indices_length = point_indices.length;
|
||||
/* If the points set consists of all points in the glyph, it's encoded with a
|
||||
* single zero byte */
|
||||
if (num_points == indices_length)
|
||||
return compiled_points.resize (1);
|
||||
|
||||
/* allocate enough memory: 2 bytes for count + 3 bytes for each point */
|
||||
unsigned num_bytes = 2 + 3 *num_points;
|
||||
if (unlikely (!compiled_points.resize (num_bytes, false)))
|
||||
return false;
|
||||
|
||||
unsigned pos = 0;
|
||||
/* binary data starts with the total number of reference points */
|
||||
if (num_points < 0x80)
|
||||
compiled_points.arrayZ[pos++] = num_points;
|
||||
else
|
||||
{
|
||||
compiled_points.arrayZ[pos++] = ((num_points >> 8) | 0x80);
|
||||
compiled_points.arrayZ[pos++] = num_points & 0xFF;
|
||||
}
|
||||
|
||||
const unsigned max_run_length = 0x7F;
|
||||
unsigned i = 0;
|
||||
unsigned last_value = 0;
|
||||
unsigned num_encoded = 0;
|
||||
while (i < indices_length && num_encoded < num_points)
|
||||
{
|
||||
unsigned run_length = 0;
|
||||
unsigned header_pos = pos;
|
||||
compiled_points.arrayZ[pos++] = 0;
|
||||
|
||||
bool use_byte_encoding = false;
|
||||
bool new_run = true;
|
||||
while (i < indices_length && num_encoded < num_points &&
|
||||
run_length <= max_run_length)
|
||||
{
|
||||
// find out next referenced point index
|
||||
while (i < indices_length && !point_indices[i])
|
||||
i++;
|
||||
|
||||
if (i >= indices_length) break;
|
||||
|
||||
unsigned cur_value = i;
|
||||
unsigned delta = cur_value - last_value;
|
||||
|
||||
if (new_run)
|
||||
{
|
||||
use_byte_encoding = (delta <= 0xFF);
|
||||
new_run = false;
|
||||
}
|
||||
|
||||
if (use_byte_encoding && delta > 0xFF)
|
||||
break;
|
||||
|
||||
if (use_byte_encoding)
|
||||
compiled_points.arrayZ[pos++] = delta;
|
||||
else
|
||||
{
|
||||
compiled_points.arrayZ[pos++] = delta >> 8;
|
||||
compiled_points.arrayZ[pos++] = delta & 0xFF;
|
||||
}
|
||||
i++;
|
||||
last_value = cur_value;
|
||||
run_length++;
|
||||
num_encoded++;
|
||||
}
|
||||
|
||||
if (use_byte_encoding)
|
||||
compiled_points.arrayZ[header_pos] = run_length - 1;
|
||||
else
|
||||
compiled_points.arrayZ[header_pos] = (run_length - 1) | 0x80;
|
||||
}
|
||||
return compiled_points.resize (pos, false);
|
||||
}
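
As a concrete illustration of the packed point-number layout produced above, here is a standalone sketch (plain C++, byte-sized deltas only, made-up point indices; not the HarfBuzz implementation):

#include <cstdint>
#include <cstdio>
#include <vector>

// Packs point indices {2, 5, 9}: a count byte, a run header (run_length - 1,
// high bit clear for byte deltas), then deltas from the previous index.
int main ()
{
  std::vector<unsigned> points = {2, 5, 9};  // hypothetical referenced points
  std::vector<uint8_t> packed;

  packed.push_back ((uint8_t) points.size ());        // total number of points (< 0x80)
  packed.push_back ((uint8_t) (points.size () - 1));   // run header: byte deltas, run of 3

  unsigned last = 0;
  for (unsigned p : points)
  {
    packed.push_back ((uint8_t) (p - last));            // delta-encoded point index
    last = p;
  }

  for (uint8_t b : packed) std::printf ("%02X ", b);    // prints: 03 02 02 03 04
  std::printf ("\n");
  return 0;
}
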
static float infer_delta (float target_val, float prev_val, float next_val, float prev_delta, float next_delta)
|
||||
{
|
||||
if (prev_val == next_val)
|
||||
@ -1071,41 +1245,41 @@ struct TupleVariationData
|
||||
|
||||
private:
|
||||
/* referenced point set->compiled point data map */
|
||||
hb_hashmap_t<const hb_vector_t<bool>*, hb_bytes_t> point_data_map;
|
||||
hb_hashmap_t<const hb_vector_t<bool>*, hb_vector_t<char>> point_data_map;
|
||||
/* referenced point set-> count map, used in finding shared points */
|
||||
hb_hashmap_t<const hb_vector_t<bool>*, unsigned> point_set_count_map;
|
||||
|
||||
/* empty for non-gvar tuples.
|
||||
* shared_points_bytes is just a copy of some value in the point_data_map,
|
||||
* shared_points_bytes is a pointer to some value in the point_data_map,
|
||||
* which will be freed during map destruction. Save it for serialization, so
|
||||
* no need to do find_shared_points () again */
|
||||
hb_bytes_t shared_points_bytes;
|
||||
hb_vector_t<char> *shared_points_bytes = nullptr;
|
||||
|
||||
/* total compiled byte size as TupleVariationData format, initialized to its
|
||||
* min_size: 4 */
|
||||
unsigned compiled_byte_size = 4;
|
||||
|
||||
/* for gvar iup delta optimization: whether this is a composite glyph */
|
||||
bool is_composite = false;
|
||||
|
||||
public:
|
||||
tuple_variations_t () = default;
|
||||
tuple_variations_t (const tuple_variations_t&) = delete;
|
||||
tuple_variations_t& operator=(const tuple_variations_t&) = delete;
|
||||
tuple_variations_t (tuple_variations_t&&) = default;
|
||||
tuple_variations_t& operator=(tuple_variations_t&&) = default;
|
||||
~tuple_variations_t () { fini (); }
|
||||
void fini ()
|
||||
{
|
||||
for (auto _ : point_data_map.values ())
|
||||
_.fini ();
|
||||
|
||||
point_set_count_map.fini ();
|
||||
tuple_vars.fini ();
|
||||
}
|
||||
~tuple_variations_t () = default;
|
||||
|
||||
explicit operator bool () const { return bool (tuple_vars); }
|
||||
unsigned get_var_count () const
|
||||
{
|
||||
unsigned count = tuple_vars.length;
|
||||
if (shared_points_bytes.length)
|
||||
unsigned count = 0;
|
||||
/* when iup delta opt is enabled, compiled_deltas could be empty and we
|
||||
* should skip this tuple */
|
||||
for (auto& tuple: tuple_vars)
|
||||
if (tuple.compiled_deltas) count++;
|
||||
|
||||
if (shared_points_bytes && shared_points_bytes->length)
|
||||
count |= TupleVarCount::SharedPointNumbers;
|
||||
return count;
|
||||
}
|
||||
@ -1119,26 +1293,27 @@ struct TupleVariationData
|
||||
bool is_gvar,
|
||||
const hb_map_t *axes_old_index_tag_map,
|
||||
const hb_vector_t<unsigned> &shared_indices,
|
||||
const hb_array_t<const F2DOT14> shared_tuples)
|
||||
const hb_array_t<const F2DOT14> shared_tuples,
|
||||
bool is_composite_glyph)
|
||||
{
|
||||
do
|
||||
{
|
||||
const HBUINT8 *p = iterator.get_serialized_data ();
|
||||
unsigned int length = iterator.current_tuple->get_data_size ();
|
||||
if (unlikely (!iterator.var_data_bytes.check_range (p, length)))
|
||||
{ fini (); return false; }
|
||||
return false;
|
||||
|
||||
hb_hashmap_t<hb_tag_t, Triple> axis_tuples;
|
||||
if (!iterator.current_tuple->unpack_axis_tuples (iterator.get_axis_count (), shared_tuples, axes_old_index_tag_map, axis_tuples)
|
||||
|| axis_tuples.is_empty ())
|
||||
{ fini (); return false; }
|
||||
return false;
|
||||
|
||||
hb_vector_t<unsigned> private_indices;
|
||||
bool has_private_points = iterator.current_tuple->has_private_points ();
|
||||
const HBUINT8 *end = p + length;
|
||||
if (has_private_points &&
|
||||
!TupleVariationData::unpack_points (p, private_indices, end))
|
||||
{ fini (); return false; }
|
||||
return false;
|
||||
|
||||
const hb_vector_t<unsigned> &indices = has_private_points ? private_indices : shared_indices;
|
||||
bool apply_to_all = (indices.length == 0);
|
||||
@ -1148,24 +1323,24 @@ struct TupleVariationData
|
||||
|
||||
if (unlikely (!deltas_x.resize (num_deltas, false) ||
|
||||
!TupleVariationData::unpack_deltas (p, deltas_x, end)))
|
||||
{ fini (); return false; }
|
||||
return false;
|
||||
|
||||
hb_vector_t<int> deltas_y;
|
||||
if (is_gvar)
|
||||
{
|
||||
if (unlikely (!deltas_y.resize (num_deltas, false) ||
|
||||
!TupleVariationData::unpack_deltas (p, deltas_y, end)))
|
||||
{ fini (); return false; }
|
||||
return false;
|
||||
}
|
||||
|
||||
tuple_delta_t var;
|
||||
var.axis_tuples = std::move (axis_tuples);
|
||||
if (unlikely (!var.indices.resize (point_count) ||
|
||||
!var.deltas_x.resize (point_count, false)))
|
||||
{ fini (); return false; }
|
||||
return false;
|
||||
|
||||
if (is_gvar && unlikely (!var.deltas_y.resize (point_count, false)))
|
||||
{ fini (); return false; }
|
||||
return false;
|
||||
|
||||
for (unsigned i = 0; i < num_deltas; i++)
|
||||
{
|
||||
@ -1178,6 +1353,8 @@ struct TupleVariationData
|
||||
}
|
||||
tuple_vars.push (std::move (var));
|
||||
} while (iterator.move_to_next ());
|
||||
|
||||
is_composite = is_composite_glyph;
|
||||
return true;
|
||||
}
|
||||
|
||||
@ -1261,7 +1438,7 @@ struct TupleVariationData
|
||||
unsigned new_len = new_vars.length + out.length;
|
||||
|
||||
if (unlikely (!new_vars.alloc (new_len, false)))
|
||||
{ fini (); return false;}
|
||||
return false;
|
||||
|
||||
for (unsigned i = 0; i < out.length; i++)
|
||||
new_vars.push (std::move (out[i]));
|
||||
@ -1272,8 +1449,9 @@ struct TupleVariationData
|
||||
return true;
|
||||
}
|
||||
|
||||
/* merge tuple variations with overlapping tents */
|
||||
void merge_tuple_variations ()
|
||||
/* merge tuple variations with overlapping tents, if iup delta optimization
|
||||
* is enabled, add default deltas to contour_points */
|
||||
bool merge_tuple_variations (contour_point_vector_t* contour_points = nullptr)
|
||||
{
|
||||
hb_vector_t<tuple_delta_t> new_vars;
|
||||
hb_hashmap_t<const hb_hashmap_t<hb_tag_t, Triple>*, unsigned> m;
|
||||
@ -1281,7 +1459,15 @@ struct TupleVariationData
|
||||
for (const tuple_delta_t& var : tuple_vars)
|
||||
{
|
||||
/* if all axes are pinned, drop the tuple variation */
|
||||
if (var.axis_tuples.is_empty ()) continue;
|
||||
if (var.axis_tuples.is_empty ())
|
||||
{
|
||||
/* if iup_delta_optimize is enabled, add deltas to contour coords */
|
||||
if (contour_points && !contour_points->add_deltas (var.deltas_x,
|
||||
var.deltas_y,
|
||||
var.indices))
|
||||
return false;
|
||||
continue;
|
||||
}
|
||||
|
||||
unsigned *idx;
|
||||
if (m.has (&(var.axis_tuples), &idx))
|
||||
@ -1291,98 +1477,14 @@ struct TupleVariationData
|
||||
else
|
||||
{
|
||||
new_vars.push (var);
|
||||
m.set (&(var.axis_tuples), i);
|
||||
if (!m.set (&(var.axis_tuples), i))
|
||||
return false;
|
||||
i++;
|
||||
}
|
||||
}
|
||||
tuple_vars.fini ();
|
||||
tuple_vars = std::move (new_vars);
|
||||
}
|
||||
|
||||
hb_bytes_t compile_point_set (const hb_vector_t<bool> &point_indices)
|
||||
{
|
||||
unsigned num_points = 0;
|
||||
for (bool i : point_indices)
|
||||
if (i) num_points++;
|
||||
|
||||
unsigned indices_length = point_indices.length;
|
||||
/* If the points set consists of all points in the glyph, it's encoded with a
|
||||
* single zero byte */
|
||||
if (num_points == indices_length)
|
||||
{
|
||||
char *p = (char *) hb_calloc (1, sizeof (char));
|
||||
if (unlikely (!p)) return hb_bytes_t ();
|
||||
|
||||
return hb_bytes_t (p, 1);
|
||||
}
|
||||
|
||||
/* allocate enough memories: 2 bytes for count + 3 bytes for each point */
|
||||
unsigned num_bytes = 2 + 3 *num_points;
|
||||
char *p = (char *) hb_calloc (num_bytes, sizeof (char));
|
||||
if (unlikely (!p)) return hb_bytes_t ();
|
||||
|
||||
unsigned pos = 0;
|
||||
/* binary data starts with the total number of reference points */
|
||||
if (num_points < 0x80)
|
||||
p[pos++] = num_points;
|
||||
else
|
||||
{
|
||||
p[pos++] = ((num_points >> 8) | 0x80);
|
||||
p[pos++] = num_points & 0xFF;
|
||||
}
|
||||
|
||||
const unsigned max_run_length = 0x7F;
|
||||
unsigned i = 0;
|
||||
unsigned last_value = 0;
|
||||
unsigned num_encoded = 0;
|
||||
while (i < indices_length && num_encoded < num_points)
|
||||
{
|
||||
unsigned run_length = 0;
|
||||
unsigned header_pos = pos;
|
||||
p[pos++] = 0;
|
||||
|
||||
bool use_byte_encoding = false;
|
||||
bool new_run = true;
|
||||
while (i < indices_length && num_encoded < num_points &&
|
||||
run_length <= max_run_length)
|
||||
{
|
||||
// find out next referenced point index
|
||||
while (i < indices_length && !point_indices[i])
|
||||
i++;
|
||||
|
||||
if (i >= indices_length) break;
|
||||
|
||||
unsigned cur_value = i;
|
||||
unsigned delta = cur_value - last_value;
|
||||
|
||||
if (new_run)
|
||||
{
|
||||
use_byte_encoding = (delta <= 0xFF);
|
||||
new_run = false;
|
||||
}
|
||||
|
||||
if (use_byte_encoding && delta > 0xFF)
|
||||
break;
|
||||
|
||||
if (use_byte_encoding)
|
||||
p[pos++] = delta;
|
||||
else
|
||||
{
|
||||
p[pos++] = delta >> 8;
|
||||
p[pos++] = delta & 0xFF;
|
||||
}
|
||||
i++;
|
||||
last_value = cur_value;
|
||||
run_length++;
|
||||
num_encoded++;
|
||||
}
|
||||
|
||||
if (use_byte_encoding)
|
||||
p[header_pos] = run_length - 1;
|
||||
else
|
||||
p[header_pos] = (run_length - 1) | 0x80;
|
||||
}
|
||||
return hb_bytes_t (p, pos);
|
||||
return true;
|
||||
}
|
||||
|
||||
/* compile all point set and store byte data in a point_set->hb_bytes_t hashmap,
|
||||
@ -1402,11 +1504,11 @@ struct TupleVariationData
|
||||
continue;
|
||||
}
|
||||
|
||||
hb_bytes_t compiled_data = compile_point_set (*points_set);
|
||||
if (unlikely (compiled_data == hb_bytes_t ()))
|
||||
hb_vector_t<char> compiled_point_data;
|
||||
if (!tuple_delta_t::compile_point_set (*points_set, compiled_point_data))
|
||||
return false;
|
||||
|
||||
if (!point_data_map.set (points_set, compiled_data) ||
|
||||
if (!point_data_map.set (points_set, std::move (compiled_point_data)) ||
|
||||
!point_set_count_map.set (points_set, 1))
|
||||
return false;
|
||||
}
|
||||
@ -1414,31 +1516,33 @@ struct TupleVariationData
|
||||
}
|
||||
|
||||
/* find shared points set which saves most bytes */
|
||||
hb_bytes_t find_shared_points ()
|
||||
void find_shared_points ()
|
||||
{
|
||||
unsigned max_saved_bytes = 0;
|
||||
hb_bytes_t res{};
|
||||
|
||||
for (const auto& _ : point_data_map.iter ())
|
||||
for (const auto& _ : point_data_map.iter_ref ())
|
||||
{
|
||||
const hb_vector_t<bool>* points_set = _.first;
|
||||
unsigned data_length = _.second.length;
|
||||
if (!data_length) continue;
|
||||
unsigned *count;
|
||||
if (unlikely (!point_set_count_map.has (points_set, &count) ||
|
||||
*count <= 1))
|
||||
return hb_bytes_t ();
|
||||
{
|
||||
shared_points_bytes = nullptr;
|
||||
return;
|
||||
}
|
||||
|
||||
unsigned saved_bytes = data_length * ((*count) -1);
|
||||
if (saved_bytes > max_saved_bytes)
|
||||
{
|
||||
max_saved_bytes = saved_bytes;
|
||||
res = _.second;
|
||||
shared_points_bytes = &(_.second);
|
||||
}
|
||||
}
|
||||
return res;
|
||||
}
|
||||
|
||||
bool calc_inferred_deltas (contour_point_vector_t& contour_points)
|
||||
bool calc_inferred_deltas (const contour_point_vector_t& contour_points)
|
||||
{
|
||||
for (tuple_delta_t& var : tuple_vars)
|
||||
if (!var.calc_inferred_deltas (contour_points))
|
||||
@ -1447,10 +1551,21 @@ struct TupleVariationData
|
||||
return true;
|
||||
}
|
||||
|
||||
bool iup_optimize (const contour_point_vector_t& contour_points)
|
||||
{
|
||||
for (tuple_delta_t& var : tuple_vars)
|
||||
{
|
||||
if (!var.optimize (contour_points, is_composite))
|
||||
return false;
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
public:
|
||||
bool instantiate (const hb_hashmap_t<hb_tag_t, Triple>& normalized_axes_location,
|
||||
const hb_hashmap_t<hb_tag_t, TripleDistances>& axes_triple_distances,
|
||||
contour_point_vector_t* contour_points = nullptr)
|
||||
contour_point_vector_t* contour_points = nullptr,
|
||||
bool optimize = false)
|
||||
{
|
||||
if (!tuple_vars) return true;
|
||||
if (!change_tuple_variations_axis_limits (normalized_axes_location, axes_triple_distances))
|
||||
@ -1460,7 +1575,14 @@ struct TupleVariationData
|
||||
if (!calc_inferred_deltas (*contour_points))
|
||||
return false;
|
||||
|
||||
merge_tuple_variations ();
|
||||
/* if iup delta opt is on, contour_points can't be null */
|
||||
if (optimize && !contour_points)
|
||||
return false;
|
||||
|
||||
if (!merge_tuple_variations (optimize ? contour_points : nullptr))
|
||||
return false;
|
||||
|
||||
if (optimize && !iup_optimize (*contour_points)) return false;
|
||||
return !tuple_vars.in_error ();
|
||||
}
|
||||
|
||||
@ -1475,21 +1597,27 @@ struct TupleVariationData
|
||||
|
||||
if (use_shared_points)
|
||||
{
|
||||
shared_points_bytes = find_shared_points ();
|
||||
compiled_byte_size += shared_points_bytes.length;
|
||||
find_shared_points ();
|
||||
if (shared_points_bytes)
|
||||
compiled_byte_size += shared_points_bytes->length;
|
||||
}
|
||||
// compile delta and tuple var header for each tuple variation
|
||||
for (auto& tuple: tuple_vars)
|
||||
{
|
||||
const hb_vector_t<bool>* points_set = &(tuple.indices);
|
||||
hb_bytes_t *points_data;
|
||||
hb_vector_t<char> *points_data;
|
||||
if (unlikely (!point_data_map.has (points_set, &points_data)))
|
||||
return false;
|
||||
|
||||
/* when iup optimization is enabled, num of referenced points could be 0
|
||||
* and thus the compiled points bytes is empty, we should skip compiling
|
||||
* this tuple */
|
||||
if (!points_data->length)
|
||||
continue;
|
||||
if (!tuple.compile_deltas ())
|
||||
return false;
|
||||
|
||||
unsigned points_data_length = (*points_data != shared_points_bytes) ? points_data->length : 0;
|
||||
unsigned points_data_length = (points_data != shared_points_bytes) ? points_data->length : 0;
|
||||
if (!tuple.compile_tuple_var_header (axes_index_map, points_data_length, axes_old_index_tag_map,
|
||||
shared_tuples_idx_map))
|
||||
return false;
|
||||
@ -1513,18 +1641,24 @@ struct TupleVariationData
|
||||
bool serialize_var_data (hb_serialize_context_t *c, bool is_gvar) const
|
||||
{
|
||||
TRACE_SERIALIZE (this);
|
||||
if (is_gvar)
|
||||
shared_points_bytes.copy (c);
|
||||
if (is_gvar && shared_points_bytes)
|
||||
{
|
||||
hb_bytes_t s (shared_points_bytes->arrayZ, shared_points_bytes->length);
|
||||
s.copy (c);
|
||||
}
|
||||
|
||||
for (const auto& tuple: tuple_vars)
|
||||
{
|
||||
const hb_vector_t<bool>* points_set = &(tuple.indices);
|
||||
hb_bytes_t *point_data;
|
||||
hb_vector_t<char> *point_data;
|
||||
if (!point_data_map.has (points_set, &point_data))
|
||||
return_trace (false);
|
||||
|
||||
if (!is_gvar || *point_data != shared_points_bytes)
|
||||
point_data->copy (c);
|
||||
if (!is_gvar || point_data != shared_points_bytes)
|
||||
{
|
||||
hb_bytes_t s (point_data->arrayZ, point_data->length);
|
||||
s.copy (c);
|
||||
}
|
||||
|
||||
tuple.compiled_deltas.as_array ().copy (c);
|
||||
if (c->in_error ()) return_trace (false);
|
||||
@ -1711,13 +1845,15 @@ struct TupleVariationData
|
||||
const hb_map_t *axes_old_index_tag_map,
|
||||
const hb_vector_t<unsigned> &shared_indices,
|
||||
const hb_array_t<const F2DOT14> shared_tuples,
|
||||
tuple_variations_t& tuple_variations /* OUT */) const
|
||||
tuple_variations_t& tuple_variations, /* OUT */
|
||||
bool is_composite_glyph = false) const
|
||||
{
|
||||
return tuple_variations.create_from_tuple_var_data (iterator, tupleVarCount,
|
||||
point_count, is_gvar,
|
||||
axes_old_index_tag_map,
|
||||
shared_indices,
|
||||
shared_tuples);
|
||||
shared_tuples,
|
||||
is_composite_glyph);
|
||||
}
|
||||
|
||||
bool serialize (hb_serialize_context_t *c,
|
||||
@ -1831,7 +1967,7 @@ struct item_variations_t
|
||||
const hb_map_t& get_varidx_map () const
|
||||
{ return varidx_map; }
|
||||
|
||||
bool instantiate (const VariationStore& varStore,
|
||||
bool instantiate (const ItemVariationStore& varStore,
|
||||
const hb_subset_plan_t *plan,
|
||||
bool optimize=true,
|
||||
bool use_no_variation_idx=true,
|
||||
@ -1845,7 +1981,7 @@ struct item_variations_t
|
||||
}
|
||||
|
||||
/* keep below APIs public only for unit test: test-item-varstore */
|
||||
bool create_from_item_varstore (const VariationStore& varStore,
|
||||
bool create_from_item_varstore (const ItemVariationStore& varStore,
|
||||
const hb_map_t& axes_old_index_tag_map,
|
||||
const hb_array_t <const hb_inc_bimap_t> inner_maps = hb_array_t<const hb_inc_bimap_t> ())
|
||||
{
|
||||
|
@ -101,10 +101,15 @@ struct glyph_variations_t
|
||||
continue;
|
||||
}
|
||||
|
||||
bool is_composite_glyph = false;
|
||||
#ifdef HB_EXPERIMENTAL_API
|
||||
is_composite_glyph = plan->composite_new_gids.has (new_gid);
|
||||
#endif
|
||||
if (!p->decompile_tuple_variations (all_contour_points->length, true /* is_gvar */,
|
||||
iterator, &(plan->axes_old_index_tag_map),
|
||||
shared_indices, shared_tuples,
|
||||
tuple_vars /* OUT */))
|
||||
tuple_vars, /* OUT */
|
||||
is_composite_glyph))
|
||||
return false;
|
||||
glyph_variations.push (std::move (tuple_vars));
|
||||
}
|
||||
@ -114,13 +119,17 @@ struct glyph_variations_t
|
||||
bool instantiate (const hb_subset_plan_t *plan)
|
||||
{
|
||||
unsigned count = plan->new_to_old_gid_list.length;
|
||||
bool iup_optimize = false;
|
||||
#ifdef HB_EXPERIMENTAL_API
|
||||
iup_optimize = plan->flags & HB_SUBSET_FLAGS_OPTIMIZE_IUP_DELTAS;
|
||||
#endif
|
||||
for (unsigned i = 0; i < count; i++)
|
||||
{
|
||||
hb_codepoint_t new_gid = plan->new_to_old_gid_list[i].first;
|
||||
contour_point_vector_t *all_points;
|
||||
if (!plan->new_gid_contour_points_map.has (new_gid, &all_points))
|
||||
return false;
|
||||
if (!glyph_variations[i].instantiate (plan->axes_location, plan->axes_triple_distances, all_points))
|
||||
if (!glyph_variations[i].instantiate (plan->axes_location, plan->axes_triple_distances, all_points, iup_optimize))
|
||||
return false;
|
||||
}
|
||||
return true;
|
||||
@ -340,7 +349,8 @@ struct gvar
|
||||
const glyph_variations_t& glyph_vars,
|
||||
Iterator it,
|
||||
unsigned axis_count,
|
||||
unsigned num_glyphs) const
|
||||
unsigned num_glyphs,
|
||||
bool force_long_offsets) const
|
||||
{
|
||||
TRACE_SERIALIZE (this);
|
||||
gvar *out = c->allocate_min<gvar> ();
|
||||
@ -352,7 +362,7 @@ struct gvar
|
||||
out->glyphCountX = hb_min (0xFFFFu, num_glyphs);
|
||||
|
||||
unsigned glyph_var_data_size = glyph_vars.compiled_byte_size ();
|
||||
bool long_offset = glyph_var_data_size & ~0xFFFFu;
|
||||
bool long_offset = glyph_var_data_size & ~0xFFFFu || force_long_offsets;
|
||||
out->flags = long_offset ? 1 : 0;
|
||||
|
||||
HBUINT8 *glyph_var_data_offsets = c->allocate_size<HBUINT8> ((long_offset ? 4 : 2) * (num_glyphs + 1), false);
|
||||
@ -393,7 +403,12 @@ struct gvar
|
||||
unsigned axis_count = c->plan->axes_index_map.get_population ();
|
||||
unsigned num_glyphs = c->plan->num_output_glyphs ();
|
||||
auto it = hb_iter (c->plan->new_to_old_gid_list);
|
||||
return_trace (serialize (c->serializer, glyph_vars, it, axis_count, num_glyphs));
|
||||
|
||||
bool force_long_offsets = false;
|
||||
#ifdef HB_EXPERIMENTAL_API
|
||||
force_long_offsets = c->plan->flags & HB_SUBSET_FLAGS_IFTB_REQUIREMENTS;
|
||||
#endif
|
||||
return_trace (serialize (c->serializer, glyph_vars, it, axis_count, num_glyphs, force_long_offsets));
|
||||
}
|
||||
|
||||
bool subset (hb_subset_context_t *c) const
|
||||
@ -429,7 +444,7 @@ struct gvar
|
||||
}
|
||||
|
||||
bool long_offset = (subset_data_size & ~0xFFFFu);
|
||||
#ifdef HB_EXPERIMENTAL_API
|
||||
#ifdef HB_EXPERIMENTAL_API
|
||||
long_offset = long_offset || (c->plan->flags & HB_SUBSET_FLAGS_IFTB_REQUIREMENTS);
|
||||
#endif
|
||||
out->flags = long_offset ? 1 : 0;
|
||||
|
@ -188,7 +188,7 @@ struct hvarvvar_subset_plan_t
|
||||
~hvarvvar_subset_plan_t() { fini (); }
|
||||
|
||||
void init (const hb_array_t<const DeltaSetIndexMap *> &index_maps,
|
||||
const VariationStore &_var_store,
|
||||
const ItemVariationStore &_var_store,
|
||||
const hb_subset_plan_t *plan)
|
||||
{
|
||||
index_map_plans.resize (index_maps.length);
|
||||
@ -263,7 +263,7 @@ struct hvarvvar_subset_plan_t
|
||||
hb_inc_bimap_t outer_map;
|
||||
hb_vector_t<hb_inc_bimap_t> inner_maps;
|
||||
hb_vector_t<index_map_subset_plan_t> index_map_plans;
|
||||
const VariationStore *var_store;
|
||||
const ItemVariationStore *var_store;
|
||||
|
||||
protected:
|
||||
hb_vector_t<hb_set_t *> inner_sets;
|
||||
@ -296,7 +296,7 @@ struct HVARVVAR
|
||||
rsbMap.sanitize (c, this));
|
||||
}
|
||||
|
||||
const VariationStore& get_var_store () const
|
||||
const ItemVariationStore& get_var_store () const
|
||||
{ return this+varStore; }
|
||||
|
||||
void listup_index_maps (hb_vector_t<const DeltaSetIndexMap *> &index_maps) const
|
||||
@ -384,7 +384,7 @@ struct HVARVVAR
|
||||
|
||||
float get_advance_delta_unscaled (hb_codepoint_t glyph,
|
||||
const int *coords, unsigned int coord_count,
|
||||
VariationStore::cache_t *store_cache = nullptr) const
|
||||
ItemVariationStore::cache_t *store_cache = nullptr) const
|
||||
{
|
||||
uint32_t varidx = (this+advMap).map (glyph);
|
||||
return (this+varStore).get_delta (varidx,
|
||||
@ -405,7 +405,7 @@ struct HVARVVAR
|
||||
public:
|
||||
FixedVersion<>version; /* Version of the metrics variation table
|
||||
* initially set to 0x00010000u */
|
||||
Offset32To<VariationStore>
|
||||
Offset32To<ItemVariationStore>
|
||||
varStore; /* Offset to item variation store table. */
|
||||
Offset32To<DeltaSetIndexMap>
|
||||
advMap; /* Offset to advance var-idx mapping. */
|
||||
|
@ -56,7 +56,7 @@ struct VariationValueRecord
|
||||
|
||||
public:
|
||||
Tag valueTag; /* Four-byte tag identifying a font-wide measure. */
|
||||
VarIdx varIdx; /* Outer/inner index into VariationStore item. */
|
||||
VarIdx varIdx; /* Outer/inner index into ItemVariationStore item. */
|
||||
|
||||
public:
|
||||
DEFINE_SIZE_STATIC (8);
|
||||
@ -106,7 +106,7 @@ struct MVAR
|
||||
out->valueRecordCount = valueRecordCount;
|
||||
|
||||
item_variations_t item_vars;
|
||||
const VariationStore& src_var_store = this+varStore;
|
||||
const ItemVariationStore& src_var_store = this+varStore;
|
||||
|
||||
if (!item_vars.instantiate (src_var_store, c->plan))
|
||||
return_trace (false);
|
||||
@ -159,7 +159,7 @@ protected:
|
||||
HBUINT16 valueRecordSize;/* The size in bytes of each value record —
|
||||
* must be greater than zero. */
|
||||
HBUINT16 valueRecordCount;/* The number of value records — may be zero. */
|
||||
Offset16To<VariationStore>
|
||||
Offset16To<ItemVariationStore>
|
||||
varStore; /* Offset to item variation store table. */
|
||||
UnsizedArrayOf<HBUINT8>
|
||||
valuesZ; /* Array of value records. The records must be
|
||||
|
@ -163,7 +163,7 @@ struct hb_priority_queue_t
|
||||
goto repeat;
|
||||
}
|
||||
|
||||
void swap (unsigned a, unsigned b)
|
||||
void swap (unsigned a, unsigned b) noexcept
|
||||
{
|
||||
assert (a < heap.length);
|
||||
assert (b < heap.length);
|
||||
|
@ -238,6 +238,54 @@ bool _try_isolating_subgraphs (const hb_vector_t<graph::overflow_record_t>& over
|
||||
return true;
|
||||
}
|
||||
|
||||
static inline
|
||||
bool _resolve_shared_overflow(const hb_vector_t<graph::overflow_record_t>& overflows,
|
||||
int overflow_index,
|
||||
graph_t& sorted_graph)
|
||||
{
|
||||
const graph::overflow_record_t& r = overflows[overflow_index];
|
||||
|
||||
// Find all of the parents in overflowing links that link to this
|
||||
// same child node. We will then try duplicating the child node and
|
||||
// re-assigning all of these parents to the duplicate.
|
||||
hb_set_t parents;
|
||||
parents.add(r.parent);
|
||||
for (int i = overflow_index - 1; i >= 0; i--) {
|
||||
const graph::overflow_record_t& r2 = overflows[i];
|
||||
if (r2.child == r.child) {
|
||||
parents.add(r2.parent);
|
||||
}
|
||||
}
|
||||
|
||||
unsigned result = sorted_graph.duplicate(&parents, r.child);
|
||||
if (result == (unsigned) -1 && parents.get_population() > 2) {
|
||||
// All links to the child are overflowing, so we can't include all
|
||||
// in the duplication. Remove one parent from the duplication.
|
||||
// Remove the lowest index parent, which will be the closest to the child.
|
||||
parents.del(parents.get_min());
|
||||
result = sorted_graph.duplicate(&parents, r.child);
|
||||
}
|
||||
|
||||
if (result == (unsigned) -1) return result;
|
||||
|
||||
if (parents.get_population() > 1) {
|
||||
// If the duplicated node has more than one parent, pre-emptively raise its priority to the maximum.
|
||||
// This will place it close to the parents. Nodes with only one parent don't need this, as normal overflow
|
||||
// resolution will raise priority if needed.
|
||||
//
|
||||
// Reasoning: most of the parents to this child are likely at the same layer in the graph. Duplicating
|
||||
// the child will theoretically allow it to be placed closer to its parents. However, due to the shortest
|
||||
// distance sort, by default its placement will remain in the same layer, thus it will remain in roughly the
|
||||
// same position (and distance from parents) as the original child node. The overflow resolution will attempt
|
||||
// to move nodes closer, but only for non-shared nodes. Since this node is shared, it will simply be given
|
||||
// further duplication which defeats the attempt to duplicate with multiple parents. To fix this we
|
||||
// pre-emptively raise priority now, which allows the duplicated node to pack into the same layer as its parents.
|
||||
sorted_graph.vertices_[result].give_max_priority();
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
static inline
|
||||
bool _process_overflows (const hb_vector_t<graph::overflow_record_t>& overflows,
|
||||
hb_set_t& priority_bumped_parents,
|
||||
@ -254,7 +302,7 @@ bool _process_overflows (const hb_vector_t<graph::overflow_record_t>& overflows,
|
||||
{
|
||||
// The child object is shared, we may be able to eliminate the overflow
|
||||
// by duplicating it.
|
||||
if (sorted_graph.duplicate (r.parent, r.child) == (unsigned) -1) continue;
|
||||
if (!_resolve_shared_overflow(overflows, i, sorted_graph)) continue;
|
||||
return true;
|
||||
}
|
||||
|
||||
@ -388,7 +436,7 @@ template<typename T>
|
||||
inline hb_blob_t*
|
||||
hb_resolve_overflows (const T& packed,
|
||||
hb_tag_t table_tag,
|
||||
unsigned max_rounds = 20,
|
||||
unsigned max_rounds = 32,
|
||||
bool recalculate_extensions = false) {
|
||||
graph_t sorted_graph (packed);
|
||||
if (sorted_graph.in_error ())
|
||||
|
@ -91,7 +91,7 @@ struct hb_serialize_context_t
|
||||
}
|
||||
#endif
|
||||
|
||||
friend void swap (object_t& a, object_t& b)
|
||||
friend void swap (object_t& a, object_t& b) noexcept
|
||||
{
|
||||
hb_swap (a.head, b.head);
|
||||
hb_swap (a.tail, b.tail);
|
||||
@ -156,9 +156,9 @@ struct hb_serialize_context_t
|
||||
object_t *next;
|
||||
|
||||
auto all_links () const HB_AUTO_RETURN
|
||||
(( hb_concat (this->real_links, this->virtual_links) ));
|
||||
(( hb_concat (real_links, virtual_links) ));
|
||||
auto all_links_writer () HB_AUTO_RETURN
|
||||
(( hb_concat (this->real_links.writer (), this->virtual_links.writer ()) ));
|
||||
(( hb_concat (real_links.writer (), virtual_links.writer ()) ));
|
||||
};
|
||||
|
||||
struct snapshot_t
|
||||
|
@ -44,10 +44,10 @@ struct hb_sparseset_t
|
||||
~hb_sparseset_t () { fini (); }
|
||||
|
||||
hb_sparseset_t (const hb_sparseset_t& other) : hb_sparseset_t () { set (other); }
|
||||
hb_sparseset_t (hb_sparseset_t&& other) : hb_sparseset_t () { s = std::move (other.s); }
|
||||
hb_sparseset_t (hb_sparseset_t&& other) noexcept : hb_sparseset_t () { s = std::move (other.s); }
|
||||
hb_sparseset_t& operator = (const hb_sparseset_t& other) { set (other); return *this; }
|
||||
hb_sparseset_t& operator = (hb_sparseset_t&& other) { s = std::move (other.s); return *this; }
|
||||
friend void swap (hb_sparseset_t& a, hb_sparseset_t& b) { hb_swap (a.s, b.s); }
|
||||
hb_sparseset_t& operator = (hb_sparseset_t&& other) noexcept { s = std::move (other.s); return *this; }
|
||||
friend void swap (hb_sparseset_t& a, hb_sparseset_t& b) noexcept { hb_swap (a.s, b.s); }
|
||||
|
||||
hb_sparseset_t (std::initializer_list<hb_codepoint_t> lst) : hb_sparseset_t ()
|
||||
{
|
||||
@ -166,7 +166,7 @@ struct hb_set_t : hb_sparseset_t<hb_bit_set_invertible_t>
|
||||
~hb_set_t () = default;
|
||||
hb_set_t () : sparseset () {};
|
||||
hb_set_t (const hb_set_t &o) : sparseset ((sparseset &) o) {};
|
||||
hb_set_t (hb_set_t&& o) : sparseset (std::move ((sparseset &) o)) {}
|
||||
hb_set_t (hb_set_t&& o) noexcept : sparseset (std::move ((sparseset &) o)) {}
|
||||
hb_set_t& operator = (const hb_set_t&) = default;
|
||||
hb_set_t& operator = (hb_set_t&&) = default;
|
||||
hb_set_t (std::initializer_list<hb_codepoint_t> lst) : sparseset (lst) {}
|
||||
|
@@ -248,7 +248,7 @@ struct cff2_subr_subsetter_t : subr_subsetter_t<cff2_subr_subsetter_t, CFF2Subrs
struct cff2_private_blend_encoder_param_t
{
cff2_private_blend_encoder_param_t (hb_serialize_context_t *c,
const CFF2VariationStore *varStore,
const CFF2ItemVariationStore *varStore,
hb_array_t<int> normalized_coords) :
c (c), varStore (varStore), normalized_coords (normalized_coords) {}

@@ -284,7 +284,7 @@ struct cff2_private_blend_encoder_param_t
unsigned ivs = 0;
unsigned region_count = 0;
hb_vector_t<float> scalars;
const CFF2VariationStore *varStore = nullptr;
const CFF2ItemVariationStore *varStore = nullptr;
hb_array_t<int> normalized_coords;
};

@@ -378,7 +378,7 @@ struct cff2_private_dict_blend_opset_t : dict_opset_t
struct cff2_private_dict_op_serializer_t : op_serializer_t
{
cff2_private_dict_op_serializer_t (bool desubroutinize_, bool drop_hints_, bool pinned_,
const CFF::CFF2VariationStore* varStore_,
const CFF::CFF2ItemVariationStore* varStore_,
hb_array_t<int> normalized_coords_)
: desubroutinize (desubroutinize_), drop_hints (drop_hints_), pinned (pinned_),
varStore (varStore_), normalized_coords (normalized_coords_) {}
@@ -416,7 +416,7 @@ struct cff2_private_dict_op_serializer_t : op_serializer_t
const bool desubroutinize;
const bool drop_hints;
const bool pinned;
const CFF::CFF2VariationStore* varStore;
const CFF::CFF2ItemVariationStore* varStore;
hb_array_t<int> normalized_coords;
};

@@ -628,10 +628,10 @@ OT::cff2::accelerator_subset_t::serialize (hb_serialize_context_t *c,
}

/* variation store */
if (varStore != &Null (CFF2VariationStore) &&
if (varStore != &Null (CFF2ItemVariationStore) &&
!plan.pinned)
{
auto *dest = c->push<CFF2VariationStore> ();
auto *dest = c->push<CFF2ItemVariationStore> ();
if (unlikely (!dest->serialize (c, varStore)))
{
c->pop_discard ();
@@ -24,6 +24,7 @@
* Google Author(s): Garret Rieger, Rod Sheeter, Behdad Esfahbod
*/

#include "hb-subset-instancer-solver.hh"
#include "hb-subset.hh"
#include "hb-set.hh"
#include "hb-utf.hh"
@@ -50,7 +51,6 @@ hb_subset_input_t::hb_subset_input_t ()
HB_TAG ('k', 'e', 'r', 'n'),

// Copied from fontTools:
HB_TAG ('B', 'A', 'S', 'E'),
HB_TAG ('J', 'S', 'T', 'F'),
HB_TAG ('D', 'S', 'I', 'G'),
HB_TAG ('E', 'B', 'D', 'T'),
@@ -417,6 +417,46 @@ hb_subset_input_keep_everything (hb_subset_input_t *input)
}

#ifndef HB_NO_VAR
/**
* hb_subset_input_pin_all_axes_to_default: (skip)
* @input: a #hb_subset_input_t object.
* @face: a #hb_face_t object.
*
* Pin all axes to default locations in the given subset input object.
*
* All axes in a font must be pinned. Additionally, `CFF2` table, if present,
* will be de-subroutinized.
*
* Return value: `true` if success, `false` otherwise
*
* Since: 8.3.1
**/
HB_EXTERN hb_bool_t
hb_subset_input_pin_all_axes_to_default (hb_subset_input_t *input,
hb_face_t *face)
{
unsigned axis_count = hb_ot_var_get_axis_count (face);
if (!axis_count) return false;

hb_ot_var_axis_info_t *axis_infos = (hb_ot_var_axis_info_t *) hb_calloc (axis_count, sizeof (hb_ot_var_axis_info_t));
if (unlikely (!axis_infos)) return false;

(void) hb_ot_var_get_axis_infos (face, 0, &axis_count, axis_infos);

for (unsigned i = 0; i < axis_count; i++)
{
hb_tag_t axis_tag = axis_infos[i].tag;
float default_val = axis_infos[i].default_value;
if (!input->axes_location.set (axis_tag, Triple (default_val, default_val, default_val)))
{
hb_free (axis_infos);
return false;
}
}
hb_free (axis_infos);
return true;
}
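A minimal usage sketch of the new hb_subset_input_pin_all_axes_to_default() entry point above (not part of this patch); the font path "MyVariableFont.ttf" is hypothetical:

hb_blob_t *blob = hb_blob_create_from_file_or_fail ("MyVariableFont.ttf");
hb_face_t *face = hb_face_create (blob, 0);
hb_subset_input_t *input = hb_subset_input_create_or_fail ();
hb_subset_input_keep_everything (input);
if (hb_subset_input_pin_all_axes_to_default (input, face))
{
  hb_face_t *instance = hb_subset_or_fail (face, input);
  /* ... serialize hb_face_reference_blob (instance) to disk ... */
  hb_face_destroy (instance);
}
hb_subset_input_destroy (input);
hb_face_destroy (face);
hb_blob_destroy (blob);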
/**
* hb_subset_input_pin_axis_to_default: (skip)
* @input: a #hb_subset_input_t object.
@@ -481,16 +521,13 @@ hb_subset_input_pin_axis_location (hb_subset_input_t *input,
* @input: a #hb_subset_input_t object.
* @face: a #hb_face_t object.
* @axis_tag: Tag of the axis
* @axis_min_value: Minimum value of the axis variation range to set
* @axis_max_value: Maximum value of the axis variation range to set
* @axis_def_value: Default value of the axis variation range to set, in case of
* null, it'll be determined automatically
* @axis_min_value: Minimum value of the axis variation range to set, if NaN the existing min will be used.
* @axis_max_value: Maximum value of the axis variation range to set, if NaN the existing max will be used.
* @axis_def_value: Default value of the axis variation range to set, if NaN the existing default will be used.
*
* Restricting the range of variation on an axis in the given subset input object.
* New min/default/max values will be clamped if they're not within the fvar axis range.
* If the new default value is null:
* If the fvar axis default value is within the new range, then new default
* value is the same as original default value.
*
* If the fvar axis default value is not within the new range, the new default
* value will be changed to the new min or max value, whichever is closer to the fvar
* axis default.
@@ -509,21 +546,57 @@ hb_subset_input_set_axis_range (hb_subset_input_t *input,
hb_tag_t axis_tag,
float axis_min_value,
float axis_max_value,
float *axis_def_value /* IN, maybe NULL */)
float axis_def_value)
{
if (axis_min_value > axis_max_value)
return false;

hb_ot_var_axis_info_t axis_info;
if (!hb_ot_var_find_axis_info (face, axis_tag, &axis_info))
return false;

float new_min_val = hb_clamp(axis_min_value, axis_info.min_value, axis_info.max_value);
float new_max_val = hb_clamp(axis_max_value, axis_info.min_value, axis_info.max_value);
float new_default_val = axis_def_value ? *axis_def_value : axis_info.default_value;
new_default_val = hb_clamp(new_default_val, new_min_val, new_max_val);
float min = !std::isnan(axis_min_value) ? axis_min_value : axis_info.min_value;
float max = !std::isnan(axis_max_value) ? axis_max_value : axis_info.max_value;
float def = !std::isnan(axis_def_value) ? axis_def_value : axis_info.default_value;

if (min > max)
return false;

float new_min_val = hb_clamp(min, axis_info.min_value, axis_info.max_value);
float new_max_val = hb_clamp(max, axis_info.min_value, axis_info.max_value);
float new_default_val = hb_clamp(def, new_min_val, new_max_val);
return input->axes_location.set (axis_tag, Triple (new_min_val, new_default_val, new_max_val));
}
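A sketch of the new NaN semantics (not from the patch), reusing the `input` and `face` from the sketch further above; this call is only available in builds with HB_EXPERIMENTAL_API, and NAN comes from <math.h>:

hb_subset_input_set_axis_range (input, face,
                                HB_TAG ('w','g','h','t'),
                                400.f, 700.f, NAN);  /* keep the font's existing default weight,
                                                        clamped into the new 400..700 range */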
/**
* hb_subset_input_get_axis_range: (skip)
* @input: a #hb_subset_input_t object.
* @axis_tag: Tag of the axis
* @axis_min_value: Set to the previously configured minimum value of the axis variation range.
* @axis_max_value: Set to the previously configured maximum value of the axis variation range.
* @axis_def_value: Set to the previously configured default value of the axis variation range.
*
* Gets the axis range assigned by previous calls to hb_subset_input_set_axis_range.
*
* Return value: `true` if a range has been set for this axis tag, `false` otherwise.
*
* XSince: EXPERIMENTAL
**/
HB_EXTERN hb_bool_t
hb_subset_input_get_axis_range (hb_subset_input_t *input,
hb_tag_t axis_tag,
float *axis_min_value,
float *axis_max_value,
float *axis_def_value)

{
Triple* triple;
if (!input->axes_location.has(axis_tag, &triple)) {
return false;
}

*axis_min_value = triple->minimum;
*axis_def_value = triple->middle;
*axis_max_value = triple->maximum;
return true;
}
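A matching read-back sketch (not from the patch), also experimental-only; <stdio.h> is assumed for printf:

float min_v, max_v, def_v;
if (hb_subset_input_get_axis_range (input, HB_TAG ('w','g','h','t'),
                                    &min_v, &max_v, &def_v))
  printf ("wght subset range: %g..%g, default %g\n", min_v, max_v, def_v);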
#endif
#endif
gfx/harfbuzz/src/hb-subset-instancer-iup.cc (new file, 532 lines)
@@ -0,0 +1,532 @@
/*
* Copyright © 2024 Google, Inc.
*
* This is part of HarfBuzz, a text shaping library.
*
* Permission is hereby granted, without written agreement and without
* license or royalty fees, to use, copy, modify, and distribute this
* software and its documentation for any purpose, provided that the
* above copyright notice and the following two paragraphs appear in
* all copies of this software.
*
* IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE TO ANY PARTY FOR
* DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES
* ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN
* IF THE COPYRIGHT HOLDER HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
* DAMAGE.
*
* THE COPYRIGHT HOLDER SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING,
* BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
* FITNESS FOR A PARTICULAR PURPOSE. THE SOFTWARE PROVIDED HEREUNDER IS
* ON AN "AS IS" BASIS, AND THE COPYRIGHT HOLDER HAS NO OBLIGATION TO
* PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
*/

#include "hb-subset-instancer-iup.hh"

/* This file is a straight port of the following:
*
* https://github.com/fonttools/fonttools/blob/main/Lib/fontTools/varLib/iup.py
*
* Where that file returns optimized deltas vector, we return optimized
* referenced point indices.
*/

constexpr static unsigned MAX_LOOKBACK = 8;

static void _iup_contour_bound_forced_set (const hb_array_t<const contour_point_t> contour_points,
const hb_array_t<const int> x_deltas,
const hb_array_t<const int> y_deltas,
hb_set_t& forced_set, /* OUT */
float tolerance = 0.f)
{
unsigned len = contour_points.length;
unsigned next_i = 0;
for (int i = len - 1; i >= 0; i--)
{
unsigned last_i = (len + i -1) % len;
for (unsigned j = 0; j < 2; j++)
{
float cj, lcj, ncj;
int dj, ldj, ndj;
if (j == 0)
{
cj = contour_points.arrayZ[i].x;
dj = x_deltas.arrayZ[i];
lcj = contour_points.arrayZ[last_i].x;
ldj = x_deltas.arrayZ[last_i];
ncj = contour_points.arrayZ[next_i].x;
ndj = x_deltas.arrayZ[next_i];
}
else
{
cj = contour_points.arrayZ[i].y;
dj = y_deltas.arrayZ[i];
lcj = contour_points.arrayZ[last_i].y;
ldj = y_deltas.arrayZ[last_i];
ncj = contour_points.arrayZ[next_i].y;
ndj = y_deltas.arrayZ[next_i];
}

float c1, c2;
int d1, d2;
if (lcj <= ncj)
{
c1 = lcj;
c2 = ncj;
d1 = ldj;
d2 = ndj;
}
else
{
c1 = ncj;
c2 = lcj;
d1 = ndj;
d2 = ldj;
}

bool force = false;
if (c1 == c2)
{
if (abs (d1 - d2) > tolerance && abs (dj) > tolerance)
force = true;
}
else if (c1 <= cj && cj <= c2)
{
if (!(hb_min (d1, d2) - tolerance <= dj &&
dj <= hb_max (d1, d2) + tolerance))
force = true;
}
else
{
if (d1 != d2)
{
if (cj < c1)
{
if (abs (dj) > tolerance &&
abs (dj - d1) > tolerance &&
((dj - tolerance < d1) != (d1 < d2)))
force = true;
}
else
{
if (abs (dj) > tolerance &&
abs (dj - d2) > tolerance &&
((d2 < dj + tolerance) != (d1 < d2)))
force = true;
}
}
}

if (force)
{
forced_set.add (i);
break;
}
}
next_i = i;
}
}
template <typename T,
hb_enable_if (hb_is_trivially_copyable (T))>
static bool rotate_array (const hb_array_t<const T>& org_array,
int k,
hb_vector_t<T>& out)
{
unsigned n = org_array.length;
if (!n) return true;
if (unlikely (!out.resize (n, false)))
return false;

unsigned item_size = hb_static_size (T);
if (k < 0)
k = n - (-k) % n;
else
k %= n;

hb_memcpy ((void *) out.arrayZ, (const void *) (org_array.arrayZ + n - k), k * item_size);
hb_memcpy ((void *) (out.arrayZ + k), (const void *) org_array.arrayZ, (n - k) * item_size);
return true;
}

static bool rotate_set (const hb_set_t& org_set,
int k,
unsigned n,
hb_set_t& out)
{
if (!n) return false;
k %= n;
if (k < 0)
k = n + k;

if (k == 0)
{
out.set (org_set);
}
else
{
for (auto v : org_set)
out.add ((v + k) % n);
}
return !out.in_error ();
}

/* Given two reference coordinates (start and end of contour_points array),
* output interpolated deltas for points in between */
static bool _iup_segment (const hb_array_t<const contour_point_t> contour_points,
const hb_array_t<const int> x_deltas,
const hb_array_t<const int> y_deltas,
const contour_point_t& p1, const contour_point_t& p2,
int p1_dx, int p2_dx,
int p1_dy, int p2_dy,
hb_vector_t<float>& interp_x_deltas, /* OUT */
hb_vector_t<float>& interp_y_deltas /* OUT */)
{
unsigned n = contour_points.length;
if (unlikely (!interp_x_deltas.resize (n, false) ||
!interp_y_deltas.resize (n, false)))
return false;

for (unsigned j = 0; j < 2; j++)
{
float x1, x2, d1, d2;
float *out;
if (j == 0)
{
x1 = p1.x;
x2 = p2.x;
d1 = p1_dx;
d2 = p2_dx;
out = interp_x_deltas.arrayZ;
}
else
{
x1 = p1.y;
x2 = p2.y;
d1 = p1_dy;
d2 = p2_dy;
out = interp_y_deltas.arrayZ;
}

if (x1 == x2)
{
if (d1 == d2)
{
for (unsigned i = 0; i < n; i++)
out[i] = d1;
}
else
{
for (unsigned i = 0; i < n; i++)
out[i] = 0.f;
}
continue;
}

if (x1 > x2)
{
hb_swap (x1, x2);
hb_swap (d1, d2);
}

float scale = (d2 - d1) / (x2 - x1);
for (unsigned i = 0; i < n; i++)
{
float x = j == 0 ? contour_points.arrayZ[i].x : contour_points.arrayZ[i].y;
float d;
if (x <= x1)
d = d1;
else if (x >= x2)
d = d2;
else
d = d1 + (x - x1) * scale;

out[i] = d;
}
}
return true;
}
static bool _can_iup_in_between (const hb_array_t<const contour_point_t> contour_points,
const hb_array_t<const int> x_deltas,
const hb_array_t<const int> y_deltas,
const contour_point_t& p1, const contour_point_t& p2,
int p1_dx, int p2_dx,
int p1_dy, int p2_dy,
float tolerance)
{
hb_vector_t<float> interp_x_deltas, interp_y_deltas;
if (!_iup_segment (contour_points, x_deltas, y_deltas,
p1, p2, p1_dx, p2_dx, p1_dy, p2_dy,
interp_x_deltas, interp_y_deltas))
return false;

unsigned num = contour_points.length;

for (unsigned i = 0; i < num; i++)
{
float dx = x_deltas.arrayZ[i] - interp_x_deltas.arrayZ[i];
float dy = y_deltas.arrayZ[i] - interp_y_deltas.arrayZ[i];

if (sqrtf ((float)dx * dx + (float)dy * dy) > tolerance)
return false;
}
return true;
}

static bool _iup_contour_optimize_dp (const contour_point_vector_t& contour_points,
const hb_vector_t<int>& x_deltas,
const hb_vector_t<int>& y_deltas,
const hb_set_t& forced_set,
float tolerance,
unsigned lookback,
hb_vector_t<unsigned>& costs, /* OUT */
hb_vector_t<int>& chain /* OUT */)
{
unsigned n = contour_points.length;
if (unlikely (!costs.resize (n, false) ||
!chain.resize (n, false)))
return false;

lookback = hb_min (lookback, MAX_LOOKBACK);

for (unsigned i = 0; i < n; i++)
{
unsigned best_cost = (i == 0 ? 1 : costs.arrayZ[i-1] + 1);

costs.arrayZ[i] = best_cost;
chain.arrayZ[i] = (i == 0 ? -1 : i - 1);

if (i > 0 && forced_set.has (i - 1))
continue;

int lookback_index = hb_max ((int) i - (int) lookback + 1, -1);
for (int j = i - 2; j >= lookback_index; j--)
{
unsigned cost = j == -1 ? 1 : costs.arrayZ[j] + 1;
/* num points between i and j */
unsigned num_points = i - j - 1;
unsigned p1 = (j == -1 ? n - 1 : j);
if (cost < best_cost &&
_can_iup_in_between (contour_points.as_array ().sub_array (j + 1, num_points),
x_deltas.as_array ().sub_array (j + 1, num_points),
y_deltas.as_array ().sub_array (j + 1, num_points),
contour_points.arrayZ[p1], contour_points.arrayZ[i],
x_deltas.arrayZ[p1], x_deltas.arrayZ[i],
y_deltas.arrayZ[p1], y_deltas.arrayZ[i],
tolerance))
{
best_cost = cost;
costs.arrayZ[i] = best_cost;
chain.arrayZ[i] = j;
}

if (j > 0 && forced_set.has (j))
break;
}
}
return true;
}
static bool _iup_contour_optimize (const hb_array_t<const contour_point_t> contour_points,
const hb_array_t<const int> x_deltas,
const hb_array_t<const int> y_deltas,
hb_array_t<bool> opt_indices, /* OUT */
float tolerance = 0.f)
{
unsigned n = contour_points.length;
if (opt_indices.length != n ||
x_deltas.length != n ||
y_deltas.length != n)
return false;

bool all_within_tolerance = true;
for (unsigned i = 0; i < n; i++)
{
int dx = x_deltas.arrayZ[i];
int dy = y_deltas.arrayZ[i];
if (sqrtf ((float)dx * dx + (float)dy * dy) > tolerance)
{
all_within_tolerance = false;
break;
}
}

/* If all are within tolerance distance, do nothing, opt_indices is
* initialized to false */
if (all_within_tolerance)
return true;

/* If there's exactly one point, return it */
if (n == 1)
{
opt_indices.arrayZ[0] = true;
return true;
}

/* If all deltas are exactly the same, return just one (the first one) */
bool all_deltas_are_equal = true;
for (unsigned i = 1; i < n; i++)
if (x_deltas.arrayZ[i] != x_deltas.arrayZ[0] ||
y_deltas.arrayZ[i] != y_deltas.arrayZ[0])
{
all_deltas_are_equal = false;
break;
}

if (all_deltas_are_equal)
{
opt_indices.arrayZ[0] = true;
return true;
}

/* else, solve the general problem using Dynamic Programming */
hb_set_t forced_set;
_iup_contour_bound_forced_set (contour_points, x_deltas, y_deltas, forced_set, tolerance);

if (!forced_set.is_empty ())
{
int k = n - 1 - forced_set.get_max ();
if (k < 0)
return false;

hb_vector_t<int> rot_x_deltas, rot_y_deltas;
contour_point_vector_t rot_points;
hb_set_t rot_forced_set;
if (!rotate_array (contour_points, k, rot_points) ||
!rotate_array (x_deltas, k, rot_x_deltas) ||
!rotate_array (y_deltas, k, rot_y_deltas) ||
!rotate_set (forced_set, k, n, rot_forced_set))
return false;

hb_vector_t<unsigned> costs;
hb_vector_t<int> chain;

if (!_iup_contour_optimize_dp (rot_points, rot_x_deltas, rot_y_deltas,
rot_forced_set, tolerance, n,
costs, chain))
return false;

hb_set_t solution;
int index = n - 1;
while (index != -1)
{
solution.add (index);
index = chain.arrayZ[index];
}

if (solution.is_empty () ||
forced_set.get_population () > solution.get_population ())
return false;

for (unsigned i : solution)
opt_indices.arrayZ[i] = true;

hb_vector_t<bool> rot_indices;
const hb_array_t<const bool> opt_indices_array (opt_indices.arrayZ, opt_indices.length);
rotate_array (opt_indices_array, -k, rot_indices);

for (unsigned i = 0; i < n; i++)
opt_indices.arrayZ[i] = rot_indices.arrayZ[i];
}
else
{
hb_vector_t<int> repeat_x_deltas, repeat_y_deltas;
contour_point_vector_t repeat_points;

if (unlikely (!repeat_x_deltas.resize (n * 2, false) ||
!repeat_y_deltas.resize (n * 2, false) ||
!repeat_points.resize (n * 2, false)))
return false;

unsigned contour_point_size = hb_static_size (contour_point_t);
for (unsigned i = 0; i < n; i++)
{
hb_memcpy ((void *) repeat_x_deltas.arrayZ, (const void *) x_deltas.arrayZ, n * sizeof (float));
hb_memcpy ((void *) (repeat_x_deltas.arrayZ + n), (const void *) x_deltas.arrayZ, n * sizeof (float));

hb_memcpy ((void *) repeat_y_deltas.arrayZ, (const void *) y_deltas.arrayZ, n * sizeof (float));
hb_memcpy ((void *) (repeat_y_deltas.arrayZ + n), (const void *) y_deltas.arrayZ, n * sizeof (float));

hb_memcpy ((void *) repeat_points.arrayZ, (const void *) contour_points.arrayZ, n * contour_point_size);
hb_memcpy ((void *) (repeat_points.arrayZ + n), (const void *) contour_points.arrayZ, n * contour_point_size);
}

hb_vector_t<unsigned> costs;
hb_vector_t<int> chain;
if (!_iup_contour_optimize_dp (repeat_points, repeat_x_deltas, repeat_y_deltas,
forced_set, tolerance, n,
costs, chain))
return false;

unsigned best_cost = n + 1;
int len = costs.length;
hb_set_t best_sol;
for (int start = n - 1; start < len; start++)
{
hb_set_t solution;
int i = start;
int lookback = start - (int) n;
while (i > lookback)
{
solution.add (i % n);
i = chain.arrayZ[i];
}
if (i == lookback)
{
unsigned cost_i = i < 0 ? 0 : costs.arrayZ[i];
unsigned cost = costs.arrayZ[start] - cost_i;
if (cost <= best_cost)
{
best_sol.set (solution);
best_cost = cost;
}
}
}

for (unsigned i = 0; i < n; i++)
if (best_sol.has (i))
opt_indices.arrayZ[i] = true;
}
return true;
}

bool iup_delta_optimize (const contour_point_vector_t& contour_points,
const hb_vector_t<int>& x_deltas,
const hb_vector_t<int>& y_deltas,
hb_vector_t<bool>& opt_indices, /* OUT */
float tolerance)
{
if (!opt_indices.resize (contour_points.length))
return false;

hb_vector_t<unsigned> end_points;
unsigned count = contour_points.length;
if (unlikely (!end_points.alloc (count)))
return false;

for (unsigned i = 0; i < count - 4; i++)
if (contour_points.arrayZ[i].is_end_point)
end_points.push (i);

/* phantom points */
for (unsigned i = count - 4; i < count; i++)
end_points.push (i);

if (end_points.in_error ()) return false;

unsigned start = 0;
for (unsigned end : end_points)
{
unsigned len = end - start + 1;
if (!_iup_contour_optimize (contour_points.as_array ().sub_array (start, len),
x_deltas.as_array ().sub_array (start, len),
y_deltas.as_array ().sub_array (start, len),
opt_indices.as_array ().sub_array (start, len),
tolerance))
return false;
start = end + 1;
}
return true;
}
gfx/harfbuzz/src/hb-subset-instancer-iup.hh (new file, 37 lines)
@@ -0,0 +1,37 @@
/*
* Copyright © 2024 Google, Inc.
*
* This is part of HarfBuzz, a text shaping library.
*
* Permission is hereby granted, without written agreement and without
* license or royalty fees, to use, copy, modify, and distribute this
* software and its documentation for any purpose, provided that the
* above copyright notice and the following two paragraphs appear in
* all copies of this software.
*
* IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE TO ANY PARTY FOR
* DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES
* ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN
* IF THE COPYRIGHT HOLDER HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
* DAMAGE.
*
* THE COPYRIGHT HOLDER SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING,
* BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
* FITNESS FOR A PARTICULAR PURPOSE. THE SOFTWARE PROVIDED HEREUNDER IS
* ON AN "AS IS" BASIS, AND THE COPYRIGHT HOLDER HAS NO OBLIGATION TO
* PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
*/

#ifndef HB_SUBSET_INSTANCER_IUP_HH
#define HB_SUBSET_INSTANCER_IUP_HH

#include "hb-subset-plan.hh"
/* given contour points and deltas, optimize a set of referenced points within error
* tolerance. Returns optimized referenced point indices */
HB_INTERNAL bool iup_delta_optimize (const contour_point_vector_t& contour_points,
const hb_vector_t<int>& x_deltas,
const hb_vector_t<int>& y_deltas,
hb_vector_t<bool>& opt_indices, /* OUT */
float tolerance = 0.f);

#endif /* HB_SUBSET_INSTANCER_IUP_HH */
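A hypothetical, self-contained sketch (not from the patch) of calling the internal iup_delta_optimize() declared above: one contour of four points plus the four phantom points, all carrying the same made-up delta, so each contour segment keeps a single referenced point. opt_indices[i] == true marks deltas that must be stored explicitly; the rest can be re-derived by IUP at run time.

contour_point_vector_t points;
points.resize (8);                      /* 4 real points + 4 phantom points */
for (unsigned i = 0; i < 8; i++) { points[i].x = (float) i; points[i].y = 0.f; }
points[3].is_end_point = true;          /* single contour of 4 points */

hb_vector_t<int> x_deltas, y_deltas;
x_deltas.resize (8); y_deltas.resize (8);
for (unsigned i = 0; i < 8; i++) { x_deltas[i] = 10; y_deltas[i] = 0; }

hb_vector_t<bool> opt_indices;
if (iup_delta_optimize (points, x_deltas, y_deltas, opt_indices, 0.5f))
{
  /* serialize only the deltas whose opt_indices[i] is true */
}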
@@ -256,7 +256,10 @@ _solve (Triple tent, Triple axisLimit, bool negative = false)
*/
float newUpper = peak + (1 - gain) * (upper - peak);
assert (axisMax <= newUpper); // Because outGain > gain
if (newUpper <= axisDef + (axisMax - axisDef) * 2)
/* Disabled because ots doesn't like us:
* https://github.com/fonttools/fonttools/issues/3350 */

if (false && (newUpper <= axisDef + (axisMax - axisDef) * 2))
{
upper = newUpper;
if (!negative && axisDef + (axisMax - axisDef) * MAX_F2DOT14 < upper)
@@ -140,6 +140,15 @@ HB_SUBSET_PLAN_MEMBER (mutable hb_vector_t<unsigned>, bounds_height_vec)
//map: new_gid -> contour points vector
HB_SUBSET_PLAN_MEMBER (mutable hb_hashmap_t E(<hb_codepoint_t, contour_point_vector_t>), new_gid_contour_points_map)

//new gids set for composite glyphs
HB_SUBSET_PLAN_MEMBER (hb_set_t, composite_new_gids)

//Old BASE item variation index -> (New varidx, 0) mapping
HB_SUBSET_PLAN_MEMBER (hb_hashmap_t E(<unsigned, hb_pair_t E(<unsigned, int>)>), base_variation_idx_map)

//BASE table varstore retained varidx mapping
HB_SUBSET_PLAN_MEMBER (hb_vector_t<hb_inc_bimap_t>, base_varstore_inner_maps)

#ifdef HB_EXPERIMENTAL_API
// name table overrides map: hb_ot_name_record_ids_t-> name string new value or
// None to indicate should remove
@@ -32,6 +32,7 @@

#include "hb-ot-cmap-table.hh"
#include "hb-ot-glyf-table.hh"
#include "hb-ot-layout-base-table.hh"
#include "hb-ot-layout-gdef-table.hh"
#include "hb-ot-layout-gpos-table.hh"
#include "hb-ot-layout-gsub-table.hh"
@@ -431,6 +432,52 @@ _collect_layout_variation_indices (hb_subset_plan_t* plan)
gdef.destroy ();
gpos.destroy ();
}

#ifndef HB_NO_BASE
/* used by BASE table only, delta is always set to 0 in the output map */
static inline void
_remap_variation_indices (const hb_set_t& indices,
unsigned subtable_count,
hb_hashmap_t<unsigned, hb_pair_t<unsigned, int>>& variation_idx_delta_map /* OUT */)
{
unsigned new_major = 0, new_minor = 0;
unsigned last_major = (indices.get_min ()) >> 16;
for (unsigned idx : indices)
{
uint16_t major = idx >> 16;
if (major >= subtable_count) break;
if (major != last_major)
{
new_minor = 0;
++new_major;
}

unsigned new_idx = (new_major << 16) + new_minor;
variation_idx_delta_map.set (idx, hb_pair_t<unsigned, int> (new_idx, 0));
++new_minor;
last_major = major;
}
}
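A worked example (illustrative only, not from the patch): with subtable_count = 5 and sorted input indices { 0x00010000, 0x00010003, 0x00040001 }, the remapping above yields 0x00010000 → 0x00000000, 0x00010003 → 0x00000001, and 0x00040001 → 0x00010000, each paired with a delta of 0 — retained subtables are renumbered densely from zero, and the minor index restarts within every subtable.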

static inline void
_collect_base_variation_indices (hb_subset_plan_t* plan)
{
hb_blob_ptr_t<OT::BASE> base = plan->source_table<OT::BASE> ();
if (!base->has_var_store ())
{
base.destroy ();
return;
}

hb_set_t varidx_set;
base->collect_variation_indices (plan, varidx_set);
unsigned subtable_count = base->get_var_store ().get_sub_table_count ();
base.destroy ();

_remap_variation_indices (varidx_set, subtable_count, plan->base_variation_idx_map);
_generate_varstore_inner_maps (varidx_set, subtable_count, plan->base_varstore_inner_maps);
}
#endif
#endif

static inline void
@@ -994,8 +1041,8 @@ _update_instance_metrics_map_from_cff2 (hb_subset_plan_t *plan)
OT::cff2::accelerator_t cff2 (plan->source);
if (!cff2.is_valid ()) return;

hb_font_t *font = nullptr;
if (unlikely (!plan->check_success (font = _get_hb_font_with_variations (plan))))
hb_font_t *font = _get_hb_font_with_variations (plan);
if (unlikely (!plan->check_success (font != nullptr)))
{
hb_font_destroy (font);
return;
@@ -1073,8 +1120,8 @@ _update_instance_metrics_map_from_cff2 (hb_subset_plan_t *plan)
static bool
_get_instance_glyphs_contour_points (hb_subset_plan_t *plan)
{
/* contour_points vector only needed for updating gvar table (infer delta)
* during partial instancing */
/* contour_points vector only needed for updating gvar table (infer delta and
* iup delta optimization) during partial instancing */
if (plan->user_axes_location.is_empty () || plan->all_axes_pinned)
return true;

@@ -1092,10 +1139,17 @@ _get_instance_glyphs_contour_points (hb_subset_plan_t *plan)
}

hb_codepoint_t old_gid = _.second;
if (unlikely (!glyf.glyph_for_gid (old_gid).get_all_points_without_var (plan->source, all_points)))
auto glyph = glyf.glyph_for_gid (old_gid);
if (unlikely (!glyph.get_all_points_without_var (plan->source, all_points)))
return false;
if (unlikely (!plan->new_gid_contour_points_map.set (new_gid, all_points)))
return false;

#ifdef HB_EXPERIMENTAL_API
/* composite new gids are only needed by iup delta optimization */
if ((plan->flags & HB_SUBSET_FLAGS_OPTIMIZE_IUP_DELTAS) && glyph.is_composite ())
plan->composite_new_gids.add (new_gid);
#endif
}
return true;
}
@@ -1205,6 +1259,13 @@ hb_subset_plan_t::hb_subset_plan_t (hb_face_t *face,
if (!drop_tables.has (HB_OT_TAG_GDEF))
_remap_used_mark_sets (this, used_mark_sets_map);

#ifndef HB_NO_VAR
#ifndef HB_NO_BASE
if (!drop_tables.has (HB_OT_TAG_BASE))
_collect_base_variation_indices (this);
#endif
#endif

if (unlikely (in_error ()))
return;
@@ -78,6 +78,13 @@ struct contour_point_t
y = x * matrix[1] + y * matrix[3];
x = x_;
}

void add_delta (float delta_x, float delta_y)
{
x += delta_x;
y += delta_y;
}

HB_ALWAYS_INLINE
void translate (const contour_point_t &p) { x += p.x; y += p.y; }

@@ -99,6 +106,22 @@ struct contour_point_vector_t : hb_vector_t<contour_point_t>
unsigned count = a.length;
hb_memcpy (arrayZ, a.arrayZ, count * sizeof (arrayZ[0]));
}

bool add_deltas (const hb_vector_t<float> deltas_x,
const hb_vector_t<float> deltas_y,
const hb_vector_t<bool> indices)
{
if (indices.length != deltas_x.length ||
indices.length != deltas_y.length)
return false;

for (unsigned i = 0; i < indices.length; i++)
{
if (!indices.arrayZ[i]) continue;
arrayZ[i].add_delta (deltas_x.arrayZ[i], deltas_y.arrayZ[i]);
}
return true;
}
};

namespace OT {
@@ -147,7 +170,7 @@ struct hb_subset_plan_t
bool gsub_insert_catch_all_feature_variation_rec;
bool gpos_insert_catch_all_feature_variation_rec;

// whether GDEF VarStore is retained
// whether GDEF ItemVariationStore is retained
mutable bool has_gdef_varstore;

#define HB_SUBSET_PLAN_MEMBER(Type, Name) Type Name;
@@ -48,6 +48,7 @@
#include "hb-ot-cff2-table.hh"
#include "hb-ot-vorg-table.hh"
#include "hb-ot-name-table.hh"
#include "hb-ot-layout-base-table.hh"
#include "hb-ot-layout-gsub-table.hh"
#include "hb-ot-layout-gpos-table.hh"
#include "hb-ot-var-avar-table.hh"
@@ -503,6 +504,7 @@ _subset_table (hb_subset_plan_t *plan,
case HB_OT_TAG_CBLC: return _subset<const OT::CBLC> (plan, buf);
case HB_OT_TAG_CBDT: return true; /* skip CBDT, handled by CBLC */
case HB_OT_TAG_MATH: return _subset<const OT::MATH> (plan, buf);
case HB_OT_TAG_BASE: return _subset<const OT::BASE> (plan, buf);

#ifndef HB_NO_SUBSET_CFF
case HB_OT_TAG_CFF1: return _subset<const OT::cff1> (plan, buf);
@@ -548,6 +550,7 @@ _subset_table (hb_subset_plan_t *plan,
}
#endif
return _passthrough (plan, tag);

default:
if (plan->flags & HB_SUBSET_FLAGS_PASSTHROUGH_UNRECOGNIZED)
return _passthrough (plan, tag);
@@ -76,6 +76,8 @@ typedef struct hb_subset_plan_t hb_subset_plan_t;
* @HB_SUBSET_FLAGS_IFTB_REQUIREMENTS: If set enforce requirements on the output subset
* to allow it to be used with incremental font transfer IFTB patches. Primarily,
* this forces all outline data to use long (32 bit) offsets. Since: EXPERIMENTAL
* @HB_SUBSET_FLAGS_OPTIMIZE_IUP_DELTAS: If set perform IUP delta optimization on the
* remaining gvar table's deltas. Since: EXPERIMENTAL
*
* List of boolean properties that can be configured on the subset input.
*
@@ -95,6 +97,7 @@ typedef enum { /*< flags >*/
HB_SUBSET_FLAGS_NO_LAYOUT_CLOSURE = 0x00000200u,
#ifdef HB_EXPERIMENTAL_API
HB_SUBSET_FLAGS_IFTB_REQUIREMENTS = 0x00000400u,
HB_SUBSET_FLAGS_OPTIMIZE_IUP_DELTAS = 0x00000800u,
#endif
} hb_subset_flags_t;
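A short sketch (not from the patch) of turning the new flag on; it requires a build with HB_EXPERIMENTAL_API, and combining it with HB_SUBSET_FLAGS_RETAIN_GIDS here is purely illustrative:

hb_subset_input_t *input = hb_subset_input_create_or_fail ();
hb_subset_input_set_flags (input,
                           HB_SUBSET_FLAGS_RETAIN_GIDS |
                           HB_SUBSET_FLAGS_OPTIMIZE_IUP_DELTAS);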
@@ -170,6 +173,10 @@ HB_EXTERN void
hb_subset_input_set_flags (hb_subset_input_t *input,
unsigned value);

HB_EXTERN hb_bool_t
hb_subset_input_pin_all_axes_to_default (hb_subset_input_t *input,
hb_face_t *face);

HB_EXTERN hb_bool_t
hb_subset_input_pin_axis_to_default (hb_subset_input_t *input,
hb_face_t *face,
@@ -182,13 +189,20 @@ hb_subset_input_pin_axis_location (hb_subset_input_t *input,
float axis_value);

#ifdef HB_EXPERIMENTAL_API
HB_EXTERN hb_bool_t
hb_subset_input_get_axis_range (hb_subset_input_t *input,
hb_tag_t axis_tag,
float *axis_min_value,
float *axis_max_value,
float *axis_def_value);

HB_EXTERN hb_bool_t
hb_subset_input_set_axis_range (hb_subset_input_t *input,
hb_face_t *face,
hb_tag_t axis_tag,
float axis_min_value,
float axis_max_value,
float *axis_def_value);
float axis_def_value);

HB_EXTERN hb_bool_t
hb_subset_input_override_name_table (hb_subset_input_t *input,
@@ -78,7 +78,7 @@ struct hb_vector_t
if (unlikely (in_error ())) return;
copy_array (o);
}
hb_vector_t (hb_vector_t &&o)
hb_vector_t (hb_vector_t &&o) noexcept
{
allocated = o.allocated;
length = o.length;
@@ -122,7 +122,7 @@ struct hb_vector_t
resize (0);
}

friend void swap (hb_vector_t& a, hb_vector_t& b)
friend void swap (hb_vector_t& a, hb_vector_t& b) noexcept
{
hb_swap (a.allocated, b.allocated);
hb_swap (a.length, b.length);
@@ -139,7 +139,7 @@ struct hb_vector_t

return *this;
}
hb_vector_t& operator = (hb_vector_t &&o)
hb_vector_t& operator = (hb_vector_t &&o) noexcept
{
hb_swap (*this, o);
return *this;
@@ -53,14 +53,14 @@ HB_BEGIN_DECLS
*
* The micro component of the library version available at compile-time.
*/
#define HB_VERSION_MICRO 0
#define HB_VERSION_MICRO 1

/**
* HB_VERSION_STRING:
*
* A string literal containing the library version available at compile-time.
*/
#define HB_VERSION_STRING "8.3.0"
#define HB_VERSION_STRING "8.3.1"

/**
* HB_VERSION_ATLEAST:
@@ -64,6 +64,7 @@
#pragma GCC diagnostic error "-Wbitwise-instead-of-logical"
#pragma GCC diagnostic error "-Wcast-align"
#pragma GCC diagnostic error "-Wcast-function-type"
#pragma GCC diagnostic error "-Wcast-function-type-strict"
#pragma GCC diagnostic error "-Wconstant-conversion"
#pragma GCC diagnostic error "-Wcomma"
#pragma GCC diagnostic error "-Wdelete-non-virtual-dtor"
@@ -177,6 +178,11 @@
#define HB_EXTERN __declspec (dllexport) extern
#endif

// https://github.com/harfbuzz/harfbuzz/pull/4619
#ifndef __STDC_FORMAT_MACROS
#define __STDC_FORMAT_MACROS 1
#endif

#include "hb.h"
#define HB_H_IN
#include "hb-ot.h"
@@ -212,6 +218,12 @@
#include <winapifamily.h>
#endif

#ifndef PRId32
# define PRId32 "d"
# define PRIu32 "u"
# define PRIx32 "x"
#endif

#define HB_PASTE1(a,b) a##b
#define HB_PASTE(a,b) HB_PASTE1(a,b)
@@ -371,6 +371,8 @@ hb_subset_sources = files(
'hb-subset-cff2.cc',
'hb-subset-input.cc',
'hb-subset-input.hh',
'hb-subset-instancer-iup.hh',
'hb-subset-instancer-iup.cc',
'hb-subset-instancer-solver.hh',
'hb-subset-instancer-solver.cc',
'hb-subset-plan.cc',
@@ -680,7 +682,8 @@ if conf.get('HAVE_CAIRO', 0) == 1
endif

if get_option('tests').enabled()
# TODO: MSVC gives the following,
# TODO: Microsoft LINK gives the following because extern, non dllexport
# symbols can only be used when linking against a static library
# error LNK2019: unresolved external symbol "unsigned __int64 const * const _hb_NullPool"
if cpp.get_define('_MSC_FULL_VER') == ''
noinst_programs = {
@@ -722,13 +725,13 @@ if get_option('tests').enabled()
'test-repacker': ['test-repacker.cc', 'hb-static.cc', 'graph/gsubgpos-context.cc'],
'test-instancer-solver': ['test-subset-instancer-solver.cc', 'hb-subset-instancer-solver.cc', 'hb-static.cc'],
'test-priority-queue': ['test-priority-queue.cc', 'hb-static.cc'],
'test-tuple-varstore': ['test-tuple-varstore.cc', 'hb-subset-instancer-solver.cc', 'hb-static.cc'],
'test-item-varstore': ['test-item-varstore.cc', 'hb-subset-instancer-solver.cc', 'hb-static.cc'],
'test-tuple-varstore': ['test-tuple-varstore.cc', 'hb-subset-instancer-solver.cc', 'hb-subset-instancer-iup.cc', 'hb-static.cc'],
'test-item-varstore': ['test-item-varstore.cc', 'hb-subset-instancer-solver.cc', 'hb-subset-instancer-iup.cc', 'hb-static.cc'],
'test-unicode-ranges': ['test-unicode-ranges.cc'],
}
foreach name, source : compiled_tests
if cpp.get_argument_syntax() == 'msvc' and source.contains('hb-static.cc')
# TODO: MSVC doesn't like tests having hb-static.cc, fix them
if cpp.get_define('_MSC_FULL_VER') != '' and source.contains('hb-static.cc')
# TODO: Microsoft compilers cannot link tests using hb-static.cc, fix them
continue
endif
test(name, executable(name, source,
@@ -86,6 +86,7 @@ UNIFIED_SOURCES += [
'hb-shaper.cc',
'hb-static.cc',
'hb-style.cc',
'hb-subset-instancer-iup.cc',
'hb-unicode.cc',
'hb-wasm-api.cc',
'hb-wasm-shape.cc',