aboutsummaryrefslogtreecommitdiffstats
diff options
context:
space:
mode:
authorrobot-contrib <robot-contrib@yandex-team.com>2023-12-17 11:17:16 +0300
committerrobot-contrib <robot-contrib@yandex-team.com>2023-12-17 11:37:17 +0300
commit85fcbbd6d8436b62e46d5a4e2ed1a6d72b61dc62 (patch)
tree0db5875d7d5725c79befec0b06c37ea297ba001e
parente0fee3f62fb672ae80623ec56a78649a8acc1fdd (diff)
downloadydb-85fcbbd6d8436b62e46d5a4e2ed1a6d72b61dc62.tar.gz
Update contrib/python/fonttools to 4.46.0
-rw-r--r--contrib/python/fonttools/.dist-info/METADATA18
-rw-r--r--contrib/python/fonttools/fontTools/__init__.py2
-rw-r--r--contrib/python/fonttools/fontTools/misc/bezierTools.py16
-rw-r--r--contrib/python/fonttools/fontTools/varLib/__init__.py66
-rw-r--r--contrib/python/fonttools/fontTools/varLib/featureVars.py110
-rw-r--r--contrib/python/fonttools/fontTools/varLib/interpolatable.py1147
-rw-r--r--contrib/python/fonttools/fontTools/varLib/interpolatableHelpers.py383
-rw-r--r--contrib/python/fonttools/fontTools/varLib/interpolatablePlot.py849
-rw-r--r--contrib/python/fonttools/fontTools/varLib/interpolatableTestContourOrder.py74
-rw-r--r--contrib/python/fonttools/fontTools/varLib/interpolatableTestStartingPoint.py105
-rw-r--r--contrib/python/fonttools/ya.make5
11 files changed, 1938 insertions, 837 deletions
diff --git a/contrib/python/fonttools/.dist-info/METADATA b/contrib/python/fonttools/.dist-info/METADATA
index 9c1a26c22c..9026fc11f8 100644
--- a/contrib/python/fonttools/.dist-info/METADATA
+++ b/contrib/python/fonttools/.dist-info/METADATA
@@ -1,6 +1,6 @@
Metadata-Version: 2.1
Name: fonttools
-Version: 4.45.1
+Version: 4.46.0
Summary: Tools to manipulate font files
Home-page: http://github.com/fonttools/fonttools
Author: Just van Rossum
@@ -366,6 +366,22 @@ Have fun!
Changelog
~~~~~~~~~
+4.46.0 (released 2023-12-02)
+----------------------------
+
+- [featureVars] Allow to register the same set of substitution rules to multiple features.
+ The ``addFeatureVariations`` function can now take a list of featureTags; similarly, the
+ lib key 'com.github.fonttools.varLib.featureVarsFeatureTag' can now take a
+  comma-separated string of feature tags (e.g. "salt,ss01") instead of a single tag (#3360).
+- [featureVars] Don't overwrite GSUB FeatureVariations, but append new records to it
+ for features which are not already there. But raise ``VarLibError`` if the feature tag
+ already has feature variations associated with it (#3363).
+- [varLib] Added ``addGSUBFeatureVariations`` function to add GSUB Feature Variations
+ to an existing variable font from rules defined in a DesignSpace document (#3362).
+- [varLib.interpolatable] Various bugfixes and rendering improvements. In particular,
+ a new test for "underweight" glyphs. The new test reports quite a few false-positives
+ though. Please send feedback.
+
4.45.1 (released 2023-11-23)
----------------------------
diff --git a/contrib/python/fonttools/fontTools/__init__.py b/contrib/python/fonttools/fontTools/__init__.py
index 6e396b5084..dfe589402f 100644
--- a/contrib/python/fonttools/fontTools/__init__.py
+++ b/contrib/python/fonttools/fontTools/__init__.py
@@ -3,6 +3,6 @@ from fontTools.misc.loggingTools import configLogger
log = logging.getLogger(__name__)
-version = __version__ = "4.45.1"
+version = __version__ = "4.46.0"
__all__ = ["version", "log", "configLogger"]
diff --git a/contrib/python/fonttools/fontTools/misc/bezierTools.py b/contrib/python/fonttools/fontTools/misc/bezierTools.py
index 21ab0a5d0a..a1a707b098 100644
--- a/contrib/python/fonttools/fontTools/misc/bezierTools.py
+++ b/contrib/python/fonttools/fontTools/misc/bezierTools.py
@@ -1370,6 +1370,11 @@ def _curve_curve_intersections_t(
return unique_values
+def _is_linelike(segment):
+ maybeline = _alignment_transformation(segment).transformPoints(segment)
+ return all(math.isclose(p[1], 0.0) for p in maybeline)
+
+
def curveCurveIntersections(curve1, curve2):
"""Finds intersections between a curve and a curve.
@@ -1391,6 +1396,17 @@ def curveCurveIntersections(curve1, curve2):
>>> intersections[0].pt
(81.7831487395506, 109.88904552375288)
"""
+ if _is_linelike(curve1):
+ line1 = curve1[0], curve1[-1]
+ if _is_linelike(curve2):
+ line2 = curve2[0], curve2[-1]
+ return lineLineIntersections(*line1, *line2)
+ else:
+ return curveLineIntersections(curve2, line1)
+ elif _is_linelike(curve2):
+ line2 = curve2[0], curve2[-1]
+ return curveLineIntersections(curve1, line2)
+
intersection_ts = _curve_curve_intersections_t(curve1, curve2)
return [
Intersection(pt=segmentPointAtT(curve1, ts[0]), t1=ts[0], t2=ts[1])
diff --git a/contrib/python/fonttools/fontTools/varLib/__init__.py b/contrib/python/fonttools/fontTools/varLib/__init__.py
index b130d5b2a4..46834f6433 100644
--- a/contrib/python/fonttools/fontTools/varLib/__init__.py
+++ b/contrib/python/fonttools/fontTools/varLib/__init__.py
@@ -52,7 +52,8 @@ from .errors import VarLibError, VarLibValidationError
log = logging.getLogger("fontTools.varLib")
# This is a lib key for the designspace document. The value should be
-# an OpenType feature tag, to be used as the FeatureVariations feature.
+# a comma-separated list of OpenType feature tag(s), to be used as the
+# FeatureVariations feature.
# If present, the DesignSpace <rules processing="..."> flag is ignored.
FEAVAR_FEATURETAG_LIB_KEY = "com.github.fonttools.varLib.featureVarsFeatureTag"
@@ -781,7 +782,9 @@ def _merge_OTL(font, model, master_fonts, axisTags):
font["GPOS"].table.remap_device_varidxes(varidx_map)
-def _add_GSUB_feature_variations(font, axes, internal_axis_supports, rules, featureTag):
+def _add_GSUB_feature_variations(
+ font, axes, internal_axis_supports, rules, featureTags
+):
def normalize(name, value):
return models.normalizeLocation({name: value}, internal_axis_supports)[name]
@@ -812,7 +815,7 @@ def _add_GSUB_feature_variations(font, axes, internal_axis_supports, rules, feat
conditional_subs.append((region, subs))
- addFeatureVariations(font, conditional_subs, featureTag)
+ addFeatureVariations(font, conditional_subs, featureTags)
_DesignSpaceData = namedtuple(
@@ -860,7 +863,7 @@ def _add_COLR(font, model, master_fonts, axisTags, colr_layer_reuse=True):
colr.VarIndexMap = builder.buildDeltaSetIndexMap(varIdxes)
-def load_designspace(designspace):
+def load_designspace(designspace, log_enabled=True):
# TODO: remove this and always assume 'designspace' is a DesignSpaceDocument,
# never a file path, as that's already handled by caller
if hasattr(designspace, "sources"): # Assume a DesignspaceDocument
@@ -908,10 +911,11 @@ def load_designspace(designspace):
axis.labelNames["en"] = tostr(axis_name)
axes[axis_name] = axis
- log.info("Axes:\n%s", pformat([axis.asdict() for axis in axes.values()]))
+ if log_enabled:
+ log.info("Axes:\n%s", pformat([axis.asdict() for axis in axes.values()]))
axisMappings = ds.axisMappings
- if axisMappings:
+ if axisMappings and log_enabled:
log.info("Mappings:\n%s", pformat(axisMappings))
# Check all master and instance locations are valid and fill in defaults
@@ -941,20 +945,23 @@ def load_designspace(designspace):
# Normalize master locations
internal_master_locs = [o.getFullDesignLocation(ds) for o in masters]
- log.info("Internal master locations:\n%s", pformat(internal_master_locs))
+ if log_enabled:
+ log.info("Internal master locations:\n%s", pformat(internal_master_locs))
# TODO This mapping should ideally be moved closer to logic in _add_fvar/avar
internal_axis_supports = {}
for axis in axes.values():
triple = (axis.minimum, axis.default, axis.maximum)
internal_axis_supports[axis.name] = [axis.map_forward(v) for v in triple]
- log.info("Internal axis supports:\n%s", pformat(internal_axis_supports))
+ if log_enabled:
+ log.info("Internal axis supports:\n%s", pformat(internal_axis_supports))
normalized_master_locs = [
models.normalizeLocation(m, internal_axis_supports)
for m in internal_master_locs
]
- log.info("Normalized master locations:\n%s", pformat(normalized_master_locs))
+ if log_enabled:
+ log.info("Normalized master locations:\n%s", pformat(normalized_master_locs))
# Find base master
base_idx = None
@@ -969,7 +976,8 @@ def load_designspace(designspace):
raise VarLibValidationError(
"Base master not found; no master at default location?"
)
- log.info("Index of base master: %s", base_idx)
+ if log_enabled:
+ log.info("Index of base master: %s", base_idx)
return _DesignSpaceData(
axes,
@@ -1204,11 +1212,9 @@ def build(
if "cvar" not in exclude and "glyf" in vf:
_merge_TTHinting(vf, model, master_fonts)
if "GSUB" not in exclude and ds.rules:
- featureTag = ds.lib.get(
- FEAVAR_FEATURETAG_LIB_KEY, "rclt" if ds.rulesProcessingLast else "rvrn"
- )
+ featureTags = _feature_variations_tags(ds)
_add_GSUB_feature_variations(
- vf, ds.axes, ds.internal_axis_supports, ds.rules, featureTag
+ vf, ds.axes, ds.internal_axis_supports, ds.rules, featureTags
)
if "CFF2" not in exclude and ("CFF " in vf or "CFF2" in vf):
_add_CFF2(vf, model, master_fonts)
@@ -1299,6 +1305,38 @@ class MasterFinder(object):
return os.path.normpath(path)
+def _feature_variations_tags(ds):
+ raw_tags = ds.lib.get(
+ FEAVAR_FEATURETAG_LIB_KEY,
+ "rclt" if ds.rulesProcessingLast else "rvrn",
+ )
+ return sorted({t.strip() for t in raw_tags.split(",")})
+
+
+def addGSUBFeatureVariations(vf, designspace, featureTags=(), *, log_enabled=False):
+ """Add GSUB FeatureVariations table to variable font, based on DesignSpace rules.
+
+ Args:
+ vf: A TTFont object representing the variable font.
+ designspace: A DesignSpaceDocument object.
+ featureTags: Optional feature tag(s) to use for the FeatureVariations records.
+ If unset, the key 'com.github.fonttools.varLib.featureVarsFeatureTag' is
+ looked up in the DS <lib> and used; otherwise the default is 'rclt' if
+ the <rules processing="last"> attribute is set, else 'rvrn'.
+ See <https://fonttools.readthedocs.io/en/latest/designspaceLib/xml.html#rules-element>
+ log_enabled: If True, log info about DS axes and sources. Default is False, as
+ the same info may have already been logged as part of varLib.build.
+ """
+ ds = load_designspace(designspace, log_enabled=log_enabled)
+ if not ds.rules:
+ return
+ if not featureTags:
+ featureTags = _feature_variations_tags(ds)
+ _add_GSUB_feature_variations(
+ vf, ds.axes, ds.internal_axis_supports, ds.rules, featureTags
+ )
+
+
def main(args=None):
"""Build variable fonts from a designspace file and masters"""
from argparse import ArgumentParser
diff --git a/contrib/python/fonttools/fontTools/varLib/featureVars.py b/contrib/python/fonttools/fontTools/varLib/featureVars.py
index f0403d76e4..a6beb5c7d2 100644
--- a/contrib/python/fonttools/fontTools/varLib/featureVars.py
+++ b/contrib/python/fonttools/fontTools/varLib/featureVars.py
@@ -43,9 +43,18 @@ def addFeatureVariations(font, conditionalSubstitutions, featureTag="rvrn"):
# ... ]
# >>> addFeatureVariations(f, condSubst)
# >>> f.save(dstPath)
+
+    The `featureTag` parameter takes either a str or an iterable of str (the single str
+ is kept for backwards compatibility), and defines which feature(s) will be
+ associated with the feature variations.
+ Note, if this is "rvrn", then the substitution lookup will be inserted at the
+ beginning of the lookup list so that it is processed before others, otherwise
+ for any other feature tags it will be appended last.
"""
- processLast = featureTag != "rvrn"
+ # process first when "rvrn" is the only listed tag
+ featureTags = [featureTag] if isinstance(featureTag, str) else sorted(featureTag)
+ processLast = "rvrn" not in featureTags or len(featureTags) > 1
_checkSubstitutionGlyphsExist(
glyphNames=set(font.getGlyphOrder()),
@@ -60,6 +69,14 @@ def addFeatureVariations(font, conditionalSubstitutions, featureTag="rvrn"):
)
if "GSUB" not in font:
font["GSUB"] = buildGSUB()
+ else:
+ existingTags = _existingVariableFeatures(font["GSUB"].table).intersection(
+ featureTags
+ )
+ if existingTags:
+ raise VarLibError(
+ f"FeatureVariations already exist for feature tag(s): {existingTags}"
+ )
# setup lookups
lookupMap = buildSubstitutionLookups(
@@ -75,7 +92,17 @@ def addFeatureVariations(font, conditionalSubstitutions, featureTag="rvrn"):
(conditionSet, [lookupMap[s] for s in substitutions])
)
- addFeatureVariationsRaw(font, font["GSUB"].table, conditionsAndLookups, featureTag)
+ addFeatureVariationsRaw(font, font["GSUB"].table, conditionsAndLookups, featureTags)
+
+
+def _existingVariableFeatures(table):
+ existingFeatureVarsTags = set()
+ if hasattr(table, "FeatureVariations") and table.FeatureVariations is not None:
+ features = table.FeatureList.FeatureRecord
+ for fvr in table.FeatureVariations.FeatureVariationRecord:
+ for ftsr in fvr.FeatureTableSubstitution.SubstitutionRecord:
+ existingFeatureVarsTags.add(features[ftsr.FeatureIndex].FeatureTag)
+ return existingFeatureVarsTags
def _checkSubstitutionGlyphsExist(glyphNames, substitutions):
@@ -324,46 +351,64 @@ def addFeatureVariationsRaw(font, table, conditionalSubstitutions, featureTag="r
"""Low level implementation of addFeatureVariations that directly
models the possibilities of the FeatureVariations table."""
- processLast = featureTag != "rvrn"
+ featureTags = [featureTag] if isinstance(featureTag, str) else sorted(featureTag)
+ processLast = "rvrn" not in featureTags or len(featureTags) > 1
#
- # if there is no <featureTag> feature:
+ # if a <featureTag> feature is not present:
# make empty <featureTag> feature
# sort features, get <featureTag> feature index
# add <featureTag> feature to all scripts
+ # if a <featureTag> feature is present:
+ # reuse <featureTag> feature index
# make lookups
# add feature variations
#
if table.Version < 0x00010001:
table.Version = 0x00010001 # allow table.FeatureVariations
- table.FeatureVariations = None # delete any existing FeatureVariations
+ varFeatureIndices = set()
- varFeatureIndices = []
- for index, feature in enumerate(table.FeatureList.FeatureRecord):
- if feature.FeatureTag == featureTag:
- varFeatureIndices.append(index)
+ existingTags = {
+ feature.FeatureTag
+ for feature in table.FeatureList.FeatureRecord
+ if feature.FeatureTag in featureTags
+ }
- if not varFeatureIndices:
- varFeature = buildFeatureRecord(featureTag, [])
- table.FeatureList.FeatureRecord.append(varFeature)
+ newTags = set(featureTags) - existingTags
+ if newTags:
+ varFeatures = []
+ for featureTag in sorted(newTags):
+ varFeature = buildFeatureRecord(featureTag, [])
+ table.FeatureList.FeatureRecord.append(varFeature)
+ varFeatures.append(varFeature)
table.FeatureList.FeatureCount = len(table.FeatureList.FeatureRecord)
sortFeatureList(table)
- varFeatureIndex = table.FeatureList.FeatureRecord.index(varFeature)
-
- for scriptRecord in table.ScriptList.ScriptRecord:
- if scriptRecord.Script.DefaultLangSys is None:
- raise VarLibError(
- "Feature variations require that the script "
- f"'{scriptRecord.ScriptTag}' defines a default language system."
- )
- langSystems = [lsr.LangSys for lsr in scriptRecord.Script.LangSysRecord]
- for langSys in [scriptRecord.Script.DefaultLangSys] + langSystems:
- langSys.FeatureIndex.append(varFeatureIndex)
- langSys.FeatureCount = len(langSys.FeatureIndex)
- varFeatureIndices = [varFeatureIndex]
+ for varFeature in varFeatures:
+ varFeatureIndex = table.FeatureList.FeatureRecord.index(varFeature)
+
+ for scriptRecord in table.ScriptList.ScriptRecord:
+ if scriptRecord.Script.DefaultLangSys is None:
+ raise VarLibError(
+ "Feature variations require that the script "
+ f"'{scriptRecord.ScriptTag}' defines a default language system."
+ )
+ langSystems = [lsr.LangSys for lsr in scriptRecord.Script.LangSysRecord]
+ for langSys in [scriptRecord.Script.DefaultLangSys] + langSystems:
+ langSys.FeatureIndex.append(varFeatureIndex)
+ langSys.FeatureCount = len(langSys.FeatureIndex)
+ varFeatureIndices.add(varFeatureIndex)
+
+ if existingTags:
+ # indices may have changed if we inserted new features and sorted feature list
+ # so we must do this after the above
+ varFeatureIndices.update(
+ index
+ for index, feature in enumerate(table.FeatureList.FeatureRecord)
+ if feature.FeatureTag in existingTags
+ )
axisIndices = {
axis.axisTag: axisIndex for axisIndex, axis in enumerate(font["fvar"].axes)
@@ -380,7 +425,7 @@ def addFeatureVariationsRaw(font, table, conditionalSubstitutions, featureTag="r
ct = buildConditionTable(axisIndices[axisTag], minValue, maxValue)
conditionTable.append(ct)
records = []
- for varFeatureIndex in varFeatureIndices:
+ for varFeatureIndex in sorted(varFeatureIndices):
existingLookupIndices = table.FeatureList.FeatureRecord[
varFeatureIndex
].Feature.LookupListIndex
@@ -399,7 +444,18 @@ def addFeatureVariationsRaw(font, table, conditionalSubstitutions, featureTag="r
buildFeatureVariationRecord(conditionTable, records)
)
- table.FeatureVariations = buildFeatureVariations(featureVariationRecords)
+ if hasattr(table, "FeatureVariations") and table.FeatureVariations is not None:
+ if table.FeatureVariations.Version != 0x00010000:
+ raise VarLibError(
+ "Unsupported FeatureVariations table version: "
+ f"0x{table.FeatureVariations.Version:08x} (expected 0x00010000)."
+ )
+ table.FeatureVariations.FeatureVariationRecord.extend(featureVariationRecords)
+ table.FeatureVariations.FeatureVariationCount = len(
+ table.FeatureVariations.FeatureVariationRecord
+ )
+ else:
+ table.FeatureVariations = buildFeatureVariations(featureVariationRecords)
#
diff --git a/contrib/python/fonttools/fontTools/varLib/interpolatable.py b/contrib/python/fonttools/fontTools/varLib/interpolatable.py
index 9b72b4f502..f03e946207 100644
--- a/contrib/python/fonttools/fontTools/varLib/interpolatable.py
+++ b/contrib/python/fonttools/fontTools/varLib/interpolatable.py
@@ -6,300 +6,134 @@ Call as:
$ fonttools varLib.interpolatable font1 font2 ...
"""
-from fontTools.pens.basePen import AbstractPen, BasePen
-from fontTools.pens.pointPen import AbstractPointPen, SegmentToPointPen
-from fontTools.pens.recordingPen import RecordingPen
+from .interpolatableHelpers import *
+from .interpolatableTestContourOrder import test_contour_order
+from .interpolatableTestStartingPoint import test_starting_point
+from fontTools.pens.recordingPen import RecordingPen, DecomposingRecordingPen
+from fontTools.pens.transformPen import TransformPen
from fontTools.pens.statisticsPen import StatisticsPen, StatisticsControlPen
from fontTools.pens.momentsPen import OpenContourError
from fontTools.varLib.models import piecewiseLinearMap, normalizeLocation
from fontTools.misc.fixedTools import floatToFixedToStr
from fontTools.misc.transform import Transform
-from collections import defaultdict, deque
+from collections import defaultdict
+from types import SimpleNamespace
from functools import wraps
from pprint import pformat
-from math import sqrt, copysign, atan2, pi
-import itertools
+from math import sqrt, atan2, pi
import logging
log = logging.getLogger("fontTools.varLib.interpolatable")
-
-def _rot_list(l, k):
- """Rotate list by k items forward. Ie. item at position 0 will be
- at position k in returned list. Negative k is allowed."""
- return l[-k:] + l[:-k]
-
-
-class PerContourPen(BasePen):
- def __init__(self, Pen, glyphset=None):
- BasePen.__init__(self, glyphset)
- self._glyphset = glyphset
- self._Pen = Pen
- self._pen = None
- self.value = []
-
- def _moveTo(self, p0):
- self._newItem()
- self._pen.moveTo(p0)
-
- def _lineTo(self, p1):
- self._pen.lineTo(p1)
-
- def _qCurveToOne(self, p1, p2):
- self._pen.qCurveTo(p1, p2)
-
- def _curveToOne(self, p1, p2, p3):
- self._pen.curveTo(p1, p2, p3)
-
- def _closePath(self):
- self._pen.closePath()
- self._pen = None
-
- def _endPath(self):
- self._pen.endPath()
- self._pen = None
-
- def _newItem(self):
- self._pen = pen = self._Pen()
- self.value.append(pen)
-
-
-class PerContourOrComponentPen(PerContourPen):
- def addComponent(self, glyphName, transformation):
- self._newItem()
- self.value[-1].addComponent(glyphName, transformation)
-
-
-class SimpleRecordingPointPen(AbstractPointPen):
- def __init__(self):
- self.value = []
-
- def beginPath(self, identifier=None, **kwargs):
- pass
-
- def endPath(self) -> None:
- pass
-
- def addPoint(self, pt, segmentType=None):
- self.value.append((pt, False if segmentType is None else True))
-
-
-def _vdiff_hypot2(v0, v1):
- s = 0
- for x0, x1 in zip(v0, v1):
- d = x1 - x0
- s += d * d
- return s
-
-
-def _vdiff_hypot2_complex(v0, v1):
- s = 0
- for x0, x1 in zip(v0, v1):
- d = x1 - x0
- s += d.real * d.real + d.imag * d.imag
- # This does the same but seems to be slower:
- # s += (d * d.conjugate()).real
- return s
-
-
-def _hypot2_complex(d):
- return d.real * d.real + d.imag * d.imag
-
-
-def _matching_cost(G, matching):
- return sum(G[i][j] for i, j in enumerate(matching))
-
-
-def min_cost_perfect_bipartite_matching_scipy(G):
- n = len(G)
- rows, cols = linear_sum_assignment(G)
- assert (rows == list(range(n))).all()
- return list(cols), _matching_cost(G, cols)
-
-
-def min_cost_perfect_bipartite_matching_munkres(G):
- n = len(G)
- cols = [None] * n
- for row, col in Munkres().compute(G):
- cols[row] = col
- return cols, _matching_cost(G, cols)
-
-
-def min_cost_perfect_bipartite_matching_bruteforce(G):
- n = len(G)
-
- if n > 6:
- raise Exception("Install Python module 'munkres' or 'scipy >= 0.17.0'")
-
- # Otherwise just brute-force
- permutations = itertools.permutations(range(n))
- best = list(next(permutations))
- best_cost = _matching_cost(G, best)
- for p in permutations:
- cost = _matching_cost(G, p)
- if cost < best_cost:
- best, best_cost = list(p), cost
- return best, best_cost
-
-
-try:
- from scipy.optimize import linear_sum_assignment
-
- min_cost_perfect_bipartite_matching = min_cost_perfect_bipartite_matching_scipy
-except ImportError:
- try:
- from munkres import Munkres
-
- min_cost_perfect_bipartite_matching = (
- min_cost_perfect_bipartite_matching_munkres
- )
- except ImportError:
- min_cost_perfect_bipartite_matching = (
- min_cost_perfect_bipartite_matching_bruteforce
- )
-
-
-def _contour_vector_from_stats(stats):
- # Don't change the order of items here.
- # It's okay to add to the end, but otherwise, other
- # code depends on it. Search for "covariance".
- size = sqrt(abs(stats.area))
- return (
- copysign((size), stats.area),
- stats.meanX,
- stats.meanY,
- stats.stddevX * 2,
- stats.stddevY * 2,
- stats.correlation * size,
+DEFAULT_TOLERANCE = 0.95
+DEFAULT_KINKINESS = 0.5
+DEFAULT_KINKINESS_LENGTH = 0.002 # ratio of UPEM
+DEFAULT_UPEM = 1000
+
+
+class Glyph:
+ ITEMS = (
+ "recordings",
+ "recordingsNormalized",
+ "greenStats",
+ "controlStats",
+ "greenVectors",
+ "greenVectorsNormalized",
+ "controlVectors",
+ "nodeTypes",
+ "isomorphisms",
+ "points",
+ "openContours",
)
+ def __init__(self, glyphname, glyphset):
+ self.name = glyphname
+ for item in self.ITEMS:
+ setattr(self, item, [])
+ self._populate(glyphset)
-def _points_characteristic_bits(points):
- bits = 0
- for pt, b in reversed(points):
- bits = (bits << 1) | b
- return bits
-
-
-_NUM_ITEMS_PER_POINTS_COMPLEX_VECTOR = 4
-
-
-def _points_complex_vector(points):
- vector = []
- if not points:
- return vector
- points = [complex(*pt) for pt, _ in points]
- n = len(points)
- assert _NUM_ITEMS_PER_POINTS_COMPLEX_VECTOR == 4
- points.extend(points[: _NUM_ITEMS_PER_POINTS_COMPLEX_VECTOR - 1])
- while len(points) < _NUM_ITEMS_PER_POINTS_COMPLEX_VECTOR:
- points.extend(points[: _NUM_ITEMS_PER_POINTS_COMPLEX_VECTOR - 1])
- for i in range(n):
- # The weights are magic numbers.
-
- # The point itself
- p0 = points[i]
- vector.append(p0)
-
- # The vector to the next point
- p1 = points[i + 1]
- d0 = p1 - p0
- vector.append(d0 * 3)
-
- # The turn vector
- p2 = points[i + 2]
- d1 = p2 - p1
- vector.append(d1 - d0)
-
- # The angle to the next point, as a cross product;
- # Square root of, to match dimentionality of distance.
- cross = d0.real * d1.imag - d0.imag * d1.real
- cross = copysign(sqrt(abs(cross)), cross)
- vector.append(cross * 4)
-
- return vector
-
-
-def _add_isomorphisms(points, isomorphisms, reverse):
- reference_bits = _points_characteristic_bits(points)
- n = len(points)
+ def _fill_in(self, ix):
+ for item in self.ITEMS:
+ if len(getattr(self, item)) == ix:
+ getattr(self, item).append(None)
- # if points[0][0] == points[-1][0]:
- # abort
+ def _populate(self, glyphset):
+ glyph = glyphset[self.name]
+ self.doesnt_exist = glyph is None
+ if self.doesnt_exist:
+ return
- if reverse:
- points = points[::-1]
- bits = _points_characteristic_bits(points)
- else:
- bits = reference_bits
-
- vector = _points_complex_vector(points)
-
- assert len(vector) % n == 0
- mult = len(vector) // n
- mask = (1 << n) - 1
-
- for i in range(n):
- b = ((bits << (n - i)) & mask) | (bits >> i)
- if b == reference_bits:
- isomorphisms.append(
- (_rot_list(vector, -i * mult), n - 1 - i if reverse else i, reverse)
- )
+ perContourPen = PerContourOrComponentPen(RecordingPen, glyphset=glyphset)
+ try:
+ glyph.draw(perContourPen, outputImpliedClosingLine=True)
+ except TypeError:
+ glyph.draw(perContourPen)
+ self.recordings = perContourPen.value
+ del perContourPen
+
+ for ix, contour in enumerate(self.recordings):
+ nodeTypes = [op for op, arg in contour.value]
+ self.nodeTypes.append(nodeTypes)
+
+ greenStats = StatisticsPen(glyphset=glyphset)
+ controlStats = StatisticsControlPen(glyphset=glyphset)
+ try:
+ contour.replay(greenStats)
+ contour.replay(controlStats)
+ self.openContours.append(False)
+ except OpenContourError as e:
+ self.openContours.append(True)
+ self._fill_in(ix)
+ continue
+ self.greenStats.append(greenStats)
+ self.controlStats.append(controlStats)
+ self.greenVectors.append(contour_vector_from_stats(greenStats))
+ self.controlVectors.append(contour_vector_from_stats(controlStats))
+ # Save a "normalized" version of the outlines
+ try:
+ rpen = DecomposingRecordingPen(glyphset)
+ tpen = TransformPen(
+ rpen, transform_from_stats(greenStats, inverse=True)
+ )
+ contour.replay(tpen)
+ self.recordingsNormalized.append(rpen)
+ except ZeroDivisionError:
+ self.recordingsNormalized.append(None)
+
+ greenStats = StatisticsPen(glyphset=glyphset)
+ rpen.replay(greenStats)
+ self.greenVectorsNormalized.append(contour_vector_from_stats(greenStats))
+
+ # Check starting point
+ if nodeTypes[0] == "addComponent":
+ self._fill_in(ix)
+ continue
-def _find_parents_and_order(glyphsets, locations):
- parents = [None] + list(range(len(glyphsets) - 1))
- order = list(range(len(glyphsets)))
- if locations:
- # Order base master first
- bases = (i for i, l in enumerate(locations) if all(v == 0 for v in l.values()))
- if bases:
- base = next(bases)
- logging.info("Base master index %s, location %s", base, locations[base])
+ assert nodeTypes[0] == "moveTo"
+ assert nodeTypes[-1] in ("closePath", "endPath")
+ points = SimpleRecordingPointPen()
+ converter = SegmentToPointPen(points, False)
+ contour.replay(converter)
+ # points.value is a list of pt,bool where bool is true if on-curve and false if off-curve;
+ # now check all rotations and mirror-rotations of the contour and build list of isomorphic
+ # possible starting points.
+ self.points.append(points.value)
+
+ isomorphisms = []
+ self.isomorphisms.append(isomorphisms)
+
+ # Add rotations
+ add_isomorphisms(points.value, isomorphisms, False)
+ # Add mirrored rotations
+ add_isomorphisms(points.value, isomorphisms, True)
+
+ def draw(self, pen, countor_idx=None):
+ if countor_idx is None:
+ for contour in self.recordings:
+ contour.draw(pen)
else:
- base = 0
- logging.warning("No base master location found")
-
- # Form a minimum spanning tree of the locations
- try:
- from scipy.sparse.csgraph import minimum_spanning_tree
-
- graph = [[0] * len(locations) for _ in range(len(locations))]
- axes = set()
- for l in locations:
- axes.update(l.keys())
- axes = sorted(axes)
- vectors = [tuple(l.get(k, 0) for k in axes) for l in locations]
- for i, j in itertools.combinations(range(len(locations)), 2):
- graph[i][j] = _vdiff_hypot2(vectors[i], vectors[j])
-
- tree = minimum_spanning_tree(graph)
- rows, cols = tree.nonzero()
- graph = defaultdict(set)
- for row, col in zip(rows, cols):
- graph[row].add(col)
- graph[col].add(row)
-
- # Traverse graph from the base and assign parents
- parents = [None] * len(locations)
- order = []
- visited = set()
- queue = deque([base])
- while queue:
- i = queue.popleft()
- visited.add(i)
- order.append(i)
- for j in sorted(graph[i]):
- if j not in visited:
- parents[j] = i
- queue.append(j)
-
- except ImportError:
- pass
-
- log.info("Parents: %s", parents)
- log.info("Order: %s", order)
- return parents, order
+ self.recordings[countor_idx].draw(pen)
def test_gen(
@@ -309,18 +143,26 @@ def test_gen(
ignore_missing=False,
*,
locations=None,
- tolerance=0.95,
+ tolerance=DEFAULT_TOLERANCE,
+ kinkiness=DEFAULT_KINKINESS,
+ upem=DEFAULT_UPEM,
show_all=False,
):
- if names is None:
- names = glyphsets
+ if tolerance >= 10:
+ tolerance *= 0.01
+ assert 0 <= tolerance <= 1
+ if kinkiness >= 10:
+ kinkiness *= 0.01
+ assert 0 <= kinkiness
+
+ names = names or [repr(g) for g in glyphsets]
if glyphs is None:
# `glyphs = glyphsets[0].keys()` is faster, certainly, but doesn't allow for sparse TTFs/OTFs given out of order
# ... risks the sparse master being the first one, and only processing a subset of the glyphs
glyphs = {g for glyphset in glyphsets for g in glyphset.keys()}
- parents, order = _find_parents_and_order(glyphsets, locations)
+ parents, order = find_parents_and_order(glyphsets, locations)
def grand_parent(i, glyphname):
if i is None:
@@ -334,110 +176,57 @@ def test_gen(
for glyph_name in glyphs:
log.info("Testing glyph %s", glyph_name)
- allGreenVectors = []
- allControlVectors = []
- allNodeTypes = []
- allContourIsomorphisms = []
- allContourPoints = []
- allGlyphs = [glyphset[glyph_name] for glyphset in glyphsets]
+ allGlyphs = [Glyph(glyph_name, glyphset) for glyphset in glyphsets]
if len([1 for glyph in allGlyphs if glyph is not None]) <= 1:
continue
for master_idx, (glyph, glyphset, name) in enumerate(
zip(allGlyphs, glyphsets, names)
):
- if glyph is None:
+ if glyph.doesnt_exist:
if not ignore_missing:
yield (
glyph_name,
{"type": "missing", "master": name, "master_idx": master_idx},
)
- allNodeTypes.append(None)
- allControlVectors.append(None)
- allGreenVectors.append(None)
- allContourIsomorphisms.append(None)
- allContourPoints.append(None)
continue
- perContourPen = PerContourOrComponentPen(RecordingPen, glyphset=glyphset)
- try:
- glyph.draw(perContourPen, outputImpliedClosingLine=True)
- except TypeError:
- glyph.draw(perContourPen)
- contourPens = perContourPen.value
- del perContourPen
-
- contourControlVectors = []
- contourGreenVectors = []
- contourIsomorphisms = []
- contourPoints = []
- nodeTypes = []
- allNodeTypes.append(nodeTypes)
- allControlVectors.append(contourControlVectors)
- allGreenVectors.append(contourGreenVectors)
- allContourIsomorphisms.append(contourIsomorphisms)
- allContourPoints.append(contourPoints)
- for ix, contour in enumerate(contourPens):
- contourOps = tuple(op for op, arg in contour.value)
- nodeTypes.append(contourOps)
-
- greenStats = StatisticsPen(glyphset=glyphset)
- controlStats = StatisticsControlPen(glyphset=glyphset)
- try:
- contour.replay(greenStats)
- contour.replay(controlStats)
- except OpenContourError as e:
- yield (
- glyph_name,
- {
- "master": name,
- "master_idx": master_idx,
- "contour": ix,
- "type": "open_path",
- },
- )
- continue
- contourGreenVectors.append(_contour_vector_from_stats(greenStats))
- contourControlVectors.append(_contour_vector_from_stats(controlStats))
-
- # Check starting point
- if contourOps[0] == "addComponent":
+ has_open = False
+ for ix, open in enumerate(glyph.openContours):
+ if not open:
continue
- assert contourOps[0] == "moveTo"
- assert contourOps[-1] in ("closePath", "endPath")
- points = SimpleRecordingPointPen()
- converter = SegmentToPointPen(points, False)
- contour.replay(converter)
- # points.value is a list of pt,bool where bool is true if on-curve and false if off-curve;
- # now check all rotations and mirror-rotations of the contour and build list of isomorphic
- # possible starting points.
-
- isomorphisms = []
- contourIsomorphisms.append(isomorphisms)
-
- # Add rotations
- _add_isomorphisms(points.value, isomorphisms, False)
- # Add mirrored rotations
- _add_isomorphisms(points.value, isomorphisms, True)
-
- contourPoints.append(points.value)
+ has_open = True
+ yield (
+ glyph_name,
+ {
+ "master": name,
+ "master_idx": master_idx,
+ "contour": ix,
+ "type": "open_path",
+ },
+ )
+ if has_open:
+ continue
- matchings = [None] * len(allControlVectors)
+ matchings = [None] * len(glyphsets)
for m1idx in order:
- if allNodeTypes[m1idx] is None:
+ glyph1 = allGlyphs[m1idx]
+ if glyph1 is None or not glyph1.nodeTypes:
continue
m0idx = grand_parent(m1idx, glyph_name)
if m0idx is None:
continue
- if allNodeTypes[m0idx] is None:
+ glyph0 = allGlyphs[m0idx]
+ if glyph0 is None or not glyph0.nodeTypes:
continue
- showed = False
+ #
+ # Basic compatibility checks
+ #
- m1 = allNodeTypes[m1idx]
- m0 = allNodeTypes[m0idx]
+ m1 = glyph0.nodeTypes
+ m0 = glyph1.nodeTypes
if len(m0) != len(m1):
- showed = True
yield (
glyph_name,
{
@@ -457,7 +246,6 @@ def test_gen(
if nodes1 == nodes2:
continue
if len(nodes1) != len(nodes2):
- showed = True
yield (
glyph_name,
{
@@ -474,7 +262,6 @@ def test_gen(
continue
for nodeIx, (n1, n2) in enumerate(zip(nodes1, nodes2)):
if n1 != n2:
- showed = True
yield (
glyph_name,
{
@@ -491,193 +278,88 @@ def test_gen(
)
continue
- m1Control = allControlVectors[m1idx]
- m1Green = allGreenVectors[m1idx]
- m0Control = allControlVectors[m0idx]
- m0Green = allGreenVectors[m0idx]
- if len(m1Control) > 1:
- identity_matching = list(range(len(m0Control)))
-
- # We try matching both the StatisticsControlPen vector
- # and the StatisticsPen vector.
- # If either method found a identity matching, accept it.
- # This is crucial for fonts like Kablammo[MORF].ttf and
- # Nabla[EDPT,EHLT].ttf, since they really confuse the
- # StatisticsPen vector because of their area=0 contours.
- #
- # TODO: Optimize by only computing the StatisticsPen vector
- # and then checking if it is the identity vector. Only if
- # not, compute the StatisticsControlPen vector and check both.
-
- costsControl = [
- [_vdiff_hypot2(v0, v1) for v1 in m1Control] for v0 in m0Control
- ]
- (
- matching_control,
- matching_cost_control,
- ) = min_cost_perfect_bipartite_matching(costsControl)
- identity_cost_control = sum(
- costsControl[i][i] for i in range(len(m0Control))
- )
- done = matching_cost_control == identity_cost_control
-
- if not done:
- costsGreen = [
- [_vdiff_hypot2(v0, v1) for v1 in m1Green] for v0 in m0Green
- ]
- (
- matching_green,
- matching_cost_green,
- ) = min_cost_perfect_bipartite_matching(costsGreen)
- identity_cost_green = sum(
- costsGreen[i][i] for i in range(len(m0Control))
- )
- done = matching_cost_green == identity_cost_green
-
- if not done:
- # Otherwise, use the worst of the two matchings.
- if (
- matching_cost_control / identity_cost_control
- < matching_cost_green / identity_cost_green
- ):
- matching = matching_control
- matching_cost = matching_cost_control
- identity_cost = identity_cost_control
- else:
- matching = matching_green
- matching_cost = matching_cost_green
- identity_cost = identity_cost_green
-
- if matching_cost < identity_cost * tolerance:
- # print(matching_cost_control / identity_cost_control, matching_cost_green / identity_cost_green)
-
- showed = True
- yield (
- glyph_name,
- {
- "type": "contour_order",
- "master_1": names[m0idx],
- "master_2": names[m1idx],
- "master_1_idx": m0idx,
- "master_2_idx": m1idx,
- "value_1": list(range(len(m0Control))),
- "value_2": matching,
- },
- )
- matchings[m1idx] = matching
+ #
+ # "contour_order" check
+ #
- m1 = allContourIsomorphisms[m1idx]
- m0 = allContourIsomorphisms[m0idx]
+ matching, matching_cost, identity_cost = test_contour_order(glyph0, glyph1)
+ if matching_cost < identity_cost * tolerance:
+ log.debug(
+ "matching_ratio %g",
+ matching_cost / identity_cost,
+ )
+ this_tolerance = matching_cost / identity_cost
+ log.debug("tolerance: %g", this_tolerance)
+ yield (
+ glyph_name,
+ {
+ "type": "contour_order",
+ "master_1": names[m0idx],
+ "master_2": names[m1idx],
+ "master_1_idx": m0idx,
+ "master_2_idx": m1idx,
+ "value_1": list(range(len(matching))),
+ "value_2": matching,
+ "tolerance": this_tolerance,
+ },
+ )
+ matchings[m1idx] = matching
+
+ #
+ # "wrong_start_point" / weight check
+ #
+
+ m0Isomorphisms = glyph0.isomorphisms
+ m1Isomorphisms = glyph1.isomorphisms
+ m0Vectors = glyph0.greenVectors
+ m1Vectors = glyph1.greenVectors
+ m0VectorsNormalized = glyph0.greenVectorsNormalized
+ m1VectorsNormalized = glyph1.greenVectorsNormalized
+ recording0 = glyph0.recordings
+ recording1 = glyph1.recordings
+ recording0Normalized = glyph0.recordingsNormalized
+ recording1Normalized = glyph1.recordingsNormalized
# If contour-order is wrong, adjust it
- if matchings[m1idx] is not None and m1: # m1 is empty for composite glyphs
- m1 = [m1[i] for i in matchings[m1idx]]
-
- for ix, (contour0, contour1) in enumerate(zip(m0, m1)):
- if len(contour0) == 0 or len(contour0) != len(contour1):
+ matching = matchings[m1idx]
+ if (
+ matching is not None and m1Isomorphisms
+ ): # m1 is empty for composite glyphs
+ m1Isomorphisms = [m1Isomorphisms[i] for i in matching]
+ m1Vectors = [m1Vectors[i] for i in matching]
+ m1VectorsNormalized = [m1VectorsNormalized[i] for i in matching]
+ recording1 = [recording1[i] for i in matching]
+ recording1Normalized = [recording1Normalized[i] for i in matching]
+
+ midRecording = []
+ for c0, c1 in zip(recording0, recording1):
+ try:
+ midRecording.append(lerp_recordings(c0, c1))
+ except ValueError:
+ # Mismatch because of the reordering above
+ midRecording.append(None)
+
+ for ix, (contour0, contour1) in enumerate(
+ zip(m0Isomorphisms, m1Isomorphisms)
+ ):
+ if (
+ contour0 is None
+ or contour1 is None
+ or len(contour0) == 0
+ or len(contour0) != len(contour1)
+ ):
# We already reported this; or nothing to do; or not compatible
# after reordering above.
continue
- c0 = contour0[0]
- # Next few lines duplicated below.
- costs = [_vdiff_hypot2_complex(c0[0], c1[0]) for c1 in contour1]
- min_cost_idx, min_cost = min(enumerate(costs), key=lambda x: x[1])
- first_cost = costs[0]
-
- if min_cost < first_cost * tolerance:
- # c0 is the first isomorphism of the m0 master
- # contour1 is list of all isomorphisms of the m1 master
- #
- # If the two shapes are both circle-ish and slightly
- # rotated, we detect wrong start point. This is for
- # example the case hundreds of times in
- # RobotoSerif-Italic[GRAD,opsz,wdth,wght].ttf
- #
- # If the proposed point is only one off from the first
- # point (and not reversed), try harder:
- #
- # Find the major eigenvector of the covariance matrix,
- # and rotate the contours by that angle. Then find the
- # closest point again. If it matches this time, let it
- # pass.
-
- proposed_point = contour1[min_cost_idx][1]
- reverse = contour1[min_cost_idx][2]
- num_points = len(allContourPoints[m1idx][ix])
- leeway = 3
- okay = False
- if not reverse and (
- proposed_point <= leeway
- or proposed_point >= num_points - leeway
- ):
- # Try harder
-
- m0Vectors = allGreenVectors[m1idx][ix]
- m1Vectors = allGreenVectors[m1idx][ix]
-
- # Recover the covariance matrix from the GreenVectors.
- # This is a 2x2 matrix.
- transforms = []
- for vector in (m0Vectors, m1Vectors):
- meanX = vector[1]
- meanY = vector[2]
- stddevX = vector[3] / 2
- stddevY = vector[4] / 2
- correlation = vector[5] / abs(vector[0])
-
- # https://cookierobotics.com/007/
- a = stddevX * stddevX # VarianceX
- c = stddevY * stddevY # VarianceY
- b = correlation * stddevX * stddevY # Covariance
-
- delta = (((a - c) * 0.5) ** 2 + b * b) ** 0.5
- lambda1 = (a + c) * 0.5 + delta # Major eigenvalue
- lambda2 = (a + c) * 0.5 - delta # Minor eigenvalue
- theta = (
- atan2(lambda1 - a, b)
- if b != 0
- else (pi * 0.5 if a < c else 0)
- )
- trans = Transform()
- trans = trans.translate(meanX, meanY)
- trans = trans.rotate(theta)
- trans = trans.scale(sqrt(lambda1), sqrt(lambda2))
- transforms.append(trans)
-
- trans = transforms[0]
- new_c0 = (
- [
- complex(*trans.transformPoint((pt.real, pt.imag)))
- for pt in c0[0]
- ],
- ) + c0[1:]
- trans = transforms[1]
- new_contour1 = []
- for c1 in contour1:
- new_c1 = (
- [
- complex(*trans.transformPoint((pt.real, pt.imag)))
- for pt in c1[0]
- ],
- ) + c1[1:]
- new_contour1.append(new_c1)
-
- # Next few lines duplicate from above.
- costs = [
- _vdiff_hypot2_complex(new_c0[0], new_c1[0])
- for new_c1 in new_contour1
- ]
- min_cost_idx, min_cost = min(
- enumerate(costs), key=lambda x: x[1]
- )
- first_cost = costs[0]
- # Only accept a perfect match
- if min_cost_idx == 0:
- okay = True
+ proposed_point, reverse, min_cost, first_cost = test_starting_point(
+ glyph0, glyph1, ix, tolerance, matching
+ )
- if not okay:
- showed = True
+ if proposed_point or reverse:
+ this_tolerance = min_cost / first_cost
+ log.debug("tolerance: %g", this_tolerance)
+ if min_cost < first_cost * tolerance:
yield (
glyph_name,
{
@@ -690,63 +372,245 @@ def test_gen(
"value_1": 0,
"value_2": proposed_point,
"reversed": reverse,
+ "tolerance": this_tolerance,
},
)
else:
- # If first_cost is Too Largeâ„¢, do further inspection.
- # This can happen specially in the case of TrueType
- # fonts, where the original contour had wrong start point,
- # but because of the cubic->quadratic conversion, we don't
- # have many isomorphisms to work with.
-
- # The threshold here is all black magic. It's just here to
- # speed things up so we don't end up doing a full matching
- # on every contour that is correct.
- threshold = (
- len(c0[0]) * (allControlVectors[m0idx][ix][0] * 0.5) ** 2 / 4
- ) # Magic only
- c1 = contour1[min_cost_idx]
-
- # If point counts are different it's because of the contour
- # reordering above. We can in theory still try, but our
- # bipartite-matching implementations currently assume
- # equal number of vertices on both sides. I'm lazy to update
- # all three different implementations!
-
- if len(c0[0]) == len(c1[0]) and first_cost > threshold:
- # Do a quick(!) matching between the points. If it's way off,
- # flag it. This can happen specially in the case of TrueType
- # fonts, where the original contour had wrong start point, but
- # because of the cubic->quadratic conversion, we don't have many
- # isomorphisms.
- points0 = c0[0][::_NUM_ITEMS_PER_POINTS_COMPLEX_VECTOR]
- points1 = c1[0][::_NUM_ITEMS_PER_POINTS_COMPLEX_VECTOR]
-
- graph = [
- [_hypot2_complex(p0 - p1) for p1 in points1]
- for p0 in points0
- ]
- matching, matching_cost = min_cost_perfect_bipartite_matching(
- graph
- )
- identity_cost = sum(graph[i][i] for i in range(len(graph)))
+ # Weight check.
+ #
+ # If contour could be mid-interpolated, and the two
+ # contours have the same area sign, proceeed.
+ #
+ # The sign difference can happen if it's a werido
+ # self-intersecting contour; ignore it.
+ contour = midRecording[ix]
+
+ normalized = False
+ if contour and (m0Vectors[ix][0] < 0) == (m1Vectors[ix][0] < 0):
+ if normalized:
+ midStats = StatisticsPen(glyphset=None)
+ tpen = TransformPen(
+ midStats, transform_from_stats(midStats, inverse=True)
+ )
+ contour.replay(tpen)
+ else:
+ midStats = StatisticsPen(glyphset=None)
+ contour.replay(midStats)
- if matching_cost < identity_cost / 8: # Heuristic
- # print(matching_cost, identity_cost, matching)
- showed = True
- yield (
- glyph_name,
- {
- "type": "wrong_structure",
- "contour": ix,
- "master_1": names[m0idx],
- "master_2": names[m1idx],
- "master_1_idx": m0idx,
- "master_2_idx": m1idx,
- },
+ midVector = contour_vector_from_stats(midStats)
+
+ m0Vec = (
+ m0Vectors[ix] if not normalized else m0VectorsNormalized[ix]
+ )
+ m1Vec = (
+ m1Vectors[ix] if not normalized else m1VectorsNormalized[ix]
+ )
+ size0 = m0Vec[0] * m0Vec[0]
+ size1 = m1Vec[0] * m1Vec[0]
+ midSize = midVector[0] * midVector[0]
+
+ power = 1
+ t = tolerance**power
+
+ for overweight, problem_type in enumerate(
+ ("underweight", "overweight")
+ ):
+ if overweight:
+ expectedSize = sqrt(size0 * size1)
+ expectedSize = (size0 + size1) - expectedSize
+ expectedSize = size1 + (midSize - size1)
+ continue
+ else:
+ expectedSize = sqrt(size0 * size1)
+
+ log.debug(
+ "%s: actual size %g; threshold size %g, master sizes: %g, %g",
+ problem_type,
+ midSize,
+ expectedSize,
+ size0,
+ size1,
)
- if show_all and not showed:
+ size0, size1 = sorted((size0, size1))
+
+ if (
+ not overweight
+ and expectedSize * tolerance > midSize + 1e-5
+ ) or (
+ overweight and 1e-5 + expectedSize / tolerance < midSize
+ ):
+ try:
+ if overweight:
+ this_tolerance = (expectedSize / midSize) ** (
+ 1 / power
+ )
+ else:
+ this_tolerance = (midSize / expectedSize) ** (
+ 1 / power
+ )
+ except ZeroDivisionError:
+ this_tolerance = 0
+ log.debug("tolerance %g", this_tolerance)
+ yield (
+ glyph_name,
+ {
+ "type": problem_type,
+ "contour": ix,
+ "master_1": names[m0idx],
+ "master_2": names[m1idx],
+ "master_1_idx": m0idx,
+ "master_2_idx": m1idx,
+ "tolerance": this_tolerance,
+ },
+ )
+
+ #
+ # "kink" detector
+ #
+ m0 = glyph0.points
+ m1 = glyph1.points
+
+ # If contour-order is wrong, adjust it
+ if matchings[m1idx] is not None and m1: # m1 is empty for composite glyphs
+ m1 = [m1[i] for i in matchings[m1idx]]
+
+ t = 0.1 # ~sin(radian(6)) for tolerance 0.95
+ deviation_threshold = (
+ upem * DEFAULT_KINKINESS_LENGTH * DEFAULT_KINKINESS / kinkiness
+ )
+
+ for ix, (contour0, contour1) in enumerate(zip(m0, m1)):
+ if (
+ contour0 is None
+ or contour1 is None
+ or len(contour0) == 0
+ or len(contour0) != len(contour1)
+ ):
+ # We already reported this; or nothing to do; or not compatible
+ # after reordering above.
+ continue
+
+ # Walk the contour, keeping track of three consecutive points, with
+ # middle one being an on-curve. If the three are co-linear then
+ # check for kinky-ness.
+ for i in range(len(contour0)):
+ pt0 = contour0[i]
+ pt1 = contour1[i]
+ if not pt0[1] or not pt1[1]:
+ # Skip off-curves
+ continue
+ pt0_prev = contour0[i - 1]
+ pt1_prev = contour1[i - 1]
+ pt0_next = contour0[(i + 1) % len(contour0)]
+ pt1_next = contour1[(i + 1) % len(contour1)]
+
+ if pt0_prev[1] and pt1_prev[1]:
+ # At least one off-curve is required
+ continue
+ if pt0_prev[1] and pt1_prev[1]:
+ # At least one off-curve is required
+ continue
+
+ pt0 = complex(*pt0[0])
+ pt1 = complex(*pt1[0])
+ pt0_prev = complex(*pt0_prev[0])
+ pt1_prev = complex(*pt1_prev[0])
+ pt0_next = complex(*pt0_next[0])
+ pt1_next = complex(*pt1_next[0])
+
+ # We have three consecutive points. Check whether
+ # they are colinear.
+ d0_prev = pt0 - pt0_prev
+ d0_next = pt0_next - pt0
+ d1_prev = pt1 - pt1_prev
+ d1_next = pt1_next - pt1
+
+ sin0 = d0_prev.real * d0_next.imag - d0_prev.imag * d0_next.real
+ sin1 = d1_prev.real * d1_next.imag - d1_prev.imag * d1_next.real
+ try:
+ sin0 /= abs(d0_prev) * abs(d0_next)
+ sin1 /= abs(d1_prev) * abs(d1_next)
+ except ZeroDivisionError:
+ continue
+
+ if abs(sin0) > t or abs(sin1) > t:
+ # Not colinear / not smooth.
+ continue
+
+ # Check the mid-point is actually, well, in the middle.
+ dot0 = d0_prev.real * d0_next.real + d0_prev.imag * d0_next.imag
+ dot1 = d1_prev.real * d1_next.real + d1_prev.imag * d1_next.imag
+ if dot0 < 0 or dot1 < 0:
+ # Sharp corner.
+ continue
+
+ # Fine, if handle ratios are similar...
+ r0 = abs(d0_prev) / (abs(d0_prev) + abs(d0_next))
+ r1 = abs(d1_prev) / (abs(d1_prev) + abs(d1_next))
+ r_diff = abs(r0 - r1)
+ if abs(r_diff) < t:
+ # Smooth enough.
+ continue
+
+ mid = (pt0 + pt1) / 2
+ mid_prev = (pt0_prev + pt1_prev) / 2
+ mid_next = (pt0_next + pt1_next) / 2
+
+ mid_d0 = mid - mid_prev
+ mid_d1 = mid_next - mid
+
+ sin_mid = mid_d0.real * mid_d1.imag - mid_d0.imag * mid_d1.real
+ try:
+ sin_mid /= abs(mid_d0) * abs(mid_d1)
+ except ZeroDivisionError:
+ continue
+
+ # ...or if the angles are similar.
+ if abs(sin_mid) * (tolerance * kinkiness) <= t:
+ # Smooth enough.
+ continue
+
+ # How visible is the kink?
+
+ cross = sin_mid * abs(mid_d0) * abs(mid_d1)
+ arc_len = abs(mid_d0 + mid_d1)
+ deviation = abs(cross / arc_len)
+ if deviation < deviation_threshold:
+ continue
+ deviation_ratio = deviation / arc_len
+ if deviation_ratio > t:
+ continue
+
+ this_tolerance = t / (abs(sin_mid) * kinkiness)
+
+ log.debug(
+ "deviation %g; deviation_ratio %g; sin_mid %g; r_diff %g",
+ deviation,
+ deviation_ratio,
+ sin_mid,
+ r_diff,
+ )
+ log.debug("tolerance %g", this_tolerance)
+ yield (
+ glyph_name,
+ {
+ "type": "kink",
+ "contour": ix,
+ "master_1": names[m0idx],
+ "master_2": names[m1idx],
+ "master_1_idx": m0idx,
+ "master_2_idx": m1idx,
+ "value": i,
+ "tolerance": this_tolerance,
+ },
+ )
+
+ #
+ # --show-all
+ #
+
+ if show_all:
yield (
glyph_name,
{
@@ -799,7 +663,13 @@ def main(args=None):
"--tolerance",
action="store",
type=float,
- help="Error tolerance. Default 0.95",
+ help="Error tolerance. Between 0 and 1. Default %s" % DEFAULT_TOLERANCE,
+ )
+ parser.add_argument(
+ "--kinkiness",
+ action="store",
+ type=float,
+ help="How aggressively report kinks. Default %s" % DEFAULT_KINKINESS,
)
parser.add_argument(
"--json",
@@ -812,6 +682,11 @@ def main(args=None):
help="Output report in PDF format",
)
parser.add_argument(
+ "--ps",
+ action="store",
+ help="Output report in PostScript format",
+ )
+ parser.add_argument(
"--html",
action="store",
help="Output report in HTML format",
@@ -846,12 +721,15 @@ def main(args=None):
help="Name of the master to use in the report. If not provided, all are used.",
)
parser.add_argument("-v", "--verbose", action="store_true", help="Run verbosely.")
+ parser.add_argument("--debug", action="store_true", help="Run with debug output.")
args = parser.parse_args(args)
from fontTools import configLogger
configLogger(level=("INFO" if args.verbose else "ERROR"))
+ if args.debug:
+ configLogger(level="DEBUG")
glyphs = args.glyphs.split() if args.glyphs else None
@@ -860,6 +738,7 @@ def main(args=None):
fonts = []
names = []
locations = []
+ upem = DEFAULT_UPEM
original_args_inputs = tuple(args.inputs)
@@ -884,6 +763,7 @@ def main(args=None):
from glyphsLib import GSFont, to_designspace
gsfont = GSFont(args.inputs[0])
+ upem = gsfont.upm
designspace = to_designspace(gsfont)
fonts = [source.font for source in designspace.sources]
names = ["%s-%s" % (f.info.familyName, f.info.styleName) for f in fonts]
@@ -902,6 +782,7 @@ def main(args=None):
from fontTools.ttLib import TTFont
font = TTFont(args.inputs[0])
+ upem = font["head"].unitsPerEm
if "gvar" in font:
# Is variable font
@@ -980,11 +861,17 @@ def main(args=None):
if filename.endswith(".ufo"):
from fontTools.ufoLib import UFOReader
- fonts.append(UFOReader(filename))
+ font = UFOReader(filename)
+ info = SimpleNamespace()
+ font.readInfo(info)
+ upem = info.unitsPerEm
+ fonts.append(font)
else:
from fontTools.ttLib import TTFont
- fonts.append(TTFont(filename))
+ font = TTFont(filename)
+ upem = font["head"].unitsPerEm
+ fonts.append(font)
names.append(basename(filename).rsplit(".", 1)[0])
@@ -1023,6 +910,8 @@ def main(args=None):
# Normalize locations
locations = [normalizeLocation(loc, axis_triples) for loc in locations]
+ tolerance = args.tolerance or DEFAULT_TOLERANCE
+ kinkiness = args.kinkiness if args.kinkiness is not None else DEFAULT_KINKINESS
try:
log.info("Running on %d glyphsets", len(glyphsets))
@@ -1032,8 +921,10 @@ def main(args=None):
glyphs=glyphs,
names=names,
locations=locations,
+ upem=upem,
ignore_missing=args.ignore_missing,
- tolerance=args.tolerance or 0.95,
+ tolerance=tolerance,
+ kinkiness=kinkiness,
show_all=args.show_all,
)
problems = defaultdict(list)
@@ -1141,11 +1032,32 @@ def main(args=None):
),
file=f,
)
- elif p["type"] == "wrong_structure":
+ elif p["type"] == "underweight":
+ print(
+ " Contour %d interpolation is underweight: %s, %s"
+ % (
+ p["contour"],
+ p["master_1"],
+ p["master_2"],
+ ),
+ file=f,
+ )
+ elif p["type"] == "overweight":
+ print(
+ " Contour %d interpolation is overweight: %s, %s"
+ % (
+ p["contour"],
+ p["master_1"],
+ p["master_2"],
+ ),
+ file=f,
+ )
+ elif p["type"] == "kink":
print(
- " Contour %d structures differ: %s, %s"
+ " Contour %d has a kink at %s: %s, %s"
% (
p["contour"],
+ p["value"],
p["master_1"],
p["master_2"],
),
@@ -1153,7 +1065,7 @@ def main(args=None):
)
elif p["type"] == "nothing":
print(
- " Nothing wrong between %s and %s"
+ " Showing %s and %s"
% (
p["master_1"],
p["master_2"],
@@ -1169,17 +1081,45 @@ def main(args=None):
from .interpolatablePlot import InterpolatablePDF
with InterpolatablePDF(args.pdf, glyphsets=glyphsets, names=names) as pdf:
+ pdf.add_title_page(
+ original_args_inputs, tolerance=tolerance, kinkiness=kinkiness
+ )
pdf.add_problems(problems)
if not problems and not args.quiet:
pdf.draw_cupcake()
+ if args.ps:
+ log.info("Writing PS to %s", args.pdf)
+ from .interpolatablePlot import InterpolatablePS
+
+ with InterpolatablePS(args.ps, glyphsets=glyphsets, names=names) as ps:
+ ps.add_title_page(
+ original_args_inputs, tolerance=tolerance, kinkiness=kinkiness
+ )
+ ps.add_problems(problems)
+ if not problems and not args.quiet:
+ ps.draw_cupcake()
+
if args.html:
log.info("Writing HTML to %s", args.html)
from .interpolatablePlot import InterpolatableSVG
svgs = []
+ glyph_starts = {}
with InterpolatableSVG(svgs, glyphsets=glyphsets, names=names) as svg:
- svg.add_problems(problems)
+ svg.add_title_page(
+ original_args_inputs,
+ show_tolerance=False,
+ tolerance=tolerance,
+ kinkiness=kinkiness,
+ )
+ for glyph, glyph_problems in problems.items():
+ glyph_starts[len(svgs)] = glyph
+ svg.add_problems(
+ {glyph: glyph_problems},
+ show_tolerance=False,
+ show_page_number=False,
+ )
if not problems and not args.quiet:
svg.draw_cupcake()
@@ -1187,8 +1127,13 @@ def main(args=None):
with open(args.html, "wb") as f:
f.write(b"<!DOCTYPE html>\n")
- f.write(b"<html><body align=center>\n")
- for svg in svgs:
+ f.write(
+ b'<html><body align="center" style="font-family: sans-serif; text-color: #222">\n'
+ )
+ f.write(b"<title>fonttools varLib.interpolatable report</title>\n")
+ for i, svg in enumerate(svgs):
+ if i in glyph_starts:
+ f.write(f"<h1>Glyph {glyph_starts[i]}</h1>\n".encode("utf-8"))
f.write("<img src='data:image/svg+xml;base64,".encode("utf-8"))
f.write(base64.b64encode(svg))
f.write(b"' />\n")
diff --git a/contrib/python/fonttools/fontTools/varLib/interpolatableHelpers.py b/contrib/python/fonttools/fontTools/varLib/interpolatableHelpers.py
new file mode 100644
index 0000000000..513e5f7409
--- /dev/null
+++ b/contrib/python/fonttools/fontTools/varLib/interpolatableHelpers.py
@@ -0,0 +1,383 @@
+from fontTools.pens.basePen import AbstractPen, BasePen, DecomposingPen
+from fontTools.pens.pointPen import AbstractPointPen, SegmentToPointPen
+from fontTools.pens.recordingPen import RecordingPen, DecomposingRecordingPen
+from fontTools.misc.transform import Transform
+from collections import defaultdict, deque
+from math import sqrt, copysign, atan2, pi
+import itertools
+
+import logging
+
+log = logging.getLogger("fontTools.varLib.interpolatable")
+
+
+def rot_list(l, k):
+ """Rotate list by k items forward. Ie. item at position 0 will be
+ at position k in returned list. Negative k is allowed."""
+ return l[-k:] + l[:-k]
+
+
+class PerContourPen(BasePen):
+ def __init__(self, Pen, glyphset=None):
+ BasePen.__init__(self, glyphset)
+ self._glyphset = glyphset
+ self._Pen = Pen
+ self._pen = None
+ self.value = []
+
+ def _moveTo(self, p0):
+ self._newItem()
+ self._pen.moveTo(p0)
+
+ def _lineTo(self, p1):
+ self._pen.lineTo(p1)
+
+ def _qCurveToOne(self, p1, p2):
+ self._pen.qCurveTo(p1, p2)
+
+ def _curveToOne(self, p1, p2, p3):
+ self._pen.curveTo(p1, p2, p3)
+
+ def _closePath(self):
+ self._pen.closePath()
+ self._pen = None
+
+ def _endPath(self):
+ self._pen.endPath()
+ self._pen = None
+
+ def _newItem(self):
+ self._pen = pen = self._Pen()
+ self.value.append(pen)
+
+
+class PerContourOrComponentPen(PerContourPen):
+ def addComponent(self, glyphName, transformation):
+ self._newItem()
+ self.value[-1].addComponent(glyphName, transformation)
+
+
+class SimpleRecordingPointPen(AbstractPointPen):
+ def __init__(self):
+ self.value = []
+
+ def beginPath(self, identifier=None, **kwargs):
+ pass
+
+ def endPath(self) -> None:
+ pass
+
+ def addPoint(self, pt, segmentType=None):
+ self.value.append((pt, False if segmentType is None else True))
+
+
+def vdiff_hypot2(v0, v1):
+ s = 0
+ for x0, x1 in zip(v0, v1):
+ d = x1 - x0
+ s += d * d
+ return s
+
+
+def vdiff_hypot2_complex(v0, v1):
+ s = 0
+ for x0, x1 in zip(v0, v1):
+ d = x1 - x0
+ s += d.real * d.real + d.imag * d.imag
+ # This does the same but seems to be slower:
+ # s += (d * d.conjugate()).real
+ return s
+
+
+def matching_cost(G, matching):
+ return sum(G[i][j] for i, j in enumerate(matching))
+
+
+def min_cost_perfect_bipartite_matching_scipy(G):
+ n = len(G)
+ rows, cols = linear_sum_assignment(G)
+ assert (rows == list(range(n))).all()
+ return list(cols), matching_cost(G, cols)
+
+
+def min_cost_perfect_bipartite_matching_munkres(G):
+ n = len(G)
+ cols = [None] * n
+ for row, col in Munkres().compute(G):
+ cols[row] = col
+ return cols, matching_cost(G, cols)
+
+
+def min_cost_perfect_bipartite_matching_bruteforce(G):
+ n = len(G)
+
+ if n > 6:
+ raise Exception("Install Python module 'munkres' or 'scipy >= 0.17.0'")
+
+ # Otherwise just brute-force
+ permutations = itertools.permutations(range(n))
+ best = list(next(permutations))
+ best_cost = matching_cost(G, best)
+ for p in permutations:
+ cost = matching_cost(G, p)
+ if cost < best_cost:
+ best, best_cost = list(p), cost
+ return best, best_cost
+
+
+try:
+ from scipy.optimize import linear_sum_assignment
+
+ min_cost_perfect_bipartite_matching = min_cost_perfect_bipartite_matching_scipy
+except ImportError:
+ try:
+ from munkres import Munkres
+
+ min_cost_perfect_bipartite_matching = (
+ min_cost_perfect_bipartite_matching_munkres
+ )
+ except ImportError:
+ min_cost_perfect_bipartite_matching = (
+ min_cost_perfect_bipartite_matching_bruteforce
+ )
+
+
+def contour_vector_from_stats(stats):
+ # Don't change the order of items here.
+ # It's okay to add to the end, but otherwise, other
+ # code depends on it. Search for "covariance".
+ size = sqrt(abs(stats.area))
+ return (
+ copysign((size), stats.area),
+ stats.meanX,
+ stats.meanY,
+ stats.stddevX * 2,
+ stats.stddevY * 2,
+ stats.correlation * size,
+ )
+
+
+def matching_for_vectors(m0, m1):
+ n = len(m0)
+
+ identity_matching = list(range(n))
+
+ costs = [[vdiff_hypot2(v0, v1) for v1 in m1] for v0 in m0]
+ (
+ matching,
+ matching_cost,
+ ) = min_cost_perfect_bipartite_matching(costs)
+ identity_cost = sum(costs[i][i] for i in range(n))
+ return matching, matching_cost, identity_cost
+
+
+def points_characteristic_bits(points):
+ bits = 0
+ for pt, b in reversed(points):
+ bits = (bits << 1) | b
+ return bits
+
+
+_NUM_ITEMS_PER_POINTS_COMPLEX_VECTOR = 4
+
+
+def points_complex_vector(points):
+ vector = []
+ if not points:
+ return vector
+ points = [complex(*pt) for pt, _ in points]
+ n = len(points)
+ assert _NUM_ITEMS_PER_POINTS_COMPLEX_VECTOR == 4
+ points.extend(points[: _NUM_ITEMS_PER_POINTS_COMPLEX_VECTOR - 1])
+ while len(points) < _NUM_ITEMS_PER_POINTS_COMPLEX_VECTOR:
+ points.extend(points[: _NUM_ITEMS_PER_POINTS_COMPLEX_VECTOR - 1])
+ for i in range(n):
+ # The weights are magic numbers.
+
+ # The point itself
+ p0 = points[i]
+ vector.append(p0)
+
+ # The vector to the next point
+ p1 = points[i + 1]
+ d0 = p1 - p0
+ vector.append(d0 * 3)
+
+ # The turn vector
+ p2 = points[i + 2]
+ d1 = p2 - p1
+ vector.append(d1 - d0)
+
+ # The angle to the next point, as a cross product;
+ # Square root of, to match dimentionality of distance.
+ cross = d0.real * d1.imag - d0.imag * d1.real
+ cross = copysign(sqrt(abs(cross)), cross)
+ vector.append(cross * 4)
+
+ return vector
+
+
+def add_isomorphisms(points, isomorphisms, reverse):
+ reference_bits = points_characteristic_bits(points)
+ n = len(points)
+
+ # if points[0][0] == points[-1][0]:
+ # abort
+
+ if reverse:
+ points = points[::-1]
+ bits = points_characteristic_bits(points)
+ else:
+ bits = reference_bits
+
+ vector = points_complex_vector(points)
+
+ assert len(vector) % n == 0
+ mult = len(vector) // n
+ mask = (1 << n) - 1
+
+ for i in range(n):
+ b = ((bits << (n - i)) & mask) | (bits >> i)
+ if b == reference_bits:
+ isomorphisms.append(
+ (rot_list(vector, -i * mult), n - 1 - i if reverse else i, reverse)
+ )
+
+
+def find_parents_and_order(glyphsets, locations):
+ parents = [None] + list(range(len(glyphsets) - 1))
+ order = list(range(len(glyphsets)))
+ if locations:
+ # Order base master first
+ bases = (i for i, l in enumerate(locations) if all(v == 0 for v in l.values()))
+ if bases:
+ base = next(bases)
+ logging.info("Base master index %s, location %s", base, locations[base])
+ else:
+ base = 0
+ logging.warning("No base master location found")
+
+ # Form a minimum spanning tree of the locations
+ try:
+ from scipy.sparse.csgraph import minimum_spanning_tree
+
+ graph = [[0] * len(locations) for _ in range(len(locations))]
+ axes = set()
+ for l in locations:
+ axes.update(l.keys())
+ axes = sorted(axes)
+ vectors = [tuple(l.get(k, 0) for k in axes) for l in locations]
+ for i, j in itertools.combinations(range(len(locations)), 2):
+ graph[i][j] = vdiff_hypot2(vectors[i], vectors[j])
+
+ tree = minimum_spanning_tree(graph)
+ rows, cols = tree.nonzero()
+ graph = defaultdict(set)
+ for row, col in zip(rows, cols):
+ graph[row].add(col)
+ graph[col].add(row)
+
+ # Traverse graph from the base and assign parents
+ parents = [None] * len(locations)
+ order = []
+ visited = set()
+ queue = deque([base])
+ while queue:
+ i = queue.popleft()
+ visited.add(i)
+ order.append(i)
+ for j in sorted(graph[i]):
+ if j not in visited:
+ parents[j] = i
+ queue.append(j)
+
+ except ImportError:
+ pass
+
+ log.info("Parents: %s", parents)
+ log.info("Order: %s", order)
+ return parents, order
+
+
+def transform_from_stats(stats, inverse=False):
+ # https://cookierobotics.com/007/
+ a = stats.varianceX
+ b = stats.covariance
+ c = stats.varianceY
+
+ delta = (((a - c) * 0.5) ** 2 + b * b) ** 0.5
+ lambda1 = (a + c) * 0.5 + delta # Major eigenvalue
+ lambda2 = (a + c) * 0.5 - delta # Minor eigenvalue
+ theta = atan2(lambda1 - a, b) if b != 0 else (pi * 0.5 if a < c else 0)
+ trans = Transform()
+
+ if lambda2 < 0:
+ # XXX This is a hack.
+ # The problem is that the covariance matrix is singular.
+ # This happens when the contour is a line, or a circle.
+ # In that case, the covariance matrix is not a good
+ # representation of the contour.
+ # We should probably detect this earlier and avoid
+ # computing the covariance matrix in the first place.
+ # But for now, we just avoid the division by zero.
+ lambda2 = 0
+
+ if inverse:
+ trans = trans.translate(-stats.meanX, -stats.meanY)
+ trans = trans.rotate(-theta)
+ trans = trans.scale(1 / sqrt(lambda1), 1 / sqrt(lambda2))
+ else:
+ trans = trans.scale(sqrt(lambda1), sqrt(lambda2))
+ trans = trans.rotate(theta)
+ trans = trans.translate(stats.meanX, stats.meanY)
+
+ return trans
+
+
+class LerpGlyphSet:
+ def __init__(self, glyphset1, glyphset2, factor=0.5):
+ self.glyphset1 = glyphset1
+ self.glyphset2 = glyphset2
+ self.factor = factor
+
+ def __getitem__(self, glyphname):
+ return LerpGlyph(glyphname, self)
+
+
+class LerpGlyph:
+ def __init__(self, glyphname, glyphset):
+ self.glyphset = glyphset
+ self.glyphname = glyphname
+
+ def draw(self, pen):
+ recording1 = DecomposingRecordingPen(self.glyphset.glyphset1)
+ self.glyphset.glyphset1[self.glyphname].draw(recording1)
+ recording2 = DecomposingRecordingPen(self.glyphset.glyphset2)
+ self.glyphset.glyphset2[self.glyphname].draw(recording2)
+
+ factor = self.glyphset.factor
+ for (op1, args1), (op2, args2) in zip(recording1.value, recording2.value):
+ if op1 != op2:
+ raise ValueError("Mismatching operations: %s, %s" % (op1, op2))
+ mid_args = [
+ (x1 + (x2 - x1) * factor, y1 + (y2 - y1) * factor)
+ for (x1, y1), (x2, y2) in zip(args1, args2)
+ ]
+ getattr(pen, op1)(*mid_args)
+
+
+def lerp_recordings(recording1, recording2, factor=0.5):
+ pen = RecordingPen()
+ value = pen.value
+ for (op1, args1), (op2, args2) in zip(recording1.value, recording2.value):
+ if op1 != op2:
+ raise ValueError("Mismatched operations: %s, %s" % (op1, op2))
+ if op1 == "addComponent":
+ mid_args = args1 # XXX Interpolate transformation?
+ else:
+ mid_args = [
+ (x1 + (x2 - x1) * factor, y1 + (y2 - y1) * factor)
+ for (x1, y1), (x2, y2) in zip(args1, args2)
+ ]
+ value.append((op1, mid_args))
+ return pen
diff --git a/contrib/python/fonttools/fontTools/varLib/interpolatablePlot.py b/contrib/python/fonttools/fontTools/varLib/interpolatablePlot.py
index a4e86b3dba..eef4a47160 100644
--- a/contrib/python/fonttools/fontTools/varLib/interpolatablePlot.py
+++ b/contrib/python/fonttools/fontTools/varLib/interpolatablePlot.py
@@ -1,3 +1,4 @@
+from fontTools.ttLib import TTFont
from fontTools.pens.recordingPen import (
RecordingPen,
DecomposingRecordingPen,
@@ -13,49 +14,19 @@ from fontTools.pens.pointPen import (
from fontTools.varLib.interpolatable import (
PerContourOrComponentPen,
SimpleRecordingPointPen,
+ LerpGlyphSet,
)
from itertools import cycle
from functools import wraps
from io import BytesIO
import cairo
import math
+import os
import logging
log = logging.getLogger("fontTools.varLib.interpolatable")
-class LerpGlyphSet:
- def __init__(self, glyphset1, glyphset2, factor=0.5):
- self.glyphset1 = glyphset1
- self.glyphset2 = glyphset2
- self.factor = factor
-
- def __getitem__(self, glyphname):
- return LerpGlyph(glyphname, self)
-
-
-class LerpGlyph:
- def __init__(self, glyphname, glyphset):
- self.glyphset = glyphset
- self.glyphname = glyphname
-
- def draw(self, pen):
- recording1 = DecomposingRecordingPen(self.glyphset.glyphset1)
- self.glyphset.glyphset1[self.glyphname].draw(recording1)
- recording2 = DecomposingRecordingPen(self.glyphset.glyphset2)
- self.glyphset.glyphset2[self.glyphname].draw(recording2)
-
- factor = self.glyphset.factor
- for (op1, args1), (op2, args2) in zip(recording1.value, recording2.value):
- if op1 != op2:
- raise ValueError("Mismatching operations: %s, %s" % (op1, op2))
- mid_args = [
- (x1 + (x2 - x1) * factor, y1 + (y2 - y1) * factor)
- for (x1, y1), (x2, y2) in zip(args1, args2)
- ]
- getattr(pen, op1)(*mid_args)
-
-
class OverridingDict(dict):
def __init__(self, parent_dict):
self.parent_dict = parent_dict
@@ -69,6 +40,7 @@ class InterpolatablePlot:
height = 480
pad = 16
line_height = 36
+ page_number = 1
head_color = (0.3, 0.3, 0.3)
label_color = (0.2, 0.2, 0.2)
border_color = (0.9, 0.9, 0.9)
@@ -76,21 +48,27 @@ class InterpolatablePlot:
fill_color = (0.8, 0.8, 0.8)
stroke_color = (0.1, 0.1, 0.1)
stroke_width = 2
- oncurve_node_color = (0, 0.8, 0)
+ oncurve_node_color = (0, 0.8, 0, 0.7)
oncurve_node_diameter = 10
- offcurve_node_color = (0, 0.5, 0)
+ offcurve_node_color = (0, 0.5, 0, 0.7)
offcurve_node_diameter = 8
- handle_color = (0.2, 1, 0.2)
+ handle_color = (0, 0.5, 0, 0.7)
handle_width = 1
- other_start_point_color = (0, 0, 1)
- reversed_start_point_color = (0, 1, 0)
- start_point_color = (1, 0, 0)
- start_point_width = 15
- start_handle_width = 5
- start_handle_length = 100
- start_handle_arrow_length = 5
+ corrected_start_point_color = (0, 0.9, 0, 0.7)
+ corrected_start_point_size = 15
+ wrong_start_point_color = (1, 0, 0, 0.7)
+ start_point_color = (0, 0, 1, 0.7)
+ start_arrow_length = 20
+ kink_point_size = 10
+ kink_point_color = (1, 0, 1, 0.7)
+ kink_circle_size = 25
+ kink_circle_stroke_width = 1.5
+ kink_circle_color = (1, 0, 1, 0.7)
contour_colors = ((1, 0, 0), (0, 0, 1), (0, 1, 0), (1, 1, 0), (1, 0, 1), (0, 1, 1))
contour_alpha = 0.5
+ weight_issue_contour_color = (0, 0, 0, 0.4)
+ no_issues_label = "Your font's good! Have a cupcake..."
+ no_issues_label_color = (0, 0.5, 0)
cupcake_color = (0.3, 0, 0.3)
cupcake = r"""
,@.
@@ -117,8 +95,19 @@ class InterpolatablePlot:
\\\\ |||| |||| |||| //
||||||||||||||||||||||||
"""
- shrug_color = (0, 0.3, 0.3)
+ emoticon_color = (0, 0.3, 0.3)
shrug = r"""\_(")_/"""
+ underweight = r"""
+ o
+/|\
+/ \
+"""
+ overweight = r"""
+ o
+/O\
+/ \
+"""
+ yay = r""" \o/ """
def __init__(self, out, glyphsets, names=None, **kwargs):
self.out = out
@@ -140,9 +129,221 @@ class InterpolatablePlot:
raise NotImplementedError
def show_page(self):
- raise NotImplementedError
+ self.page_number += 1
+
+ def total_width(self):
+ return self.width * 2 + self.pad * 3
+
+ def total_height(self):
+ return (
+ self.pad
+ + self.line_height
+ + self.pad
+ + self.line_height
+ + self.pad
+ + 2 * (self.height + self.pad * 2 + self.line_height)
+ + self.pad
+ )
+
+ def add_title_page(
+ self, files, *, show_tolerance=True, tolerance=None, kinkiness=None
+ ):
+ self.set_size(self.total_width(), self.total_height())
+
+ pad = self.pad
+ width = self.total_width() - 3 * self.pad
+ height = self.total_height() - 2 * self.pad
+ x = y = pad
+
+ self.draw_label("Problem report for:", x=x, y=y, bold=True, width=width)
+ y += self.line_height
+
+ import hashlib
+
+ for file in files:
+ base_file = os.path.basename(file)
+ y += self.line_height
+ self.draw_label(base_file, x=x, y=y, bold=True, width=width)
+ y += self.line_height
+
+ h = hashlib.sha1(open(file, "rb").read()).hexdigest()
+ self.draw_label("sha1: %s" % h, x=x + pad, y=y, width=width)
+ y += self.line_height
+
+ if file.endswith(".ttf"):
+ ttFont = TTFont(file)
+ name = ttFont["name"] if "name" in ttFont else None
+ if name:
+ for what, nameIDs in (
+ ("Family name", (21, 16, 1)),
+ ("Version", (5,)),
+ ):
+ n = name.getFirstDebugName(nameIDs)
+ if n is None:
+ continue
+ self.draw_label(
+ "%s: %s" % (what, n), x=x + pad, y=y, width=width
+ )
+ y += self.line_height
+ elif file.endswith(".glyphs"):
+ from glyphsLib import GSFont
+
+ f = GSFont(file)
+ for what, field in (
+ ("Family name", "familyName"),
+ ("VersionMajor", "versionMajor"),
+ ("VersionMinor", "_versionMinor"),
+ ):
+ self.draw_label(
+ "%s: %s" % (what, getattr(f, field)),
+ x=x + pad,
+ y=y,
+ width=width,
+ )
+ y += self.line_height
+
+ self.draw_legend(
+ show_tolerance=show_tolerance, tolerance=tolerance, kinkiness=kinkiness
+ )
+ self.show_page()
+
    def draw_legend(self, *, show_tolerance=True, tolerance=None, kinkiness=None):
        """Draw the legend block anchored to the bottom of the page.

        Entries are drawn bottom-up (``y`` starts near the page bottom and
        *decreases* after each entry), so items emitted later in this method
        appear *above* earlier ones on the page.  The "Legend:" and
        "Parameters:" section headers are therefore drawn last, ending up on
        top of their sections.
        """
        cr = cairo.Context(self.surface)

        # Anchor at the bottom-left; leave two line-heights of room.
        x = self.pad
        y = self.total_height() - self.pad - self.line_height * 2
        width = self.total_width() - 2 * self.pad

        # xx: column for the sample swatch/marker; xxx: column for its label.
        xx = x + self.pad * 2
        xxx = x + self.pad * 4

        if show_tolerance:
            self.draw_label(
                "Tolerance: badness; closer to zero the worse", x=xxx, y=y, width=width
            )
            y -= self.pad + self.line_height

        # Swatch: filled rectangle overlaid with the weight-issue tint.
        # NOTE(review): unlike the swatch below, this one uses fill_color
        # without an `if self.fill_color` guard — confirm fill_color can
        # never be falsy here.
        self.draw_label("Underweight contours", x=xxx, y=y, width=width)
        cr.rectangle(xx - self.pad * 0.7, y, 1.5 * self.pad, self.line_height)
        cr.set_source_rgb(*self.fill_color)
        cr.fill_preserve()
        if self.stroke_color:
            cr.set_source_rgb(*self.stroke_color)
            cr.set_line_width(self.stroke_width)
            cr.stroke_preserve()
        cr.set_source_rgba(*self.weight_issue_contour_color)
        cr.fill()
        y -= self.pad + self.line_height

        # Swatch: filled rectangle overlaid with the first contour color.
        self.draw_label(
            "Colored contours: contours with the wrong order", x=xxx, y=y, width=width
        )
        cr.rectangle(xx - self.pad * 0.7, y, 1.5 * self.pad, self.line_height)
        if self.fill_color:
            cr.set_source_rgb(*self.fill_color)
            cr.fill_preserve()
        if self.stroke_color:
            cr.set_source_rgb(*self.stroke_color)
            cr.set_line_width(self.stroke_width)
            cr.stroke_preserve()
        cr.set_source_rgba(*self.contour_colors[0], self.contour_alpha)
        cr.fill()
        y -= self.pad + self.line_height

        self.draw_label("Kink artifact", x=xxx, y=y, width=width)
        self.draw_circle(
            cr,
            x=xx,
            y=y + self.line_height * 0.5,
            diameter=self.kink_circle_size,
            stroke_width=self.kink_circle_stroke_width,
            color=self.kink_circle_color,
        )
        y -= self.pad + self.line_height

        self.draw_label("Point causing kink in the contour", x=xxx, y=y, width=width)
        self.draw_dot(
            cr,
            x=xx,
            y=y + self.line_height * 0.5,
            diameter=self.kink_point_size,
            color=self.kink_point_color,
        )
        y -= self.pad + self.line_height

        self.draw_label("Suggested new contour start point", x=xxx, y=y, width=width)
        self.draw_dot(
            cr,
            x=xx,
            y=y + self.line_height * 0.5,
            diameter=self.corrected_start_point_size,
            color=self.corrected_start_point_color,
        )
        y -= self.pad + self.line_height

        self.draw_label(
            "Contour start point in contours with wrong direction",
            x=xxx,
            y=y,
            width=width,
        )
        # Arrow is shifted left so its tip lands near the swatch column.
        self.draw_arrow(
            cr,
            x=xx - self.start_arrow_length * 0.3,
            y=y + self.line_height * 0.5,
            color=self.wrong_start_point_color,
        )
        y -= self.pad + self.line_height

        self.draw_label(
            "Contour start point when the first two points overlap",
            x=xxx,
            y=y,
            width=width,
        )
        self.draw_dot(
            cr,
            x=xx,
            y=y + self.line_height * 0.5,
            diameter=self.corrected_start_point_size,
            color=self.start_point_color,
        )
        y -= self.pad + self.line_height

        self.draw_label("Contour start point and direction", x=xxx, y=y, width=width)
        self.draw_arrow(
            cr,
            x=xx - self.start_arrow_length * 0.3,
            y=y + self.line_height * 0.5,
            color=self.start_point_color,
        )
        y -= self.pad + self.line_height

        # Section header for everything drawn above (appears on top of it).
        self.draw_label("Legend:", x=x, y=y, width=width, bold=True)
        y -= self.pad + self.line_height

        if kinkiness is not None:
            self.draw_label(
                "Kink-reporting aggressiveness: %g" % kinkiness,
                x=xxx,
                y=y,
                width=width,
            )
            y -= self.pad + self.line_height

        if tolerance is not None:
            self.draw_label(
                "Error tolerance: %g" % tolerance,
                x=xxx,
                y=y,
                width=width,
            )
            y -= self.pad + self.line_height

        # Section header for the parameters block.
        self.draw_label("Parameters:", x=x, y=y, width=width, bold=True)
        y -= self.pad + self.line_height
+
+ def add_problems(self, problems, *, show_tolerance=True, show_page_number=True):
for glyph, glyph_problems in problems.items():
last_masters = None
current_glyph_problems = []
@@ -157,16 +358,28 @@ class InterpolatablePlot:
continue
# Flush
if current_glyph_problems:
- self.add_problem(glyph, current_glyph_problems)
+ self.add_problem(
+ glyph,
+ current_glyph_problems,
+ show_tolerance=show_tolerance,
+ show_page_number=show_page_number,
+ )
self.show_page()
current_glyph_problems = []
last_masters = masters
current_glyph_problems.append(p)
if current_glyph_problems:
- self.add_problem(glyph, current_glyph_problems)
+ self.add_problem(
+ glyph,
+ current_glyph_problems,
+ show_tolerance=show_tolerance,
+ show_page_number=show_page_number,
+ )
self.show_page()
- def add_problem(self, glyphname, problems):
+ def add_problem(
+ self, glyphname, problems, *, show_tolerance=True, show_page_number=True
+ ):
if type(problems) not in (list, tuple):
problems = [problems]
@@ -190,31 +403,42 @@ class InterpolatablePlot:
)
master_indices.insert(0, sample_glyph)
- total_width = self.width * 2 + 3 * self.pad
- total_height = (
- self.pad
- + self.line_height
- + self.pad
- + len(master_indices) * (self.height + self.pad * 2 + self.line_height)
- + self.pad
- )
-
- self.set_size(total_width, total_height)
+ self.set_size(self.total_width(), self.total_height())
x = self.pad
y = self.pad
- self.draw_label(glyphname, x=x, y=y, color=self.head_color, align=0, bold=True)
+ self.draw_label(
+ "Glyph name: " + glyphname,
+ x=x,
+ y=y,
+ color=self.head_color,
+ align=0,
+ bold=True,
+ )
+ tolerance = min(p.get("tolerance", 1) for p in problems)
+ if tolerance < 1 and show_tolerance:
+ self.draw_label(
+ "tolerance: %.2f" % tolerance,
+ x=x,
+ y=y,
+ width=self.total_width() - 2 * self.pad,
+ align=1,
+ bold=True,
+ )
+ y += self.line_height + self.pad
self.draw_label(
problem_type,
- x=x + self.width + self.pad,
+ x=x,
y=y,
+ width=self.total_width() - 2 * self.pad,
color=self.head_color,
- align=1,
+ align=0.5,
bold=True,
)
y += self.line_height + self.pad
+ scales = []
for which, master_idx in enumerate(master_indices):
glyphset = self.glyphsets[master_idx]
name = self.names[master_idx]
@@ -223,18 +447,29 @@ class InterpolatablePlot:
y += self.line_height + self.pad
if glyphset[glyphname] is not None:
- self.draw_glyph(glyphset, glyphname, problems, which, x=x, y=y)
+ scales.append(
+ self.draw_glyph(glyphset, glyphname, problems, which, x=x, y=y)
+ )
else:
- self.draw_shrug(x=x, y=y)
+ self.draw_emoticon(self.shrug, x=x, y=y)
y += self.height + self.pad
if any(
- pt in ("nothing", "wrong_start_point", "contour_order", "wrong_structure")
+ pt
+ in (
+ "nothing",
+ "wrong_start_point",
+ "contour_order",
+ "kink",
+ "underweight",
+ "overweight",
+ )
for pt in problem_types
):
x = self.pad + self.width + self.pad
y = self.pad
y += self.line_height + self.pad
+ y += self.line_height + self.pad
glyphset1 = self.glyphsets[master_indices[0]]
glyphset2 = self.glyphsets[master_indices[1]]
@@ -248,19 +483,36 @@ class InterpolatablePlot:
midway_glyphset = LerpGlyphSet(glyphset1, glyphset2)
self.draw_glyph(
- midway_glyphset, glyphname, {"type": "midway"}, None, x=x, y=y
+ midway_glyphset,
+ glyphname,
+ [{"type": "midway"}]
+ + [
+ p
+ for p in problems
+ if p["type"] in ("kink", "underweight", "overweight")
+ ],
+ None,
+ x=x,
+ y=y,
+ scale=min(scales),
)
+
y += self.height + self.pad
- # Draw the fixed mid-way of the two masters
+ if any(
+ pt
+ in (
+ "wrong_start_point",
+ "contour_order",
+ "kink",
+ )
+ for pt in problem_types
+ ):
+ # Draw the proposed fix
self.draw_label("proposed fix", x=x, y=y, color=self.head_color, align=0.5)
y += self.line_height + self.pad
- if problem_type == "wrong_structure":
- self.draw_shrug(x=x, y=y)
- return
-
overriding1 = OverridingDict(glyphset1)
overriding2 = OverridingDict(glyphset2)
perContourPen1 = PerContourOrComponentPen(
@@ -321,6 +573,75 @@ class InterpolatablePlot:
# Replace the wrong contours
wrongContour1.value = segment1.value
wrongContour2.value = segment2.value
+ perContourPen1.value[problem["contour"]] = wrongContour1
+ perContourPen2.value[problem["contour"]] = wrongContour2
+
+ for problem in problems:
+ # If we have a kink, try to fix it.
+ if problem["type"] == "kink":
+ # Save the wrong contours
+ wrongContour1 = perContourPen1.value[problem["contour"]]
+ wrongContour2 = perContourPen2.value[problem["contour"]]
+
+ # Convert the wrong contours to point pens
+ points1 = RecordingPointPen()
+ converter = SegmentToPointPen(points1, False)
+ wrongContour1.replay(converter)
+ points2 = RecordingPointPen()
+ converter = SegmentToPointPen(points2, False)
+ wrongContour2.replay(converter)
+
+ i = problem["value"]
+
+ # Position points to be around the same ratio
+ # beginPath / endPath dance
+ j = i + 1
+ pt0 = points1.value[j][1][0]
+ pt1 = points2.value[j][1][0]
+ j_prev = (i - 1) % (len(points1.value) - 2) + 1
+ pt0_prev = points1.value[j_prev][1][0]
+ pt1_prev = points2.value[j_prev][1][0]
+ j_next = (i + 1) % (len(points1.value) - 2) + 1
+ pt0_next = points1.value[j_next][1][0]
+ pt1_next = points2.value[j_next][1][0]
+
+ pt0 = complex(*pt0)
+ pt1 = complex(*pt1)
+ pt0_prev = complex(*pt0_prev)
+ pt1_prev = complex(*pt1_prev)
+ pt0_next = complex(*pt0_next)
+ pt1_next = complex(*pt1_next)
+
+ # Find the ratio of the distance between the points
+ r0 = abs(pt0 - pt0_prev) / abs(pt0_next - pt0_prev)
+ r1 = abs(pt1 - pt1_prev) / abs(pt1_next - pt1_prev)
+ r_mid = (r0 + r1) / 2
+
+ pt0 = pt0_prev + r_mid * (pt0_next - pt0_prev)
+ pt1 = pt1_prev + r_mid * (pt1_next - pt1_prev)
+
+ points1.value[j] = (
+ points1.value[j][0],
+ (((pt0.real, pt0.imag),) + points1.value[j][1][1:]),
+ points1.value[j][2],
+ )
+ points2.value[j] = (
+ points2.value[j][0],
+ (((pt1.real, pt1.imag),) + points2.value[j][1][1:]),
+ points2.value[j][2],
+ )
+
+ # Convert the point pens back to segment pens
+ segment1 = RecordingPen()
+ converter = PointToSegmentPen(segment1, True)
+ points1.replay(converter)
+ segment2 = RecordingPen()
+ converter = PointToSegmentPen(segment2, True)
+ points2.replay(converter)
+
+ # Replace the wrong contours
+ wrongContour1.value = segment1.value
+ wrongContour2.value = segment2.value
# Assemble
fixed1 = RecordingPen()
@@ -338,13 +659,54 @@ class InterpolatablePlot:
try:
midway_glyphset = LerpGlyphSet(overriding1, overriding2)
self.draw_glyph(
- midway_glyphset, glyphname, {"type": "fixed"}, None, x=x, y=y
+ midway_glyphset,
+ glyphname,
+ {"type": "fixed"},
+ None,
+ x=x,
+ y=y,
+ scale=min(scales),
)
except ValueError:
- self.draw_shrug(x=x, y=y)
+ self.draw_emoticon(self.shrug, x=x, y=y)
y += self.height + self.pad
- def draw_label(self, label, *, x, y, color=(0, 0, 0), align=0, bold=False):
+ else:
+ emoticon = self.shrug
+ if "underweight" in problem_types:
+ emoticon = self.underweight
+ elif "overweight" in problem_types:
+ emoticon = self.overweight
+ elif "nothing" in problem_types:
+ emoticon = self.yay
+ self.draw_emoticon(emoticon, x=x, y=y)
+
+ if show_page_number:
+ self.draw_label(
+ str(self.page_number),
+ x=0,
+ y=self.total_height() - self.line_height,
+ width=self.total_width(),
+ color=self.head_color,
+ align=0.5,
+ )
+
+ def draw_label(
+ self,
+ label,
+ *,
+ x=0,
+ y=0,
+ color=(0, 0, 0),
+ align=0,
+ bold=False,
+ width=None,
+ height=None,
+ ):
+ if width is None:
+ width = self.width
+ if height is None:
+ height = self.height
cr = cairo.Context(self.surface)
cr.select_font_face(
"@cairo:",
@@ -360,23 +722,24 @@ class InterpolatablePlot:
cr.set_source_rgb(*color)
extents = cr.text_extents(label)
- if extents.width > self.width:
+ if extents.width > width:
# Shrink
- font_size *= self.width / extents.width
+ font_size *= width / extents.width
cr.set_font_size(font_size)
font_extents = cr.font_extents()
extents = cr.text_extents(label)
# Center
- label_x = x + (self.width - extents.width) * align
+ label_x = x + (width - extents.width) * align
label_y = y + font_extents[0]
cr.move_to(label_x, label_y)
cr.show_text(label)
- def draw_glyph(self, glyphset, glyphname, problems, which, *, x=0, y=0):
+ def draw_glyph(self, glyphset, glyphname, problems, which, *, x=0, y=0, scale=None):
if type(problems) not in (list, tuple):
problems = [problems]
+ midway = any(problem["type"] == "midway" for problem in problems)
problem_type = problems[0]["type"]
problem_types = set(problem["type"] for problem in problems)
if not all(pt == problem_type for pt in problem_types):
@@ -385,16 +748,23 @@ class InterpolatablePlot:
recording = RecordingPen()
glyph.draw(recording)
+ decomposedRecording = DecomposingRecordingPen(glyphset)
+ glyph.draw(decomposedRecording)
boundsPen = ControlBoundsPen(glyphset)
- recording.replay(boundsPen)
+ decomposedRecording.replay(boundsPen)
+ bounds = boundsPen.bounds
+ if bounds is None:
+ bounds = (0, 0, 0, 0)
- glyph_width = boundsPen.bounds[2] - boundsPen.bounds[0]
- glyph_height = boundsPen.bounds[3] - boundsPen.bounds[1]
+ glyph_width = bounds[2] - bounds[0]
+ glyph_height = bounds[3] - bounds[1]
- scale = None
if glyph_width:
- scale = self.width / glyph_width
+ if scale is None:
+ scale = self.width / glyph_width
+ else:
+ scale = min(scale, self.height / glyph_height)
if glyph_height:
if scale is None:
scale = self.height / glyph_height
@@ -411,59 +781,73 @@ class InterpolatablePlot:
(self.height - glyph_height * scale) / 2,
)
cr.scale(scale, -scale)
- cr.translate(-boundsPen.bounds[0], -boundsPen.bounds[3])
+ cr.translate(-bounds[0], -bounds[3])
if self.border_color:
cr.set_source_rgb(*self.border_color)
- cr.rectangle(
- boundsPen.bounds[0], boundsPen.bounds[1], glyph_width, glyph_height
- )
+ cr.rectangle(bounds[0], bounds[1], glyph_width, glyph_height)
cr.set_line_width(self.border_width / scale)
cr.stroke()
- if self.fill_color and problem_type != "open_path":
+ if self.fill_color or self.stroke_color:
pen = CairoPen(glyphset, cr)
- recording.replay(pen)
- cr.set_source_rgb(*self.fill_color)
- cr.fill()
+ decomposedRecording.replay(pen)
- if self.stroke_color:
- pen = CairoPen(glyphset, cr)
- recording.replay(pen)
- cr.set_source_rgb(*self.stroke_color)
- cr.set_line_width(self.stroke_width / scale)
- cr.stroke()
+ if self.fill_color and problem_type != "open_path":
+ cr.set_source_rgb(*self.fill_color)
+ cr.fill_preserve()
- if problem_type in (
- "nothing",
- "node_count",
- "node_incompatibility",
- "wrong_structure",
+ if self.stroke_color:
+ cr.set_source_rgb(*self.stroke_color)
+ cr.set_line_width(self.stroke_width / scale)
+ cr.stroke_preserve()
+
+ cr.new_path()
+
+ if "underweight" in problem_types or "overweight" in problem_types:
+ perContourPen = PerContourOrComponentPen(RecordingPen, glyphset=glyphset)
+ recording.replay(perContourPen)
+ for problem in problems:
+ if problem["type"] in ("underweight", "overweight"):
+ contour = perContourPen.value[problem["contour"]]
+ contour.replay(CairoPen(glyphset, cr))
+ cr.set_source_rgba(*self.weight_issue_contour_color)
+ cr.fill()
+
+ if any(
+ t in problem_types
+ for t in {
+ "nothing",
+ "node_count",
+ "node_incompatibility",
+ }
):
cr.set_line_cap(cairo.LINE_CAP_ROUND)
# Oncurve nodes
- for segment, args in recording.value:
+ for segment, args in decomposedRecording.value:
if not args:
continue
x, y = args[-1]
cr.move_to(x, y)
cr.line_to(x, y)
- cr.set_source_rgb(*self.oncurve_node_color)
+ cr.set_source_rgba(*self.oncurve_node_color)
cr.set_line_width(self.oncurve_node_diameter / scale)
cr.stroke()
# Offcurve nodes
- for segment, args in recording.value:
+ for segment, args in decomposedRecording.value:
+ if not args:
+ continue
for x, y in args[:-1]:
cr.move_to(x, y)
cr.line_to(x, y)
- cr.set_source_rgb(*self.offcurve_node_color)
+ cr.set_source_rgba(*self.offcurve_node_color)
cr.set_line_width(self.offcurve_node_diameter / scale)
cr.stroke()
# Handles
- for segment, args in recording.value:
+ for segment, args in decomposedRecording.value:
if not args:
pass
elif segment in ("moveTo", "lineTo"):
@@ -481,9 +865,9 @@ class InterpolatablePlot:
cr.new_sub_path()
cr.move_to(*args[-1])
else:
- assert False
+ continue
- cr.set_source_rgb(*self.handle_color)
+ cr.set_source_rgba(*self.handle_color)
cr.set_line_width(self.handle_width / scale)
cr.stroke()
@@ -505,7 +889,7 @@ class InterpolatablePlot:
cr.fill()
for problem in problems:
- if problem["type"] in ("nothing", "wrong_start_point", "wrong_structure"):
+ if problem["type"] in ("nothing", "wrong_start_point"):
idx = problem.get("contour")
# Draw suggested point
@@ -513,67 +897,50 @@ class InterpolatablePlot:
perContourPen = PerContourOrComponentPen(
RecordingPen, glyphset=glyphset
)
- recording.replay(perContourPen)
+ decomposedRecording.replay(perContourPen)
points = SimpleRecordingPointPen()
converter = SegmentToPointPen(points, False)
perContourPen.value[
idx if matching is None else matching[idx]
].replay(converter)
targetPoint = points.value[problem["value_2"]][0]
- cr.move_to(*targetPoint)
- cr.line_to(*targetPoint)
- cr.set_line_cap(cairo.LINE_CAP_ROUND)
- cr.set_source_rgb(*self.other_start_point_color)
- cr.set_line_width(self.start_point_width / scale)
- cr.stroke()
-
- # Draw start point
- cr.set_line_cap(cairo.LINE_CAP_ROUND)
- i = 0
- for segment, args in recording.value:
- if segment == "moveTo":
- if idx is None or i == idx:
- cr.move_to(*args[0])
- cr.line_to(*args[0])
- i += 1
+ cr.save()
+ cr.translate(*targetPoint)
+ cr.scale(1 / scale, 1 / scale)
+ self.draw_dot(
+ cr,
+ diameter=self.corrected_start_point_size,
+ color=self.corrected_start_point_color,
+ )
+ cr.restore()
+ # Draw start-point arrow
if which == 0 or not problem.get("reversed"):
- cr.set_source_rgb(*self.start_point_color)
+ color = self.start_point_color
else:
- cr.set_source_rgb(*self.reversed_start_point_color)
- cr.set_line_width(self.start_point_width / scale)
- cr.stroke()
-
- # Draw arrow
- cr.set_line_cap(cairo.LINE_CAP_SQUARE)
+ color = self.wrong_start_point_color
first_pt = None
i = 0
- for segment, args in recording.value:
+ cr.save()
+ for segment, args in decomposedRecording.value:
if segment == "moveTo":
first_pt = args[0]
continue
if first_pt is None:
continue
- second_pt = args[0]
+ if segment == "closePath":
+ second_pt = first_pt
+ else:
+ second_pt = args[0]
if idx is None or i == idx:
+ cr.save()
first_pt = complex(*first_pt)
second_pt = complex(*second_pt)
length = abs(second_pt - first_pt)
+ cr.translate(first_pt.real, first_pt.imag)
if length:
- # Draw handle
- length *= scale
- second_pt = (
- first_pt
- + (second_pt - first_pt)
- / length
- * self.start_handle_length
- )
- cr.move_to(first_pt.real, first_pt.imag)
- cr.line_to(second_pt.real, second_pt.imag)
# Draw arrowhead
- cr.save()
- cr.translate(second_pt.real, second_pt.imag)
cr.rotate(
math.atan2(
second_pt.imag - first_pt.imag,
@@ -581,64 +948,164 @@ class InterpolatablePlot:
)
)
cr.scale(1 / scale, 1 / scale)
- cr.translate(self.start_handle_width, 0)
- cr.move_to(0, 0)
- cr.line_to(
- -self.start_handle_arrow_length,
- -self.start_handle_arrow_length,
- )
- cr.line_to(
- -self.start_handle_arrow_length,
- self.start_handle_arrow_length,
+ self.draw_arrow(cr, color=color)
+ else:
+ # Draw circle
+ cr.scale(1 / scale, 1 / scale)
+ self.draw_dot(
+ cr,
+ diameter=self.corrected_start_point_size,
+ color=color,
)
- cr.close_path()
- cr.restore()
+ cr.restore()
+
+ if idx is not None:
+ break
first_pt = None
i += 1
- cr.set_line_width(self.start_handle_width / scale)
- cr.stroke()
+ cr.restore()
- def draw_cupcake(self):
- self.set_size(self.width, self.height)
- cupcake = self.cupcake.splitlines()
+ if problem["type"] == "kink":
+ idx = problem.get("contour")
+ perContourPen = PerContourOrComponentPen(
+ RecordingPen, glyphset=glyphset
+ )
+ decomposedRecording.replay(perContourPen)
+ points = SimpleRecordingPointPen()
+ converter = SegmentToPointPen(points, False)
+ perContourPen.value[idx if matching is None else matching[idx]].replay(
+ converter
+ )
+
+ targetPoint = points.value[problem["value"]][0]
+ cr.save()
+ cr.translate(*targetPoint)
+ cr.scale(1 / scale, 1 / scale)
+ if midway:
+ self.draw_circle(
+ cr,
+ diameter=self.kink_circle_size,
+ stroke_width=self.kink_circle_stroke_width,
+ color=self.kink_circle_color,
+ )
+ else:
+ self.draw_dot(
+ cr,
+ diameter=self.kink_point_size,
+ color=self.kink_point_color,
+ )
+ cr.restore()
+
+ return scale
+
+ def draw_dot(self, cr, *, x=0, y=0, color=(0, 0, 0), diameter=10):
+ cr.save()
+ cr.set_line_width(diameter)
+ cr.set_line_cap(cairo.LINE_CAP_ROUND)
+ cr.move_to(x, y)
+ cr.line_to(x, y)
+ if len(color) == 3:
+ color = color + (1,)
+ cr.set_source_rgba(*color)
+ cr.stroke()
+ cr.restore()
+
+ def draw_circle(
+ self, cr, *, x=0, y=0, color=(0, 0, 0), diameter=10, stroke_width=1
+ ):
+ cr.save()
+ cr.set_line_width(stroke_width)
+ cr.set_line_cap(cairo.LINE_CAP_SQUARE)
+ cr.arc(x, y, diameter / 2, 0, 2 * math.pi)
+ if len(color) == 3:
+ color = color + (1,)
+ cr.set_source_rgba(*color)
+ cr.stroke()
+ cr.restore()
+
+ def draw_arrow(self, cr, *, x=0, y=0, color=(0, 0, 0)):
+ cr.save()
+ if len(color) == 3:
+ color = color + (1,)
+ cr.set_source_rgba(*color)
+ cr.translate(self.start_arrow_length + x, y)
+ cr.move_to(0, 0)
+ cr.line_to(
+ -self.start_arrow_length,
+ -self.start_arrow_length * 0.4,
+ )
+ cr.line_to(
+ -self.start_arrow_length,
+ self.start_arrow_length * 0.4,
+ )
+ cr.close_path()
+ cr.fill()
+ cr.restore()
+
    def draw_text(self, text, *, x=0, y=0, color=(0, 0, 0), width=None, height=None):
        """Render multi-line monospace *text*, uniformly scaled to fit and
        centered within a width x height box whose top-left corner is (x, y).

        Used for the ASCII-art drawings (cupcake, emoticons).  Does nothing
        when the text measures to zero width (e.g. empty string).
        """
        if width is None:
            width = self.width
        if height is None:
            height = self.height

        text = text.splitlines()
        cr = cairo.Context(self.surface)
        cr.set_source_rgb(*color)
        cr.set_font_size(self.line_height)
        cr.select_font_face(
            "@cairo:monospace", cairo.FONT_SLANT_NORMAL, cairo.FONT_WEIGHT_NORMAL
        )
        # Measure the block: widest line advance x (line count * line height).
        text_width = 0
        text_height = 0
        font_extents = cr.font_extents()
        font_line_height = font_extents[2]
        font_ascent = font_extents[0]
        for line in text:
            extents = cr.text_extents(line)
            text_width = max(text_width, extents.x_advance)
            text_height += font_line_height
        if not text_width:
            return
        cr.translate(x, y)
        # Uniform scale so the whole block fits both dimensions.
        scale = min(width / text_width, height / text_height)
        # center
        cr.translate(
            (width - text_width * scale) / 2, (height - text_height * scale) / 2
        )
        cr.scale(scale, scale)

        # Baseline of the first line sits one ascent below the block top.
        cr.translate(0, font_ascent)
        for line in text:
            cr.move_to(0, 0)
            cr.show_text(line)
            cr.translate(0, font_line_height)
- def draw_shrug(self, x=0, y=0):
- cr = cairo.Context(self.surface)
- cr.translate(x, y)
- cr.set_source_rgb(*self.shrug_color)
- cr.set_font_size(self.line_height)
- cr.select_font_face(
- "@cairo:monospace", cairo.FONT_SLANT_NORMAL, cairo.FONT_WEIGHT_NORMAL
+ def draw_cupcake(self):
+ self.set_size(self.total_width(), self.total_height())
+
+ self.draw_label(
+ self.no_issues_label,
+ x=self.pad,
+ y=self.pad,
+ color=self.no_issues_label_color,
+ width=self.total_width() - 2 * self.pad,
+ align=0.5,
+ bold=True,
+ )
+
+ self.draw_text(
+ self.cupcake,
+ x=self.pad,
+ y=self.pad + self.line_height,
+ width=self.total_width() - 2 * self.pad,
+ height=self.total_height() - 2 * self.pad - self.line_height,
+ color=self.cupcake_color,
)
- extents = cr.text_extents(self.shrug)
- if not extents.width:
- return
- cr.translate(0, self.height * 0.6)
- scale = self.width / extents.width
- cr.scale(scale, scale)
- cr.move_to(-extents.x_bearing, 0)
- cr.show_text(self.shrug)
+
+ def draw_emoticon(self, emoticon, x=0, y=0):
+ self.draw_text(emoticon, x=x, y=y, color=self.emoticon_color)
class InterpolatablePostscriptLike(InterpolatablePlot):
@@ -653,12 +1120,9 @@ class InterpolatablePostscriptLike(InterpolatablePlot):
self.surface.set_size(width, height)
def show_page(self):
+ super().show_page()
self.surface.show_page()
- def __enter__(self):
- self.surface = cairo.PSSurface(self.out, self.width, self.height)
- return self
-
class InterpolatablePS(InterpolatablePostscriptLike):
def __enter__(self):
@@ -694,6 +1158,7 @@ class InterpolatableSVG(InterpolatablePlot):
self.surface = cairo.SVGSurface(self.sink, width, height)
def show_page(self):
+ super().show_page()
self.surface.finish()
self.out.append(self.sink.getvalue())
self.surface = None
diff --git a/contrib/python/fonttools/fontTools/varLib/interpolatableTestContourOrder.py b/contrib/python/fonttools/fontTools/varLib/interpolatableTestContourOrder.py
new file mode 100644
index 0000000000..d089e43576
--- /dev/null
+++ b/contrib/python/fonttools/fontTools/varLib/interpolatableTestContourOrder.py
@@ -0,0 +1,74 @@
+from .interpolatableHelpers import *
+
+
+def test_contour_order(glyph0, glyph1):
+ # We try matching both the StatisticsControlPen vector
+ # and the StatisticsPen vector.
+ #
+ # If either method found a identity matching, accept it.
+ # This is crucial for fonts like Kablammo[MORF].ttf and
+ # Nabla[EDPT,EHLT].ttf, since they really confuse the
+ # StatisticsPen vector because of their area=0 contours.
+
+ n = len(glyph0.controlVectors)
+ matching = None
+ matching_cost = 0
+ identity_cost = 0
+ done = n <= 1
+ if not done:
+ m0Control = glyph0.controlVectors
+ m1Control = glyph1.controlVectors
+ (
+ matching_control,
+ matching_cost_control,
+ identity_cost_control,
+ ) = matching_for_vectors(m0Control, m1Control)
+ done = matching_cost_control == identity_cost_control
+ if not done:
+ m0Green = glyph0.greenVectors
+ m1Green = glyph1.greenVectors
+ (
+ matching_green,
+ matching_cost_green,
+ identity_cost_green,
+ ) = matching_for_vectors(m0Green, m1Green)
+ done = matching_cost_green == identity_cost_green
+
+ if not done:
+ # See if reversing contours in one master helps.
+ # That's a common problem. Then the wrong_start_point
+ # test will fix them.
+ #
+ # Reverse the sign of the area (0); the rest stay the same.
+ if not done:
+ m1ControlReversed = [(-m[0],) + m[1:] for m in m1Control]
+ (
+ matching_control_reversed,
+ matching_cost_control_reversed,
+ identity_cost_control_reversed,
+ ) = matching_for_vectors(m0Control, m1ControlReversed)
+ done = matching_cost_control_reversed == identity_cost_control_reversed
+ if not done:
+ m1GreenReversed = [(-m[0],) + m[1:] for m in m1Green]
+ (
+ matching_control_reversed,
+ matching_cost_control_reversed,
+ identity_cost_control_reversed,
+ ) = matching_for_vectors(m0Control, m1ControlReversed)
+ done = matching_cost_control_reversed == identity_cost_control_reversed
+
+ if not done:
+ # Otherwise, use the worst of the two matchings.
+ if (
+ matching_cost_control / identity_cost_control
+ < matching_cost_green / identity_cost_green
+ ):
+ matching = matching_control
+ matching_cost = matching_cost_control
+ identity_cost = identity_cost_control
+ else:
+ matching = matching_green
+ matching_cost = matching_cost_green
+ identity_cost = identity_cost_green
+
+ return matching, matching_cost, identity_cost
diff --git a/contrib/python/fonttools/fontTools/varLib/interpolatableTestStartingPoint.py b/contrib/python/fonttools/fontTools/varLib/interpolatableTestStartingPoint.py
new file mode 100644
index 0000000000..9f742a14f5
--- /dev/null
+++ b/contrib/python/fonttools/fontTools/varLib/interpolatableTestStartingPoint.py
@@ -0,0 +1,105 @@
+from .interpolatableHelpers import *
+
+
+def test_starting_point(glyph0, glyph1, ix, tolerance, matching):
+    """Test whether contour ``ix`` starts at the same point (and winds the
+    same way) in two masters.
+
+    ``matching`` maps contour indices of ``glyph0`` to contour indices of
+    ``glyph1``; ``None`` means the identity mapping.
+
+    Returns a tuple ``(proposed_point, reverse, min_cost, first_cost)``:
+    the best-matching alternative starting point, whether the best match
+    reverses the contour, the interpolation cost of that best match, and
+    the cost of keeping the starting point as-is.  ``proposed_point == 0``
+    and ``reverse == False`` mean no better starting point was found.
+    """
+    if matching is None:
+        # No explicit contour matching given: pair contours by index.
+        matching = list(range(len(glyph0.isomorphisms)))
+    contour0 = glyph0.isomorphisms[ix]
+    contour1 = glyph1.isomorphisms[matching[ix]]
+    m0Vectors = glyph0.greenVectors
+    m1Vectors = [glyph1.greenVectors[i] for i in matching]
+
+    proposed_point = 0
+    reverse = False
+    min_cost = first_cost = 1
+
+    # Each isomorphism entry is indexed below as (complex-vector,
+    # starting-point, reversed-flag) — see the [0]/[1]/[2] accesses.
+    c0 = contour0[0]
+    # Next few lines duplicated below.
+    # costs[0] is the cost of the unmodified starting point; the minimum is
+    # the cost of the best rotation/reversal of the m1 contour.
+    costs = [vdiff_hypot2_complex(c0[0], c1[0]) for c1 in contour1]
+    min_cost_idx, min_cost = min(enumerate(costs), key=lambda x: x[1])
+    first_cost = costs[0]
+
+    if min_cost < first_cost * tolerance:
+        # NOTE(review): this_tolerance is computed here but never used in
+        # the rest of the function.
+        this_tolerance = min_cost / first_cost
+        # c0 is the first isomorphism of the m0 master
+        # contour1 is list of all isomorphisms of the m1 master
+        #
+        # If the two shapes are both circle-ish and slightly
+        # rotated, we detect wrong start point. This is for
+        # example the case hundreds of times in
+        # RobotoSerif-Italic[GRAD,opsz,wdth,wght].ttf
+        #
+        # If the proposed point is only one off from the first
+        # point (and not reversed), try harder:
+        #
+        # Find the major eigenvector of the covariance matrix,
+        # and rotate the contours by that angle. Then find the
+        # closest point again. If it matches this time, let it
+        # pass.
+
+        proposed_point = contour1[min_cost_idx][1]
+        reverse = contour1[min_cost_idx][2]
+        num_points = len(glyph1.points[ix])
+        leeway = 3
+        # Only retry when the proposed shift is near-zero (within `leeway`
+        # points of the original start, in either direction) and the match
+        # is not a reversal.
+        if not reverse and (
+            proposed_point <= leeway or proposed_point >= num_points - leeway
+        ):
+            # Try harder
+
+            # Recover the covariance matrix from the GreenVectors.
+            # This is a 2x2 matrix.
+            transforms = []
+            for vector in (m0Vectors[ix], m1Vectors[ix]):
+                # Unpack the green-vector statistics.  Layout inferred from
+                # the arithmetic below: [0]=signed area, [1]=meanX,
+                # [2]=meanY, [3]=2*stddevX, [4]=2*stddevY,
+                # [5]=correlation*|area| — TODO confirm against
+                # interpolatableHelpers.
+                meanX = vector[1]
+                meanY = vector[2]
+                stddevX = vector[3] * 0.5
+                stddevY = vector[4] * 0.5
+                correlation = vector[5] / abs(vector[0])
+
+                # https://cookierobotics.com/007/
+                a = stddevX * stddevX # VarianceX
+                c = stddevY * stddevY # VarianceY
+                b = correlation * stddevX * stddevY # Covariance
+
+                delta = (((a - c) * 0.5) ** 2 + b * b) ** 0.5
+                lambda1 = (a + c) * 0.5 + delta # Major eigenvalue
+                lambda2 = (a + c) * 0.5 - delta # Minor eigenvalue
+                # Angle of the major eigenvector; degenerate b == 0 case
+                # picks 90° or 0° depending on which variance dominates.
+                theta = atan2(lambda1 - a, b) if b != 0 else (pi * 0.5 if a < c else 0)
+                trans = Transform()
+                # Don't translate here. We are working on the complex-vector
+                # that includes more than just the points. It's horrible what
+                # we are doing anyway...
+                # trans = trans.translate(meanX, meanY)
+                trans = trans.rotate(theta)
+                trans = trans.scale(sqrt(lambda1), sqrt(lambda2))
+                transforms.append(trans)
+
+            # Normalize both masters' contours by their own eigen-transform,
+            # then redo the starting-point search in the normalized space.
+            trans = transforms[0]
+            new_c0 = (
+                [complex(*trans.transformPoint((pt.real, pt.imag))) for pt in c0[0]],
+            ) + c0[1:]
+            trans = transforms[1]
+            new_contour1 = []
+            for c1 in contour1:
+                new_c1 = (
+                    [
+                        complex(*trans.transformPoint((pt.real, pt.imag)))
+                        for pt in c1[0]
+                    ],
+                ) + c1[1:]
+                new_contour1.append(new_c1)
+
+            # Next few lines duplicate from above.
+            costs = [
+                vdiff_hypot2_complex(new_c0[0], new_c1[0]) for new_c1 in new_contour1
+            ]
+            min_cost_idx, min_cost = min(enumerate(costs), key=lambda x: x[1])
+            first_cost = costs[0]
+            # NOTE(review): the "let it pass" suppression is deliberately
+            # disabled (commented out); the retry currently has no effect on
+            # the returned values other than min_cost/first_cost themselves.
+            if min_cost < first_cost * tolerance:
+                # Don't report this
+                # min_cost = first_cost
+                # reverse = False
+                # proposed_point = 0  # new_contour1[min_cost_idx][1]
+                pass
+
+    return proposed_point, reverse, min_cost, first_cost
diff --git a/contrib/python/fonttools/ya.make b/contrib/python/fonttools/ya.make
index 2e2c1c626e..c2de0cc284 100644
--- a/contrib/python/fonttools/ya.make
+++ b/contrib/python/fonttools/ya.make
@@ -2,7 +2,7 @@
PY3_LIBRARY()
-VERSION(4.45.1)
+VERSION(4.46.0)
LICENSE(MIT)
@@ -299,7 +299,10 @@ PY_SRCS(
fontTools/varLib/instancer/names.py
fontTools/varLib/instancer/solver.py
fontTools/varLib/interpolatable.py
+ fontTools/varLib/interpolatableHelpers.py
fontTools/varLib/interpolatablePlot.py
+ fontTools/varLib/interpolatableTestContourOrder.py
+ fontTools/varLib/interpolatableTestStartingPoint.py
fontTools/varLib/interpolate_layout.py
fontTools/varLib/iup.py
fontTools/varLib/merger.py