author:    robot-contrib <[email protected]>  2023-12-05 15:07:45 +0300
committer: robot-contrib <[email protected]>  2023-12-05 16:18:28 +0300
commit:    1a85d20071d4b1e481452e8d0b81f2e3e888803c
tree:      dea60d2096da94e025ed6aefcd672715db9bffea
parent:    d7830d26621b08d508ba92fccf9e4971cc8ded61
Update contrib/python/fonttools to 4.45.0
Diffstat (limited to 'contrib/python/fonttools/fontTools/varLib/interpolatable.py')
 -rw-r--r--  contrib/python/fonttools/fontTools/varLib/interpolatable.py | 682
 1 file changed, 482 insertions(+), 200 deletions(-)
diff --git a/contrib/python/fonttools/fontTools/varLib/interpolatable.py b/contrib/python/fonttools/fontTools/varLib/interpolatable.py
index 05ed3f768eb..74dd15b9689 100644
--- a/contrib/python/fonttools/fontTools/varLib/interpolatable.py
+++ b/contrib/python/fonttools/fontTools/varLib/interpolatable.py
@@ -9,13 +9,15 @@ $ fonttools varLib.interpolatable font1 font2 ...
from fontTools.pens.basePen import AbstractPen, BasePen
from fontTools.pens.pointPen import AbstractPointPen, SegmentToPointPen
from fontTools.pens.recordingPen import RecordingPen
-from fontTools.pens.statisticsPen import StatisticsPen
+from fontTools.pens.statisticsPen import StatisticsPen, StatisticsControlPen
from fontTools.pens.momentsPen import OpenContourError
-from fontTools.varLib.models import piecewiseLinearMap
-from collections import defaultdict
-import math
+from fontTools.varLib.models import piecewiseLinearMap, normalizeLocation
+from fontTools.misc.fixedTools import floatToFixedToStr
+from collections import defaultdict, deque
+from functools import wraps
+from pprint import pformat
+from math import sqrt, copysign
import itertools
-import sys
import logging
log = logging.getLogger("fontTools.varLib.interpolatable")
@@ -67,7 +69,7 @@ class PerContourOrComponentPen(PerContourPen):
self.value[-1].addComponent(glyphName, transformation)
-class RecordingPointPen(AbstractPointPen):
+class SimpleRecordingPointPen(AbstractPointPen):
def __init__(self):
self.value = []
@@ -150,133 +152,266 @@ except ImportError:
)
-def test_gen(glyphsets, glyphs=None, names=None, ignore_missing=False):
+def _contour_vector_from_stats(stats):
+ size = sqrt(abs(stats.area))
+ return (
+ copysign((size), stats.area),
+ stats.meanX,
+ stats.meanY,
+ stats.stddevX * 2,
+ stats.stddevY * 2,
+ stats.correlation * size,
+ )
+
+
+def _points_characteristic_bits(points):
+ bits = 0
+ for pt, b in reversed(points):
+ bits = (bits << 1) | b
+ return bits
+
+
+def _points_complex_vector(points):
+ vector = []
+ points = [complex(*pt) for pt, _ in points]
+ n = len(points)
+ points.extend(points[:2])
+ for i in range(n):
+ p0 = points[i]
+
+ # The point itself
+ vector.append(p0)
+
+ # The distance to the next point;
+ # Emphasized by 2 empirically
+ p1 = points[i + 1]
+ d0 = p1 - p0
+ vector.append(d0 * 2)
+
+ """
+ # The angle to the next point, as a cross product;
+    # Square root of, to match dimensionality of distance.
+ p2 = points[i + 2]
+ d1 = p2 - p1
+ cross = d0.real * d1.imag - d0.imag * d1.real
+ cross = copysign(sqrt(abs(cross)), cross)
+ vector.append(cross)
+ """
+
+ return vector
+
+
+def _add_isomorphisms(points, isomorphisms, reverse):
+ reference_bits = _points_characteristic_bits(points)
+ n = len(points)
+
+ # if points[0][0] == points[-1][0]:
+ # abort
+
+ if reverse:
+ points = points[::-1]
+ bits = _points_characteristic_bits(points)
+ else:
+ bits = reference_bits
+
+ vector = _points_complex_vector(points)
+
+ assert len(vector) % n == 0
+ mult = len(vector) // n
+ mask = (1 << n) - 1
+
+ for i in range(n):
+ b = ((bits << (n - i)) & mask) | (bits >> i)
+ if b == reference_bits:
+ isomorphisms.append(
+ (_rot_list(vector, -i * mult), n - 1 - i if reverse else i, reverse)
+ )
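The characteristic-bits trick above packs a contour's on-curve/off-curve flags into an integer and keeps only those rotations (and mirrored rotations) whose bit pattern matches the reference, so candidate starting points never pair an on-curve node with an off-curve one. A minimal standalone sketch of that check, on made-up toy points:

# Toy illustration of the characteristic-bits rotation test used by
# _add_isomorphisms(); the points and on-curve flags below are invented.
points = [((0, 0), 1), ((50, 100), 0), ((100, 0), 1)]  # (pt, is_on_curve)

bits = 0
for _pt, on_curve in reversed(points):  # same packing as _points_characteristic_bits
    bits = (bits << 1) | on_curve

n = len(points)
mask = (1 << n) - 1
for i in range(n):
    rotated = ((bits << (n - i)) & mask) | (bits >> i)  # rotate the pattern by i
    if rotated == bits:
        print("rotation by", i, "preserves the on/off-curve pattern")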
+
+
+def _find_parents_and_order(glyphsets, locations):
+ parents = [None] + list(range(len(glyphsets) - 1))
+ order = list(range(len(glyphsets)))
+ if locations:
+ # Order base master first
+ bases = (i for i, l in enumerate(locations) if all(v == 0 for v in l.values()))
+ if bases:
+ base = next(bases)
+ logging.info("Base master index %s, location %s", base, locations[base])
+ else:
+ base = 0
+ logging.warning("No base master location found")
+
+ # Form a minimum spanning tree of the locations
+ try:
+ from scipy.sparse.csgraph import minimum_spanning_tree
+
+ graph = [[0] * len(locations) for _ in range(len(locations))]
+ axes = set()
+ for l in locations:
+ axes.update(l.keys())
+ axes = sorted(axes)
+ vectors = [tuple(l.get(k, 0) for k in axes) for l in locations]
+ for i, j in itertools.combinations(range(len(locations)), 2):
+ graph[i][j] = _vdiff_hypot2(vectors[i], vectors[j])
+
+ tree = minimum_spanning_tree(graph)
+ rows, cols = tree.nonzero()
+ graph = defaultdict(set)
+ for row, col in zip(rows, cols):
+ graph[row].add(col)
+ graph[col].add(row)
+
+ # Traverse graph from the base and assign parents
+ parents = [None] * len(locations)
+ order = []
+ visited = set()
+ queue = deque([base])
+ while queue:
+ i = queue.popleft()
+ visited.add(i)
+ order.append(i)
+ for j in sorted(graph[i]):
+ if j not in visited:
+ parents[j] = i
+ queue.append(j)
+
+ except ImportError:
+ pass
+
+ log.info("Parents: %s", parents)
+ log.info("Order: %s", order)
+ return parents, order
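When master locations are known, the helper above prefers to walk the masters along a minimum spanning tree rooted at the base master rather than comparing everything against master 0. A hedged sketch of calling the private helper directly on toy data (real callers go through test_gen(); only the length of the glyphsets argument is consulted in this branch):

# Hypothetical direct call; _find_parents_and_order is a private helper and
# may change. Three masters: Light, Regular (base) and Bold.
from fontTools.varLib.interpolatable import _find_parents_and_order

locations = [{"wght": -1}, {"wght": 0}, {"wght": 1}]
glyphsets = [None, None, None]  # only len(glyphsets) is used here
parents, order = _find_parents_and_order(glyphsets, locations)
# With scipy installed this yields parents == [1, None, 1] and order == [1, 0, 2]:
# the Regular master is visited first and both extremes hang off it. Without
# scipy the helper falls back to the simple chain [None, 0, 1] / [0, 1, 2].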
+
+
+def test_gen(
+ glyphsets,
+ glyphs=None,
+ names=None,
+ ignore_missing=False,
+ *,
+ locations=None,
+ tolerance=0.95,
+):
if names is None:
names = glyphsets
+
if glyphs is None:
# `glyphs = glyphsets[0].keys()` is faster, certainly, but doesn't allow for sparse TTFs/OTFs given out of order
# ... risks the sparse master being the first one, and only processing a subset of the glyphs
glyphs = {g for glyphset in glyphsets for g in glyphset.keys()}
- hist = []
+ parents, order = _find_parents_and_order(glyphsets, locations)
+
+ def grand_parent(i, glyphname):
+ if i is None:
+ return None
+ i = parents[i]
+ if i is None:
+ return None
+ while parents[i] is not None and glyphsets[i][glyphname] is None:
+ i = parents[i]
+ return i
for glyph_name in glyphs:
- try:
- m0idx = 0
- allVectors = []
- allNodeTypes = []
- allContourIsomorphisms = []
- allGlyphs = [glyphset[glyph_name] for glyphset in glyphsets]
- if len([1 for glyph in allGlyphs if glyph is not None]) <= 1:
+ log.info("Testing glyph %s", glyph_name)
+ allGreenVectors = []
+ allControlVectors = []
+ allNodeTypes = []
+ allContourIsomorphisms = []
+ allGlyphs = [glyphset[glyph_name] for glyphset in glyphsets]
+ if len([1 for glyph in allGlyphs if glyph is not None]) <= 1:
+ continue
+ for glyph, glyphset, name in zip(allGlyphs, glyphsets, names):
+ if glyph is None:
+ if not ignore_missing:
+ yield (glyph_name, {"type": "missing", "master": name})
+ allNodeTypes.append(None)
+ allControlVectors.append(None)
+ allGreenVectors.append(None)
+ allContourIsomorphisms.append(None)
continue
- for glyph, glyphset, name in zip(allGlyphs, glyphsets, names):
- if glyph is None:
- if not ignore_missing:
- yield (glyph_name, {"type": "missing", "master": name})
- allNodeTypes.append(None)
- allVectors.append(None)
- allContourIsomorphisms.append(None)
- continue
- perContourPen = PerContourOrComponentPen(
- RecordingPen, glyphset=glyphset
- )
+ perContourPen = PerContourOrComponentPen(RecordingPen, glyphset=glyphset)
+ try:
+ glyph.draw(perContourPen, outputImpliedClosingLine=True)
+ except TypeError:
+ glyph.draw(perContourPen)
+ contourPens = perContourPen.value
+ del perContourPen
+
+ contourControlVectors = []
+ contourGreenVectors = []
+ contourIsomorphisms = []
+ nodeTypes = []
+ allNodeTypes.append(nodeTypes)
+ allControlVectors.append(contourControlVectors)
+ allGreenVectors.append(contourGreenVectors)
+ allContourIsomorphisms.append(contourIsomorphisms)
+ for ix, contour in enumerate(contourPens):
+ contourOps = tuple(op for op, arg in contour.value)
+ nodeTypes.append(contourOps)
+
+ greenStats = StatisticsPen(glyphset=glyphset)
+ controlStats = StatisticsControlPen(glyphset=glyphset)
try:
- glyph.draw(perContourPen, outputImpliedClosingLine=True)
- except TypeError:
- glyph.draw(perContourPen)
- contourPens = perContourPen.value
- del perContourPen
-
- contourVectors = []
- contourIsomorphisms = []
- nodeTypes = []
- allNodeTypes.append(nodeTypes)
- allVectors.append(contourVectors)
- allContourIsomorphisms.append(contourIsomorphisms)
- for ix, contour in enumerate(contourPens):
- nodeVecs = tuple(instruction[0] for instruction in contour.value)
- nodeTypes.append(nodeVecs)
-
- stats = StatisticsPen(glyphset=glyphset)
- try:
- contour.replay(stats)
- except OpenContourError as e:
- yield (
- glyph_name,
- {"master": name, "contour": ix, "type": "open_path"},
- )
- continue
- size = math.sqrt(abs(stats.area)) * 0.5
- vector = (
- int(size),
- int(stats.meanX),
- int(stats.meanY),
- int(stats.stddevX * 2),
- int(stats.stddevY * 2),
- int(stats.correlation * size),
- )
- contourVectors.append(vector)
- # print(vector)
-
- # Check starting point
- if nodeVecs[0] == "addComponent":
- continue
- assert nodeVecs[0] == "moveTo"
- assert nodeVecs[-1] in ("closePath", "endPath")
- points = RecordingPointPen()
- converter = SegmentToPointPen(points, False)
- contour.replay(converter)
- # points.value is a list of pt,bool where bool is true if on-curve and false if off-curve;
- # now check all rotations and mirror-rotations of the contour and build list of isomorphic
- # possible starting points.
- bits = 0
- for pt, b in points.value:
- bits = (bits << 1) | b
- n = len(points.value)
- mask = (1 << n) - 1
- isomorphisms = []
- contourIsomorphisms.append(isomorphisms)
- complexPoints = [complex(*pt) for pt, bl in points.value]
- for i in range(n):
- b = ((bits << i) & mask) | ((bits >> (n - i)))
- if b == bits:
- isomorphisms.append(_rot_list(complexPoints, i))
- # Add mirrored rotations
- mirrored = list(reversed(points.value))
- reversed_bits = 0
- for pt, b in mirrored:
- reversed_bits = (reversed_bits << 1) | b
- complexPoints = list(reversed(complexPoints))
- for i in range(n):
- b = ((reversed_bits << i) & mask) | ((reversed_bits >> (n - i)))
- if b == bits:
- isomorphisms.append(_rot_list(complexPoints, i))
-
- # m0idx should be the index of the first non-None item in allNodeTypes,
- # else give it the last item.
- m0idx = next(
- (i for i, x in enumerate(allNodeTypes) if x is not None),
- len(allNodeTypes) - 1,
- )
- # m0 is the first non-None item in allNodeTypes, or last one if all None
- m0 = allNodeTypes[m0idx]
- for i, m1 in enumerate(allNodeTypes[m0idx + 1 :]):
- if m1 is None:
- continue
- if len(m0) != len(m1):
+ contour.replay(greenStats)
+ contour.replay(controlStats)
+ except OpenContourError as e:
yield (
glyph_name,
- {
- "type": "path_count",
- "master_1": names[m0idx],
- "master_2": names[m0idx + i + 1],
- "value_1": len(m0),
- "value_2": len(m1),
- },
+ {"master": name, "contour": ix, "type": "open_path"},
)
- if m0 == m1:
continue
+ contourGreenVectors.append(_contour_vector_from_stats(greenStats))
+ contourControlVectors.append(_contour_vector_from_stats(controlStats))
+
+ # Check starting point
+ if contourOps[0] == "addComponent":
+ continue
+ assert contourOps[0] == "moveTo"
+ assert contourOps[-1] in ("closePath", "endPath")
+ points = SimpleRecordingPointPen()
+ converter = SegmentToPointPen(points, False)
+ contour.replay(converter)
+ # points.value is a list of pt,bool where bool is true if on-curve and false if off-curve;
+ # now check all rotations and mirror-rotations of the contour and build list of isomorphic
+ # possible starting points.
+
+ isomorphisms = []
+ contourIsomorphisms.append(isomorphisms)
+
+ # Add rotations
+ _add_isomorphisms(points.value, isomorphisms, False)
+ # Add mirrored rotations
+ _add_isomorphisms(points.value, isomorphisms, True)
+
+ matchings = [None] * len(allControlVectors)
+
+ for m1idx in order:
+ if allNodeTypes[m1idx] is None:
+ continue
+ m0idx = grand_parent(m1idx, glyph_name)
+ if m0idx is None:
+ continue
+ if allNodeTypes[m0idx] is None:
+ continue
+
+ m1 = allNodeTypes[m1idx]
+ m0 = allNodeTypes[m0idx]
+ if len(m0) != len(m1):
+ yield (
+ glyph_name,
+ {
+ "type": "path_count",
+ "master_1": names[m0idx],
+ "master_2": names[m1idx],
+ "value_1": len(m0),
+ "value_2": len(m1),
+ },
+ )
+ continue
+
+ if m0 != m1:
for pathIx, (nodes1, nodes2) in enumerate(zip(m0, m1)):
if nodes1 == nodes2:
continue
@@ -287,7 +422,7 @@ def test_gen(glyphsets, glyphs=None, names=None, ignore_missing=False):
"type": "node_count",
"path": pathIx,
"master_1": names[m0idx],
- "master_2": names[m0idx + i + 1],
+ "master_2": names[m1idx],
"value_1": len(nodes1),
"value_2": len(nodes2),
},
@@ -302,89 +437,126 @@ def test_gen(glyphsets, glyphs=None, names=None, ignore_missing=False):
"path": pathIx,
"node": nodeIx,
"master_1": names[m0idx],
- "master_2": names[m0idx + i + 1],
+ "master_2": names[m1idx],
"value_1": n1,
"value_2": n2,
},
)
continue
- # m0idx should be the index of the first non-None item in allVectors,
- # else give it the last item.
- m0idx = next(
- (i for i, x in enumerate(allVectors) if x is not None),
- len(allVectors) - 1,
- )
- # m0 is the first non-None item in allVectors, or last one if all None
- m0 = allVectors[m0idx]
- if m0 is not None and len(m0) > 1:
- for i, m1 in enumerate(allVectors[m0idx + 1 :]):
- if m1 is None:
- continue
- if len(m0) != len(m1):
- # We already reported this
- continue
- costs = [[_vdiff_hypot2(v0, v1) for v1 in m1] for v0 in m0]
- matching, matching_cost = min_cost_perfect_bipartite_matching(costs)
- identity_matching = list(range(len(m0)))
- identity_cost = sum(costs[i][i] for i in range(len(m0)))
+ m1Control = allControlVectors[m1idx]
+ m1Green = allGreenVectors[m1idx]
+ m0Control = allControlVectors[m0idx]
+ m0Green = allGreenVectors[m0idx]
+ if len(m1Control) > 1:
+ identity_matching = list(range(len(m0Control)))
+
+ # We try matching both the StatisticsControlPen vector
+ # and the StatisticsPen vector.
+            # If either method found an identity matching, accept it.
+ # This is crucial for fonts like Kablammo[MORF].ttf and
+ # Nabla[EDPT,EHLT].ttf, since they really confuse the
+ # StatisticsPen vector because of their area=0 contours.
+ #
+ # TODO: Optimize by only computing the StatisticsPen vector
+ # and then checking if it is the identity vector. Only if
+ # not, compute the StatisticsControlPen vector and check both.
+
+ costsControl = [
+ [_vdiff_hypot2(v0, v1) for v1 in m1Control] for v0 in m0Control
+ ]
+ (
+ matching_control,
+ matching_cost_control,
+ ) = min_cost_perfect_bipartite_matching(costsControl)
+ identity_cost_control = sum(
+ costsControl[i][i] for i in range(len(m0Control))
+ )
+ done = matching_cost_control == identity_cost_control
+
+ if not done:
+ costsGreen = [
+ [_vdiff_hypot2(v0, v1) for v1 in m1Green] for v0 in m0Green
+ ]
+ (
+ matching_green,
+ matching_cost_green,
+ ) = min_cost_perfect_bipartite_matching(costsGreen)
+ identity_cost_green = sum(
+ costsGreen[i][i] for i in range(len(m0Control))
+ )
+ done = matching_cost_green == identity_cost_green
+
+ if not done:
+ # Otherwise, use the worst of the two matchings.
if (
- matching != identity_matching
- and matching_cost < identity_cost * 0.95
+ matching_cost_control / identity_cost_control
+ < matching_cost_green / identity_cost_green
):
+ matching = matching_control
+ matching_cost = matching_cost_control
+ identity_cost = identity_cost_control
+ else:
+ matching = matching_green
+ matching_cost = matching_cost_green
+ identity_cost = identity_cost_green
+
+ if matching_cost < identity_cost * tolerance:
+ # print(matching_cost_control / identity_cost_control, matching_cost_green / identity_cost_green)
+
yield (
glyph_name,
{
"type": "contour_order",
"master_1": names[m0idx],
- "master_2": names[m0idx + i + 1],
- "value_1": list(range(len(m0))),
+ "master_2": names[m1idx],
+ "value_1": list(range(len(m0Control))),
"value_2": matching,
},
)
- break
+ matchings[m1idx] = matching
- # m0idx should be the index of the first non-None item in allContourIsomorphisms,
- # else give it the last item.
- m0idx = next(
- (i for i, x in enumerate(allContourIsomorphisms) if x is not None),
- len(allVectors) - 1,
- )
- # m0 is the first non-None item in allContourIsomorphisms, or last one if all None
+ m1 = allContourIsomorphisms[m1idx]
m0 = allContourIsomorphisms[m0idx]
- if m0:
- for i, m1 in enumerate(allContourIsomorphisms[m0idx + 1 :]):
- if m1 is None:
- continue
- if len(m0) != len(m1):
- # We already reported this
+
+ for ix, (contour0, contour1) in enumerate(zip(m0, m1)):
+ if len(contour0) == 0 or len(contour0) != len(contour1):
+ # We already reported this; or nothing to do
+ continue
+
+ c0 = contour0[0]
+ costs = [_vdiff_hypot2_complex(c0[0], c1[0]) for c1 in contour1]
+ min_cost_idx, min_cost = min(enumerate(costs), key=lambda x: x[1])
+ first_cost = costs[0]
+ if min_cost < first_cost * tolerance:
+ reverse = contour1[min_cost_idx][2]
+
+ # If contour-order is wrong, don't report a reversing
+ if (
+ reverse
+ and matchings[m1idx] is not None
+ and matchings[m1idx][ix] != ix
+ ):
continue
- for ix, (contour0, contour1) in enumerate(zip(m0, m1)):
- c0 = contour0[0]
- costs = [_vdiff_hypot2_complex(c0, c1) for c1 in contour1]
- min_cost = min(costs)
- first_cost = costs[0]
- if min_cost < first_cost * 0.95:
- yield (
- glyph_name,
- {
- "type": "wrong_start_point",
- "contour": ix,
- "master_1": names[m0idx],
- "master_2": names[m0idx + i + 1],
- },
- )
- except ValueError as e:
- yield (
- glyph_name,
- {"type": "math_error", "master": name, "error": e},
- )
+ yield (
+ glyph_name,
+ {
+ "type": "wrong_start_point",
+ "contour": ix,
+ "master_1": names[m0idx],
+ "master_2": names[m1idx],
+ "value_1": 0,
+ "value_2": contour1[min_cost_idx][1],
+ "reversed": reverse,
+ },
+ )
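The contour_order check above boils down to a min-cost perfect bipartite matching between per-contour statistics vectors, accepted only when it beats the identity assignment by the given tolerance. A toy sketch with made-up six-component vectors (the matching helper needs scipy or munkres installed):

# Made-up vectors standing in for _contour_vector_from_stats() output.
from fontTools.varLib.interpolatable import (
    _vdiff_hypot2,
    min_cost_perfect_bipartite_matching,
)

m0 = [(10, 0, 0, 5, 5, 1), (40, 100, 100, 20, 20, 2)]  # master 1, two contours
m1 = [(40, 102, 98, 20, 20, 2), (10, 1, 1, 5, 5, 1)]   # master 2, same contours swapped

costs = [[_vdiff_hypot2(v0, v1) for v1 in m1] for v0 in m0]
matching, matching_cost = min_cost_perfect_bipartite_matching(costs)
identity_cost = sum(costs[i][i] for i in range(len(m0)))
if matching_cost < identity_cost * 0.95:
    print("contour order differs:", matching)  # prints [1, 0] here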
-def test(glyphsets, glyphs=None, names=None, ignore_missing=False):
+@wraps(test_gen)
+def test(*args, **kwargs):
problems = defaultdict(list)
- for glyphname, problem in test_gen(glyphsets, glyphs, names, ignore_missing):
+ for glyphname, problem in test_gen(*args, **kwargs):
problems[glyphname].append(problem)
return problems
@@ -401,6 +573,7 @@ def recursivelyAddGlyph(glyphname, glyphset, ttGlyphSet, glyf):
def main(args=None):
"""Test for interpolatability issues between fonts"""
import argparse
+ import sys
parser = argparse.ArgumentParser(
"fonttools varLib.interpolatable",
@@ -412,16 +585,37 @@ def main(args=None):
help="Space-separate name of glyphs to check",
)
parser.add_argument(
+ "--tolerance",
+ action="store",
+ type=float,
+ help="Error tolerance. Default 0.95",
+ )
+ parser.add_argument(
"--json",
action="store_true",
help="Output report in JSON format",
)
parser.add_argument(
+ "--pdf",
+ action="store",
+ help="Output report in PDF format",
+ )
+ parser.add_argument(
+ "--html",
+ action="store",
+ help="Output report in HTML format",
+ )
+ parser.add_argument(
"--quiet",
action="store_true",
help="Only exit with code 1 or 0, no output",
)
parser.add_argument(
+ "--output",
+ action="store",
+ help="Output file for the problem report; Default: stdout",
+ )
+ parser.add_argument(
"--ignore-missing",
action="store_true",
help="Will not report glyphs missing from sparse masters as errors",
@@ -447,21 +641,42 @@ def main(args=None):
fonts = []
names = []
+ locations = []
if len(args.inputs) == 1:
+ designspace = None
if args.inputs[0].endswith(".designspace"):
from fontTools.designspaceLib import DesignSpaceDocument
designspace = DesignSpaceDocument.fromfile(args.inputs[0])
args.inputs = [master.path for master in designspace.sources]
+ locations = [master.location for master in designspace.sources]
+ axis_triples = {
+ a.name: (a.minimum, a.default, a.maximum) for a in designspace.axes
+ }
+ axis_mappings = {a.name: a.map for a in designspace.axes}
+ axis_triples = {
+ k: tuple(piecewiseLinearMap(v, dict(axis_mappings[k])) for v in vv)
+ for k, vv in axis_triples.items()
+ }
elif args.inputs[0].endswith(".glyphs"):
- from glyphsLib import GSFont, to_ufos
+ from glyphsLib import GSFont, to_designspace
gsfont = GSFont(args.inputs[0])
- fonts.extend(to_ufos(gsfont))
+ designspace = to_designspace(gsfont)
+ fonts = [source.font for source in designspace.sources]
names = ["%s-%s" % (f.info.familyName, f.info.styleName) for f in fonts]
args.inputs = []
+ locations = [master.location for master in designspace.sources]
+ axis_triples = {
+ a.name: (a.minimum, a.default, a.maximum) for a in designspace.axes
+ }
+ axis_mappings = {a.name: a.map for a in designspace.axes}
+ axis_triples = {
+ k: tuple(piecewiseLinearMap(v, dict(axis_mappings[k])) for v in vv)
+ for k, vv in axis_triples.items()
+ }
elif args.inputs[0].endswith(".ttf"):
from fontTools.ttLib import TTFont
@@ -515,20 +730,32 @@ def main(args=None):
names = ["''"]
fonts = [font.getGlyphSet()]
+ locations = [{}]
+ axis_triples = {a: (-1, 0, +1) for a in sorted(axisMapping.keys())}
for locTuple in sorted(glyphsets.keys(), key=lambda v: (len(v), v)):
name = (
"'"
+ " ".join(
- "%s=%s" % (k, piecewiseLinearMap(v, axisMapping[k]))
+ "%s=%s"
+ % (
+ k,
+ floatToFixedToStr(
+ piecewiseLinearMap(v, axisMapping[k]), 14
+ ),
+ )
for k, v in locTuple
)
+ "'"
)
names.append(name)
fonts.append(glyphsets[locTuple])
+ locations.append(dict(locTuple))
args.ignore_missing = True
args.inputs = []
+ if not locations:
+ locations = [{} for _ in fonts]
+
for filename in args.inputs:
if filename.endswith(".ufo"):
from fontTools.ufoLib import UFOReader
@@ -549,6 +776,9 @@ def main(args=None):
glyphset = font
glyphsets.append({k: glyphset[k] for k in glyphset.keys()})
+ if len(glyphsets) == 1:
+ return None
+
if not glyphs:
glyphs = sorted(set([gn for glyphset in glyphsets for gn in glyphset.keys()]))
@@ -560,12 +790,23 @@ def main(args=None):
for gn in diff:
glyphset[gn] = None
+ # Normalize locations
+ locations = [normalizeLocation(loc, axis_triples) for loc in locations]
+
log.info("Running on %d glyphsets", len(glyphsets))
+ log.info("Locations: %s", pformat(locations))
problems_gen = test_gen(
- glyphsets, glyphs=glyphs, names=names, ignore_missing=args.ignore_missing
+ glyphsets,
+ glyphs=glyphs,
+ names=names,
+ locations=locations,
+ ignore_missing=args.ignore_missing,
+ tolerance=args.tolerance or 0.95,
)
problems = defaultdict(list)
+ f = sys.stdout if args.output is None else open(args.output, "w")
+
if not args.quiet:
if args.json:
import json
@@ -573,24 +814,35 @@ def main(args=None):
for glyphname, problem in problems_gen:
problems[glyphname].append(problem)
- print(json.dumps(problems))
+ print(json.dumps(problems), file=f)
else:
last_glyphname = None
for glyphname, p in problems_gen:
problems[glyphname].append(p)
if glyphname != last_glyphname:
- print(f"Glyph {glyphname} was not compatible: ")
+ print(f"Glyph {glyphname} was not compatible:", file=f)
last_glyphname = glyphname
+ last_masters = None
+
+ masters = (
+ (p["master"]) if "master" in p else (p["master_1"], p["master_2"])
+ )
+ if masters != last_masters:
+ print(f" Masters: %s:" % ", ".join(masters), file=f)
+ last_masters = masters
if p["type"] == "missing":
- print(" Glyph was missing in master %s" % p["master"])
+ print(" Glyph was missing in master %s" % p["master"], file=f)
if p["type"] == "open_path":
- print(" Glyph has an open path in master %s" % p["master"])
+ print(
+ " Glyph has an open path in master %s" % p["master"], file=f
+ )
if p["type"] == "path_count":
print(
" Path count differs: %i in %s, %i in %s"
- % (p["value_1"], p["master_1"], p["value_2"], p["master_2"])
+ % (p["value_1"], p["master_1"], p["value_2"], p["master_2"]),
+ file=f,
)
if p["type"] == "node_count":
print(
@@ -601,7 +853,8 @@ def main(args=None):
p["master_1"],
p["value_2"],
p["master_2"],
- )
+ ),
+ file=f,
)
if p["type"] == "node_incompatibility":
print(
@@ -613,7 +866,8 @@ def main(args=None):
p["master_1"],
p["value_2"],
p["master_2"],
- )
+ ),
+ file=f,
)
if p["type"] == "contour_order":
print(
@@ -623,29 +877,57 @@ def main(args=None):
p["master_1"],
p["value_2"],
p["master_2"],
- )
+ ),
+ file=f,
)
if p["type"] == "wrong_start_point":
print(
- " Contour %d start point differs: %s, %s"
+ " Contour %d start point differs: %s in %s, %s in %s; reversed: %s"
% (
p["contour"],
+ p["value_1"],
p["master_1"],
+ p["value_2"],
p["master_2"],
- )
- )
- if p["type"] == "math_error":
- print(
- " Miscellaneous error in %s: %s"
- % (
- p["master"],
- p["error"],
- )
+ p["reversed"],
+ ),
+ file=f,
)
else:
for glyphname, problem in problems_gen:
problems[glyphname].append(problem)
+ if args.pdf:
+ log.info("Writing PDF to %s", args.pdf)
+ from .interpolatablePlot import InterpolatablePDF
+
+ with InterpolatablePDF(args.pdf, glyphsets=glyphsets, names=names) as pdf:
+ pdf.add_problems(problems)
+ if not problems and not args.quiet:
+ pdf.draw_cupcake()
+
+ if args.html:
+ log.info("Writing HTML to %s", args.html)
+ from .interpolatablePlot import InterpolatableSVG
+
+ svgs = []
+ with InterpolatableSVG(svgs, glyphsets=glyphsets, names=names) as svg:
+ svg.add_problems(problems)
+ if not problems and not args.quiet:
+ svg.draw_cupcake()
+
+ import base64
+
+ with open(args.html, "wb") as f:
+ f.write(b"<!DOCTYPE html>\n")
+ f.write(b"<html><body align=center>\n")
+ for svg in svgs:
+ f.write("<img src='data:image/svg+xml;base64,".encode("utf-8"))
+ f.write(base64.b64encode(svg))
+ f.write(b"' />\n")
+ f.write(b"<hr>\n")
+ f.write(b"</body></html>\n")
+
if problems:
return problems
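For reference, a minimal sketch of driving the updated API directly with the new keyword-only arguments (hypothetical file names; assumes the masters share one glyph set and that the locations are already normalized to the -1..+1 design space):

# Sketch only; main() additionally normalizes locations and handles sparse
# masters, designspace/glyphs inputs, and the PDF/HTML reports.
from fontTools.ttLib import TTFont
from fontTools.varLib.interpolatable import test

fonts = [
    TTFont("MyFont-Light.ttf"),
    TTFont("MyFont-Regular.ttf"),
    TTFont("MyFont-Bold.ttf"),
]
glyphsets = [f.getGlyphSet() for f in fonts]
problems = test(
    glyphsets,
    names=["Light", "Regular", "Bold"],
    locations=[{"wght": -1}, {"wght": 0}, {"wght": 1}],
    tolerance=0.95,
)
for glyph_name, issues in problems.items():
    print(glyph_name, issues)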