aesthetic cleanup
parent b1dfc9e8f7
commit 7ebc56a831
@@ -20,7 +20,8 @@ Example:
     >> path = result.path
     >> # Do something with the transformed Path object.
     >> element = result.element
-    >> # Inspect the raw SVG element. This gives access to the path's attributes
+    >> # Inspect the raw SVG element. This gives access to the
+    >> # path's attributes
     >> transform = result.transform
     >> # Use the transform that was applied to the path.
     >> foo(doc.tree) # do stuff using ElementTree's functionality
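Note: the Example above iterates over Document.flatten_all_paths(). A minimal sketch of how that loop might look in practice is shown below; the file name 'drawing.svg' and the print call are illustrative assumptions, not part of the commit.

    from svgpathtools import Document

    doc = Document('drawing.svg')          # hypothetical input file
    for result in doc.flatten_all_paths():
        path = result.path                 # transformed Path object
        element = result.element           # raw SVG element (ElementTree Element)
        transform = result.transform       # 3x3 matrix that was applied
        print(element.get('id'), len(path), transform)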
@@ -38,14 +39,14 @@ from __future__ import division, absolute_import, print_function
 import os
 import collections
 import xml.etree.ElementTree as etree
-from xml.etree.ElementTree import Element, SubElement, register_namespace, _namespace_map
+from xml.etree.ElementTree import Element, SubElement, register_namespace
 import warnings

 # Internal dependencies
 from .parser import parse_path
 from .parser import parse_transform
-from .svg_to_paths import (path2pathd, ellipse2pathd, line2pathd, polyline2pathd,
-                           polygon2pathd, rect2pathd)
+from .svg_to_paths import (path2pathd, ellipse2pathd, line2pathd,
+                           polyline2pathd, polygon2pathd, rect2pathd)
 from .misctools import open_in_browser
 from .path import *

@@ -73,58 +74,71 @@ CONVERT_ONLY_PATHS = {'path': path2pathd}
 SVG_GROUP_TAG = 'svg:g'


-def flatten_all_paths(
-        group,
-        group_filter=lambda x: True,
-        path_filter=lambda x: True,
-        path_conversions=CONVERSIONS,
-        group_search_xpath=SVG_GROUP_TAG):
-    """Returns the paths inside a group (recursively), expressing the paths in the base coordinates.
+def flatten_all_paths(group, group_filter=lambda x: True,
+                      path_filter=lambda x: True, path_conversions=CONVERSIONS,
+                      group_search_xpath=SVG_GROUP_TAG):
+    """Returns the paths inside a group (recursively), expressing the
+    paths in the base coordinates.

-    Note that if the group being passed in is nested inside some parent group(s), we cannot take the parent group(s)
-    into account, because xml.etree.Element has no pointer to its parent. You should use Document.flatten_group(group)
-    to flatten a specific nested group into the root coordinates.
+    Note that if the group being passed in is nested inside some parent
+    group(s), we cannot take the parent group(s) into account, because
+    xml.etree.Element has no pointer to its parent. You should use
+    Document.flatten_group(group) to flatten a specific nested group into
+    the root coordinates.

     Args:
         group is an Element
-        path_conversions (dict): A dictionary to convert from an SVG element to a path data string. Any element tags
-            that are not included in this dictionary will be ignored (including the `path` tag).
-            To only convert explicit path elements, pass in path_conversions=CONVERT_ONLY_PATHS.
+        path_conversions (dict):
+            A dictionary to convert from an SVG element to a path data
+            string. Any element tags that are not included in this
+            dictionary will be ignored (including the `path` tag). To
+            only convert explicit path elements, pass in
+            `path_conversions=CONVERT_ONLY_PATHS`.
     """
     if not isinstance(group, Element):
-        raise TypeError('Must provide an xml.etree.Element object. Instead you provided {0}'.format(type(group)))
+        raise TypeError('Must provide an xml.etree.Element object. '
+                        'Instead you provided {0}'.format(type(group)))

     # Stop right away if the group_selector rejects this group
     if not group_filter(group):
         return []

-    # To handle the transforms efficiently, we'll traverse the tree of groups depth-first using a stack of tuples.
-    # The first entry in the tuple is a group element and the second entry is its transform. As we pop each entry in
-    # the stack, we will add all its child group elements to the stack.
-    StackElement = collections.namedtuple('StackElement', ['group', 'transform'])
+    # To handle the transforms efficiently, we'll traverse the tree of
+    # groups depth-first using a stack of tuples.
+    # The first entry in the tuple is a group element and the second
+    # entry is its transform. As we pop each entry in the stack, we
+    # will add all its child group elements to the stack.
+    StackElement = collections.namedtuple('StackElement',
+                                          ['group', 'transform'])

     def new_stack_element(element, last_tf):
-        return StackElement(element, last_tf.dot(parse_transform(element.get('transform'))))
+        return StackElement(element, last_tf.dot(
+            parse_transform(element.get('transform'))))

     def get_relevant_children(parent, last_tf):
         children = []
-        for elem in filter(group_filter, parent.iterfind(group_search_xpath, SVG_NAMESPACE)):
+        for elem in filter(group_filter,
+                           parent.iterfind(group_search_xpath, SVG_NAMESPACE)):
             children.append(new_stack_element(elem, last_tf))
         return children

     stack = [new_stack_element(group, np.identity(3))]

-    FlattenedPath = collections.namedtuple('FlattenedPath', ['path', 'element', 'transform'])
+    FlattenedPath = collections.namedtuple('FlattenedPath',
+                                           ['path', 'element', 'transform'])
     paths = []

     while stack:
         top = stack.pop()

-        # For each element type that we know how to convert into path data, parse the element after confirming that
-        # the path_filter accepts it.
+        # For each element type that we know how to convert into path
+        # data, parse the element after confirming that the path_filter
+        # accepts it.
         for key, converter in path_conversions.items():
-            for path_elem in filter(path_filter, top.group.iterfind('svg:'+key, SVG_NAMESPACE)):
-                path_tf = top.transform.dot(parse_transform(path_elem.get('transform')))
+            for path_elem in filter(path_filter, top.group.iterfind(
+                    'svg:'+key, SVG_NAMESPACE)):
+                path_tf = top.transform.dot(
+                    parse_transform(path_elem.get('transform')))
                 path = transform(parse_path(converter(path_elem)), path_tf)
                 paths.append(FlattenedPath(path, path_elem, path_tf))

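Note: as the docstring above explains, path_conversions controls which SVG element tags get converted into paths. An illustrative sketch restricted to explicit <path> elements, assuming both names are importable from svgpathtools.document where the diff defines them (the file name and group id are invented):

    from svgpathtools.document import Document, CONVERT_ONLY_PATHS

    doc = Document('drawing.svg')   # hypothetical input file
    results = doc.flatten_all_paths(
        group_filter=lambda g: g.get('id') != 'hidden-layer',
        path_conversions=CONVERT_ONLY_PATHS)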
@@ -143,16 +157,20 @@ def flatten_group(
         group_search_xpath=SVG_GROUP_TAG):
     """Flatten all the paths in a specific group.

-    The paths will be flattened into the 'root' frame. Note that root needs to be
-    an ancestor of the group that is being flattened. Otherwise, no paths will be returned."""
+    The paths will be flattened into the 'root' frame. Note that root
+    needs to be an ancestor of the group that is being flattened.
+    Otherwise, no paths will be returned."""

     if not any(group_to_flatten is descendant for descendant in root.iter()):
-        warnings.warn('The requested group_to_flatten is not a descendant of root')
-        # We will shortcut here, because it is impossible for any paths to be returned anyhow.
+        warnings.warn('The requested group_to_flatten is not a '
+                      'descendant of root')
+        # We will shortcut here, because it is impossible for any paths
+        # to be returned anyhow.
         return []

-    # We create a set of the unique IDs of each element that we wish to flatten, if those elements are groups.
-    # Any groups outside of this set will be skipped while we flatten the paths.
+    # We create a set of the unique IDs of each element that we wish to
+    # flatten, if those elements are groups. Any groups outside of this
+    # set will be skipped while we flatten the paths.
     desired_groups = set()
     if recursive:
         for group in group_to_flatten.iter():
@@ -163,7 +181,8 @@ def flatten_group(
     def desired_group_filter(x):
         return (id(x) in desired_groups) and group_filter(x)

-    return flatten_all_paths(root, desired_group_filter, path_filter, path_conversions, group_search_xpath)
+    return flatten_all_paths(root, desired_group_filter, path_filter,
+                             path_conversions, group_search_xpath)


 class Document:
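Note: flatten_group() only returns paths when root is an ancestor of the group being flattened, as the docstring above warns. A small illustrative sketch of the Document-level wrapper (the group name is invented; a list of strings names a nested group, and an Element works as well):

    from svgpathtools import Document

    doc = Document('drawing.svg')       # hypothetical input file
    for result in doc.flatten_group(['layer1']):
        print(result.path.d())          # path re-expressed in root coordinates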
@@ -199,8 +218,10 @@ class Document:
                           group_filter=lambda x: True,
                           path_filter=lambda x: True,
                           path_conversions=CONVERSIONS):
-        """Forward the tree of this document into the more general flatten_all_paths function and return the result."""
-        return flatten_all_paths(self.tree.getroot(), group_filter, path_filter, path_conversions)
+        """Forward the tree of this document into the more general
+        flatten_all_paths function and return the result."""
+        return flatten_all_paths(self.tree.getroot(), group_filter,
+                                 path_filter, path_conversions)

     def flatten_group(self,
                       group,
@@ -209,33 +230,40 @@ class Document:
                       path_filter=lambda x: True,
                       path_conversions=CONVERSIONS):
         if all(isinstance(s, str) for s in group):
-            # If we're given a list of strings, assume it represents a nested sequence
+            # If we're given a list of strings, assume it represents a
+            # nested sequence
             group = self.get_or_add_group(group)
         elif not isinstance(group, Element):
-            raise TypeError('Must provide a list of strings that represent a nested group name, '
-                            'or provide an xml.etree.Element object. Instead you provided {0}'.format(group))
+            raise TypeError(
+                'Must provide a list of strings that represent a nested '
+                'group name, or provide an xml.etree.Element object. '
+                'Instead you provided {0}'.format(group))

-        return flatten_group(group, self.tree.getroot(), recursive, group_filter, path_filter, path_conversions)
+        return flatten_group(group, self.tree.getroot(), recursive,
+                             group_filter, path_filter, path_conversions)

     def add_path(self, path, attribs=None, group=None):
         """Add a new path to the SVG."""

-        # If we are not given a parent, assume that the path does not have a group
+        # If not given a parent, assume that the path does not have a group
         if group is None:
             group = self.tree.getroot()

-        # If we are given a list of strings (one or more), assume it represents a sequence of nested group names
+        # If given a list of strings (one or more), assume it represents
+        # a sequence of nested group names
         elif all(isinstance(elem, str) for elem in group):
             group = self.get_or_add_group(group)

         elif not isinstance(group, Element):
-            raise TypeError('Must provide a list of strings or an xml.etree.Element object. '
-                            'Instead you provided {0}'.format(group))
+            raise TypeError(
+                'Must provide a list of strings or an xml.etree.Element '
+                'object. Instead you provided {0}'.format(group))

         else:
             # Make sure that the group belongs to this Document object
             if not self.contains_group(group):
-                warnings.warn('The requested group does not belong to this Document')
+                warnings.warn('The requested group does not belong to '
+                              'this Document')

         # TODO: It might be better to use duck-typing here with a try-except
         if isinstance(path, Path):
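Note: add_path() accepts a Path, a path segment, or a d-string, and its group argument may be a list of nested group names, per the branches above. An illustrative sketch (coordinates, group names, and output file are invented):

    from svgpathtools import Document, Path, Line

    doc = Document(None)                              # empty document
    doc.add_path(Path(Line(0 + 0j, 100 + 50j)),
                 group=['layer1', 'details'])         # groups are created on demand
    doc.save('out.svg')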
@@ -243,11 +271,13 @@ class Document:
         elif is_path_segment(path):
             path_svg = Path(path).d()
         elif isinstance(path, str):
-            # Assume this is a valid d-string. TODO: Should we sanity check the input string?
+            # Assume this is a valid d-string.
+            # TODO: Should we sanity check the input string?
             path_svg = path
         else:
-            raise TypeError('Must provide a Path, a path segment type, or a valid SVG path d-string. '
-                            'Instead you provided {0}'.format(path))
+            raise TypeError(
+                'Must provide a Path, a path segment type, or a valid '
+                'SVG path d-string. Instead you provided {0}'.format(path))

         if attribs is None:
             attribs = {}
@@ -262,16 +292,20 @@ class Document:
         return any(group is owned for owned in self.tree.iter())

     def get_or_add_group(self, nested_names, name_attr='id'):
-        """Get a group from the tree, or add a new one with the given name structure.
+        """Get a group from the tree, or add a new one with the given
+        name structure.

-        *nested_names* is a list of strings which represent group names. Each group name will be nested inside of the
-        previous group name.
+        `nested_names` is a list of strings which represent group names.
+        Each group name will be nested inside of the previous group name.

-        *name_attr* is the group attribute that is being used to represent the group's name. Default is 'id', but some
-        SVGs may contain custom name labels, like 'inkscape:label'.
+        `name_attr` is the group attribute that is being used to
+        represent the group's name. Default is 'id', but some SVGs may
+        contain custom name labels, like 'inkscape:label'.

-        Returns the requested group. If the requested group did not exist, this function will create it, as well as all
-        parent groups that it requires. All created groups will be left with blank attributes.
+        Returns the requested group. If the requested group did not
+        exist, this function will create it, as well as all parent
+        groups that it requires. All created groups will be left with
+        blank attributes.

         """
         group = self.tree.getroot()
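Note: a small illustration of the create-then-reuse behaviour described in the docstring above (the group names are invented):

    from svgpathtools import Document

    doc = Document(None)
    # Creates <g id="figures"> under the root and <g id="arrows"> inside it,
    # returning the innermost group.
    arrows = doc.get_or_add_group(['figures', 'arrows'])
    # Asking again finds the existing structure instead of duplicating it.
    assert arrows == doc.get_or_add_group(['figures', 'arrows'])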
@@ -285,14 +319,15 @@ class Document:
                     break

             if prev_group is group:
-                # The group we're looking for does not exist, so let's create the group structure
+                # The group we're looking for does not exist, so let's
+                # create the group structure
                 nested_names.insert(0, next_name)

                 while nested_names:
                     next_name = nested_names.pop(0)
                     group = self.add_group({'id': next_name}, group)
-                # Now nested_names will be empty, so the topmost while-loop will end
+                # Now nested_names will be empty, so the topmost
+                # while-loop will end
         return group

     def add_group(self, group_attribs=None, parent=None):
@@ -300,14 +335,16 @@ class Document:
         if parent is None:
             parent = self.tree.getroot()
         elif not self.contains_group(parent):
-            warnings.warn('The requested group {0} does not belong to this Document'.format(parent))
+            warnings.warn('The requested group {0} does not belong to '
+                          'this Document'.format(parent))

         if group_attribs is None:
             group_attribs = {}
         else:
             group_attribs = group_attribs.copy()

-        return SubElement(parent, '{{{0}}}g'.format(SVG_NAMESPACE['svg']), group_attribs)
+        return SubElement(parent, '{{{0}}}g'.format(
+            SVG_NAMESPACE['svg']), group_attribs)

     def save(self, filename=None):
         if filename is None:
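Note: the '{{{0}}}g'.format(...) expression above builds an ElementTree tag in Clark notation. A quick illustration, using the standard SVG namespace URI that SVG_NAMESPACE maps to:

    SVG_NAMESPACE = {'svg': 'http://www.w3.org/2000/svg'}
    tag = '{{{0}}}g'.format(SVG_NAMESPACE['svg'])
    print(tag)  # -> {http://www.w3.org/2000/svg}g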
@@ -269,7 +269,8 @@ def transform(curve, tf):
     if isinstance(curve, Path):
         return Path(*[transform(segment, tf) for segment in curve])
     elif is_bezier_segment(curve):
-        return bpoints2bezier([to_complex(tf.dot(to_point(p))) for p in curve.bpoints()])
+        return bpoints2bezier([to_complex(tf.dot(to_point(p)))
+                               for p in curve.bpoints()])
     elif isinstance(curve, Arc):
         new_start = to_complex(tf.dot(to_point(curve.start)))
         new_end = to_complex(tf.dot(to_point(curve.end)))
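Note: transform() applies a 3x3 affine matrix to points that svgpathtools stores as complex numbers; to_point/to_complex convert between the two representations. A sketch of that round trip, with the helper bodies written out for illustration rather than copied from path.py:

    import numpy as np

    def to_point(p):
        # complex point -> homogeneous column vector
        return np.array([[p.real], [p.imag], [1.0]])

    def to_complex(v):
        # homogeneous column vector -> complex point
        return complex(v.item(0), v.item(1))

    tf = np.array([[1.0, 0.0, 10.0],    # translate x by 10
                   [0.0, 1.0, 5.0],     # translate y by 5
                   [0.0, 0.0, 1.0]])
    print(to_complex(tf.dot(to_point(3 + 4j))))  # -> (13+9j)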
@@ -458,12 +459,15 @@ def inv_arclength(curve, s, s_tol=ILENGTH_S_TOL, maxits=ILENGTH_MAXITS,
         return 1

     if isinstance(curve, Path):
-        seg_lengths = [seg.length(error=error, min_depth=min_depth) for seg in curve]
+        seg_lengths = [seg.length(error=error, min_depth=min_depth)
+                       for seg in curve]
         lsum = 0
         # Find which segment the point we search for is located on
         for k, len_k in enumerate(seg_lengths):
             if lsum <= s <= lsum + len_k:
-                t = inv_arclength(curve[k], s - lsum, s_tol=s_tol, maxits=maxits, error=error, min_depth=min_depth)
+                t = inv_arclength(curve[k], s - lsum, s_tol=s_tol,
+                                  maxits=maxits, error=error,
+                                  min_depth=min_depth)
                 return curve.t2T(k, t)
             lsum += len_k
         return 1
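Note: inv_arclength() answers "at which parameter T have I travelled s units of arclength along the curve?". An illustrative usage sketch through Path.ilength(), which delegates to inv_arclength (the coordinates are invented):

    from svgpathtools import Path, Line

    path = Path(Line(0 + 0j, 30 + 40j),        # length 50
                Line(30 + 40j, 30 + 140j))     # length 100
    T = path.ilength(75)
    print(round(path.length(0, T), 3))         # ~75.0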
@@ -1694,7 +1698,6 @@ class Arc(object):
-        xmin = max(xtrema)
         return min(xtrema), max(xtrema), min(ytrema), max(ytrema)


     def split(self, t):
         """returns two segments, whose union is this segment and which join
         at self.point(t)."""
@@ -1716,48 +1719,46 @@ class Arc(object):
         and maximize, respectively, the distance,
         d = |self.point(t)-origin|."""

-        u1orig = self.u1transform(origin)
-        if abs(u1orig) == 1: # origin lies on ellipse
-            t = self.phase2t(phase(u1orig))
-            d_min = 0
-
-        # Transform to a coordinate system where the ellipse is centered
-        # at the origin and its axes are horizontal/vertical
-        zeta0 = self.centeriso(origin)
-        a, b = self.radius.real, self.radius.imag
-        x0, y0 = zeta0.real, zeta0.imag
-
-        # Find t s.t. z'(t)
-        a2mb2 = (a**2 - b**2)
-        if u1orig.imag: # x != x0
-            coeffs = [a2mb2**2,
-                      2*a2mb2*b**2*y0,
-                      (-a**4 + (2*a**2 - b**2 + y0**2)*b**2 + x0**2)*b**2,
-                      -2*a2mb2*b**4*y0,
-                      -b**6*y0**2]
-            ys = polyroots(coeffs, realroots=True,
-                           condition=lambda r: -b <= r <= b)
-            xs = (a*sqrt(1 - y**2/b**2) for y in ys)
-
-            ts = [self.phase2t(phase(self.u1transform(self.icenteriso(
-                complex(x, y))))) for x, y in zip(xs, ys)]
-
-        else: # This case is very similar, see notes and assume instead y0!=y
-            b2ma2 = (b**2 - a**2)
-            coeffs = [b2ma2**2,
-                      2*b2ma2*a**2*x0,
-                      (-b**4 + (2*b**2 - a**2 + x0**2)*a**2 + y0**2)*a**2,
-                      -2*b2ma2*a**4*x0,
-                      -a**6*x0**2]
-            xs = polyroots(coeffs, realroots=True,
-                           condition=lambda r: -a <= r <= a)
-            ys = (b*sqrt(1 - x**2/a**2) for x in xs)
-
-            ts = [self.phase2t(phase(self.u1transform(self.icenteriso(
-                complex(x, y))))) for x, y in zip(xs, ys)]
+        # u1orig = self.u1transform(origin)
+        # if abs(u1orig) == 1: # origin lies on ellipse
+        #     t = self.phase2t(phase(u1orig))
+        #     d_min = 0
+        #
+        # # Transform to a coordinate system where the ellipse is centered
+        # # at the origin and its axes are horizontal/vertical
+        # zeta0 = self.centeriso(origin)
+        # a, b = self.radius.real, self.radius.imag
+        # x0, y0 = zeta0.real, zeta0.imag
+        #
+        # # Find t s.t. z'(t)
+        # a2mb2 = (a**2 - b**2)
+        # if u1orig.imag: # x != x0
+        #     coeffs = [a2mb2**2,
+        #               2*a2mb2*b**2*y0,
+        #               (-a**4 + (2*a**2 - b**2 + y0**2)*b**2 + x0**2)*b**2,
+        #               -2*a2mb2*b**4*y0,
+        #               -b**6*y0**2]
+        #     ys = polyroots(coeffs, realroots=True,
+        #                    condition=lambda r: -b <= r <= b)
+        #     xs = (a*sqrt(1 - y**2/b**2) for y in ys)
+        #
+        #     ts = [self.phase2t(phase(self.u1transform(self.icenteriso(
+        #         complex(x, y))))) for x, y in zip(xs, ys)]
+        #
+        # else: # This case is very similar, see notes and assume instead y0!=y
+        #     b2ma2 = (b**2 - a**2)
+        #     coeffs = [b2ma2**2,
+        #               2*b2ma2*a**2*x0,
+        #               (-b**4 + (2*b**2 - a**2 + x0**2)*a**2 + y0**2)*a**2,
+        #               -2*b2ma2*a**4*x0,
+        #               -a**6*x0**2]
+        #     xs = polyroots(coeffs, realroots=True,
+        #                    condition=lambda r: -a <= r <= a)
+        #     ys = (b*sqrt(1 - x**2/a**2) for x in xs)
+        #
+        #     ts = [self.phase2t(phase(self.u1transform(self.icenteriso(
+        #         complex(x, y))))) for x, y in zip(xs, ys)]

         raise _NotImplemented4ArcException

@@ -2156,7 +2157,8 @@ class Path(MutableSequence):
                 (seg_idx - 1) % len(self._segments)]
             if not seg.joins_smoothly_with(previous_seg_in_path):
                 return float('inf')
-        elif np.isclose(t, 1) and (seg_idx != len(self) - 1 or self.end==self.start):
+        elif np.isclose(t, 1) and (seg_idx != len(self) - 1 or
+                                   self.end == self.start):
             next_seg_in_path = self._segments[
                 (seg_idx + 1) % len(self._segments)]
             if not next_seg_in_path.joins_smoothly_with(seg):
@@ -2223,7 +2225,8 @@ class Path(MutableSequence):
         # redundant intersection. This code block checks for and removes said
         # redundancies.
         if intersection_list:
-            pts = [seg1.point(_t1) for _T1, _seg1, _t1 in list(zip(*intersection_list))[0]]
+            pts = [seg1.point(_t1)
+                   for _T1, _seg1, _t1 in list(zip(*intersection_list))[0]]
             indices2remove = []
             for ind1 in range(len(pts)):
                 for ind2 in range(ind1 + 1, len(pts)):
@@ -6,13 +6,15 @@ import numpy as np


 def get_desired_path(name, paths):
-    return next(p for p in paths if p.element.get('{some://testuri}name') == name)
+    return next(p for p in paths
+                if p.element.get('{some://testuri}name') == name)


 class TestGroups(unittest.TestCase):

     def check_values(self, v, z):
-        # Check that the components of 2D vector v match the components of complex number z
+        # Check that the components of 2D vector v match the components
+        # of complex number z
         self.assertAlmostEqual(v[0], z.real)
         self.assertAlmostEqual(v[1], z.imag)

@@ -20,8 +22,10 @@ class TestGroups(unittest.TestCase):
         # Check that the endpoints of the line have been correctly transformed.
         # * tf is the transform that should have been applied.
         # * v_s_vals is a 2D list of the values of the line's start point
-        # * v_e_relative_vals is a 2D list of the values of the line's end point relative to the start point
-        # * name is the path name (value of the test:name attribute in the SVG document)
+        # * v_e_relative_vals is a 2D list of the values of the line's
+        #   end point relative to the start point
+        # * name is the path name (value of the test:name attribute in
+        #   the SVG document)
         # * paths is the output of doc.flatten_all_paths()
         v_s_vals.append(1.0)
         v_e_relative_vals.append(0.0)
@@ -34,22 +38,30 @@ class TestGroups(unittest.TestCase):
         self.check_values(tf.dot(v_e), actual.path.end)

     def test_group_flatten(self):
-        # Test the Document.flatten_all_paths() function against the groups.svg test file.
-        # There are 12 paths in that file, with various levels of being nested inside of group transforms.
-        # The check_line function is used to reduce the boilerplate, since all the tests are very similar.
-        # This test covers each of the different types of transforms that are specified by the SVG standard.
+        # Test the Document.flatten_all_paths() function against the
+        # groups.svg test file.
+        # There are 12 paths in that file, with various levels of being
+        # nested inside of group transforms.
+        # The check_line function is used to reduce the boilerplate,
+        # since all the tests are very similar.
+        # This test covers each of the different types of transforms
+        # that are specified by the SVG standard.
         doc = Document(join(dirname(__file__), 'groups.svg'))

         result = doc.flatten_all_paths()
         self.assertEqual(12, len(result))

-        tf_matrix_group = np.array([[1.5, 0.0, -40.0], [0.0, 0.5, 20.0], [0.0, 0.0, 1.0]])
+        tf_matrix_group = np.array([[1.5, 0.0, -40.0],
+                                    [0.0, 0.5, 20.0],
+                                    [0.0, 0.0, 1.0]])

         self.check_line(tf_matrix_group,
                         [183, 183], [0.0, -50],
                         'path00', result)

-        tf_scale_group = np.array([[1.25, 0.0, 0.0], [0.0, 1.25, 0.0], [0.0, 0.0, 1.0]])
+        tf_scale_group = np.array([[1.25, 0.0, 0.0],
+                                   [0.0, 1.25, 0.0],
+                                   [0.0, 0.0, 1.0]])

         self.check_line(tf_matrix_group.dot(tf_scale_group),
                         [122, 320], [-50.0, 0.0],
@@ -63,19 +75,27 @@ class TestGroups(unittest.TestCase):
                         [150, 200], [-50, 25],
                         'path03', result)

-        tf_nested_translate_group = np.array([[1, 0, 20], [0, 1, 0], [0, 0, 1]])
+        tf_nested_translate_group = np.array([[1, 0, 20],
+                                              [0, 1, 0],
+                                              [0, 0, 1]])

-        self.check_line(tf_matrix_group.dot(tf_scale_group).dot(tf_nested_translate_group),
+        self.check_line(tf_matrix_group.dot(tf_scale_group
+                                            ).dot(tf_nested_translate_group),
                         [150, 200], [-50, 25],
                         'path04', result)

-        tf_nested_translate_xy_group = np.array([[1, 0, 20], [0, 1, 30], [0, 0, 1]])
+        tf_nested_translate_xy_group = np.array([[1, 0, 20],
+                                                 [0, 1, 30],
+                                                 [0, 0, 1]])

-        self.check_line(tf_matrix_group.dot(tf_scale_group).dot(tf_nested_translate_xy_group),
+        self.check_line(tf_matrix_group.dot(tf_scale_group
+                                            ).dot(tf_nested_translate_xy_group),
                         [150, 200], [-50, 25],
                         'path05', result)

-        tf_scale_xy_group = np.array([[0.5, 0, 0], [0, 1.5, 0.0], [0, 0, 1]])
+        tf_scale_xy_group = np.array([[0.5, 0, 0],
+                                      [0, 1.5, 0.0],
+                                      [0, 0, 1]])

         self.check_line(tf_matrix_group.dot(tf_scale_xy_group),
                         [122, 320], [-50, 0],
@@ -92,35 +112,46 @@ class TestGroups(unittest.TestCase):

         a_08 = 45.0*np.pi/180.0
         tf_rotate_xy_group_R = np.array([[np.cos(a_08), -np.sin(a_08), 0],
                                          [np.sin(a_08), np.cos(a_08), 0],
                                          [0, 0, 1]])
-        tf_rotate_xy_group_T = np.array([[1, 0, 183], [0, 1, 183], [0, 0, 1]])
-        tf_rotate_xy_group = tf_rotate_xy_group_T.dot(tf_rotate_xy_group_R).dot(np.linalg.inv(tf_rotate_xy_group_T))
+        tf_rotate_xy_group_T = np.array([[1, 0, 183],
+                                         [0, 1, 183],
+                                         [0, 0, 1]])
+        tf_rotate_xy_group = tf_rotate_xy_group_T.dot(
+            tf_rotate_xy_group_R).dot(
+            np.linalg.inv(tf_rotate_xy_group_T))

         self.check_line(tf_matrix_group.dot(tf_rotate_xy_group),
                         [183, 183], [0, 30],
                         'path08', result)

         a_09 = 5.0*np.pi/180.0
-        tf_skew_x_group = np.array([[1, np.tan(a_09), 0], [0, 1, 0], [0, 0, 1]])
+        tf_skew_x_group = np.array([[1, np.tan(a_09), 0],
+                                    [0, 1, 0],
+                                    [0, 0, 1]])

         self.check_line(tf_matrix_group.dot(tf_skew_x_group),
                         [183, 183], [40, 40],
                         'path09', result)

         a_10 = 5.0*np.pi/180.0
-        tf_skew_y_group = np.array([[1, 0, 0], [np.tan(a_10), 1, 0], [0, 0, 1]])
+        tf_skew_y_group = np.array([[1, 0, 0],
+                                    [np.tan(a_10), 1, 0],
+                                    [0, 0, 1]])

         self.check_line(tf_matrix_group.dot(tf_skew_y_group),
                         [183, 183], [40, 40],
                         'path10', result)

-        # This last test is for handling transforms that are defined as attributes of a <path> element.
+        # This last test is for handling transforms that are defined as
+        # attributes of a <path> element.
         a_11 = -40*np.pi/180.0
         tf_path11_R = np.array([[np.cos(a_11), -np.sin(a_11), 0],
                                 [np.sin(a_11), np.cos(a_11), 0],
                                 [0, 0, 1]])
-        tf_path11_T = np.array([[1, 0, 100], [0, 1, 100], [0, 0, 1]])
+        tf_path11_T = np.array([[1, 0, 100],
+                                [0, 1, 100],
+                                [0, 0, 1]])
         tf_path11 = tf_path11_T.dot(tf_path11_R).dot(np.linalg.inv(tf_path11_T))

         self.check_line(tf_matrix_group.dot(tf_skew_y_group).dot(tf_path11),
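Note: both tf_rotate_xy_group and tf_path11 above rotate about a point other than the origin by conjugating a rotation with a translation, T.dot(R).dot(inv(T)), which is what SVG's rotate(angle, cx, cy) means. A small standalone illustration:

    import numpy as np

    def rotate_about(deg, cx, cy):
        a = np.deg2rad(deg)
        R = np.array([[np.cos(a), -np.sin(a), 0],
                      [np.sin(a),  np.cos(a), 0],
                      [0, 0, 1]])
        T = np.array([[1, 0, cx], [0, 1, cy], [0, 0, 1]])
        # move the pivot to the origin, rotate, move it back
        return T.dot(R).dot(np.linalg.inv(T))

    M = rotate_about(45, 183, 183)
    print(M.dot([183, 183, 1]))  # the pivot is a fixed point: [183. 183. 1.]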
@@ -129,13 +160,13 @@ class TestGroups(unittest.TestCase):

     def check_group_count(self, doc, expected_count):
         count = 0
-        for group in doc.tree.getroot().iter('{{{0}}}g'.format(SVG_NAMESPACE['svg'])):
+        for _ in doc.tree.getroot().iter('{{{0}}}g'.format(SVG_NAMESPACE['svg'])):
             count += 1

         self.assertEqual(expected_count, count)

     def test_add_group(self):
-        # Test the Document.add_group() function and related Document functions.
+        # Test `Document.add_group()` function and related Document functions.
         doc = Document(None)
         self.check_group_count(doc, 0)

@@ -161,21 +192,30 @@ class TestGroups(unittest.TestCase):

         # Test that we can retrieve each new group from the document
         self.assertEqual(base_group, doc.get_or_add_group(['base_group']))
-        self.assertEqual(child_group, doc.get_or_add_group(['base_group', 'child_group']))
-        self.assertEqual(grandchild_group, doc.get_or_add_group(['base_group', 'child_group', 'grandchild_group']))
-        self.assertEqual(sibling_group, doc.get_or_add_group(['base_group', 'sibling_group']))
+        self.assertEqual(child_group, doc.get_or_add_group(
+            ['base_group', 'child_group']))
+        self.assertEqual(grandchild_group, doc.get_or_add_group(
+            ['base_group', 'child_group', 'grandchild_group']))
+        self.assertEqual(sibling_group, doc.get_or_add_group(
+            ['base_group', 'sibling_group']))

         # Create a new nested group
-        new_child = doc.get_or_add_group(['base_group', 'new_parent', 'new_child'])
+        new_child = doc.get_or_add_group(
+            ['base_group', 'new_parent', 'new_child'])
         self.check_group_count(doc, 6)
-        self.assertEqual(new_child, doc.get_or_add_group(['base_group', 'new_parent', 'new_child']))
+        self.assertEqual(new_child, doc.get_or_add_group(
+            ['base_group', 'new_parent', 'new_child']))

-        new_leaf = doc.get_or_add_group(['base_group', 'new_parent', 'new_child', 'new_leaf'])
-        self.assertEqual(new_leaf, doc.get_or_add_group(['base_group', 'new_parent', 'new_child', 'new_leaf']))
+        new_leaf = doc.get_or_add_group(
+            ['base_group', 'new_parent', 'new_child', 'new_leaf'])
+        self.assertEqual(new_leaf, doc.get_or_add_group([
+            'base_group', 'new_parent', 'new_child', 'new_leaf']))
         self.check_group_count(doc, 7)

-        path_d = 'M 206.07112,858.41289 L 206.07112,-2.02031 C -50.738,-81.14814 -20.36402,-105.87055 ' \
-                 '52.52793,-101.01525 L 103.03556,0.0 L 0.0,111.11678'
+        path_d = ('M 206.07112,858.41289 L 206.07112,-2.02031 '
+                  'C -50.738,-81.14814 -20.36402,-105.87055 52.52793,-101.01525 '
+                  'L 103.03556,0.0 '
+                  'L 0.0,111.11678')

         svg_path = doc.add_path(path_d, group=new_leaf)
         self.assertEqual(path_d, svg_path.get('d'))
@@ -1,5 +1,4 @@
 # Note: This file was taken mostly as is from the svg.path module (v 2.0)
-#------------------------------------------------------------------------------
 from __future__ import division, absolute_import, print_function
 import unittest
 from svgpathtools import *
@@ -12,7 +11,8 @@ def construct_rotation_tf(a, x, y):
     tf_offset = np.identity(3)
     tf_offset[0:2, 2:3] = np.array([[x], [y]])
     tf_rotate = np.identity(3)
-    tf_rotate[0:2, 0:2] = np.array([[np.cos(a), -np.sin(a)], [np.sin(a), np.cos(a)]])
+    tf_rotate[0:2, 0:2] = np.array([[np.cos(a), -np.sin(a)],
+                                    [np.sin(a), np.cos(a)]])
     tf_offset_neg = np.identity(3)
     tf_offset_neg[0:2, 2:3] = np.array([[-x], [-y]])

@@ -31,11 +31,10 @@ class TestParser(unittest.TestCase):

         # for Z command behavior when there is multiple subpaths
         path1 = parse_path('M 0 0 L 50 20 M 100 100 L 300 100 L 200 300 z')
-        self.assertEqual(path1, Path(
-            Line(0 + 0j, 50 + 20j),
-            Line(100 + 100j, 300 + 100j),
-            Line(300 + 100j, 200 + 300j),
-            Line(200 + 300j, 100 + 100j)))
+        self.assertEqual(path1, Path(Line(0 + 0j, 50 + 20j),
+                                     Line(100 + 100j, 300 + 100j),
+                                     Line(300 + 100j, 200 + 300j),
+                                     Line(200 + 300j, 100 + 100j)))

         path1 = parse_path('M 100 100 L 200 200')
         path2 = parse_path('M100 100L200 200')
@@ -47,46 +46,68 @@ class TestParser(unittest.TestCase):

         path1 = parse_path("""M100,200 C100,100 250,100 250,200
 S400,300 400,200""")
-        self.assertEqual(path1,
-                         Path(CubicBezier(100 + 200j, 100 + 100j, 250 + 100j, 250 + 200j),
-                              CubicBezier(250 + 200j, 250 + 300j, 400 + 300j, 400 + 200j)))
+        self.assertEqual(path1, Path(CubicBezier(100 + 200j,
+                                                 100 + 100j,
+                                                 250 + 100j,
+                                                 250 + 200j),
+                                     CubicBezier(250 + 200j,
+                                                 250 + 300j,
+                                                 400 + 300j,
+                                                 400 + 200j)))

         path1 = parse_path('M100,200 C100,100 400,100 400,200')
-        self.assertEqual(path1,
-                         Path(CubicBezier(100 + 200j, 100 + 100j, 400 + 100j, 400 + 200j)))
+        self.assertEqual(path1, Path(CubicBezier(100 + 200j,
+                                                 100 + 100j,
+                                                 400 + 100j,
+                                                 400 + 200j)))

         path1 = parse_path('M100,500 C25,400 475,400 400,500')
-        self.assertEqual(path1,
-                         Path(CubicBezier(100 + 500j, 25 + 400j, 475 + 400j, 400 + 500j)))
+        self.assertEqual(path1, Path(CubicBezier(100 + 500j,
+                                                 25 + 400j,
+                                                 475 + 400j,
+                                                 400 + 500j)))

         path1 = parse_path('M100,800 C175,700 325,700 400,800')
-        self.assertEqual(path1,
-                         Path(CubicBezier(100 + 800j, 175 + 700j, 325 + 700j, 400 + 800j)))
+        self.assertEqual(path1, Path(CubicBezier(100 + 800j,
+                                                 175 + 700j,
+                                                 325 + 700j,
+                                                 400 + 800j)))

         path1 = parse_path('M600,200 C675,100 975,100 900,200')
-        self.assertEqual(path1,
-                         Path(CubicBezier(600 + 200j, 675 + 100j, 975 + 100j, 900 + 200j)))
+        self.assertEqual(path1, Path(CubicBezier(600 + 200j,
+                                                 675 + 100j,
+                                                 975 + 100j,
+                                                 900 + 200j)))

         path1 = parse_path('M600,500 C600,350 900,650 900,500')
-        self.assertEqual(path1,
-                         Path(CubicBezier(600 + 500j, 600 + 350j, 900 + 650j, 900 + 500j)))
+        self.assertEqual(path1, Path(CubicBezier(600 + 500j,
+                                                 600 + 350j,
+                                                 900 + 650j,
+                                                 900 + 500j)))

         path1 = parse_path("""M600,800 C625,700 725,700 750,800
 S875,900 900,800""")
-        self.assertEqual(path1,
-                         Path(CubicBezier(600 + 800j, 625 + 700j, 725 + 700j, 750 + 800j),
-                              CubicBezier(750 + 800j, 775 + 900j, 875 + 900j, 900 + 800j)))
+        self.assertEqual(path1, Path(CubicBezier(600 + 800j,
+                                                 625 + 700j,
+                                                 725 + 700j,
+                                                 750 + 800j),
+                                     CubicBezier(750 + 800j,
+                                                 775 + 900j,
+                                                 875 + 900j,
+                                                 900 + 800j)))

         path1 = parse_path('M200,300 Q400,50 600,300 T1000,300')
-        self.assertEqual(path1,
-                         Path(QuadraticBezier(200 + 300j, 400 + 50j, 600 + 300j),
-                              QuadraticBezier(600 + 300j, 800 + 550j, 1000 + 300j)))
+        self.assertEqual(path1, Path(QuadraticBezier(200 + 300j,
+                                                     400 + 50j,
+                                                     600 + 300j),
+                                     QuadraticBezier(600 + 300j,
+                                                     800 + 550j,
+                                                     1000 + 300j)))

         path1 = parse_path('M300,200 h-150 a150,150 0 1,0 150,-150 z')
-        self.assertEqual(path1,
-                         Path(Line(300 + 200j, 150 + 200j),
-                              Arc(150 + 200j, 150 + 150j, 0, 1, 0, 300 + 50j),
-                              Line(300 + 50j, 300 + 200j)))
+        self.assertEqual(path1, Path(Line(300 + 200j, 150 + 200j),
+                                     Arc(150 + 200j, 150 + 150j, 0, 1, 0, 300 + 50j),
+                                     Line(300 + 50j, 300 + 200j)))

         path1 = parse_path('M275,175 v-150 a150,150 0 0,0 -150,150 z')
         self.assertEqual(path1,
@@ -115,26 +136,32 @@ class TestParser(unittest.TestCase):

         # Relative moveto:
         path1 = parse_path('M 0 0 L 50 20 m 50 80 L 300 100 L 200 300 z')
-        self.assertEqual(path1, Path(
-            Line(0 + 0j, 50 + 20j),
-            Line(100 + 100j, 300 + 100j),
-            Line(300 + 100j, 200 + 300j),
-            Line(200 + 300j, 100 + 100j)))
+        self.assertEqual(path1, Path(Line(0 + 0j, 50 + 20j),
+                                     Line(100 + 100j, 300 + 100j),
+                                     Line(300 + 100j, 200 + 300j),
+                                     Line(200 + 300j, 100 + 100j)))

         # Initial smooth and relative CubicBezier
         path1 = parse_path("""M100,200 s 150,-100 150,0""")
         self.assertEqual(path1,
-                         Path(CubicBezier(100 + 200j, 100 + 200j, 250 + 100j, 250 + 200j)))
+                         Path(CubicBezier(100 + 200j,
+                                          100 + 200j,
+                                          250 + 100j,
+                                          250 + 200j)))

         # Initial smooth and relative QuadraticBezier
         path1 = parse_path("""M100,200 t 150,0""")
         self.assertEqual(path1,
-                         Path(QuadraticBezier(100 + 200j, 100 + 200j, 250 + 200j)))
+                         Path(QuadraticBezier(100 + 200j,
+                                              100 + 200j,
+                                              250 + 200j)))

         # Relative QuadraticBezier
         path1 = parse_path("""M100,200 q 0,0 150,0""")
         self.assertEqual(path1,
-                         Path(QuadraticBezier(100 + 200j, 100 + 200j, 250 + 200j)))
+                         Path(QuadraticBezier(100 + 200j,
+                                              100 + 200j,
+                                              250 + 200j)))

     def test_negative(self):
         """You don't need spaces before a minus-sign"""
@@ -144,20 +171,24 @@ class TestParser(unittest.TestCase):

     def test_numbers(self):
         """Exponents and other number format cases"""
-        # It can be e or E, the plus is optional, and a minimum of +/-3.4e38 must be supported.
+        # It can be e or E, the plus is optional, and a minimum of
+        # +/-3.4e38 must be supported.
         path1 = parse_path('M-3.4e38 3.4E+38L-3.4E-38,3.4e-38')
         path2 = Path(Line(-3.4e+38 + 3.4e+38j, -3.4e-38 + 3.4e-38j))
         self.assertEqual(path1, path2)

     def test_errors(self):
-        self.assertRaises(ValueError, parse_path, 'M 100 100 L 200 200 Z 100 200')
+        self.assertRaises(ValueError, parse_path,
+                          'M 100 100 L 200 200 Z 100 200')


     def test_transform(self):

-        tf_matrix = svgpathtools.parser.parse_transform('matrix(1.0 2.0 3.0 4.0 5.0 6.0)')
+        tf_matrix = svgpathtools.parser.parse_transform(
+            'matrix(1.0 2.0 3.0 4.0 5.0 6.0)')
         expected_tf_matrix = np.identity(3)
-        expected_tf_matrix[0:2, 0:3] = np.array([[1.0, 3.0, 5.0], [2.0, 4.0, 6.0]])
+        expected_tf_matrix[0:2, 0:3] = np.array([[1.0, 3.0, 5.0],
+                                                 [2.0, 4.0, 6.0]])
         self.assertTrue(np.array_equal(expected_tf_matrix, tf_matrix))

         # Try a test with no y specified
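Note: the expected matrix above follows the SVG convention that matrix(a b c d e f) fills a 3x3 affine matrix column by column. A quick standalone illustration:

    import numpy as np

    a, b, c, d, e, f = 1.0, 2.0, 3.0, 4.0, 5.0, 6.0   # matrix(a b c d e f)
    tf = np.array([[a, c, e],
                   [b, d, f],
                   [0.0, 0.0, 1.0]])
    # maps (x, y, 1) to (a*x + c*y + e, b*x + d*y + f, 1)
    print(tf.dot([1.0, 1.0, 1.0]))  # -> [ 9. 12.  1.]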
@@ -170,7 +201,8 @@ class TestParser(unittest.TestCase):

         # Now specify y
         expected_tf_translate[1, 2] = 45.5
-        tf_translate = svgpathtools.parser.parse_transform('translate(-36 45.5)')
+        tf_translate = svgpathtools.parser.parse_transform(
+            'translate(-36 45.5)')
         self.assertTrue(np.array_equal(expected_tf_translate, tf_translate))

         # Try a test with no y specified