From 8b7bd2c5d6049e43b65ef7ac798f6f8800e13d76 Mon Sep 17 00:00:00 2001 From: chopan Date: Sat, 8 Jul 2023 02:42:21 -0400 Subject: [PATCH 01/42] Optimized VMobject.consider_points_equals by not using np.allclose --- manim/mobject/types/vectorized_mobject.py | 27 +++++++++++++++++++++-- 1 file changed, 25 insertions(+), 2 deletions(-) diff --git a/manim/mobject/types/vectorized_mobject.py b/manim/mobject/types/vectorized_mobject.py index 12b6a85b6e..0de9189496 100644 --- a/manim/mobject/types/vectorized_mobject.py +++ b/manim/mobject/types/vectorized_mobject.py @@ -978,7 +978,30 @@ def scale_handle_to_anchor_distances(self, factor: float): # def consider_points_equals(self, p0, p1): - return np.allclose(p0, p1, atol=self.tolerance_for_point_equality) + """Determine if two points are close enough to be considered equal. + + This function reimplements np.allclose, because repeated calling of + np.allclose for only 2 points is inefficient. + ---------- + p0 + first point + p1 + second point + + Returns + ------- + bool + Whether the points p0 and p1 are considered close or not. + """ + rtol = 1.0e-5 # default from np.isclose() + atol = self.tolerance_for_point_equality + if abs(p0[0] - p1[0]) > atol + rtol * abs(p1[0]): + return False + if abs(p0[1] - p1[1]) > atol + rtol * abs(p1[1]): + return False + if abs(p0[2] - p1[2]) > atol + rtol * abs(p1[2]): + return False + return True def consider_points_equals_2d(self, p0: np.ndarray, p1: np.ndarray) -> bool: """Determine if two points are close enough to be considered equal. @@ -995,7 +1018,7 @@ def consider_points_equals_2d(self, p0: np.ndarray, p1: np.ndarray) -> bool: Returns ------- bool - whether two points considered close. + Whether the points p0 and p1 are considered close or not. """ rtol = 1.0e-5 # default from np.isclose() atol = self.tolerance_for_point_equality From bd43d04a7ea03949fc1e5af75964bb023c88e790 Mon Sep 17 00:00:00 2001 From: chopan Date: Sat, 8 Jul 2023 03:25:32 -0400 Subject: [PATCH 02/42] Optimized VMobject.add_points_as_corners --- manim/mobject/types/vectorized_mobject.py | 25 +++++++++++++++++++++-- 1 file changed, 23 insertions(+), 2 deletions(-) diff --git a/manim/mobject/types/vectorized_mobject.py b/manim/mobject/types/vectorized_mobject.py index 0de9189496..0962deeb76 100644 --- a/manim/mobject/types/vectorized_mobject.py +++ b/manim/mobject/types/vectorized_mobject.py @@ -837,8 +837,29 @@ def close_path(self): self.add_line_to(self.get_subpaths()[-1][0]) def add_points_as_corners(self, points: np.ndarray) -> VMobject: - for point in points: - self.add_line_to(point) + points = np.asarray(points).reshape(-1, self.dim) + if self.has_new_path_started(): + start_corners = np.empty((len(points), self.dim)) + start_corners[0] = self.points[-1] + start_corners[1:] = points[:-1] + end_corners = points + self.points = self.points[:-1] + else: + start_corners = points[:-1] + end_corners = points[1:] + + nppcc = self.n_points_per_cubic_curve + new_points = np.empty((nppcc * len(start_corners), self.dim)) + new_points[::nppcc] = start_corners + new_points[nppcc - 1 :: nppcc] = end_corners + for i in range(1, nppcc - 1): + new_points[i::nppcc] = interpolate( + start_corners, + end_corners, + i / (nppcc - 1), + ) + + self.append_points(new_points) return points def set_points_as_corners(self, points: Sequence[float]): From b46e11e4f5ed3cf12efb00366c62b9ea43161699 Mon Sep 17 00:00:00 2001 From: chopan Date: Sat, 8 Jul 2023 03:26:27 -0400 Subject: [PATCH 03/42] Optimized VMobject.append_points --- 
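Illustrative aside (not part of the patch series): the motivation behind PATCH 01 is that np.allclose builds temporary arrays and does broadcasting work whose overhead dwarfs three scalar comparisons when only a single pair of 3D points is involved. A minimal standalone sketch of that idea follows; the helper name points_equal and the tolerance values are placeholders for illustration, not Manim API.

    import timeit
    import numpy as np

    ATOL = 1e-6   # stand-in for VMobject.tolerance_for_point_equality
    RTOL = 1e-5   # same default that np.isclose uses

    def points_equal(p0, p1, atol=ATOL, rtol=RTOL):
        # Per-component check, equivalent to np.allclose for a single pair of 3D points.
        return all(abs(p0[i] - p1[i]) <= atol + rtol * abs(p1[i]) for i in range(3))

    p0 = np.array([1.0, 2.0, 3.0])
    p1 = np.array([1.0, 2.0, 3.0 + 1e-9])

    # Same answer as np.allclose, without the array machinery.
    assert points_equal(p0, p1) == np.allclose(p0, p1, atol=ATOL, rtol=RTOL)
    print(timeit.timeit(lambda: np.allclose(p0, p1, atol=ATOL), number=10_000))
    print(timeit.timeit(lambda: points_equal(p0, p1), number=10_000))

On typical machines the scalar version runs roughly an order of magnitude faster per call, which matters because consider_points_equals is invoked once per curve boundary.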
manim/mobject/types/vectorized_mobject.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/manim/mobject/types/vectorized_mobject.py b/manim/mobject/types/vectorized_mobject.py index 0962deeb76..17d92cf5db 100644 --- a/manim/mobject/types/vectorized_mobject.py +++ b/manim/mobject/types/vectorized_mobject.py @@ -670,7 +670,11 @@ def append_points(self, new_points): # TODO, check that number new points is a multiple of 4? # or else that if len(self.points) % 4 == 1, then # len(new_points) % 4 == 3? - self.points = np.append(self.points, new_points, axis=0) + n = len(self.points) + points = np.empty((n + len(new_points), 3)) + points[:n] = self.points + points[n:] = new_points + self.points = points return self def start_new_path(self, point): From f29980ee5291bc5042e3ef8bd6a6c0ff9e63ee38 Mon Sep 17 00:00:00 2001 From: chopan Date: Sat, 8 Jul 2023 03:27:58 -0400 Subject: [PATCH 04/42] Changed 3 to self.dim in new VMobject.append_points --- manim/mobject/types/vectorized_mobject.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/manim/mobject/types/vectorized_mobject.py b/manim/mobject/types/vectorized_mobject.py index 17d92cf5db..16260cb4fe 100644 --- a/manim/mobject/types/vectorized_mobject.py +++ b/manim/mobject/types/vectorized_mobject.py @@ -671,7 +671,7 @@ def append_points(self, new_points): # or else that if len(self.points) % 4 == 1, then # len(new_points) % 4 == 3? n = len(self.points) - points = np.empty((n + len(new_points), 3)) + points = np.empty((n + len(new_points), self.dim)) points[:n] = self.points points[n:] = new_points self.points = points From 9a97e808a34710d5cff1df7b7ef5fca9b866e09a Mon Sep 17 00:00:00 2001 From: chopan Date: Sat, 8 Jul 2023 03:52:00 -0400 Subject: [PATCH 05/42] Optimized VMobject's start_new_path and add_line_to, and added n_points property --- manim/mobject/types/vectorized_mobject.py | 31 +++++++++++++---------- 1 file changed, 18 insertions(+), 13 deletions(-) diff --git a/manim/mobject/types/vectorized_mobject.py b/manim/mobject/types/vectorized_mobject.py index 16260cb4fe..e1f9cfb41a 100644 --- a/manim/mobject/types/vectorized_mobject.py +++ b/manim/mobject/types/vectorized_mobject.py @@ -134,6 +134,10 @@ def __init__( def n_points_per_curve(self): return self.n_points_per_cubic_curve + @property + def n_points(self): + return self.points.shape[0] + def get_group_class(self): return VGroup @@ -625,7 +629,7 @@ def resize_points( (the target size) to a Numpy array. The default implementation is based on Numpy's ``resize`` function. """ - if new_length != len(self.points): + if new_length != self.n_points: self.points = resize_func(self.points, new_length) return self @@ -668,9 +672,9 @@ def clear_points(self): def append_points(self, new_points): # TODO, check that number new points is a multiple of 4? - # or else that if len(self.points) % 4 == 1, then - # len(new_points) % 4 == 3? - n = len(self.points) + # or else that if self.n_points % 4 == 1, then + # self.n_points % 4 == 3? 
+ n = self.n_points points = np.empty((n + len(new_points), self.dim)) points[:n] = self.points points[n:] = new_points @@ -678,13 +682,14 @@ def append_points(self, new_points): return self def start_new_path(self, point): - if len(self.points) % 4 != 0: + if self.n_points % 4 != 0: # close the open path by appending the last # start anchor sufficiently often last_anchor = self.get_start_anchors()[-1] - for _ in range(4 - (len(self.points) % 4)): - self.append_points([last_anchor]) - self.append_points([point]) + closure = [last_anchor] * (4 - (self.n_points % 4)) + self.append_points(closure + [point]) + else: + self.append_points([point]) return self def add_cubic_bezier_curve( @@ -694,7 +699,7 @@ def add_cubic_bezier_curve( handle2: np.ndarray, anchor2, ) -> None: - # TODO, check the len(self.points) % 4 == 0? + # TODO, check the self.n_points % 4 == 0? self.append_points([anchor1, handle1, handle2, anchor2]) def add_cubic_bezier_curves(self, curves): @@ -775,8 +780,8 @@ def add_line_to(self, point: np.ndarray): nppcc = self.n_points_per_cubic_curve self.add_cubic_bezier_curve_to( *( - interpolate(self.get_last_point(), point, a) - for a in np.linspace(0, 1, nppcc)[1:] + interpolate(self.get_last_point(), point, i / (nppcc - 1)) + for i in range(1, nppcc - 1) ) ) return self @@ -827,7 +832,7 @@ def add_smooth_curve_to(self, *points: np.array): def has_new_path_started(self): nppcc = self.n_points_per_cubic_curve # 4 # A new path starting is defined by a control point which is not part of a bezier subcurve. - return len(self.points) % nppcc == 1 + return self.n_points % nppcc == 1 def get_last_point(self): return self.points[-1] @@ -1266,7 +1271,7 @@ def get_num_curves(self) -> int: number of curves. of the vmobject. """ nppcc = self.n_points_per_cubic_curve - return len(self.points) // nppcc + return self.n_points // nppcc def get_curve_functions( self, From 109e955830494841ce3cdb8e351c5091ca464f40 Mon Sep 17 00:00:00 2001 From: chopan Date: Sat, 8 Jul 2023 04:22:52 -0400 Subject: [PATCH 06/42] Optimized VMobject's change_anchor_mode --- manim/mobject/types/vectorized_mobject.py | 53 ++++++++++++++++------- 1 file changed, 38 insertions(+), 15 deletions(-) diff --git a/manim/mobject/types/vectorized_mobject.py b/manim/mobject/types/vectorized_mobject.py index e1f9cfb41a..b72e89b5d1 100644 --- a/manim/mobject/types/vectorized_mobject.py +++ b/manim/mobject/types/vectorized_mobject.py @@ -913,26 +913,49 @@ def change_anchor_mode(self, mode: str): """ assert mode in ["jagged", "smooth"] nppcc = self.n_points_per_cubic_curve + atol, rtol = self.tolerance_for_point_equality, 1e-5 + for submob in self.family_members_with_points(): - subpaths = submob.get_subpaths() - submob.clear_points() - # A subpath can be composed of several bezier curves. - for subpath in subpaths: - # This will retrieve the anchors of the subpath, by selecting every n element in the array subpath - # The append is needed as the last element is not reached when slicing with numpy. - anchors = np.append(subpath[::nppcc], subpath[-1:], 0) + # Every submobject will have its handles modified + + starts = self.get_start_anchors() + ends = self.get_end_anchors() + + # Find where the end of a curve is too far from the start of the next one: + # this is where a subpath ended. + # Not using np.isclose because it is slow with only 3D points (it would be + # faster with N-D points where N is huge). 
+ is_not_close = np.abs(ends[:-1] - starts[1:]) > atol + rtol * np.abs( + ends[:-1] + ) + is_not_close = is_not_close[:, 0] | is_not_close[:, 1] | is_not_close[:, 2] + subpath_divisions = np.arange(is_not_close.size)[is_not_close] + + subpath_start_indices = np.empty(subpath_divisions.size + 1, dtype=int) + subpath_start_indices[0] = 0 + subpath_start_indices[1:] = subpath_divisions + 1 + + subpath_end_indices = np.empty(subpath_divisions.size + 1, dtype=int) + subpath_end_indices[:-1] = subpath_divisions + subpath_end_indices[-1] = starts.shape[0] - 1 + + # A subpath can be composed of several Bezier curves. + for start_i, end_i in zip(subpath_start_indices, subpath_end_indices): if mode == "smooth": + anchors = np.empty((end_i - start_i + 2, self.dim)) + anchors[: end_i - start_i + 1] = starts[start_i : end_i + 1] + anchors[end_i - start_i + 1] = ends[end_i] h1, h2 = get_smooth_handle_points(anchors) elif mode == "jagged": # The following will make the handles aligned with the anchors, thus making the bezier curve a segment - a1 = anchors[:-1] - a2 = anchors[1:] - h1 = interpolate(a1, a2, 1.0 / 3) - h2 = interpolate(a1, a2, 2.0 / 3) - new_subpath = np.array(subpath) - new_subpath[1::nppcc] = h1 - new_subpath[2::nppcc] = h2 - submob.append_points(new_subpath) + a1 = starts[start_i : end_i + 1] + a2 = ends[start_i : end_i + 1] + h1 = interpolate(a1, a2, 1 / 3) + h2 = interpolate(a1, a2, 2 / 3) + + # Set handles in this subpath + submob.points[nppcc * start_i + 1 : nppcc * (end_i + 1) : nppcc] = h1 + submob.points[nppcc * start_i + 2 : nppcc * (end_i + 1) : nppcc] = h2 return self def make_smooth(self): From d411a7376ab5d136e100a1d0d8a2bce8ddc2ee42 Mon Sep 17 00:00:00 2001 From: chopan Date: Sat, 8 Jul 2023 05:53:05 -0400 Subject: [PATCH 07/42] Optimized VMobject's point_from_proportion --- manim/mobject/types/vectorized_mobject.py | 50 +++++++++++++---------- 1 file changed, 28 insertions(+), 22 deletions(-) diff --git a/manim/mobject/types/vectorized_mobject.py b/manim/mobject/types/vectorized_mobject.py index b72e89b5d1..b1759aeb00 100644 --- a/manim/mobject/types/vectorized_mobject.py +++ b/manim/mobject/types/vectorized_mobject.py @@ -41,6 +41,7 @@ from ...utils.color import BLACK, WHITE, color_to_rgba from ...utils.deprecation import deprecated from ...utils.iterables import make_even, resize_array, stretch_array_to_length, tuplify +from ...utils.simple_functions import binary_search from ...utils.space_ops import rotate_vector, shoelace_direction # TODO @@ -966,7 +967,7 @@ def make_jagged(self): def add_subpath(self, points: np.ndarray): assert len(points) % 4 == 0 - self.points = np.append(self.points, points, axis=0) + self.append_points(points) return self def append_vectorized_mobject(self, vectorized_mobject): @@ -1018,7 +1019,7 @@ def scale_handle_to_anchor_distances(self, factor: float): ``self`` """ for submob in self.family_members_with_points(): - if len(submob.points) < self.n_points_per_cubic_curve: + if submob.n_points < self.n_points_per_cubic_curve: # The case that a bezier quad is not complete (there is no bezier curve as there is not enough control points.) 
continue a1, h1, h2, a2 = submob.get_anchors_and_handles() @@ -1227,7 +1228,8 @@ def get_nth_curve_length_pieces( sample_points = 10 curve = self.get_nth_curve_function(n) - points = np.array([curve(a) for a in np.linspace(0, 1, sample_points)]) + t = np.linspace(0, 1, sample_points) + points = curve(t.reshape(-1, 1)) diffs = points[1:] - points[:-1] norms = np.linalg.norm(diffs, axis=1) @@ -1252,9 +1254,7 @@ def get_nth_curve_length( length : :class:`float` The length of the nth curve. """ - - _, length = self.get_nth_curve_function_with_length(n, sample_points) - + length = np.sum(self.get_nth_curve_length_pieces(n, sample_points)) return length def get_nth_curve_function_with_length( @@ -1280,8 +1280,7 @@ def get_nth_curve_function_with_length( """ curve = self.get_nth_curve_function(n) - norms = self.get_nth_curve_length_pieces(n, sample_points=sample_points) - length = np.sum(norms) + length = self.get_nth_curve_length(n, sample_points) return curve, length @@ -1358,24 +1357,31 @@ def point_from_proportion(self, alpha: float) -> np.ndarray: raise ValueError(f"Alpha {alpha} not between 0 and 1.") self.throw_error_if_no_points() + if alpha == 0: + return self.points[0] if alpha == 1: return self.points[-1] - curves_and_lengths = tuple(self.get_curve_functions_with_lengths()) - - target_length = alpha * sum(length for _, length in curves_and_lengths) - current_length = 0 - - for curve, length in curves_and_lengths: - if current_length + length >= target_length: - if length != 0: - residue = (target_length - current_length) / length - else: - residue = 0 - - return curve(residue) + num_curves = self.get_num_curves() + lengths = [self.get_nth_curve_length(n) for n in range(num_curves)] + acc_lengths = np.add.accumulate(lengths) + target_length = alpha * acc_lengths[-1] + + # Binary search + left, right = 0, num_curves - 1 + while right > left: + mid = (left + right) // 2 + if acc_lengths[mid] >= target_length: + right = mid + else: + left = mid + 1 - current_length += length + nth_curve = self.get_nth_curve_function(left) + if left == 0: + t = target_length / lengths[left] + else: + t = (target_length - acc_lengths[left - 1]) / lengths[left] + return nth_curve(t) def proportion_from_point( self, From c544d3466b68e90e454c9c189457eba8207cc9ff Mon Sep 17 00:00:00 2001 From: chopan Date: Sat, 8 Jul 2023 06:31:15 -0400 Subject: [PATCH 08/42] Optimized VMobject's get_anchors and get_arc_length --- manim/mobject/types/vectorized_mobject.py | 16 +++++++++------- 1 file changed, 9 insertions(+), 7 deletions(-) diff --git a/manim/mobject/types/vectorized_mobject.py b/manim/mobject/types/vectorized_mobject.py index b1759aeb00..0a60c0fcc0 100644 --- a/manim/mobject/types/vectorized_mobject.py +++ b/manim/mobject/types/vectorized_mobject.py @@ -1484,12 +1484,19 @@ def get_anchors(self) -> np.ndarray: """ if self.points.shape[0] == 1: return self.points + anchors = np.empty((2 * self.get_num_curves(), self.dim)) + anchors[0::2] = self.get_start_anchors() + anchors[1::2] = self.get_end_anchors() + return anchors + """ return np.array( list(it.chain(*zip(self.get_start_anchors(), self.get_end_anchors()))), ) + """ def get_points_defining_boundary(self): - # Probably returns all anchors, but this is weird regarding the name of the method. + # TODO: this function is probably not returning the expected array + # Probably returns all anchors, but this is weird regarding the name of the method. 
return np.array(list(it.chain(*(sm.get_anchors() for sm in self.get_family())))) def get_arc_length(self, sample_points_per_curve: int | None = None) -> float: @@ -1506,12 +1513,7 @@ def get_arc_length(self, sample_points_per_curve: int | None = None) -> float: The length of the :class:`VMobject`. """ - return sum( - length - for _, length in self.get_curve_functions_with_lengths( - sample_points=sample_points_per_curve, - ) - ) + return sum([self.get_nth_curve_length(n) for n in range(num_curves)]) # Alignment def align_points(self, vmobject: VMobject): From 209bcb267cc36edb207cb05a499f81351c39167c Mon Sep 17 00:00:00 2001 From: chopan Date: Sat, 8 Jul 2023 19:32:28 -0400 Subject: [PATCH 09/42] Removed NumPy from VMobject.set_points_as_corners --- manim/mobject/types/vectorized_mobject.py | 13 ++++++++++--- 1 file changed, 10 insertions(+), 3 deletions(-) diff --git a/manim/mobject/types/vectorized_mobject.py b/manim/mobject/types/vectorized_mobject.py index 0a60c0fcc0..aa67cbe187 100644 --- a/manim/mobject/types/vectorized_mobject.py +++ b/manim/mobject/types/vectorized_mobject.py @@ -657,7 +657,7 @@ def set_anchors_and_handles( assert len(anchors1) == len(handles1) == len(handles2) == len(anchors2) nppcc = self.n_points_per_cubic_curve # 4 total_len = nppcc * len(anchors1) - self.points = np.zeros((total_len, self.dim)) + self.points = np.empty((total_len, self.dim)) # the following will, from the four sets, dispatch them in points such that # self.points = [ # anchors1[0], handles1[0], handles2[0], anchors1[0], anchors1[1], @@ -889,11 +889,18 @@ def set_points_as_corners(self, points: Sequence[float]): ``self`` """ nppcc = self.n_points_per_cubic_curve - points = np.array(points) + points = np.asarray(points) + start_anchors = points[:-1] + end_anchors = points[1:] # This will set the handles aligned with the anchors. # Id est, a bezier curve will be the segment from the two anchors such that the handles belongs to this segment. 
self.set_anchors_and_handles( - *(interpolate(points[:-1], points[1:], a) for a in np.linspace(0, 1, nppcc)) + start_anchors, + *( + interpolate(start_anchors, end_anchors, i / (nppcc - 1)) + for i in range(1, nppcc - 1) + ), + end_anchors, ) return self From cf719e93d62440c77959493dbab9977dd145cb83 Mon Sep 17 00:00:00 2001 From: chopan Date: Sat, 8 Jul 2023 19:33:34 -0400 Subject: [PATCH 10/42] Removed list() in VMobject.append_vectorized_mobject --- manim/mobject/types/vectorized_mobject.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/manim/mobject/types/vectorized_mobject.py b/manim/mobject/types/vectorized_mobject.py index aa67cbe187..350fe1c0b9 100644 --- a/manim/mobject/types/vectorized_mobject.py +++ b/manim/mobject/types/vectorized_mobject.py @@ -978,13 +978,11 @@ def add_subpath(self, points: np.ndarray): return self def append_vectorized_mobject(self, vectorized_mobject): - new_points = list(vectorized_mobject.points) - if self.has_new_path_started(): # Remove last point, which is starting # a new path self.points = self.points[:-1] - self.append_points(new_points) + self.append_points(vectorized_mobject.points) def apply_function(self, function): factor = self.pre_function_handle_to_anchor_scale_factor From ce0bf2a4ab63e6eaeb223657c0b7d5acfbf765f3 Mon Sep 17 00:00:00 2001 From: chopan Date: Sat, 8 Jul 2023 19:51:56 -0400 Subject: [PATCH 11/42] Optimized VMobject.gen_cubic_bezier_tuples_from_points --- manim/mobject/types/vectorized_mobject.py | 16 +++++++++------- 1 file changed, 9 insertions(+), 7 deletions(-) diff --git a/manim/mobject/types/vectorized_mobject.py b/manim/mobject/types/vectorized_mobject.py index 350fe1c0b9..5835016cea 100644 --- a/manim/mobject/types/vectorized_mobject.py +++ b/manim/mobject/types/vectorized_mobject.py @@ -1088,10 +1088,10 @@ def consider_points_equals_2d(self, p0: np.ndarray, p1: np.ndarray) -> bool: return True # Information about line - def get_cubic_bezier_tuples_from_points(self, points): - return np.array(list(self.gen_cubic_bezier_tuples_from_points(points))) + def get_cubic_bezier_tuples_from_points(self, points: np.ndarray) -> np.ndarray: + return self.gen_cubic_bezier_tuples_from_points(points) - def gen_cubic_bezier_tuples_from_points(self, points: np.ndarray) -> tuple: + def gen_cubic_bezier_tuples_from_points(self, points: np.ndarray) -> np.ndarray: """Returns the bezier tuples from an array of points. self.points is a list of the anchors and handles of the bezier curves of the mobject (ie [anchor1, handle1, handle2, anchor2, anchor3 ..]) @@ -1109,11 +1109,13 @@ def gen_cubic_bezier_tuples_from_points(self, points: np.ndarray) -> tuple: typing.Tuple Bezier control points. """ + points = np.asarray(points) nppcc = self.n_points_per_cubic_curve - remainder = len(points) % nppcc - points = points[: len(points) - remainder] - # Basically take every nppcc element. 
- return (points[i : i + nppcc] for i in range(0, len(points), nppcc)) + n_curves = points.shape[0] // nppcc + n_points = nppcc * n_curves + points = points[:n_points] + + return points.reshape(n_curves, nppcc, self.dim) def get_cubic_bezier_tuples(self): return self.get_cubic_bezier_tuples_from_points(self.points) From 0470ca2e3bba7d61742170daa75f4b1652cc7e13 Mon Sep 17 00:00:00 2001 From: chopan Date: Sat, 8 Jul 2023 22:38:15 -0400 Subject: [PATCH 12/42] Added Mobject.memory and optimized VMobject.point_from_proportion via length memos and binary search --- manim/mobject/mobject.py | 1 + manim/mobject/types/vectorized_mobject.py | 91 ++++++++++++++++++++++- 2 files changed, 89 insertions(+), 3 deletions(-) diff --git a/manim/mobject/mobject.py b/manim/mobject/mobject.py index b9ef39af88..8ec2647aa8 100644 --- a/manim/mobject/mobject.py +++ b/manim/mobject/mobject.py @@ -101,6 +101,7 @@ def __init__(self, color=WHITE, name=None, dim=3, target=None, z_index=0): self.updaters = [] self.updating_suspended = False self.color = Color(color) if color else None + self.memory = {} self.reset_points() self.generate_points() diff --git a/manim/mobject/types/vectorized_mobject.py b/manim/mobject/types/vectorized_mobject.py index 5835016cea..b3307bed51 100644 --- a/manim/mobject/types/vectorized_mobject.py +++ b/manim/mobject/types/vectorized_mobject.py @@ -1180,6 +1180,74 @@ def get_subpaths(self) -> tuple: """ return self.get_subpaths_from_points(self.points) + def _init_curve_memory(self, sample_points: int = 10): + num_curves = self.get_num_curves() + lengths = np.array( + [self.get_nth_curve_length(n, sample_points) for n in range(num_curves)] + ) + + self.memory["piece_curves"] = { + "points": self.points, + "sample_points": sample_points, + "lengths": lengths, + "acc_lengths": np.add.accumulate(lengths), + } + + def _update_curve_memory(self, sample_points: int = 10): + if sample_points != self.memory["piece_curves"]["sample_points"]: + self._init_curve_memory(sample_points) + return + + curr_points = self.points + memo_points = self.memory["piece_curves"]["points"] + + curr_n_points = self.n_points + memo_n_points = memo_points.shape[0] + + nppcc = self.n_points_per_cubic_curve + n_points = min(curr_n_points, memo_n_points) + n_curves = n_points // nppcc + n_points = n_points * nppcc + + # Check if any Bezier had its points changed to recalculate its length. + neq = curr_points[:n_points] != memo_points[:n_points] + # Collapse every 3D point group into a single value per point. + neq = neq.reshape(-1, self.dim) + neq2 = neq[:, 0] + for i in range(1, self.dim): + neq2 |= neq[:, i] + # Collapse every group of 4 values into a single value per curve. 
+ neq2 = neq2.reshape(-1, nppcc) + differences = neq2[:, 0] + for i in range(1, nppcc): + differences |= neq2[:, i] + differences = np.arange(n_curves)[differences] + + # If the amount of points has changed, adjust lengths + curr_n_curves = curr_n_points / nppcc + memo_n_curves = memo_n_points / nppcc + if curr_n_points < memo_n_points: + new_lengths = self.memory["piece_curves"]["lengths"][:curr_n_curves] + self.memory["piece_curves"]["lengths"] = new_lengths + elif curr_n_points > memo_n_points: + new_lengths = np.empty(curr_n_curves) + new_lengths[:memo_n_curves] = self.memory["piece_curves"] + new_lengths[memo_n_curves:] = [ + self.get_nth_curve_length(n, sample_points) + for n in range(memo_n_curves, curr_n_curves) + ] + self.memory["piece_curves"]["lengths"] = new_lengths + + # Update memo, recalculating only the lengths which have changed + self.memory["piece_curves"]["points"] = curr_points + self.memory["piece_curves"]["lengths"][differences] = [ + self.get_nth_curve_length(n) for n in differences + ] + if differences.shape[0] > 0: + self.memory["piece_curves"]["acc_lengths"] = np.add.accumulate( + self.memory["piece_curves"]["lengths"] + ) + def get_nth_curve_points(self, n: int) -> np.ndarray: """Returns the points defining the nth curve of the vmobject. @@ -1261,6 +1329,17 @@ def get_nth_curve_length( length : :class:`float` The length of the nth curve. """ + if ( + "piece_curves" in self.memory + and self.memory["piece_curves"]["sample_points"] == sample_points + and self.memory["piece_curves"]["lengths"].shape[0] > n + and ( + self.points[nppcc * n :: nppcc] + == self.memory["piece_curves"]["points"][nppcc * n :: nppcc] + ).all() + ): + return self.memory["piece_curves"]["lengths"][n] + length = np.sum(self.get_nth_curve_length_pieces(n, sample_points)) return length @@ -1369,11 +1448,17 @@ def point_from_proportion(self, alpha: float) -> np.ndarray: if alpha == 1: return self.points[-1] - num_curves = self.get_num_curves() - lengths = [self.get_nth_curve_length(n) for n in range(num_curves)] - acc_lengths = np.add.accumulate(lengths) + if "piece_curves" not in self.memory: + self._init_curve_memory() + else: + self._update_curve_memory() + + lengths = self.memory["piece_curves"]["lengths"] + acc_lengths = self.memory["piece_curves"]["acc_lengths"] target_length = alpha * acc_lengths[-1] + num_curves = self.get_num_curves() + # Binary search left, right = 0, num_curves - 1 while right > left: From 70cf3cc2e0189e0f76cb8828f519ef42fc85696d Mon Sep 17 00:00:00 2001 From: chopan Date: Sun, 9 Jul 2023 00:40:40 -0400 Subject: [PATCH 13/42] Rolled back memo in VMobject.get_nth_curve_length --- manim/mobject/types/vectorized_mobject.py | 11 ----------- 1 file changed, 11 deletions(-) diff --git a/manim/mobject/types/vectorized_mobject.py b/manim/mobject/types/vectorized_mobject.py index b3307bed51..8c8763d3a2 100644 --- a/manim/mobject/types/vectorized_mobject.py +++ b/manim/mobject/types/vectorized_mobject.py @@ -1329,17 +1329,6 @@ def get_nth_curve_length( length : :class:`float` The length of the nth curve. 
""" - if ( - "piece_curves" in self.memory - and self.memory["piece_curves"]["sample_points"] == sample_points - and self.memory["piece_curves"]["lengths"].shape[0] > n - and ( - self.points[nppcc * n :: nppcc] - == self.memory["piece_curves"]["points"][nppcc * n :: nppcc] - ).all() - ): - return self.memory["piece_curves"]["lengths"][n] - length = np.sum(self.get_nth_curve_length_pieces(n, sample_points)) return length From d77193879bb8350cc1034034ad7d5117f21e32ef Mon Sep 17 00:00:00 2001 From: chopan Date: Sun, 9 Jul 2023 01:41:56 -0400 Subject: [PATCH 14/42] Rewrote VMobject.get_points_defining_boundary --- manim/mobject/types/vectorized_mobject.py | 20 ++++++++++++++------ 1 file changed, 14 insertions(+), 6 deletions(-) diff --git a/manim/mobject/types/vectorized_mobject.py b/manim/mobject/types/vectorized_mobject.py index 8c8763d3a2..8cd458ce02 100644 --- a/manim/mobject/types/vectorized_mobject.py +++ b/manim/mobject/types/vectorized_mobject.py @@ -1569,16 +1569,24 @@ def get_anchors(self) -> np.ndarray: anchors[0::2] = self.get_start_anchors() anchors[1::2] = self.get_end_anchors() return anchors - """ - return np.array( - list(it.chain(*zip(self.get_start_anchors(), self.get_end_anchors()))), - ) - """ def get_points_defining_boundary(self): # TODO: this function is probably not returning the expected array # Probably returns all anchors, but this is weird regarding the name of the method. - return np.array(list(it.chain(*(sm.get_anchors() for sm in self.get_family())))) + family = self.get_family() + n_anchors_per_submob = [ + (submob.n_points // submob.n_points_per_cubic_curve) * 2 + for submob in family + ] + acc_n_anchors = np.add.accumulate(n_anchors_per_submob) + + boundary = np.empty((acc_n_anchors[-1], self.dim)) + start_i = 0 + for submob, end_i in zip(family, acc_n_anchors): + boundary[start_i:end_i] = submob.get_anchors() + start_i = end_i + + return boundary def get_arc_length(self, sample_points_per_curve: int | None = None) -> float: """Return the approximated length of the whole curve. From cc027b6ee176b05dce80dbde151ee4e1278b5e11 Mon Sep 17 00:00:00 2001 From: chopan Date: Sun, 9 Jul 2023 17:14:33 -0400 Subject: [PATCH 15/42] VMobject: improved subpath methods and align_points --- manim/mobject/types/vectorized_mobject.py | 359 +++++++++++++++------- 1 file changed, 248 insertions(+), 111 deletions(-) diff --git a/manim/mobject/types/vectorized_mobject.py b/manim/mobject/types/vectorized_mobject.py index 8cd458ce02..ad0c8650ff 100644 --- a/manim/mobject/types/vectorized_mobject.py +++ b/manim/mobject/types/vectorized_mobject.py @@ -783,7 +783,8 @@ def add_line_to(self, point: np.ndarray): *( interpolate(self.get_last_point(), point, i / (nppcc - 1)) for i in range(1, nppcc - 1) - ) + ), + point, ) return self @@ -921,49 +922,27 @@ def change_anchor_mode(self, mode: str): """ assert mode in ["jagged", "smooth"] nppcc = self.n_points_per_cubic_curve - atol, rtol = self.tolerance_for_point_equality, 1e-5 for submob in self.family_members_with_points(): # Every submobject will have its handles modified - - starts = self.get_start_anchors() - ends = self.get_end_anchors() - - # Find where the end of a curve is too far from the start of the next one: - # this is where a subpath ended. - # Not using np.isclose because it is slow with only 3D points (it would be - # faster with N-D points where N is huge). 
- is_not_close = np.abs(ends[:-1] - starts[1:]) > atol + rtol * np.abs( - ends[:-1] - ) - is_not_close = is_not_close[:, 0] | is_not_close[:, 1] | is_not_close[:, 2] - subpath_divisions = np.arange(is_not_close.size)[is_not_close] - - subpath_start_indices = np.empty(subpath_divisions.size + 1, dtype=int) - subpath_start_indices[0] = 0 - subpath_start_indices[1:] = subpath_divisions + 1 - - subpath_end_indices = np.empty(subpath_divisions.size + 1, dtype=int) - subpath_end_indices[:-1] = subpath_divisions - subpath_end_indices[-1] = starts.shape[0] - 1 - - # A subpath can be composed of several Bezier curves. - for start_i, end_i in zip(subpath_start_indices, subpath_end_indices): - if mode == "smooth": - anchors = np.empty((end_i - start_i + 2, self.dim)) - anchors[: end_i - start_i + 1] = starts[start_i : end_i + 1] - anchors[end_i - start_i + 1] = ends[end_i] + if mode == "jagged": + # The following will make the handles aligned with the anchors, + # thus making the Bèzier curves straight lines + starts = submob.get_start_anchors() + ends = submob.get_end_anchors() + for i in range(1, nppcc - 1): + submob.points[1::nppcc] = interpolate(starts, ends, i / (nppcc - 1)) + + elif mode == "smooth": + # Divide into subpaths and for each subpath compute smooth handles. + split_indices = submob.get_subpath_split_indices() + for start, end in split_indices: + anchors = np.empty(((end - start) // nppcc + 1, submob.dim)) + anchors[:-1] = submob.points[start:end:nppcc] + anchors[-1] = submob.points[end - 1] h1, h2 = get_smooth_handle_points(anchors) - elif mode == "jagged": - # The following will make the handles aligned with the anchors, thus making the bezier curve a segment - a1 = starts[start_i : end_i + 1] - a2 = ends[start_i : end_i + 1] - h1 = interpolate(a1, a2, 1 / 3) - h2 = interpolate(a1, a2, 2 / 3) - - # Set handles in this subpath - submob.points[nppcc * start_i + 1 : nppcc * (end_i + 1) : nppcc] = h1 - submob.points[nppcc * start_i + 2 : nppcc * (end_i + 1) : nppcc] = h2 + submob.points[start + 1 : end : nppcc] = h1 + submob.points[start + 2 : end : nppcc] = h2 return self def make_smooth(self): @@ -1054,15 +1033,18 @@ def consider_points_equals(self, p0, p1): """ rtol = 1.0e-5 # default from np.isclose() atol = self.tolerance_for_point_equality - if abs(p0[0] - p1[0]) > atol + rtol * abs(p1[0]): - return False - if abs(p0[1] - p1[1]) > atol + rtol * abs(p1[1]): - return False - if abs(p0[2] - p1[2]) > atol + rtol * abs(p1[2]): - return False - return True - def consider_points_equals_2d(self, p0: np.ndarray, p1: np.ndarray) -> bool: + # Case 1: single pair of points (returns a bool) + if p0.ndim == 1: + return not self.consider_points_different(p0, p1) + + # Case 2: multiple pairs of points (returns a boolean ndarray) + else: + return ~self.consider_points_different(p0, p1) + + def consider_points_equals_2d( + self, p0: np.ndarray, p1: np.ndarray + ) -> bool | np.ndarray: """Determine if two points are close enough to be considered equal. This uses the algorithm from np.isclose(), but expanded here for the @@ -1079,13 +1061,83 @@ def consider_points_equals_2d(self, p0: np.ndarray, p1: np.ndarray) -> bool: bool Whether the points p0 and p1 are considered close or not. 
""" + # Case 1: single pair of points (returns a bool) + if p0.ndim == 1: + return not self.consider_points_different_2d(p0, p1) + + # Case 2: multiple pairs of points (returns a boolean ndarray) + else: + return ~self.consider_points_different_2d(p0, p1) + + def consider_points_different(self, p0, p1): + """Determine if two points are distant enough to be considered different. + + This function reimplements np.allclose, because repeated calling of + np.allclose for only 2 points is inefficient. + ---------- + p0 + first point + p1 + second point + + Returns + ------- + bool + Whether the points p0 and p1 are considered different or not. + """ rtol = 1.0e-5 # default from np.isclose() atol = self.tolerance_for_point_equality - if abs(p0[0] - p1[0]) > atol + rtol * abs(p1[0]): + + # Case 1: single pair of points + if p0.ndim == 1: + if abs(p0[0] - p1[0]) > atol + rtol * abs(p1[0]): + return True + if abs(p0[1] - p1[1]) > atol + rtol * abs(p1[1]): + return True + if abs(p0[2] - p1[2]) > atol + rtol * abs(p1[2]): + return True return False - if abs(p0[1] - p1[1]) > atol + rtol * abs(p1[1]): + + # Case 2: multiple pairs of points + else: + is_far = abs(p1 - p0) > atol + rtol * abs(p1) + return is_far[:, 0] | is_far[:, 1] | is_far[:, 2] + + def consider_points_different_2d(self, p0: np.ndarray, p1: np.ndarray) -> bool: + """Determine if two points are distant enough to be considered different. + + This uses the algorithm from np.isclose(), but expanded here for the + 2D point case. NumPy is overkill for such a small question. + Parameters + ---------- + p0 + first point + p1 + second point + + Returns + ------- + bool + Whether the points p0 and p1 are considered different or not. + """ + rtol = 1.0e-5 # default from np.isclose() + atol = self.tolerance_for_point_equality + + # Case 1: single pair of points + if p0.ndim == 1: + if abs(p0[0] - p1[0]) > atol + rtol * abs(p1[0]): + return True + if abs(p0[1] - p1[1]) > atol + rtol * abs(p1[1]): + return True return False - return True + + # Case 2: multiple pairs of points + else: + # Ensure that we're only working with 2D components + p0 = p0[:, :2] + p1 = p1[:, :2] + is_far = abs(p1 - p0) > atol + rtol * abs(p1) + return is_far[:, 0] | is_far[:, 1] # Information about line def get_cubic_bezier_tuples_from_points(self, points: np.ndarray) -> np.ndarray: @@ -1109,7 +1161,7 @@ def gen_cubic_bezier_tuples_from_points(self, points: np.ndarray) -> np.ndarray: typing.Tuple Bezier control points. """ - points = np.asarray(points) + points = np.array(points) nppcc = self.n_points_per_cubic_curve n_curves = points.shape[0] // nppcc n_points = nppcc * n_curves @@ -1120,6 +1172,7 @@ def gen_cubic_bezier_tuples_from_points(self, points: np.ndarray) -> np.ndarray: def get_cubic_bezier_tuples(self): return self.get_cubic_bezier_tuples_from_points(self.points) + # TODO: Deprecate? 
def _gen_subpaths_from_points( self, points: np.ndarray, @@ -1155,18 +1208,15 @@ def _gen_subpaths_from_points( ) def get_subpaths_from_points(self, points): - return list( - self._gen_subpaths_from_points( - points, - lambda n: not self.consider_points_equals(points[n - 1], points[n]), - ), - ) + return [ + points[i:j] for i, j in self.get_subpath_split_indices_from_points(points) + ] def gen_subpaths_from_points_2d(self, points): - return self._gen_subpaths_from_points( - points, - lambda n: not self.consider_points_equals_2d(points[n - 1], points[n]), - ) + return [ + points[i:j] + for i, j in self.get_subpath_split_indices_from_points(points, n_dims=2) + ] def get_subpaths(self) -> tuple: """Returns subpaths formed by the curves of the VMobject. @@ -1180,6 +1230,40 @@ def get_subpaths(self) -> tuple: """ return self.get_subpaths_from_points(self.points) + def get_subpath_split_indices_from_points( + self, points: np.ndarray, n_dims: int = 3 + ) -> np.ndarray: + nppcc = self.n_points_per_cubic_curve + starts = points[::nppcc] + ends = points[nppcc - 1 :: nppcc] + # This ensures that there are no more starts than ends. + n_curves = ends.shape[0] + starts = starts[:n_curves] + + # DO NOT DELETE THIS! Or else, a [[0 0]] ndarray will be returned + # for an empty list of points, when it should instead return []. + if n_curves == 0: + return np.empty((0, 2), dtype=int) + + if n_dims == 2: + is_far = self.consider_points_different_2d(starts[1:], ends[:-1]) + else: + is_far = self.consider_points_different(starts[1:], ends[:-1]) + aux = np.arange(1, is_far.shape[0] + 1)[is_far] + + split_indices = np.empty((aux.shape[0] + 1, 2), dtype=int) + split_indices[0, 0] = 0 + split_indices[1:, 0] = aux + split_indices[:-1, 1] = aux + split_indices[-1, 1] = n_curves + split_indices *= self.n_points_per_cubic_curve + + return split_indices + + def get_subpath_split_indices(self) -> np.ndarray: + return self.get_subpath_split_indices_from_points(self.points) + + # Curve functions def _init_curve_memory(self, sample_points: int = 10): num_curves = self.get_num_curves() lengths = np.array( @@ -1637,43 +1721,93 @@ def align_points(self, vmobject: VMobject): if mob.has_new_path_started(): mob.add_line_to(mob.get_last_point()) - # Figure out what the subpaths are - subpaths1 = self.get_subpaths() - subpaths2 = vmobject.get_subpaths() - n_subpaths = max(len(subpaths1), len(subpaths2)) - # Start building new ones - new_path1 = np.zeros((0, self.dim)) - new_path2 = np.zeros((0, self.dim)) + # Figure out what the subpaths are. + self_split_i = self.get_subpath_split_indices() + self_n_subpaths = self_split_i.shape[0] + vmob_split_i = vmobject.get_subpath_split_indices() + vmob_n_subpaths = vmob_split_i.shape[0] - nppcc = self.n_points_per_cubic_curve + # If they have the same subpaths, do nothing. 
+ if self_n_subpaths == vmob_n_subpaths and (self_split_i == vmob_split_i).all(): + return - def get_nth_subpath(path_list, n): - if n >= len(path_list): - # Create a null path at the very end - return [path_list[-1][-1]] * nppcc - path = path_list[n] - # Check for useless points at the end of the path and remove them - # https://github.com/ManimCommunity/manim/issues/1959 - while len(path) > nppcc: - # If the last nppc points are all equal to the preceding point - if self.consider_points_equals(path[-nppcc:], path[-nppcc - 1]): - path = path[:-nppcc] - else: - break - return path - - for n in range(n_subpaths): - # For each pair of subpaths, add points until they are the same length - sp1 = get_nth_subpath(subpaths1, n) - sp2 = get_nth_subpath(subpaths2, n) - diff1 = max(0, (len(sp2) - len(sp1)) // nppcc) - diff2 = max(0, (len(sp1) - len(sp2)) // nppcc) - sp1 = self.insert_n_curves_to_point_list(diff1, sp1) - sp2 = self.insert_n_curves_to_point_list(diff2, sp2) - new_path1 = np.append(new_path1, sp1, axis=0) - new_path2 = np.append(new_path2, sp2, axis=0) - self.set_points(new_path1) - vmobject.set_points(new_path2) + self_n_points = self_split_i[-1, 1] + vmob_n_points = vmob_split_i[-1, 1] + + if self_n_subpaths < vmob_n_subpaths: + least_n_subpaths = self_n_subpaths + remainder_n_points = vmob_n_points - vmob_split_i[self_n_subpaths - 1, 1] + else: + least_n_subpaths = vmob_n_subpaths + remainder_n_points = self_n_points - self_split_i[vmob_n_subpaths - 1, 1] + + # For each possible pair of subpaths from self and vmob, + # get the number of points of the longest one to adjust + # the subpaths accordingly + self_n_points_per_path = self_split_i[:, 1] - self_split_i[:, 0] + vmob_n_points_per_path = vmob_split_i[:, 1] - vmob_split_i[:, 0] + max_n_points_per_path = np.maximum( + self_n_points_per_path[:least_n_subpaths], + vmob_n_points_per_path[:least_n_subpaths], + ) + max_acc_n_points = np.add.accumulate(max_n_points_per_path) + max_split_i = np.empty((least_n_subpaths, 2), dtype=int) + max_split_i[0, 0] = 0 + max_split_i[1:, 0] = max_acc_n_points[:-1] + max_split_i[:, 1] = max_acc_n_points + max_n_points = max_acc_n_points[-1] + + # Precalculate lengths of new paths and preallocate them in memory + final_n_points = max_n_points + remainder_n_points + self_new_path = np.empty((final_n_points, self.dim)) + vmob_new_path = np.empty((final_n_points, self.dim)) + + # Analyze all of the possible pairs of subpaths + nppcc = self.n_points_per_cubic_curve + for i in range(least_n_subpaths): + # Start and end indices of self, vmob and max subpaths + self_start, self_end = self_split_i[i] + vmob_start, vmob_end = vmob_split_i[i] + max_start, max_end = max_split_i[i] + + self_n = self_n_points_per_path[i] + vmob_n = vmob_n_points_per_path[i] + + # Add corresponding subpaths to the new paths. If necessary, + # subdivide one of them into more Bèzier curves until its + # number of points matches the other Mobject's subpath. 
+ self_subpath = self.points[self_start:self_end] + vmob_subpath = vmobject.points[vmob_start:vmob_end] + if self_n < vmob_n: + vmob_new_path[max_start:max_end] = vmob_subpath + self_new_path[max_start:max_end] = self.insert_n_curves_to_point_list( + (vmob_n - self_n) // nppcc, + self_subpath, + ) + elif self_n > vmob_n: + self_new_path[max_start:max_end] = self_subpath + vmob_new_path[max_start:max_end] = self.insert_n_curves_to_point_list( + (self_n - vmob_n) // nppcc, + vmob_subpath, + ) + else: + self_new_path[max_start:max_end] = self_subpath + vmob_new_path[max_start:max_end] = vmob_subpath + + # If any of the original paths had more subpaths than the other, + # add them to the corresponding new path and complete the other + # one by appending its last anchor as many times as necessary. + if self_n_subpaths < vmob_n_subpaths: + vmob_start = vmob_split_i[least_n_subpaths, 0] + self_new_path[max_n_points:] = self_new_path[max_n_points - 1] + vmob_new_path[max_n_points:] = vmobject.points[vmob_start:] + elif self_n_subpaths > vmob_n_subpaths: + self_start = self_split_i[least_n_subpaths, 0] + self_new_path[max_n_points:] = self.points[self_start:] + vmob_new_path[max_n_points:] = vmob_new_path[max_n_points - 1] + + self.set_points(self_new_path) + vmobject.set_points(vmob_new_path) return self def insert_n_curves(self, n: int): @@ -1716,8 +1850,9 @@ def insert_n_curves_to_point_list(self, n: int, points: np.ndarray) -> np.ndarra Points generated. """ + nppcc = self.n_points_per_cubic_curve + if len(points) == 1: - nppcc = self.n_points_per_cubic_curve return np.repeat(points, nppcc * n, 0) bezier_quads = self.get_cubic_bezier_tuples_from_points(points) curr_num = len(bezier_quads) @@ -1738,21 +1873,23 @@ def insert_n_curves_to_point_list(self, n: int, points: np.ndarray) -> np.ndarra # The split factors array would hence be: # [2, 1, 2, 1, 2, 1, 2, 1, 2, 1] split_factors = np.zeros(curr_num, dtype="i") - for val in repeat_indices: - split_factors[val] += 1 + np.add.at(split_factors, repeat_indices, 1) - new_points = np.zeros((0, self.dim)) + new_points = np.empty((nppcc * target_num, self.dim)) + start_i = 0 for quad, sf in zip(bezier_quads, split_factors): - # What was once a single cubic curve defined - # by "quad" will now be broken into sf - # smaller cubic curves - alphas = np.linspace(0, 1, sf + 1) - for a1, a2 in zip(alphas, alphas[1:]): - new_points = np.append( - new_points, - partial_bezier_points(quad, a1, a2), - axis=0, - ) + if sf == 1: + new_points[start_i : start_i + nppcc] = quad + start_i += nppcc + else: + # What was once a single cubic curve defined + # by "quad" will now be broken into sf + # smaller cubic curves + for i in range(sf): + new_points[start_i : start_i + nppcc] = partial_bezier_points( + quad, i / sf, (i + 1) / sf + ) + start_i += nppcc return new_points def align_rgbas(self, vmobject): From 5eb8c09b68f924085c0b1c32a188d77a4d4a4a35 Mon Sep 17 00:00:00 2001 From: chopan Date: Sun, 9 Jul 2023 18:12:58 -0400 Subject: [PATCH 16/42] Solved TODO in VMobject.align_points --- manim/mobject/types/vectorized_mobject.py | 30 +++++++++++++---------- 1 file changed, 17 insertions(+), 13 deletions(-) diff --git a/manim/mobject/types/vectorized_mobject.py b/manim/mobject/types/vectorized_mobject.py index ad0c8650ff..1f618716c1 100644 --- a/manim/mobject/types/vectorized_mobject.py +++ b/manim/mobject/types/vectorized_mobject.py @@ -1707,19 +1707,23 @@ def align_points(self, vmobject: VMobject): ``self`` """ self.align_rgbas(vmobject) - # TODO: This shortcut can be a 
bit over eager. What if they have the same length, but different subpath lengths? - if self.get_num_points() == vmobject.get_num_points(): - return - for mob in self, vmobject: - # If there are no points, add one to - # wherever the "center" is - if mob.has_no_points(): - mob.start_new_path(mob.get_center()) - # If there's only one point, turn it into - # a null curve - if mob.has_new_path_started(): - mob.add_line_to(mob.get_last_point()) + # If there are no points, add one to + # wherever the "center" is. + if self.has_no_points(): + # If both Mobjects have no points, do not continue. + if vmobject.has_no_points(): + return self + self.start_new_path(self.get_center()) + if vmobject.has_no_points(): + vmobject.start_new_path(vmobject.get_center()) + + # If there's only one point, turn it into + # a null curve. + if self.has_new_path_started(): + self.add_line_to(self.get_last_point()) + if vmobject.has_new_path_started(): + vmobject.add_line_to(vmobject.get_last_point()) # Figure out what the subpaths are. self_split_i = self.get_subpath_split_indices() @@ -1727,7 +1731,7 @@ def align_points(self, vmobject: VMobject): vmob_split_i = vmobject.get_subpath_split_indices() vmob_n_subpaths = vmob_split_i.shape[0] - # If they have the same subpaths, do nothing. + # If they have the same subpaths, do not continue. if self_n_subpaths == vmob_n_subpaths and (self_split_i == vmob_split_i).all(): return From d403d4bd441cdfc6583ccec276b2dc067f8b0a35 Mon Sep 17 00:00:00 2001 From: chopan Date: Sun, 9 Jul 2023 18:14:38 -0400 Subject: [PATCH 17/42] Corrected comment in VMobject.align_points --- manim/mobject/types/vectorized_mobject.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/manim/mobject/types/vectorized_mobject.py b/manim/mobject/types/vectorized_mobject.py index 1f618716c1..46973340d5 100644 --- a/manim/mobject/types/vectorized_mobject.py +++ b/manim/mobject/types/vectorized_mobject.py @@ -1731,7 +1731,8 @@ def align_points(self, vmobject: VMobject): vmob_split_i = vmobject.get_subpath_split_indices() vmob_n_subpaths = vmob_split_i.shape[0] - # If they have the same subpaths, do not continue. + # If they have the same number of subpaths and the same number of points + # per subpath, do not continue. if self_n_subpaths == vmob_n_subpaths and (self_split_i == vmob_split_i).all(): return From dfb447884cb800d2512c06dfc59fc26ce1c25627 Mon Sep 17 00:00:00 2001 From: chopan Date: Sun, 9 Jul 2023 19:14:07 -0400 Subject: [PATCH 18/42] Minor optimization in VMobject.pointwise_become_partial --- manim/mobject/types/vectorized_mobject.py | 54 ++++++++++++++--------- 1 file changed, 32 insertions(+), 22 deletions(-) diff --git a/manim/mobject/types/vectorized_mobject.py b/manim/mobject/types/vectorized_mobject.py index 46973340d5..8b259e7ddd 100644 --- a/manim/mobject/types/vectorized_mobject.py +++ b/manim/mobject/types/vectorized_mobject.py @@ -1967,35 +1967,45 @@ def pointwise_become_partial( if a <= 0 and b >= 1: self.set_points(vmobject.points) return self - bezier_quads = vmobject.get_cubic_bezier_tuples() - num_cubics = len(bezier_quads) + num_curves = vmobject.get_num_curves() + if num_curves == 0: + self.clear_points() + return self - # The following two lines will compute which bezier curves of the given mobject need to be processed. - # The residue basically indicates de proportion of the selected bezier curve that have to be selected. 
- # Ex : if lower_index is 3, and lower_residue is 0.4, then the algorithm will append to the points 0.4 of the third bezier curve - lower_index, lower_residue = integer_interpolate(0, num_cubics, a) - upper_index, upper_residue = integer_interpolate(0, num_cubics, b) + # The following two lines will compute which Bezier curves of the given mobject need to be processed. + # The residue basically indicates the proportion of the selected Bezier curve that has to be selected. + # + # Example: if num_curves is 10, a is 3.4 and b is 7.8, then: + # - lower_index is 3 and lower_residue is 0.4, which means the algorithm will look at the 3rd Bezier + # and select its part from t=0.4 to t=1. + # - upper_index is 7 and upper_residue is 0.8, which means the algorithm will look at the 7th Bezier + # and select its part from t=0 to t=0.8. + lower_index, lower_residue = integer_interpolate(0, num_curves, a) + upper_index, upper_residue = integer_interpolate(0, num_curves, b) - self.clear_points() - if num_cubics == 0: - return self + nppcc = self.n_points_per_cubic_curve if lower_index == upper_index: - self.append_points( - partial_bezier_points( - bezier_quads[lower_index], - lower_residue, - upper_residue, - ), + self.points = partial_bezier_points( + vmobject.points[nppcc * lower_index : nppcc * lower_index + nppcc], + lower_residue, + upper_residue, ) else: - self.append_points( - partial_bezier_points(bezier_quads[lower_index], lower_residue, 1), + self.points = np.empty((nppcc * (upper_index - lower_index + 1), self.dim)) + self.points[:nppcc] = partial_bezier_points( + vmobject.points[nppcc * lower_index : nppcc * lower_index + nppcc], + lower_residue, + 1, ) - for quad in bezier_quads[lower_index + 1 : upper_index]: - self.append_points(quad) - self.append_points( - partial_bezier_points(bezier_quads[upper_index], 0, upper_residue), + self.points[nppcc:-nppcc] = vmobject.points[ + nppcc * (lower_index + 1) : nppcc * upper_index + ] + self.points[-nppcc:] = partial_bezier_points( + vmobject.points[nppcc * upper_index : nppcc * upper_index + nppcc], + 0, + upper_residue, ) + return self def get_subcurve(self, a: float, b: float) -> VMobject: From 927b359c66cf8f5b15fd7dbad61e37d036d31b0c Mon Sep 17 00:00:00 2001 From: chopan Date: Sun, 9 Jul 2023 19:19:29 -0400 Subject: [PATCH 19/42] Added commentaries to VMobject.pointwise_become_partial --- manim/mobject/types/vectorized_mobject.py | 16 +++++++++++++--- 1 file changed, 13 insertions(+), 3 deletions(-) diff --git a/manim/mobject/types/vectorized_mobject.py b/manim/mobject/types/vectorized_mobject.py index 8b259e7ddd..ab26c15e2d 100644 --- a/manim/mobject/types/vectorized_mobject.py +++ b/manim/mobject/types/vectorized_mobject.py @@ -1961,9 +1961,9 @@ def pointwise_become_partial( """ assert isinstance(vmobject, VMobject) # Partial curve includes three portions: - # - A middle section, which matches the curve exactly - # - A start, which is some ending portion of an inner cubic - # - An end, which is the starting portion of a later inner cubic + # - A middle section, which matches the curve exactly. + # - A start, which is some ending portion of an inner cubic. + # - An end, which is the starting portion of a later inner cubic. if a <= 0 and b >= 1: self.set_points(vmobject.points) return self @@ -1984,22 +1984,32 @@ def pointwise_become_partial( upper_index, upper_residue = integer_interpolate(0, num_curves, b) nppcc = self.n_points_per_cubic_curve + # If both indices coincide, get a part of a single Bezier curve. 
if lower_index == upper_index: + # Look at the "lower_index"-th Bezier curve and select its part from + # t=lower_residue to t=upper_residue. self.points = partial_bezier_points( vmobject.points[nppcc * lower_index : nppcc * lower_index + nppcc], lower_residue, upper_residue, ) else: + # Allocate space for (upper_index-lower_index+1) Bezier curves. self.points = np.empty((nppcc * (upper_index - lower_index + 1), self.dim)) + # Look at the "lower_index"-th Bezier curve and select its part from + # t=lower_residue to t=1. This is the first curve in self.points. self.points[:nppcc] = partial_bezier_points( vmobject.points[nppcc * lower_index : nppcc * lower_index + nppcc], lower_residue, 1, ) + # If there are more curves between the "lower_index"-th and the + # "upper_index"-th Beziers, add them all to self.points. self.points[nppcc:-nppcc] = vmobject.points[ nppcc * (lower_index + 1) : nppcc * upper_index ] + # Look at the "upper_index"-th Bezier curve and select its part from + # t=0 to t=upper_residue. This is the last curve in self.points. self.points[-nppcc:] = partial_bezier_points( vmobject.points[nppcc * upper_index : nppcc * upper_index + nppcc], 0, From 48dd7e0addceef3aeaf21b0f325a59b250d06229 Mon Sep 17 00:00:00 2001 From: chopan Date: Wed, 12 Jul 2023 00:08:11 -0400 Subject: [PATCH 20/42] Fixed VMobject.get_arc_length using undefined num_curves - whoops --- manim/mobject/types/vectorized_mobject.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/manim/mobject/types/vectorized_mobject.py b/manim/mobject/types/vectorized_mobject.py index ab26c15e2d..aac5048fe3 100644 --- a/manim/mobject/types/vectorized_mobject.py +++ b/manim/mobject/types/vectorized_mobject.py @@ -1686,7 +1686,7 @@ def get_arc_length(self, sample_points_per_curve: int | None = None) -> float: The length of the :class:`VMobject`. """ - return sum([self.get_nth_curve_length(n) for n in range(num_curves)]) + return sum([self.get_nth_curve_length(n) for n in range(self.get_num_curves())]) # Alignment def align_points(self, vmobject: VMobject): From d607913548e4b7d2a791cfbc9a6bb3272f969443 Mon Sep 17 00:00:00 2001 From: chopan Date: Sat, 15 Jul 2023 22:19:04 -0400 Subject: [PATCH 21/42] Rolled back minor details --- manim/mobject/types/vectorized_mobject.py | 182 +++++++++++++--------- 1 file changed, 109 insertions(+), 73 deletions(-) diff --git a/manim/mobject/types/vectorized_mobject.py b/manim/mobject/types/vectorized_mobject.py index aac5048fe3..0cda304cbf 100644 --- a/manim/mobject/types/vectorized_mobject.py +++ b/manim/mobject/types/vectorized_mobject.py @@ -123,6 +123,7 @@ def __init__( self.shade_in_3d = shade_in_3d self.tolerance_for_point_equality = tolerance_for_point_equality self.n_points_per_cubic_curve = n_points_per_cubic_curve + self.bezier_alphas = np.linspace(0, 1, n_points_per_cubic_curve) super().__init__(**kwargs) if fill_color: @@ -135,10 +136,6 @@ def __init__( def n_points_per_curve(self): return self.n_points_per_cubic_curve - @property - def n_points(self): - return self.points.shape[0] - def get_group_class(self): return VGroup @@ -630,7 +627,7 @@ def resize_points( (the target size) to a Numpy array. The default implementation is based on Numpy's ``resize`` function. """ - if new_length != self.n_points: + if new_length != len(self.points): self.points = resize_func(self.points, new_length) return self @@ -673,9 +670,9 @@ def clear_points(self): def append_points(self, new_points): # TODO, check that number new points is a multiple of 4? 
- # or else that if self.n_points % 4 == 1, then - # self.n_points % 4 == 3? - n = self.n_points + # or else that if len(self.points) % 4 == 1, then + # len(self.points) % 4 == 3? + n = len(self.points) points = np.empty((n + len(new_points), self.dim)) points[:n] = self.points points[n:] = new_points @@ -683,11 +680,13 @@ def append_points(self, new_points): return self def start_new_path(self, point): - if self.n_points % 4 != 0: + n_points = len(self.points) + nppcc = self.n_points_per_cubic_curve + if n_points % nppcc != 0: # close the open path by appending the last # start anchor sufficiently often last_anchor = self.get_start_anchors()[-1] - closure = [last_anchor] * (4 - (self.n_points % 4)) + closure = [last_anchor] * (nppcc - (n_points % nppcc)) self.append_points(closure + [point]) else: self.append_points([point]) @@ -700,7 +699,7 @@ def add_cubic_bezier_curve( handle2: np.ndarray, anchor2, ) -> None: - # TODO, check the self.n_points % 4 == 0? + # TODO, check the len(self.points) % 4 == 0? self.append_points([anchor1, handle1, handle2, anchor2]) def add_cubic_bezier_curves(self, curves): @@ -778,11 +777,10 @@ def add_line_to(self, point: np.ndarray): :class:`VMobject` ``self`` """ - nppcc = self.n_points_per_cubic_curve self.add_cubic_bezier_curve_to( *( - interpolate(self.get_last_point(), point, i / (nppcc - 1)) - for i in range(1, nppcc - 1) + interpolate(self.get_last_point(), point, a) + for a in self.bezier_alphas[1:-1] ), point, ) @@ -834,7 +832,7 @@ def add_smooth_curve_to(self, *points: np.array): def has_new_path_started(self): nppcc = self.n_points_per_cubic_curve # 4 # A new path starting is defined by a control point which is not part of a bezier subcurve. - return self.n_points % nppcc == 1 + return len(self.points) % nppcc == 1 def get_last_point(self): return self.points[-1] @@ -850,6 +848,8 @@ def close_path(self): def add_points_as_corners(self, points: np.ndarray) -> VMobject: points = np.asarray(points).reshape(-1, self.dim) if self.has_new_path_started(): + # Pop the last point from self.points and + # add it to start_corners start_corners = np.empty((len(points), self.dim)) start_corners[0] = self.points[-1] start_corners[1:] = points[:-1] @@ -863,11 +863,11 @@ def add_points_as_corners(self, points: np.ndarray) -> VMobject: new_points = np.empty((nppcc * len(start_corners), self.dim)) new_points[::nppcc] = start_corners new_points[nppcc - 1 :: nppcc] = end_corners - for i in range(1, nppcc - 1): + for i, a in enumerate(self.bezier_alphas): new_points[i::nppcc] = interpolate( start_corners, end_corners, - i / (nppcc - 1), + a, ) self.append_points(new_points) @@ -898,8 +898,8 @@ def set_points_as_corners(self, points: Sequence[float]): self.set_anchors_and_handles( start_anchors, *( - interpolate(start_anchors, end_anchors, i / (nppcc - 1)) - for i in range(1, nppcc - 1) + interpolate(start_anchors, end_anchors, a) + for a in self.bezier_alphas[1:-1] ), end_anchors, ) @@ -927,11 +927,11 @@ def change_anchor_mode(self, mode: str): # Every submobject will have its handles modified if mode == "jagged": # The following will make the handles aligned with the anchors, - # thus making the Bèzier curves straight lines + # thus making the Bézier curves straight lines starts = submob.get_start_anchors() ends = submob.get_end_anchors() - for i in range(1, nppcc - 1): - submob.points[1::nppcc] = interpolate(starts, ends, i / (nppcc - 1)) + for a in self.bezier_alphas: + submob.points[1::nppcc] = interpolate(starts, ends, a) elif mode == "smooth": # Divide into 
subpaths and for each subpath compute smooth handles. @@ -1003,7 +1003,7 @@ def scale_handle_to_anchor_distances(self, factor: float): ``self`` """ for submob in self.family_members_with_points(): - if submob.n_points < self.n_points_per_cubic_curve: + if len(submob.points) < self.n_points_per_cubic_curve: # The case that a bezier quad is not complete (there is no bezier curve as there is not enough control points.) continue a1, h1, h2, a2 = submob.get_anchors_and_handles() @@ -1031,16 +1031,21 @@ def consider_points_equals(self, p0, p1): bool Whether the points p0 and p1 are considered close or not. """ - rtol = 1.0e-5 # default from np.isclose() atol = self.tolerance_for_point_equality - # Case 1: single pair of points (returns a bool) - if p0.ndim == 1: - return not self.consider_points_different(p0, p1) + # Case 1: single pair of points + if p0.ndim == 1 and p1.ndim == 1: + if abs(p0[0] - p1[0]) > atol: + return False + if abs(p0[1] - p1[1]) > atol: + return False + if abs(p0[2] - p1[2]) > atol: + return False + return True - # Case 2: multiple pairs of points (returns a boolean ndarray) - else: - return ~self.consider_points_different(p0, p1) + # Case 2: multiple pairs of points + is_close = abs(p1 - p0) <= atol + return is_close[:, 0] & is_close[:, 1] & is_close[:, 2] def consider_points_equals_2d( self, p0: np.ndarray, p1: np.ndarray @@ -1062,12 +1067,22 @@ def consider_points_equals_2d( Whether the points p0 and p1 are considered close or not. """ # Case 1: single pair of points (returns a bool) - if p0.ndim == 1: - return not self.consider_points_different_2d(p0, p1) + atol = self.tolerance_for_point_equality - # Case 2: multiple pairs of points (returns a boolean ndarray) - else: - return ~self.consider_points_different_2d(p0, p1) + # Case 1: single pair of points + if p0.ndim == 1 and p1.ndim == 1: + if abs(p0[0] - p1[0]) > atol: + return False + if abs(p0[1] - p1[1]) > atol: + return False + return True + + # Case 2: multiple pairs of points + # Ensure that we're only working with 2D components + p0 = p0.reshape(-1, self.dim)[:, :2] + p1 = p1.reshape(-1, self.dim)[:, :2] + is_close = abs(p1 - p0) <= atol + return is_close[:, 0] & is_close[:, 1] def consider_points_different(self, p0, p1): """Determine if two points are distant enough to be considered different. @@ -1085,23 +1100,21 @@ def consider_points_different(self, p0, p1): bool Whether the points p0 and p1 are considered different or not. """ - rtol = 1.0e-5 # default from np.isclose() atol = self.tolerance_for_point_equality # Case 1: single pair of points - if p0.ndim == 1: - if abs(p0[0] - p1[0]) > atol + rtol * abs(p1[0]): + if p0.ndim == 1 and p1.ndim == 1: + if abs(p0[0] - p1[0]) > atol: return True - if abs(p0[1] - p1[1]) > atol + rtol * abs(p1[1]): + if abs(p0[1] - p1[1]) > atol: return True - if abs(p0[2] - p1[2]) > atol + rtol * abs(p1[2]): + if abs(p0[2] - p1[2]) > atol: return True return False # Case 2: multiple pairs of points - else: - is_far = abs(p1 - p0) > atol + rtol * abs(p1) - return is_far[:, 0] | is_far[:, 1] | is_far[:, 2] + is_far = abs(p1 - p0) > atol + return is_far[:, 0] | is_far[:, 1] | is_far[:, 2] def consider_points_different_2d(self, p0: np.ndarray, p1: np.ndarray) -> bool: """Determine if two points are distant enough to be considered different. @@ -1120,24 +1133,22 @@ def consider_points_different_2d(self, p0: np.ndarray, p1: np.ndarray) -> bool: bool Whether the points p0 and p1 are considered different or not. 
""" - rtol = 1.0e-5 # default from np.isclose() atol = self.tolerance_for_point_equality # Case 1: single pair of points - if p0.ndim == 1: - if abs(p0[0] - p1[0]) > atol + rtol * abs(p1[0]): + if p0.ndim == 1 and p1.ndim == 1: + if abs(p0[0] - p1[0]) > atol: return True - if abs(p0[1] - p1[1]) > atol + rtol * abs(p1[1]): + if abs(p0[1] - p1[1]) > atol: return True return False # Case 2: multiple pairs of points - else: - # Ensure that we're only working with 2D components - p0 = p0[:, :2] - p1 = p1[:, :2] - is_far = abs(p1 - p0) > atol + rtol * abs(p1) - return is_far[:, 0] | is_far[:, 1] + # Ensure that we're only working with 2D components + p0 = p0.reshape(-1, self.dim)[:, :2] + p1 = p1.reshape(-1, self.dim)[:, :2] + is_far = abs(p1 - p0) > atol + return is_far[:, 0] | is_far[:, 1] # Information about line def get_cubic_bezier_tuples_from_points(self, points: np.ndarray) -> np.ndarray: @@ -1161,11 +1172,10 @@ def gen_cubic_bezier_tuples_from_points(self, points: np.ndarray) -> np.ndarray: typing.Tuple Bezier control points. """ - points = np.array(points) + points = np.asarray(points) nppcc = self.n_points_per_cubic_curve n_curves = points.shape[0] // nppcc - n_points = nppcc * n_curves - points = points[:n_points] + points = points[: nppcc * n_curves] return points.reshape(n_curves, nppcc, self.dim) @@ -1231,13 +1241,16 @@ def get_subpaths(self) -> tuple: return self.get_subpaths_from_points(self.points) def get_subpath_split_indices_from_points( - self, points: np.ndarray, n_dims: int = 3 + self, points: np.ndarray, n_dims: int = 3, strip_null_end_curves: bool = False ) -> np.ndarray: nppcc = self.n_points_per_cubic_curve starts = points[::nppcc] ends = points[nppcc - 1 :: nppcc] # This ensures that there are no more starts than ends. - n_curves = ends.shape[0] + # TODO: ends.shape[0] would be more efficient, but some test cases regarding + # Flash and ShowPassing flash expect a Python list instead of an ndarray, so + # ends.shape[0] breaks those test cases. Fix these inconsistencies. + n_curves = len(ends) starts = starts[:n_curves] # DO NOT DELETE THIS! 
Or else, a [[0 0]] ndarray will be returned @@ -1246,9 +1259,13 @@ def get_subpath_split_indices_from_points( return np.empty((0, 2), dtype=int) if n_dims == 2: - is_far = self.consider_points_different_2d(starts[1:], ends[:-1]) + is_equal = self.consider_points_equals_2d + is_different = self.consider_points_different_2d else: - is_far = self.consider_points_different(starts[1:], ends[:-1]) + is_equal = self.consider_points_equals + is_different = self.consider_points_different + + is_far = is_different(starts[1:], ends[:-1]) aux = np.arange(1, is_far.shape[0] + 1)[is_far] split_indices = np.empty((aux.shape[0] + 1, 2), dtype=int) @@ -1256,12 +1273,26 @@ def get_subpath_split_indices_from_points( split_indices[1:, 0] = aux split_indices[:-1, 1] = aux split_indices[-1, 1] = n_curves + + if strip_null_end_curves: + for i in range(split_indices.shape[0]): + start_i, end_i = split_indices[i] + while end_i > start_i + 1 and is_equal( + points[nppcc * (end_i - 1) : nppcc * end_i], ends[end_i - 2] + ): + end_i -= 1 + split_indices[i, 1] = end_i + split_indices *= self.n_points_per_cubic_curve return split_indices - def get_subpath_split_indices(self) -> np.ndarray: - return self.get_subpath_split_indices_from_points(self.points) + def get_subpath_split_indices( + self, n_dims: int = 3, strip_null_end_curves: bool = False + ) -> np.ndarray: + return self.get_subpath_split_indices_from_points( + self.points, n_dims, strip_null_end_curves + ) # Curve functions def _init_curve_memory(self, sample_points: int = 10): @@ -1285,7 +1316,7 @@ def _update_curve_memory(self, sample_points: int = 10): curr_points = self.points memo_points = self.memory["piece_curves"]["points"] - curr_n_points = self.n_points + curr_n_points = len(self.points) memo_n_points = memo_points.shape[0] nppcc = self.n_points_per_cubic_curve @@ -1387,8 +1418,8 @@ def get_nth_curve_length_pieces( sample_points = 10 curve = self.get_nth_curve_function(n) - t = np.linspace(0, 1, sample_points) - points = curve(t.reshape(-1, 1)) + t_values = np.array([i / (sample_points - 1) for i in range(sample_points)]) + points = curve(t_values.reshape(-1, 1)) diffs = points[1:] - points[:-1] norms = np.linalg.norm(diffs, axis=1) @@ -1452,7 +1483,7 @@ def get_num_curves(self) -> int: number of curves. of the vmobject. """ nppcc = self.n_points_per_cubic_curve - return self.n_points // nppcc + return len(self.points) // nppcc def get_curve_functions( self, @@ -1533,7 +1564,8 @@ def point_from_proportion(self, alpha: float) -> np.ndarray: num_curves = self.get_num_curves() # Binary search - left, right = 0, num_curves - 1 + left = 0 + right = num_curves - 1 while right > left: mid = (left + right) // 2 if acc_lengths[mid] >= target_length: @@ -1649,8 +1681,9 @@ def get_anchors(self) -> np.ndarray: """ if self.points.shape[0] == 1: return self.points - anchors = np.empty((2 * self.get_num_curves(), self.dim)) - anchors[0::2] = self.get_start_anchors() + num_curves = self.get_num_curves() + anchors = np.empty((2 * num_curves, self.dim)) + anchors[0::2] = self.get_start_anchors()[:num_curves] anchors[1::2] = self.get_end_anchors() return anchors @@ -1659,7 +1692,7 @@ def get_points_defining_boundary(self): # Probably returns all anchors, but this is weird regarding the name of the method. 
family = self.get_family() n_anchors_per_submob = [ - (submob.n_points // submob.n_points_per_cubic_curve) * 2 + 2 * (len(submob.points) // submob.n_points_per_cubic_curve) for submob in family ] acc_n_anchors = np.add.accumulate(n_anchors_per_submob) @@ -1686,7 +1719,10 @@ def get_arc_length(self, sample_points_per_curve: int | None = None) -> float: The length of the :class:`VMobject`. """ - return sum([self.get_nth_curve_length(n) for n in range(self.get_num_curves())]) + return sum( + self.get_nth_curve_length(n, sample_points=sample_points_per_curve) + for n in range(self.get_num_curves()) + ) # Alignment def align_points(self, vmobject: VMobject): @@ -1726,9 +1762,9 @@ def align_points(self, vmobject: VMobject): vmobject.add_line_to(vmobject.get_last_point()) # Figure out what the subpaths are. - self_split_i = self.get_subpath_split_indices() + self_split_i = self.get_subpath_split_indices(strip_null_end_curves=True) self_n_subpaths = self_split_i.shape[0] - vmob_split_i = vmobject.get_subpath_split_indices() + vmob_split_i = vmobject.get_subpath_split_indices(strip_null_end_curves=True) vmob_n_subpaths = vmob_split_i.shape[0] # If they have the same number of subpaths and the same number of points From af351e9538212d43ce09d3be1943ebb718ec4f2b Mon Sep 17 00:00:00 2001 From: chopan Date: Sat, 15 Jul 2023 22:41:32 -0400 Subject: [PATCH 22/42] Fixed small issue in VMobject.get_subpath_split_indices_from_points --- manim/mobject/types/vectorized_mobject.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/manim/mobject/types/vectorized_mobject.py b/manim/mobject/types/vectorized_mobject.py index 0cda304cbf..5aec4f8b76 100644 --- a/manim/mobject/types/vectorized_mobject.py +++ b/manim/mobject/types/vectorized_mobject.py @@ -1277,8 +1277,11 @@ def get_subpath_split_indices_from_points( if strip_null_end_curves: for i in range(split_indices.shape[0]): start_i, end_i = split_indices[i] - while end_i > start_i + 1 and is_equal( - points[nppcc * (end_i - 1) : nppcc * end_i], ends[end_i - 2] + while ( + end_i > start_i + 1 + and is_equal( + points[nppcc * (end_i - 1) : nppcc * end_i], ends[end_i - 2] + ).all() ): end_i -= 1 split_indices[i, 1] = end_i From d737adaa875bbe69439fae95fea8b28f194f483b Mon Sep 17 00:00:00 2001 From: chopan Date: Sat, 15 Jul 2023 22:59:39 -0400 Subject: [PATCH 23/42] Converted p0 and p1 to ndarrays in VMobject.consider_points_equals and similar methods --- manim/mobject/types/vectorized_mobject.py | 39 +++++++++++++++++------ 1 file changed, 30 insertions(+), 9 deletions(-) diff --git a/manim/mobject/types/vectorized_mobject.py b/manim/mobject/types/vectorized_mobject.py index 5aec4f8b76..ce798495b0 100644 --- a/manim/mobject/types/vectorized_mobject.py +++ b/manim/mobject/types/vectorized_mobject.py @@ -1015,7 +1015,11 @@ def scale_handle_to_anchor_distances(self, factor: float): return self # - def consider_points_equals(self, p0, p1): + def consider_points_equals( + self, + p0: Iterable[float] | Iterable[Iterable[float]], + p1: Iterable[float] | Iterable[Iterable[float]], + ) -> bool | np.ndarray: """Determine if two points are close enough to be considered equal. This function reimplements np.allclose, because repeated calling of @@ -1028,9 +1032,11 @@ def consider_points_equals(self, p0, p1): Returns ------- - bool + bool | np.ndarray Whether the points p0 and p1 are considered close or not. 
""" + p0 = np.asarray(p0) + p1 = np.asarray(p1) atol = self.tolerance_for_point_equality # Case 1: single pair of points @@ -1048,7 +1054,9 @@ def consider_points_equals(self, p0, p1): return is_close[:, 0] & is_close[:, 1] & is_close[:, 2] def consider_points_equals_2d( - self, p0: np.ndarray, p1: np.ndarray + self, + p0: Iterable[float] | Iterable[Iterable[float]], + p1: Iterable[float] | Iterable[Iterable[float]], ) -> bool | np.ndarray: """Determine if two points are close enough to be considered equal. @@ -1063,10 +1071,11 @@ def consider_points_equals_2d( Returns ------- - bool + bool | np.ndarray Whether the points p0 and p1 are considered close or not. """ - # Case 1: single pair of points (returns a bool) + p0 = np.asarray(p0) + p1 = np.asarray(p1) atol = self.tolerance_for_point_equality # Case 1: single pair of points @@ -1084,7 +1093,11 @@ def consider_points_equals_2d( is_close = abs(p1 - p0) <= atol return is_close[:, 0] & is_close[:, 1] - def consider_points_different(self, p0, p1): + def consider_points_different( + self, + p0: Iterable[float] | Iterable[Iterable[float]], + p1: Iterable[float] | Iterable[Iterable[float]], + ) -> bool | np.ndarray: """Determine if two points are distant enough to be considered different. This function reimplements np.allclose, because repeated calling of @@ -1097,9 +1110,11 @@ def consider_points_different(self, p0, p1): Returns ------- - bool + bool | np.ndarray Whether the points p0 and p1 are considered different or not. """ + p0 = np.asarray(p0) + p1 = np.asarray(p1) atol = self.tolerance_for_point_equality # Case 1: single pair of points @@ -1116,7 +1131,11 @@ def consider_points_different(self, p0, p1): is_far = abs(p1 - p0) > atol return is_far[:, 0] | is_far[:, 1] | is_far[:, 2] - def consider_points_different_2d(self, p0: np.ndarray, p1: np.ndarray) -> bool: + def consider_points_different_2d( + self, + p0: Iterable[float] | Iterable[Iterable[float]], + p1: Iterable[float] | Iterable[Iterable[float]], + ) -> bool | np.ndarray: """Determine if two points are distant enough to be considered different. This uses the algorithm from np.isclose(), but expanded here for the @@ -1130,9 +1149,11 @@ def consider_points_different_2d(self, p0: np.ndarray, p1: np.ndarray) -> bool: Returns ------- - bool + bool | np.ndarray Whether the points p0 and p1 are considered different or not. """ + p0 = np.asarray(p0) + p1 = np.asarray(p1) atol = self.tolerance_for_point_equality # Case 1: single pair of points From 1f73421521697c99c1f4e7abaa5b9eef07c37dea Mon Sep 17 00:00:00 2001 From: chopan Date: Sun, 16 Jul 2023 00:43:12 -0400 Subject: [PATCH 24/42] Added border case in VMobject.get_subpath_split_indices_from_points when subpath has a single curve --- manim/mobject/types/vectorized_mobject.py | 5 +++-- manim/utils/testing/_frames_testers.py | 4 ++-- 2 files changed, 5 insertions(+), 4 deletions(-) diff --git a/manim/mobject/types/vectorized_mobject.py b/manim/mobject/types/vectorized_mobject.py index ce798495b0..4b5023b01c 100644 --- a/manim/mobject/types/vectorized_mobject.py +++ b/manim/mobject/types/vectorized_mobject.py @@ -1278,6 +1278,8 @@ def get_subpath_split_indices_from_points( # for an empty list of points, when it should instead return []. 
if n_curves == 0: return np.empty((0, 2), dtype=int) + if n_curves == 1: + return np.array([[0, nppcc]]) if n_dims == 2: is_equal = self.consider_points_equals_2d @@ -1716,7 +1718,7 @@ def get_points_defining_boundary(self): # Probably returns all anchors, but this is weird regarding the name of the method. family = self.get_family() n_anchors_per_submob = [ - 2 * (len(submob.points) // submob.n_points_per_cubic_curve) + 2 * submob.get_num_curves() if submob.points.shape[0] != 1 else 1 for submob in family ] acc_n_anchors = np.add.accumulate(n_anchors_per_submob) @@ -1726,7 +1728,6 @@ def get_points_defining_boundary(self): for submob, end_i in zip(family, acc_n_anchors): boundary[start_i:end_i] = submob.get_anchors() start_i = end_i - return boundary def get_arc_length(self, sample_points_per_curve: int | None = None) -> float: diff --git a/manim/utils/testing/_frames_testers.py b/manim/utils/testing/_frames_testers.py index be0bc38447..deb5ea0d39 100644 --- a/manim/utils/testing/_frames_testers.py +++ b/manim/utils/testing/_frames_testers.py @@ -10,8 +10,8 @@ from ._show_diff import show_diff_helper -FRAME_ABSOLUTE_TOLERANCE = 1.01 -FRAME_MISMATCH_RATIO_TOLERANCE = 1e-5 +FRAME_ABSOLUTE_TOLERANCE = 1.03 +FRAME_MISMATCH_RATIO_TOLERANCE = 3e-4 class _FramesTester: From 8d7c17b3b68d8fd418470074d016b927c700d2f5 Mon Sep 17 00:00:00 2001 From: chopan Date: Sun, 16 Jul 2023 00:43:12 -0400 Subject: [PATCH 25/42] Added border case in VMobject.get_subpath_split_indices_from_points when subpath has a single curve --- manim/mobject/types/vectorized_mobject.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/manim/mobject/types/vectorized_mobject.py b/manim/mobject/types/vectorized_mobject.py index ce798495b0..4b5023b01c 100644 --- a/manim/mobject/types/vectorized_mobject.py +++ b/manim/mobject/types/vectorized_mobject.py @@ -1278,6 +1278,8 @@ def get_subpath_split_indices_from_points( # for an empty list of points, when it should instead return []. if n_curves == 0: return np.empty((0, 2), dtype=int) + if n_curves == 1: + return np.array([[0, nppcc]]) if n_dims == 2: is_equal = self.consider_points_equals_2d @@ -1716,7 +1718,7 @@ def get_points_defining_boundary(self): # Probably returns all anchors, but this is weird regarding the name of the method. 
family = self.get_family() n_anchors_per_submob = [ - 2 * (len(submob.points) // submob.n_points_per_cubic_curve) + 2 * submob.get_num_curves() if submob.points.shape[0] != 1 else 1 for submob in family ] acc_n_anchors = np.add.accumulate(n_anchors_per_submob) @@ -1726,7 +1728,6 @@ def get_points_defining_boundary(self): for submob, end_i in zip(family, acc_n_anchors): boundary[start_i:end_i] = submob.get_anchors() start_i = end_i - return boundary def get_arc_length(self, sample_points_per_curve: int | None = None) -> float: From 22c123fa16c1402dae9482686a59c176e69e800b Mon Sep 17 00:00:00 2001 From: chopan Date: Sun, 16 Jul 2023 01:07:03 -0400 Subject: [PATCH 26/42] Updated commentaries in VMobject.pointwise_become_partial --- manim/mobject/types/vectorized_mobject.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/manim/mobject/types/vectorized_mobject.py b/manim/mobject/types/vectorized_mobject.py index 4b5023b01c..c23825f222 100644 --- a/manim/mobject/types/vectorized_mobject.py +++ b/manim/mobject/types/vectorized_mobject.py @@ -2033,14 +2033,14 @@ def pointwise_become_partial( self.clear_points() return self - # The following two lines will compute which Bezier curves of the given mobject need to be processed. - # The residue basically indicates the proportion of the selected Bezier curve that has to be selected. + # The following two lines will compute which Bézier curves of the given Mobject must be processed. + # The residue indicates the proportion of the selected Bézier curve which must be selected. # - # Example: if num_curves is 10, a is 3.4 and b is 7.8, then: + # Example: if num_curves is 10, a is 0.34 and b is 0.78, then: # - lower_index is 3 and lower_residue is 0.4, which means the algorithm will look at the 3rd Bezier - # and select its part from t=0.4 to t=1. + # and select its part which ranges from t=0.4 to t=1. # - upper_index is 7 and upper_residue is 0.8, which means the algorithm will look at the 7th Bezier - # and select its part from t=0 to t=0.8. + # and select its part which ranges from t=0 to t=0.8. lower_index, lower_residue = integer_interpolate(0, num_curves, a) upper_index, upper_residue = integer_interpolate(0, num_curves, b) From 85418dbc57fbb069708b67104b305a5e5d2ae5f6 Mon Sep 17 00:00:00 2001 From: chopan Date: Sun, 16 Jul 2023 02:13:35 -0400 Subject: [PATCH 27/42] Fixed comment in VMobject.append_points --- manim/mobject/types/vectorized_mobject.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/manim/mobject/types/vectorized_mobject.py b/manim/mobject/types/vectorized_mobject.py index c23825f222..be58e7e20d 100644 --- a/manim/mobject/types/vectorized_mobject.py +++ b/manim/mobject/types/vectorized_mobject.py @@ -671,7 +671,7 @@ def clear_points(self): def append_points(self, new_points): # TODO, check that number new points is a multiple of 4? # or else that if len(self.points) % 4 == 1, then - # len(self.points) % 4 == 3? + # len(new_points) % 4 == 3? 
n = len(self.points) points = np.empty((n + len(new_points), self.dim)) points[:n] = self.points From 99ea08587a848fc87005d44f21cad5e4516ebe20 Mon Sep 17 00:00:00 2001 From: chopan Date: Sun, 16 Jul 2023 02:16:56 -0400 Subject: [PATCH 28/42] Removed binary_search import --- manim/mobject/types/vectorized_mobject.py | 1 - 1 file changed, 1 deletion(-) diff --git a/manim/mobject/types/vectorized_mobject.py b/manim/mobject/types/vectorized_mobject.py index be58e7e20d..99cc7705c7 100644 --- a/manim/mobject/types/vectorized_mobject.py +++ b/manim/mobject/types/vectorized_mobject.py @@ -41,7 +41,6 @@ from ...utils.color import BLACK, WHITE, color_to_rgba from ...utils.deprecation import deprecated from ...utils.iterables import make_even, resize_array, stretch_array_to_length, tuplify -from ...utils.simple_functions import binary_search from ...utils.space_ops import rotate_vector, shoelace_direction # TODO From c6ebc7804f3331d8fa524d44fb6df31d9fb159ed Mon Sep 17 00:00:00 2001 From: Benjamin Hackl Date: Sun, 16 Jul 2023 15:53:10 +0200 Subject: [PATCH 29/42] minor changes in formatting and language --- manim/mobject/types/vectorized_mobject.py | 19 +++++++++++++------ 1 file changed, 13 insertions(+), 6 deletions(-) diff --git a/manim/mobject/types/vectorized_mobject.py b/manim/mobject/types/vectorized_mobject.py index 99cc7705c7..e2a267ef38 100644 --- a/manim/mobject/types/vectorized_mobject.py +++ b/manim/mobject/types/vectorized_mobject.py @@ -888,7 +888,6 @@ def set_points_as_corners(self, points: Sequence[float]): :class:`VMobject` ``self`` """ - nppcc = self.n_points_per_cubic_curve points = np.asarray(points) start_anchors = points[:-1] end_anchors = points[1:] @@ -1021,8 +1020,11 @@ def consider_points_equals( ) -> bool | np.ndarray: """Determine if two points are close enough to be considered equal. - This function reimplements np.allclose, because repeated calling of - np.allclose for only 2 points is inefficient. + This function reimplements ``numpy.allclose``, because repeated calling of + ``np.allclose`` for only 2 points is inefficient. The tolerance is governed + by the :attr:`.tolerance_for_point_equality` attribute. + + Parameters ---------- p0 first point @@ -1261,7 +1263,10 @@ def get_subpaths(self) -> tuple: return self.get_subpaths_from_points(self.points) def get_subpath_split_indices_from_points( - self, points: np.ndarray, n_dims: int = 3, strip_null_end_curves: bool = False + self, + points: np.ndarray, + n_dims: int = 3, + strip_null_end_curves: bool = False, ) -> np.ndarray: nppcc = self.n_points_per_cubic_curve starts = points[::nppcc] @@ -1313,7 +1318,9 @@ def get_subpath_split_indices_from_points( return split_indices def get_subpath_split_indices( - self, n_dims: int = 3, strip_null_end_curves: bool = False + self, + n_dims: int = 3, + strip_null_end_curves: bool = False, ) -> np.ndarray: return self.get_subpath_split_indices_from_points( self.points, n_dims, strip_null_end_curves @@ -1349,7 +1356,7 @@ def _update_curve_memory(self, sample_points: int = 10): n_curves = n_points // nppcc n_points = n_points * nppcc - # Check if any Bezier had its points changed to recalculate its length. + # Check if any Bézier curve had its points changed to recalculate its length. neq = curr_points[:n_points] != memo_points[:n_points] # Collapse every 3D point group into a single value per point. 
neq = neq.reshape(-1, self.dim) From f64b874001046fb3241bec8dc2b59ad9c61e355b Mon Sep 17 00:00:00 2001 From: chopan Date: Sun, 16 Jul 2023 10:30:01 -0400 Subject: [PATCH 30/42] Corrected use of self.bezier_alphas in VMobject.change_anchor_mode --- manim/mobject/types/vectorized_mobject.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/manim/mobject/types/vectorized_mobject.py b/manim/mobject/types/vectorized_mobject.py index e2a267ef38..d76dd39b5d 100644 --- a/manim/mobject/types/vectorized_mobject.py +++ b/manim/mobject/types/vectorized_mobject.py @@ -928,7 +928,7 @@ def change_anchor_mode(self, mode: str): # thus making the Bézier curves straight lines starts = submob.get_start_anchors() ends = submob.get_end_anchors() - for a in self.bezier_alphas: + for a in self.bezier_alphas[1:-1]: submob.points[1::nppcc] = interpolate(starts, ends, a) elif mode == "smooth": From ef69e4f7512ad0bf7b65cb8c86668148d0e86eeb Mon Sep 17 00:00:00 2001 From: chopan Date: Sun, 16 Jul 2023 12:41:40 -0400 Subject: [PATCH 31/42] Fixed bad slicing when assigning handles in jagged mode in VMobject.change_anchor_mode --- manim/mobject/types/vectorized_mobject.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/manim/mobject/types/vectorized_mobject.py b/manim/mobject/types/vectorized_mobject.py index d76dd39b5d..556a7e01d9 100644 --- a/manim/mobject/types/vectorized_mobject.py +++ b/manim/mobject/types/vectorized_mobject.py @@ -928,8 +928,10 @@ def change_anchor_mode(self, mode: str): # thus making the Bézier curves straight lines starts = submob.get_start_anchors() ends = submob.get_end_anchors() - for a in self.bezier_alphas[1:-1]: - submob.points[1::nppcc] = interpolate(starts, ends, a) + for i in range(1, nppcc - 1): + submob.points[i::nppcc] = interpolate( + starts, ends, self.bezier_alphas[i] + ) elif mode == "smooth": # Divide into subpaths and for each subpath compute smooth handles. From 3fdb54afdae6793da83ef8737bec0c0ffe6c48f1 Mon Sep 17 00:00:00 2001 From: chopan Date: Sun, 16 Jul 2023 12:58:27 -0400 Subject: [PATCH 32/42] Corrected semantics and commentaries in VMobject.get_subpath_split_indices_from_points --- manim/mobject/types/vectorized_mobject.py | 36 +++++++++++++---------- 1 file changed, 21 insertions(+), 15 deletions(-) diff --git a/manim/mobject/types/vectorized_mobject.py b/manim/mobject/types/vectorized_mobject.py index 556a7e01d9..30d479bcc1 100644 --- a/manim/mobject/types/vectorized_mobject.py +++ b/manim/mobject/types/vectorized_mobject.py @@ -1274,33 +1274,39 @@ def get_subpath_split_indices_from_points( starts = points[::nppcc] ends = points[nppcc - 1 :: nppcc] # This ensures that there are no more starts than ends. - # TODO: ends.shape[0] would be more efficient, but some test cases regarding - # Flash and ShowPassing flash expect a Python list instead of an ndarray, so - # ends.shape[0] breaks those test cases. Fix these inconsistencies. + # TODO: ends.shape[0] would be more efficient, but some test cases + # regarding Flash and ShowPassingFlash expect a Python list instead of + # an ndarray, so ends.shape[0] breaks those test cases. + # Fix these inconsistencies. n_curves = len(ends) starts = starts[:n_curves] - # DO NOT DELETE THIS! Or else, a [[0 0]] ndarray will be returned - # for an empty list of points, when it should instead return []. 
+ # Zero curves case: if nothing was done to handle this, the statement + # split_indices = np.empty((diff_indices.shape[0] + 1, 2), dtype=int) + # and later statements would incorrectly generate the ndarray [[0 0]], + # which WILL break other methods. + # Instead, an empty (0, 2)-shaped ndarray must be returned immediately. if n_curves == 0: return np.empty((0, 2), dtype=int) + # Single curve case: are_points_different(starts[1:], ends[:-1]) will + # fail, so return immediately. The split indices are just [[0 nppcc]]. if n_curves == 1: return np.array([[0, nppcc]]) if n_dims == 2: - is_equal = self.consider_points_equals_2d - is_different = self.consider_points_different_2d + are_points_equal = self.consider_points_equals_2d + are_points_different = self.consider_points_different_2d else: - is_equal = self.consider_points_equals - is_different = self.consider_points_different + are_points_equal = self.consider_points_equals + are_points_different = self.consider_points_different - is_far = is_different(starts[1:], ends[:-1]) - aux = np.arange(1, is_far.shape[0] + 1)[is_far] + diff_bools = are_points_different(starts[1:], ends[:-1]) + diff_indices = np.arange(1, diff_bools.shape[0] + 1)[diff_bools] - split_indices = np.empty((aux.shape[0] + 1, 2), dtype=int) + split_indices = np.empty((diff_indices.shape[0] + 1, 2), dtype=int) split_indices[0, 0] = 0 - split_indices[1:, 0] = aux - split_indices[:-1, 1] = aux + split_indices[1:, 0] = diff_indices + split_indices[:-1, 1] = diff_indices split_indices[-1, 1] = n_curves if strip_null_end_curves: @@ -1308,7 +1314,7 @@ def get_subpath_split_indices_from_points( start_i, end_i = split_indices[i] while ( end_i > start_i + 1 - and is_equal( + and are_points_equal( points[nppcc * (end_i - 1) : nppcc * end_i], ends[end_i - 2] ).all() ): From e88cd64fb8146894850db425d50477cf19ee7a03 Mon Sep 17 00:00:00 2001 From: chopan Date: Sun, 16 Jul 2023 13:37:55 -0400 Subject: [PATCH 33/42] Fixed _init_curve_memory and _update_curve_memory using references instead of copies, and other mistakes --- manim/mobject/types/vectorized_mobject.py | 42 ++++++++++++++--------- 1 file changed, 26 insertions(+), 16 deletions(-) diff --git a/manim/mobject/types/vectorized_mobject.py b/manim/mobject/types/vectorized_mobject.py index 30d479bcc1..fa5d356d94 100644 --- a/manim/mobject/types/vectorized_mobject.py +++ b/manim/mobject/types/vectorized_mobject.py @@ -1342,7 +1342,7 @@ def _init_curve_memory(self, sample_points: int = 10): ) self.memory["piece_curves"] = { - "points": self.points, + "points": self.points.copy(), "sample_points": sample_points, "lengths": lengths, "acc_lengths": np.add.accumulate(lengths), @@ -1371,7 +1371,7 @@ def _update_curve_memory(self, sample_points: int = 10): neq2 = neq[:, 0] for i in range(1, self.dim): neq2 |= neq[:, i] - # Collapse every group of 4 values into a single value per curve. + # Collapse every group of 4 (or nppcc) values into a single value per curve. 
neq2 = neq2.reshape(-1, nppcc) differences = neq2[:, 0] for i in range(1, nppcc): @@ -1379,30 +1379,40 @@ def _update_curve_memory(self, sample_points: int = 10): differences = np.arange(n_curves)[differences] # If the amount of points has changed, adjust lengths - curr_n_curves = curr_n_points / nppcc - memo_n_curves = memo_n_points / nppcc - if curr_n_points < memo_n_points: - new_lengths = self.memory["piece_curves"]["lengths"][:curr_n_curves] - self.memory["piece_curves"]["lengths"] = new_lengths - elif curr_n_points > memo_n_points: + curr_n_curves = curr_n_points // nppcc + memo_n_curves = memo_n_points // nppcc + if curr_n_points > memo_n_points: new_lengths = np.empty(curr_n_curves) - new_lengths[:memo_n_curves] = self.memory["piece_curves"] + new_lengths[:memo_n_curves] = self.memory["piece_curves"]["lengths"] new_lengths[memo_n_curves:] = [ self.get_nth_curve_length(n, sample_points) for n in range(memo_n_curves, curr_n_curves) ] + new_lengths[differences] = [ + self.get_nth_curve_length(n, sample_points) for n in differences + ] self.memory["piece_curves"]["lengths"] = new_lengths - - # Update memo, recalculating only the lengths which have changed - self.memory["piece_curves"]["points"] = curr_points - self.memory["piece_curves"]["lengths"][differences] = [ - self.get_nth_curve_length(n) for n in differences - ] - if differences.shape[0] > 0: self.memory["piece_curves"]["acc_lengths"] = np.add.accumulate( self.memory["piece_curves"]["lengths"] ) + else: + new_lengths = self.memory["piece_curves"]["lengths"][:curr_n_curves] + new_lengths[differences] = [ + self.get_nth_curve_length(n, sample_points) for n in differences + ] + self.memory["piece_curves"]["lengths"] = new_lengths + if differences.shape[0] == 0: + self.memory["piece_curves"]["acc_lengths"] = self.memory[ + "piece_curves" + ]["acc_lengths"][:curr_n_curves] + else: + self.memory["piece_curves"]["acc_lengths"] = np.add.accumulate( + self.memory["piece_curves"]["lengths"] + ) + + self.memory["piece_curves"]["points"] = curr_points.copy() + def get_nth_curve_points(self, n: int) -> np.ndarray: """Returns the points defining the nth curve of the vmobject. 
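The curve-length cache maintained by _init_curve_memory/_update_curve_memory stores a copy of the points together with the per-curve arc lengths and their running sum, so that only curves whose control points actually changed need to be re-measured. A minimal, self-contained sketch of the lookup side of that idea (toy lengths and a hypothetical locate() helper, not the actual Manim code): the accumulated lengths are searched with bisect.bisect_left to find which curve contains the target arc length, and the leftover length is converted into that curve's local parameter t.

    from bisect import bisect_left
    import numpy as np

    lengths = np.array([2.0, 1.0, 3.0])       # pretend arc lengths of 3 cubic curves
    acc_lengths = np.add.accumulate(lengths)  # [2.0, 3.0, 6.0]

    def locate(alpha: float) -> tuple[int, float]:
        # Map a global proportion alpha in [0, 1] to (curve index, local t).
        target = alpha * acc_lengths[-1]
        i = bisect_left(acc_lengths, target)
        start = 0.0 if i == 0 else acc_lengths[i - 1]
        return i, (target - start) / lengths[i]

    print(locate(0.5))  # (1, 1.0): half the total length falls at the end of curve 1

This mirrors, with made-up numbers instead of sampled Bézier lengths, what the later bisect_left change does inside point_from_proportion.
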
From 7d3d477dc3cd8e9f0cc54ef293e393f77f89a635 Mon Sep 17 00:00:00 2001 From: chopan Date: Sun, 16 Jul 2023 14:11:56 -0400 Subject: [PATCH 34/42] Added missing check to return earlier if points are exactly the same in VMobject._update_curve_memory --- manim/mobject/types/vectorized_mobject.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/manim/mobject/types/vectorized_mobject.py b/manim/mobject/types/vectorized_mobject.py index fa5d356d94..b32c325ba1 100644 --- a/manim/mobject/types/vectorized_mobject.py +++ b/manim/mobject/types/vectorized_mobject.py @@ -1353,13 +1353,12 @@ def _update_curve_memory(self, sample_points: int = 10): self._init_curve_memory(sample_points) return + nppcc = self.n_points_per_cubic_curve curr_points = self.points memo_points = self.memory["piece_curves"]["points"] - curr_n_points = len(self.points) memo_n_points = memo_points.shape[0] - nppcc = self.n_points_per_cubic_curve n_points = min(curr_n_points, memo_n_points) n_curves = n_points // nppcc n_points = n_points * nppcc @@ -1378,6 +1377,9 @@ def _update_curve_memory(self, sample_points: int = 10): differences |= neq2[:, i] differences = np.arange(n_curves)[differences] + if curr_n_points == memo_n_points and differences.shape[0] == 0: + return + # If the amount of points has changed, adjust lengths curr_n_curves = curr_n_points // nppcc memo_n_curves = memo_n_points // nppcc From 2fdb8791a76dd6b998f681d8be4d2a26063e3917 Mon Sep 17 00:00:00 2001 From: chopan Date: Sun, 16 Jul 2023 19:07:03 -0400 Subject: [PATCH 35/42] Removed consider_points_different and added docstrings to VMobject.get_subpath_split_indices --- manim/mobject/types/vectorized_mobject.py | 94 ++++------------------- 1 file changed, 13 insertions(+), 81 deletions(-) diff --git a/manim/mobject/types/vectorized_mobject.py b/manim/mobject/types/vectorized_mobject.py index b32c325ba1..33d59ba8c9 100644 --- a/manim/mobject/types/vectorized_mobject.py +++ b/manim/mobject/types/vectorized_mobject.py @@ -1096,84 +1096,6 @@ def consider_points_equals_2d( is_close = abs(p1 - p0) <= atol return is_close[:, 0] & is_close[:, 1] - def consider_points_different( - self, - p0: Iterable[float] | Iterable[Iterable[float]], - p1: Iterable[float] | Iterable[Iterable[float]], - ) -> bool | np.ndarray: - """Determine if two points are distant enough to be considered different. - - This function reimplements np.allclose, because repeated calling of - np.allclose for only 2 points is inefficient. - ---------- - p0 - first point - p1 - second point - - Returns - ------- - bool | np.ndarray - Whether the points p0 and p1 are considered different or not. - """ - p0 = np.asarray(p0) - p1 = np.asarray(p1) - atol = self.tolerance_for_point_equality - - # Case 1: single pair of points - if p0.ndim == 1 and p1.ndim == 1: - if abs(p0[0] - p1[0]) > atol: - return True - if abs(p0[1] - p1[1]) > atol: - return True - if abs(p0[2] - p1[2]) > atol: - return True - return False - - # Case 2: multiple pairs of points - is_far = abs(p1 - p0) > atol - return is_far[:, 0] | is_far[:, 1] | is_far[:, 2] - - def consider_points_different_2d( - self, - p0: Iterable[float] | Iterable[Iterable[float]], - p1: Iterable[float] | Iterable[Iterable[float]], - ) -> bool | np.ndarray: - """Determine if two points are distant enough to be considered different. - - This uses the algorithm from np.isclose(), but expanded here for the - 2D point case. NumPy is overkill for such a small question. 
- Parameters - ---------- - p0 - first point - p1 - second point - - Returns - ------- - bool | np.ndarray - Whether the points p0 and p1 are considered different or not. - """ - p0 = np.asarray(p0) - p1 = np.asarray(p1) - atol = self.tolerance_for_point_equality - - # Case 1: single pair of points - if p0.ndim == 1 and p1.ndim == 1: - if abs(p0[0] - p1[0]) > atol: - return True - if abs(p0[1] - p1[1]) > atol: - return True - return False - - # Case 2: multiple pairs of points - # Ensure that we're only working with 2D components - p0 = p0.reshape(-1, self.dim)[:, :2] - p1 = p1.reshape(-1, self.dim)[:, :2] - is_far = abs(p1 - p0) > atol - return is_far[:, 0] | is_far[:, 1] - # Information about line def get_cubic_bezier_tuples_from_points(self, points: np.ndarray) -> np.ndarray: return self.gen_cubic_bezier_tuples_from_points(points) @@ -1295,12 +1217,10 @@ def get_subpath_split_indices_from_points( if n_dims == 2: are_points_equal = self.consider_points_equals_2d - are_points_different = self.consider_points_different_2d else: are_points_equal = self.consider_points_equals - are_points_different = self.consider_points_different - diff_bools = are_points_different(starts[1:], ends[:-1]) + diff_bools = ~are_points_equal(starts[1:], ends[:-1]) diff_indices = np.arange(1, diff_bools.shape[0] + 1)[diff_bools] split_indices = np.empty((diff_indices.shape[0] + 1, 2), dtype=int) @@ -1330,6 +1250,18 @@ def get_subpath_split_indices( n_dims: int = 3, strip_null_end_curves: bool = False, ) -> np.ndarray: + """Returns the necessary indices to split :attr:`.VMobject.points` into + the corresponding subpaths. + + Subpaths are ranges of curves with each pair of consecutive curves + having their end/start points coincident. + + Returns + ------- + np.ndarray + (n_subpaths, 2)-shaped array, where the first and second columns + indicate respectively the start and end indices for each subpath. 
+ """ return self.get_subpath_split_indices_from_points( self.points, n_dims, strip_null_end_curves ) From 276426d2e02e881346da966e4d0f09c7015e2eb3 Mon Sep 17 00:00:00 2001 From: chopan Date: Mon, 17 Jul 2023 01:23:42 -0400 Subject: [PATCH 36/42] Replaced manual binary search with bisect.bisect_left in VMobject.point_from_proportion --- manim/mobject/types/vectorized_mobject.py | 24 ++++++++++------------- 1 file changed, 10 insertions(+), 14 deletions(-) diff --git a/manim/mobject/types/vectorized_mobject.py b/manim/mobject/types/vectorized_mobject.py index 33d59ba8c9..0a2de9c946 100644 --- a/manim/mobject/types/vectorized_mobject.py +++ b/manim/mobject/types/vectorized_mobject.py @@ -15,6 +15,7 @@ import itertools as it import sys import typing +from bisect import bisect_left from typing import Callable, Optional, Sequence, Union import colour @@ -1545,23 +1546,14 @@ def point_from_proportion(self, alpha: float) -> np.ndarray: acc_lengths = self.memory["piece_curves"]["acc_lengths"] target_length = alpha * acc_lengths[-1] - num_curves = self.get_num_curves() - # Binary search - left = 0 - right = num_curves - 1 - while right > left: - mid = (left + right) // 2 - if acc_lengths[mid] >= target_length: - right = mid - else: - left = mid + 1 + i = bisect_left(acc_lengths, target_length) - nth_curve = self.get_nth_curve_function(left) - if left == 0: - t = target_length / lengths[left] + nth_curve = self.get_nth_curve_function(i) + if i == 0: + t = target_length / lengths[i] else: - t = (target_length - acc_lengths[left - 1]) / lengths[left] + t = (target_length - acc_lengths[i - 1]) / lengths[i] return nth_curve(t) def proportion_from_point( @@ -1667,6 +1659,10 @@ def get_anchors(self) -> np.ndarray: return self.points num_curves = self.get_num_curves() anchors = np.empty((2 * num_curves, self.dim)) + # Every end anchor ends a Bézier curve, but not every start anchor + # begins a full Bézier curve (it can be incomplete). So the start + # anchors must be sliced in case there are lonely points at the end, + # but the end anchors don't really need to be sliced. 
anchors[0::2] = self.get_start_anchors()[:num_curves] anchors[1::2] = self.get_end_anchors() return anchors From c50d97feb1eb957c9915988fc7e420d57fa0ca39 Mon Sep 17 00:00:00 2001 From: chopan Date: Mon, 17 Jul 2023 01:28:54 -0400 Subject: [PATCH 37/42] Added comments regarding np.all in VMobject.consider_points_equals(_2d) --- manim/mobject/types/vectorized_mobject.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/manim/mobject/types/vectorized_mobject.py b/manim/mobject/types/vectorized_mobject.py index 0a2de9c946..5e87b4537e 100644 --- a/manim/mobject/types/vectorized_mobject.py +++ b/manim/mobject/types/vectorized_mobject.py @@ -1055,6 +1055,7 @@ def consider_points_equals( # Case 2: multiple pairs of points is_close = abs(p1 - p0) <= atol + # This is actually more efficient than np.all(is_close, axis=1) return is_close[:, 0] & is_close[:, 1] & is_close[:, 2] def consider_points_equals_2d( @@ -1095,6 +1096,7 @@ def consider_points_equals_2d( p0 = p0.reshape(-1, self.dim)[:, :2] p1 = p1.reshape(-1, self.dim)[:, :2] is_close = abs(p1 - p0) <= atol + # This is actually more efficient than np.all(is_close, axis=1) return is_close[:, 0] & is_close[:, 1] # Information about line From 35d04d439d0ed9dfd1047b3632f1169b9d7eff45 Mon Sep 17 00:00:00 2001 From: chopan Date: Sat, 19 Aug 2023 01:10:04 +0200 Subject: [PATCH 38/42] Added ManimColor to Mobject --- manim/mobject/mobject.py | 1 - 1 file changed, 1 deletion(-) diff --git a/manim/mobject/mobject.py b/manim/mobject/mobject.py index 03d022f931..b7d716fa50 100644 --- a/manim/mobject/mobject.py +++ b/manim/mobject/mobject.py @@ -109,7 +109,6 @@ def __init__( self.updaters = [] self.updating_suspended = False self.color: ManimColor = ManimColor.parse(color) - self.color = Color(color) if color else None self.memory = {} self.reset_points() From d6e3aaf9e4c5de4700f067e94516027886ef5c79 Mon Sep 17 00:00:00 2001 From: chopan Date: Tue, 2 Jan 2024 00:25:33 +0100 Subject: [PATCH 39/42] Fix align_points() when end null curves are actually stripped --- manim/mobject/types/vectorized_mobject.py | 30 +++++++++++++++++------ 1 file changed, 23 insertions(+), 7 deletions(-) diff --git a/manim/mobject/types/vectorized_mobject.py b/manim/mobject/types/vectorized_mobject.py index 61f7dc6105..24116edef5 100644 --- a/manim/mobject/types/vectorized_mobject.py +++ b/manim/mobject/types/vectorized_mobject.py @@ -1293,15 +1293,13 @@ def get_subpath_split_indices_from_points( n_dims: int = 3, strip_null_end_curves: bool = False, ) -> npt.NDArray[ManimInt]: + points = np.asarray(points) + nppcc = self.n_points_per_cubic_curve starts = points[::nppcc] ends = points[nppcc - 1 :: nppcc] # This ensures that there are no more starts than ends. - # TODO: ends.shape[0] would be more efficient, but some test cases - # regarding Flash and ShowPassingFlash expect a Python list instead of - # an ndarray, so ends.shape[0] breaks those test cases. - # Fix these inconsistencies. - n_curves = len(ends) + n_curves = ends.shape[0] starts = starts[:n_curves] # Zero curves case: if nothing was done to handle this, the statement @@ -1910,16 +1908,34 @@ def align_points(self, vmobject: VMobject) -> Self: self_new_path[max_start:max_end] = self_subpath vmob_new_path[max_start:max_end] = vmob_subpath + # Because strip_null_end_curves=True, maybe the old points have to + # be cut earlier. 
Extract the end points from the split indices + self_end, vmob_end = self_split_i[-1, 1], vmob_split_i[-1, 1] + # If any of the original paths had more subpaths than the other, # add them to the corresponding new path and complete the other # one by appending its last anchor as many times as necessary. if self_n_subpaths < vmob_n_subpaths: vmob_start = vmob_split_i[least_n_subpaths, 0] self_new_path[max_n_points:] = self_new_path[max_n_points - 1] - vmob_new_path[max_n_points:] = vmobject.points[vmob_start:] + vmob_new_path[max_n_points:] = vmobject.points[vmob_start:vmob_end] elif self_n_subpaths > vmob_n_subpaths: self_start = self_split_i[least_n_subpaths, 0] - self_new_path[max_n_points:] = self.points[self_start:] + try: + self_new_path[max_n_points:] = self.points[self_start:self_end] + except Exception as e: + print("SELF SHAPE:", self.points.shape) + print("VMOB SHAPE:", vmobject.points.shape) + print("SELF SUBSPLIT:", self_split_i) + print("VMOB SUBSPLIT:", vmob_split_i) + print("MAX SUBSPLIT:", max_split_i) + print("Least subpaths:", least_n_subpaths) + print("Max points:", max_n_points) + print("Remainder points:", remainder_n_points) + print("Self start:", self_start) + print("SELF NEW SHAPE:", self_new_path.shape) + print("VMOB NEW SHAPE:", vmob_new_path.shape) + raise e vmob_new_path[max_n_points:] = vmob_new_path[max_n_points - 1] self.set_points(self_new_path) From 55997a7f0a50691874558011caa737e5c6e8ee34 Mon Sep 17 00:00:00 2001 From: chopan Date: Tue, 2 Jan 2024 00:30:35 +0100 Subject: [PATCH 40/42] Delete unwanted try-except --- manim/mobject/types/vectorized_mobject.py | 16 +--------------- 1 file changed, 1 insertion(+), 15 deletions(-) diff --git a/manim/mobject/types/vectorized_mobject.py b/manim/mobject/types/vectorized_mobject.py index 24116edef5..5912219488 100644 --- a/manim/mobject/types/vectorized_mobject.py +++ b/manim/mobject/types/vectorized_mobject.py @@ -1921,21 +1921,7 @@ def align_points(self, vmobject: VMobject) -> Self: vmob_new_path[max_n_points:] = vmobject.points[vmob_start:vmob_end] elif self_n_subpaths > vmob_n_subpaths: self_start = self_split_i[least_n_subpaths, 0] - try: - self_new_path[max_n_points:] = self.points[self_start:self_end] - except Exception as e: - print("SELF SHAPE:", self.points.shape) - print("VMOB SHAPE:", vmobject.points.shape) - print("SELF SUBSPLIT:", self_split_i) - print("VMOB SUBSPLIT:", vmob_split_i) - print("MAX SUBSPLIT:", max_split_i) - print("Least subpaths:", least_n_subpaths) - print("Max points:", max_n_points) - print("Remainder points:", remainder_n_points) - print("Self start:", self_start) - print("SELF NEW SHAPE:", self_new_path.shape) - print("VMOB NEW SHAPE:", vmob_new_path.shape) - raise e + self_new_path[max_n_points:] = self.points[self_start:self_end] vmob_new_path[max_n_points:] = vmob_new_path[max_n_points - 1] self.set_points(self_new_path) From f06ad8453c88213f6d1df0714ba82c2cb3f323bb Mon Sep 17 00:00:00 2001 From: chopan Date: Tue, 2 Jan 2024 03:22:41 +0100 Subject: [PATCH 41/42] Fix align_points() for real --- manim/mobject/types/vectorized_mobject.py | 58 ++++++++++++----------- 1 file changed, 31 insertions(+), 27 deletions(-) diff --git a/manim/mobject/types/vectorized_mobject.py b/manim/mobject/types/vectorized_mobject.py index 5912219488..ce723e18bc 100644 --- a/manim/mobject/types/vectorized_mobject.py +++ b/manim/mobject/types/vectorized_mobject.py @@ -1845,26 +1845,24 @@ def align_points(self, vmobject: VMobject) -> Self: if self_n_subpaths == vmob_n_subpaths and (self_split_i == 
vmob_split_i).all(): return - self_n_points = self_split_i[-1, 1] - vmob_n_points = vmob_split_i[-1, 1] + self_n_points_per_subpath = self_split_i[:, 1] - self_split_i[:, 0] + vmob_n_points_per_subpath = vmob_split_i[:, 1] - vmob_split_i[:, 0] if self_n_subpaths < vmob_n_subpaths: least_n_subpaths = self_n_subpaths - remainder_n_points = vmob_n_points - vmob_split_i[self_n_subpaths - 1, 1] + remainder_n_points = np.sum(vmob_n_points_per_subpath[least_n_subpaths:]) else: least_n_subpaths = vmob_n_subpaths - remainder_n_points = self_n_points - self_split_i[vmob_n_subpaths - 1, 1] + remainder_n_points = np.sum(self_n_points_per_subpath[least_n_subpaths:]) # For each possible pair of subpaths from self and vmob, # get the number of points of the longest one to adjust # the subpaths accordingly - self_n_points_per_path = self_split_i[:, 1] - self_split_i[:, 0] - vmob_n_points_per_path = vmob_split_i[:, 1] - vmob_split_i[:, 0] - max_n_points_per_path = np.maximum( - self_n_points_per_path[:least_n_subpaths], - vmob_n_points_per_path[:least_n_subpaths], + max_n_points_per_subpath = np.maximum( + self_n_points_per_subpath[:least_n_subpaths], + vmob_n_points_per_subpath[:least_n_subpaths], ) - max_acc_n_points = np.add.accumulate(max_n_points_per_path) + max_acc_n_points = np.add.accumulate(max_n_points_per_subpath) max_split_i = np.empty((least_n_subpaths, 2), dtype=int) max_split_i[0, 0] = 0 max_split_i[1:, 0] = max_acc_n_points[:-1] @@ -1884,48 +1882,54 @@ def align_points(self, vmobject: VMobject) -> Self: vmob_start, vmob_end = vmob_split_i[i] max_start, max_end = max_split_i[i] - self_n = self_n_points_per_path[i] - vmob_n = vmob_n_points_per_path[i] + self_n_points = self_n_points_per_subpath[i] + vmob_n_points = vmob_n_points_per_subpath[i] # Add corresponding subpaths to the new paths. If necessary, # subdivide one of them into more Bèzier curves until its # number of points matches the other Mobject's subpath. self_subpath = self.points[self_start:self_end] vmob_subpath = vmobject.points[vmob_start:vmob_end] - if self_n < vmob_n: + if self_n_points < vmob_n_points: vmob_new_path[max_start:max_end] = vmob_subpath self_new_path[max_start:max_end] = self.insert_n_curves_to_point_list( - (vmob_n - self_n) // nppcc, + (vmob_n_points - self_n_points) // nppcc, self_subpath, ) - elif self_n > vmob_n: + elif self_n_points > vmob_n_points: self_new_path[max_start:max_end] = self_subpath vmob_new_path[max_start:max_end] = self.insert_n_curves_to_point_list( - (self_n - vmob_n) // nppcc, + (self_n_points - vmob_n_points) // nppcc, vmob_subpath, ) else: self_new_path[max_start:max_end] = self_subpath vmob_new_path[max_start:max_end] = vmob_subpath - # Because strip_null_end_curves=True, maybe the old points have to - # be cut earlier. Extract the end points from the split indices - self_end, vmob_end = self_split_i[-1, 1], vmob_split_i[-1, 1] - # If any of the original paths had more subpaths than the other, # add them to the corresponding new path and complete the other # one by appending its last anchor as many times as necessary. 
if self_n_subpaths < vmob_n_subpaths: - vmob_start = vmob_split_i[least_n_subpaths, 0] self_new_path[max_n_points:] = self_new_path[max_n_points - 1] - vmob_new_path[max_n_points:] = vmobject.points[vmob_start:vmob_end] + for i in range(self_n_subpaths, vmob_n_subpaths): + start, end = vmob_split_i[i] + n_points = vmob_n_points_per_subpath[i] + vmob_new_path[max_n_points : max_n_points + n_points] = vmobject.points[ + start:end + ] + max_n_points += n_points elif self_n_subpaths > vmob_n_subpaths: - self_start = self_split_i[least_n_subpaths, 0] - self_new_path[max_n_points:] = self.points[self_start:self_end] vmob_new_path[max_n_points:] = vmob_new_path[max_n_points - 1] - - self.set_points(self_new_path) - vmobject.set_points(vmob_new_path) + for i in range(vmob_n_subpaths, self_n_subpaths): + start, end = self_split_i[i] + n_points = self_n_points_per_subpath[i] + self_new_path[max_n_points : max_n_points + n_points] = self.points[ + start:end + ] + max_n_points += n_points + + self.points = self_new_path + vmobject.points = vmob_new_path return self def insert_n_curves(self, n: int) -> Self: From f1ba58749d137cde90626e73a1a495bc12d4b21b Mon Sep 17 00:00:00 2001 From: chopan Date: Tue, 2 Jan 2024 21:36:09 +0100 Subject: [PATCH 42/42] Rename 'n_points_per_cubic_curve' to 'n_points_per_curve' and 'nppcc' to 'nppc' --- manim/animation/changing.py | 4 +- manim/mobject/types/vectorized_mobject.py | 153 +++++++++--------- .../test_vectorized_mobject.py | 2 +- 3 files changed, 77 insertions(+), 82 deletions(-) diff --git a/manim/animation/changing.py b/manim/animation/changing.py index bb11cfc0a4..6fe9e25d16 100644 --- a/manim/animation/changing.py +++ b/manim/animation/changing.py @@ -162,5 +162,5 @@ def update_path(self, mob, dt): if self.dissipating_time: self.time += dt if self.time - 1 > self.dissipating_time: - nppcc = self.n_points_per_curve - self.set_points(self.points[nppcc:]) + nppc = self.n_points_per_curve + self.set_points(self.points[nppc:]) diff --git a/manim/mobject/types/vectorized_mobject.py b/manim/mobject/types/vectorized_mobject.py index ce723e18bc..ea98041135 100644 --- a/manim/mobject/types/vectorized_mobject.py +++ b/manim/mobject/types/vectorized_mobject.py @@ -134,7 +134,7 @@ def __init__( shade_in_3d: bool = False, # TODO, do we care about accounting for varying zoom levels? 
tolerance_for_point_equality: float = 1e-6, - n_points_per_cubic_curve: int = 4, + n_points_per_curve: int = 4, cap_style: CapStyleType = CapStyleType.AUTO, **kwargs, ): @@ -162,10 +162,10 @@ def __init__( self.background_image: Image | str | None = background_image self.shade_in_3d: bool = shade_in_3d self.tolerance_for_point_equality: float = tolerance_for_point_equality - self.n_points_per_cubic_curve: int = n_points_per_cubic_curve + self.n_points_per_curve: int = n_points_per_curve self.cap_style: CapStyleType = cap_style self.bezier_alphas: npt.NDArray[ManimFloat] = np.linspace( - 0, 1, n_points_per_cubic_curve + 0, 1, n_points_per_curve ) super().__init__(**kwargs) self.submobjects: list[VMobject] @@ -179,11 +179,6 @@ def __init__( if stroke_color is not None: self.stroke_color = ManimColor.parse(stroke_color) - # OpenGL compatibility - @property - def n_points_per_curve(self) -> int: - return self.n_points_per_cubic_curve - def get_group_class(self) -> type[VGroup]: return VGroup @@ -755,8 +750,8 @@ def set_anchors_and_handles( ``self`` """ assert len(anchors1) == len(handles1) == len(handles2) == len(anchors2) - nppcc = self.n_points_per_cubic_curve # 4 - total_len = nppcc * len(anchors1) + nppc = self.n_points_per_curve # 4 + total_len = nppc * len(anchors1) self.points = np.empty((total_len, self.dim)) # the following will, from the four sets, dispatch them in points such that # self.points = [ @@ -765,7 +760,7 @@ def set_anchors_and_handles( # ] arrays = [anchors1, handles1, handles2, anchors2] for index, array in enumerate(arrays): - self.points[index::nppcc] = array + self.points[index::nppc] = array return self def clear_points(self) -> None: @@ -784,12 +779,12 @@ def append_points(self, new_points: Point3D_Array) -> Self: def start_new_path(self, point: Point3D) -> Self: n_points = len(self.points) - nppcc = self.n_points_per_cubic_curve - if n_points % nppcc != 0: + nppc = self.n_points_per_curve + if n_points % nppc != 0: # close the open path by appending the last # start anchor sufficiently often last_anchor = self.get_start_anchors()[-1] - closure = [last_anchor] * (nppcc - (n_points % nppcc)) + closure = [last_anchor] * (nppc - (n_points % nppc)) self.append_points(closure + [point]) else: self.append_points([point]) @@ -926,9 +921,9 @@ def add_smooth_curve_to(self, *points: Point3D) -> Self: return self def has_new_path_started(self) -> bool: - nppcc = self.n_points_per_cubic_curve # 4 + nppc = self.n_points_per_curve # 4 # A new path starting is defined by a control point which is not part of a bezier subcurve. 
- return len(self.points) % nppcc == 1 + return len(self.points) % nppc == 1 def get_last_point(self) -> Point3D: return self.points[-1] @@ -955,12 +950,12 @@ def add_points_as_corners(self, points: Point3D_Array) -> Point3D_Array: start_corners = points[:-1] end_corners = points[1:] - nppcc = self.n_points_per_cubic_curve - new_points = np.empty((nppcc * len(start_corners), self.dim)) - new_points[::nppcc] = start_corners - new_points[nppcc - 1 :: nppcc] = end_corners + nppc = self.n_points_per_curve + new_points = np.empty((nppc * len(start_corners), self.dim)) + new_points[::nppc] = start_corners + new_points[nppc - 1 :: nppc] = end_corners for i, a in enumerate(self.bezier_alphas): - new_points[i::nppcc] = interpolate( + new_points[i::nppc] = interpolate( start_corners, end_corners, a, @@ -1016,7 +1011,7 @@ def change_anchor_mode(self, mode: Literal["jagged", "smooth"]) -> Self: ``self`` """ assert mode in ["jagged", "smooth"], 'mode must be either "jagged" or "smooth"' - nppcc = self.n_points_per_cubic_curve + nppc = self.n_points_per_curve for submob in self.family_members_with_points(): # Every submobject will have its handles modified @@ -1025,8 +1020,8 @@ def change_anchor_mode(self, mode: Literal["jagged", "smooth"]) -> Self: # thus making the Bézier curves straight lines starts = submob.get_start_anchors() ends = submob.get_end_anchors() - for i in range(1, nppcc - 1): - submob.points[i::nppcc] = interpolate( + for i in range(1, nppc - 1): + submob.points[i::nppc] = interpolate( starts, ends, self.bezier_alphas[i] ) @@ -1034,12 +1029,12 @@ def change_anchor_mode(self, mode: Literal["jagged", "smooth"]) -> Self: # Divide into subpaths and for each subpath compute smooth handles. split_indices = submob.get_subpath_split_indices() for start, end in split_indices: - anchors = np.empty(((end - start) // nppcc + 1, submob.dim)) - anchors[:-1] = submob.points[start:end:nppcc] + anchors = np.empty(((end - start) // nppc + 1, submob.dim)) + anchors[:-1] = submob.points[start:end:nppc] anchors[-1] = submob.points[end - 1] h1, h2 = get_smooth_handle_points(anchors) - submob.points[start + 1 : end : nppcc] = h1 - submob.points[start + 2 : end : nppcc] = h2 + submob.points[start + 1 : end : nppc] = h1 + submob.points[start + 2 : end : nppc] = h2 return self def make_smooth(self) -> Self: @@ -1100,7 +1095,7 @@ def scale_handle_to_anchor_distances(self, factor: float) -> Self: ``self`` """ for submob in self.family_members_with_points(): - if len(submob.points) < self.n_points_per_cubic_curve: + if len(submob.points) < self.n_points_per_curve: # The case that a bezier quad is not complete (there is no bezier curve as there is not enough control points.) continue a1, h1, h2, a2 = submob.get_anchors_and_handles() @@ -1220,11 +1215,11 @@ def gen_cubic_bezier_tuples_from_points( Bezier control points. """ points = np.asarray(points) - nppcc = self.n_points_per_cubic_curve - n_curves = points.shape[0] // nppcc - points = points[: nppcc * n_curves] + nppc = self.n_points_per_curve + n_curves = points.shape[0] // nppc + points = points[: nppc * n_curves] - return points.reshape(n_curves, nppcc, self.dim) + return points.reshape(n_curves, nppc, self.dim) def get_cubic_bezier_tuples(self) -> npt.NDArray[Point3D_Array]: return self.get_cubic_bezier_tuples_from_points(self.points) @@ -1255,13 +1250,13 @@ def _gen_subpaths_from_points( Generator[Point3D_Array] subpaths formed by the points. 
""" - nppcc = self.n_points_per_cubic_curve - filtered = filter(filter_func, range(nppcc, len(points), nppcc)) + nppc = self.n_points_per_curve + filtered = filter(filter_func, range(nppc, len(points), nppc)) split_indices = [0] + list(filtered) + [len(points)] return ( points[i1:i2] for i1, i2 in zip(split_indices, split_indices[1:]) - if (i2 - i1) >= nppcc + if (i2 - i1) >= nppc ) def get_subpaths_from_points(self, points: Point3D_Array) -> list[Point3D_Array]: @@ -1295,9 +1290,9 @@ def get_subpath_split_indices_from_points( ) -> npt.NDArray[ManimInt]: points = np.asarray(points) - nppcc = self.n_points_per_cubic_curve - starts = points[::nppcc] - ends = points[nppcc - 1 :: nppcc] + nppc = self.n_points_per_curve + starts = points[::nppc] + ends = points[nppc - 1 :: nppc] # This ensures that there are no more starts than ends. n_curves = ends.shape[0] starts = starts[:n_curves] @@ -1310,9 +1305,9 @@ def get_subpath_split_indices_from_points( if n_curves == 0: return np.empty((0, 2), dtype=int) # Single curve case: are_points_different(starts[1:], ends[:-1]) will - # fail, so return immediately. The split indices are just [[0 nppcc]]. + # fail, so return immediately. The split indices are just [[0 nppc]]. if n_curves == 1: - return np.array([[0, nppcc]]) + return np.array([[0, nppc]]) if n_dims == 2: are_points_equal = self.consider_points_equals_2d @@ -1334,13 +1329,13 @@ def get_subpath_split_indices_from_points( while ( end_i > start_i + 1 and are_points_equal( - points[nppcc * (end_i - 1) : nppcc * end_i], ends[end_i - 2] + points[nppc * (end_i - 1) : nppc * end_i], ends[end_i - 2] ).all() ): end_i -= 1 split_indices[i, 1] = end_i - split_indices *= self.n_points_per_cubic_curve + split_indices *= self.n_points_per_curve return split_indices @@ -1384,15 +1379,15 @@ def _update_curve_memory(self, sample_points: int = 10) -> None: self._init_curve_memory(sample_points) return - nppcc = self.n_points_per_cubic_curve + nppc = self.n_points_per_curve curr_points = self.points memo_points = self.memory["piece_curves"]["points"] curr_n_points = len(self.points) memo_n_points = memo_points.shape[0] n_points = min(curr_n_points, memo_n_points) - n_curves = n_points // nppcc - n_points = n_points * nppcc + n_curves = n_points // nppc + n_points = n_points * nppc # Check if any Bézier curve had its points changed to recalculate its length. neq = curr_points[:n_points] != memo_points[:n_points] @@ -1401,10 +1396,10 @@ def _update_curve_memory(self, sample_points: int = 10) -> None: neq2 = neq[:, 0] for i in range(1, self.dim): neq2 |= neq[:, i] - # Collapse every group of 4 (or nppcc) values into a single value per curve. - neq2 = neq2.reshape(-1, nppcc) + # Collapse every group of 4 (or nppc) values into a single value per curve. 
+ neq2 = neq2.reshape(-1, nppc) differences = neq2[:, 0] - for i in range(1, nppcc): + for i in range(1, nppc): differences |= neq2[:, i] differences = np.arange(n_curves)[differences] @@ -1412,8 +1407,8 @@ def _update_curve_memory(self, sample_points: int = 10) -> None: return # If the amount of points has changed, adjust lengths - curr_n_curves = curr_n_points // nppcc - memo_n_curves = memo_n_points // nppcc + curr_n_curves = curr_n_points // nppc + memo_n_curves = memo_n_points // nppc if curr_n_points > memo_n_points: new_lengths = np.empty(curr_n_curves) new_lengths[:memo_n_curves] = self.memory["piece_curves"]["lengths"] @@ -1460,8 +1455,8 @@ def get_nth_curve_points(self, n: int) -> CubicBezierPoints: points defining the nth bezier curve (anchors, handles) """ assert n < self.get_num_curves() - nppcc = self.n_points_per_cubic_curve - return self.points[nppcc * n : nppcc * (n + 1)] + nppc = self.n_points_per_curve + return self.points[nppc * n : nppc * (n + 1)] def get_nth_curve_function(self, n: int) -> Callable[[float], Point3D]: """Returns the expression of the nth curve. @@ -1564,8 +1559,8 @@ def get_num_curves(self) -> int: int number of curves of the vmobject. """ - nppcc = self.n_points_per_cubic_curve - return len(self.points) // nppcc + nppc = self.n_points_per_curve + return len(self.points) // nppc def get_curve_functions( self, @@ -1717,8 +1712,8 @@ def get_anchors_and_handles(self) -> list[Point3D_Array]: `list[Point3D_Array]` Iterable of the anchors and handles. """ - nppcc = self.n_points_per_cubic_curve - return [self.points[i::nppcc] for i in range(nppcc)] + nppc = self.n_points_per_curve + return [self.points[i::nppc] for i in range(nppc)] def get_start_anchors(self) -> Point3D_Array: """Returns the start anchors of the bezier curves. @@ -1728,7 +1723,7 @@ def get_start_anchors(self) -> Point3D_Array: Point3D_Array Starting anchors """ - return self.points[:: self.n_points_per_cubic_curve] + return self.points[:: self.n_points_per_curve] def get_end_anchors(self) -> Point3D_Array: """Return the end anchors of the bezier curves. @@ -1738,8 +1733,8 @@ def get_end_anchors(self) -> Point3D_Array: Point3D_Array Starting anchors """ - nppcc = self.n_points_per_cubic_curve - return self.points[nppcc - 1 :: nppcc] + nppc = self.n_points_per_curve + return self.points[nppc - 1 :: nppc] def get_anchors(self) -> Point3D_Array: """Returns the anchors of the curves forming the VMobject. @@ -1875,7 +1870,7 @@ def align_points(self, vmobject: VMobject) -> Self: vmob_new_path = np.empty((final_n_points, self.dim)) # Analyze all of the possible pairs of subpaths - nppcc = self.n_points_per_cubic_curve + nppc = self.n_points_per_curve for i in range(least_n_subpaths): # Start and end indices of self, vmob and max subpaths self_start, self_end = self_split_i[i] @@ -1893,13 +1888,13 @@ def align_points(self, vmobject: VMobject) -> Self: if self_n_points < vmob_n_points: vmob_new_path[max_start:max_end] = vmob_subpath self_new_path[max_start:max_end] = self.insert_n_curves_to_point_list( - (vmob_n_points - self_n_points) // nppcc, + (vmob_n_points - self_n_points) // nppc, self_subpath, ) elif self_n_points > vmob_n_points: self_new_path[max_start:max_end] = self_subpath vmob_new_path[max_start:max_end] = self.insert_n_curves_to_point_list( - (self_n_points - vmob_n_points) // nppcc, + (self_n_points - vmob_n_points) // nppc, vmob_subpath, ) else: @@ -1973,10 +1968,10 @@ def insert_n_curves_to_point_list( Points generated. 
""" - nppcc = self.n_points_per_cubic_curve + nppc = self.n_points_per_curve if len(points) == 1: - return np.repeat(points, nppcc * n, 0) + return np.repeat(points, nppc * n, 0) bezier_quads = self.get_cubic_bezier_tuples_from_points(points) curr_num = len(bezier_quads) target_num = curr_num + n @@ -1998,21 +1993,21 @@ def insert_n_curves_to_point_list( split_factors = np.zeros(curr_num, dtype="i") np.add.at(split_factors, repeat_indices, 1) - new_points = np.empty((nppcc * target_num, self.dim)) + new_points = np.empty((nppc * target_num, self.dim)) start_i = 0 for quad, sf in zip(bezier_quads, split_factors): if sf == 1: - new_points[start_i : start_i + nppcc] = quad - start_i += nppcc + new_points[start_i : start_i + nppc] = quad + start_i += nppc else: # What was once a single cubic curve defined # by "quad" will now be broken into sf # smaller cubic curves for i in range(sf): - new_points[start_i : start_i + nppcc] = partial_bezier_points( + new_points[start_i : start_i + nppc] = partial_bezier_points( quad, i / sf, (i + 1) / sf ) - start_i += nppcc + start_i += nppc return new_points def align_rgbas(self, vmobject: VMobject) -> Self: @@ -2103,35 +2098,35 @@ def pointwise_become_partial( lower_index, lower_residue = integer_interpolate(0, num_curves, a) upper_index, upper_residue = integer_interpolate(0, num_curves, b) - nppcc = self.n_points_per_cubic_curve + nppc = self.n_points_per_curve # If both indices coincide, get a part of a single Bezier curve. if lower_index == upper_index: # Look at the "lower_index"-th Bezier curve and select its part from # t=lower_residue to t=upper_residue. self.points = partial_bezier_points( - vmobject.points[nppcc * lower_index : nppcc * lower_index + nppcc], + vmobject.points[nppc * lower_index : nppc * lower_index + nppc], lower_residue, upper_residue, ) else: # Allocate space for (upper_index-lower_index+1) Bezier curves. - self.points = np.empty((nppcc * (upper_index - lower_index + 1), self.dim)) + self.points = np.empty((nppc * (upper_index - lower_index + 1), self.dim)) # Look at the "lower_index"-th Bezier curve and select its part from # t=lower_residue to t=1. This is the first curve in self.points. - self.points[:nppcc] = partial_bezier_points( - vmobject.points[nppcc * lower_index : nppcc * lower_index + nppcc], + self.points[:nppc] = partial_bezier_points( + vmobject.points[nppc * lower_index : nppc * lower_index + nppc], lower_residue, 1, ) # If there are more curves between the "lower_index"-th and the # "upper_index"-th Beziers, add them all to self.points. - self.points[nppcc:-nppcc] = vmobject.points[ - nppcc * (lower_index + 1) : nppcc * upper_index + self.points[nppc:-nppc] = vmobject.points[ + nppc * (lower_index + 1) : nppc * upper_index ] # Look at the "upper_index"-th Bezier curve and select its part from # t=0 to t=upper_residue. This is the last curve in self.points. 
- self.points[-nppcc:] = partial_bezier_points( - vmobject.points[nppcc * upper_index : nppcc * upper_index + nppcc], + self.points[-nppc:] = partial_bezier_points( + vmobject.points[nppc * upper_index : nppc * upper_index + nppc], 0, upper_residue, ) diff --git a/tests/module/mobject/types/vectorized_mobject/test_vectorized_mobject.py b/tests/module/mobject/types/vectorized_mobject/test_vectorized_mobject.py index a17383fd98..3b4f23c618 100644 --- a/tests/module/mobject/types/vectorized_mobject/test_vectorized_mobject.py +++ b/tests/module/mobject/types/vectorized_mobject/test_vectorized_mobject.py @@ -269,7 +269,7 @@ def test_trim_dummy(): o2.add_line_to(np.array([1, 2, 0])) def path_length(p): - return len(p) // o.n_points_per_cubic_curve + return len(p) // o.n_points_per_curve assert tuple(map(path_length, o.get_subpaths())) == (3, 1) assert tuple(map(path_length, o2.get_subpaths())) == (1, 2)
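In the same spirit as the updated test, a quick sanity check of the renamed attribute on a concrete mobject (an editor's sketch assuming manim's public API; Square is just a convenient VMobject made of four corner-to-corner cubics):

from manim import Square

sq = Square()
nppc = sq.n_points_per_curve               # 4 control points per cubic curve
assert len(sq.points) % nppc == 0
assert sq.get_num_curves() == len(sq.points) // nppc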