From 08ee8c9d54744003421d3e394cfa5254604c00b1 Mon Sep 17 00:00:00 2001
From: Loic Messal <15694700+Tofull@users.noreply.github.com>
Date: Thu, 13 Jan 2022 11:24:38 -0500
Subject: [PATCH 1/4] map numpy built-in types into ply-compatible types

---
 pyntcloud/io/ply.py | 49 +++++++++++++++++++++++++++++++++++++++++----
 1 file changed, 45 insertions(+), 4 deletions(-)

diff --git a/pyntcloud/io/ply.py b/pyntcloud/io/ply.py
index 6deda6e..55f222b 100644
--- a/pyntcloud/io/ply.py
+++ b/pyntcloud/io/ply.py
@@ -248,7 +248,41 @@ def describe_element(name, df):
     -------
     element: list[str]
     """
-    property_formats = {'f': 'float', 'u': 'uchar', 'i': 'int', 'b': 'bool'}
+    # map between numpy built-in types and supported ply File Structure types
+    # see numpy built-in types: https://numpy.org/devdocs/reference/arrays.scalars.html#built-in-scalar-types
+    # see ply File Structure: http://paulbourke.net/dataformats/ply/
+    _NotPlyCompatible = "not implemented in ply file structure"
+    property_formats = {
+        "b": "char",
+        "h": "short",
+        "i": "int",
+        "l": _NotPlyCompatible,
+        "q": _NotPlyCompatible,
+        "B": "uchar",
+        "H": "ushort",
+        "I": "uint",
+        "L": _NotPlyCompatible,
+        "Q": _NotPlyCompatible,
+        "e": _NotPlyCompatible,
+        "f": "float",
+        "d": "double",
+        "g": _NotPlyCompatible,
+        "F": _NotPlyCompatible,
+        "D": _NotPlyCompatible,
+        "G": _NotPlyCompatible,
+        "?": _NotPlyCompatible,
+        "M": _NotPlyCompatible,
+        "m": _NotPlyCompatible,
+        "O": _NotPlyCompatible,
+        "S": _NotPlyCompatible,
+        "U": _NotPlyCompatible,
+        "V": _NotPlyCompatible,
+        "p": _NotPlyCompatible,
+        "P": _NotPlyCompatible,
+    }
+    # backward compatibility with https://github.com/daavoo/pyntcloud/pull/321
+    property_formats["?"] = "bool"
+
     element = ['element ' + name + ' ' + str(len(df))]
 
     if name == 'face':
@@ -256,8 +290,15 @@ def describe_element(name, df):
 
     else:
         for i in range(len(df.columns)):
-            # get first letter of dtype to infer format
-            f = property_formats[str(df.dtypes[i])[0]]
-            element.append('property ' + f + ' ' + df.columns.values[i])
+            column_name = df.columns.values[i]
+            column_dtype = df.dtypes[i]
+
+            f = property_formats[column_dtype.char]
+            if f == _NotPlyCompatible:
+                raise TypeError(
+                    f"Property '{column_name}' (dtype: {column_dtype.name}) is {_NotPlyCompatible}"
+                )
+
+            element.append('property ' + f + ' ' + column_name)
 
     return element

From 199e29aab71d3f42bb0ffc7abe6578987df9e607 Mon Sep 17 00:00:00 2001
From: Loic Messal <15694700+Tofull@users.noreply.github.com>
Date: Thu, 13 Jan 2022 11:25:30 -0500
Subject: [PATCH 2/4] make float32 conversion optional

---
 pyntcloud/core_class.py | 13 +++++++++++--
 1 file changed, 11 insertions(+), 2 deletions(-)

diff --git a/pyntcloud/core_class.py b/pyntcloud/core_class.py
index 4d11267..66dda16 100644
--- a/pyntcloud/core_class.py
+++ b/pyntcloud/core_class.py
@@ -153,7 +153,7 @@ def from_instance(cls, library, instance, **kwargs):
         else:
             return cls(**FROM_INSTANCE[library](instance, **kwargs))
 
-    def to_file(self, filename, also_save=None, **kwargs):
+    def to_file(self, filename, also_save=None, force_float32: bool = True, **kwargs):
         """Save PyntCloud data to file.
 
         Parameters
         ----------
@@ -166,9 +166,18 @@ def to_file(self, filename, also_save=None, **kwargs):
             Names of the attributes that will be extracted from the PyntCloud
             to be saved in addition to points. Usually also_save=["mesh"]
 
+        force_float32: bool, optional
+            Default: True
+            Float64 coordinates can be hard to export and re-import in other software,
+            so coordinate columns are converted to float32 by default.
+            See https://github.com/daavoo/pyntcloud/issues/146#issuecomment-369179245
+            Set force_float32=False to disable this behaviour.
+
         kwargs: only usable in some formats
         """
-        convert_columns_dtype(self.points, np.float64, np.float32)
+        if force_float32:
+            convert_columns_dtype(self.points, np.float64, np.float32)
+
         ext = filename.split(".")[-1].upper()
         if ext not in TO_FILE:
             raise ValueError(

From 0ea784e6e90a534d02af5c8837fa8e587b044776 Mon Sep 17 00:00:00 2001
From: Loic Messal <15694700+Tofull@users.noreply.github.com>
Date: Thu, 13 Jan 2022 12:29:42 -0500
Subject: [PATCH 3/4] try downcasting columns if dtype isn't natively PLY compatible

This makes columns like `voxel_n` (int64) get downcast to the smallest
numerical dtype possible. With the data used in
tests/integration/test_core_class.py::test_split_on, this column can be
downcast to int8, which is ply compatible.
---
 pyntcloud/io/ply.py | 28 +++++++++++++++++++++++++++-
 1 file changed, 27 insertions(+), 1 deletion(-)

diff --git a/pyntcloud/io/ply.py b/pyntcloud/io/ply.py
index 55f222b..e41facb 100644
--- a/pyntcloud/io/ply.py
+++ b/pyntcloud/io/ply.py
@@ -295,10 +295,36 @@ def describe_element(name, df):
 
             f = property_formats[column_dtype.char]
             if f == _NotPlyCompatible:
-                raise TypeError(
+                potential_error = TypeError(
                     f"Property '{column_name}' (dtype: {column_dtype.name}) is {_NotPlyCompatible}"
                 )

+                # try downcasting column
+                column = df[column_name]
+
+                downcasted_column = None
+                if pd.api.types.is_float_dtype(column):
+                    downcasted_column = pd.to_numeric(column, downcast="float")
+                elif pd.api.types.is_signed_integer_dtype(column):
+                    downcasted_column = pd.to_numeric(column, downcast="signed")
+                elif pd.api.types.is_unsigned_integer_dtype(column):
+                    downcasted_column = pd.to_numeric(column, downcast="unsigned")
+
+                if downcasted_column is None:
+                    # column cannot be downcasted
+                    raise potential_error
+
+                downcasted_f = property_formats[downcasted_column.dtype.char]
+                if downcasted_f == _NotPlyCompatible:
+                    # even downcasted, column is still not ply compatible
+                    raise potential_error
+
+                # propagate downcasted column dtype into original dataframe column
+                # used to keep coherency between .ply headers and binary content
+                df[column_name] = column.astype(downcasted_column.dtype)
+
+                f = downcasted_f
+
             element.append('property ' + f + ' ' + column_name)
 
     return element

From f3b2cb1ef114c5f61ba285b2ee94371f7610c5a6 Mon Sep 17 00:00:00 2001
From: Loic Messal <15694700+Tofull@users.noreply.github.com>
Date: Thu, 13 Jan 2022 12:43:24 -0500
Subject: [PATCH 4/4] fix most of flake8 issues

---
 pyntcloud/geometry/models/sphere.py | 7 +++++--
 pyntcloud/io/pcd.py | 2 +-
 pyntcloud/io/ply.py | 2 +-
 pyntcloud/plot/matplotlib_backend.py | 4 ++--
 pyntcloud/plot/pyvista_backend.py | 2 +-
 pyntcloud/plot/voxelgrid.py | 2 +-
 pyntcloud/samplers/mesh.py | 2 +-
 pyntcloud/samplers/points.py | 6 +++---
 pyntcloud/structures/base.py | 2 +-
 pyntcloud/structures/voxelgrid.py | 2 +-
 setup.py | 2 +-
 tests/conftest.py | 6 +++---
 tests/integration/filters/test_kdtree_filters.py | 1 -
 tests/integration/filters/test_xyz_filters.py | 1 -
 tests/integration/io/test_from_file.py | 1 +
 tests/integration/io/test_from_instance.py | 10 +++++-----
 tests/integration/io/test_to_file.py | 1 +
 tests/integration/io/test_to_instance.py | 6 +++---
 tests/integration/samplers/test_mesh_samplers.py | 1 -
 tests/integration/samplers/test_voxelgrid_samplers.py | 2 +-
 .../scalar_fields/test_eigenvalues_scalar_fields.py | 4 +---
 .../scalar_fields/test_normals_scalar_fields.py | 1 -
 .../scalar_fields/test_xyz_scalar_fields.py | 1 -
 tests/unit/filters/test_kdtree_filters.py | 3 ++-
 tests/unit/filters/test_xyz_filters.py | 1 -
 tests/unit/samplers/test_mesh_samplers.py | 1 -
 tests/unit/samplers/test_points_samplers.py | 1 -
 .../scalar_fields/test_eigenvalues_scalar_fields.py | 3 ---
 tests/unit/scalar_fields/test_normals_scalar_fields.py | 1 -
 .../unit/scalar_fields/test_voxlegrid_scalar_fields.py | 2 +-
 tests/unit/scalar_fields/test_xyz_scalar_fields.py | 1 -
 tests/unit/structures/test_voxelgrid_structures.py | 3 ++-
 32 files changed, 38 insertions(+), 46 deletions(-)

diff --git a/pyntcloud/geometry/models/sphere.py b/pyntcloud/geometry/models/sphere.py
index ec383f8..366f5d0 100644
--- a/pyntcloud/geometry/models/sphere.py
+++ b/pyntcloud/geometry/models/sphere.py
@@ -125,8 +125,11 @@ def create_sphere(center=[0, 0, 0], radius=1, n_points=100):
     np_axis = round(np.sqrt(n_points - 2), 0) + 1
     index = np.arange(0, np.square(np_axis) + 2, 1)

-    sphere = pd.DataFrame(np.zeros([np.size(index, 0), 3]),
-                          index=index, columns=['x', 'y', 'z'])
+    sphere = pd.DataFrame(
+        np.zeros([np.size(index, 0), 3]),
+        index=index,
+        columns=['x', 'y', 'z']
+    )

     zmin = center[2] - radius
     zmax = center[2] + radius
diff --git a/pyntcloud/io/pcd.py b/pyntcloud/io/pcd.py
index e25b444..478cd20 100644
--- a/pyntcloud/io/pcd.py
+++ b/pyntcloud/io/pcd.py
@@ -122,7 +122,7 @@ def read_pcd(filename):
         # TODO what to use as second argument? if buf is None
         # (compressed > uncompressed)
         # should we read buf as raw binary?
-        #buf = lzf.decompress(compressed_data, uncompressed_size)
+        # buf = lzf.decompress(compressed_data, uncompressed_size)
         if len(buf) != uncompressed_size:
             raise Exception('Error decompressing data')
         # the data is stored field-by-field
diff --git a/pyntcloud/io/ply.py b/pyntcloud/io/ply.py
index e41facb..8fe1f6d 100644
--- a/pyntcloud/io/ply.py
+++ b/pyntcloud/io/ply.py
@@ -139,7 +139,7 @@ def read_ply(filename, allow_bool=False):
             data["points"][col] = data["points"][col].astype(
                 dtypes["vertex"][n][1])

-    if mesh_size :
+    if mesh_size:
         top = count + points_size

         names = np.array([x[0] for x in dtypes["face"]])
diff --git a/pyntcloud/plot/matplotlib_backend.py b/pyntcloud/plot/matplotlib_backend.py
index a5b24b8..d0d4dc8 100644
--- a/pyntcloud/plot/matplotlib_backend.py
+++ b/pyntcloud/plot/matplotlib_backend.py
@@ -10,10 +10,10 @@ def set_proper_aspect_ratio(ax):
     extents = np.array([getattr(ax, 'get_{}lim'.format(dim))() for dim in 'xyz'])
-    sz = extents[:,1] - extents[:,0]
+    sz = extents[:, 1] - extents[:, 0]
     centers = np.mean(extents, axis=1)
     maxsize = max(abs(sz))
-    r = maxsize/2
+    r = maxsize / 2

     for ctr, dim in zip(centers, 'xyz'):
         getattr(ax, 'set_{}lim'.format(dim))(ctr - r, ctr + r)
diff --git a/pyntcloud/plot/pyvista_backend.py b/pyntcloud/plot/pyvista_backend.py
index 4c811a8..8ccb0f9 100644
--- a/pyntcloud/plot/pyvista_backend.py
+++ b/pyntcloud/plot/pyvista_backend.py
@@ -72,4 +72,4 @@ def plot_with_pyvista(cloud, **kwargs):
     return plotter.show(use_panel=kwargs.pop("use_panel", None),
                         title=kwargs.pop("title", None),
                         screenshot=kwargs.pop("screenshot", False),
-                        cpos=kwargs.pop("cpos", None) )
+                        cpos=kwargs.pop("cpos", None))
diff --git a/pyntcloud/plot/voxelgrid.py b/pyntcloud/plot/voxelgrid.py
index 146866d..95b3de7 100644
--- a/pyntcloud/plot/voxelgrid.py
+++ b/pyntcloud/plot/voxelgrid.py
@@ -113,7 +113,7 @@ def plot_voxelgrid_with_pythreejs(voxel_centers,
     centroid, camera_position = get_centroid_and_camera_position(voxel_centers)
     camera = pythreejs.PerspectiveCamera(fov=90,
-                                         aspect=width/height,
+                                         aspect=width / height,
                                          position=camera_position,
                                          up=[0, 0, 1])
     mesh = get_voxelgrid_pythreejs(voxel_centers, voxel_colors)
diff --git a/pyntcloud/samplers/mesh.py b/pyntcloud/samplers/mesh.py
index ff2b33e..3bd4565 100644
--- a/pyntcloud/samplers/mesh.py
+++ b/pyntcloud/samplers/mesh.py
@@ -72,7 +72,7 @@ def compute(self):
         # (n, 1) the 1 is for broadcasting
         u = np.random.uniform(low=0., high=1., size=(self.n, 1))
-        v = np.random.uniform(low=0., high=1-u, size=(self.n, 1))
+        v = np.random.uniform(low=0., high=1 - u, size=(self.n, 1))

         result = pd.DataFrame()
diff --git a/pyntcloud/samplers/points.py b/pyntcloud/samplers/points.py
index ce7ed34..6a14d6c 100644
--- a/pyntcloud/samplers/points.py
+++ b/pyntcloud/samplers/points.py
@@ -58,7 +58,7 @@ def cal_distance(self, point, solution_set):
         distance_sum = np.zeros(len(point))
         for pt in solution_set:
-            distance_sum += np.diag(np.dot((point[:, :3]-pt[:3]), self.d_metric@(point[:, :3]-pt[:3]).T))
+            distance_sum += np.diag(np.dot((point[:, :3] - pt[:3]), self.d_metric @ (point[:, :3] - pt[:3]).T))
         return distance_sum

     def compute(self):
@@ -70,13 +70,13 @@ def compute(self):
         # the sampled points set as the return
         select_idx = np.random.randint(low=0, high=len(self.points))
         # to remain the shape as (1, n) instead of (n, )
-        solution_set = remaining_points[select_idx: select_idx+1]
+        solution_set = remaining_points[select_idx: select_idx + 1]
         remaining_points = np.delete(remaining_points, select_idx, 0)

         for _ in range(self.n - 1):
             distance_sum = self.cal_distance(remaining_points, solution_set)
             select_idx = np.argmax(distance_sum)
-            solution_set = np.concatenate([solution_set, remaining_points[select_idx:select_idx+1]], axis=0)
+            solution_set = np.concatenate([solution_set, remaining_points[select_idx:select_idx + 1]], axis=0)
             remaining_points = np.delete(remaining_points, select_idx, 0)

         return pd.DataFrame(solution_set, columns=self.points.columns)
diff --git a/pyntcloud/structures/base.py b/pyntcloud/structures/base.py
index 456629b..b771223 100644
--- a/pyntcloud/structures/base.py
+++ b/pyntcloud/structures/base.py
@@ -13,7 +13,7 @@ def get_and_set(self, pyntcloud):

     @classmethod
     def extract_info(cls, pyntcloud):
-        """ABC API"""
+        """ABC API"""
         info = {
             "points": pyntcloud.xyz,
         }
diff --git a/pyntcloud/structures/voxelgrid.py b/pyntcloud/structures/voxelgrid.py
index f4e49e0..0116055 100644
--- a/pyntcloud/structures/voxelgrid.py
+++ b/pyntcloud/structures/voxelgrid.py
@@ -118,7 +118,7 @@ def compute(self):
         # -1 so index are 0-based; clip for edge cases
         self.voxel_x = np.clip(np.searchsorted(self.segments[0], self._points[:, 0]) - 1, 0, self.x_y_z[0])
         self.voxel_y = np.clip(np.searchsorted(self.segments[1], self._points[:, 1]) - 1, 0, self.x_y_z[1])
-        self.voxel_z = np.clip(np.searchsorted(self.segments[2], self._points[:, 2]) - 1, 0, self.x_y_z[2])
+        self.voxel_z = np.clip(np.searchsorted(self.segments[2], self._points[:, 2]) - 1, 0, self.x_y_z[2])
         self.voxel_n = np.ravel_multi_index([self.voxel_x, self.voxel_y, self.voxel_z], self.x_y_z)

         # compute center of each voxel
diff --git a/setup.py b/setup.py
index b619641..5940f0f 100644
--- a/setup.py
+++ b/setup.py
@@ -24,7 +24,7 @@
         "pandas",
     ],
     extras_require={
-        'LAS': ["pylas", "lazrs"],
+        'LAS': ["pylas", "lazrs"],
         'PLOT': ["ipython", "matplotlib", "pyvista"],
         'NUMBA': ["numba"]
     },
diff --git a/tests/conftest.py b/tests/conftest.py
index c3e075f..e33fcda 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -16,12 +16,12 @@ def data_path():
 @pytest.fixture()
 def xyz():
     return np.array([
-        [0. , 0. , 0. ],
-        [0.1, 0.1, 0.1],
+        [0. , 0. , 0. ],  # noqa: E202,E203
+        [0.1, 0.1, 0.1],  # noqa: E202,E203
         [0.2, 0.2, 0.2],
         [0.5, 0.5, 0.5],
         [0.9, 0.9, 0.9],
-        [1. , 1. , 1. ]], dtype=np.float32)
+        [1. , 1. , 1. ]], dtype=np.float32)  # noqa: E202,E203


 @pytest.fixture()
diff --git a/tests/integration/filters/test_kdtree_filters.py b/tests/integration/filters/test_kdtree_filters.py
index 7110d7b..bfb386e 100644
--- a/tests/integration/filters/test_kdtree_filters.py
+++ b/tests/integration/filters/test_kdtree_filters.py
@@ -79,4 +79,3 @@ def test_SOR_expected_results(pyntcloud_with_kdtree_and_kdtree_id, k, z_max, exp
         z_max=z_max
     )
     assert_array_equal(result, expected_result)
-
diff --git a/tests/integration/filters/test_xyz_filters.py b/tests/integration/filters/test_xyz_filters.py
index 16aa5c0..30515a7 100644
--- a/tests/integration/filters/test_xyz_filters.py
+++ b/tests/integration/filters/test_xyz_filters.py
@@ -43,4 +43,3 @@ def test_BBOX_expected_results(simple_pyntcloud, bounding_box, expected_result):
         **bounding_box
     )
     assert_array_equal(result, expected_result)
-
diff --git a/tests/integration/io/test_from_file.py b/tests/integration/io/test_from_file.py
index 50cb30b..8189c9e 100644
--- a/tests/integration/io/test_from_file.py
+++ b/tests/integration/io/test_from_file.py
@@ -74,6 +74,7 @@ def test_obj_issue_226(data_path):

     assert "w" in cloud.points.columns

+
 def test_obj_issue_vn(data_path):
     """ Fix type issue in pyntcloud/io/obj.py.
diff --git a/tests/integration/io/test_from_instance.py b/tests/integration/io/test_from_instance.py
index 482f2aa..9c9e6b7 100644
--- a/tests/integration/io/test_from_instance.py
+++ b/tests/integration/io/test_from_instance.py
@@ -5,14 +5,14 @@
 try:
     import pyvista as pv
     SKIP_PYVISTA = False
-except:
+except:  # noqa: E722
     pv = None
     SKIP_PYVISTA = True

 try:
     import open3d as o3d
     SKIP_OPEN3D = False
-except:
+except:  # noqa: E722
     o3d = None
     SKIP_OPEN3D = True

@@ -25,7 +25,7 @@ def test_pyvista_conversion(data_path):
     assert {'red', 'green', 'blue'}.issubset(cloud.points.columns)
     assert np.allclose(cloud.points[['red', 'green', 'blue']].values, original_point_cloud.point_arrays["RGB"])
     assert {'nx', 'ny', 'nz'}.issubset(cloud.points.columns)
-    assert np.allclose(cloud.points[['nx', 'ny', 'nz']].values, original_point_cloud.point_arrays["Normals"]) 
+    assert np.allclose(cloud.points[['nx', 'ny', 'nz']].values, original_point_cloud.point_arrays["Normals"])
     assert cloud.mesh is not None
@@ -64,7 +64,7 @@ def test_open3d_point_cloud(data_path):
     assert np.allclose(cloud.points[['red', 'green', 'blue']].values / 255., np.asarray(point_cloud.colors))

     assert {'nx', 'ny', 'nz'}.issubset(cloud.points.columns)
-    assert np.allclose(cloud.points[['nx', 'ny', 'nz']].values, np.asarray(point_cloud.normals)) 
+    assert np.allclose(cloud.points[['nx', 'ny', 'nz']].values, np.asarray(point_cloud.normals))
@@ -80,4 +80,4 @@ def test_open3d_triangle_mesh(data_path):
     assert np.allclose(cloud.points[['red', 'green', 'blue']].values / 255., triangle_mesh.vertex_colors)

     assert {'nx', 'ny', 'nz'}.issubset(cloud.points.columns)
-    assert np.allclose(cloud.points[['nx', 'ny', 'nz']].values, triangle_mesh.vertex_normals) 
+    assert np.allclose(cloud.points[['nx', 'ny', 'nz']].values, triangle_mesh.vertex_normals)
diff --git a/tests/integration/io/test_to_file.py b/tests/integration/io/test_to_file.py
index 9c18879..1d1e0be 100644
--- a/tests/integration/io/test_to_file.py
+++ b/tests/integration/io/test_to_file.py
@@ -35,6 +35,7 @@ def test_to_file(tmpdir, diamond, extension, color, mesh, comments):
     if comments:
         assert written_file.comments == ["PyntCloud is cool"]

+
 def test_to_bin_raises_ValueError_if_invalid_kwargs(tmpdir, diamond):
     with pytest.raises(ValueError):
         diamond.to_file(str(tmpdir.join("written.bin")), also_save=["mesh"])
diff --git a/tests/integration/io/test_to_instance.py b/tests/integration/io/test_to_instance.py
index 3d72d7f..51ea745 100644
--- a/tests/integration/io/test_to_instance.py
+++ b/tests/integration/io/test_to_instance.py
@@ -5,14 +5,14 @@
 try:
     import pyvista as pv
     SKIP_PYVISTA = False
-except:
+except:  # noqa: E722
     pv = None
     SKIP_PYVISTA = True

 try:
     import open3d as o3d
     SKIP_OPEN3D = False
-except:
+except:  # noqa: E722
     o3d = None
     SKIP_OPEN3D = True

@@ -41,4 +41,4 @@ def test_open3d_triangle_mesh_conversion(data_path):
     triangle_mesh = cloud.to_instance("open3d")
     assert isinstance(triangle_mesh, o3d.geometry.TriangleMesh)
     assert np.allclose(cloud.xyz, triangle_mesh.vertices)
-    assert np.allclose(cloud.mesh.values, triangle_mesh.triangles)
\ No newline at end of file
+    assert np.allclose(cloud.mesh.values, triangle_mesh.triangles)
diff --git a/tests/integration/samplers/test_mesh_samplers.py b/tests/integration/samplers/test_mesh_samplers.py
index b03842b..4fe2ce4 100644
--- a/tests/integration/samplers/test_mesh_samplers.py
+++ b/tests/integration/samplers/test_mesh_samplers.py
@@ -78,4 +78,3 @@ def test_mesh_random_sampling_sampled_points_bounds(diamond, n):

     assert all(sample[["x", "y", "z"]].values.max(0) <= diamond.xyz.max(0))
     assert all(sample[["x", "y", "z"]].values.min(0) >= diamond.xyz.min(0))
-
diff --git a/tests/integration/samplers/test_voxelgrid_samplers.py b/tests/integration/samplers/test_voxelgrid_samplers.py
index af0536d..0b41f49 100644
--- a/tests/integration/samplers/test_voxelgrid_samplers.py
+++ b/tests/integration/samplers/test_voxelgrid_samplers.py
@@ -133,7 +133,7 @@ def test_voxelgrid_highest_expected_values(simple_pyntcloud, size_x, expected_n,
     voxelgrid_id = simple_pyntcloud.add_structure(
         "voxelgrid",
         size_x=size_x)
-    sample = simple_pyntcloud.get_sample( 
+    sample = simple_pyntcloud.get_sample(
         "voxelgrid_highest",
         voxelgrid_id=voxelgrid_id)
diff --git a/tests/integration/scalar_fields/test_eigenvalues_scalar_fields.py b/tests/integration/scalar_fields/test_eigenvalues_scalar_fields.py
index 40cfc0b..94c81b0 100644
--- a/tests/integration/scalar_fields/test_eigenvalues_scalar_fields.py
+++ b/tests/integration/scalar_fields/test_eigenvalues_scalar_fields.py
@@ -2,6 +2,7 @@

 import numpy as np

+
 @pytest.mark.parametrize("scalar_field_name", [
     "anisotropy",
     "planarity"
@@ -46,6 +47,3 @@ def test_eigen_sum_values(pyntcloud_and_eigenvalues):
         ev=ev)
     scalar_field_values = cloud.points[scalar_field].values
     assert all(scalar_field_values > 0)
-
-
-
diff --git a/tests/integration/scalar_fields/test_normals_scalar_fields.py b/tests/integration/scalar_fields/test_normals_scalar_fields.py
index 5c40b6a..6e15e0f 100644
--- a/tests/integration/scalar_fields/test_normals_scalar_fields.py
+++ b/tests/integration/scalar_fields/test_normals_scalar_fields.py
@@ -34,4 +34,3 @@ def test_normal_scalar_fields_bounds(pyntcloud_with_rgb_and_normals, scalar_fiel
     scalar_field_values = pyntcloud_with_rgb_and_normals.points[scalar_field]
     assert all(scalar_field_values >= min_val)
     assert all(scalar_field_values <= max_val)
-
diff --git a/tests/integration/scalar_fields/test_xyz_scalar_fields.py b/tests/integration/scalar_fields/test_xyz_scalar_fields.py
index aaa7624..f9c5d09 100644
--- a/tests/integration/scalar_fields/test_xyz_scalar_fields.py
+++ b/tests/integration/scalar_fields/test_xyz_scalar_fields.py
@@ -68,4 +68,3 @@ def test_cylindrical_coords_bounds(pyntcloud_with_rgb_and_normals):
         degrees=False)
     assert all(pyntcloud_with_rgb_and_normals.points["angular_cylindrical"] >= - (np.pi / 2))
     assert all(pyntcloud_with_rgb_and_normals.points["angular_cylindrical"] <= (np.pi * 1.5))
-
diff --git a/tests/unit/filters/test_kdtree_filters.py b/tests/unit/filters/test_kdtree_filters.py
index eaadaeb..c127cce 100644
--- a/tests/unit/filters/test_kdtree_filters.py
+++ b/tests/unit/filters/test_kdtree_filters.py
@@ -8,6 +8,7 @@
     StatisticalOutlierRemovalFilter
 )

+
 @pytest.mark.parametrize("kdtree_id", [
     "FOO",
     "K",
@@ -75,4 +76,4 @@ def test_SORFilter_expected_results(pyntcloud_with_kdtree_and_kdtree_id, k, z_ma
     filter.extract_info()
     result = filter.compute()

-    assert_array_equal(result, expected_result)
\ No newline at end of file
+    assert_array_equal(result, expected_result)
diff --git a/tests/unit/filters/test_xyz_filters.py b/tests/unit/filters/test_xyz_filters.py
index 08ca3c9..67b6bdc 100644
--- a/tests/unit/filters/test_xyz_filters.py
+++ b/tests/unit/filters/test_xyz_filters.py
@@ -50,4 +50,3 @@ def test_BoundingBoxFilter_expected_results(simple_pyntcloud, bounding_box, expe
     result = bbox_filter.compute()

     assert_array_equal(result, expected_result)
-
diff --git a/tests/unit/samplers/test_mesh_samplers.py b/tests/unit/samplers/test_mesh_samplers.py
index 64ab354..f5e5a85 100644
--- a/tests/unit/samplers/test_mesh_samplers.py
+++ b/tests/unit/samplers/test_mesh_samplers.py
@@ -65,4 +65,3 @@ def test_RandomMeshSampler_sampled_points_bounds(diamond, n):
     sample = sampler.compute()
     assert all(sample[["x", "y", "z"]].values.max(0) <= diamond.xyz.max(0))
     assert all(sample[["x", "y", "z"]].values.min(0) >= diamond.xyz.min(0))
-
diff --git a/tests/unit/samplers/test_points_samplers.py b/tests/unit/samplers/test_points_samplers.py
index c0f5fea..9981768 100644
--- a/tests/unit/samplers/test_points_samplers.py
+++ b/tests/unit/samplers/test_points_samplers.py
@@ -40,4 +40,3 @@ def test_RandomPointsSampler_sampled_points_are_from_original(simple_pyntcloud):
     sample = sampler.compute()

     assert point_in_array_2D(sample, simple_pyntcloud.xyz)
-
diff --git a/tests/unit/scalar_fields/test_eigenvalues_scalar_fields.py b/tests/unit/scalar_fields/test_eigenvalues_scalar_fields.py
index 0e8ab90..150b3a9 100644
--- a/tests/unit/scalar_fields/test_eigenvalues_scalar_fields.py
+++ b/tests/unit/scalar_fields/test_eigenvalues_scalar_fields.py
@@ -64,6 +64,3 @@ def test_EigenSum_values(pyntcloud_and_eigenvalues):
     scalar_field.compute()
     scalar_field_values = next(iter(scalar_field.to_be_added.values()))
     assert all(scalar_field_values > 0)
-
-
-
diff --git a/tests/unit/scalar_fields/test_normals_scalar_fields.py b/tests/unit/scalar_fields/test_normals_scalar_fields.py
index 9c62095..a4f01ff 100644
--- a/tests/unit/scalar_fields/test_normals_scalar_fields.py
+++ b/tests/unit/scalar_fields/test_normals_scalar_fields.py
@@ -43,4 +43,3 @@ def test_NormalsScalarFields_bounds(pyntcloud_with_rgb_and_normals, scalar_field
     scalar_field_values = next(iter(scalar_field.to_be_added.values()))
     assert all(scalar_field_values >= min_val)
     assert all(scalar_field_values <= max_val)
-
diff --git a/tests/unit/scalar_fields/test_voxlegrid_scalar_fields.py b/tests/unit/scalar_fields/test_voxlegrid_scalar_fields.py
index d6df167..3e71334 100644
--- a/tests/unit/scalar_fields/test_voxlegrid_scalar_fields.py
+++ b/tests/unit/scalar_fields/test_voxlegrid_scalar_fields.py
@@ -76,4 +76,4 @@ def test_EuclideanClusters_values(pyntcloud_with_clusters_and_voxelgrid_id):
     with np.errstate(divide='ignore', invalid='ignore'):
         scalar_field.compute()
     scalar_field_values = next(iter(scalar_field.to_be_added.values()))
-    assert all(scalar_field_values[:5] != scalar_field_values[5:])
\ No newline at end of file
+    assert all(scalar_field_values[:5] != scalar_field_values[5:])
diff --git a/tests/unit/scalar_fields/test_xyz_scalar_fields.py b/tests/unit/scalar_fields/test_xyz_scalar_fields.py
index 5390439..819efb0 100644
--- a/tests/unit/scalar_fields/test_xyz_scalar_fields.py
+++ b/tests/unit/scalar_fields/test_xyz_scalar_fields.py
@@ -96,4 +96,3 @@ def test_CylindricalCoordinates_bounds(pyntcloud_with_rgb_and_normals):

     assert all(scalar_field.to_be_added["angular_cylindrical"] >= - (np.pi / 2))
     assert all(scalar_field.to_be_added["angular_cylindrical"] <= (np.pi * 1.5))
-
diff --git a/tests/unit/structures/test_voxelgrid_structures.py b/tests/unit/structures/test_voxelgrid_structures.py
index 789894d..1018984 100644
--- a/tests/unit/structures/test_voxelgrid_structures.py
+++ b/tests/unit/structures/test_voxelgrid_structures.py
@@ -61,7 +61,8 @@ def test_regular_bounding_box_changes_the_shape_of_the_bounding_box(x, y, z):
         "x": np.array(x, dtype=np.float32),
         "y": np.array(y, dtype=np.float32),
         "z": np.array(z, dtype=np.float32)
-    }))
+    }
+    ))

     voxelgrid = VoxelGrid(points=cloud.xyz, n_x=2, n_y=2, n_z=2, regular_bounding_box=False)
     voxelgrid.compute()
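
A minimal sketch of how the new to_file behaviour could be exercised once these four
patches are applied. The column name "voxel_n" mirrors the example in the third commit;
the output file name "example.ply" and the random data are only illustrative.

    import numpy as np
    import pandas as pd
    from pyntcloud import PyntCloud

    points = pd.DataFrame({
        "x": np.random.rand(100),
        "y": np.random.rand(100),
        "z": np.random.rand(100),
        # int64 has no native ply type; values 0-7 should be downcast to int8 on write
        "voxel_n": np.random.randint(0, 8, 100),
    })
    cloud = PyntCloud(points)

    # keep float64 coordinates instead of the default float32 conversion
    cloud.to_file("example.ply", force_float32=False)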