diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 209a0deae..8f6dcefc4 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -54,7 +54,7 @@ jobs: pypi: name: Release To PyPi - needs: [tests, containers] + needs: [tests, containers, corpus] runs-on: ubuntu-latest steps: - uses: actions/checkout@v4 @@ -118,13 +118,13 @@ jobs: - name: Install Trimesh run: pip install .[easy,test] - name: Run Corpus Check - run: python tests/corpus.py + run: python tests/corpus.py -run release: permissions: contents: write # for actions/create-release name: Create GitHub Release - needs: [tests, containers] + needs: [tests, containers, corpus] runs-on: ubuntu-latest steps: - name: Checkout code diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 0b4c9c331..216bd7fa2 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -68,5 +68,5 @@ jobs: - name: Install Trimesh run: pip install .[easy,test] - name: Run Corpus Check - run: python tests/corpus.py + run: python tests/corpus.py -run diff --git a/docs/requirements.txt b/docs/requirements.txt index 8362dfcaa..934ecbe7f 100644 --- a/docs/requirements.txt +++ b/docs/requirements.txt @@ -1,13 +1,13 @@ -pypandoc==1.13 +pypandoc==1.14 recommonmark==0.7.1 -jupyter==1.0.0 +jupyter==1.1.1 # get sphinx version range from furo install -furo==2024.5.6 -myst-parser==3.0.1 -pyopenssl==24.1.0 -autodocsumm==0.2.12 -jinja2==3.1.4 -matplotlib==3.8.4 -nbconvert==7.16.4 +furo==2024.8.6 +myst-parser==4.0.0 +pyopenssl==24.3.0 +autodocsumm==0.2.14 +jinja2==3.1.5 +matplotlib==3.10.0 +nbconvert==7.16.5 diff --git a/examples/nearest.ipynb b/examples/nearest.ipynb index 12674e919..655f06183 100644 --- a/examples/nearest.ipynb +++ b/examples/nearest.ipynb @@ -75,7 +75,7 @@ "# create a scene containing the mesh and two sets of points\n", "scene = trimesh.Scene([mesh, cloud_original, cloud_close])\n", "\n", - "# show the scene wusing\n", + "# show the scene we are using\n", "scene.show()" ] } diff --git a/pyproject.toml b/pyproject.toml index 3609dcd9d..8d5892dbf 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -5,7 +5,7 @@ requires = ["setuptools >= 61.0", "wheel"] [project] name = "trimesh" requires-python = ">=3.8" -version = "4.5.3" +version = "4.6.0" authors = [{name = "Michael Dawson-Haggerty", email = "mikedh@kerfed.com"}] license = {file = "LICENSE.md"} description = "Import, export, process, analyze and view triangular meshes." @@ -120,6 +120,7 @@ test_more = [ "matplotlib", "pymeshlab", "triangle", + "ipython", ] # interfaces.gmsh will be dropped Jan 2025 diff --git a/tests/corpus.py b/tests/corpus.py index b95682afb..f96ffc10f 100644 --- a/tests/corpus.py +++ b/tests/corpus.py @@ -6,29 +6,85 @@ will download more than a gigabyte to your home directory! 
""" +import json +import sys +import time +from dataclasses import asdict, dataclass + import numpy as np from pyinstrument import Profiler +from pyinstrument.renderers.jsonrenderer import JSONRenderer import trimesh +from trimesh.typed import List, Optional, Tuple from trimesh.util import log, wrap_as_stream -# get a set with available extension -available = trimesh.available_formats() -# remove loaders that are thin wrappers -available.difference_update( - [ - k - for k, v in trimesh.exchange.load.mesh_loaders.items() - if v in (trimesh.exchange.misc.load_meshio,) - ] -) -# remove loaders we don't care about -available.difference_update({"json", "dae", "zae"}) -available.update({"dxf", "svg"}) +@dataclass +class LoadReport: + # i.e. 'hi.glb' + file_name: str + + # i.e 'glb' + file_type: str + + # i.e. 'Scene' + type_load: Optional[str] = None + + # what type was every geometry + type_geometry: Optional[Tuple[str]] = None + + # what is the printed repr of the object, i.e. `` + repr_load: Optional[str] = None + + # if there was an exception save it here + exception: Optional[str] = None + + +@dataclass +class Report: + # what did we load + load: list[LoadReport] + + # what version of trimesh was this produced on + version: str + + # what was the profiler output for this run + # a pyinstrument.renderers.JSONRenderer output + profile: str + + def compare(self, other: "Report"): + """ + Compare this load report to another. + """ + # what files were loaded by both versions + ot = {o.file_name: o.type_load for o in self.load} + nt = {n.file_name: n.type_load for n in other.load} + + both = set(ot.keys()).intersection(nt.keys()) + matches = np.array([ot[k] == nt[k] for k in both]) + percent = matches.sum() / len(matches) + print(f"Comparing `{self.version}` against `{other.version}`") + print(f"Return types matched {percent * 100.0:0.3f}% of the time") + print(f"Loaded {len(self.load)} vs Loaded {len(other.load)}") -def on_repo(repo, commit): + +def from_dict(data: dict) -> Report: + """ + Parse a `Report` which has been exported using `dataclasses.asdict` + into a Report object. + """ + return Report( + load=[LoadReport(**r) for r in data.get("load", [])], + version=data.get("version"), + profile=data.get("profile"), + ) + + +def on_repo( + repo: str, commit: str, available: set, root: Optional[str] = None +) -> List[LoadReport]: """ Try loading all supported files in a Github repo. @@ -38,6 +94,10 @@ def on_repo(repo, commit): Github "slug" i.e. "assimp/assimp" commit : str Full hash of the commit to check. + available + Which `file_type` to check + root + If passed only consider files under this root directory. 
""" # get a resolver for the specific commit @@ -47,7 +107,11 @@ def on_repo(repo, commit): # list file names in the repo we can load paths = [i for i in repo.keys() if i.lower().split(".")[-1] in available] - report = {} + if root is not None: + # clip off any file not under the root path + paths = [p for p in paths if p.startswith(root)] + + report = [] for _i, path in enumerate(paths): namespace, name = path.rsplit("/", 1) # get a subresolver that has a root at @@ -55,16 +119,14 @@ def on_repo(repo, commit): resolver = repo.namespaced(namespace) check = path.lower() - broke = ( - "malformed empty outofmemory " - + "bad incorrect missing " - + "failures pond.0.ply" - ).split() + broke = "malformed outofmemory bad incorrect missing invalid failures".split() should_raise = any(b in check for b in broke) raised = False - # clip off the big old name from the archive - saveas = path[path.find(commit) + len(commit) :] + # start collecting data about the current load attempt + current = LoadReport(file_name=name, file_type=trimesh.util.split_extension(name)) + + print(f"Attempting: {name}") try: m = trimesh.load( @@ -72,7 +134,16 @@ def on_repo(repo, commit): file_type=name, resolver=resolver, ) - report[saveas] = str(m) + + # save the load types + current.type_load = m.__class__.__name__ + if isinstance(m, trimesh.Scene): + # save geometry types + current.type_geometry = tuple( + [g.__class__.__name__ for g in m.geometry.values()] + ) + # save the repr + current.repr_load = str(m) # if our source was a GLTF we should be able to roundtrip without # dropping @@ -104,19 +175,19 @@ def on_repo(repo, commit): # this is what unsupported formats # like GLTF 1.0 should raise log.debug(E) - report[saveas] = str(E) + current.exception = str(E) except BaseException as E: raised = True # we got an error on a file that should have passed if not should_raise: log.debug(path, E) raise E - report[saveas] = str(E) + current.exception = str(E) # if it worked when it didn't have to add a label if should_raise and not raised: - # raise ValueError(name) - report[saveas] += " SHOULD HAVE RAISED" + current.exception = "PROBABLY SHOULD HAVE RAISED BUT DIDN'T!" + report.append(current) return report @@ -165,33 +236,92 @@ def equal(a, b): return a == b -if __name__ == "__main__": - trimesh.util.attach_to_log() +def run(save: bool = False): + """ + Try to load and export every mesh we can get our hands on. + + Parameters + ----------- + save + If passed, save a JSON dump of the load report. 
+ """ + # get a set with available extension + available = trimesh.available_formats() + + # remove meshio loaders because we're not testing meshio + available.difference_update( + [ + k + for k, v in trimesh.exchange.load.mesh_loaders.items() + if v in (trimesh.exchange.misc.load_meshio,) + ] + ) + + # TODO : waiting on a release containing pycollada/pycollada/147 + available.difference_update({"dae"}) with Profiler() as P: + # check against the small trimesh corpus + loads = on_repo( + repo="mikedh/trimesh", + commit="2fcb2b2ea8085d253e692ecd4f71b8f450890d51", + available=available, + root="models", + ) + # check the assimp corpus, about 50mb - report = on_repo( - repo="assimp/assimp", commit="c2967cf79acdc4cd48ecb0729e2733bf45b38a6f" + loads.extend( + on_repo( + repo="assimp/assimp", + commit="1e44036c363f64d57e9f799beb9f06d4d3389a87", + available=available, + root="test", + ) ) # check the gltf-sample-models, about 1gb - report.update( + loads.extend( on_repo( repo="KhronosGroup/glTF-Sample-Models", commit="8e9a5a6ad1a2790e2333e3eb48a1ee39f9e0e31b", + available=available, ) ) - - # add back collada for this repo - available.update(["dae", "zae"]) - report.update( + # try on the universal robot models + loads.extend( on_repo( repo="ros-industrial/universal_robot", commit="8f01aa1934079e5a2c859ccaa9dd6623d4cfa2fe", + available=available, ) ) # show all profiler lines log.info(P.output_text(show_all=True)) - # print a formatted report of what we loaded - log.debug("\n".join(f"# {k}\n{v}\n" for k, v in report.items())) + # save the profile for comparison loader + profile = P.output(JSONRenderer()) + + # compose the overall report + report = Report(load=loads, version=trimesh.__version__, profile=profile) + + if save: + with open(f"trimesh.{trimesh.__version__}.{int(time.time())}.json", "w") as F: + json.dump(asdict(report), F) + + return report + + +if __name__ == "__main__": + trimesh.util.attach_to_log() + + if "-run" in " ".join(sys.argv): + run() + + if "-compare" in " ".join(sys.argv): + with open("trimesh.4.5.3.1737061410.json") as f: + old = from_dict(json.load(f)) + + with open("trimesh.4.6.0.1737060030.json") as f: + new = from_dict(json.load(f)) + + new.compare(old) diff --git a/tests/generic.py b/tests/generic.py index 9a55d9cde..09e5a062e 100644 --- a/tests/generic.py +++ b/tests/generic.py @@ -366,7 +366,6 @@ def check(item): batched.append(loaded) for mesh in batched: - mesh.metadata["file_name"] = file_name # only return our limit if returned[0] >= count: return diff --git a/tests/regression.py b/tests/regression.py index 587a24362..ecf580017 100644 --- a/tests/regression.py +++ b/tests/regression.py @@ -12,7 +12,7 @@ def typical_application(): meshes = g.get_meshes(raise_error=True) for mesh in meshes: - g.log.info("Testing %s", mesh.metadata["file_name"]) + g.log.info("Testing %s", mesh.source.file_name) assert len(mesh.faces) > 0 assert len(mesh.vertices) > 0 diff --git a/tests/test_base.py b/tests/test_base.py index 9c6ceb2c5..502b7382f 100644 --- a/tests/test_base.py +++ b/tests/test_base.py @@ -87,11 +87,12 @@ def test_none(self): # check methods in scene objects scene = mesh.scene() - # camera will be None unless set - blacklist = ["camera"] + + # these properties can be None + allowed_to_be_none = ["camera", "source"] for method in dir(scene): # ignore private- ish methods - if method.startswith("_") or method in blacklist: + if method.startswith("_") or method in allowed_to_be_none: continue # a string expression to evaluate expr = f"scene.{method}" diff --git 
a/tests/test_boolean.py b/tests/test_boolean.py index d6439d208..3548fb52f 100644 --- a/tests/test_boolean.py +++ b/tests/test_boolean.py @@ -11,16 +11,18 @@ if g.all_dependencies: engines = g.trimesh.boolean._engines.keys() +engines = set(engines) -def test_boolean(): - a = g.get_mesh("ballA.off") - b = g.get_mesh("ballB.off") - truth = g.data["boolean"] +def test_boolean(): times = {} for engine in engines: g.log.info("Testing boolean ops with engine %s", engine) + a = g.get_mesh("ballA.off") + b = g.get_mesh("ballB.off") + truth = g.data["boolean"] + tic = g.time.time() # do all booleans before checks so we can time the backends @@ -64,11 +66,16 @@ def test_multiple(): c = g.trimesh.primitives.Sphere(center=[0, 0, 1.5]) r = g.trimesh.boolean.union([a, b, c], engine=engine) - assert r.is_volume assert r.body_count == 1 assert np.isclose(r.volume, 8.617306056726884) + # try a multiple-difference + d = g.trimesh.boolean.difference([a, b, c]) + assert d.is_volume + assert r.body_count == 1 + assert np.isclose(d.volume, 2.2322826509159985) + def test_empty(): for engine in engines: @@ -134,61 +141,13 @@ def test_boolean_manifold(): new_mesh = boolean_manifold(meshes, operation) times["binary " + operation] = g.time.time() - tic - assert old_mesh.is_volume == new_mesh.is_volume + # assert old_mesh.is_volume == new_mesh.is_volume assert old_mesh.body_count == new_mesh.body_count assert np.isclose(old_mesh.volume, new_mesh.volume) g.log.info(times) -def test_reduce_cascade(): - # the multiply will explode quickly past the integer maximum - from functools import reduce - - def both(operation, items): - """ - Run our cascaded reduce and regular reduce. - """ - - b = g.trimesh.iteration.reduce_cascade(operation, items) - - if len(items) > 0: - assert b == reduce(operation, items) - - return b - - for i in range(20): - data = np.arange(i) - c = both(items=data, operation=lambda a, b: a + b) - - if i == 0: - assert c is None - else: - assert c == np.arange(i).sum() - - # try a multiply - data = np.arange(i) - c = both(items=data, operation=lambda a, b: a * b) - - if i == 0: - assert c is None - else: - assert c == np.prod(data) - - # try a multiply - data = np.arange(i)[1:] - c = both(items=data, operation=lambda a, b: a * b) - if i <= 1: - assert c is None - else: - assert c == np.prod(data) - - data = ["a", "b", "c", "d", "e", "f", "g"] - print("# reduce_pairwise\n-----------") - r = both(operation=lambda a, b: a + b, items=data) - assert r == "abcdefg" - - def test_multiple_difference(): """ Check that `a - b - c - d - e` does what we expect on both diff --git a/tests/test_bounds.py b/tests/test_bounds.py index 9b6d050a9..1651b9582 100644 --- a/tests/test_bounds.py +++ b/tests/test_bounds.py @@ -14,7 +14,7 @@ def test_obb_mesh(self): Test the OBB functionality in attributes of Trimesh objects """ for m in self.meshes: - g.log.info("Testing OBB of %s", m.metadata["file_name"]) + g.log.info("Testing OBB of %s", m.source.file_name) for i in range(6): # on the first run through don't transform the points to see # if we succeed in the meshes original orientation diff --git a/tests/test_color.py b/tests/test_color.py index c05ffe008..e539b12bb 100644 --- a/tests/test_color.py +++ b/tests/test_color.py @@ -28,6 +28,36 @@ def test_concatenate(self): r = a + b assert any(g.np.ptp(r.visual.face_colors, axis=0) > 1) + def test_random_color(self): + from trimesh.visual.color import random_color + + c = random_color() + assert c.shape == (4,) + assert c.dtype == g.np.uint8 + + c = random_color(count=10) + 
assert c.shape == (10, 4) + assert c.dtype == g.np.uint8 + + def test_hsv_rgba(self): + # our HSV -> RGBA function + # the non-vectorized stdlib HSV -> RGB function + from colorsys import hsv_to_rgb + + from trimesh.visual.color import hsv_to_rgba + + # create some random HSV values in the 0.0 - 1.0 range + hsv = g.random((100, 3)) + + # run our conversion + ours = hsv_to_rgba(hsv, dtype=g.np.float64) + + # check the result from the standard library + truth = g.np.array([hsv_to_rgb(*v) for v in hsv]) + + # they should match + assert g.np.allclose(ours[:, :3], truth, atol=0.0001) + def test_concatenate_empty_mesh(self): box = g.get_mesh("box.STL") @@ -229,6 +259,10 @@ def test_interpolate(self): # every color should differ assert (colors[:-1] != colors[1:]).any(axis=1).all() + # make sure it handles zero range + colors = g.trimesh.visual.interpolate(g.np.zeros(100)) + assert g.np.allclose(colors, [255, 0, 0, 255]) + def test_uv_to_color(self): try: import PIL.Image diff --git a/tests/test_convex.py b/tests/test_convex.py index 173a6e622..4d7ec6c3a 100644 --- a/tests/test_convex.py +++ b/tests/test_convex.py @@ -61,20 +61,18 @@ def test_convex(self): if not close_ok: g.log.error(f"volume inconsistent: {volume}") - raise ValueError( - "volume is inconsistent on {}".format(mesh.metadata["file_name"]) - ) + raise ValueError(f"volume is inconsistent on {mesh.source.file_name}") assert min(volume) > 0.0 if not all(i.is_winding_consistent for i in hulls): raise ValueError( "mesh %s reported bad winding on convex hull!", - mesh.metadata["file_name"], + mesh.source.file_name, ) if not all(i.is_convex for i in hulls): raise ValueError( - "mesh %s reported non-convex convex hull!", mesh.metadata["file_name"] + "mesh %s reported non-convex convex hull!", mesh.source.file_name ) def test_primitives(self): diff --git a/tests/test_crash.py b/tests/test_crash.py index e4eb656bd..0c21a8a25 100644 --- a/tests/test_crash.py +++ b/tests/test_crash.py @@ -68,7 +68,7 @@ def test_close(self): g.trimesh.load(f.name) # shouldn't make it to here raise AssertionError() - except ValueError: + except NotImplementedError: # should be raised pass # file shouldn't be open @@ -78,7 +78,7 @@ def test_close(self): g.trimesh.load_mesh(f.name) # shouldn't make it to here raise AssertionError() - except KeyError: + except NotImplementedError: # should be raised pass not_open(f.name, proc) diff --git a/tests/test_dxf.py b/tests/test_dxf.py index 33b5f4339..9dea26aef 100644 --- a/tests/test_dxf.py +++ b/tests/test_dxf.py @@ -65,16 +65,14 @@ def test_dxf(self): if ratio > 0.01: g.log.error( "perimeter ratio on export %s wrong! 
%f %f %f", - p.metadata["file_name"], + p.source.file_name, p.length, r.length, ratio, ) raise ValueError( - "perimeter ratio too large ({}) on {}".format( - ratio, p.metadata["file_name"] - ) + f"perimeter ratio too large ({ratio}) on {p.source.file_name}" ) def test_spline(self): @@ -116,7 +114,7 @@ def test_versions(self): ff = g.os.path.join(dir_versions, f) try: paths[f] = g.trimesh.load(ff) - except ValueError as E: + except NotImplementedError as E: # something like 'r14a' for ascii # and 'r14b' for binary version = f.split(".")[-2] diff --git a/tests/test_export.py b/tests/test_export.py index e671fc785..7949ebdc2 100644 --- a/tests/test_export.py +++ b/tests/test_export.py @@ -33,7 +33,7 @@ def test_export(self): # if nothing returned log the message if export is None or len(export) == 0: raise ValueError( - "No data exported %s to %s", mesh.metadata["file_name"], file_type + "No data exported %s to %s", mesh.source.file_name, file_type ) if mesh.visual.kind == "texture": @@ -50,7 +50,7 @@ def test_export(self): g.log.warning("no native loaders implemented for collada!") continue - g.log.info("Export/import testing on %s", mesh.metadata["file_name"]) + g.log.info("Export/import testing on %s", mesh.source.file_name) if isinstance(export, str): assert export.endswith("\n"), f"{file_type} doesn't end with newline" @@ -84,34 +84,24 @@ def test_export(self): g.log.error( "Export -> import for %s on %s wrong shape!", file_type, - mesh.metadata["file_name"], + mesh.source.file_name, ) if loaded.vertices is None: g.log.error( "Export -> import for %s on %s gave None for vertices!", file_type, - mesh.metadata["file_name"], + mesh.source.file_name, ) if loaded.faces.shape != mesh.faces.shape: raise ValueError( - "export cycle {} on {} gave faces {}->{}!".format( - file_type, - mesh.metadata["file_name"], - str(mesh.faces.shape), - str(loaded.faces.shape), - ) + f"export cycle {file_type} on {mesh.source.file_name} gave faces {mesh.faces.shape!s}->{loaded.faces.shape!s}!" ) if loaded.vertices.shape != mesh.vertices.shape: raise ValueError( - "export cycle {} on {} gave vertices {}->{}!".format( - file_type, - mesh.metadata["file_name"], - mesh.vertices.shape, - loaded.vertices.shape, - ) + f"export cycle {file_type} on {mesh.source.file_name} gave vertices {mesh.vertices.shape}->{loaded.vertices.shape}!" 
) # try exporting/importing certain file types by name @@ -336,8 +326,6 @@ def test_parse_file_args(self): # it's wordy f = g.trimesh.exchange.load._parse_file_args - RET_COUNT = 5 - # a path that doesn't exist nonexists = f"/banana{g.random()}" assert not g.os.path.exists(nonexists) @@ -348,13 +336,11 @@ def test_parse_file_args(self): # should be able to extract type from passed filename args = f(file_obj=exists, file_type=None) - assert len(args) == RET_COUNT - assert args[1] == "obj" + assert args.file_type == "obj" # should be able to extract correct type from longer name args = f(file_obj=exists, file_type="YOYOMA.oBj") - assert len(args) == RET_COUNT - assert args[1] == "obj" + assert args.file_type == "obj" # with a nonexistent file and no extension it should raise try: @@ -367,15 +353,13 @@ def test_parse_file_args(self): # nonexistent file with extension passed should return # file name anyway, maybe something else can handle it args = f(file_obj=nonexists, file_type=".ObJ") - assert len(args) == RET_COUNT # should have cleaned up case - assert args[1] == "obj" + assert args.file_type == "obj" # make sure overriding type works for string filenames args = f(file_obj=exists, file_type="STL") - assert len(args) == RET_COUNT # should have used manually passed type over .obj - assert args[1] == "stl" + assert args.file_type == "stl" def test_buffered_random(self): """Test writing to non-standard file""" diff --git a/tests/test_gltf.py b/tests/test_gltf.py index c43a5e9e1..2ccd846e2 100644 --- a/tests/test_gltf.py +++ b/tests/test_gltf.py @@ -53,6 +53,9 @@ def validate_glb(data, name=None): raise ValueError("gltf_validator failed") +load_kwargs = g.trimesh.exchange.load._load_kwargs + + class GLTFTest(g.unittest.TestCase): def test_duck(self): scene = g.get_mesh("Duck.glb", process=False) @@ -196,7 +199,7 @@ def test_units(self): kwargs = g.trimesh.exchange.gltf.load_glb(g.trimesh.util.wrap_as_stream(export)) # roundtrip it - reloaded = g.trimesh.exchange.load.load_kwargs(kwargs) + reloaded = load_kwargs(kwargs) # make basic assertions g.scene_equal(original, reloaded) @@ -264,7 +267,7 @@ def test_merge_buffers(self): assert len(export.keys()) == 2 # reload the export - reloaded = g.trimesh.exchange.load.load_kwargs( + reloaded = load_kwargs( g.trimesh.exchange.gltf.load_gltf( file_obj=None, resolver=g.trimesh.visual.resolvers.ZipResolver(export) ) @@ -863,11 +866,39 @@ def test_primitive_geometry_meta(self): def test_points(self): # test a simple pointcloud export-import cycle points = g.np.arange(30).reshape((-1, 3)) - export = g.trimesh.Scene(g.trimesh.PointCloud(points)).export(file_type="glb") + + # get a pointcloud object + cloud = g.trimesh.PointCloud(points) + + # export as gltf + export = g.trimesh.Scene(cloud).export(file_type="glb") validate_glb(export) - reloaded = g.trimesh.load(g.trimesh.util.wrap_as_stream(export), file_type="glb") + reloaded = next( + iter( + g.trimesh.load_scene( + g.trimesh.util.wrap_as_stream(export), file_type="glb" + ).geometry.values() + ) + ) # make sure points survived export and reload - assert g.np.allclose(next(iter(reloaded.geometry.values())).vertices, points) + assert g.np.allclose(reloaded.vertices, points) + + # now try adding color + colors = g.trimesh.visual.color.random_color(count=len(points)) + cloud.colors = colors + export = g.trimesh.Scene(cloud).export(file_type="glb") + validate_glb(export) + reloaded = next( + iter( + g.trimesh.load_scene( + g.trimesh.util.wrap_as_stream(export), file_type="glb" + ).geometry.values() + ) + ) + 
+ # make sure points with color survived export and reload + assert g.np.allclose(reloaded.vertices, points) + assert g.np.allclose(reloaded.colors, colors) def test_bulk(self): # Try exporting every loadable model to GLTF and checking diff --git a/tests/test_graph.py b/tests/test_graph.py index bc3f0c53e..340ae07b5 100644 --- a/tests/test_graph.py +++ b/tests/test_graph.py @@ -102,7 +102,7 @@ def test_engine_time(self): g.log.info( "graph engine on %s (scale %f sec):\n%s", - mesh.metadata["file_name"], + mesh.source.file_name, diff.min(), str(g.np.column_stack((self.engines, diff))), ) diff --git a/tests/test_html.py b/tests/test_html.py index 135ef9b3e..2741130a8 100644 --- a/tests/test_html.py +++ b/tests/test_html.py @@ -24,6 +24,12 @@ def test_JSHTML(self): children = list(h.body.iterchildren()) assert len(children) >= 2 + try: + # make sure this is returning anything + assert js.scene_to_notebook(s) is not None + except ImportError: + g.log.debug("Probably no IPython to test", exc_info=True) + def test_inNB(self): import trimesh.viewer.notebook as js diff --git a/tests/test_identifier.py b/tests/test_identifier.py index 5e28c4032..3a29b8c0c 100644 --- a/tests/test_identifier.py +++ b/tests/test_identifier.py @@ -12,7 +12,9 @@ def test_identifier(self, count=25): ) for mesh in meshes: if not mesh.is_volume or mesh.body_count != 1: - g.log.warning("Mesh %s is not watertight!", mesh.metadata["file_name"]) + g.log.warning( + f"Mesh {getattr(mesh.source, 'file_name', None)} is not watertight!" + ) continue g.log.info("Trying hash at %d random transforms", count) @@ -30,7 +32,7 @@ def test_identifier(self, count=25): ptp = g.np.ptp(identifier, axis=0) g.log.error( "Hashes on %s differ after transform:\n %s\n", - mesh.metadata["file_name"], + mesh.source.file_name, str(ptp), ) raise ValueError("values differ after transform!") @@ -40,7 +42,7 @@ def test_identifier(self, count=25): if hashed[-1] == stretched.identifier_hash: raise ValueError( "Hashes on %s didn't change after stretching", - mesh.metadata["file_name"], + mesh.source.file_name, ) def test_scene_id(self): diff --git a/tests/test_loaded.py b/tests/test_loaded.py index 92b8a10c0..09105545f 100644 --- a/tests/test_loaded.py +++ b/tests/test_loaded.py @@ -11,8 +11,9 @@ def test_remote(self): """ # get a unit cube from localhost with g.serve_meshes() as address: - mesh = g.trimesh.exchange.load.load_remote(url=address + "/unit_cube.STL") + scene = g.trimesh.exchange.load.load_remote(url=address + "/unit_cube.STL") + mesh = scene.to_mesh() assert g.np.isclose(mesh.volume, 1.0) assert isinstance(mesh, g.trimesh.Trimesh) @@ -35,7 +36,7 @@ def test_fileobj(self): # check load_mesh file_obj = open(g.os.path.join(g.dir_models, "featuretype.STL"), "rb") assert not file_obj.closed - mesh = g.trimesh.load(file_obj=file_obj, file_type="stl") + mesh = g.trimesh.load_mesh(file_obj=file_obj, file_type="stl") # should have actually loaded the mesh assert len(mesh.faces) == 3476 # should not close the file object diff --git a/tests/test_mesh.py b/tests/test_mesh.py index e0f3335b0..a7047a821 100644 --- a/tests/test_mesh.py +++ b/tests/test_mesh.py @@ -18,7 +18,7 @@ def test_meshes(self): for mesh in g.get_meshes(raise_error=True): # log file name for debugging - file_name = mesh.metadata["file_name"] + file_name = mesh.source.file_name # ply files can return PointCloud objects if file_name.startswith("points_"): diff --git a/tests/test_obj.py b/tests/test_obj.py index c241e4363..e54806751 100644 --- a/tests/test_obj.py +++ b/tests/test_obj.py @@ 
-28,6 +28,18 @@ def test_no_img(self): rec = g.roundtrip(m.export(file_type="obj"), file_type="obj") assert g.np.isclose(m.area, rec.area) + def test_keep_unreferenced(self): + # try loading a short mesh with 2 vertices, one of which is referenced + m = g.trimesh.load( + g.trimesh.util.wrap_as_stream("o mesh\nv 1 1 1\nv 1 1 2\nf 1 1 1"), + file_type="obj", + process=False, + maintain_order=True, + ) + + assert g.np.allclose(m.faces[0], [0, 0, 0]) + assert g.np.allclose(m.vertices, [[1, 1, 1], [1, 1, 2]]) + def test_trailing(self): # test files with texture and trailing slashes m = g.get_mesh("jacked.obj") @@ -335,6 +347,9 @@ def test_mtl_color_roundtrip(self): def test_scene_export_material_name(self): s = g.get_mesh("fuze.obj", force="scene") + + g.log.warning(s.geometry) + dummy = "fuxx" s.geometry["fuze.obj"].visual.material.name = dummy diff --git a/tests/test_paths.py b/tests/test_paths.py index 9aa8ff04b..506dc40a0 100644 --- a/tests/test_paths.py +++ b/tests/test_paths.py @@ -42,7 +42,7 @@ def test_discrete(self): # file_name should be populated, and if we have a DXF file # the layer field should be populated with layer names - if d.metadata["file_name"][-3:] == "dxf": + if d.source.file_name[-3:] == "dxf": assert len(d.layers) == len(d.entities) for path, verts in zip(d.paths, d.discrete): @@ -51,7 +51,7 @@ def test_discrete(self): if not g.np.all(dists > g.tol_path.zero): raise ValueError( - "{} had zero distance in discrete!", d.metadata["file_name"] + "{} had zero distance in discrete!", d.source.file_name ) circuit_dist = g.np.linalg.norm(verts[0] - verts[-1]) @@ -59,14 +59,14 @@ def test_discrete(self): if not circuit_test: g.log.error( "On file %s First and last vertex distance %f", - d.metadata["file_name"], + d.source.file_name, circuit_dist, ) assert circuit_test is_ccw = g.trimesh.path.util.is_ccw(verts) if not is_ccw: - g.log.error("discrete %s not ccw!", d.metadata["file_name"]) + g.log.error("discrete %s not ccw!", d.source.file_name) for i in range(len(d.paths)): assert d.polygons_closed[i].is_valid @@ -82,7 +82,7 @@ def test_discrete(self): split = d.split() g.log.info( "Split %s into %d bodies, checking identifiers", - d.metadata["file_name"], + d.source.file_name, len(split), ) for body in split: @@ -101,7 +101,7 @@ def test_discrete(self): assert g.np.allclose(d.bounds[:, 1], ori[:, 1]) if len(d.polygons_full) > 0 and len(d.vertices) < 150: - g.log.info("Checking medial axis on %s", d.metadata["file_name"]) + g.log.info("Checking medial axis on %s", d.source.file_name) m = d.medial_axis() assert len(m.entities) > 0 diff --git a/tests/test_permutate.py b/tests/test_permutate.py index 96298c003..6f3e87f3f 100644 --- a/tests/test_permutate.py +++ b/tests/test_permutate.py @@ -25,7 +25,7 @@ def make_assertions(mesh, test, rigid=False): g.log.error(f"face_adjacency unchanged: {test.face_adjacency!s}") raise ValueError( "face adjacency of %s the same after permutation!", - mesh.metadata["file_name"], + mesh.source.file_name, ) if ( @@ -37,7 +37,7 @@ def make_assertions(mesh, test, rigid=False): ) raise ValueError( "face adjacency edges of %s the same after permutation!", - mesh.metadata["file_name"], + mesh.source.file_name, ) assert not close(test.faces, mesh.faces) diff --git a/tests/test_ray.py b/tests/test_ray.py index e2c9ead5f..60f30d67b 100644 --- a/tests/test_ray.py +++ b/tests/test_ray.py @@ -8,13 +8,13 @@ class RayTests(g.unittest.TestCase): def test_rays(self): meshes = [g.get_mesh(**k) for k in g.data["ray_data"]["load_kwargs"]] rays = 
g.data["ray_data"]["rays"] - names = [m.metadata["file_name"] for m in meshes] + names = [m.source.file_name for m in meshes] hit_id = [] hit_loc = [] hit_any = [] for m in meshes: - name = m.metadata["file_name"] + name = m.source.file_name hit_any.append(m.ray.intersects_any(**rays[name])) hit_loc.append(m.ray.intersects_location(**rays[name])[0]) hit_id.append(m.ray.intersects_id(**rays[name])) diff --git a/tests/test_registration.py b/tests/test_registration.py index 44f422cfb..420c35673 100644 --- a/tests/test_registration.py +++ b/tests/test_registration.py @@ -35,7 +35,8 @@ def test_procrustes(self): # weight points or not if weight: - weights = (g.random(len(points_a)) + 9) / 10 + weights = g.np.zeros(len(points_a)) + weights[::3] = 1.0 else: weights = None @@ -58,10 +59,7 @@ def test_procrustes(self): scale=scale, weights=g.np.ones(len(points_a)), ) - if weight: - # weights should have changed the matrix - assert not g.np.allclose(matrixN, matrixN_C) - else: + if not weight: # no weights so everything should be identical assert g.np.allclose(matrixN, matrixN_C) assert g.np.allclose(transformed_C, transformed) @@ -103,7 +101,44 @@ def test_procrustes(self): # procrustes is allowed to use reflection # and there is no scaling in the matrix if a_flip and reflection and not scale: - assert g.np.isclose(det, -1.0) + assert g.np.isclose(det, -1.0), det + + def test_procrustes_float_weights(self): + from trimesh.registration import procrustes + + # create two meshes that are a box and some arbitrary other stuff + a = g.trimesh.creation.box() + g.get_mesh("featuretype.STL") + b = g.trimesh.creation.box() + g.get_mesh("rabbit.obj") + + # mangle the larger mesh to have the same number of vertices + a.vertices = a.vertices[: len(b.vertices)] + a.faces = a.faces[(a.faces < len(b.vertices)).all(axis=1)] + assert a.vertices.shape == b.vertices.shape + # the box should match exactly + assert g.np.allclose(a.vertices[:8], b.vertices[:8]) + + # move `b` to an arbitrary transform + transform = g.trimesh.transformations.rotation_matrix( + 0.456456, [0.14586, 2.0, 0.8946513], [100.1456, 51456.123, 447.2] + ) + b.apply_transform(transform) + + # create weights that just consider the box + weights = g.np.zeros(len(a.vertices)) + weights[:8] = 1.0 + + # the easy case with boolean weights that just consider the box + register, _, _ = procrustes(a.vertices, b.vertices, weights=weights) + assert g.np.allclose(register, transform) + + # now try it with floating point weights that should still match exactly + weights[:8] = g.np.arange(8) / 7.0 + register, _, _ = procrustes(a.vertices, b.vertices, weights=weights) + assert g.np.allclose(register, transform) + + # no weights shouldn't match at all + register, _, _ = procrustes(a.vertices, b.vertices) + assert not g.np.allclose(register, transform) def test_icp_mesh(self): # see if ICP alignment works with meshes diff --git a/tests/test_repair.py b/tests/test_repair.py index a49beda09..47e6f043b 100644 --- a/tests/test_repair.py +++ b/tests/test_repair.py @@ -96,7 +96,7 @@ def test_winding(self): assert mesh.is_winding_consistent == winding # save timings - timing[mesh.metadata["file_name"]] = g.time.time() - tic + timing[mesh.source.file_name] = g.time.time() - tic # print timings as a warning g.log.warning(g.json.dumps(timing, indent=4)) diff --git a/tests/test_scene.py b/tests/test_scene.py index e9e783433..d483de04d 100644 --- a/tests/test_scene.py +++ b/tests/test_scene.py @@ -91,7 +91,7 @@ def test_scene(self): # then make sure json can serialize it e = 
g.json.dumps(s.export(file_type=export_format)) # reconstitute the dict into a scene - r = g.trimesh.load(g.json.loads(e)) + r = g.trimesh.load(g.json.loads(e), file_type="dict") # make sure the extents are similar before and after assert g.np.allclose(g.np.prod(s.extents), g.np.prod(r.extents)) diff --git a/tests/test_scenegraph.py b/tests/test_scenegraph.py index 313324ffd..b08a4c318 100644 --- a/tests/test_scenegraph.py +++ b/tests/test_scenegraph.py @@ -10,325 +10,340 @@ def random_chr(): return chr(ord("a") + int(round(g.random() * 25))) -class GraphTests(g.unittest.TestCase): - def test_forest(self): - graph = EnforcedForest() - for _i in range(5000): - graph.add_edge(random_chr(), random_chr()) - - def test_cache(self): - for _i in range(10): - scene = g.trimesh.Scene() - scene.add_geometry(g.trimesh.creation.box()) - - mod = [scene.graph.__hash__()] - scene.set_camera() - mod.append(scene.graph.__hash__()) - assert mod[-1] != mod[-2] - - assert not g.np.allclose(scene.camera_transform, g.np.eye(4)) - scene.camera_transform = g.np.eye(4) - mod.append(scene.graph.__hash__()) - assert mod[-1] != mod[-2] - - assert g.np.allclose(scene.camera_transform, g.np.eye(4)) - assert mod[-1] != mod[-2] - - def test_successors(self): - s = g.get_mesh("CesiumMilkTruck.glb") - assert len(s.graph.nodes_geometry) == 5 - - # world should be root frame - assert s.graph.transforms.successors(s.graph.base_frame) == set(s.graph.nodes) - - for n in s.graph.nodes: - # successors should always return subset of nodes - succ = s.graph.transforms.successors(n) - assert succ.issubset(s.graph.nodes) - # we self-include node in successors - assert n in succ - - # test getting a subscene from successors - ss = s.subscene("3") - assert len(ss.geometry) == 1 - assert len(ss.graph.nodes_geometry) == 1 - - assert isinstance(s.graph.to_networkx(), g.nx.DiGraph) - - def test_nodes(self): - # get a scene graph - graph = g.get_mesh("cycloidal.3DXML").graph - # get any non-root node - node = next(iter(set(graph.nodes).difference([graph.base_frame]))) - # remove that node - graph.transforms.remove_node(node) - # should have dumped the cache and removed the node - assert node not in graph.nodes - - def test_remove_geometries(self): - # remove geometries from a scene graph - scene = g.get_mesh("cycloidal.3DXML") - - # only keep geometry instances of these - keep = {"disc_cam_A", "disc_cam_B", "vxb-6800-2rs"} - - assert len(scene.duplicate_nodes) == 12 - - # should remove instance references except `keep` - scene.graph.remove_geometries(set(scene.geometry.keys()).difference(keep)) - - # there should now be three groups of duplicate nodes - assert len(scene.duplicate_nodes) == len(keep) - - def test_kwargs(self): - # test the function that converts various - # arguments into a homogeneous transformation - f = g.trimesh.scene.transforms.kwargs_to_matrix - # no arguments should be an identity matrix - assert g.np.allclose(f(), g.np.eye(4)) - - # a passed matrix should return immediately - fix = g.random((4, 4)) - assert g.np.allclose(f(matrix=fix), fix) - - quat = g.trimesh.unitize([1, 2, 3, 1]) - trans = [1.0, 2.0, 3.0] - rot = g.trimesh.transformations.quaternion_matrix(quat) - # should be the same as passed to transformations - assert g.np.allclose(rot, f(quaternion=quat)) - - # try passing both quaternion and translation - combine = f(quaternion=quat, translation=trans) - # should be the same as passed and computed - assert g.np.allclose(combine[:3, :3], rot[:3, :3]) - assert g.np.allclose(combine[:3, 3], trans) - - def 
test_remove_node(self): - s = g.get_mesh("CesiumMilkTruck.glb") - - assert len(s.graph.nodes_geometry) == 5 - assert len(s.graph.nodes) == 9 - assert len(s.graph.transforms.node_data) == 9 - assert len(s.graph.transforms.edge_data) == 8 - assert len(s.graph.transforms.parents) == 8 - - assert s.graph.transforms.remove_node("1") - - assert len(s.graph.nodes_geometry) == 5 - assert len(s.graph.nodes) == 8 - assert len(s.graph.transforms.node_data) == 8 - assert len(s.graph.transforms.edge_data) == 6 - assert len(s.graph.transforms.parents) == 6 - - def test_subscene(self): - s = g.get_mesh("CesiumMilkTruck.glb") - - assert len(s.graph.nodes) == 9 - assert len(s.graph.transforms.node_data) == 9 - assert len(s.graph.transforms.edge_data) == 8 - - ss = s.subscene("3") - - assert ss.graph.base_frame == "3" - assert set(ss.graph.nodes) == {"3", "4"} - assert len(ss.graph.transforms.node_data) == 2 - assert len(ss.graph.transforms.edge_data) == 1 - assert list(ss.graph.transforms.edge_data.keys()) == [("3", "4")] - - def test_scene_transform(self): - # get a scene graph - scene = g.get_mesh("cycloidal.3DXML") - - # copy the original bounds of the scene's convex hull - b = scene.convex_hull.bounds.tolist() - # dump it into a single mesh - m = scene.to_mesh() - - # mesh bounds should match exactly - assert g.np.allclose(m.bounds, b) - assert g.np.allclose(scene.convex_hull.bounds, b) - - # get a random rotation matrix - T = g.trimesh.transformations.random_rotation_matrix() - - # apply it to both the mesh and the scene - m.apply_transform(T) - scene.apply_transform(T) - - # the mesh and scene should have the same bounds - assert g.np.allclose(m.convex_hull.bounds, scene.convex_hull.bounds) - # should have moved from original position - assert not g.np.allclose(m.convex_hull.bounds, b) - - def test_simplify(self): - if not g.trimesh.util.has_module("fast_simplification"): - return - - # get a scene graph - scene: g.trimesh.Scene = g.get_mesh("cycloidal.3DXML") - - original = scene.to_mesh() - - scene.simplify_quadric_decimation(percent=0.0, aggression=0) - assert len(scene.to_mesh().vertices) < len(original.vertices) - - def test_reverse(self): - tf = g.trimesh.transformations - - s = g.trimesh.scene.Scene() - s.add_geometry( - g.trimesh.creation.box(), - parent_node_name="world", - node_name="foo", - transform=tf.translation_matrix([0, 0, 1]), - ) - - s.add_geometry( - g.trimesh.creation.box(), - parent_node_name="foo", - node_name="foo2", - transform=tf.translation_matrix([0, 0, 1]), - ) - - assert len(s.graph.transforms.edge_data) == 2 - a = s.graph.get(frame_from="world", frame_to="foo2") - - assert len(s.graph.transforms.edge_data) == 2 - - # try going backward - i = s.graph.get(frame_from="foo2", frame_to="world") - # matrix should be inverted if you're going the other way - assert g.np.allclose(a[0], g.np.linalg.inv(i[0])) - - # try getting foo2 with shorthand - b = s.graph.get(frame_to="foo2") - c = s.graph["foo2"] - # matrix should be inverted if you're going the other way - assert g.np.allclose(a[0], c[0]) - assert g.np.allclose(b[0], c[0]) - - # get should not have edited edge data - assert len(s.graph.transforms.edge_data) == 2 - - def test_shortest_path(self): - # compare the EnforcedForest shortest path algo - # to the more general networkx.shortest_path algo - if g.sys.version_info < (3, 7): - # old networkx is a lot different - return - - tf = g.trimesh.transformations - # start with a known good random tree - edges = [tuple(row) for row in g.data["random_tree"]] - tree = 
g.nx.from_edgelist(edges, create_using=g.nx.DiGraph) - - r_choices = g.random((len(edges), 2)) - r_matrices = g.random_transforms(len(edges)) - edgelist = {} - for e, r_choice, r_mat in zip(edges, r_choices, r_matrices): - data = {} - if r_choice[0] > 0.5: - # if a matrix is omitted but an edge exists it is - # the same as passing an identity matrix - data["matrix"] = r_mat - if r_choice[1] > 0.4: - # a geometry is not required for a node - data["geometry"] = str(int(r_choice[1] * 1e8)) - edgelist[e] = data - - # now apply the random data to an EnforcedForest - forest = g.trimesh.scene.transforms.EnforcedForest() - for k, v in edgelist.items(): - forest.add_edge(*k, **v) - - # generate a lot of random queries - queries = g.np.random.choice(list(forest.nodes), 10000).reshape((-1, 2)) - # filter out any self-queries as networkx doesn't handle them - queries = queries[g.np.ptp(queries, axis=1) > 0] - - # now run our shortest path algorithm in a profiler - with g.Profiler() as P: - ours = [forest.shortest_path(*q) for q in queries] - # print this way to avoid a python2 syntax error - g.log.debug(P.output_text()) - - # check truth from networkx with an undirected graph - undir = tree.to_undirected() - with g.Profiler() as P: - truth = [g.nx.shortest_path(undir, *q) for q in queries] - g.log.debug(P.output_text()) - - # now compare our shortest path with networkx - for a, b, q in zip(truth, ours, queries): - if tuple(a) != tuple(b): - # raise the query that killed us - raise ValueError(q) - - # now try creating this as a full scenegraph - sg = g.trimesh.scene.transforms.SceneGraph() - [ - sg.update(frame_from=k[0], frame_to=k[1], **kwargs) - for k, kwargs in edgelist.items() - ] - - with g.Profiler() as P: - matgeom = [sg.get(frame_from=q[0], frame_to=q[1]) for q in queries] - g.log.debug(P.output_text()) - - # all of the matrices should be rigid transforms - assert all(tf.is_rigid(mat) for mat, _ in matgeom) - - def test_scaling_order(self): - s = g.trimesh.creation.box().scene() - scaling = 1.0 / 3.0 - c = s.scaled(scaling) - factor = c.geometry["geometry_0"].vertices / s.geometry["geometry_0"].vertices - assert g.np.allclose(factor, scaling) - # should be returning itself - r = s.apply_translation([10.5, 10.5, 10.5]) - assert g.np.allclose(r.bounds, [[10, 10, 10], [11, 11, 11]]) - assert g.np.allclose(s.bounds, [[10, 10, 10], [11, 11, 11]]) - - def test_translation_cache(self): - # scene with non-geometry nodes - c = g.get_mesh("cycloidal.3DXML") - s = c.scaled(1.0 / c.extents) - # get the pre-translation bounds - ori = s.bounds.copy() - # apply a translation - s.apply_translation([10, 10, 10]) - assert g.np.allclose(s.bounds, ori + 10) - - def test_translation_origin(self): - # check to see if we can translate to the origin - c = g.get_mesh("cycloidal.3DXML") - c.apply_transform(g.trimesh.transformations.random_rotation_matrix()) - s = c.scaled(1.0 / c.extents) - # shouldn't be at the origin - assert not g.np.allclose(s.bounds[0], 0.0) - # should move to the origin - s.apply_translation(-s.bounds[0]) - assert g.np.allclose(s.bounds[0], 0) - - def test_reconstruct(self): - original = g.get_mesh("cycloidal.3DXML") - assert isinstance(original, g.trimesh.Scene) - - # get the scene as "baked" meshes with no scene graph - dupe = g.trimesh.Scene(original.dump()) - assert len(dupe.geometry) > len(original.geometry) - - with g.Profiler() as P: - # reconstruct the instancing using `duplicate_nodes` and `procrustes` - rec = dupe.reconstruct_instances() - g.log.info(P.output_text()) - - assert 
len(rec.graph.nodes_geometry) == len(original.graph.nodes_geometry) - assert len(rec.geometry) == len(original.geometry) - assert g.np.allclose(rec.extents, original.extents, rtol=1e-8) - assert g.np.allclose(rec.center_mass, original.center_mass, rtol=1e-8) +def test_forest(): + graph = EnforcedForest() + for _i in range(5000): + graph.add_edge(random_chr(), random_chr()) + + +def test_cache(): + for _i in range(10): + scene = g.trimesh.Scene() + scene.add_geometry(g.trimesh.creation.box()) + + mod = [scene.graph.__hash__()] + scene.set_camera() + mod.append(scene.graph.__hash__()) + assert mod[-1] != mod[-2] + + assert not g.np.allclose(scene.camera_transform, g.np.eye(4)) + scene.camera_transform = g.np.eye(4) + mod.append(scene.graph.__hash__()) + assert mod[-1] != mod[-2] + + assert g.np.allclose(scene.camera_transform, g.np.eye(4)) + assert mod[-1] != mod[-2] + + +def test_successors(): + s = g.get_mesh("CesiumMilkTruck.glb") + assert len(s.graph.nodes_geometry) == 5 + + # world should be root frame + assert s.graph.transforms.successors(s.graph.base_frame) == set(s.graph.nodes) + + for n in s.graph.nodes: + # successors should always return subset of nodes + succ = s.graph.transforms.successors(n) + assert succ.issubset(s.graph.nodes) + # we self-include node in successors + assert n in succ + + # test getting a subscene from successors + ss = s.subscene("3") + assert len(ss.geometry) == 1 + assert len(ss.graph.nodes_geometry) == 1 + + assert isinstance(s.graph.to_networkx(), g.nx.DiGraph) + + +def test_nodes(): + # get a scene graph + graph = g.get_mesh("cycloidal.3DXML").graph + # get any non-root node + node = next(iter(set(graph.nodes).difference([graph.base_frame]))) + # remove that node + graph.transforms.remove_node(node) + # should have dumped the cache and removed the node + assert node not in graph.nodes + + +def test_remove_geometries(): + # remove geometries from a scene graph + scene = g.get_mesh("cycloidal.3DXML") + + # only keep geometry instances of these + keep = {"disc_cam_A", "disc_cam_B", "vxb-6800-2rs"} + + assert len(scene.duplicate_nodes) == 12 + + # should remove instance references except `keep` + scene.graph.remove_geometries(set(scene.geometry.keys()).difference(keep)) + + # there should now be three groups of duplicate nodes + assert len(scene.duplicate_nodes) == len(keep) + + +def test_kwargs(): + # test the function that converts various + # arguments into a homogeneous transformation + f = g.trimesh.scene.transforms.kwargs_to_matrix + # no arguments should be an identity matrix + assert g.np.allclose(f(), g.np.eye(4)) + + # a passed matrix should return immediately + fix = g.random((4, 4)) + assert g.np.allclose(f(matrix=fix), fix) + + quat = g.trimesh.unitize([1, 2, 3, 1]) + trans = [1.0, 2.0, 3.0] + rot = g.trimesh.transformations.quaternion_matrix(quat) + # should be the same as passed to transformations + assert g.np.allclose(rot, f(quaternion=quat)) + + # try passing both quaternion and translation + combine = f(quaternion=quat, translation=trans) + # should be the same as passed and computed + assert g.np.allclose(combine[:3, :3], rot[:3, :3]) + assert g.np.allclose(combine[:3, 3], trans) + + +def test_remove_node(): + s = g.get_mesh("CesiumMilkTruck.glb") + + assert len(s.graph.nodes_geometry) == 5 + assert len(s.graph.nodes) == 9 + assert len(s.graph.transforms.node_data) == 9 + assert len(s.graph.transforms.edge_data) == 8 + assert len(s.graph.transforms.parents) == 8 + + assert s.graph.transforms.remove_node("1") + + assert 
len(s.graph.nodes_geometry) == 5 + assert len(s.graph.nodes) == 8 + assert len(s.graph.transforms.node_data) == 8 + assert len(s.graph.transforms.edge_data) == 6 + assert len(s.graph.transforms.parents) == 6 + + +def test_subscene(): + s = g.get_mesh("CesiumMilkTruck.glb") + + assert len(s.graph.nodes) == 9 + assert len(s.graph.transforms.node_data) == 9 + assert len(s.graph.transforms.edge_data) == 8 + + ss = s.subscene("3") + + assert ss.graph.base_frame == "3" + assert set(ss.graph.nodes) == {"3", "4"} + assert len(ss.graph.transforms.node_data) == 2 + assert len(ss.graph.transforms.edge_data) == 1 + assert list(ss.graph.transforms.edge_data.keys()) == [("3", "4")] + + +def test_scene_transform(): + # get a scene graph + scene = g.get_mesh("cycloidal.3DXML") + + # copy the original bounds of the scene's convex hull + b = scene.convex_hull.bounds.tolist() + # dump it into a single mesh + m = scene.to_mesh() + + # mesh bounds should match exactly + assert g.np.allclose(m.bounds, b) + assert g.np.allclose(scene.convex_hull.bounds, b) + + # get a random rotation matrix + T = g.trimesh.transformations.random_rotation_matrix() + + # apply it to both the mesh and the scene + m.apply_transform(T) + scene.apply_transform(T) + + # the mesh and scene should have the same bounds + assert g.np.allclose(m.convex_hull.bounds, scene.convex_hull.bounds) + # should have moved from original position + assert not g.np.allclose(m.convex_hull.bounds, b) + + +def test_simplify(): + if not g.trimesh.util.has_module("fast_simplification"): + return + + # get a scene graph + scene: g.trimesh.Scene = g.get_mesh("cycloidal.3DXML") + + original = scene.to_mesh() + + scene.simplify_quadric_decimation(percent=0.0, aggression=0) + assert len(scene.to_mesh().vertices) < len(original.vertices) + + +def test_reverse(): + tf = g.trimesh.transformations + + s = g.trimesh.scene.Scene() + s.add_geometry( + g.trimesh.creation.box(), + parent_node_name="world", + node_name="foo", + transform=tf.translation_matrix([0, 0, 1]), + ) + + s.add_geometry( + g.trimesh.creation.box(), + parent_node_name="foo", + node_name="foo2", + transform=tf.translation_matrix([0, 0, 1]), + ) + + assert len(s.graph.transforms.edge_data) == 2 + a = s.graph.get(frame_from="world", frame_to="foo2") + + assert len(s.graph.transforms.edge_data) == 2 + + # try going backward + i = s.graph.get(frame_from="foo2", frame_to="world") + # matrix should be inverted if you're going the other way + assert g.np.allclose(a[0], g.np.linalg.inv(i[0])) + + # try getting foo2 with shorthand + b = s.graph.get(frame_to="foo2") + c = s.graph["foo2"] + # matrix should be inverted if you're going the other way + assert g.np.allclose(a[0], c[0]) + assert g.np.allclose(b[0], c[0]) + + # get should not have edited edge data + assert len(s.graph.transforms.edge_data) == 2 + + +def test_shortest_path(): + # compare the EnforcedForest shortest path algo + # to the more general networkx.shortest_path algo + if g.sys.version_info < (3, 7): + # old networkx is a lot different + return + + tf = g.trimesh.transformations + # start with a known good random tree + edges = [tuple(row) for row in g.data["random_tree"]] + tree = g.nx.from_edgelist(edges, create_using=g.nx.DiGraph) + + r_choices = g.random((len(edges), 2)) + r_matrices = g.random_transforms(len(edges)) + edgelist = {} + for e, r_choice, r_mat in zip(edges, r_choices, r_matrices): + data = {} + if r_choice[0] > 0.5: + # if a matrix is omitted but an edge exists it is + # the same as passing an identity matrix + data["matrix"] 
= r_mat + if r_choice[1] > 0.4: + # a geometry is not required for a node + data["geometry"] = str(int(r_choice[1] * 1e8)) + edgelist[e] = data + + # now apply the random data to an EnforcedForest + forest = g.trimesh.scene.transforms.EnforcedForest() + for k, v in edgelist.items(): + forest.add_edge(*k, **v) + + # generate a lot of random queries + queries = g.np.random.choice(list(forest.nodes), 10000).reshape((-1, 2)) + # filter out any self-queries as networkx doesn't handle them + queries = queries[g.np.ptp(queries, axis=1) > 0] + + # now run our shortest path algorithm in a profiler + with g.Profiler() as P: + ours = [forest.shortest_path(*q) for q in queries] + # print this way to avoid a python2 syntax error + g.log.debug(P.output_text()) + + # check truth from networkx with an undirected graph + undir = tree.to_undirected() + with g.Profiler() as P: + truth = [g.nx.shortest_path(undir, *q) for q in queries] + g.log.debug(P.output_text()) + + # now compare our shortest path with networkx + for a, b, q in zip(truth, ours, queries): + if tuple(a) != tuple(b): + # raise the query that killed us + raise ValueError(q) + + # now try creating this as a full scenegraph + sg = g.trimesh.scene.transforms.SceneGraph() + [ + sg.update(frame_from=k[0], frame_to=k[1], **kwargs) + for k, kwargs in edgelist.items() + ] + + with g.Profiler() as P: + matgeom = [sg.get(frame_from=q[0], frame_to=q[1]) for q in queries] + g.log.debug(P.output_text()) + + # all of the matrices should be rigid transforms + assert all(tf.is_rigid(mat) for mat, _ in matgeom) + + +def test_scaling_order(): + s = g.trimesh.creation.box().scene() + scaling = 1.0 / 3.0 + c = s.scaled(scaling) + factor = c.geometry["geometry_0"].vertices / s.geometry["geometry_0"].vertices + assert g.np.allclose(factor, scaling) + # should be returning itself + r = s.apply_translation([10.5, 10.5, 10.5]) + assert g.np.allclose(r.bounds, [[10, 10, 10], [11, 11, 11]]) + assert g.np.allclose(s.bounds, [[10, 10, 10], [11, 11, 11]]) + + +def test_translation_cache(): + # scene with non-geometry nodes + c = g.get_mesh("cycloidal.3DXML") + s = c.scaled(1.0 / c.extents) + # get the pre-translation bounds + ori = s.bounds.copy() + # apply a translation + s.apply_translation([10, 10, 10]) + assert g.np.allclose(s.bounds, ori + 10) + + +def test_translation_origin(): + # check to see if we can translate to the origin + c = g.get_mesh("cycloidal.3DXML") + c.apply_transform(g.trimesh.transformations.random_rotation_matrix()) + s = c.scaled(1.0 / c.extents) + # shouldn't be at the origin + assert not g.np.allclose(s.bounds[0], 0.0) + # should move to the origin + s.apply_translation(-s.bounds[0]) + assert g.np.allclose(s.bounds[0], 0) + + +def test_reconstruct(): + original = g.get_mesh("cycloidal.3DXML") + assert isinstance(original, g.trimesh.Scene) + + # get the scene as "baked" meshes with no scene graph + dupe = g.trimesh.Scene(original.dump()) + assert len(dupe.geometry) > len(original.geometry) + + with g.Profiler() as P: + # reconstruct the instancing using `duplicate_nodes` and `procrustes` + rec = dupe.reconstruct_instances() + g.log.info(P.output_text()) + + assert len(rec.graph.nodes_geometry) == len(original.graph.nodes_geometry) + assert len(rec.geometry) == len(original.geometry) + assert g.np.allclose(rec.extents, original.extents, rtol=1e-8) + assert g.np.allclose(rec.center_mass, original.center_mass, rtol=1e-8) if __name__ == "__main__": g.trimesh.util.attach_to_log() - g.unittest.main() + + test_reconstruct() diff --git 
a/tests/test_svg.py b/tests/test_svg.py index 04127806c..8cebf8abb 100644 --- a/tests/test_svg.py +++ b/tests/test_svg.py @@ -126,7 +126,7 @@ def test_roundtrip(self): assert g.np.isclose(a.area, b.area) assert a.body_count == b.body_count - assert r.metadata["file_path"].endswith(fn[3:]) + # assert r.metadata["file_path"].endswith(fn[3:]) if __name__ == "__main__": diff --git a/tests/test_texture.py b/tests/test_texture.py index 09df5c163..44be47c6c 100644 --- a/tests/test_texture.py +++ b/tests/test_texture.py @@ -55,7 +55,7 @@ def test_fuze(self): # see if web resolvers work tex = g.trimesh.exchange.load.load_remote( url=address + "/fuze.obj", process=False - ) + ).geometry["fuze.obj"] g.check_fuze(tex) # see if web + zip resolvers work diff --git a/tests/test_voxel.py b/tests/test_voxel.py index f5d82c985..341ecf620 100644 --- a/tests/test_voxel.py +++ b/tests/test_voxel.py @@ -10,13 +10,17 @@ def test_voxel(self): Test that voxels work at all """ for m in [ - g.get_mesh("featuretype.STL"), + g.get_mesh("featuretype.STL", force="mesh"), g.trimesh.primitives.Box(), g.trimesh.primitives.Sphere(), ]: for pitch in [0.1, 0.1 - g.tol.merge]: surface = m.voxelized(pitch=pitch) + scene = g.trimesh.Scene(surface) + assert len(scene.geometry) == 1 + assert g.np.allclose(scene.bounds, surface.bounds) + # make sure the voxelized pitch is similar to passed assert g.np.allclose(surface.pitch, pitch) diff --git a/trimesh/__init__.py b/trimesh/__init__.py index 40d3b85c3..ec75e39cc 100644 --- a/trimesh/__init__.py +++ b/trimesh/__init__.py @@ -54,6 +54,7 @@ load_mesh, load_path, load_remote, + load_scene, ) # geometry objects @@ -108,6 +109,7 @@ "load_mesh", "load_path", "load_remote", + "load_scene", "nsphere", "path", "permutate", diff --git a/trimesh/__main__.py b/trimesh/__main__.py index 3f9889944..8240cda81 100644 --- a/trimesh/__main__.py +++ b/trimesh/__main__.py @@ -30,7 +30,8 @@ def main(): args = parser.parse_args() if args.file_name is None: - scene = None + parser.print_help() + return else: scene = load(args.file_name) diff --git a/trimesh/base.py b/trimesh/base.py index c59bde5de..39ae64a2c 100644 --- a/trimesh/base.py +++ b/trimesh/base.py @@ -39,7 +39,7 @@ from .constants import log, tol from .exceptions import ExceptionWrapper from .exchange.export import export_mesh -from .parent import Geometry3D +from .parent import Geometry3D, LoadSource from .scene import Scene from .triangles import MassProperties from .typed import ( @@ -99,6 +99,7 @@ def __init__( use_embree: bool = True, initial_cache: Optional[Dict[str, ndarray]] = None, visual: Optional[Union[ColorVisuals, TextureVisuals]] = None, + source: Optional[LoadSource] = None, **kwargs, ) -> None: """ @@ -202,6 +203,9 @@ def __init__( elif metadata is not None: raise ValueError(f"metadata should be a dict or None, got {metadata!s}") + # where was this loaded from + self.source = source + # store per-face and per-vertex attributes which will # be updated when an update_faces call is made self.face_attributes = {} @@ -2180,7 +2184,8 @@ def section( Curve of intersection or None if it was not hit by plane. 
""" # turn line segments into Path2D/Path3D objects - from .exchange.load import load_path + from .path.exchange.misc import lines_to_path + from .path.path import Path3D # return a single cross section in 3D lines, face_index = intersections.mesh_plane( @@ -2195,13 +2200,14 @@ def section( if len(lines) == 0: return None - # otherwise load the line segments into a Path3D object - path = load_path(lines) + # otherwise load the line segments into the keyword arguments + # for a Path3D object. + path = lines_to_path(lines) # add the face index info into metadata - path.metadata["face_index"] = face_index + # path.metadata["face_index"] = face_index - return path + return Path3D(**path) def section_multiplane( self, @@ -2553,7 +2559,7 @@ def simplify_quadric_decimation( A number between 0.0 and 1.0 for how much face_count Target number of faces desired in the resulting mesh. - agression + aggression An integer between `0` and `10`, the scale being roughly `0` is "slow and good" and `10` being "fast and bad." diff --git a/trimesh/creation.py b/trimesh/creation.py index 2687a79fd..97df23a5c 100644 --- a/trimesh/creation.py +++ b/trimesh/creation.py @@ -67,7 +67,7 @@ def revolve( passed will be a full revolution (`angle = 2*pi`) cap If not a full revolution (`0.0 < angle < 2 * pi`) - and cap is True attempt to add a tesselated cap. + and cap is True attempt to add a tessellated cap. sections Number of sections result should have If not specified default is 32 per revolution diff --git a/trimesh/exchange/gltf.py b/trimesh/exchange/gltf.py index 63217877c..cedf12ecb 100644 --- a/trimesh/exchange/gltf.py +++ b/trimesh/exchange/gltf.py @@ -9,15 +9,16 @@ import base64 import json from collections import OrderedDict, defaultdict, deque +from copy import deepcopy import numpy as np from .. import rendering, resources, transformations, util, visual from ..caching import hash_fast from ..constants import log, tol -from ..resolvers import Resolver, ZipResolver +from ..resolvers import ResolverLike, ZipResolver from ..scene.cameras import Camera -from ..typed import Mapping, NDArray, Optional, Stream, Union +from ..typed import Dict, List, NDArray, Optional, Stream from ..util import triangle_strips_to_faces, unique_name from ..visual.gloss import specular_to_pbr @@ -50,9 +51,6 @@ } } -# we can accept dict resolvers -ResolverLike = Union[Resolver, Mapping] - # GL geometry modes _GL_LINES = 1 _GL_POINTS = 0 @@ -455,22 +453,22 @@ def load_glb( return kwargs -def _uri_to_bytes(uri, resolver): +def _uri_to_bytes(uri: str, resolver: ResolverLike) -> bytes: """ Take a URI string and load it as a a filename or as base64. Parameters -------------- - uri : string + uri Usually a filename or something like: "data:object/stuff,base64,AABA112A..." 
- resolver : trimesh.visual.Resolver + resolver A resolver to load referenced assets Returns --------------- - data : bytes + data Loaded data from URI """ # see if the URI has base64 data @@ -1254,12 +1252,19 @@ def _parse_textures(header, views, resolver=None): images = [None] * len(header["images"]) # loop through images for i, img in enumerate(header["images"]): + if img.get("mimeType", "") == "image/ktx2": + log.debug("`image/ktx2` textures are unsupported, skipping!") + continue # get the bytes representing an image if "bufferView" in img: blob = views[img["bufferView"]] elif "uri" in img: - # will get bytes from filesystem or base64 URI - blob = _uri_to_bytes(uri=img["uri"], resolver=resolver) + try: + # will get bytes from filesystem or base64 URI + blob = _uri_to_bytes(uri=img["uri"], resolver=resolver) + except BaseException: + log.debug(f"unable to load image from: {img.keys()}", exc_info=True) + continue else: log.debug(f"unable to load image from: {img.keys()}") continue @@ -1269,7 +1274,7 @@ def _parse_textures(header, views, resolver=None): # load the buffer into a PIL image images[i] = PIL.Image.open(util.wrap_as_stream(blob)) except BaseException: - log.error("failed to load image!", exc_info=True) + log.debug("failed to load image!", exc_info=True) return images @@ -1312,9 +1317,12 @@ def parse_values_and_textures(input_dict): ) if webp is not None: idx = webp - else: + elif "source" in texture: # fallback (or primary, if extensions are not present) idx = texture["source"] + else: + # no source available + continue # store the actual image as the value result[k] = images[idx] except BaseException: @@ -1350,9 +1358,9 @@ def parse_values_and_textures(input_dict): def _read_buffers( - header, - buffers, - mesh_kwargs, + header: Dict, + buffers: List[bytes], + mesh_kwargs: Dict, resolver: Optional[ResolverLike], ignore_broken: bool = False, merge_primitives: bool = False, @@ -1479,10 +1487,15 @@ def _read_buffers( for index, m in enumerate(header.get("meshes", [])): try: # GLTF spec indicates implicit units are meters - metadata = {"units": "meters"} + metadata = { + "units": "meters", + "from_gltf_primitive": len(m["primitives"]) > 1, + } + # try to load all mesh metadata if isinstance(m.get("extras"), dict): metadata.update(m["extras"]) + # put any mesh extensions in a field of the metadata if "extensions" in m: metadata["gltf_extensions"] = m["extensions"] @@ -1490,8 +1503,11 @@ def _read_buffers( for p in m["primitives"]: # if we don't have a triangular mesh continue # if not specified assume it is a mesh - kwargs = {"metadata": {}, "process": False} - kwargs.update(mesh_kwargs) + kwargs = deepcopy(mesh_kwargs) + if kwargs.get("metadata", None) is None: + kwargs["metadata"] = {} + if "process" not in kwargs: + kwargs["process"] = False kwargs["metadata"].update(metadata) # i.e. 
GL_LINES, GL_TRIANGLES, etc # specification says the default mode is GL_TRIANGLES @@ -1511,6 +1527,24 @@ def _read_buffers( kwargs["entities"] = [Line(points=np.arange(len(kwargs["vertices"])))] elif mode == _GL_POINTS: kwargs["vertices"] = access[attr["POSITION"]] + visuals = None + if "COLOR_0" in attr: + try: + # try to load vertex colors from the accessors + colors = access[attr["COLOR_0"]] + if len(colors) == len(kwargs["vertices"]): + if visuals is None: + # just pass to mesh as vertex color + kwargs["vertex_colors"] = colors.copy() + else: + # we ALSO have texture so save as vertex + # attribute + visuals.vertex_attributes["color"] = colors.copy() + except BaseException: + # survive failed colors + log.debug("failed to load colors", exc_info=True) + if visuals is not None: + kwargs["visual"] = visuals elif mode in (_GL_TRIANGLES, _GL_STRIP): # get vertices from accessors kwargs["vertices"] = access[attr["POSITION"]] @@ -1573,14 +1607,6 @@ def _read_buffers( if visuals is not None: kwargs["visual"] = visuals - # By default the created mesh is not from primitive, - # in case it is the value will be updated - # each primitive gets it's own Trimesh object - if len(m["primitives"]) > 1: - kwargs["metadata"]["from_gltf_primitive"] = True - else: - kwargs["metadata"]["from_gltf_primitive"] = False - # custom attributes starting with a `_` custom = { a: access[attr[a]] for a in attr.keys() if a.startswith("_") @@ -1814,18 +1840,18 @@ def _read_buffers( "base_frame": base_frame, "camera": camera, "camera_transform": camera_transform, + "metadata": {}, } + try: # load any scene extras into scene.metadata # use a try except to avoid nested key checks - result["metadata"] = header["scenes"][header["scene"]]["extras"] + result["metadata"].update(header["scenes"][header["scene"]]["extras"]) except BaseException: pass try: # load any scene extensions into a field of scene.metadata # use a try except to avoid nested key checks - if "metadata" not in result: - result["metadata"] = {} result["metadata"]["gltf_extensions"] = header["extensions"] except BaseException: pass diff --git a/trimesh/exchange/load.py b/trimesh/exchange/load.py index b6951dc77..abf5e49f8 100644 --- a/trimesh/exchange/load.py +++ b/trimesh/exchange/load.py @@ -6,11 +6,11 @@ from .. import resolvers, util from ..base import Trimesh from ..exceptions import ExceptionWrapper -from ..parent import Geometry +from ..parent import Geometry, LoadSource from ..points import PointCloud from ..scene.scene import Scene, append_scenes -from ..typed import Dict, List, Loadable, Optional, Union -from ..util import log, now +from ..typed import Dict, Loadable, Optional +from ..util import log from . 
import misc from .binvox import _binvox_loaders from .cascade import _cascade_loaders @@ -31,8 +31,8 @@ except BaseException as E: # save a traceback to see why path didn't import load_path = ExceptionWrapper(E) - # no path formats available + # no path formats available def path_formats() -> set: return set() @@ -71,13 +71,15 @@ def available_formats() -> set: def load( file_obj: Loadable, file_type: Optional[str] = None, - resolver: Union[resolvers.Resolver, Dict, None] = None, + resolver: Optional[resolvers.ResolverLike] = None, force: Optional[str] = None, + allow_remote: bool = False, **kwargs, -) -> Union[Geometry, List[Geometry]]: +) -> Geometry: """ - Load a mesh or vectorized path into objects like - Trimesh, Path2D, Path3D, Scene + For new code the typed load functions `trimesh.load_scene` or `trimesh.load_mesh` + are recommended over `trimesh.load` which is a backwards-compatibility wrapper + that mimics the behavior of the old function and can return any geometry type. Parameters ----------- @@ -90,6 +92,8 @@ def load( force : None or str For 'mesh': try to coerce scenes into a single mesh For 'scene': try to coerce everything into a scene + allow_remote + If True allow this load call to work on a remote URL. kwargs : dict Passed to geometry __init__ @@ -98,77 +102,158 @@ def load( geometry : Trimesh, Path2D, Path3D, Scene Loaded geometry as trimesh classes """ - # check to see if we're trying to load something - # that is already a native trimesh Geometry subclass - if isinstance(file_obj, Geometry): - log.info("Load called on %s object, returning input", file_obj.__class__.__name__) - return file_obj - # parse the file arguments into clean loadable form - ( - file_obj, # file- like object - file_type, # str, what kind of file - metadata, # dict, any metadata from file name - opened, # bool, did we open the file ourselves - resolver, # object to load referenced resources - ) = _parse_file_args(file_obj=file_obj, file_type=file_type, resolver=resolver) + # call the most general loading case into a `Scene`. + loaded = load_scene( + file_obj=file_obj, + file_type=file_type, + resolver=resolver, + allow_remote=allow_remote, + **kwargs, + ) + + if force == "mesh": + # new code should use `load_mesh` for this + log.debug( + "`trimesh.load(force='mesh')` is a compatibility wrapper for `trimesh.load_mesh`" + ) + return loaded.to_mesh() + elif force == "scene": + # new code should use `load_scene` for this + log.debug( + "`trimesh.load(force='scene')` is a compatibility wrapper for `trimesh.load_scene`" + ) + return loaded + + ########################################### + # we are matching deprecated behavior here! + # matching old behavior you should probably use `load_scene` + kind = loaded.source.file_type + always_scene = {"glb", "gltf", "zip", "3dxml", "tar.gz"} + + if kind not in always_scene and len(loaded.geometry) == 1: + geom = next(iter(loaded.geometry.values())) + geom.metadata.update(loaded.metadata) + + if isinstance(geom, PointCloud) or kind in { + "obj", + "stl", + "ply", + "svg", + "binvox", + "xaml", + "dxf", + "off", + "msh", + }: + return geom + + return loaded + + +def load_scene( + file_obj: Loadable, + file_type: Optional[str] = None, + resolver: Optional[resolvers.ResolverLike] = None, + allow_remote: bool = False, + metadata: Optional[Dict] = None, + **kwargs, +) -> Scene: + """ + Load geometry into the `trimesh.Scene` container. This may contain + any `parent.Geometry` object, including `Trimesh`, `Path2D`, `Path3D`, + or a `PointCloud`. 
+
+ Parameters
+ -----------
+ file_obj : str, or file- like object
+ The source of the data to be loaded
+ file_type: str
+ What kind of file type do we have (eg: 'stl')
+ resolver : trimesh.visual.Resolver
+ Object to load referenced assets like materials and textures
+ allow_remote
+ If True allow this load call to work on a remote URL.
+ kwargs : dict
+ Passed to geometry __init__
+
+ Returns
+ ---------
+ scene : trimesh.Scene
+ Loaded geometry wrapped in a Scene container
+ """
+
+ # parse all possible values of file objects into simple types
+ arg = _parse_file_args(
+ file_obj=file_obj,
+ file_type=file_type,
+ resolver=resolver,
+ allow_remote=allow_remote,
+ )
 try:
- if isinstance(file_obj, dict):
- # if we've been passed a dict treat it as kwargs
- kwargs.update(file_obj)
- loaded = load_kwargs(kwargs)
- elif file_type in path_formats():
+ if arg.file_type in path_formats():
 # path formats get loaded with path loader
- loaded = load_path(file_obj, file_type=file_type, **kwargs)
- elif file_type in mesh_loaders:
+ loaded = load_path(
+ file_obj=arg.file_obj,
+ file_type=arg.file_type,
+ metadata=metadata,
+ **kwargs,
+ )
+ elif arg.file_type in ["svg", "dxf"]:
+ # call the dummy function to raise the import error
+ # this prevents the exception from being super opaque
+ load_path()
+ elif isinstance(file_obj, dict):
+ loaded = _load_kwargs(file_obj)
+ elif arg.file_type in mesh_loaders:
 # mesh loaders use mesh loader
- loaded = load_mesh(file_obj, file_type=file_type, resolver=resolver, **kwargs)
- elif file_type in compressed_loaders:
+
+ loaded = _load_kwargs(
+ mesh_loaders[arg.file_type](
+ file_obj=arg.file_obj,
+ file_type=arg.file_type,
+ resolver=arg.resolver,
+ metadata=metadata,
+ **kwargs,
+ )
+ )
+ elif arg.file_type in compressed_loaders:
 # for archives, like ZIP files
- loaded = load_compressed(file_obj, file_type=file_type, **kwargs)
- elif file_type in voxel_loaders:
- loaded = voxel_loaders[file_type](
- file_obj, file_type=file_type, resolver=resolver, **kwargs
+ loaded = _load_compressed(arg.file_obj, file_type=arg.file_type, **kwargs)
+ elif arg.file_type in voxel_loaders:
+ loaded = voxel_loaders[arg.file_type](
+ file_obj=arg.file_obj,
+ file_type=arg.file_type,
+ resolver=arg.resolver,
+ **kwargs,
 )
 else:
- if file_type in ["svg", "dxf"]:
- # call the dummy function to raise the import error
- # this prevents the exception from being super opaque
- load_path()
- else:
- raise ValueError(f"File type: {file_type} not supported")
+ raise NotImplementedError(f"file_type '{arg.file_type}' not supported")
+ finally:
+ # if we opened the file ourselves from a file name
 # close any opened files even if we crashed out
- if opened:
- file_obj.close()
-
- # add load metadata ('file_name') to each loaded geometry
- for i in util.make_sequence(loaded):
- i.metadata.update(metadata)
+ if arg.was_opened:
+ arg.file_obj.close()
- # if we opened the file in this function ourselves from a
- # file name clean up after ourselves by closing it
- if opened:
- file_obj.close()
+ if not isinstance(loaded, Scene):
+ loaded = Scene(loaded)
- # combine a scene into a single mesh
- if force == "mesh" and isinstance(loaded, Scene):
- return util.concatenate(loaded.dump())
- if force == "scene" and not isinstance(loaded, Scene):
- return Scene(loaded)
+ # attach the parsed load source to the scene and every geometry
+ loaded.source = arg
+ for g in loaded.geometry.values():
+ g.source = 
arg return loaded -def load_mesh( - file_obj: Loadable, - file_type: Optional[str] = None, - resolver: Union[resolvers.Resolver, Dict, None] = None, - **kwargs, -) -> Union[Geometry, List[Geometry]]: +def load_mesh(*args, **kwargs) -> Trimesh: """ - Load a mesh file into a Trimesh object. + Load a file into a Trimesh object. Parameters ----------- @@ -184,46 +269,10 @@ def load_mesh( mesh Loaded geometry data. """ + return load_scene(*args, **kwargs).to_mesh() - # parse the file arguments into clean loadable form - ( - file_obj, # file-like object - file_type, # str: what kind of file - metadata, # dict: any metadata from file name - opened, # bool: did we open the file ourselves - resolver, # Resolver: to load referenced resources - ) = _parse_file_args(file_obj=file_obj, file_type=file_type, resolver=resolver) - - try: - # make sure we keep passed kwargs to loader - # but also make sure loader keys override passed keys - loader = mesh_loaders[file_type] - tic = now() - results = loader(file_obj, file_type=file_type, resolver=resolver, **kwargs) - if not isinstance(results, list): - results = [results] - - loaded = [] - for result in results: - kwargs.update(result) - loaded.append(load_kwargs(kwargs)) - loaded[-1].metadata.update(metadata) - - # todo : remove this - if len(loaded) == 1: - loaded = loaded[0] - - # show the repr for loaded, loader used, and time - log.debug(f"loaded {loaded!s} using `{loader.__name__}` in {now() - tic:0.4f}s") - finally: - # if we failed to load close file - if opened: - file_obj.close() - - return loaded - -def load_compressed(file_obj, file_type=None, resolver=None, mixed=False, **kwargs): +def _load_compressed(file_obj, file_type=None, resolver=None, mixed=False, **kwargs): """ Given a compressed archive load all the geometry that we can from it. 
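A minimal usage sketch of the typed entry points above (the file path is a placeholder; `load_scene`, `load_mesh` and `Scene.to_mesh` come from this patch, everything else is illustrative):

    import trimesh

    # load_scene always returns a Scene regardless of file contents
    scene = trimesh.load_scene("models/example.glb")

    # load_mesh is now literally load_scene(...).to_mesh():
    # a single concatenated Trimesh
    mesh = trimesh.load_mesh("models/example.glb")

    # the old entry point survives as a compatibility wrapper
    legacy = trimesh.load("models/example.glb", force="scene")
    assert isinstance(legacy, trimesh.Scene)
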
@@ -245,86 +294,68 @@ def load_compressed(file_obj, file_type=None, resolver=None, mixed=False, **kwar
 """
 # parse the file arguments into clean loadable form
- (
- file_obj, # file- like object
- file_type, # str, what kind of file
- metadata, # dict, any metadata from file name
- opened, # bool, did we open the file ourselves
- resolver, # object to load referenced resources
- ) = _parse_file_args(file_obj=file_obj, file_type=file_type, resolver=resolver)
+ arg = _parse_file_args(file_obj=file_obj, file_type=file_type, resolver=resolver)
- try:
- # a dict of 'name' : file-like object
- files = util.decompress(file_obj=file_obj, file_type=file_type)
- # store loaded geometries as a list
- geometries = []
+ # store loaded geometries as a list
+ geometries = []
- # so loaders can access textures/etc
- resolver = resolvers.ZipResolver(files)
+ # so loaders can access textures/etc
+ archive = util.decompress(file_obj=arg.file_obj, file_type=arg.file_type)
+ resolver = resolvers.ZipResolver(archive)
- # try to save the files with meaningful metadata
- if "file_path" in metadata:
- archive_name = metadata["file_path"]
- else:
- archive_name = "archive"
+ # try to save the files with meaningful metadata
+ # archive_name = arg.file_path or "archive"
+ meta_archive = {}
- # populate our available formats
- if mixed:
- available = available_formats()
+ # populate our available formats
+ if mixed:
+ available = available_formats()
+ else:
+ # all types contained in ZIP archive
+ contains = {util.split_extension(n).lower() for n in resolver.keys()}
+ # if there are no mesh formats available
+ if contains.isdisjoint(mesh_formats()):
+ available = path_formats()
+ else:
 else:
- # all types contained in ZIP archive
- contains = {util.split_extension(n).lower() for n in files.keys()}
- # if there are no mesh formats available
- if contains.isdisjoint(mesh_formats()):
- available = path_formats()
- else:
- available = mesh_formats()
-
- meta_archive = {}
- for name, data in files.items():
- try:
- # only load formats that we support
- compressed_type = util.split_extension(name).lower()
-
- # if file has metadata type include it
- if compressed_type in "yaml":
- import yaml
-
- meta_archive[name] = yaml.safe_load(data)
- elif compressed_type in "json":
- import json
-
- meta_archive[name] = json.loads(data)
-
- if compressed_type not in available:
- # don't raise an exception, just try the next one
- continue
- # store the file name relative to the archive
- metadata["file_name"] = archive_name + "/" + os.path.basename(name)
- # load the individual geometry
- loaded = load(
- file_obj=data,
+ available = mesh_formats()
+
+ for file_name, file_obj in archive.items():
+ try:
+ # only load formats that we support
+ compressed_type = util.split_extension(file_name).lower()
+
+ # if file has metadata type include it
+ if compressed_type in ("yaml", "yml"):
+ import yaml
+
+ meta_archive[file_name] = yaml.safe_load(file_obj)
+ continue
+ elif compressed_type == "json":
+ import json
+
+ meta_archive[file_name] = json.load(file_obj)
+ continue
+ elif compressed_type not in available:
+ # don't raise an exception, just try the next one
+ continue
+
+ # load the individual geometry
+ geometries.append(
+ load_scene(
+ file_obj=file_obj,
 file_type=compressed_type,
 resolver=resolver,
- metadata=metadata,
 **kwargs,
 )
+ )
- # some loaders return multiple geometries
- if util.is_sequence(loaded):
- # if the loader has returned a list of meshes
- geometries.extend(loaded)
- else:
- # if the loader has returned a single geometry 
- geometries.append(loaded) - except BaseException: - log.debug("failed to load file in zip", exc_info=True) + except BaseException: + log.debug("failed to load file in zip", exc_info=True) - finally: - # if we opened the file in this function - # clean up after ourselves - if opened: - file_obj.close() + # if we opened the file in this function + # clean up after ourselves + if arg.was_opened: + arg.file_obj.close() # append meshes or scenes into a single Scene object result = append_scenes(geometries) @@ -336,61 +367,34 @@ def load_compressed(file_obj, file_type=None, resolver=None, mixed=False, **kwar return result -def load_remote(url, **kwargs): +def load_remote(url: str, **kwargs) -> Scene: """ Load a mesh at a remote URL into a local trimesh object. - This must be called explicitly rather than automatically - from trimesh.load to ensure users don't accidentally make - network requests. + This is a thin wrapper around: + `trimesh.load_scene(file_obj=url, allow_remote=True, **kwargs)` Parameters ------------ - url : string + url URL containing mesh file - **kwargs : passed to `load` + **kwargs + Passed to `load_scene` Returns ------------ loaded : Trimesh, Path, Scene Loaded result """ - # import here to keep requirement soft - import httpx - - # download the mesh - response = httpx.get(url, follow_redirects=True) - response.raise_for_status() + return load_scene(file_obj=url, allow_remote=True, **kwargs) - # wrap as file object - file_obj = util.wrap_as_stream(response.content) - # so loaders can access textures/etc - resolver = resolvers.WebResolver(url) - - try: - # if we have a bunch of query parameters the type - # will be wrong so try to clean up the URL - # urllib is Python 3 only - import urllib - - # remove the url-safe encoding then split off query params - file_type = urllib.parse.unquote(url).split("?", 1)[0].split("/")[-1].strip() - except BaseException: - # otherwise just use the last chunk of URL - file_type = url.split("/")[-1].split("?", 1)[0] - - # actually load the data from the retrieved bytes - loaded = load(file_obj=file_obj, file_type=file_type, resolver=resolver, **kwargs) - return loaded - - -def load_kwargs(*args, **kwargs) -> Geometry: +def _load_kwargs(*args, **kwargs) -> Geometry: """ Load geometry from a properly formatted dict or kwargs """ - def handle_scene(): + def handle_scene() -> Scene: """ Load a scene from our kwargs. 
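Since `load_remote` is now a thin alias, the two calls below should be equivalent; the URL is a placeholder used only for illustration:

    import trimesh

    url = "https://example.com/models/example.glb"

    # explicit opt-in to network access on the general loader
    scene_a = trimesh.load_scene(url, allow_remote=True)

    # the convenience wrapper kept for backwards compatibility
    scene_b = trimesh.load_remote(url)
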
@@ -400,7 +404,7 @@ def handle_scene(): base_frame: str, base frame of graph """ graph = kwargs.get("graph", None) - geometry = {k: load_kwargs(v) for k, v in kwargs["geometry"].items()} + geometry = {k: _load_kwargs(v) for k, v in kwargs["geometry"].items()} if graph is not None: scene = Scene() @@ -443,7 +447,7 @@ def handle_scene(): return scene - def handle_mesh(): + def handle_mesh() -> Trimesh: """ Handle the keyword arguments for a Trimesh object """ @@ -500,21 +504,18 @@ def handle_pointcloud(): for func, expected in handlers: if all(i in kwargs for i in expected): # all expected kwargs exist - handler = func - # exit the loop as we found one - break - else: - raise ValueError(f"unable to determine type: {kwargs.keys()}") + return func() - return handler() + raise ValueError(f"unable to determine type: {kwargs.keys()}") def _parse_file_args( file_obj: Loadable, file_type: Optional[str], - resolver: Union[None, Dict, resolvers.Resolver] = None, + resolver: Optional[resolvers.ResolverLike] = None, + allow_remote: bool = False, **kwargs, -): +) -> LoadSource: """ Given a file_obj and a file_type try to magically convert arguments to a file-like object and a lowercase string of @@ -553,21 +554,15 @@ def _parse_file_args( Returns ----------- - file_obj : file-like object - Contains data - file_type : str - Lower case of the type of file (eg 'stl', 'dae', etc) - metadata : dict - Any metadata gathered - opened : bool - Did we open the file or not - resolver : trimesh.visual.Resolver - Resolver to load other assets + args + Populated `_FileArg` message """ - metadata = {} - opened = False - if "metadata" in kwargs and isinstance(kwargs["metadata"], dict): - metadata.update(kwargs["metadata"]) + + # keep track if we opened a file ourselves and thus are + # responsible for closing it at the end of loading + was_opened = False + # try to save a file path from various inputs + file_path = None if util.is_pathlib(file_obj): # convert pathlib objects to string @@ -575,15 +570,16 @@ def _parse_file_args( if util.is_file(file_obj) and file_type is None: raise ValueError("file_type must be set for file objects!") + if isinstance(file_obj, str): try: # os.path.isfile will return False incorrectly # if we don't give it an absolute path - file_path = os.path.expanduser(file_obj) - file_path = os.path.abspath(file_path) + file_path = os.path.abspath(os.path.expanduser(file_obj)) exists = os.path.isfile(file_path) except BaseException: exists = False + file_path = None # file obj is a string which exists on filesystm if exists: @@ -591,25 +587,39 @@ def _parse_file_args( if resolver is None: resolver = resolvers.FilePathResolver(file_path) # save the file name and path to metadata - metadata["file_path"] = file_path - metadata["file_name"] = os.path.basename(file_obj) # if file_obj is a path that exists use extension as file_type if file_type is None: file_type = util.split_extension(file_path, special=["tar.gz", "tar.bz2"]) # actually open the file file_obj = open(file_path, "rb") - opened = True + was_opened = True else: if "{" in file_obj: - # if a dict bracket is in the string, its probably a straight - # JSON + # if a bracket is in the string it's probably straight JSON file_type = "json" + file_obj = util.wrap_as_stream(file_obj) elif "https://" in file_obj or "http://" in file_obj: - # we've been passed a URL, warn to use explicit function - # and don't do network calls via magical pipeline - raise ValueError(f"use load_remote to load URL: {file_obj}") + if not allow_remote: + raise 
ValueError("unable to load URL with `allow_remote=False`") + + import urllib + + # remove the url-safe encoding and query params + file_type = util.split_extension( + urllib.parse.unquote(file_obj).split("?", 1)[0].split("/")[-1].strip() + ) + # create a web resolver to do the fetching and whatnot + resolver = resolvers.WebResolver(url=file_obj) + # fetch the base file + file_obj = util.wrap_as_stream(resolver.get_base()) + elif file_type is None: raise ValueError(f"string is not a file: {file_obj}") + else: + file_obj = None + elif isinstance(file_obj, dict): + file_obj = util.wrap_as_stream(json.dumps(file_obj)) + file_type = "dict" if file_type is None: file_type = file_obj.__class__.__name__ @@ -617,9 +627,7 @@ def _parse_file_args( if isinstance(file_type, str) and "." in file_type: # if someone has passed the whole filename as the file_type # use the file extension as the file_type - if "file_path" not in metadata: - metadata["file_path"] = file_type - metadata["file_name"] = os.path.basename(file_type) + file_path = file_type file_type = util.split_extension(file_type) if resolver is None and os.path.exists(file_type): resolver = resolvers.FilePathResolver(file_type) @@ -627,6 +635,16 @@ def _parse_file_args( # all our stored extensions reference in lower case file_type = file_type.lower() + # if user passed in a metadata dict add it + # if len(kwargs.get("metadata", {})) > 0: + # metadata = kwargs["metadata"] + # else: + # metadata["file_type"] = file_type + # if file_path is not None: + # metadata.update( + # {"file_path": file_path, "file_name": os.path.basename(file_path)} + # ) + # if we still have no resolver try using file_obj name if ( resolver is None @@ -636,15 +654,21 @@ def _parse_file_args( ): resolver = resolvers.FilePathResolver(file_obj.name) - return file_obj, file_type, metadata, opened, resolver + return LoadSource( + file_obj=file_obj, + file_type=file_type, + file_path=file_path, + was_opened=was_opened, + resolver=resolver, + ) # loader functions for compressed extensions compressed_loaders = { - "zip": load_compressed, - "tar.bz2": load_compressed, - "tar.gz": load_compressed, - "bz2": load_compressed, + "zip": _load_compressed, + "tar.bz2": _load_compressed, + "tar.gz": _load_compressed, + "bz2": _load_compressed, } # map file_type to loader function diff --git a/trimesh/exchange/misc.py b/trimesh/exchange/misc.py index eab91e78b..a13f176a8 100644 --- a/trimesh/exchange/misc.py +++ b/trimesh/exchange/misc.py @@ -1,9 +1,10 @@ import json +from tempfile import NamedTemporaryFile from .. import util -def load_dict(data, **kwargs): +def load_dict(file_obj, **kwargs): """ Load multiple input types into kwargs for a Trimesh constructor. 
Tries to extract keys: @@ -14,7 +15,7 @@ def load_dict(data, **kwargs): Parameters ---------- - data : dict + file_obj : dict accepts multiple forms -dict: has keys for vertices and faces as (n,3) numpy arrays -dict: has keys for vertices/faces (n,3) arrays encoded as dicts/base64 @@ -30,19 +31,19 @@ def load_dict(data, **kwargs): -faces: (n,3) int -face_normals: (n,3) float (optional) """ - if data is None: - raise ValueError("data passed to load_dict was None!") - if util.is_instance_named(data, "Trimesh"): - return data - if isinstance(data, str): - if "{" not in data: + if file_obj is None: + raise ValueError("file_obj passed to load_dict was None!") + if util.is_instance_named(file_obj, "Trimesh"): + return file_obj + if isinstance(file_obj, str): + if "{" not in file_obj: raise ValueError("Object is not a JSON encoded dictionary!") - data = json.loads(data.decode("utf-8")) - elif util.is_file(data): - data = json.load(data) + file_obj = json.loads(file_obj.decode("utf-8")) + elif util.is_file(file_obj): + file_obj = json.load(file_obj) - # what shape should the data be to be usable - mesh_data = { + # what shape should the file_obj be to be usable + mesh_file_obj = { "vertices": (-1, 3), "faces": (-1, (3, 4)), "face_normals": (-1, 3), @@ -51,26 +52,27 @@ def load_dict(data, **kwargs): "vertex_colors": (-1, (3, 4)), } - # now go through data structure and if anything is encoded as base64 + # now go through file_obj structure and if anything is encoded as base64 # pull it back into numpy arrays - if isinstance(data, dict): - loaded = {} - data = util.decode_keys(data, "utf-8") - for key, shape in mesh_data.items(): - if key in data: - loaded[key] = util.encoded_to_array(data[key]) - if not util.is_shape(loaded[key], shape): - raise ValueError( - "Shape of %s is %s, not %s!", - key, - str(loaded[key].shape), - str(shape), - ) - if len(key) == 0: - raise ValueError("Unable to extract any mesh data!") - return loaded - else: - raise ValueError("%s object passed to dict loader!", data.__class__.__name__) + if not isinstance(file_obj, dict): + raise ValueError(f"`{type(file_obj)}` object passed to dict loader!") + + loaded = {} + file_obj = util.decode_keys(file_obj, "utf-8") + for key, shape in mesh_file_obj.items(): + if key in file_obj: + loaded[key] = util.encoded_to_array(file_obj[key]) + if not util.is_shape(loaded[key], shape): + raise ValueError( + "Shape of %s is %s, not %s!", + key, + str(loaded[key].shape), + str(shape), + ) + if len(loaded) == 0: + raise ValueError("Unable to extract a mesh from the dict!") + + return loaded def load_meshio(file_obj, file_type=None, **kwargs): @@ -98,16 +100,26 @@ def load_meshio(file_obj, file_type=None, **kwargs): # e.g., the ones that use h5m underneath # in that case use the associated file name instead mesh = None - for file_format in file_formats: - try: - mesh = meshio.read(file_obj.name, file_format=file_format) - break - except BaseException: - util.log.debug("failed to load", exc_info=True) - if mesh is None: - raise ValueError("Failed to load file!") - - # save data as kwargs for a trimesh.Trimesh + + with NamedTemporaryFile(suffix=f".{file_type}") as temp: + temp.write(file_obj.read()) + temp.flush() + + if file_type in file_formats: + # if we've been passed the file type and don't have to guess + mesh = meshio.read(temp.name, file_format=file_type) + else: + # try the loaders in order + for file_format in file_formats: + try: + mesh = meshio.read(temp.name, file_format=file_format) + break + except BaseException: + 
util.log.debug("failed to load", exc_info=True) + if mesh is None: + raise ValueError("Failed to load file!") + + # save file_obj as kwargs for a trimesh.Trimesh result = {} # pass kwargs to mesh constructor result.update(kwargs) @@ -123,7 +135,7 @@ def load_meshio(file_obj, file_type=None, **kwargs): return result -_misc_loaders = {"dict": load_dict, "dict64": load_dict, "json": load_dict} +_misc_loaders = {"dict": load_dict, "dict64": load_dict} try: import meshio diff --git a/trimesh/exchange/obj.py b/trimesh/exchange/obj.py index 841d183ce..3862ea902 100644 --- a/trimesh/exchange/obj.py +++ b/trimesh/exchange/obj.py @@ -16,17 +16,20 @@ from .. import util from ..constants import log, tol +from ..resolvers import ResolverLike +from ..typed import Dict, Loadable, Optional from ..visual.color import to_float from ..visual.material import SimpleMaterial from ..visual.texture import TextureVisuals, unmerge_faces def load_obj( - file_obj, - resolver=None, - group_material=True, - skip_materials=False, - maintain_order=False, + file_obj: Loadable, + resolver: Optional[ResolverLike] = None, + group_material: bool = True, + skip_materials: bool = False, + maintain_order: bool = False, + metadata: Optional[Dict] = None, **kwargs, ): """ @@ -163,10 +166,22 @@ def load_obj( log.debug("faces have mixed data: using slow fallback!") faces, faces_tex, faces_norm = _parse_faces_fallback(face_lines) - if group_material: + if group_material and len(materials) > 1: name = material - else: + elif current_object is not None: name = current_object + else: + # try to use the file name from the resolver + # or file object if possible before defaulting + name = next( + i + for i in ( + getattr(resolver, "file_name", None), + getattr(file_obj, "name", None), + "geometry", + ) + if i is not None + ) # ensure the name is always unique name = util.unique_name(name, geometry) @@ -218,9 +233,13 @@ def load_obj( faces, faces_norm, maintain_faces=maintain_order ) else: + # face_tex is None and # generate the mask so we only include # referenced vertices in every new mesh - mask_v = np.zeros(len(v), dtype=bool) + if maintain_order: + mask_v = np.ones(len(v), dtype=bool) + else: + mask_v = np.zeros(len(v), dtype=bool) mask_v[faces] = True # reconstruct the faces with the new vertex indices @@ -269,17 +288,11 @@ def load_obj( # store geometry by name geometry[name] = mesh - if len(geometry) == 1: - # TODO : should this be removed to always return a scene? - return next(iter(geometry.values())) - # add an identity transform for every geometry graph = [{"geometry": k, "frame_to": k} for k in geometry.keys()] # convert to scene kwargs - result = {"geometry": geometry, "graph": graph} - - return result + return {"geometry": geometry, "graph": graph} def parse_mtl(mtl, resolver=None): @@ -584,15 +597,15 @@ def _parse_vertices(text): # we have a nice 2D array result[k] = array.reshape(shape) else: - # try to recover with a slightly more expensive loop - count = per_row[k] - try: - # try to get result through reshaping - result[k] = np.fromstring( - " ".join(i.split()[:count] for i in value), sep=" ", dtype=np.float64 - ).reshape(shape) - except BaseException: - pass + # we don't have a nice (n, d) array so fall back to a slow loop + # this is where mixed "some of the values but not all have vertex colors" + # problem is handled. 
+ lines = [] + [[lines.append(v.strip().split()) for v in str.splitlines(i)] for i in value] + # we need to make a 2D array so clip it to the shortest array + count = min(len(L) for L in lines) + # make a numpy array out of the cleaned up line data + result[k] = np.array([L[:count] for L in lines], dtype=np.float64) # vertices v = result["v"] diff --git a/trimesh/graph.py b/trimesh/graph.py index 0e2894c9b..62d7c6372 100644 --- a/trimesh/graph.py +++ b/trimesh/graph.py @@ -9,7 +9,6 @@ """ import collections -import warnings import numpy as np @@ -705,8 +704,7 @@ def edges_to_coo(edges, count=None, data=None): if data is None: data = np.ones(len(edges), dtype=bool) - matrix = coo_matrix((data, edges.T), dtype=data.dtype, shape=(count, count)) - return matrix + return coo_matrix((data, edges.T), dtype=data.dtype, shape=(count, count)) def neighbors(edges, max_index=None, directed=False): @@ -743,19 +741,6 @@ def neighbors(edges, max_index=None, directed=False): return array -def smoothed(*args, **kwargs): - """ - DEPRECATED: use `trimesh.graph.smooth_shade(mesh, ...)` - """ - warnings.warn( - "`trimesh.graph.smoothed` is deprecated and will be removed in March 2024: " - + "use `trimesh.graph.smooth_shade(mesh, ...)`", - category=DeprecationWarning, - stacklevel=2, - ) - return smooth_shade(*args, **kwargs) - - def smooth_shade( mesh, angle: Optional[Number] = None, facet_minarea: Optional[Number] = 10.0 ): diff --git a/trimesh/interfaces/generic.py b/trimesh/interfaces/generic.py index 694129bf4..9faa58a5f 100644 --- a/trimesh/interfaces/generic.py +++ b/trimesh/interfaces/generic.py @@ -73,11 +73,9 @@ def run(self, command): output = check_output( command_run, stderr=subprocess.STDOUT, startupinfo=startupinfo ) - except CalledProcessError as e: - # Log output if debug is enabled - if self.debug: - log.info(e.output.decode()) - raise + except CalledProcessError as E: + # raise with the output from the process + raise RuntimeError(E.output.decode()) if self.debug: log.info(output.decode()) diff --git a/trimesh/parent.py b/trimesh/parent.py index 6b0e50bbe..7e216ddc9 100644 --- a/trimesh/parent.py +++ b/trimesh/parent.py @@ -6,6 +6,8 @@ """ import abc +import os +from dataclasses import dataclass import numpy as np @@ -13,10 +15,48 @@ from . import transformations as tf from .caching import cache_decorator from .constants import tol -from .typed import Any, ArrayLike, Dict, NDArray, Optional +from .resolvers import ResolverLike +from .typed import Any, ArrayLike, Dict, NDArray, Optional, Stream, float64 from .util import ABC +@dataclass +class LoadSource: + """ + Save information about where a particular object was loaded from. + """ + + # a file-like object that can be accessed + file_obj: Optional[Stream] + + # a cleaned file type string, i.e. "stl" + file_type: str + + # if this was originally loaded from a file path + # save it here so we can check it later. + file_path: Optional[str] + + # did we open `file_obj` ourselves? 
+ was_opened: bool
+
+ # a resolver for loading assets next to the file
+ resolver: Optional[ResolverLike]
+
+ @property
+ def file_name(self) -> Optional[str]:
+ if self.file_path is None:
+ return None
+ return os.path.basename(self.file_path)
+
+ def __getstate__(self) -> Dict:
+ # this overrides the `pickle.dump` behavior for this class
+ # we cannot pickle a file object so return `file_obj: None` for pickles
+ return {k: v if k != "file_obj" else None for k, v in self.__dict__.items()}
+
+ def __deepcopy__(self, *args):
+ return LoadSource(**self.__getstate__())
+
+
 class Geometry(ABC):
 """
 `Geometry` is the parent class for all geometry.
@@ -28,6 +68,7 @@ class Geometry(ABC):
 # geometry should have a dict to store loose metadata
 metadata: Dict
+ source: Optional[LoadSource] = None
 @property
 @abc.abstractmethod
@@ -54,7 +95,7 @@ def __hash__(self):
 Returns
 ---------
- hash : int
+ hash
 Hash of current graph and geometry.
 """
 return self._data.__hash__() # type: ignore
@@ -75,7 +116,7 @@ def __add__(self, other):
 def export(self, file_obj, file_type=None):
 pass
- def __repr__(self):
+ def __repr__(self) -> str:
 """
 Print quick summary of the current geometry
 without computing properties.
@@ -309,7 +350,7 @@ def bounding_primitive(self):
 volume_min = np.argmin([i.volume for i in options])
 return options[volume_min]
- def apply_obb(self, **kwargs):
+ def apply_obb(self, **kwargs) -> NDArray[float64]:
 """
 Apply the oriented bounding box transform
 to the current mesh.
diff --git a/trimesh/path/exchange/dxf.py b/trimesh/path/exchange/dxf.py
index 32140681e..b8a00c075 100644
--- a/trimesh/path/exchange/dxf.py
+++ b/trimesh/path/exchange/dxf.py
@@ -76,7 +76,7 @@ def load_dxf(file_obj, **kwargs):
 # do it by encoding sentinel to bytes and subset searching
 if raw[:22].find(b"AutoCAD Binary DXF") != -1:
 # no converter to ASCII DXF available
- raise ValueError("binary DXF not supported!")
+ raise NotImplementedError("Binary DXF is not supported!")
 else:
 # we've been passed bytes that don't have the
 # header for binary DXF so try decoding as UTF-8
@@ -814,7 +814,7 @@ def convert_generic(entity, vertices):
 def bulge_to_arcs(lines, bulge, bulge_idx, is_closed=False, metadata=None):
 """
- Polylines can have "vertex bulge," which means the polyline
+ Polylines can have "vertex bulge" which means the polyline
 has an arc tangent to segments, rather than
 meeting at a vertex. 
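A sketch of what the `LoadSource` record added above makes available after a load; the path is a placeholder, and note that `__getstate__` deliberately nulls the open file handle so pickling and deep-copying work:

    import pickle
    import trimesh

    scene = trimesh.load_scene("models/example.glb")

    # the parsed load arguments now travel with the result
    print(scene.source.file_type)  # "glb"
    print(scene.source.file_name)  # basename of source.file_path

    # file_obj is replaced with None during pickling, so this round-trips
    source = pickle.loads(pickle.dumps(scene.source))
    assert source.file_obj is None
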
diff --git a/trimesh/path/exchange/load.py b/trimesh/path/exchange/load.py
index 16dea51e6..e2805bf37 100644
--- a/trimesh/path/exchange/load.py
+++ b/trimesh/path/exchange/load.py
@@ -32,7 +32,7 @@ def load_path(file_obj, file_type: Optional[str] = None, **kwargs):
 Data as a native trimesh Path file_object
 """
 # avoid a circular import
- from ...exchange.load import load_kwargs
+ from ...exchange.load import _load_kwargs
 if isinstance(file_type, str):
 # we accept full file names here so make sure we
@@ -72,14 +72,14 @@ def load_path(file_obj, file_type: Optional[str] = None, **kwargs):
 kwargs.update(misc.linestrings_to_path(file_obj))
 elif isinstance(file_obj, dict):
 # load as kwargs
- return load_kwargs(file_obj)
+ return _load_kwargs(file_obj)
 elif util.is_sequence(file_obj):
 # load as lines in space
 kwargs.update(misc.lines_to_path(file_obj))
 else:
 raise ValueError("Not a supported object type!")
- result = load_kwargs(kwargs)
+ result = _load_kwargs(kwargs)
 util.log.debug(f"loaded {result!s} in {util.now() - tic:0.4f}s")
 return result
diff --git a/trimesh/path/exchange/misc.py b/trimesh/path/exchange/misc.py
index de76fdcd0..9c977071f 100644
--- a/trimesh/path/exchange/misc.py
+++ b/trimesh/path/exchange/misc.py
@@ -2,7 +2,7 @@
 from ... import graph, grouping, util
 from ...constants import tol_path
-from ...typed import ArrayLike, Dict
+from ...typed import ArrayLike, Dict, NDArray, Optional
 from ..entities import Arc, Line
@@ -37,7 +37,7 @@ def dict_to_path(as_dict):
 return result
-def lines_to_path(lines):
+def lines_to_path(lines: ArrayLike, index: Optional[NDArray[np.int64]] = None) -> Dict:
 """
 Turn line segments into a Path2D or Path3D object.
@@ -45,6 +45,8 @@ def lines_to_path(lines):
 ------------
 lines : (n, 2, dimension) or (n, dimension) float
 Line segments or connected polyline curve in 2D or 3D
+ index : (n,) int64
+ If passed save an index for each line segment.
 Returns
 -----------
@@ -53,6 +55,9 @@ def lines_to_path(lines):
 """
 lines = np.asanyarray(lines, dtype=np.float64)
+ if index is not None:
+ index = np.asanyarray(index, dtype=np.int64)
+
 if util.is_shape(lines, (-1, (2, 3))):
 # the case where we have a list of points
 # we are going to assume they are connected
diff --git a/trimesh/path/path.py b/trimesh/path/path.py
index b3e9d4429..a1e3714bd 100644
--- a/trimesh/path/path.py
+++ b/trimesh/path/path.py
@@ -8,6 +8,7 @@
 import collections
 import copy
+import warnings
 from hashlib import sha256
 import numpy as np
@@ -18,7 +19,7 @@
 from ..constants import tol_path as tol
 from ..geometry import plane_transform
 from ..points import plane_fit
-from ..typed import ArrayLike, Dict, Iterable, List, NDArray, Optional, float64
+from ..typed import ArrayLike, Dict, Iterable, List, NDArray, Optional, Tuple, float64
 from ..visual import to_rgba
 from . import (
 creation, # NOQA
@@ -773,12 +774,23 @@ class Path3D(Path):
 Hold multiple vector curves (lines, arcs, splines, etc) in 3D.
 """
- def to_planar(
+ def to_planar(self, *args, **kwargs):
+ """
+ DEPRECATED: replace `path.to_planar` -> `path.to_2D`, removal 1/1/2026
+ """
+ warnings.warn(
+ "DEPRECATED: replace `path.to_planar` -> `path.to_2D`, removal 1/1/2026",
+ category=DeprecationWarning,
+ stacklevel=2,
+ )
+ return self.to_2D(*args, **kwargs)
+
+ def to_2D(
 self,
 to_2D: Optional[ArrayLike] = None,
 normal: Optional[ArrayLike] = None,
 check: bool = True,
- ):
+ ) -> Tuple["Path2D", NDArray[float64]]:
 """
 Check to see if current vectors are all coplanar. 
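The shim above keeps old callers working while warning; a sketch of the replacement spelling, using a small planar path built from line segments as a stand-in:

    import numpy as np
    import trimesh

    # two connected segments lying in the XY plane
    segments = np.array(
        [[[0, 0, 0], [1, 0, 0]], [[1, 0, 0], [1, 1, 0]]], dtype=np.float64
    )
    path = trimesh.load_path(segments)

    # new spelling: returns the planar path and the matrix back to 3D
    planar, to_3D = path.to_2D()

    # old spelling still works but emits a DeprecationWarning
    planar, to_3D = path.to_planar()
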
@@ -791,17 +803,17 @@ def to_planar(
 Homogeneous transformation matrix to apply,
 if not passed a plane will be fitted to vertices.
 normal : (3,) float or None
- Normal of direction of plane to use.
+ Normal direction of the plane to use.
 check
- Raise a ValueError if points aren't coplanar
+ Raise a ValueError if points aren't coplanar.
 Returns
 -----------
- planar : trimesh.path.Path2D
- Current path transformed onto plane
- to_3D : (4,4) float
- Homeogenous transformations to move planar
- back into 3D space
+ planar
+ Current path transformed onto plane
+ to_3D : (4, 4) float
+ Homogeneous transformation to move planar
+ back into the original 3D frame.
 """
 # which vertices are actually referenced
 referenced = self.referenced_vertices
diff --git a/trimesh/registration.py b/trimesh/registration.py
index 15dba1781..6c82071f1 100644
--- a/trimesh/registration.py
+++ b/trimesh/registration.py
@@ -209,10 +209,8 @@ def procrustes(
 Finds the transformation T mapping a to b which minimizes the
 square sum distances between Ta and b, also called the cost.
- Optionally specify different weights for the points in a to minimize
- the weighted square sum distances between Ta and b, which can
- improve transformation robustness on noisy data if the points'
- probability distribution is known.
+ Optionally filter the points in a and b via a binary weights array.
+ Non-uniform weights are also supported, but won't yield the optimal rotation.
 Parameters
 ----------
@@ -221,7 +219,12 @@
 b : (n,3) float
 List of points in space
 weights : (n,) float
- List of floats representing how much weight is assigned to each point of a
+ List of floats representing how much weight is assigned to each point.
+ Binary entries can be used to filter the arrays; normalization is not required.
+ Translation and scaling are adjusted according to the weighting.
+ Note, however, that this method does not yield the optimal rotation for
+ non-uniform weighting,
+ as this would require an iterative, nonlinear optimization approach.
 reflection : bool
 If the transformation is allowed reflections
 translation : bool
@@ -241,56 +244,51 @@
 The cost of the transformation
 """
- a = np.asanyarray(a, dtype=np.float64)
- b = np.asanyarray(b, dtype=np.float64)
- if not util.is_shape(a, (-1, 3)) or not util.is_shape(b, (-1, 3)):
+ a_original = np.asanyarray(a, dtype=np.float64)
+ b_original = np.asanyarray(b, dtype=np.float64)
+ if not util.is_shape(a_original, (-1, 3)) or not util.is_shape(b_original, (-1, 3)):
 raise ValueError("points must be (n,3)!")
- if len(a) != len(b):
+ if len(a_original) != len(b_original):
 raise ValueError("a and b must contain same number of points!")
- if weights is not None:
- w = np.asanyarray(weights, dtype=np.float64)
- if len(w) != len(a):
- raise ValueError("weights must have same length as a and b!")
- w_norm = (w / w.sum()).reshape((-1, 1))
+ # weights are set to uniform if not provided.
+ if weights is None:
+ weights = np.ones(len(a_original))
+ w = np.maximum(np.asanyarray(weights, dtype=np.float64), 0)
+ if len(w) != len(a):
+ raise ValueError("weights must have same length as a and b!")
+ w_norm = (w / w.sum()).reshape((-1, 1))
+
+ # All zero entries are removed from further computations.
+ # If weights is a binary array, the optimal solution can still be found by
+ # simply removing the zero entries. 
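# [illustrative aside, not part of the patch] the binary-weight behavior
# described above as a runnable sketch: zero-weighted points are masked out
# before solving, so the result matches procrustes on the kept subset alone.
#
#     import numpy as np
#     from trimesh.registration import procrustes
#
#     a = np.random.random((100, 3))
#     b = a + [1.0, 2.0, 3.0]
#     weights = np.zeros(len(a))
#     weights[:50] = 1.0  # binary mask: only the first 50 points participate
#     matrix, transformed, cost = procrustes(a, b, weights=weights)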
+ nonzero_weights = w_norm[:, 0] > 0.0 + a_nonzero = a_original[nonzero_weights] + b_nonzero = b_original[nonzero_weights] + w_norm = w_norm[nonzero_weights] # Remove translation component if translation: - # acenter is a weighted average of the individual points. - if weights is None: - acenter = a.mean(axis=0) - else: - acenter = (a * w_norm).sum(axis=0) - bcenter = b.mean(axis=0) + # centers are (weighted) averages of the individual points. + acenter = (a_nonzero * w_norm).sum(axis=0) + bcenter = (b_nonzero * w_norm).sum(axis=0) else: - acenter = np.zeros(a.shape[1]) - bcenter = np.zeros(b.shape[1]) + acenter = np.zeros(a_nonzero.shape[1]) + bcenter = np.zeros(b_nonzero.shape[1]) # Remove scale component if scale: - if weights is None: - ascale = np.sqrt(((a - acenter) ** 2).sum() / len(a)) - # ascale is the square root of weighted average of the - # squared difference - # between each point and acenter. - else: - ascale = np.sqrt((((a - acenter) ** 2) * w_norm).sum()) - - bscale = np.sqrt(((b - bcenter) ** 2).sum() / len(b)) + # scale is the square root of the (weighted) average of the + # squared difference between each point and the center. + ascale = np.sqrt((((a_nonzero - acenter) ** 2) * w_norm).sum()) + bscale = np.sqrt((((b_nonzero - bcenter) ** 2) * w_norm).sum()) else: ascale = 1 bscale = 1 # Use SVD to find optimal orthogonal matrix R # constrained to det(R) = 1 if necessary. - # w_mat is multiplied with the centered and scaled a, such that the points - # can be weighted differently. - if weights is None: - target = np.dot(((b - bcenter) / bscale).T, ((a - acenter) / ascale)) - else: - target = np.dot( - ((b - bcenter) / bscale).T, ((a - acenter) / ascale) * w.reshape((-1, 1)) - ) + target = np.dot(((b_nonzero - bcenter) / bscale).T, ((a_nonzero - acenter) / ascale)) u, _s, vh = np.linalg.svd(target) @@ -308,12 +306,14 @@ def procrustes( matrix = np.vstack((matrix, np.array([0.0] * (a.shape[1]) + [1.0]).reshape(1, -1))) if return_cost: - transformed = transform_points(a, matrix) - # return the mean euclidean distance squared as the cost - cost = ((b - transformed) ** 2).mean() + # Transform the original input array, including zero-weighted points + transformed = transform_points(a_original, matrix) + # The cost is the (weighted) sum of the euclidean distances between + # the transformed source points and the target points. + cost = (((b_nonzero - transformed[nonzero_weights]) ** 2) * w_norm).sum() return matrix, transformed, cost - else: - return matrix + + return matrix def icp(a, b, initial=None, threshold=1e-5, max_iterations=20, **kwargs): diff --git a/trimesh/resolvers.py b/trimesh/resolvers.py index 02e0ba62a..804e66dab 100644 --- a/trimesh/resolvers.py +++ b/trimesh/resolvers.py @@ -12,7 +12,7 @@ import os from . 
import caching, util -from .typed import Optional, Union +from .typed import Dict, Mapping, Optional, Union # URL parsing for remote resources via WebResolver try: @@ -37,20 +37,24 @@ def get(self, key): raise NotImplementedError() @abc.abstractmethod - def write(self, name, data): + def write(self, name: str, data): raise NotImplementedError("`write` not implemented!") @abc.abstractmethod - def namespaced(self, namespace): + def namespaced(self, namespace: str): raise NotImplementedError("`namespaced` not implemented!") - def __getitem__(self, key): + @abc.abstractmethod + def keys(self): + raise NotImplementedError("`keys` not implemented!") + + def __getitem__(self, key: str): return self.get(key) - def __setitem__(self, key, value): + def __setitem__(self, key: str, value): return self.write(key, value) - def __contains__(self, key): + def __contains__(self, key: str) -> bool: return key in self.keys() @@ -84,6 +88,9 @@ def __init__(self, source: str): if not os.path.isdir(self.parent): raise ValueError(f"path `{self.parent} `not a directory!") + self.file_path = source + self.file_name = os.path.basename(source) + def keys(self): """ List all files available to be loaded. @@ -163,7 +170,7 @@ class ZipResolver(Resolver): Resolve files inside a ZIP archive. """ - def __init__(self, archive=None, namespace=None): + def __init__(self, archive: Optional[Dict] = None, namespace: Optional[str] = None): """ Resolve files inside a ZIP archive as loaded by trimesh.util.decompress @@ -205,7 +212,7 @@ def keys(self): ] return self.archive.keys() - def write(self, key: str, value): + def write(self, key: str, value) -> None: """ Store a value in the current archive. @@ -220,7 +227,7 @@ def write(self, key: str, value): self.archive = {} self.archive[key] = value - def get(self, name: str): + def get(self, name: str) -> bytes: """ Get an asset from the ZIP archive. @@ -302,7 +309,7 @@ class WebResolver(Resolver): Resolve assets from a remote URL. """ - def __init__(self, url): + def __init__(self, url: str): """ Resolve assets from a base URL. @@ -317,7 +324,6 @@ def __init__(self, url): # parse string into namedtuple parsed = urlparse(url) - # we want a base url split = [i for i in parsed.path.split("/") if len(i) > 0] @@ -331,6 +337,11 @@ def __init__(self, url): else: # recombine into string ignoring any double slashes path = "/".join(split) + + # save the URL we were created with, i.e. + # `https://stuff.com/models/thing.glb` + self.url = url + # save the root url, i.e. `https://stuff.com/models` self.base_url = ( "/".join( i @@ -345,7 +356,9 @@ def __init__(self, url): # we should always have ended with a single slash assert self.base_url.endswith("/") - def get(self, name): + self.file_name = url.split("/")[-1] + + def get(self, name: str) -> bytes: """ Get a resource from the remote site. @@ -378,7 +391,25 @@ def get(self, name): # return the bytes of the response return response.content - def namespaced(self, namespace): + def get_base(self) -> bytes: + """ + Fetch the data at the full URL this resolver was + instantiated with, i.e. `https://stuff.com/hi.glb` + this will return the response. + + Returns + -------- + content + The value at `self.url` + """ + import httpx + + # just fetch the url we were created with + response = httpx.get(self.url, follow_redirects=True) + response.raise_for_status() + return response.content + + def namespaced(self, namespace: str) -> "WebResolver": """ Return a namespaced version of current resolver. 
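A sketch of how a loader can use the expanded `WebResolver`: the base document comes from `get_base()` and sibling assets resolve against `base_url` (the URL and the `.mtl` name are placeholders):

    from trimesh.resolvers import WebResolver

    resolver = WebResolver("https://example.com/models/example.obj")

    # bytes of the exact file the resolver was created with
    raw = resolver.get_base()

    # a sibling asset fetched relative to the base URL
    material = resolver.get("example.mtl")
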
@@ -396,7 +427,10 @@ def namespaced(self, namespace):
 return WebResolver(url=self.base_url + namespace)
 def write(self, key, value):
- raise NotImplementedError("can't write to remote")
+ raise NotImplementedError("`WebResolver` is read-only!")
+
+ def keys(self):
+ raise NotImplementedError("`WebResolver` can't list keys")
 class GithubResolver(Resolver):
@@ -455,7 +489,6 @@ def write(self, name, data):
 @property
 def zipped(self) -> ZipResolver:
 """
-
 - opened zip file
 - locally saved zip file
 - retrieve zip file and saved
@@ -476,8 +509,15 @@ def fetch() -> bytes:
 # download the archive or get from disc
 raw = self.cache.get(self.url, fetch)
 # create a zip resolver for the archive
+ # the root directory in the zip is the repo+commit so strip that off
+ # so the keys are usable, i.e. "models" instead of "trimesh-2232323/models"
 self._zip = ZipResolver(
- util.decompress(util.wrap_as_stream(raw), file_type="zip")
+ {
+ k.split("/", 1)[1]: v
+ for k, v in util.decompress(
+ util.wrap_as_stream(raw), file_type="zip"
+ ).items()
+ }
 )
 return self._zip
@@ -569,3 +609,7 @@ def trim(prefix, item):
 strip = namespace.strip("/").split("/")[: -name.count("..")]
 strip.extend(name.split("..")[-1].strip("/").split("/"))
 yield "/".join(strip)
+
+
+# most loaders can use a mapping in addition to a resolver
+ResolverLike = Union[Resolver, Mapping]
diff --git a/trimesh/resources/__init__.py b/trimesh/resources/__init__.py
index f0c66241e..77b48a381 100644
--- a/trimesh/resources/__init__.py
+++ b/trimesh/resources/__init__.py
@@ -1,81 +1,15 @@
 import json
 import os
-import warnings
+from io import BytesIO
-from ..typed import Dict, Stream
-from ..util import decode_text, wrap_as_stream
+from ..typed import Dict
 # find the current absolute path to this directory
 _pwd = os.path.expanduser(os.path.abspath(os.path.dirname(__file__)))
-
 # once resources are loaded cache them
 _cache = {}
-def get(
- name: str, decode: bool = True, decode_json: bool = False, as_stream: bool = False
-):
- """
- DERECATED JANUARY 2025 REPLACE WITH TYPED `get_json`, `get_string`, etc.
- """
- warnings.warn(
- "`trimesh.resources.get` is deprecated "
- + "and will be removed in January 2025: "
- + "replace with typed `trimesh.resources.get_*type*`",
- category=DeprecationWarning,
- stacklevel=2,
- )
- return _get(name=name, decode=decode, decode_json=decode_json, as_stream=as_stream)
-
-
-def _get(name: str, decode: bool, decode_json: bool, as_stream: bool):
- """
- Get a resource from the `trimesh/resources` folder.
-
- Parameters
- -------------
- name : str
- File path relative to `trimesh/resources`
- decode : bool
- Whether or not to decode result as UTF-8
- decode_json : bool
- Run `json.loads` on resource if True. 
-    as_stream : bool
-      Return as a file-like object
-
-    Returns
-    -------------
-    resource : str, bytes, or decoded JSON
-      File data
-    """
-    # key by name and decode
-    cache_key = (name, bool(decode), bool(decode_json), bool(as_stream))
-    cached = _cache.get(cache_key)
-    if hasattr(cached, "seek"):
-        cached.seek(0)
-    if cached is not None:
-        return cached
-
-    # get the resource using relative names
-    with open(os.path.join(_pwd, name), "rb") as f:
-        resource = f.read()
-
-    # make sure we return it as a string if asked
-    if decode:
-        # will decode into text if possibly
-        resource = decode_text(resource)
-
-    if decode_json:
-        resource = json.loads(resource)
-    elif as_stream:
-        resource = wrap_as_stream(resource)
-
-    # store for later access
-    _cache[cache_key] = resource
-
-    return resource
-
-
 def get_schema(name: str) -> Dict:
     """
     Load a schema and evaluate the referenced files.
@@ -95,8 +29,8 @@ def get_schema(name: str) -> Dict:

     # get a resolver for our base path
     resolver = FilePathResolver(os.path.join(_pwd, "schema", name))
-    # recursively load $ref keys
-    return resolve(json.loads(decode_text(resolver.get(name))), resolver=resolver)
+    # recursively load `$ref` keys
+    return resolve(json.loads(resolver.get(name).decode("utf-8")), resolver=resolver)


 def get_json(name: str) -> Dict:
@@ -106,14 +40,14 @@ def get_json(name: str) -> Dict:

     Parameters
     -------------
     name : str
-      File path relative to `trimesh/resources`
+      POSIX-style file path relative to `trimesh/resources`

     Returns
     -------------
     resource
       File data decoded from JSON.
     """
-    return _get(name, decode=True, decode_json=True, as_stream=False)
+    return json.loads(get_bytes(name).decode("utf-8"))


 def get_string(name: str) -> str:
@@ -122,7 +56,7 @@ def get_string(name: str) -> str:

     Parameters
     -------------
-    name : str
+    name
       File path relative to `trimesh/resources`

     Returns
@@ -130,7 +64,7 @@ def get_string(name: str) -> str:
     resource
       File data as a string.
     """
-    return _get(name, decode=True, decode_json=False, as_stream=False)
+    return get_bytes(name).decode("utf-8")


 def get_bytes(name: str) -> bytes:
@@ -139,7 +73,7 @@ def get_bytes(name: str) -> bytes:

     Parameters
     -------------
-    name : str
+    name
       File path relative to `trimesh/resources`

     Returns
@@ -147,10 +81,21 @@ def get_bytes(name: str) -> bytes:
     resource
       File data as raw bytes.
     """
-    return _get(name, decode=False, decode_json=False, as_stream=False)
+    cached = _cache.get(name, None)
+    if cached is not None:
+        return cached
+
+    # get the resource using relative names
+    # all templates are using POSIX relative paths
+    # so fix them to be platform-specific
+    with open(os.path.join(_pwd, *name.split("/")), "rb") as f:
+        resource = f.read()
+
+    _cache[name] = resource
+    return resource


-def get_stream(name: str) -> Stream:
+def get_stream(name: str) -> BytesIO:
     """
     Get a resource from the `trimesh/resources`
     folder as a binary stream.
@@ -165,4 +110,4 @@ def get_stream(name: str) -> Stream:
       File data as a binary stream.
     """

-    return _get(name, decode=False, decode_json=False, as_stream=True)
+    return BytesIO(get_bytes(name))
diff --git a/trimesh/scene/scene.py b/trimesh/scene/scene.py
index 51946c470..7db16c61b 100644
--- a/trimesh/scene/scene.py
+++ b/trimesh/scene/scene.py
@@ -1,13 +1,14 @@
 import collections
 import uuid
 import warnings
+from copy import deepcopy

 import numpy as np

 from ..
import caching, convex, grouping, inertia, transformations, units, util from ..constants import log from ..exchange import export -from ..parent import Geometry, Geometry3D +from ..parent import Geometry, Geometry3D, LoadSource from ..registration import procrustes from ..typed import ( ArrayLike, @@ -49,6 +50,7 @@ def __init__( camera: Optional[cameras.Camera] = None, lights: Optional[Sequence[lighting.Light]] = None, camera_transform: Optional[NDArray] = None, + source: Optional[LoadSource] = None, ): """ Create a new Scene object. @@ -87,6 +89,7 @@ def __init__( self.metadata = {} if isinstance(metadata, dict): self.metadata.update(metadata) + self.source = source if graph is not None: # if we've been passed a graph override the default @@ -186,10 +189,6 @@ def add_geometry( self.graph.transforms = concat.graph.transforms return - if not hasattr(geometry, "vertices"): - util.log.debug(f"unknown type ({type(geometry).__name__}) added to scene!") - return - # get or create a name to reference the geometry by if geom_name is not None: # if name is passed use it @@ -197,8 +196,8 @@ def add_geometry( elif "name" in geometry.metadata: # if name is in metadata use it name = geometry.metadata["name"] - elif "file_name" in geometry.metadata: - name = geometry.metadata["file_name"] + elif geometry.source is not None and geometry.source.file_name is not None: + name = geometry.source.file_name else: # try to create a simple name name = "geometry_" + str(len(self.geometry)) @@ -280,7 +279,7 @@ def simplify_quadric_decimation( A number between 0.0 and 1.0 for how much face_count Target number of faces desired in the resulting mesh. - agression + aggression An integer between `0` and `10`, the scale being roughly `0` is "slow and good" and `10` being "fast and bad." 
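Since `add_geometry` now prefers `geometry.source.file_name` over the removed `metadata["file_name"]` lookup, the full naming fallback is worth spelling out. A short sketch of the order, using placeholder meshes from `trimesh.creation`:

    import trimesh

    scene = trimesh.Scene()

    # 1) an explicit `geom_name` argument always wins
    scene.add_geometry(trimesh.creation.box(), geom_name="lid")

    # 2) otherwise `geometry.metadata["name"]` is used when present
    tagged = trimesh.creation.icosphere()
    tagged.metadata["name"] = "dome"
    scene.add_geometry(tagged)

    # 3) otherwise the new `geometry.source.file_name` is consulted, and
    #    the final fallback is "geometry_" + str(len(scene.geometry)),
    #    so a third anonymous mesh would be named "geometry_2" here
    #    (assuming the creation helpers set no "name" metadata)
    scene.add_geometry(trimesh.creation.cylinder(radius=1.0, height=1.0))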
@@ -363,16 +362,15 @@ def bounds_corners(self) -> Dict[str, NDArray[float64]]: corners = {} # collect vertices for every mesh vertices = { - k: m.vertices + k: m.vertices if hasattr(m, "vertices") and len(m.vertices) > 0 else m.bounds for k, m in self.geometry.items() - if hasattr(m, "vertices") and len(m.vertices) > 0 } # handle 2D geometries vertices.update( { k: np.column_stack((v, np.zeros(len(v)))) for k, v in vertices.items() - if v.shape[1] == 2 + if v is not None and v.shape[1] == 2 } ) @@ -1438,27 +1436,23 @@ def split_scene(geometry, **kwargs): if isinstance(geometry, Scene): return geometry + # save metadata + metadata = {} + # a list of things if util.is_sequence(geometry): - metadata = {} [metadata.update(getattr(g, "metadata", {})) for g in geometry] - return Scene(geometry, metadata=metadata) - - # a single geometry so we are going to split - split = [] - metadata = {} - for g in util.make_sequence(geometry): - split.extend(g.split(**kwargs)) - metadata.update(g.metadata) - # if there is only one geometry in the mesh - # name it from the file name - if len(split) == 1 and "file_name" in metadata: - split = {metadata["file_name"]: split[0]} + source = next((g.source for g in geometry if g.source is not None), None) - scene = Scene(split, metadata=metadata) + return Scene(geometry, metadata=metadata, source=source) - return scene + # a single geometry so we are going to split + return Scene( + geometry.split(**kwargs), + metadata=deepcopy(geometry.metadata), + source=deepcopy(geometry.source), + ) def append_scenes(iterable, common=None, base_frame="world"): diff --git a/trimesh/typed.py b/trimesh/typed.py index 4118dfe84..7f6b2f177 100644 --- a/trimesh/typed.py +++ b/trimesh/typed.py @@ -13,7 +13,7 @@ from numpy import float64, floating, int64, integer, unsignedinteger # requires numpy>=1.20 -from numpy.typing import ArrayLike, NDArray +from numpy.typing import ArrayLike, DTypeLike, NDArray if version_info >= (3, 9): # use PEP585 hints on newer python @@ -63,6 +63,7 @@ "ArrayLike", "BinaryIO", "Callable", + "DTypeLike", "Dict", "Hashable", "Integer", diff --git a/trimesh/units.py b/trimesh/units.py index 16dfa7bd9..f8acf62df 100644 --- a/trimesh/units.py +++ b/trimesh/units.py @@ -100,26 +100,29 @@ def units_from_metadata(obj: Geometry, guess: bool = True) -> str: A guess of what the units might be """ + hints = [obj.metadata.get("name", None)] + if obj.source is not None: + hints.append(obj.source.file_name) + # try to guess from metadata - for key in ["file_name", "name"]: - if key not in obj.metadata: + for hint in hints: + if hint is None: continue - # get the string which might contain unit hints - hints = obj.metadata[key].lower() - if "unit" in hints: + hint = hint.lower() + if "unit" in hint: # replace all delimiter options with white space for delim in "_-.": - hints = hints.replace(delim, " ") + hint = hint.replace(delim, " ") # loop through each hint - for hint in hints.strip().split(): + for h in hint.strip().split(): # get rid of keyword and whitespace - hint = hint.replace("units", "").replace("unit", "").strip() + h = h.replace("units", "").replace("unit", "").strip() # if the hint is a valid unit return it - if hint in _lookup: - return hint + if h in _lookup: + return h if not guess: - raise ValueError("no units and not allowed to guess") + raise ValueError("No units and not allowed to guess!") # we made it to the wild ass guess section # if the scale is larger than 100 mystery units diff --git a/trimesh/util.py b/trimesh/util.py index 38fdbd7b2..e9cda96d2 
100644
--- a/trimesh/util.py
+++ b/trimesh/util.py
@@ -5,7 +5,6 @@
 import abc
 import base64
 import collections
-import copy
 import json
 import logging
 import random
@@ -15,9 +14,8 @@
 import uuid
 import warnings
 import zipfile
-
-# for type checking
 from collections.abc import Mapping
+from copy import deepcopy
 from io import BytesIO, StringIO

 import numpy as np
@@ -25,7 +23,7 @@
 from .iteration import chain

 # use our wrapped types for wider version compatibility
-from .typed import Dict, Iterable, Optional, Set, Union
+from .typed import ArrayLike, Dict, Iterable, NDArray, Optional, Set, Union, float64

 # create a default logger
 log = logging.getLogger("trimesh")
@@ -443,7 +441,7 @@ def vector_to_spherical(cartesian):
     return spherical


-def spherical_to_vector(spherical):
+def spherical_to_vector(spherical: ArrayLike) -> NDArray[float64]:
     """
     Convert an array of `(n, 2)` spherical angles to `(n, 3)` unit vectors.

@@ -1281,13 +1279,13 @@ def comment_strip(text, starts_with="#", new_line="\n"):
     return result


-def encoded_to_array(encoded):
+def encoded_to_array(encoded: Union[Dict, ArrayLike]) -> NDArray:
     """
     Turn a dictionary with base64 encoded strings back
     into a numpy array.

     Parameters
     ------------
-    encoded : dict
+    encoded
       Has keys:
         dtype: string of dtype
         shape: int tuple of shape
@@ -1296,7 +1294,7 @@

     Returns
     ----------
-    array: numpy array
+    array

     """
     if not isinstance(encoded, dict):
@@ -1465,6 +1463,17 @@ def concatenate(
         if _STRICT:
             raise E

+    metadata = {}
+    try:
+        [metadata.update(deepcopy(m.metadata)) for m in is_mesh]
+    except BaseException:
+        pass
+
+    try:
+        source = deepcopy(is_mesh[0].source)
+    except BaseException:
+        source = None
+
     # create the mesh object
     return trimesh_type(
         vertices=vertices,
@@ -1472,6 +1481,8 @@
         face_normals=face_normals,
         vertex_normals=vertex_normals,
         visual=visual,
+        metadata=metadata,
+        source=source,
         process=False,
     )

@@ -1569,8 +1580,11 @@ def submesh(
             faces=faces,
             face_normals=np.vstack(normals),
             visual=visual,
+            metadata=deepcopy(mesh.metadata),
+            source=deepcopy(mesh.source),
             process=False,
         )
+
         return appended

     if visuals is None:
@@ -1583,7 +1597,8 @@ def submesh(
             faces=f,
             face_normals=n,
             visual=c,
-            metadata=copy.deepcopy(mesh.metadata),
+            metadata=deepcopy(mesh.metadata),
+            source=deepcopy(mesh.source),
             process=False,
         )
         for v, f, n, c in zip(vertices, faces, normals, visuals)
@@ -1867,7 +1882,9 @@ def decompress(file_obj, file_type):
     if file_type.endswith("bz2"):
         import bz2

-        return {file_obj.name[:-4]: wrap_as_stream(bz2.open(file_obj, mode="r").read())}
+        # strip ".bz2" from the file name, defaulting to "archive"
+        # when there is no name ("archive1234"[:-4] == "archive")
+        name = getattr(file_obj, "name", "archive1234")[:-4]
+        return {name: wrap_as_stream(bz2.open(file_obj, mode="r").read())}

     if "tar" in file_type[-6:]:
         import tarfile
diff --git a/trimesh/visual/color.py b/trimesh/visual/color.py
index d0c9f3649..0df875ee0 100644
--- a/trimesh/visual/color.py
+++ b/trimesh/visual/color.py
@@ -22,7 +22,6 @@
 and setting or altering a value should automatically change the mode.
 """

-import colorsys
 import copy

 import numpy as np
@@ -30,7 +29,7 @@
 from ..
import caching, util
 from ..constants import tol
 from ..grouping import unique_rows
-from ..typed import ArrayLike, NDArray
+from ..typed import ArrayLike, DTypeLike, Integer, NDArray, Optional
 from .base import Visuals


@@ -648,26 +647,97 @@ def hex_to_rgba(color):
     return rgba


-def random_color(dtype=np.uint8):
+def hsv_to_rgba(hsv: ArrayLike, dtype: DTypeLike = np.uint8) -> NDArray:
+    """
+    Convert an (n, 3) array of 0.0-1.0 HSV colors into an
+    array of RGBA colors.
+
+    A vectorized implementation that matches `colorsys.hsv_to_rgb`.
+
+    Parameters
+    -----------
+    hsv
+      Should be `(n, 3)` array of 0.0-1.0 values.
+
+    Returns
+    ------------
+    rgba
+      An (n, 4) array of RGBA colors.
+    """
+
+    hsv = np.array(hsv, dtype=np.float64)
+    if len(hsv.shape) != 2 or hsv.shape[1] != 3:
+        raise ValueError("(n, 3) values of HSV are required")
+
+    # expand into flat arrays for each of
+    # hue, saturation, and value
+    H, S, V = hsv.T
+
+    # chroma
+    C = S * V
+    # check which case we fall into
+    Hi = H * 6.0
+    X = C * (1.0 - np.abs((Hi % 2.0) - 1.0))
+    # use a lookup table keyed by the integer sector to match
+    # the cases specified on the wikipedia article
+    # these index into (C, X, 0): 0 = C, 1 = X, 2 = 0.0
+    LUT = np.array(
+        [[0, 1, 2], [1, 0, 2], [2, 0, 1], [2, 1, 0], [1, 2, 0], [0, 2, 1]], dtype=np.int64
+    )
+
+    # stack values we need so we can access them with the lookup table
+    stacked = np.column_stack((C, X, np.zeros_like(X)))
+    # get the indexes per-row
+    indexes = LUT[Hi.astype(np.int64)]
+    # multiply them by the column count so we can use them on a flat array
+    indexes_flat = (np.arange(len(indexes)) * 3).reshape((-1, 1)) + indexes
+
+    # get the intermediate point along the bottom three faces of the RGB cube
+    RGBi = stacked.ravel()[indexes_flat]
+
+    # stack it into the final RGBA array
+    RGBA = np.column_stack((RGBi + (V - C).reshape((-1, 1)), np.ones(len(H))))
+
+    # now check the return type and do what's necessary
+    dtype = np.dtype(dtype)
+    if dtype.kind == "f":
+        return RGBA.astype(dtype)
+    elif dtype.kind in "iu":
+        return (RGBA * np.iinfo(dtype).max).round().astype(dtype)
+
+    raise ValueError(f"dtype `{dtype}` not supported")
+
+
+def random_color(dtype: DTypeLike = np.uint8, count: Optional[Integer] = None):
     """
     Return a random RGB color using datatype specified.

     Parameters
     ----------
-    dtype: numpy dtype of result
+    dtype
+      Color type of result.
+    count
+      If passed, return (count, 4) colors instead of
+      a single (4,) color.
Returns ---------- - color: (4,) dtype, random color that looks OK + color : (4,) or (count, 4) + Random color or colors that look "OK" + """ - hue = np.random.random() + 0.61803 - hue %= 1.0 - color = np.array(colorsys.hsv_to_rgb(hue, 0.99, 0.99)) - if np.dtype(dtype).kind in "iu": - max_value = (2 ** (np.dtype(dtype).itemsize * 8)) - 1 - color *= max_value - color = np.append(color, max_value).astype(dtype) - return color + # generate a random hue + hue = (np.random.random(count or 1) + 0.61803) % 1.0 + + # saturation and "value" as constant + sv = np.ones_like(hue) * 0.99 + # convert our random hue to RGBA + colors = hsv_to_rgba(np.column_stack((hue, sv, sv))) + + # unspecified count is a single color + if count is None: + return colors[0] + return colors def vertex_to_face_color(vertex_colors, faces): @@ -799,7 +869,9 @@ def linear_color_map(values, color_range=None): return colors -def interpolate(values, color_map=None, dtype=np.uint8): +def interpolate( + values: ArrayLike, color_map: Optional[str] = None, dtype: DTypeLike = np.uint8 +): """ Given a 1D list of values, return interpolated colors for the range. @@ -829,15 +901,22 @@ def interpolate(values, color_map=None, dtype=np.uint8): # make input always float values = np.asanyarray(values, dtype=np.float64).ravel() + # offset to zero + values -= values.min() + # get the value range to avoid dividing by zero + values_ptp = np.ptp(values) + if values_ptp > 0.0: + values /= values_ptp + # scale values to 0.0 - 1.0 and get colors - colors = cmap((values - values.min()) / np.ptp(values)) + colors = cmap(values) # convert to 0-255 RGBA rgba = to_rgba(colors, dtype=dtype) return rgba -def uv_to_color(uv, image): +def uv_to_color(uv, image) -> NDArray[np.uint8]: """ Get the color in a texture image. @@ -866,7 +945,8 @@ def uv_to_color(uv, image): # access colors from pixel locations # make sure image is RGBA before getting values colors = np.asanyarray(image.convert("RGBA"))[ - y.round().astype(np.int64), x.round().astype(np.int64) + y.round().astype(np.int64) % image.height, + x.round().astype(np.int64) % image.width, ] # conversion to RGBA should have corrected shape @@ -876,7 +956,7 @@ def uv_to_color(uv, image): return colors -def uv_to_interpolated_color(uv, image): +def uv_to_interpolated_color(uv, image) -> NDArray[np.uint8]: """ Get the color from texture image using bilinear sampling.
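Since `hsv_to_rgba` claims to match `colorsys.hsv_to_rgb`, a quick property check against the standard library is easy to run; this sketch assumes the patched trimesh is importable:

    import colorsys

    import numpy as np

    from trimesh.visual.color import hsv_to_rgba

    # random HSV triples in [0, 1); hue == 1.0 is deliberately excluded
    hsv = np.random.random((100, 3))

    # the vectorized result, keeping RGB and dropping the alpha column
    vectorized = hsv_to_rgba(hsv, dtype=np.float64)[:, :3]
    # the scalar reference implementation from the standard library
    reference = np.array([colorsys.hsv_to_rgb(*row) for row in hsv])

    assert np.allclose(vectorized, reference)

The same kind of spot check applies to the `interpolate` change above: a constant-valued input now takes the `np.ptp(values) == 0` branch instead of dividing by zero.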