From c370b1d4c133c9d154df6616d40f3786575ea34c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?J=C3=A9r=C3=A9mie=20Dumas?= Date: Thu, 30 May 2024 15:19:44 -0700 Subject: [PATCH] Sync with 467e6b8 (#35) --- CMakeLists.txt | 14 +++ VERSION | 2 +- cmake/recipes/external/blosc.cmake | 37 ++++++ cmake/recipes/external/libfive.cmake | 4 +- cmake/recipes/external/nanobind.cmake | 2 +- cmake/recipes/external/opensubdiv.cmake | 114 ++++++++++-------- cmake/recipes/external/openvdb.cmake | 18 ++- cmake/recipes/external/spdlog.cmake | 3 +- cmake/recipes/external/tbb.cmake | 11 +- cmake/recipes/external/tracy.cmake | 2 +- modules/bvh/include/lagrange/bvh/create_BVH.h | 3 - .../core/include/lagrange/utils/fmt_eigen.h | 6 +- modules/core/python/scripts/meshconvert.py | 8 +- modules/core/python/scripts/meshstat.py | 15 +-- modules/core/python/src/bind_surface_mesh.h | 26 +++- modules/core/python/src/tensor_utils.cpp | 2 +- .../core/python/tests/test_cast_attribute.py | 4 +- .../core/python/tests/test_combine_meshes.py | 4 +- .../tests/test_compute_dihedral_angles.py | 4 +- .../python/tests/test_compute_edge_lengths.py | 4 +- .../python/tests/test_filter_attributes.py | 4 +- .../python/tests/test_permute_vertices.py | 4 +- .../lagrange/io/legacy/load_mesh_assimp.h | 6 +- .../lagrange/io/legacy/load_mesh_ply.h | 4 +- modules/io/python/tests/test_io.py | 12 +- modules/python/CMakeLists.txt | 25 +++- modules/python/lagrange/__init__.py | 3 +- modules/python/lagrange/_logging.py | 4 +- .../include/lagrange/scene/RemeshingOptions.h | 4 + .../lagrange/scene/internal/bake_scaling.h | 54 +++++++++ .../scene/python/scripts/extract_texture.py | 10 +- modules/scene/python/src/bind_value.h | 1 - .../scene/python/tests/test_simple_scene.py | 4 +- modules/scene/src/internal/bake_scaling.cpp | 96 +++++++++++++++ pyproject.toml | 28 ++++- 35 files changed, 396 insertions(+), 146 deletions(-) create mode 100644 cmake/recipes/external/blosc.cmake create mode 100644 
modules/scene/include/lagrange/scene/internal/bake_scaling.h create mode 100644 modules/scene/src/internal/bake_scaling.cpp diff --git a/CMakeLists.txt b/CMakeLists.txt index 4cae4f74..77983958 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -32,6 +32,7 @@ endif() cmake_policy(SET CMP0054 NEW) # Only interpret if() arguments as variables or keywords when unquoted. cmake_policy(SET CMP0076 NEW) # target_sources() command converts relative paths to absolute. set(CMAKE_POLICY_DEFAULT_CMP0063 NEW) # Honor visibility properties for all target types. +set(CMAKE_POLICY_DEFAULT_CMP0069 NEW) # INTERPROCEDURAL_OPTIMIZATION is enforced when enabled. set(CMAKE_POLICY_DEFAULT_CMP0077 NEW) # Avoid overriding normal variables with option() set(CMAKE_POLICY_DEFAULT_CMP0091 NEW) # MSVC runtime library flags are selected by an abstraction. set(CMAKE_POLICY_DEFAULT_CMP0126 NEW) # Avoid overriding normal variables with set(CACHE) @@ -272,6 +273,19 @@ if(LAGRANGE_MODULE_ANORIGAMI OR LAGRANGE_MODULE_MESHPROC OR LAGRANGE_MODULE_CONT endif() endif() +if(LAGRANGE_TOPLEVEL_PROJECT) + # Enable Blosc/Zlib for OpenVDB in toplevel builds + option(USE_BLOSC "" ON) + option(USE_ZLIB "" ON) + + # ASM (used by Blosc) and MASM (used by legacy TBB) may be needed in the same build, so we enable + # them both at the top-level, otherwise this CMake error can be triggered: + # https://gitlab.kitware.com/cmake/cmake/-/issues/25042 + if(MSVC) + enable_language(ASM ASM_MASM) + endif() +endif() + # On Linux & Windows we use MKL to provide BLAS/LAPACK. Since it comes precompiled with /MD on Windows, # we need to use the MSVC runtime library flag globally for the whole project. 
file(READ "cmake/lagrange/lagrangeMklModules.txt" LAGRANGE_MKL_MODULES) diff --git a/VERSION b/VERSION index fe675047..0b31cc63 100644 --- a/VERSION +++ b/VERSION @@ -1 +1 @@ -6.22.0 +6.23.0 diff --git a/cmake/recipes/external/blosc.cmake b/cmake/recipes/external/blosc.cmake new file mode 100644 index 00000000..ee0700d3 --- /dev/null +++ b/cmake/recipes/external/blosc.cmake @@ -0,0 +1,37 @@ +# +# Copyright 2024 Adobe. All rights reserved. +# This file is licensed to you under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. You may obtain a copy +# of the License at http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software distributed under +# the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR REPRESENTATIONS +# OF ANY KIND, either express or implied. See the License for the specific language +# governing permissions and limitations under the License. +# +if(TARGET Blosc::blosc) + return() +endif() + +message(STATUS "Third-party (external): creating target 'Blosc::blosc'") + +# TODO: Use external zlib (via miniz) +# option(PREFER_EXTERNAL_ZLIB "Find and use external Zlib library instead of included sources." 
ON) +# include(miniz) + +block() + set(BUILD_TESTS OFF) + + include(CPM) + CPMAddPackage( + NAME blosc + GITHUB_REPOSITORY Blosc/c-blosc + GIT_TAG v1.21.5 + ) +endblock() + +set_target_properties(blosc_static PROPERTIES POSITION_INDEPENDENT_CODE ON) + +if(NOT TARGET Blosc::blosc) + add_library(Blosc::blosc ALIAS blosc_static) +endif() diff --git a/cmake/recipes/external/libfive.cmake b/cmake/recipes/external/libfive.cmake index 76356dc7..681db3e9 100644 --- a/cmake/recipes/external/libfive.cmake +++ b/cmake/recipes/external/libfive.cmake @@ -15,8 +15,6 @@ endif() message(STATUS "Third-party (external): creating target 'libfive::libfive'") -set(LIBFIVE_HASH 248c15c57abd2b1b9ea0e05d0a40f579d225f00f) - include(eigen) include(boost) @@ -24,7 +22,7 @@ include(CPM) CPMAddPackage( NAME libfive GITHUB_REPOSITORY libfive/libfive - GIT_TAG ${LIBFIVE_HASH} + GIT_TAG 248c15c57abd2b1b9ea0e05d0a40f579d225f00f DOWNLOAD_ONLY ON ) diff --git a/cmake/recipes/external/nanobind.cmake b/cmake/recipes/external/nanobind.cmake index 22ab804a..ca5a97b6 100644 --- a/cmake/recipes/external/nanobind.cmake +++ b/cmake/recipes/external/nanobind.cmake @@ -19,7 +19,7 @@ include(CPM) CPMAddPackage( NAME nanobind GITHUB_REPOSITORY wjakob/nanobind - GIT_TAG v1.8.0 + GIT_TAG v2.0.0 DOWNLOAD_ONLY ON ) diff --git a/cmake/recipes/external/opensubdiv.cmake b/cmake/recipes/external/opensubdiv.cmake index 2810032d..d98beb73 100644 --- a/cmake/recipes/external/opensubdiv.cmake +++ b/cmake/recipes/external/opensubdiv.cmake @@ -15,61 +15,71 @@ endif() message(STATUS "Third-party (external): creating target 'opensubdiv::opensubdiv'") -include(CPM) -CPMAddPackage( - NAME opensubdiv - GITHUB_REPOSITORY PixarAnimationStudios/OpenSubdiv - GIT_TAG tags/v3_4_4 - DOWNLOAD_ONLY ON -) +block() + set(NO_EXAMPLES ON) + set(NO_TUTORIALS ON) + set(NO_REGRESSION ON) + set(NO_PTEX ON) + set(NO_DOC ON) + set(NO_OMP ON) + set(NO_TBB ON) + set(NO_CUDA ON) + set(NO_OPENCL ON) + set(NO_CLEW ON) + set(NO_OPENGL ON) + set(NO_METAL 
ON) + set(NO_DX ON) + set(NO_TESTS ON) + set(NO_GLTESTS ON) + set(NO_GLEW ON) + set(NO_GLFW ON) + set(NO_GLFW_X11 ON) + set(NO_MACOS_FRAMEWORK ON) -# TODO: Use upstream CMake + Enable TBB -add_library(opensubdiv) -add_library(opensubdiv::opensubdiv ALIAS opensubdiv) + # We trick OpenSubdiv's CMake into _not_ calling `find_package(TBB)` by setting `TBB_FOUND` to `ON`. + set(TBB_FOUND ON) + set(TBB_CXX_FLAGS "") + include(tbb) -set_target_properties(opensubdiv PROPERTIES FOLDER third_party) - -include(GNUInstallDirs) -target_include_directories(opensubdiv SYSTEM PUBLIC - $ - $ -) - -if(CMAKE_HOST_WIN32) - target_compile_definitions(opensubdiv PUBLIC _USE_MATH_DEFINES) -endif() - -set_target_properties(opensubdiv PROPERTIES POSITION_INDEPENDENT_CODE ON) + include(CPM) + CPMAddPackage( + NAME opensubdiv + GITHUB_REPOSITORY PixarAnimationStudios/OpenSubdiv + GIT_TAG v3_6_0 + ) -file(GLOB SRC_FILES - "${opensubdiv_SOURCE_DIR}/opensubdiv/far/*.h" - "${opensubdiv_SOURCE_DIR}/opensubdiv/far/*.cpp" - "${opensubdiv_SOURCE_DIR}/opensubdiv/sdc/*.h" - "${opensubdiv_SOURCE_DIR}/opensubdiv/sdc/*.cpp" - "${opensubdiv_SOURCE_DIR}/opensubdiv/vtr/*.h" - "${opensubdiv_SOURCE_DIR}/opensubdiv/vtr/*.cpp" -) -source_group( - TREE "${opensubdiv_SOURCE_DIR}/opensubdiv/" - FILES ${SRC_FILES} -) -target_sources(opensubdiv PRIVATE ${SRC_FILES}) + # Note: OpenSubdiv doesn't support being compiled as a shared library on Windows: + # https://github.com/PixarAnimationStudios/OpenSubdiv/issues/71 + if(BUILD_SHARED_LIBS AND TARGET osd_dynamic_cpu) + add_library(opensubdiv::opensubdiv ALIAS osd_dynamic_cpu) + set(OPENSUBDIV_TARGET osd_dynamic_cpu) + else() + add_library(opensubdiv::opensubdiv ALIAS osd_static_cpu) + set(OPENSUBDIV_TARGET osd_static_cpu) + endif() -if("${CMAKE_CXX_COMPILER_ID}" STREQUAL "AppleClang" OR - "${CMAKE_CXX_COMPILER_ID}" STREQUAL "Clang") - target_compile_options(opensubdiv PRIVATE - "-Wno-unused-function" - ) -elseif("${CMAKE_CXX_COMPILER_ID}" STREQUAL "GNU") - 
target_compile_options(opensubdiv PRIVATE - "-Wno-class-memaccess" - "-Wno-cast-function-type" - "-Wno-strict-aliasing" + # OpenSubdiv's code uses relative header include paths, and fails to properly set a transitive include directory + # that propagates to dependent targets, so we need to set it up manually. + include(GNUInstallDirs) + target_include_directories(${OPENSUBDIV_TARGET} SYSTEM PUBLIC + $ + $ ) -endif() -# Install rules -set(CMAKE_INSTALL_DEFAULT_COMPONENT_NAME opensubdiv) -install(DIRECTORY ${opensubdiv_SOURCE_DIR}/opensubdiv DESTINATION ${CMAKE_INSTALL_INCLUDEDIR}) -install(TARGETS opensubdiv EXPORT Opensubdiv_Targets) -install(EXPORT Opensubdiv_Targets DESTINATION ${CMAKE_INSTALL_LIBDIR}/cmake/opensubdiv NAMESPACE opensubdiv::) + # Set folders for MSVC + foreach(name IN ITEMS bfr_obj far_obj osd_cpu_obj osd_static_cpu sdc_obj vtr_obj) + if(TARGET ${name}) + set_target_properties(${name} PROPERTIES FOLDER third_party/opensubdiv/opensubdiv) + endif() + endforeach() + foreach(name IN ITEMS regression_common_obj regression_far_utils_obj) + if(TARGET ${name}) + set_target_properties(${name} PROPERTIES FOLDER third_party/opensubdiv/regression) + endif() + endforeach() + foreach(name IN ITEMS public_headers) + if(TARGET ${name}) + set_target_properties(${name} PROPERTIES FOLDER third_party/opensubdiv/public_headers) + endif() + endforeach() +endblock() diff --git a/cmake/recipes/external/openvdb.cmake b/cmake/recipes/external/openvdb.cmake index 90c0ed9a..a2aec1f9 100644 --- a/cmake/recipes/external/openvdb.cmake +++ b/cmake/recipes/external/openvdb.cmake @@ -34,8 +34,8 @@ include(CMakeDependentOption) cmake_dependent_option(OPENVDB_INSTALL_CMAKE_MODULES "" OFF "OPENVDB_BUILD_CORE" OFF) # TODO: Enable Blosc/Zlib -option(USE_BLOSC "" OFF) # maybe later -option(USE_ZLIB "" OFF) # maybe later +option(USE_BLOSC "" OFF) +option(USE_ZLIB "" OFF) option(USE_LOG4CPLUS "" OFF) # maybe later option(USE_EXR "" OFF) option(USE_CCACHE "" OFF) @@ -114,6 +114,20 @@ 
function(openvdb_import_target) ignore_package(TBB) ignore_package(Boost) + if(USE_ZLIB) + ignore_package(ZLIB) + include(miniz) + if(NOT TARGET ZLIB::ZLIB) + get_target_property(_aliased miniz::miniz ALIASED_TARGET) + add_library(ZLIB::ZLIB ALIAS ${_aliased}) + endif() + endif() + + if(USE_BLOSC) + include(blosc) + ignore_package(Blosc) + endif() + # Ready to include openvdb CMake include(CPM) CPMAddPackage( diff --git a/cmake/recipes/external/spdlog.cmake b/cmake/recipes/external/spdlog.cmake index 310d8188..83f6d35c 100644 --- a/cmake/recipes/external/spdlog.cmake +++ b/cmake/recipes/external/spdlog.cmake @@ -25,6 +25,7 @@ endif() set(CMAKE_INSTALL_DEFAULT_COMPONENT_NAME "spdlog") # Versions of fmt bundled with spdlog: +# - spdlog 1.14.1 -> fmt 10.2.1 # - spdlog 1.13.0 -> fmt 9.1.0 # - spdlog 1.12.0 -> fmt 9.1.0 # - spdlog 1.11.0 -> fmt 9.1.0 @@ -33,7 +34,7 @@ include(CPM) CPMAddPackage( NAME spdlog GITHUB_REPOSITORY gabime/spdlog - GIT_TAG v1.13.0 + GIT_TAG v1.14.1 ) set_target_properties(spdlog PROPERTIES POSITION_INDEPENDENT_CODE ON) diff --git a/cmake/recipes/external/tbb.cmake b/cmake/recipes/external/tbb.cmake index 64fd9e1e..e6ead952 100644 --- a/cmake/recipes/external/tbb.cmake +++ b/cmake/recipes/external/tbb.cmake @@ -141,9 +141,16 @@ endif() install(TARGETS tbb_tbb EXPORT TBB) # Set -fPIC flag and IDE folder name for tbb targets -foreach(name IN ITEMS tbb_def_files tbb_static tbb tbbmalloc tbbmalloc_static tbb_tbb) +foreach(name IN ITEMS tbb_def_files tbb_static tbb tbbmalloc tbbmalloc_static tbbmalloc_proxy tbb_tbb) if(TARGET ${name}) set_target_properties(${name} PROPERTIES POSITION_INDEPENDENT_CODE ON) - set_target_properties(${name} PROPERTIES FOLDER third_party) + set_target_properties(${name} PROPERTIES FOLDER third_party/tbb) endif() endforeach() + +# Silence some compiler warnings +if("${CMAKE_CXX_COMPILER_ID}" STREQUAL "GNU") + target_compile_options(tbb PRIVATE + "-Wno-class-memaccess" + ) +endif() diff --git 
a/cmake/recipes/external/tracy.cmake b/cmake/recipes/external/tracy.cmake index 2906e510..c710ce1e 100644 --- a/cmake/recipes/external/tracy.cmake +++ b/cmake/recipes/external/tracy.cmake @@ -19,5 +19,5 @@ include(CPM) CPMAddPackage( NAME tracy GITHUB_REPOSITORY wolfpld/tracy - GIT_TAG v0.9.1 + GIT_TAG v0.10 ) diff --git a/modules/bvh/include/lagrange/bvh/create_BVH.h b/modules/bvh/include/lagrange/bvh/create_BVH.h index 302ea325..f645d819 100644 --- a/modules/bvh/include/lagrange/bvh/create_BVH.h +++ b/modules/bvh/include/lagrange/bvh/create_BVH.h @@ -45,9 +45,6 @@ std::unique_ptr> create_BVH( // default: // throw std::runtime_error("Unsupported BVH engine type: " + bvhtype_to_string(engine_type)); } - - // Don't complain dear compiler - return nullptr; } /** diff --git a/modules/core/include/lagrange/utils/fmt_eigen.h b/modules/core/include/lagrange/utils/fmt_eigen.h index f70ee790..74797a07 100644 --- a/modules/core/include/lagrange/utils/fmt_eigen.h +++ b/modules/core/include/lagrange/utils/fmt_eigen.h @@ -63,7 +63,9 @@ struct fmt::formatter, T for (Eigen::Index ic = 0; ic < a.cols(); ic++) { out = fmt::format_to(out, "{} ", this->nested(a(ir, ic))); } - out = fmt::format_to(out, "\n"); + if (ir + 1 < a.rows()) { + out = fmt::format_to(out, "\n"); + } } return out; }); @@ -148,7 +150,7 @@ struct fmt::is_range< #include #include #include - // clang-format on +// clang-format on template struct fmt::is_range< diff --git a/modules/core/python/scripts/meshconvert.py b/modules/core/python/scripts/meshconvert.py index c10972a4..01dc150e 100755 --- a/modules/core/python/scripts/meshconvert.py +++ b/modules/core/python/scripts/meshconvert.py @@ -6,14 +6,10 @@ def parse_args(): - parser = argparse.ArgumentParser( - description="Convert a mesh file to a different format." 
- ) + parser = argparse.ArgumentParser(description="Convert a mesh file to a different format.") parser.add_argument("input_mesh", help="input mesh file") parser.add_argument("output_mesh", help="output mesh file") - parser.add_argument( - "--triangulate", "-t", action="store_true", help="triangulate the mesh" - ) + parser.add_argument("--triangulate", "-t", action="store_true", help="triangulate the mesh") parser.add_argument( "--logging-level", "-l", diff --git a/modules/core/python/scripts/meshstat.py b/modules/core/python/scripts/meshstat.py index ccdb267a..7f88d4aa 100755 --- a/modules/core/python/scripts/meshstat.py +++ b/modules/core/python/scripts/meshstat.py @@ -12,12 +12,7 @@ def print_header(message): - print( - colorama.Fore.YELLOW - + colorama.Style.BRIGHT - + message - + colorama.Style.RESET_ALL - ) + print(colorama.Fore.YELLOW + colorama.Style.BRIGHT + message + colorama.Style.RESET_ALL) def print_green(message): @@ -52,9 +47,7 @@ def print_basic_info(mesh, info): info["num_facets"] = num_facets info["num_edges"] = num_edges info["num_corners"] = num_corners - print( - f"#v: {num_vertices:<10}#f: {num_facets:<10}#e: {num_edges:<10}#c: {num_corners:<10}" - ) + print(f"#v: {num_vertices:<10}#f: {num_facets:<10}#e: {num_edges:<10}#c: {num_corners:<10}") # Mesh bbox bbox_min = np.amin(mesh.vertices, axis=0) @@ -126,9 +119,7 @@ def print_attributes(mesh): num_channels = attr.num_channels print(f"Attribute {colorama.Fore.GREEN}{name}{colorama.Style.RESET_ALL}") - print( - f" id:{id:<5}usage: {usage:<10}elem: {element_type:<10}channels: {num_channels}" - ) + print(f" id:{id:<5}usage: {usage:<10}elem: {element_type:<10}channels: {num_channels}") def load_info(mesh_file): diff --git a/modules/core/python/src/bind_surface_mesh.h b/modules/core/python/src/bind_surface_mesh.h index 2e62de36..0e71b196 100644 --- a/modules/core/python/src/bind_surface_mesh.h +++ b/modules/core/python/src/bind_surface_mesh.h @@ -310,7 +310,8 @@ void 
bind_surface_mesh(nanobind::module_& m) } else if (self.has_edges() && num_elements == self.get_num_edges()) { elem_type = AttributeElement::Edge; } else { - throw nb::type_error("Cannot infer attribute element type from initial_values!"); + throw nb::type_error( + "Cannot infer attribute element type from initial_values!"); } } @@ -591,7 +592,19 @@ void bind_surface_mesh(nanobind::module_& m) surface_mesh_class.def( "delete_attribute", [](MeshType& self, std::string_view name) { self.delete_attribute(name); }, - "name"_a); + "name"_a, + R"(Delete an attribute by name. + +:param name: Name of the attribute. +:type name: str)"); + surface_mesh_class.def( + "delete_attribute", + [](MeshType& self, AttributeId id) { self.delete_attribute(self.get_attribute_name(id)); }, + "id"_a, + R"(Delete an attribute by id. + +:param id: Id of the attribute. +:type id: AttributeId)"); surface_mesh_class.def("has_attribute", &MeshType::has_attribute); surface_mesh_class.def( "is_attribute_indexed", @@ -956,15 +969,18 @@ If not provided, the edges are initialized in an arbitrary order. 
surface_mesh_class.def( "get_matching_attribute_ids", - [](MeshType& self, AttributeElement* element, AttributeUsage* usage, Index num_channels) { + [](MeshType& self, + std::optional element, + std::optional usage, + Index num_channels) { std::vector attr_ids; attr_ids.reserve(4); self.seq_foreach_attribute_id([&](AttributeId attr_id) { const auto name = self.get_attribute_name(attr_id); if (self.attr_name_is_reserved(name)) return; const auto& attr = self.get_attribute_base(attr_id); - if (element != nullptr && attr.get_element_type() != *element) return; - if (usage != nullptr && attr.get_usage() != *usage) return; + if (element && attr.get_element_type() != *element) return; + if (usage && attr.get_usage() != *usage) return; if (num_channels != 0 && attr.get_num_channels() != num_channels) return; attr_ids.push_back(attr_id); }); diff --git a/modules/core/python/src/tensor_utils.cpp b/modules/core/python/src/tensor_utils.cpp index 89b479fb..7610b362 100644 --- a/modules/core/python/src/tensor_utils.cpp +++ b/modules/core/python/src/tensor_utils.cpp @@ -36,7 +36,7 @@ Tensor create_empty_tensor() // Tensor object must have a non-null data point. 
static ValueType dummy_data; size_t shape[] = {0}; - return {&dummy_data, 1, shape}; + return {&dummy_data, 1, shape, nb::handle()}; } bool is_vector(const Shape& shape) diff --git a/modules/core/python/tests/test_cast_attribute.py b/modules/core/python/tests/test_cast_attribute.py index 381d3cda..ed631379 100644 --- a/modules/core/python/tests/test_cast_attribute.py +++ b/modules/core/python/tests/test_cast_attribute.py @@ -36,9 +36,7 @@ def test_simple(self, single_triangle): assert mesh.attribute("test").dtype == np.uint8 # Cast to another attribute with type int32 - attr_id = lagrange.cast_attribute( - mesh, "test", np.int32, output_attribute_name="test2" - ) + attr_id = lagrange.cast_attribute(mesh, "test", np.int32, output_attribute_name="test2") assert mesh.get_attribute_name(attr_id) == "test2" assert mesh.attribute("test2").dtype == np.int32 assert mesh.attribute("test").dtype == np.uint8 # Same as before diff --git a/modules/core/python/tests/test_combine_meshes.py b/modules/core/python/tests/test_combine_meshes.py index 33b3b670..1248af94 100644 --- a/modules/core/python/tests/test_combine_meshes.py +++ b/modules/core/python/tests/test_combine_meshes.py @@ -34,9 +34,7 @@ def test_two_meshes(self, cube): mesh2 = lagrange.SurfaceMesh() mesh2.vertices = mesh1.vertices + 10 facets = mesh1.facets - mesh2.facets = np.copy( - facets - ) # A copy is needed because facets is not writable. + mesh2.facets = np.copy(facets) # A copy is needed because facets is not writable. 
out = lagrange.combine_meshes([mesh1, mesh2], True) assert np.all(out.vertices[:8] == mesh1.vertices) diff --git a/modules/core/python/tests/test_compute_dihedral_angles.py b/modules/core/python/tests/test_compute_dihedral_angles.py index abaa4fed..76946883 100644 --- a/modules/core/python/tests/test_compute_dihedral_angles.py +++ b/modules/core/python/tests/test_compute_dihedral_angles.py @@ -20,9 +20,7 @@ class TestComputeDihedralAngles: def test_cube(self, cube): mesh = cube - attr_id = lagrange.compute_dihedral_angles( - mesh, output_attribute_name="dihedral_angles" - ) + attr_id = lagrange.compute_dihedral_angles(mesh, output_attribute_name="dihedral_angles") assert mesh.has_attribute("dihedral_angles") assert mesh.get_attribute_name(attr_id) == "dihedral_angles" diff --git a/modules/core/python/tests/test_compute_edge_lengths.py b/modules/core/python/tests/test_compute_edge_lengths.py index 3be7338d..c080df16 100644 --- a/modules/core/python/tests/test_compute_edge_lengths.py +++ b/modules/core/python/tests/test_compute_edge_lengths.py @@ -20,9 +20,7 @@ class TestComputeEdgeLengths: def test_cube(self, cube): mesh = cube - attr_id = lagrange.compute_edge_lengths( - mesh, output_attribute_name="edge_lengths" - ) + attr_id = lagrange.compute_edge_lengths(mesh, output_attribute_name="edge_lengths") assert mesh.has_attribute("edge_lengths") assert attr_id == mesh.get_attribute_id("edge_lengths") diff --git a/modules/core/python/tests/test_filter_attributes.py b/modules/core/python/tests/test_filter_attributes.py index 8bdf7492..10696934 100644 --- a/modules/core/python/tests/test_filter_attributes.py +++ b/modules/core/python/tests/test_filter_attributes.py @@ -35,9 +35,7 @@ def test_excluded(self, cube_with_uv): def test_usage(self, cube_with_uv): mesh = cube_with_uv - mesh2 = lagrange.filter_attributes( - mesh, included_usages=[lagrange.AttributeUsage.UV] - ) + mesh2 = lagrange.filter_attributes(mesh, included_usages=[lagrange.AttributeUsage.UV]) assert 
mesh2.has_attribute("uv") mesh2 = lagrange.filter_attributes(mesh, included_usages=[]) diff --git a/modules/core/python/tests/test_permute_vertices.py b/modules/core/python/tests/test_permute_vertices.py index 62956f4d..33daec90 100644 --- a/modules/core/python/tests/test_permute_vertices.py +++ b/modules/core/python/tests/test_permute_vertices.py @@ -65,9 +65,7 @@ def test_with_uv(self, cube_with_uv): # Corner index should be unchnaged. corner_index_attr = mesh.attribute("corner_index") - assert np.all( - corner_index_attr.data == np.arange(mesh.num_corners, dtype=np.intc) - ) + assert np.all(corner_index_attr.data == np.arange(mesh.num_corners, dtype=np.intc)) for i in range(mesh.num_vertices): ci = mesh.get_first_corner_around_vertex(i) diff --git a/modules/io/include/lagrange/io/legacy/load_mesh_assimp.h b/modules/io/include/lagrange/io/legacy/load_mesh_assimp.h index 485c0cfd..529fd575 100644 --- a/modules/io/include/lagrange/io/legacy/load_mesh_assimp.h +++ b/modules/io/include/lagrange/io/legacy/load_mesh_assimp.h @@ -38,7 +38,7 @@ namespace legacy { std::unique_ptr load_scene_assimp(const lagrange::fs::path& filename); std::unique_ptr load_scene_assimp_from_memory(const void* buffer, size_t size); -template ::value>* = nullptr> std::vector> load_mesh_assimp(const lagrange::fs::path& filename); template @@ -79,7 +79,7 @@ inline std::unique_ptr load_scene_assimp_from_memory(const void* buffer return std::unique_ptr(importer.GetOrphanedScene()); } -template ::value>* /* = nullptr */> std::vector> load_mesh_assimp(const lagrange::fs::path& filename) { @@ -140,7 +140,7 @@ std::unique_ptr convert_mesh_assimp(const aiMesh* mesh) if (FacetArray::ColsAtCompileTime != Eigen::Dynamic && FacetArray::ColsAtCompileTime != nvpf) { logger().warn( "FacetArray cannot hold facets with n!={} vertex per facet, triangulating", - FacetArray::ColsAtCompileTime); + static_cast(FacetArray::ColsAtCompileTime)); triangulate = true; nvpf = 3; } diff --git 
a/modules/io/include/lagrange/io/legacy/load_mesh_ply.h b/modules/io/include/lagrange/io/legacy/load_mesh_ply.h index 5097a844..4ed833e0 100644 --- a/modules/io/include/lagrange/io/legacy/load_mesh_ply.h +++ b/modules/io/include/lagrange/io/legacy/load_mesh_ply.h @@ -74,7 +74,7 @@ std::unique_ptr load_mesh_ply(const fs::path& filename) comments); auto mesh = create_mesh(std::move(V), std::move(F)); - if (N.rows() == mesh->get_num_vertices()) { + if (static_cast(N.rows()) == mesh->get_num_vertices()) { logger().debug("Setting vertex normal"); mesh->add_vertex_attribute("normal"); mesh->import_vertex_attribute("normal", N); @@ -84,7 +84,7 @@ std::unique_ptr load_mesh_ply(const fs::path& filename) return std::find(VDheader.begin(), VDheader.end(), name) != VDheader.end(); }; - if (VD.rows() == mesh->get_num_vertices()) { + if (static_cast(VD.rows()) == mesh->get_num_vertices()) { if (has_attribute("red") && has_attribute("green") && has_attribute("blue")) { bool has_alpha = has_attribute("alpha"); int n = (has_alpha ? 4 : 3); diff --git a/modules/io/python/tests/test_io.py b/modules/io/python/tests/test_io.py index 08e16925..bed63d8b 100644 --- a/modules/io/python/tests/test_io.py +++ b/modules/io/python/tests/test_io.py @@ -63,9 +63,7 @@ def match_attribute(mesh, mesh2, id1, id2): attr_name = "__unit_test__" # Convert both attributes to corner attribute and compare the per-corner value. 
id1 = lagrange.map_attribute(mesh, id1, attr_name, lagrange.AttributeElement.Corner) - id2 = lagrange.map_attribute( - mesh2, id2, attr_name, lagrange.AttributeElement.Corner - ) + id2 = lagrange.map_attribute(mesh2, id2, attr_name, lagrange.AttributeElement.Corner) attr1 = mesh.attribute(id1) attr2 = mesh2.attribute(id2) @@ -131,9 +129,7 @@ def __save_and_load__( assert_same_attribute(mesh, mesh2, lagrange.AttributeUsage.UV, required) assert_same_attribute(mesh, mesh2, lagrange.AttributeUsage.Normal, required) if filename.suffix != ".obj": - assert_same_attribute( - mesh, mesh2, lagrange.AttributeUsage.Color, required - ) + assert_same_attribute(mesh, mesh2, lagrange.AttributeUsage.Color, required) if selected_attributes is not None: for attr_id in selected_attributes: @@ -195,9 +191,7 @@ def test_single_triangle_with_attributes(self, triangle_with_uv_normal_color): mesh = triangle_with_uv_normal_color self.save_and_load(mesh) - def test_single_triangle_with_attributes_explicit( - self, triangle_with_uv_normal_color - ): + def test_single_triangle_with_attributes_explicit(self, triangle_with_uv_normal_color): mesh = triangle_with_uv_normal_color attr_ids = mesh.get_matching_attribute_ids() assert len(attr_ids) != 0 diff --git a/modules/python/CMakeLists.txt b/modules/python/CMakeLists.txt index db508a57..82203e03 100644 --- a/modules/python/CMakeLists.txt +++ b/modules/python/CMakeLists.txt @@ -40,8 +40,6 @@ endif() # 2. installation if(SKBUILD) message(STATUS "Lagrange: installing python binding to ${SKBUILD_PLATLIB_DIR}/lagrange") - # Install python extension directly at ${CMAKE_INSTALL_PREFIX}, which will - # be set by `setup.py`. install(TARGETS lagrange_python tbb DESTINATION ${SKBUILD_PLATLIB_DIR}/lagrange COMPONENT Lagrange_Python_Runtime @@ -98,9 +96,26 @@ NB_MODULE(lagrange, m) # Lastly, add stubgen installation command. if(SKBUILD) + # Create the lagrange directory containing stubs. 
+ file(MAKE_DIRECTORY ${SKBUILD_PLATLIB_DIR}/lagrange/lagrange) + # Generate stubs for python binding within the install location. - install(CODE - "execute_process(COMMAND ${Python_EXECUTABLE} -m nanobind_stubgen lagrange WORKING_DIRECTORY ${SKBUILD_PLATLIB_DIR}/lagrange)" - COMPONENT Lagrange_Python_Runtime) + foreach(module_name IN ITEMS ${active_modules}) + nanobind_add_stub( + lagrange_python_stubgen_${module_name} + INSTALL_TIME + MODULE lagrange.${module_name} + OUTPUT ${SKBUILD_PLATLIB_DIR}/lagrange/lagrange/${module_name}.pyi + DEPENDS lagrange_python + COMPONENT Lagrange_Python_Runtime + PYTHON_PATH ${SKBUILD_PLATLIB_DIR}/lagrange/ + VERBOSE + ) + endforeach() endif() endfunction() + +# 5. dump lagrange version +if(SKBUILD) + file(WRITE ${SKBUILD_PLATLIB_DIR}/lagrange/_version.py "__version__='${lagrange_version}'") +endif() diff --git a/modules/python/lagrange/__init__.py b/modules/python/lagrange/__init__.py index 0f9c5d2d..368e1873 100644 --- a/modules/python/lagrange/__init__.py +++ b/modules/python/lagrange/__init__.py @@ -12,5 +12,6 @@ from .lagrange import * from .lagrange.core import * from ._logging import logger +from ._version import __version__ -del _logging, lagrange +del _logging, lagrange # type: ignore diff --git a/modules/python/lagrange/_logging.py b/modules/python/lagrange/_logging.py index fc589d0e..1d2e1690 100644 --- a/modules/python/lagrange/_logging.py +++ b/modules/python/lagrange/_logging.py @@ -25,9 +25,7 @@ class ColorFormatter(logging.Formatter): red = colorama.Fore.RED bold_red = colorama.Style.BRIGHT + colorama.Fore.RED reset = colorama.Style.RESET_ALL - format_template = ( - "[%(asctime)s] [%(name)s] {color}[%(levelname)s]{reset} %(message)s" - ) + format_template = "[%(asctime)s] [%(name)s] {color}[%(levelname)s]{reset} %(message)s" FORMATS = { logging.DEBUG: format_template.format(color=grey, reset=reset), diff --git a/modules/scene/include/lagrange/scene/RemeshingOptions.h 
b/modules/scene/include/lagrange/scene/RemeshingOptions.h index 52bc9d66..02fdd3dc 100644 --- a/modules/scene/include/lagrange/scene/RemeshingOptions.h +++ b/modules/scene/include/lagrange/scene/RemeshingOptions.h @@ -14,6 +14,7 @@ #include #include +#include namespace lagrange::scene { @@ -61,6 +62,9 @@ struct RemeshingOptions /// Behavior for meshes without instances in the scene. UninstantiatedMeshesStrategy uninstantiated_meshes_strategy = UninstantiatedMeshesStrategy::None; + + /// Optional per-instance weights/importance. Must be > 0. + std::vector per_instance_importance; }; } // namespace lagrange::scene diff --git a/modules/scene/include/lagrange/scene/internal/bake_scaling.h b/modules/scene/include/lagrange/scene/internal/bake_scaling.h new file mode 100644 index 00000000..6b03f689 --- /dev/null +++ b/modules/scene/include/lagrange/scene/internal/bake_scaling.h @@ -0,0 +1,54 @@ +/* + * Copyright 2024 Adobe. All rights reserved. + * This file is licensed to you under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. You may obtain a copy + * of the License at http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under + * the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR REPRESENTATIONS + * OF ANY KIND, either express or implied. See the License for the specific language + * governing permissions and limitations under the License. + */ +#pragma once + +#include + +#include + +namespace lagrange::scene::internal { + +/// +/// Bake a uniform per-instance scaling factor into the mesh transforms. In order to prevent any +/// numerical error when unbaking, we simply store the old transform data in the instance user data. +/// +/// @param[in] scene Scene to bake. +/// @param[in] per_instance_scaling Per instance scaling factor. +/// +/// @tparam Scalar Scene scalar type. 
+/// @tparam Index Scene index type. +/// @tparam Dimension Scene dimension. +/// +/// @return A new scene where each instance transform has been modified to account for the +/// provided scaling. +/// +template +SimpleScene bake_scaling( + SimpleScene scene, + const std::vector& per_instance_scaling); + +/// +/// Unbake previously baked scaling factors from the scene instance transforms. +/// +/// @param[in] scene Scene to unbake. +/// +/// @tparam Scalar Scene scalar type. +/// @tparam Index Scene index type. +/// @tparam Dimension Scene dimension. +/// +/// @return A new scene where the instance transforms/user data have been restored to their +/// previous state. +/// +template +SimpleScene unbake_scaling(SimpleScene scene); + +} // namespace lagrange::scene::internal diff --git a/modules/scene/python/scripts/extract_texture.py b/modules/scene/python/scripts/extract_texture.py index 311725bf..b4b41ff3 100755 --- a/modules/scene/python/scripts/extract_texture.py +++ b/modules/scene/python/scripts/extract_texture.py @@ -30,9 +30,7 @@ def parse_args(): def dump_texture(img, filename): img.uri = filename img_buffer = img.image - buffer = img_buffer.data.reshape( - (img_buffer.height, img_buffer.width, img_buffer.num_channels) - ) + buffer = img_buffer.data.reshape((img_buffer.height, img_buffer.width, img_buffer.num_channels)) if img_buffer.num_channels == 4: im = Image.fromarray(buffer, "RGBA") elif img_buffer.num_channels == 3: @@ -61,9 +59,9 @@ def main(): assert tex.image != lagrange.invalid_index img = scene.images[tex.image] if len(img.image.data) != 0: - texture_filename = output_filename.with_suffix( - ".png" - ).with_stem(f"{basename}_{texture_count:03}") + texture_filename = output_filename.with_suffix(".png").with_stem( + f"{basename}_{texture_count:03}" + ) dump_texture(img, texture_filename) texture_count += 1 diff --git a/modules/scene/python/src/bind_value.h b/modules/scene/python/src/bind_value.h index 84c01c4d..78354570 100644 --- 
a/modules/scene/python/src/bind_value.h +++ b/modules/scene/python/src/bind_value.h @@ -35,7 +35,6 @@ struct type_caster { using CasterT = make_caster; - flags |= (uint8_t)cast_flags::none_disallowed; CasterT caster; if (!caster.from_python(src, flags, cleanup)) return false; value.set(caster.operator cast_t()); diff --git a/modules/scene/python/tests/test_simple_scene.py b/modules/scene/python/tests/test_simple_scene.py index 0aad71f6..83a0c9c9 100644 --- a/modules/scene/python/tests/test_simple_scene.py +++ b/modules/scene/python/tests/test_simple_scene.py @@ -79,9 +79,7 @@ def test_multiple_instances(self): def test_scene_convert(self, single_triangle): scene = lagrange.scene.mesh_to_simple_scene(single_triangle) - scene2 = lagrange.scene.meshes_to_simple_scene( - [single_triangle, single_triangle] - ) + scene2 = lagrange.scene.meshes_to_simple_scene([single_triangle, single_triangle]) print(scene, type(scene)) mesh = lagrange.scene.simple_scene_to_mesh(scene) mesh2 = lagrange.scene.simple_scene_to_mesh(scene2) diff --git a/modules/scene/src/internal/bake_scaling.cpp b/modules/scene/src/internal/bake_scaling.cpp new file mode 100644 index 00000000..7aae12ca --- /dev/null +++ b/modules/scene/src/internal/bake_scaling.cpp @@ -0,0 +1,96 @@ +/* + * Copyright 2024 Adobe. All rights reserved. + * This file is licensed to you under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. You may obtain a copy + * of the License at http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under + * the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR REPRESENTATIONS + * OF ANY KIND, either express or implied. See the License for the specific language + * governing permissions and limitations under the License. 
+ */ +#include + +#include +#include +#include + +namespace lagrange::scene::internal { + +namespace { + +template +struct UserData +{ + using AffineTransform = Eigen::Transform(Dimension), Eigen::Affine>; + + AffineTransform prev_transform; + + std::any prev_data; +}; + +} // namespace + +template +SimpleScene bake_scaling( + SimpleScene scene, + const std::vector& per_instance_scaling) +{ + using Data = UserData; + + la_runtime_assert( + per_instance_scaling.size() == scene.compute_num_instances(), + "Per-instance scaling vector must have the same size as the total number of instances in " + "the scene."); + + for (Index mesh_index = 0, global_index = 0; mesh_index < scene.get_num_meshes(); + ++mesh_index) { + for (Index instance_index = 0; instance_index < scene.get_num_instances(mesh_index); + ++instance_index, ++global_index) { + auto& instance = scene.ref_instance(mesh_index, instance_index); + instance.user_data = Data{instance.transform, std::move(instance.user_data)}; + instance.transform.scale(per_instance_scaling[global_index]); + logger().debug( + "Baking scaling factor {} into mesh {}, instance {}", + per_instance_scaling[global_index], + mesh_index, + instance_index); + } + } + + return scene; +} + +template +SimpleScene unbake_scaling(SimpleScene scene) +{ + using Data = UserData; + for (Index mesh_index = 0; mesh_index < scene.get_num_meshes(); ++mesh_index) { + for (Index instance_index = 0; instance_index < scene.get_num_instances(mesh_index); + ++instance_index) { + auto& instance = scene.ref_instance(mesh_index, instance_index); + Data* data = std::any_cast(&instance.user_data); + la_runtime_assert( + data, + fmt::format( + "Cannot unbake scaling for instance {} of mesh {}. 
No previous transform was " + "found.", + instance_index, + mesh_index)); + instance.transform = data->prev_transform; + instance.user_data = std::move(data->prev_data); + } + } + + return scene; +} + +#define LA_X_bake_scaling(_, Scalar, Index, Dimension) \ + template LA_SCENE_API SimpleScene bake_scaling( \ + SimpleScene scene, \ + const std::vector& per_instance_scaling); \ + template LA_SCENE_API SimpleScene unbake_scaling( \ + SimpleScene scene); +LA_SIMPLE_SCENE_X(bake_scaling, 0) + +} // namespace lagrange::scene::internal diff --git a/pyproject.toml b/pyproject.toml index 415c6ec7..dafb0a9c 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,7 +1,7 @@ [build-system] requires = [ - "scikit-build-core==0.5.1", - "nanobind-stubgen@git+https://github.com/jdumas/nanobind-stubgen@e860934", + "scikit-build-core==0.8.2", + "typing-extensions~=4.1", ] build-backend = "scikit_build_core.build" @@ -22,11 +22,33 @@ dynamic = ["version"] repo = "https://github.com/adobe/lagrange" doc = "https://opensource.adobe.com/lagrange-docs" +[project.optional-dependencies] +test = [ + "pytest>=7.0.0" +] +lint = [ + "ruff==0.4.3", + "mypy==1.1.1" +] +packaging = [ + "twine>=4.0.0" +] +dev = [ + "lagrange[test]", + "lagrange[lint]", + "lagrange[packaging]" +] + +[tool.black] +line_length = 100 + +[tool.ruff] +line-length = 100 + [tool.scikit-build] install.components = ["Lagrange_Python_Runtime"] wheel.packages = ["modules/python/lagrange"] - [tool.scikit-build.cmake.define] LAGRANGE_ASSERT_DEBUG_BREAK = false LAGRANGE_EXAMPLES = false