From 8d01832419050aed1105e666040c4e5d3a14c96c Mon Sep 17 00:00:00 2001 From: Ignacio Vizzo Date: Wed, 23 Mar 2022 15:54:45 +0100 Subject: [PATCH] Bump to version 0.1.6 --- .gitlab-ci.yml | 34 +++--- 3rdparty/find_dependencies.cmake | 15 +-- 3rdparty/pybind11/pybind11.cmake | 6 +- CMakeLists.txt | 6 +- Makefile | 5 +- ci/test-wheels.sh | 2 +- docker/builder/Dockerfile | 1 + examples/python/Makefile | 2 + examples/python/config/kitti.yaml | 6 +- examples/python/vdbfusion_pipeline.py | 2 +- setup.cfg | 2 +- setup.py | 1 + src/vdbfusion/CMakeLists.txt | 4 +- src/vdbfusion/__init__.py | 2 +- src/vdbfusion/pybind/CMakeLists.txt | 10 +- src/vdbfusion/pybind/vdb_volume.py | 122 +++++++++++++++------- src/vdbfusion/pybind/vdbfusion_pybind.cpp | 43 ++++++-- src/vdbfusion/vdbfusion/CMakeLists.txt | 2 +- src/vdbfusion/vdbfusion/VDBVolume.cpp | 29 +++++ src/vdbfusion/vdbfusion/VDBVolume.h | 7 ++ 20 files changed, 214 insertions(+), 87 deletions(-) create mode 100644 examples/python/Makefile diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index 82eab56..b74f449 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -33,28 +33,22 @@ format: ubuntu:cpp:build: stage: build script: - - mkdir build && cd build && cmake .. - - make -j$(nproc --all) all - artifacts: - paths: - - build/ + - mkdir build && cd build && cmake .. && make -j$(nproc --all) all install + - make uninstall -ubuntu:cpp:install: +ubuntu:cpp:example: stage: build needs: ["ubuntu:cpp:build"] script: - - cd build - - make install + - mkdir build && cd build && cmake -DBUILD_CXX_EXAMPLE=ON .. && make -j$(nproc --all) install - make uninstall -ubuntu:cpp:example: +ubuntu:cpp:standalone_example: stage: build - needs: ["ubuntu:cpp:install"] - script: + needs: ["ubuntu:cpp:build"] script: - - mkdir build && cd build && cmake -DBUILD_CXX_EXAMPLE=ON .. && make install && cd .. - - cd examples/cpp/ && mkdir build && cd build && cmake .. - - make -j$(nproc --all) all + - mkdir build && cd build && cmake .. && make -j$(nproc --all) install && cd .. + - cd examples/cpp/ && mkdir build && cd build && cmake .. 
&& make -j$(nproc --all) #----- ubuntu python jobs --------------------------------------------------------------------------- ubuntu:py:wheel: @@ -103,9 +97,9 @@ manylinux:wheels: image: docker:19.03.12 needs: ["ubuntu:py:wheel"] rules: + - if: $CI_COMMIT_TAG - if: $CI_MERGE_REQUEST_IID - if: $CI_PIPELINE_SOURCE == "schedule" - - if: "$CI_COMMIT_TAG != null && $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH" variables: DOCKER_IMAGE: "quay.io/pypa/manylinux2014_x86_64" PLAT: "manylinux2014_x86_64" @@ -133,9 +127,9 @@ manylinux:test: needs: ["manylinux:wheels"] image: docker:19.03.12 rules: + - if: $CI_COMMIT_TAG - if: $CI_MERGE_REQUEST_IID - if: $CI_PIPELINE_SOURCE == "schedule" - - if: "$CI_COMMIT_TAG != null && $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH" variables: DOCKER_IMAGE: "quay.io/pypa/manylinux2014_x86_64" PLAT: "manylinux2014_x86_64" @@ -153,12 +147,12 @@ testpypi: stage: deploy needs: ["manylinux:test"] rules: - - if: "$CI_COMMIT_TAG != null && $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH" + - if: $CI_COMMIT_TAG variables: TWINE_PASSWORD: "${TESTPYPI_ACCESS_TOKEN}" TWINE_USERNAME: "__token__" script: - - twine upload --skip-existing --repository testpypi wheelhouse/*.whl + - twine upload --verbose --skip-existing --repository testpypi wheelhouse/*.whl artifacts: paths: - wheelhouse/ @@ -167,9 +161,9 @@ pypi: stage: deploy needs: ["testpypi"] rules: - - if: "$CI_COMMIT_TAG != null && $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH" + - if: $CI_COMMIT_TAG variables: TWINE_PASSWORD: "${PYPI_ACCESS_TOKEN}" TWINE_USERNAME: "__token__" script: - - twine upload --skip-existing --repository pypi wheelhouse/*.whl + - twine upload --verbose --skip-existing --repository pypi wheelhouse/*.whl diff --git a/3rdparty/find_dependencies.cmake b/3rdparty/find_dependencies.cmake index 19e32da..22542fd 100644 --- a/3rdparty/find_dependencies.cmake +++ b/3rdparty/find_dependencies.cmake @@ -17,12 +17,15 @@ if(NOT USE_SYSTEM_EIGEN3 OR NOT EIGEN3_FOUND) include(${CMAKE_CURRENT_LIST_DIR}/eigen/eigen.cmake) endif() -if(USE_SYSTEM_PYBIND11) - find_package(pybind11 QUIET) -endif() -if(NOT USE_SYSTEM_PYBIND11 OR NOT pybind11_FOUND) - set(USE_SYSTEM_PYBIND11 OFF) - include(${CMAKE_CURRENT_LIST_DIR}/pybind11/pybind11.cmake) +if(BUILD_PYTHON_BINDINGS) + if(USE_SYSTEM_PYBIND11) + find_package(pybind11 QUIET) + endif() + if(NOT USE_SYSTEM_PYBIND11 OR NOT pybind11_FOUND) + message(STATUS "ASDASDAS") + set(USE_SYSTEM_PYBIND11 OFF) + include(${CMAKE_CURRENT_LIST_DIR}/pybind11/pybind11.cmake) + endif() endif() if(USE_SYSTEM_OPENVDB) diff --git a/3rdparty/pybind11/pybind11.cmake b/3rdparty/pybind11/pybind11.cmake index a939d00..4309fdc 100644 --- a/3rdparty/pybind11/pybind11.cmake +++ b/3rdparty/pybind11/pybind11.cmake @@ -2,7 +2,9 @@ include(FetchContent) FetchContent_Declare( ext_pybind11 PREFIX pybind11 - URL https://github.com/pybind/pybind11/archive/refs/tags/v2.6.2.tar.gz - URL_HASH SHA256=8ff2fff22df038f5cd02cea8af56622bc67f5b64534f1b83b9f133b8366acff2) + #TODO: Update to a release when this gets merged: https://github.com/pybind/pybind11/pull/3743 + GIT_REPOSITORY https://github.com/pybind/pybind11 + GIT_TAG master + GIT_SHALLOW ON) FetchContent_MakeAvailable(ext_pybind11) diff --git a/CMakeLists.txt b/CMakeLists.txt index e1e336e..0b21dbd 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -1,16 +1,16 @@ cmake_minimum_required(VERSION 3.10) -project(VDBFusion VERSION 0.1.5 LANGUAGES CXX) +project(VDBFusion VERSION 0.1.6 LANGUAGES CXX) # Setup build options option(USE_CCACHE "Build using Ccache if found on the path" ON) 
option(USE_SYSTEM_EIGEN3 "Use system pre-installed eigen3" ON) option(USE_SYSTEM_OPENVDB "Use system pre-installed OpenVDB" ON) option(USE_SYSTEM_PYBIND11 "Use system pre-installed pybind11" ON) -option(PYOPENVDB_SUPPORT "Add suuport for pyopenvdb, this depends on boost::python" OFF) option(SILENCE_WARNINGS "To build manylinux packages only, disable on the global scope" OFF) option(BUILD_CXX_EXAMPLE "Build C++ examples" OFF) +option(BUILD_PYTHON_BINDINGS "Build the python module" OFF) -mark_as_advanced(USE_CCACHE USE_SYSTEM_OPENVDB PYOPENVDB_SUPPORT SILENCE_WARNINGS) +mark_as_advanced(USE_CCACHE USE_SYSTEM_OPENVDB SILENCE_WARNINGS BUILD_PYTHON_BINDINGS) if(SILENCE_WARNINGS) message(WARNING "Disable all warnings for all targets in the build system, only for the CI/CD") diff --git a/Makefile b/Makefile index dcb3d96..fe34ff2 100644 --- a/Makefile +++ b/Makefile @@ -1,10 +1,13 @@ -.PHONY: docker +.PHONY: install docker install: pip3 -v install . && cp build/*/compile_commands.json build/ uninstall: pip3 -v uninstall vdbfusion +dev: + pip3 -v install -e . && cp build/*/compile_commands.json build/ + test: pytest . diff --git a/ci/test-wheels.sh b/ci/test-wheels.sh index 6f06144..e827efc 100755 --- a/ci/test-wheels.sh +++ b/ci/test-wheels.sh @@ -7,5 +7,5 @@ for PYBIN in /opt/python/*/bin/; do "${PYBIN}/pip" install -r /io/requirements.txt "${PYBIN}/pip" install -r /io/dev-requirements.txt "${PYBIN}/pip" install --no-index -f /io/wheelhouse vdbfusion - (cd "$HOME"; "${PYBIN}/pytest" /io/) + (cd "$HOME"; "${PYBIN}/pytest" --capture=sys /io/) done diff --git a/docker/builder/Dockerfile b/docker/builder/Dockerfile index e77ebf7..1bf36b2 100644 --- a/docker/builder/Dockerfile +++ b/docker/builder/Dockerfile @@ -52,6 +52,7 @@ RUN git clone --depth 1 https://github.com/nachovizzo/openvdb.git -b nacho/vdbfu && cmake \ -DOPENVDB_BUILD_PYTHON_MODULE=ON \ -DUSE_NUMPY=ON \ + -DPYOPENVDB_INSTALL_DIRECTORY="/usr/local/lib/python3.8/dist-packages" \ -DCMAKE_POSITION_INDEPENDENT_CODE=ON \ -DUSE_ZLIB=OFF \ ..\ diff --git a/examples/python/Makefile b/examples/python/Makefile new file mode 100644 index 0000000..e477ce7 --- /dev/null +++ b/examples/python/Makefile @@ -0,0 +1,2 @@ +vdbfusion: + @make -C ../../ diff --git a/examples/python/config/kitti.yaml b/examples/python/config/kitti.yaml index 95c60e8..9e70b53 100644 --- a/examples/python/config/kitti.yaml +++ b/examples/python/config/kitti.yaml @@ -5,11 +5,11 @@ space_carving: False out_dir: "results/" # Reconstruction -fill_holes: True +fill_holes: False min_weight: 5.0 # Kitti apply_pose: True -min_range: 2.0 -max_range: 70.0 +min_range: 2.5 +max_range: 120.0 correct_scan: True diff --git a/examples/python/vdbfusion_pipeline.py b/examples/python/vdbfusion_pipeline.py index 180b181..68b5a82 100644 --- a/examples/python/vdbfusion_pipeline.py +++ b/examples/python/vdbfusion_pipeline.py @@ -85,7 +85,7 @@ def _get_o3d_mesh(tsdf_volume, cfg): def _print_metrics(self): # If PYOPENVDB_SUPPORT has not been enabled then we can't report any metrics - if not hasattr(self._tsdf_volume, "tsdf"): + if not self._tsdf_volume.pyopenvdb_support_enabled: print("No metrics available, please compile with PYOPENVDB_SUPPORT") return diff --git a/setup.cfg b/setup.cfg index 03eb246..19172d9 100644 --- a/setup.cfg +++ b/setup.cfg @@ -3,7 +3,7 @@ name=vdbfusion author = Ignacio Martin Vizzo (aka Nacho) and Cyrill Stachniss author_email=ignaciovizzo@gmail.com license_files = LICENSE.txt -version=0.1.5 +version=0.1.6 description=3D Volumetric Surface Reconstruction using the VDB data 
structure long_description=file:README.md, long_description_content_type=text/markdown diff --git a/setup.py b/setup.py index 5114163..1ce1356 100644 --- a/setup.py +++ b/setup.py @@ -34,6 +34,7 @@ def build_extension(self, ext): # EXAMPLE_VERSION_INFO shows you how to pass a value into the C++ code # from Python. cmake_args = [ + f"-DBUILD_PYTHON_BINDINGS=ON", f"-DCMAKE_LIBRARY_OUTPUT_DIRECTORY={extdir}", f"-DPYTHON_EXECUTABLE={sys.executable}", f"-DCMAKE_BUILD_TYPE={cfg}", # not used on MSVC, but no harm diff --git a/src/vdbfusion/CMakeLists.txt b/src/vdbfusion/CMakeLists.txt index c62502b..bcccc67 100644 --- a/src/vdbfusion/CMakeLists.txt +++ b/src/vdbfusion/CMakeLists.txt @@ -1,2 +1,4 @@ -add_subdirectory(pybind) add_subdirectory(vdbfusion) +if(BUILD_PYTHON_BINDINGS) + add_subdirectory(pybind) +endif() diff --git a/src/vdbfusion/__init__.py b/src/vdbfusion/__init__.py index f976622..41afc1f 100644 --- a/src/vdbfusion/__init__.py +++ b/src/vdbfusion/__init__.py @@ -1,2 +1,2 @@ -__version__ = "0.1.5" +__version__ = "0.1.6" from .pybind.vdb_volume import VDBVolume diff --git a/src/vdbfusion/pybind/CMakeLists.txt b/src/vdbfusion/pybind/CMakeLists.txt index c9b8089..c34cb21 100644 --- a/src/vdbfusion/pybind/CMakeLists.txt +++ b/src/vdbfusion/pybind/CMakeLists.txt @@ -1,8 +1,16 @@ pybind11_add_module(vdbfusion_pybind vdbfusion_pybind.cpp) target_compile_options(vdbfusion_pybind PRIVATE -Werror -Wall -Wextra) target_link_libraries(vdbfusion_pybind PRIVATE VDBFusion::vdbfusion) + +# PYOPENVDB_SUPPORT is defined only by the existence of the pyopenvdb library. +find_package(Python COMPONENTS Interpreter) +execute_process(COMMAND + ${PYTHON_EXECUTABLE} "-c" "import pyopenvdb; print(True)" + OUTPUT_VARIABLE PYOPENVDB_SUPPORT + ERROR_QUIET) if(PYOPENVDB_SUPPORT) - find_package(Boost COMPONENTS python REQUIRED) + find_package(Boost COMPONENTS python Required) target_compile_definitions(vdbfusion_pybind PRIVATE "$<$:PYOPENVDB_SUPPORT>") target_link_libraries(vdbfusion_pybind PRIVATE Boost::python) + message(STATUS "PYOPENVDB_SUPPORT enabled") endif() diff --git a/src/vdbfusion/pybind/vdb_volume.py b/src/vdbfusion/pybind/vdb_volume.py index 5ccd5db..c37957a 100644 --- a/src/vdbfusion/pybind/vdb_volume.py +++ b/src/vdbfusion/pybind/vdb_volume.py @@ -1,4 +1,4 @@ -from typing import Any, Callable, Tuple +from typing import Any, Optional, Tuple, Callable, overload import numpy as np @@ -6,11 +6,6 @@ class VDBVolume: - """Wrapper class around the low level C++ python bindings. - - TODO: Complete class documentation and check for data types - """ - def __init__( self, voxel_size: float, @@ -26,8 +21,8 @@ def __init__( self.voxel_size = self._volume._voxel_size self.sdf_trunc = self._volume._sdf_trunc self.space_carving = self._volume._space_carving - # If PYOPENVDB_SUPPORT has been enabled then we can acccess those attributes - if hasattr(self._volume, "_tsdf") and hasattr(self._volume, "_weights"): + self.pyopenvdb_support_enabled = self._volume.PYOPENVDB_SUPPORT_ENABLED + if self.pyopenvdb_support_enabled: self.tsdf = self._volume._tsdf self.weights = self._volume._weights @@ -39,49 +34,90 @@ def __repr__(self) -> str: f"space_carving = {self.space_carving}\n" ) + @overload def integrate( self, points: np.ndarray, - extrinsic: np.ndarray, # or origin - weighting_function: Callable[[float], float] = None, + extrinsic: np.ndarray, + weighting_function: Callable[[float], float], ) -> None: - """Explain here how to use the function. + ... 
- TODO: Add tag dispatching for the `origin` case - """ - assert isinstance(points, np.ndarray), "points must by np.ndarray(n, 3)" - assert points.dtype == np.float64, "points dtype must be np.float64" - assert isinstance(extrinsic, np.ndarray), "origin/extrinsic must by np.ndarray" - assert extrinsic.dtype == np.float64, "origin/extrinsic dtype must be np.float64" - assert extrinsic.shape in [ - (3,), - (3, 1), - (4, 4), - ], "origin/extrinsic must be a (3,) array or a (4,4) matrix" - # TODO: Fix this logic with singledispatchmethod - if weighting_function: - self._volume._integrate( - vdbfusion_pybind._VectorEigen3d(points), extrinsic, weighting_function - ) - self._volume._integrate(vdbfusion_pybind._VectorEigen3d(points), extrinsic) + @overload + def integrate(self, points: np.ndarray, extrinsic: np.ndarray, weight: float) -> None: + ... + + @overload + def integrate(self, points: np.ndarray, extrinsic: np.ndarray) -> None: + ... + + @overload + def integrate(self, grid, weighting_function: Callable[[float], float]) -> None: + ... + + @overload + def integrate(self, grid, weight: float) -> None: + ... + + @overload + def integrate(self, grid) -> None: + ... + + def integrate( + self, + points: Optional[np.ndarray] = None, + extrinsic: Optional[np.ndarray] = None, + grid: Optional[Any] = None, + weight: Optional[float] = None, + weighting_function: Optional[Callable[[float], float]] = None, + ) -> None: + if grid is not None: + if not self.pyopenvdb_support_enabled: + raise NotImplementedError("Please compile with PYOPENVDB_SUPPORT_ENABLED") + if weighting_function is not None: + return self._volume._integrate(grid, weighting_function) + if weight is not None: + return self._volume._integrate(grid, weight) + return self._volume._integrate(grid) + else: + assert isinstance(points, np.ndarray), "points must by np.ndarray(n, 3)" + assert points.dtype == np.float64, "points dtype must be np.float64" + assert isinstance(extrinsic, np.ndarray), "origin/extrinsic must by np.ndarray" + assert extrinsic.dtype == np.float64, "origin/extrinsic dtype must be np.float64" + assert extrinsic.shape in [ + (3,), + (3, 1), + (4, 4), + ], "origin/extrinsic must be a (3,) array or a (4,4) matrix" + + _points = vdbfusion_pybind._VectorEigen3d(points) + if weighting_function is not None: + return self._volume._integrate(_points, extrinsic, weighting_function) + if weight is not None: + return self._volume._integrate(_points, extrinsic, weight) + self._volume._integrate(_points, extrinsic) + + @overload + def update_tsdf( + self, sdf: float, ijk: np.ndarray, weighting_function: Optional[Callable[[float], float]] + ) -> None: + ... + + @overload + def update_tsdf(self, sdf: float, ijk: np.ndarray) -> None: + ... 
     def update_tsdf(
         self,
         sdf: float,
         ijk: np.ndarray,
-        weighting_function: Callable[[float], float] = None,
+        weighting_function: Optional[Callable[[float], float]] = None,
     ) -> None:
-        assert isinstance(ijk, np.ndarray), "ijk must by np.ndarray(3,)"
-        assert ijk.dtype == np.int32, "ijk dtype must be np.int32"
-        if weighting_function:
-            self._volume._update_tsdf(sdf, ijk, weighting_function)
-        self._volume._update_tsdf(sdf, ijk)
+        if weighting_function is not None:
+            return self._volume._update_tsdf(sdf, ijk, weighting_function)
+        return self._volume._update_tsdf(sdf, ijk)

-    def extract_triangle_mesh(
-        self,
-        fill_holes: bool = True,
-        min_weight: float = 1.0,
-    ) -> Tuple[np.ndarray, np.ndarray]:
+    def extract_triangle_mesh(self, fill_holes: bool = True, min_weight: float = 0.0) -> Tuple:
         """Returns a the vertices and triangles representing the constructed the TriangleMesh.

         If you can afford to use Open3D as dependency just pass the output of this function to the
@@ -102,3 +138,11 @@ def extract_vdb_grids(self, out_file: str) -> None:
         Contains both D(x) and W(x) grids.
         """
         self._volume._extract_vdb_grids(out_file)
+
+    def prune(self, min_weight: float):
+        """Use the W(x) weights grid to cleanup the generated signed distance field according to a
+        minimum weight threshold.
+
+        This function is ideal to cleanup the TSDF grid:D(x) before exporting it.
+        """
+        return self._volume._prune(min_weight)
diff --git a/src/vdbfusion/pybind/vdbfusion_pybind.cpp b/src/vdbfusion/pybind/vdbfusion_pybind.cpp
index 11d633a..f6beb26 100644
--- a/src/vdbfusion/pybind/vdbfusion_pybind.cpp
+++ b/src/vdbfusion/pybind/vdbfusion_pybind.cpp
@@ -57,8 +57,6 @@ PYBIND11_MODULE(vdbfusion_pybind, m) {
             "points"_a, "origin"_a, "weighting_function"_a)
         .def(
             "_integrate",
-            // Binding lambdas in python is expensive, create a default constnat weighting_function
-            // here to avoid ruining the C++ API with default arguments everywhere
             [](VDBVolume& self, const std::vector<Eigen::Vector3d>& points,
                const Eigen::Vector3d& origin) {
                 self.Integrate(points, origin, [](float /*sdf*/) { return 1.0f; });
@@ -66,16 +64,45 @@ PYBIND11_MODULE(vdbfusion_pybind, m) {
             "points"_a, "origin"_a)
         .def(
             "_integrate",
-            // Binding lambdas in python is expensive, create a default constnat weighting_function
-            // here to avoid ruining the C++ API with default arguments everywhere
+            [](VDBVolume& self, const std::vector<Eigen::Vector3d>& points,
+               const Eigen::Vector3d& origin, float weight) {
+                self.Integrate(points, origin, [=](float /*sdf*/) { return weight; });
+            },
+            "points"_a, "origin"_a, "weight"_a)
+        .def(
+            "_integrate",
             [](VDBVolume& self, const std::vector<Eigen::Vector3d>& points,
                const Eigen::Matrix4d& extrinsics) {
                 self.Integrate(points, extrinsics, [](float /*sdf*/) { return 1.0f; });
             },
             "points"_a, "extrinsic"_a)
+        .def(
+            "_integrate",
+            [](VDBVolume& self, const std::vector<Eigen::Vector3d>& points,
+               const Eigen::Matrix4d& extrinsics, float weight) {
+                self.Integrate(points, extrinsics, [=](float /*sdf*/) { return weight; });
+            },
+            "points"_a, "origin"_a, "weight"_a)
+#ifdef PYOPENVDB_SUPPORT
+        .def("_integrate",
+             py::overload_cast<openvdb::FloatGrid::Ptr, const std::function<float(float)>&>(
+                 &VDBVolume::Integrate),
+             "grid"_a, "weighting_function"_a)
+        .def(
+            "_integrate",
+            [](VDBVolume& self, openvdb::FloatGrid::Ptr grid) {
+                self.Integrate(grid, [](float /*sdf*/) { return 1.0f; });
+            },
+            "grid"_a)
+        .def(
+            "_integrate",
+            [](VDBVolume& self, openvdb::FloatGrid::Ptr grid, float weight) {
+                self.Integrate(grid, [=](float /*sdf*/) { return weight; });
+            },
+            "grid"_a, "weight"_a)
+#endif
         .def(
             "_update_tsdf",
-            // Create a lambda function to wrap the numpy int array to a openvdb::Coord type
             [](VDBVolume& self, const float& sdf, std::vector<int>& ijk,
                const std::function<float(float)>& weighting_function) {
                 self.UpdateTSDF(sdf, openvdb::Coord(ijk[0], ijk[1], ijk[2]), weighting_function);
@@ -97,7 +124,11 @@ PYBIND11_MODULE(vdbfusion_pybind, m) {
                 openvdb::io::File(filename).write({self.tsdf_, self.weights_});
             },
             "filename"_a)
-#ifdef PYOPENVDB_SUPPORT
+#ifndef PYOPENVDB_SUPPORT
+        .def_property_readonly_static("PYOPENVDB_SUPPORT_ENABLED", [](py::object) { return false; })
+#else
+        .def_property_readonly_static("PYOPENVDB_SUPPORT_ENABLED", [](py::object) { return true; })
+        .def("_prune", &VDBVolume::Prune, "min_weight"_a)
         .def_readwrite("_tsdf", &VDBVolume::tsdf_)
         .def_readwrite("_weights", &VDBVolume::weights_)
 #endif
diff --git a/src/vdbfusion/vdbfusion/CMakeLists.txt b/src/vdbfusion/vdbfusion/CMakeLists.txt
index 8cf14e8..210666e 100644
--- a/src/vdbfusion/vdbfusion/CMakeLists.txt
+++ b/src/vdbfusion/vdbfusion/CMakeLists.txt
@@ -1,5 +1,5 @@
 cmake_minimum_required(VERSION 3.10)
-project(VDBFusion VERSION 0.1.5 LANGUAGES CXX)
+project(VDBFusion VERSION 0.1.6 LANGUAGES CXX)

 add_library(vdbfusion STATIC)
 add_library(VDBFusion::vdbfusion ALIAS vdbfusion)
diff --git a/src/vdbfusion/vdbfusion/VDBVolume.cpp b/src/vdbfusion/vdbfusion/VDBVolume.cpp
index e852a6c..d2b75cb 100644
--- a/src/vdbfusion/vdbfusion/VDBVolume.cpp
+++ b/src/vdbfusion/vdbfusion/VDBVolume.cpp
@@ -68,6 +68,15 @@ void VDBVolume::UpdateTSDF(const float& sdf,
     }
 }

+void VDBVolume::Integrate(openvdb::FloatGrid::Ptr grid,
+                          const std::function<float(float)>& weighting_function) {
+    for (auto iter = grid->cbeginValueOn(); iter.test(); ++iter) {
+        const auto& sdf = iter.getValue();
+        const auto& voxel = iter.getCoord();
+        this->UpdateTSDF(sdf, voxel, weighting_function);
+    }
+}
+
 void VDBVolume::Integrate(const std::vector<Eigen::Vector3d>& points,
                           const Eigen::Vector3d& origin,
                           const std::function<float(float)>& weighting_function) {
@@ -116,4 +125,24 @@ void VDBVolume::Integrate(const std::vector<Eigen::Vector3d>& points,
         } while (dda.step());
     });
 }
+
+openvdb::FloatGrid::Ptr VDBVolume::Prune(float min_weight) const {
+    const auto weights = weights_->tree();
+    const auto tsdf = tsdf_->tree();
+    const auto background = sdf_trunc_;
+    openvdb::FloatGrid::Ptr clean_tsdf = openvdb::FloatGrid::create(sdf_trunc_);
+    clean_tsdf->setName("D(x): Pruned signed distance grid");
+    clean_tsdf->setTransform(openvdb::math::Transform::createLinearTransform(voxel_size_));
+    clean_tsdf->setGridClass(openvdb::GRID_LEVEL_SET);
+    clean_tsdf->tree().combine2Extended(tsdf, weights, [=](openvdb::CombineArgs<float>& args) {
+        if (args.aIsActive() && args.b() > min_weight) {
+            args.setResult(args.a());
+            args.setResultIsActive(true);
+        } else {
+            args.setResult(background);
+            args.setResultIsActive(false);
+        }
+    });
+    return clean_tsdf;
+}
 }  // namespace vdbfusion
diff --git a/src/vdbfusion/vdbfusion/VDBVolume.h b/src/vdbfusion/vdbfusion/VDBVolume.h
index 7e937e7..b9e7f24 100644
--- a/src/vdbfusion/vdbfusion/VDBVolume.h
+++ b/src/vdbfusion/vdbfusion/VDBVolume.h
@@ -29,11 +29,18 @@ class VDBVolume {
         Integrate(points, origin, weighting_function);
     }

+    /// @brief Integrate incoming TSDF grid inside the current volume using the TSDF equations
+    void Integrate(openvdb::FloatGrid::Ptr grid,
+                   const std::function<float(float)>& weighting_function);
+
     /// @brief Fuse a new given sdf value at the given voxel location, thread-safe
     void UpdateTSDF(const float& sdf,
                     const openvdb::Coord& voxel,
                     const std::function<float(float)>& weighting_function);

+    /// @brief Prune TSDF grids, ideal utility to cleanup a D(x) volume before exporting it
+    openvdb::FloatGrid::Ptr Prune(float min_weight) const;
+
     /// @brief Extracts a TriangleMesh as the iso-surface in the actual volume
     [[nodiscard]] std::tuple<std::vector<Eigen::Vector3d>, std::vector<Eigen::Vector3i>>
     ExtractTriangleMesh(bool fill_holes = true, float min_weight = 0.5) const;
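
Usage sketch (not part of the patch): the snippet below exercises the Python API as it looks after this change — the new constant `weight` argument to `integrate()`, the `pyopenvdb_support_enabled` flag that replaces the old `hasattr()` check, and the new `prune()` helper. The toy point cloud, the pose values and the output file name are made up for illustration; only the method names and argument conventions come from the diff above.

    import numpy as np
    from vdbfusion import VDBVolume

    # Toy input: a flat wall of points 2 m in front of a sensor at the origin.
    points = np.random.uniform(-1.0, 1.0, size=(5000, 3)).astype(np.float64)  # (n, 3) float64
    points[:, 2] = 2.0
    pose = np.eye(4, dtype=np.float64)  # (4, 4) extrinsic; a (3,) float64 origin also works

    volume = VDBVolume(voxel_size=0.05, sdf_trunc=0.15, space_carving=False)

    # New in 0.1.6: a constant scalar weight instead of a weighting function.
    volume.integrate(points, pose, weight=1.0)
    # The previous call styles are still available:
    volume.integrate(points, pose)                                   # implicit constant weight of 1.0
    volume.integrate(points, pose, weighting_function=lambda sdf: 1.0)

    vertices, triangles = volume.extract_triangle_mesh(fill_holes=False, min_weight=1.0)
    volume.extract_vdb_grids("toy_map.vdb")  # writes both the D(x) and W(x) grids

    # pyopenvdb-dependent extras are now discoverable at runtime.
    if volume.pyopenvdb_support_enabled:
        clean_tsdf = volume.prune(min_weight=1.0)  # pruned D(x) grid, needs PYOPENVDB_SUPPORT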
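
A second sketch, only meaningful for builds where the new pyopenvdb auto-detection in src/vdbfusion/pybind/CMakeLists.txt actually enables PYOPENVDB_SUPPORT: the patch adds a grid-to-grid path, so a previously saved TSDF grid can be folded into another volume through the same `integrate()` entry point. The file name and the grid name passed to `pyopenvdb.read()` are assumptions for the example, not values defined by this patch.

    import pyopenvdb as vdb
    from vdbfusion import VDBVolume

    volume = VDBVolume(voxel_size=0.05, sdf_trunc=0.15, space_carving=False)
    assert volume.pyopenvdb_support_enabled, "requires a build with PYOPENVDB_SUPPORT"

    # Hypothetical input: a D(x) FloatGrid written earlier, e.g. by extract_vdb_grids();
    # adjust the file and grid names to whatever your .vdb file actually contains.
    tsdf_grid = vdb.read("previous_map.vdb", "D(x): signed distance grid")

    # New in 0.1.6: integrate a whole TSDF grid, with a constant weight...
    volume.integrate(grid=tsdf_grid, weight=1.0)
    # ...or with a custom weighting function of the incoming SDF value.
    volume.integrate(grid=tsdf_grid, weighting_function=lambda sdf: 0.5)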