diff --git a/.github/workflows/python-main.yml b/.github/workflows/python-main.yml index 370abd88a..36d2134f3 100644 --- a/.github/workflows/python-main.yml +++ b/.github/workflows/python-main.yml @@ -52,12 +52,16 @@ jobs: run: | sudo apt update python -m pip install --upgrade pip - sudo apt install libusb-1.0-0-dev pkg-config bison autoconf libtool libxi-dev libxtst-dev libxrandr-dev libx11-dev libxft-dev libxext-dev nasm flex libudev-dev + sudo apt install libusb-1.0-0-dev pkg-config bison autoconf libtool libxi-dev libxtst-dev libxrandr-dev libx11-dev libxft-dev libxext-dev nasm flex libudev-dev automake libltdl-dev python -m pip install -r bindings/python/docs/requirements_mkdoc.txt - name: Configure project run: cmake -S . -B build -DDEPTHAI_BUILD_PYTHON=ON -DDEPTHAI_PYTHON_FORCE_DOCSTRINGS=ON -DDEPTHAI_BASALT_SUPPORT=ON -DDEPTHAI_PCL_SUPPORT=ON -DDEPTHAI_RTABMAP_SUPPORT=ON -DDEPTHAI_PYTHON_DOCSTRINGS_OUTPUT="$PWD/bindings/python/docstrings/depthai_python_docstring.hpp" - name: Build target 'pybind11_mkdoc' run: cmake --build build --target pybind11_mkdoc --parallel 4 + - name: Print out vcpkg logs if building port fails + if: failure() # Only run this if the build step fails + run: bash ./bindings/python/ci/show_vcpkg_logs.sh + - name: Upload docstring artifacts uses: actions/upload-artifact@v3 with: @@ -140,6 +144,9 @@ jobs: run: | cmake -S . -B build -DDEPTHAI_BUILD_PYTHON=ON -D CMAKE_BUILD_TYPE=Release -D DEPTHAI_PYTHON_DOCSTRINGS_INPUT=$PWD/bindings/python/docstrings/depthai_python_docstring.hpp -D DEPTHAI_PYTHON_ENABLE_TESTS=ON cmake --build build --parallel 4 + - name: Print out vcpkg logs if building port fails + if: failure() # Only run this if the build step fails + run: cd bindings/python && bash ./ci/show_vcpkg_logs.sh - name: Test run: | cmake --build build --target pytest --config Release @@ -207,6 +214,7 @@ jobs: DEPTHAI_BUILD_BASALT: OFF DEPTHAI_BUILD_PCL: ON DEPTHAI_BUILD_RTABMAP: ON + DEPTHAI_BUILD_KOMPUTE: ON VCPKG_BINARY_SOURCES: "clear;x-gha,readwrite" steps: - name: Cache .hunter folder @@ -247,6 +255,9 @@ jobs: python -m pip install --upgrade pip - name: Building wheels run: cd bindings/python && python -m pip wheel . -w ./wheelhouse/audited/ --verbose + - name: Print out vcpkg logs if building port fails + if: failure() # Only run this if the build step fails + run: cd bindings/python && bash ./ci/show_vcpkg_logs.sh - name: Archive wheel artifacts uses: actions/upload-artifact@v3 with: @@ -274,6 +285,7 @@ jobs: DEPTHAI_BUILD_BASALT: ON DEPTHAI_BUILD_PCL: ON DEPTHAI_BUILD_RTABMAP: ON + DEPTHAI_BUILD_KOMPUTE: ON steps: - name: Cache .hunter folder uses: actions/cache@v3 @@ -317,6 +329,9 @@ jobs: python -m pip install delocate - name: Building wheels run: cd bindings/python && python -m pip wheel . 
-w ./wheelhouse/ --verbose + - name: Print out vcpkg logs if building port fails + if: failure() # Only run this if the build step fails + run: cd bindings/python && bash ./ci/show_vcpkg_logs.sh - name: Auditing wheels run: cd bindings/python && ci/repair-whl-macos.sh `pwd`/wheelhouse/* `pwd`/wheelhouse/audited - name: Archive wheel artifacts @@ -410,6 +425,7 @@ jobs: DEPTHAI_BUILD_BASALT: ON DEPTHAI_BUILD_PCL: ON DEPTHAI_BUILD_RTABMAP: ON + DEPTHAI_BUILD_KOMPUTE: ON VCPKG_BINARY_SOURCES: "clear;x-gha,readwrite" steps: - name: Cache .hunter folder @@ -428,11 +444,6 @@ jobs: submodules: 'recursive' - name: Installing libusb1-devel dependency run: yum install -y libusb1-devel perl-core curl zip unzip tar ninja-build zlib-devel curl-devel xcb-util-renderutil-devel xcb-util-devel xcb-util-image-devel xcb-util-keysyms-devel xcb-util-wm-devel mesa-libGL-devel libxkbcommon-devel libxkbcommon-x11-devel libXi-devel libXrandr-devel libXtst-devel libudev-devel lapack-devel nasm libtool autoconf automake - - name: Installing cmake dependency - run: | - cd bindings/python - /opt/python/cp38-cp38/bin/python3.8 -m pip install cmake - ln -s /opt/python/cp38-cp38/bin/cmake /bin/ - name: Create folder structure run: cd bindings/python && mkdir -p wheelhouse/audited/ @@ -455,6 +466,9 @@ jobs: - name: Build wheels run: | cd bindings/python && for PYBIN in /opt/python/${{ matrix.python-set }}/bin; do "${PYBIN}/pip" wheel . -w ./wheelhouse/ --verbose; done + - name: Print out vcpkg logs if building port fails + if: failure() # Only run this if the build step fails + run: cd bindings/python && bash ./ci/show_vcpkg_logs.sh - name: Audit wheels run: cd bindings/python && for whl in wheelhouse/*.whl; do auditwheel repair "$whl" --plat $PLAT -w wheelhouse/audited/; done @@ -493,6 +507,7 @@ jobs: DEPTHAI_BUILD_BASALT: ON DEPTHAI_BUILD_PCL: ON DEPTHAI_BUILD_RTABMAP: ON + DEPTHAI_BUILD_KOMPUTE: ON VCPKG_MAX_CONCURRENCY: "2" steps: - name: Export GitHub Actions cache environment variables @@ -505,12 +520,7 @@ jobs: with: submodules: 'recursive' - name: Installing libusb1-devel dependency - run: yum install -y libusb1-devel perl-core curl zip unzip tar zlib-devel curl-devel xcb-util-renderutil-devel xcb-util-devel xcb-util-image-devel xcb-util-keysyms-devel xcb-util-wm-devel mesa-libGL-devel libxkbcommon-devel libxkbcommon-x11-devel libXi-devel libXrandr-devel libXtst-devel libudev-devel lapack-devel nasm libtool autoconf automake - - name: Installing cmake dependency - run: | - cd bindings/python - /opt/python/cp38-cp38/bin/python3.8 -m pip install cmake - ln -s /opt/python/cp38-cp38/bin/cmake /bin/ + run: yum install -y libusb1-devel perl-core curl zip unzip tar zlib-devel curl-devel libxcb-devel xcb-util-renderutil-devel xcb-util-devel xcb-util-image-devel xcb-util-keysyms-devel xcb-util-wm-devel mesa-libGL-devel libxkbcommon-devel libxkbcommon-x11-devel libXi-devel libXrandr-devel libXtst-devel libudev-devel lapack-devel nasm libtool autoconf automake libX11-devel pkgconfig - name: Setup ninja required for arm64 builds run: | git clone https://github.com/ninja-build/ninja.git @@ -534,6 +544,10 @@ jobs: - name: Building wheels run: | cd bindings/python && for PYBIN in /opt/python/${{ matrix.python-set }}/bin; do "${PYBIN}/pip" wheel . 
-w ./wheelhouse/ --verbose; done + - name: Print out vcpkg logs if building port fails + if: failure() # Only run this if the build step fails + run: cd bindings/python && bash ./ci/show_vcpkg_logs.sh + - name: Auditing wheels run: cd bindings/python && for whl in wheelhouse/*.whl; do auditwheel repair "$whl" --plat $PLAT -w wheelhouse/audited/; done - name: Archive wheel artifacts diff --git a/.github/workflows/test.workflow.yml b/.github/workflows/test.workflow.yml index e48512e4c..12ff519c6 100644 --- a/.github/workflows/test.workflow.yml +++ b/.github/workflows/test.workflow.yml @@ -80,7 +80,7 @@ jobs: export DISPLAY=:99 xdpyinfo -display $DISPLAY >/dev/null 2>&1 || (Xvfb $DISPLAY &) source venv/bin/activate # Activate virtual environment - python3 -m pip install jinja2 + python3 -m pip install jinja2 rerun-sdk open3d-cpu cmake -S . -B build -D CMAKE_BUILD_TYPE=Release -D HUNTER_ROOT=$HOME/.hun2_${{ matrix.flavor }} -D DEPTHAI_BUILD_EXAMPLES=ON -D DEPTHAI_BUILD_TESTS=ON -D DEPTHAI_TEST_EXAMPLES=ON -D DEPTHAI_BUILD_PYTHON=ON -D DEPTHAI_PYTHON_TEST_EXAMPLES=ON -D DEPTHAI_PYTHON_ENABLE_EXAMPLES=ON cmake --build build --parallel 4 --config Release cd tests diff --git a/CMakeLists.txt b/CMakeLists.txt index ed2dd190d..6ec3c57b9 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -1,4 +1,4 @@ -cmake_minimum_required(VERSION 3.16) +cmake_minimum_required(VERSION 3.20) # Early options option(DEPTHAI_ENABLE_LIBUSB "Enable usage of libusb and interaction with USB devices" ON) # Build AprilTag node code (note, that even if set to OFF, an AprilTagNode can be used with setRunOnHost(false)) @@ -9,6 +9,7 @@ option(DEPTHAI_ENABLE_PROTOBUF "Enable Protobuf support" ON) option(DEPTHAI_BUILD_PYTHON "Build python bindings" OFF) option(DEPTHAI_BUILD_TESTS "Build tests" OFF) option(DEPTHAI_OPENCV_SUPPORT "Enable optional OpenCV support" ON) +OPTION(DEPTHAI_ENABLE_KOMPUTE "Enable Kompute support" OFF) option(DEPTHAI_PCL_SUPPORT "Enable optional PCL support" OFF) option(DEPTHAI_BOOTSTRAP_VCPKG "Automatically bootstrap VCPKG" ON) option(DEPTHAI_MERGED_TARGET "Enable merged target build" ON) @@ -51,6 +52,9 @@ endif() if(DEPTHAI_PCL_SUPPORT) list(APPEND VCPKG_MANIFEST_FEATURES "pcl-support") endif() +if(DEPTHAI_ENABLE_KOMPUTE) +list(APPEND VCPKG_MANIFEST_FEATURES "kompute-support") +endif() # Enable backward stack printing on crash if(ANDROID OR EMSCRIPTEN) @@ -275,6 +279,7 @@ include(depthaiDependencies) # Add threads preference set(THREADS_PREFER_PTHREAD_FLAG ON) + # TODO Remove shared naming set(DEPTHAI_SHARED_3RDPARTY_INCLUDE ${CMAKE_CURRENT_LIST_DIR}/include/3rdparty/ @@ -282,6 +287,12 @@ set(DEPTHAI_SHARED_3RDPARTY_INCLUDE # Add depthai-bootloader-shared include(${CMAKE_CURRENT_LIST_DIR}/shared/depthai-bootloader-shared.cmake) +if(DEPTHAI_ENABLE_KOMPUTE) + add_subdirectory(shaders) + + list(APPEND targets_to_export shaders) +endif() + # Add flags helpers include(Flags) @@ -388,6 +399,7 @@ set(TARGET_CORE_SOURCES src/pipeline/node/host/XLinkInHost.cpp src/pipeline/node/host/XLinkOutHost.cpp src/pipeline/node/host/HostNode.cpp + src/pipeline/node/host/RGBD.cpp src/pipeline/datatype/DatatypeEnum.cpp src/pipeline/node/PointCloud.cpp src/pipeline/datatype/Buffer.cpp @@ -418,6 +430,7 @@ set(TARGET_CORE_SOURCES src/pipeline/datatype/BenchmarkReport.cpp src/pipeline/datatype/PointCloudConfig.cpp src/pipeline/datatype/PointCloudData.cpp + src/pipeline/datatype/RGBDData.cpp src/pipeline/datatype/MessageGroup.cpp src/pipeline/datatype/TransformData.cpp src/utility/H26xParsers.cpp @@ -502,6 +515,10 @@ 
set_target_properties(${TARGET_CORE_NAME} PROPERTIES EXPORT_NAME ${TARGET_CORE_A # Add to list of targets to export and install list(APPEND targets_to_export ${TARGET_CORE_NAME}) +if(DEPTHAI_ENABLE_KOMPUTE) + target_link_libraries(${TARGET_CORE_NAME} PRIVATE shaders kompute::kompute) + target_compile_definitions(${TARGET_CORE_NAME} PRIVATE DEPTHAI_ENABLE_KOMPUTE) +endif() # Add model_zoo helper binary add_executable(zoo_helper src/modelzoo/zoo_helper.cpp) target_compile_definitions(zoo_helper PRIVATE DEPTHAI_TARGET_CORE) diff --git a/bindings/python/CMakeLists.txt b/bindings/python/CMakeLists.txt index e11a964d4..f34663ba2 100644 --- a/bindings/python/CMakeLists.txt +++ b/bindings/python/CMakeLists.txt @@ -108,6 +108,7 @@ set(SOURCE_LIST src/pipeline/node/RecordBindings.cpp src/pipeline/node/ReplayBindings.cpp src/pipeline/node/ImageAlignBindings.cpp + src/pipeline/node/RGBDBindings.cpp src/pipeline/datatype/ADatatypeBindings.cpp src/pipeline/datatype/AprilTagConfigBindings.cpp diff --git a/bindings/python/ci/show_vcpkg_logs.sh b/bindings/python/ci/show_vcpkg_logs.sh new file mode 100644 index 000000000..b6c7630e7 --- /dev/null +++ b/bindings/python/ci/show_vcpkg_logs.sh @@ -0,0 +1,51 @@ +echo "Searching vcpkg-manifest-install.log for failing port logs..." +LOGFILE=$(find . -name "vcpkg-manifest-install.log" | head -n 1) +if [ -z "$LOGFILE" ]; then + echo "No vcpkg-manifest-install.log found!" + exit 1 +else + echo "Found log file: $LOGFILE" +fi + +# 1) Grab lines following "See logs for more information:" +# 2) From those lines, extract only the file paths ending in ".log" +# 3) Remove any leading spaces +grep -A50 "See logs for more information:" $LOGFILE \ + | grep "\.log" \ + | sed 's/^[[:space:]]*//' \ + > failed_logs.txt + +# Check if we found any logs +# If not, print a message and exit +# Otherwise, print the log paths +if [ ! -s failed_logs.txt ]; then + echo "No failed logs found!" + exit 1 +else + echo "Found the following failed logs:" + cat failed_logs.txt +fi + +# Now read each log path we found, and print it +while IFS= read -r log; do + echo "==== Showing log: $log ====" + cat "$log" || true +echo + +# Also show CMakeOutput.log (and/or CMakeError.log) within the same port’s directory +# Often it’s in the same or neighboring folder (e.g. arm64-linux-rel/CMakeFiles/CMakeOutput.log). +# We'll "walk up" one directory and search for CMakeOutput.log in CMakeFiles/. +port_dir="$(dirname "$log")" +# In some cases (e.g. 
config-arm64-linux-out.log), you may need to go up another level: +# port_dir="$(dirname "$port_dir")" + +# We'll now look for any CMakeOutput.log within this port’s subdirectories +found_outputs=$(find "$port_dir" -name "CMakeOutput.log" -o -name "CMakeError.log" -print 2>/dev/null) +if [ -n "$found_outputs" ]; then + for cof in $found_outputs; do + echo "==== Showing $cof ====" + cat "$cof" || true + echo + done +fi +done < failed_logs.txt diff --git a/bindings/python/setup.py b/bindings/python/setup.py index 2d3b76d3f..598e3955f 100644 --- a/bindings/python/setup.py +++ b/bindings/python/setup.py @@ -125,6 +125,8 @@ def build_extension(self, ext): cmake_args += ['-DDEPTHAI_PCL_SUPPORT=ON'] if env.get('DEPTHAI_BUILD_RTABMAP') == 'ON': cmake_args += ['-DDEPTHAI_RTABMAP_SUPPORT=ON'] + if env.get('DEPTHAI_BUILD_KOMPUTE') == 'ON': + cmake_args += ['-DDEPTHAI_KOMPUTE_SUPPORT=ON'] build_args += ['--target=depthai'] # Specify output directory and python executable diff --git a/bindings/python/src/pipeline/datatype/RGBDDataBindings.cpp b/bindings/python/src/pipeline/datatype/RGBDDataBindings.cpp new file mode 100644 index 000000000..6ba54ca88 --- /dev/null +++ b/bindings/python/src/pipeline/datatype/RGBDDataBindings.cpp @@ -0,0 +1,39 @@ + +#include "DatatypeBindings.hpp" +#include "pipeline/CommonBindings.hpp" +#include +#include + +// depthai +#include "depthai/pipeline/datatype/RGBDData.hpp" + +//pybind +#include +#include + +void bind_transformdata(pybind11::module& m, void* pCallstack){ + + using namespace dai; + + py::class_, Buffer, std::shared_ptr> rgbdData(m, "RGBDData", DOC(dai, RGBDData)); + + /////////////////////////////////////////////////////////////////////// + /////////////////////////////////////////////////////////////////////// + /////////////////////////////////////////////////////////////////////// + // Call the rest of the type defines, then perform the actual bindings + Callstack* callstack = (Callstack*) pCallstack; + auto cb = callstack->top(); + callstack->pop(); + cb(m, pCallstack); + // Actual bindings + /////////////////////////////////////////////////////////////////////// + /////////////////////////////////////////////////////////////////////// + /////////////////////////////////////////////////////////////////////// + + // Metadata / raw + rgbdData + .def(py::init<>()) + .def("__repr__", &RGBDData::str) + .def_readwrite("rgbFrame", &RGBDData::rgbFrame, DOC(dai, RGBDData, rgbFrame)) + .def_readwrite("depthFrame", &RGBDData::depthFrame, DOC(dai, RGBDData, depthFrame)); +} diff --git a/bindings/python/src/pipeline/node/NodeBindings.cpp b/bindings/python/src/pipeline/node/NodeBindings.cpp index 0e6cca987..1743bffb8 100644 --- a/bindings/python/src/pipeline/node/NodeBindings.cpp +++ b/bindings/python/src/pipeline/node/NodeBindings.cpp @@ -161,6 +161,7 @@ void bind_hostnode(pybind11::module& m, void* pCallstack); void bind_record(pybind11::module& m, void* pCallstack); void bind_replay(pybind11::module& m, void* pCallstack); void bind_imagealign(pybind11::module& m, void* pCallstack); +void bind_rgbd(pybind11::module& m, void* pCallstack); #ifdef DEPTHAI_HAVE_BASALT_SUPPORT void bind_basaltnode(pybind11::module& m, void* pCallstack); #endif @@ -208,6 +209,7 @@ void NodeBindings::addToCallstack(std::deque& callstack) { callstack.push_front(bind_record); callstack.push_front(bind_replay); callstack.push_front(bind_imagealign); + callstack.push_front(bind_rgbd); #ifdef DEPTHAI_HAVE_BASALT_SUPPORT callstack.push_front(bind_basaltnode); #endif diff --git 
a/bindings/python/src/pipeline/node/RGBDBindings.cpp b/bindings/python/src/pipeline/node/RGBDBindings.cpp new file mode 100644 index 000000000..08d5b57f9 --- /dev/null +++ b/bindings/python/src/pipeline/node/RGBDBindings.cpp @@ -0,0 +1,49 @@ + +#include + +#include "Common.hpp" +#include "NodeBindings.hpp" +#include "depthai/pipeline/ThreadedHostNode.hpp" +#include "depthai/pipeline/node/host/RGBD.hpp" + +extern py::handle daiNodeModule; + +void bind_rgbd(pybind11::module& m, void* pCallstack) { + using namespace dai; + using namespace dai::node; + + // declare upfront + auto rgbdNode = ADD_NODE_DERIVED(RGBD, ThreadedHostNode); + + /////////////////////////////////////////////////////////////////////// + /////////////////////////////////////////////////////////////////////// + /////////////////////////////////////////////////////////////////////// + // Call the rest of the type defines, then perform the actual bindings + Callstack* callstack = (Callstack*)pCallstack; + auto cb = callstack->top(); + callstack->pop(); + cb(m, pCallstack); + // Actual bindings + /////////////////////////////////////////////////////////////////////// + /////////////////////////////////////////////////////////////////////// + /////////////////////////////////////////////////////////////////////// + + // RGBD Node + rgbdNode + .def_property_readonly( + "inColor", [](RGBD& node) { return &node.inColor; }, py::return_value_policy::reference_internal) + .def_property_readonly( + "inDepth", [](RGBD& node) { return &node.inDepth; }, py::return_value_policy::reference_internal) + .def_readonly("pcl", &RGBD::pcl, DOC(dai, node, RGBD, pcl)) + .def("build", static_cast (RGBD::*)()>(&RGBD::build)) + .def("build", + static_cast (RGBD::*)(bool, std::pair)>(&RGBD::build), + py::arg("autocreate"), + py::arg("size"), + DOC(dai, node, RGBD, build, 2)) + .def("setDepthUnits", &RGBD::setDepthUnit, py::arg("units"), DOC(dai, node, RGBD, setDepthUnit)) + .def("useCPU", &RGBD::useCPU, DOC(dai, node, RGBD, useCPU)) + .def("useCPUMT", &RGBD::useCPUMT, py::arg("numThreads") = 2, DOC(dai, node, RGBD, useCPUMT)) + .def("useGPU", &RGBD::useGPU, py::arg("device") = 0, DOC(dai, node, RGBD, useGPU)) + .def("printDevices", &RGBD::printDevices, DOC(dai, node, RGBD, printDevices)); +} diff --git a/cmake/depthaiDependencies.cmake b/cmake/depthaiDependencies.cmake index 5be2eeecc..96d103f05 100644 --- a/cmake/depthaiDependencies.cmake +++ b/cmake/depthaiDependencies.cmake @@ -42,6 +42,10 @@ if(NOT CONFIG_MODE OR (CONFIG_MODE AND NOT DEPTHAI_SHARED_LIBS)) find_path(FP16_INCLUDE_DIR NAMES fp16.h) find_package(PNG REQUIRED) + + if(DEPTHAI_ENABLE_KOMPUTE) + find_package(kompute ${_QUIET} CONFIG REQUIRED) + endif() # libarchive for firmware packages find_package(LibArchive ${_QUIET} REQUIRED) find_package(liblzma ${_QUIET} CONFIG REQUIRED) diff --git a/cmake/ports/basalt/portfile.cmake b/cmake/ports/basalt/portfile.cmake index 2dba7e7fb..fdc313f26 100644 --- a/cmake/ports/basalt/portfile.cmake +++ b/cmake/ports/basalt/portfile.cmake @@ -2,11 +2,9 @@ vcpkg_from_github( OUT_SOURCE_PATH SOURCE_PATH REPO luxonis/basalt - REF ac3cd0fbef91e889db27a1a779a5105ba715fef7 - SHA512 1e7694168f92af5f48f462a793fff528c46a4bd01d3417daf2031b651acba70ee9d61d38a33a1be217cc6e97baa67425e1a8fcc4e57a46033954757112326206 - HEAD_REF deps_test - PATCHES - win.patch + REF bf3d2c33685b4315fa2f1407619c301c3135e891 + SHA512 346104eee47e08afd82d134b89c44163dee4528ed98875afd8bd6bd153d28f185145c5f938d0ff52423edca1434391ae6478ab3646f014c7598075babeadc054 + HEAD_REF depthai ) 
vcpkg_cmake_configure( SOURCE_PATH "${SOURCE_PATH}" diff --git a/cmake/ports/basalt/win.patch b/cmake/ports/basalt/win.patch deleted file mode 100644 index 2628522ae..000000000 --- a/cmake/ports/basalt/win.patch +++ /dev/null @@ -1,262 +0,0 @@ -diff --git a/CMakeLists.txt b/CMakeLists.txt -index 4094b79..99df803 100644 ---- a/CMakeLists.txt -+++ b/CMakeLists.txt -@@ -78,8 +78,9 @@ set(CMAKE_EXPORT_COMPILE_COMMANDS ON) - - - # Flags used for CHECK_CXX_SOURCE_COMPILES -+if(NOT WIN32) - set(CMAKE_REQUIRED_FLAGS "-Wno-error") -- -+endif() - - # save flags passed by user - set(BASALT_PASSED_CXX_FLAGS "${CMAKE_CXX_FLAGS}") -@@ -103,7 +104,9 @@ endif() - - # base set of compile flags - set (BASALT_CXX_FLAGS) -- -+if(WIN32) -+ set(BASALT_CXX_FLAGS "/bigobj") -+endif() - # For avoiding libfmt >9 issues, see https://github.com/strasdat/Sophus/issues/366 #issuecomment-1229178088 - add_definitions(-DFMT_DEPRECATED_OSTREAM=1) - -@@ -269,7 +272,7 @@ set(CMAKE_CXX_FLAGS "${BASALT_CXX_FLAGS}${BASALT_PASSED_CXX_FLAGS}") - find_package(OpenCV REQUIRED COMPONENTS core imgproc calib3d highgui) - message(STATUS "Found OpenCV ${OpenCV_VERSION} headers in: ${OpenCV_INCLUDE_DIRS}") - message(STATUS "Found OpenCV_LIBS: ${OpenCV_LIBS}") -- -+set(BASALT_SDK_ONLY ON) - - - if(BASALT_SDK_ONLY) -@@ -290,6 +293,7 @@ if(BASALT_SDK_ONLY) - ${PROJECT_BINARY_DIR}/include/basalt/utils/build_config.h - ) - include_directories(include) -+ - add_library(basalt_sdk - src/io/marg_data_io.cpp - src/linearization/landmark_block.cpp -@@ -314,6 +318,9 @@ if(BASALT_SDK_ONLY) - set_property(TARGET basalt_sdk PROPERTY CXX_STANDARD 17) - set_property(TARGET basalt_sdk PROPERTY CXX_STANDARD_REQUIRED ON) - set_property(TARGET basalt_sdk PROPERTY CXX_EXTENSIONS OFF) -+ include(GenerateExportHeader) -+ generate_export_header(basalt_sdk -+ EXPORT_FILE_NAME ${CMAKE_CURRENT_BINARY_DIR}/include/basalt/exports/basalt_sdkExport.h) - # target_compile_definitions(basalt_sdk PUBLIC BASALT_SDK_ONLY) - target_include_directories(basalt_sdk PRIVATE - "$" -@@ -321,7 +328,7 @@ if(BASALT_SDK_ONLY) - ) - target_include_directories(basalt_sdk PRIVATE ${PROJECT_BINARY_DIR}/include) - -- target_link_libraries(basalt_sdk PUBLIC ${OpenCV_LIBS} Threads::Threads TBB::tbb Eigen3::Eigen Sophus::Sophus magic_enum::magic_enum fmt::fmt) -+ target_link_libraries(basalt_sdk PRIVATE ${OpenCV_LIBS} Threads::Threads TBB::tbb Eigen3::Eigen Sophus::Sophus magic_enum::magic_enum fmt::fmt) - - target_compile_definitions(basalt_sdk PUBLIC ${BASALT_COMPILE_DEFINITIONS}) - -@@ -341,6 +348,12 @@ if(BASALT_SDK_ONLY) - DESTINATION include/basalt/utils - ) - -+ # Install generated export header -+ install( -+ FILES ${CMAKE_CURRENT_BINARY_DIR}/include/basalt/exports/basalt_sdkExport.h -+ DESTINATION include/basalt/exports -+ ) -+ - # install headers - install(DIRECTORY include/basalt - DESTINATION include -@@ -360,6 +373,7 @@ if(BASALT_SDK_ONLY) - ) - - -+ - else() - # Add our own custom scoped opencv target since none is provided by OpenCV itself - add_library(basalt::opencv INTERFACE IMPORTED) -diff --git a/CMakePresets.json b/CMakePresets.json -index 70f8f23..0c82642 100644 ---- a/CMakePresets.json -+++ b/CMakePresets.json -@@ -1,6 +1,6 @@ - - { -- "version": 2, -+ "version": 3, - "configurePresets": [ - { - "name": "default", -diff --git a/cmake/ports/basalt-headers/portfile.cmake b/cmake/ports/basalt-headers/portfile.cmake -index 2b38d49..fe98885 100644 ---- a/cmake/ports/basalt-headers/portfile.cmake -+++ b/cmake/ports/basalt-headers/portfile.cmake -@@ -1,10 +1,8 @@ -- -- - 
vcpkg_from_github( - OUT_SOURCE_PATH SOURCE_PATH - REPO luxonis/basalt-headers -- REF 9172c4e6d8c8533d5ebae8289bba2a299f30eb50 -- SHA512 39b4b88b147f4d8fb9b2423559c8f07b6e7e43a5136ff461058ab2fe33edfbd1790e6e8684abc2cbe647457752f4ff835c4ed0ed4a370141e345d1e529af2369 -+ REF 5392967dc6825838a52fa6d6ed38188a55a6acf7 -+ SHA512 7b22090aacf609e9b8417dd852607d09f0d0db144980852ec1cd73ae2e5fa0f095ef450011a8d7bf6b630f9af8a8ca5870d590de25b8c0783d843344aa48866b - HEAD_REF vcpkg_deps - ) - vcpkg_cmake_configure( -@@ -14,3 +12,7 @@ vcpkg_cmake_configure( - vcpkg_cmake_install() - - vcpkg_cmake_config_fixup(CONFIG_PATH "lib/cmake/basalt-headers") -+file(REMOVE_RECURSE "${CURRENT_PACKAGES_DIR}/debug/include") -+file(REMOVE_RECURSE "${CURRENT_PACKAGES_DIR}/debug/lib" "${CURRENT_PACKAGES_DIR}/lib") -+file(REMOVE_RECURSE "${CURRENT_PACKAGES_DIR}/debug") -+vcpkg_install_copyright(FILE_LIST "${SOURCE_PATH}/LICENSE") -\ No newline at end of file -diff --git a/cmake/ports/basalt-headers/vcpkg.json b/cmake/ports/basalt-headers/vcpkg.json -index 35912be..3a11ef7 100644 ---- a/cmake/ports/basalt-headers/vcpkg.json -+++ b/cmake/ports/basalt-headers/vcpkg.json -@@ -1,17 +1,19 @@ -- -- - { - "name": "basalt-headers", - "version-string": "0.1.0", - "description": "Reusable components of Basalt project as header-only library", - "dependencies": [ -- { -+ { - "name": "vcpkg-cmake", - "host": true - }, - { - "name": "vcpkg-cmake-config", - "host": true -- } --] -+ }, -+ "eigen3", -+ "sophus", -+ "cereal", -+ "fmt" -+ ] - } -\ No newline at end of file -diff --git a/include/basalt/optical_flow/optical_flow.h b/include/basalt/optical_flow/optical_flow.h -index b20df49..450ad63 100644 ---- a/include/basalt/optical_flow/optical_flow.h -+++ b/include/basalt/optical_flow/optical_flow.h -@@ -59,6 +59,7 @@ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - #include - #endif - #include -+#include "basalt/exports/basalt_sdkExport.h" - - namespace basalt { - -@@ -249,6 +250,6 @@ class OpticalFlowTyped : public OpticalFlowBase { - - class OpticalFlowFactory { - public: -- static OpticalFlowBase::Ptr getOpticalFlow(const VioConfig& config, const Calibration& cam); -+ BASALT_SDK_EXPORT static OpticalFlowBase::Ptr getOpticalFlow(const VioConfig& config, const Calibration& cam); - }; - } // namespace basalt -diff --git a/include/basalt/utils/vio_config.h b/include/basalt/utils/vio_config.h -index 4d38532..ee96efa 100644 ---- a/include/basalt/utils/vio_config.h -+++ b/include/basalt/utils/vio_config.h -@@ -36,7 +36,7 @@ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - - #include - #include -- -+#include "basalt/exports/basalt_sdkExport.h" - namespace basalt { - - enum class LinearizationType { ABS_QR, ABS_SC, REL_SC }; -@@ -44,8 +44,8 @@ enum class MatchingGuessType { SAME_PIXEL, REPROJ_FIX_DEPTH, REPROJ_AVG_DEPTH }; - enum class KeyframeMargCriteria { KF_MARG_DEFAULT, KF_MARG_FORWARD_VECTOR }; - - struct VioConfig { -- VioConfig(); -- void load(const std::string& filename); -+ BASALT_SDK_EXPORT VioConfig(); -+ BASALT_SDK_EXPORT void load(const std::string& filename); - void save(const std::string& filename); - - std::string optical_flow_type; -diff --git a/include/basalt/vi_estimator/vio_estimator.h b/include/basalt/vi_estimator/vio_estimator.h -index 48222eb..d95ed19 100644 ---- a/include/basalt/vi_estimator/vio_estimator.h -+++ b/include/basalt/vi_estimator/vio_estimator.h -@@ -40,6 +40,8 @@ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
- #include - #include - #include -+#include "basalt/exports/basalt_sdkExport.h" -+ - - namespace basalt { - -@@ -157,7 +159,7 @@ class VioEstimatorBase { - - class VioEstimatorFactory { - public: -- static VioEstimatorBase::Ptr getVioEstimator(const VioConfig& config, const Calibration& cam, -+ BASALT_SDK_EXPORT static VioEstimatorBase::Ptr getVioEstimator(const VioConfig& config, const Calibration& cam, - const Eigen::Vector3d& g, bool use_imu, bool use_double); - }; - -diff --git a/thirdparty/CLI11 b/thirdparty/CLI11 -index 4af78be..c2ea58c 160000 ---- a/thirdparty/CLI11 -+++ b/thirdparty/CLI11 -@@ -1 +1 @@ --Subproject commit 4af78beef777e313814b4daff70e2da9171a385a -+Subproject commit c2ea58c7f9bb2a1da2d3d7f5b462121ac6a07f16 -diff --git a/thirdparty/magic_enum b/thirdparty/magic_enum -index 3d1f6a5..43a9272 160000 ---- a/thirdparty/magic_enum -+++ b/thirdparty/magic_enum -@@ -1 +1 @@ --Subproject commit 3d1f6a5a2a3fbcba077e00ad0ccc2dd9fefc2ca7 -+Subproject commit 43a9272f450fd07e85868ed277ebd793e40806df -diff --git a/vcpkg-configuration.json b/vcpkg-configuration.json -index 66747fb..2a03369 100644 ---- a/vcpkg-configuration.json -+++ b/vcpkg-configuration.json -@@ -2,7 +2,7 @@ - { - "default-registry": { - "kind": "git", -- "baseline": "509f71e53f45e46c13fa7935d2f6a45803580c07", -+ "baseline": "f56b56e01683ba9fde12b64de246ec0b3eb69b18", - "repository": "https://github.com/microsoft/vcpkg" - }, - "registries": [ -diff --git a/vcpkg.json b/vcpkg.json -index 64107c7..6889ff7 100644 ---- a/vcpkg.json -+++ b/vcpkg.json -@@ -9,7 +9,11 @@ - "cereal", - "fmt", - "tbb", -- "nlohmann-json" -+ "nlohmann-json", -+ { -+ "name": "opencv4", -+ "default-features": false -+ } - ], - "builtin-baseline": "f5ec6f30ff70f04f841436a0f36600bdbabfcfbf" - } - diff --git a/cmake/ports/kompute/portfile.cmake b/cmake/ports/kompute/portfile.cmake new file mode 100644 index 000000000..4d5a198eb --- /dev/null +++ b/cmake/ports/kompute/portfile.cmake @@ -0,0 +1,27 @@ +vcpkg_from_github( + OUT_SOURCE_PATH SOURCE_PATH + REPO KomputeProject/kompute + REF 48c127d37750ed7033bf0f74972adad0ce099716 + SHA512 d46984e2f49b6b0c3eb313c03a935e2a1e3d480e40295bb2016f82ea1a23b484323f5bb534bffc309e1b09d219a7da8cf6dcea6e0b5ab8ee9f7aa65b6440b87d + HEAD_REF master +) + +vcpkg_cmake_configure( + SOURCE_PATH ${SOURCE_PATH} + OPTIONS + -DKOMPUTE_OPT_USE_BUILT_IN_VULKAN_HEADER=OFF + -DKOMPUTE_OPT_USE_BUILT_IN_FMT=OFF + -DKOMPUTE_OPT_DISABLE_VULKAN_VERSION_CHECK=ON # Tmp fix for mac + -DKOMPUTE_OPT_INSTALL=ON + -DKOMPUTE_OPT_LOG_LEVEL=Warn +) + +vcpkg_cmake_install() + +vcpkg_cmake_config_fixup(PACKAGE_NAME "kompute" CONFIG_PATH "lib/cmake/kompute") + +file(REMOVE_RECURSE "${CURRENT_PACKAGES_DIR}/debug/include") + +file(INSTALL "${SOURCE_PATH}/LICENSE" DESTINATION "${CURRENT_PACKAGES_DIR}/share/${PORT}" RENAME copyright) + +configure_file("${CMAKE_CURRENT_LIST_DIR}/usage" "${CURRENT_PACKAGES_DIR}/share/${PORT}/usage" COPYONLY) diff --git a/cmake/ports/kompute/usage b/cmake/ports/kompute/usage new file mode 100644 index 000000000..6347e5208 --- /dev/null +++ b/cmake/ports/kompute/usage @@ -0,0 +1,17 @@ +The package kompute provides CMake targets: + + find_package(kompute CONFIG REQUIRED) + + # Compiling shader + # To add more shaders simply copy the vulkan_compile_shader command and replace it with your new shader + vulkan_compile_shader( + INFILE shader/my_shader.comp + OUTFILE shader/my_shader.hpp + NAMESPACE "shader") + + # Then add it to the library, so you can access it later in your code + add_library(shader INTERFACE 
"shader/my_shader.hpp") + target_include_directories(shader INTERFACE $) + + target_link_libraries(main PRIVATE shader kompute::kompute) + diff --git a/cmake/ports/kompute/vcpkg.json b/cmake/ports/kompute/vcpkg.json new file mode 100644 index 000000000..58f91ff5d --- /dev/null +++ b/cmake/ports/kompute/vcpkg.json @@ -0,0 +1,19 @@ +{ + "name": "kompute", + "version": "0.9.0", + "homepage": "https://github.com/KomputeProject/kompute", + "description": "General purpose GPU compute framework built on Vulkan to support 1000s of cross vendor graphics cards (AMD, Qualcomm, NVIDIA & friends).", + "license": "Apache-2.0", + "dependencies": [ + { + "name": "vcpkg-cmake", + "host": true + }, + { + "name": "vcpkg-cmake-config", + "host": true + }, + "vulkan", + "fmt" + ] +} diff --git a/cmake/ports/tbb/overflow-warning.patch b/cmake/ports/tbb/overflow-warning.patch deleted file mode 100644 index 941b85b66..000000000 --- a/cmake/ports/tbb/overflow-warning.patch +++ /dev/null @@ -1,14 +0,0 @@ -diff --git a/cmake/compilers/GNU.cmake b/cmake/compilers/GNU.cmake -index 871b72a..76e1900 100644 ---- a/cmake/compilers/GNU.cmake -+++ b/cmake/compilers/GNU.cmake -@@ -26,7 +26,7 @@ else() - set(TBB_DEF_FILE_PREFIX lin${TBB_ARCH}) - endif() - --set(TBB_WARNING_LEVEL -Wall -Wextra $<$:-Werror> -Wfatal-errors) -+set(TBB_WARNING_LEVEL -Wall -Wextra $<$:-Werror> -Wfatal-errors -Wno-error=stringop-overflow) - set(TBB_TEST_WARNING_FLAGS -Wshadow -Wcast-qual -Woverloaded-virtual -Wnon-virtual-dtor) - - # Depfile options (e.g. -MD) are inserted automatically in some cases. - diff --git a/cmake/ports/tbb/portfile.cmake b/cmake/ports/tbb/portfile.cmake deleted file mode 100644 index e11cdd844..000000000 --- a/cmake/ports/tbb/portfile.cmake +++ /dev/null @@ -1,23 +0,0 @@ - -vcpkg_from_github( - OUT_SOURCE_PATH SOURCE_PATH - REPO oneapi-src/oneTBB - REF v2021.12.0 - SHA512 64022bcb61cf7b2030a1bcc11168445ef9f0d69b70290233a7febb71cc7a12cc2282dddc045f84e30893efe276342f02fd78d176706268eeaefe9aac7446d4e9 - HEAD_REF master - PATCHES - overflow-warning.patch -) -vcpkg_cmake_configure( - SOURCE_PATH "${SOURCE_PATH}" - OPTIONS - -DTBB_TEST=OFF - -DCMAKE_CXX_VISIBILITY_PRESET=hidden - -DCMAKE_C_VISIBILITY_PRESET=hidden - -DCMAKE_CXX_STANDARD=17 - -DCMAKE_C_STANDARD=11 -) - -vcpkg_cmake_install() - - diff --git a/cmake/ports/tbb/vcpkg.json b/cmake/ports/tbb/vcpkg.json deleted file mode 100644 index eec1ba823..000000000 --- a/cmake/ports/tbb/vcpkg.json +++ /dev/null @@ -1,15 +0,0 @@ -{ - "name": "tbb", - "version-string": "2021.12.0", - "description": "oneAPI Threading Building Blocks (oneTBB)", - "dependencies": [ - { - "name": "vcpkg-cmake", - "host": true - }, - { - "name": "vcpkg-cmake-config", - "host": true - } -] -} diff --git a/cmake/triplets/arm64-linux.cmake b/cmake/triplets/arm64-linux.cmake index 1553d02eb..9dbd0733a 100644 --- a/cmake/triplets/arm64-linux.cmake +++ b/cmake/triplets/arm64-linux.cmake @@ -2,10 +2,16 @@ set(VCPKG_TARGET_ARCHITECTURE arm64) set(VCPKG_CRT_LINKAGE dynamic) set(VCPKG_LIBRARY_LINKAGE static) set(VCPKG_BUILD_TYPE release) +set(PORT_DEBUG ON) if(PORT MATCHES "libusb|ffmpeg") set(VCPKG_LIBRARY_LINKAGE dynamic) set(VCPKG_FIXUP_ELF_RPATH ON) endif() +if(PORT MATCHES "vulkan-loader") + # When building in GH Actions environment, vcpkg's pkg-conf can't find XCB/X11 libraries + set(VCPKG_CMAKE_CONFIGURE_OPTIONS "-DPKG_CONFIG_EXECUTABLE=/usr/bin/pkg-config") + +endif() set(VCPKG_CMAKE_SYSTEM_NAME Linux) diff --git a/examples/cpp/CMakeLists.txt b/examples/cpp/CMakeLists.txt index 2bfa0dc8c..812205593 100644 
--- a/examples/cpp/CMakeLists.txt +++ b/examples/cpp/CMakeLists.txt @@ -295,6 +295,13 @@ dai_add_example(image_align RVC2/ImageAlign/image_align.cpp OFF OFF) dai_add_example(visualizer_yolo Visualizer/visualizer_yolo.cpp ON OFF) dai_set_example_test_labels(visualizer_yolo ondevice rvc2_all rvc4) +include(FetchContent) +FetchContent_Declare(rerun_sdk URL https://github.com/rerun-io/rerun/releases/download/0.16.1/rerun_cpp_sdk.zip) +FetchContent_MakeAvailable(rerun_sdk) +dai_add_example(rgbd RGBD/rgbd.cpp ON OFF) +target_link_libraries(rgbd PRIVATE rerun_sdk) + +dai_add_example(visualizer_rgbd RGBD/visualizer_rgbd.cpp ON OFF) if(DEPTHAI_RTABMAP_SUPPORT) include(FetchContent) @@ -313,6 +320,7 @@ if(DEPTHAI_RTABMAP_SUPPORT) endif() + if(DEPTHAI_BASALT_SUPPORT) include(FetchContent) FetchContent_Declare(rerun_sdk URL https://github.com/rerun-io/rerun/releases/download/0.16.1/rerun_cpp_sdk.zip) diff --git a/examples/cpp/RGBD/rgbd.cpp b/examples/cpp/RGBD/rgbd.cpp new file mode 100644 index 000000000..c4fc51d2d --- /dev/null +++ b/examples/cpp/RGBD/rgbd.cpp @@ -0,0 +1,86 @@ +#include "depthai/depthai.hpp" +#include "rerun.hpp" +#include "rerun/archetypes/depth_image.hpp" + +rerun::Collection tensorShape(const cv::Mat& img) { + return {size_t(img.rows), size_t(img.cols), size_t(img.channels())}; +}; +class RerunNode : public dai::NodeCRTP { + public: + constexpr static const char* NAME = "RerunNode"; + + public: + void build() {} + + Input inputPCL{*this, {.name = "inPCL", .types = {{dai::DatatypeEnum::PointCloudData, true}}}}; + Input inputRGBD{*this, {.name = "inRGBD", .types = {{dai::DatatypeEnum::RGBDData, true}}}}; + + void run() override { + const auto rec = rerun::RecordingStream("rerun"); + rec.spawn().exit_on_failure(); + rec.log_static("world", rerun::ViewCoordinates::FLU); + while(isRunning()) { + auto pclIn = inputPCL.get(); + auto rgbdIn = inputRGBD.get(); + if(pclIn != nullptr) { + std::vector points; + std::vector colors; + const auto& size = pclIn->getWidth() * pclIn->getHeight(); + points.reserve(size); + colors.reserve(size); + const auto& pclData = pclIn->getPointsRGB(); + for(size_t i = 0; i < size; ++i) { + points.emplace_back(pclData[i].x, pclData[i].y, pclData[i].z); + colors.emplace_back(pclData[i].r, pclData[i].g, pclData[i].b); + } + rec.log("world/obstacle_pcl", rerun::Points3D(points).with_colors(colors).with_radii({0.01f})); + auto colorFrame = rgbdIn->rgbFrame.getCvFrame(); + cv::cvtColor(colorFrame, colorFrame, cv::COLOR_BGR2RGB); + rec.log("rgb", + rerun::Image(tensorShape(colorFrame), reinterpret_cast(colorFrame.data))); + } + } + } +}; +int main() { + using namespace std; + // Create pipeline + dai::Pipeline pipeline; + // Define sources and outputs + auto left = pipeline.create(); + auto right = pipeline.create(); + auto stereo = pipeline.create(); + auto rgbd = pipeline.create()->build(); + auto color = pipeline.create(); + auto rerun = pipeline.create(); + color->build(); + + left->build(dai::CameraBoardSocket::LEFT); + right->build(dai::CameraBoardSocket::RIGHT); + stereo->setSubpixel(true); + stereo->setExtendedDisparity(false); + stereo->setDefaultProfilePreset(dai::node::StereoDepth::PresetMode::DEFAULT); + stereo->setLeftRightCheck(true); + stereo->setRectifyEdgeFillColor(0); // black, to better see the cutout + stereo->enableDistortionCorrection(true); + stereo->initialConfig.setLeftRightCheckThreshold(10); + stereo->initialConfig.postProcessing.thresholdFilter.maxRange = 10000; + 
rgbd->setDepthUnit(dai::StereoDepthConfig::AlgorithmControl::DepthUnit::METER); + + auto* out = color->requestOutput(std::pair(1280, 720), dai::ImgFrame::Type::RGB888i); + + out->link(stereo->inputAlignTo); + left->requestOutput(std::pair(1280, 729))->link(stereo->left); + right->requestOutput(std::pair(1280, 729))->link(stereo->right); + + stereo->depth.link(rgbd->inDepth); + out->link(rgbd->inColor); + + // Linking + rgbd->pcl.link(rerun->inputPCL); + rgbd->rgbd.link(rerun->inputRGBD); + pipeline.start(); + auto device = pipeline.getDefaultDevice(); + device->setIrLaserDotProjectorIntensity(0.7); + pipeline.wait(); +} diff --git a/examples/cpp/RGBD/visualizer_rgbd.cpp b/examples/cpp/RGBD/visualizer_rgbd.cpp new file mode 100644 index 000000000..0a8e768ee --- /dev/null +++ b/examples/cpp/RGBD/visualizer_rgbd.cpp @@ -0,0 +1,69 @@ +#include "depthai/depthai.hpp" + +#include +#include +#include + +// Signal handling for clean shutdown +static bool isRunning = true; +void signalHandler(int signum) { + isRunning = false; +} + +int main() { + using namespace std; + // Default port values + int webSocketPort = 8765; + int httpPort = 8080; + + // Register signal handler + std::signal(SIGINT, signalHandler); + + // Create RemoteConnection + dai::RemoteConnection remoteConnector(dai::RemoteConnection::DEFAULT_ADDRESS, webSocketPort, true, httpPort); + // Create pipeline + dai::Pipeline pipeline; + auto left = pipeline.create(); + auto right = pipeline.create(); + auto stereo = pipeline.create(); + auto rgbd = pipeline.create()->build(); + auto color = pipeline.create(); + color->build(); + + left->build(dai::CameraBoardSocket::LEFT); + right->build(dai::CameraBoardSocket::RIGHT); + stereo->setSubpixel(true); + stereo->setExtendedDisparity(false); + stereo->setDefaultProfilePreset(dai::node::StereoDepth::PresetMode::DEFAULT); + stereo->setLeftRightCheck(true); + stereo->setRectifyEdgeFillColor(0); // black, to better see the cutout + stereo->enableDistortionCorrection(true); + stereo->initialConfig.setLeftRightCheckThreshold(10); + + auto* out = color->requestOutput(std::pair(1280, 720), dai::ImgFrame::Type::RGB888i); + + out->link(stereo->inputAlignTo); + left->requestOutput(std::pair(1280, 729))->link(stereo->left); + right->requestOutput(std::pair(1280, 729))->link(stereo->right); + + + stereo->depth.link(rgbd->inDepth); + out->link(rgbd->inColor); + + remoteConnector.addTopic("pcl", rgbd->pcl); + pipeline.start(); + remoteConnector.registerPipeline(pipeline); + auto device = pipeline.getDefaultDevice(); + device->setIrLaserDotProjectorIntensity(0.7); + // Main loop + while(isRunning && pipeline.isRunning()) { + int key = remoteConnector.waitKey(1); + if(key == 'q') { + std::cout << "Got 'q' key from the remote connection!" << std::endl; + break; + } + } + + std::cout << "Pipeline stopped." 
<< std::endl; + return 0; +} diff --git a/examples/python/CMakeLists.txt b/examples/python/CMakeLists.txt index 7ac0f52e6..64d8a60eb 100644 --- a/examples/python/CMakeLists.txt +++ b/examples/python/CMakeLists.txt @@ -166,6 +166,17 @@ dai_set_example_test_labels(image_manip_resize ondevice rvc2_all rvc4 ci) add_python_example(reconnect_callback Misc/AutoReconnect/reconnect_callback.py) dai_set_example_test_labels(reconnect_callback ondevice rvc2_all rvc4 ci) + +## RGBD +add_python_example(rgbd RGBD/rgbd.py) +dai_set_example_test_labels(rgbd ondevice rvc2_all rvc4 ci) + +add_python_example(visualizer_rgbd RGBD/visualizer_rgbd.py) +dai_set_example_test_labels(visualizer_rgbd ondevice rvc2_all rvc4 ci) + +add_python_example(rgbd_o3d RGBD/rgbd_o3d.py) +dai_set_example_test_labels(rgbd_o3d ondevice rvc2_all rvc4 ci) + ## NeuralNetwork add_python_example(neural_network NeuralNetwork/neural_network.py) dai_set_example_test_labels(neural_network ondevice rvc2_all rvc4 ci) diff --git a/examples/python/RGBD/rgbd.py b/examples/python/RGBD/rgbd.py new file mode 100644 index 000000000..e8f895c67 --- /dev/null +++ b/examples/python/RGBD/rgbd.py @@ -0,0 +1,62 @@ +import time +import depthai as dai +import sys + +from pathlib import Path +installExamplesStr = Path(__file__).absolute().parents[2] / 'install_requirements.py --install_rerun' +try: + import rerun as rr +except ImportError: + sys.exit("Critical dependency missing: Rerun. Please install it using the command: '{} {}' and then rerun the script.".format(sys.executable, installExamplesStr)) + +import cv2 + +class RerunNode(dai.node.ThreadedHostNode): + def __init__(self): + dai.node.ThreadedHostNode.__init__(self) + self.inputPCL = self.createInput() + + + def run(self): + rr.init("", spawn=True) + rr.log("world", rr.ViewCoordinates.FLU) + rr.log("world/ground", rr.Boxes3D(half_sizes=[3.0, 3.0, 0.00001])) + while self.isRunning(): + inPointCloud = self.inputPCL.get() + if inPointCloud is not None: + points, colors = inPointCloud.getPointsRGB() + rr.log("world/pcl", rr.Points3D(points, colors=colors, radii=[0.01])) + +# Create pipeline + +with dai.Pipeline() as p: + fps = 30 + # Define sources and outputs + left = p.create(dai.node.Camera) + right = p.create(dai.node.Camera) + color = p.create(dai.node.Camera) + stereo = p.create(dai.node.StereoDepth) + rgbd = p.create(dai.node.RGBD).build() + color.build() + rerunViewer = RerunNode() + left.build(dai.CameraBoardSocket.CAM_B) + right.build(dai.CameraBoardSocket.CAM_C) + out = color.requestOutput((1280,720), dai.ImgFrame.Type.RGB888i) + + + out.link(stereo.inputAlignTo) + stereo.setDefaultProfilePreset(dai.node.StereoDepth.PresetMode.DEFAULT) + stereo.setRectifyEdgeFillColor(0) + stereo.enableDistortionCorrection(True) + + # Linking + left.requestOutput((1280, 720)).link(stereo.left) + right.requestOutput((1280, 720)).link(stereo.right) + stereo.depth.link(rgbd.inDepth) + out.link(rgbd.inColor) + + rgbd.pcl.link(rerunViewer.inputPCL) + + p.start() + while p.isRunning(): + time.sleep(1) diff --git a/examples/python/RGBD/rgbd_o3d.py b/examples/python/RGBD/rgbd_o3d.py new file mode 100644 index 000000000..a6a415618 --- /dev/null +++ b/examples/python/RGBD/rgbd_o3d.py @@ -0,0 +1,80 @@ + +import time +import depthai as dai +import sys +import numpy as np + +try: + import open3d as o3d +except ImportError: + sys.exit("Critical dependency missing: Open3D. 
Please install it using the command: '{} -m pip install open3d' and then rerun the script.".format(sys.executable)) + + +class O3DNode(dai.node.ThreadedHostNode): + def __init__(self): + dai.node.ThreadedHostNode.__init__(self) + self.inputPCL = self.createInput() + + + def run(self): + def key_callback(vis, action, mods): + global isRunning + if action == 0: + isRunning = False + pc = o3d.geometry.PointCloud() + vis = o3d.visualization.VisualizerWithKeyCallback() + vis.create_window() + vis.register_key_action_callback(81, key_callback) + pcd = o3d.geometry.PointCloud() + coordinateFrame = o3d.geometry.TriangleMesh.create_coordinate_frame(size=1000, origin=[0,0,0]) + vis.add_geometry(coordinateFrame) + first = True + while self.isRunning(): + inPointCloud = self.inputPCL.tryGet() + if inPointCloud is not None: + points, colors = inPointCloud.getPointsRGB() + pcd.points = o3d.utility.Vector3dVector(points.astype(np.float64)) + colors = (colors.reshape(-1, 3) / 255.0).astype(np.float64) + pcd.colors = o3d.utility.Vector3dVector(colors) + if first: + vis.add_geometry(pcd) + first = False + else: + vis.update_geometry(pcd) + vis.poll_events() + vis.update_renderer() + vis.destroy_window() + +# Create pipeline + +with dai.Pipeline() as p: + fps = 30 + # Define sources and outputs + left = p.create(dai.node.Camera) + right = p.create(dai.node.Camera) + color = p.create(dai.node.Camera) + stereo = p.create(dai.node.StereoDepth) + rgbd = p.create(dai.node.RGBD).build() + color.build() + o3dViewer = O3DNode() + left.build(dai.CameraBoardSocket.CAM_B) + right.build(dai.CameraBoardSocket.CAM_C) + out = color.requestOutput((1280,720), dai.ImgFrame.Type.RGB888i) + + + out.link(stereo.inputAlignTo) + stereo.setDefaultProfilePreset(dai.node.StereoDepth.PresetMode.DEFAULT) + stereo.setRectifyEdgeFillColor(0) + stereo.enableDistortionCorrection(True) + + # Linking + left.requestOutput((1280, 720)).link(stereo.left) + right.requestOutput((1280, 720)).link(stereo.right) + stereo.depth.link(rgbd.inDepth) + out.link(rgbd.inColor) + + rgbd.pcl.link(o3dViewer.inputPCL) + + p.start() + while p.isRunning(): + time.sleep(1) diff --git a/examples/python/RGBD/visualizer_rgbd.py b/examples/python/RGBD/visualizer_rgbd.py new file mode 100644 index 000000000..4627c4f5d --- /dev/null +++ b/examples/python/RGBD/visualizer_rgbd.py @@ -0,0 +1,46 @@ +import time +import depthai as dai + +from argparse import ArgumentParser + +parser = ArgumentParser() +parser.add_argument("--webSocketPort", type=int, default=8765) +parser.add_argument("--httpPort", type=int, default=8080) +args = parser.parse_args() + +with dai.Pipeline() as p: + remoteConnector = dai.RemoteConnection( + webSocketPort=args.webSocketPort, httpPort=args.httpPort + ) + left = p.create(dai.node.Camera) + right = p.create(dai.node.Camera) + color = p.create(dai.node.Camera) + stereo = p.create(dai.node.StereoDepth) + rgbd = p.create(dai.node.RGBD).build() + + color.build() + left.build(dai.CameraBoardSocket.CAM_B) + right.build(dai.CameraBoardSocket.CAM_C) + out = color.requestOutput((1280, 720), dai.ImgFrame.Type.RGB888i) + + + out.link(stereo.inputAlignTo) + stereo.setDefaultProfilePreset(dai.node.StereoDepth.PresetMode.DEFAULT) + stereo.setRectifyEdgeFillColor(0) + stereo.enableDistortionCorrection(True) + + # Linking + left.requestOutput((1280, 720)).link(stereo.left) + right.requestOutput((1280, 720)).link(stereo.right) + stereo.depth.link(rgbd.inDepth) + out.link(rgbd.inColor) + remoteConnector.addTopic("pcl", rgbd.pcl, "common") + + p.start() + 
remoteConnector.registerPipeline(p) + + while p.isRunning(): + key = remoteConnector.waitKey(1) + if key == ord("q"): + print("Got q key from the remote connection!") + break diff --git a/include/depthai/basalt/BasaltVIO.hpp b/include/depthai/basalt/BasaltVIO.hpp index cbc49f01b..a01cb36d3 100644 --- a/include/depthai/basalt/BasaltVIO.hpp +++ b/include/depthai/basalt/BasaltVIO.hpp @@ -1,7 +1,7 @@ #pragma once #define SOPHUS_USE_BASIC_LOGGING -#include +#include "depthai/pipeline/Subnode.hpp" #include "basalt/calibration/calibration.hpp" #include "basalt/serialization/headers_serialization.h" diff --git a/include/depthai/common/Point3fRGB.hpp b/include/depthai/common/Point3fRGB.hpp index c37086746..cbcb69c7f 100644 --- a/include/depthai/common/Point3fRGB.hpp +++ b/include/depthai/common/Point3fRGB.hpp @@ -1,5 +1,6 @@ #pragma once - +#pragma pack(push) +#pragma pack(1) // std #include @@ -22,4 +23,6 @@ struct Point3fRGB { DEPTHAI_SERIALIZE_EXT(Point3fRGB, x, y, z, r, g, b); -} // namespace dai \ No newline at end of file +} // namespace dai +#pragma pack(pop) + diff --git a/include/depthai/pipeline/datatype/DatatypeEnum.hpp b/include/depthai/pipeline/datatype/DatatypeEnum.hpp index 9684e321d..a37066ccf 100644 --- a/include/depthai/pipeline/datatype/DatatypeEnum.hpp +++ b/include/depthai/pipeline/datatype/DatatypeEnum.hpp @@ -34,6 +34,7 @@ enum class DatatypeEnum : std::int32_t { TransformData, PointCloudConfig, PointCloudData, + RGBDData, ImageAlignConfig, ImgAnnotations }; diff --git a/include/depthai/pipeline/datatype/PointCloudData.hpp b/include/depthai/pipeline/datatype/PointCloudData.hpp index de06cba01..6b5e52cb1 100644 --- a/include/depthai/pipeline/datatype/PointCloudData.hpp +++ b/include/depthai/pipeline/datatype/PointCloudData.hpp @@ -41,6 +41,8 @@ class PointCloudData : public Buffer, public ProtoSerializable { std::vector getPoints(); std::vector getPointsRGB(); + void setPoints(const std::vector& points); + void setPointsRGB(const std::vector& points); /** * Retrieves instance number diff --git a/include/depthai/pipeline/datatype/RGBDData.hpp b/include/depthai/pipeline/datatype/RGBDData.hpp new file mode 100644 index 000000000..8c7a793ce --- /dev/null +++ b/include/depthai/pipeline/datatype/RGBDData.hpp @@ -0,0 +1,30 @@ +#pragma once + +#include "depthai/pipeline/datatype/Buffer.hpp" +#include "depthai/pipeline/datatype/ImgFrame.hpp" + +namespace dai { + +/** + * RGBD message. Carries RGB and Depth frames. + */ +class RGBDData : public Buffer { + public: + /** + * Construct RGBD message. 
+ */ + RGBDData() = default; + + virtual ~RGBDData() = default; + + ImgFrame rgbFrame; + ImgFrame depthFrame; + + void serialize(std::vector& metadata, DatatypeEnum& datatype) const override { + metadata = utility::serialize(*this); + datatype = DatatypeEnum::RGBDData; + }; + DEPTHAI_SERIALIZE(RGBDData, Buffer::sequenceNum, Buffer::ts, Buffer::tsDevice, rgbFrame, depthFrame); +}; + +} // namespace dai diff --git a/include/depthai/pipeline/datatypes.hpp b/include/depthai/pipeline/datatypes.hpp index dca47c5e6..54e8c9055 100644 --- a/include/depthai/pipeline/datatypes.hpp +++ b/include/depthai/pipeline/datatypes.hpp @@ -20,6 +20,7 @@ #include "datatype/NNData.hpp" #include "datatype/PointCloudConfig.hpp" #include "datatype/PointCloudData.hpp" +#include "datatype/RGBDData.hpp" #include "datatype/SpatialImgDetections.hpp" #include "datatype/SpatialLocationCalculatorConfig.hpp" #include "datatype/SpatialLocationCalculatorData.hpp" @@ -30,4 +31,4 @@ #include "datatype/ToFConfig.hpp" #include "datatype/TrackedFeatures.hpp" #include "datatype/Tracklets.hpp" -#include "datatype/TransformData.hpp" \ No newline at end of file +#include "datatype/TransformData.hpp" diff --git a/include/depthai/pipeline/node/StereoDepth.hpp b/include/depthai/pipeline/node/StereoDepth.hpp index 8bf607e88..2b1fa65f1 100644 --- a/include/depthai/pipeline/node/StereoDepth.hpp +++ b/include/depthai/pipeline/node/StereoDepth.hpp @@ -43,6 +43,11 @@ class StereoDepth : public DeviceNodeCRTP(shared_from_this()); } + /** + * Create StereoDepth node. Note that this API is global and if used autocreated cameras can't be reused. + * @param autoCreateCameras If true, will create left and right nodes if they don't exist + * @param presetMode Preset mode for stereo depth + */ std::shared_ptr build(bool autoCreateCameras, PresetMode presetMode = PresetMode::HIGH_ACCURACY); protected: diff --git a/include/depthai/pipeline/node/host/RGBD.hpp b/include/depthai/pipeline/node/host/RGBD.hpp new file mode 100644 index 000000000..1ff0c1979 --- /dev/null +++ b/include/depthai/pipeline/node/host/RGBD.hpp @@ -0,0 +1,76 @@ +#pragma once +#include "depthai/pipeline/Subnode.hpp" +#include "depthai/pipeline/ThreadedHostNode.hpp" +#include "depthai/pipeline/datatype/PointCloudData.hpp" +#include "depthai/pipeline/datatype/RGBDData.hpp" +#include "depthai/pipeline/datatype/StereoDepthConfig.hpp" +#include "depthai/pipeline/node/Sync.hpp" +#include "depthai/pipeline/datatype/MessageGroup.hpp" +#include "depthai/utility/Pimpl.hpp" + +namespace dai { +namespace node { + +/** + * @brief RGBD node. Combines depth and color frames into a single point cloud. + */ +class RGBD : public NodeCRTP { + public: + constexpr static const char* NAME = "RGBD"; + + RGBD(); + ~RGBD(); + Subnode sync{*this, "sync"}; + InputMap& inputs = sync->inputs; + + std::string colorInputName = "inColorSync"; + std::string depthInputName = "inDepthSync"; + Input& inColor = inputs[colorInputName]; + Input& inDepth = inputs[depthInputName]; + + /** + * Output point cloud. + */ + Output pcl{*this, {"pcl", DEFAULT_GROUP, {{DatatypeEnum::PointCloudData, true}}}}; + /** + * Output RGBD frames. + */ + Output rgbd{*this, {"rgbd", DEFAULT_GROUP, {{DatatypeEnum::RGBDData, true}}}}; + + std::shared_ptr build(); + /** + * @brief Build RGBD node with specified size. Note that this API is global and if used autocreated cameras can't be reused. + * @param autocreate If true, will create color and depth nodes if they don't exist. 
+ * @param size Size of the frames + */ + std::shared_ptr build(bool autocreate, std::pair size); + void setDepthUnit(StereoDepthConfig::AlgorithmControl::DepthUnit depthUnit); + /** + * @brief Use single-threaded CPU for processing + */ + void useCPU(); + /** + * @brief Use multi-threaded CPU for processing + * @param numThreads Number of threads to use + */ + void useCPUMT(uint32_t numThreads=2); + /** + * @brief Use GPU for processing (needs to be compiled with Kompute support) + * @param device GPU device index + */ + void useGPU(uint32_t device=0); + /** + * @brief Print available GPU devices + */ + void printDevices(); + private: + class Impl; + Pimpl pimpl; + void run() override; + void initialize(std::shared_ptr frames); + Input inSync{*this, {"inSync", DEFAULT_GROUP, false, 0, {{DatatypeEnum::MessageGroup, true}}}}; + bool initialized = false; +}; + +} // namespace node +} // namespace dai diff --git a/include/depthai/pipeline/nodes.hpp b/include/depthai/pipeline/nodes.hpp index f1b1bccf6..34797fe51 100644 --- a/include/depthai/pipeline/nodes.hpp +++ b/include/depthai/pipeline/nodes.hpp @@ -34,6 +34,7 @@ #include "node/Warp.hpp" #include "node/XLinkIn.hpp" #include "node/XLinkOut.hpp" +#include "node/host/RGBD.hpp" #ifdef DEPTHAI_HAVE_OPENCV_SUPPORT #include "node/host/Display.hpp" #include "node/host/HostCamera.hpp" diff --git a/shaders/CMakeLists.txt b/shaders/CMakeLists.txt new file mode 100644 index 000000000..eb32d6dc5 --- /dev/null +++ b/shaders/CMakeLists.txt @@ -0,0 +1,19 @@ +cmake_minimum_required(VERSION 3.20) + +include(compileShader.cmake) + +# create shader directory in build +file(MAKE_DIRECTORY "${CMAKE_BINARY_DIR}/shaders") +set(SHADER_HEADER_PATH "${CMAKE_BINARY_DIR}/include/depthai/shaders") +# vulkan_compile_shader generates headers in source directory +compile_shader( + INFILE rgbd2pointcloud.comp OUTFILE + ${SHADER_HEADER_PATH}/rgbd2pointcloud.hpp NAMESPACE + "dai::shaders") +add_library(shaders INTERFACE + ${SHADER_HEADER_PATH}/rgbd2pointcloud.hpp) + +target_include_directories( + shaders INTERFACE $ + $) + diff --git a/shaders/bin2h.cmake b/shaders/bin2h.cmake new file mode 100644 index 000000000..82824622c --- /dev/null +++ b/shaders/bin2h.cmake @@ -0,0 +1,107 @@ + +################################################################################## +# Based on: https://github.com/sivachandran/cmake-bin2h +# +# Copyright 2020 Sivachandran Paramasivam +# +# Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
+################################################################################## + +include(CMakeParseArguments) + +# Function to wrap a given string into multiple lines at the given column position. +# Parameters: +# VARIABLE - The name of the CMake variable holding the string. +# AT_COLUMN - The column position at which string will be wrapped. +function(WRAP_STRING) + set(oneValueArgs VARIABLE AT_COLUMN) + cmake_parse_arguments(WRAP_STRING "${options}" "${oneValueArgs}" "" ${ARGN}) + + string(LENGTH ${${WRAP_STRING_VARIABLE}} stringLength) + math(EXPR offset "0") + + while(stringLength GREATER 0) + + if(stringLength GREATER ${WRAP_STRING_AT_COLUMN}) + math(EXPR length "${WRAP_STRING_AT_COLUMN}") + else() + math(EXPR length "${stringLength}") + endif() + + string(SUBSTRING ${${WRAP_STRING_VARIABLE}} ${offset} ${length} line) + set(lines "${lines}\n${line}") + + math(EXPR stringLength "${stringLength} - ${length}") + math(EXPR offset "${offset} + ${length}") + endwhile() + + set(${WRAP_STRING_VARIABLE} "${lines}" PARENT_SCOPE) +endfunction() + +# Function to embed contents of a file as byte array in C/C++ header file(.h). The header file +# will contain a byte array and integer variable holding the size of the array. +# Parameters +# SOURCE_FILE - The path of source file whose contents will be embedded in the header file. +# VARIABLE_NAME - The name of the variable for the byte array. The string "_SIZE" will be append +# to this name and will be used a variable name for size variable. +# HEADER_FILE - The path of header file. +# APPEND - If specified appends to the header file instead of overwriting it +# NULL_TERMINATE - If specified a null byte(zero) will be append to the byte array. This will be +# useful if the source file is a text file and we want to use the file contents +# as string. But the size variable holds size of the byte array without this +# null byte. +# HEADER_NAMESPACE - The namespace, where the array should be located in. +# IS_BIG_ENDIAN - If set to true, will not revers the byte order for the uint32_t to match the +# big endian system architecture +# Usage: +# bin2h(SOURCE_FILE "Logo.png" HEADER_FILE "Logo.h" VARIABLE_NAME "LOGO_PNG") +function(BIN2H) + set(options APPEND NULL_TERMINATE) + set(oneValueArgs SOURCE_FILE VARIABLE_NAME HEADER_FILE) + cmake_parse_arguments(BIN2H "${options}" "${oneValueArgs}" "" ${ARGN}) + + # reads source file contents as hex string + file(READ ${BIN2H_SOURCE_FILE} hexString HEX) + string(LENGTH ${hexString} hexStringLength) + + # appends null byte if asked + if(BIN2H_NULL_TERMINATE) + set(hexString "${hexString}00") + endif() + + # wraps the hex string into multiple lines at column 32(i.e. 
16 bytes per line) + wrap_string(VARIABLE hexString AT_COLUMN 32) + math(EXPR arraySize "${hexStringLength} / 8") + + # adds '0x' prefix and comma suffix before and after every byte respectively + if(IS_BIG_ENDIAN) + message(STATUS "Interpreting shader in big endian...") + string(REGEX REPLACE "([0-9a-f][0-9a-f])([0-9a-f][0-9a-f])([0-9a-f][0-9a-f])([0-9a-f][0-9a-f])" "0x\\1\\2\\3\\4, " arrayValues ${hexString}) + else() + message(STATUS "Interpreting shader in little endian...") + string(REGEX REPLACE "([0-9a-f][0-9a-f])([0-9a-f][0-9a-f])([0-9a-f][0-9a-f])([0-9a-f][0-9a-f])" "0x\\4\\3\\2\\1, " arrayValues ${hexString}) + endif() + # removes trailing comma + string(REGEX REPLACE ", $" "" arrayValues ${arrayValues}) + + # converts the variable name into proper C identifier + string(MAKE_C_IDENTIFIER "${BIN2H_VARIABLE_NAME}" BIN2H_VARIABLE_NAME) + string(TOUPPER "${BIN2H_VARIABLE_NAME}" BIN2H_VARIABLE_NAME) + + # declares byte array and the length variables + set(namespaceStart "namespace ${HEADER_NAMESPACE} {") + set(namespaceEnd "} // namespace ${HEADER_NAMESPACE}") + set(arrayIncludes "#pragma once\n#include \n#include ") + set(arrayDefinition "const std::array ${BIN2H_VARIABLE_NAME} = { ${arrayValues} };") + + set(declarations "${arrayIncludes}\n\n${namespaceStart}\n${arrayDefinition}\n${namespaceEnd}\n\n") + if(BIN2H_APPEND) + file(APPEND ${BIN2H_HEADER_FILE} "${declarations}") + else() + file(WRITE ${BIN2H_HEADER_FILE} "${declarations}") + endif() +endfunction() diff --git a/shaders/bin_file_to_header.cmake b/shaders/bin_file_to_header.cmake new file mode 100644 index 000000000..3df6bfcaa --- /dev/null +++ b/shaders/bin_file_to_header.cmake @@ -0,0 +1,20 @@ + +cmake_minimum_required(VERSION 3.20) + +if(${INPUT_SHADER_FILE} STREQUAL "") + message(FATAL_ERROR "No input file path provided via 'INPUT_SHADER_FILE'.") +endif() + +if(${OUTPUT_HEADER_FILE} STREQUAL "") + message(FATAL_ERROR "No output file path provided via 'OUTPUT_HEADER_FILE'.") +endif() + +if(${HEADER_NAMESPACE} STREQUAL "") + message(FATAL_ERROR "No header namespace provided via 'HEADER_NAMESPACE'.") +endif() + +include(bin2h.cmake) + +get_filename_component(BINARY_FILE_CONTENT ${INPUT_SHADER_FILE} NAME) +bin2h(SOURCE_FILE ${INPUT_SHADER_FILE} HEADER_FILE ${OUTPUT_HEADER_FILE} VARIABLE_NAME ${BINARY_FILE_CONTENT} HEADER_NAMESPACE ${HEADER_NAMESPACE}) +file(APPEND ${OUTPUT_HEADER_FILE} "\n") diff --git a/shaders/compileShader.cmake b/shaders/compileShader.cmake new file mode 100644 index 000000000..5463cf299 --- /dev/null +++ b/shaders/compileShader.cmake @@ -0,0 +1,55 @@ + +# Setting CWD as later on it's not possible if called from outside +set_property(GLOBAL PROPERTY + VULKAN_COMPILE_SHADER_CWD_PROPERTY "${CMAKE_CURRENT_LIST_DIR}") + +function(compile_shader) + find_program(GLS_LANG_VALIDATOR_PATH NAMES glslangValidator) + if(GLS_LANG_VALIDATOR_PATH STREQUAL "GLS_LANG_VALIDATOR_PATH-NOTFOUND") + message(FATAL_ERROR "glslangValidator not found.") + return() + endif() + + cmake_parse_arguments(SHADER_COMPILE "" "INFILE;OUTFILE;NAMESPACE" "" ${ARGN}) + set(SHADER_COMPILE_INFILE_FULL "${CMAKE_CURRENT_SOURCE_DIR}/${SHADER_COMPILE_INFILE}") + set(SHADER_COMPILE_SPV_FILE_FULL "${CMAKE_CURRENT_BINARY_DIR}/${SHADER_COMPILE_INFILE}.spv") + set(SHADER_COMPILE_HEADER_FILE_FULL "${SHADER_COMPILE_OUTFILE}") + + # .comp -> .spv + add_custom_command(OUTPUT "${SHADER_COMPILE_SPV_FILE_FULL}" + COMMAND "${GLS_LANG_VALIDATOR_PATH}" + ARGS "-V" + "${SHADER_COMPILE_INFILE_FULL}" + "-o" + "${SHADER_COMPILE_SPV_FILE_FULL}" + COMMENT "Compile 
vulkan compute shader from file '${SHADER_COMPILE_INFILE_FULL}' to '${SHADER_COMPILE_SPV_FILE_FULL}'." + MAIN_DEPENDENCY "${SHADER_COMPILE_INFILE_FULL}") + + # Check if big or little endian + include (TestBigEndian) + TEST_BIG_ENDIAN(IS_BIG_ENDIAN) + + # .spv -> .hpp + ## Property needs to be retrieved explicitly from globals + get_property(VULKAN_COMPILE_SHADER_CWD GLOBAL PROPERTY VULKAN_COMPILE_SHADER_CWD_PROPERTY) + ## The directory may not be created so we need to ensure its present + get_filename_component(SHADER_COMPILE_SPV_PATH ${SHADER_COMPILE_SPV_FILE_FULL} DIRECTORY) + if(NOT EXISTS ${SHADER_COMPILE_SPV_PATH}) + get_filename_component(SHADER_COMPILE_SPV_FILENAME ${SHADER_COMPILE_SPV_FILE_FULL} NAME) + add_custom_target(build-time-make-directory-${SHADER_COMPILE_SPV_FILENAME} ALL + COMMAND ${CMAKE_COMMAND} -E make_directory ${SHADER_COMPILE_SPV_PATH}) + endif() + ## Requires custom command function as this is the only way to call + ## a function during compile time from cmake (ie through cmake script) + add_custom_command(OUTPUT "${SHADER_COMPILE_HEADER_FILE_FULL}" + COMMAND ${CMAKE_COMMAND} + ARGS "-DINPUT_SHADER_FILE=${SHADER_COMPILE_SPV_FILE_FULL}" + "-DOUTPUT_HEADER_FILE=${SHADER_COMPILE_HEADER_FILE_FULL}" + "-DHEADER_NAMESPACE=${SHADER_COMPILE_NAMESPACE}" + "-DIS_BIG_ENDIAN=${IS_BIG_ENDIAN}" + "-P" + "${VULKAN_COMPILE_SHADER_CWD}/bin_file_to_header.cmake" + WORKING_DIRECTORY "${VULKAN_COMPILE_SHADER_CWD}" + COMMENT "Converting compiled shader '${SHADER_COMPILE_SPV_FILE_FULL}' to header file '${SHADER_COMPILE_HEADER_FILE_FULL}'." + MAIN_DEPENDENCY "${SHADER_COMPILE_SPV_FILE_FULL}") +endfunction() diff --git a/shaders/rgbd2pointcloud.comp b/shaders/rgbd2pointcloud.comp new file mode 100644 index 000000000..b4700cff4 --- /dev/null +++ b/shaders/rgbd2pointcloud.comp @@ -0,0 +1,47 @@ +#version 450 core +layout(local_size_x = 256, local_size_y = 1, local_size_z = 1) in; + +// Binding 0: Depth input (float array) +layout(std430, binding = 0) buffer DepthBuffer { + float depth[]; +}; + +// Binding 2: Intrinsics buffer [fx, fy, cx, cy, scale, width, height] +layout(std430, binding = 1) buffer IntrinsicsBuffer { + float intrinsics[]; +}; + +// Binding 3: Output XYZ buffer (float array, 3 floats per pixel) +layout(std430, binding = 2) buffer XYZBuffer { + float xyz[]; +}; + + +void main() { + uint i = gl_GlobalInvocationID.x; + + float fx = intrinsics[0]; + float fy = intrinsics[1]; + float cx = intrinsics[2]; + float cy = intrinsics[3]; + float scale = intrinsics[4]; + float width = intrinsics[5]; + float height = intrinsics[6]; + + if (i >= uint(width * height)) { + return; + } + + uint u = i % uint(width); + uint v = i / uint(width); + + float z = depth[i] * scale; + float x = (float(u) - cx) * z / fx; + float y = (float(v) - cy) * z / fy; + + xyz[i * 3 + 0] = x; + xyz[i * 3 + 1] = y; + xyz[i * 3 + 2] = z; + +} + diff --git a/src/basalt/BasaltVIO.cpp b/src/basalt/BasaltVIO.cpp index 7e1e8f89c..4ba1bdf0d 100644 --- a/src/basalt/BasaltVIO.cpp +++ b/src/basalt/BasaltVIO.cpp @@ -1,6 +1,7 @@ #include "depthai/basalt/BasaltVIO.hpp" #include "../utility/PimplImpl.hpp" +#include "basalt/vi_estimator/vio_estimator.h" #include "depthai/pipeline/Pipeline.hpp" #include "depthai/pipeline/ThreadedHostNode.hpp" #include "depthai/pipeline/datatype/MessageGroup.hpp" @@ -208,6 +209,7 @@ void BasaltVIO::initialize(std::vector> frames) { } optFlowPtr = basalt::OpticalFlowFactory::getOpticalFlow(vioConfig, *calib); + optFlowPtr->show_gui = false; optFlowPtr->start(); pimpl->imageDataQueue = 
optFlowPtr->input_img_queue; vio = basalt::VioEstimatorFactory::getVioEstimator(vioConfig, *calib, basalt::constants::g, true, true); @@ -218,6 +220,7 @@ void BasaltVIO::initialize(std::vector> frames) { vio->out_state_queue = pimpl->outStateQueue; vio->opt_flow_depth_guess_queue = optFlowPtr->input_depth_queue; vio->opt_flow_state_queue = optFlowPtr->input_state_queue; + vio->opt_flow_lm_bundle_queue = optFlowPtr->input_lm_bundle_queue; initialized = true; } diff --git a/src/pipeline/datatype/PointCloudData.cpp b/src/pipeline/datatype/PointCloudData.cpp index 5f5046630..1ad576d33 100644 --- a/src/pipeline/datatype/PointCloudData.cpp +++ b/src/pipeline/datatype/PointCloudData.cpp @@ -37,6 +37,22 @@ std::vector PointCloudData::getPointsRGB() { return points; } +void PointCloudData::setPoints(const std::vector& points) { + auto size = points.size(); + std::vector data(size * sizeof(Point3f)); + std::memcpy(data.data(), points.data(), size * sizeof(Point3f)); + setData(std::move(data)); + setColor(false); +} + +void PointCloudData::setPointsRGB(const std::vector& points) { + auto size = points.size(); + std::vector data(size * sizeof(Point3fRGB)); + std::memcpy(data.data(), points.data(), size * sizeof(Point3fRGB)); + setData(std::move(data)); + setColor(true); +} + unsigned int PointCloudData::getInstanceNum() const { return instanceNum; } @@ -168,4 +184,5 @@ ProtoSerializable::SchemaPair PointCloudData::serializeSchema() const { #endif static_assert(sizeof(Point3f) == 12, "Point3f size must be 12 bytes"); +static_assert(sizeof(Point3fRGB) == 15, "Point3fRGB size must be 15 bytes"); } // namespace dai diff --git a/src/pipeline/datatype/RGBDData.cpp b/src/pipeline/datatype/RGBDData.cpp new file mode 100644 index 000000000..92a3dcbcb --- /dev/null +++ b/src/pipeline/datatype/RGBDData.cpp @@ -0,0 +1,5 @@ +#include "depthai/pipeline/datatype/RGBDData.hpp" + +namespace dai { +; // TODO - no impl needed +} // namespace dai diff --git a/src/pipeline/node/host/RGBD.cpp b/src/pipeline/node/host/RGBD.cpp new file mode 100644 index 000000000..f7d59b8a2 --- /dev/null +++ b/src/pipeline/node/host/RGBD.cpp @@ -0,0 +1,359 @@ +#include "depthai/pipeline/node/host/RGBD.hpp" + +#include + +#include "common/Point3fRGB.hpp" +#include "depthai/common/Point3fRGB.hpp" +#include "depthai/pipeline/Pipeline.hpp" +#include "depthai/pipeline/datatype/ImgFrame.hpp" +#include "depthai/pipeline/datatype/MessageGroup.hpp" +#include "depthai/pipeline/datatype/PointCloudData.hpp" +#include "depthai/pipeline/node/Camera.hpp" +#include "depthai/pipeline/node/ImageAlign.hpp" +#include "depthai/pipeline/node/StereoDepth.hpp" +#include "depthai/pipeline/node/Sync.hpp" +#ifdef DEPTHAI_ENABLE_KOMPUTE + #include "depthai/shaders/rgbd2pointcloud.hpp" + #include "kompute/Kompute.hpp" +#endif +#include "utility/PimplImpl.hpp" + +namespace dai { +namespace node { + +class RGBD::Impl { + public: + Impl() = default; + void computePointCloud(const uint8_t* depthData, const uint8_t* colorData, std::vector& points) { + if(!intrinsicsSet) { + throw std::runtime_error("Intrinsics not set"); + } + points.reserve(size); + switch(computeMethod) { + case ComputeMethod::CPU: + computePointCloudCPU(depthData, colorData, points); + break; + case ComputeMethod::CPU_MT: + computePointCloudCPUMT(depthData, colorData, points); + break; + case ComputeMethod::GPU: + computePointCloudGPU(depthData, colorData, points); + break; + } + } + void setDepthUnit(StereoDepthConfig::AlgorithmControl::DepthUnit depthUnit) { + // Default is millimeter + 
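// Note: the scaleFactor chosen here is applied to every raw depth sample later on (z = depth * scaleFactor),
+        // both on the CPU paths (calcPointsChunk) and, via the intrinsics buffer, inside the Kompute shader.
+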
switch(depthUnit) { + case StereoDepthConfig::AlgorithmControl::DepthUnit::MILLIMETER: + scaleFactor = 1.0f; + break; + case StereoDepthConfig::AlgorithmControl::DepthUnit::METER: + scaleFactor = 0.001f; + break; + case StereoDepthConfig::AlgorithmControl::DepthUnit::CENTIMETER: + scaleFactor = 0.01f; + break; + case StereoDepthConfig::AlgorithmControl::DepthUnit::FOOT: + scaleFactor = 0.3048f; + break; + case StereoDepthConfig::AlgorithmControl::DepthUnit::INCH: + scaleFactor = 0.0254f; + break; + case StereoDepthConfig::AlgorithmControl::DepthUnit::CUSTOM: + scaleFactor = 1.0f; + break; + } + } + void printDevices() { +#ifdef DEPTHAI_ENABLE_KOMPUTE + auto devices = mgr->listDevices(); + for(auto& device : devices) { + std::cout << "Device: " << device.getProperties().deviceName << std::endl; + } +#endif + } + void useCPU() { + computeMethod = ComputeMethod::CPU; + } + void useCPUMT(uint32_t numThreads) { + threadNum = numThreads; + computeMethod = ComputeMethod::CPU_MT; + } + void useGPU(uint32_t device) { + initializeGPU(device); + } + void setIntrinsics(float fx, float fy, float cx, float cy, unsigned int width, unsigned int height) { + this->fx = fx; + this->fy = fy; + this->cx = cx; + this->cy = cy; + this->width = width; + this->height = height; + size = this->width * this->height; + intrinsicsSet = true; + } + + private: + void initializeGPU(uint32_t device) { +#ifdef DEPTHAI_ENABLE_KOMPUTE + // Initialize Kompute + mgr = std::make_shared(device); + shader = std::vector(shaders::RGBD2POINTCLOUD_COMP_SPV.begin(), shaders::RGBD2POINTCLOUD_COMP_SPV.end()); + computeMethod = ComputeMethod::GPU; +#else + throw std::runtime_error("Kompute not enabled in this build"); +#endif + } + void computePointCloudGPU(const uint8_t* depthData, const uint8_t* colorData, std::vector& points) { +#ifdef DEPTHAI_ENABLE_KOMPUTE + std::vector xyzOut; + + xyzOut.resize(size * 3); + + // Convert depth to float + // Depth is in mm by default, convert to meters if outputMeters == true + // If outputMeters is true, scale = 1/1000.0 else scale = 1.0 + float scale = scaleFactor; + + std::vector depthDataFloat(size); + for(int i = 0; i < size; i++) { + uint16_t depthValue = *(reinterpret_cast(depthData + i * 2)); + depthDataFloat[i] = static_cast(depthValue); // will multiply by scale in shader + } + + // Intrinsics: [fx, fy, cx, cy, scale, width, height] + std::vector intrinsics = {fx, fy, cx, cy, scale, static_cast(width), static_cast(height)}; + + // Create Kompute tensors + if(!tensorsInitialized) { + depthTensor = mgr->tensor(depthDataFloat); + intrinsicsTensor = mgr->tensor(intrinsics); + xyzTensor = mgr->tensor(xyzOut); + tensorsInitialized = true; + } else { + depthTensor->setData(depthDataFloat); + } + // Load shader + if(!algoInitialized) { + tensors.emplace_back(depthTensor); + tensors.emplace_back(intrinsicsTensor); + tensors.emplace_back(xyzTensor); + algo = mgr->algorithm(tensors, shader); + algoInitialized = true; + } + mgr->sequence()->record(tensors)->record(algo)->record(tensors)->eval(); + // Retrieve results + xyzOut = xyzTensor->vector(); + for(int i = 0; i < size; i++) { + Point3fRGB p; + p.x = xyzOut[i * 3 + 0]; + p.y = xyzOut[i * 3 + 1]; + p.z = xyzOut[i * 3 + 2]; + p.r = colorData[i * 3 + 0]; + p.g = colorData[i * 3 + 1]; + p.b = colorData[i * 3 + 2]; + points.emplace_back(p); + } +#endif + } + void calcPointsChunk(const uint8_t* depthData, const uint8_t* colorData, std::vector& outChunk, int startRow, int endRow) { + float scale = scaleFactor; + outChunk.reserve((endRow - startRow) * 
width);
+
+        for(int row = startRow; row < endRow; row++) {
+            int rowStart = row * width;
+            for(int col = 0; col < width; col++) {
+                size_t i = rowStart + col;
+
+                uint16_t depthValue = *(reinterpret_cast<const uint16_t*>(depthData + i * 2));
+                float z = static_cast<float>(depthValue) * scale;
+
+                float xCoord = (col - cx) * z / fx;
+                float yCoord = (row - cy) * z / fy;
+
+                // Interleaved color (RGB888i), 3 bytes per pixel
+                uint8_t r = colorData[i * 3 + 0];
+                uint8_t g = colorData[i * 3 + 1];
+                uint8_t b = colorData[i * 3 + 2];
+
+                outChunk.push_back(Point3fRGB{xCoord, yCoord, z, r, g, b});
+            }
+        }
+    }
+
+    void computePointCloudCPU(const uint8_t* depthData, const uint8_t* colorData, std::vector<Point3fRGB>& points) {
+        // Single-threaded path writes directly into points
+        calcPointsChunk(depthData, colorData, points, 0, height);
+    }
+
+    void computePointCloudCPUMT(const uint8_t* depthData, const uint8_t* colorData, std::vector<Point3fRGB>& points) {
+        int rowsPerThread = height / threadNum;
+        std::vector<std::future<std::vector<Point3fRGB>>> futures;
+
+        // Each thread returns a local vector
+        auto processRows = [&](int startRow, int endRow) {
+            std::vector<Point3fRGB> localPoints;
+            calcPointsChunk(depthData, colorData, localPoints, startRow, endRow);
+            return localPoints;
+        };
+
+        for(int t = 0; t < threadNum; ++t) {
+            int startRow = t * rowsPerThread;
+            int endRow = (t == threadNum - 1) ? height : (startRow + rowsPerThread);
+            futures.emplace_back(std::async(std::launch::async, processRows, startRow, endRow));
+        }
+
+        // Merge all results
+        for(auto& f : futures) {
+            auto localPoints = f.get();
+            // No locking needed: each future returns its own local vector and the merge runs on this thread only
+            points.insert(points.end(), localPoints.begin(), localPoints.end());
+        }
+    }
+    enum class ComputeMethod { CPU, CPU_MT, GPU };
+    ComputeMethod computeMethod = ComputeMethod::CPU;
+#ifdef DEPTHAI_ENABLE_KOMPUTE
+    std::shared_ptr<kp::Manager> mgr;
+    std::vector<uint32_t> shader;
+    std::shared_ptr<kp::Algorithm> algo;
+    std::shared_ptr<kp::TensorT<float>> depthTensor;
+    std::shared_ptr<kp::TensorT<float>> intrinsicsTensor;
+    std::shared_ptr<kp::TensorT<float>> xyzTensor;
+    std::vector<std::shared_ptr<kp::Tensor>> tensors;
+    bool algoInitialized = false;
+    bool tensorsInitialized = false;
+#endif
+    float scaleFactor = 1.0f;
+    float fx, fy, cx, cy;
+    int width, height;
+    int size;
+    bool intrinsicsSet = false;
+    int threadNum = 2;
+};
+
+RGBD::RGBD() = default;
+
+RGBD::~RGBD() = default;
+
+std::shared_ptr<RGBD> RGBD::build() {
+    sync->out.link(inSync);
+    sync->setRunOnHost(false);
+    inColor.setBlocking(false);
+    inColor.setMaxSize(1);
+    inDepth.setBlocking(false);
+    inDepth.setMaxSize(1);
+    inSync.setBlocking(false);
+    inSync.setMaxSize(1);
+    return std::static_pointer_cast<RGBD>(shared_from_this());
+}
+
+std::shared_ptr<RGBD> RGBD::build(bool autocreate, std::pair<int, int> size) {
+    if(!autocreate) {
+        return std::static_pointer_cast<RGBD>(shared_from_this());
+    }
+    auto pipeline = getParentPipeline();
+    auto colorCam = pipeline.create<node::Camera>()->build();
+    auto platform = pipeline.getDefaultDevice()->getPlatform();
+    auto stereo = pipeline.create<node::StereoDepth>()->build(true);
+    std::shared_ptr<node::ImageAlign> align = nullptr;
+    if(platform == Platform::RVC4) {
+        align = pipeline.create<node::ImageAlign>();
+    }
+    auto* out = colorCam->requestOutput(size, dai::ImgFrame::Type::RGB888i);
+    out->link(inColor);
+    if(platform == dai::Platform::RVC4) {
+        stereo->depth.link(align->input);
+        out->link(align->inputAlignTo);
+        align->outputAligned.link(inDepth);
+    } else {
+        out->link(stereo->inputAlignTo);
+        stereo->depth.link(inDepth);
+    }
+    return build();
+}
+
+void RGBD::initialize(std::shared_ptr<MessageGroup> frames) {
+    // Initialize the camera intrinsics
+    // Check if width, height and cameraID match
+    auto colorFrame =
std::dynamic_pointer_cast(frames->group.at(inColor.getName())); + if(colorFrame->getType() != dai::ImgFrame::Type::RGB888i) { + throw std::runtime_error("RGBD node only supports RGB888i frames"); + } + auto depthFrame = std::dynamic_pointer_cast(frames->group.at(inDepth.getName())); + if(colorFrame->getWidth() != depthFrame->getWidth() || colorFrame->getHeight() != depthFrame->getHeight()) { + throw std::runtime_error("Color and depth frame sizes do not match"); + } + if(colorFrame->getInstanceNum() != depthFrame->getInstanceNum()) { + throw std::runtime_error("Depth is not aligned to color"); + } + auto calibHandler = getParentPipeline().getDefaultDevice()->readCalibration(); + auto camID = static_cast(colorFrame->getInstanceNum()); + auto width = colorFrame->getWidth(); + auto height = colorFrame->getHeight(); + auto intrinsics = calibHandler.getCameraIntrinsics(camID, width, height); + float fx = intrinsics[0][0]; + float fy = intrinsics[1][1]; + float cx = intrinsics[0][2]; + float cy = intrinsics[1][2]; + pimpl->setIntrinsics(fx, fy, cx, cy, width, height); + initialized = true; +} + +void RGBD::run() { + while(isRunning()) { + // Get the color and depth frames + auto group = inSync.get(); + if(group == nullptr) continue; + if(!initialized) { + initialize(group); + } + auto colorFrame = std::dynamic_pointer_cast(group->group.at(inColor.getName())); + if(colorFrame->getType() != dai::ImgFrame::Type::RGB888i) { + throw std::runtime_error("RGBD node only supports RGB888i frames"); + } + auto depthFrame = std::dynamic_pointer_cast(group->group.at(inDepth.getName())); + + // Create the point cloud + auto pc = std::make_shared(); + pc->setTimestamp(colorFrame->getTimestamp()); + pc->setTimestampDevice(colorFrame->getTimestampDevice()); + pc->setSequenceNum(colorFrame->getSequenceNum()); + pc->setInstanceNum(colorFrame->getInstanceNum()); + auto width = colorFrame->getWidth(); + auto height = colorFrame->getHeight(); + pc->setSize(width, height); + + std::vector points; + // Fill the point cloud + auto* depthData = depthFrame->getData().data(); + auto* colorData = colorFrame->getData().data(); + // Use GPU to compute point cloud + pimpl->computePointCloud(depthData, colorData, points); + + pc->setPointsRGB(points); + pcl.send(pc); + auto rgbdData = std::make_shared(); + rgbdData->depthFrame = *depthFrame; + rgbdData->rgbFrame = *colorFrame; + rgbd.send(rgbdData); + } +} +void RGBD::setDepthUnit(StereoDepthConfig::AlgorithmControl::DepthUnit depthUnit) { + pimpl->setDepthUnit(depthUnit); +} +void RGBD::useCPU() { + pimpl->useCPU(); +} +void RGBD::useCPUMT(uint32_t numThreads) { + pimpl->useCPUMT(numThreads); +} +void RGBD::useGPU(uint32_t device) { + pimpl->useGPU(device); +} +void RGBD::printDevices() { + pimpl->printDevices(); +} + +} // namespace node +} // namespace dai diff --git a/src/utility/EventsManager.cpp b/src/utility/EventsManager.cpp index 77cc239e1..3f8b6b4a6 100644 --- a/src/utility/EventsManager.cpp +++ b/src/utility/EventsManager.cpp @@ -109,8 +109,8 @@ EventsManager::EventsManager(std::string url, bool uploadCachedOnStart, float pu uploadCachedOnStart(uploadCachedOnStart), cacheIfCannotSend(false), stopEventBuffer(false) { - sourceAppId = utility::getEnv("AGENT_APP_ID"); - sourceAppIdentifier = utility::getEnv("AGENT_APP_IDENTIFIER"); + sourceAppId = utility::getEnv("OAKAGENT_APP_VERSION"); + sourceAppIdentifier = utility::getEnv("OAKAGENT_APP_IDENTIFIER"); token = utility::getEnv("DEPTHAI_HUB_API_KEY"); if(token.empty()) { throw std::runtime_error("Missing token, please 
set DEPTHAI_HUB_API_KEY environment variable or use setToken method"); diff --git a/tests/CMakeLists.txt b/tests/CMakeLists.txt index 20017176f..eee7aeb3b 100644 --- a/tests/CMakeLists.txt +++ b/tests/CMakeLists.txt @@ -339,6 +339,10 @@ dai_set_test_labels(message_group_frame_test ondevice rvc2_all rvc4 ci) dai_add_test(pointcloud_test src/ondevice_tests/pointcloud_test.cpp) dai_set_test_labels(pointcloud_test ondevice rvc2_all ci) +# RGBD test +dai_add_test(rgbd_test src/ondevice_tests/rgbd_test.cpp) +dai_set_test_labels(rgbd_test ondevice rvc2_all rvc4 ci) + # Resolutions test dai_add_test(resolutions_test src/ondevice_tests/resolutions_test.cpp) dai_set_test_labels(resolutions_test ondevice) # TODO(jakob) Make the test runnable in CI diff --git a/tests/src/ondevice_tests/rgbd_test.cpp b/tests/src/ondevice_tests/rgbd_test.cpp new file mode 100644 index 000000000..e6843b1aa --- /dev/null +++ b/tests/src/ondevice_tests/rgbd_test.cpp @@ -0,0 +1,72 @@ +#include +#include + +#include "depthai/depthai.hpp" + +TEST_CASE("basic rgbd") { + // Create pipeline + dai::Pipeline pipeline; + auto platform = pipeline.getDefaultDevice()->getPlatform(); + // Define sources and outputs + auto left = pipeline.create(); + auto right = pipeline.create(); + auto stereo = pipeline.create(); + auto rgbd = pipeline.create()->build(); + auto color = pipeline.create(); + std::shared_ptr align = nullptr; + if(platform == dai::Platform::RVC4) { + align = pipeline.create(); + } + color->build(); + + left->build(dai::CameraBoardSocket::CAM_B); + right->build(dai::CameraBoardSocket::CAM_C); + stereo->setDefaultProfilePreset(dai::node::StereoDepth::PresetMode::DEFAULT); + + auto* out = color->requestOutput(std::pair(1280, 720), dai::ImgFrame::Type::RGB888i); + left->requestOutput(std::pair(1280, 720))->link(stereo->left); + right->requestOutput(std::pair(1280, 720))->link(stereo->right); + + if(platform == dai::Platform::RVC4) { + stereo->depth.link(align->input); + out->link(align->inputAlignTo); + align->outputAligned.link(rgbd->inDepth); + } else { + out->link(stereo->inputAlignTo); + stereo->depth.link(rgbd->inDepth); + } + out->link(rgbd->inColor); + + auto outQ = rgbd->pcl.createOutputQueue(); + pipeline.start(); + for(int i = 0; i < 10; ++i) { + auto pcl = outQ->get(); + REQUIRE(pcl != nullptr); + REQUIRE(pcl->getWidth() == 1280); + REQUIRE(pcl->getHeight() == 720); + REQUIRE(pcl->getPoints().size() == 1280UL * 720UL); + REQUIRE(pcl->isColor() == true); + REQUIRE(pcl->getMinX() <= pcl->getMaxX()); + REQUIRE(pcl->getMinY() <= pcl->getMaxY()); + REQUIRE(pcl->getMinZ() <= pcl->getMaxZ()); + } +} +TEST_CASE("rgbd autocreate") { + // Create pipeline + dai::Pipeline pipeline; + auto rgbd = pipeline.create()->build(true, {1280, 720}); + + auto outQ = rgbd->pcl.createOutputQueue(); + pipeline.start(); + for(int i = 0; i < 10; ++i) { + auto pcl = outQ->get(); + REQUIRE(pcl != nullptr); + REQUIRE(pcl->getWidth() == 1280); + REQUIRE(pcl->getHeight() == 720); + REQUIRE(pcl->getPoints().size() == 1280UL * 720UL); + REQUIRE(pcl->isColor() == true); + REQUIRE(pcl->getMinX() <= pcl->getMaxX()); + REQUIRE(pcl->getMinY() <= pcl->getMaxY()); + REQUIRE(pcl->getMinZ() <= pcl->getMaxZ()); + } +} diff --git a/vcpkg.json b/vcpkg.json index ffd192ce3..245d0962b 100644 --- a/vcpkg.json +++ b/vcpkg.json @@ -70,6 +70,19 @@ "cpr" ] }, + "kompute-support": { + "description": "Enable Kompute support", + "dependencies": [ + "kompute", + { + "name": "glslang", + "default-features": false, + "features": [ + "tools" + ] + } + ] + }, 
"protobuf-support": { "description": "Enable Protobuf support", "dependencies": [