Merge remote-tracking branch 'origin/v3_develop' into v3_device_id
Matevz Morato committed Jan 7, 2025
2 parents 1e7e958 + 1ef7c11 commit 199e582
Showing 45 changed files with 793 additions and 201 deletions.
2 changes: 1 addition & 1 deletion 3rdparty/foxglove/ws-protocol
19 changes: 9 additions & 10 deletions CMakeLists.txt
@@ -17,7 +17,6 @@ option(DEPTHAI_NEW_FIND_PYTHON "Use new FindPython module" ON)
if(NOT DEPTHAI_OPENCV_SUPPORT)
set(DEPTHAI_MERGED_TARGET OFF CACHE BOOL "Enable merged target build" FORCE)
endif()
option(DEPTHAI_ENABLE_EVENTS_MANAGER "Enable Events Manager" ON)

set(DEPTHAI_HAS_APRIL_TAG ${DEPTHAI_ENABLE_APRIL_TAG})
if(WIN32)
@@ -108,18 +107,17 @@ endif()

if(DEPTHAI_ENABLE_PROTOBUF)
option(DEPTHAI_ENABLE_REMOTE_CONNECTION "Enable Remote Connection support" ON)
if(DEPTHAI_ENABLE_CURL)
option(DEPTHAI_ENABLE_EVENTS_MANAGER "Enable Events Manager" ON)
else()
message(STATUS "Events Manager disabled because Protobuf & curl support is disabled.")
option(DEPTHAI_ENABLE_EVENTS_MANAGER "Enable Events Manager" OFF)
endif()
else()
option(DEPTHAI_ENABLE_REMOTE_CONNECTION "Enable Remote Connection support" OFF)
message(STATUS "Remote Connection support disabled because Protobuf support is disabled.")
endif()

if(DEPTHAI_ENABLE_EVENTS_MANAGER)
if(NOT DEPTHAI_ENABLE_PROTOBUF OR NOT DEPTHAI_CURL_SUPPORT)
message(STATUS "Events Manager disabled because Protobuf & curl support is disabled.")
set(DEPTHAI_ENABLE_EVENTS_MANAGER OFF)
endif()
endif()

if(DEPTHAI_BUILD_PYTHON)
list(APPEND VCPKG_MANIFEST_FEATURES "python-bindings")
endif()
@@ -164,7 +162,7 @@ endif()
# Create depthai project
project(depthai VERSION "3.0.0" LANGUAGES CXX C)
set(DEPTHAI_PRE_RELEASE_TYPE "alpha") # Valid options are "alpha", "beta", "rc", ""
set(DEPTHAI_PRE_RELEASE_VERSION "10")
set(DEPTHAI_PRE_RELEASE_VERSION "11")

# Set DEPTHAI_VERSION universally, not conditionally
set(DEPTHAI_VERSION ${PROJECT_VERSION}-${DEPTHAI_PRE_RELEASE_TYPE}.${DEPTHAI_PRE_RELEASE_VERSION})
@@ -370,7 +368,8 @@ set(TARGET_CORE_SOURCES
src/pipeline/node/DetectionNetwork.cpp
src/pipeline/node/Script.cpp
src/pipeline/node/Pool.cpp
src/pipeline/node/Benchmark.cpp
src/pipeline/node/BenchmarkIn.cpp
src/pipeline/node/BenchmarkOut.cpp
src/pipeline/node/SpatialDetectionNetwork.cpp
src/pipeline/node/SystemLogger.cpp
src/pipeline/node/SpatialLocationCalculator.cpp
26 changes: 18 additions & 8 deletions bindings/python/src/MessageQueueBindings.cpp
@@ -40,15 +40,25 @@ void MessageQueueBindings::bind(pybind11::module& m, void* pCallstack) {

// Bind DataOutputQueue
auto addCallbackLambda = [](MessageQueue& q, py::function cb) -> int {
pybind11::module inspect_module = pybind11::module::import("inspect");
pybind11::object result = inspect_module.attr("signature")(cb).attr("parameters");
pybind11::module inspectModule = pybind11::module::import("inspect");
pybind11::object result = inspectModule.attr("signature")(cb).attr("parameters");
auto numParams = pybind11::len(result);
if(numParams == 2) {
return q.addCallback(cb.cast<std::function<void(std::string, std::shared_ptr<ADatatype>)>>());
} else if(numParams == 1) {
return q.addCallback(cb.cast<std::function<void(std::shared_ptr<ADatatype>)>>());
} else if(numParams == 0) {
return q.addCallback(cb.cast<std::function<void()>>());

if (numParams == 2) {
return q.addCallback([cb](std::string msg, std::shared_ptr<ADatatype> data) {
pybind11::gil_scoped_acquire gil;
cb(msg, data);
});
} else if (numParams == 1) {
return q.addCallback([cb](std::shared_ptr<ADatatype> data) {
pybind11::gil_scoped_acquire gil;
cb(data);
});
} else if (numParams == 0) {
return q.addCallback([cb]() {
pybind11::gil_scoped_acquire gil;
cb();
});
} else {
throw py::value_error("Callback must take either zero, one or two arguments");
}
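Note: callbacks passed to MessageQueue.addCallback are now wrapped in lambdas that re-acquire the GIL before invoking the Python callable, while still accepting zero, one, or two arguments. A minimal usage sketch (not part of the diff; the queue setup and accessors such as getSequenceNum() are assumed from the existing Python API):

import depthai as dai
import time

with dai.Pipeline() as p:
    cam = p.create(dai.node.Camera).build()
    q = cam.requestFullResolutionOutput().createOutputQueue()

    # One-argument callback: receives only the message
    q.addCallback(lambda msg: print("frame", msg.getSequenceNum()))

    # Two-argument callback: receives the queue name and the message
    q.addCallback(lambda name, msg: print(name, msg.getTimestamp()))

    p.start()
    while p.isRunning():
        time.sleep(1)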
4 changes: 3 additions & 1 deletion bindings/python/src/pipeline/PipelineBindings.cpp
@@ -240,7 +240,7 @@ void PipelineBindings::bind(pybind11::module& m, void* pCallstack){
py::gil_scoped_release release;
p.wait();
})
.def("stop", &Pipeline::stop)
.def("stop", &Pipeline::stop, py::call_guard<py::gil_scoped_release>(), DOC(dai, Pipeline, stop))
.def("run",
[](Pipeline& p) {
{
@@ -258,6 +258,8 @@ void PipelineBindings::bind(pybind11::module& m, void* pCallstack){
}
p.stop();
})
.def("build", &Pipeline::build)
.def("isBuilt", &Pipeline::isBuilt)
.def("isRunning", &Pipeline::isRunning)
.def("processTasks", &Pipeline::processTasks, py::arg("waitForTasks") = false, py::arg("timeoutSeconds") = -1.0)
.def("enableHolisticRecord", &Pipeline::enableHolisticRecord, py::arg("recordConfig"), DOC(dai, Pipeline, enableHolisticRecord))
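For reference, a minimal sketch of the newly exposed Pipeline methods (not part of the diff; whether an explicit build() before start() is required or merely allowed is an assumption):

import depthai as dai

with dai.Pipeline() as p:
    cam = p.create(dai.node.Camera).build()
    q = cam.requestFullResolutionOutput().createOutputQueue()

    p.build()           # explicitly build the pipeline graph
    assert p.isBuilt()
    p.start()
    frame = q.get()
    p.stop()            # stop() now releases the GIL while it waits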
@@ -53,7 +53,6 @@ void bind_imagemanipconfigv2(pybind11::module& m, void* pCallstack) {
py::arg("rect"),
py::arg("normalizedCoords"),
DOC(dai, ImageManipConfigV2, addCropRotatedRect))
.def("addResize", &ImageManipConfigV2::addResize, py::arg("w"), py::arg("h"), DOC(dai, ImageManipConfigV2, addResize))
.def(
"addScale", [](ImageManipConfigV2& self, float scale) { return self.addScale(scale); }, py::arg("scale"), DOC(dai, ImageManipConfigV2, addScale))
.def(
6 changes: 5 additions & 1 deletion bindings/python/src/pipeline/node/BenchmarkBindings.cpp
@@ -30,9 +30,13 @@ void bind_benchmark(pybind11::module& m, void* pCallstack) {
benchmarkOut.def_readonly("out", &BenchmarkOut::out, DOC(dai, node, BenchmarkOut, out))
.def_readonly("input", &BenchmarkOut::input, DOC(dai, node, BenchmarkOut, input))
.def("setNumMessagesToSend", &BenchmarkOut::setNumMessagesToSend, py::arg("num"), DOC(dai, node, BenchmarkOut, setNumMessagesToSend))
.def("setRunOnHost", &BenchmarkOut::setRunOnHost, py::arg("runOnHost"), DOC(dai, node, BenchmarkOut, setRunOnHost))
.def("setFps", &BenchmarkOut::setFps, py::arg("fps"), DOC(dai, node, BenchmarkOut, setFps));
benchmarkIn.def_readonly("input", &BenchmarkIn::input, DOC(dai, node, BenchmarkIn, input))
.def_readonly("report", &BenchmarkIn::report, DOC(dai, node, BenchmarkIn, report))
.def_readonly("passthrough", &BenchmarkIn::passthrough, DOC(dai, node, BenchmarkIn, passthrough))
.def("setNumMessagesToGet", &BenchmarkIn::setNumMessagesToGet, py::arg("num"), DOC(dai, node, BenchmarkIn, setNumMessagesToGet));
.def("setRunOnHost", &BenchmarkIn::setRunOnHost, py::arg("runOnHost"), DOC(dai, node, BenchmarkIn, setRunOnHost))
.def("logReportsAsWarnings", &BenchmarkIn::logReportsAsWarnings, py::arg("logReportsAsWarnings"), DOC(dai, node, BenchmarkIn, logReportsAsWarnings))
.def("measureIndividualLatencies", &BenchmarkIn::measureIndividualLatencies, py::arg("attachLatencies"), DOC(dai, node, BenchmarkIn, measureIndividualLatencies))
.def("sendReportEveryNMessages", &BenchmarkIn::sendReportEveryNMessages, py::arg("num"), DOC(dai, node, BenchmarkIn, sendReportEveryNMessages));
}
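A short sketch of the newly bound BenchmarkIn setters (not part of the diff; the camera linking follows the Python examples added later in this commit):

import depthai as dai

with dai.Pipeline() as p:
    cam = p.create(dai.node.Camera).build()
    benchmarkIn = p.create(dai.node.BenchmarkIn)
    cam.requestFullResolutionOutput().link(benchmarkIn.input)

    benchmarkIn.setRunOnHost(True)                 # run the node on the host
    benchmarkIn.logReportsAsWarnings(False)        # keep reports at info level
    benchmarkIn.measureIndividualLatencies(True)   # attach per-message latencies to the report
    benchmarkIn.sendReportEveryNMessages(50)

    reportQ = benchmarkIn.report.createOutputQueue()
    p.start()
    print(reportQ.get().fps)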
@@ -62,6 +62,13 @@ void bind_detectionnetwork(pybind11::module& m, void* pCallstack) {
py::arg("input"),
py::arg("model"),
py::arg("fps") = 30.0f)
.def("build",
([](DetectionNetwork& self, const std::shared_ptr<Camera>& input, std::string model, float fps) {
return self.build(input, NNModelDescription{model}, fps);
}),
py::arg("input"),
py::arg("model"),
py::arg("fps") = 30.0f)
.def("build",
py::overload_cast<const std::shared_ptr<Camera>&, NNArchive, float>(&DetectionNetwork::build),
py::arg("input"),
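The added overload lets DetectionNetwork.build() take a model-zoo name directly; the string is wrapped into an NNModelDescription internally. A hedged sketch (the model name "yolov6-nano" is only an example, borrowed from the benchmark example added below):

import depthai as dai

with dai.Pipeline() as p:
    cam = p.create(dai.node.Camera).build()
    # New overload: model given as a plain string instead of an NNModelDescription/NNArchive
    detNet = p.create(dai.node.DetectionNetwork).build(cam, "yolov6-nano", fps=30.0)
    detQ = detNet.out.createOutputQueue()
    p.start()
    detections = detQ.get()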
3 changes: 3 additions & 0 deletions bindings/python/src/pipeline/node/NeuralNetworkBindings.cpp
@@ -55,6 +55,9 @@ void bind_neuralnetwork(pybind11::module& m, void* pCallstack){
.def("build", py::overload_cast<dai::Node::Output&, const NNArchive&>(&NeuralNetwork::build), py::arg("input"), py::arg("nnArchive"), DOC(dai, node, NeuralNetwork, build))
.def("build", py::overload_cast<const std::shared_ptr<Camera>&, dai::NNModelDescription, float>(&NeuralNetwork::build), py::arg("input"), py::arg("modelDesc"), py::arg("fps")=30.0f, DOC(dai, node, NeuralNetwork, build,2))
.def("build", py::overload_cast<const std::shared_ptr<Camera>&, dai::NNArchive, float>(&NeuralNetwork::build), py::arg("input"), py::arg("nnArchive"), py::arg("fps")=30.0f, DOC(dai, node, NeuralNetwork, build, 3))
.def("build", [](NeuralNetwork& self, const std::shared_ptr<Camera>& input, const std::string& model, float fps) {
return self.build(input, NNModelDescription{model}, fps);
}, py::arg("input"), py::arg("model"), py::arg("fps")=30.0f, DOC(dai, node, NeuralNetwork, build))
.def("setBlob", py::overload_cast<dai::OpenVINO::Blob>(&NeuralNetwork::setBlob), py::arg("blob"), DOC(dai, node, NeuralNetwork, setBlob))
.def("setBlob", py::overload_cast<const dai::Path&>(&NeuralNetwork::setBlob), py::arg("path"), DOC(dai, node, NeuralNetwork, setBlob, 2))
.def("setModelPath",
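NeuralNetwork gains the same convenience overload; a minimal sketch under the same assumptions as above:

import depthai as dai

with dai.Pipeline() as p:
    cam = p.create(dai.node.Camera).build()
    nn = p.create(dai.node.NeuralNetwork).build(cam, "yolov6-nano", fps=30.0)
    nnQ = nn.out.createOutputQueue()
    p.start()
    result = nnQ.get()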
19 changes: 19 additions & 0 deletions bindings/python/src/pipeline/node/NodeBindings.cpp
@@ -313,6 +313,16 @@ void NodeBindings::bind(pybind11::module& m, void* pCallstack) {
static_cast<Node& (Node::Input::*)()>(&Node::Input::getParent),
py::return_value_policy::reference_internal,
DOC(dai, Node, Input, getParent))
.def("getPossibleDatatypes", &Node::Input::getPossibleDatatypes, DOC(dai, Node, Input, getPossibleDatatypes))
.def("setPossibleDatatypes", &Node::Input::setPossibleDatatypes, py::arg("types"), DOC(dai, Node, Input, setPossibleDatatypes))
.def("setPossibleDatatypes", [](Node::Input& input, const std::vector<std::tuple<DatatypeEnum, bool>>& types) {
std::vector<Node::DatatypeHierarchy> converted;
converted.reserve(types.size());
for(const auto& t : types) {
converted.emplace_back(std::get<0>(t), std::get<1>(t));
}
input.setPossibleDatatypes(converted);
}, py::arg("types"), DOC(dai, Node, Input, setPossibleDatatypes))
.def("setWaitForMessage", &Node::Input::setWaitForMessage, py::arg("waitForMessage"), DOC(dai, Node, Input, setWaitForMessage))
.def("getWaitForMessage", &Node::Input::getWaitForMessage, DOC(dai, Node, Input, getWaitForMessage))
.def("setReusePreviousMessage", &Node::Input::setReusePreviousMessage, py::arg("reusePreviousMessage"), DOC(dai, Node, Input, setReusePreviousMessage))
@@ -336,6 +346,15 @@ void NodeBindings::bind(pybind11::module& m, void* pCallstack) {
py::arg("possibleDatatypes") = Node::OutputDescription{}.types,
py::keep_alive<1, 0>())
.def("getPossibleDatatypes", &Node::Output::getPossibleDatatypes, DOC(dai, Node, Output, getPossibleDatatypes))
.def("setPossibleDatatypes", &Node::Output::setPossibleDatatypes, py::arg("types"), DOC(dai, Node, Output, setPossibleDatatypes))
.def("setPossibleDatatypes", [](Node::Output& output, const std::vector<std::tuple<DatatypeEnum, bool>>& types) {
std::vector<Node::DatatypeHierarchy> converted;
converted.reserve(types.size());
for(const auto& t : types) {
converted.emplace_back(std::get<0>(t), std::get<1>(t));
}
output.setPossibleDatatypes(converted);
}, py::arg("types"), DOC(dai, Node, Output, setPossibleDatatypes))
.def("getParent",
static_cast<const Node& (Node::Output::*)() const>(&Node::Output::getParent),
py::return_value_policy::reference_internal,
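The tuple-based overload converts (datatype, allowDescendants) pairs into DatatypeHierarchy entries. A small sketch (assuming the datatype enum is exposed as dai.DatatypeEnum in the Python API):

import depthai as dai

with dai.Pipeline() as p:
    benchmarkIn = p.create(dai.node.BenchmarkIn)
    # Restrict the input to ImgFrame messages (True also allows descendant types)
    benchmarkIn.input.setPossibleDatatypes([(dai.DatatypeEnum.ImgFrame, True)])
    print(benchmarkIn.input.getPossibleDatatypes())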
@@ -41,6 +41,9 @@ void bind_spatialdetectionnetwork(pybind11::module& m, void* pCallstack){
spatialDetectionNetwork
// Copied from NN node
.def("build", py::overload_cast<const std::shared_ptr<Camera>&, const std::shared_ptr<StereoDepth>&, NNModelDescription, float>(&SpatialDetectionNetwork::build), py::arg("input"), py::arg("stereo"), py::arg("model"), py::arg("fps") = 30.0f, DOC(dai, node, SpatialDetectionNetwork, build))
.def("build", ([](SpatialDetectionNetwork& self, const std::shared_ptr<Camera>& input, const std::shared_ptr<StereoDepth>& stereo, std::string model, float fps) {
return self.build(input, stereo, NNModelDescription{model}, fps);
}), py::arg("input"), py::arg("stereo"), py::arg("model"), py::arg("fps") = 30.0f, DOC(dai, node, SpatialDetectionNetwork, build, 2))
.def("build", py::overload_cast<const std::shared_ptr<Camera>&, const std::shared_ptr<StereoDepth>&, NNArchive, float>(&SpatialDetectionNetwork::build), py::arg("input"), py::arg("stereo"), py::arg("nnArchive"), py::arg("fps") = 30.0f, DOC(dai, node, SpatialDetectionNetwork, build, 2))
.def("setBlobPath", &SpatialDetectionNetwork::setBlobPath, py::arg("path"), DOC(dai, node, SpatialDetectionNetwork, setBlobPath))
.def("setNumPoolFrames", &SpatialDetectionNetwork::setNumPoolFrames, py::arg("numFrames"), DOC(dai, node, SpatialDetectionNetwork, setNumPoolFrames))
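SpatialDetectionNetwork gets the same string-based build overload, taking the camera, a StereoDepth node, and a model name. A hedged sketch (the StereoDepth setup is elided and assumed to be configured elsewhere):

import depthai as dai

with dai.Pipeline() as p:
    cam = p.create(dai.node.Camera).build()
    stereo = p.create(dai.node.StereoDepth)  # assumed to be linked to left/right cameras elsewhere
    sdn = p.create(dai.node.SpatialDetectionNetwork).build(cam, stereo, "yolov6-nano", fps=30.0)
    q = sdn.out.createOutputQueue()
    p.start()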
2 changes: 1 addition & 1 deletion cmake/Depthai/DepthaiDeviceRVC4Config.cmake
@@ -4,4 +4,4 @@ set(DEPTHAI_DEVICE_RVC4_MATURITY "snapshot")

# "version if applicable"
# set(DEPTHAI_DEVICE_RVC4_VERSION "0.0.1+93f7b75a885aa32f44c5e9f53b74470c49d2b1af")
set(DEPTHAI_DEVICE_RVC4_VERSION "0.0.1+72e173caa7b97651f1ca4428091c7e46ed2c62f1")
set(DEPTHAI_DEVICE_RVC4_VERSION "0.0.1+6fc71e8674fe7a520b93f4370cb157805f0bc0f2")
2 changes: 1 addition & 1 deletion cmake/Depthai/DepthaiDeviceSideConfig.cmake
@@ -2,7 +2,7 @@
set(DEPTHAI_DEVICE_SIDE_MATURITY "snapshot")

# "full commit hash of device side binary"
set(DEPTHAI_DEVICE_SIDE_COMMIT "c3e98b39b6a5445b2187b4109d03a146c6df37dd")
set(DEPTHAI_DEVICE_SIDE_COMMIT "052ee5648d9d5a49e6367c4a81d14a9b9ae8bfcf")

# "version if applicable"
set(DEPTHAI_DEVICE_SIDE_VERSION "")
2 changes: 1 addition & 1 deletion examples/cpp/CMakeLists.txt
@@ -286,7 +286,7 @@ dai_add_example(model_zoo RVC2/ModelZoo/model_zoo.cpp OFF OFF)

# Events Manager
if(DEPTHAI_ENABLE_EVENTS_MANAGER)
dai_add_example(events HostNodes/events.cpp ON OFF)
dai_add_example(events Events/events.cpp ON OFF)
endif()
# Image Align
dai_add_example(image_align RVC2/ImageAlign/image_align.cpp OFF OFF)
@@ -1,4 +1,3 @@

#include <chrono>
#include <iostream>
#include <string>
@@ -27,20 +26,6 @@ int main(int argc, char* argv[]) {
std::vector<std::shared_ptr<dai::utility::EventData>> data;
data.emplace_back(fileData);
eventsManager->sendEvent("testdata", nullptr, data, {"tag3", "tag4"}, {{"key8", "value8"}});
auto fileData2 = std::make_shared<dai::utility::EventData>("/test.txt");
std::vector<std::shared_ptr<dai::utility::EventData>> data2;
data2.push_back(fileData2);
// will fail, you sendEvent instead of sendSnap
eventsManager->sendSnap("testdata2", nullptr, data2, {"tag5", "tag6"}, {{"key8", "value8"}});
auto fileData3 = std::make_shared<dai::utility::EventData>("/test.jpg");
std::vector<std::shared_ptr<dai::utility::EventData>> data3;
data3.push_back(fileData3);
eventsManager->sendSnap("testdata3", nullptr, data3, {"tag7", "tag8"}, {{"key8", "value8"}});
std::vector<std::shared_ptr<dai::utility::EventData>> data4;
data4.push_back(fileData);
data4.push_back(fileData2);
eventsManager->sendEvent("testdata4", nullptr, data4, {"tag9", "tag10"}, {{"key8", "value8"}});
data4.push_back(fileData3);
while(pipeline.isRunning()) {
auto rgb = previewQ->get<dai::ImgFrame>();

@@ -49,10 +34,6 @@ int main(int argc, char* argv[]) {

if(!sent) {
eventsManager->sendSnap("rgb", rgb, {}, {"tag11", "tag12"}, {{"key", "value"}});
// will fail due to two images being sent, use sendEvent instead
eventsManager->sendSnap("test2", rgb, data3, {"tag13", "tag14"}, {{"key8", "value8"}});
// will fail, sendSnap requires only one image data to be present
eventsManager->sendSnap("test3", rgb, data4, {"tag13", "tag14"}, {{"key8", "value8"}});
sent = true;
}
//
16 changes: 16 additions & 0 deletions examples/python/Benchmark/benchmark_camera.py
@@ -0,0 +1,16 @@
#!/usr/bin/env python3
import depthai as dai
import time

# Create pipeline
with dai.Pipeline() as pipeline:
# Create the nodes
cam = pipeline.create(dai.node.Camera).build()
benchmarkIn = pipeline.create(dai.node.BenchmarkIn)
# benchmarkIn.setRunOnHost(True) # The node can also run on host and include the transfer limitation, default is False
output = cam.requestFullResolutionOutput()
output.link(benchmarkIn.input)

pipeline.start()
while pipeline.isRunning():
time.sleep(1) # Let the logger print out the FPS
51 changes: 51 additions & 0 deletions examples/python/Benchmark/benchmark_nn.py
@@ -0,0 +1,51 @@
import depthai as dai
import numpy as np


# First prepare the model for benchmarking
device = dai.Device()
modelPath = dai.getModelFromZoo(dai.NNModelDescription("yolov6-nano", platform=device.getPlatformAsString()))
modelArhive = dai.NNArchive(modelPath)
inputSize = modelArhive.getInputSize()
type = modelArhive.getConfig().model.inputs[0].preprocessing.daiType

if type:
try:
frameType = dai.ImgFrame.Type.__getattribute__(type)
except AttributeError:
type = None

if not type:
if device.getPlatform() == dai.Platform.RVC2:
frameType = dai.ImgFrame.Type.BGR888p
else:
frameType = dai.ImgFrame.Type.BGR888i


# Construct the input (white) image for benchmarking
img = np.ones((inputSize[1], inputSize[0], 3), np.uint8) * 255
inputFrame = dai.ImgFrame()
inputFrame.setCvFrame(img, frameType)

with dai.Pipeline(device) as p:
benchmarkOut = p.create(dai.node.BenchmarkOut)
benchmarkOut.setRunOnHost(False) # The node can run on host or on device
benchmarkOut.setFps(-1) # As fast as possible

neuralNetwork = p.create(dai.node.NeuralNetwork).build(benchmarkOut.out, modelArhive)

benchmarkIn = p.create(dai.node.BenchmarkIn)
benchmarkIn.setRunOnHost(False) # The node can run on host or on device
benchmarkIn.sendReportEveryNMessages(100)
benchmarkIn.logReportsAsWarnings(False)
neuralNetwork.out.link(benchmarkIn.input)

outputQueue = benchmarkIn.report.createOutputQueue()
inputQueue = benchmarkOut.input.createInputQueue()

p.start()
inputQueue.send(inputFrame) # Send the input image only once
while p.isRunning():
benchmarkReport = outputQueue.get()
assert isinstance(benchmarkReport, dai.BenchmarkReport)
print(f"FPS is {benchmarkReport.fps}")
29 changes: 29 additions & 0 deletions examples/python/Benchmark/benchmark_simple.py
@@ -0,0 +1,29 @@
import depthai as dai

with dai.Pipeline(createImplicitDevice=False) as p:
# Create a BenchmarkOut node
# It will listen on the input to get the first message and then send it out at a specified rate
# The node sends the same message out (creates new pointers), not deep copies.
benchmarkOut = p.create(dai.node.BenchmarkOut)
benchmarkOut.setRunOnHost(True) # The node can run on host or on device
benchmarkOut.setFps(30)

# Create a BenchmarkIn node
# This node is receiving the messages on the input and measuring the FPS and latency.
# If the input comes from a BenchmarkOut node, latency measurement is not always meaningful: the message is not deep copied,
# so the timestamps stay the same and the measured latency appears to grow over time.
benchmarkIn = p.create(dai.node.BenchmarkIn)
benchmarkIn.setRunOnHost(True) # The node can run on host or on device
benchmarkIn.sendReportEveryNMessages(100)

benchmarkOut.out.link(benchmarkIn.input)
outputQueue = benchmarkIn.report.createOutputQueue()
inputQueue = benchmarkOut.input.createInputQueue()

p.start()
imgFrame = dai.ImgFrame()
inputQueue.send(imgFrame)
while p.isRunning():
benchmarkReport = outputQueue.get()
assert isinstance(benchmarkReport, dai.BenchmarkReport)
print(f"FPS is {benchmarkReport.fps}")