From e7349305a4161af9452ab5e50a8cb36cdba7c178 Mon Sep 17 00:00:00 2001
From: asahtik
Date: Tue, 8 Oct 2024 12:38:43 +0200
Subject: [PATCH] Allow the Display node to display multiple video streams

---
 .../RVC4/Camera/camera_multiple_outputs.cpp  | 33 ++++++++---------
 .../depthai/pipeline/node/host/Display.hpp   |  2 +-
 src/pipeline/node/host/Display.cpp           | 36 ++++++++++++++++---
 3 files changed, 47 insertions(+), 24 deletions(-)

diff --git a/examples/cpp/RVC4/Camera/camera_multiple_outputs.cpp b/examples/cpp/RVC4/Camera/camera_multiple_outputs.cpp
index ce43b3060..b07842c64 100644
--- a/examples/cpp/RVC4/Camera/camera_multiple_outputs.cpp
+++ b/examples/cpp/RVC4/Camera/camera_multiple_outputs.cpp
@@ -1,8 +1,11 @@
 // Includes common necessary includes for development using depthai library
 #include <opencv2/opencv.hpp>
+#include <string>
 
 #include "depthai/capabilities/ImgFrameCapability.hpp"
 #include "depthai/depthai.hpp"
+#include "depthai/pipeline/node/Sync.hpp"
+#include "depthai/pipeline/node/host/Display.hpp"
 
 int main(int argc, char** argv) {
     if(argc < 4 || (argc - 1) % 3 != 0) {
@@ -16,13 +19,19 @@ int main(int argc, char** argv) {
 
     dai::Pipeline pipeline;
     auto camRgb = pipeline.create<dai::node::Camera>()->build();
+    auto sync = pipeline.create<dai::node::Sync>();
+    auto display = pipeline.create<dai::node::Display>();
+
+    sync->setSyncAttempts(0);
+    sync->setRunOnHost(true);
 
     if(sizes.empty()) {
         throw std::runtime_error("internal error to few sizes");
     }
 
-    std::vector<std::shared_ptr<dai::MessageQueue>> videos;
+    int index = 0;
     for(const auto& size : sizes) {
+        ++index;
         dai::ImgFrameCapability cap;
         cap.type = dai::ImgFrame::Type::NV12; // Fastest
         cap.size.value = std::pair{std::get<0>(size), std::get<1>(size)};
@@ -40,28 +49,14 @@ int main(int argc, char** argv) {
             throw std::runtime_error("Resize mode argument (every 3rd) must be 0, 1 or 2");
         }
        auto* output = camRgb->requestOutput(cap, true);
-        videos.push_back(output->createOutputQueue());
+        output->link(sync->inputs[std::to_string(index)]);
     }
 
+    sync->out.link(display->input);
+
     pipeline.start();
 
-    while(pipeline.isRunning()) {
-        size_t videoIndex = 0;
-        for(const auto& video : videos) {
-            auto videoIn = video->tryGet<dai::ImgFrame>();
-            // Get BGR frame from NV12 encoded video frame to show with opencv
-            // Visualizing the frame on slower hosts might have overhead
-            if(videoIn) {
-                cv::imshow("video_" + std::to_string(videoIndex), videoIn->getCvFrame());
-            }
-            ++videoIndex;
-        }
+    pipeline.wait();
 
-        int key = cv::waitKey(1);
-        if(key == 'q' || key == 'Q') {
-            pipeline.stop();
-            return 0;
-        }
-    }
     return 0;
 }
diff --git a/include/depthai/pipeline/node/host/Display.hpp b/include/depthai/pipeline/node/host/Display.hpp
index c34ed4a1c..e766a149d 100644
--- a/include/depthai/pipeline/node/host/Display.hpp
+++ b/include/depthai/pipeline/node/host/Display.hpp
@@ -15,4 +15,4 @@ class Display : public dai::NodeCRTP<ThreadedHostNode, Display> {
     void run() override;
 };
 } // namespace node
-} // namespace dai
\ No newline at end of file
+} // namespace dai
diff --git a/src/pipeline/node/host/Display.cpp b/src/pipeline/node/host/Display.cpp
index d9ae8d98e..77c789b56 100644
--- a/src/pipeline/node/host/Display.cpp
+++ b/src/pipeline/node/host/Display.cpp
@@ -1,9 +1,12 @@
 #include "depthai/pipeline/node/host/Display.hpp"
 
 #include <chrono>
+#include <memory>
 #include <opencv2/opencv.hpp>
+#include <unordered_map>
 
 #include "depthai/pipeline/Pipeline.hpp"
+#include "pipeline/datatype/MessageGroup.hpp"
 
 namespace dai {
 namespace node {
@@ -34,12 +37,15 @@ class FPSCounter {
 Display::Display(std::string name) : name(std::move(name)) {}
 
 void Display::run() {
-    auto fpsCounter = FPSCounter();
+    std::unordered_map<std::string, FPSCounter> fpsCounters;
+    fpsCounters["default"] = FPSCounter();
     while(isRunning()) {
-        std::shared_ptr<ImgFrame> imgFrame = input.get<ImgFrame>();
+        auto msg = input.get();
+        auto imgFrame = std::dynamic_pointer_cast<ImgFrame>(msg);
+        auto msgGroup = std::dynamic_pointer_cast<MessageGroup>(msg);
         if(imgFrame != nullptr) {
-            fpsCounter.update();
-            auto fps = fpsCounter.getFPS();
+            fpsCounters["default"].update();
+            auto fps = fpsCounters["default"].getFPS();
             using namespace std::chrono;
             auto latencyMs = duration_cast<milliseconds>(steady_clock::now() - imgFrame->getTimestamp());
             auto frame = imgFrame->getCvFrame();
@@ -53,6 +59,28 @@ void Display::run() {
                 auto parentPipeline = getParentPipeline();
                 parentPipeline.stop();
             }
+        } else if(msgGroup != nullptr) {
+            for(const auto& [k, v] : *msgGroup) {
+                auto imgFrame = std::dynamic_pointer_cast<ImgFrame>(v);
+                if(imgFrame != nullptr) {
+                    fpsCounters[k].update();
+                    auto fps = fpsCounters[k].getFPS();
+                    using namespace std::chrono;
+                    auto latencyMs = duration_cast<milliseconds>(steady_clock::now() - imgFrame->getTimestamp());
+                    auto frame = imgFrame->getCvFrame();
+                    cv::putText(frame, fmt::format("FPS: {:.2f}", fps), cv::Point(10, 30), cv::FONT_HERSHEY_SIMPLEX, 1, cv::Scalar(0, 255, 0), 2);
+                    cv::putText(
+                        frame, fmt::format("Latency: {}ms", latencyMs.count()), cv::Point(10, 60), cv::FONT_HERSHEY_SIMPLEX, 1, cv::Scalar(0, 255, 0), 2);
+                    cv::imshow(name + " - " + k, frame);
+                }
+            }
+            auto key = cv::waitKey(1);
+            if(key == 'q') {
+                // Get the parent pipeline and stop it
+                // TODO(Morato) - add a convience stop method directly to the pipeline
+                auto parentPipeline = getParentPipeline();
+                parentPipeline.stop();
+            }
         }
     }
     fmt::print("Display node stopped\n");
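
Taken together, the patched example wires the pipeline roughly as in the sketch below. This is a consolidated restatement, not part of the patch: the hard-coded sizes, the "1"/"2" input keys, and the create<dai::node::Camera/Sync/Display>() template arguments are illustrative assumptions (the patch shows the calls with their template arguments stripped), while the real example parses sizes and resize modes from argv.

// Sketch only: multiple Camera outputs synced into one Display node.
#include <cstdint>
#include <string>
#include <utility>
#include <vector>

#include "depthai/capabilities/ImgFrameCapability.hpp"
#include "depthai/depthai.hpp"
#include "depthai/pipeline/node/Sync.hpp"
#include "depthai/pipeline/node/host/Display.hpp"

int main() {
    dai::Pipeline pipeline;
    auto camRgb = pipeline.create<dai::node::Camera>()->build();
    auto sync = pipeline.create<dai::node::Sync>();
    auto display = pipeline.create<dai::node::Display>();

    sync->setSyncAttempts(0);  // same settings as the patched example
    sync->setRunOnHost(true);

    // Two illustrative stream sizes; the real example reads them from argv.
    std::vector<std::pair<uint32_t, uint32_t>> sizes = {{1920, 1080}, {640, 480}};

    int index = 0;
    for(const auto& size : sizes) {
        ++index;
        dai::ImgFrameCapability cap;
        cap.type = dai::ImgFrame::Type::NV12;
        cap.size.value = std::pair{size.first, size.second};
        // Each requested output becomes one named Sync input ("1", "2", ...);
        // the Display node reuses that key as the window title suffix.
        camRgb->requestOutput(cap, true)->link(sync->inputs[std::to_string(index)]);
    }

    // Sync emits a MessageGroup; the patched Display opens one window per entry.
    sync->out.link(display->input);

    pipeline.start();
    pipeline.wait();
    return 0;
}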