diff --git a/CMakeLists.txt b/CMakeLists.txt index e678a2919..ecd5a4850 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -46,7 +46,7 @@ if(WIN32) endif() # Create depthai project -project(depthai VERSION "2.23.0" LANGUAGES CXX C) +project(depthai VERSION "2.24.0" LANGUAGES CXX C) get_directory_property(has_parent PARENT_DIRECTORY) if(has_parent) set(DEPTHAI_VERSION ${PROJECT_VERSION} PARENT_SCOPE) @@ -208,8 +208,10 @@ add_library(${TARGET_CORE_NAME} src/pipeline/node/ColorCamera.cpp src/pipeline/node/Camera.cpp src/pipeline/node/ToF.cpp + src/pipeline/node/MessageDemux.cpp src/pipeline/node/MonoCamera.cpp src/pipeline/node/StereoDepth.cpp + src/pipeline/node/Sync.cpp src/pipeline/node/NeuralNetwork.cpp src/pipeline/node/ImageManip.cpp src/pipeline/node/Warp.cpp @@ -229,6 +231,7 @@ add_library(${TARGET_CORE_NAME} src/pipeline/node/UVC.cpp src/pipeline/datatype/Buffer.cpp src/pipeline/datatype/ImgFrame.cpp + src/pipeline/datatype/EncodedFrame.cpp src/pipeline/datatype/ImageManipConfig.cpp src/pipeline/datatype/CameraControl.cpp src/pipeline/datatype/NNData.cpp @@ -247,6 +250,8 @@ add_library(${TARGET_CORE_NAME} src/pipeline/datatype/TrackedFeatures.cpp src/pipeline/datatype/FeatureTrackerConfig.cpp src/pipeline/datatype/ToFConfig.cpp + src/pipeline/datatype/MessageGroup.cpp + src/utility/H26xParsers.cpp src/utility/Initialization.cpp src/utility/Resources.cpp src/utility/Path.cpp diff --git a/README.md b/README.md index e499c363b..d951d888f 100644 --- a/README.md +++ b/README.md @@ -186,6 +186,9 @@ The following environment variables can be set to alter default behavior of the | DEPTHAI_BOOTLOADER_BINARY_USB | Overrides device USB Bootloader binary. Mostly for internal debugging purposes. | | DEPTHAI_BOOTLOADER_BINARY_ETH | Overrides device Network Bootloader binary. Mostly for internal debugging purposes. | | DEPTHAI_ALLOW_FACTORY_FLASHING | Internal use only | +| DEPTHAI_LIBUSB_ANDROID_JAVAVM | JavaVM pointer that is passed to libusb for rootless Android interaction with devices. Interpreted as decimal value of uintptr_t | +| DEPTHAI_CRASHDUMP | Directory in which to save the crash dump. | +| DEPTHAI_CRASHDUMP_TIMEOUT | Specifies the duration in seconds to wait for device reboot when obtaining a crash dump. Crash dump retrieval disabled if 0. 
|

 ## Running tests

diff --git a/cmake/Depthai/DepthaiDeviceSideConfig.cmake b/cmake/Depthai/DepthaiDeviceSideConfig.cmake
index 8ad8d1296..dccf31c58 100644
--- a/cmake/Depthai/DepthaiDeviceSideConfig.cmake
+++ b/cmake/Depthai/DepthaiDeviceSideConfig.cmake
@@ -2,7 +2,7 @@ set(DEPTHAI_DEVICE_SIDE_MATURITY "snapshot")
 
 # "full commit hash of device side binary"
-set(DEPTHAI_DEVICE_SIDE_COMMIT "39a9d271a9ed0172f6481877723fca96d41b54c6")
+set(DEPTHAI_DEVICE_SIDE_COMMIT "a95f582a61ec9bdbd0f72dec84822455872ffaf7")
 
 # "version if applicable"
 set(DEPTHAI_DEVICE_SIDE_VERSION "")
diff --git a/cmake/Hunter/config.cmake b/cmake/Hunter/config.cmake
index 8b1ec7984..6a7a85458 100644
--- a/cmake/Hunter/config.cmake
+++ b/cmake/Hunter/config.cmake
@@ -7,9 +7,9 @@ hunter_config(
 
 hunter_config(
     XLink
-    VERSION "luxonis-2021.4.2-develop"
-    URL "https://github.com/luxonis/XLink/archive/c940feaf9321f06a7d9660f28e686a9718135f38.tar.gz"
-    SHA1 "52935b6ceb470ee632de3348b9d2aaa2c6c24ac0"
+    VERSION "luxonis-2021.4.2-xlink-linkid-race-fix"
+    URL "https://github.com/luxonis/XLink/archive/e9eb1ef38030176ad70cddd3b545d5e6c509f1e1.tar.gz"
+    SHA1 "b1e4ded41cd7b9c37189468e2aaddbb10cbda9f6"
     CMAKE_ARGS
         XLINK_ENABLE_LIBUSB=${DEPTHAI_ENABLE_LIBUSB}
 )
@@ -101,9 +101,9 @@ hunter_config(
 # Specific Catch2 version
 hunter_config(
     Catch2
-    VERSION "2.13.7"
-    URL "https://github.com/catchorg/Catch2/archive/refs/tags/v3.2.1.tar.gz"
-    SHA1 "acfba7f71cbbbbf60bc1bc4c0e3efca4a9c70df7"
+    VERSION "3.4.0"
+    URL "https://github.com/catchorg/Catch2/archive/refs/tags/v3.4.0.tar.gz"
+    SHA1 "4c308576c856a43dc88949a8f64ef90ebf94ae1b"
 )
 
 # ZLib - Luxonis fix for alias on imported target for old CMake versions
diff --git a/examples/CMakeLists.txt b/examples/CMakeLists.txt
index 1be23d128..606ab29ff 100644
--- a/examples/CMakeLists.txt
+++ b/examples/CMakeLists.txt
@@ -369,3 +369,9 @@ target_compile_definitions(detection_parser PRIVATE BLOB_PATH="${mobilenet_blob}
 
 # DetectionParser
 dai_add_example(crash_report CrashReport/crash_report.cpp OFF)
+
+# Sync
+dai_add_example(sync_scripts Sync/sync_scripts.cpp ON)
+dai_add_example(demux_message_group Sync/demux_message_group.cpp ON)
+dai_add_example(depth_video_synced Sync/depth_video_synced.cpp ON)
+dai_add_example(imu_video_synced Sync/imu_video_synced.cpp ON)
diff --git a/examples/SpatialDetection/spatial_calculator_multi_roi.cpp b/examples/SpatialDetection/spatial_calculator_multi_roi.cpp
index c24e81982..78e4ca724 100644
--- a/examples/SpatialDetection/spatial_calculator_multi_roi.cpp
+++ b/examples/SpatialDetection/spatial_calculator_multi_roi.cpp
@@ -61,7 +61,7 @@ int main() {
 
     // Connect to device and start pipeline
    dai::Device device(pipeline);
-    device.setIrLaserDotProjectorBrightness(1000);
+    device.setIrLaserDotProjectorIntensity(0.7f);
 
    // Output queue will be used to get the depth frames from the outputs defined above
    auto depthQueue = device.getOutputQueue("depth", 4, false);
@@ -106,4 +106,4 @@ int main() {
        }
    }
    return 0;
-}
\ No newline at end of file
+}
diff --git a/examples/Sync/demux_message_group.cpp b/examples/Sync/demux_message_group.cpp
new file mode 100644
index 000000000..b8845242e
--- /dev/null
+++ b/examples/Sync/demux_message_group.cpp
@@ -0,0 +1,63 @@
+#include <chrono>
+#include <iostream>
+
+#include "depthai/depthai.hpp"
+
+int main() {
+    dai::Pipeline pipeline;
+
+    auto script1 = pipeline.create<dai::node::Script>();
+    script1->setScript(
+        R"SCRPT(
+from time import sleep
+
+while True:
+    sleep(1)
+    b = Buffer(512)
+    b.setData(bytes(4 * [i for i in range(0, 128)]))
+    b.setTimestamp(Clock.now())
+    node.io['out'].send(b)
+)SCRPT");
+
+    auto script2 = pipeline.create<dai::node::Script>();
+    script2->setScript(
+        R"SCRPT(
+from time import sleep
+
+while True:
+    sleep(0.3)
+    b = Buffer(512)
+    b.setData(bytes(4 * [i for i in range(128, 256)]))
+    b.setTimestamp(Clock.now())
+    node.io['out'].send(b)
+)SCRPT");
+
+    auto sync = pipeline.create<dai::node::Sync>();
+    sync->setSyncThreshold(std::chrono::milliseconds(100));
+
+    auto demux = pipeline.create<dai::node::MessageDemux>();
+
+    auto xout1 = pipeline.create<dai::node::XLinkOut>();
+    xout1->setStreamName("xout1");
+    auto xout2 = pipeline.create<dai::node::XLinkOut>();
+    xout2->setStreamName("xout2");
+
+    script1->outputs["out"].link(sync->inputs["s1"]);
+    script2->outputs["out"].link(sync->inputs["s2"]);
+    sync->out.link(demux->input);
+    demux->outputs["s1"].link(xout1->input);
+    demux->outputs["s2"].link(xout2->input);
+
+    dai::Device device(pipeline);
+    std::cout << "Start" << std::endl;
+    auto queue1 = device.getOutputQueue("xout1", 10, true);
+    auto queue2 = device.getOutputQueue("xout2", 10, true);
+    while(true) {
+        auto bufS1 = queue1->get<dai::Buffer>();
+        auto bufS2 = queue2->get<dai::Buffer>();
+        std::cout << "Buffer 1 timestamp: " << bufS1->getTimestamp().time_since_epoch().count() << std::endl;
+        std::cout << "Buffer 2 timestamp: " << bufS2->getTimestamp().time_since_epoch().count() << std::endl;
+        std::cout << "----------" << std::endl;
+        std::this_thread::sleep_for(std::chrono::milliseconds(200));
+    }
+}
diff --git a/examples/Sync/depth_video_synced.cpp b/examples/Sync/depth_video_synced.cpp
new file mode 100644
index 000000000..dad8ef19a
--- /dev/null
+++ b/examples/Sync/depth_video_synced.cpp
@@ -0,0 +1,68 @@
+#include <iostream>
+
+// Includes common necessary includes for development using depthai library
+#include "depthai/depthai.hpp"
+
+int main() {
+    // Create pipeline
+    dai::Pipeline pipeline;
+
+    // Define sources and outputs
+    auto monoLeft = pipeline.create<dai::node::MonoCamera>();
+    auto monoRight = pipeline.create<dai::node::MonoCamera>();
+    auto color = pipeline.create<dai::node::ColorCamera>();
+    auto stereo = pipeline.create<dai::node::StereoDepth>();
+    auto sync = pipeline.create<dai::node::Sync>();
+
+    auto xoutGrp = pipeline.create<dai::node::XLinkOut>();
+
+    // XLinkOut
+    xoutGrp->setStreamName("xout");
+
+    // Properties
+    monoLeft->setResolution(dai::MonoCameraProperties::SensorResolution::THE_400_P);
+    monoLeft->setCamera("left");
+    monoRight->setResolution(dai::MonoCameraProperties::SensorResolution::THE_400_P);
+    monoRight->setCamera("right");
+
+    stereo->setDefaultProfilePreset(dai::node::StereoDepth::PresetMode::HIGH_ACCURACY);
+
+    color->setCamera("color");
+
+    sync->setSyncThreshold(std::chrono::milliseconds(100));
+
+    // Linking
+    monoLeft->out.link(stereo->left);
+    monoRight->out.link(stereo->right);
+
+    stereo->disparity.link(sync->inputs["disparity"]);
+    color->video.link(sync->inputs["video"]);
+
+    sync->out.link(xoutGrp->input);
+
+    // Connect to device and start pipeline
+    dai::Device device(pipeline);
+
+    auto queue = device.getOutputQueue("xout", 10, true);
+
+    float disparityMultiplier = 255 / stereo->initialConfig.getMaxDisparity();
+
+    while(true) {
+        auto msgGrp = queue->get<dai::MessageGroup>();
+        for(auto& frm : *msgGrp) {
+            auto imgFrm = std::dynamic_pointer_cast<dai::ImgFrame>(frm.second);
+            cv::Mat img = imgFrm->getCvFrame();
+            if(frm.first == "disparity") {
+                img.convertTo(img, CV_8UC1, disparityMultiplier);  // Extend disparity range
+                cv::applyColorMap(img, img, cv::COLORMAP_JET);
+            }
+            cv::imshow(frm.first, img);
+        }
+
+        int key = cv::waitKey(1);
+        if(key == 'q' || key == 'Q') {
+            return 0;
+        }
+    }
+    return 0;
+}
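The example above displays the grouped frames but never checks how far apart they actually were. For reference, a minimal host-side check (not part of this patch) using the isSynced() and getIntervalNs() accessors that MessageGroup introduces later in this diff; the 100 ms threshold simply mirrors the setSyncThreshold() value used in the example, and the snippet assumes it runs inside the example's while(true) receive loop:

    // Sketch only: skip groups whose members are further apart than the configured threshold.
    auto msgGrp = queue->get<dai::MessageGroup>();
    constexpr int64_t thresholdNs = 100 * 1000 * 1000;  // 100 ms, same as setSyncThreshold above
    if(!msgGrp->isSynced(thresholdNs)) {
        std::cout << "Group out of sync, interval: " << msgGrp->getIntervalNs() / 1e6 << " ms" << std::endl;
        continue;  // requires <iostream>; assumes the example's receive loop
    }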
diff --git a/examples/Sync/imu_video_synced.cpp b/examples/Sync/imu_video_synced.cpp
new file mode 100644
index 000000000..9a12f96cb
--- /dev/null
+++ b/examples/Sync/imu_video_synced.cpp
@@ -0,0 +1,74 @@
+#include <cstdio>
+#include <iostream>
+
+#include "depthai/depthai.hpp"
+
+int main() {
+    dai::Device device;
+
+    auto imuType = device.getConnectedIMU();
+    auto imuFirmwareVersion = device.getIMUFirmwareVersion();
+    std::cout << "IMU type: " << imuType << ", firmware version: " << imuFirmwareVersion << std::endl;
+
+    if(imuType != "BNO086") {
+        std::cout << "Rotation vector output is supported only by BNO086!" << std::endl;
+        return 1;
+    }
+
+    dai::Pipeline pipeline;
+
+    auto colorCamera = pipeline.create<dai::node::ColorCamera>();
+    auto imu = pipeline.create<dai::node::IMU>();
+    auto sync = pipeline.create<dai::node::Sync>();
+    auto xoutGroup = pipeline.create<dai::node::XLinkOut>();
+
+    xoutGroup->setStreamName("xout");
+
+    colorCamera->setCamera("color");
+
+    imu->enableIMUSensor(dai::IMUSensor::ROTATION_VECTOR, 120);
+    imu->setBatchReportThreshold(1);
+    imu->setMaxBatchReports(10);
+
+    sync->setSyncThreshold(std::chrono::milliseconds(10));
+    sync->setSyncAttempts(-1);  // Infinite attempts
+
+    colorCamera->video.link(sync->inputs["video"]);
+    imu->out.link(sync->inputs["imu"]);
+
+    sync->out.link(xoutGroup->input);
+
+    device.startPipeline(pipeline);
+
+    auto groupQueue = device.getOutputQueue("xout", 3, false);
+
+    while(true) {
+        auto groupMessage = groupQueue->get<dai::MessageGroup>();
+        auto imuData = groupMessage->get<dai::IMUData>("imu");
+        auto colorData = groupMessage->get<dai::ImgFrame>("video");
+        auto timeDifference = imuData->getTimestampDevice() - colorData->getTimestampDevice();
+        auto timeDifferenceUs = std::chrono::duration_cast<std::chrono::microseconds>(timeDifference).count();
+
+        std::cout << "Time difference between messages is: " << std::abs(timeDifferenceUs / 1000.0) << " ms" << std::endl;
+
+        for(auto& packet : imuData->packets) {
+            auto& rv = packet.rotationVector;
+
+            printf(
+                "Quaternion: i: %.3f j: %.3f k: %.3f real: %.3f\n"
+                "Accuracy (rad): %.3f \n",
+                rv.i,
+                rv.j,
+                rv.k,
+                rv.real,
+                rv.rotationVectorAccuracy);
+        }
+
+        cv::imshow("Color", colorData->getCvFrame());
+        if(cv::waitKey(1) == 'q') {
+            break;
+        }
+    }
+
+    return 0;
+}
diff --git a/examples/Sync/sync_scripts.cpp b/examples/Sync/sync_scripts.cpp
new file mode 100644
index 000000000..2a3403e4d
--- /dev/null
+++ b/examples/Sync/sync_scripts.cpp
@@ -0,0 +1,56 @@
+#include <chrono>
+#include <iostream>
+
+#include "depthai/depthai.hpp"
+
+int main() {
+    dai::Pipeline pipeline;
+
+    auto script1 = pipeline.create<dai::node::Script>();
+    script1->setScript(
+        R"SCRPT(
+from time import sleep
+
+while True:
+    sleep(1)
+    b = Buffer(512)
+    b.setData(bytes(4 * [i for i in range(0, 128)]))
+    b.setTimestamp(Clock.now())
+    node.io['out'].send(b)
+)SCRPT");
+
+    auto script2 = pipeline.create<dai::node::Script>();
+    script2->setScript(
+        R"SCRPT(
+from time import sleep
+
+while True:
+    sleep(0.3)
+    b = Buffer(512)
+    b.setData(bytes(4 * [i for i in range(128, 256)]))
+    b.setTimestamp(Clock.now())
+    node.io['out'].send(b)
+)SCRPT");
+
+    auto sync = pipeline.create<dai::node::Sync>();
+    sync->setSyncThreshold(std::chrono::milliseconds(100));
+
+    auto xout = pipeline.create<dai::node::XLinkOut>();
+    xout->setStreamName("xout");
+
+    sync->out.link(xout->input);
+    script1->outputs["out"].link(sync->inputs["s1"]);
+    script2->outputs["out"].link(sync->inputs["s2"]);
+
+    dai::Device device(pipeline);
+    std::cout << "Start" << std::endl;
+    auto queue = device.getOutputQueue("xout", 10, true);
+    while(true) {
+        auto grp = queue->get<dai::MessageGroup>();
+        std::cout << "Buffer 1 timestamp: " << grp->get<dai::Buffer>("s1")->getTimestamp().time_since_epoch().count() << std::endl;
+        std::cout << "Buffer 2 timestamp: " << grp->get<dai::Buffer>("s2")->getTimestamp().time_since_epoch().count() << std::endl;
+        std::cout << "Time interval between messages: " << static_cast<double>(grp->getIntervalNs()) / 1e6 << "ms" << std::endl;
+        std::cout << "----------" << std::endl;
+        std::this_thread::sleep_for(std::chrono::milliseconds(200));
+    }
+}
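None of the new examples drives the opposite direction, sending a MessageGroup from the host; the DataInputQueue changes further down in this diff are what make that possible. A rough host-side sketch under that assumption follows. The "inGrp" stream name and an XLinkIn node linked to a MessageDemux input are hypothetical wiring, not part of the patch:

    // Sketch only: build a group on the host and push it into the pipeline.
    auto inQueue = device.getInputQueue("inGrp");  // assumes an XLinkIn node with this stream name
    auto group = std::make_shared<dai::MessageGroup>();
    dai::Buffer left, right;
    left.setData(std::vector<std::uint8_t>(256, 0x01));
    right.setData(std::vector<std::uint8_t>(256, 0x02));
    group->add("left", left);    // templated add() copies the message and records its raw buffer
    group->add("right", right);  // per-message indices are assigned when the group is serialized
    inQueue->send(group);        // auxiliary buffers are written right after the group itself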
diff --git a/include/depthai/device/CalibrationHandler.hpp b/include/depthai/device/CalibrationHandler.hpp
index d5feb1146..b1bb2b4f1 100644
--- a/include/depthai/device/CalibrationHandler.hpp
+++ b/include/depthai/device/CalibrationHandler.hpp
@@ -166,7 +166,10 @@ class CalibrationHandler {
      * Get the Distortion Coefficients object
      *
      * @param cameraId Uses the cameraId to identify which distortion Coefficients to return.
-     * @return the distortion coefficients of the requested camera in this order: [k1,k2,p1,p2,k3,k4,k5,k6,s1,s2,s3,s4,τx,τy]
+     * @return the distortion coefficients of the requested camera in this order: [k1,k2,p1,p2,k3,k4,k5,k6,s1,s2,s3,s4,τx,τy] for CameraModel::Perspective
+     * or [k1, k2, k3, k4] for CameraModel::Fisheye
+     * see https://docs.opencv.org/4.5.4/d9/d0c/group__calib3d.html for Perspective model (Rational Polynomial Model)
+     * see https://docs.opencv.org/4.5.4/db/d58/group__calib3d__fisheye.html for Fisheye model
      */
     std::vector<float> getDistortionCoefficients(CameraBoardSocket cameraId) const;
diff --git a/include/depthai/device/DeviceBase.hpp b/include/depthai/device/DeviceBase.hpp
index 327f726fe..98d5bd6bc 100644
--- a/include/depthai/device/DeviceBase.hpp
+++ b/include/depthai/device/DeviceBase.hpp
@@ -28,6 +28,7 @@
 // shared
 #include "depthai-shared/common/ChipTemperature.hpp"
+#include "depthai-shared/common/ConnectionInterface.hpp"
 #include "depthai-shared/common/CpuUsage.hpp"
 #include "depthai-shared/common/MemoryInfo.hpp"
 #include "depthai-shared/datatype/RawIMUData.hpp"
@@ -303,7 +304,7 @@ class DeviceBase {
      *
      * @param devInfo DeviceInfo which specifies which device to connect to
      */
-    DeviceBase(const DeviceInfo& devInfo);
+    explicit DeviceBase(const DeviceInfo& devInfo);
 
     /**
      * Connects to any available device with a DEFAULT_SEARCH_TIME timeout.
@@ -375,8 +376,9 @@ class DeviceBase {
      * @param config Config with which the device will be booted with
      * @param devInfo DeviceInfo which specifies which device to connect to
      * @param pathToCmd Path to custom device firmware
+     * @param dumpOnly If true, only the minimal connection is established to retrieve the crash dump
      */
-    DeviceBase(Config config, const DeviceInfo& devInfo, const dai::Path& pathToCmd);
+    DeviceBase(Config config, const DeviceInfo& devInfo, const dai::Path& pathToCmd, bool dumpOnly = false);
 
     /**
      * Device destructor
@@ -493,7 +495,7 @@ class DeviceBase {
      * @param mask Optional mask to modify only Left (0x1) or Right (0x2) sides on OAK-D-Pro-W-DEV
      * @returns True on success, false if not found or other failure
      */
-    bool setIrLaserDotProjectorBrightness(float mA, int mask = -1);
+    [[deprecated("Use setIrLaserDotProjectorIntensity(float intensity) instead.")]] bool setIrLaserDotProjectorBrightness(float mA, int mask = -1);
 
     /**
      * Sets the brightness of the IR Flood Light. Limits: up to 1500mA at 30% duty cycle.
@@ -505,7 +507,31 @@ class DeviceBase {
      * @param mask Optional mask to modify only Left (0x1) or Right (0x2) sides on OAK-D-Pro-W-DEV
      * @returns True on success, false if not found or other failure
      */
-    bool setIrFloodLightBrightness(float mA, int mask = -1);
+    [[deprecated("Use setIrFloodLightIntensity(float intensity) instead.")]] bool setIrFloodLightBrightness(float mA, int mask = -1);
+
+    /**
+     * Sets the intensity of the IR Laser Dot Projector.
Limits: up to 765mA at 30% frame time duty cycle when exposure time is longer than 30% frame time. + * Otherwise, duty cycle is 100% of exposure time, with current increased up to max 1200mA to make up for shorter duty cycle. + * The duty cycle is controlled by `left` camera STROBE, aligned to start of exposure. + * The emitter is turned off by default + * + * @param intensity Intensity on range 0 to 1, that will determine brightness. 0 or negative to turn off + * @param mask Optional mask to modify only Left (0x1) or Right (0x2) sides on OAK-D-Pro-W-DEV + * @returns True on success, false if not found or other failure + */ + bool setIrLaserDotProjectorIntensity(float intensity, int mask = -1); + + /** + * Sets the intensity of the IR Flood Light. Limits: Intensity is directly normalized to 0 - 1500mA current. + * The duty cycle is 30% when exposure time is longer than 30% frame time. Otherwise, duty cycle is 100% of exposure time. + * The duty cycle is controlled by the `left` camera STROBE, aligned to start of exposure. + * The emitter is turned off by default + * + * @param intensity Intensity on range 0 to 1, that will determine brightness, 0 or negative to turn off + * @param mask Optional mask to modify only Left (0x1) or Right (0x2) sides on OAK-D-Pro-W-DEV + * @returns True on success, false if not found or other failure + */ + bool setIrFloodLightIntensity(float intensity, int mask = -1); /** * Retrieves detected IR laser/LED drivers. @@ -570,6 +596,13 @@ class DeviceBase { */ std::vector getConnectedCameras(); + /** + * Get connection interfaces for device + * + * @returns Vector of connection type + */ + std::vector getConnectionInterfaces(); + /** * Get cameras that are connected to the device with their features/properties * @@ -932,5 +965,8 @@ class DeviceBase { // Device config Config config; + + dai::Path firmwarePath; + bool dumpOnly = false; }; } // namespace dai diff --git a/include/depthai/device/DeviceBootloader.hpp b/include/depthai/device/DeviceBootloader.hpp index 0c34d0957..4b0282fb8 100644 --- a/include/depthai/device/DeviceBootloader.hpp +++ b/include/depthai/device/DeviceBootloader.hpp @@ -495,6 +495,9 @@ class DeviceBootloader { std::tuple readCustom( Memory memory, size_t offset, size_t size, uint8_t* data, std::string filename, std::function progressCb); + void createWatchdog(); + void destroyWatchdog(); + // private variables std::shared_ptr connection; DeviceInfo deviceInfo = {}; diff --git a/include/depthai/pipeline/datatype/CameraControl.hpp b/include/depthai/pipeline/datatype/CameraControl.hpp index c9606f817..6d006a8b9 100644 --- a/include/depthai/pipeline/datatype/CameraControl.hpp +++ b/include/depthai/pipeline/datatype/CameraControl.hpp @@ -40,6 +40,8 @@ class CameraControl : public Buffer { using AutoWhiteBalanceMode = RawCameraControl::AutoWhiteBalanceMode; using SceneMode = RawCameraControl::SceneMode; using EffectMode = RawCameraControl::EffectMode; + using ControlMode = RawCameraControl::ControlMode; + using CaptureIntent = RawCameraControl::CaptureIntent; using FrameSyncMode = RawCameraControl::FrameSyncMode; /// Construct CameraControl message @@ -159,6 +161,22 @@ class CameraControl : public Buffer { */ CameraControl& setAutoExposureCompensation(int compensation); + /** + * Set a command to specify the maximum exposure time limit for auto-exposure. 
By default + * the AE algorithm prioritizes increasing exposure over ISO, up to around frame-time + * (subject to further limits imposed by anti-banding) + * @param maxExposureTimeUs Maximum exposure time in microseconds + */ + CameraControl& setAutoExposureLimit(uint32_t maxExposureTimeUs); + + /** + * Set a command to specify the maximum exposure time limit for auto-exposure. By default + * the AE algorithm prioritizes increasing exposure over ISO, up to around frame-time + * (subject to further limits imposed by anti-banding) + * @param maxExposureTime Maximum exposure time + */ + CameraControl& setAutoExposureLimit(std::chrono::microseconds maxExposureTime); + /** * Set a command to specify anti-banding mode. Anti-banding / anti-flicker * works in auto-exposure mode, by controlling the exposure time to be applied @@ -184,7 +202,7 @@ class CameraControl : public Buffer { * @param exposureTime Exposure time * @param sensitivityIso Sensitivity as ISO value, usual range 100..1600 */ - void setManualExposure(std::chrono::microseconds exposureTime, uint32_t sensitivityIso); + CameraControl& setManualExposure(std::chrono::microseconds exposureTime, uint32_t sensitivityIso); // White Balance /** @@ -254,6 +272,18 @@ class CameraControl : public Buffer { */ CameraControl& setEffectMode(EffectMode mode); + /** + * Set a command to specify control mode + * @param mode Control mode + */ + CameraControl& setControlMode(ControlMode mode); + + /** + * Set a command to specify capture intent mode + * @param mode Capture intent mode + */ + CameraControl& setCaptureIntent(CaptureIntent mode); + // Functions to retrieve properties /** * Check whether command to capture a still is set diff --git a/include/depthai/pipeline/datatype/EncodedFrame.hpp b/include/depthai/pipeline/datatype/EncodedFrame.hpp new file mode 100644 index 000000000..99db79ba0 --- /dev/null +++ b/include/depthai/pipeline/datatype/EncodedFrame.hpp @@ -0,0 +1,144 @@ +#pragma once + +#include + +#include "depthai/pipeline/datatype/Buffer.hpp" + +// shared +#include "depthai-shared/datatype/RawEncodedFrame.hpp" + +// optional +#ifdef DEPTHAI_HAVE_OPENCV_SUPPORT + #include +#endif + +namespace dai { + +class EncodedFrame : public Buffer { + std::shared_ptr serialize() const override; + RawEncodedFrame& frame; + + public: + // Raw* mirror + using Profile = RawEncodedFrame::Profile; + using FrameType = RawEncodedFrame::FrameType; + + /** + * Construct EncodedFrame message. + * Timestamp is set to now + */ + EncodedFrame(); + explicit EncodedFrame(std::shared_ptr ptr); + virtual ~EncodedFrame() = default; + + // getters + /** + * Retrieves instance number + */ + unsigned int getInstanceNum() const; + /** + * Retrieves exposure time + */ + std::chrono::microseconds getExposureTime() const; + + /** + * Retrieves sensitivity, as an ISO value + */ + int getSensitivity() const; + + /** + * Retrieves white-balance color temperature of the light source, in kelvins + */ + int getColorTemperature() const; + + /** + * Retrieves lens position, range 0..255. 
Returns -1 if not available
+     */
+    int getLensPosition() const;
+
+    /**
+     * Retrieves the encoding quality
+     */
+    unsigned int getQuality() const;
+
+    /**
+     * Retrieves the encoding bitrate
+     */
+    unsigned int getBitrate() const;
+
+    /**
+     * Returns true if encoding is lossless (JPEG only)
+     */
+    bool getLossless() const;
+
+    /**
+     * Retrieves frame type (H26x only)
+     */
+    FrameType getFrameType() const;
+
+    /**
+     * Retrieves the encoding profile (JPEG, AVC or HEVC)
+     */
+    Profile getProfile() const;
+
+    // setters
+    /**
+     * Sets image timestamp related to dai::Clock::now()
+     */
+    EncodedFrame& setTimestamp(std::chrono::time_point<std::chrono::steady_clock, std::chrono::steady_clock::duration> tp);
+
+    /**
+     * Sets image timestamp related to dai::Clock::now()
+     */
+    EncodedFrame& setTimestampDevice(std::chrono::time_point<std::chrono::steady_clock, std::chrono::steady_clock::duration> tp);
+
+    /**
+     * Specifies sequence number
+     *
+     * @param seq Sequence number
+     */
+    EncodedFrame& setSequenceNum(int64_t seq);
+
+    /**
+     * Instance number relates to the origin of the frame (which camera)
+     *
+     * @param instance Instance number
+     */
+    EncodedFrame& setInstanceNum(unsigned int instance);
+
+    /**
+     * Specifies the encoding quality
+     *
+     * @param quality Encoding quality
+     */
+    EncodedFrame& setQuality(unsigned int quality);
+
+    /**
+     * Specifies the encoding bitrate
+     *
+     * @param bitrate Encoding bitrate
+     */
+    EncodedFrame& setBitrate(unsigned int bitrate);
+
+    /**
+     * Specifies if encoding is lossless (JPEG only)
+     *
+     * @param lossless True if lossless
+     */
+    EncodedFrame& setLossless(bool lossless);
+
+    /**
+     * Specifies the frame type (H26x only)
+     *
+     * @param type Type of h26x frame (I, P, B)
+     */
+    EncodedFrame& setFrameType(FrameType type);
+
+    /**
+     * Specifies the encoding profile
+     *
+     * @param profile Encoding profile
+     */
+    EncodedFrame& setProfile(Profile profile);
+};
+
+}  // namespace dai
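EncodedFrame is emitted by the new VideoEncoder 'out' output declared later in this diff, but none of the added examples reads it. A minimal sketch of how that output could be consumed on the host; the stream name, frame rate and H264_MAIN preset are illustrative choices, and the I/P/B frame-type enumerators are assumed to mirror RawEncodedFrame:

    // Sketch only: encode the color camera and inspect EncodedFrame metadata on the host.
    dai::Pipeline pipeline;
    auto camRgb = pipeline.create<dai::node::ColorCamera>();
    auto videoEnc = pipeline.create<dai::node::VideoEncoder>();
    auto xout = pipeline.create<dai::node::XLinkOut>();
    xout->setStreamName("enc");

    videoEnc->setDefaultProfilePreset(30, dai::VideoEncoderProperties::Profile::H264_MAIN);
    camRgb->video.link(videoEnc->input);
    videoEnc->out.link(xout->input);  // 'out' carries EncodedFrame instead of the ImgFrame bitstream

    dai::Device device(pipeline);
    auto queue = device.getOutputQueue("enc", 30, true);
    while(true) {
        auto frame = queue->get<dai::EncodedFrame>();
        std::cout << "bytes: " << frame->getData().size()  // raw bitstream bytes
                  << " keyframe: " << (frame->getFrameType() == dai::EncodedFrame::FrameType::I)
                  << " bitrate: " << frame->getBitrate() << std::endl;
    }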
diff --git a/include/depthai/pipeline/datatype/MessageGroup.hpp b/include/depthai/pipeline/datatype/MessageGroup.hpp
new file mode 100644
index 000000000..a8b792195
--- /dev/null
+++ b/include/depthai/pipeline/datatype/MessageGroup.hpp
@@ -0,0 +1,79 @@
+#pragma once
+
+#include <chrono>
+#include <memory>
+#include <unordered_map>
+#include <vector>
+
+#include "depthai-shared/datatype/RawMessageGroup.hpp"
+#include "depthai/pipeline/datatype/Buffer.hpp"
+
+namespace dai {
+
+/**
+ * MessageGroup message. Carries multiple messages in one.
+ */
+class MessageGroup : public Buffer {
+    std::shared_ptr<RawBuffer> serialize() const override;
+    RawMessageGroup& rawGrp;
+    std::unordered_map<std::string, std::shared_ptr<ADatatype>> group;
+
+   public:
+    /// Construct MessageGroup message
+    MessageGroup();
+    explicit MessageGroup(std::shared_ptr<RawMessageGroup> ptr);
+    virtual ~MessageGroup() = default;
+
+    /// Group
+    std::shared_ptr<ADatatype> operator[](const std::string& name);
+    template <typename T>
+    std::shared_ptr<T> get(const std::string& name) {
+        return std::dynamic_pointer_cast<T>(group[name]);
+    }
+    void add(const std::string& name, const std::shared_ptr<ADatatype>& value);
+    template <typename T>
+    void add(const std::string& name, const T& value) {
+        static_assert(std::is_base_of<ADatatype, T>::value, "T must derive from ADatatype");
+        group[name] = std::make_shared<T>(value);
+        rawGrp.group[name] = {value.getRaw(), 0};
+    }
+
+    // Iterators
+    std::unordered_map<std::string, std::shared_ptr<ADatatype>>::iterator begin();
+    std::unordered_map<std::string, std::shared_ptr<ADatatype>>::iterator end();
+
+    /**
+     * True if all messages in the group are in the interval
+     * @param thresholdNs Maximal interval between messages
+     */
+    bool isSynced(int64_t thresholdNs) const;
+
+    /**
+     * Retrieves interval between the first and the last message in the group.
+     */
+    int64_t getIntervalNs() const;
+
+    int64_t getNumMessages() const;
+
+    /**
+     * Gets the names of messages in the group
+     */
+    std::vector<std::string> getMessageNames() const;
+
+    /**
+     * Sets message timestamp related to dai::Clock::now()
+     */
+    MessageGroup& setTimestamp(std::chrono::time_point<std::chrono::steady_clock, std::chrono::steady_clock::duration> timestamp);
+
+    /**
+     * Sets message timestamp related to dai::Clock::now()
+     */
+    MessageGroup& setTimestampDevice(std::chrono::time_point<std::chrono::steady_clock, std::chrono::steady_clock::duration> timestamp);
+
+    /**
+     * Sets message sequence number
+     */
+    MessageGroup& setSequenceNum(int64_t sequenceNum);
+};
+
+}  // namespace dai
diff --git a/include/depthai/pipeline/datatype/StreamMessageParser.hpp b/include/depthai/pipeline/datatype/StreamMessageParser.hpp
index 01374f0fd..3e0c99e54 100644
--- a/include/depthai/pipeline/datatype/StreamMessageParser.hpp
+++ b/include/depthai/pipeline/datatype/StreamMessageParser.hpp
@@ -7,6 +7,8 @@
 #include <XLink/XLinkPublicDefines.h>
 
 // project
+#include "depthai-shared/datatype/DatatypeEnum.hpp"
+#include "depthai-shared/datatype/RawMessageGroup.hpp"
 #include "depthai/pipeline/datatype/ADatatype.hpp"
 
 // shared
@@ -20,6 +22,7 @@ class StreamMessageParser {
    public:
     static std::shared_ptr<RawBuffer> parseMessage(streamPacketDesc_t* const packet);
     static std::shared_ptr<ADatatype> parseMessageToADatatype(streamPacketDesc_t* const packet);
+    static std::shared_ptr<ADatatype> parseMessageToADatatype(streamPacketDesc_t* const packet, DatatypeEnum& type);
     static std::vector<std::uint8_t> serializeMessage(const std::shared_ptr<const RawBuffer>& data);
     static std::vector<std::uint8_t> serializeMessage(const RawBuffer& data);
     static std::vector<std::uint8_t> serializeMessage(const std::shared_ptr<const ADatatype>& data);
diff --git a/include/depthai/pipeline/datatypes.hpp b/include/depthai/pipeline/datatypes.hpp
index b16c8f89e..82462e456 100644
--- a/include/depthai/pipeline/datatypes.hpp
+++ b/include/depthai/pipeline/datatypes.hpp
@@ -12,6 +12,7 @@
 #include "datatype/ImageManipConfig.hpp"
 #include "datatype/ImgDetections.hpp"
 #include "datatype/ImgFrame.hpp"
+#include "datatype/MessageGroup.hpp"
 #include "datatype/NNData.hpp"
 #include "datatype/SpatialImgDetections.hpp"
 #include "datatype/SpatialLocationCalculatorConfig.hpp"
diff --git a/include/depthai/pipeline/node/MessageDemux.hpp b/include/depthai/pipeline/node/MessageDemux.hpp
new file mode 100644
index 000000000..8039bd18b
--- /dev/null
+++ b/include/depthai/pipeline/node/MessageDemux.hpp
@@ -0,0 +1,28 @@
+#pragma once
+
+#include "depthai-shared/properties/MessageDemuxProperties.hpp"
+#include "depthai/pipeline/Node.hpp"
+
+namespace dai {
+namespace node {
+
+class MessageDemux : public NodeCRTP<Node, MessageDemux, MessageDemuxProperties> {
+   public:
+    constexpr static const char* NAME = "MessageDemux";
+    MessageDemux(const std::shared_ptr<PipelineImpl>& par, int64_t nodeId);
+
+    MessageDemux(const std::shared_ptr<PipelineImpl>& par, int64_t nodeId, std::unique_ptr<Properties> props);
+
+    /**
+     * Input message of type MessageGroup
+     */
+    Input input{*this, "input", Input::Type::SReceiver, {{DatatypeEnum::MessageGroup, false}}};
+
+    /**
+     * A map of outputs, where keys are same as in the input MessageGroup
+     */
+    OutputMap outputs;
+};
+
+}  // namespace node
+}  // namespace dai
diff --git a/include/depthai/pipeline/node/Sync.hpp b/include/depthai/pipeline/node/Sync.hpp
new file mode 100644
index 000000000..2168e49ef
--- /dev/null
+++ b/include/depthai/pipeline/node/Sync.hpp
@@ -0,0 +1,55 @@
+#pragma once
+
+#include <chrono>
+
+#include "depthai-shared/properties/SyncProperties.hpp"
+#include "depthai/pipeline/Node.hpp"
+
+namespace dai {
+namespace node {
+
+class Sync : public NodeCRTP<Node, Sync, SyncProperties> {
+   public:
+    constexpr static const char* NAME = "Sync";
+    Sync(const std::shared_ptr<PipelineImpl>& par, int64_t nodeId);
+
+    Sync(const std::shared_ptr<PipelineImpl>& par, int64_t nodeId, std::unique_ptr<Properties> props);
+
+    /**
+     * A map of inputs
+     */
+    InputMap inputs;
+
+    /**
+     * Output message of type MessageGroup
+     */
+    Output out{*this, "out", Output::Type::MSender, {{DatatypeEnum::MessageGroup, false}}};
+
+    /**
+     * Set the maximal interval between messages in the group
+     * @param syncThreshold Maximal interval between messages in the group
+     */
+    void setSyncThreshold(std::chrono::nanoseconds syncThreshold);
+
+    /**
+     * Set the number of attempts to get the specified max interval between messages in the group
+     * @param syncAttempts Number of attempts to get the specified max interval between messages in the group:
+     * - if syncAttempts = 0 then the node sends a message as soon as the group is filled
+     * - if syncAttempts > 0 then the node will make syncAttempts attempts to synchronize before sending out a message
+     * - if syncAttempts = -1 (default) then the node will only send a message if successfully synchronized
+     */
+    void setSyncAttempts(int syncAttempts);
+
+    /**
+     * Gets the maximal interval between messages in the group
+     */
+    std::chrono::nanoseconds getSyncThreshold() const;
+
+    /**
+     * Gets the number of sync attempts
+     */
+    int getSyncAttempts() const;
+};
+
+}  // namespace node
+}  // namespace dai
diff --git a/include/depthai/pipeline/node/VideoEncoder.hpp b/include/depthai/pipeline/node/VideoEncoder.hpp
index caa21ca82..59c186e52 100644
--- a/include/depthai/pipeline/node/VideoEncoder.hpp
+++ b/include/depthai/pipeline/node/VideoEncoder.hpp
@@ -25,10 +25,15 @@ class VideoEncoder : public NodeCRTP<Node, VideoEncoder, VideoEncoderProperties> {
     Input input{*this, "in", Input::Type::SReceiver, true, 4, true, {{DatatypeEnum::ImgFrame, true}}};
 
     /**
-     * Outputs ImgFrame message that carries BITSTREAM encoded (MJPEG, H264 or H265) frame data.
+     * Outputs ImgFrame message that carries BITSTREAM encoded (MJPEG, H264 or H265) frame data. Mutually exclusive with out.
      */
     Output bitstream{*this, "bitstream", Output::Type::MSender, {{DatatypeEnum::ImgFrame, false}}};
 
+    /**
+     * Outputs EncodedFrame message that carries encoded (MJPEG, H264 or H265) frame data. Mutually exclusive with bitstream.
+ */ + Output out{*this, "out", Output::Type::MSender, {{DatatypeEnum::EncodedFrame, false}}}; + // Sets default options for a specified size and profile /** * Sets a default preset based on specified frame rate and profile diff --git a/include/depthai/pipeline/nodes.hpp b/include/depthai/pipeline/nodes.hpp index bf0a3dc76..73ac61b1f 100644 --- a/include/depthai/pipeline/nodes.hpp +++ b/include/depthai/pipeline/nodes.hpp @@ -10,6 +10,7 @@ #include "node/FeatureTracker.hpp" #include "node/IMU.hpp" #include "node/ImageManip.hpp" +#include "node/MessageDemux.hpp" #include "node/MonoCamera.hpp" #include "node/NeuralNetwork.hpp" #include "node/ObjectTracker.hpp" @@ -19,9 +20,10 @@ #include "node/SpatialDetectionNetwork.hpp" #include "node/SpatialLocationCalculator.hpp" #include "node/StereoDepth.hpp" +#include "node/Sync.hpp" #include "node/SystemLogger.hpp" #include "node/ToF.hpp" #include "node/VideoEncoder.hpp" #include "node/Warp.hpp" #include "node/XLinkIn.hpp" -#include "node/XLinkOut.hpp" \ No newline at end of file +#include "node/XLinkOut.hpp" diff --git a/shared/depthai-shared b/shared/depthai-shared index 8bacf258a..a73b58b5d 160000 --- a/shared/depthai-shared +++ b/shared/depthai-shared @@ -1 +1 @@ -Subproject commit 8bacf258a2ccf507634da153a7166aec77af1ea7 +Subproject commit a73b58b5d0c7798633309d3b0ce4d40c096014ca diff --git a/shared/depthai-shared.cmake b/shared/depthai-shared.cmake index 72f46ce99..414e1ebf6 100644 --- a/shared/depthai-shared.cmake +++ b/shared/depthai-shared.cmake @@ -63,4 +63,4 @@ foreach(source_file ${DEPTHAI_SHARED_SOURCES}) if(NOT EXISTS ${source_file}) message(FATAL_ERROR "depthai-shared submodule files missing. Make sure to download prepackaged release instead of \"Source code\" on GitHub. Example: depthai-core-vX.Y.Z.tar.gz") endif() -endforeach() \ No newline at end of file +endforeach() diff --git a/src/device/DataQueue.cpp b/src/device/DataQueue.cpp index 7e1496fa0..881f23991 100644 --- a/src/device/DataQueue.cpp +++ b/src/device/DataQueue.cpp @@ -3,10 +3,14 @@ // std #include #include +#include // project +#include "depthai-shared/datatype/DatatypeEnum.hpp" +#include "depthai-shared/datatype/RawMessageGroup.hpp" #include "depthai/pipeline/datatype/ADatatype.hpp" #include "depthai/xlink/XLinkStream.hpp" +#include "pipeline/datatype/MessageGroup.hpp" #include "pipeline/datatype/StreamMessageParser.hpp" // shared @@ -35,8 +39,23 @@ DataOutputQueue::DataOutputQueue(const std::shared_ptr conn, co while(running) { // Blocking -- parse packet and gather timing information auto packet = stream.readMove(); + DatatypeEnum type; const auto t1Parse = std::chrono::steady_clock::now(); - const auto data = StreamMessageParser::parseMessageToADatatype(&packet); + const auto data = StreamMessageParser::parseMessageToADatatype(&packet, type); + if(type == DatatypeEnum::MessageGroup) { + auto msgGrp = std::static_pointer_cast(data); + unsigned int size = msgGrp->getNumMessages(); + std::vector> packets; + packets.reserve(size); + for(unsigned int i = 0; i < size; ++i) { + auto dpacket = stream.readMove(); + packets.push_back(StreamMessageParser::parseMessageToADatatype(&dpacket)); + } + auto rawMsgGrp = std::static_pointer_cast(data->getRaw()); + for(auto& msg : rawMsgGrp->group) { + msgGrp->add(msg.first, packets[msg.second.index]); + } + } const auto t2Parse = std::chrono::steady_clock::now(); // Trace level debugging @@ -192,6 +211,16 @@ DataInputQueue::DataInputQueue( // serialize auto t1Parse = std::chrono::steady_clock::now(); + std::vector> serializedAux; + 
if(data->getType() == DatatypeEnum::MessageGroup) { + auto rawMsgGrp = std::dynamic_pointer_cast(data); + serializedAux.reserve(rawMsgGrp->group.size()); + unsigned int index = 0; + for(auto& msg : rawMsgGrp->group) { + msg.second.index = index++; + serializedAux.push_back(StreamMessageParser::serializeMessage(msg.second.buffer)); + } + } auto serialized = StreamMessageParser::serializeMessage(data); auto t2Parse = std::chrono::steady_clock::now(); @@ -210,6 +239,9 @@ DataInputQueue::DataInputQueue( // Blocking stream.write(serialized); + for(auto& msg : serializedAux) { + stream.write(msg); + } // Increment num packets sent numPacketsSent++; diff --git a/src/device/DeviceBase.cpp b/src/device/DeviceBase.cpp index 6994860ff..68d2245c5 100644 --- a/src/device/DeviceBase.cpp +++ b/src/device/DeviceBase.cpp @@ -23,6 +23,7 @@ #include "pipeline/Pipeline.hpp" #include "utility/EepromDataParser.hpp" #include "utility/Environment.hpp" +#include "utility/Files.hpp" #include "utility/Initialization.hpp" #include "utility/PimplImpl.hpp" #include "utility/Resources.hpp" @@ -45,6 +46,8 @@ const std::string MAGIC_PROTECTED_FLASHING_VALUE = "235539980"; const std::string MAGIC_FACTORY_FLASHING_VALUE = "413424129"; const std::string MAGIC_FACTORY_PROTECTED_FLASHING_VALUE = "868632271"; +const unsigned int DEFAULT_CRASHDUMP_TIMEOUT = 9000; + // local static function static void getFlashingPermissions(bool& factoryPermissions, bool& protectedPermissions) { auto permissionEnv = utility::getEnv("DEPTHAI_ALLOW_FACTORY_FLASHING"); @@ -398,7 +401,7 @@ DeviceBase::DeviceBase(Config config, const DeviceInfo& devInfo, UsbSpeed maxUsb init(config, maxUsbSpeed, ""); } -DeviceBase::DeviceBase(Config config, const DeviceInfo& devInfo, const dai::Path& pathToCmd) : deviceInfo(devInfo) { +DeviceBase::DeviceBase(Config config, const DeviceInfo& devInfo, const dai::Path& pathToCmd, bool dumpOnly) : deviceInfo(devInfo), dumpOnly(dumpOnly) { init2(config, pathToCmd, {}); } @@ -512,10 +515,52 @@ void DeviceBase::close() { } } +unsigned int getCrashdumpTimeout(XLinkProtocol_t protocol) { + std::string timeoutStr = utility::getEnv("DEPTHAI_CRASHDUMP_TIMEOUT"); + if(!timeoutStr.empty()) { + try { + return std::stoi(timeoutStr) * 1000; + } catch(const std::invalid_argument& e) { + logger::warn("DEPTHAI_CRASHDUMP_TIMEOUT value invalid: {}", e.what()); + } + } + return DEFAULT_CRASHDUMP_TIMEOUT + (protocol == X_LINK_TCP_IP ? device::XLINK_TCP_WATCHDOG_TIMEOUT.count() : device::XLINK_USB_WATCHDOG_TIMEOUT.count()); +} + +tl::optional saveCrashDump(dai::CrashDump& dump, std::string mxId) { + std::vector data; + utility::serialize(dump, data); + auto crashDumpPathStr = utility::getEnv("DEPTHAI_CRASHDUMP"); + return saveFileToTemporaryDirectory(data, mxId + "-depthai_crash_dump.json", crashDumpPathStr); +} + void DeviceBase::closeImpl() { using namespace std::chrono; auto t1 = steady_clock::now(); - pimpl->logger.debug("Device about to be closed..."); + bool shouldGetCrashDump = false; + if(!dumpOnly) { + pimpl->logger.debug("Device about to be closed..."); + try { + if(hasCrashDump()) { + connection->setRebootOnDestruction(true); + auto dump = getCrashDump(); + auto path = saveCrashDump(dump, deviceInfo.getMxId()); + if(path.has_value()) { + pimpl->logger.warn("There was a fatal error. Crash dump saved to {}", path.value()); + } else { + pimpl->logger.warn("There was a fatal error. 
Crash dump could not be saved"); + } + } else { + bool isRunning = pimpl->rpcClient->call("isRunning").as(); + shouldGetCrashDump = !isRunning; + connection->setRebootOnDestruction(connection->getRebootOnDestruction() || shouldGetCrashDump); + pimpl->logger.debug("Shutdown {}", isRunning ? "OK" : "error"); + } + } catch(const std::exception& ex) { + pimpl->logger.debug("shutdown call error: {}", ex.what()); + shouldGetCrashDump = true; + } + } // Close connection first; causes Xlink internal calls to unblock semaphore waits and // return error codes, which then allows queues to unblock @@ -524,28 +569,68 @@ void DeviceBase::closeImpl() { // invalid memory, etc. which hard crashes main app connection->close(); - // Stop various threads watchdogRunning = false; - timesyncRunning = false; - loggingRunning = false; - profilingRunning = false; - // Stop watchdog first (this resets and waits for link to fall down) if(watchdogThread.joinable()) watchdogThread.join(); - // Then stop timesync - if(timesyncThread.joinable()) timesyncThread.join(); - // And at the end stop logging thread - if(loggingThread.joinable()) loggingThread.join(); - // And at the end stop profiling thread - if(profilingThread.joinable()) profilingThread.join(); - // At the end stop the monitor thread - if(monitorThread.joinable()) monitorThread.join(); + + if(!dumpOnly) { + // Stop various threads + timesyncRunning = false; + loggingRunning = false; + profilingRunning = false; + + // Then stop timesync + if(timesyncThread.joinable()) timesyncThread.join(); + // And at the end stop logging thread + if(loggingThread.joinable()) loggingThread.join(); + // And at the end stop profiling thread + if(profilingThread.joinable()) profilingThread.join(); + // At the end stop the monitor thread + if(monitorThread.joinable()) monitorThread.join(); + } // Close rpcStream pimpl->rpcStream = nullptr; pimpl->rpcClient = nullptr; - pimpl->logger.debug("Device closed, {}", duration_cast(steady_clock::now() - t1).count()); + if(!dumpOnly) { + auto timeout = getCrashdumpTimeout(deviceInfo.protocol); + // Get crash dump if needed + if(shouldGetCrashDump && timeout > 0) { + pimpl->logger.debug("Getting crash dump..."); + auto t1 = steady_clock::now(); + bool gotDump = false; + bool found = false; + do { + DeviceInfo rebootingDeviceInfo; + std::tie(found, rebootingDeviceInfo) = XLinkConnection::getDeviceByMxId(deviceInfo.getMxId(), X_LINK_ANY_STATE, false); + if(found && (rebootingDeviceInfo.state == X_LINK_UNBOOTED || rebootingDeviceInfo.state == X_LINK_BOOTLOADER)) { + pimpl->logger.trace("Found rebooting device in {}ns", duration_cast(steady_clock::now() - t1).count()); + DeviceBase rebootingDevice(config, rebootingDeviceInfo, firmwarePath, true); + if(rebootingDevice.hasCrashDump()) { + auto dump = rebootingDevice.getCrashDump(); + auto path = saveCrashDump(dump, deviceInfo.getMxId()); + if(path.has_value()) { + pimpl->logger.warn("Device crashed. Crash dump saved to {}", path.value()); + } else { + pimpl->logger.warn("Device crashed. Crash dump could not be saved"); + } + } else { + pimpl->logger.warn("Device crashed, but no crash dump could be extracted."); + } + gotDump = true; + break; + } + } while(!found && steady_clock::now() - t1 < std::chrono::milliseconds(timeout)); + if(!gotDump) { + pimpl->logger.error("Device likely crashed but did not reboot in time to get the crash dump"); + } + } else if(shouldGetCrashDump) { + pimpl->logger.warn("Device crashed. 
Crash dump retrieval disabled."); + } + + pimpl->logger.debug("Device closed, {}", duration_cast(steady_clock::now() - t1).count()); + } } // This function is thread-unsafe. The idea of "isClosed" is ephemerial and @@ -596,10 +681,11 @@ void DeviceBase::init(Config config, UsbSpeed maxUsbSpeed, const dai::Path& path void DeviceBase::init2(Config cfg, const dai::Path& pathToMvcmd, tl::optional pipeline) { // Initalize depthai library if not already - initialize(); + if(!dumpOnly) initialize(); // Specify cfg config = cfg; + firmwarePath = pathToMvcmd; // Apply nonExclusiveMode config.board.nonExclusiveMode = config.nonExclusiveMode; @@ -825,131 +911,133 @@ void DeviceBase::init2(Config cfg, const dai::Path& pathToMvcmd, tl::optionallogger.debug("Timesync thread exception caught: {}", ex.what()); + auto level = spdlogLevelToLogLevel(logger::get_level()); + setLogLevel(config.logLevel.value_or(level)); + + // Sets system inforation logging rate. By default 1s + setSystemInformationLoggingRate(DEFAULT_SYSTEM_INFORMATION_LOGGING_RATE_HZ); + } catch(const std::exception&) { + // close device (cleanup) + close(); + // Rethrow original exception + throw; } - timesyncRunning = false; - }); + // prepare timesync thread, which will keep device synchronized + timesyncThread = std::thread([this]() { + using namespace std::chrono; - // prepare logging thread, which will log device messages - loggingThread = std::thread([this]() { - using namespace std::chrono; - std::vector messages; - try { - XLinkStream stream(connection, device::XLINK_CHANNEL_LOG, 128); - while(loggingRunning) { - // Block - auto log = stream.read(); + try { + XLinkStream stream(connection, device::XLINK_CHANNEL_TIMESYNC, 128); + while(timesyncRunning) { + // Block + XLinkTimespec timestamp; + stream.read(timestamp); + + // Write timestamp back + stream.write(×tamp, sizeof(timestamp)); + } + } catch(const std::exception& ex) { + // ignore + pimpl->logger.debug("Timesync thread exception caught: {}", ex.what()); + } - try { - // Deserialize incoming messages - utility::deserialize(log, messages); + timesyncRunning = false; + }); + + // prepare logging thread, which will log device messages + loggingThread = std::thread([this]() { + using namespace std::chrono; + std::vector messages; + try { + XLinkStream stream(connection, device::XLINK_CHANNEL_LOG, 128); + while(loggingRunning) { + // Block + auto log = stream.read(); - pimpl->logger.trace("Log vector decoded, size: {}", messages.size()); + try { + // Deserialize incoming messages + utility::deserialize(log, messages); - // log the messages in incremental order (0 -> size-1) - for(const auto& msg : messages) { - pimpl->logger.logMessage(msg); - } + pimpl->logger.trace("Log vector decoded, size: {}", messages.size()); - // Log to callbacks - { - // lock mtx to callback map (shared) - std::unique_lock l(logCallbackMapMtx); + // log the messages in incremental order (0 -> size-1) for(const auto& msg : messages) { - for(const auto& kv : logCallbackMap) { - const auto& cb = kv.second; - // If available, callback with msg - if(cb) cb(msg); - } + pimpl->logger.logMessage(msg); } - } - } catch(const nlohmann::json::exception& ex) { - pimpl->logger.error("Exception while parsing or calling callbacks for log message from device: {}", ex.what()); - } - } - } catch(const std::exception& ex) { - // ignore exception from logging - pimpl->logger.debug("Log thread exception caught: {}", ex.what()); - } - - loggingRunning = false; - }); + // Log to callbacks + { + // lock mtx to callback map 
(shared) + std::unique_lock l(logCallbackMapMtx); + for(const auto& msg : messages) { + for(const auto& kv : logCallbackMap) { + const auto& cb = kv.second; + // If available, callback with msg + if(cb) cb(msg); + } + } + } - if(utility::getEnv("DEPTHAI_PROFILING") == "1") { - // prepare profiling thread, which will log device messages - profilingThread = std::thread([this]() { - using namespace std::chrono; - try { - ProfilingData lastData = {}; - // TODO(themarpe) - expose - float rate = 1.0f; - while(profilingRunning) { - ProfilingData data = getProfilingData(); - long long w = data.numBytesWritten - lastData.numBytesWritten; - long long r = data.numBytesRead - lastData.numBytesRead; - w = static_cast(w / rate); - r = static_cast(r / rate); - - lastData = data; - - pimpl->logger.debug("Profiling write speed: {:.2f} MiB/s, read speed: {:.2f} MiB/s, total written: {:.2f} MiB, read: {:.2f} MiB", - w / 1024.0f / 1024.0f, - r / 1024.0f / 1024.0f, - data.numBytesWritten / 1024.0f / 1024.0f, - data.numBytesRead / 1024.0f / 1024.0f); - - std::this_thread::sleep_for(duration(1) / rate); + } catch(const nlohmann::json::exception& ex) { + pimpl->logger.error("Exception while parsing or calling callbacks for log message from device: {}", ex.what()); + } } } catch(const std::exception& ex) { // ignore exception from logging - pimpl->logger.debug("Profiling thread exception caught: {}", ex.what()); + pimpl->logger.debug("Log thread exception caught: {}", ex.what()); } - profilingRunning = false; + loggingRunning = false; }); - } - // Below can throw - make sure to gracefully exit threads - try { - // Starts and waits for inital timesync - setTimesync(DEFAULT_TIMESYNC_PERIOD, DEFAULT_TIMESYNC_NUM_SAMPLES, DEFAULT_TIMESYNC_RANDOM); - } catch(const std::exception&) { - // close device (cleanup) - close(); - // Rethrow original exception - throw; + if(utility::getEnv("DEPTHAI_PROFILING") == "1") { + // prepare profiling thread, which will log device messages + profilingThread = std::thread([this]() { + using namespace std::chrono; + try { + ProfilingData lastData = {}; + // TODO(themarpe) - expose + float rate = 1.0f; + while(profilingRunning) { + ProfilingData data = getProfilingData(); + long long w = data.numBytesWritten - lastData.numBytesWritten; + long long r = data.numBytesRead - lastData.numBytesRead; + w = static_cast(w / rate); + r = static_cast(r / rate); + + lastData = data; + + pimpl->logger.debug("Profiling write speed: {:.2f} MiB/s, read speed: {:.2f} MiB/s, total written: {:.2f} MiB, read: {:.2f} MiB", + w / 1024.0f / 1024.0f, + r / 1024.0f / 1024.0f, + data.numBytesWritten / 1024.0f / 1024.0f, + data.numBytesRead / 1024.0f / 1024.0f); + + std::this_thread::sleep_for(duration(1) / rate); + } + } catch(const std::exception& ex) { + // ignore exception from logging + pimpl->logger.debug("Profiling thread exception caught: {}", ex.what()); + } + + profilingRunning = false; + }); + } + + // Below can throw - make sure to gracefully exit threads + try { + // Starts and waits for inital timesync + setTimesync(DEFAULT_TIMESYNC_PERIOD, DEFAULT_TIMESYNC_NUM_SAMPLES, DEFAULT_TIMESYNC_RANDOM); + } catch(const std::exception&) { + // close device (cleanup) + close(); + // Rethrow original exception + throw; + } } } @@ -961,6 +1049,10 @@ std::vector DeviceBase::getConnectedCameras() { return pimpl->rpcClient->call("getConnectedCameras").as>(); } +std::vector DeviceBase::getConnectionInterfaces() { + return pimpl->rpcClient->call("getConnectionInterfaces").as>(); +} + std::vector 
DeviceBase::getConnectedCameraFeatures() { return pimpl->rpcClient->call("getConnectedCameraFeatures").as>(); } @@ -1085,11 +1177,19 @@ LogLevel DeviceBase::getLogOutputLevel() { } bool DeviceBase::setIrLaserDotProjectorBrightness(float mA, int mask) { - return pimpl->rpcClient->call("setIrLaserDotProjectorBrightness", mA, mask); + return pimpl->rpcClient->call("setIrLaserDotProjectorBrightness", mA, mask, false); +} + +bool DeviceBase::setIrLaserDotProjectorIntensity(float intensity, int mask) { + return pimpl->rpcClient->call("setIrLaserDotProjectorBrightness", intensity, mask, true); } bool DeviceBase::setIrFloodLightBrightness(float mA, int mask) { - return pimpl->rpcClient->call("setIrFloodLightBrightness", mA, mask); + return pimpl->rpcClient->call("setIrFloodLightBrightness", mA, mask, false); +} + +bool DeviceBase::setIrFloodLightIntensity(float intensity, int mask) { + return pimpl->rpcClient->call("setIrFloodLightBrightness", intensity, mask, true); } std::vector> DeviceBase::getIrDrivers() { diff --git a/src/device/DeviceBootloader.cpp b/src/device/DeviceBootloader.cpp index ad33db4fd..17a7a1182 100644 --- a/src/device/DeviceBootloader.cpp +++ b/src/device/DeviceBootloader.cpp @@ -273,6 +273,87 @@ DeviceBootloader::DeviceBootloader(std::string nameOrDeviceId, bool allowFlashin init(true, {}, tl::nullopt, allowFlashingBootloader); } +void DeviceBootloader::createWatchdog() { + if(monitorThread.joinable() || watchdogThread.joinable()) { + throw std::runtime_error("Watchdog already created. Destroy it first."); + } + + // Specify "last" ping time (5s in the future, for some grace time) + { + std::unique_lock lock(lastWatchdogPingTimeMtx); + lastWatchdogPingTime = std::chrono::steady_clock::now() + std::chrono::seconds(5); + } + // Create monitor thread + monitorThread = std::thread([this]() { + while(watchdogRunning) { + // Ping with a period half of that of the watchdog timeout + std::this_thread::sleep_for(bootloader::XLINK_WATCHDOG_TIMEOUT); + // Check if wd was pinged in the specified watchdogTimeout time. 
+ decltype(lastWatchdogPingTime) prevPingTime; + { + std::unique_lock lock(lastWatchdogPingTimeMtx); + prevPingTime = lastWatchdogPingTime; + } + // Recheck if watchdogRunning wasn't already closed and close if more than twice of WD passed + // Bump checking thread to not cause spurious warnings/closes + std::chrono::milliseconds watchdogTimeout = std::chrono::milliseconds(3000); + if(watchdogRunning && std::chrono::steady_clock::now() - prevPingTime > watchdogTimeout * 2) { + logger::warn("Monitor thread (device: {} [{}]) - ping was missed, closing the device connection", deviceInfo.mxid, deviceInfo.name); + // ping was missed, reset the device + watchdogRunning = false; + // close the underlying connection + connection->close(); + } + } + }); + + // prepare watchdog thread, which will keep device alive + watchdogThread = std::thread([this]() { + try { + // constructor often throws in quick start/stop scenarios because + // the connection is close()...usually by DeviceBootloader::close() + XLinkStream stream(connection, bootloader::XLINK_CHANNEL_WATCHDOG, 64); + std::vector watchdogKeepalive = {0, 0, 0, 0}; + std::vector reset = {1, 0, 0, 0}; + while(watchdogRunning) { + try { + stream.write(watchdogKeepalive); + } catch(const std::exception&) { + break; + } + { + std::unique_lock lock(lastWatchdogPingTimeMtx); + lastWatchdogPingTime = std::chrono::steady_clock::now(); + } + // Ping with a period half of that of the watchdog timeout + std::this_thread::sleep_for(bootloader::XLINK_WATCHDOG_TIMEOUT / 2); + } + + try { + // Send reset request + stream.write(reset); + // Dummy read (wait till link falls down) + const auto dummy = stream.readMove(); + } catch(const std::exception&) { + // ignore + } + } catch(const std::exception&) { + // ignore + } + + // Sleep a bit, so device isn't available anymore + std::this_thread::sleep_for(std::chrono::milliseconds(500)); + }); +} + +void DeviceBootloader::destroyWatchdog() { + watchdogRunning = false; + // Check & join previous thread + if(watchdogThread.joinable()) watchdogThread.join(); + // Check & join previous thread + if(monitorThread.joinable()) monitorThread.join(); +} + void DeviceBootloader::init(bool embeddedMvcmd, const dai::Path& pathToMvcmd, tl::optional type, bool allowBlFlash) { stream = nullptr; allowFlashingBootloader = allowBlFlash; @@ -291,242 +372,172 @@ void DeviceBootloader::init(bool embeddedMvcmd, const dai::Path& pathToMvcmd, tl } } - // Init device (if bootloader, handle correctly - issue USB boot command) - if(deviceInfo.state == X_LINK_UNBOOTED) { - // Unbooted device found, boot to BOOTLOADER and connect with XLinkConnection constructor - if(embeddedMvcmd) { - connection = std::make_shared(deviceInfo, getEmbeddedBootloaderBinary(bootloaderType), X_LINK_BOOTLOADER); - } else { - connection = std::make_shared(deviceInfo, pathToMvcmd, X_LINK_BOOTLOADER); - } + // Below can throw - make sure to gracefully exit threads + try { + // Init device (if bootloader, handle correctly - issue USB boot command) + if(deviceInfo.state == X_LINK_UNBOOTED) { + // Unbooted device found, boot to BOOTLOADER and connect with XLinkConnection constructor + if(embeddedMvcmd) { + connection = std::make_shared(deviceInfo, getEmbeddedBootloaderBinary(bootloaderType), X_LINK_BOOTLOADER); + } else { + connection = std::make_shared(deviceInfo, pathToMvcmd, X_LINK_BOOTLOADER); + } - // prepare bootloader stream - stream = std::make_unique(connection, bootloader::XLINK_CHANNEL_BOOTLOADER, bootloader::XLINK_STREAM_MAX_SIZE); + // prepare bootloader stream 
+ stream = std::make_unique(connection, bootloader::XLINK_CHANNEL_BOOTLOADER, bootloader::XLINK_STREAM_MAX_SIZE); - // Retrieve bootloader version - version = requestVersion(); + // Prepare monitor & wd threads in case things go down + createWatchdog(); - // Device wasn't already in bootloader, that means that embedded bootloader is booted - isEmbedded = true; - } else if(deviceInfo.state == X_LINK_BOOTLOADER || deviceInfo.state == X_LINK_FLASH_BOOTED) { - // If device is in flash booted state, reset to bootloader and then continue by booting appropriate FW - if(deviceInfo.state == X_LINK_FLASH_BOOTED) { - // Boot bootloader and set current deviceInfo to new device state - deviceInfo = XLinkConnection::bootBootloader(deviceInfo); - } + // Retrieve bootloader version + version = requestVersion(); - // In this case boot specified bootloader only if current bootloader isn't of correct type - // Check version first, if >= 0.0.12 then check type and then either bootmemory to correct BL or continue as is + // Device wasn't already in bootloader, that means that embedded bootloader is booted + isEmbedded = true; + } else if(deviceInfo.state == X_LINK_BOOTLOADER || deviceInfo.state == X_LINK_FLASH_BOOTED) { + // If device is in flash booted state, reset to bootloader and then continue by booting appropriate FW + if(deviceInfo.state == X_LINK_FLASH_BOOTED) { + // Boot bootloader and set current deviceInfo to new device state + deviceInfo = XLinkConnection::bootBootloader(deviceInfo); + } - // Device already in bootloader mode. - // Connect without booting - connection = std::make_shared(deviceInfo, X_LINK_BOOTLOADER); + // In this case boot specified bootloader only if current bootloader isn't of correct type + // Check version first, if >= 0.0.12 then check type and then either bootmemory to correct BL or continue as is - // If type is specified, try to boot into that BL type - stream = std::make_unique(connection, bootloader::XLINK_CHANNEL_BOOTLOADER, bootloader::XLINK_STREAM_MAX_SIZE); + // Device already in bootloader mode. + // Connect without booting + connection = std::make_shared(deviceInfo, X_LINK_BOOTLOADER); - // Retrieve bootloader version - version = requestVersion(); - if(version >= Version(0, 0, 12)) { - // If version is adequate, do an in memory boot. + // Prepare monitor & wd threads in case things go down + createWatchdog(); - // Send request for bootloader type - if(!sendRequest(Request::GetBootloaderType{})) { - throw std::runtime_error("Error trying to connect to device"); - } - // Receive response - Response::BootloaderType runningBootloaderType; - if(!receiveResponse(runningBootloaderType)) throw std::runtime_error("Error trying to connect to device"); - - // Modify actual bootloader type - bootloaderType = runningBootloaderType.type; - - Type desiredBootloaderType = type.value_or(bootloaderType); - - // If not correct type OR if allowFlashingBootloader is set, then boot internal (latest) bootloader of correct type - if((desiredBootloaderType != bootloaderType) || allowFlashingBootloader) { - // prepare watchdog thread, which will keep device alive - std::atomic wdRunning{true}; - std::thread wd = std::thread([&]() { - // prepare watchdog thread - try { - // constructor can throw in rare+quick start/stop scenarios because - // the connection is close() eg. 
by DeviceBootloader::close() - XLinkStream stream(connection, bootloader::XLINK_CHANNEL_WATCHDOG, 64); - std::vector watchdogKeepalive = {0, 0, 0, 0}; - while(wdRunning) { - try { - stream.write(watchdogKeepalive); - } catch(const std::exception&) { - break; - } - // Ping with a period half of that of the watchdog timeout - std::this_thread::sleep_for(bootloader::XLINK_WATCHDOG_TIMEOUT / 2); - } - } catch(const std::exception&) { - // ignore, probably invalid connection or stream - } - }); - - // Send request to boot firmware directly from bootloader - Request::BootMemory bootMemory; - auto binary = getEmbeddedBootloaderBinary(desiredBootloaderType); - bootMemory.totalSize = static_cast(binary.size()); - bootMemory.numPackets = ((static_cast(binary.size()) - 1) / bootloader::XLINK_STREAM_MAX_SIZE) + 1; - if(!sendRequest(bootMemory)) { + // If type is specified, try to boot into that BL type + stream = std::make_unique(connection, bootloader::XLINK_CHANNEL_BOOTLOADER, bootloader::XLINK_STREAM_MAX_SIZE); + + // Retrieve bootloader version + version = requestVersion(); + if(version >= Version(0, 0, 12)) { + // If version is adequate, do an in memory boot. + + // Send request for bootloader type + if(!sendRequest(Request::GetBootloaderType{})) { throw std::runtime_error("Error trying to connect to device"); } + // Receive response + Response::BootloaderType runningBootloaderType; + if(!receiveResponse(runningBootloaderType)) throw std::runtime_error("Error trying to connect to device"); + + // Modify actual bootloader type + bootloaderType = runningBootloaderType.type; + + Type desiredBootloaderType = type.value_or(bootloaderType); + + // If not correct type OR if allowFlashingBootloader is set, then boot internal (latest) bootloader of correct type + if((desiredBootloaderType != bootloaderType) || allowFlashingBootloader) { + // Send request to boot firmware directly from bootloader + Request::BootMemory bootMemory; + auto binary = getEmbeddedBootloaderBinary(desiredBootloaderType); + bootMemory.totalSize = static_cast(binary.size()); + bootMemory.numPackets = ((static_cast(binary.size()) - 1) / bootloader::XLINK_STREAM_MAX_SIZE) + 1; + if(!sendRequest(bootMemory)) { + throw std::runtime_error("Error trying to connect to device"); + } - // After that send numPackets of data - stream->writeSplit(binary.data(), binary.size(), bootloader::XLINK_STREAM_MAX_SIZE); - // Close existing stream first - stream = nullptr; - // Stop watchdog - wdRunning = false; - wd.join(); - // Close connection - connection->close(); + // After that send numPackets of data + stream->writeSplit(binary.data(), binary.size(), bootloader::XLINK_STREAM_MAX_SIZE); + // Close existing stream first + stream = nullptr; + // Stop watchdog + destroyWatchdog(); + // Close connection + connection->close(); - // Now reconnect - connection = std::make_shared(deviceInfo, X_LINK_BOOTLOADER); + // Now reconnect + connection = std::make_shared(deviceInfo, X_LINK_BOOTLOADER); - // prepare new bootloader stream - stream = std::make_unique(connection, bootloader::XLINK_CHANNEL_BOOTLOADER, bootloader::XLINK_STREAM_MAX_SIZE); + // Recreate watchdog - monitor & wd threads in case things go down + createWatchdog(); - // Retrieve bootloader version - version = requestVersion(); + // prepare new bootloader stream + stream = std::make_unique(connection, bootloader::XLINK_CHANNEL_BOOTLOADER, bootloader::XLINK_STREAM_MAX_SIZE); - // The type of bootloader is now 'desiredBootloaderType' - bootloaderType = desiredBootloaderType; + // Retrieve 
bootloader version + version = requestVersion(); - // Embedded bootloader was used to boot, set to true - isEmbedded = true; - } else { - // Just connected to existing bootloader on device. Set embedded to false - isEmbedded = false; - } + // The type of bootloader is now 'desiredBootloaderType' + bootloaderType = desiredBootloaderType; - } else { - // If version isn't adequate to do an in memory boot - do regular Bootloader -> USB ROM -> Boot transition. - Type desiredBootloaderType = type.value_or(Type::USB); - - // If not correct type OR if allowFlashingBootloader is set, then boot internal (latest) bootloader of correct type - if((desiredBootloaderType != Type::USB) || allowFlashingBootloader) { - // Send request to jump to USB bootloader - // Boot into USB ROM BOOTLOADER NOW - if(!sendRequest(Request::UsbRomBoot{})) { - throw std::runtime_error("Error trying to connect to device"); - } - // Close existing stream first - stream = nullptr; - // Close connection - connection->close(); - - // Now reconnect - // Unbooted device found, boot to BOOTLOADER and connect with XLinkConnection constructor - if(embeddedMvcmd) { - connection = std::make_shared(deviceInfo, getEmbeddedBootloaderBinary(desiredBootloaderType), X_LINK_BOOTLOADER); + // Embedded bootloader was used to boot, set to true + isEmbedded = true; } else { - connection = std::make_shared(deviceInfo, pathToMvcmd, X_LINK_BOOTLOADER); + // Just connected to existing bootloader on device. Set embedded to false + isEmbedded = false; } - // prepare bootloader stream - stream = std::make_unique(connection, bootloader::XLINK_CHANNEL_BOOTLOADER, bootloader::XLINK_STREAM_MAX_SIZE); - - // Retrieve bootloader version - version = requestVersion(); + } else { + // If version isn't adequate to do an in memory boot - do regular Bootloader -> USB ROM -> Boot transition. + Type desiredBootloaderType = type.value_or(Type::USB); + + // If not correct type OR if allowFlashingBootloader is set, then boot internal (latest) bootloader of correct type + if((desiredBootloaderType != Type::USB) || allowFlashingBootloader) { + // Send request to jump to USB bootloader + // Boot into USB ROM BOOTLOADER NOW + if(!sendRequest(Request::UsbRomBoot{})) { + throw std::runtime_error("Error trying to connect to device"); + } + // Close existing stream first + stream = nullptr; + // Close connection + connection->close(); + + // Now reconnect + // Unbooted device found, boot to BOOTLOADER and connect with XLinkConnection constructor + if(embeddedMvcmd) { + connection = std::make_shared(deviceInfo, getEmbeddedBootloaderBinary(desiredBootloaderType), X_LINK_BOOTLOADER); + } else { + connection = std::make_shared(deviceInfo, pathToMvcmd, X_LINK_BOOTLOADER); + } - // The type of bootloader is now 'desiredBootloaderType' - bootloaderType = desiredBootloaderType; + // Prepare monitor & wd threads in case things go down + createWatchdog(); - // Embedded bootloader was used to boot, set to true - isEmbedded = true; + // prepare bootloader stream + stream = std::make_unique(connection, bootloader::XLINK_CHANNEL_BOOTLOADER, bootloader::XLINK_STREAM_MAX_SIZE); - } else { - bootloaderType = dai::bootloader::Type::USB; + // Retrieve bootloader version + version = requestVersion(); - // Just connected to existing bootloader on device. 
Set embedded to false - isEmbedded = false; - } - } + // The type of bootloader is now 'desiredBootloaderType' + bootloaderType = desiredBootloaderType; - } else { - throw std::runtime_error("Device not in UNBOOTED, BOOTLOADER or FLASH_BOOTED state"); - } + // Embedded bootloader was used to boot, set to true + isEmbedded = true; - deviceInfo.state = X_LINK_BOOTLOADER; + } else { + bootloaderType = dai::bootloader::Type::USB; - // Specify "last" ping time (5s in the future, for some grace time) - { - std::unique_lock lock(lastWatchdogPingTimeMtx); - lastWatchdogPingTime = std::chrono::steady_clock::now() + std::chrono::seconds(5); - } - // prepare watchdog thread, which will keep device alive - watchdogThread = std::thread([this]() { - try { - // constructor often throws in quick start/stop scenarios because - // the connection is close()...usually by DeviceBootloader::close() - XLinkStream stream(connection, bootloader::XLINK_CHANNEL_WATCHDOG, 64); - std::vector watchdogKeepalive = {0, 0, 0, 0}; - std::vector reset = {1, 0, 0, 0}; - while(watchdogRunning) { - try { - stream.write(watchdogKeepalive); - } catch(const std::exception&) { - break; - } - { - std::unique_lock lock(lastWatchdogPingTimeMtx); - lastWatchdogPingTime = std::chrono::steady_clock::now(); + // Just connected to existing bootloader on device. Set embedded to false + isEmbedded = false; } - // Ping with a period half of that of the watchdog timeout - std::this_thread::sleep_for(bootloader::XLINK_WATCHDOG_TIMEOUT / 2); } - try { - // Send reset request - stream.write(reset); - // Dummy read (wait till link falls down) - const auto dummy = stream.readMove(); - } catch(const std::exception&) { - // ignore - } - } catch(const std::exception&) { - // ignore + } else { + throw std::runtime_error("Device not in UNBOOTED, BOOTLOADER or FLASH_BOOTED state"); } - // Sleep a bit, so device isn't available anymore - std::this_thread::sleep_for(std::chrono::milliseconds(500)); - }); + deviceInfo.state = X_LINK_BOOTLOADER; - // Start monitor thread for host - makes sure that device is responding to pings, otherwise it disconnects - monitorThread = std::thread([this]() { - while(watchdogRunning) { - // Ping with a period half of that of the watchdog timeout - std::this_thread::sleep_for(bootloader::XLINK_WATCHDOG_TIMEOUT); - // Check if wd was pinged in the specified watchdogTimeout time. - decltype(lastWatchdogPingTime) prevPingTime; - { - std::unique_lock lock(lastWatchdogPingTimeMtx); - prevPingTime = lastWatchdogPingTime; - } - // Recheck if watchdogRunning wasn't already closed and close if more than twice of WD passed - // Bump checking thread to not cause spurious warnings/closes - std::chrono::milliseconds watchdogTimeout = std::chrono::milliseconds(3000); - if(watchdogRunning && std::chrono::steady_clock::now() - prevPingTime > watchdogTimeout * 2) { - logger::warn("Monitor thread (device: {} [{}]) - ping was missed, closing the device connection", deviceInfo.mxid, deviceInfo.name); - // ping was missed, reset the device - watchdogRunning = false; - // close the underlying connection - connection->close(); - } + // Bootloader device ready, check for version + logger::debug("Connected bootloader version {}", version.toString()); + if(getEmbeddedBootloaderVersion() > version) { + logger::info("New bootloader version available. 
Device has: {}, available: {}", version.toString(), getEmbeddedBootloaderVersion().toString()); } - }); - // Bootloader device ready, check for version - logger::debug("Connected bootloader version {}", version.toString()); - if(getEmbeddedBootloaderVersion() > version) { - logger::info("New bootloader version available. Device has: {}, available: {}", version.toString(), getEmbeddedBootloaderVersion().toString()); + } catch(...) { + // catchall, to properly close the threads as we are in constructor context + destroyWatchdog(); + // Rethrow original exception + throw; } } @@ -545,13 +556,8 @@ void DeviceBootloader::close() { // invalid memory, etc. which hard crashes main app connection->close(); - // Stop watchdog - watchdogRunning = false; - - // Stop watchdog first (this resets and waits for link to fall down) - if(watchdogThread.joinable()) watchdogThread.join(); - // At the end stop the monitor thread - if(monitorThread.joinable()) monitorThread.join(); + // destroy Watchdog + destroyWatchdog(); // Close stream // BUGBUG investigate ownership; can another thread accessing this at the same time? diff --git a/src/pipeline/datatype/CameraControl.cpp b/src/pipeline/datatype/CameraControl.cpp index 8ec645a4f..6b1ab049d 100644 --- a/src/pipeline/datatype/CameraControl.cpp +++ b/src/pipeline/datatype/CameraControl.cpp @@ -115,6 +115,14 @@ CameraControl& CameraControl::setAutoExposureCompensation(int compensation) { cfg.expCompensation = compensation; return *this; } +CameraControl& CameraControl::setAutoExposureLimit(uint32_t maxExposureTimeUs) { + cfg.setCommand(RawCameraControl::Command::AE_TARGET_FPS_RANGE); + cfg.aeMaxExposureTimeUs = maxExposureTimeUs; + return *this; +} +CameraControl& CameraControl::setAutoExposureLimit(std::chrono::microseconds maxExposureTime) { + return setAutoExposureLimit(maxExposureTime.count()); +} CameraControl& CameraControl::setAntiBandingMode(AntiBandingMode mode) { cfg.setCommand(RawCameraControl::Command::ANTIBANDING_MODE); cfg.antiBandingMode = mode; @@ -128,8 +136,8 @@ CameraControl& CameraControl::setManualExposure(uint32_t exposureTimeUs, uint32_ return *this; } -void CameraControl::setManualExposure(std::chrono::microseconds exposureTime, uint32_t sensitivityIso) { - setManualExposure(exposureTime.count(), sensitivityIso); +CameraControl& CameraControl::setManualExposure(std::chrono::microseconds exposureTime, uint32_t sensitivityIso) { + return setManualExposure(exposureTime.count(), sensitivityIso); } // White Balance @@ -190,6 +198,16 @@ CameraControl& CameraControl::setEffectMode(EffectMode mode) { cfg.effectMode = mode; return *this; } +CameraControl& CameraControl::setControlMode(ControlMode mode) { + cfg.setCommand(RawCameraControl::Command::CONTROL_MODE); + cfg.controlMode = mode; + return *this; +} +CameraControl& CameraControl::setCaptureIntent(CaptureIntent mode) { + cfg.setCommand(RawCameraControl::Command::CAPTURE_INTENT); + cfg.captureIntent = mode; + return *this; +} bool CameraControl::getCaptureStill() const { return cfg.getCommand(RawCameraControl::Command::STILL_CAPTURE); diff --git a/src/pipeline/datatype/EncodedFrame.cpp b/src/pipeline/datatype/EncodedFrame.cpp new file mode 100644 index 000000000..607df8a1e --- /dev/null +++ b/src/pipeline/datatype/EncodedFrame.cpp @@ -0,0 +1,121 @@ +#include "depthai/pipeline/datatype/EncodedFrame.hpp" + +#include "utility/H26xParsers.hpp" + +namespace dai { + +std::shared_ptr EncodedFrame::serialize() const { + return raw; +} + +EncodedFrame::EncodedFrame() : Buffer(std::make_shared()), 
frame(*dynamic_cast(raw.get())) { + // set timestamp to now + setTimestamp(std::chrono::steady_clock::now()); +} +EncodedFrame::EncodedFrame(std::shared_ptr ptr) : Buffer(std::move(ptr)), frame(*dynamic_cast(raw.get())) {} + +// getters +unsigned int EncodedFrame::getInstanceNum() const { + return frame.instanceNum; +} +std::chrono::microseconds EncodedFrame::getExposureTime() const { + return std::chrono::microseconds(frame.cam.exposureTimeUs); +} +int EncodedFrame::getSensitivity() const { + return frame.cam.sensitivityIso; +} +int EncodedFrame::getColorTemperature() const { + return frame.cam.wbColorTemp; +} +int EncodedFrame::getLensPosition() const { + return frame.cam.lensPosition; +} +unsigned int EncodedFrame::getQuality() const { + return frame.quality; +} +unsigned int EncodedFrame::getBitrate() const { + return frame.bitrate; +} +bool EncodedFrame::getLossless() const { + return frame.lossless; +} +EncodedFrame::FrameType EncodedFrame::getFrameType() const { + if(frame.type == FrameType::Unknown) { + utility::SliceType frameType = utility::SliceType::Unknown; + switch(frame.profile) { + case RawEncodedFrame::Profile::JPEG: + frameType = utility::SliceType::I; + break; + case RawEncodedFrame::Profile::AVC: + frameType = utility::getTypesH264(frame.data, true)[0]; + break; + case RawEncodedFrame::Profile::HEVC: + frameType = utility::getTypesH265(frame.data, true)[0]; + break; + } + switch(frameType) { + case utility::SliceType::P: + frame.type = FrameType::P; + break; + case utility::SliceType::B: + frame.type = FrameType::B; + break; + case utility::SliceType::I: + frame.type = FrameType::I; + break; + case utility::SliceType::SP: + frame.type = FrameType::P; + break; + case utility::SliceType::SI: + frame.type = FrameType::I; + break; + case utility::SliceType::Unknown: + frame.type = FrameType::Unknown; + break; + } + } + return frame.type; +} +EncodedFrame::Profile EncodedFrame::getProfile() const { + return frame.profile; +} + +// setters +EncodedFrame& EncodedFrame::setTimestamp(std::chrono::time_point tp) { + // Set timestamp from timepoint + return static_cast(Buffer::setTimestamp(tp)); +} +EncodedFrame& EncodedFrame::setTimestampDevice(std::chrono::time_point tp) { + // Set timestamp from timepoint + return static_cast(Buffer::setTimestampDevice(tp)); +} +EncodedFrame& EncodedFrame::setSequenceNum(int64_t sequenceNum) { + return static_cast(Buffer::setSequenceNum(sequenceNum)); +} +EncodedFrame& EncodedFrame::setInstanceNum(unsigned int instanceNum) { + frame.instanceNum = instanceNum; + return *this; +} +EncodedFrame& EncodedFrame::setQuality(unsigned int quality) { + frame.quality = quality; + return *this; +} +EncodedFrame& EncodedFrame::setBitrate(unsigned int bitrate) { + frame.bitrate = bitrate; + return *this; +} + +EncodedFrame& EncodedFrame::setLossless(bool lossless) { + frame.lossless = lossless; + return *this; +} +EncodedFrame& EncodedFrame::setFrameType(FrameType frameType) { + frame.type = frameType; + return *this; +} +EncodedFrame& EncodedFrame::setProfile(Profile profile) { + frame.profile = profile; + return *this; +} + +} // namespace dai diff --git a/src/pipeline/datatype/MessageGroup.cpp b/src/pipeline/datatype/MessageGroup.cpp new file mode 100644 index 000000000..9b3f64c75 --- /dev/null +++ b/src/pipeline/datatype/MessageGroup.cpp @@ -0,0 +1,77 @@ +#include "depthai/pipeline/datatype/MessageGroup.hpp" + +#include +#include + +#include "depthai-shared/datatype/DatatypeEnum.hpp" +#include "depthai-shared/datatype/RawBuffer.hpp" + +namespace dai { + 
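+// Usage sketch (illustrative only, not part of this change): host code can build and read a
+// MessageGroup directly. The key names "rgb"/"depth" and the 1 ms threshold below are
+// assumptions made for this example, not values required by the API.
+//
+//     dai::MessageGroup group;
+//     dai::ImgFrame rgb, depth;
+//     group.add("rgb", rgb);
+//     group.add("depth", depth);
+//     if(group.isSynced(1000000 /* ns */)) {
+//         auto frame = group.get<dai::ImgFrame>("rgb");
+//     }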
+std::shared_ptr MessageGroup::serialize() const { + return raw; +} + +MessageGroup::MessageGroup() : Buffer(std::make_shared()), rawGrp(*dynamic_cast(raw.get())) {} +MessageGroup::MessageGroup(std::shared_ptr ptr) : Buffer(std::move(ptr)), rawGrp(*dynamic_cast(raw.get())) {} + +std::shared_ptr MessageGroup::operator[](const std::string& name) { + return group.at(name); +} +void MessageGroup::add(const std::string& name, const std::shared_ptr& value) { + group[name] = value; + rawGrp.group[name] = {value->getRaw(), 0}; +} + +std::unordered_map>::iterator MessageGroup::begin() { + return group.begin(); +} +std::unordered_map>::iterator MessageGroup::end() { + return group.end(); +} + +int64_t MessageGroup::getIntervalNs() const { + if(!rawGrp.group.empty()) { + auto oldest = std::dynamic_pointer_cast(group.begin()->second)->getTimestampDevice(); + auto latest = oldest; + for(const auto& entry : group) { + auto ts = std::dynamic_pointer_cast(entry.second)->getTimestampDevice(); + if(ts < oldest) oldest = ts; + if(ts > latest) latest = ts; + } + return std::chrono::duration_cast(latest - oldest).count(); + } + return {}; +} + +int64_t MessageGroup::getNumMessages() const { + return rawGrp.group.size(); +} + +std::vector MessageGroup::getMessageNames() const { + std::vector names; + names.reserve(group.size()); + for(const auto& entry : group) { + names.push_back(entry.first); + } + return names; +} + +bool MessageGroup::isSynced(int64_t thresholdNs) const { + return getIntervalNs() <= thresholdNs; +} + +// setters +MessageGroup& MessageGroup::setTimestamp(std::chrono::time_point tp) { + // Set timestamp from timepoint + return static_cast(Buffer::setTimestamp(tp)); +} +MessageGroup& MessageGroup::setTimestampDevice(std::chrono::time_point tp) { + // Set timestamp from timepoint + return static_cast(Buffer::setTimestampDevice(tp)); +} +MessageGroup& MessageGroup::setSequenceNum(int64_t sequenceNum) { + return static_cast(Buffer::setSequenceNum(sequenceNum)); +} + +} // namespace dai diff --git a/src/pipeline/datatype/StreamMessageParser.cpp b/src/pipeline/datatype/StreamMessageParser.cpp index db52b55d4..0ce4ad476 100644 --- a/src/pipeline/datatype/StreamMessageParser.cpp +++ b/src/pipeline/datatype/StreamMessageParser.cpp @@ -15,11 +15,13 @@ #include "depthai/pipeline/datatype/Buffer.hpp" #include "depthai/pipeline/datatype/CameraControl.hpp" #include "depthai/pipeline/datatype/EdgeDetectorConfig.hpp" +#include "depthai/pipeline/datatype/EncodedFrame.hpp" #include "depthai/pipeline/datatype/FeatureTrackerConfig.hpp" #include "depthai/pipeline/datatype/IMUData.hpp" #include "depthai/pipeline/datatype/ImageManipConfig.hpp" #include "depthai/pipeline/datatype/ImgDetections.hpp" #include "depthai/pipeline/datatype/ImgFrame.hpp" +#include "depthai/pipeline/datatype/MessageGroup.hpp" #include "depthai/pipeline/datatype/NNData.hpp" #include "depthai/pipeline/datatype/SpatialImgDetections.hpp" #include "depthai/pipeline/datatype/SpatialLocationCalculatorConfig.hpp" @@ -37,11 +39,13 @@ #include "depthai-shared/datatype/RawBuffer.hpp" #include "depthai-shared/datatype/RawCameraControl.hpp" #include "depthai-shared/datatype/RawEdgeDetectorConfig.hpp" +#include "depthai-shared/datatype/RawEncodedFrame.hpp" #include "depthai-shared/datatype/RawFeatureTrackerConfig.hpp" #include "depthai-shared/datatype/RawIMUData.hpp" #include "depthai-shared/datatype/RawImageManipConfig.hpp" #include "depthai-shared/datatype/RawImgDetections.hpp" #include "depthai-shared/datatype/RawImgFrame.hpp" +#include 
"depthai-shared/datatype/RawMessageGroup.hpp" #include "depthai-shared/datatype/RawNNData.hpp" #include "depthai-shared/datatype/RawSpatialImgDetections.hpp" #include "depthai-shared/datatype/RawSpatialLocationCalculatorConfig.hpp" @@ -122,6 +126,10 @@ std::shared_ptr StreamMessageParser::parseMessage(streamPacketDesc_t* return parseDatatype(metadataStart, serializedObjectSize, data); break; + case DatatypeEnum::EncodedFrame: + return parseDatatype(metadataStart, serializedObjectSize, data); + break; + case DatatypeEnum::NNData: return parseDatatype(metadataStart, serializedObjectSize, data); break; @@ -189,13 +197,15 @@ std::shared_ptr StreamMessageParser::parseMessage(streamPacketDesc_t* case DatatypeEnum::ToFConfig: return parseDatatype(metadataStart, serializedObjectSize, data); break; + case DatatypeEnum::MessageGroup: + return parseDatatype(metadataStart, serializedObjectSize, data); + break; } throw std::runtime_error("Bad packet, couldn't parse"); } -std::shared_ptr StreamMessageParser::parseMessageToADatatype(streamPacketDesc_t* const packet) { - DatatypeEnum objectType; +std::shared_ptr StreamMessageParser::parseMessageToADatatype(streamPacketDesc_t* const packet, DatatypeEnum& objectType) { size_t serializedObjectSize; size_t bufferLength; std::tie(objectType, serializedObjectSize, bufferLength) = parseHeader(packet); @@ -213,6 +223,10 @@ std::shared_ptr StreamMessageParser::parseMessageToADatatype(streamPa return std::make_shared(parseDatatype(metadataStart, serializedObjectSize, data)); break; + case DatatypeEnum::EncodedFrame: + return std::make_shared(parseDatatype(metadataStart, serializedObjectSize, data)); + break; + case DatatypeEnum::NNData: return std::make_shared(parseDatatype(metadataStart, serializedObjectSize, data)); break; @@ -281,10 +295,17 @@ std::shared_ptr StreamMessageParser::parseMessageToADatatype(streamPa case DatatypeEnum::ToFConfig: return std::make_shared(parseDatatype(metadataStart, serializedObjectSize, data)); break; + case DatatypeEnum::MessageGroup: + return std::make_shared(parseDatatype(metadataStart, serializedObjectSize, data)); + break; } throw std::runtime_error("Bad packet, couldn't parse (invalid message type)"); } +std::shared_ptr StreamMessageParser::parseMessageToADatatype(streamPacketDesc_t* const packet) { + DatatypeEnum objectType; + return parseMessageToADatatype(packet, objectType); +} std::vector StreamMessageParser::serializeMessage(const RawBuffer& data) { // Serialization: @@ -293,8 +314,8 @@ std::vector StreamMessageParser::serializeMessage(const RawBuffer& // 3. append datatype enum (4B LE) // 4. 
append size (4B LE) of serialized metadata - std::vector metadata; DatatypeEnum datatype; + std::vector metadata; data.serialize(metadata, datatype); uint32_t metadataSize = static_cast(metadata.size()); diff --git a/src/pipeline/node/MessageDemux.cpp b/src/pipeline/node/MessageDemux.cpp new file mode 100644 index 000000000..b30a1dba5 --- /dev/null +++ b/src/pipeline/node/MessageDemux.cpp @@ -0,0 +1,16 @@ +#include "depthai/pipeline/node/MessageDemux.hpp" + +namespace dai { +namespace node { + +MessageDemux::MessageDemux(const std::shared_ptr& par, int64_t nodeId) + : MessageDemux(par, nodeId, std::make_unique()) {} +MessageDemux::MessageDemux(const std::shared_ptr& par, int64_t nodeId, std::unique_ptr props) + : NodeCRTP(par, nodeId, std::move(props)), + outputs("outputs", Output(*this, "", Output::Type::MSender, {{DatatypeEnum::Buffer, true}})) { + setInputRefs({&input}); + setOutputMapRefs(&outputs); +} + +} // namespace node +} // namespace dai diff --git a/src/pipeline/node/Sync.cpp b/src/pipeline/node/Sync.cpp new file mode 100644 index 000000000..b2d9d04a6 --- /dev/null +++ b/src/pipeline/node/Sync.cpp @@ -0,0 +1,31 @@ +#include "depthai/pipeline/node/Sync.hpp" + +namespace dai { +namespace node { + +Sync::Sync(const std::shared_ptr& par, int64_t nodeId) : Sync(par, nodeId, std::make_unique()) {} +Sync::Sync(const std::shared_ptr& par, int64_t nodeId, std::unique_ptr props) + : NodeCRTP(par, nodeId, std::move(props)), + inputs("inputs", Input(*this, "", Input::Type::SReceiver, {{DatatypeEnum::Buffer, true}})) { + setInputMapRefs(&inputs); + setOutputRefs({&out}); +} + +void Sync::setSyncThreshold(std::chrono::nanoseconds syncThreshold) { + properties.syncThresholdNs = syncThreshold.count(); +} + +void Sync::setSyncAttempts(int syncAttempts) { + properties.syncAttempts = syncAttempts; +} + +std::chrono::nanoseconds Sync::getSyncThreshold() const { + return std::chrono::nanoseconds(properties.syncThresholdNs); +} + +int Sync::getSyncAttempts() const { + return properties.syncAttempts; +} + +} // namespace node +} // namespace dai diff --git a/src/pipeline/node/VideoEncoder.cpp b/src/pipeline/node/VideoEncoder.cpp index da8efc018..0ff9afc7f 100644 --- a/src/pipeline/node/VideoEncoder.cpp +++ b/src/pipeline/node/VideoEncoder.cpp @@ -15,7 +15,7 @@ VideoEncoder::VideoEncoder(const std::shared_ptr& par, int64_t nod VideoEncoder::VideoEncoder(const std::shared_ptr& par, int64_t nodeId, std::unique_ptr props) : NodeCRTP(par, nodeId, std::move(props)) { setInputRefs({&input}); - setOutputRefs({&bitstream}); + setOutputRefs({&bitstream, &out}); } // node properties void VideoEncoder::setNumFramesPool(int frames) { diff --git a/src/utility/Files.hpp b/src/utility/Files.hpp new file mode 100644 index 000000000..e712feb99 --- /dev/null +++ b/src/utility/Files.hpp @@ -0,0 +1,38 @@ +#pragma once + +#include + +#include +#include + +#include "Platform.hpp" + +namespace dai { + +tl::optional saveFileToTemporaryDirectory(std::vector data, std::string filename, std::string fpath = "") { + if(fpath.empty()) { + fpath = platform::getTempPath(); + } + std::string path = std::string(fpath); + if(path.back() != '/' && path.back() != '\\') { + path += '/'; + } + path += filename; + + std::ofstream file(path, std::ios::binary); + if(!file.is_open()) { + spdlog::error("Couldn't open file {} for writing", path); + return tl::nullopt; + } + + file.write(reinterpret_cast(data.data()), data.size()); + file.close(); + if(!file.good()) { + spdlog::error("Couldn't write to file {}", path); + return tl::nullopt; + } + 
spdlog::debug("Saved file {} to {}", filename, path); + return std::string(path); +} + +} // namespace dai diff --git a/src/utility/H26xParsers.cpp b/src/utility/H26xParsers.cpp new file mode 100644 index 000000000..b0c2da0dc --- /dev/null +++ b/src/utility/H26xParsers.cpp @@ -0,0 +1,292 @@ +#include "H26xParsers.hpp" + +#include +#include + +namespace dai { +namespace utility { + +template +struct H26xParser { + protected: + virtual void parseNal(const std::vector& bs, unsigned int start, std::vector& out) = 0; + std::vector parseBytestream(const std::vector& bs, bool breakOnFirst); + + public: + static std::vector getTypes(const std::vector& bs, bool breakOnFirst); + virtual ~H26xParser() = default; +}; + +struct H264Parser : H26xParser { + void parseNal(const std::vector& bs, unsigned int start, std::vector& out); +}; + +struct H265Parser : H26xParser { + unsigned int nalUnitType = 0; // In NAL header + unsigned int dependentSliceSegmentsEnabledFlag = 0; // In picture parameter set + unsigned int numExtraSliceHeaderBits = 0; // In picture parameter set + // Getting the length of slice_segment address + unsigned int picWidthInLumaSamples = 0; // In sequence parameter set + unsigned int picHeightInLumaSamples = 0; // In sequence parameter set + unsigned int chromaFormatIdc = 0; // In sequence parameter set + unsigned int log2DiffMaxMinLumaCodingBlockSize = 0; // In sequence parameter set + unsigned int log2MinLumaCodingBlockSizeMinus3 = 0; // In sequence parameter set + + void parseNal(const std::vector& bs, unsigned int start, std::vector& out); +}; + +typedef unsigned int uint; +typedef unsigned long ulong; +typedef const std::vector buf; + +SliceType getSliceType(uint num, Profile p) { + switch(p) { + case Profile::H264: + switch(num) { + case 0: + case 5: + return SliceType::P; + case 1: + case 6: + return SliceType::B; + case 2: + case 7: + return SliceType::I; + case 3: + case 8: + return SliceType::SP; + case 4: + case 9: + return SliceType::SI; + default: + return SliceType::Unknown; + } + case Profile::H265: + switch(num) { + case 0: + return SliceType::B; + case 1: + return SliceType::P; + case 2: + return SliceType::I; + default: + return SliceType::Unknown; + } + default: + return SliceType::Unknown; + } +} + +bool scodeEq(buf& bs, uint pos, buf code) { + if(bs.size() - pos > code.size()) { + for(uint i = 0; i < code.size(); ++i) { + if(bs[pos + i] != code[i]) return false; + } + return true; + } else + return false; +} + +uint findStart(buf& bs, uint pos) { + buf codeLong = {0, 0, 0, 1}; + buf codeShort = {0, 0, 1}; + uint size = bs.size(); + for(uint i = pos; i < size; ++i) { + if(bs[i] == 0) { + if(scodeEq(bs, i, codeLong)) { + return i + 4; + } else if(scodeEq(bs, i, codeShort)) { + return i + 3; + } + } + } + return size; +} + +uint findEnd(buf& bs, uint pos) { + buf end1 = {0, 0, 0}; + buf end2 = {0, 0, 1}; + uint size = bs.size(); + for(uint i = pos; i < size; ++i) { + if(bs[i] == 0) { + if(scodeEq(bs, i, end1) || scodeEq(bs, i, end2)) { + return i; + } + } + } + return size; +} + +uint readUint(buf& bs, ulong start, ulong end) { + uint ret = 0; + for(ulong i = start; i < end; ++i) { + uint bit = (bs[(uint)(i / 8)] & (1 << (7 - i % 8))) > 0; + ret += bit * (1 << (end - i - 1)); + } + return ret; +} + +std::tuple readGE(buf& bs, ulong pos) { + uint count = 0; + ulong size = bs.size() * 8; + while(pos < size) { + std::uint8_t bit = bs[(uint)(pos / 8)] & (char)(1 << (7 - pos % 8)); + if(bit == 0) { + ++count; + ++pos; + } else + break; + } + ++count; + ulong start = pos; + 
ulong end = pos + count; + if(end > size) exit(30); + return std::make_tuple(readUint(bs, start, end) - 1, end); +} + +template +std::vector H26xParser::getTypes(buf& buffer, bool breakOnFirst) { + T p; + return p.parseBytestream(buffer, breakOnFirst); +} + +template +std::vector H26xParser::parseBytestream(buf& bs, bool breakOnFirst) { + uint pos = 0; + uint size = bs.size(); + std::vector ret; + while(pos < size) { + uint start = findStart(bs, pos); + uint end = findEnd(bs, start); + if(start >= end) break; + parseNal(bs, start, ret); + pos = end; + if(breakOnFirst && ret.size() > 0) break; + } + return ret; +} + +void H264Parser::parseNal(buf& bs, uint start, std::vector& out) { + uint pos = start; + uint nalUnitType = bs[pos++] & 31; + uint nalUnitHeaderBytes = 1; + if(nalUnitType == 14 || nalUnitType == 20 || nalUnitType == 21) { + uint avc3dExtensionFlag = 0; + if(nalUnitType == 21) avc3dExtensionFlag = readUint(bs, pos * 8, pos * 8 + 1); + if(avc3dExtensionFlag) + nalUnitHeaderBytes += 2; + else + nalUnitHeaderBytes += 3; + } + pos = start + nalUnitHeaderBytes; + if(nalUnitType == 1 || nalUnitType == 5) { + const auto bpos1 = std::get<1>(readGE(bs, pos * 8)); + uint tyNum; + ulong bpos2; + std::tie(tyNum, bpos2) = readGE(bs, bpos1); + pos = (uint)(bpos2 / 8) + (bpos2 % 8 > 0); + out.push_back(getSliceType(tyNum, Profile::H264)); + } +} + +void H265Parser::parseNal(buf& bs, uint start, std::vector& out) { + nalUnitType = (bs[start] & 126) >> 1; + uint pos = start + 2; + if(nalUnitType == 33) { + // Sequence parameter set + uint spsMaxSubLayersMinus1 = (bs[pos] & 14) >> 1; + ++pos; + const auto bpos1 = std::get<1>(readGE(bs, pos * 8)); // We don't need this value + uint chr; + ulong bpos2; + std::tie(chr, bpos2) = readGE(bs, bpos1); + chromaFormatIdc = chr; + if(chromaFormatIdc) ++bpos2; // We don't need this value + uint pw; + ulong bpos3; + std::tie(pw, bpos3) = readGE(bs, bpos2); + uint ph; + ulong bpos4; + std::tie(ph, bpos4) = readGE(bs, bpos3); + picWidthInLumaSamples = pw; + picHeightInLumaSamples = ph; + uint conformanceWindowFlag = readUint(bs, bpos4, bpos4 + 1); + ulong bpos8 = ++bpos4; + if(conformanceWindowFlag) { + // We don't care about any of these values + auto bpos5 = std::get<1>(readGE(bs, bpos4)); + auto bpos6 = std::get<1>(readGE(bs, bpos5)); + auto bpos7 = std::get<1>(readGE(bs, bpos6)); + auto tbpos8 = std::get<1>(readGE(bs, bpos7)); + bpos8 = tbpos8; + } + auto bpos9 = std::get<1>(readGE(bs, bpos8)); + auto bpos10 = std::get<1>(readGE(bs, bpos9)); + auto bpos11 = std::get<1>(readGE(bs, bpos10)); + uint spsSubLayerOrderingInfoPresentFlag = readUint(bs, bpos11, bpos11 + 1); + ulong bpos12 = ++bpos11; + for(uint i = spsSubLayerOrderingInfoPresentFlag ? 
0 : spsMaxSubLayersMinus1; i <= spsMaxSubLayersMinus1; ++i) { + auto tbpos1 = std::get<1>(readGE(bs, bpos12)); + auto tbpos2 = std::get<1>(readGE(bs, tbpos1)); + auto tbpos3 = std::get<1>(readGE(bs, tbpos2)); + bpos12 = tbpos3; + } + uint lm, ldm; + ulong bpos13, bpos14; + std::tie(lm, bpos13) = readGE(bs, bpos12); + std::tie(ldm, bpos14) = readGE(bs, bpos13); + log2MinLumaCodingBlockSizeMinus3 = lm; + log2DiffMaxMinLumaCodingBlockSize = ldm; + pos = (uint)(bpos14 / 8) + (bpos14 % 8 > 0); // Update byte position with newest bit position + } else if(nalUnitType == 34) { + // Picture parameter set + auto bpos1 = std::get<1>(readGE(bs, pos * 8)); + auto bpos2 = std::get<1>(readGE(bs, bpos1)); + dependentSliceSegmentsEnabledFlag = readUint(bs, bpos2, bpos2 + 1); + bpos2 += 2; + numExtraSliceHeaderBits = readUint(bs, bpos2, bpos2 + 3); + bpos2 += 3; + pos = (uint)(bpos2 / 8) + (bpos2 % 8 > 0); + } else if(nalUnitType <= 9 || (16 <= nalUnitType && nalUnitType <= 21)) { + // Coded slice segment + ulong bpos1 = pos * 8; + uint firstSliceSegmentInPicFlag = readUint(bs, bpos1, bpos1 + 1); + ++bpos1; + if(16 <= nalUnitType && nalUnitType <= 23) ++bpos1; + auto bpos2 = std::get<1>(readGE(bs, bpos1)); + uint dependentSliceSegmentFlag = 0; + if(!firstSliceSegmentInPicFlag) { + if(dependentSliceSegmentsEnabledFlag) { + dependentSliceSegmentFlag = readUint(bs, bpos2, bpos2 + 1); + ++bpos2; + } + uint ctbLog2SizeY = log2MinLumaCodingBlockSizeMinus3 + 3 + log2DiffMaxMinLumaCodingBlockSize; + uint ctbSizeY = 1 << ctbLog2SizeY; + uint picWidthInCtbsY = ceil(picWidthInLumaSamples / ctbSizeY); + uint picHeightInCtbsY = ceil(picHeightInLumaSamples / ctbSizeY); + uint picSizeInCtbsY = picWidthInCtbsY * picHeightInCtbsY; + uint vlen = ceil(log2(picSizeInCtbsY)); + bpos2 += vlen; + } + ulong bpos3 = bpos2; + if(!dependentSliceSegmentFlag) { + bpos2 += numExtraSliceHeaderBits; + uint tyNum; + ulong tbpos3; + std::tie(tyNum, tbpos3) = readGE(bs, bpos2); + bpos3 = tbpos3; + out.push_back(getSliceType(tyNum, Profile::H265)); + } + pos = (uint)(bpos3 / 8) + (bpos3 % 8 > 0); + } +} + +std::vector getTypesH264(buf& bs, bool breakOnFirst) { + return H264Parser::getTypes(bs, breakOnFirst); +} +std::vector getTypesH265(buf& bs, bool breakOnFirst) { + return H265Parser::getTypes(bs, breakOnFirst); +} + +} // namespace utility +} // namespace dai diff --git a/src/utility/H26xParsers.hpp b/src/utility/H26xParsers.hpp new file mode 100644 index 000000000..a8b1e3b6c --- /dev/null +++ b/src/utility/H26xParsers.hpp @@ -0,0 +1,16 @@ +#pragma once + +#include +#include + +namespace dai { +namespace utility { + +enum class Profile { H264, H265 }; +enum class SliceType { P, B, I, SP, SI, Unknown }; + +std::vector getTypesH264(const std::vector& bs, bool breakOnFirst = false); +std::vector getTypesH265(const std::vector& bs, bool breakOnFirst = false); + +} // namespace utility +} // namespace dai diff --git a/src/utility/Initialization.cpp b/src/utility/Initialization.cpp index 5225ed6aa..7ed63fad1 100644 --- a/src/utility/Initialization.cpp +++ b/src/utility/Initialization.cpp @@ -78,6 +78,15 @@ bool initialize(const char* additionalInfo, bool installSignalHandler, void* jav (void)installSignalHandler; #endif + // Read JavaVM pointer from env if present + { + auto javavmEnvStr = utility::getEnv("DEPTHAI_LIBUSB_ANDROID_JAVAVM"); + if(javavm == nullptr && !javavmEnvStr.empty()) { + // Read the uintptr_t value from the decimal string + sscanf(javavmEnvStr.c_str(), "%" SCNuPTR, reinterpret_cast(&javavm)); + } + } + // Print core commit 
and build datetime if(additionalInfo != nullptr && additionalInfo[0] != '\0') { logger::debug("{}", additionalInfo); diff --git a/src/utility/Platform.cpp b/src/utility/Platform.cpp index 7847a6f44..09a5dbc6d 100644 --- a/src/utility/Platform.cpp +++ b/src/utility/Platform.cpp @@ -13,6 +13,14 @@ #include #endif +#if defined(_WIN32) || defined(__USE_W32_SOCKETS) + #include +#endif + +#ifndef _WIN32 + #include +#endif + namespace dai { namespace platform { @@ -48,5 +56,24 @@ std::string getIPv4AddressAsString(std::uint32_t binary) { return {address}; } +std::string getTempPath() { + std::string tmpPath; +#if defined(_WIN32) || defined(__USE_W32_SOCKETS) + char tmpPathBuffer[MAX_PATH]; + GetTempPathA(MAX_PATH, tmpPathBuffer); + tmpPath = tmpPathBuffer; +#else + char tmpTemplate[] = "/tmp/depthai_XXXXXX"; + char* tmpName = mkdtemp(tmpTemplate); + if(tmpName == nullptr) { + tmpPath = "/tmp"; + } else { + tmpPath = tmpName; + tmpPath += '/'; + } +#endif + return tmpPath; +} + } // namespace platform } // namespace dai diff --git a/src/utility/Platform.hpp b/src/utility/Platform.hpp index ef299164b..3a436bd39 100644 --- a/src/utility/Platform.hpp +++ b/src/utility/Platform.hpp @@ -1,13 +1,14 @@ #pragma once -#include #include +#include namespace dai { namespace platform { uint32_t getIPv4AddressAsBinary(std::string address); std::string getIPv4AddressAsString(std::uint32_t binary); +std::string getTempPath(); -} -} +} // namespace platform +} // namespace dai diff --git a/tests/CMakeLists.txt b/tests/CMakeLists.txt index 381072b80..4459d6bc6 100644 --- a/tests/CMakeLists.txt +++ b/tests/CMakeLists.txt @@ -225,6 +225,10 @@ dai_add_test(device_usbspeed_test src/device_usbspeed_test.cpp CONFORMING) dai_add_test(device_usbspeed_test_17 src/device_usbspeed_test.cpp CONFORMING CXX_STANDARD 17) dai_add_test(device_usbspeed_test_20 src/device_usbspeed_test.cpp CONFORMING CXX_STANDARD 20) +dai_add_test(encoded_frame_test src/encoded_frame_test.cpp CXX_STANDARD 17) + +dai_add_test(message_group_frame_test src/message_group_test.cpp CXX_STANDARD 17) + # Unlimited io connections test dai_add_test(unlimited_io_connection_test src/unlimited_io_connection_test.cpp) diff --git a/tests/src/encoded_frame_test.cpp b/tests/src/encoded_frame_test.cpp new file mode 100644 index 000000000..8c68bd79f --- /dev/null +++ b/tests/src/encoded_frame_test.cpp @@ -0,0 +1,122 @@ +#include +#include +#include + +#include "depthai-shared/datatype/RawEncodedFrame.hpp" +#include "depthai-shared/properties/VideoEncoderProperties.hpp" +#include "depthai/device/Device.hpp" +#include "depthai/pipeline/Pipeline.hpp" +#include "depthai/pipeline/datatype/EncodedFrame.hpp" +#include "depthai/pipeline/datatype/ImgFrame.hpp" +#include "depthai/pipeline/node/ColorCamera.hpp" +#include "depthai/pipeline/node/VideoEncoder.hpp" +#include "depthai/pipeline/node/XLinkOut.hpp" + +dai::Pipeline getPipeline(dai::VideoEncoderProperties::Profile profile, unsigned int quality, bool lossless, unsigned int bitrate) { + dai::Pipeline pipeline; + auto camNode = pipeline.create(); + auto encNode = pipeline.create(); + auto xlinkOut = pipeline.create(); + camNode->video.link(encNode->input); + encNode->out.link(xlinkOut->input); + + camNode->setVideoSize(1280, 720); + encNode->setProfile(profile); + encNode->setBitrate(bitrate); + encNode->setQuality(quality); + encNode->setLossless(lossless); + encNode->setKeyframeFrequency(30); + xlinkOut->setStreamName("out"); + + return pipeline; +} + +TEST_CASE("OLD_OUTPUT") { + dai::Pipeline pipeline; + auto camNode = 
pipeline.create(); + auto encNode = pipeline.create(); + auto xlinkOut = pipeline.create(); + camNode->video.link(encNode->input); + encNode->bitstream.link(xlinkOut->input); + + camNode->setVideoSize(1280, 720); + encNode->setProfile(dai::VideoEncoderProperties::Profile::H264_MAIN); + xlinkOut->setStreamName("out"); + + dai::Device device(pipeline); + + auto outQ = device.getOutputQueue("out"); + for(int i = 0; i < 100; ++i) { + REQUIRE_NOTHROW(outQ->get()); + } +} + +TEST_CASE("JPEG_ENCODING_LOSSLESS") { + dai::Device device(getPipeline(dai::VideoEncoderProperties::Profile::MJPEG, 30, true, 0)); + + auto outQ = device.getOutputQueue("out"); + for(int i = 0; i < 100; ++i) { + auto encfrm = outQ->get(); + REQUIRE(encfrm->getProfile() == dai::EncodedFrame::Profile::JPEG); + REQUIRE(encfrm->getLossless() == true); + REQUIRE(encfrm->getQuality() == 30); + } +} + +TEST_CASE("JPEG_ENCODING_LOSSY") { + dai::Device device(getPipeline(dai::VideoEncoderProperties::Profile::MJPEG, 30, false, 0)); + + auto outQ = device.getOutputQueue("out"); + for(int i = 0; i < 100; ++i) { + auto encfrm = outQ->get(); + REQUIRE(encfrm->getProfile() == dai::EncodedFrame::Profile::JPEG); + REQUIRE(encfrm->getLossless() == false); + REQUIRE(encfrm->getQuality() == 30); + } +} + +TEST_CASE("AVC_ENCODING") { + dai::Device device(getPipeline(dai::VideoEncoderProperties::Profile::H264_HIGH, 30, false, 8500000)); + + auto outQ = device.getOutputQueue("out"); + for(int i = 0; i < 100; ++i) { + auto encfrm = outQ->get(); + REQUIRE(encfrm->getProfile() == dai::EncodedFrame::Profile::AVC); + REQUIRE(encfrm->getLossless() == false); + if(i % 30 == 0) REQUIRE(encfrm->getFrameType() == dai::EncodedFrame::FrameType::I); + REQUIRE(encfrm->getQuality() == 30); + REQUIRE(encfrm->getBitrate() == 8500000); + } +} + +TEST_CASE("HEVC_ENCODING") { + dai::Device device(getPipeline(dai::VideoEncoderProperties::Profile::H265_MAIN, 30, false, 8500000)); + + auto outQ = device.getOutputQueue("out"); + for(int i = 0; i < 100; ++i) { + auto encfrm = outQ->get(); + REQUIRE(encfrm->getProfile() == dai::EncodedFrame::Profile::HEVC); + REQUIRE(encfrm->getLossless() == false); + if(i % 30 == 0) REQUIRE(encfrm->getFrameType() == dai::EncodedFrame::FrameType::I); + REQUIRE(encfrm->getQuality() == 30); + REQUIRE(encfrm->getBitrate() == 8500000); + } +} + +TEST_CASE("LINK_TO_BOTH") { + dai::Pipeline pipeline; + auto camNode = pipeline.create(); + auto encNode = pipeline.create(); + auto xlinkOut1 = pipeline.create(); + auto xlinkOut2 = pipeline.create(); + camNode->video.link(encNode->input); + encNode->bitstream.link(xlinkOut1->input); + encNode->out.link(xlinkOut2->input); + + camNode->setVideoSize(1280, 720); + encNode->setProfile(dai::VideoEncoderProperties::Profile::H264_MAIN); + xlinkOut1->setStreamName("out1"); + xlinkOut2->setStreamName("out2"); + + REQUIRE_THROWS(dai::Device(pipeline)); +} diff --git a/tests/src/message_group_test.cpp b/tests/src/message_group_test.cpp new file mode 100644 index 000000000..03e3ee00b --- /dev/null +++ b/tests/src/message_group_test.cpp @@ -0,0 +1,131 @@ +#include +#include +#include +#include +#include +#include + +TEST_CASE("Set and get messages") { + auto buf1Ts = std::chrono::steady_clock::now() + std::chrono::milliseconds(100); + auto buf2Ts = std::chrono::steady_clock::now() + std::chrono::milliseconds(200); + std::vector buf1Data = {1, 2, 3}; + std::vector buf2Data = {4, 5, 6}; + dai::MessageGroup msgGrp; + dai::Buffer buf; + buf.setTimestamp(buf1Ts); + buf.setData(buf1Data); + msgGrp.add("buf1", buf); + + 
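+    // A message of a different concrete type (ImgFrame) can be stored in the same group under its own key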
dai::ImgFrame img; + img.setTimestamp(buf2Ts); + img.setData(buf2Data); + img.setSize({5, 6}); + msgGrp.add("img1", img); + + REQUIRE(msgGrp.get("buf1")->getTimestamp() == buf1Ts); + REQUIRE(msgGrp.get("img1")->getTimestamp() == buf2Ts); + REQUIRE(msgGrp.get("buf1")->getData() == buf1Data); + REQUIRE(msgGrp.get("img1")->getData() == buf2Data); + REQUIRE(msgGrp.get("img1")->getWidth() == 5); + REQUIRE(msgGrp.get("img1")->getHeight() == 6); +} + +// TODO(asahtik): Bring back when the [issue](https://github.com/luxonis/depthai-core/issues/929) is fixed +// TEST_CASE("Sync - demux") { +// auto buf1Ts = std::chrono::steady_clock::now() + std::chrono::milliseconds(100); +// auto buf2Ts = std::chrono::steady_clock::now() + std::chrono::milliseconds(150); +// +// dai::Pipeline pipeline; +// auto xout1 = pipeline.create(); +// xout1->setStreamName("out1"); +// auto xout2 = pipeline.create(); +// xout2->setStreamName("out2"); +// +// auto demux = pipeline.create(); +// +// auto sync = pipeline.create(); +// sync->setSyncThreshold(std::chrono::milliseconds(100)); +// +// auto xin1 = pipeline.create(); +// xin1->setStreamName("in1"); +// auto xin2 = pipeline.create(); +// xin2->setStreamName("in2"); +// +// xin1->out.link(sync->inputs["buf1"]); +// xin2->out.link(sync->inputs["buf2"]); +// sync->out.link(demux->input); +// demux->outputs["buf1"].link(xout1->input); +// demux->outputs["buf2"].link(xout2->input); +// +// dai::Device device(pipeline); +// +// auto inQ1 = device.getInputQueue("in1"); +// auto inQ2 = device.getInputQueue("in2"); +// auto outQ1 = device.getOutputQueue("out1"); +// auto outQ2 = device.getOutputQueue("out2"); +// +// dai::Buffer buf1; +// buf1.setData({1, 2, 3, 4, 5}); +// buf1.setTimestamp(buf1Ts); +// +// dai::ImgFrame img1; +// img1.setData({6, 7, 8, 9, 10}); +// img1.setTimestamp(buf2Ts); +// img1.setSize({5, 6}); +// +// inQ1->send(buf1); +// inQ2->send(img1); +// +// auto out1 = outQ1->get(); +// auto out2 = outQ2->get(); +// +// REQUIRE(out1->getTimestamp() == buf1Ts); +// REQUIRE(out2->getTimestamp() == buf2Ts); +// REQUIRE(out1->getData() == std::vector{1, 2, 3, 4, 5}); +// REQUIRE(out2->getData() == std::vector{6, 7, 8, 9, 10}); +// REQUIRE(out2->getWidth() == 5); +// REQUIRE(out2->getHeight() == 6); +// } + +TEST_CASE("MessageGroup ping-pong") { + auto buf1Ts = std::chrono::steady_clock::now() + std::chrono::milliseconds(100); + auto buf2Ts = std::chrono::steady_clock::now() + std::chrono::milliseconds(150); + + dai::Pipeline pipeline; + auto xout = pipeline.create(); + xout->setStreamName("out"); + + auto xin = pipeline.create(); + xin->setStreamName("in"); + + xin->out.link(xout->input); + + dai::Device device(pipeline); + + auto inQ = device.getInputQueue("in"); + auto outQ = device.getOutputQueue("out"); + + dai::Buffer buf1; + buf1.setData({1, 2, 3, 4, 5}); + buf1.setTimestamp(buf1Ts); + + dai::ImgFrame img1; + img1.setData({6, 7, 8, 9, 10}); + img1.setTimestamp(buf2Ts); + img1.setSize({5, 6}); + + dai::MessageGroup msgGrp; + msgGrp.add("buf1", buf1); + msgGrp.add("img1", img1); + + inQ->send(msgGrp); + + auto out = outQ->get(); + + // REQUIRE(out->get("buf1")->getTimestamp() == buf1Ts); + // REQUIRE(out->get("img1")->getTimestamp() == buf2Ts); + REQUIRE(out->get("buf1")->getData() == std::vector{1, 2, 3, 4, 5}); + REQUIRE(out->get("img1")->getData() == std::vector{6, 7, 8, 9, 10}); + REQUIRE(out->get("img1")->getWidth() == 5); + REQUIRE(out->get("img1")->getHeight() == 6); +}
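A minimal usage sketch of the Sync node and MessageGroup datatype introduced above, showing how a host application might consume synchronized frames without demuxing them (it mirrors the disabled "Sync - demux" test). The stream name "xout", the input keys "video" and "mono", the CAM_B socket choice and the 10 ms threshold are assumptions made for illustration, not values required by the API.

#include <chrono>
#include <iostream>
#include <memory>

#include "depthai/depthai.hpp"

int main() {
    dai::Pipeline pipeline;

    // Two image sources to synchronize (the CAM_B socket is an assumption for this sketch)
    auto color = pipeline.create<dai::node::ColorCamera>();
    auto mono = pipeline.create<dai::node::MonoCamera>();
    mono->setBoardSocket(dai::CameraBoardSocket::CAM_B);

    // Group messages whose timestamps fall within the configured threshold
    auto sync = pipeline.create<dai::node::Sync>();
    sync->setSyncThreshold(std::chrono::milliseconds(10));

    auto xout = pipeline.create<dai::node::XLinkOut>();
    xout->setStreamName("xout");

    color->video.link(sync->inputs["video"]);
    mono->out.link(sync->inputs["mono"]);
    sync->out.link(xout->input);

    dai::Device device(pipeline);
    auto queue = device.getOutputQueue("xout", 4, false);

    while(true) {
        // One MessageGroup per synchronized set of inputs
        auto group = queue->get<dai::MessageGroup>();
        std::cout << "Group interval: " << group->getIntervalNs() << " ns" << std::endl;
        for(const auto& entry : *group) {
            auto frame = std::dynamic_pointer_cast<dai::ImgFrame>(entry.second);
            if(frame) {
                std::cout << "  " << entry.first << ": seq " << frame->getSequenceNum() << std::endl;
            }
        }
    }
    return 0;
}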