66 */
77
88#include < atomic>
9+ #include < algorithm>
10+ #include < cstdlib>
911#include < csignal>
12+ #include < cctype>
1013#include < cstring>
1114#include < iostream>
15+ #include < string>
16+ #include < vector>
1217
1318#include " depthai/depthai.hpp"
1419#include " depthai/pipeline/MessageQueue.hpp"
20+ #include " depthai/pipeline/datatype/Buffer.hpp"
1521#include " depthai/pipeline/datatype/ImgFrame.hpp"
1622#include " uvc_example.hpp"
1723
@@ -35,6 +41,28 @@ std::atomic<bool> quitEvent(false);
3541std::shared_ptr<dai::InputQueue> inputQueue{nullptr };
3642std::shared_ptr<dai::MessageQueue> outputQueue;
3743
// Payload format exposed over UVC.
enum class StreamFormat {
    MJPEG,
    UNCOMPRESSED,
};

// Selected stream format; written once in main() before streaming starts,
// then only read from the buffer callback.
static StreamFormat gStreamFormat = StreamFormat::UNCOMPRESSED;
// Scratch buffer used to repack strided NV12 frames into a compact layout.
static std::vector<uint8_t> gNv12Buffer;

// Reads the UVC_FORMAT environment variable ("mjpeg", "uncompressed" or
// "nv12", case-insensitive) and maps it to a StreamFormat. An unset
// variable silently selects uncompressed NV12; an unrecognized value does
// the same but warns on stderr.
static StreamFormat parseStreamFormat() {
    const char* raw = std::getenv("UVC_FORMAT");
    if(raw == nullptr) return StreamFormat::UNCOMPRESSED;

    std::string normalized(raw);
    for(auto& ch : normalized) {
        ch = static_cast<char>(std::tolower(static_cast<unsigned char>(ch)));
    }

    if(normalized == "mjpeg") return StreamFormat::MJPEG;
    if(normalized == "uncompressed" || normalized == "nv12") return StreamFormat::UNCOMPRESSED;

    std::cerr << "Unknown UVC_FORMAT=\"" << normalized << "\", defaulting to uncompressed NV12." << std::endl;
    return StreamFormat::UNCOMPRESSED;
}
65+
3866/* Necessary for and only used by signal handler. */
3967static struct events *sigint_events;
4068
@@ -48,22 +76,64 @@ void signalHandler(int signum) {
4876
4977extern " C" void depthai_uvc_get_buffer (struct video_source *s, struct video_buffer *buf) {
5078 unsigned int frame_size, size;
51- uint8_t *f;
79+ const uint8_t *f;
5280
5381 if (quitEvent) {
5482 std::cout << " depthai_uvc_get_buffer(): Stopping capture due to quit event." << std::endl;
5583 return ;
5684 }
5785
58- auto frame = outputQueue->get <dai::ImgFrame>();
59- if (frame == nullptr ) {
60- std::cerr << " depthai_uvc_get_buffer(): No frame available." << std::endl;
61- return ;
86+ if (gStreamFormat == StreamFormat::MJPEG) {
87+ auto frame = outputQueue->get <dai::Buffer>();
88+ if (frame == nullptr || frame->getData ().empty ()) {
89+ std::cerr << " depthai_uvc_get_buffer(): No MJPEG frame available." << std::endl;
90+ return ;
91+ }
92+ f = frame->getData ().data ();
93+ frame_size = frame->getData ().size ();
94+ } else {
95+ auto frame = outputQueue->get <dai::ImgFrame>();
96+ if (frame == nullptr ) {
97+ std::cerr << " depthai_uvc_get_buffer(): No uncompressed frame available." << std::endl;
98+ return ;
99+ }
100+ if (frame->getType () != dai::ImgFrame::Type::NV12) {
101+ std::cerr << " depthai_uvc_get_buffer(): Unexpected frame type for uncompressed mode: " << static_cast <int >(frame->getType ()) << std::endl;
102+ return ;
103+ }
104+
105+ const auto width = frame->getWidth ();
106+ const auto height = frame->getHeight ();
107+ const auto stride = frame->getStride ();
108+ const auto uvPlaneOffset = frame->getPlaneStride (0 );
109+ const auto compactNv12FrameSize = (width * height * 3 ) / 2 ;
110+ const auto expectedSrcBytes = uvPlaneOffset + (stride * (height / 2 ));
111+ const auto & data = frame->getData ();
112+
113+ if (data.size () < expectedSrcBytes) {
114+ std::cerr << " depthai_uvc_get_buffer(): NV12 frame smaller than expected: have "
115+ << data.size () << " need " << expectedSrcBytes << std::endl;
116+ return ;
117+ }
118+
119+ gNv12Buffer .resize (compactNv12FrameSize);
120+ const auto * src = data.data ();
121+ auto * dst = gNv12Buffer .data ();
122+
123+ for (uint32_t y = 0 ; y < height; ++y) {
124+ memcpy (dst + (y * width), src + (y * stride), width);
125+ }
126+
127+ const auto * uvSrc = src + uvPlaneOffset;
128+ auto * uvDst = dst + (width * height);
129+ for (uint32_t y = 0 ; y < height / 2 ; ++y) {
130+ memcpy (uvDst + (y * width), uvSrc + (y * stride), width);
131+ }
132+
133+ f = gNv12Buffer .data ();
134+ frame_size = static_cast <unsigned int >(gNv12Buffer .size ());
62135 }
63136
64- f = frame->getData ().data ();
65- frame_size = frame->getData ().size ();
66-
67137 size = std::min (frame_size, buf->size );
68138 memcpy (buf->mem , f, size);
69139 buf->bytesused = size;
@@ -90,6 +160,8 @@ int main() {
90160 struct video_source * src;
91161 struct uvc_stream * stream;
92162
163+ gStreamFormat = parseStreamFormat ();
164+
93165 depthai_uvc_register_get_buffer (depthai_uvc_get_buffer);
94166
95167 fc = configfs_parse_uvc_function (" uvc.0" );
@@ -141,13 +213,20 @@ int main() {
141213 // Create nodes
142214 auto camRgb = pipeline.create <dai::node::Camera>()->build (socket);
143215 inputQueue = camRgb->inputControl .createInputQueue ();
144- auto output = camRgb->requestOutput (std::make_pair (1920 , 1080 ), dai::ImgFrame::Type::NV12);
145-
146- // Create video encoder node
147- auto encoded = pipeline.create <dai::node::VideoEncoder>();
148- encoded->setDefaultProfilePreset (30 , dai::VideoEncoderProperties::Profile::MJPEG);
149- output->link (encoded->input );
150- outputQueue = encoded->bitstream .createOutputQueue (1 , false );
216+ constexpr uint32_t width = 1920 ;
217+ constexpr uint32_t height = 1080 ;
218+ auto output = camRgb->requestOutput (std::make_pair (width, height), dai::ImgFrame::Type::NV12);
219+
220+ if (gStreamFormat == StreamFormat::MJPEG) {
221+ auto encoded = pipeline.create <dai::node::VideoEncoder>();
222+ encoded->setDefaultProfilePreset (30 , dai::VideoEncoderProperties::Profile::MJPEG);
223+ output->link (encoded->input );
224+ outputQueue = encoded->bitstream .createOutputQueue (1 , false );
225+ std::cout << " Configured UVC stream format: MJPEG" << std::endl;
226+ } else {
227+ outputQueue = output->createOutputQueue (1 , false );
228+ std::cout << " Configured UVC stream format: uncompressed NV12" << std::endl;
229+ }
151230
152231 // Start pipeline
153232 pipeline.start ();
0 commit comments