From 50abaa33a69894a1f5636eb95021b7714ce38fd6 Mon Sep 17 00:00:00 2001
From: Alain Volmat
Date: Tue, 21 Oct 2025 21:37:35 +0200
Subject: [PATCH 01/11] video: addition of video_transfer_buffer helper
 function

Addition of a helper function which takes care of dequeuing a buffer
from a source video device and queuing it into a sink video device.

Signed-off-by: Alain Volmat
---
 doc/releases/release-notes-4.3.rst |  1 +
 drivers/video/video_common.c       | 17 +++++++++++++++++
 include/zephyr/drivers/video.h     | 18 ++++++++++++++++++
 3 files changed, 36 insertions(+)

diff --git a/doc/releases/release-notes-4.3.rst b/doc/releases/release-notes-4.3.rst
index d8850a1be1972..2e462fe9886a9 100644
--- a/doc/releases/release-notes-4.3.rst
+++ b/doc/releases/release-notes-4.3.rst
@@ -324,6 +324,7 @@ New APIs and options

   * :c:member:`video_format.size` field
   * :c:func:`video_estimate_fmt_size`
+  * :c:func:`video_transfer_buffer`

 .. zephyr-keep-sorted-stop

diff --git a/drivers/video/video_common.c b/drivers/video/video_common.c
index b75fb3165a454..c8a0214b0dbb0 100644
--- a/drivers/video/video_common.c
+++ b/drivers/video/video_common.c
@@ -489,3 +489,20 @@ int video_set_compose_format(const struct device *dev, struct video_format *fmt)

 	return video_set_format(dev, fmt);
 }
+
+int video_transfer_buffer(const struct device *src, const struct device *sink,
+			  enum video_buf_type src_type, enum video_buf_type sink_type,
+			  k_timeout_t timeout)
+{
+	struct video_buffer *buf = &(struct video_buffer){.type = src_type};
+	int ret;
+
+	ret = video_dequeue(src, &buf, timeout);
+	if (ret < 0) {
+		return ret;
+	}
+
+	buf->type = sink_type;
+
+	return video_enqueue(sink, buf);
+}
diff --git a/include/zephyr/drivers/video.h b/include/zephyr/drivers/video.h
index 65e3fb3769c89..ded70a80ff891 100644
--- a/include/zephyr/drivers/video.h
+++ b/include/zephyr/drivers/video.h
@@ -1003,6 +1003,24 @@ int video_estimate_fmt_size(struct video_format *fmt);
  */
 int video_set_compose_format(const struct device *dev, struct video_format *fmt);

+/**
+ * @brief Transfer a buffer between 2 video devices
+ *
+ * Helper function which dequeues a buffer from a source device and enqueues it into a
+ * sink device, changing its buffer type between the two.
+ *
+ * @param src Video device from which the buffer is dequeued (source)
+ * @param sink Video device into which the buffer is queued (sink)
+ * @param src_type Video buffer type on the source device
+ * @param sink_type Video buffer type on the sink device
+ * @param timeout Timeout applied to the dequeue operation
+ *
+ * @return 0 on success, otherwise a negative errno code
+ */
+int video_transfer_buffer(const struct device *src, const struct device *sink,
+			  enum video_buf_type src_type, enum video_buf_type sink_type,
+			  k_timeout_t timeout);
+
 /**
  * @defgroup video_pixel_formats Video pixel formats
  * The '|' characters separate the pixels or logical blocks, and spaces separate the bytes.

From 4b2a48f4ef5a2017f9f5a5730c3dbd0cec18902d Mon Sep 17 00:00:00 2001
From: Alain Volmat
Date: Tue, 21 Oct 2025 21:42:57 +0200
Subject: [PATCH 02/11] samples: usb: uvc: use video_transfer_buffer helper
 function

Replace the video_dequeue / video_enqueue buffer exchange code with the
video_transfer_buffer helper function.
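For reference, in the sample's main loop each open-coded sequence of the
form below collapses into a single call (sketch only, error handling
trimmed):

	vbuf = &(struct video_buffer){.type = VIDEO_BUF_TYPE_OUTPUT};

	if (video_dequeue(video_dev, &vbuf, K_NO_WAIT) == 0) {
		vbuf->type = VIDEO_BUF_TYPE_INPUT;
		ret = video_enqueue(uvc_dev, vbuf);
	}

becomes:

	ret = video_transfer_buffer(video_dev, uvc_dev,
				    VIDEO_BUF_TYPE_OUTPUT, VIDEO_BUF_TYPE_INPUT,
				    K_NO_WAIT);

Note that the helper propagates the video_dequeue() return value, so
polling with K_NO_WAIT while no buffer is available now surfaces as
-EAGAIN and is filtered out by the caller.
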
Signed-off-by: Alain Volmat
---
 samples/subsys/usb/uvc/src/main.c | 40 ++++++++++++++--------------------------
 1 file changed, 14 insertions(+), 26 deletions(-)

diff --git a/samples/subsys/usb/uvc/src/main.c b/samples/subsys/usb/uvc/src/main.c
index 18b3e144779d3..42de271d4cf99 100644
--- a/samples/subsys/usb/uvc/src/main.c
+++ b/samples/subsys/usb/uvc/src/main.c
@@ -292,34 +292,22 @@ int main(void)
 			return ret;
 		}

-		vbuf = &(struct video_buffer){.type = VIDEO_BUF_TYPE_OUTPUT};
-
-		if (video_dequeue(video_dev, &vbuf, K_NO_WAIT) == 0) {
-			LOG_DBG("Dequeued %p from %s, enqueueing to %s",
-				(void *)vbuf, video_dev->name, uvc_dev->name);
-
-			vbuf->type = VIDEO_BUF_TYPE_INPUT;
-
-			ret = video_enqueue(uvc_dev, vbuf);
-			if (ret != 0) {
-				LOG_ERR("Could not enqueue video buffer to %s", uvc_dev->name);
-				return ret;
-			}
+		ret = video_transfer_buffer(video_dev, uvc_dev,
+					    VIDEO_BUF_TYPE_OUTPUT, VIDEO_BUF_TYPE_INPUT,
+					    K_NO_WAIT);
+		if (ret != 0 && ret != -EAGAIN) {
+			LOG_ERR("Failed to transfer from %s to %s",
+				video_dev->name, uvc_dev->name);
+			return ret;
 		}

-		vbuf = &(struct video_buffer){.type = VIDEO_BUF_TYPE_INPUT};
-
-		if (video_dequeue(uvc_dev, &vbuf, K_NO_WAIT) == 0) {
-			LOG_DBG("Dequeued %p from %s, enqueueing to %s",
-				(void *)vbuf, uvc_dev->name, video_dev->name);
-
-			vbuf->type = VIDEO_BUF_TYPE_OUTPUT;
-
-			ret = video_enqueue(video_dev, vbuf);
-			if (ret != 0) {
-				LOG_ERR("Could not enqueue video buffer to %s", video_dev->name);
-				return ret;
-			}
+		ret = video_transfer_buffer(uvc_dev, video_dev,
+					    VIDEO_BUF_TYPE_INPUT, VIDEO_BUF_TYPE_OUTPUT,
+					    K_NO_WAIT);
+		if (ret != 0 && ret != -EAGAIN) {
+			LOG_ERR("Failed to transfer from %s to %s",
+				uvc_dev->name, video_dev->name);
+			return ret;
 		}

 		k_poll_signal_reset(&sig);

From c9c928f4c7d4164e3a1932fe59ff4bd8da7c00ea Mon Sep 17 00:00:00 2001
From: Alain Volmat
Date: Wed, 22 Oct 2025 00:02:39 +0200
Subject: [PATCH 03/11] samples: usb: uvc: add indirection for UVC source
 device

In preparation for the introduction of video encoder support, add an
indirection for handling the buffers of the UVC source device.
Currently this is only video_dev; however, it can also be an encoder
device once an encoder is introduced between the video capture device
and the UVC device.
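As a sketch of where this indirection is heading, once the following
patch introduces an optional encoder the helper is expected to take
roughly this shape (videoenc_dev and app_has_videoenc() only appear in
that later patch):

	static const struct device *app_uvc_source_dev(void)
	{
		if (app_has_videoenc()) {
			return videoenc_dev;
		}

		return video_dev;
	}
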
Signed-off-by: Alain Volmat
---
 samples/subsys/usb/uvc/src/main.c | 19 +++++++++++++------
 1 file changed, 13 insertions(+), 6 deletions(-)

diff --git a/samples/subsys/usb/uvc/src/main.c b/samples/subsys/usb/uvc/src/main.c
index 42de271d4cf99..2644eea8f4aa2 100644
--- a/samples/subsys/usb/uvc/src/main.c
+++ b/samples/subsys/usb/uvc/src/main.c
@@ -23,6 +23,11 @@ const static struct device *const video_dev = DEVICE_DT_GET(DT_CHOSEN(zephyr_cam
 /* Format capabilities of video_dev, used everywhere through the sample */
 static struct video_caps video_caps = {.type = VIDEO_BUF_TYPE_OUTPUT};

+static const struct device *app_uvc_source_dev(void)
+{
+	return video_dev;
+}
+
 /* Pixel formats present in one of the UVC 1.5 standard */
 static bool app_is_supported_format(uint32_t pixfmt)
 {
@@ -46,6 +51,7 @@ static bool app_has_supported_format(void)

 static int app_add_format(uint32_t pixfmt, uint32_t width, uint32_t height, bool has_sup_fmts)
 {
+	const struct device *uvc_src_dev = app_uvc_source_dev();
 	struct video_format fmt = {
 		.pixelformat = pixfmt,
 		.width = width,
@@ -60,7 +66,7 @@ static int app_add_format(uint32_t pixfmt, uint32_t width, uint32_t height, bool
 	}

 	/* Set the format to get the size */
-	ret = video_set_compose_format(video_dev, &fmt);
+	ret = video_set_compose_format(uvc_src_dev, &fmt);
 	if (ret != 0) {
 		LOG_ERR("Could not set the format of %s to %s %ux%u (size %u)",
 			video_dev->name, VIDEO_FOURCC_TO_STR(fmt.pixelformat),
@@ -161,6 +167,7 @@ static int app_add_filtered_formats(void)

 int main(void)
 {
+	const struct device *uvc_src_dev = app_uvc_source_dev();
 	struct usbd_context *sample_usbd;
 	struct video_buffer *vbuf;
 	struct video_format fmt = {0};
@@ -182,7 +189,7 @@ int main(void)
 	}

 	/* Must be called before usb_enable() */
-	uvc_set_video_dev(uvc_dev, video_dev);
+	uvc_set_video_dev(uvc_dev, uvc_src_dev);

 	/* Must be called before usb_enable() */
 	ret = app_add_filtered_formats();
@@ -292,21 +299,21 @@ int main(void)
 			return ret;
 		}

-		ret = video_transfer_buffer(video_dev, uvc_dev,
+		ret = video_transfer_buffer(uvc_src_dev, uvc_dev,
 					    VIDEO_BUF_TYPE_OUTPUT, VIDEO_BUF_TYPE_INPUT,
 					    K_NO_WAIT);
 		if (ret != 0 && ret != -EAGAIN) {
 			LOG_ERR("Failed to transfer from %s to %s",
-				video_dev->name, uvc_dev->name);
+				uvc_src_dev->name, uvc_dev->name);
 			return ret;
 		}

-		ret = video_transfer_buffer(uvc_dev, video_dev,
+		ret = video_transfer_buffer(uvc_dev, uvc_src_dev,
 					    VIDEO_BUF_TYPE_INPUT, VIDEO_BUF_TYPE_OUTPUT,
 					    K_NO_WAIT);
 		if (ret != 0 && ret != -EAGAIN) {
 			LOG_ERR("Failed to transfer from %s to %s",
-				uvc_dev->name, video_dev->name);
+				uvc_dev->name, uvc_src_dev->name);
 			return ret;
 		}

From 8f1247e00d7e2c073252d58e36c60396a2668321 Mon Sep 17 00:00:00 2001
From: Alain Volmat
Date: Sat, 11 Oct 2025 22:12:27 +0200
Subject: [PATCH 04/11] samples: usb: uvc: add video encoder support

Allow creating a pipeline as follows:
  camera receiver -> encoder -> uvc

If the chosen zephyr,videoenc is available, the sample will pipe the
camera receiver to the encoder and then to the UVC device, instead of
piping the camera receiver directly to the UVC device.

The current implementation has several points hardcoded for the time
being:
1. the intermediate pixel format between the camera receiver and the
   encoder is set to NV12. This shouldn't be hardcoded and should
   instead be discovered as a format supported by both the encoder and
   the video capture device
2. it is assumed that the encoder device does NOT perform any
   resolution change and that the encoder output resolution is directly
   based on the camera receiver resolution.
Thanks to this, UVC exposed formats are thus the encoder output pixel format & camera receiver resolutions. Signed-off-by: Alain Volmat --- samples/subsys/usb/uvc/src/main.c | 230 ++++++++++++++++++++++++++++-- 1 file changed, 220 insertions(+), 10 deletions(-) diff --git a/samples/subsys/usb/uvc/src/main.c b/samples/subsys/usb/uvc/src/main.c index 2644eea8f4aa2..144b2bc8123c8 100644 --- a/samples/subsys/usb/uvc/src/main.c +++ b/samples/subsys/usb/uvc/src/main.c @@ -19,13 +19,37 @@ LOG_MODULE_REGISTER(uvc_sample, LOG_LEVEL_INF); const static struct device *const uvc_dev = DEVICE_DT_GET(DT_NODELABEL(uvc)); const static struct device *const video_dev = DEVICE_DT_GET(DT_CHOSEN(zephyr_camera)); +static const struct device *const videoenc_dev = DEVICE_DT_GET_OR_NULL(DT_CHOSEN(zephyr_videoenc)); /* Format capabilities of video_dev, used everywhere through the sample */ static struct video_caps video_caps = {.type = VIDEO_BUF_TYPE_OUTPUT}; +static struct video_caps videoenc_out_caps = {.type = VIDEO_BUF_TYPE_OUTPUT}; + +#if DT_HAS_CHOSEN(zephyr_videoenc) && CONFIG_VIDEO_BUFFER_POOL_NUM_MAX < 2 +#error CONFIG_VIDEO_BUFFER_POOL_NUM_MAX must be >=2 in order to use a zephyr,videoenc +#endif + +static bool app_has_videoenc(void) +{ + return (videoenc_dev != NULL); +} static const struct device *app_uvc_source_dev(void) { - return video_dev; + if (app_has_videoenc()) { + return videoenc_dev; + } else { + return video_dev; + } +} + +static struct video_caps *app_uvc_source_caps(void) +{ + if (app_has_videoenc()) { + return &videoenc_out_caps; + } else { + return &video_caps; + } } /* Pixel formats present in one of the UVC 1.5 standard */ @@ -38,7 +62,8 @@ static bool app_is_supported_format(uint32_t pixfmt) static bool app_has_supported_format(void) { - const struct video_format_cap *fmts = video_caps.format_caps; + const struct video_caps *const caps = app_uvc_source_caps(); + const struct video_format_cap *const fmts = caps->format_caps; for (int i = 0; fmts[i].pixelformat != 0; i++) { if (app_is_supported_format(fmts[i].pixelformat)) { @@ -107,21 +132,42 @@ static struct video_resolution video_common_fmts[] = { /* Submit to UVC only the formats expected to be working (enough memory for the size, etc.) */ static int app_add_filtered_formats(void) { + struct video_caps *uvc_src_caps = app_uvc_source_caps(); const bool has_sup_fmts = app_has_supported_format(); int ret; for (int i = 0; video_caps.format_caps[i].pixelformat != 0; i++) { + /* + * FIXME - in the meantime that auto-negotiation is supported, + * use the resolution list of the camera for NV12 pixelformat + */ const struct video_format_cap *vcap = &video_caps.format_caps[i]; + uint32_t pixelformat; int count = 1; - ret = app_add_format(vcap->pixelformat, vcap->width_min, vcap->height_min, + if (app_has_videoenc() && vcap->pixelformat != VIDEO_PIX_FMT_NV12) { + continue; + } + + if (app_has_videoenc()) { + /* + * FIXME - in the meantime that auto-negotiation is supported, + * when a video encoder is present, always use the first pixelformat. 
+ */ + pixelformat = uvc_src_caps->format_caps[0].pixelformat; + __ASSERT_NO_MSG(pixelformat != 0); + } else { + pixelformat = vcap->pixelformat; + } + + ret = app_add_format(pixelformat, vcap->width_min, vcap->height_min, has_sup_fmts); if (ret != 0) { return ret; } if (vcap->width_min != vcap->width_max || vcap->height_min != vcap->height_max) { - ret = app_add_format(vcap->pixelformat, vcap->width_max, vcap->height_max, + ret = app_add_format(pixelformat, vcap->width_max, vcap->height_max, has_sup_fmts); if (ret != 0) { return ret; @@ -152,7 +198,7 @@ static int app_add_filtered_formats(void) continue; } - ret = app_add_format(vcap->pixelformat, video_common_fmts[j].width, + ret = app_add_format(pixelformat, video_common_fmts[j].width, video_common_fmts[j].height, has_sup_fmts); if (ret != 0) { return ret; @@ -165,12 +211,112 @@ static int app_add_filtered_formats(void) return 0; } +static int app_init_videoenc(const struct device *const dev) +{ + int ret; + + if (!device_is_ready(dev)) { + LOG_ERR("video encoder %s failed to initialize", dev->name); + return -ENODEV; + } + + ret = video_get_caps(dev, &videoenc_out_caps); + if (ret != 0) { + LOG_ERR("Unable to retrieve video encoder output capabilities"); + return ret; + } + + /* + * FIXME - we should look carefully at both video capture output and encoder input + * caps to detect intermediate format. + * This is where we should define the format which is going to be used + * between the camera and the encoder input + */ + + return 0; +} + +static int app_configure_videoenc(const struct device *const dev, + uint32_t width, uint32_t height, + uint32_t sink_pixelformat, uint32_t source_pixelformat, + uint32_t nb_buffer) +{ + struct video_format fmt = { + .width = width, + .height = height, + }; + struct video_buffer *buf; + int ret; + + /* + * Need to configure both input & output of the encoder + * and allocate / enqueue buffers to the output of the + * encoder + */ + fmt.type = VIDEO_BUF_TYPE_INPUT; + fmt.pixelformat = sink_pixelformat; + ret = video_set_compose_format(dev, &fmt); + if (ret != 0) { + LOG_ERR("Could not set the %s encoder input format", dev->name); + return ret; + } + + fmt.type = VIDEO_BUF_TYPE_OUTPUT; + fmt.pixelformat = source_pixelformat; + ret = video_set_compose_format(dev, &fmt); + if (ret != 0) { + LOG_ERR("Could not set the %s encoder output format", dev->name); + return ret; + } + + LOG_INF("Preparing %u buffers of %u bytes for encoder output", nb_buffer, fmt.size); + + for (int i = 0; i < nb_buffer; i++) { + buf = video_buffer_aligned_alloc(fmt.size, CONFIG_VIDEO_BUFFER_POOL_ALIGN, + K_NO_WAIT); + if (buf == NULL) { + LOG_ERR("Could not allocate the encoder output buffer"); + return -ENOMEM; + } + + buf->type = VIDEO_BUF_TYPE_OUTPUT; + + ret = video_enqueue(dev, buf); + if (ret != 0) { + LOG_ERR("Could not enqueue video buffer"); + return ret; + } + } + + return 0; +} + +static int app_start_videoenc(const struct device *const dev) +{ + int ret; + + ret = video_stream_start(dev, VIDEO_BUF_TYPE_OUTPUT); + if (ret != 0) { + LOG_ERR("Failed to start %s output", dev->name); + return ret; + } + + ret = video_stream_start(dev, VIDEO_BUF_TYPE_INPUT); + if (ret != 0) { + LOG_ERR("Failed to start %s input", dev->name); + return ret; + } + + return 0; +} + int main(void) { const struct device *uvc_src_dev = app_uvc_source_dev(); struct usbd_context *sample_usbd; struct video_buffer *vbuf; struct video_format fmt = {0}; + uint32_t uvc_buf_count = CONFIG_VIDEO_BUFFER_POOL_NUM_MAX; struct video_frmival frmival = {0}; 
struct k_poll_signal sig; struct k_poll_event evt[1]; @@ -188,6 +334,16 @@ int main(void) return 0; } + if (app_has_videoenc()) { + ret = app_init_videoenc(videoenc_dev); + if (ret != 0) { + return ret; + } + + /* When using encoder, we split the VIDEO_BUFFER_POOL_NUM_MAX in 2 */ + uvc_buf_count /= 2; + } + /* Must be called before usb_enable() */ uvc_set_video_dev(uvc_dev, uvc_src_dev); @@ -234,7 +390,27 @@ int main(void) VIDEO_FOURCC_TO_STR(fmt.pixelformat), fmt.width, fmt.height, frmival.numerator, frmival.denominator); + if (app_has_videoenc()) { + /* + * FIXME - this is currently hardcoded in NV12 while it should be + * a format that has been validated for both video dev and encoder + */ + ret = app_configure_videoenc(videoenc_dev, fmt.width, fmt.height, + VIDEO_PIX_FMT_NV12, fmt.pixelformat, + CONFIG_VIDEO_BUFFER_POOL_NUM_MAX - uvc_buf_count); + if (ret != 0) { + return ret; + } + } + fmt.type = VIDEO_BUF_TYPE_OUTPUT; + if (app_has_videoenc()) { + /* + * FIXME - this is currently hardcoded in NV12 while it should be + * a format that has been validated for both video dev and encoder + */ + fmt.pixelformat = VIDEO_PIX_FMT_NV12; + } ret = video_set_compose_format(video_dev, &fmt); if (ret != 0) { @@ -243,14 +419,19 @@ int main(void) fmt.width, fmt.height, fmt.size); } + /* + * FIXME - shortcut here since current available encoders do not + * have frmival support for the time being so this is done directly + * at camera level + */ ret = video_set_frmival(video_dev, &frmival); if (ret != 0) { LOG_WRN("Could not set the framerate of %s", video_dev->name); } - LOG_INF("Preparing %u buffers of %u bytes", CONFIG_VIDEO_BUFFER_POOL_NUM_MAX, fmt.size); + LOG_INF("Preparing %u buffers of %u bytes", uvc_buf_count, fmt.size); - for (int i = 0; i < CONFIG_VIDEO_BUFFER_POOL_NUM_MAX; i++) { + for (int i = 0; i < uvc_buf_count; i++) { vbuf = video_buffer_aligned_alloc(fmt.size, CONFIG_VIDEO_BUFFER_POOL_ALIGN, K_NO_WAIT); if (vbuf == NULL) { @@ -267,14 +448,14 @@ int main(void) } } - LOG_DBG("Preparing signaling for %s input/output", video_dev->name); + LOG_DBG("Preparing signaling for %s input/output", uvc_src_dev->name); k_poll_signal_init(&sig); k_poll_event_init(&evt[0], K_POLL_TYPE_SIGNAL, K_POLL_MODE_NOTIFY_ONLY, &sig); - ret = video_set_signal(video_dev, &sig); + ret = video_set_signal(uvc_src_dev, &sig); if (ret != 0) { - LOG_WRN("Failed to setup the signal on %s output endpoint", video_dev->name); + LOG_WRN("Failed to setup the signal on %s output endpoint", uvc_src_dev->name); timeout = K_MSEC(1); } @@ -286,6 +467,13 @@ int main(void) LOG_INF("Starting the video transfer"); + if (app_has_videoenc()) { + ret = app_start_videoenc(videoenc_dev); + if (ret != 0) { + return ret; + } + } + ret = video_stream_start(video_dev, VIDEO_BUF_TYPE_OUTPUT); if (ret != 0) { LOG_ERR("Failed to start %s", video_dev->name); @@ -299,6 +487,17 @@ int main(void) return ret; } + if (app_has_videoenc()) { + ret = video_transfer_buffer(video_dev, uvc_src_dev, + VIDEO_BUF_TYPE_OUTPUT, VIDEO_BUF_TYPE_INPUT, + K_NO_WAIT); + if (ret != 0 && ret != -EAGAIN) { + LOG_ERR("Failed to transfer from %s to %s", + video_dev->name, uvc_src_dev->name); + return ret; + } + } + ret = video_transfer_buffer(uvc_src_dev, uvc_dev, VIDEO_BUF_TYPE_OUTPUT, VIDEO_BUF_TYPE_INPUT, K_NO_WAIT); @@ -308,6 +507,17 @@ int main(void) return ret; } + if (app_has_videoenc()) { + ret = video_transfer_buffer(uvc_src_dev, video_dev, + VIDEO_BUF_TYPE_INPUT, VIDEO_BUF_TYPE_OUTPUT, + K_NO_WAIT); + if (ret != 0 && ret != -EAGAIN) { + LOG_ERR("Failed to 
transfer from %s to %s", + uvc_src_dev->name, video_dev->name); + return ret; + } + } + ret = video_transfer_buffer(uvc_dev, uvc_src_dev, VIDEO_BUF_TYPE_INPUT, VIDEO_BUF_TYPE_OUTPUT, K_NO_WAIT); From c3ab4c9bad2e6ae30867bc4676e691ce662ed19c Mon Sep 17 00:00:00 2001 From: Alain Volmat Date: Wed, 22 Oct 2025 12:23:12 +0200 Subject: [PATCH 05/11] video: add H264 estimate in video_estimate_fmt_size Add rough estimate of a worth case H264 output size. The video_estimate_fmt_size would need more information such as quality, profile in order to give a better estimate for each formats so for the time being just stick to 16bpp based size, same as for JPEG. Signed-off-by: Alain Volmat --- drivers/video/video_common.c | 1 + 1 file changed, 1 insertion(+) diff --git a/drivers/video/video_common.c b/drivers/video/video_common.c index c8a0214b0dbb0..b5f6e04b4cab6 100644 --- a/drivers/video/video_common.c +++ b/drivers/video/video_common.c @@ -452,6 +452,7 @@ int video_estimate_fmt_size(struct video_format *fmt) switch (fmt->pixelformat) { case VIDEO_PIX_FMT_JPEG: + case VIDEO_PIX_FMT_H264: /* Rough estimate for the worst case (quality = 100) */ fmt->pitch = 0; fmt->size = fmt->width * fmt->height * 2; From 0f2ff6efe84d8ab362573f4c48a9cac11f7f863c Mon Sep 17 00:00:00 2001 From: Alain Volmat Date: Wed, 22 Oct 2025 11:58:36 +0200 Subject: [PATCH 06/11] usb: uvc: use struct uvc_frame_descriptor in most of function parameters This commit prepares introduction of the UVC Frame Based support by using the struct uvc_frame_descriptor as parameter of most of the UVC functions. struct uvc_frame_descriptor contains the common fields for all supported frame type and then depending on the DescriptorSubtype the pointer is casted in the correct struct definition. Signed-off-by: Alain Volmat --- subsys/usb/device_next/class/usbd_uvc.c | 97 ++++++++++++++++--------- subsys/usb/device_next/class/usbd_uvc.h | 5 +- 2 files changed, 64 insertions(+), 38 deletions(-) diff --git a/subsys/usb/device_next/class/usbd_uvc.c b/subsys/usb/device_next/class/usbd_uvc.c index 35d575a3fd13d..b5994d5fc064b 100644 --- a/subsys/usb/device_next/class/usbd_uvc.c +++ b/subsys/usb/device_next/class/usbd_uvc.c @@ -384,7 +384,7 @@ static const struct uvc_control_map uvc_control_map_xu[] = { /* Get the format and frame descriptors selected for the given VideoStreaming interface. 
*/ static void uvc_get_vs_fmtfrm_desc(const struct device *dev, struct uvc_format_descriptor **const format_desc, - struct uvc_frame_discrete_descriptor **const frame_desc) + struct uvc_frame_descriptor **const frame_desc) { const struct uvc_config *cfg = dev->config; struct uvc_data *data = dev->data; @@ -407,7 +407,7 @@ static void uvc_get_vs_fmtfrm_desc(const struct device *dev, *frame_desc = NULL; for (i++; i < ARRAY_SIZE(cfg->desc->if1_fmts); i++) { - struct uvc_frame_discrete_descriptor *desc = &cfg->desc->if1_fmts[i].frm_disc; + struct uvc_frame_descriptor *desc = &cfg->desc->if1_fmts[i].frm; LOG_DBG("Walking through frame %u, subtype %u, index %u, ptr %p", i, desc->bDescriptorSubtype, desc->bFrameIndex, desc); @@ -537,8 +537,8 @@ static int uvc_get_vs_probe_frame_interval(const struct device *dev, struct uvc_ { struct uvc_data *data = dev->data; struct uvc_format_descriptor *format_desc; - struct uvc_frame_discrete_descriptor *frame_desc; - int max; + struct uvc_frame_descriptor *frame_desc; + int min, max, max_id; uvc_get_vs_fmtfrm_desc(dev, &format_desc, &frame_desc); if (format_desc == NULL || frame_desc == NULL) { @@ -546,13 +546,24 @@ static int uvc_get_vs_probe_frame_interval(const struct device *dev, struct uvc_ return -EINVAL; } + if (frame_desc->bDescriptorSubtype == UVC_VS_FRAME_UNCOMPRESSED || + frame_desc->bDescriptorSubtype == UVC_VS_FRAME_MJPEG) { + struct uvc_frame_discrete_descriptor *desc = (void *)frame_desc; + + min = desc->dwFrameInterval[0]; + max_id = desc->bFrameIntervalType - 1; + max = desc->dwFrameInterval[max_id]; + } else { + LOG_DBG("Invalid frame type"); + return -EINVAL; + } + switch (request) { case UVC_GET_MIN: - probe->dwFrameInterval = sys_cpu_to_le32(frame_desc->dwFrameInterval[0]); + probe->dwFrameInterval = sys_cpu_to_le32(min); break; case UVC_GET_MAX: - max = frame_desc->bFrameIntervalType - 1; - probe->dwFrameInterval = sys_cpu_to_le32(frame_desc->dwFrameInterval[max]); + probe->dwFrameInterval = sys_cpu_to_le32(max); break; case UVC_GET_RES: probe->dwFrameInterval = sys_cpu_to_le32(1); @@ -599,7 +610,7 @@ static int uvc_get_vs_format_from_desc(const struct device *dev, struct video_fo { struct uvc_data *data = dev->data; struct uvc_format_descriptor *format_desc = NULL; - struct uvc_frame_discrete_descriptor *frame_desc; + struct uvc_frame_descriptor *frame_desc; /* Update the format based on the probe message from the host */ uvc_get_vs_fmtfrm_desc(dev, &format_desc, &frame_desc); @@ -1449,7 +1460,7 @@ static int uvc_compare_frmival_desc(const void *const a, const void *const b) return ib - ia; } -static void uvc_set_vs_bitrate_range(struct uvc_frame_discrete_descriptor *const desc, +static void uvc_set_vs_bitrate_range(struct uvc_frame_descriptor *const desc, const uint64_t frmival_nsec, const struct video_format *const fmt) { @@ -1475,21 +1486,28 @@ static void uvc_set_vs_bitrate_range(struct uvc_frame_discrete_descriptor *const desc->dwMaxBitRate = sys_cpu_to_le32(bitrate_max); } -static int uvc_add_vs_frame_interval(struct uvc_frame_discrete_descriptor *const desc, +static int uvc_add_vs_frame_interval(struct uvc_frame_descriptor *const desc, const struct video_frmival *const frmival, const struct video_format *const fmt) { - int i = desc->bFrameIntervalType; + if (desc->bDescriptorSubtype == UVC_VS_FRAME_UNCOMPRESSED || + desc->bDescriptorSubtype == UVC_VS_FRAME_MJPEG) { + struct uvc_frame_discrete_descriptor *frame_desc = (void *)desc; - if (i >= CONFIG_USBD_VIDEO_MAX_FRMIVAL) { - LOG_WRN("Out of descriptors, raise 
CONFIG_USBD_VIDEO_MAX_FRMIVAL above %u", - CONFIG_USBD_VIDEO_MAX_FRMIVAL); - return -ENOMEM; - } + if (frame_desc->bFrameIntervalType >= CONFIG_USBD_VIDEO_MAX_FRMIVAL) { + LOG_WRN("Out of descriptors, raise CONFIG_USBD_VIDEO_MAX_FRMIVAL above %u", + CONFIG_USBD_VIDEO_MAX_FRMIVAL); + return -ENOMEM; + } - desc->dwFrameInterval[i] = sys_cpu_to_le32(video_frmival_nsec(frmival) / 100); - desc->bFrameIntervalType++; - desc->bLength += sizeof(uint32_t); + frame_desc->dwFrameInterval[frame_desc->bFrameIntervalType] = + sys_cpu_to_le32(video_frmival_nsec(frmival) / 100); + frame_desc->bFrameIntervalType++; + frame_desc->bLength += sizeof(uint32_t); + } else { + LOG_DBG("Invalid frame type"); + return -EINVAL; + } uvc_set_vs_bitrate_range(desc, video_frmival_nsec(frmival), fmt); @@ -1502,7 +1520,7 @@ static int uvc_add_vs_frame_desc(const struct device *dev, { const struct uvc_config *cfg = dev->config; struct uvc_data *data = dev->data; - struct uvc_frame_discrete_descriptor *desc; + struct uvc_frame_descriptor *desc; struct video_frmival_enum fie = {.format = fmt}; int ret; @@ -1512,17 +1530,17 @@ static int uvc_add_vs_frame_desc(const struct device *dev, LOG_INF("Adding frame descriptor #%u for %ux%u", format_desc->bNumFrameDescriptors + 1, fmt->width, fmt->height); - desc = &uvc_new_fmt_desc(dev)->frm_disc; + desc = &uvc_new_fmt_desc(dev)->frm; if (desc == NULL) { return -ENOMEM; } - desc->bLength = sizeof(*desc) - CONFIG_USBD_VIDEO_MAX_FRMIVAL * sizeof(uint32_t); + desc->bLength = sizeof(struct uvc_frame_discrete_descriptor) - + CONFIG_USBD_VIDEO_MAX_FRMIVAL * sizeof(uint32_t); desc->bDescriptorType = USB_DESC_CS_INTERFACE; desc->bFrameIndex = format_desc->bNumFrameDescriptors + 1; desc->wWidth = sys_cpu_to_le16(fmt->width); desc->wHeight = sys_cpu_to_le16(fmt->height); - desc->dwMaxVideoFrameBufferSize = sys_cpu_to_le32(fmt->size); desc->bDescriptorSubtype = (format_desc->bDescriptorSubtype == UVC_VS_FORMAT_UNCOMPRESSED) ? 
UVC_VS_FRAME_UNCOMPRESSED : UVC_VS_FRAME_MJPEG; desc->dwMinBitRate = sys_cpu_to_le32(UINT32_MAX); @@ -1560,21 +1578,32 @@ static int uvc_add_vs_frame_desc(const struct device *dev, fie.index++; } - /* If no frame intrval supported, default to 30 FPS */ - if (desc->bFrameIntervalType == 0) { - struct video_frmival frmival = {.numerator = 1, .denominator = 30}; + if (desc->bDescriptorSubtype == UVC_VS_FRAME_UNCOMPRESSED || + desc->bDescriptorSubtype == UVC_VS_FRAME_MJPEG) { + struct uvc_frame_discrete_descriptor *frame_desc = (void *)desc; - ret = uvc_add_vs_frame_interval(desc, &frmival, fmt); - if (ret != 0) { - return ret; + frame_desc->dwMaxVideoFrameBufferSize = sys_cpu_to_le32(fmt->size); + + /* If no frame intrval supported, default to 30 FPS */ + if (frame_desc->bFrameIntervalType == 0) { + struct video_frmival frmival = {.numerator = 1, .denominator = 30}; + + ret = uvc_add_vs_frame_interval(desc, &frmival, fmt); + if (ret != 0) { + return ret; + } } - } - /* UVC requires the frame intervals to be sorted, but not Zephyr */ - qsort(desc->dwFrameInterval, desc->bFrameIntervalType, - sizeof(*desc->dwFrameInterval), uvc_compare_frmival_desc); + /* UVC requires the frame intervals to be sorted, but not Zephyr */ + qsort(frame_desc->dwFrameInterval, frame_desc->bFrameIntervalType, + sizeof(*frame_discrete_desc->dwFrameInterval), uvc_compare_frmival_desc); + + frame_desc->dwDefaultFrameInterval = frame_desc->dwFrameInterval[0]; + } else { + LOG_DBG("Invalid frame type"); + return -EINVAL; + } - desc->dwDefaultFrameInterval = desc->dwFrameInterval[0]; format_desc->bNumFrameDescriptors++; cfg->desc->if1_hdr.wTotalLength += desc->bLength; diff --git a/subsys/usb/device_next/class/usbd_uvc.h b/subsys/usb/device_next/class/usbd_uvc.h index 3364f83258d11..ffc31490abdf6 100644 --- a/subsys/usb/device_next/class/usbd_uvc.h +++ b/subsys/usb/device_next/class/usbd_uvc.h @@ -385,10 +385,7 @@ struct uvc_frame_descriptor { uint16_t wHeight; uint32_t dwMinBitRate; uint32_t dwMaxBitRate; - uint32_t dwMaxVideoFrameBufferSize; - uint32_t dwDefaultFrameInterval; - uint8_t bFrameIntervalType; - /* Other fields depending on bFrameIntervalType value */ + /* Other fields depending on bDescriptorSubtype value */ } __packed; struct uvc_frame_continuous_descriptor { From 38c8fc30fd9f4647cedac9c8b9616ef25d42e004 Mon Sep 17 00:00:00 2001 From: Alain Volmat Date: Sat, 11 Oct 2025 22:51:07 +0200 Subject: [PATCH 07/11] usb: usbd_uvc: add frame_based support (currently only H264) The frame_based descriptors differ from the frame descriptors in that there is no dwMaxVideoFrameBufferSize field. In order to do that, add a new uvc_frame_based_discrete_descriptor structure to be used to fill in proper information into the frame descriptor. In addition to that, a new format descriptor is also added for frame based transfer. 
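The resulting dispatch follows the same pattern as the
UNCOMPRESSED/MJPEG handling from the previous commit, roughly (sketch):

	if (desc->bDescriptorSubtype == UVC_VS_FRAME_UNCOMPRESSED ||
	    desc->bDescriptorSubtype == UVC_VS_FRAME_MJPEG) {
		struct uvc_frame_discrete_descriptor *frame_desc = (void *)desc;

		/* discrete layout, includes dwMaxVideoFrameBufferSize */
	} else if (desc->bDescriptorSubtype == UVC_VS_FRAME_FRAME_BASED) {
		struct uvc_frame_based_discrete_descriptor *frame_desc = (void *)desc;

		/* frame-based layout, no dwMaxVideoFrameBufferSize */
	}
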
Signed-off-by: Alain Volmat --- samples/subsys/usb/uvc/src/main.c | 3 +- subsys/usb/device_next/class/usbd_uvc.c | 103 +++++++++++++++++++++--- subsys/usb/device_next/class/usbd_uvc.h | 48 +++++++++++ 3 files changed, 140 insertions(+), 14 deletions(-) diff --git a/samples/subsys/usb/uvc/src/main.c b/samples/subsys/usb/uvc/src/main.c index 144b2bc8123c8..c4ac9d278c4de 100644 --- a/samples/subsys/usb/uvc/src/main.c +++ b/samples/subsys/usb/uvc/src/main.c @@ -57,7 +57,8 @@ static bool app_is_supported_format(uint32_t pixfmt) { return pixfmt == VIDEO_PIX_FMT_JPEG || pixfmt == VIDEO_PIX_FMT_YUYV || - pixfmt == VIDEO_PIX_FMT_NV12; + pixfmt == VIDEO_PIX_FMT_NV12 || + pixfmt == VIDEO_PIX_FMT_H264; } static bool app_has_supported_format(void) diff --git a/subsys/usb/device_next/class/usbd_uvc.c b/subsys/usb/device_next/class/usbd_uvc.c index b5994d5fc064b..bd59e0b4c6d6b 100644 --- a/subsys/usb/device_next/class/usbd_uvc.c +++ b/subsys/usb/device_next/class/usbd_uvc.c @@ -72,6 +72,7 @@ union uvc_fmt_desc { struct uvc_format_descriptor fmt; struct uvc_format_uncomp_descriptor fmt_uncomp; struct uvc_format_mjpeg_descriptor fmt_mjpeg; + struct uvc_format_frame_based_descriptor fmt_frame_based; struct uvc_frame_descriptor frm; struct uvc_frame_continuous_descriptor frm_cont; struct uvc_frame_discrete_descriptor frm_disc; @@ -398,7 +399,8 @@ static void uvc_get_vs_fmtfrm_desc(const struct device *dev, i, desc->bDescriptorSubtype, desc->bFormatIndex, desc); if ((desc->bDescriptorSubtype == UVC_VS_FORMAT_UNCOMPRESSED || - desc->bDescriptorSubtype == UVC_VS_FORMAT_MJPEG) && + desc->bDescriptorSubtype == UVC_VS_FORMAT_MJPEG || + desc->bDescriptorSubtype == UVC_VS_FORMAT_FRAME_BASED) && desc->bFormatIndex == data->format_id) { *format_desc = desc; break; @@ -413,7 +415,8 @@ static void uvc_get_vs_fmtfrm_desc(const struct device *dev, i, desc->bDescriptorSubtype, desc->bFrameIndex, desc); if (desc->bDescriptorSubtype != UVC_VS_FRAME_UNCOMPRESSED && - desc->bDescriptorSubtype != UVC_VS_FRAME_MJPEG) { + desc->bDescriptorSubtype != UVC_VS_FRAME_MJPEG && + desc->bDescriptorSubtype != UVC_VS_FRAME_FRAME_BASED) { break; } @@ -461,7 +464,8 @@ static int uvc_get_vs_probe_format_index(const struct device *dev, struct uvc_pr struct uvc_format_descriptor *desc = &cfg->desc->if1_fmts[i].fmt; max += desc->bDescriptorSubtype == UVC_VS_FORMAT_UNCOMPRESSED || - desc->bDescriptorSubtype == UVC_VS_FORMAT_MJPEG; + desc->bDescriptorSubtype == UVC_VS_FORMAT_MJPEG || + desc->bDescriptorSubtype == UVC_VS_FORMAT_FRAME_BASED; } switch (request) { @@ -496,8 +500,9 @@ static int uvc_get_vs_probe_frame_index(const struct device *dev, struct uvc_pro struct uvc_format_descriptor *desc = &cfg->desc->if1_fmts[i].fmt; if ((desc->bDescriptorSubtype == UVC_VS_FORMAT_UNCOMPRESSED || - desc->bDescriptorSubtype == UVC_VS_FORMAT_MJPEG) && - desc->bFormatIndex == data->format_id) { + desc->bDescriptorSubtype == UVC_VS_FORMAT_MJPEG || + desc->bDescriptorSubtype == UVC_VS_FORMAT_FRAME_BASED) && + desc->bFormatIndex == data->format_id) { break; } } @@ -507,7 +512,8 @@ static int uvc_get_vs_probe_frame_index(const struct device *dev, struct uvc_pro struct uvc_frame_discrete_descriptor *desc = &cfg->desc->if1_fmts[i].frm_disc; if (desc->bDescriptorSubtype != UVC_VS_FRAME_UNCOMPRESSED && - desc->bDescriptorSubtype != UVC_VS_FRAME_MJPEG) { + desc->bDescriptorSubtype != UVC_VS_FRAME_MJPEG && + desc->bDescriptorSubtype != UVC_VS_FRAME_FRAME_BASED) { break; } max++; @@ -550,6 +556,12 @@ static int uvc_get_vs_probe_frame_interval(const struct device *dev, 
struct uvc_ frame_desc->bDescriptorSubtype == UVC_VS_FRAME_MJPEG) { struct uvc_frame_discrete_descriptor *desc = (void *)frame_desc; + min = desc->dwFrameInterval[0]; + max_id = desc->bFrameIntervalType - 1; + max = desc->dwFrameInterval[max_id]; + } else if (frame_desc->bDescriptorSubtype == UVC_VS_FRAME_FRAME_BASED) { + struct uvc_frame_based_discrete_descriptor *desc = (void *)frame_desc; + min = desc->dwFrameInterval[0]; max_id = desc->bFrameIntervalType - 1; max = desc->dwFrameInterval[max_id]; @@ -626,14 +638,22 @@ static int uvc_get_vs_format_from_desc(const struct device *dev, struct video_fo LOG_DBG("Found descriptor for format %u, frame %u, MJPEG", format_desc->bFormatIndex, frame_desc->bFrameIndex); + } else if (format_desc->bDescriptorSubtype == UVC_VS_FORMAT_FRAME_BASED) { + struct uvc_format_frame_based_descriptor *desc = (void *)format_desc; + + fmt->pixelformat = uvc_guid_to_fourcc(desc->guidFormat); + + LOG_DBG("Found descriptor for format %u, frame %u, pixfmt %s", + desc->bFormatIndex, frame_desc->bFrameIndex, + VIDEO_FOURCC_TO_STR(fmt->pixelformat)); } else { - struct uvc_format_uncomp_descriptor *format_uncomp_desc = (void *)format_desc; + struct uvc_format_uncomp_descriptor *desc = (void *)format_desc; - fmt->pixelformat = uvc_guid_to_fourcc(format_uncomp_desc->guidFormat); + fmt->pixelformat = uvc_guid_to_fourcc(desc->guidFormat); LOG_DBG("Found descriptor for format %u, frame %u, GUID '%.4s', pixfmt %04x", - format_uncomp_desc->bFormatIndex, frame_desc->bFrameIndex, - format_uncomp_desc->guidFormat, fmt->pixelformat); + desc->bFormatIndex, frame_desc->bFrameIndex, + desc->guidFormat, fmt->pixelformat); } /* Fill the format according to what the host selected */ @@ -1421,6 +1441,27 @@ static int uvc_add_vs_format_desc(const struct device *dev, cfg->desc->if1_hdr.bNumFormats++; cfg->desc->if1_hdr.wTotalLength += desc->bLength; *format_desc = (struct uvc_format_descriptor *)desc; + } else if (fourcc == VIDEO_PIX_FMT_H264) { + struct uvc_format_frame_based_descriptor *desc; + + LOG_INF("Adding format descriptor #%u for H264", + cfg->desc->if1_hdr.bNumFormats + 1); + + desc = &uvc_new_fmt_desc(dev)->fmt_frame_based; + if (desc == NULL) { + return -ENOMEM; + } + + desc->bDescriptorType = USB_DESC_CS_INTERFACE; + desc->bFormatIndex = cfg->desc->if1_hdr.bNumFormats + 1; + desc->bLength = sizeof(*desc); + desc->bDescriptorSubtype = UVC_VS_FORMAT_FRAME_BASED; + uvc_fourcc_to_guid(desc->guidFormat, fourcc); + desc->bDefaultFrameIndex = 1; + desc->bVariableSize = 1; + cfg->desc->if1_hdr.bNumFormats++; + cfg->desc->if1_hdr.wTotalLength += desc->bLength; + *format_desc = (struct uvc_format_descriptor *)desc; } else { struct uvc_format_uncomp_descriptor *desc; @@ -1500,6 +1541,19 @@ static int uvc_add_vs_frame_interval(struct uvc_frame_descriptor *const desc, return -ENOMEM; } + frame_desc->dwFrameInterval[frame_desc->bFrameIntervalType] = + sys_cpu_to_le32(video_frmival_nsec(frmival) / 100); + frame_desc->bFrameIntervalType++; + frame_desc->bLength += sizeof(uint32_t); + } else if (desc->bDescriptorSubtype == UVC_VS_FRAME_FRAME_BASED) { + struct uvc_frame_based_discrete_descriptor *frame_desc = (void *)desc; + + if (frame_desc->bFrameIntervalType >= CONFIG_USBD_VIDEO_MAX_FRMIVAL) { + LOG_WRN("Out of descriptors, raise CONFIG_USBD_VIDEO_MAX_FRMIVAL above %u", + CONFIG_USBD_VIDEO_MAX_FRMIVAL); + return -ENOMEM; + } + frame_desc->dwFrameInterval[frame_desc->bFrameIntervalType] = sys_cpu_to_le32(video_frmival_nsec(frmival) / 100); frame_desc->bFrameIntervalType++; @@ -1541,8 +1595,13 
@@ static int uvc_add_vs_frame_desc(const struct device *dev, desc->bFrameIndex = format_desc->bNumFrameDescriptors + 1; desc->wWidth = sys_cpu_to_le16(fmt->width); desc->wHeight = sys_cpu_to_le16(fmt->height); - desc->bDescriptorSubtype = (format_desc->bDescriptorSubtype == UVC_VS_FORMAT_UNCOMPRESSED) - ? UVC_VS_FRAME_UNCOMPRESSED : UVC_VS_FRAME_MJPEG; + if (format_desc->bDescriptorSubtype == UVC_VS_FORMAT_UNCOMPRESSED) { + desc->bDescriptorSubtype = UVC_VS_FRAME_UNCOMPRESSED; + } else if (format_desc->bDescriptorSubtype == UVC_VS_FORMAT_MJPEG) { + desc->bDescriptorSubtype = UVC_VS_FRAME_MJPEG; + } else if (format_desc->bDescriptorSubtype == UVC_VS_FORMAT_FRAME_BASED) { + desc->bDescriptorSubtype = UVC_VS_FRAME_FRAME_BASED; + } desc->dwMinBitRate = sys_cpu_to_le32(UINT32_MAX); desc->dwMaxBitRate = sys_cpu_to_le32(0); @@ -1596,7 +1655,25 @@ static int uvc_add_vs_frame_desc(const struct device *dev, /* UVC requires the frame intervals to be sorted, but not Zephyr */ qsort(frame_desc->dwFrameInterval, frame_desc->bFrameIntervalType, - sizeof(*frame_discrete_desc->dwFrameInterval), uvc_compare_frmival_desc); + sizeof(*frame_desc->dwFrameInterval), uvc_compare_frmival_desc); + + frame_desc->dwDefaultFrameInterval = frame_desc->dwFrameInterval[0]; + } else if (desc->bDescriptorSubtype == UVC_VS_FRAME_FRAME_BASED) { + struct uvc_frame_based_discrete_descriptor *frame_desc = (void *)desc; + + /* If no frame intrval supported, default to 30 FPS */ + if (frame_desc->bFrameIntervalType == 0) { + struct video_frmival frmival = {.numerator = 1, .denominator = 30}; + + ret = uvc_add_vs_frame_interval(desc, &frmival, fmt); + if (ret != 0) { + return ret; + } + } + + /* UVC requires the frame intervals to be sorted, but not Zephyr */ + qsort(frame_desc->dwFrameInterval, frame_desc->bFrameIntervalType, + sizeof(*frame_desc->dwFrameInterval), uvc_compare_frmival_desc); frame_desc->dwDefaultFrameInterval = frame_desc->dwFrameInterval[0]; } else { diff --git a/subsys/usb/device_next/class/usbd_uvc.h b/subsys/usb/device_next/class/usbd_uvc.h index ffc31490abdf6..e9e792a60b302 100644 --- a/subsys/usb/device_next/class/usbd_uvc.h +++ b/subsys/usb/device_next/class/usbd_uvc.h @@ -375,6 +375,22 @@ struct uvc_format_mjpeg_descriptor { uint8_t bCopyProtect; } __packed; +struct uvc_format_frame_based_descriptor { + uint8_t bLength; + uint8_t bDescriptorType; + uint8_t bDescriptorSubtype; + uint8_t bFormatIndex; + uint8_t bNumFrameDescriptors; + uint8_t guidFormat[16]; + uint8_t bBitsPerPixel; + uint8_t bDefaultFrameIndex; + uint8_t bAspectRatioX; + uint8_t bAspectRatioY; + uint8_t bmInterlaceFlags; + uint8_t bCopyProtect; + uint8_t bVariableSize; +} __packed; + struct uvc_frame_descriptor { uint8_t bLength; uint8_t bDescriptorType; @@ -422,6 +438,38 @@ struct uvc_frame_discrete_descriptor { uint32_t dwFrameInterval[CONFIG_USBD_VIDEO_MAX_FRMIVAL]; } __packed; +struct uvc_frame_based_continuous_descriptor { + uint8_t bLength; + uint8_t bDescriptorType; + uint8_t bDescriptorSubtype; + uint8_t bFrameIndex; + uint8_t bmCapabilities; + uint16_t wWidth; + uint16_t wHeight; + uint32_t dwMinBitRate; + uint32_t dwMaxBitRate; + uint32_t dwDefaultFrameInterval; + uint8_t bFrameIntervalType; + uint32_t dwMinFrameInterval; + uint32_t dwMaxFrameInterval; + uint32_t dwFrameIntervalStep; +} __packed; + +struct uvc_frame_based_discrete_descriptor { + uint8_t bLength; + uint8_t bDescriptorType; + uint8_t bDescriptorSubtype; + uint8_t bFrameIndex; + uint8_t bmCapabilities; + uint16_t wWidth; + uint16_t wHeight; + uint32_t dwMinBitRate; 
+ uint32_t dwMaxBitRate; + uint32_t dwDefaultFrameInterval; + uint8_t bFrameIntervalType; + uint32_t dwFrameInterval[CONFIG_USBD_VIDEO_MAX_FRMIVAL]; +} __packed; + struct uvc_color_descriptor { uint8_t bLength; uint8_t bDescriptorType; From ba131339861f7140ecf35a87fe49b74de909525c Mon Sep 17 00:00:00 2001 From: Alain Volmat Date: Sun, 19 Oct 2025 16:43:50 +0200 Subject: [PATCH 08/11] samples: video: tcpserversink: check video_enqueue/dequeue return values Add proper check of the return value of video_enqueue / video_dequeue. Signed-off-by: Alain Volmat --- .../drivers/video/tcpserversink/src/main.c | 32 +++++++++++++++---- 1 file changed, 26 insertions(+), 6 deletions(-) diff --git a/samples/drivers/video/tcpserversink/src/main.c b/samples/drivers/video/tcpserversink/src/main.c index d54bc6c528afd..26315b9235897 100644 --- a/samples/drivers/video/tcpserversink/src/main.c +++ b/samples/drivers/video/tcpserversink/src/main.c @@ -131,7 +131,10 @@ int configure_encoder(void) } buffer->type = VIDEO_BUF_TYPE_OUTPUT; - video_enqueue(encoder_dev, buffer); + if (video_enqueue(encoder_dev, buffer)) { + LOG_ERR("Unable to enqueue encoder output buf"); + return -1; + } /* Set input format */ if (strcmp(CONFIG_VIDEO_PIXEL_FORMAT, "")) { @@ -165,12 +168,16 @@ int encode_frame(struct video_buffer *in, struct video_buffer **out) int ret; in->type = VIDEO_BUF_TYPE_INPUT; - video_enqueue(encoder_dev, in); + ret = video_enqueue(encoder_dev, in); + if (ret) { + LOG_ERR("Unable to enqueue encoder input buf"); + return ret; + } (*out)->type = VIDEO_BUF_TYPE_OUTPUT; ret = video_dequeue(encoder_dev, out, K_FOREVER); if (ret) { - LOG_ERR("Unable to dequeue encoder buf"); + LOG_ERR("Unable to dequeue encoder output buf"); return ret; } @@ -422,7 +429,11 @@ int main(void) /* Enqueue Buffers */ for (i = 0; i < ARRAY_SIZE(buffers); i++) { - video_enqueue(video_dev, buffers[i]); + ret = video_enqueue(video_dev, buffers[i]); + if (ret) { + LOG_ERR("Unable to enqueue video buf"); + return 0; + } } /* Start video capture */ @@ -452,7 +463,12 @@ int main(void) ret = sendall(client, vbuf_out->buffer, vbuf_out->bytesused); vbuf_out->type = VIDEO_BUF_TYPE_OUTPUT; - video_enqueue(encoder_dev, vbuf_out); + ret = video_enqueue(encoder_dev, vbuf_out); + if (ret) { + LOG_ERR("Unable to enqueue encoder output buf"); + return 0; + } + #else LOG_INF("Sending frame %d", i++); /* Send video buffer to TCP client */ @@ -465,7 +481,11 @@ int main(void) } vbuf->type = VIDEO_BUF_TYPE_INPUT; - (void)video_enqueue(video_dev, vbuf); + ret = video_enqueue(video_dev, vbuf); + if (ret) { + LOG_ERR("Unable to enqueue video buf"); + return 0; + } } while (!ret); /* stop capture */ From ff25b782d23c3d3f395f8cf5916dc22b9354e644 Mon Sep 17 00:00:00 2001 From: Alain Volmat Date: Wed, 22 Oct 2025 18:35:15 +0200 Subject: [PATCH 09/11] samples: usb: uvc: add h264/jpeg suffixed overlay Add overlay files in order to enable usage of the encoder in the UVC sample. This work with platform defining node label zephyr_jpegenc zephyr_h264enc Mode can be selected by using -DFILE_SUFFIX="jpegenc" or -DFILE_SUFFIX="h264enc" when building the sample while also adding -DCONFIG_VIDEO_ENCODER_JPEG or -DCONFIG_VIDEO_ENCODER_H264 as well in the command line. 
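For instance, for the stm32n6570_dk with the st_b_cams_imx_mb1854 camera
shield (an illustrative command line; the equivalent sample.yaml entries
are added later in this series), the H264 variant can be built with
something like:

  west build -b stm32n6570_dk/stm32n657xx/sb samples/subsys/usb/uvc -- \
    -DFILE_SUFFIX="h264enc" -DCONFIG_VIDEO_ENCODER_H264=y \
    -DSHIELD=st_b_cams_imx_mb1854
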
Signed-off-by: Alain Volmat --- samples/subsys/usb/uvc/app_h264enc.overlay | 13 +++++++++++++ samples/subsys/usb/uvc/app_jpegenc.overlay | 13 +++++++++++++ 2 files changed, 26 insertions(+) create mode 100644 samples/subsys/usb/uvc/app_h264enc.overlay create mode 100644 samples/subsys/usb/uvc/app_jpegenc.overlay diff --git a/samples/subsys/usb/uvc/app_h264enc.overlay b/samples/subsys/usb/uvc/app_h264enc.overlay new file mode 100644 index 0000000000000..14547af27a266 --- /dev/null +++ b/samples/subsys/usb/uvc/app_h264enc.overlay @@ -0,0 +1,13 @@ +/* + * Copyright (c) 2025 STMicroelectronics + * + * SPDX-License-Identifier: Apache-2.0 + */ + +#include "app.overlay" + +/ { + chosen { + zephyr,videoenc = &zephyr_h264enc; + }; +}; diff --git a/samples/subsys/usb/uvc/app_jpegenc.overlay b/samples/subsys/usb/uvc/app_jpegenc.overlay new file mode 100644 index 0000000000000..fcbfb269b24a6 --- /dev/null +++ b/samples/subsys/usb/uvc/app_jpegenc.overlay @@ -0,0 +1,13 @@ +/* + * Copyright (c) 2025 STMicroelectronics + * + * SPDX-License-Identifier: Apache-2.0 + */ + +#include "app.overlay" + +/ { + chosen { + zephyr,videoenc = &zephyr_jpegenc; + }; +}; From 89de557e9857164d58be258228574cb7afffaa96 Mon Sep 17 00:00:00 2001 From: Alain Volmat Date: Thu, 23 Oct 2025 17:34:21 +0200 Subject: [PATCH 10/11] boards: stm32n6570_dk: add zephyr_h264enc / zephyr_jpegenc labels Add zephyr_h264enc and zephyr_jpegenc labels on node in order to be able to use VENC and JPEG codec from samples. Signed-off-by: Alain Volmat --- boards/st/stm32n6570_dk/stm32n6570_dk_common.dtsi | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/boards/st/stm32n6570_dk/stm32n6570_dk_common.dtsi b/boards/st/stm32n6570_dk/stm32n6570_dk_common.dtsi index e93f6ffa86f14..dc9579308eb72 100644 --- a/boards/st/stm32n6570_dk/stm32n6570_dk_common.dtsi +++ b/boards/st/stm32n6570_dk/stm32n6570_dk_common.dtsi @@ -534,10 +534,10 @@ csi_interface: &dcmipp { }; }; -&venc { +zephyr_h264enc: &venc { status = "okay"; }; -&jpeg { +zephyr_jpegenc: &jpeg { status = "okay"; }; From 431233f3688272d2f5752e1ba2ad170b27caeb41 Mon Sep 17 00:00:00 2001 From: Alain Volmat Date: Thu, 23 Oct 2025 20:22:51 +0200 Subject: [PATCH 11/11] samples: usb: uvc: add h264 / jpeg encoder test entries Add entries in sample.yaml for enabling h264enc / jpegenc uvc based test on the stm32n6570_dk/stm32n657xx/sb platform. Signed-off-by: Alain Volmat --- samples/subsys/usb/uvc/sample.yaml | 24 ++++++++++++++++++++++++ 1 file changed, 24 insertions(+) diff --git a/samples/subsys/usb/uvc/sample.yaml b/samples/subsys/usb/uvc/sample.yaml index 6495e7a4bd8a4..5000b964f509c 100644 --- a/samples/subsys/usb/uvc/sample.yaml +++ b/samples/subsys/usb/uvc/sample.yaml @@ -27,3 +27,27 @@ tests: filter: dt_chosen_enabled("zephyr,camera") integration_platforms: - arduino_nicla_vision/stm32h747xx/m7 + sample.subsys.usb.uvc.encoder.h264: + depends_on: + - usbd + tags: usb video + extra_configs: + - CONFIG_VIDEO_ENCODER_H264=y + extra_args: + - EXTRA_DTC_OVERLAY_FILE="app_h264enc.overlay" + - SHIELD=st_b_cams_imx_mb1854 + filter: dt_chosen_enabled("zephyr,camera") + integration_platforms: + - stm32n6570_dk/stm32n657xx/sb + sample.subsys.usb.uvc.encoder.jpeg: + depends_on: + - usbd + tags: usb video + extra_configs: + - CONFIG_VIDEO_ENCODER_JPEG=y + extra_args: + - EXTRA_DTC_OVERLAY_FILE="app_jpegenc.overlay" + - SHIELD=st_b_cams_imx_mb1854 + filter: dt_chosen_enabled("zephyr,camera") + integration_platforms: + - stm32n6570_dk/stm32n657xx/sb