diff --git a/drivers/video/video_sw_generator.c b/drivers/video/video_sw_generator.c index 6c270646ef503..116e9b277b562 100644 --- a/drivers/video/video_sw_generator.c +++ b/drivers/video/video_sw_generator.c @@ -32,6 +32,7 @@ LOG_MODULE_REGISTER(video_sw_generator, CONFIG_VIDEO_LOG_LEVEL); struct sw_ctrls { struct video_ctrl hflip; + struct video_ctrl test_pattern; }; struct video_sw_generator_data { @@ -69,6 +70,11 @@ static const struct video_format_cap fmts[] = { {0}, }; +static const char *const test_pattern_menu[] = { + "Color bars", + NULL, +}; + static int video_sw_generator_set_fmt(const struct device *dev, struct video_format *fmt) { struct video_sw_generator_data *data = dev->data; @@ -454,6 +460,13 @@ static DEVICE_API(video, video_sw_generator_driver_api) = { static int video_sw_generator_init_controls(const struct device *dev) { struct video_sw_generator_data *data = dev->data; + int ret; + + ret = video_init_menu_ctrl(&data->ctrls.test_pattern, dev, VIDEO_CID_TEST_PATTERN, + 0, test_pattern_menu); + if (ret < 0) { + return ret; + } return video_init_ctrl(&data->ctrls.hflip, dev, VIDEO_CID_HFLIP, (struct video_ctrl_range){.min = 0, .max = 1, .step = 1, .def = 0}); diff --git a/samples/drivers/video/capture/sample.yaml b/samples/drivers/video/capture/sample.yaml index 209411155eb75..1b843e1fcf26a 100644 --- a/samples/drivers/video/capture/sample.yaml +++ b/samples/drivers/video/capture/sample.yaml @@ -15,7 +15,6 @@ tests: - platform:stm32h7b3i_dk:SHIELD="st_b_cams_omv_mb1683" - platform:ek_ra8d1/r7fa8d1bhecbd:SHIELD="dvp_20pin_ov7670;rtkmipilcdb00000be" extra_configs: - - CONFIG_TEST=y - CONFIG_FPU=y harness: console harness_config: @@ -26,7 +25,6 @@ tests: - "Got frame \\d+" - "size: \\d+;" - "timestamp \\d+" - - "Pattern OK" platform_allow: - arduino_nicla_vision/stm32h747xx/m7 - mimxrt1064_evk/mimxrt1064 diff --git a/samples/drivers/video/capture/src/check_test_pattern.h b/samples/drivers/video/capture/src/check_test_pattern.h deleted file mode 
100644 index 42dbe7c79e198..0000000000000 --- a/samples/drivers/video/capture/src/check_test_pattern.h +++ /dev/null @@ -1,154 +0,0 @@ -/* - * Copyright 2024 NXP - * - * SPDX-License-Identifier: Apache-2.0 - */ - -#ifndef TEST_PATTERN_CHECK_H_ -#define TEST_PATTERN_CHECK_H_ - -#include - -#include - -#define LAB_THRESHOLD 10.0 - -#define BARS_NUM 8 -#define PIXELS_NUM 5 - -typedef struct { - double L; - double a; - double b; -} CIELAB; - -/* - * This is measured on a real 8-colorbar pattern generated by an ov5640 camera sensor. - * For other sensors, it can be slightly different. If it doesn't fit anymore, either - * this array or the LAB_THRESHOLD can be modified. - * - * {White, Yellow, Cyan, Green, Magenta, Red, Blue, Black} - */ -static const CIELAB colorbars_target[] = { - {100.0, 0.0053, -0.0104}, {97.1804, -21.2151, 91.3538}, {90.1352, -58.4675, 6.0570}, - {87.7630, -85.9469, 83.2128}, {56.6641, 95.0182, -66.9129}, {46.6937, 72.7494, 49.5801}, - {27.6487, 71.5662, -97.4712}, {1.3726, -2.8040, 2.0043}}; - -static inline CIELAB rgb888_to_lab(const uint8_t r, const uint8_t g, const uint8_t b) -{ - CIELAB lab; - - double r_lin = r / 255.0; - double g_lin = g / 255.0; - double b_lin = b / 255.0; - - r_lin = r_lin > 0.04045 ? pow((r_lin + 0.055) / 1.055, 2.4) : r_lin / 12.92; - g_lin = g_lin > 0.04045 ? pow((g_lin + 0.055) / 1.055, 2.4) : g_lin / 12.92; - b_lin = b_lin > 0.04045 ? pow((b_lin + 0.055) / 1.055, 2.4) : b_lin / 12.92; - - double x = r_lin * 0.4124 + g_lin * 0.3576 + b_lin * 0.1805; - double y = r_lin * 0.2126 + g_lin * 0.7152 + b_lin * 0.0722; - double z = r_lin * 0.0193 + g_lin * 0.1192 + b_lin * 0.9505; - - x /= 0.95047; - z /= 1.08883; - - x = x > 0.008856 ? pow(x, 1.0 / 3.0) : (7.787 * x) + (16.0 / 116.0); - y = y > 0.008856 ? pow(y, 1.0 / 3.0) : (7.787 * y) + (16.0 / 116.0); - z = z > 0.008856 ? 
pow(z, 1.0 / 3.0) : (7.787 * z) + (16.0 / 116.0); - - lab.L = 116.0 * y - 16.0; - lab.a = 500.0 * (x - y); - lab.b = 200.0 * (y - z); - - return lab; -} - -static inline CIELAB xrgb32_to_lab(const uint32_t color) -{ - uint8_t r = (color >> 16) & 0xFF; - uint8_t g = (color >> 8) & 0xFF; - uint8_t b = color & 0xFF; - - return rgb888_to_lab(r, g, b); -} - -static inline CIELAB rgb565_to_lab(const uint16_t color) -{ - uint8_t r5 = (color >> 11) & 0x1F; - uint8_t g6 = (color >> 5) & 0x3F; - uint8_t b5 = color & 0x1F; - - /* Convert RGB565 to RGB888 */ - uint8_t r = (r5 * 255) / 31; - uint8_t g = (g6 * 255) / 63; - uint8_t b = (b5 * 255) / 31; - - return rgb888_to_lab(r, g, b); -} - -static inline void sum_lab(CIELAB *sum, const CIELAB lab) -{ - sum->L += lab.L; - sum->a += lab.a; - sum->b += lab.b; -} - -static inline void average_lab(CIELAB *lab, const uint32_t count) -{ - if (count > 0) { - lab->L /= count; - lab->a /= count; - lab->b /= count; - } -} - -static inline double deltaE(const CIELAB lab1, const CIELAB lab2) -{ - return sqrt(pow(lab1.L - lab2.L, 2) + pow(lab1.a - lab2.a, 2) + pow(lab1.b - lab2.b, 2)); -} - -/* - * As color values may vary near the boundary of each bar and also, for computational - * efficiency, check only a small number of pixels (PIXELS_NUM) in the middle of each bar. 
- */ -static inline bool is_colorbar_ok(const uint8_t *const buf, const struct video_format fmt) -{ - int i; - int bw = fmt.width / BARS_NUM; - CIELAB colorbars[BARS_NUM] = {0}; - - for (int h = 0; h < fmt.height; h++) { - for (i = 0; i < BARS_NUM; i++) { - if (fmt.pixelformat == VIDEO_PIX_FMT_XRGB32) { - uint32_t *pixel = - (uint32_t *)&buf[4 * (h * fmt.width + bw / 2 + i * bw)]; - - for (int j = -PIXELS_NUM / 2; j <= PIXELS_NUM / 2; j++) { - sum_lab(&colorbars[i], xrgb32_to_lab(*(pixel + j))); - } - } else if (fmt.pixelformat == VIDEO_PIX_FMT_RGB565) { - uint16_t *pixel = - (uint16_t *)&buf[2 * (h * fmt.width + bw / 2 + i * bw)]; - - for (int j = -PIXELS_NUM / 2; j <= PIXELS_NUM / 2; j++) { - sum_lab(&colorbars[i], rgb565_to_lab(*(pixel + j))); - } - } else { - printk("Format %d is not supported", fmt.pixelformat); - return false; - } - } - } - - for (i = 0; i < BARS_NUM; i++) { - average_lab(&colorbars[i], PIXELS_NUM * fmt.height); - if (deltaE(colorbars[i], colorbars_target[i]) > LAB_THRESHOLD) { - return false; - } - } - - return true; -} - -#endif /* TEST_PATTERN_CHECK_H_ */ diff --git a/samples/drivers/video/capture/src/main.c b/samples/drivers/video/capture/src/main.c index 80cbda7aa542f..1888d4ec292c6 100644 --- a/samples/drivers/video/capture/src/main.c +++ b/samples/drivers/video/capture/src/main.c @@ -13,27 +13,24 @@ #include #include - -#ifdef CONFIG_TEST -#include "check_test_pattern.h" - -LOG_MODULE_REGISTER(main, LOG_LEVEL_DBG); -#else LOG_MODULE_REGISTER(main, CONFIG_LOG_DEFAULT_LEVEL); -#endif #if !DT_HAS_CHOSEN(zephyr_camera) #error No camera chosen in devicetree. Missing "--shield" or "--snippet video-sw-generator" flag? 
#endif -#if DT_HAS_CHOSEN(zephyr_display) -static inline int display_setup(const struct device *const display_dev, const uint32_t pixfmt) +static inline int app_setup_display(const struct device *const display_dev, const uint32_t pixfmt) { struct display_capabilities capabilities; int ret = 0; LOG_INF("Display device: %s", display_dev->name); + if (!device_is_ready(display_dev)) { + LOG_ERR("%s: display device not ready.", display_dev->name); + return -ENOSYS; + } + display_get_capabilities(display_dev, &capabilities); LOG_INF("- Capabilities:"); @@ -56,10 +53,10 @@ static inline int display_setup(const struct device *const display_dev, const ui } break; default: + LOG_ERR("Display pixel format not supported by this sample"); return -ENOTSUP; } - - if (ret) { + if (ret < 0) { LOG_ERR("Unable to set display format"); return ret; } @@ -74,69 +71,101 @@ static inline int display_setup(const struct device *const display_dev, const ui return ret; } -static inline void video_display_frame(const struct device *const display_dev, - const struct video_buffer *const vbuf, - const struct video_format fmt) +static int app_display_frame(const struct device *const display_dev, + const struct video_buffer *const vbuf, + const struct video_format *const fmt) { struct display_buffer_descriptor buf_desc = { .buf_size = vbuf->bytesused, - .width = fmt.width, + .width = fmt->width, .pitch = buf_desc.width, - .height = vbuf->bytesused / fmt.pitch, + .height = vbuf->bytesused / fmt->pitch, }; - display_write(display_dev, 0, vbuf->line_offset, &buf_desc, vbuf->buffer); + return display_write(display_dev, 0, vbuf->line_offset, &buf_desc, vbuf->buffer); } -#endif -int main(void) +static int app_setup_video_selection(const struct device *const video_dev, + const struct video_format *const fmt) { - struct video_buffer *vbuf = &(struct video_buffer){}; - const struct device *video_dev; - struct video_format fmt; - struct video_caps caps; - struct video_frmival frmival; - struct 
video_frmival_enum fie; - enum video_buf_type type = VIDEO_BUF_TYPE_OUTPUT; -#if (CONFIG_VIDEO_SOURCE_CROP_WIDTH && CONFIG_VIDEO_SOURCE_CROP_HEIGHT) - struct video_selection crop_sel = { + struct video_selection sel = { .type = VIDEO_BUF_TYPE_OUTPUT, - .target = VIDEO_SEL_TGT_CROP; - .rect.left = CONFIG_VIDEO_SOURCE_CROP_LEFT; - .rect.top = CONFIG_VIDEO_SOURCE_CROP_TOP; - .rect.width = CONFIG_VIDEO_SOURCE_CROP_WIDTH; - .rect.height = CONFIG_VIDEO_SOURCE_CROP_HEIGHT; }; -#endif - unsigned int frame = 0; - int i = 0; - int err; + int ret; + + /* Set the crop setting only if configured */ + if (CONFIG_VIDEO_SOURCE_CROP_WIDTH > 0) { + sel.target = VIDEO_SEL_TGT_CROP; + sel.rect.left = CONFIG_VIDEO_SOURCE_CROP_LEFT; + sel.rect.top = CONFIG_VIDEO_SOURCE_CROP_TOP; + sel.rect.width = CONFIG_VIDEO_SOURCE_CROP_WIDTH; + sel.rect.height = CONFIG_VIDEO_SOURCE_CROP_HEIGHT; + + ret = video_set_selection(video_dev, &sel); + if (ret < 0) { + LOG_ERR("Unable to set selection crop"); + return ret; + } - /* When the video shell is enabled, do not run the capture loop */ - if (IS_ENABLED(CONFIG_VIDEO_SHELL)) { - LOG_INF("Letting the user control the device with the video shell"); - return 0; + LOG_INF("Crop window set to (%u,%u)/%ux%u", + sel.rect.left, sel.rect.top, sel.rect.width, sel.rect.height); } - video_dev = DEVICE_DT_GET(DT_CHOSEN(zephyr_camera)); - if (!device_is_ready(video_dev)) { - LOG_ERR("%s: video device is not ready", video_dev->name); - return 0; + /* + * Check (if possible) if targeted size is same as crop + * and if compose is necessary + */ + sel.target = VIDEO_SEL_TGT_CROP; + ret = video_get_selection(video_dev, &sel); + if (ret < 0 && ret != -ENOSYS) { + LOG_ERR("Unable to get selection crop"); + return ret; } + if (ret == 0 && (sel.rect.width != fmt->width || sel.rect.height != fmt->height)) { + sel.target = VIDEO_SEL_TGT_COMPOSE; + sel.rect.left = 0; + sel.rect.top = 0; + sel.rect.width = fmt->width; + sel.rect.height = fmt->height; + + ret = 
video_set_selection(video_dev, &sel); + if (ret < 0 && ret != -ENOSYS) { + LOG_ERR("Unable to set selection compose"); + return ret; + } + + LOG_INF("Compose window set to (%u,%u)/%ux%u", + sel.rect.left, sel.rect.top, sel.rect.width, sel.rect.height); + } + + return 0; +} + +static int app_query_video_info(const struct device *const video_dev, + struct video_caps *const caps, + struct video_format *const fmt) +{ + int ret; + LOG_INF("Video device: %s", video_dev->name); + if (!device_is_ready(video_dev)) { + LOG_ERR("%s: video device is not ready", video_dev->name); + return -ENOSYS; + } + /* Get capabilities */ - caps.type = type; - if (video_get_caps(video_dev, &caps)) { + ret = video_get_caps(video_dev, caps); + if (ret < 0) { LOG_ERR("Unable to retrieve video capabilities"); - return 0; + return ret; } LOG_INF("- Capabilities:"); - while (caps.format_caps[i].pixelformat) { - const struct video_format_cap *fcap = &caps.format_caps[i]; - /* fourcc to string */ + for (int i = 0; caps->format_caps[i].pixelformat; i++) { + const struct video_format_cap *fcap = &caps->format_caps[i]; + LOG_INF(" %s width [%u; %u; %u] height [%u; %u; %u]", VIDEO_FOURCC_TO_STR(fcap->pixelformat), fcap->width_min, fcap->width_max, fcap->width_step, @@ -145,50 +174,54 @@ int main(void) } /* Get default/native format */ - fmt.type = type; - if (video_get_format(video_dev, &fmt)) { + ret = video_get_format(video_dev, fmt); + if (ret < 0) { LOG_ERR("Unable to retrieve video format"); - return 0; + return ret; } - /* Set the crop setting if necessary */ -#if CONFIG_VIDEO_SOURCE_CROP_WIDTH && CONFIG_VIDEO_SOURCE_CROP_HEIGHT - if (video_set_selection(video_dev, &crop_sel)) { - LOG_ERR("Unable to set selection crop"); - return 0; + /* Adjust video format according to the configuration */ + if (CONFIG_VIDEO_FRAME_HEIGHT > 0) { + fmt->height = CONFIG_VIDEO_FRAME_HEIGHT; + } + if (CONFIG_VIDEO_FRAME_WIDTH > 0) { + fmt->width = CONFIG_VIDEO_FRAME_WIDTH; + } + if (strcmp(CONFIG_VIDEO_PIXEL_FORMAT, "") != 0) { + 
fmt->pixelformat = VIDEO_FOURCC_FROM_STR(CONFIG_VIDEO_PIXEL_FORMAT); } - LOG_INF("Selection crop set to (%u,%u)/%ux%u", - sel.rect.left, sel.rect.top, sel.rect.width, sel.rect.height); -#endif - -#if CONFIG_VIDEO_FRAME_HEIGHT - fmt.height = CONFIG_VIDEO_FRAME_HEIGHT; -#endif -#if CONFIG_VIDEO_FRAME_WIDTH - fmt.width = CONFIG_VIDEO_FRAME_WIDTH; -#endif + return 0; +} - if (strcmp(CONFIG_VIDEO_PIXEL_FORMAT, "")) { - fmt.pixelformat = VIDEO_FOURCC_FROM_STR(CONFIG_VIDEO_PIXEL_FORMAT); - } +static int app_setup_video_format(const struct device *const video_dev, + struct video_format *const fmt) +{ + int ret; LOG_INF("- Video format: %s %ux%u", - VIDEO_FOURCC_TO_STR(fmt.pixelformat), fmt.width, fmt.height); + VIDEO_FOURCC_TO_STR(fmt->pixelformat), fmt->width, fmt->height); - if (video_set_compose_format(video_dev, &fmt)) { + ret = video_set_compose_format(video_dev, fmt); + if (ret < 0) { LOG_ERR("Unable to set format"); - return 0; + return ret; } - if (!video_get_frmival(video_dev, &frmival)) { - LOG_INF("- Default frame rate : %f fps", - 1.0 * frmival.denominator / frmival.numerator); - } + return 0; +} + +static int app_setup_video_frmival(const struct device *const video_dev, + struct video_format *const fmt) +{ + struct video_frmival frmival = {}; + struct video_frmival_enum fie = { + .format = fmt, + }; + int ret; LOG_INF("- Supported frame intervals for the default format:"); - memset(&fie, 0, sizeof(fie)); - fie.format = &fmt; + while (video_enum_frmival(video_dev, &fie) == 0) { if (fie.type == VIDEO_FRMIVAL_TYPE_DISCRETE) { LOG_INF(" %u/%u", fie.discrete.numerator, fie.discrete.denominator); @@ -201,12 +233,30 @@ int main(void) fie.index++; } + ret = video_get_frmival(video_dev, &frmival); + if (ret == -ENOTSUP || ret == -ENOSYS) { + LOG_WRN("The video source does not support frame rate control"); + } else if (ret < 0) { + LOG_ERR("Error while getting the frame interval"); + return ret; + } else if (ret == 0) { + LOG_INF("- Default frame rate : %f fps", + 1.0 * 
frmival.denominator / frmival.numerator); + } + + return 0; +} + +static int app_setup_video_controls(const struct device *const video_dev) +{ + int ret; + /* Get supported controls */ LOG_INF("- Supported controls:"); const struct device *last_dev = NULL; struct video_ctrl_query cq = {.dev = video_dev, .id = VIDEO_CTRL_FLAG_NEXT_CTRL}; - while (!video_query_ctrl(&cq)) { + while (video_query_ctrl(&cq) == 0) { if (cq.dev != last_dev) { last_dev = cq.dev; LOG_INF("\t\tdevice: %s", cq.dev->name); @@ -217,95 +267,165 @@ int main(void) /* Set controls */ struct video_control ctrl = {.id = VIDEO_CID_HFLIP, .val = 1}; - int tp_set_ret = -ENOTSUP; if (IS_ENABLED(CONFIG_VIDEO_CTRL_HFLIP)) { - video_set_ctrl(video_dev, &ctrl); + ret = video_set_ctrl(video_dev, &ctrl); + if (ret < 0) { + LOG_ERR("Failed to set horizontal flip"); + return ret; + } } if (IS_ENABLED(CONFIG_VIDEO_CTRL_VFLIP)) { ctrl.id = VIDEO_CID_VFLIP; - video_set_ctrl(video_dev, &ctrl); + ret = video_set_ctrl(video_dev, &ctrl); + if (ret < 0) { + LOG_ERR("Failed to set vertical flip"); + return ret; + } } if (IS_ENABLED(CONFIG_TEST)) { ctrl.id = VIDEO_CID_TEST_PATTERN; - tp_set_ret = video_set_ctrl(video_dev, &ctrl); + ret = video_set_ctrl(video_dev, &ctrl); + if (ret < 0 && ret != -ENOTSUP) { + LOG_WRN("Failed to set the test pattern"); + } } -#if DT_HAS_CHOSEN(zephyr_display) - const struct device *const display_dev = DEVICE_DT_GET(DT_CHOSEN(zephyr_display)); - - if (!device_is_ready(display_dev)) { - LOG_ERR("%s: display device not ready.", display_dev->name); - return 0; - } + return 0; +} - err = display_setup(display_dev, fmt.pixelformat); - if (err) { - LOG_ERR("Unable to set up display"); - return err; - } -#endif +static int app_setup_video_buffers(const struct device *const video_dev, + struct video_caps *const caps, + struct video_format *const fmt) +{ + int ret; /* Alloc video buffers and enqueue for capture */ - if (caps.min_vbuf_count > CONFIG_VIDEO_BUFFER_POOL_NUM_MAX || - fmt.size > 
CONFIG_VIDEO_BUFFER_POOL_SZ_MAX) { + if (caps->min_vbuf_count > CONFIG_VIDEO_BUFFER_POOL_NUM_MAX || + fmt->size > CONFIG_VIDEO_BUFFER_POOL_SZ_MAX) { LOG_ERR("Not enough buffers or memory to start streaming"); - return 0; + return -EINVAL; } - for (i = 0; i < CONFIG_VIDEO_BUFFER_POOL_NUM_MAX; i++) { + for (int i = 0; i < CONFIG_VIDEO_BUFFER_POOL_NUM_MAX; i++) { + struct video_buffer *vbuf; + /* * For some hardwares, such as the PxP used on i.MX RT1170 to do image rotation, * buffer alignment is needed in order to achieve the best performance */ - vbuf = video_buffer_aligned_alloc(fmt.size, CONFIG_VIDEO_BUFFER_POOL_ALIGN, - K_NO_WAIT); + vbuf = video_buffer_aligned_alloc(fmt->size, CONFIG_VIDEO_BUFFER_POOL_ALIGN, + K_NO_WAIT); if (vbuf == NULL) { LOG_ERR("Unable to alloc video buffer"); - return 0; + return -ENOMEM; + } + + vbuf->type = VIDEO_BUF_TYPE_OUTPUT; + + ret = video_enqueue(video_dev, vbuf); + if (ret < 0) { + LOG_ERR("Failed to enqueue video buffer"); + return ret; } - vbuf->type = type; - video_enqueue(video_dev, vbuf); } - /* Start video capture */ - if (video_stream_start(video_dev, type)) { - LOG_ERR("Unable to start capture (interface)"); + return 0; +} + +int main(void) +{ + const struct device *const video_dev = DEVICE_DT_GET(DT_CHOSEN(zephyr_camera)); + const struct device *const display_dev = DEVICE_DT_GET_OR_NULL(DT_CHOSEN(zephyr_display)); + struct video_buffer *vbuf = &(struct video_buffer){}; + struct video_format fmt = { + .type = VIDEO_BUF_TYPE_OUTPUT, + }; + struct video_caps caps = { + .type = VIDEO_BUF_TYPE_OUTPUT, + }; + unsigned int frame = 0; + int ret; + + /* When the video shell is enabled, do not run the capture loop */ + if (IS_ENABLED(CONFIG_VIDEO_SHELL)) { + LOG_INF("Letting the user control the device with the video shell"); return 0; } + ret = app_query_video_info(video_dev, &caps, &fmt); + if (ret < 0) { + goto err; + } + + ret = app_setup_video_selection(video_dev, &fmt); + if (ret < 0) { + goto err; + } + + ret = 
app_setup_video_format(video_dev, &fmt); + if (ret < 0) { + goto err; + } + + ret = app_setup_video_frmival(video_dev, &fmt); + if (ret < 0) { + goto err; + } + + ret = app_setup_video_controls(video_dev); + if (ret < 0) { + goto err; + } + + if (DT_HAS_CHOSEN(zephyr_display)) { + ret = app_setup_display(display_dev, fmt.pixelformat); + if (ret < 0) { + goto err; + } + } + + ret = app_setup_video_buffers(video_dev, &caps, &fmt); + if (ret < 0) { + goto err; + } + + ret = video_stream_start(video_dev, VIDEO_BUF_TYPE_OUTPUT); + if (ret < 0) { + LOG_ERR("Unable to start capture (interface)"); + goto err; + } + LOG_INF("Capture started"); - /* Grab video frames */ - vbuf->type = type; + vbuf->type = VIDEO_BUF_TYPE_OUTPUT; while (1) { - err = video_dequeue(video_dev, &vbuf, K_FOREVER); - if (err) { + ret = video_dequeue(video_dev, &vbuf, K_FOREVER); + if (ret < 0) { LOG_ERR("Unable to dequeue video buf"); - return 0; + goto err; } - LOG_DBG("Got frame %u! size: %u; timestamp %u ms", + LOG_INF("Got frame %u! size: %u; timestamp %u ms", frame++, vbuf->bytesused, vbuf->timestamp); -#ifdef CONFIG_TEST - if (tp_set_ret < 0) { - LOG_DBG("Test pattern control was not successful. 
Skip test"); - } else if (is_colorbar_ok(vbuf->buffer, fmt)) { - LOG_DBG("Pattern OK!\n"); + if (DT_HAS_CHOSEN(zephyr_display)) { + ret = app_display_frame(display_dev, vbuf, &fmt); + if (ret != 0) { + LOG_WRN("Failed to display this frame"); + } } -#endif -#if DT_HAS_CHOSEN(zephyr_display) - video_display_frame(display_dev, vbuf, fmt); -#endif - - err = video_enqueue(video_dev, vbuf); - if (err) { + ret = video_enqueue(video_dev, vbuf); + if (ret < 0) { LOG_ERR("Unable to requeue video buf"); - return 0; + goto err; } } + +err: + LOG_ERR("Aborting sample"); + return 0; } diff --git a/tests/drivers/video/test_pattern/CMakeLists.txt b/tests/drivers/video/test_pattern/CMakeLists.txt new file mode 100644 index 0000000000000..4cc11977f4df0 --- /dev/null +++ b/tests/drivers/video/test_pattern/CMakeLists.txt @@ -0,0 +1,8 @@ +# Copyright The Zephyr Project Contributors +# SPDX-License-Identifier: Apache-2.0 + +cmake_minimum_required(VERSION 3.20.0) +find_package(Zephyr REQUIRED HINTS $ENV{ZEPHYR_BASE}) +project(integration) + +target_sources(app PRIVATE src/main.c) diff --git a/tests/drivers/video/test_pattern/Kconfig b/tests/drivers/video/test_pattern/Kconfig new file mode 100644 index 0000000000000..c791d7ac1bdea --- /dev/null +++ b/tests/drivers/video/test_pattern/Kconfig @@ -0,0 +1,59 @@ +# Copyright The Zephyr Project Contributors +# SPDX-License-Identifier: Apache-2.0 + +mainmenu "Video test pattern testing" + +menu "Video capture configuration" + +config TEST_PIXEL_FORMAT + string "Pixel format of the video frame" + help + Pixel format of the video frame. If not set, the default pixel format is used. + +config TEST_FRAME_HEIGHT + int "Height of the video frame" + default 0 + help + Height of the video frame. If set to 0, the default height is used. + +config TEST_FRAME_WIDTH + int "Width of the video frame" + default 0 + help + Width of the video frame. If set to 0, the default width is used. 
+ +config TEST_FRAMES_TOTAL + int "Number of frames to test in total" + default 1000 + help + Video hardware do not always produce valid frames immediately, at high FPS, there can + be a lot of frames completely black or otherwise invalid before the test pattern + to show-up, which would still be the expected behavior of the hardware. + +config TEST_PATTERN_CTRL + int "Value used for the test pattern menu control" + default 1 + help + Some drivers support different types of test patterns and/or in a different order. + Control the menu CID value to select the correct "vertical color bar" pattern. + +config TEST_FRAMES_VALID + int "Number of valid frames to expect" + default 10 + help + Number of frames after which consider the test successful. + A valid frame is a frame featuring the test pattern with colors close enough according + to CONFIG_TEST_LAB_THRESHOLD. + +config TEST_LAB_THRESHOLD + int "CIE LAB acceptance threshold" + default 10 + help + Margin value to consider the color similarity to be close enough. + The default is 10 to allow slight difference to be ignored, and complete swaps to + always be detected. This can be raised in case the colors are all slightly off but + correctly ordered. 
+ +endmenu + +source "Kconfig.zephyr" diff --git a/tests/drivers/video/test_pattern/boards/native_sim_native_64.conf b/tests/drivers/video/test_pattern/boards/native_sim_native_64.conf new file mode 100644 index 0000000000000..bee94f1dd1daf --- /dev/null +++ b/tests/drivers/video/test_pattern/boards/native_sim_native_64.conf @@ -0,0 +1,7 @@ +CONFIG_VIDEO_BUFFER_POOL_SZ_MAX=33000 +CONFIG_VIDEO_BUFFER_POOL_NUM_MAX=1 +CONFIG_TEST_FRAME_HEIGHT=64 +CONFIG_TEST_FRAME_WIDTH=256 +CONFIG_TEST_PIXEL_FORMAT="RGBP" +CONFIG_TEST_LAB_THRESHOLD=30 +CONFIG_TEST_PATTERN_CTRL=0 diff --git a/tests/drivers/video/test_pattern/prj.conf b/tests/drivers/video/test_pattern/prj.conf new file mode 100644 index 0000000000000..193d9535a724f --- /dev/null +++ b/tests/drivers/video/test_pattern/prj.conf @@ -0,0 +1,4 @@ +CONFIG_LOG=y +CONFIG_VIDEO=y +CONFIG_ZTEST=y +CONFIG_FPU=y diff --git a/tests/drivers/video/test_pattern/src/main.c b/tests/drivers/video/test_pattern/src/main.c new file mode 100644 index 0000000000000..c272546f74120 --- /dev/null +++ b/tests/drivers/video/test_pattern/src/main.c @@ -0,0 +1,268 @@ +/* + * Copyright (c) 2019 Linaro Limited + * Copyright 2025 NXP + * + * SPDX-License-Identifier: Apache-2.0 + */ + +#include + +#include +#include +#include +#include +#include +#include +#include + +LOG_MODULE_REGISTER(test_pattern, LOG_LEVEL_INF); + +#define LAB_THRESHOLD ((double)CONFIG_TEST_LAB_THRESHOLD) + +#define BARS_NUM 8 +#define PIXELS_NUM 5 + +typedef struct { + double L; + double a; + double b; +} CIELAB; + +/* + * This is measured on a real 8-colorbar pattern generated by an ov5640 camera sensor. + * For other sensors, it can be slightly different. If it doesn't fit anymore, either + * this array or the LAB_THRESHOLD can be modified. 
+ */ +static const CIELAB colorbars_target[] = { + {100.0, 0.0053, -0.0104}, /* White */ + {97.1804, -21.2151, 91.3538}, /* Yellow */ + {90.1352, -58.4675, 6.0570}, /* Cyan */ + {87.7630, -85.9469, 83.2128}, /* Green */ + {56.6641, 95.0182, -66.9129}, /* Magenta */ + {46.6937, 72.7494, 49.5801}, /* Red */ + {27.6487, 71.5662, -97.4712}, /* Blue */ + {1.3726, -2.8040, 2.0043}, /* Black */ +}; + +static inline CIELAB rgb888_to_lab(const uint8_t r, const uint8_t g, const uint8_t b) +{ + CIELAB lab; + + double r_lin = r / 255.0; + double g_lin = g / 255.0; + double b_lin = b / 255.0; + + r_lin = r_lin > 0.04045 ? pow((r_lin + 0.055) / 1.055, 2.4) : r_lin / 12.92; + g_lin = g_lin > 0.04045 ? pow((g_lin + 0.055) / 1.055, 2.4) : g_lin / 12.92; + b_lin = b_lin > 0.04045 ? pow((b_lin + 0.055) / 1.055, 2.4) : b_lin / 12.92; + + double x = r_lin * 0.4124 + g_lin * 0.3576 + b_lin * 0.1805; + double y = r_lin * 0.2126 + g_lin * 0.7152 + b_lin * 0.0722; + double z = r_lin * 0.0193 + g_lin * 0.1192 + b_lin * 0.9505; + + x /= 0.95047; + z /= 1.08883; + + x = x > 0.008856 ? pow(x, 1.0 / 3.0) : (7.787 * x) + (16.0 / 116.0); + y = y > 0.008856 ? pow(y, 1.0 / 3.0) : (7.787 * y) + (16.0 / 116.0); + z = z > 0.008856 ? 
pow(z, 1.0 / 3.0) : (7.787 * z) + (16.0 / 116.0); + + lab.L = 116.0 * y - 16.0; + lab.a = 500.0 * (x - y); + lab.b = 200.0 * (y - z); + + return lab; +} + +static inline CIELAB xrgb32_to_lab(const uint32_t color) +{ + uint8_t r = (color >> 16) & 0xFF; + uint8_t g = (color >> 8) & 0xFF; + uint8_t b = color & 0xFF; + + return rgb888_to_lab(r, g, b); +} + +static inline CIELAB rgb565_to_lab(const uint16_t color) +{ + uint8_t r5 = (color >> 11) & 0x1F; + uint8_t g6 = (color >> 5) & 0x3F; + uint8_t b5 = color & 0x1F; + + /* Convert RGB565 to RGB888 */ + uint8_t r = (r5 * 255) / 31; + uint8_t g = (g6 * 255) / 63; + uint8_t b = (b5 * 255) / 31; + + return rgb888_to_lab(r, g, b); +} + +static inline void sum_lab(CIELAB *sum, const CIELAB lab) +{ + sum->L += lab.L; + sum->a += lab.a; + sum->b += lab.b; +} + +static inline void average_lab(CIELAB *lab, const uint32_t count) +{ + if (count > 0) { + lab->L /= count; + lab->a /= count; + lab->b /= count; + } +} + +static inline double deltaE(const CIELAB lab1, const CIELAB lab2) +{ + return sqrt(pow(lab1.L - lab2.L, 2) + pow(lab1.a - lab2.a, 2) + pow(lab1.b - lab2.b, 2)); +} + +/* + * As color values may vary near the boundary of each bar and also, for computational + * efficiency, check only a small number of pixels (PIXELS_NUM) in the middle of each bar. 
+ */ +static inline bool is_colorbar_ok(const uint8_t *const buf, const struct video_format *fmt) +{ + int i; + int bw = fmt->width / BARS_NUM; + CIELAB colorbars[BARS_NUM] = {0}; + + for (int h = 0; h < fmt->height; h++) { + for (i = 0; i < BARS_NUM; i++) { + if (fmt->pixelformat == VIDEO_PIX_FMT_XRGB32) { + uint32_t *pixel = + (uint32_t *)&buf[4 * (h * fmt->width + bw / 2 + i * bw)]; + + for (int j = -PIXELS_NUM / 2; j <= PIXELS_NUM / 2; j++) { + sum_lab(&colorbars[i], xrgb32_to_lab(*(pixel + j))); + } + } else if (fmt->pixelformat == VIDEO_PIX_FMT_RGB565) { + uint16_t *pixel = + (uint16_t *)&buf[2 * (h * fmt->width + bw / 2 + i * bw)]; + + for (int j = -PIXELS_NUM / 2; j <= PIXELS_NUM / 2; j++) { + sum_lab(&colorbars[i], rgb565_to_lab(*(pixel + j))); + } + } else { + printk("Format %d is not supported", fmt->pixelformat); + return false; + } + } + } + + for (i = 0; i < BARS_NUM; i++) { + average_lab(&colorbars[i], PIXELS_NUM * fmt->height); + if (deltaE(colorbars[i], colorbars_target[i]) > LAB_THRESHOLD) { + return false; + } + } + + return true; +} + +static const struct device *const video_dev = DEVICE_DT_GET(DT_CHOSEN(zephyr_camera)); +struct video_format fmt; + +static void *test_pattern_setup(void) +{ + struct video_buffer *vbuf = &(struct video_buffer){}; + struct video_caps caps = { + .type = VIDEO_BUF_TYPE_OUTPUT, + }; + struct video_control ctrl = { + .id = VIDEO_CID_TEST_PATTERN, .val = CONFIG_TEST_PATTERN_CTRL, + }; + int ret; + + zassert(device_is_ready(video_dev), "device initialization failed"); + + ret = video_get_caps(video_dev, &caps); + zassert_ok(ret, "getting video capabilities failed"); + + fmt.type = VIDEO_BUF_TYPE_OUTPUT; + ret = video_get_format(video_dev, &fmt); + zassert_ok(ret, "getting default video format failed"); + + if (CONFIG_TEST_FRAME_HEIGHT > 0) { + fmt.height = CONFIG_TEST_FRAME_HEIGHT; + } + if (CONFIG_TEST_FRAME_WIDTH > 0) { + fmt.width = CONFIG_TEST_FRAME_WIDTH; + } + if (strcmp(CONFIG_TEST_PIXEL_FORMAT, "") != 0) { + 
fmt.pixelformat = VIDEO_FOURCC_FROM_STR(CONFIG_TEST_PIXEL_FORMAT); + } + + LOG_INF("Video format: %s %ux%u", + VIDEO_FOURCC_TO_STR(fmt.pixelformat), fmt.width, fmt.height); + + ret = video_set_format(video_dev, &fmt); + zassert_ok(ret, "setting video format failed"); + + ret = video_set_ctrl(video_dev, &ctrl); + zassert_ok(ret, "setting test pattern"); + + /* Alloc video buffers and enqueue for capture */ + zassert(caps.min_vbuf_count <= CONFIG_VIDEO_BUFFER_POOL_NUM_MAX, + "not enough buffers"); + zassert(fmt.size <= CONFIG_VIDEO_BUFFER_POOL_SZ_MAX, + "buffers too large"); + + for (int i = 0; i < CONFIG_VIDEO_BUFFER_POOL_NUM_MAX; i++) { + vbuf = video_buffer_aligned_alloc(fmt.size, CONFIG_VIDEO_BUFFER_POOL_ALIGN, + K_NO_WAIT); + zassert_not_null(vbuf); + + vbuf->type = VIDEO_BUF_TYPE_OUTPUT; + + ret = video_enqueue(video_dev, vbuf); + zassert_ok(ret); + } + + LOG_INF("Device %s configured starting capture", video_dev->name); + + ret = video_stream_start(video_dev, VIDEO_BUF_TYPE_OUTPUT); + zassert_ok(ret); + + return NULL; +} + +void test_pattern_after(void *fixture) +{ + int ret; + + ret = video_stream_stop(video_dev, VIDEO_BUF_TYPE_OUTPUT); + zassert_ok(ret); +} + +ZTEST(test_pattern, test_pattern_frames) +{ + struct video_buffer *vbuf = &(struct video_buffer){ + .type = VIDEO_BUF_TYPE_OUTPUT + }; + size_t valid = 0; + int ret; + + for (size_t i = 0; i < CONFIG_TEST_FRAMES_TOTAL; i++) { + ret = video_dequeue(video_dev, &vbuf, K_FOREVER); + zassert_ok(ret); + + LOG_INF("Got frame, testing color bars"); + + valid += is_colorbar_ok(vbuf->buffer, &fmt); + if (valid >= CONFIG_TEST_FRAMES_VALID) { + LOG_INF("Got %u valid frames out of %u, stopping the test", valid, i + 1); + break; + } + + ret = video_enqueue(video_dev, vbuf); + zassert_ok(ret); + } + + zassert_equal(valid, CONFIG_TEST_FRAMES_VALID, + "there should be at least %u valid frames out of %u", + CONFIG_TEST_FRAMES_VALID, CONFIG_TEST_FRAMES_TOTAL); +} + +ZTEST_SUITE(test_pattern, NULL, test_pattern_setup, NULL, 
test_pattern_after, NULL); diff --git a/tests/drivers/video/test_pattern/testcase.yaml b/tests/drivers/video/test_pattern/testcase.yaml new file mode 100644 index 0000000000000..37f69f9c2a624 --- /dev/null +++ b/tests/drivers/video/test_pattern/testcase.yaml @@ -0,0 +1,23 @@ +# Copyright The Zephyr Project Contributors +# SPDX-License-Identifier: Apache-2.0 + +common: + tags: + - drivers + - video + +tests: + + drivers.video.test_pattern.sw_generator: + platform_allow: + - native_sim/native/64 + integration_platforms: + - native_sim/native/64 + extra_args: + - SNIPPET="video-sw-generator" + + drivers.video.test_pattern.mimxrt1170_evk: + platform_allow: + - mimxrt1170_evk/mimxrt1176/cm7 + extra_args: + - SHIELD="nxp_btb44_ov5640;rk055hdmipi4ma0"