diff --git a/boards/st/stm32n6570_dk/stm32n6570_dk_common.dtsi b/boards/st/stm32n6570_dk/stm32n6570_dk_common.dtsi index 4de5629036b83..6269e4ede427f 100644 --- a/boards/st/stm32n6570_dk/stm32n6570_dk_common.dtsi +++ b/boards/st/stm32n6570_dk/stm32n6570_dk_common.dtsi @@ -10,6 +10,7 @@ #include #include #include +#include #include #include "arduino_r3_connector.dtsi" @@ -21,6 +22,7 @@ zephyr,canbus = &fdcan1; zephyr,display = <dc; zephyr,touch = >911; + zephyr,videoenc = &venc; spi-flash0 = &mx66uw1g45g; zephyr,flash-controller = &mx66uw1g45g; zephyr,flash = &mx66uw1g45g; @@ -42,7 +44,7 @@ compatible = "zephyr,memory-region"; reg = <0x90000000 DT_SIZE_M(32)>; zephyr,memory-region = "PSRAM"; - zephyr,memory-attr = <( DT_MEM_ARM(ATTR_MPU_RAM) )>; + zephyr,memory-attr = <( DT_MEM_ARM(ATTR_MPU_RAM_NOCACHE) | DT_MEM_SW_ALLOC_NON_CACHE )>; }; leds: leds { @@ -376,7 +378,7 @@ zephyr_udc0: &usbotg_hs1 { }; &mac { - status = "okay"; + status = "disabled"; pinctrl-0 = <ð1_rgmii_gtx_clk_pf0 ð1_rgmii_clk125_pf2 ð1_rgmii_rx_clk_pf7 @@ -396,7 +398,7 @@ zephyr_udc0: &usbotg_hs1 { }; &mdio { - status = "okay"; + status = "disabled"; pinctrl-0 = <ð1_mdio_pd12 ð1_mdc_pd1>; pinctrl-names = "default"; @@ -469,3 +471,7 @@ csi_interface: &dcmipp { }; }; }; + +&venc { + status = "okay"; +}; diff --git a/drivers/video/CMakeLists.txt b/drivers/video/CMakeLists.txt index 083df586b0033..b35c53db736aa 100644 --- a/drivers/video/CMakeLists.txt +++ b/drivers/video/CMakeLists.txt @@ -15,6 +15,7 @@ zephyr_library_sources_ifdef(CONFIG_VIDEO_OV7725 ov7725.c) zephyr_library_sources_ifdef(CONFIG_VIDEO_OV2640 ov2640.c) zephyr_library_sources_ifdef(CONFIG_VIDEO_GC2145 gc2145.c) zephyr_library_sources_ifdef(CONFIG_VIDEO_STM32_DCMI video_stm32_dcmi.c) +zephyr_library_sources_ifdef(CONFIG_VIDEO_STM32_VENC video_stm32_venc.c) zephyr_library_sources_ifdef(CONFIG_VIDEO_OV5640 ov5640.c) zephyr_library_sources_ifdef(CONFIG_VIDEO_OV7670 ov7670.c) zephyr_library_sources_ifdef(CONFIG_VIDEO_OV9655 ov9655.c) diff 
--git a/drivers/video/Kconfig b/drivers/video/Kconfig index 924e2156273c4..5e7b66e35831e 100644 --- a/drivers/video/Kconfig +++ b/drivers/video/Kconfig @@ -58,6 +58,20 @@ config VIDEO_I2C_RETRY_NUM The default is to not retry. Board configuration files or user project can then use the number of retries that matches their situation. +config VIDEO_BUFFER_USE_MEM_ATTR_HEAP + bool "Use mem attr api for video buffer" + default n + +config VIDEO_BUFFER_MEM_SW_ATTRIBUTE + int "Mem SW attribute for video buffer" + depends on VIDEO_BUFFER_USE_MEM_ATTR_HEAP + default 1 + help + Mem SW attribute for video buffer: + 1: ATTR_SW_ALLOC_CACHE + 2: ATTR_SW_ALLOC_NON_CACHE + 4: ATTR_SW_ALLOC_DMA + source "drivers/video/Kconfig.esp32_dvp" source "drivers/video/Kconfig.mcux_csi" @@ -76,6 +90,8 @@ source "drivers/video/Kconfig.ov2640" source "drivers/video/Kconfig.stm32_dcmi" +source "drivers/video/Kconfig.stm32_venc" + source "drivers/video/Kconfig.ov5640" source "drivers/video/Kconfig.ov7670" diff --git a/drivers/video/Kconfig.stm32_venc b/drivers/video/Kconfig.stm32_venc new file mode 100644 index 0000000000000..42f98b7d58686 --- /dev/null +++ b/drivers/video/Kconfig.stm32_venc @@ -0,0 +1,14 @@ +# STM32 VENC driver configuration options + +# Copyright (c) 2025 Hugues Fruchet +# SPDX-License-Identifier: Apache-2.0 + +config VIDEO_STM32_VENC + bool "STM32 video encoder (VENC) driver" + default y + depends on DT_HAS_ST_STM32_VENC_ENABLED + select HAS_STM32LIB + select USE_STM32_LL_VENC + select USE_STM32_HAL_RIF if SOC_SERIES_STM32N6X + help + Enable driver for STM32 video encoder peripheral. 
diff --git a/drivers/video/video_common.c b/drivers/video/video_common.c index 37ed899b8029a..1b4a891a14b01 100644 --- a/drivers/video/video_common.c +++ b/drivers/video/video_common.c @@ -26,6 +26,14 @@ LOG_MODULE_REGISTER(video_common, CONFIG_VIDEO_LOG_LEVEL); #define VIDEO_COMMON_HEAP_ALLOC(align, size, timeout) \ shared_multi_heap_aligned_alloc(CONFIG_VIDEO_BUFFER_SMH_ATTRIBUTE, align, size) #define VIDEO_COMMON_FREE(block) shared_multi_heap_free(block) +#elif defined(CONFIG_VIDEO_BUFFER_USE_MEM_ATTR_HEAP) +#include +#include + +#define VIDEO_COMMON_HEAP_ALLOC(align, size, timeout) \ + mem_attr_heap_aligned_alloc((CONFIG_VIDEO_BUFFER_MEM_SW_ATTRIBUTE << \ + DT_MEM_SW_ATTR_SHIFT), align, size) +#define VIDEO_COMMON_FREE(block) mem_attr_heap_free(block) #else K_HEAP_DEFINE(video_buffer_pool, CONFIG_VIDEO_BUFFER_POOL_SZ_MAX*CONFIG_VIDEO_BUFFER_POOL_NUM_MAX); #define VIDEO_COMMON_HEAP_ALLOC(align, size, timeout) \ @@ -47,6 +55,9 @@ struct video_buffer *video_buffer_aligned_alloc(size_t size, size_t align, k_tim struct mem_block *block; int i; +#if defined(CONFIG_VIDEO_BUFFER_USE_MEM_ATTR_HEAP) + mem_attr_heap_pool_init(); +#endif /* find available video buffer */ for (i = 0; i < ARRAY_SIZE(video_buf); i++) { if (video_buf[i].buffer == NULL) { diff --git a/drivers/video/video_stm32_dcmipp.c b/drivers/video/video_stm32_dcmipp.c index 13ae21a601045..80dcbdf42f66f 100644 --- a/drivers/video/video_stm32_dcmipp.c +++ b/drivers/video/video_stm32_dcmipp.c @@ -205,17 +205,25 @@ void HAL_DCMIPP_PIPE_VsyncEventCallback(DCMIPP_HandleTypeDef *hdcmipp, uint32_t return; } - /* - * TODO - we only support 1 buffer formats for the time being, setting of - * MEMORY_ADDRESS_1 and MEMORY_ADDRESS_2 required depending on the pixelformat - * for Pipe1 - */ ret = HAL_DCMIPP_PIPE_SetMemoryAddress(&dcmipp->hdcmipp, Pipe, DCMIPP_MEMORY_ADDRESS_0, (uint32_t)pipe->next->buffer); if (ret != HAL_OK) { LOG_ERR("Failed to update memory address"); return; } + + if (pipe->fmt.pixelformat == 
VIDEO_PIX_FMT_NV12) { + uint32_t addr = (uint32_t)pipe->next->buffer + + pipe->fmt.width * pipe->fmt.height; + + ret = HAL_DCMIPP_PIPE_SetMemoryAddress(&dcmipp->hdcmipp, Pipe, + DCMIPP_MEMORY_ADDRESS_1, + addr); + if (ret != HAL_OK) { + LOG_ERR("Failed to update second memory address"); + return; + } + } } #if defined(STM32_DCMIPP_HAS_CSI) @@ -432,6 +440,7 @@ static const struct stm32_dcmipp_mapping { PIXEL_PIPE_FMT(RGB565, RGB565_1, 0, (BIT(1) | BIT(2))), PIXEL_PIPE_FMT(YUYV, YUV422_1, 0, (BIT(1) | BIT(2))), PIXEL_PIPE_FMT(YVYU, YUV422_1, 1, (BIT(1) | BIT(2))), + PIXEL_PIPE_FMT(NV12, YUV420_2, 0, (BIT(1) | BIT(2))), PIXEL_PIPE_FMT(GREY, MONO_Y8_G8_1, 0, (BIT(1) | BIT(2))), PIXEL_PIPE_FMT(RGB24, RGB888_YUV444_1, 1, (BIT(1) | BIT(2))), PIXEL_PIPE_FMT(BGR24, RGB888_YUV444_1, 0, (BIT(1) | BIT(2))), @@ -460,7 +469,7 @@ static const struct stm32_dcmipp_mapping { ((fmt) == VIDEO_PIX_FMT_GREY || \ (fmt) == VIDEO_PIX_FMT_YUYV || (fmt) == VIDEO_PIX_FMT_YVYU || \ (fmt) == VIDEO_PIX_FMT_VYUY || (fmt) == VIDEO_PIX_FMT_UYVY || \ - (fmt) == VIDEO_PIX_FMT_XYUV32) ? VIDEO_COLORSPACE_YUV : \ + (fmt) == VIDEO_PIX_FMT_XYUV32 || (fmt) == VIDEO_PIX_FMT_NV12) ? 
VIDEO_COLORSPACE_YUV : \ \ VIDEO_COLORSPACE_RAW) @@ -481,6 +490,9 @@ static inline void stm32_dcmipp_compute_fmt_pitch(uint32_t pipe_id, struct video { fmt->pitch = fmt->width * video_bits_per_pixel(fmt->pixelformat) / BITS_PER_BYTE; #if defined(STM32_DCMIPP_HAS_PIXEL_PIPES) + if (fmt->pixelformat == VIDEO_PIX_FMT_NV12) + fmt->pitch = fmt->width; + if (pipe_id == DCMIPP_PIPE1 || pipe_id == DCMIPP_PIPE2) { /* On Pipe1 and Pipe2, the pitch must be multiple of 16 bytes */ fmt->pitch = ROUND_UP(fmt->pitch, 16); @@ -1044,9 +1056,21 @@ static int stm32_dcmipp_stream_enable(const struct device *dev) } #if defined(STM32_DCMIPP_HAS_CSI) else if (config->bus_type == VIDEO_BUS_TYPE_CSI2_DPHY) { - ret = HAL_DCMIPP_CSI_PIPE_Start(&dcmipp->hdcmipp, pipe->id, DCMIPP_VIRTUAL_CHANNEL0, - (uint32_t)pipe->next->buffer, - DCMIPP_MODE_CONTINUOUS); + uint32_t addr = (uint32_t)pipe->next->buffer; + + if (pipe->fmt.pixelformat == VIDEO_PIX_FMT_NV12) { + DCMIPP_SemiPlanarDstAddressTypeDef spaddr; + + spaddr.YAddress = addr; + spaddr.UVAddress = addr + pipe->fmt.width * pipe->fmt.height; + ret = HAL_DCMIPP_CSI_PIPE_SemiPlanarStart(&dcmipp->hdcmipp, + pipe->id, DCMIPP_VIRTUAL_CHANNEL0, + &spaddr, DCMIPP_MODE_CONTINUOUS); + } else { + ret = HAL_DCMIPP_CSI_PIPE_Start(&dcmipp->hdcmipp, pipe->id, + DCMIPP_VIRTUAL_CHANNEL0, + addr, DCMIPP_MODE_CONTINUOUS); + } } #endif else { @@ -1193,6 +1217,20 @@ static int stm32_dcmipp_enqueue(const struct device *dev, struct video_buffer *v LOG_ERR("Failed to update memory address"); return -EIO; } + + if (pipe->fmt.pixelformat == VIDEO_PIX_FMT_NV12) { + uint32_t addr = (uint32_t)pipe->next->buffer + + pipe->fmt.width * pipe->fmt.height; + + ret = HAL_DCMIPP_PIPE_SetMemoryAddress(&dcmipp->hdcmipp, pipe->id, + DCMIPP_MEMORY_ADDRESS_1, + addr); + if (ret != HAL_OK) { + LOG_ERR("Failed to update second memory address"); + return -EIO; + } + } + if (pipe->id == DCMIPP_PIPE0) { SET_BIT(dcmipp->hdcmipp.Instance->P0FCTCR, DCMIPP_P0FCTCR_CPTREQ); } diff --git 
a/drivers/video/video_stm32_venc.c b/drivers/video/video_stm32_venc.c new file mode 100644 index 0000000000000..61cd32594d756 --- /dev/null +++ b/drivers/video/video_stm32_venc.c @@ -0,0 +1,1026 @@ +/* + * Copyright (c) 2025 Hugues Fruchet + * + * SPDX-License-Identifier: Apache-2.0 + */ + +#define DT_DRV_COMPAT st_stm32_venc + +#include +#include + +#include +#include +#include +#include +#include +#include + +#include "ewl.h" +#include "h264encapi.h" +#include "stm32n6xx_ll_venc.h" +#include "reg_offset_v7.h" + +#include +#include +#include + +LOG_MODULE_REGISTER(video_stm32_venc, CONFIG_VIDEO_LOG_LEVEL); + +#define EWL_HEAP_ALIGNED_ALLOC(size) \ + mem_attr_heap_aligned_alloc((ATTR_SW_ALLOC_NON_CACHE << \ + DT_MEM_SW_ATTR_SHIFT), ALIGNMENT_INCR, size) +#define EWL_HEAP_ALIGNED_FREE(block) mem_attr_heap_free(block) + +#define VENC_RATECTRL_FPS 30 +#define VENC_RATECTRL_QP 25 + +#define TRACE_EWL + +#define EWL_USE_POLLING_SYNC + +#define EWL_TIMEOUT 100UL + +#define ALIGNMENT_INCR 8UL + +#define MEM_CHUNKS 32 + +#ifdef TRACE_EWL +u32 mem_counter = 0; +#endif /* TRACE_EWL */ + +typedef struct +{ + u32 clientType; + /* tables required for buffer alignment */ + u32 *chunks[MEM_CHUNKS]; + u32 *alignedChunks[MEM_CHUNKS]; + u32 totalChunks; + + struct k_sem complete; + i32 status; + i32 irq_status; + i32 irq_cnt; +} VENC_EWL_TypeDef; + +#define ALIGNED_SIZE(x) ((((x) + ALIGNMENT_INCR - 1UL) / ALIGNMENT_INCR) * ALIGNMENT_INCR) + +static VENC_EWL_TypeDef ewl_instance; + +typedef void (*irq_config_func_t)(const struct device *dev); + +struct video_stm32_venc_data { + const struct device *dev; + struct video_format fmt; + struct k_fifo fifo_input; + struct k_fifo fifo_output_in; + struct k_fifo fifo_output_out; + uint32_t pixelformat; + uint32_t height; + uint32_t width; + uint32_t pitch; + struct video_buffer *vbuf; + H264EncInst encoder; + uint32_t frame_nb; + bool resync; +}; + +struct video_stm32_venc_config { + struct stm32_pclken pclken; + irq_config_func_t 
irq_config; +}; + +static int encoder_prepare(struct video_stm32_venc_data *data); +static int encode_frame(struct video_stm32_venc_data *data); +static int encoder_end(struct video_stm32_venc_data *data); +static int encoder_start(struct video_stm32_venc_data *data); + +static inline H264EncPictureType to_h264pixfmt(uint32_t pixelformat) +{ + switch (pixelformat) { + case VIDEO_PIX_FMT_NV12: + return H264ENC_YUV420_SEMIPLANAR; + case VIDEO_PIX_FMT_RGB565: + return H264ENC_RGB565; + default: + return 0; + } +} + +u32 EWLReadAsicID(void) +{ + LOG_DBG("%s>\n", __func__); + return LL_VENC_ReadRegister(0UL); + LOG_DBG("%s<\n", __func__); +} + +EWLHwConfig_t EWLReadAsicConfig(void) +{ + EWLHwConfig_t cfg_info; + u32 cfgval; + + LOG_DBG("%s>\n", __func__); + + /* read first part of the configuration stored in register 63 */ + cfgval = LL_VENC_ReadRegister(63UL); + + cfg_info.maxEncodedWidth = cfgval & ((1U << 12U) - 1U); + cfg_info.h264Enabled = (cfgval >> 27U) & 1U; + cfg_info.vp8Enabled = (cfgval >> 26U) & 1U; + cfg_info.jpegEnabled = (cfgval >> 25U) & 1U; + cfg_info.vsEnabled = (cfgval >> 24U) & 1U; + cfg_info.rgbEnabled = (cfgval >> 28U) & 1U; + cfg_info.searchAreaSmall = (cfgval >> 29U) & 1U; + cfg_info.scalingEnabled = (cfgval >> 30U) & 1U; + + cfg_info.busType = (cfgval >> 20U) & 15U; + cfg_info.synthesisLanguage = (cfgval >> 16U) & 15U; + cfg_info.busWidth = (cfgval >> 12U) & 15U; + + /* read second part of the configuration stored in register 296 */ + cfgval = LL_VENC_ReadRegister(296UL); + + cfg_info.addr64Support = (cfgval >> 31U) & 1U; + cfg_info.dnfSupport = (cfgval >> 30U) & 1U; + cfg_info.rfcSupport = (cfgval >> 28U) & 3U; + cfg_info.enhanceSupport = (cfgval >> 27U) & 1U; + cfg_info.instantSupport = (cfgval >> 26U) & 1U; + cfg_info.svctSupport = (cfgval >> 25U) & 1U; + cfg_info.inAxiIdSupport = (cfgval >> 24U) & 1U; + cfg_info.inLoopbackSupport = (cfgval >> 23U) & 1U; + cfg_info.irqEnhanceSupport = (cfgval >> 22U) & 1U; + + LOG_DBG("EWLReadAsicConfig:\n"); 
+ LOG_DBG(" maxEncodedWidth = %d\n", cfg_info.maxEncodedWidth); + LOG_DBG(" h264Enabled = %d\n", cfg_info.h264Enabled); + LOG_DBG( " jpegEnabled = %d\n", cfg_info.jpegEnabled); + LOG_DBG( " vp8Enabled = %d\n", cfg_info.vp8Enabled); + LOG_DBG( " vsEnabled = %d\n", cfg_info.vsEnabled); + LOG_DBG( " rgbEnabled = %d\n", cfg_info.rgbEnabled); + LOG_DBG( " searchAreaSmall = %d\n", cfg_info.searchAreaSmall); + LOG_DBG( " scalingEnabled = %d\n", cfg_info.scalingEnabled); + LOG_DBG( " address64bits = %d\n", cfg_info.addr64Support); + LOG_DBG( " denoiseEnabled = %d\n", cfg_info.dnfSupport); + LOG_DBG( " rfcEnabled = %d\n", cfg_info.rfcSupport); + LOG_DBG( " instanctEnabled = %d\n", cfg_info.instantSupport); + LOG_DBG( " busType = %d\n", cfg_info.busType); + LOG_DBG( " synthesisLanguage = %d\n", cfg_info.synthesisLanguage); + LOG_DBG( " busWidth = %d\n", cfg_info.busWidth * 32); + + LOG_DBG("%s<\n", __func__); + return cfg_info; +} + +const void *EWLInit(EWLInitParam_t *param) +{ + LOG_DBG("%s>\n", __func__); + + /* Check for NULL pointer */ + __ASSERT_NO_MSG(param != NULL); + /* only H264 (0) and JPEG (1) are supported */ + __ASSERT_NO_MSG(param->clientType <= 1U); + + /* sync */ + k_sem_init(&ewl_instance.complete, 0, 1); + k_sem_reset(&ewl_instance.complete); + + /* set client type */ + ewl_instance.clientType = param->clientType; + + mem_attr_heap_pool_init(); + + ewl_instance.irq_cnt = 0; + + LOG_DBG("%s<\n", __func__); + return (void *)&ewl_instance; +} + +i32 EWLRelease(const void *inst) +{ + LOG_DBG("%s>\n", __func__); + + __ASSERT_NO_MSG(inst != NULL); + + LOG_DBG("%s<\n", __func__); + return EWL_OK; +} + +/* #define _LL_VENC_WriteRegister(swreg,val) {if (val) LOG_DBG("'swreg%d' : 0x%08x,\n",swreg,val);LL_VENC_WriteRegister(swreg,val);} */ +#define _LL_VENC_WriteRegister(swreg,val) LL_VENC_WriteRegister(swreg,val) + +void EWLWriteReg(const void *inst, u32 offset, u32 val) +{ + /*LOG_DBG("%s> %d at offset 0x%02X --> %08X\n", + __func__, offset / 4, offset, val);*/ + + 
/* offset is defined in the EWL API in bytes. convert to number of registers by shifting */ + _LL_VENC_WriteRegister((offset >> 2), val); +} + +void EWLEnableHW(const void *inst, u32 offset, u32 val) +{ + LOG_DBG("%s>\n", __func__); + /* offset is defined in the EWL API in bytes. convert to number of registers by shifting */ + _LL_VENC_WriteRegister((offset >> 2), val); + LOG_DBG("%s<\n", __func__); +} + +void EWLDisableHW(const void *inst, u32 offset, u32 val) +{ + LOG_DBG("%s>\n", __func__); + /* offset is defined in the EWL API in bytes. convert to number of registers by shifting */ + _LL_VENC_WriteRegister((offset >> 2), val); + LOG_DBG("%s<\n", __func__); +} + +u32 EWLReadReg(const void *inst, u32 offset) +{ + u32 ret; + /* offset is defined in the EWL API in bytes. convert to number of registers by shifting */ + ret = LL_VENC_ReadRegister((offset >> 2)); + + /*LOG_DBG("%s< %d at offset 0x%02X --> %08X\n", + __func__, offset / 4, offset, ret);*/ + return ret; +} + +i32 EWLMallocRefFrm(const void *instance, u32 size, EWLLinearMem_t *info) +{ + LOG_DBG("%s>\n", __func__); + return EWLMallocLinear(instance, size, info); + LOG_DBG("%s<\n", __func__); +} + +void EWLFreeRefFrm(const void *instance, EWLLinearMem_t *info) +{ + LOG_DBG("%s>\n", __func__); + EWLFreeLinear(instance, info); + LOG_DBG("%s<\n", __func__); +} + +i32 EWLMallocLinear(const void *instance, u32 size, EWLLinearMem_t *info) +{ + LOG_DBG("%s> size=%d\n", __func__, size); + + if (!instance) + return EWL_ERROR; + + VENC_EWL_TypeDef *inst = (VENC_EWL_TypeDef *) instance; + /* make size 8-byte aligned */ + u32 size_aligned = ALIGNED_SIZE(size); + info->size = size_aligned; + + /* allocate using malloc and check return */ + inst->chunks[inst->totalChunks] = (u32 *)EWL_HEAP_ALIGNED_ALLOC(size_aligned); + if (!inst->chunks[inst->totalChunks]) { + LOG_DBG("EWLMallocLinear unable to allocate %8d bytes\n", size_aligned); + return EWL_ERROR; + } + + /* align given malloc buffer */ + 
inst->alignedChunks[inst->totalChunks] = (u32 *) ALIGNED_SIZE((u32)inst->chunks[inst->totalChunks]); + /* put the aligned pointer in the return structure */ + info->virtualAddress = inst->alignedChunks[inst->totalChunks++]; + + if (!info->virtualAddress) { + LOG_DBG("EWLMallocLinear unable to get chunk for %8d bytes\n", size_aligned); + return EWL_ERROR; + } + + /* bus address is the same as virtual address because no MMU */ + info->busAddress = (ptr_t)info->virtualAddress; + + /* check buffer alignment */ + __ASSERT_NO_MSG((info->busAddress & 0b111UL) == 0U); + +#ifdef TRACE_EWL + mem_counter += size; + LOG_DBG("EWLMallocLinear allocated %8d bytes --> %p / 0x%x. Total : %d\n", size_aligned, info->virtualAddress, + info->busAddress, mem_counter); +#endif /* TRACE_EWL */ + LOG_DBG("%s<\n", __func__); + return EWL_OK; +} + +void EWLFreeLinear(const void *instance, EWLLinearMem_t *info) +{ + VENC_EWL_TypeDef *inst = (VENC_EWL_TypeDef *) instance; + LOG_DBG("%s>\n", __func__); + + /* find the pointer corresponding to the aligned buffer */ + for (u32 i = 0; i < inst->totalChunks; i++) { + if (inst->alignedChunks[i] == info->virtualAddress) { + EWL_HEAP_ALIGNED_FREE(inst->chunks[i]); + break; + } + } + info->virtualAddress = NULL; + info->busAddress = 0; + info->size = 0; + + LOG_DBG("%s<\n", __func__); +} + +i32 EWLReserveHw(const void *inst) +{ + LOG_DBG("%s>\n", __func__); + __ASSERT_NO_MSG(inst != NULL); + LOG_DBG("%s<\n", __func__); + return EWL_OK; +} + +void EWLReleaseHw(const void *inst) +{ + LOG_DBG("%s>\n", __func__); + __ASSERT_NO_MSG(inst != NULL); + LOG_DBG("%s<\n", __func__); +} + +void *EWLmalloc(u32 n) +{ + void * p = NULL; + + LOG_DBG("%s> n=%d\n", __func__, n); + + p = EWLcalloc(n, 1); + +#ifdef TRACE_EWL + mem_counter += n; + LOG_DBG("EWLmalloc %8d bytes --> %p, total : %d\n", n, p, mem_counter); +#endif /* TRACE_EWL */ + + LOG_DBG("%s<\n", __func__); + return p; +} + +void EWLfree(void *p) +{ + EWL_HEAP_ALIGNED_FREE(p); +} + +void *EWLcalloc(u32 n, 
u32 s) +{ + void *p; + + LOG_DBG("%s> n=%d s=%d\n", __func__, n, s); + + p = EWL_HEAP_ALIGNED_ALLOC(n * s); + if (!p) { + LOG_ERR("alloc failed for size=%d\n", n * s); + return NULL; + } + + memset(p, 0, n * s); + + LOG_DBG("%s< %p\n", __func__, p); + return p; +} + +void *EWLmemcpy(void *d, const void *s, u32 n) +{ + return memcpy(d, s, (size_t)n); +} + +void *EWLmemset(void *d, i32 c, u32 n) +{ + return memset(d, c, (size_t)n); +} + +int EWLmemcmp(const void *s1, const void *s2, u32 n) +{ + return memcmp((const uint8_t *) s1, (const uint8_t *) s2, (size_t)n); +} + +i32 EWLWaitHwRdy(const void *inst, u32 *slicesReady) +{ + LOG_DBG("%s>\n", __func__); + + /* Check invalid parameters */ + if (!inst) + return EWL_HW_WAIT_ERROR; + +#ifdef EWL_USE_POLLING_SYNC + u32 ret = EWL_HW_WAIT_TIMEOUT; + volatile u32 irq_stats; + u32 prevSlicesReady = 0; + k_timepoint_t timeout = sys_timepoint_calc(K_MSEC(EWL_TIMEOUT)); + uint32_t start = sys_clock_tick_get_32(); + + /* check how to clear IRQ flags for VENC */ + u32 clrByWrite1 = (EWLReadReg(inst, BASE_HWFuse2) & HWCFGIrqClearSupport); + do { + irq_stats = VENC_REG(1UL); + /* get the number of completed slices from ASIC registers. 
*/ + if (slicesReady) + *slicesReady = (VENC_REG(21UL) >> 16) & 0xFFUL; + + LOG_DBG("EWLWaitHw: IRQ stat = %08x\n", irq_stats); + u32 hw_handshake_status = READ_BIT(VENC_REG(BASE_HEncInstantInput >> 2U), (1U << 29U)); + + /* ignore the irq status of input line buffer in hw handshake mode */ + if ((irq_stats == ASIC_STATUS_LINE_BUFFER_DONE) && (hw_handshake_status != 0UL)) { + VENC_REG(1U) = (1<<9); + continue; + } + + if ((irq_stats & ASIC_STATUS_ALL) != 0UL) { + /* clear IRQ and slice ready status */ + u32 clr_stats; + irq_stats &= (~(ASIC_STATUS_SLICE_READY | ASIC_IRQ_LINE)); + + if (clrByWrite1 != 0UL) + clr_stats = ASIC_STATUS_SLICE_READY | ASIC_IRQ_LINE; + else + clr_stats = irq_stats; + + EWLWriteReg(inst, 0x04, clr_stats); + ret = EWL_OK; + break; + } + + if (slicesReady) + if (*slicesReady > prevSlicesReady) { + ret = EWL_OK; + break; + } + + } while (!sys_timepoint_expired(timeout)); + + LOG_DBG("EWLWaitHw: encoding = %d ms\n", k_ticks_to_ms_ceil32(sys_clock_tick_get_32() - start)); + + if (slicesReady != NULL) + LOG_DBG("EWLWaitHw: slicesReady = %d\n", *slicesReady); + + LOG_DBG("%s<\n", __func__); + return ret; +#else + uint32_t start = sys_clock_tick_get_32(); + + VENC_EWL_TypeDef *enc = (VENC_EWL_TypeDef *) inst; + + enc->status = EWL_OK; + + if (k_sem_take(&enc->complete, K_MSEC(EWL_TIMEOUT))) { + uint32_t irq_status = VENC_REG(1U); + + printf("Interrupt timeout VENC status=0x%x\n", irq_status); + LOG_DBG("%s<\n", __func__); + return EWL_OK; + } + + LOG_DBG("EWLWaitHw: encoding = %ld ms\n", k_ticks_to_ms_ceil32(sys_clock_tick_get_32() - start)); + + /* Get the number of completed slices from ASIC registers. 
*/ + if (slicesReady) { + *slicesReady = (VENC_REG(21UL) >> 16) & 0xFFUL; + LOG_DBG("EWLWaitHw: slicesReady = %d\n", *slicesReady); + } + + LOG_DBG("%s<\n", __func__); + return enc->status; +#endif +} + +static int stm32_venc_enable_clock(const struct device *dev) +{ + const struct device *venc_clock = DEVICE_DT_GET(STM32_CLOCK_CONTROL_NODE); + + LOG_DBG("%s>\n", __func__); + + if (!device_is_ready(venc_clock)) { + LOG_ERR("clock control device not ready"); + return -ENODEV; + } + + LOG_DBG("%s<\n", __func__); + return 0; +} + +static int video_stm32_venc_set_fmt(const struct device *dev, + struct video_format *fmt) +{ + struct video_stm32_venc_data *data = dev->data; + unsigned int bpp = video_bits_per_pixel(fmt->pixelformat) / BITS_PER_BYTE; + + LOG_DBG("%s>\n", __func__); + + if (bpp == 0){ + LOG_ERR("invalid 0 bpp"); + return -EINVAL; + } + + data->pixelformat = fmt->pixelformat; + data->pitch = fmt->pitch; + data->height = fmt->height; + data->width = fmt->width; + + LOG_DBG("%s<\n", __func__); + return 0; +} + +static int video_stm32_venc_get_fmt(const struct device *dev, + struct video_format *fmt) +{ + struct video_stm32_venc_data *data = dev->data; + + LOG_DBG("%s>\n", __func__); + + if (fmt == NULL) { + LOG_ERR("invalid null fmt"); + return -EINVAL; + } + + fmt->pixelformat = data->pixelformat; + fmt->height = data->height; + fmt->width = data->width; + fmt->pitch = data->pitch; + + LOG_DBG("%s<\n", __func__); + return 0; +} + +int encoder_prepare(struct video_stm32_venc_data *data) +{ + H264EncRet ret; + H264EncConfig cfg = {0}; + H264EncPreProcessingCfg preproc_cfg = {0}; + H264EncRateCtrl ratectrl_cfg = {0}; + H264EncCodingCtrl codingctrl_cfg = {0}; + + LOG_DBG("%s>\n", __func__); + + data->frame_nb = 0; + /* Step 1: Initialize an encoder instance */ + /* set config to 1 ref frame */ + cfg.refFrameAmount = 1; + /* 30 fps frame rate */ + cfg.frameRateDenom = 1; + cfg.frameRateNum = 30; + /* Image resolution */ + cfg.width = data->width; + cfg.height = 
data->height; + /* Stream type */ + cfg.streamType = H264ENC_BYTE_STREAM; + + /* encoding level*/ + /* See API guide for level depending on resolution and framerate */ + cfg.level = H264ENC_LEVEL_4; + cfg.svctLevel = 0; + cfg.viewMode = H264ENC_BASE_VIEW_SINGLE_BUFFER; + + ret = H264EncInit(&cfg, &data->encoder); + if (ret != H264ENC_OK) + { + LOG_ERR("H264EncInit error=%d\n", ret); + return -1; + } + + /* set format conversion for preprocessing */ + ret = H264EncGetPreProcessing(data->encoder, &preproc_cfg); + if (ret != H264ENC_OK) { + LOG_ERR("H264EncGetPreProcessing error=%d\n", ret); + return -1; + } + preproc_cfg.inputType = to_h264pixfmt(data->pixelformat); + ret = H264EncSetPreProcessing(data->encoder, &preproc_cfg); + if (ret != H264ENC_OK) { + LOG_ERR("H264EncSetPreProcessing error=%d\n", ret); + return -1; + } + + /* setup coding ctrl */ + ret = H264EncGetCodingCtrl(data->encoder, &codingctrl_cfg); + if (ret != H264ENC_OK) { + LOG_ERR("H264EncGetCodingCtrl error=%d\n", ret); + return -1; + } +#ifdef VENC_HW_HANDSHAKE + codingctrl_cfg.inputLineBufEn = 1; + codingctrl_cfg.inputLineBufLoopBackEn = 1; + codingctrl_cfg.inputLineBufDepth = 1; + codingctrl_cfg.inputLineBufHwModeEn = 1; +#else + codingctrl_cfg.enableCabac = 0; + codingctrl_cfg.quarterPixelMv = 0; + codingctrl_cfg.inputLineBufEn = 0; + codingctrl_cfg.inputLineBufLoopBackEn = 0; + codingctrl_cfg.inputLineBufDepth = 0; + codingctrl_cfg.inputLineBufHwModeEn = 0; +#endif + ret = H264EncSetCodingCtrl(data->encoder, &codingctrl_cfg); + if (ret != H264ENC_OK) { + LOG_ERR("H264EncSetCodingCtrl error=%d\n", ret); + return -1; + } + + /* set bit rate configuration */ + ret = H264EncGetRateCtrl(data->encoder, &ratectrl_cfg); + if (ret != H264ENC_OK) { + LOG_ERR("H264EncGetRateCtrl error=%d\n", ret); + return -1; + } +#ifdef VENC_RATECTRL_VBR + /* Variable bitrate */ + ratectrl_cfg.pictureRc = 1; + ratectrl_cfg.mbRc = 1; + ratectrl_cfg.pictureSkip = 0; + ratectrl_cfg.hrd = 0; + ratectrl_cfg.qpHdr = 
VENC_RATECTRL_QP; + ratectrl_cfg.qpMin = 10; + ratectrl_cfg.qpMax = 51; + ratectrl_cfg.gopLen = VENC_RATECTRL_FPS; + ratectrl_cfg.bitPerSecond = ((data->width * data->height * 12) * VENC_RATECTRL_FPS) / 30; + ratectrl_cfg.intraQpDelta = 0; +#else + /* Constant bitrate */ + ratectrl_cfg.pictureRc = 0; + ratectrl_cfg.mbRc = 0; + ratectrl_cfg.pictureSkip = 0; + ratectrl_cfg.hrd = 0; + ratectrl_cfg.qpHdr = VENC_RATECTRL_QP; + ratectrl_cfg.qpMin = VENC_RATECTRL_QP; + ratectrl_cfg.qpMax = VENC_RATECTRL_QP; +#endif + ret = H264EncSetRateCtrl(data->encoder, &ratectrl_cfg); + if (ret != H264ENC_OK) { + LOG_ERR("H264EncSetRateCtrl error=%d\n", ret); + return -1; + } + + LOG_DBG("%s<\n", __func__); + return 0; +} + +static int encoder_start(struct video_stm32_venc_data *data) +{ + H264EncRet ret; + struct video_buffer *output; + H264EncIn encIn = {0}; + H264EncOut encOut = {0}; + + LOG_DBG("%s>\n", __func__); + + output = k_fifo_get(&data->fifo_output_in, K_FOREVER); + + encIn.pOutBuf = (u32 *)output->buffer; + encIn.busOutBuf = (uint32_t) encIn.pOutBuf; + encIn.outBufSize = ALIGNED_SIZE(output->size); + + /* create stream */ + ret = H264EncStrmStart(data->encoder, &encIn, &encOut); + if (ret != H264ENC_OK) { + LOG_ERR("H264EncStrmStart error=%d\n", ret); + return -1; + } + + output->bytesused = encOut.streamSize; + LOG_DBG("SPS/PPS generated, size= %d\n", output->bytesused); + + LOG_DBG("%s> %p\n", __func__, &data->fifo_output_out); + k_fifo_put(&data->fifo_output_in, output); + k_fifo_put(&data->fifo_output_out, output); + + data->resync = true; + + LOG_DBG("%s<\n", __func__); + return 0; +} + +static void encoder_reset(void) +{ + /* VENC HW reset */ + __HAL_RCC_VENC_FORCE_RESET(); + k_sleep(K_MSEC(1)); + __HAL_RCC_VENC_RELEASE_RESET(); + k_sleep(K_MSEC(1)); +} + +static int encode_frame(struct video_stm32_venc_data *data) +{ + int ret = H264ENC_FRAME_READY; + struct video_buffer *input; + struct video_buffer *output; + H264EncIn encIn = {0}; + H264EncOut encOut = {0}; + + 
LOG_DBG("%s>\n", __func__); + + if (!data->encoder) { + ret = encoder_prepare(data); + if (ret) + return -1; + + ret = encoder_start(data); + if (ret) + return -1; + + LOG_DBG("SPS/PPS generated and pushed\n"); + return 0; + } + + input = k_fifo_get(&data->fifo_input, K_NO_WAIT); + if (!input) + return 0; + LOG_DBG("input=%p\n", input); + + output = k_fifo_get(&data->fifo_output_in, K_NO_WAIT); + if (!output) { + return 0; + } + LOG_DBG("output=%p\n", output); + + if (!(data->frame_nb % 30) || data->resync) { + /* if frame is the first or resync needed: set as intra coded */ + encIn.codingType = H264ENC_INTRA_FRAME; + } else { + /* if there was a frame previously, set as predicted */ + encIn.timeIncrement = 1; + encIn.codingType = H264ENC_PREDICTED_FRAME; + } + + encIn.ipf = H264ENC_REFERENCE_AND_REFRESH; + encIn.ltrf = H264ENC_REFERENCE; + + /* set input buffers to structures */ + encIn.busLuma = (ptr_t)input->buffer; + encIn.busChromaU = (ptr_t)encIn.busLuma + data->width * data->height; + + encIn.pOutBuf = (u32 *)output->buffer; + encIn.busOutBuf = (uint32_t)encIn.pOutBuf; + encIn.outBufSize = ALIGNED_SIZE(output->size); + encOut.streamSize = 0; + + encoder_reset(); + + ret = H264EncStrmEncode(data->encoder, &encIn, &encOut, NULL, NULL, NULL); + output->bytesused = encOut.streamSize; + LOG_DBG("output=%p, encOut.streamSize=%d\n", output, encOut.streamSize); + + k_fifo_put(&data->fifo_output_in, output); + k_fifo_put(&data->fifo_output_out, output); + + switch (ret) { + case H264ENC_FRAME_READY: + /* save stream */ + if (!encOut.streamSize) { + /* Nothing encoded */ + data->resync = true; + return -1; + } + output->bytesused = encOut.streamSize; + break; + case H264ENC_FUSE_ERROR: + LOG_ERR("H264EncStrmEncode error=%d\n", ret); + + LOG_ERR("DCMIPP and VENC desync at frame %d, restart the video\n", data->frame_nb); + encoder_end(data); + + ret = encoder_start(data); + if (ret) + return -1; + + break; + default: + LOG_ERR("H264EncStrmEncode error=%d\n", ret); + 
LOG_ERR("error encoding frame %d\n", data->frame_nb); + + encoder_end(data); + + ret = encoder_start(data); + if (ret) + return -1; + + data->resync = true; + + return -1; + break; + } + + data->frame_nb++; + + LOG_DBG("%s<\n", __func__); + return 0; +} + + +static int encoder_end(struct video_stm32_venc_data *data) +{ + H264EncIn encIn = {0}; + H264EncOut encOut = {0}; + + LOG_DBG("%s>\n", __func__); + + H264EncStrmEnd(data->encoder, &encIn, &encOut); + + LOG_DBG("%s<\n", __func__); + return 0; +} + +static int video_stm32_venc_set_stream(const struct device *dev, bool enable, + enum video_buf_type type) +{ + struct video_stm32_venc_data *data = dev->data; + + LOG_DBG("%s>\n", __func__); + + if (!enable) { + /* Stop VENC */ + encoder_end(data); + + LOG_DBG("%s<\n", __func__); + return 0; + } + + LOG_DBG("%s<\n", __func__); + return 0; +} + +static int video_stm32_venc_enqueue(const struct device *dev, + struct video_buffer *vbuf) +{ + struct video_stm32_venc_data *data = dev->data; + int ret = 0; + + LOG_DBG("%s>\n", __func__); + + if (vbuf->type == VIDEO_BUF_TYPE_INPUT) { + k_fifo_put(&data->fifo_input, vbuf); + LOG_DBG("%s put %p in fifo_input\n", __func__, vbuf); + ret = encode_frame(data); + } else { + k_fifo_put(&data->fifo_output_in, vbuf); + LOG_DBG("%s put %p in fifo_output_in\n", __func__, vbuf); + } + + LOG_DBG("%s<\n", __func__); + return ret; +} + +static int video_stm32_venc_dequeue(const struct device *dev, + struct video_buffer **vbuf, + k_timeout_t timeout) +{ + struct video_stm32_venc_data *data = dev->data; + + LOG_DBG("%s>\n", __func__); + + *vbuf = k_fifo_get(&data->fifo_output_out, timeout); + if (*vbuf == NULL) { + return -EAGAIN; + } + + LOG_DBG("%s<\n", __func__); + return 0; +} + +#ifdef EWL_USE_POLLING_SYNC +ISR_DIRECT_DECLARE(stm32_venc_isr) +{ + VENC_EWL_TypeDef *enc = &ewl_instance; + u32 hw_handshake_status = READ_BIT(VENC_REG(BASE_HEncInstantInput >> 2U), (1U << 29U)); + uint32_t irq_status = VENC_REG(1U); + + enc->irq_status = 
irq_status; + enc->irq_cnt++; + + if (!hw_handshake_status && (irq_status & ASIC_STATUS_FUSE)) { + VENC_REG(1U) = ASIC_STATUS_FUSE | ASIC_IRQ_LINE; + /* read back the IRQ status to update its value */ + irq_status = VENC_REG(1U); + } + + if (irq_status != 0U) { + /* status flag is raised, + * clear the ones that the IRQ needs to clear + * and signal to EWLWaitHwRdy */ + VENC_REG(1U) = ASIC_STATUS_SLICE_READY | ASIC_IRQ_LINE; + } + + k_sem_give(&enc->complete); + + return 0; +} +#endif + +static int video_stm32_venc_get_caps(const struct device *dev, + struct video_caps *caps) +{ + int ret = -ENODEV; + + LOG_DBG("%s>\n", __func__); + + /* VENC produces full frames */ + caps->min_line_count = caps->max_line_count = LINE_COUNT_HEIGHT; + + ret = 0; + + LOG_DBG("%s<\n", __func__); + return ret; +} + +static DEVICE_API(video, video_stm32_venc_driver_api) = { + .set_format = video_stm32_venc_set_fmt, + .get_format = video_stm32_venc_get_fmt, + .set_stream = video_stm32_venc_set_stream, + .enqueue = video_stm32_venc_enqueue, + .dequeue = video_stm32_venc_dequeue, + .get_caps = video_stm32_venc_get_caps, +}; + +#ifdef EWL_USE_POLLING_SYNC +static void video_stm32_venc_irq_config_func(const struct device *dev) +{ + IRQ_DIRECT_CONNECT(DT_INST_IRQN(0), DT_INST_IRQ(0, priority), + stm32_venc_isr, 0); + irq_enable(DT_INST_IRQN(0)); +} +#endif + +static struct video_stm32_venc_data video_stm32_venc_data_0 = { +}; + +static const struct video_stm32_venc_config video_stm32_venc_config_0 = { + .pclken = { + .enr = DT_INST_CLOCKS_CELL(0, bits), + .bus = DT_INST_CLOCKS_CELL(0, bus) + }, +#ifdef EWL_USE_POLLING_SYNC + .irq_config = video_stm32_venc_irq_config_func, +#endif +}; + +static void RISAF_Config(void) +{ + LOG_DBG("%s>\n", __func__); + + /* Define and initialize the master configuration structure */ + RIMC_MasterConfig_t RIMC_master = {0}; + + /* Enable the clock for the RIFSC (RIF Security Controller) */ + __HAL_RCC_RIFSC_CLK_ENABLE(); + + RIMC_master.MasterCID = RIF_CID_1; + 
RIMC_master.SecPriv = RIF_ATTRIBUTE_SEC | RIF_ATTRIBUTE_PRIV; + + /* Configure the master attributes for the VENC peripheral */ + HAL_RIF_RIMC_ConfigMasterAttributes(RIF_MASTER_INDEX_VENC, &RIMC_master); + + /* Set the secure and privileged attributes for the VENC peripheral as a slave */ + HAL_RIF_RISC_SetSlaveSecureAttributes(RIF_RISC_PERIPH_INDEX_VENC, + RIF_ATTRIBUTE_SEC | RIF_ATTRIBUTE_PRIV); + + LOG_DBG("%s<\n", __func__); +} + +static int video_stm32_venc_init(const struct device *dev) +{ + const struct video_stm32_venc_config *config = dev->config; + struct video_stm32_venc_data *data = dev->data; + int err; + + LOG_DBG("%s>\n", __func__); + + /* Enable VENC clock */ + err = stm32_venc_enable_clock(dev); + if (err < 0) { + LOG_ERR("Clock enabling failed."); + return -EIO; + } + + data->dev = dev; + k_fifo_init(&data->fifo_input); + k_fifo_init(&data->fifo_output_in); + k_fifo_init(&data->fifo_output_out); + + /* Run IRQ init */ + config->irq_config(dev); + + /* Configure RIF access rights before touching VENC registers */ + + RISAF_Config(); + + /* initialize VENC */ + LL_VENC_DeInit(); + LL_VENC_Init(); + + LOG_DBG("CPU frequency : %d\n", HAL_RCC_GetCpuClockFreq() / 1000000); + LOG_DBG("sysclk frequency : %d\n", HAL_RCC_GetSysClockFreq() / 1000000); + LOG_DBG("pclk5 frequency : %d\n", HAL_RCC_GetPCLK5Freq() / 1000000); + + /* Default input format until the application calls set_format() */ + data->width = 320; + data->height = 240; + data->pixelformat = VIDEO_PIX_FMT_NV12; + data->pitch = data->width; + + LOG_DBG("%s inited", dev->name); + + LOG_DBG("%s<\n", __func__); + return 0; +} + +DEVICE_DT_INST_DEFINE(0, &video_stm32_venc_init, + NULL, &video_stm32_venc_data_0, + &video_stm32_venc_config_0, + POST_KERNEL, CONFIG_VIDEO_INIT_PRIORITY, + &video_stm32_venc_driver_api); diff --git a/dts/arm/st/n6/stm32n6.dtsi b/dts/arm/st/n6/stm32n6.dtsi index 5fdd55295b2d0..0450d57bcbd3b 100644 --- a/dts/arm/st/n6/stm32n6.dtsi +++ b/dts/arm/st/n6/stm32n6.dtsi @@ -852,6 +852,15 @@ resets = <&rctl STM32_RESET(APB5, 1)>; status = 
"disabled"; }; + + venc: venc@58005000 { + compatible = "st,stm32-venc"; + reg = <0x58005000 0x1000>; + interrupts = <62 0>; + clocks = <&rcc STM32_CLOCK(APB5, 5)>; + resets = <&rctl STM32_RESET(APB5, 5)>; + status = "disabled"; + }; }; }; diff --git a/dts/bindings/video/st,stm32-venc.yaml b/dts/bindings/video/st,stm32-venc.yaml new file mode 100644 index 0000000000000..7656fbea5ea65 --- /dev/null +++ b/dts/bindings/video/st,stm32-venc.yaml @@ -0,0 +1,27 @@ +# +# Copyright (c) 2025 Hugues Fruchet +# +# SPDX-License-Identifier: Apache-2.0 +# + +description: | + STMicroelectronics STM32 video encoder peripheral (VENC). + Example of node configuration at board level: + + &venc { + status = "okay"; + }; + +compatible: "st,stm32-venc" + +include: [base.yaml, reset-device.yaml] + +properties: + interrupts: + required: true + + clocks: + required: true + + resets: + required: false diff --git a/include/zephyr/drivers/video.h b/include/zephyr/drivers/video.h index 0744219a76960..29b82287cbcad 100644 --- a/include/zephyr/drivers/video.h +++ b/include/zephyr/drivers/video.h @@ -1551,6 +1551,8 @@ int64_t video_get_csi_link_freq(const struct device *dev, uint8_t bpp, uint8_t l */ #define VIDEO_PIX_FMT_XYUV32 VIDEO_FOURCC('X', 'Y', 'U', 'V') +#define VIDEO_PIX_FMT_NV12 VIDEO_FOURCC('N', 'V', '1', '2') + /** * @} */ @@ -1597,6 +1599,7 @@ static inline unsigned int video_bits_per_pixel(uint32_t pixfmt) case VIDEO_PIX_FMT_SGRBG12P: case VIDEO_PIX_FMT_SRGGB12P: case VIDEO_PIX_FMT_Y12P: + case VIDEO_PIX_FMT_NV12: return 12; case VIDEO_PIX_FMT_SBGGR14P: case VIDEO_PIX_FMT_SGBRG14P: diff --git a/samples/drivers/video/tcpserversink/Kconfig b/samples/drivers/video/tcpserversink/Kconfig new file mode 100644 index 0000000000000..bfb8d7d0f2c4b --- /dev/null +++ b/samples/drivers/video/tcpserversink/Kconfig @@ -0,0 +1,72 @@ +# Copyright (c) 2024 Espressif Systems (Shanghai) Co., Ltd. 
+# SPDX-License-Identifier: Apache-2.0 + +mainmenu "TCP camera streaming sample application" + +menu "Video capture configuration" + +config VIDEO_SOURCE_CROP_LEFT + int "Crop area left value" + default 0 + help + Left value of the crop area within the video source. + +config VIDEO_SOURCE_CROP_TOP + int "Crop area top value" + default 0 + help + Top value of the crop area within the video source. + +config VIDEO_SOURCE_CROP_WIDTH + int "Crop area width value" + default 0 + help + Width value of the crop area within the video source. + If set to 0, the crop is not applied. + +config VIDEO_SOURCE_CROP_HEIGHT + int "Crop area height value" + default 0 + help + Height value of the crop area within the video source. + If set to 0, the crop is not applied. + +config VIDEO_FRAME_HEIGHT + int "Height of the video frame" + default 0 + help + Height of the video frame. If set to 0, the default height is used. + +config VIDEO_FRAME_WIDTH + int "Width of the video frame" + default 0 + help + Width of the video frame. If set to 0, the default width is used. + +config VIDEO_PIXEL_FORMAT + string "Pixel format of the video frame" + help + Pixel format of the video frame. If not set, the default pixel format is used. + +config VIDEO_NUM_FRAMES + int "Capture N-buffering" + default 0 + help + Framerate versus memory usage tradeoff. + "2" allows to capture while sending data (optimal framerate). + "1" allows to reduce memory usage but capture framerate is lower. + If not set defaults to "2". 
+ +config VIDEO_CTRL_HFLIP + bool "Mirror the video frame horizontally" + help + If set, mirror the video frame horizontally + +config VIDEO_CTRL_VFLIP + bool "Mirror the video frame vertically" + help + If set, mirror the video frame vertically + +endmenu + +source "Kconfig.zephyr" diff --git a/samples/drivers/video/tcpserversink/README.rst b/samples/drivers/video/tcpserversink/README.rst index b44d9f3105ad7..0e6ea840418e3 100644 --- a/samples/drivers/video/tcpserversink/README.rst +++ b/samples/drivers/video/tcpserversink/README.rst @@ -18,6 +18,9 @@ This samples requires a video capture device and network support. - :zephyr:board:`mimxrt1064_evk` - `MT9M114 camera module`_ +- :zephyr:board:`stm32n6570_dk` + with a `MB1854 camera module`_ + Wiring ****** @@ -26,6 +29,11 @@ J35 camera connector. A USB cable should be connected from a host to the micro USB debug connector (J41) in order to get console output via the freelink interface. Ethernet cable must be connected to RJ45 connector. +On :zephyr:board:`stm32n6570_dk`, the MB1854 IMX335 camera module must be plugged in +the CSI-2 camera connector. A RJ45 ethernet cable must be plugged in the ethernet CN6 +connector. When using ethernet over USB, plug an USB-C cable between CN8 USB1 connector +and your host PC. + Building and Running ******************** @@ -49,6 +57,15 @@ a video software pattern generator is supported by using :ref:`snippet-video-sw- :goals: build :compact: +For :zephyr:board:`stm32n6570_dk`, the sample can be built with the following command: + +.. zephyr-app-commands:: + :zephyr-app: samples/drivers/video/tcpserversink + :board: stm32n6570_dk + :shield: st_b_cams_imx_mb1854 + :goals: build + :compact: + Sample Output ============= @@ -71,6 +88,23 @@ Example with gstreamer: For video software generator, the default resolution should be width=320 and height=160. +When using compression support, use this GStreamer command line: + +.. 
code-block:: console + + gst-launch-1.0 tcpclientsrc host=192.0.2.1 port=5000 \ + ! queue ! decodebin ! queue ! fpsdisplaysink sync=false + +When using ethernet over USB, set the ethernet interface through USB before +starting the GStreamer command line: + +.. code-block:: console + sudo ifconfig enx00005e005301 192.0.2.2 + +To find the right interface, search for "enx" in dmesg + .. code-block:: console + [1316045.847337] cdc_ether 3-6.3.2:1.0 enx00005e005301: renamed from eth0 + References ********** diff --git a/samples/drivers/video/tcpserversink/boards/stm32n6570_dk.conf b/samples/drivers/video/tcpserversink/boards/stm32n6570_dk.conf new file mode 100644 index 0000000000000..6f879927f4ac2 --- /dev/null +++ b/samples/drivers/video/tcpserversink/boards/stm32n6570_dk.conf @@ -0,0 +1,29 @@ +# Video buffer pool +CONFIG_MULTI_HEAP=y +CONFIG_MEM_ATTR_HEAP=y +CONFIG_VIDEO_BUFFER_USE_MEM_ATTR_HEAP=y +CONFIG_VIDEO_BUFFER_MEM_SW_ATTRIBUTE=2 +CONFIG_VIDEO_BUFFER_POOL_SZ_MAX=10000000 +CONFIG_VIDEO_BUFFER_POOL_NUM_MAX=10 + +# Camera interface +CONFIG_VIDEO_STM32_DCMIPP_SENSOR_PIXEL_FORMAT="pRAA" +CONFIG_VIDEO_STM32_DCMIPP_SENSOR_WIDTH=2592 +CONFIG_VIDEO_STM32_DCMIPP_SENSOR_HEIGHT=1944 +CONFIG_FPU=y + +# Capture +CONFIG_VIDEO_FRAME_WIDTH=1920 +CONFIG_VIDEO_FRAME_HEIGHT=1088 +CONFIG_VIDEO_PIXEL_FORMAT="NV12" +CONFIG_VIDEO_NUM_FRAMES=1 + +# Video encoder +CONFIG_VIDEO_STM32_VENC=y +CONFIG_MULTI_HEAP=y +CONFIG_MEM_ATTR_HEAP=y +CONFIG_MAIN_STACK_SIZE=4096 + +# Network buffers +CONFIG_NET_BUF_RX_COUNT=4 +CONFIG_NET_BUF_TX_COUNT=8 diff --git a/samples/drivers/video/tcpserversink/boards/stm32n6570_dk_stm32n657xx_fsbl.conf b/samples/drivers/video/tcpserversink/boards/stm32n6570_dk_stm32n657xx_fsbl.conf new file mode 100644 index 0000000000000..6f879927f4ac2 --- /dev/null +++ b/samples/drivers/video/tcpserversink/boards/stm32n6570_dk_stm32n657xx_fsbl.conf @@ -0,0 +1,29 @@ +# Video buffer pool +CONFIG_MULTI_HEAP=y +CONFIG_MEM_ATTR_HEAP=y +CONFIG_VIDEO_BUFFER_USE_MEM_ATTR_HEAP=y 
+CONFIG_VIDEO_BUFFER_MEM_SW_ATTRIBUTE=2 +CONFIG_VIDEO_BUFFER_POOL_SZ_MAX=10000000 +CONFIG_VIDEO_BUFFER_POOL_NUM_MAX=10 + +# Camera interface +CONFIG_VIDEO_STM32_DCMIPP_SENSOR_PIXEL_FORMAT="pRAA" +CONFIG_VIDEO_STM32_DCMIPP_SENSOR_WIDTH=2592 +CONFIG_VIDEO_STM32_DCMIPP_SENSOR_HEIGHT=1944 +CONFIG_FPU=y + +# Capture +CONFIG_VIDEO_FRAME_WIDTH=1920 +CONFIG_VIDEO_FRAME_HEIGHT=1088 +CONFIG_VIDEO_PIXEL_FORMAT="NV12" +CONFIG_VIDEO_NUM_FRAMES=1 + +# Video encoder +CONFIG_VIDEO_STM32_VENC=y +CONFIG_MULTI_HEAP=y +CONFIG_MEM_ATTR_HEAP=y +CONFIG_MAIN_STACK_SIZE=4096 + +# Network buffers +CONFIG_NET_BUF_RX_COUNT=4 +CONFIG_NET_BUF_TX_COUNT=8 diff --git a/samples/drivers/video/tcpserversink/boards/stm32n6570_dk_stm32n657xx_sb.conf b/samples/drivers/video/tcpserversink/boards/stm32n6570_dk_stm32n657xx_sb.conf new file mode 100644 index 0000000000000..6f879927f4ac2 --- /dev/null +++ b/samples/drivers/video/tcpserversink/boards/stm32n6570_dk_stm32n657xx_sb.conf @@ -0,0 +1,29 @@ +# Video buffer pool +CONFIG_MULTI_HEAP=y +CONFIG_MEM_ATTR_HEAP=y +CONFIG_VIDEO_BUFFER_USE_MEM_ATTR_HEAP=y +CONFIG_VIDEO_BUFFER_MEM_SW_ATTRIBUTE=2 +CONFIG_VIDEO_BUFFER_POOL_SZ_MAX=10000000 +CONFIG_VIDEO_BUFFER_POOL_NUM_MAX=10 + +# Camera interface +CONFIG_VIDEO_STM32_DCMIPP_SENSOR_PIXEL_FORMAT="pRAA" +CONFIG_VIDEO_STM32_DCMIPP_SENSOR_WIDTH=2592 +CONFIG_VIDEO_STM32_DCMIPP_SENSOR_HEIGHT=1944 +CONFIG_FPU=y + +# Capture +CONFIG_VIDEO_FRAME_WIDTH=1920 +CONFIG_VIDEO_FRAME_HEIGHT=1088 +CONFIG_VIDEO_PIXEL_FORMAT="NV12" +CONFIG_VIDEO_NUM_FRAMES=1 + +# Video encoder +CONFIG_VIDEO_STM32_VENC=y +CONFIG_MULTI_HEAP=y +CONFIG_MEM_ATTR_HEAP=y +CONFIG_MAIN_STACK_SIZE=4096 + +# Network buffers +CONFIG_NET_BUF_RX_COUNT=4 +CONFIG_NET_BUF_TX_COUNT=8 diff --git a/samples/drivers/video/tcpserversink/overlay-netusb.conf b/samples/drivers/video/tcpserversink/overlay-netusb.conf new file mode 100644 index 0000000000000..ca03d24a61833 --- /dev/null +++ b/samples/drivers/video/tcpserversink/overlay-netusb.conf @@ -0,0 +1,10 @@ +# USB Device 
settings +CONFIG_USB_DEVICE_STACK=y + +# Select USB Configurations +CONFIG_USB_DEVICE_NETWORK_ECM=y +CONFIG_USB_DEVICE_INITIALIZE_AT_BOOT=n + +# Logging +CONFIG_USB_DRIVER_LOG_LEVEL_INF=y +CONFIG_USB_DEVICE_LOG_LEVEL_INF=y diff --git a/samples/drivers/video/tcpserversink/prj.conf b/samples/drivers/video/tcpserversink/prj.conf index a49aa3de90ff3..543710e6d1417 100644 --- a/samples/drivers/video/tcpserversink/prj.conf +++ b/samples/drivers/video/tcpserversink/prj.conf @@ -35,8 +35,25 @@ CONFIG_NET_MAX_CONTEXTS=10 # Network shell CONFIG_NET_SHELL=y +# Network management +CONFIG_NET_MGMT=y +CONFIG_NET_MGMT_EVENT=y +CONFIG_NET_MGMT_EVENT_STACK_SIZE=800 +CONFIG_NET_MGMT_EVENT_QUEUE_SIZE=5 +CONFIG_NET_CONNECTION_MANAGER=y + # Network application options and configuration +CONFIG_NET_CONFIG_INIT_TIMEOUT=1 +CONFIG_NET_CONFIG_AUTO_INIT=y CONFIG_NET_CONFIG_SETTINGS=y CONFIG_NET_CONFIG_MY_IPV4_ADDR="192.0.2.1" +CONFIG_NET_CONFIG_PEER_IPV4_ADDR="192.0.2.2" CONFIG_VIDEO=y + +# USB Device settings +CONFIG_USB_DEVICE_STACK=y + +# Select USB Configurations +CONFIG_USB_DEVICE_NETWORK_ECM=y +CONFIG_USB_DEVICE_INITIALIZE_AT_BOOT=y diff --git a/samples/drivers/video/tcpserversink/src/main.c b/samples/drivers/video/tcpserversink/src/main.c index 640a728f64eea..3bb481a8018d9 100644 --- a/samples/drivers/video/tcpserversink/src/main.c +++ b/samples/drivers/video/tcpserversink/src/main.c @@ -6,9 +6,12 @@ #include #include +#include #include #include +#include #include +#include LOG_MODULE_REGISTER(main, CONFIG_LOG_DEFAULT_LEVEL); @@ -30,17 +33,141 @@ static ssize_t sendall(int sock, const void *buf, size_t len) return 0; } +void usb_init(void) +{ + if (usb_enable(NULL)) + printk("usb enable error\n"); + + (void)net_config_init_app(NULL, "Initializing network over USB"); +} + +#if DT_HAS_CHOSEN(zephyr_videoenc) +const struct device *encoder_dev = NULL; + +int configure_encoder() +{ + struct video_buffer *buffer; + struct video_format fmt; + struct video_caps caps; + enum video_buf_type 
type = VIDEO_BUF_TYPE_OUTPUT; + uint32_t size; + if (encoder_dev) + return 0; + + encoder_dev = DEVICE_DT_GET(DT_CHOSEN(zephyr_videoenc)); + if (!device_is_ready(encoder_dev)) { + LOG_ERR("%s: video device not ready.", encoder_dev->name); + return -1; + } + LOG_INF("Video device: %s", encoder_dev->name); + + /* Get capabilities */ + caps.type = type; + if (video_get_caps(encoder_dev, &caps)) { + LOG_ERR("Unable to retrieve video capabilities"); + return -1; + } + + /* Get default/native format */ + fmt.type = type; + if (video_get_format(encoder_dev, &fmt)) { + LOG_ERR("Unable to retrieve video format"); + return -1; + } + +#if CONFIG_VIDEO_FRAME_HEIGHT + fmt.height = CONFIG_VIDEO_FRAME_HEIGHT; +#endif + +#if CONFIG_VIDEO_FRAME_WIDTH + fmt.width = CONFIG_VIDEO_FRAME_WIDTH; +#endif + + if (strcmp(CONFIG_VIDEO_PIXEL_FORMAT, "")) { + fmt.pixelformat = VIDEO_FOURCC_FROM_STR(CONFIG_VIDEO_PIXEL_FORMAT); + } + + LOG_INF("- Video format: %s %ux%u", + VIDEO_FOURCC_TO_STR(fmt.pixelformat), fmt.width, fmt.height); + + if (video_set_format(encoder_dev, &fmt)) { + LOG_ERR("Unable to set format"); + return -1; + } + + printk("Video device detected, format: %s %ux%u\n", + VIDEO_FOURCC_TO_STR(fmt.pixelformat), fmt.width, fmt.height); + + /* Alloc output buffer */ + size = fmt.width * fmt.height / 10;/* Assuming H264 x10 compression ratio */ + buffer = video_buffer_aligned_alloc(size, CONFIG_VIDEO_BUFFER_POOL_ALIGN, + K_FOREVER); + if (buffer == NULL) { + LOG_ERR("Unable to alloc compressed video buffer size=%d", size); + return -1; + } + buffer->type = type; + video_enqueue(encoder_dev, buffer); + + /* Start the encoder stream (not the camera capture) */ + if (video_stream_start(encoder_dev, type)) { + LOG_ERR("Unable to start video"); + return -1; + } + + return 0; +} + +int encode_frame(struct video_buffer *in, struct video_buffer **out) +{ + struct video_buffer vbuf_in; + int ret; + + vbuf_in = *in;/* Do not override capture video buffer */ + 
video_enqueue(encoder_dev, &vbuf_in); + + (*out)->type = VIDEO_BUF_TYPE_OUTPUT; + ret = video_dequeue(encoder_dev, out, K_FOREVER); + if (ret) { + LOG_ERR("Unable to dequeue encoder buf"); + return ret; + } + + video_enqueue(encoder_dev, (*out)); + + return 0; +} +#endif + int main(void) { struct sockaddr_in addr, client_addr; socklen_t client_addr_len = sizeof(client_addr); +#if CONFIG_VIDEO_NUM_FRAMES + struct video_buffer *buffers[CONFIG_VIDEO_NUM_FRAMES]; +#else struct video_buffer *buffers[2]; +#endif struct video_buffer *vbuf = &(struct video_buffer){}; - int i, ret, sock, client; + struct video_buffer *vbuf_out = &(struct video_buffer){}; + int ret, sock, client; struct video_format fmt; struct video_caps caps; + struct video_frmival frmival; + struct video_frmival_enum fie; enum video_buf_type type = VIDEO_BUF_TYPE_OUTPUT; const struct device *video_dev; +#if (CONFIG_VIDEO_SOURCE_CROP_WIDTH && CONFIG_VIDEO_SOURCE_CROP_HEIGHT) || \ + CONFIG_VIDEO_FRAME_HEIGHT || CONFIG_VIDEO_FRAME_WIDTH + struct video_selection sel = { + .type = VIDEO_BUF_TYPE_OUTPUT, + }; +#endif + size_t bsize; + int i = 0; + int err; video_dev = DEVICE_DT_GET(DT_CHOSEN(zephyr_camera)); if (!device_is_ready(video_dev)) { @@ -49,6 +176,8 @@ int main(void) } /* Prepare Network */ + usb_init(); + (void)memset(&addr, 0, sizeof(addr)); addr.sin_family = AF_INET; addr.sin_port = htons(MY_PORT); @@ -80,6 +209,17 @@ int main(void) return 0; } + LOG_INF("- Capabilities:"); + while (caps.format_caps[i].pixelformat) { + const struct video_format_cap *fcap = &caps.format_caps[i]; + /* fourcc to string */ + LOG_INF(" %s width [%u; %u; %u] height [%u; %u; %u]", + VIDEO_FOURCC_TO_STR(fcap->pixelformat), + fcap->width_min, fcap->width_max, fcap->width_step, + fcap->height_min, fcap->height_max, fcap->height_step); + i++; + } + /* Get default/native format */ fmt.type = type; if (video_get_format(video_dev, &fmt)) { @@ -95,9 +235,137 @@ int main(void) return 0; } + /* Set the crop setting if necessary */ 
+#if CONFIG_VIDEO_SOURCE_CROP_WIDTH && CONFIG_VIDEO_SOURCE_CROP_HEIGHT + sel.target = VIDEO_SEL_TGT_CROP; + sel.rect.left = CONFIG_VIDEO_SOURCE_CROP_LEFT; + sel.rect.top = CONFIG_VIDEO_SOURCE_CROP_TOP; + sel.rect.width = CONFIG_VIDEO_SOURCE_CROP_WIDTH; + sel.rect.height = CONFIG_VIDEO_SOURCE_CROP_HEIGHT; + if (video_set_selection(video_dev, &sel)) { + LOG_ERR("Unable to set selection crop"); + return 0; + } + LOG_INF("Selection crop set to (%u,%u)/%ux%u", + sel.rect.left, sel.rect.top, sel.rect.width, sel.rect.height); +#endif + +#if CONFIG_VIDEO_FRAME_HEIGHT || CONFIG_VIDEO_FRAME_WIDTH +#if CONFIG_VIDEO_FRAME_HEIGHT + fmt.height = CONFIG_VIDEO_FRAME_HEIGHT; +#endif + +#if CONFIG_VIDEO_FRAME_WIDTH + fmt.width = CONFIG_VIDEO_FRAME_WIDTH; +#endif + + /* + * Check (if possible) if targeted size is same as crop + * and if compose is necessary + */ + sel.target = VIDEO_SEL_TGT_CROP; + err = video_get_selection(video_dev, &sel); + if (err < 0 && err != -ENOSYS) { + LOG_ERR("Unable to get selection crop"); + return 0; + } + + if (err == 0 && (sel.rect.width != fmt.width || sel.rect.height != fmt.height)) { + sel.target = VIDEO_SEL_TGT_COMPOSE; + sel.rect.left = 0; + sel.rect.top = 0; + sel.rect.width = fmt.width; + sel.rect.height = fmt.height; + err = video_set_selection(video_dev, &sel); + if (err < 0 && err != -ENOSYS) { + LOG_ERR("Unable to set selection compose"); + return 0; + } + } +#endif + + if (strcmp(CONFIG_VIDEO_PIXEL_FORMAT, "")) { + fmt.pixelformat = VIDEO_FOURCC_FROM_STR(CONFIG_VIDEO_PIXEL_FORMAT); + } + + LOG_INF("- Video format: %s %ux%u", + VIDEO_FOURCC_TO_STR(fmt.pixelformat), fmt.width, fmt.height); + + if (video_set_format(video_dev, &fmt)) { + LOG_ERR("Unable to set format"); + return 0; + } + + if (!video_get_frmival(video_dev, &frmival)) { + LOG_INF("- Default frame rate : %f fps", + 1.0 * frmival.denominator / frmival.numerator); + } + + LOG_INF("- Supported frame intervals for the default format:"); + memset(&fie, 0, sizeof(fie)); + fie.format = 
&fmt; + while (video_enum_frmival(video_dev, &fie) == 0) { + if (fie.type == VIDEO_FRMIVAL_TYPE_DISCRETE) { + LOG_INF(" %u/%u", fie.discrete.numerator, fie.discrete.denominator); + } else { + LOG_INF(" [min = %u/%u; max = %u/%u; step = %u/%u]", + fie.stepwise.min.numerator, fie.stepwise.min.denominator, + fie.stepwise.max.numerator, fie.stepwise.max.denominator, + fie.stepwise.step.numerator, fie.stepwise.step.denominator); + } + fie.index++; + } + + /* Get supported controls */ + LOG_INF("- Supported controls:"); + const struct device *last_dev = NULL; + struct video_ctrl_query cq = {.dev = video_dev, .id = VIDEO_CTRL_FLAG_NEXT_CTRL}; + + while (!video_query_ctrl(&cq)) { + if (cq.dev != last_dev) { + last_dev = cq.dev; + LOG_INF("\t\tdevice: %s", cq.dev->name); + } + video_print_ctrl(&cq); + cq.id |= VIDEO_CTRL_FLAG_NEXT_CTRL; + } + + /* Set controls */ + struct video_control ctrl = {.id = VIDEO_CID_HFLIP, .val = 1}; + int tp_set_ret = -ENOTSUP; + + if (IS_ENABLED(CONFIG_VIDEO_CTRL_HFLIP)) { + video_set_ctrl(video_dev, &ctrl); + } + + if (IS_ENABLED(CONFIG_VIDEO_CTRL_VFLIP)) { + ctrl.id = VIDEO_CID_VFLIP; + video_set_ctrl(video_dev, &ctrl); + } + + if (IS_ENABLED(CONFIG_TEST)) { + ctrl.id = VIDEO_CID_TEST_PATTERN; + tp_set_ret = video_set_ctrl(video_dev, &ctrl); + } + + /* Size to allocate for each buffer */ + if (caps.min_line_count == LINE_COUNT_HEIGHT) { + if (fmt.pixelformat == VIDEO_PIX_FMT_NV12) + bsize = fmt.width * fmt.height * 3 / 2; + else + bsize = fmt.pitch * fmt.height; + } else { + bsize = fmt.pitch * caps.min_line_count; + } + /* Alloc Buffers */ for (i = 0; i < ARRAY_SIZE(buffers); i++) { - buffers[i] = video_buffer_alloc(fmt.pitch * fmt.height, K_FOREVER); + /* + * For some hardwares, such as the PxP used on i.MX RT1170 to do image rotation, + * buffer alignment is needed in order to achieve the best performance + */ + buffers[i] = video_buffer_aligned_alloc(bsize, CONFIG_VIDEO_BUFFER_POOL_ALIGN, + K_FOREVER); if (buffers[i] == NULL) { 
LOG_ERR("Unable to alloc video buffer"); return 0; @@ -105,6 +373,13 @@ int main(void) buffers[i]->type = type; } +#if DT_HAS_CHOSEN(zephyr_videoenc) + if (configure_encoder()) { + LOG_ERR("Unable to configure video encoder"); + return 0; + } +#endif + /* Connection loop */ do { printk("TCP: Waiting for client...\n"); @@ -140,10 +415,17 @@ int main(void) return 0; } - printk("\rSending frame %d\n", i++); +#if DT_HAS_CHOSEN(zephyr_videoenc) + encode_frame(vbuf, &vbuf_out); + printk("\rSending compressed frame %d (size=%d bytes)\n", i++, vbuf_out->bytesused); + /* Send compressed video buffer to TCP client */ + ret = sendall(client, vbuf_out->buffer, vbuf_out->bytesused); +#else + printk("\rSending frame %d\n", i++); /* Send video buffer to TCP client */ ret = sendall(client, vbuf->buffer, vbuf->bytesused); +#endif if (ret && ret != -EAGAIN) { /* client disconnected */ printk("\nTCP: Client disconnected %d\n", ret); diff --git a/tests/drivers/build_all/video/testcase.yaml b/tests/drivers/build_all/video/testcase.yaml index c7a5e07161ec8..7a5ac66f3705a 100644 --- a/tests/drivers/build_all/video/testcase.yaml +++ b/tests/drivers/build_all/video/testcase.yaml @@ -36,3 +36,8 @@ tests: - stm32n6570_dk/stm32n657xx/sb extra_args: - platform:stm32n6570_dk/stm32n657xx/sb:SHIELD=st_b_cams_imx_mb1854 + drivers.video.stm32_venc.build: + platform_allow: + - stm32n6570_dk/stm32n657xx/sb + extra_args: + - platform:stm32n6570_dk/stm32n657xx/sb diff --git a/west.yml b/west.yml index 91d959de4fba7..1fd0a88b8a682 100644 --- a/west.yml +++ b/west.yml @@ -370,6 +370,11 @@ manifest: - name: zcbor revision: 9b07780aca6fb21f82a241ba386ad9b379809337 path: modules/lib/zcbor + - name: zephyr-isp + url: git@github.st.com:AIS/mg-zephyr-isp.git + revision: master + path: Lib/zephyr-isp + submodules: true # zephyr-keep-sorted-stop self: