Skip to content

Commit 2d59b75

Browse files
ttmutkartben
authored and committed
drivers: serial: uart_max32: Use cache to workaround DMA limitation
When using the asynchronous API, a transfer will fail if the source buffer is located in a region that cannot be accessed by DMA. This could happen when a buffer is declared const and placed in flash memory, for example. Work around this problem by loading the data into a set of temporary caches before passing them to DMA. Signed-off-by: Tahsin Mutlugun <Tahsin.Mutlugun@analog.com>
1 parent 5059632 commit 2d59b75

File tree

2 files changed

+120
-29
lines changed

2 files changed

+120
-29
lines changed

drivers/serial/Kconfig.max32

Lines changed: 12 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -16,3 +16,15 @@ config UART_MAX32
1616
This option enables the UART driver for MAX32 family of
1717
processors.
1818
Say y if you wish to use serial port on MAX32 MCU.
19+
20+
if UART_MAX32

config UART_TX_CACHE_LEN
	int "TX cache buffer size"
	range 8 64
	default 8
	help
	  Size of each UART transmit cache buffer. The caches are used as
	  intermediate DMA sources when the caller's buffer does not reside
	  in a DMA-accessible region (e.g. const data placed in flash).

endif # UART_MAX32

drivers/serial/uart_max32.c

Lines changed: 108 additions & 29 deletions
Original file line numberDiff line numberDiff line change
@@ -6,6 +6,7 @@
66

77
#ifdef CONFIG_UART_ASYNC_API
88
#include <zephyr/drivers/dma.h>
9+
#include <wrap_max32_dma.h>
910
#endif
1011
#include <zephyr/drivers/pinctrl.h>
1112
#include <zephyr/drivers/uart.h>
@@ -43,9 +44,15 @@ struct max32_uart_config {
4344
};
4445

4546
#ifdef CONFIG_UART_ASYNC_API
47+
#define MAX32_UART_TX_CACHE_NUM 2
4648
struct max32_uart_async_tx {
4749
const uint8_t *buf;
50+
const uint8_t *src;
4851
size_t len;
52+
uint8_t cache[MAX32_UART_TX_CACHE_NUM][CONFIG_UART_TX_CACHE_LEN];
53+
uint8_t cache_id;
54+
struct dma_block_config dma_blk;
55+
int32_t timeout;
4956
struct k_work_delayable timeout_work;
5057
};
5158

@@ -86,6 +93,10 @@ struct max32_uart_data {
8693
static void uart_max32_isr(const struct device *dev);
8794
#endif
8895

96+
#ifdef CONFIG_UART_ASYNC_API
97+
static int uart_max32_tx_dma_load(const struct device *dev, uint8_t *buf, size_t len);
98+
#endif
99+
89100
static void api_poll_out(const struct device *dev, unsigned char c)
90101
{
91102
const struct max32_uart_config *cfg = dev->config;
@@ -492,13 +503,22 @@ static void async_user_callback(const struct device *dev, struct uart_event *evt
492503
}
493504
}
494505

506+
static uint32_t load_tx_cache(const uint8_t *src, size_t len, uint8_t *dest)
507+
{
508+
memcpy(dest, src, MIN(len, CONFIG_UART_TX_CACHE_LEN));
509+
510+
return MIN(len, CONFIG_UART_TX_CACHE_LEN);
511+
}
512+
495513
static void uart_max32_async_tx_callback(const struct device *dma_dev, void *user_data,
496514
uint32_t channel, int status)
497515
{
498516
const struct device *dev = user_data;
499517
const struct max32_uart_config *config = dev->config;
500518
struct max32_uart_data *data = dev->data;
519+
struct max32_uart_async_tx *tx = &data->async.tx;
501520
struct dma_status dma_stat;
521+
int ret;
502522

503523
unsigned int key = irq_lock();
504524

@@ -509,17 +529,73 @@ static void uart_max32_async_tx_callback(const struct device *dma_dev, void *use
509529
return;
510530
}
511531

512-
k_work_cancel_delayable(&data->async.tx.timeout_work);
532+
k_work_cancel_delayable(&tx->timeout_work);
513533
Wrap_MXC_UART_DisableTxDMA(config->regs);
514534

515535
irq_unlock(key);
516536

517-
struct uart_event tx_done = {
518-
.type = status == 0 ? UART_TX_DONE : UART_TX_ABORTED,
519-
.data.tx.buf = data->async.tx.buf,
520-
.data.tx.len = data->async.tx.len,
521-
};
522-
async_user_callback(dev, &tx_done);
537+
tx->len -= tx->dma_blk.block_size;
538+
if (tx->len > 0) {
539+
tx->cache_id = !(tx->cache_id);
540+
ret = uart_max32_tx_dma_load(dev, tx->cache[tx->cache_id],
541+
MIN(tx->len, CONFIG_UART_TX_CACHE_LEN));
542+
if (ret < 0) {
543+
LOG_ERR("Error configuring Tx DMA (%d)", ret);
544+
return;
545+
}
546+
547+
ret = dma_start(config->tx_dma.dev, config->tx_dma.channel);
548+
if (ret < 0) {
549+
LOG_ERR("Error starting Tx DMA (%d)", ret);
550+
return;
551+
}
552+
553+
async_timer_start(&tx->timeout_work, tx->timeout);
554+
555+
Wrap_MXC_UART_SetTxDMALevel(config->regs, 2);
556+
Wrap_MXC_UART_EnableTxDMA(config->regs);
557+
558+
/* Load next chunk as well */
559+
if (tx->len > CONFIG_UART_TX_CACHE_LEN) {
560+
tx->src += load_tx_cache(tx->src, tx->len - CONFIG_UART_TX_CACHE_LEN,
561+
tx->cache[!(tx->cache_id)]);
562+
}
563+
} else {
564+
struct uart_event tx_done = {
565+
.type = status == 0 ? UART_TX_DONE : UART_TX_ABORTED,
566+
.data.tx.buf = tx->buf,
567+
.data.tx.len = tx->len,
568+
};
569+
async_user_callback(dev, &tx_done);
570+
}
571+
}
572+
573+
static int uart_max32_tx_dma_load(const struct device *dev, uint8_t *buf, size_t len)
574+
{
575+
int ret;
576+
const struct max32_uart_config *config = dev->config;
577+
struct max32_uart_data *data = dev->data;
578+
struct dma_config dma_cfg = {0};
579+
struct dma_block_config *dma_blk = &data->async.tx.dma_blk;
580+
581+
dma_cfg.channel_direction = MEMORY_TO_PERIPHERAL;
582+
dma_cfg.dma_callback = uart_max32_async_tx_callback;
583+
dma_cfg.user_data = (void *)dev;
584+
dma_cfg.dma_slot = config->tx_dma.slot;
585+
dma_cfg.block_count = 1;
586+
dma_cfg.source_data_size = 1U;
587+
dma_cfg.source_burst_length = 1U;
588+
dma_cfg.dest_data_size = 1U;
589+
dma_cfg.head_block = dma_blk;
590+
dma_blk->block_size = len;
591+
dma_blk->source_address = (uint32_t)buf;
592+
593+
ret = dma_config(config->tx_dma.dev, config->tx_dma.channel, &dma_cfg);
594+
if (ret < 0) {
595+
return ret;
596+
}
597+
598+
return 0;
523599
}
524600

525601
static int api_callback_set(const struct device *dev, uart_callback_t callback, void *user_data)
@@ -537,15 +613,14 @@ static int api_tx(const struct device *dev, const uint8_t *buf, size_t len, int3
537613
struct max32_uart_data *data = dev->data;
538614
const struct max32_uart_config *config = dev->config;
539615
struct dma_status dma_stat;
540-
struct dma_config dma_cfg = {0};
541-
struct dma_block_config dma_blk = {0};
542616
int ret;
617+
bool use_cache = false;
543618
unsigned int key = irq_lock();
544619

545620
if (config->tx_dma.channel == 0xFF) {
546621
LOG_ERR("Tx DMA channel is not configured");
547-
irq_unlock(key);
548-
return -ENOTSUP;
622+
ret = -ENOTSUP;
623+
goto unlock;
549624
}
550625

551626
ret = dma_get_status(config->tx_dma.dev, config->tx_dma.channel, &dma_stat);
@@ -557,38 +632,37 @@ static int api_tx(const struct device *dev, const uint8_t *buf, size_t len, int3
557632

558633
data->async.tx.buf = buf;
559634
data->async.tx.len = len;
635+
data->async.tx.src = data->async.tx.buf;
636+
637+
if (((uint32_t)buf < MXC_SRAM_MEM_BASE) ||
638+
(((uint32_t)buf + len) > (MXC_SRAM_MEM_BASE + MXC_SRAM_MEM_SIZE))) {
639+
use_cache = true;
640+
len = load_tx_cache(data->async.tx.src, MIN(len, CONFIG_UART_TX_CACHE_LEN),
641+
data->async.tx.cache[0]);
642+
data->async.tx.src += len;
643+
data->async.tx.cache_id = 0;
644+
}
560645

561-
dma_cfg.channel_direction = MEMORY_TO_PERIPHERAL;
562-
dma_cfg.dma_callback = uart_max32_async_tx_callback;
563-
dma_cfg.user_data = (void *)dev;
564-
dma_cfg.dma_slot = config->tx_dma.slot;
565-
dma_cfg.block_count = 1;
566-
dma_cfg.source_data_size = 1U;
567-
dma_cfg.source_burst_length = 1U;
568-
dma_cfg.dest_data_size = 1U;
569-
dma_cfg.head_block = &dma_blk;
570-
dma_blk.block_size = len;
571-
dma_blk.source_address = (uint32_t)buf;
572-
573-
ret = dma_config(config->tx_dma.dev, config->tx_dma.channel, &dma_cfg);
646+
ret = uart_max32_tx_dma_load(dev, use_cache ? data->async.tx.cache[0] : ((uint8_t *)buf),
647+
len);
574648
if (ret < 0) {
575649
LOG_ERR("Error configuring Tx DMA (%d)", ret);
576-
irq_unlock(key);
577-
return ret;
650+
goto unlock;
578651
}
579652

580653
ret = dma_start(config->tx_dma.dev, config->tx_dma.channel);
581654
if (ret < 0) {
582655
LOG_ERR("Error starting Tx DMA (%d)", ret);
583-
irq_unlock(key);
584-
return ret;
656+
goto unlock;
585657
}
586658

659+
data->async.tx.timeout = timeout;
587660
async_timer_start(&data->async.tx.timeout_work, timeout);
588661

589662
Wrap_MXC_UART_SetTxDMALevel(config->regs, 2);
590663
Wrap_MXC_UART_EnableTxDMA(config->regs);
591664

665+
unlock:
592666
irq_unlock(key);
593667

594668
return ret;
@@ -709,6 +783,12 @@ static void uart_max32_async_rx_callback(const struct device *dma_dev, void *use
709783
unsigned int key = irq_lock();
710784

711785
dma_get_status(config->rx_dma.dev, config->rx_dma.channel, &dma_stat);
786+
787+
if (dma_stat.pending_length > 0) {
788+
irq_unlock(key);
789+
return;
790+
}
791+
712792
total_rx = async->rx.len - dma_stat.pending_length;
713793

714794
api_irq_rx_disable(dev);
@@ -717,7 +797,6 @@ static void uart_max32_async_rx_callback(const struct device *dma_dev, void *use
717797

718798
if (total_rx > async->rx.offset) {
719799
async->rx.counter = total_rx - async->rx.offset;
720-
721800
struct uart_event rdy_event = {
722801
.type = UART_RX_RDY,
723802
.data.rx.buf = async->rx.buf,

0 commit comments

Comments
 (0)