|
13 | 13 | #include <zephyr/linker/devicetree_regions.h>
|
14 | 14 | #include <zephyr/logging/log.h>
|
15 | 15 | #include <zephyr/irq.h>
|
| 16 | +#include <dmm.h> |
16 | 17 |
|
17 | 18 | LOG_MODULE_REGISTER(adc_nrfx_saadc, CONFIG_ADC_LOG_LEVEL);
|
18 | 19 |
|
@@ -86,34 +87,22 @@ BUILD_ASSERT((NRF_SAADC_AIN0 == NRF_SAADC_INPUT_AIN0) &&
|
86 | 87 | "Definitions from nrf-adc.h do not match those from nrf_saadc.h");
|
87 | 88 | #endif
|
88 | 89 |
|
89 |
| -#if defined(CONFIG_NRF_PLATFORM_HALTIUM) |
90 |
| -#include <dmm.h> |
91 |
| -/* Haltium devices always use bounce buffers in RAM */ |
92 |
| -static uint16_t adc_samples_buffer[SAADC_CH_NUM] DMM_MEMORY_SECTION(DT_NODELABEL(adc)); |
93 |
| - |
94 |
| -#define ADC_BUFFER_IN_RAM |
95 |
| - |
96 |
| -#endif /* defined(CONFIG_NRF_PLATFORM_HALTIUM) */ |
97 |
| - |
/* Runtime state for the single SAADC instance. */
struct driver_data {
	struct adc_context ctx;
	/* Bitmask of channels configured as single-ended (one bit per channel). */
	uint8_t single_ended_channels;
	/* DMM memory region the SAADC DMA can access; NULL-equivalent pass-through
	 * when CONFIG_HAS_NORDIC_DMM is not set.
	 */
	void *mem_reg;
	/* Caller-supplied output buffer from the current adc_sequence. */
	void *user_buffer;
	/* DMA-safe bounce buffer obtained from dmm_buffer_in_prepare(); may alias
	 * user_buffer when DMM is a pass-through — TODO confirm against dmm.h.
	 */
	void *samples_buffer;
	/* Most recent SAADC driver event, cached for the context callbacks. */
	const nrfx_saadc_evt_t *event;
	/* Number of active channels in the current sequence (samples per chunk). */
	uint8_t channels_cnt;
};
|
110 | 99 |
|
/* Single driver instance; the SAADC peripheral exists once per SoC. */
static struct driver_data m_data = {
	ADC_CONTEXT_INIT_TIMER(m_data, ctx),
	ADC_CONTEXT_INIT_LOCK(m_data, ctx),
	ADC_CONTEXT_INIT_SYNC(m_data, ctx),
#if defined(CONFIG_HAS_NORDIC_DMM)
	/* Resolve the DMM region for the ADC node once, at build time. */
	.mem_reg = DMM_DEV_TO_REG(DT_NODELABEL(adc)),
#endif
};
|
119 | 108 |
|
@@ -414,21 +403,28 @@ static void adc_context_start_sampling(struct adc_context *ctx)
|
414 | 403 |
|
415 | 404 | if (ret != NRFX_SUCCESS) {
|
416 | 405 | LOG_ERR("Cannot start sampling: %d", ret);
|
| 406 | + adc_context_complete(&m_data.ctx, -EIO); |
417 | 407 | }
|
418 | 408 | }
|
419 | 409 | }
|
420 | 410 |
|
421 | 411 | static void adc_context_update_buffer_pointer(struct adc_context *ctx, bool repeat)
|
422 | 412 | {
|
423 | 413 | if (!repeat) {
|
424 |
| -#if defined(ADC_BUFFER_IN_RAM) |
425 |
| - m_data.user_buffer = (uint16_t *)m_data.user_buffer + |
426 |
| - m_data.event->data.done.size; |
427 |
| -#else |
428 |
| - nrf_saadc_value_t *buffer = |
429 |
| - (uint16_t *)m_data.event->data.done.p_buffer + |
430 |
| - m_data.event->data.done.size; |
431 |
| - nrfx_saadc_buffer_set(buffer, m_data.event->data.done.size); |
| 414 | + m_data.user_buffer = (uint16_t *)m_data.user_buffer + m_data.event->data.done.size; |
| 415 | + |
| 416 | + int error = dmm_buffer_in_prepare(m_data.mem_reg, m_data.user_buffer, |
| 417 | + samples_to_bytes(m_data.channels_cnt), |
| 418 | + (void **)&m_data.samples_buffer); |
| 419 | + if (error != 0) { |
| 420 | + LOG_ERR("DMM buffer allocation failed err=%d", error); |
| 421 | + dmm_buffer_in_release(m_data.mem_reg, m_data.user_buffer, |
| 422 | + samples_to_bytes(m_data.channels_cnt), |
| 423 | + m_data.user_buffer); |
| 424 | + adc_context_complete(&m_data.ctx, -EIO); |
| 425 | + } |
| 426 | +#if !defined(CONFIG_HAS_NORDIC_DMM) |
| 427 | + nrfx_saadc_buffer_set(m_data.samples_buffer, m_data.event->data.done.size); |
432 | 428 | #endif
|
433 | 429 | }
|
434 | 430 | }
|
@@ -604,17 +600,26 @@ static int start_read(const struct device *dev,
|
604 | 600 | return error;
|
605 | 601 | }
|
606 | 602 |
|
607 |
| -#if defined(ADC_BUFFER_IN_RAM) |
608 |
| - m_data.user_buffer = sequence->buffer; |
609 | 603 | m_data.channels_cnt = channels_cnt;
|
| 604 | + m_data.user_buffer = sequence->buffer; |
| 605 | + |
| 606 | + error = dmm_buffer_in_prepare(m_data.mem_reg, |
| 607 | + m_data.user_buffer, |
| 608 | + samples_to_bytes(channels_cnt), |
| 609 | + (void **)&m_data.samples_buffer); |
| 610 | + if (error != 0) { |
| 611 | + LOG_ERR("DMM buffer allocation failed err=%d", error); |
| 612 | + dmm_buffer_in_release(m_data.mem_reg, |
| 613 | + m_data.user_buffer, |
| 614 | + samples_to_bytes(channels_cnt), |
| 615 | + m_data.user_buffer); |
| 616 | + return error; |
| 617 | + } |
610 | 618 |
|
611 |
| - nrfx_saadc_buffer_set(m_data.samples_buffer, channels_cnt); |
612 |
| -#else |
613 | 619 | /* Buffer is filled in chunks, each chunk composed of number of samples equal to number
|
614 | 620 | * of active channels. Buffer pointer is advanced and reloaded after each chunk.
|
615 | 621 | */
|
616 |
| - nrfx_saadc_buffer_set(sequence->buffer, channels_cnt); |
617 |
| -#endif |
| 622 | + nrfx_saadc_buffer_set(m_data.samples_buffer, channels_cnt); |
618 | 623 |
|
619 | 624 | adc_context_start_read(&m_data.ctx, sequence);
|
620 | 625 |
|
@@ -661,10 +666,10 @@ static void event_handler(const nrfx_saadc_evt_t *event)
|
661 | 666 | correct_single_ended(&m_data.ctx.sequence);
|
662 | 667 | }
|
663 | 668 |
|
664 |
| -#if defined(ADC_BUFFER_IN_RAM) |
665 |
| - memcpy(m_data.user_buffer, m_data.samples_buffer, |
666 |
| - samples_to_bytes(m_data.channels_cnt)); |
667 |
| -#endif |
| 669 | + dmm_buffer_in_release(m_data.mem_reg, |
| 670 | + m_data.user_buffer, |
| 671 | + samples_to_bytes(m_data.channels_cnt), |
| 672 | + m_data.samples_buffer); |
668 | 673 |
|
669 | 674 | adc_context_on_sampling_done(&m_data.ctx, DEVICE_DT_INST_GET(0));
|
670 | 675 | } else if (m_data.event->type == NRFX_SAADC_EVT_CALIBRATEDONE) {
|
|
0 commit comments