/*
* Copyright (c) 2022 Intel Corporation.
*
* SPDX-License-Identifier: Apache-2.0
*/
/**
* @file
* @brief Verify zephyr dma memory to memory transfer loops with scatter gather
* @details
* - Test Steps
* -# Set dma configuration for scatter gather enable
* -# Set direction memory-to-memory with two block transfers
* -# Start transfer tx -> rx
* - Expected Results
* -# Data is transferred correctly from src buffers to dest buffers without
* software intervention.
*/
#include <zephyr/kernel.h>
#include <zephyr/drivers/dma.h>
#include <zephyr/ztest.h>
/* Number of chained block transfers and the size of each block. */
#define XFERS 4
#define XFER_SIZE 8192
/* NOTE(review): CONFIG_NOCACHE_MEMORY is a Kconfig bool (defined as 1 or
 * absent); "#if" works because undefined identifiers evaluate to 0 in
 * preprocessor conditionals, but "#ifdef" would avoid -Wundef noise.
 */
#if CONFIG_NOCACHE_MEMORY
/* Place the DMA buffers in non-cacheable sections so the controller and
 * the CPU observe the same bytes without explicit cache maintenance.
 */
static __aligned(32) uint8_t tx_data[XFER_SIZE] __used
	__attribute__((__section__(".nocache")));
static __aligned(32) uint8_t rx_data[XFERS][XFER_SIZE] __used
	__attribute__((__section__(".nocache.dma")));
#else
/* this src memory shall be in RAM to support using as a DMA source pointer.*/
static __aligned(32) uint8_t tx_data[XFER_SIZE];
static __aligned(32) uint8_t rx_data[XFERS][XFER_SIZE] = { { 0 } };
#endif
/* Given by the DMA completion callback, taken by the test thread. */
K_SEM_DEFINE(xfer_sem, 0, 1);
/* Channel configuration and the chained scatter-gather block descriptors. */
static struct dma_config dma_cfg = {0};
static struct dma_block_config dma_block_cfgs[XFERS];
/* DMA completion callback: log errors, signal the waiting test thread on
 * success. An error leaves xfer_sem untaken, so the test fails via timeout.
 */
static void dma_sg_callback(const struct device *dma_dev, void *user_data,
			    uint32_t channel, int status)
{
	if (status < 0) {
		TC_PRINT("callback status %d\n", status);
		return;
	}

	TC_PRINT("giving xfer_sem\n");
	k_sem_give(&xfer_sem);
}
/**
 * @brief Run one memory-to-memory scatter-gather transfer and verify it.
 *
 * Builds a chain of XFERS block descriptors that all read the same source
 * buffer into XFERS distinct destination buffers, starts the transfer,
 * waits for the completion callback, then compares every destination
 * buffer against the source.
 *
 * @return TC_PASS on success, TC_FAIL on device-not-ready, configuration,
 *         start, timeout, or data-mismatch error.
 */
static int test_sg(void)
{
	const struct device *dma;
	int chan_id;

	TC_PRINT("DMA memory to memory transfer started\n");
	TC_PRINT("Preparing DMA Controller\n");

	/* Fill the source with a known ramp pattern (wraps at 256); the
	 * loop writes every byte, so no prior memset is needed.
	 */
	for (int i = 0; i < XFER_SIZE; i++) {
		tx_data[i] = i;
	}
	memset(rx_data, 0, sizeof(rx_data));

	dma = DEVICE_DT_GET(DT_ALIAS(dma0));
	if (!device_is_ready(dma)) {
		TC_PRINT("dma controller device is not ready\n");
		return TC_FAIL;
	}

	dma_cfg.channel_direction = MEMORY_TO_MEMORY;
	dma_cfg.source_data_size = 4U;
	dma_cfg.dest_data_size = 4U;
	dma_cfg.source_burst_length = 4U;
	dma_cfg.dest_burst_length = 4U;
#ifdef CONFIG_DMAMUX_STM32
	/* STM32 DMAMUX drivers expect the device pointer as user data. */
	dma_cfg.user_data = (struct device *)dma;
#else
	dma_cfg.user_data = NULL;
#endif /* CONFIG_DMAMUX_STM32 */
	dma_cfg.dma_callback = dma_sg_callback;
	dma_cfg.block_count = XFERS;
	dma_cfg.head_block = dma_block_cfgs;
	dma_cfg.complete_callback_en = false; /* per block completion */
#ifdef CONFIG_DMA_MCUX_TEST_SLOT_START
	dma_cfg.dma_slot = CONFIG_DMA_MCUX_TEST_SLOT_START;
#endif

	chan_id = dma_request_channel(dma, NULL);
	if (chan_id < 0) {
		TC_PRINT("Platform does not support dma request channel,"
			 " using Kconfig DMA_SG_CHANNEL_NR\n");
		chan_id = CONFIG_DMA_SG_CHANNEL_NR;
	}

	/* Chain XFERS descriptors: same source, distinct destinations. */
	memset(dma_block_cfgs, 0, sizeof(dma_block_cfgs));
	for (int i = 0; i < XFERS; i++) {
		dma_block_cfgs[i].source_gather_en = 1U;
		dma_block_cfgs[i].block_size = XFER_SIZE;
#ifdef CONFIG_DMA_64BIT
		dma_block_cfgs[i].source_address = (uint64_t)(tx_data);
		dma_block_cfgs[i].dest_address = (uint64_t)(rx_data[i]);
		TC_PRINT("dma block %d block_size %d, source addr %" PRIx64 ", dest addr %"
			 PRIx64 "\n", i, XFER_SIZE, dma_block_cfgs[i].source_address,
			 dma_block_cfgs[i].dest_address);
#else
		dma_block_cfgs[i].source_address = (uint32_t)(tx_data);
		dma_block_cfgs[i].dest_address = (uint32_t)(rx_data[i]);
		/* PRIx32 matches uint32_t on all platforms; plain %x is UB
		 * where uint32_t is not unsigned int.
		 */
		TC_PRINT("dma block %d block_size %d, source addr %" PRIx32 ", dest addr %"
			 PRIx32 "\n", i, XFER_SIZE, dma_block_cfgs[i].source_address,
			 dma_block_cfgs[i].dest_address);
#endif
		if (i < XFERS - 1) {
			dma_block_cfgs[i].next_block = &dma_block_cfgs[i+1];
			TC_PRINT("set next block pointer to %p\n", dma_block_cfgs[i].next_block);
		}
	}

	TC_PRINT("Configuring the scatter-gather transfer on channel %d\n", chan_id);
	if (dma_config(dma, chan_id, &dma_cfg)) {
		TC_PRINT("ERROR: transfer config (%d)\n", chan_id);
		return TC_FAIL;
	}

	TC_PRINT("Starting the transfer on channel %d and waiting completion\n", chan_id);
	if (dma_start(dma, chan_id)) {
		TC_PRINT("ERROR: transfer start (%d)\n", chan_id);
		return TC_FAIL;
	}

	/* The callback gives xfer_sem only on a successful transfer. */
	if (k_sem_take(&xfer_sem, K_MSEC(1000)) != 0) {
		TC_PRINT("Timed out waiting for xfers\n");
		return TC_FAIL;
	}

	TC_PRINT("Verify RX buffer should contain the full TX buffer string.\n");
	for (int i = 0; i < XFERS; i++) {
		TC_PRINT("rx_data[%d]\n", i);
		if (memcmp(tx_data, rx_data[i], XFER_SIZE)) {
			return TC_FAIL;
		}
	}

	TC_PRINT("Finished: DMA Scatter-Gather\n");
	return TC_PASS;
}
/* Exported test case: run the scatter-gather transfer and assert success. */
ZTEST(dma_m2m_sg, test_dma_m2m_sg)
{
	int result = test_sg();

	zassert_true(result == TC_PASS);
}
|