[fix][hal_dma] fix dma transfer lli config, optimise if statement

jzlv 2021-09-29 14:46:48 +08:00
parent 9f2e63e06f
commit 839870500d
2 changed files with 140 additions and 192 deletions


@@ -273,18 +273,31 @@ int dma_reload(struct device *dev, uint32_t src_addr, uint32_t dst_addr, uint32_
     DMA_Channel_Disable(dma_device->ch);
 
-    if (dma_device->direction == DMA_MEMORY_TO_MEMORY) {
-        dma_ctrl_cfg.bits.SI = 1;
-        dma_ctrl_cfg.bits.DI = 1;
-    } else if (dma_device->direction == DMA_MEMORY_TO_PERIPH) {
-        dma_ctrl_cfg.bits.SI = 1;
-        dma_ctrl_cfg.bits.DI = 0;
-    } else if (dma_device->direction == DMA_PERIPH_TO_MEMORY) {
-        dma_ctrl_cfg.bits.SI = 0;
-        dma_ctrl_cfg.bits.DI = 1;
-    } else if (dma_device->direction == DMA_PERIPH_TO_PERIPH) {
-        dma_ctrl_cfg.bits.SI = 0;
-        dma_ctrl_cfg.bits.DI = 0;
-    }
+    if (transfer_size == 0) {
+        return 0;
+    }
+
+    switch (dma_device->direction) {
+        case DMA_MEMORY_TO_MEMORY:
+            dma_ctrl_cfg.bits.SI = 1;
+            dma_ctrl_cfg.bits.DI = 1;
+            break;
+        case DMA_MEMORY_TO_PERIPH:
+            dma_ctrl_cfg.bits.SI = 1;
+            dma_ctrl_cfg.bits.DI = 0;
+            break;
+        case DMA_PERIPH_TO_MEMORY:
+            dma_ctrl_cfg.bits.SI = 0;
+            dma_ctrl_cfg.bits.DI = 1;
+            break;
+        case DMA_PERIPH_TO_PERIPH:
+            dma_ctrl_cfg.bits.SI = 0;
+            dma_ctrl_cfg.bits.DI = 0;
+            break;
+        default:
+            return -3;
+            break;
+    }
 
     dma_ctrl_cfg.bits.SBSize = dma_device->src_burst_size;
@@ -292,23 +305,30 @@ int dma_reload(struct device *dev, uint32_t src_addr, uint32_t dst_addr, uint32_
     dma_ctrl_cfg.bits.SWidth = dma_device->src_width;
     dma_ctrl_cfg.bits.DWidth = dma_device->dst_width;
 
-    if (dma_device->src_width == DMA_TRANSFER_WIDTH_8BIT) {
-        actual_transfer_offset = 4095;
-        actual_transfer_len = transfer_size;
-    } else if (dma_device->src_width == DMA_TRANSFER_WIDTH_16BIT) {
-        if (transfer_size % 2) {
-            return -1;
-        }
-        actual_transfer_offset = (4095 * 2);
-        actual_transfer_len = transfer_size / 2;
-    } else if (dma_device->src_width == DMA_TRANSFER_WIDTH_32BIT) {
-        if (transfer_size % 4) {
-            return -1;
-        }
-        actual_transfer_offset = (4095 * 4);
-        actual_transfer_len = transfer_size / 4;
-    }
+    switch (dma_device->src_width) {
+        case DMA_TRANSFER_WIDTH_8BIT:
+            actual_transfer_offset = 4095;
+            actual_transfer_len = transfer_size;
+            break;
+        case DMA_TRANSFER_WIDTH_16BIT:
+            if (transfer_size % 2) {
+                return -1;
+            }
+            actual_transfer_offset = 4095 << 1;
+            actual_transfer_len = transfer_size >> 1;
+            break;
+        case DMA_TRANSFER_WIDTH_32BIT:
+            if (transfer_size % 4) {
+                return -1;
+            }
+            actual_transfer_offset = 4095 << 2;
+            actual_transfer_len = transfer_size >> 2;
+            break;
+        default:
+            return -3;
+            break;
+    }
 
     malloc_count = actual_transfer_len / 4095;
@@ -326,79 +346,39 @@ int dma_reload(struct device *dev, uint32_t src_addr, uint32_t dst_addr, uint32_
     }
 
     if (dma_device->lli_cfg) {
-        /*transfer_size will be 4095 or 4095*2 or 4095*4 in different transfer width*/
-        if ((!remain_len) && (malloc_count == 1)) {
-            dma_device->lli_cfg[0].src_addr = src_addr;
-            dma_device->lli_cfg[0].dst_addr = dst_addr;
-            dma_device->lli_cfg[0].nextlli = 0;
-            dma_ctrl_cfg.bits.TransferSize = remain_len;
-            dma_ctrl_cfg.bits.I = 1;
-            memcpy(&dma_device->lli_cfg[0].cfg, &dma_ctrl_cfg, sizeof(dma_control_data_t));
-        }
-        /*transfer_size will be 4095*n or 4095*2*n or 4095*4*n,(n>1) in different transfer width*/
-        else if ((!remain_len) && (malloc_count > 1)) {
-            for (uint32_t i = 0; i < malloc_count; i++) {
-                dma_device->lli_cfg[i].src_addr = src_addr;
-                dma_device->lli_cfg[i].dst_addr = dst_addr;
-                dma_device->lli_cfg[i].nextlli = 0;
-                dma_ctrl_cfg.bits.TransferSize = 4095;
-                dma_ctrl_cfg.bits.I = 0;
-
-                if (dma_ctrl_cfg.bits.SI) {
-                    src_addr += actual_transfer_offset;
-                }
-                if (dma_ctrl_cfg.bits.DI) {
-                    dst_addr += actual_transfer_offset;
-                }
-                if (i == malloc_count - 1) {
-                    dma_ctrl_cfg.bits.I = 1;
-                    if (dma_device->transfer_mode == DMA_LLI_CYCLE_MODE) {
-                        dma_device->lli_cfg[i].nextlli = (uint32_t)&dma_device->lli_cfg[0];
-                    }
-                }
-                if (i) {
-                    dma_device->lli_cfg[i - 1].nextlli = (uint32_t)&dma_device->lli_cfg[i];
-                }
-                memcpy(&dma_device->lli_cfg[i].cfg, &dma_ctrl_cfg, sizeof(dma_control_data_t));
-            }
-        } else {
-            for (uint32_t i = 0; i < malloc_count; i++) {
-                dma_device->lli_cfg[i].src_addr = src_addr;
-                dma_device->lli_cfg[i].dst_addr = dst_addr;
-                dma_device->lli_cfg[i].nextlli = 0;
-                dma_ctrl_cfg.bits.TransferSize = 4095;
-                dma_ctrl_cfg.bits.I = 0;
-
-                if (dma_ctrl_cfg.bits.SI) {
-                    src_addr += actual_transfer_offset;
-                }
-                if (dma_ctrl_cfg.bits.DI) {
-                    dst_addr += actual_transfer_offset;
-                }
-                if (i == malloc_count - 1) {
-                    dma_ctrl_cfg.bits.TransferSize = remain_len;
-                    dma_ctrl_cfg.bits.I = 1;
-                    if (dma_device->transfer_mode == DMA_LLI_CYCLE_MODE) {
-                        dma_device->lli_cfg[i].nextlli = (uint32_t)&dma_device->lli_cfg[0];
-                    }
-                }
-                if (i) {
-                    dma_device->lli_cfg[i - 1].nextlli = (uint32_t)&dma_device->lli_cfg[i];
-                }
-                memcpy(&dma_device->lli_cfg[i].cfg, &dma_ctrl_cfg, sizeof(dma_control_data_t));
-            }
-        }
+        /*transfer_size will be integer multiple of 4095*n or 4095*2*n or 4095*4*n,(n>0) */
+        for (uint32_t i = 0; i < malloc_count; i++) {
+            dma_device->lli_cfg[i].src_addr = src_addr;
+            dma_device->lli_cfg[i].dst_addr = dst_addr;
+            dma_device->lli_cfg[i].nextlli = 0;
+            dma_ctrl_cfg.bits.TransferSize = 4095;
+            dma_ctrl_cfg.bits.I = 0;
+
+            if (dma_ctrl_cfg.bits.SI) {
+                src_addr += actual_transfer_offset;
+            }
+            if (dma_ctrl_cfg.bits.DI) {
+                dst_addr += actual_transfer_offset;
+            }
+            if (i == malloc_count - 1) {
+                if (remain_len) {
+                    dma_ctrl_cfg.bits.TransferSize = remain_len;
+                }
+                dma_ctrl_cfg.bits.I = 1;
+                if (dma_device->transfer_mode == DMA_LLI_CYCLE_MODE) {
+                    dma_device->lli_cfg[i].nextlli = (uint32_t)&dma_device->lli_cfg[0];
+                }
+            }
+            if (i) {
+                dma_device->lli_cfg[i - 1].nextlli = (uint32_t)&dma_device->lli_cfg[i];
+            }
+            memcpy(&dma_device->lli_cfg[i].cfg, &dma_ctrl_cfg, sizeof(dma_control_data_t));
+        }
+    }
 
     DMA_LLI_Update(dma_device->ch, (uint32_t)dma_device->lli_cfg);
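Note on the reworked LLI loop above: every list item moves at most 4095 transfer units; only the last item carries the remainder (when remain_len is non-zero) and the completion interrupt. In the old three-branch version, the single-entry case programmed TransferSize from remain_len even when it was zero, which appears to be the LLI config issue the commit title refers to. A minimal standalone sketch of the chunking arithmetic follows; remain_len = actual_transfer_len % 4095 and the bump of malloc_count for a non-zero remainder are outside this hunk, so their exact form here is an assumption based on the visible code.

    #include <stdint.h>
    #include <stdio.h>

    int main(void)
    {
        /* Hypothetical request: 10000 bytes with a 16-bit source width. */
        uint32_t transfer_size = 10000;
        uint32_t actual_transfer_len = transfer_size >> 1;   /* 5000 units, as in the diff */
        uint32_t malloc_count = actual_transfer_len / 4095;  /* 1 full-size LLI entry */
        uint32_t remain_len = actual_transfer_len % 4095;    /* 905 units left over (assumed) */

        if (remain_len) {
            malloc_count++;                                   /* assumed: extra entry for the tail */
        }

        /* Prints: entries=2, last TransferSize=905 */
        printf("entries=%u, last TransferSize=%u\n",
               (unsigned)malloc_count, (unsigned)(remain_len ? remain_len : 4095));
        return 0;
    }

The same rework is applied in the second changed file below.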


@@ -270,54 +270,62 @@ int dma_reload(struct device *dev, uint32_t src_addr, uint32_t dst_addr, uint32_
     DMA_Channel_Disable(dma_device->ch);
 
-    if (dma_device->direction == DMA_MEMORY_TO_MEMORY) {
-        dma_ctrl_cfg.bits.SI = 1;
-        dma_ctrl_cfg.bits.DI = 1;
-    } else if (dma_device->direction == DMA_MEMORY_TO_PERIPH) {
-        dma_ctrl_cfg.bits.SI = 1;
-        dma_ctrl_cfg.bits.DI = 0;
-    } else if (dma_device->direction == DMA_PERIPH_TO_MEMORY) {
-        dma_ctrl_cfg.bits.SI = 0;
-        dma_ctrl_cfg.bits.DI = 1;
-    } else if (dma_device->direction == DMA_PERIPH_TO_PERIPH) {
-        dma_ctrl_cfg.bits.SI = 0;
-        dma_ctrl_cfg.bits.DI = 0;
-    }
-
-    if (dma_device->direction == DMA_MEMORY_TO_MEMORY) {
-        switch (dma_device->src_width) {
-            case DMA_TRANSFER_WIDTH_8BIT:
-                dma_device->src_burst_size = DMA_BURST_16BYTE;
-            case DMA_TRANSFER_WIDTH_16BIT:
-                dma_device->src_burst_size = DMA_BURST_8BYTE;
-            case DMA_TRANSFER_WIDTH_32BIT:
-                dma_device->src_burst_size = DMA_BURST_4BYTE;
-        }
-    } else {
-        dma_ctrl_cfg.bits.SBSize = dma_device->src_burst_size;
-        dma_ctrl_cfg.bits.DBSize = dma_device->dst_burst_size;
-    }
+    if (transfer_size == 0) {
+        return 0;
+    }
+
+    switch (dma_device->direction) {
+        case DMA_MEMORY_TO_MEMORY:
+            dma_ctrl_cfg.bits.SI = 1;
+            dma_ctrl_cfg.bits.DI = 1;
+            break;
+        case DMA_MEMORY_TO_PERIPH:
+            dma_ctrl_cfg.bits.SI = 1;
+            dma_ctrl_cfg.bits.DI = 0;
+            break;
+        case DMA_PERIPH_TO_MEMORY:
+            dma_ctrl_cfg.bits.SI = 0;
+            dma_ctrl_cfg.bits.DI = 1;
+            break;
+        case DMA_PERIPH_TO_PERIPH:
+            dma_ctrl_cfg.bits.SI = 0;
+            dma_ctrl_cfg.bits.DI = 0;
+            break;
+        default:
+            return -3;
+            break;
+    }
+
+    dma_ctrl_cfg.bits.SBSize = dma_device->src_burst_size;
+    dma_ctrl_cfg.bits.DBSize = dma_device->dst_burst_size;
 
     dma_ctrl_cfg.bits.SWidth = dma_device->src_width;
     dma_ctrl_cfg.bits.DWidth = dma_device->dst_width;
 
-    if (dma_device->src_width == DMA_TRANSFER_WIDTH_8BIT) {
-        actual_transfer_offset = 4095;
-        actual_transfer_len = transfer_size;
-    } else if (dma_device->src_width == DMA_TRANSFER_WIDTH_16BIT) {
-        if (transfer_size % 2) {
-            return -1;
-        }
-        actual_transfer_offset = (4095 * 2);
-        actual_transfer_len = transfer_size / 2;
-    } else if (dma_device->src_width == DMA_TRANSFER_WIDTH_32BIT) {
-        if (transfer_size % 4) {
-            return -1;
-        }
-        actual_transfer_offset = (4095 * 4);
-        actual_transfer_len = transfer_size / 4;
-    }
+    switch (dma_device->src_width) {
+        case DMA_TRANSFER_WIDTH_8BIT:
+            actual_transfer_offset = 4095;
+            actual_transfer_len = transfer_size;
+            break;
+        case DMA_TRANSFER_WIDTH_16BIT:
+            if (transfer_size % 2) {
+                return -1;
+            }
+            actual_transfer_offset = 4095 << 1;
+            actual_transfer_len = transfer_size >> 1;
+            break;
+        case DMA_TRANSFER_WIDTH_32BIT:
+            if (transfer_size % 4) {
+                return -1;
+            }
+            actual_transfer_offset = 4095 << 2;
+            actual_transfer_len = transfer_size >> 2;
+            break;
+        default:
+            return -3;
+            break;
+    }
 
     malloc_count = actual_transfer_len / 4095;
@@ -335,79 +343,39 @@ int dma_reload(struct device *dev, uint32_t src_addr, uint32_t dst_addr, uint32_
     }
 
    if (dma_device->lli_cfg) {
-        /*transfer_size will be 4095 or 4095*2 or 4095*4 in different transfer width*/
-        if ((!remain_len) && (malloc_count == 1)) {
-            dma_device->lli_cfg[0].src_addr = src_addr;
-            dma_device->lli_cfg[0].dst_addr = dst_addr;
-            dma_device->lli_cfg[0].nextlli = 0;
-            dma_ctrl_cfg.bits.TransferSize = remain_len;
-            dma_ctrl_cfg.bits.I = 1;
-            memcpy(&dma_device->lli_cfg[0].cfg, &dma_ctrl_cfg, sizeof(dma_control_data_t));
-        }
-        /*transfer_size will be 4095*n or 4095*2*n or 4095*4*n,(n>1) in different transfer width*/
-        else if ((!remain_len) && (malloc_count > 1)) {
-            for (uint32_t i = 0; i < malloc_count; i++) {
-                dma_device->lli_cfg[i].src_addr = src_addr;
-                dma_device->lli_cfg[i].dst_addr = dst_addr;
-                dma_device->lli_cfg[i].nextlli = 0;
-                dma_ctrl_cfg.bits.TransferSize = 4095;
-                dma_ctrl_cfg.bits.I = 0;
-
-                if (dma_ctrl_cfg.bits.SI) {
-                    src_addr += actual_transfer_offset;
-                }
-                if (dma_ctrl_cfg.bits.DI) {
-                    dst_addr += actual_transfer_offset;
-                }
-                if (i == malloc_count - 1) {
-                    dma_ctrl_cfg.bits.I = 1;
-                    if (dma_device->transfer_mode == DMA_LLI_CYCLE_MODE) {
-                        dma_device->lli_cfg[i].nextlli = (uint32_t)&dma_device->lli_cfg[0];
-                    }
-                }
-                if (i) {
-                    dma_device->lli_cfg[i - 1].nextlli = (uint32_t)&dma_device->lli_cfg[i];
-                }
-                memcpy(&dma_device->lli_cfg[i].cfg, &dma_ctrl_cfg, sizeof(dma_control_data_t));
-            }
-        } else {
-            for (uint32_t i = 0; i < malloc_count; i++) {
-                dma_device->lli_cfg[i].src_addr = src_addr;
-                dma_device->lli_cfg[i].dst_addr = dst_addr;
-                dma_device->lli_cfg[i].nextlli = 0;
-                dma_ctrl_cfg.bits.TransferSize = 4095;
-                dma_ctrl_cfg.bits.I = 0;
-
-                if (dma_ctrl_cfg.bits.SI) {
-                    src_addr += actual_transfer_offset;
-                }
-                if (dma_ctrl_cfg.bits.DI) {
-                    dst_addr += actual_transfer_offset;
-                }
-                if (i == malloc_count - 1) {
-                    dma_ctrl_cfg.bits.TransferSize = remain_len;
-                    dma_ctrl_cfg.bits.I = 1;
-                    if (dma_device->transfer_mode == DMA_LLI_CYCLE_MODE) {
-                        dma_device->lli_cfg[i].nextlli = (uint32_t)&dma_device->lli_cfg[0];
-                    }
-                }
-                if (i) {
-                    dma_device->lli_cfg[i - 1].nextlli = (uint32_t)&dma_device->lli_cfg[i];
-                }
-                memcpy(&dma_device->lli_cfg[i].cfg, &dma_ctrl_cfg, sizeof(dma_control_data_t));
-            }
-        }
+        /*transfer_size will be integer multiple of 4095*n or 4095*2*n or 4095*4*n,(n>0) */
+        for (uint32_t i = 0; i < malloc_count; i++) {
+            dma_device->lli_cfg[i].src_addr = src_addr;
+            dma_device->lli_cfg[i].dst_addr = dst_addr;
+            dma_device->lli_cfg[i].nextlli = 0;
+            dma_ctrl_cfg.bits.TransferSize = 4095;
+            dma_ctrl_cfg.bits.I = 0;
+
+            if (dma_ctrl_cfg.bits.SI) {
+                src_addr += actual_transfer_offset;
+            }
+            if (dma_ctrl_cfg.bits.DI) {
+                dst_addr += actual_transfer_offset;
+            }
+            if (i == malloc_count - 1) {
+                if (remain_len) {
+                    dma_ctrl_cfg.bits.TransferSize = remain_len;
+                }
+                dma_ctrl_cfg.bits.I = 1;
+                if (dma_device->transfer_mode == DMA_LLI_CYCLE_MODE) {
+                    dma_device->lli_cfg[i].nextlli = (uint32_t)&dma_device->lli_cfg[0];
+                }
+            }
+            if (i) {
+                dma_device->lli_cfg[i - 1].nextlli = (uint32_t)&dma_device->lli_cfg[i];
+            }
+            memcpy(&dma_device->lli_cfg[i].cfg, &dma_ctrl_cfg, sizeof(dma_control_data_t));
+        }
+    }
 
     DMA_LLI_Update(dma_device->ch, (uint32_t)dma_device->lli_cfg);
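With these changes, dma_reload() reports configuration problems through its return value instead of silently mis-programming the channel: the new default branches return -3 for an unsupported direction or transfer width, -1 still flags a size not aligned to the source width, and a zero-length request is now a clean no-op. A hedged caller sketch follows; the prototype is truncated in the hunk headers above, so the 4-argument form and the helper name here are assumptions inferred from the parameter names used in the body.

    #include <stdint.h>

    struct device;                    /* opaque HAL device handle */

    /* Assumed prototype, inferred from the (truncated) hunk headers. */
    extern int dma_reload(struct device *dev, uint32_t src_addr, uint32_t dst_addr,
                          uint32_t transfer_size);

    /* Hypothetical helper: rebuild the LLI chain and map the new error codes. */
    int dma_prepare_copy(struct device *dev, const void *src, void *dst, uint32_t len)
    {
        int ret = dma_reload(dev, (uint32_t)(uintptr_t)src, (uint32_t)(uintptr_t)dst, len);

        if (ret == -1) {
            /* len is not a multiple of the configured source width */
        } else if (ret == -3) {
            /* unsupported direction or transfer width (new default branches) */
        }
        /* ret == 0 covers both a zero-length no-op and a fully built LLI chain */
        return ret;
    }

Returning an error from the default branches also means an uninitialised or corrupted direction/width field is caught before the channel is enabled, rather than leaving SI/DI or the offset arithmetic in an undefined state.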