@@ -3185,27 +3185,40 @@ static int udma_configure_statictr(struct udma_chan *uc, struct udma_desc *d,
 
 	d->static_tr.elcnt = elcnt;
 
-	/*
-	 * PDMA must to close the packet when the channel is in packet mode.
-	 * For TR mode when the channel is not cyclic we also need PDMA to close
-	 * the packet otherwise the transfer will stall because PDMA holds on
-	 * the data it has received from the peripheral.
-	 */
 	if (uc->config.pkt_mode || !uc->cyclic) {
+		/*
+		 * PDMA must close the packet when the channel is in packet mode.
+		 * For TR mode when the channel is not cyclic we also need PDMA
+		 * to close the packet otherwise the transfer will stall because
+		 * PDMA holds on the data it has received from the peripheral.
+		 */
 		unsigned int div = dev_width * elcnt;
 
 		if (uc->cyclic)
 			d->static_tr.bstcnt = d->residue / d->sglen / div;
 		else
 			d->static_tr.bstcnt = d->residue / div;
+	} else if (uc->ud->match_data->type == DMA_TYPE_BCDMA &&
+		   uc->config.dir == DMA_DEV_TO_MEM &&
+		   uc->cyclic) {
+		/*
+		 * For cyclic mode with BCDMA we have to set EOP in each TR to
+		 * prevent short packet errors seen on channel teardown. So the
+		 * PDMA must close the packet after every TR transfer by setting
+		 * burst count equal to the number of bytes transferred.
+		 */
+		struct cppi5_tr_type1_t *tr_req = d->hwdesc[0].tr_req_base;
 
-		if (uc->config.dir == DMA_DEV_TO_MEM &&
-		    d->static_tr.bstcnt > uc->ud->match_data->statictr_z_mask)
-			return -EINVAL;
+		d->static_tr.bstcnt =
+			(tr_req->icnt0 * tr_req->icnt1) / dev_width;
 	} else {
 		d->static_tr.bstcnt = 0;
 	}
 
+	if (uc->config.dir == DMA_DEV_TO_MEM &&
+	    d->static_tr.bstcnt > uc->ud->match_data->statictr_z_mask)
+		return -EINVAL;
+
 	return 0;
 }
 
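To make the hunk above easier to follow outside the driver, here is a minimal standalone sketch of the reworked burst-count selection, with the DEV_TO_MEM range check now applied after every branch instead of only the first one. The types and the pick_bstcnt() helper are invented for illustration; they only mirror the shape of udma_configure_statictr(), not its real interfaces.

/* Illustrative only: hypothetical types, not the driver's structures. */
#include <stdbool.h>
#include <stdint.h>

enum xfer_dir { DEV_TO_MEM, MEM_TO_DEV };

struct statictr_cfg {
	bool pkt_mode;
	bool cyclic;
	bool is_bcdma;
	enum xfer_dir dir;
	uint32_t statictr_z_mask;	/* e.g. 0xfff for a 12-bit Z field */
};

static int pick_bstcnt(const struct statictr_cfg *c, uint32_t residue,
		       uint32_t sglen, uint32_t dev_width, uint32_t elcnt,
		       uint32_t icnt0, uint32_t icnt1, uint32_t *bstcnt)
{
	if (c->pkt_mode || !c->cyclic) {
		/* PDMA closes the packet: size the burst from the residue */
		uint32_t div = dev_width * elcnt;

		*bstcnt = c->cyclic ? residue / sglen / div : residue / div;
	} else if (c->is_bcdma && c->dir == DEV_TO_MEM && c->cyclic) {
		/* BCDMA cyclic RX: close the packet after every TR */
		*bstcnt = (icnt0 * icnt1) / dev_width;
	} else {
		*bstcnt = 0;
	}

	/* the range check now covers every branch, including the new one */
	if (c->dir == DEV_TO_MEM && *bstcnt > c->statictr_z_mask)
		return -1;

	return 0;
}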
@@ -3450,8 +3463,9 @@ udma_prep_slave_sg(struct dma_chan *chan, struct scatterlist *sgl,
 	/* static TR for remote PDMA */
 	if (udma_configure_statictr(uc, d, dev_width, burst)) {
 		dev_err(uc->ud->dev,
-			"%s: StaticTR Z is limited to maximum 4095 (%u)\n",
-			__func__, d->static_tr.bstcnt);
+			"%s: StaticTR Z is limited to maximum %u (%u)\n",
+			__func__, uc->ud->match_data->statictr_z_mask,
+			d->static_tr.bstcnt);
 
 		udma_free_hwdesc(uc, d);
 		kfree(d);
@@ -3476,6 +3490,7 @@ udma_prep_dma_cyclic_tr(struct udma_chan *uc, dma_addr_t buf_addr,
 	u16 tr0_cnt0, tr0_cnt1, tr1_cnt0;
 	unsigned int i;
 	int num_tr;
+	u32 period_csf = 0;
 
 	num_tr = udma_get_tr_counters(period_len, __ffs(buf_addr), &tr0_cnt0,
 				      &tr0_cnt1, &tr1_cnt0);
@@ -3498,6 +3513,20 @@ udma_prep_dma_cyclic_tr(struct udma_chan *uc, dma_addr_t buf_addr,
 		period_addr = buf_addr |
 			((u64)uc->config.asel << K3_ADDRESS_ASEL_SHIFT);
 
+	/*
+	 * For BCDMA <-> PDMA transfers, the EOP flag needs to be set on the
+	 * last TR of a descriptor, to mark the packet as complete.
+	 * This is required for getting the teardown completion message in case
+	 * of TX, and to avoid short-packet error in case of RX.
+	 *
+	 * As we are in cyclic mode, we do not know which period might be the
+	 * last one, so set the flag for each period.
+	 */
+	if (uc->config.ep_type == PSIL_EP_PDMA_XY &&
+	    uc->ud->match_data->type == DMA_TYPE_BCDMA) {
+		period_csf = CPPI5_TR_CSF_EOP;
+	}
+
 	for (i = 0; i < periods; i++) {
 		int tr_idx = i * num_tr;
 
@@ -3525,8 +3554,10 @@ udma_prep_dma_cyclic_tr(struct udma_chan *uc, dma_addr_t buf_addr,
 		}
 
 		if (!(flags & DMA_PREP_INTERRUPT))
-			cppi5_tr_csf_set(&tr_req[tr_idx].flags,
-					 CPPI5_TR_CSF_SUPR_EVT);
+			period_csf |= CPPI5_TR_CSF_SUPR_EVT;
+
+		if (period_csf)
+			cppi5_tr_csf_set(&tr_req[tr_idx].flags, period_csf);
 
 		period_addr += period_len;
 	}
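Taken together, the two hunks above replace the single unconditional cppi5_tr_csf_set() call for event suppression with an accumulated period_csf value that can also carry the EOP bit set before the loop. A compact sketch of the resulting pattern, using stand-in flag values and a set_csf() placeholder rather than the real CPPI5 helpers:

/* Stand-in bit values and helper; not the CPPI5 API itself. */
#include <stdint.h>

#define TR_CSF_SUPR_EVT	(1u << 2)
#define TR_CSF_EOP	(1u << 3)

struct tr { uint32_t flags; };

static void set_csf(struct tr *tr, uint32_t csf)
{
	tr->flags |= csf;
}

static void setup_periods(struct tr *trs, unsigned int periods,
			  int suppress_events, int bcdma_pdma_xy)
{
	/* EOP is decided once, before the loop, and applies to every period */
	uint32_t period_csf = bcdma_pdma_xy ? TR_CSF_EOP : 0;
	unsigned int i;

	for (i = 0; i < periods; i++) {
		/*
		 * Event suppression is loop-invariant, so OR-ing it in on
		 * each iteration (as the patch does) is harmless.
		 */
		if (suppress_events)
			period_csf |= TR_CSF_SUPR_EVT;

		/* one flags write per TR instead of separate calls */
		if (period_csf)
			set_csf(&trs[i], period_csf);
	}
}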
@@ -3655,8 +3686,9 @@ udma_prep_dma_cyclic(struct dma_chan *chan, dma_addr_t buf_addr, size_t buf_len,
 	/* static TR for remote PDMA */
 	if (udma_configure_statictr(uc, d, dev_width, burst)) {
 		dev_err(uc->ud->dev,
-			"%s: StaticTR Z is limited to maximum 4095 (%u)\n",
-			__func__, d->static_tr.bstcnt);
+			"%s: StaticTR Z is limited to maximum %u (%u)\n",
+			__func__, uc->ud->match_data->statictr_z_mask,
+			d->static_tr.bstcnt);
 
 		udma_free_hwdesc(uc, d);
 		kfree(d);