@@ -75,7 +75,6 @@ void fsl_edma_disable_request(struct fsl_edma_chan *fsl_chan)
 		iowrite8(EDMA_CEEI_CEEI(ch), regs->ceei);
 	}
 }
-EXPORT_SYMBOL_GPL(fsl_edma_disable_request);
 
 static void mux_configure8(struct fsl_edma_chan *fsl_chan, void __iomem *addr,
 			   u32 off, u32 slot, bool enable)
@@ -126,7 +125,6 @@ void fsl_edma_chan_mux(struct fsl_edma_chan *fsl_chan,
 	else
 		mux_configure8(fsl_chan, muxaddr, ch_off, slot, enable);
 }
-EXPORT_SYMBOL_GPL(fsl_edma_chan_mux);
 
 static unsigned int fsl_edma_get_tcd_attr(enum dma_slave_buswidth addr_width)
 {
@@ -155,7 +153,6 @@ void fsl_edma_free_desc(struct virt_dma_desc *vdesc)
 			      fsl_desc->tcd[i].ptcd);
 	kfree(fsl_desc);
 }
-EXPORT_SYMBOL_GPL(fsl_edma_free_desc);
 
 int fsl_edma_terminate_all(struct dma_chan *chan)
 {
@@ -172,7 +169,6 @@ int fsl_edma_terminate_all(struct dma_chan *chan)
 	vchan_dma_desc_free_list(&fsl_chan->vchan, &head);
 	return 0;
 }
-EXPORT_SYMBOL_GPL(fsl_edma_terminate_all);
 
 int fsl_edma_pause(struct dma_chan *chan)
 {
@@ -188,7 +184,6 @@ int fsl_edma_pause(struct dma_chan *chan)
 	spin_unlock_irqrestore(&fsl_chan->vchan.lock, flags);
 	return 0;
 }
-EXPORT_SYMBOL_GPL(fsl_edma_pause);
 
 int fsl_edma_resume(struct dma_chan *chan)
 {
@@ -204,7 +199,6 @@ int fsl_edma_resume(struct dma_chan *chan)
 	spin_unlock_irqrestore(&fsl_chan->vchan.lock, flags);
 	return 0;
 }
-EXPORT_SYMBOL_GPL(fsl_edma_resume);
 
 static void fsl_edma_unprep_slave_dma(struct fsl_edma_chan *fsl_chan)
 {
@@ -265,7 +259,6 @@ int fsl_edma_slave_config(struct dma_chan *chan,
 
 	return 0;
 }
-EXPORT_SYMBOL_GPL(fsl_edma_slave_config);
 
 static size_t fsl_edma_desc_residue(struct fsl_edma_chan *fsl_chan,
 				    struct virt_dma_desc *vdesc, bool in_progress)
@@ -340,7 +333,6 @@ enum dma_status fsl_edma_tx_status(struct dma_chan *chan,
 
 	return fsl_chan->status;
 }
-EXPORT_SYMBOL_GPL(fsl_edma_tx_status);
 
 static void fsl_edma_set_tcd_regs(struct fsl_edma_chan *fsl_chan,
 				  struct fsl_edma_hw_tcd *tcd)
@@ -520,7 +512,6 @@ struct dma_async_tx_descriptor *fsl_edma_prep_dma_cyclic(
 
 	return vchan_tx_prep(&fsl_chan->vchan, &fsl_desc->vdesc, flags);
 }
-EXPORT_SYMBOL_GPL(fsl_edma_prep_dma_cyclic);
 
 struct dma_async_tx_descriptor *fsl_edma_prep_slave_sg(
 		struct dma_chan *chan, struct scatterlist *sgl,
@@ -589,7 +580,6 @@ struct dma_async_tx_descriptor *fsl_edma_prep_slave_sg(
 
 	return vchan_tx_prep(&fsl_chan->vchan, &fsl_desc->vdesc, flags);
 }
-EXPORT_SYMBOL_GPL(fsl_edma_prep_slave_sg);
 
 struct dma_async_tx_descriptor *fsl_edma_prep_memcpy(struct dma_chan *chan,
 						     dma_addr_t dma_dst, dma_addr_t dma_src,
@@ -612,7 +602,6 @@ struct dma_async_tx_descriptor *fsl_edma_prep_memcpy(struct dma_chan *chan,
 
 	return vchan_tx_prep(&fsl_chan->vchan, &fsl_desc->vdesc, flags);
 }
-EXPORT_SYMBOL_GPL(fsl_edma_prep_memcpy);
 
 void fsl_edma_xfer_desc(struct fsl_edma_chan *fsl_chan)
 {
@@ -629,7 +618,6 @@ void fsl_edma_xfer_desc(struct fsl_edma_chan *fsl_chan)
 	fsl_chan->status = DMA_IN_PROGRESS;
 	fsl_chan->idle = false;
 }
-EXPORT_SYMBOL_GPL(fsl_edma_xfer_desc);
 
 void fsl_edma_issue_pending(struct dma_chan *chan)
 {
@@ -649,7 +637,6 @@ void fsl_edma_issue_pending(struct dma_chan *chan)
 
 	spin_unlock_irqrestore(&fsl_chan->vchan.lock, flags);
 }
-EXPORT_SYMBOL_GPL(fsl_edma_issue_pending);
 
 int fsl_edma_alloc_chan_resources(struct dma_chan *chan)
 {
@@ -660,7 +647,6 @@ int fsl_edma_alloc_chan_resources(struct dma_chan *chan)
 				32, 0);
 	return 0;
 }
-EXPORT_SYMBOL_GPL(fsl_edma_alloc_chan_resources);
 
 void fsl_edma_free_chan_resources(struct dma_chan *chan)
 {
@@ -683,7 +669,6 @@ void fsl_edma_free_chan_resources(struct dma_chan *chan)
 	fsl_chan->tcd_pool = NULL;
 	fsl_chan->is_sw = false;
 }
-EXPORT_SYMBOL_GPL(fsl_edma_free_chan_resources);
 
 void fsl_edma_cleanup_vchan(struct dma_device *dmadev)
 {
@@ -695,7 +680,6 @@ void fsl_edma_cleanup_vchan(struct dma_device *dmadev)
 		tasklet_kill(&chan->vchan.task);
 	}
 }
-EXPORT_SYMBOL_GPL(fsl_edma_cleanup_vchan);
 
 /*
  * On the 32 channels Vybrid/mpc577x edma version (here called "v1"),
@@ -743,6 +727,5 @@ void fsl_edma_setup_regs(struct fsl_edma_engine *edma)
 
 	edma->regs.tcd = edma->membase + EDMA_TCD;
 }
-EXPORT_SYMBOL_GPL(fsl_edma_setup_regs);
 
 MODULE_LICENSE("GPL v2");