/*******************************************************************************
 * Copyright (C) 2013, ZTE Corporation.
 *
 * File Name: dma.c
 * File Mark:
 * Description:
 * Others:
 * Version: 0.1
 * Author: limeifeng
 * Date:
 * modify
 *
 *
 ********************************************************************************/

/****************************************************************************
* Include files
****************************************************************************/
#include <linux/kernel.h>
#include <linux/interrupt.h>
#include <linux/spinlock.h>
#include <linux/dma-mapping.h>
#include <linux/mutex.h>
#include <linux/platform_device.h>
#include <linux/err.h>
#include <linux/ioport.h>
#include <linux/module.h>
#include <linux/clk.h>
#include <mach/iomap.h>
#include <mach/clock.h>
#include <mach/zx297510_dma.h>

#include "dmaengine.h"
#include <mach/usb_debug.h>
/****************************************************************************
* Local Macros
****************************************************************************/
#define BIT_SHIFT_L(value, BIT_NO)	(unsigned int)((value) << (BIT_NO))
#define GET_HIGH_16BIT(val)		(unsigned int)((val) >> 16)
#define GET_LOW_16BIT(val)		(unsigned int)((val) & 0xffff)
#define DMA_CHANNEL(dmac, channel)	(unsigned int)(((dmac) << 16) | (channel))

/* dma control reg bits */
#define DMA_CTRL_ENABLE(value)			BIT_SHIFT_L(value, 0)
#define DMA_CTRL_SOFT_B_REQ(value)		BIT_SHIFT_L(value, 1)
#define DMA_CTRL_SRC_FIFO_MOD(value)		BIT_SHIFT_L(value, 2)
#define DMA_CTRL_DEST_FIFO_MOD(value)		BIT_SHIFT_L(value, 3)
#define DMA_CTRL_IRQ_MOD(value)			BIT_SHIFT_L(value, 4)
#define DMA_CTRL_SRC_BURST_SIZE(value)		BIT_SHIFT_L(value, 6)
#define DMA_CTRL_SRC_BURST_LENGTH(value)	BIT_SHIFT_L(value, 9)
#define DMA_CTRL_DEST_BURST_SIZE(value)		BIT_SHIFT_L(value, 13)
#define DMA_CTRL_DEST_BURST_LENGTH(value)	BIT_SHIFT_L(value, 16)
#define DMA_CTRL_INTERRUPT_SEL(value)		BIT_SHIFT_L(value, 20)
#define DMA_CTRL_FORCE_CLOSE(value)		BIT_SHIFT_L(value, 31)
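
/*
 * A channel handle packs the controller id into the high 16 bits and the
 * channel number into the low 16 bits. A minimal illustration, assuming
 * DMAC1 evaluates to 1 (the values are only for the example):
 *
 *	unsigned int handle = DMA_CHANNEL(DMAC1, 3);	// 0x00010003
 *	unsigned int dmac = GET_HIGH_16BIT(handle);	// 1
 *	unsigned int chan = GET_LOW_16BIT(handle);	// 3
 */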

/* for LLI */
#define MAX_LLI_PARA_CNT	32	/* maximum number of entries in one linked list */
#define DMA_RAM_END		(0x23400000 + 0x500000)

#define MAX(a, b)	((a) > (b) ? (a) : (b))
#define FALSE		0
#define TRUE		1
#undef ZX297510_DMA_TEST
/* #define ZX297510_DMA_TEST */
/****************************************************************************
* Local Types
****************************************************************************/
static DEFINE_MUTEX(dma0_mutex);
static DEFINE_MUTEX(dma1_mutex);

/* for DMA0 IO remap */
static void __iomem *dma0_base = NULL;
static void __iomem *dma1_base = NULL;
static void __iomem *dma_reuse_reg_base = NULL;
/* for lli transfer */
static dma_lli_para *dma_lli_para_array[DMAC_NUM][DMA_CHAN_NUM];
static dma_lli_para *dma_addr[2] = {NULL};

#ifdef ZX297510_DMA_TEST
static void *src = NULL;
static unsigned int dma_int_count = 0;
#endif

struct zx297510_dma_chan {
	dma_peripheral_id peripheralID;
	short int channel_id;
	struct zx297510_dmac *dma_device;
	struct dma_chan chan;
	struct dma_async_tx_descriptor desc;
	struct tasklet_struct tasklet;
	enum dma_status status;
	spinlock_t lock;
	bool be_used;
	dma_chan_reg __iomem *chan_regs;
	dma_chan_def dma_chan_par;	/* srcaddr, destaddr, burstsize... */
	dma_cookie_t zx29_dma_cookie;
};

struct zx297510_dmac {
	unsigned int dmac_id;
	struct dma_device dma;
	dma_regs __iomem *reg;
	dma_chan_config *chan_config;
	struct mutex dma_mutex;
	struct zx297510_dma_chan dma_chan[DMA_CHAN_NUM];
};

struct zx297510_dmac dma_dev[DMAC_NUM];
struct timer_list dma0_timer;
unsigned int dma_timer_num = 0;
unsigned int dma_err_num = 0;

static dma_chan_config dma0_chan_config[DMA_CHAN_NUM] = {
	DMAC0_CFG_CH0, DMAC0_CFG_CH1, DMAC0_CFG_CH2, DMAC0_CFG_CH3, DMAC0_CFG_CH4,
	DMAC0_CFG_CH5, DMAC0_CFG_CH6, DMAC0_CFG_CH7, DMAC0_CFG_CH8, DMAC0_CFG_CH9,
	DMAC0_CFG_CH10, DMAC0_CFG_CH11, DMAC0_CFG_CH12, DMAC0_CFG_CH13, DMAC0_CFG_CH14,
	DMAC0_CFG_CH15
};
static dma_chan_config dma1_chan_config[DMA_CHAN_NUM] = {
	DMAC1_CFG_CH0, DMAC1_CFG_CH1, DMAC1_CFG_CH2, DMAC1_CFG_CH3, DMAC1_CFG_CH4,
	DMAC1_CFG_CH5, DMAC1_CFG_CH6, DMAC1_CFG_CH7, DMAC1_CFG_CH8, DMAC1_CFG_CH9,
	DMAC1_CFG_CH10, DMAC1_CFG_CH11, DMAC1_CFG_CH12, DMAC1_CFG_CH13, DMAC1_CFG_CH14,
	DMAC1_CFG_CH15
};

static signed int dma_find_chan(dma_peripheral_id peripheralID, dmac_id dmaID);
static signed int dma_reset_chan(struct zx297510_dma_chan *chan);
static signed int dma_set_chan_addr(unsigned int dmaID, unsigned int channel, dma_chan_def *ptChanPar);
static signed int dma_set_chan_ctrl(unsigned int dmaID, unsigned int channel, dma_chan_def *ptChanPar);
static signed int dma_set_chan_para(unsigned int dmaID, unsigned int channel, dma_chan_def *ptChanPar);

static void zx29_dma_tasklet(unsigned long data)
{
	struct zx297510_dma_chan *chan = (struct zx297510_dma_chan *)data;

	if (chan->desc.callback)
		chan->desc.callback(chan->desc.callback_param);
}

static struct zx297510_dma_chan *to_zx29_dma_chan(struct dma_chan *chan)
{
	return container_of(chan, struct zx297510_dma_chan, chan);
}

static signed int dma_disable_chan(struct zx297510_dma_chan *chan)
{
	unsigned int dmac_id = chan->dma_device->dmac_id;
	unsigned int channel_id = chan->channel_id;
	dma_chan_reg __iomem *chan_reg_ptr = NULL;

	if (channel_id >= DMA_CHAN_NUM || dmac_id >= DMAC_NUM)
		return -EINVAL;
	if (dma_dev[dmac_id].chan_config[channel_id].ownner != CORE_ID_A9)
		return -EINVAL;

	chan_reg_ptr = &(dma_dev[dmac_id].reg->channel[channel_id]);
	chan_reg_ptr->control |= DMA_CTRL_FORCE_CLOSE(1);
	return 0;
}

/* reset channel parameters */
static signed int dma_reset_chan(struct zx297510_dma_chan *chan)
{
	unsigned int dmac_id = chan->dma_device->dmac_id;
	unsigned int channel_id = chan->channel_id;
	dma_regs __iomem *pReg = NULL;
	dma_chan_reg __iomem *chan_reg_ptr = NULL;

	if (channel_id >= DMA_CHAN_NUM || dmac_id >= DMAC_NUM)
		return -EINVAL;
	if (dma_dev[dmac_id].chan_config[channel_id].ownner != CORE_ID_A9)
		return -EINVAL;

	pReg = dma_dev[dmac_id].reg;
	chan_reg_ptr = &(pReg->channel[channel_id]);

	/* force close current channel */
	chan_reg_ptr->control |= DMA_CTRL_FORCE_CLOSE(1);

	memset((void *)chan_reg_ptr, 0, sizeof(dma_chan_reg));
	pReg->raw_int_tc_status |= BIT_SHIFT_L(0x1, channel_id);
	pReg->raw_int_src_err_status |= BIT_SHIFT_L(0x1, channel_id);
	pReg->raw_int_dest_err_status |= BIT_SHIFT_L(0x1, channel_id);
	pReg->raw_int_cfg_err_status |= BIT_SHIFT_L(0x1, channel_id);
	//dma_dev[dmac_id].chan_config[channel_id].channelCbk = NULL;
	//dma_dev[dmac_id].chan_config[channel_id].data = NULL;
	dma_dev[dmac_id].chan_config[channel_id].isUsed = FALSE;
	return 0;
}

/* find the fixed free channel for peripheralID */
static signed int dma_find_chan(dma_peripheral_id peripheralID, dmac_id dmaID)
{
	unsigned int i = 0;
	unsigned int reg_value = 0;
	unsigned int chNum = (unsigned int)peripheralID;
	dma_chan_config *dma_chan_config_ptr = dma_dev[dmaID].chan_config;

	/* if a dma1 channel is requested on dma0, return error */
	if (dmaID == DMAC0 && peripheralID >= DMAC1_CH_TD_DMA0)
		return -EAGAIN;
	/* if a dma0 channel is requested on dma1, return error */
	else if ((dmaID == DMAC1) && peripheralID < DMAC1_CH_TD_DMA0)
		return -EAGAIN;

	/* if there is a free channel, allocate it to M2M */
	if (DMAC0_CH_MEMORY == peripheralID || DMAC1_CH_MEMORY == peripheralID) {
		for (i = 0; i < DMA_CHAN_NUM; i++) {
			if (dma_chan_config_ptr[i].isUsed == FALSE) {
				dma_chan_config_ptr[i].isUsed = TRUE;
				return DMA_CHANNEL(dmaID, i);
			}
		}
		return -EAGAIN;
	}

	/* if the channel is reused, get the channel number */
	if (peripheralID == DMAC0_CH_I2S0_TX)
		chNum = peripheralID - DMAC0_REUSE;
	if ((peripheralID >= DMAC1_CH_RX_PDSCH_CIR_RAM) && (peripheralID <= DMAC1_CH_RX_MBSFN_CIR_MAX))
		chNum = peripheralID - DMAC1_REUSE;

	if (dmaID == DMAC1)
		chNum -= DMAC1_CH_START;

	/* if the channel is already in use, return error */
	if (dma_chan_config_ptr[chNum].isUsed == TRUE)
		return -EAGAIN;

	/* config dma0 reuse regs */
	if (peripheralID == DMAC0_CH_USIM1) {
		reg_value = ioread32(dma_reuse_reg_base);
		reg_value |= BIT_SHIFT_L(0x1, chNum);
		iowrite32(reg_value, dma_reuse_reg_base);
	} else if (peripheralID == DMAC0_CH_I2S0_TX) {
		reg_value = ioread32(dma_reuse_reg_base);
		reg_value &= ~(BIT_SHIFT_L(0x1, chNum));
		iowrite32(reg_value, dma_reuse_reg_base);
	}
	/* config dma1 reuse regs */
	if ((peripheralID >= DMAC1_CH_TD_DMA0) && (peripheralID <= DMAC1_CH_TD_DMA3)) {
		reg_value = ioread32(dma_reuse_reg_base + 4);
		reg_value |= BIT_SHIFT_L(0x1, chNum);
		iowrite32(reg_value, dma_reuse_reg_base + 4);
	} else if ((peripheralID >= DMAC1_CH_RX_PDSCH_CIR_RAM) && (peripheralID <= DMAC1_CH_RX_MBSFN_CIR_MAX)) {
		reg_value = ioread32(dma_reuse_reg_base + 4);
		reg_value &= ~(BIT_SHIFT_L(0x1, chNum));
		iowrite32(reg_value, dma_reuse_reg_base + 4);
	}
	/* channel found: mark it used and return its handle */
	dma_chan_config_ptr[chNum].isUsed = TRUE;
	return DMA_CHANNEL(dmaID, chNum);
}

static enum dma_status zx29_dma_tx_status(struct dma_chan *chan,
		dma_cookie_t cookie, struct dma_tx_state *txstate)
{
	struct zx297510_dma_chan *zx29_chan = to_zx29_dma_chan(chan);
	dma_cookie_t last_used;

	last_used = chan->cookie;
	dma_set_tx_state(txstate, chan->completed_cookie, last_used, 0);

	return zx29_chan->status;
}

static signed int dma_set_chan_addr(unsigned int dmaID, unsigned int channel, dma_chan_def *ptChanPar)
{
	volatile dma_chan_reg __iomem *pChReg = NULL;

	pChReg = &(dma_dev[dmaID].reg->channel[channel]);
	pChReg->src_addr = ptChanPar->SrcAddr;
	pChReg->dest_addr = ptChanPar->DestAddr;
	pChReg->lli = ptChanPar->LLI;
	return 0;
}

static signed int dma_set_chan_ctrl(unsigned int dmaID, unsigned int channel, dma_chan_def *ptChanPar)
{
	volatile dma_chan_reg __iomem *pChReg = NULL;

	pChReg = &(dma_dev[dmaID].reg->channel[channel]);
	pChReg->control = DMA_CTRL_SOFT_B_REQ(ptChanPar->CONTROL.BurstReqMod)
			| DMA_CTRL_SRC_FIFO_MOD(ptChanPar->CONTROL.SrcMod)
			| DMA_CTRL_DEST_FIFO_MOD(ptChanPar->CONTROL.DestMod)
			| DMA_CTRL_IRQ_MOD(ptChanPar->CONTROL.IrqMod)
			| DMA_CTRL_SRC_BURST_SIZE(ptChanPar->CONTROL.SrcBurstSize)
			| DMA_CTRL_SRC_BURST_LENGTH(ptChanPar->CONTROL.SrcBurstLen)
			| DMA_CTRL_DEST_BURST_SIZE(ptChanPar->CONTROL.DestBurstSize)
			| DMA_CTRL_DEST_BURST_LENGTH(ptChanPar->CONTROL.DestBurstLen)
			| DMA_CTRL_INTERRUPT_SEL(ptChanPar->CONTROL.IntSel);
	return 0;
}

static signed int dma_set_chan_para(unsigned int dmaID, unsigned int channel, dma_chan_def *ptChanPar)
{
	volatile dma_chan_reg __iomem *pChReg = NULL;

	pChReg = &(dma_dev[dmaID].reg->channel[channel]);
	pChReg->count = ptChanPar->Count;
	return 0;
}

bool zx297510_dma_filter_fn(struct dma_chan *chan, void *param)
{
	struct zx297510_dma_chan *channel = to_zx29_dma_chan(chan);
	unsigned int peripheral_id = (unsigned int)param;
	unsigned int chan_id = peripheral_id;

	if (peripheral_id > DMA_CH_ALL ||
	    (channel->dma_device->dmac_id == DMAC0 && peripheral_id > DMAC0_CH_MEMORY) ||
	    (channel->dma_device->dmac_id == DMAC1 && peripheral_id <= DMAC0_CH_MEMORY))
		return false;
	if (peripheral_id == DMAC0_CH_MEMORY || peripheral_id == DMAC1_CH_MEMORY) {
		if (zx29_dma_request(peripheral_id) == -EAGAIN)
			return false;
		else
			return true;
	}
	if (channel->dma_device->dmac_id == DMAC1)
		chan_id -= DMAC1_CH_START;
	if (channel->channel_id != chan_id)
		return false;
	if (zx29_dma_request(peripheral_id) == -EAGAIN)
		return false;

	return true;
}
EXPORT_SYMBOL(zx297510_dma_filter_fn);
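
/*
 * A minimal sketch of claiming a channel through the dmaengine core with
 * this filter; DMAC0_CH_SD1_TX is just the example id used by the
 * self-test below, any dma_peripheral_id is passed the same way:
 *
 *	dma_cap_mask_t mask;
 *	struct dma_chan *chan;
 *
 *	dma_cap_zero(mask);
 *	dma_cap_set(DMA_SLAVE, mask);
 *	chan = dma_request_channel(mask, zx297510_dma_filter_fn,
 *				   (void *)DMAC0_CH_SD1_TX);
 *	if (!chan)
 *		;	// no matching free channel
 */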

/*
 * allocate a channel for peripheralID and return the channel handle;
 * returns -EAGAIN on failure
 */
signed int zx29_dma_request(dma_peripheral_id peripheralID)
{
	signed int errCode = -EAGAIN;

	mutex_lock(&dma_dev[0].dma_mutex);
	errCode = dma_find_chan(peripheralID, DMAC0);
	mutex_unlock(&dma_dev[0].dma_mutex);
	if (errCode == -EAGAIN) {
		mutex_lock(&dma_dev[1].dma_mutex);
		errCode = dma_find_chan(peripheralID, DMAC1);
		mutex_unlock(&dma_dev[1].dma_mutex);
	}
	return errCode;
}
EXPORT_SYMBOL(zx29_dma_request);

signed int zx29_dma_config(unsigned int ucChannel, dma_chan_def *ptChanPar)
{
	signed int errCode = -EAGAIN;
	unsigned int dmaID = GET_HIGH_16BIT(ucChannel);
	unsigned int channel = GET_LOW_16BIT(ucChannel);

	if (channel >= DMA_CHAN_NUM || ptChanPar == NULL || dmaID >= DMAC_NUM)
		return -EINVAL;
	if (dma_dev[dmaID].chan_config[channel].isUsed == FALSE)
		return -EINVAL;
	if (ptChanPar->CONTROL.BurstReqMod >= DMA_REQ_MOD_ALL
	    || ptChanPar->CONTROL.SrcMod >= DMA_ADDRMOD_ALL
	    || ptChanPar->CONTROL.DestMod >= DMA_ADDRMOD_ALL
	    || ptChanPar->CONTROL.IrqMod >= DMA_IRQMOD_ALL
	    || ptChanPar->CONTROL.SrcBurstSize >= DMA_BURST_SIZE_ALL
	    || ptChanPar->CONTROL.SrcBurstLen >= DMA_BURST_LEN_ALL
	    || ptChanPar->CONTROL.DestBurstSize >= DMA_BURST_SIZE_ALL
	    || ptChanPar->CONTROL.DestBurstLen >= DMA_BURST_LEN_ALL
	    || ptChanPar->CONTROL.IntSel >= DMA_INT_SEL_ALL)
		return -EINVAL;

	dma_dev[dmaID].chan_config[channel].channelCbk = ptChanPar->CallBack;
	dma_dev[dmaID].chan_config[channel].data = ptChanPar->data;
	errCode = dma_set_chan_addr(dmaID, channel, ptChanPar);
	if (errCode)
		return errCode;
	errCode = dma_set_chan_para(dmaID, channel, ptChanPar);
	if (errCode)
		return errCode;
	errCode = dma_set_chan_ctrl(dmaID, channel, ptChanPar);
	return errCode;
}
EXPORT_SYMBOL(zx29_dma_config);
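
/*
 * A minimal sketch of filling a dma_chan_def for a software-triggered
 * memory-to-memory transfer, mirroring the self-test at the bottom of
 * this file; src_phys, dst_phys and len are placeholders, not real values:
 *
 *	dma_chan_def par = {0};
 *
 *	par.SrcAddr = src_phys;
 *	par.DestAddr = dst_phys;
 *	par.Count = len;
 *	par.CONTROL.BurstReqMod = DMA_SOFT_REQ;
 *	par.CONTROL.SrcMod = DMA_ADDRMOD_RAM;
 *	par.CONTROL.DestMod = DMA_ADDRMOD_RAM;
 *	par.CONTROL.SrcBurstSize = DMA_BURST_SIZE_8BIT;
 *	par.CONTROL.SrcBurstLen = DMA_BURST_LEN_16;
 *	par.CONTROL.DestBurstSize = DMA_BURST_SIZE_8BIT;
 *	par.CONTROL.DestBurstLen = DMA_BURST_LEN_16;
 *	par.CONTROL.IntSel = DMA_INT_TO_A9;
 *	par.CONTROL.IrqMod = DMA_ALL_IRQ_ENABLE;
 *	if (zx29_dma_config(handle, &par))
 *		;	// invalid handle or parameters
 */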

/*******************************************************************************
* Function: zx29_dma_configLLI
* Description:
* Parameters:
*   Input:
*     channelaPara:
*   Output:
*
* Returns:
*
* Others:
********************************************************************************/
signed int zx29_dma_configLLI(unsigned int channelID, dma_chan_def *channelaPara, unsigned int LLIParaCnt)
{
	unsigned int index = 0;
	unsigned int dmacID = GET_HIGH_16BIT(channelID);
	unsigned int channel = GET_LOW_16BIT(channelID);
	volatile dma_chan_reg __iomem *pChReg = NULL;

	if ((channelaPara == NULL) || (LLIParaCnt < 1) || (LLIParaCnt > MAX_LLI_PARA_CNT) || (dmacID == DMAC1))
		return -EINVAL;

	pChReg = &(dma_dev[dmacID].reg->channel[channel]);

	for (index = 0; index < LLIParaCnt; index++) {
		(dma_lli_para_array[dmacID][channel])[index].src_addr = channelaPara[index].SrcAddr;
		(dma_lli_para_array[dmacID][channel])[index].dest_addr = channelaPara[index].DestAddr;
		(dma_lli_para_array[dmacID][channel])[index].count = channelaPara[index].Count;
		(dma_lli_para_array[dmacID][channel])[index].src_ypara = channelaPara[index].SrcYPara;
		(dma_lli_para_array[dmacID][channel])[index].src_zpara = channelaPara[index].SrcZPara;
		(dma_lli_para_array[dmacID][channel])[index].dest_ypara = channelaPara[index].DestYPara;
		(dma_lli_para_array[dmacID][channel])[index].dest_zpara = channelaPara[index].DestZPara;
		/* lli holds the bus address of the next entry, translated from
		 * the CPU-side mapping set up in probe */
		(dma_lli_para_array[dmacID][channel])[index].lli = (unsigned int)(&((dma_lli_para_array[dmacID][channel])[index + 1]))
				- (unsigned int)dma_lli_para_array[dmacID][0] + DMA_RAM_END - 32 * PAGE_SIZE;
		(dma_lli_para_array[dmacID][channel])[index].control = DMA_CTRL_SOFT_B_REQ(channelaPara[index].CONTROL.BurstReqMod)
				| DMA_CTRL_SRC_FIFO_MOD(channelaPara[index].CONTROL.SrcMod)
				| DMA_CTRL_DEST_FIFO_MOD(channelaPara[index].CONTROL.DestMod)
				| DMA_CTRL_IRQ_MOD(DMA_ERR_IRQ_ENABLE)
				| DMA_CTRL_SRC_BURST_SIZE(channelaPara[index].CONTROL.SrcBurstSize)
				| DMA_CTRL_SRC_BURST_LENGTH(channelaPara[index].CONTROL.SrcBurstLen)
				| DMA_CTRL_DEST_BURST_SIZE(channelaPara[index].CONTROL.DestBurstSize)
				| DMA_CTRL_DEST_BURST_LENGTH(channelaPara[index].CONTROL.DestBurstLen)
				| DMA_CTRL_INTERRUPT_SEL(channelaPara[index].CONTROL.IntSel)
				| DMA_CTRL_ENABLE(1);
	}
	(dma_lli_para_array[dmacID][channel])[0].control &= ~0x1;
	(dma_lli_para_array[dmacID][channel])[LLIParaCnt - 1].lli = 0;
	(dma_lli_para_array[dmacID][channel])[LLIParaCnt - 1].control |= DMA_CTRL_IRQ_MOD(DMA_ALL_IRQ_ENABLE);
	/* load the first descriptor into the dma channel regs */
	pChReg->src_addr = (dma_lli_para_array[dmacID][channel])[0].src_addr;
	pChReg->dest_addr = (dma_lli_para_array[dmacID][channel])[0].dest_addr;
	pChReg->count = (dma_lli_para_array[dmacID][channel])[0].count;
	pChReg->src_ypara = (dma_lli_para_array[dmacID][channel])[0].src_ypara;
	pChReg->src_zpara = (dma_lli_para_array[dmacID][channel])[0].src_zpara;
	pChReg->dest_ypara = (dma_lli_para_array[dmacID][channel])[0].dest_ypara;
	pChReg->dest_zpara = (dma_lli_para_array[dmacID][channel])[0].dest_zpara;
	pChReg->lli = (dma_lli_para_array[dmacID][channel])[0].lli;
	pChReg->control = (dma_lli_para_array[dmacID][channel])[0].control;

	dma_dev[dmacID].chan_config[channel].channelCbk = channelaPara[0].CallBack;
	dma_dev[dmacID].chan_config[channel].data = channelaPara[0].data;
	return 0;
}
EXPORT_SYMBOL(zx29_dma_configLLI);
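
/*
 * A minimal sketch of a three-entry linked-list transfer, following
 * dma_lli_test() below; par[] is an array of dma_chan_def entries filled
 * as in the sketch after zx29_dma_config():
 *
 *	signed int handle = zx29_dma_request(DMAC0_CH_SD1_TX);
 *
 *	if (handle != -EAGAIN) {
 *		zx29_dma_configLLI(handle, par, 3);
 *		zx29_dma_start(handle);
 *	}
 */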

signed int zx29_dma_start(unsigned int ucChannel)
{
	volatile dma_regs __iomem *pReg = NULL;
	unsigned int dmaID = GET_HIGH_16BIT(ucChannel);
	unsigned int channel = GET_LOW_16BIT(ucChannel);

	if (channel >= DMA_CHAN_NUM || dmaID >= DMAC_NUM)
		return -EINVAL;
	pReg = dma_dev[dmaID].reg;
	pReg->channel[channel].control |= DMA_CTRL_ENABLE(DMA_ENABLE);
	return 0;
}
EXPORT_SYMBOL(zx29_dma_start);

signed int zx29_dma_stop(unsigned int ucChannel)
{
	volatile dma_regs __iomem *pReg = NULL;
	unsigned int dmaID = GET_HIGH_16BIT(ucChannel);
	unsigned int channel = GET_LOW_16BIT(ucChannel);

	if (channel >= DMA_CHAN_NUM || dmaID >= DMAC_NUM)
		return -EINVAL;
	pReg = dma_dev[dmaID].reg;
	/* clear the channel enable bit */
	pReg->channel[channel].control &= ~DMA_CTRL_ENABLE(1);
	return 0;
}
EXPORT_SYMBOL(zx29_dma_stop);
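
/*
 * A minimal end-to-end sketch of the exported request/config/start/stop
 * flow outside the dmaengine framework; par is a dma_chan_def filled as
 * in the sketch after zx29_dma_config():
 *
 *	signed int handle = zx29_dma_request(DMAC0_CH_MEMORY);
 *
 *	if (handle == -EAGAIN)
 *		return;		// no free channel on either controller
 *	if (zx29_dma_config(handle, &par) == 0)
 *		zx29_dma_start(handle);
 *	...
 *	zx29_dma_stop(handle);
 */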


signed int zx29_dma_set_priority(dmac_id dmaID, dma_group_order groupOrder, dma_group_mode groupMode)
{
	if (groupOrder >= DMA_GROUP_ALL || groupMode >= DMA_MODE_ALL)
		return -EINVAL;
	dma_dev[dmaID].reg->group_order = groupOrder;
	dma_dev[dmaID].reg->arbit_mode = groupMode;
	return 0;
}
EXPORT_SYMBOL(zx29_dma_set_priority);
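
/*
 * For example, to reprogram controller 0's arbitration (the argument
 * values must come from the dma_group_order/dma_group_mode enums in
 * zx297510_dma.h; the names here are placeholders):
 *
 *	zx29_dma_set_priority(DMAC0, my_group_order, my_group_mode);
 */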

signed int zx297510_dma_config(struct dma_chan *chan, dma_chan_def *ptChanPar)
{
	struct zx297510_dma_chan *channel = to_zx29_dma_chan(chan);
	unsigned int dmac_id = channel->dma_device->dmac_id;
	unsigned int channel_id = channel->channel_id;

	return zx29_dma_config(DMA_CHANNEL(dmac_id, channel_id), ptChanPar);
}

signed int zx297510_dma_start(struct zx297510_dma_chan *chan)
{
	unsigned int dmac_id = chan->dma_device->dmac_id;
	unsigned int channel_id = chan->channel_id;

	return zx29_dma_start(DMA_CHANNEL(dmac_id, channel_id));
}

static dma_cookie_t zx29_dma_tx_submit(struct dma_async_tx_descriptor *tx)
{
	return dma_cookie_assign(tx);
}

static int zx29_dma_alloc_chan_resources(struct dma_chan *channel)
{
	struct zx297510_dma_chan *chan = to_zx29_dma_chan(channel);

	zx29_dma_request(chan->peripheralID);

	dma_async_tx_descriptor_init(&chan->desc, channel);
	chan->desc.tx_submit = zx29_dma_tx_submit;

	/* the descriptor is ready */
	async_tx_ack(&chan->desc);

	return 0;
}

void zx29_dma_free_chan_resource(struct dma_chan *chan)
{
	struct zx297510_dma_chan *zx29_chan = to_zx29_dma_chan(chan);

	dma_reset_chan(zx29_chan);
}


static struct dma_async_tx_descriptor *zx29_prep_dma_interleaved(
		struct dma_chan *chan, struct dma_interleaved_template *xt,
		unsigned long flags)
{
	struct zx297510_dma_chan *channel = to_zx29_dma_chan(chan);
	struct dma_async_tx_descriptor *desc = &channel->desc;

	if (channel->status == DMA_IN_PROGRESS)
		return NULL;
	channel->status = DMA_IN_PROGRESS;

	desc->callback = NULL;
	desc->callback_param = NULL;

	return desc;
}

static int zx29_dma_control(struct dma_chan *channel, enum dma_ctrl_cmd cmd,
		unsigned long arg)
{
	struct zx297510_dma_chan *chan = to_zx29_dma_chan(channel);
	int ret = 0;

	switch (cmd) {
	case DMA_TERMINATE_ALL:
		dma_reset_chan(chan);
		ret = dma_disable_chan(chan);
		break;
	case DMA_SLAVE_CONFIG:
		ret = zx297510_dma_config(channel, (dma_chan_def *)arg);
		break;
	default:
		ret = -ENOSYS;
	}

	return ret;
}

static void zx29_dma_issue_pending(struct dma_chan *chan)
{
	struct zx297510_dma_chan *zx29_chan = to_zx29_dma_chan(chan);

	zx297510_dma_start(zx29_chan);
}


irqreturn_t dma_Isr(int irq, void *dev)
{
	unsigned int need_continue = 0;
	unsigned int i;
	struct zx297510_dmac *dmac_ptr = dev;
	dma_regs __iomem *pReg = NULL;
	unsigned int dwTcInt = 0;
	unsigned int dwRawTcInt = 0;
	unsigned int dwSrcErrInt = 0;
	unsigned int dwDestErrInt = 0;
	unsigned int dwCfgErrInt = 0;
	unsigned int dwErrInt = 0;

	pReg = dmac_ptr->reg;
	dwTcInt = pReg->int_tc_status;
	dwRawTcInt = pReg->raw_int_tc_status;
	dwSrcErrInt = pReg->int_src_err_status;
	dwDestErrInt = pReg->int_dest_err_status;
	dwCfgErrInt = pReg->int_cfg_err_status;
	dwErrInt = dwSrcErrInt | dwDestErrInt | dwCfgErrInt;

	if (dwErrInt != 0) {
		for (i = 0; (i < DMA_CHAN_NUM) && (dwErrInt != 0); i++, dwErrInt >>= 1) {
			if (dwErrInt & 0x01) {
				dmac_ptr->dma_chan[i].status = DMA_ERROR;
				dma_reset_chan(&dmac_ptr->dma_chan[i]);
			}
		}
		pReg->raw_int_src_err_status = dwSrcErrInt;
		pReg->raw_int_dest_err_status = dwDestErrInt;
		pReg->raw_int_cfg_err_status = dwCfgErrInt;
	}

	if (dwRawTcInt == 0)
		return IRQ_HANDLED;

	do {
		need_continue = 0;
		dwRawTcInt = pReg->raw_int_tc_status;
		if (dwRawTcInt == 0)
			break;
		for (i = 0; (i < DMA_CHAN_NUM) && (dwRawTcInt != 0); i++) {
			if (dwRawTcInt & 0x01) {
				if (dmac_ptr->dma_chan[i].channel_id == DMAC0_CH_SD0_TX ||
				    dmac_ptr->dma_chan[i].channel_id == DMAC0_CH_SD0_RX ||
				    dmac_ptr->dma_chan[i].channel_id == DMAC0_CH_SD1_TX ||
				    dmac_ptr->dma_chan[i].channel_id == DMAC0_CH_SD1_RX) {
					if ((dmac_ptr->dmac_id == 0) && (dma0_chan_config[i].ownner == CORE_ID_A9) && (((pReg->working_status) & (0x1 << i)) == 0)) {
						pReg->raw_int_tc_status = (0x1 << i);
						need_continue = 1;
						if (dmac_ptr->chan_config[i].channelCbk)
							(*(dmac_ptr->chan_config[i].channelCbk))
									(i, DMA_INT_END, dmac_ptr->chan_config[i].data);
					}
				} else if (((dmac_ptr->dmac_id == 0) && (dma0_chan_config[i].ownner == CORE_ID_A9)) ||
					   ((dmac_ptr->dmac_id == 1) && (dma1_chan_config[i].ownner == CORE_ID_A9))) {
					pReg->raw_int_tc_status = (0x1 << i);
					need_continue = 1;
					dmac_ptr->dma_chan[i].status = DMA_SUCCESS;
					dma_cookie_complete(&dmac_ptr->dma_chan[i].desc);
					/* schedule tasklet on this channel */
					tasklet_schedule(&dmac_ptr->dma_chan[i].tasklet);
				}
			}
			dwRawTcInt = dwRawTcInt >> 1;
		}
	} while (need_continue);

	/* check whether the dma0 handling above affects the PS core's dma interrupt */
	return IRQ_HANDLED;
}
#ifdef ZX297510_DMA_TEST
void dma_cb(struct zx297510_dma_chan *chan)
{
	dma_int_count++;
}
#endif


#ifdef ZX297510_DMA_TEST
static void dma_m2m_test(void)
{
	static unsigned int test_loop_cnt = 0;
	static struct dma_chan *chan = NULL;
	struct dma_async_tx_descriptor *desc = NULL;
	struct zx297510_dma_chan *zx29_chan = NULL;
	dma_chan_def temp = {};
	dma_cap_mask_t mask;

	dma_cap_zero(mask);
	dma_cap_set(DMA_SLAVE, mask);

	temp = (dma_chan_def){0x23500000, 0x23500190, 400, 0, 0, 0, 0, 0, {0}, NULL, dma_cb};
	/* DMA test start */
	temp.CONTROL.BurstReqMod = DMA_SOFT_REQ;
	temp.CONTROL.SrcMod = DMA_ADDRMOD_RAM;
	temp.CONTROL.DestMod = DMA_ADDRMOD_RAM;
	temp.CONTROL.SrcBurstSize = DMA_BURST_SIZE_8BIT;
	temp.CONTROL.SrcBurstLen = DMA_BURST_LEN_16;
	temp.CONTROL.DestBurstSize = DMA_BURST_SIZE_8BIT;
	temp.CONTROL.DestBurstLen = DMA_BURST_LEN_16;
	temp.CONTROL.IntSel = DMA_INT_TO_A9;
	temp.CONTROL.IrqMod = DMA_ALL_IRQ_ENABLE;
	src = ioremap(0x23500000, 0x400);
	memset(src, 0x5A, 0x190);
	memset(src + 0x190, 0x00, 0x200);
	if (dma_int_count == 0)
		chan = dma_request_channel(mask, zx297510_dma_filter_fn, (void *)DMAC0_CH_SD1_TX);

	/* dmaengine_slave_config() returns 0 on success */
	if (dmaengine_slave_config(chan, (struct dma_slave_config *)&temp))
		printk(KERN_ERR "dmaengine_slave_config failed\n");

	zx29_chan = to_zx29_dma_chan(chan);
	desc = zx29_chan->dma_device->dma.device_prep_interleaved_dma(chan, NULL, 0);
	desc->callback = (dma_async_tx_callback)dma_cb;
	desc->callback_param = (void *)zx29_chan;
	zx29_chan->zx29_dma_cookie = dmaengine_submit(desc);
	dma_async_issue_pending(chan);
	/* DMA test end */

	return;
}

static void dma_lli_test(void)
{
	static unsigned int test_loop_cnt = 0;
	dma_chan_def temp[3] = {};
	signed int dma_chan = -1;
	int i = 0;

	temp[0] = (dma_chan_def){0x2391E000, 0x2391E800, 0x200, 0, 0, 0, 0, 0, {0}, NULL, dma_cb};
	temp[1] = (dma_chan_def){0x2391E200, 0x2391EA00, 0x200, 0, 0, 0, 0, 0, {0}, NULL, dma_cb};
	temp[2] = (dma_chan_def){0x2391E400, 0x2391EC00, 0x200, 0, 0, 0, 0, 0, {0}, NULL, dma_cb};

	/* DMA test start */
	for (; i < 3; i++) {
		temp[i].CONTROL.BurstReqMod = DMA_SOFT_REQ;
		temp[i].CONTROL.SrcMod = DMA_ADDRMOD_RAM;
		temp[i].CONTROL.DestMod = DMA_ADDRMOD_RAM;
		temp[i].CONTROL.SrcBurstSize = DMA_BURST_SIZE_8BIT;
		temp[i].CONTROL.SrcBurstLen = DMA_BURST_LEN_16;
		temp[i].CONTROL.DestBurstSize = DMA_BURST_SIZE_8BIT;
		temp[i].CONTROL.DestBurstLen = DMA_BURST_LEN_16;
		temp[i].CONTROL.IntSel = DMA_INT_TO_A9;
		temp[i].CONTROL.IrqMod = DMA_ALL_IRQ_ENABLE;
	}
	src = ioremap(0x2391E000, 0x1000);
	memset(src, 0x5A, 0x200);
	memset(src + 0x200, 0xA5, 0x200);
	memset(src + 0x400, 0xFF, 0x200);
	memset(src + 0x800, 0x0, 0x600);

	dma_chan = zx29_dma_request(DMAC0_CH_SD1_TX);
	zx29_dma_configLLI(dma_chan, temp, 3);
	zx29_dma_start(dma_chan);
	/* DMA test end */

	return;
}

/* sysfs show hook: the attribute needs the show() prototype, so wrap the test */
static ssize_t dma_test_show(struct device *dev, struct device_attribute *attr, char *buf)
{
	dma_m2m_test();
	return 0;
}
static DEVICE_ATTR(dma_test, 0600, dma_test_show, NULL);
static struct attribute *zx29_dma_attributes[] = {
	&dev_attr_dma_test.attr,
	NULL,
};

static const struct attribute_group zx29_dma_attribute_group = {
	.attrs = zx29_dma_attributes,
};
#endif

static void check_dma_status(unsigned long data)
{
	volatile unsigned int dma_int_status = 0;
	volatile unsigned int raw_dma_int_status = 0;
	volatile unsigned int dma_int_wrong_status = 0;

	dma_timer_num++;
	dma_int_status = dma_dev[0].reg->int_tc_status;
	raw_dma_int_status = dma_dev[0].reg->raw_int_tc_status;
	dma_int_wrong_status = (dma_int_status ^ raw_dma_int_status) & (0x3 << DMAC0_CH_SD0_TX);
	if (dma_int_wrong_status) {
		dma_err_num++;
		local_irq_disable();
		dma_Isr(0, &dma_dev[0]);
		local_irq_enable();
	}
	mod_timer(&dma0_timer, jiffies + 100);
}

static int __devinit zx297510_dma_probe(struct platform_device *pDev)
{
	int ret = 0;
	int i = 0;
	int j = 0;
	void *dma_addr_for_cpu[2] = {NULL};
	struct zx297510_dma_chan *dma_chan_ptr = NULL;

	/* DMA IO, mux regs remap */
	dma0_base = ioremap(pDev->resource[0].start, resource_size(&pDev->resource[0]));
	dma1_base = ioremap(pDev->resource[1].start, resource_size(&pDev->resource[1]));
	dma_reuse_reg_base = ioremap(0x01300014, 8);
	if (dma0_base == NULL || dma1_base == NULL || dma_reuse_reg_base == NULL)
		return -EAGAIN;
	dma_dev[0].reg = (dma_regs *)dma0_base;
	dma_dev[0].chan_config = dma0_chan_config;
	dma_dev[0].dma_mutex = dma0_mutex;
	dma_dev[0].reg->irq_type = 0xF;
	dma_dev[1].reg = (dma_regs *)dma1_base;
	dma_dev[1].chan_config = dma1_chan_config;
	dma_dev[1].dma_mutex = dma1_mutex;
	dma_dev[1].reg->irq_type = 0xF;

	dma_addr_for_cpu[0] = ioremap(DMA_RAM_END - 32 * PAGE_SIZE, 16 * PAGE_SIZE);
	dma_addr_for_cpu[1] = ioremap(DMA_RAM_END - 16 * PAGE_SIZE, 16 * PAGE_SIZE);
	for (i = 0; i < DMA_CHAN_NUM; i++) {
		dma_lli_para_array[0][i] = (dma_lli_para *)(dma_addr_for_cpu[0] + i * sizeof(dma_lli_para) * (MAX_LLI_PARA_CNT + 1));
		dma_lli_para_array[1][i] = (dma_lli_para *)(dma_addr_for_cpu[1] + i * sizeof(dma_lli_para) * (MAX_LLI_PARA_CNT + 1));
	}

	ret = request_irq(pDev->resource[2].start, dma_Isr, 0, "dma0", &dma_dev[0]);
	if (ret)
		return ret;
	ret = request_irq(pDev->resource[3].start, dma_Isr, 0, "dma1", &dma_dev[1]);
	if (ret)
		return ret;
	/* 2015.01.22: work around a DMA hardware bug by polling the status */
	setup_timer(&dma0_timer, check_dma_status, 0);
	dma0_timer.expires = jiffies + 10;
	add_timer(&dma0_timer);

	for (i = 0; i < 2; i++) {
		dma_dev[i].dmac_id = i;
		dma_cap_set(DMA_SLAVE, dma_dev[i].dma.cap_mask);
		dma_cap_set(DMA_INTERLEAVE, dma_dev[i].dma.cap_mask);
		INIT_LIST_HEAD(&dma_dev[i].dma.channels);

		/* init channels */
		for (j = 0; j < DMA_CHAN_NUM; j++) {
			dma_chan_ptr = &dma_dev[i].dma_chan[j];
			dma_chan_ptr->be_used = dma_dev[i].chan_config[j].isUsed;
			dma_chan_ptr->channel_id = j;
			dma_chan_ptr->peripheralID = dma_dev[i].chan_config[j].peripheralID;
			dma_chan_ptr->chan_regs = &(dma_dev[i].reg->channel[j]);
			dma_chan_ptr->dma_device = &(dma_dev[i]);
			dma_chan_ptr->chan.device = &(dma_dev[i].dma);
			dma_cookie_init(&dma_chan_ptr->chan);

			tasklet_init(&dma_chan_ptr->tasklet, zx29_dma_tasklet,
					(unsigned long)(dma_chan_ptr));

			/* Add the channel to the zx29_chan list */
			list_add_tail(&dma_chan_ptr->chan.device_node,
					&(dma_dev[i].dma.channels));
		}
		dma_dev[i].dma.device_alloc_chan_resources = zx29_dma_alloc_chan_resources;
		dma_dev[i].dma.device_free_chan_resources = zx29_dma_free_chan_resource;
		dma_dev[i].dma.device_tx_status = zx29_dma_tx_status;
		dma_dev[i].dma.device_control = zx29_dma_control;
		dma_dev[i].dma.device_prep_interleaved_dma = zx29_prep_dma_interleaved;
		dma_dev[i].dma.device_issue_pending = zx29_dma_issue_pending;

		/* BUG_ON at dma_async_device_register: BUG_ON(!device->dev); */
		dma_dev[i].dma.dev = &pDev->dev;
		ret = dma_async_device_register(&dma_dev[i].dma);
		if (ret) {
			dev_err(dma_dev[i].dma.dev, "unable to register\n");
			return -EINVAL;
		}
	}
#ifdef ZX297510_DMA_TEST
	ret = sysfs_create_group(&pDev->dev.kobj, &zx29_dma_attribute_group);
#endif
	//dma_m2m_test();
	//dma_lli_test();

	return 0;
}

struct platform_driver zx297510_dma_driver = {
	.driver = {
		.name = "zx297510_dma",
	},
	.probe = zx297510_dma_probe,
};

static int __init zx297510_dma_driver_init(void)
{
	return platform_driver_register(&zx297510_dma_driver);
}
arch_initcall(zx297510_dma_driver_init);