/*******************************************************************************
* Copyright (C) 2013, ZTE Corporation.
*
* File Name:dma.c
* File Mark:
* Description:
* Others:
* Version: 0.1
* Author: limeifeng
* Date:
* modify
********************************************************************************/
/****************************************************************************
* Include files
****************************************************************************/
#include <linux/kernel.h>
#include <linux/interrupt.h>
#include <linux/spinlock.h>
#include <linux/dma-mapping.h>
#include <linux/mutex.h>
#include <linux/platform_device.h>
#include <linux/err.h>
#include <linux/ioport.h>
#include <linux/module.h>
#include <mach/iomap.h>
#include <mach/clock.h>
#include <mach/zx297510_dma.h>
#include <linux/clk.h>
#include "dmaengine.h"
#include <mach/usb_debug.h>
/****************************************************************************
* Local Macros
****************************************************************************/
#define BIT_SHIFT_L(value,BIT_NO) ((unsigned int)((value) << (BIT_NO)))
#define GET_HIGH_16BIT(val) ((unsigned int)((val) >> 16))
#define GET_LOW_16BIT(val) ((unsigned int)((val) & 0xffff))
/*channel handle: DMAC id in the high 16 bits, channel number in the low 16 bits*/
#define DMA_CHANNEL(dmac,channel) ((unsigned int)(((dmac) << 16) | (channel)))
/*dma control reg bit */
#define DMA_CTRL_ENABLE(value) BIT_SHIFT_L(value,0)
#define DMA_CTRL_SOFT_B_REQ(value) BIT_SHIFT_L(value,1)
#define DMA_CTRL_SRC_FIFO_MOD(value) BIT_SHIFT_L(value,2)
#define DMA_CTRL_DEST_FIFO_MOD(value) BIT_SHIFT_L(value,3)
#define DMA_CTRL_IRQ_MOD(value) BIT_SHIFT_L(value,4)
#define DMA_CTRL_SRC_BURST_SIZE(value) BIT_SHIFT_L(value,6)
#define DMA_CTRL_SRC_BURST_LENGTH(value) BIT_SHIFT_L(value,9)
#define DMA_CTRL_DEST_BURST_SIZE(value) BIT_SHIFT_L(value,13)
#define DMA_CTRL_DEST_BURST_LENGTH(value) BIT_SHIFT_L(value,16)
#define DMA_CTRL_INTERRUPT_SEL(value) BIT_SHIFT_L(value,20)
#define DMA_CTRL_FORCE_CLOSE(value) BIT_SHIFT_L(value,31)
/*for LLI*/
#define MAX_LLI_PARA_CNT 32 /*maximum number of LLI entries in one linked list*/
#define DMA_RAM_END (0x23400000+0x500000)
#define MAX(a,b) ((a) > (b) ? (a) : (b))
#define FALSE 0
#define TRUE 1
#undef ZX297510_DMA_TEST
//#define ZX297510_DMA_TEST
/****************************************************************************
* Local Types
****************************************************************************/
/*for DMA0 IO remap*/
static void __iomem *dma0_base = NULL;
static void __iomem *dma1_base = NULL;
static void __iomem *dma_reuse_reg_base = NULL;
/*for lli transfer*/
static dma_lli_para * dma_lli_para_array[DMAC_NUM][DMA_CHAN_NUM];
static dma_lli_para * dma_addr[2] = {NULL};
#ifdef ZX297510_DMA_TEST
static void *src = NULL;
static unsigned int dma_int_count = 0;
#endif
struct zx297510_dma_chan
{
dma_peripheral_id peripheralID;
short int channel_id;
struct zx297510_dmac * dma_device;
struct dma_chan chan;
struct dma_async_tx_descriptor desc;
struct tasklet_struct tasklet;
enum dma_status status;
spinlock_t lock;
bool be_used;
dma_chan_reg __iomem *chan_regs;
dma_chan_def dma_chan_par; //srcaddr,destaddr,burstsize...
dma_cookie_t zx29_dma_cookie;
};
struct zx297510_dmac
{
unsigned int dmac_id;
struct dma_device dma;
dma_regs __iomem *reg;
dma_chan_config * chan_config;
struct mutex dma_mutex;
struct zx297510_dma_chan dma_chan[DMA_CHAN_NUM];
};
struct zx297510_dmac dma_dev[DMAC_NUM];
struct timer_list dma0_timer;
unsigned int dma_timer_num = 0;
unsigned int dma_err_num = 0;
static dma_chan_config dma0_chan_config[DMA_CHAN_NUM] =
{
DMAC0_CFG_CH0,DMAC0_CFG_CH1,DMAC0_CFG_CH2,DMAC0_CFG_CH3,DMAC0_CFG_CH4,
DMAC0_CFG_CH5,DMAC0_CFG_CH6,DMAC0_CFG_CH7,DMAC0_CFG_CH8,DMAC0_CFG_CH9,
DMAC0_CFG_CH10,DMAC0_CFG_CH11,DMAC0_CFG_CH12,DMAC0_CFG_CH13,DMAC0_CFG_CH14,
DMAC0_CFG_CH15
};
static dma_chan_config dma1_chan_config[DMA_CHAN_NUM] =
{
DMAC1_CFG_CH0,DMAC1_CFG_CH1,DMAC1_CFG_CH2,DMAC1_CFG_CH3,DMAC1_CFG_CH4,
DMAC1_CFG_CH5,DMAC1_CFG_CH6,DMAC1_CFG_CH7,DMAC1_CFG_CH8,DMAC1_CFG_CH9,
DMAC1_CFG_CH10,DMAC1_CFG_CH11,DMAC1_CFG_CH12,DMAC1_CFG_CH13,DMAC1_CFG_CH14,
DMAC1_CFG_CH15
};
static signed int dma_find_chan(dma_peripheral_id peripheralID,dmac_id dmaID);
static signed int dma_reset_chan(struct zx297510_dma_chan *chan);
static signed int dma_set_chan_addr(unsigned int dmaID, unsigned int channel,dma_chan_def * ptChanPar);
static signed int dma_set_chan_ctrl(unsigned int dmaID, unsigned int channel,dma_chan_def * ptChanPar);
static signed int dma_set_chan_para(unsigned int dmaID, unsigned int channel, dma_chan_def * ptChanPar);
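/*tasklet: runs the client's completion callback in softirq context,
outside the hard-IRQ handler*/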
static void zx29_dma_tasklet(unsigned long data)
{
struct zx297510_dma_chan *chan = (struct zx297510_dma_chan *) data;
if (chan->desc.callback)
chan->desc.callback(chan->desc.callback_param);
}
static struct zx297510_dma_chan *to_zx29_dma_chan(struct dma_chan *chan)
{
return container_of(chan, struct zx297510_dma_chan, chan);
}
static signed int dma_disable_chan (struct zx297510_dma_chan *chan)
{
unsigned int dmac_id = chan->dma_device->dmac_id;
unsigned int channel_id=chan->channel_id;
dma_chan_reg __iomem* chan_reg_ptr=NULL;
if (channel_id >= DMA_CHAN_NUM||dmac_id >= DMAC_NUM)
{
return -EINVAL;
}
if (dma_dev[dmac_id].chan_config[channel_id].ownner != CORE_ID_A9)
{
return -EINVAL;
}
chan_reg_ptr= &(dma_dev[dmac_id].reg->channel[channel_id]);
chan_reg_ptr->control |= DMA_CTRL_FORCE_CLOSE(1);
return 0;
}
/*reset channel para*/
static signed int dma_reset_chan(struct zx297510_dma_chan *chan)
{
unsigned int dmac_id=chan->dma_device->dmac_id;
unsigned int channel_id=chan->channel_id;
dma_regs __iomem* pReg=NULL;
dma_chan_reg __iomem* chan_reg_ptr=NULL;
if (channel_id >= DMA_CHAN_NUM||dmac_id >= DMAC_NUM)
{
return -EINVAL;
}
if (dma_dev[dmac_id].chan_config[channel_id].ownner!=CORE_ID_A9)
{
return -EINVAL;
}
pReg= dma_dev[dmac_id].reg;
chan_reg_ptr= &(pReg->channel[channel_id]);
/*force close current channel*/
chan_reg_ptr->control |= DMA_CTRL_FORCE_CLOSE(1);
memset((void*) chan_reg_ptr,0,sizeof(dma_chan_reg));
pReg->raw_int_tc_status |= BIT_SHIFT_L(0x1,channel_id);
pReg->raw_int_src_err_status |= BIT_SHIFT_L(0x1,channel_id);
pReg->raw_int_dest_err_status |= BIT_SHIFT_L(0x1,channel_id);
pReg->raw_int_cfg_err_status |= BIT_SHIFT_L(0x1,channel_id);
//dma_dev[dmac_id].chan_config[channel_id].channelCbk = NULL;
//dma_dev[dmac_id].chan_config[channel_id].data = NULL;
dma_dev[dmac_id].chan_config[channel_id].isUsed = FALSE;
return 0;
}
/*find the fixed free channel for peripheralID*/
static signed int dma_find_chan(dma_peripheral_id peripheralID,dmac_id dmaID)
{
unsigned int i = 0;
unsigned int reg_value = 0;
unsigned int chNum = (unsigned int)peripheralID;
dma_chan_config *dma_chan_config_ptr = dma_dev[dmaID].chan_config;
/*requesting a DMA1 channel on DMA0 is an error*/
if(dmaID == DMAC0&&peripheralID>=DMAC1_CH_TD_DMA0)
return -EAGAIN;
/*requesting a DMA0 channel on DMA1 is an error*/
else if((dmaID == DMAC1) && peripheralID<DMAC1_CH_TD_DMA0)
return -EAGAIN;
/*memory-to-memory transfers may take any free channel*/
if (DMAC0_CH_MEMORY==peripheralID||DMAC1_CH_MEMORY==peripheralID)
{
for(i=0;i<DMA_CHAN_NUM;i++)
{
if ( (dma_chan_config_ptr[i].isUsed==FALSE))
{
dma_chan_config_ptr[i].isUsed =TRUE;
return DMA_CHANNEL(dmaID,i);
}
}
return -EAGAIN;
}
/*if the channel is multiplexed, derive the channel number*/
if(peripheralID ==DMAC0_CH_I2S0_TX)
chNum = peripheralID-DMAC0_REUSE;
if((peripheralID >=DMAC1_CH_RX_PDSCH_CIR_RAM)&&(peripheralID <=DMAC1_CH_RX_MBSFN_CIR_MAX))
chNum = peripheralID-DMAC1_REUSE;
if(dmaID == DMAC1)
chNum -=DMAC1_CH_START;
/*if the channel is already in use, return an error*/
if(dma_chan_config_ptr[chNum].isUsed==TRUE)
return -EAGAIN;
/*config dma0 reuse regs*/
if(peripheralID ==DMAC0_CH_USIM1)
{
reg_value = ioread32(dma_reuse_reg_base);
reg_value |= BIT_SHIFT_L(0x1,chNum);
iowrite32(reg_value,dma_reuse_reg_base);
}
else if(peripheralID==DMAC0_CH_I2S0_TX)
{
reg_value = ioread32(dma_reuse_reg_base);
reg_value &= (~(BIT_SHIFT_L(0x1,chNum)));
iowrite32(reg_value,dma_reuse_reg_base);
}
/*config dma1 reuse regs*/
if((peripheralID >=DMAC1_CH_TD_DMA0)&&(peripheralID <=DMAC1_CH_TD_DMA3))
{
reg_value = ioread32(dma_reuse_reg_base+4);
reg_value |= BIT_SHIFT_L(0x1,chNum);
iowrite32(reg_value,dma_reuse_reg_base+4);
}
else if((peripheralID >=DMAC1_CH_RX_PDSCH_CIR_RAM)&&(peripheralID <=DMAC1_CH_RX_MBSFN_CIR_MAX))
{
reg_value = ioread32(dma_reuse_reg_base+4);
reg_value &= (~(BIT_SHIFT_L(0x1,chNum)));
iowrite32(reg_value,dma_reuse_reg_base+4);
}
/*mark the channel used and return its handle*/
dma_chan_config_ptr[chNum].isUsed =TRUE;
return DMA_CHANNEL(dmaID,chNum);
}
static enum dma_status zx29_dma_tx_status(struct dma_chan *chan,
dma_cookie_t cookie, struct dma_tx_state *txstate)
{
struct zx297510_dma_chan *zx29_chan = to_zx29_dma_chan(chan);
dma_cookie_t last_used;
last_used = chan->cookie;
dma_set_tx_state(txstate, chan->completed_cookie, last_used, 0);
return zx29_chan->status;
}
static signed int dma_set_chan_addr(unsigned int dmaID, unsigned int channel,dma_chan_def * ptChanPar)
{
volatile dma_chan_reg __iomem* pChReg = NULL;
pChReg= &(dma_dev[dmaID].reg->channel[channel]);
pChReg->src_addr = ptChanPar->SrcAddr;
pChReg->dest_addr = ptChanPar->DestAddr;
pChReg->lli = ptChanPar->LLI;
return 0;
}
static signed int dma_set_chan_ctrl(unsigned int dmaID, unsigned int channel,dma_chan_def * ptChanPar)
{
volatile dma_chan_reg __iomem * pChReg=NULL;
pChReg= &(dma_dev[dmaID].reg->channel[channel]);
pChReg->control=DMA_CTRL_SOFT_B_REQ(ptChanPar->CONTROL.BurstReqMod)\
| DMA_CTRL_SRC_FIFO_MOD(ptChanPar->CONTROL.SrcMod ) \
| DMA_CTRL_DEST_FIFO_MOD(ptChanPar->CONTROL.DestMod) \
| DMA_CTRL_IRQ_MOD(ptChanPar->CONTROL.IrqMod) \
| DMA_CTRL_SRC_BURST_SIZE(ptChanPar->CONTROL.SrcBurstSize) \
| DMA_CTRL_SRC_BURST_LENGTH((ptChanPar->CONTROL.SrcBurstLen )) \
| DMA_CTRL_DEST_BURST_SIZE(ptChanPar->CONTROL.DestBurstSize) \
| DMA_CTRL_DEST_BURST_LENGTH((ptChanPar->CONTROL.DestBurstLen ))\
| DMA_CTRL_INTERRUPT_SEL(ptChanPar->CONTROL.IntSel) ;
return 0;
}
static signed int dma_set_chan_para(unsigned int dmaID, unsigned int channel, dma_chan_def * ptChanPar)
{
volatile dma_chan_reg __iomem* pChReg = NULL;
pChReg= &(dma_dev[dmaID].reg->channel[channel]);
pChReg->count=ptChanPar->Count;
return 0;
}
bool zx297510_dma_filter_fn(struct dma_chan *chan, void *param)
{
struct zx297510_dma_chan * channel = to_zx29_dma_chan(chan);
unsigned int peripheral_id = (unsigned int) param;
unsigned int chan_id = peripheral_id;
if(peripheral_id >DMA_CH_ALL||
(channel->dma_device->dmac_id == DMAC0&&peripheral_id>DMAC0_CH_MEMORY) ||
(channel->dma_device->dmac_id == DMAC1&&peripheral_id<=DMAC0_CH_MEMORY))
return false;
if(peripheral_id == DMAC0_CH_MEMORY||peripheral_id == DMAC1_CH_MEMORY)
{
if(zx29_dma_request(peripheral_id) == -EAGAIN)
return false;
else
return true;
}
if(channel->dma_device->dmac_id == DMAC1)
chan_id -= DMAC1_CH_START;
if (channel->channel_id != chan_id)
return false;
if(zx29_dma_request(peripheral_id) == -EAGAIN)
return false;
return true;
}
EXPORT_SYMBOL(zx297510_dma_filter_fn);
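/*
A minimal sketch of requesting a channel through the dmaengine API with this
filter (mirrors dma_m2m_test() below; DMAC0_CH_SD1_TX stands in for whatever
peripheral id the client needs):

	dma_cap_mask_t mask;
	struct dma_chan *chan;

	dma_cap_zero(mask);
	dma_cap_set(DMA_SLAVE, mask);
	chan = dma_request_channel(mask, zx297510_dma_filter_fn,
				   (void *)DMAC0_CH_SD1_TX);
*/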
/*allocate a channel for peripheralID and return the channel handle
(DMAC id in the high 16 bits, channel number in the low 16);
returns -EAGAIN if no channel is available
*/
signed int zx29_dma_request(dma_peripheral_id peripheralID)
{
signed int errCode = -EAGAIN;
mutex_lock(&dma_dev[0].dma_mutex);
errCode=dma_find_chan(peripheralID,DMAC0);
mutex_unlock(&dma_dev[0].dma_mutex);
if(errCode == -EAGAIN)
{
mutex_lock(&dma_dev[1].dma_mutex);
errCode=dma_find_chan(peripheralID,DMAC1);
mutex_unlock(&dma_dev[1].dma_mutex);
}
return errCode;
}
EXPORT_SYMBOL(zx29_dma_request);
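/*
A minimal sketch of the raw (non-dmaengine) flow, assuming chan_par is a
caller-filled dma_chan_def (see dma_lli_test() below for typical field
values):

	signed int handle = zx29_dma_request(DMAC0_CH_SD1_TX);

	if (handle >= 0) {
		zx29_dma_config((unsigned int)handle, &chan_par);
		zx29_dma_start((unsigned int)handle);
	}
*/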
signed int zx29_dma_config(unsigned int ucChannel,dma_chan_def *ptChanPar)
{
signed int errCode = -EAGAIN;
unsigned int dmaID =GET_HIGH_16BIT(ucChannel);
unsigned int channel = GET_LOW_16BIT(ucChannel);
if (channel >= DMA_CHAN_NUM || ptChanPar == NULL||dmaID >= DMAC_NUM)
{
return -EINVAL;
}
if(dma_dev[dmaID].chan_config[channel].isUsed == FALSE)
{
return -EINVAL;
}
if (ptChanPar->CONTROL.BurstReqMod>=DMA_REQ_MOD_ALL\
||ptChanPar->CONTROL.SrcMod>=DMA_ADDRMOD_ALL\
||ptChanPar->CONTROL.DestMod>=DMA_ADDRMOD_ALL\
||ptChanPar->CONTROL.IrqMod>=DMA_IRQMOD_ALL\
||ptChanPar->CONTROL.SrcBurstSize>=DMA_BURST_SIZE_ALL\
||ptChanPar->CONTROL.SrcBurstLen>=DMA_BURST_LEN_ALL\
||ptChanPar->CONTROL.DestBurstSize>=DMA_BURST_SIZE_ALL\
||ptChanPar->CONTROL.DestBurstLen>=DMA_BURST_LEN_ALL\
||ptChanPar->CONTROL.IntSel>=DMA_INT_SEL_ALL)
{
return -EINVAL;
}
dma_dev[dmaID].chan_config[channel].channelCbk= ptChanPar->CallBack;
dma_dev[dmaID].chan_config[channel].data =ptChanPar->data;
errCode=dma_set_chan_addr(dmaID, channel,ptChanPar);
if (errCode)
{
return errCode;
}
errCode=dma_set_chan_para(dmaID, channel,ptChanPar);
if (errCode)
{
return errCode;
}
errCode=dma_set_chan_ctrl(dmaID, channel,ptChanPar);
return errCode;
}
EXPORT_SYMBOL(zx29_dma_config);
/*******************************************************************************
* Function: zx29_dma_configLLI
* Description: configure a linked-list (scatter-gather) transfer on a channel
* Parameters:
* Input:
* channelID: channel handle returned by zx29_dma_request (DMAC0 only)
* channelaPara: array of per-segment transfer parameters
* LLIParaCnt: number of segments, 1..MAX_LLI_PARA_CNT
* Output:
*
* Returns: 0 on success, -EINVAL on invalid arguments
*
* Others:
********************************************************************************/
signed int zx29_dma_configLLI(unsigned int channelID,dma_chan_def * channelaPara, unsigned int LLIParaCnt)
{
unsigned int index = 0;
unsigned int dmacID = GET_HIGH_16BIT(channelID);
unsigned int channel = GET_LOW_16BIT(channelID);
volatile dma_chan_reg __iomem* pChReg = NULL;
if((channelaPara == NULL) || (LLIParaCnt < 1) ||(LLIParaCnt > MAX_LLI_PARA_CNT) || (dmacID == DMAC1) || (channel >= DMA_CHAN_NUM))
{
return -EINVAL;
}
pChReg= &(dma_dev[dmacID].reg->channel[channel]);
for(index=0; index<LLIParaCnt; index++)
{
(dma_lli_para_array[dmacID][channel])[index].src_addr = channelaPara[index].SrcAddr;
(dma_lli_para_array[dmacID][channel])[index].dest_addr = channelaPara[index].DestAddr;
(dma_lli_para_array[dmacID][channel])[index].count = channelaPara[index].Count;
(dma_lli_para_array[dmacID][channel])[index].src_ypara = channelaPara[index].SrcYPara;
(dma_lli_para_array[dmacID][channel])[index].src_zpara = channelaPara[index].SrcZPara;
(dma_lli_para_array[dmacID][channel])[index].dest_ypara = channelaPara[index].DestYPara;
(dma_lli_para_array[dmacID][channel])[index].dest_zpara = channelaPara[index].DestZPara;
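/*the lli field must hold the physical address the DMAC fetches the next
descriptor from: take the next entry's virtual offset within the LLI RAM
and rebase it onto DMA_RAM_END-32*PAGE_SIZE (the region ioremapped in
probe; DMAC1 is rejected above, so the DMAC0 mapping applies)*/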
(dma_lli_para_array[dmacID][channel])[index].lli = (unsigned int)(&((dma_lli_para_array[dmacID][channel])[index+1]))-(unsigned int)dma_lli_para_array[dmacID][0]+DMA_RAM_END-32*PAGE_SIZE;
(dma_lli_para_array[dmacID][channel])[index].control = DMA_CTRL_SOFT_B_REQ(channelaPara[index].CONTROL.BurstReqMod)\
| DMA_CTRL_SRC_FIFO_MOD(channelaPara[index].CONTROL.SrcMod ) \
| DMA_CTRL_DEST_FIFO_MOD(channelaPara[index].CONTROL.DestMod) \
| DMA_CTRL_IRQ_MOD(DMA_ERR_IRQ_ENABLE) \
| DMA_CTRL_SRC_BURST_SIZE(channelaPara[index].CONTROL.SrcBurstSize) \
| DMA_CTRL_SRC_BURST_LENGTH(channelaPara[index].CONTROL.SrcBurstLen ) \
| DMA_CTRL_DEST_BURST_SIZE(channelaPara[index].CONTROL.DestBurstSize) \
| DMA_CTRL_DEST_BURST_LENGTH(channelaPara[index].CONTROL.DestBurstLen) \
| DMA_CTRL_INTERRUPT_SEL(channelaPara[index].CONTROL.IntSel)\
| DMA_CTRL_ENABLE(1);
}
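/*descriptor 0 is loaded directly into the channel registers below and
started via zx29_dma_start(), so clear its enable bit; the last descriptor
terminates the chain (lli = 0) and is the only one with the full interrupt
mode set*/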
(dma_lli_para_array[dmacID][channel])[0].control &= (~0x1);
(dma_lli_para_array[dmacID][channel])[LLIParaCnt-1].lli = 0;
(dma_lli_para_array[dmacID][channel])[LLIParaCnt-1].control |= DMA_CTRL_IRQ_MOD(DMA_ALL_IRQ_ENABLE);
/*config first dma para into dma channel regs*/
pChReg->src_addr = (dma_lli_para_array[dmacID][channel])[0].src_addr;
pChReg->dest_addr = (dma_lli_para_array[dmacID][channel])[0].dest_addr;
pChReg->count = (dma_lli_para_array[dmacID][channel])[0].count;
pChReg->src_ypara = (dma_lli_para_array[dmacID][channel])[0].src_ypara;
pChReg->src_zpara = (dma_lli_para_array[dmacID][channel])[0].src_zpara;
pChReg->dest_ypara =(dma_lli_para_array[dmacID][channel])[0].dest_ypara;
pChReg->dest_zpara =(dma_lli_para_array[dmacID][channel])[0].dest_zpara;
pChReg->lli = (dma_lli_para_array[dmacID][channel])[0].lli;
pChReg->control= (dma_lli_para_array[dmacID][channel])[0].control;
dma_dev[dmacID].chan_config[channel].channelCbk= channelaPara[0].CallBack;
dma_dev[dmacID].chan_config[channel].data =channelaPara[0].data;
return 0;
}
EXPORT_SYMBOL(zx29_dma_configLLI);
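/*dma_lli_test() below (compiled when ZX297510_DMA_TEST is defined) shows a
three-segment LLI transfer built with this call*/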
signed int zx29_dma_start(unsigned int ucChannel)
{
volatile dma_regs __iomem * pReg=NULL;
unsigned int dmaID =GET_HIGH_16BIT(ucChannel);
unsigned int channel = GET_LOW_16BIT(ucChannel);
if(channel >= DMA_CHAN_NUM||dmaID >= DMAC_NUM)
{
return -EINVAL;
}
pReg= dma_dev[dmaID].reg;
pReg->channel[channel].control |= DMA_CTRL_ENABLE(DMA_ENABLE);
return 0;
}
EXPORT_SYMBOL(zx29_dma_start);
signed int zx29_dma_stop(unsigned int ucChannel)
{
volatile dma_regs __iomem * pReg=NULL;
unsigned int dmaID =GET_HIGH_16BIT(ucChannel);
unsigned int channel = GET_LOW_16BIT(ucChannel);
if(channel >= DMA_CHAN_NUM||dmaID >= DMAC_NUM)
{
return -EINVAL;
}
pReg= dma_dev[dmaID].reg;
/*clear the channel enable bit to stop the transfer*/
pReg->channel[channel].control &= ~DMA_CTRL_ENABLE(DMA_ENABLE);
return 0;
}
EXPORT_SYMBOL(zx29_dma_stop);
signed int zx29_dma_set_priority(dmac_id dmaID, dma_group_order groupOrder, dma_group_mode groupMode)
{
if(dmaID >= DMAC_NUM || groupOrder >= DMA_GROUP_ALL ||groupMode >= DMA_MODE_ALL)
{
return -EINVAL;
}
dma_dev[dmaID].reg->group_order = groupOrder;
dma_dev[dmaID].reg->arbit_mode = groupMode;
return 0;
}
EXPORT_SYMBOL(zx29_dma_set_priority);
signed int zx297510_dma_config(struct dma_chan *chan,dma_chan_def *ptChanPar)
{
struct zx297510_dma_chan *channel = to_zx29_dma_chan(chan);
unsigned int dmac_id=channel->dma_device->dmac_id;
unsigned int channel_id=channel->channel_id;
return zx29_dma_config(DMA_CHANNEL(dmac_id,channel_id),ptChanPar);
}
signed int zx297510_dma_start(struct zx297510_dma_chan *chan)
{
unsigned int dmac_id=chan->dma_device->dmac_id;
unsigned int channel_id=chan->channel_id;
return zx29_dma_start(DMA_CHANNEL(dmac_id,channel_id));
}
static dma_cookie_t zx29_dma_tx_submit(struct dma_async_tx_descriptor *tx)
{
return dma_cookie_assign(tx);
}
static int zx29_dma_alloc_chan_resources(struct dma_chan *channel)
{
struct zx297510_dma_chan *chan = to_zx29_dma_chan(channel);
zx29_dma_request(chan->peripheralID);
dma_async_tx_descriptor_init(&chan->desc, channel);
chan->desc.tx_submit = zx29_dma_tx_submit;
/* the descriptor is ready */
async_tx_ack(&chan->desc);
return 0;
}
void zx29_dma_free_chan_resource(struct dma_chan *chan)
{
struct zx297510_dma_chan *zx29_chan = to_zx29_dma_chan(chan);
dma_reset_chan(zx29_chan);
}
static struct dma_async_tx_descriptor *zx29_prep_dma_interleaved(
struct dma_chan *chan, struct dma_interleaved_template *xt,
unsigned long flags)
{
struct zx297510_dma_chan *channel = to_zx29_dma_chan(chan);
struct dma_async_tx_descriptor *desc = &channel->desc;
if(channel->status == DMA_IN_PROGRESS)
return NULL;
channel->status = DMA_IN_PROGRESS;
desc->callback = NULL;
desc->callback_param = NULL;
return desc;
}
static int zx29_dma_control(struct dma_chan *channel, enum dma_ctrl_cmd cmd,
unsigned long arg)
{
struct zx297510_dma_chan *chan = to_zx29_dma_chan(channel);
int ret = 0;
switch (cmd) {
case DMA_TERMINATE_ALL:
dma_reset_chan(chan);
ret = dma_disable_chan(chan);
break;
case DMA_SLAVE_CONFIG:
ret = zx297510_dma_config(channel,(dma_chan_def *)arg);
break;
default:
ret = -ENOSYS;
break;
}
return ret;
}
static void zx29_dma_issue_pending(struct dma_chan *chan)
{
struct zx297510_dma_chan *zx29_chan = to_zx29_dma_chan(chan);
zx297510_dma_start(zx29_chan);
}
irqreturn_t dma_Isr(int irq, void *dev)
{
unsigned int need_continue = 0;
unsigned int i;
struct zx297510_dmac *dmac_ptr = dev;
dma_regs __iomem * pReg=NULL;
unsigned int dwTcInt = 0;
unsigned int dwRawTcInt = 0;
unsigned int dwSrcErrInt = 0;
unsigned int dwDestErrInt = 0;
unsigned int dwCfgErrInt = 0;
pReg= dmac_ptr->reg;
dwTcInt = pReg->int_tc_status;
dwRawTcInt = pReg->raw_int_tc_status;
dwSrcErrInt = pReg->int_src_err_status;
dwDestErrInt = pReg->int_dest_err_status;
dwCfgErrInt = pReg->int_cfg_err_status;
if ((dwSrcErrInt|dwDestErrInt|dwCfgErrInt) != 0)
{
unsigned int err_bits = dwSrcErrInt|dwDestErrInt|dwCfgErrInt;
/*scan the per-channel error bits and reset every faulted channel*/
for (i = 0;(i< DMA_CHAN_NUM)&&(err_bits != 0); i++,err_bits >>= 1)
{
if (err_bits&0x01)
{
dmac_ptr->dma_chan[i].status = DMA_ERROR;
dma_reset_chan(&dmac_ptr->dma_chan[i]);
}
}
pReg->raw_int_src_err_status = dwSrcErrInt ;
pReg->raw_int_dest_err_status = dwDestErrInt ;
pReg->raw_int_cfg_err_status = dwCfgErrInt ;
}
if(dwRawTcInt == 0)
return IRQ_HANDLED;
do
{
need_continue = 0;
dwRawTcInt = pReg->raw_int_tc_status;
if(dwRawTcInt == 0)
break;
for (i = 0;(i< DMA_CHAN_NUM)&&(dwRawTcInt!=0); i++)
{
if (dwRawTcInt&0x01)
{
if((dmac_ptr->dma_chan[i].channel_id==DMAC0_CH_SD0_TX ||
dmac_ptr->dma_chan[i].channel_id==DMAC0_CH_SD0_RX||
dmac_ptr->dma_chan[i].channel_id==DMAC0_CH_SD1_TX||
dmac_ptr->dma_chan[i].channel_id==DMAC0_CH_SD1_RX))
{
if((dmac_ptr->dmac_id == 0)&&(dma0_chan_config[i].ownner == CORE_ID_A9)&&(((pReg->working_status)& (0x1<<i)) == 0))
{
pReg->raw_int_tc_status = (0x1<<i);
need_continue = 1;
if(dmac_ptr->chan_config[i].channelCbk)
(*(dmac_ptr->chan_config[i].channelCbk)) \
(i,DMA_INT_END,dmac_ptr->chan_config[i].data);
}
}
else if(((dmac_ptr->dmac_id == 0)&&(dma0_chan_config[i].ownner == CORE_ID_A9)) ||\
((dmac_ptr->dmac_id == 1)&&(dma1_chan_config[i].ownner == CORE_ID_A9)) )
{
pReg->raw_int_tc_status = (0x1<<i);
need_continue = 1;
dmac_ptr->dma_chan[i].status = DMA_SUCCESS;
dma_cookie_complete(&dmac_ptr->dma_chan[i].desc);
/* schedule tasklet on this channel */
tasklet_schedule(&dmac_ptr->dma_chan[i].tasklet);
}
}
dwRawTcInt = dwRawTcInt>>1;
}
}while(need_continue);
//TODO: check whether the operations on dma0 above affect the PS core's dma interrupts
return IRQ_HANDLED;
}
#ifdef ZX297510_DMA_TEST
void dma_cb(struct zx297510_dma_chan * chan)
{
dma_int_count++;
}
#endif
#ifdef ZX297510_DMA_TEST
static void dma_m2m_test(void)
{
static unsigned int test_loop_cnt = 0;
static struct dma_chan * chan = NULL;
struct dma_async_tx_descriptor *desc =NULL;
struct zx297510_dma_chan * zx29_chan = NULL;
dma_chan_def temp = {};
dma_cap_mask_t mask;
dma_cap_zero(mask);
dma_cap_set(DMA_SLAVE, mask);
temp= (dma_chan_def){0x23500000, 0x23500190, 400, 0, 0, 0, 0, 0, {0},NULL,dma_cb};
/*DMA test start*/
temp.CONTROL.BurstReqMod = DMA_SOFT_REQ;
temp.CONTROL.SrcMod = DMA_ADDRMOD_RAM;
temp.CONTROL.DestMod = DMA_ADDRMOD_RAM;
temp.CONTROL.SrcBurstSize = DMA_BURST_SIZE_8BIT;
temp.CONTROL.SrcBurstLen = DMA_BURST_LEN_16;
temp.CONTROL.DestBurstSize = DMA_BURST_SIZE_8BIT;
temp.CONTROL.DestBurstLen = DMA_BURST_LEN_16;
temp.CONTROL.IntSel = DMA_INT_TO_A9;
temp.CONTROL.IrqMod = DMA_ALL_IRQ_ENABLE;
src = ioremap(0x23500000, 0x400);
memset(src,0x5A,0x190);
memset(src+0x190,0x00,0x200);
if(dma_int_count == 0)
chan = dma_request_channel(mask,zx297510_dma_filter_fn,(void*)DMAC0_CH_SD1_TX);
if(dmaengine_slave_config(chan,(struct dma_slave_config*)&temp))
printk("dmaengine_slave_config failed~~~~~~");
zx29_chan = to_zx29_dma_chan(chan);
desc = zx29_chan->dma_device->dma.device_prep_interleaved_dma(chan,NULL,0);
desc->callback = (dma_async_tx_callback)dma_cb;
desc->callback_param = (void *) zx29_chan;
zx29_chan->zx29_dma_cookie = dmaengine_submit(desc);
dma_async_issue_pending(chan);
/*DMA test end*/
return ;
}
static void dma_lli_test(void)
{
static unsigned int test_loop_cnt = 0;
dma_chan_def temp[3] = {{0}};
signed int dma_chan = -1;
int i = 0;
temp[0]= (dma_chan_def){0x2391E000, 0x2391E800, 0x200, 0, 0, 0, 0, 0, {0},NULL,dma_cb};
temp[1]= (dma_chan_def){0x2391E200, 0x2391EA00, 0x200, 0, 0, 0, 0, 0, {0},NULL,dma_cb};
temp[2]= (dma_chan_def){0x2391E400, 0x2391EC00, 0x200, 0, 0, 0, 0, 0, {0},NULL,dma_cb};
/*DMA test start*/
for(; i<3; i++)
{
temp[i].CONTROL.BurstReqMod = DMA_SOFT_REQ;
temp[i].CONTROL.SrcMod = DMA_ADDRMOD_RAM;
temp[i].CONTROL.DestMod = DMA_ADDRMOD_RAM;
temp[i].CONTROL.SrcBurstSize = DMA_BURST_SIZE_8BIT;
temp[i].CONTROL.SrcBurstLen = DMA_BURST_LEN_16;
temp[i].CONTROL.DestBurstSize = DMA_BURST_SIZE_8BIT;
temp[i].CONTROL.DestBurstLen = DMA_BURST_LEN_16;
temp[i].CONTROL.IntSel = DMA_INT_TO_A9;
temp[i].CONTROL.IrqMod = DMA_ALL_IRQ_ENABLE;
}
src = ioremap(0x2391E000, 0x1000);
memset(src,0x5A,0x200);
memset(src+0x200,0xA5,0x200);
memset(src+0x400,0xFF,0x200);
memset(src+0x800,0x0,0x600);
dma_chan = zx29_dma_request(DMAC0_CH_SD1_TX);
zx29_dma_configLLI(dma_chan,temp,3);
zx29_dma_start(dma_chan);
/*DMA test end*/
return ;
}
static ssize_t dma_test_show(struct device *dev, struct device_attribute *attr, char *buf)
{
dma_m2m_test();
return 0;
}
static DEVICE_ATTR(dma_test,0600,dma_test_show,NULL);
static struct attribute *zx29_dma_attributes[] = {
&dev_attr_dma_test.attr,
NULL,
};
static const struct attribute_group zx29_dma_attribute_group = {
.attrs = (struct attribute **) zx29_dma_attributes,
};
#endif
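/*2015.01.22 hardware-bug workaround: a periodic timer compares DMA0's
masked and raw TC status for the SD-card channels and, when a completion
was latched but no interrupt fired, invokes dma_Isr() by hand*/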
static void check_dma_status(unsigned long data)
{
volatile unsigned int dma_int_status = 0;
volatile unsigned int raw_dma_int_status = 0;
volatile unsigned int dma_int_wrong_status = 0;
dma_timer_num++;
dma_int_status = dma_dev[0].reg->int_tc_status;
raw_dma_int_status = dma_dev[0].reg->raw_int_tc_status;
dma_int_wrong_status = (dma_int_status^raw_dma_int_status)&(0x3<<DMAC0_CH_SD0_TX);
if(dma_int_wrong_status)
{
dma_err_num++;
local_irq_disable();
dma_Isr(0,&dma_dev[0]);
local_irq_enable();
}
mod_timer(&dma0_timer,jiffies+100);
}
static int __devinit zx297510_dma_probe(struct platform_device* pDev)
{
int ret = 0;
int i = 0;
int j = 0;
void * dma_addr_for_cpu[2]= {NULL};
struct zx297510_dma_chan * dma_chan_ptr = NULL;
/*DMA IO ,mux regs remap*/
dma0_base = ioremap(pDev->resource[0].start, resource_size(&pDev->resource[0]));
dma1_base = ioremap(pDev->resource[1].start, resource_size(&pDev->resource[1]));
dma_reuse_reg_base = ioremap(0x01300014,8);
if(!dma0_base || !dma1_base || !dma_reuse_reg_base)
return -EAGAIN;
dma_dev[0].reg=(dma_regs *)dma0_base;
dma_dev[0].chan_config= dma0_chan_config;
mutex_init(&dma_dev[0].dma_mutex);
dma_dev[0].reg->irq_type = 0xF;
dma_dev[1].reg=(dma_regs *)dma1_base;
dma_dev[1].chan_config= dma1_chan_config;
mutex_init(&dma_dev[1].dma_mutex);
dma_dev[1].reg->irq_type = 0xF;
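/*map the reserved RAM at the top of DMA RAM (the 32 pages below
DMA_RAM_END) that holds the per-channel LLI descriptor arrays: the first
16 pages for DMAC0, the next 16 for DMAC1*/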
dma_addr_for_cpu[0] = ioremap(DMA_RAM_END-32*PAGE_SIZE, 16*PAGE_SIZE);
dma_addr_for_cpu[1] = ioremap(DMA_RAM_END-16*PAGE_SIZE, 16*PAGE_SIZE);
for(i=0;i<DMA_CHAN_NUM;i++)
{
dma_lli_para_array[0][i] = (dma_lli_para *) (dma_addr_for_cpu[0]+i*sizeof(dma_lli_para)*(MAX_LLI_PARA_CNT+1));
dma_lli_para_array[1][i] = (dma_lli_para *) (dma_addr_for_cpu[1]+i*sizeof(dma_lli_para)*(MAX_LLI_PARA_CNT+1));
}
ret = request_irq(pDev->resource[2].start, dma_Isr, 0, "dma0", &dma_dev[0]);
if (ret)
return ret;
ret = request_irq(pDev->resource[3].start, dma_Isr, 0, "dma1", &dma_dev[1]);
if (ret)
return ret;
/*2015.01.22: work around a DMA hardware bug (see check_dma_status above)*/
setup_timer(&dma0_timer,check_dma_status,0);
dma0_timer.expires = jiffies + 10;
add_timer(&dma0_timer);
for(i=0;i<DMAC_NUM;i++)
{
dma_dev[i].dmac_id = i;
dma_cap_set(DMA_SLAVE, dma_dev[i].dma.cap_mask);
dma_cap_set(DMA_INTERLEAVE, dma_dev[i].dma.cap_mask);
INIT_LIST_HEAD(&dma_dev[i].dma.channels);
/*init channel*/
for(j=0;j<DMA_CHAN_NUM;j++)
{
dma_chan_ptr = &dma_dev[i].dma_chan[j];
dma_chan_ptr->be_used = dma_dev[i].chan_config[j].isUsed;
dma_chan_ptr->channel_id = j;
dma_chan_ptr->peripheralID = dma_dev[i].chan_config[j].peripheralID;
dma_chan_ptr->chan_regs = &(dma_dev[i].reg->channel[j]);
dma_chan_ptr->dma_device = &(dma_dev[i]);
dma_chan_ptr->chan.device = &(dma_dev[i].dma);
dma_cookie_init(&dma_chan_ptr->chan);
tasklet_init(&dma_chan_ptr->tasklet, zx29_dma_tasklet,
(unsigned long) (dma_chan_ptr));
/* Add the channel to zx29_chan list */
list_add_tail(&dma_chan_ptr->chan.device_node,
&(dma_dev[i].dma.channels));
}
dma_dev[i].dma.device_alloc_chan_resources = zx29_dma_alloc_chan_resources;
dma_dev[i].dma.device_free_chan_resources = zx29_dma_free_chan_resource;
dma_dev[i].dma.device_tx_status = zx29_dma_tx_status;
dma_dev[i].dma.device_control = zx29_dma_control;
dma_dev[i].dma.device_prep_interleaved_dma = zx29_prep_dma_interleaved;
dma_dev[i].dma.device_issue_pending = zx29_dma_issue_pending;
/*dma_async_device_register() does BUG_ON(!device->dev), so dev must be set first*/
dma_dev[i].dma.dev = &pDev->dev;
ret = dma_async_device_register(&dma_dev[i].dma);
if (ret)
{
dev_err(dma_dev[i].dma.dev, "unable to register\n");
return -EINVAL;
}
}
#ifdef ZX297510_DMA_TEST
ret = sysfs_create_group(&pDev->dev.kobj,&zx29_dma_attribute_group);
#endif
//dma_m2m_test();
//dma_lli_test();
return 0;
}
struct platform_driver zx297510_dma_driver = {
.driver = {
.name = "zx297510_dma",
},
.probe = zx297510_dma_probe,
};
static int __init zx297510_dma_driver_init(void)
{
return platform_driver_register(&zx297510_dma_driver);
}
arch_initcall(zx297510_dma_driver_init);