blob: 6eb68fa9c482df45dbff9918b7ca0c65f3fc38b1 [file] [log] [blame]
/*******************************************************************************
* Copyright (C) 2013, ZTE Corporation.
*
* File Name:dma.c
* File Mark:
* Description:
* Others:
* Version: 0.1
* Author: limeifeng
* Date:
* modify
********************************************************************************/
/****************************************************************************
* Include files
****************************************************************************/
#include <linux/kernel.h>
#include <linux/interrupt.h>
#include <linux/spinlock.h>
#include <linux/dma-mapping.h>
#include <linux/mutex.h>
#include <linux/platform_device.h>
#include <linux/err.h>
#include <linux/ioport.h>
#include <linux/module.h>
#include <linux/clk.h>
#include <linux/slab.h>
#include <linux/io.h>
#include <linux/of.h>
#include <linux/of_address.h>
#include <linux/of_irq.h>
#include <linux/delay.h>
#include <linux/kthread.h>
#include <asm/cacheflush.h>
#include <asm/io.h>
#include "../dmaengine.h"
#include <linux/soc/sc/common.h>
#include <linux/soc/sc/spinlock.h>
#include "zx297520v3_dma.h"
//#pragma GCC optimize("O0")
#define DMA_SUCCESS DMA_COMPLETE
#define DMA_CHANNEL_CONFIG(peripheral_id, is_used , enable_mem2mem) {peripheral_id, is_used, enable_mem2mem}
/*dma channel config define*/
/*
 * Static per-channel configuration entry (one per physical DMA channel).
 * The table index of an entry is the driver's logical channel id.
 */
typedef struct
{
	dma_peripheral_id peripheral_id; /* hw channel id */
	unsigned int is_used;		/* set while this core has the channel allocated */
	unsigned int enable_mem2mem;	/* channel may be borrowed for mem-to-mem copies */
#if 0
	void * data;
	dma_callback_func channel_callback;
#endif
}dma_channel_config;
/*
 * Channel table for this controller. Entry order defines the logical
 * channel id returned by get_channel_id(); entries with enable_mem2mem
 * set may be handed out for DMA_CH_MEMORY requests (see zx29_dma_filter_fn).
 */
static dma_channel_config dma_chan_config[] =
{
	DMA_CHANNEL_CONFIG(DMA_CH_UART0_TX, false, false),
	DMA_CHANNEL_CONFIG(DMA_CH_UART0_RX, false, false),
	DMA_CHANNEL_CONFIG(DMA_CH_UART1_TX, false, false),
	DMA_CHANNEL_CONFIG(DMA_CH_UART1_RX, false, false),
	DMA_CHANNEL_CONFIG(DMA_CH_SSP0_TX, false, false),
	DMA_CHANNEL_CONFIG(DMA_CH_SSP0_RX, false, true),
#if 1 /* only ps core used */
	DMA_CHANNEL_CONFIG(DMA_CH_GPRS0, false, false),
	DMA_CHANNEL_CONFIG(DMA_CH_GPRS1, false, false),
	DMA_CHANNEL_CONFIG(DMA_CH_USIM, false, false),
#endif
	DMA_CHANNEL_CONFIG(DMA_CH_I2S0_TX, false, false),
	DMA_CHANNEL_CONFIG(DMA_CH_I2S0_RX0, false, false),
	DMA_CHANNEL_CONFIG(DMA_CH_I2S1_TX, false, false),
	DMA_CHANNEL_CONFIG(DMA_CH_I2S1_RX0, false, false),
	DMA_CHANNEL_CONFIG(DMA_CH_SPIFC_TX, false, false),
	DMA_CHANNEL_CONFIG(DMA_CH_SPIFC_RX, false, false),
	DMA_CHANNEL_CONFIG(DMA_CH_SSP1_TX, false, true),
	DMA_CHANNEL_CONFIG(DMA_CH_SSP1_RX, false, false),
	DMA_CHANNEL_CONFIG(DMA_CH_UART2_TX, false, true),
	DMA_CHANNEL_CONFIG(DMA_CH_UART2_RX, false, true),
	DMA_CHANNEL_CONFIG(DMA_CH_EMBMS, false, false),
#if 1 /* only ps core used */
	DMA_CHANNEL_CONFIG(DMA_CH_USIM1, false, false),
#endif
	DMA_CHANNEL_CONFIG(DMA_CH_M2M_TX, false, true),
	DMA_CHANNEL_CONFIG(DMA_CH_M2M_RX, false, true),
};
/****************************************************************************
* Local Macros
****************************************************************************/
#define BIT_SHIFT_L(value,BIT_NO) ((unsigned int)(value << (BIT_NO)))
#define GET_HIGH_16BIT(val) (unsigned int)(val >> (16))
#define GET_LOW_16BIT(val) (unsigned int)(val & (0xffff))
#define DMA_CHANNEL(dmac,channel) (unsigned int)(dmac << (16)|(channel) )
/*dma control reg bit */
#define DMA_CTRL_ENABLE(value) BIT_SHIFT_L(value,0)
#define DMA_CTRL_SOFT_B_REQ(value) BIT_SHIFT_L(value,1)
#define DMA_CTRL_SRC_FIFO_MOD(value) BIT_SHIFT_L(value,2)
#define DMA_CTRL_DEST_FIFO_MOD(value) BIT_SHIFT_L(value,3)
#define DMA_CTRL_IRQ_MOD(value) BIT_SHIFT_L(value,4)
#define DMA_CTRL_SRC_BURST_SIZE(value) BIT_SHIFT_L(value,6)
#define DMA_CTRL_SRC_BURST_LENGTH(value) BIT_SHIFT_L(value,9)
#define DMA_CTRL_DEST_BURST_SIZE(value) BIT_SHIFT_L(value,13)
#define DMA_CTRL_DEST_BURST_LENGTH(value) BIT_SHIFT_L(value,16)
#define DMA_CTRL_INTERRUPT_SEL(value) BIT_SHIFT_L(value,20)
#define DMA_CTRL_FORCE_CLOSE(value) BIT_SHIFT_L(value,31)
#define MAX(a,b) ((a) > (b) ? (a) : (b))
/* Maximum number of linked-list (LLI) descriptors in one transfer chain */
#define MAX_LLI_PARA_CNT (32)
/* config dma reused */
#define DMA_SEL_CFG_REG (get_socsys_base() + 0x120)
#if defined(CONFIG_ARCH_ZX297520V2)
#define DMA_SEL_UART2_I2S (1U << 0)
#define DMA_SEL_UART1_HASH (1U << 1)
#define DMA_SEL_I2S0_TDM (1U << 2)
#define DMA_SEL_I2S1_TDM (1U << 3)
#elif defined(CONFIG_ARCH_ZX297520V3)
#define DMA_SEL_UART2TX_I2S0RX1 (1U << 0)
#define DMA_SEL_UART2RX_I2S1RX1 (1U << 1)
#define DMA_SEL_UART1RX_HASH (1U << 2)
#define DMA_SEL_I2S0TX_TDMTX0 (1U << 3)
#define DMA_SEL_I2S0RX0_TDMRX0 (1U << 4)
#define DMA_SEL_I2S1TX_TDMTX1 (1U << 5)
#define DMA_SEL_I2S1RX0_TDMRX1 (1U << 6)
#endif
/*
 * In-memory image of one hardware linked-list item (LLI) descriptor.
 * Descriptors are chained through link_addr (physical address of the next
 * descriptor; 0 terminates the chain). Layout mirrors the channel register
 * block loaded in dma_set_chan_para().
 */
typedef struct
{
	volatile unsigned int src_addr;		/* source bus address */
	volatile unsigned int dest_addr;	/* destination bus address */
	volatile unsigned int xpara;		/* X-dimension transfer count */
	volatile unsigned int yzpara;		/* Y count (low 16) | Z count (high 16) */
	volatile unsigned int src_yzstep;	/* src Y step (low 16) | Z step (high 16) */
	volatile unsigned int dest_yzstep;	/* dest Y step (low 16) | Z step (high 16) */
	volatile unsigned int reserved0;
	volatile unsigned int link_addr;	/* phys addr of next LLI, 0 = last */
	volatile unsigned int control;		/* DMA_CTRL_* bit field */
}dma_lli_param;
#define MAX_LLI_PARAMS_CNT (sizeof(dma_lli_param)*MAX_LLI_PARA_CNT)
static dma_lli_param *dma_lli_params[DMA_CH_NUM];
static dma_addr_t dma_lli_phy_addr[DMA_CH_NUM];
#define ZX29_DMA_TEST 0
/*
 * Cross-core channel ownership record, indexed by peripheral id
 * (see dma_chan_lock()/dma_chan_unlock()/dma_chan_check_lock()).
 */
typedef struct
{
	volatile unsigned short core_id; /* zte_coreid -- for debug */
	volatile unsigned short is_used;
}dma_pub_config;
/* Base of the shared ownership table -- presumably mapped during probe (not visible in this chunk). */
static dma_pub_config *dma_pub_configs;
#define ZX29_DMA_INT_SEL DMA_INT_TO_A9
/****************************************************************************
* Local Types
****************************************************************************/
static DEFINE_MUTEX(dma_mutex);
/* Per-channel dmaengine state wrapper. */
struct zx29_dma_channel
{
	dma_peripheral_id peripheral_id;	/* physical channel this slot drives */
	struct zx29_dma * dma_device;
	struct dma_chan chan;			/* dmaengine handle (container_of target) */
	struct dma_async_tx_descriptor desc;	/* single reusable descriptor */
	struct tasklet_struct tasklet;		/* completion-callback context (see dma_Isr) */
	enum dma_status status;
	unsigned int cyclic;			/* nonzero while a cyclic transfer is active */
	dma_peripheral_id req_peripheral_id;	/* id the client requested (may be a reused/muxed id) */
	// dma_channel_def dma_chan_par;
	dma_cookie_t zx29_dma_cookie;
};
/* Controller-wide state: dmaengine device, register base and channel table. */
struct zx29_dma
{
	struct dma_device dma;
	dma_regs __iomem * reg;			/* memory-mapped controller registers */
	dma_channel_config * chan_config;	/* channel table -- presumably dma_chan_config[]; set outside this chunk */
	unsigned int channel_count;
	struct zx29_dma_channel dma_chan[DMA_CH_NUM];
};
/****************************************************************************
* DMA trace
****************************************************************************/
/* #define ZX_TRACE_DMA */
#ifdef ZX_TRACE_DMA
/*#pragma GCC optimize("O0")*/
/* Lightweight wrap-around trace buffer of per-channel DMA events for debugging. */
extern unsigned int test_timer_read( void );
#define TRACE_DMA_COUNT 1000
typedef enum
{
	DMA_DO_SUBMIT = 0,
	DMA_DO_START = 1,
	DMA_DO_ERR = 2,
	DMA_DO_SUCCESS = 3,
}dma_behavior_t;
typedef struct
{
	dma_peripheral_id peripheral_id;
	dma_behavior_t behavior;
}dma_trace_t;
volatile dma_trace_t dma_trace_view[TRACE_DMA_COUNT+10];
volatile unsigned int dma_trace_index = 0;
/* Advance the trace write index, wrapping at TRACE_DMA_COUNT. */
#define dma_trace_index_inc() \
do{ \
dma_trace_index++;\
if(dma_trace_index>=TRACE_DMA_COUNT)\
dma_trace_index=0;\
}while(0)
static struct zx29_dma_channel *to_zx29_dma_chan(struct dma_chan *chan);
/* Record a descriptor submission for the channel owning @tx. */
static void dma_trace_submit(struct dma_async_tx_descriptor *tx)
{
	struct zx29_dma_channel *dma_channel = to_zx29_dma_chan(tx->chan);
	dma_trace_view[dma_trace_index].peripheral_id = dma_channel->peripheral_id;
	dma_trace_view[dma_trace_index].behavior = DMA_DO_SUBMIT;
	dma_trace_index_inc();
}
/* Record a transfer being started. */
static void dma_trace_pending(dma_peripheral_id peripheral_id)
{
	dma_trace_view[dma_trace_index].peripheral_id = peripheral_id;
	dma_trace_view[dma_trace_index].behavior = DMA_DO_START;
	dma_trace_index_inc();
}
/* Record a transfer error. */
static void dma_trace_err(dma_peripheral_id peripheral_id)
{
	dma_trace_view[dma_trace_index].peripheral_id = peripheral_id;
	dma_trace_view[dma_trace_index].behavior = DMA_DO_ERR;
	dma_trace_index_inc();
}
/* Record a successful completion. */
static void dma_trace_success(dma_peripheral_id peripheral_id)
{
	dma_trace_view[dma_trace_index].peripheral_id = peripheral_id;
	dma_trace_view[dma_trace_index].behavior = DMA_DO_SUCCESS;
	dma_trace_index_inc();
}
#else
/* Tracing compiled out: empty stubs keep call sites unconditional. */
static void dma_trace_submit(struct dma_async_tx_descriptor *tx){}
static void dma_trace_pending(dma_peripheral_id peripheral_id){}
static void dma_trace_err(dma_peripheral_id peripheral_id){}
static void dma_trace_success(dma_peripheral_id peripheral_id){}
#endif
static struct zx29_dma dma_dev;
unsigned int dma_err_num = 0;
#if 0
#define DMA_CHANNEL_CONFIG(peripheral_id, is_used , enable_mem2mem) {peripheral_id, is_used, enable_mem2mem}
static dma_channel_config dma_chan_config[] =
{
DMA_CHANNEL_CONFIG(DMA_CH_UART0_TX, false, true),
DMA_CHANNEL_CONFIG(DMA_CH_UART0_RX, false, true),
DMA_CHANNEL_CONFIG(DMA_CH_UART1_TX, false, true),
DMA_CHANNEL_CONFIG(DMA_CH_UART1_RX, false, true),
DMA_CHANNEL_CONFIG(DMA_CH_SSP0_TX, false, true),
DMA_CHANNEL_CONFIG(DMA_CH_SSP0_RX, false, true),
#if 0 /* only ps core used */
DMA_CHANNEL_CONFIG(DMA_CH_GPRS0, true, true),
DMA_CHANNEL_CONFIG(DMA_CH_GPRS1, true, true),
DMA_CHANNEL_CONFIG(DMA_CH_USIM, true, true),
#endif
DMA_CHANNEL_CONFIG(DMA_CH_I2S0_TX, false, false),
DMA_CHANNEL_CONFIG(DMA_CH_I2S0_RX0, false, false),
DMA_CHANNEL_CONFIG(DMA_CH_I2S1_TX, false, false),
DMA_CHANNEL_CONFIG(DMA_CH_I2S1_RX0, false, false),
DMA_CHANNEL_CONFIG(DMA_CH_SPIFC_TX, false, false),
DMA_CHANNEL_CONFIG(DMA_CH_SPIFC_RX, false, false),
DMA_CHANNEL_CONFIG(DMA_CH_SSP1_TX, false, true),
DMA_CHANNEL_CONFIG(DMA_CH_SSP1_RX, false, true),
DMA_CHANNEL_CONFIG(DMA_CH_UART2_TX, false, true),
DMA_CHANNEL_CONFIG(DMA_CH_UART2_RX, false, true),
#ifdef CONFIG_ARCH_ZX297520V2
#else
DMA_CHANNEL_CONFIG(DMA_CH_EMBMS, false, true),
#if 0 /* only ps core used */
DMA_CHANNEL_CONFIG(DMA_CH_USIM1, false, true),
#endif
DMA_CHANNEL_CONFIG(DMA_CH_M2M_TX, false, true),
DMA_CHANNEL_CONFIG(DMA_CH_M2M_RX, false, true),
#endif
};
#endif
/*
 * Report whether a peripheral channel is claimed in the shared
 * (cross-core) ownership table. SPIFC channels are never tracked there,
 * so they always read as free.
 */
static unsigned short dma_chan_check_lock(dma_peripheral_id peripheral_id)
{
	switch (peripheral_id) {
	case DMA_CH_SPIFC_TX:
	case DMA_CH_SPIFC_RX:
		return false;
	default:
		return dma_pub_configs[peripheral_id].is_used;
	}
}
/*
 * Mark a peripheral channel as owned by the AP core in the shared
 * ownership table. SPIFC channels are exempt from cross-core locking.
 */
static void dma_chan_lock(dma_peripheral_id peripheral_id)
{
	dma_pub_config *owner;

	if (peripheral_id == DMA_CH_SPIFC_TX || peripheral_id == DMA_CH_SPIFC_RX)
		return;

	owner = &dma_pub_configs[peripheral_id];
	owner->core_id = 208 /*for cap CORE_ID_AP*/;
	owner->is_used = true;
}
/*
 * Return a peripheral channel to the shared free pool.
 * SPIFC channels are exempt from cross-core locking.
 */
static void dma_chan_unlock(dma_peripheral_id peripheral_id)
{
	dma_pub_config *owner;

	if (peripheral_id == DMA_CH_SPIFC_TX || peripheral_id == DMA_CH_SPIFC_RX)
		return;

	owner = &dma_pub_configs[peripheral_id];
	owner->core_id = CORE_ID_NUM;
	owner->is_used = false;
}
/* some channel need config reuse register */
/*
 * Some request lines are multiplexed onto a shared physical channel.
 * Program the SoC DMA_SEL_CFG_REG mux so the given requester drives its
 * channel: clearing a select bit routes the "primary" peripheral,
 * setting it routes the alternate one. IDs that are not muxed fall
 * through with no register access.
 */
static void dma_reuse_config(dma_peripheral_id peripheral_id)
{
	switch(peripheral_id)
	{
#if defined(CONFIG_ARCH_ZX297520V2)
	/* V2: one select bit per peripheral pair */
	case DMA_CH_UART2_TX:
	case DMA_CH_UART2_RX:
		zx_clr_reg(DMA_SEL_CFG_REG, DMA_SEL_UART2_I2S);
		break;
	case DMA_CH_I2S0_RX1:
	case DMA_CH_I2S1_RX1:
		zx_set_reg(DMA_SEL_CFG_REG, DMA_SEL_UART2_I2S);
		break;
	case DMA_CH_UART1_RX:
		zx_clr_reg(DMA_SEL_CFG_REG, DMA_SEL_UART1_HASH);
		break;
	case DMA_CH_HASH_RX:
		zx_set_reg(DMA_SEL_CFG_REG, DMA_SEL_UART1_HASH);
		break;
	case DMA_CH_I2S0_TX:
	case DMA_CH_I2S0_RX0:
		zx_clr_reg(DMA_SEL_CFG_REG, DMA_SEL_I2S0_TDM);
		break;
	case DMA_CH_TDM_TX0:
	case DMA_CH_TDM_RX0:
		zx_set_reg(DMA_SEL_CFG_REG, DMA_SEL_I2S0_TDM);
		break;
	case DMA_CH_I2S1_TX:
	case DMA_CH_I2S1_RX0:
		zx_clr_reg(DMA_SEL_CFG_REG, DMA_SEL_I2S1_TDM);
		break;
	case DMA_CH_TDM_TX1:
	case DMA_CH_TDM_RX1:
		zx_set_reg(DMA_SEL_CFG_REG, DMA_SEL_I2S1_TDM);
		break;
#elif defined(CONFIG_ARCH_ZX297520V3)
	/* V3: a dedicated select bit per direction/stream */
	case DMA_CH_UART1_RX:
		zx_clr_reg(DMA_SEL_CFG_REG, DMA_SEL_UART1RX_HASH);
		break;
	case DMA_CH_I2S0_TX:
		zx_clr_reg(DMA_SEL_CFG_REG, DMA_SEL_I2S0TX_TDMTX0);
		break;
	case DMA_CH_I2S0_RX0:
		zx_clr_reg(DMA_SEL_CFG_REG, DMA_SEL_I2S0RX0_TDMRX0);
		break;
	case DMA_CH_I2S1_TX:
		zx_clr_reg(DMA_SEL_CFG_REG, DMA_SEL_I2S1TX_TDMTX1);
		break;
	case DMA_CH_I2S1_RX0:
		zx_clr_reg(DMA_SEL_CFG_REG, DMA_SEL_I2S1RX0_TDMRX1);
		break;
	case DMA_CH_UART2_TX:
		zx_clr_reg(DMA_SEL_CFG_REG, DMA_SEL_UART2TX_I2S0RX1);
		break;
	case DMA_CH_UART2_RX:
		zx_clr_reg(DMA_SEL_CFG_REG, DMA_SEL_UART2RX_I2S1RX1);
		break;
	case DMA_CH_HASH_RX:
		zx_set_reg(DMA_SEL_CFG_REG, DMA_SEL_UART1RX_HASH);
		break;
	case DMA_CH_TDM_TX0:
		zx_set_reg(DMA_SEL_CFG_REG, DMA_SEL_I2S0TX_TDMTX0);
		break;
	case DMA_CH_TDM_RX0:
		zx_set_reg(DMA_SEL_CFG_REG, DMA_SEL_I2S0RX0_TDMRX0);
		break;
	case DMA_CH_TDM_TX1:
		zx_set_reg(DMA_SEL_CFG_REG, DMA_SEL_I2S1TX_TDMTX1);
		break;
	case DMA_CH_TDM_RX1:
		zx_set_reg(DMA_SEL_CFG_REG, DMA_SEL_I2S1RX0_TDMRX1);
		break;
	case DMA_CH_I2S0_RX1:
		zx_set_reg(DMA_SEL_CFG_REG, DMA_SEL_UART2TX_I2S0RX1);
		break;
	case DMA_CH_I2S1_RX1:
		zx_set_reg(DMA_SEL_CFG_REG, DMA_SEL_UART2RX_I2S1RX1);
		break;
#endif
	default:
		break;
	}
}
/*
 * Map a (possibly multiplexed) requester id to the physical channel it
 * rides on. IDs below DMA_CH_NUM are already physical; unknown reused
 * ids map to the 0xff sentinel.
 */
static dma_peripheral_id get_real_peri_id(dma_peripheral_id peripheral_id)
{
	if (peripheral_id < DMA_CH_NUM)
		return peripheral_id;

	if (peripheral_id == DMA_CH_HASH_RX)
		return DMA_CH_UART1_RX;
	if (peripheral_id == DMA_CH_TDM_TX0)
		return DMA_CH_I2S0_TX;
	if (peripheral_id == DMA_CH_TDM_RX0)
		return DMA_CH_I2S0_RX0;
	if (peripheral_id == DMA_CH_TDM_TX1)
		return DMA_CH_I2S1_TX;
	if (peripheral_id == DMA_CH_TDM_RX1)
		return DMA_CH_I2S1_RX0;
	if (peripheral_id == DMA_CH_I2S0_RX1)
		return DMA_CH_UART2_TX;
	if (peripheral_id == DMA_CH_I2S1_RX1)
		return DMA_CH_UART2_RX;

	return 0xff;	/* unknown requester */
}
/*
 * Translate a requester id into the logical channel index (position in
 * dma_dev.chan_config[]). Returns 0xff when no table entry matches.
 */
static unsigned int get_channel_id(dma_peripheral_id peripheral_id)
{
	dma_channel_config *table = dma_dev.chan_config;
	dma_peripheral_id real_id = get_real_peri_id(peripheral_id);
	unsigned int idx;

	for (idx = 0; idx < dma_dev.channel_count; idx++) {
		if (table[idx].peripheral_id == real_id)
			return idx;
	}
	return 0xff;
}
/* Hand the channel's LLI descriptor table to the CPU before editing it. */
static void dma_sync_lli_for_cpu(unsigned int channel_id)
{
	dma_sync_single_for_cpu(dma_dev.dma.dev,
				dma_lli_phy_addr[channel_id],
				MAX_LLI_PARAMS_CNT,
				DMA_BIDIRECTIONAL);
}
/* Hand the channel's LLI descriptor table back to the device after editing. */
static void dma_sync_lli_for_device(unsigned int channel_id)
{
	dma_sync_single_for_device(dma_dev.dma.dev,
				   dma_lli_phy_addr[channel_id],
				   MAX_LLI_PARAMS_CNT,
				   DMA_BIDIRECTIONAL);
}
static signed int dma_find_chan(dma_peripheral_id channel_id);
static signed int dma_reset_chan(struct zx29_dma_channel *channel);
static void dma_tasklet(unsigned long data)
{
struct zx29_dma_channel *chan = (struct zx29_dma_channel *)data;
if (chan->desc.callback)
chan->desc.callback(chan->desc.callback_param);
}
/* Recover the driver channel wrapper from an embedded dmaengine handle. */
static struct zx29_dma_channel *to_zx29_dma_chan(struct dma_chan *chan)
{
	return container_of(chan, struct zx29_dma_channel, chan);
}
static signed int dma_disable_chan(struct zx29_dma_channel *chan)
{
dma_chan_reg __iomem* chan_reg_ptr=NULL;
if (chan->peripheral_id >= DMA_CH_NUM)
{
return -EINVAL;
}
chan_reg_ptr= &(dma_dev.reg->channel[chan->peripheral_id]);
chan_reg_ptr->control |= DMA_CTRL_FORCE_CLOSE(1);
return 0;
}
/*reset channel para*/
static signed int dma_reset_chan(struct zx29_dma_channel *chan)
{
unsigned int peripheral_id;
unsigned int channel_id;
dma_regs __iomem* pReg;
dma_chan_reg __iomem* chan_reg_ptr;
if (!chan) {
return -EINVAL;
}
peripheral_id = (unsigned int)chan->peripheral_id;
if (peripheral_id >= DMA_CH_NUM) {
return -EINVAL;
}
channel_id = get_channel_id(chan->peripheral_id);
if(channel_id == 0xff)
return -EINVAL;
pReg= dma_dev.reg;
chan_reg_ptr= &(pReg->channel[peripheral_id]);
/*force close current channel*/
chan_reg_ptr->control |= DMA_CTRL_FORCE_CLOSE(1);
//memset((void*) chan_reg_ptr,0,sizeof(dma_chan_reg));
pReg->raw_int_tc_status = BIT_SHIFT_L(0x1,peripheral_id);
pReg->raw_int_src_err_status = BIT_SHIFT_L(0x1,peripheral_id);
pReg->raw_int_dest_err_status = BIT_SHIFT_L(0x1,peripheral_id);
pReg->raw_int_cfg_err_status = BIT_SHIFT_L(0x1,peripheral_id);
memset((void*) chan_reg_ptr,0,sizeof(dma_chan_reg));
//dma_dev[dmac_id].chan_config[channel_id].channelCbk = NULL;
//dma_dev[dmac_id].chan_config[channel_id].data = NULL;
chan->status = DMA_SUCCESS;
chan->cyclic = 0;
dma_dev.chan_config[channel_id].is_used = false;
dma_chan_unlock(dma_dev.chan_config[channel_id].peripheral_id);
return 0;
}
/*find the fixed free channel for peripheralID*/
static signed int dma_find_chan(dma_peripheral_id peripheral_id)
{
unsigned int channel_id = 0xff;
dma_channel_config *chan_config = dma_dev.chan_config;
#if 0/*move to zx29_dma_filter_fn*/
/*in case there is free channel,allocate it to M2M*/
if (DMA_CH_MEMORY==peripheral_id)
{
for(i=0; i<dma_dev.channel_count; i++)
{
if((chan_config[i].is_used==false) && \
(dma_chan_check_lock(chan_config[i].peripheral_id)==false) && \
(chan_config[i].enable_mem2mem==true))
{
chan_config[i].is_used = true;
dma_chan_lock(chan_config[i].peripheral_id);
return i;
}
}
return -EAGAIN;
}
#endif
channel_id = get_channel_id(peripheral_id);
if(channel_id==0xff)
return -EAGAIN;
reg_spin_lock();
/*if channle has been used,return error*/
if((chan_config[channel_id].is_used==true) || \
(dma_chan_check_lock(chan_config[channel_id].peripheral_id)==true))
{
reg_spin_unlock();
return -EAGAIN;
}
/*get the channel number*/
chan_config[channel_id].is_used =true;
dma_chan_lock(chan_config[channel_id].peripheral_id);
reg_spin_unlock();
/* channel reuse*/
dma_reuse_config(peripheral_id);
return channel_id;
}
static u32 dma_get_residue(struct zx29_dma_channel *chan)
{
dma_regs __iomem* pReg = NULL;
dma_chan_reg __iomem* chan_reg_ptr = NULL;
pReg= dma_dev.reg;
chan_reg_ptr= &(pReg->channel[chan->peripheral_id]);
return chan_reg_ptr->xpara;
}
/*
 * dmaengine .device_tx_status hook: report the channel's last recorded
 * status plus the residue read from the hardware counter.
 */
static enum dma_status zx29_dma_tx_status(struct dma_chan *chan,
		dma_cookie_t cookie, struct dma_tx_state *txstate)
{
	struct zx29_dma_channel *zchan = to_zx29_dma_chan(chan);
	u32 residue = dma_get_residue(zchan);

	dma_set_tx_state(txstate, chan->completed_cookie, chan->cookie, residue);
	return zchan->status;
}
/*
 * Derive the request-type and address-mode control bits from the
 * transfer direction: peripheral endpoints use FIFO addressing with a
 * hardware burst request, memory endpoints use incrementing RAM
 * addressing; a pure memory copy uses a software request.
 * Unknown modes fall back to the mem-to-mem setting, as before.
 */
static unsigned int parse_dma_req(dma_transfer_mode trans_mode)
{
	unsigned int src_is_fifo = (trans_mode == TRAN_PERI_TO_PERI) ||
				   (trans_mode == TRAN_PERI_TO_MEM);
	unsigned int dest_is_fifo = (trans_mode == TRAN_PERI_TO_PERI) ||
				    (trans_mode == TRAN_MEM_TO_PERI);
	unsigned int peri_req = src_is_fifo || dest_is_fifo;

	return DMA_CTRL_SOFT_B_REQ(peri_req ? DMA_PERIPHERAL_REQ : DMA_SOFT_REQ)
	     | DMA_CTRL_SRC_FIFO_MOD(src_is_fifo ? DMA_ADDRMOD_FIFO : DMA_ADDRMOD_RAM)
	     | DMA_CTRL_DEST_FIFO_MOD(dest_is_fifo ? DMA_ADDRMOD_FIFO : DMA_ADDRMOD_RAM);
}
/*
 * Load the channel registers from LLI entry 0 of the channel's
 * descriptor table (built earlier by dma_config_lli()), with the enable
 * bit masked off -- the transfer is actually started later by
 * zx29_dma_start(). @channel is the physical peripheral id.
 * Returns 0 on success, -EAGAIN if the id has no table entry.
 */
static signed int dma_set_chan_para(unsigned int channel)//,dma_channel_def * chan_para)
{
	volatile dma_chan_reg __iomem* chan_reg = &(dma_dev.reg->channel[channel]);
	unsigned int channel_id = get_channel_id(channel);
	dma_lli_param *temp_dma_lli_params = NULL;
	if(channel_id>= DMA_CH_NUM)
		return -EAGAIN;
	temp_dma_lli_params = dma_lli_params[channel_id];
/*	chan_reg->src_addr = chan_para->src_addr;
	chan_reg->dest_addr = chan_para->dest_addr;
	chan_reg->xpara = chan_para->count;
	chan_reg->link_addr = chan_para->link_addr;
	if(chan_para->link_addr)
	chan_reg->link_addr = dma_lli_phy_addr[get_channel_id(channel)];
	chan_reg->control = parse_dma_req(chan_para->dma_control.tran_mode)\
	| DMA_CTRL_SRC_BURST_SIZE(chan_para->dma_control.src_burst_size) \
	| DMA_CTRL_SRC_BURST_LENGTH((chan_para->dma_control.src_burst_len )) \
	| DMA_CTRL_DEST_BURST_SIZE(chan_para->dma_control.dest_burst_size) \
	| DMA_CTRL_DEST_BURST_LENGTH((chan_para->dma_control.dest_burst_len ))\
	| DMA_CTRL_INTERRUPT_SEL(DMA_INT_TO_PS) ;
	if(chan_para->dma_control.irq_mode)
	{
		if(chan_para->link_addr)
		chan_reg->control |= DMA_CTRL_IRQ_MOD(DMA_ERR_IRQ_ENABLE);
		else
		chan_reg->control |= DMA_CTRL_IRQ_MOD(DMA_ALL_IRQ_ENABLE);
	}*/
	/* mirror descriptor 0 into the live registers, enable bit cleared */
	chan_reg->src_addr = temp_dma_lli_params[0].src_addr;
	chan_reg->dest_addr = temp_dma_lli_params[0].dest_addr;
	chan_reg->xpara = temp_dma_lli_params[0].xpara;
	chan_reg->link_addr = temp_dma_lli_params[0].link_addr;
	chan_reg->control = temp_dma_lli_params[0].control &
						(~(DMA_CTRL_ENABLE(DMA_ENABLE)));
	return 0;
}
/*allocate a channel for peripheralID,
and return the channel number.if failed return -EAGAIN
*/
/*
 * Allocate the channel dedicated to @peripheral_id.
 * Returns the logical channel index, or -EAGAIN on failure.
 */
signed int zx29_dma_request(dma_peripheral_id peripheral_id)
{
	signed int channel;

	mutex_lock(&dma_mutex);
	channel = dma_find_chan(peripheral_id);
	mutex_unlock(&dma_mutex);

	return channel;
}
/*
 * Build the channel's LLI descriptor chain from the client parameter
 * array. Entries are consumed up to and including the first one with
 * link_addr == 0; each non-terminal descriptor is linked to the next
 * slot of the channel's DMA-coherent table. Intermediate descriptors
 * get only the error interrupt; the terminal one gets all interrupts.
 */
static void dma_config_lli(unsigned int channel_id, dma_channel_def *chan_para)
{
	int i = 0;
	dma_lli_param *temp_dma_lli_params = dma_lli_params[channel_id];
	dma_sync_lli_for_cpu(channel_id);
	do{
		temp_dma_lli_params[i].src_addr = chan_para[i].src_addr;
		temp_dma_lli_params[i].dest_addr = chan_para[i].dest_addr;
		temp_dma_lli_params[i].xpara = chan_para[i].count;
		/* Y/Z counts and steps are packed 16+16 into one word each */
		temp_dma_lli_params[i].yzpara = chan_para[i].ycount | (chan_para[i].zcount << 16);
		temp_dma_lli_params[i].src_yzstep = chan_para[i].src_ystep | (chan_para[i].src_zstep << 16);
		temp_dma_lli_params[i].dest_yzstep = chan_para[i].dest_ystep | (chan_para[i].dest_zstep << 16);
		temp_dma_lli_params[i].control = parse_dma_req(chan_para[i].dma_control.tran_mode)\
		| DMA_CTRL_SRC_BURST_SIZE(chan_para[i].dma_control.src_burst_size) \
		| DMA_CTRL_SRC_BURST_LENGTH((chan_para[i].dma_control.src_burst_len )) \
		| DMA_CTRL_DEST_BURST_SIZE(chan_para[i].dma_control.dest_burst_size) \
		| DMA_CTRL_DEST_BURST_LENGTH((chan_para[i].dma_control.dest_burst_len ))\
		| DMA_CTRL_INTERRUPT_SEL(ZX29_DMA_INT_SEL)\
		| DMA_CTRL_ENABLE(DMA_ENABLE);
		if(chan_para[i].dma_control.irq_mode > DMA_ALL_IRQ_DISABLE)
			temp_dma_lli_params[i].control |= DMA_CTRL_IRQ_MOD(DMA_ERR_IRQ_ENABLE);
		if(chan_para[i].link_addr > 0)
			/* non-terminal: chain to the next descriptor slot (phys addr) */
			temp_dma_lli_params[i].link_addr = dma_lli_phy_addr[channel_id] + sizeof(dma_lli_param)*(i+1);
		else
		{
			/* terminal descriptor: enable completion interrupt, end chain */
			if(chan_para[i].dma_control.irq_mode > DMA_ALL_IRQ_DISABLE)
				temp_dma_lli_params[i].control |= DMA_CTRL_IRQ_MOD(DMA_ALL_IRQ_ENABLE);
			temp_dma_lli_params[i].link_addr = 0;
		}
		// i++;
	}while(chan_para[i++].link_addr);	/* note: i advances in the test, after the body used it */
	dma_sync_lli_for_device(channel_id);
}
/*
 * Validate the client's dma_channel_def (smuggled in through the
 * dma_slave_config pointer) and build the channel's LLI chain.
 * The channel must already be allocated (is_used set).
 * Returns 0 on success, -EINVAL on bad arguments or an unowned channel.
 */
signed int zx29_dma_config(struct dma_chan *chan,
		struct dma_slave_config *cfg)
{
	struct zx29_dma_channel *dma_channel;
	dma_peripheral_id peripheral_id;
	unsigned int channel_id;
	dma_channel_def *chan_para;

	if (!cfg || !chan)
		return -EINVAL;

	dma_channel = to_zx29_dma_chan(chan);
	peripheral_id = dma_channel->peripheral_id;
	channel_id = get_channel_id(peripheral_id);
	/*
	 * FIX: get_channel_id() returns 0xff when the id has no table entry;
	 * indexing chan_config[] with it would read out of bounds.
	 */
	if (channel_id == 0xff)
		return -EINVAL;
	if (dma_dev.chan_config[channel_id].is_used == false)
		return -EINVAL;

	chan_para = (dma_channel_def *)cfg;
	/* reject any field outside its enum range before touching hardware */
	if (chan_para->dma_control.tran_mode>=DMA_TRAN_MOD_ALL\
		||chan_para->dma_control.irq_mode>=DMA_IRQMOD_ALL\
		||chan_para->dma_control.src_burst_size>=DMA_BURST_SIZE_ALL\
		||chan_para->dma_control.src_burst_len>=DMA_BURST_LEN_ALL\
		||chan_para->dma_control.dest_burst_size>=DMA_BURST_SIZE_ALL\
		||chan_para->dma_control.dest_burst_len>=DMA_BURST_LEN_ALL)
	{
		return -EINVAL;
	}
	/* config lli */
	dma_config_lli(channel_id, chan_para);
	/* config regs */
#if 0
	dma_dev.chan_config[channel_id].channel_callback = chan_para->callback;
	dma_dev.chan_config[channel_id].data = chan_para->data;
#endif
	return 0;//dma_set_chan_para((unsigned int)peripheral_id, chan_para);
}
signed int zx29_dma_start(unsigned int channel_id)
{
volatile dma_regs __iomem * pReg = dma_dev.reg;
if(channel_id >= DMA_CH_NUM)
{
BUG();
return -EINVAL;
}
dsb();
pReg->channel[channel_id].control |= DMA_CTRL_ENABLE(DMA_ENABLE);
return 0;
}
signed int zx29_dma_stop(unsigned int channel_id)
{
volatile dma_regs __iomem * pReg = dma_dev.reg;
if(channel_id >= DMA_CH_NUM)
return -EINVAL;
//pReg->channel[channel_id].control &= ~(DMA_CTRL_ENABLE(DMA_ENABLE));
pReg->channel[channel_id].control |= DMA_CTRL_FORCE_CLOSE(1);//change by gsn for linuxDMA
return 0;
}
/*
 * Read the channel's X-dimension transfer counter (xpara).
 * Returns -EINVAL for an out-of-range channel id.
 */
signed int zx29_dma_get_transfer_num(unsigned int channel_id)
{
	if (channel_id >= DMA_CH_NUM)
		return -EINVAL;

	return dma_dev.reg->channel[channel_id].xpara;
}
/* Program group arbitration order and mode; -EINVAL on out-of-range values. */
signed int zx29_dma_set_priority(dma_group_order groupOrder, dma_group_mode groupMode)
{
	dma_regs __iomem *regs = dma_dev.reg;

	if (groupOrder >= DMA_GROUP_ALL || groupMode >= DMA_MODE_ALL)
		return -EINVAL;

	regs->group_order = groupOrder;
	regs->arbit_mode = groupMode;
	return 0;
}
/*
 * dmaengine .tx_submit hook: assign a cookie only; the hardware is
 * actually started later from zx29_dma_issue_pending().
 */
static dma_cookie_t zx29_dma_tx_submit(struct dma_async_tx_descriptor *tx)
{
	dma_trace_submit(tx);
	return dma_cookie_assign(tx);
}
/*
 * dmaengine .device_alloc_chan_resources hook: claim the hardware
 * channel chosen by the filter and set up the reusable descriptor.
 * Returns the logical channel index, or a negative errno.
 */
static int zx29_dma_alloc_chan_resources(struct dma_chan *chan)
{
	struct zx29_dma_channel *zchan = to_zx29_dma_chan(chan);
	int channel = zx29_dma_request(zchan->req_peripheral_id);

	if (channel < 0)
		return channel;

	dma_async_tx_descriptor_init(&zchan->desc, chan);
	zchan->desc.tx_submit = zx29_dma_tx_submit;
	/* the descriptor is ready */
	async_tx_ack(&zchan->desc);
	return channel;
}
/* dmaengine .device_free_chan_resources hook: reset and release the channel. */
void zx29_dma_free_chan_resource(struct dma_chan *chan)
{
	dma_reset_chan(to_zx29_dma_chan(chan));
}
/*
 * dmaengine .device_prep_dma_cyclic hook. The per-period LLI chain must
 * already have been built by zx29_dma_config(); this turns it into a
 * ring (last descriptor links back to the first), enables the completion
 * interrupt on every period, and loads descriptor 0 into the registers.
 * Returns the channel's descriptor, or NULL on invalid arguments.
 */
static struct dma_async_tx_descriptor *zx29_prep_dma_cyclic(
		struct dma_chan *chan, dma_addr_t buf_addr, size_t buf_len,
		size_t period_len, enum dma_transfer_direction direction,
		unsigned long context)
{
	struct zx29_dma_channel *dma_channel = to_zx29_dma_chan(chan);
	struct dma_async_tx_descriptor *desc = &dma_channel->desc;
	unsigned int channel_id = get_channel_id(dma_channel->peripheral_id);
	size_t num_periods;
	size_t i;
	dma_lli_param *lli;

	if (channel_id >= DMA_CH_NUM)
		return NULL;
	/*
	 * FIX: period_len == 0 divided by zero, and buf_len < period_len made
	 * num_periods == 0, so lli[num_periods - 1] wrote out of bounds.
	 * Also cap the ring at the descriptor table size.
	 */
	if (period_len == 0 || buf_len < period_len)
		return NULL;
	num_periods = buf_len / period_len;
	if (num_periods > MAX_LLI_PARA_CNT)
		return NULL;

	// change by gsn for linuxDMA
	//if(dma_channel->status == DMA_IN_PROGRESS)
	//return NULL;
	dma_channel->status = DMA_IN_PROGRESS;

	lli = dma_lli_params[channel_id];
	dma_sync_lli_for_cpu(channel_id);
	/* interrupt on every period so the client callback fires each cycle */
	for (i = 0; i < num_periods; i++)
		lli[i].control |= DMA_CTRL_IRQ_MOD(DMA_ALL_IRQ_ENABLE);
	/* close the ring: last descriptor links back to the first */
	lli[num_periods - 1].link_addr = dma_lli_phy_addr[channel_id];
	dma_sync_lli_for_device(channel_id);

	dma_channel->cyclic = 1;
	desc->callback = NULL;
	desc->callback_param = NULL;
	dma_set_chan_para(dma_channel->peripheral_id);
	return desc;
}
/*
 * dmaengine .device_prep_interleaved_dma hook. The LLI chain was built
 * by zx29_dma_config(); this just marks the channel busy and loads
 * descriptor 0 into the registers (enable bit still clear).
 */
static struct dma_async_tx_descriptor *zx29_prep_dma_interleaved(
		struct dma_chan *chan,
		struct dma_interleaved_template *xt,
		unsigned long flags)
{
	struct zx29_dma_channel *zchan = to_zx29_dma_chan(chan);

	// change by gsn for linuxDMA
	//if(dma_channel->status == DMA_IN_PROGRESS)
	//return NULL;
	zchan->status = DMA_IN_PROGRESS;
	zchan->desc.callback = NULL;
	zchan->desc.callback_param = NULL;
	dma_set_chan_para(zchan->peripheral_id);
	return &zchan->desc;
}
/* dmaengine .device_terminate_all hook: force-close the hardware channel. */
static int zx29_dma_terminate_all(struct dma_chan *chan)
{
	return dma_disable_chan(to_zx29_dma_chan(chan));
}
/* dmaengine .device_issue_pending hook: actually start the transfer. */
static void zx29_dma_issue_pending(struct dma_chan *chan)
{
	struct zx29_dma_channel *zchan = to_zx29_dma_chan(chan);

	dma_trace_pending(zchan->peripheral_id);
	zx29_dma_start(zchan->peripheral_id);
}
/* Snapshot of the controller's working-status register. */
unsigned int zx29_dma_get_status(void)
{
	return dma_dev.reg->working_status;
}
/*
 * dma_request_channel() filter. @param is the requested dma_peripheral_id.
 * For DMA_CH_MEMORY any free channel with enable_mem2mem set qualifies;
 * otherwise the candidate must be the exact physical channel the
 * (possibly multiplexed) requester maps to, and must be free in both the
 * local table and the shared cross-core table. On a match the requested
 * id is remembered in req_peripheral_id for alloc_chan_resources().
 */
bool zx29_dma_filter_fn(struct dma_chan *chan, void *param)
{
	struct zx29_dma_channel * channel = to_zx29_dma_chan(chan);
	unsigned int channel_id = 0;
	dma_peripheral_id peri_id = 0;
	peri_id = get_real_peri_id((dma_peripheral_id)param);
	if(peri_id >= DMA_CH_NUM)
		return false;
	channel_id = get_channel_id(channel->peripheral_id);
	if(channel_id == 0xff)
		return false;
	if(peri_id == DMA_CH_MEMORY)
	{
		/* memory copy: accept any free mem2mem-capable channel */
		if ((dma_dev.chan_config[channel_id].is_used == false)&& \
		(dma_chan_check_lock(dma_dev.chan_config[channel_id].peripheral_id)==false)&& \
		(dma_dev.chan_config[channel_id].enable_mem2mem==true))
		{
			channel->req_peripheral_id = channel->peripheral_id;
			return true;
		}
		else
			return false;
	}
	if (channel->peripheral_id != peri_id)
		return false;
	if ((dma_dev.chan_config[channel_id].is_used == false)&& \
	(dma_chan_check_lock(dma_dev.chan_config[channel_id].peripheral_id)==false))
	{
		/* keep the original (pre-mux) id so the mux is programmed on alloc */
		channel->req_peripheral_id = (dma_peripheral_id)param;
		return true;
	}
	else
		return false;
}
EXPORT_SYMBOL(zx29_dma_filter_fn);
/*
 * Controller interrupt handler.
 * Any source/destination/configuration error is fatal: the offending
 * channels are marked DMA_ERROR and the kernel BUG()s. Terminal-count
 * interrupts are acknowledged per channel and completion is delivered
 * either inline (RT kernels / selected fast channels) or via the
 * channel tasklet. The loop re-reads the status registers until no new
 * completion was claimed, so interrupts raised while handling are not lost.
 */
irqreturn_t dma_Isr(int irq, void *dev)
{
	unsigned int need_continue = 0;
	unsigned int i;
	struct zx29_dma *dmac_ptr = dev;
	dma_regs __iomem * dma_reg=dmac_ptr->reg;
	volatile unsigned int control;
	volatile unsigned int raw_tc_int = dma_reg->raw_int_tc_status;
	volatile unsigned int raw_src_err_int = dma_reg->raw_int_src_err_status;
	volatile unsigned int raw_dest_err_int = dma_reg->raw_int_dest_err_status;
	volatile unsigned int raw_cfg_err_int = dma_reg->raw_int_cfg_err_status;
	volatile unsigned int tc_int = dma_reg->int_tc_status;
	unsigned int channel_id;
	/* error */
	if (raw_src_err_int!=0 || raw_dest_err_int!=0 || raw_cfg_err_int!=0)
	{
		for (i=0; i<DMA_CH_NUM; i++)
		{
			if ((raw_src_err_int|raw_dest_err_int|raw_cfg_err_int)&(0x01<<i))
			{
				channel_id = get_channel_id(i);
				if(channel_id >= DMA_CH_NUM)
					continue;
				dmac_ptr->dma_chan[channel_id].status = DMA_ERROR;
				dma_trace_err(i);
			}
		}
		BUG();
/*		dma_reg->raw_int_src_err_status |= raw_src_err_int ;
		dma_reg->raw_int_dest_err_status |= raw_dest_err_int ;
		dma_reg->raw_int_cfg_err_status |= raw_cfg_err_int ;
		return IRQ_HANDLED;*/
	}
	do
	{
		need_continue = 0;
		tc_int = dma_reg->int_tc_status;
		raw_tc_int = dma_reg->raw_int_tc_status;
		for (i = 0;(i< DMA_CH_NUM)&&(raw_tc_int!=0); i++)
		{
			if (raw_tc_int&(0x01<<i))
			{
				control = dma_reg->channel[i].control;
				channel_id = get_channel_id(i);
				/*dma_reg->raw_int_tc_status = (0x1<<i);*//*clear here may create error clear*/
				if(channel_id >= DMA_CH_NUM)
					continue;
				/*
				 * Claim the completion only if it targets this core's
				 * interrupt line, interrupts are enabled on the channel,
				 * and either the channel has shut down (enable bit clear)
				 * or it is cyclic with a pending masked TC interrupt.
				 */
				if(((control&DMA_CTRL_INTERRUPT_SEL(0xf))==DMA_CTRL_INTERRUPT_SEL(ZX29_DMA_INT_SEL))&&\
					(control&DMA_CTRL_IRQ_MOD(1))&&\
					( ((control&DMA_CTRL_ENABLE(1)) == 0) || ((dmac_ptr->dma_chan[channel_id].cyclic)&&(tc_int&(0x1<<i))) ) )
				{
					dma_reg->raw_int_tc_status = (0x1<<i);
					need_continue = 1;
					dma_trace_success(i);
					//channel_id = get_channel_id(i);
					dmac_ptr->dma_chan[channel_id].status = DMA_SUCCESS;
					if(dmac_ptr->dma_chan[channel_id].cyclic == 0)
					{
						dma_cookie_complete(&dmac_ptr->dma_chan[channel_id].desc);
					}
#ifdef CONFIG_PREEMPT_RT_FULL
					if (dmac_ptr->dma_chan[channel_id].desc.callback)
						dmac_ptr->dma_chan[channel_id].desc.callback(dmac_ptr->dma_chan[channel_id].desc.callback_param);
#else
					/* schedule tasklet on this channel */
					/* yu.dong@20240617 [T106BUG-641] SPI packet loss issue, increase kernel buffer and read all cached data away, no data loss start */
					/*
					 * FIX: '#ifdef A || B' is invalid -- #ifdef takes a single
					 * identifier, so the '|| _USE_VEHICLE_DC_REF' was ignored
					 * (with a warning) and only _USE_VEHICLE_DC was tested.
					 */
#if defined(_USE_VEHICLE_DC) || defined(_USE_VEHICLE_DC_REF)
					/* NOTE(review): these compare the logical channel_id against
					 * DMA_CH_* peripheral ids -- only valid while table order
					 * matches the enum; confirm against zx297520v3_dma.h. */
					if((channel_id == DMA_CH_UART0_RX) || (channel_id == DMA_CH_UART2_RX)||(channel_id == DMA_CH_SSP0_RX)||(channel_id == DMA_CH_SSP1_RX)){
						if (dmac_ptr->dma_chan[channel_id].desc.callback)
							dmac_ptr->dma_chan[channel_id].desc.callback(dmac_ptr->dma_chan[channel_id].desc.callback_param);
					}else
						tasklet_schedule(&dmac_ptr->dma_chan[channel_id].tasklet);
#else
					/* yu.dong@20240617 [T106BUG-641] SPI packet loss issue, increase kernel buffer and read all cached data away, no data loss end */
					tasklet_schedule(&dmac_ptr->dma_chan[channel_id].tasklet);
#endif
#endif
				}
			}
		}
	}while(need_continue);
	return IRQ_HANDLED;
}
#if ZX29_DMA_TEST
#define DMA_LLI_TEST 0
#if DMA_LLI_TEST
#define MEM_CPY_CNT (3)
#else
#define MEM_CPY_CNT (1)
#endif
#define MEM_TEST_COUNT (0x200)
static unsigned int dma_int_count = 0;
static unsigned char * test_buffer = NULL;
static dma_addr_t test_phy_addr;
static struct dma_chan * test_chan = NULL;
/*
 * Test completion callback: unmap the buffer, verify each source half
 * was copied to its destination half, then free the buffer and release
 * the channel.
 */
void dma_cb(struct zx29_dma_channel * chan)
{
	int i;
//	dma_sync_single_for_cpu(dma_dev.dma.dev, test_phy_addr, MEM_TEST_COUNT*2, DMA_BIDIRECTIONAL);
	dma_unmap_single(dma_dev.dma.dev, test_phy_addr, MEM_TEST_COUNT*2*MEM_CPY_CNT, DMA_BIDIRECTIONAL);
	for(i=0; i<MEM_CPY_CNT; i++)
	{
		/* each segment is [pattern | copy]; compare the two halves */
		if(memcmp(test_buffer+MEM_TEST_COUNT*2*i,
				test_buffer+MEM_TEST_COUNT+MEM_TEST_COUNT*2*i,
				MEM_TEST_COUNT))
		{
			pr_info("[DMA] m2m test copy failed(%d). \n", i+1);
		}
	}
	kfree(test_buffer);
	if (test_chan)
		dma_release_channel(test_chan);
	/* NOTE(review): printed even when a compare above failed */
	pr_info("[DMA] m2m test copy succeeded (%d). \n", ++dma_int_count);
}
/*
 * Allocate the test buffer and fill the source half of each segment
 * with a distinct byte pattern (0x11, 0x22, ...); the destination half
 * of each segment is left zeroed. BUG()s on allocation failure.
 */
static void *test_prepare_buff(size_t size)
{
	int i;
	/* alloc buffer */
	test_buffer = kzalloc(size, GFP_KERNEL);
	if (!test_buffer) {
		dev_err(dma_dev.dma.dev, "%s: could not alloc DMA memory\n",
			__func__);
		BUG();
	}
	pr_info("[DMA] m2m test alloc buffer (%x). \n", (unsigned int)test_buffer);
	/* prepare data */
	for(i=0; i<MEM_CPY_CNT; i++)
		memset(test_buffer+MEM_TEST_COUNT*2*i, 0x11+0x11*i, MEM_TEST_COUNT);
	return test_buffer;
}
/* Grab any mem2mem-capable slave channel through the driver's filter. */
static struct dma_chan *test_alloc_channel(void)
{
	dma_cap_mask_t caps;

	dma_cap_zero(caps);
	dma_cap_set(DMA_SLAVE, caps);

	return dma_request_channel(caps, zx29_dma_filter_fn, (void *)DMA_CH_MEMORY);
}
/*
 * test_dma_config - build and apply the m2m transfer descriptors.
 * @chan:     channel returned by test_alloc_channel().
 * @phy_addr: DMA address of the mapped test buffer.
 *
 * Builds MEM_CPY_CNT descriptors, each copying MEM_TEST_COUNT bytes from
 * the first half of its segment to the second half.  Every descriptor
 * links to the next (link_addr != 0) except the last one.  The array is
 * handed to the driver through the dma_slave_config pointer, matching
 * this driver's private config convention.
 */
static signed int test_dma_config(struct dma_chan *chan, dma_addr_t phy_addr)
{
	dma_channel_def cfg[MEM_CPY_CNT];
	dma_channel_def *d;
	int idx;

	memset(cfg, 0, sizeof(cfg));

	for (idx = 0; idx < MEM_CPY_CNT; idx++) {
		d = &cfg[idx];

		d->src_addr  = phy_addr + MEM_TEST_COUNT*2*idx;
		d->dest_addr = d->src_addr + MEM_TEST_COUNT;
		d->count     = MEM_TEST_COUNT;

		d->dma_control.tran_mode       = TRAN_MEM_TO_MEM;
		d->dma_control.src_burst_size  = DMA_BURST_SIZE_8BIT;
		d->dma_control.src_burst_len   = DMA_BURST_LEN_16;
		d->dma_control.dest_burst_size = DMA_BURST_SIZE_8BIT;
		d->dma_control.dest_burst_len  = DMA_BURST_LEN_16;
		d->dma_control.irq_mode        = DMA_ALL_IRQ_ENABLE;

		/* chain every descriptor except the final one */
		d->link_addr = (idx == MEM_CPY_CNT - 1) ? 0 : 1;
	}

	return dmaengine_slave_config(chan, (struct dma_slave_config *)&cfg);
}
//static
void dma_m2m_test(struct device *dev)
{
struct dma_async_tx_descriptor *desc =NULL;
struct zx29_dma_channel * zx29_chan = NULL;
unsigned char *p = NULL;
int ret = 0;
p = test_prepare_buff(MEM_TEST_COUNT*2*MEM_CPY_CNT);
/* alloc dma channel */
test_chan = test_alloc_channel();
if (!test_chan)
{
pr_info("[DMA]test request channel failed \n");
return;
}
/* map dma address */
test_phy_addr = dma_map_single(dma_dev.dma.dev, (void *)p, MEM_TEST_COUNT*2*MEM_CPY_CNT, DMA_BIDIRECTIONAL);
if (dma_mapping_error(dma_dev.dma.dev, test_phy_addr)) {
dev_err(dma_dev.dma.dev, "Failed to dma_map_single\n");
BUG();
}
/* config dma */
ret = test_dma_config(test_chan, test_phy_addr);
if(ret < 0)
printk("dmaengine_slave_config failed(%d)~~~~~~", ret);
/* start transfer */
zx29_chan = to_zx29_dma_chan(test_chan);
#if 0
desc = zx29_chan->dma_device->dma.device_prep_interleaved_dma(test_chan,NULL,0);
desc->callback = (dma_async_tx_callback)dma_cb;
desc->callback_param = (void *) zx29_chan;
#else
desc = test_chan->device->device_prep_interleaved_dma(test_chan,NULL,0);
desc->callback = (dma_async_tx_callback)dma_cb;
desc->callback_param = (void *) zx29_chan;
#endif
zx29_chan->zx29_dma_cookie = dmaengine_submit(desc);
dma_async_issue_pending(test_chan);
return ;
}
static ssize_t dma_m2m_show(struct device *dev,
struct device_attribute *attr, char *buf)
{
return sprintf(buf, "dma_int_count:%d\n", dma_int_count);
}
/*
 * dma_m2m_store - sysfs write: any write triggers one m2m self test.
 *
 * The written value is ignored; the whole input is consumed.
 */
static ssize_t dma_m2m_store(struct device *dev, struct device_attribute *attr,
		const char *buf, size_t count)
{
	dma_m2m_test(dev);

	/* consume the entire write */
	return count;
}
/* /sys/.../dma: read = test count, write = trigger one m2m test. */
static DEVICE_ATTR(dma, 0600, dma_m2m_show, dma_m2m_store);

static struct attribute *zx29_dma_attributes[] = {
	&dev_attr_dma.attr,
	NULL,
};

static const struct attribute_group zx29_dma_attribute_group = {
	/* array of struct attribute * decays to the expected type;
	 * the original's explicit cast was unnecessary and removed. */
	.attrs = zx29_dma_attributes,
};
#endif
/*
 * dma_init_channels - populate the dmaengine channel list.
 *
 * Binds every entry of dma_chan_config to a zx29_dma_channel: records
 * its hardware peripheral id, points it back at the device, initializes
 * its cookie and per-channel tasklet, and links it onto the dmaengine
 * channel list.
 */
static void dma_init_channels(void)
{
	struct zx29_dma_channel *chan;
	int idx;

	dma_dev.chan_config = dma_chan_config;
	dma_dev.channel_count = ARRAY_SIZE(dma_chan_config);

	INIT_LIST_HEAD(&dma_dev.dma.channels);

	for (idx = 0; idx < dma_dev.channel_count; idx++) {
		chan = &dma_dev.dma_chan[idx];

		chan->peripheral_id = dma_dev.chan_config[idx].peripheral_id;
		chan->dma_device    = &(dma_dev);
		chan->chan.device   = &(dma_dev.dma);

		dma_cookie_init(&chan->chan);
		tasklet_init(&chan->tasklet, dma_tasklet, (unsigned long)(chan));

		list_add_tail(&chan->chan.device_node, &dma_dev.dma.channels);
	}
}
/* 32-bit streaming DMA mask installed on the platform device in
 * dma_register_device(); must outlive the device, hence file scope. */
static u64 general_dma_mask = DMA_BIT_MASK(32);
static int dma_init_resource(struct platform_device* pdev)
{
int ret = 0;
int irq;
int i;
struct device_node *np = pdev->dev.of_node;
/* registers */
dma_dev.reg = (dma_regs *)of_iomap(np, 0);
if ( !dma_dev.reg ) {
dev_err(&pdev->dev, "[DMA]Cannot get IORESOURCE_MEM\n");
return -ENOENT;
}
dma_pub_configs = (dma_pub_config *)(dma_regs *)of_iomap(np, 1);
if ( !dma_pub_configs ) {
dev_err(&pdev->dev, "[DMA]Cannot get IORESOURCE_MEM 1\n");
return -ENOENT;
}
// only for test
// memset((u8 *)dma_pub_configs, 0, 0x80);
/* irq */
irq = irq_of_parse_and_map(np, 0);
if( !irq ) {
dev_err(&pdev->dev, "[DMA]Cannot get IORESOURCE_IRQ\n");
return -ENOENT;
}
dma_dev.reg->irq_type = 0xF; /* high level for all cores */
ret = request_irq(irq, dma_Isr, IRQF_NO_THREAD, "zx29dma", &dma_dev);
if(ret)
return ret;
/* memory for lli */
for(i=0; i<ARRAY_SIZE(dma_chan_config); i++)
{
dma_lli_params[i] = kzalloc(MAX_LLI_PARAMS_CNT, GFP_KERNEL);
if (!dma_lli_params[i]) {
int j;
dev_err(&pdev->dev, "[DMA]%s: could not alloc memory for lli[%d].\n",
__func__, i);
for(j=0; j<i; j++)
{
dma_unmap_single(&pdev->dev, dma_lli_phy_addr[j], MAX_LLI_PARAMS_CNT, DMA_BIDIRECTIONAL);
kfree(dma_lli_params[j]);
dma_lli_phy_addr[j]=0;
dma_lli_params[j]=NULL;
}
return -ENOENT;
}
dma_lli_phy_addr[i] = dma_map_single(&pdev->dev, dma_lli_params[i], MAX_LLI_PARAMS_CNT, DMA_BIDIRECTIONAL);
}
return 0;
}
static int dma_register_device(struct platform_device* pdev)
{
dma_cap_zero(dma_dev.dma.cap_mask);
dma_cap_set(DMA_SLAVE, dma_dev.dma.cap_mask);
dma_cap_set(DMA_CYCLIC, dma_dev.dma.cap_mask);
dma_cap_set(DMA_INTERLEAVE, dma_dev.dma.cap_mask);
dma_dev.dma.device_alloc_chan_resources = zx29_dma_alloc_chan_resources;
dma_dev.dma.device_free_chan_resources = zx29_dma_free_chan_resource;
dma_dev.dma.device_tx_status = zx29_dma_tx_status;
dma_dev.dma.device_config = zx29_dma_config;
dma_dev.dma.device_terminate_all = zx29_dma_terminate_all;
dma_dev.dma.device_prep_dma_cyclic = zx29_prep_dma_cyclic;
dma_dev.dma.device_prep_interleaved_dma = zx29_prep_dma_interleaved;
dma_dev.dma.device_issue_pending = zx29_dma_issue_pending;
dma_dev.dma.src_addr_widths = BIT(DMA_SLAVE_BUSWIDTH_1_BYTE) |
BIT(DMA_SLAVE_BUSWIDTH_2_BYTES) |
BIT(DMA_SLAVE_BUSWIDTH_4_BYTES) |
BIT(DMA_SLAVE_BUSWIDTH_8_BYTES);
dma_dev.dma.dst_addr_widths = BIT(DMA_SLAVE_BUSWIDTH_1_BYTE) |
BIT(DMA_SLAVE_BUSWIDTH_2_BYTES) |
BIT(DMA_SLAVE_BUSWIDTH_4_BYTES) |
BIT(DMA_SLAVE_BUSWIDTH_8_BYTES);
dma_dev.dma.directions = BIT(DMA_DEV_TO_MEM) | BIT(DMA_MEM_TO_DEV) | BIT(DMA_MEM_TO_MEM);
pdev->dev.coherent_dma_mask = DMA_BIT_MASK(32);
pdev->dev.dma_mask = &general_dma_mask;
dma_dev.dma.dev = &pdev->dev;
return dma_async_device_register(&dma_dev.dma);
}
/*
 * zx29_dma_probe - platform probe: acquire resources, register with dmaengine.
 * @pdev: the matched platform device.
 *
 * Fix vs. original: the real error code from dma_register_device() is
 * propagated instead of being flattened to -EINVAL.
 *
 * NOTE(review): resources acquired by dma_init_resource() are not
 * released when registration fails — acceptable for a subsys_initcall'd
 * driver with no remove path, but worth confirming.
 *
 * Returns 0 on success or a negative errno.
 */
static int zx29_dma_probe(struct platform_device* pdev)
{
	int ret = 0;

	/* resource */
	ret = dma_init_resource(pdev);
	if (ret) {
		pr_info("[DMA]get resource failed!\n");
		return ret;
	}

	/* channel info */
	dma_init_channels();

	/* register device */
	ret = dma_register_device(pdev);
	if (ret) {
		dev_info(dma_dev.dma.dev, "[DMA]unable to register\n");
		return ret;
	}

	pr_info("[DMA]zx297520v DMA initialized\n");
	return 0;
}
/* Device-tree match table: binds this driver to the SoC's DMA node. */
static const struct of_device_id zx29_dma_dt_ids[] = {
{ .compatible = "arm,zx297520v3-dma" },
{}
};
MODULE_DEVICE_TABLE(of, zx29_dma_dt_ids);
/* Platform driver: no remove callback — the controller lives for the
 * lifetime of the system (registered via subsys_initcall below). */
struct platform_driver zx29_dma_driver = {
.driver = {
.name = "zx29_dma",
.of_match_table = of_match_ptr(zx29_dma_dt_ids),
},
.probe = zx29_dma_probe,
};
/* Register the platform driver early (subsys_initcall) so dependent
 * peripherals (UART/SSP DMA clients) find the engine at their probe. */
static int __init zx29_dma_driver_init(void)
{
return platform_driver_register(&zx29_dma_driver);
}
subsys_initcall(zx29_dma_driver_init);
/**
 * "/sys/zte/test/dma_test"
 */
extern struct kobject *zx_test_kobj;

/*
 * zx_dma_test_init - expose the m2m test attribute under the shared
 * zx test kobject (compiled in only with ZX29_DMA_TEST).
 *
 * Fix vs. original: a sysfs_create_group() failure was silently
 * dropped (only success was logged, at debug level); failures are now
 * reported.  Always returns 0 — the test interface is optional.
 */
int __init zx_dma_test_init(void)
{
#if ZX29_DMA_TEST
	int ret;

	ret = sysfs_create_group(zx_test_kobj, &zx29_dma_attribute_group);
	if (ret)
		pr_warn("[DMA] create test dma sysfs interface failed (%d).\n", ret);
	else
		pr_debug("[DEBUG] create test dma sysfs interface OK.\n");
#endif
	return 0;
}