blob: 5701394e4cccde420e6ad9ff9460cacb1f625972 [file] [log] [blame]
#include "kal_general_types.h"
#include "kal_public_api.h"
#include "kal_hrt_api.h"
#include "kal_public_defs.h"
#include "us_timer.h"
#include "sync_data.h"
#include "sleepdrv_interface.h"
#include "drv_features.h"
#include "drv_comm.h"
#include "drv_msgid.h"
#include "stack_msgs.h"
#include "stack_ltlcom.h"
#include "intrCtrl.h"
#include "idc_internal.h"
#include "dcl_idc.h"
#include "drv_idc.h"
#include "idc_reg.h"
#include "svc_sap.h"
/* LL/SC (load-linked / store-conditional) primitives used to build the
 * atomic bit-locks below. The "ll"/"sc" mnemonics are MIPS-style; the
 * store-conditional fails (returns 0) when another core touched the word
 * between the LL and the SC, forcing the caller to retry.
 * __SMP_LL: load-linked read of *LL_PTR (opens the atomic sequence). */
#define __SMP_LL(LL_PTR) ({kal_uint32 __ll_ret;void *__ll_p=(void*)(LL_PTR);\
__asm__ __volatile__("ll %0, (%1)" \
: "=&d" (__ll_ret): "d" (__ll_p):);__ll_ret;})
/* __SMP_SC: store-conditional of SC_V into *LL_PTR; evaluates to non-zero on
 * success, 0 when the reservation was lost and the LL/SC loop must retry.
 * The "memory" clobber keeps the compiler from caching *LL_PTR around it. */
#define __SMP_SC(LL_PTR, SC_V) ({kal_uint32 __sc_ret=(SC_V);void *__ll_p=(void*)(LL_PTR);\
__asm__ __volatile__(\
"sc %0, (%2)\n"\
: "=d" (__sc_ret):"0" (__sc_ret), "d" (__ll_p): "cc","memory");__sc_ret;})
/* Clock frequencies in MHz, used for tick<->time conversion. */
#define LTE_TIMER_MHZ (61.44)
#define LTE_CLOCK_MHZ (52)
/* Wrap-around value of the PHY timestamp counter used by the event
 * scheduler (30-bit FRC on MD97/MD97P, 20-bit counter otherwise). */
#if defined(__MD97__) || defined(__MD97P__)
#define IDC_PHYTIME_WRAPPING 0x3FFFFFFF //frc wrap
#else
#define IDC_PHYTIME_WRAPPING 0xFFFFF
#endif
/* Wrap-aware comparison: true when latter_offset lies no more than `time`
 * ticks after previous_offset, allowing for one counter wrap.
 * NOTE(review): when latter_offset == previous_offset neither branch of
 * WITHIN nor EXCEED is true — confirm callers never hit the equal case. */
#define TIME_DIFF_WITHIN(latter_offset, previous_offset, time)\
(((latter_offset > previous_offset) \
&&((latter_offset - previous_offset) <= time)) \
||((latter_offset < previous_offset) \
&&((IDC_PHYTIME_WRAPPING - previous_offset + latter_offset) <= time)))
/* Wrap-aware comparison: true when latter_offset lies more than `time`
 * ticks after previous_offset, allowing for one counter wrap. */
#define TIME_DIFF_EXCEED(latter_offset, previous_offset, time)\
(((latter_offset > previous_offset) \
&&((latter_offset - previous_offset) > time)) \
||((latter_offset < previous_offset) \
&&((IDC_PHYTIME_WRAPPING - previous_offset + latter_offset) > time)))
#ifdef ATEST_DRV_ENABLE
/* Driver-test build: print the failing location and spin forever so the
 * failure stays visible on the debug console instead of resetting.
 * Fixes vs. previous version:
 *  - the argument is parenthesized everywhere, so IDC_ASSERT(a == b) no
 *    longer mis-parses as ((!a) == b);
 *  - the dbg_print format string had a dangling "%" with no conversion
 *    specifier (undefined behavior) — now "%d". */
#define IDC_ASSERT(statement) \
do { if(!(statement)) { \
dbg_print("%s: line %d, statement = %d", __FUNCTION__, __LINE__, (statement));\
dbg_flush();\
while(1);\
}\
}while(0)
#else
/* Production build: delegate to the platform ASSERT macro. */
#define IDC_ASSERT(statement) ASSERT(statement)
#endif
#ifdef ATEST_DRV_ENABLE
/* Driver-test build: force the 4G IDC feature on, stub out dhl_trace() and
 * route DT_IDC_PRINTF to the debug console with an immediate flush. */
#define __4G_IDC__
#define dhl_trace(...)
#define DT_IDC_PRINTF(x...) \
do{ \
dbg_print(x); \
dbg_flush(); \
}while(0)
#else /*ATEST_DRV_ENABLE*/
/* Normal build: use the DHL trace framework instead of console prints. */
#include "dhl_trace.h"
#include "idc_trace.h"
#endif
/* Busy-poll until poll_statement becomes false; asserts after roughly 1e8
 * spin iterations to catch a hardware status bit that never clears. */
#define POLL_STATUS(poll_statement) do{kal_uint32 i=0, __cnt=0;while(poll_statement){for(i=0;i<10;i++){__cnt++;}if(100000000<__cnt){IDC_ASSERT(0);}}}while(0)
#if defined(__MD93__)
/* Singleton driver state for the IDC port. The positional initializer must
 * match the chip-specific idc_struct_t layout; the {0, len_8, sb_1, pa_none}
 * group presumably fills the DCB (baud/data/stop/parity) member — the same
 * field order drv_idc_set_dcb_config programs. Port starts IDC_CLOSED. */
idc_struct_t idc_port = {0, IDC_CLOSED, KAL_FALSE, IDC_PLAN, 0, 0, {0}, {0}, 0, 0, 0, 0, {0}, {0}, 0, 0, {0, len_8, sb_1, pa_none}, IDC_INTERNAL_PIN};
#elif defined(__MD95__) || defined(__MD97__) || defined(__MD97P__)
idc_struct_t idc_port = {0, IDC_CLOSED, KAL_FALSE, IDC_PLAN, 0, 0, {0}, {{0}}, {0}, {0}, {0}, 0, 0, 0, 0, 0, {0}, {0}, 0, 0, 0, 0, 0, 0, {0, len_8, sb_1, pa_none}, IDC_INTERNAL_PIN};
#endif
/* ---- ILM (inter-layer message) buffering ---- */
IDC_ILM_MSG_T ilm_buf[4] = {{0}, {0}};
kal_uint32 ilm_num = 0;
/* Scratch buffer for kal_sprintf debug strings. */
kal_char idc_dbg_str[100];
/* ---- RX path bookkeeping / debug history ---- */
kal_bool idc_read_RBR = KAL_FALSE;
kal_uint8 idc_rx_count = 0;
kal_uint32 idc_new_cmd_error_cnt = 0;
kal_uint8 idc_rx_history[20] = {0};
kal_uint32 idc_rx_history_time[20] = {0};
kal_uint8 idc_lisr_count, idc_hisr_count = 0;
kal_uint32 idc_hisr_time[20] = {0};
/* Snapshots of UART interrupt/line-status registers for post-mortem debug. */
kal_uint16 IIR_L, IIR_H, IER_L, LSR_L, LSR_H;
kal_bool idc_rx_suspend = KAL_FALSE;
kal_uint8 idc_rx_suspend_timer = 0;
/* KAL_TRUE while drv_idc_conn_txrx_count() statistics are being gathered. */
kal_bool idc_count_start = KAL_FALSE;
kal_bool idc_ilm_on = KAL_TRUE;
kal_bool idc_ilm_trigger = KAL_FALSE;
kal_uint32 idc_cmd_byte_count = 0;
/* volatile: written from LISR/HISR context, read elsewhere. */
volatile kal_bool idc_in_hisr = KAL_FALSE, idc_in_pm_hisr = KAL_FALSE;
volatile kal_bool idc_in_close = KAL_FALSE;
/* 802.11 coexistence TX/RX counters (see drv_idc_conn_txrx_count). */
kal_uint32 idc_80211_tx_count = 0;
kal_uint32 idc_80211_rx_count = 0;
kal_uint32 idc_consys_tx_grant_ntf = 0;
volatile kal_uint32 stop_status_check = 0;
/* ---- Timestamps around polling/critical sections, kept for profiling ---- */
kal_uint32 poll_time = 0;
kal_uint32 poll_time_E = 0, before_poll_time_E = 0, after_poll_time_E = 0;
kal_uint32 poll_time_S = 0, before_poll_time_S = 0, after_poll_time_S = 0;
kal_uint32 idc_in_workaround = 0, idc_in_eventpending = 0;
kal_uint32 before_poll_time_U = 0, after_poll_time_U = 0;
kal_uint32 before_poll_time_SE = 0, after_poll_time_SE = 0;
kal_uint32 before_poll_time_SCH = 0, after_poll_time_SCH = 0;
kal_uint32 before_poll_time_GPS = 0, after_poll_time_GPS = 0;
kal_uint32 before_poll_time_UART_HISR = 0, after_poll_time_UART_HISR = 0;
kal_uint32 before_poll_time_STOP_EVENT = 0, after_poll_time_STOP_EVENT = 0;
kal_uint32 before_poll_time_SLP_NTY = 0, after_poll_time_SLP_NTY = 0;
/* ---- Stop-event bookkeeping / debug mirrors ---- */
kal_uint32 stop_event_bitmap = 0, stop_event_bitmap32_0_15 = 0, stop_event_bitmap32_16_31 = 0;
kal_uint32 event_status_0_15 = 0, event_status_16_31 = 0, expire_event_status_0_15 = 0, expire_event_status_16_31 = 0;
kal_uint32 stop_event_bitmap_debug = 0, stop_event_bitmap32_0_15_debug = 0, stop_event_bitmap32_16_31_debug = 0;
kal_uint8 sram_wrap = 0, event_idx_wrap = 0;
IDC_ILM_MSG_T ilm;
kal_bool new_cmd_flag = KAL_FALSE, old_cmd_flag = KAL_FALSE, ilm_stage = KAL_FALSE;
/**********Colgin**************/
/* GPS L1/L5 coexistence-with-LTE/NR state flags and bitmaps. */
kal_bool GPS_L1_LTE_FLAG = KAL_FALSE;
kal_bool GPS_L5_LTE_FLAG = KAL_FALSE;
kal_bool GPS_L1_NR_FLAG = KAL_FALSE;
kal_bool GPS_L5_NR_FLAG = KAL_FALSE;
kal_uint8 GPS_L1_L5_LTE_BM = 0;
kal_uint8 GPS_L1_L5_NR_BM = 0;
kal_uint8 GPS_L1_L5_ALL_BM = 0;
kal_uint8 GPS_LTE_NR_ALL_BM = 0;
/**********MD97_PETRUS*********/
/* One-shot init/ownership flags, guarded by the KAL_ITC_IDC lock. */
kal_bool SET_PIN_FLAG = KAL_FALSE;
kal_bool LTE_FLAG = KAL_FALSE;
kal_bool NR_FLAG = KAL_FALSE;
kal_bool IDC_INIT_FLAG = KAL_FALSE;
kal_bool ACTIVATE_FLAG = KAL_FALSE;
kal_bool PM_INIT_FLAG = KAL_FALSE;
kal_bool AUTO_TX_CON_INIT_FLAG = KAL_FALSE;
kal_bool AUTO_TX_EN_INIT_FLAG = KAL_FALSE;
kal_bool DR_ISSUE_FLAG = KAL_FALSE;
kal_uint32 DR_ISSUE_FAIL_CNT = 0;
/* Rolling counters of successful IDC commands (index advanced per init). */
kal_uint32 IDC_CMD_SUCCESS_CNT[10] = {0};
kal_uint32 IDC_CMD_SUCCESS_CNT_IDX = 0;
kal_uint8 idc_sleep_handle;
/* LTE timer register block (base address and read registers). */
#define LTE_TIMER_BASE 0xA6090000
#define LTE_TIMER_START LTE_TIMER_BASE
#define LTE_TIMER_FN_READ0 (LTE_TIMER_BASE + 0x10)
#define LTE_TIMER_STIME_READ0 (LTE_TIMER_BASE + 0x14)
/* Bitmask of IDC control functions currently executing (re-entrancy guard,
 * see idc_ctrl_enter/idc_ctrl_leave). */
static kal_atomic_uint32 idc_flag = 0;
#define IDC_CTRL_LOCK 0xFFFFFFFF
/* Bit-lock word for the LL/SC locks below; bit indices from the enum. */
static volatile kal_atomic_uint32 idc_drv_atom_lock=0;
/* Bit positions within idc_drv_atom_lock; each value names one lock. */
enum{
IDC_ATLOCK_PWR_UPDATE,//= 0
IDC_ATLOCK_PWR_LISR, //= 1
IDC_ATLOCK_PWR_HISR, //= 2
IDC_ATLOCK_ISR_PINCFG, //= 3
IDC_ATLOCK_L1_L5_GPS, //= 4
IDC_ATLOCK_SINGLE_GPS // = 5
};
//return old value of lock
//return 0 : get lock successful
//return not 0: get lock fail
/* Single LL/SC attempt loop to set bit lock_idx of *lock_v.
 * Returns 0 when the bit was set by us (lock acquired); returns the bit mask
 * when the bit was already set (lock held by someone else). The loop retries
 * only when the store-conditional loses its reservation. */
static inline kal_uint32 _do_atomic_try_lock(volatile kal_atomic_uint32 *lock_v, kal_uint8 lock_idx){
const kal_uint32 lock_map=1<<lock_idx;
kal_uint32 old_v = 0;
do{
old_v=__SMP_LL(lock_v);
if(old_v&lock_map){
return lock_map; // already locked by another owner
}
old_v|=lock_map;
}while(0==__SMP_SC(lock_v,old_v)); // retry if reservation was lost
return 0;
}
/* Try once to take bit lock_idx of *lock_v.
 * Returns KAL_TRUE when the lock was acquired, KAL_FALSE when the bit is
 * visibly set already (cheap plain-read pre-check) or the LL/SC attempt
 * reports the lock as held. */
kal_bool _idc_atomic_try_lock(volatile kal_atomic_uint32 *lock_v, kal_uint8 lock_idx){
    const kal_uint32 lock_bit = (kal_uint32)1 << lock_idx;

    /* Cheap read first: skip the LL/SC sequence if the bit is already set. */
    if (((*lock_v) & lock_bit) != 0) {
        return KAL_FALSE;
    }
    return (0 == _do_atomic_try_lock(lock_v, lock_idx)) ? KAL_TRUE : KAL_FALSE;
}
/* Spin until bit lock_idx of *lock_v is acquired; executes a `pause` hint
 * between attempts to reduce pressure while busy-waiting. */
void _idc_atomic_lock(volatile kal_atomic_uint32 *lock_v, kal_uint8 lock_idx){
while(_do_atomic_try_lock(lock_v,lock_idx)){
__asm__ __volatile__("pause \n");
}
}
/* Release bit lock_idx of *lock_v with an LL/SC loop.
 * NOTE(review): miu_syncn(4) appears to act as a write/drain barrier so that
 * stores made under the lock are visible before the release — confirm its
 * exact semantics against the platform docs.
 * If the bit is already clear, the loop exits without storing (unlock of an
 * unheld lock is a silent no-op). */
void _idc_atomic_unlock(volatile kal_atomic_uint32 *lock_v, kal_uint8 lock_idx){
kal_uint32 old_v,lock_map=1<<lock_idx,tmp;
miu_syncn(4);
do{
old_v=__SMP_LL(lock_v);
tmp=(~lock_map)&old_v; // value with our bit cleared
old_v&=lock_map;
if(0==old_v){break;} // bit already clear: nothing to release
}while(0==__SMP_SC(lock_v,tmp)); // retry if reservation was lost
}
/* Re-entrancy guard: atomically set func_flag bits in idc_flag on entry to a
 * control function. If any of those bits were already set, another context is
 * inside the same function — log and assert. */
void idc_ctrl_enter(kal_uint32 func_flag){
kal_uint32 old_flag = kal_atomic_set_bitmask_return(&idc_flag, func_flag);
if(old_flag & func_flag){
#if !defined(ATEST_DRV_ENABLE)
MD_TRC(IDC_FUN_ENTER_CONCURRENTLY_MSG,old_flag, func_flag);
#else
kal_sprintf(idc_dbg_str, "drv_idc: the idc ctrl func entering concurrently(%X, %X)\r\n", old_flag, func_flag);
DT_IDC_PRINTF(idc_dbg_str);
#endif
IDC_ASSERT(0);
}
}
/* Counterpart of idc_ctrl_enter: atomically clear func_flag bits on exit and
 * verify enter/leave pairing. The global IDC_CTRL_LOCK (all bits) must leave
 * exactly as it entered; a sub-function leave must find all of its own bits
 * still set in the pre-clear value. */
void idc_ctrl_leave(kal_uint32 func_flag){
kal_uint32 old_flag = kal_atomic_clear_bitmask_return(&idc_flag, func_flag);
if(((func_flag == IDC_CTRL_LOCK) || (old_flag == IDC_CTRL_LOCK))?
(func_flag != old_flag): //main enter should match main leave
((func_flag & old_flag) != func_flag)){ //sub-func leave should exist its enter
#if !defined(ATEST_DRV_ENABLE)
MD_TRC(IDC_LEAVE_FUN_NOT_MATCH_MSG,func_flag, old_flag);
#else
kal_sprintf(idc_dbg_str, "drv_idc: the power ctrl leave func(%x) don't match enter func(%x)\r\n", func_flag, old_flag);
DT_IDC_PRINTF(idc_dbg_str);
#endif
IDC_ASSERT(0);
}
}
/* Start or stop the 802.11 coexistence TX/RX statistics window.
 * is_start == KAL_TRUE : zero the counters and begin counting.
 * otherwise            : stop counting and log the accumulated totals. */
void drv_idc_conn_txrx_count(kal_bool is_start)
{
    if (is_start != KAL_TRUE)
    {
        /* End of window: freeze the counters and report them. */
        idc_count_start = KAL_FALSE;
        MD_TRC(IDC_END_COUNT_MSG,idc_80211_tx_count, idc_80211_rx_count);
        return;
    }
    /* Start of window: reset statistics after announcing the start. */
    MD_TRC(IDC_START_COUNT_MSG,);
    idc_count_start = KAL_TRUE;
    idc_80211_tx_count = 0;
    idc_80211_rx_count = 0;
}
/* Bring up the IDC UART block: ungate the relevant clocks, program the baud
 * rate and RX FIFO threshold, and on newer chips set the scheduler SRAM wrap
 * index / M2C bridge. Register write order follows the HW bring-up sequence. */
void drv_idc_init_uart(void)
{
#if !defined(ATEST_DRV_ENABLE)
MD_TRC(IDC_UART_INIT_MSG,);
#else
kal_sprintf(idc_dbg_str, "drv_idc: IDC UART Init\n\r");
//DT_IDC_PRINTF(idc_dbg_str);
#endif
#if (!defined(__MD97__)) && (!defined(__MD97P__))
// Open Clock Gating of LTE_TIMER
DRV_WriteReg32(MDL1AO_PDN_CLR, PDN_LTE_TMR_MASK);
#endif
// Open Clock Gating of IDC_UART
DRV_WriteReg32(MDL1AO_PDN_CLR, PDN_IDC_UART_MASK);
// Open Clock Gating of IDC_CTRL
DRV_WriteReg32_NPW(MDL1AO_PDN_CLR, PDN_IDC_CTRL_MASK);
// Initialize baud rate (only 4 Mbaud is supported, see drv_idc_set_baudrate)
drv_idc_set_baudrate(4000000);
// Initialize IDC UART FIFO threshold
drv_idc_set_fifo_trigger(1);
//remove beacause HW limitation can't CLR RXFIFO simultaneously
//DRV_WriteReg32_NPW(IDC_UART_FCR, IDC_UART_FCR_RXTRIG | IDC_UART_FCR_FIFOINI);
//MD_TRC(IDC_CLEAN_RXFIFO_MSG,__FUNCTION__);
#if !defined(MT6297)
//Init SRAM wrap start_idx, idx 0~11 for immediate_event, rest for schedule_event
drv_idc_set_sram_wrap_idx(IDC_SRAM_WRAP_IDX);
#endif
// Enable RX interrupt
//DRV_WriteReg32(IDC_UART_IER, IDC_UART_IER_ERBFI);
#if defined(CHIP10992)
// Initialize m2c bridge
drv_idc_init_m2c_bridge();
#endif
return;
}
/* Unmask the IDC interrupt lines (PM, and on non-MT6297 the auto-TX line).
 * LISR registration/sensitivity setup is commented out — presumably done
 * elsewhere in the platform interrupt tables; confirm before re-enabling. */
void drv_idc_init_isr(void)
{
//IRQ_Register_LISR(MD_IRQID_IDC_UART_IRQ, idc_uart_lisr, "IDC_UART");
//IRQSensitivity(MD_IRQID_IDC_UART_IRQ, KAL_FALSE);// KAL_TRUE :pulse trigger KAL_FALSE: level trigger
//IRQ_Register_LISR(MD_IRQID_IDC_PM_INT, idc_pm_lisr, "IDC_PM");
//IRQSensitivity(MD_IRQID_IDC_PM_INT, KAL_FALSE);
IRQUnmask(MD_IRQID_IDC_PM_INT);
#if !defined(MT6297)
//IRQ_Register_LISR(MD_IRQID_IDC_UART_TX_FORCE_ON, idc_auto_tx_lisr, "IDC_AUTO_TX");
//IRQSensitivity(MD_IRQID_IDC_UART_TX_FORCE_ON, KAL_FALSE);
IRQUnmask(MD_IRQID_IDC_UART_TX_FORCE_ON);
#endif
}
//Before in sleep mode, El1 call this function to let IDC know us in sleep mode
/* Serializes against the power LISR/HISR (and PWR_UPDATE on pre-MD97 chips),
 * switches the UART interrupt enables from RX to TX, and marks the port as
 * IDC_IN_SLEEP before releasing the locks in reverse order. */
void drv_sleep_notify(void)
{
_idc_atomic_lock(&idc_drv_atom_lock,IDC_ATLOCK_PWR_LISR);
_idc_atomic_lock(&idc_drv_atom_lock,IDC_ATLOCK_PWR_HISR);
#if (!defined(__MD97__)) && (!defined(__MD97P__))
_idc_atomic_lock(&idc_drv_atom_lock,IDC_ATLOCK_PWR_UPDATE);
#endif
// mask the UART IRQ around the IER update so the ISR never sees a partial mask
IRQMask(MD_IRQID_IDC_UART_IRQ);
// Turn off RX INT, turn on TX INT
DRV_WriteReg32_NPW(IDC_UART_IER, IDC_UART_IER_ETBEI);
IRQUnmask(MD_IRQID_IDC_UART_IRQ);
//modify idc_port main state
idc_port.main_state = IDC_IN_SLEEP;
MM_Sync(); // make the new state visible before the locks are released
#if (!defined(__MD97__)) && (!defined(__MD97P__))
_idc_atomic_unlock(&idc_drv_atom_lock,IDC_ATLOCK_PWR_UPDATE);
#endif
_idc_atomic_unlock(&idc_drv_atom_lock,IDC_ATLOCK_PWR_HISR);
_idc_atomic_unlock(&idc_drv_atom_lock,IDC_ATLOCK_PWR_LISR);
}
// After wake-up, IRQ is masked until first SF tick in order to prevent rx data error
/* Re-activate the IDC UART after sleep: clear the RX FIFO, re-enable the RX
 * interrupt and unmask the UART IRQ. The ACTIVATE_FLAG (under the ITC lock)
 * makes this a one-shot — a second caller just logs and returns. */
void drv_idc_uart_activate(void)
{
MD_TRC(IDC_ACTIVATE_MSG);
kal_hrt_take_itc_lock(KAL_ITC_IDC, KAL_INFINITE_WAIT);
if(ACTIVATE_FLAG == KAL_TRUE){
kal_hrt_give_itc_lock(KAL_ITC_IDC);
MD_TRC(IDC_TAKE_FLAG_FAIL_MSG, __FUNCTION__);
return;
}
else{
ACTIVATE_FLAG = KAL_TRUE;
MM_Sync();
kal_hrt_give_itc_lock(KAL_ITC_IDC);
}
IRQMask(MD_IRQID_IDC_UART_IRQ);
// Clean IDC UART FIFO, HW limitation can't CLR RXFIFO simultaneously
DRV_WriteReg32_NPW(IDC_UART_FCR, IDC_UART_FCR_RXTRIG | IDC_UART_FCR_FIFOINI);
MD_TRC(IDC_CLEAN_RXFIFO_MSG , __FUNCTION__);
// Enable RX interrupt
DRV_WriteReg32(IDC_UART_IER, IDC_UART_IER_ERBFI);
//Unmask IRQ
IRQUnmask(MD_IRQID_IDC_UART_IRQ);
MD_TRC(IDC_UNMASK_UART_ISR_MSG , __FUNCTION__);
}
// Bootup init: drv_idc_init(KAL_FALSE)
// Reinit after sleep: drv_idc_init(KAL_TRUE) -> drv_idc_uart_activate
/* Full (re)initialization of the IDC driver: resets the software event
 * scheduler state, re-programs the UART, and on cold boot also clears the
 * PM callbacks, unmasks ISRs, and enables the TX byte counter.
 * One-shot via IDC_INIT_FLAG under the ITC lock; repeated callers return. */
void drv_idc_init(kal_bool sleep_mode)
{
#if defined(__4G_IDC__)
kal_uint32 i = 0;
kal_hrt_take_itc_lock(KAL_ITC_IDC, KAL_INFINITE_WAIT);
if(IDC_INIT_FLAG == KAL_TRUE){
kal_hrt_give_itc_lock(KAL_ITC_IDC);
MD_TRC(IDC_NONSLEEP_MSG, sleep_mode, LTE_FLAG, NR_FLAG, IDC_INIT_FLAG);
return;
}
else{
IDC_INIT_FLAG = KAL_TRUE;
MM_Sync();
kal_hrt_give_itc_lock(KAL_ITC_IDC);
MD_TRC(IDC_SET_IDC_INIT_FLAG_MSG,__FUNCTION__);
}
// Mask IRQs before init
IRQMask(MD_IRQID_IDC_UART_IRQ);
IRQMask(MD_IRQID_IDC_PM_INT);
// Initial flags/config of IDC driver
// kal_mem_set(&idc_port, 0, sizeof(idc_port));
// idc_ctrl_enter(IDC_CTRL_LOCK); //remove because LTE/NR RAT flow
idc_port.schedule_state = IDC_PLAN;
idc_port.event_cnt = 0;
idc_port.event_pending_cnt = 0;
idc_port.event_longest_index = 0;
idc_port.event_longest_time = 0;
idc_port.phy_time = 0;
idc_port.frc_time = 0;
idc_port.rx_buf = 0;
#if (!defined(__MD97__)) && (!defined(__MD97P__))
idc_port.event_w_index = 0;
idc_port.event_usage_bit_map = 0;
#else
//event_idx 0 for immediate_event.
idc_port.event_w_index = 1;
idc_port.event_usage_bit_map = 0x1;
#if !defined(MT6297)
// Dedicated scheduler slots per RAT on post-MT6297 chips.
idc_port.event_w_index_lte = IDC_LTE_STA_EVENT_IDX;
idc_port.event_w_index_nr = IDC_NR_STA_EVENT_IDX;
idc_port.event_w_index_com = IDC_COMMON_STA_EVENT_IDX;
/***record IDC_CMD_CNT***/
// advance the rolling success-counter slot and zero it for this init cycle
IDC_CMD_SUCCESS_CNT_IDX++;
if(IDC_CMD_SUCCESS_CNT_IDX >= 10)
IDC_CMD_SUCCESS_CNT_IDX = 0;
IDC_CMD_SUCCESS_CNT[IDC_CMD_SUCCESS_CNT_IDX] = 0;
#endif
#endif
#if defined(__MD95__) || defined(__MD97__) || defined(__MD97P__)
idc_port.sram_w_index = 0;
kal_mem_set(idc_port.event_offset_table, 0, sizeof(idc_port.event_offset_table));
kal_mem_set(idc_port.event_data_table, 0, sizeof(idc_port.event_data_table));
kal_mem_set(idc_port.sram_table_usage, 0, sizeof(idc_port.sram_table_usage));
#endif
#if !defined(MT6297)
//SRAM_idx 0~11 for immediate_event.
idc_port.sram_w_index = IDC_SRAM_WRAP_IDX;
for(i = 0; i < IDC_SRAM_WRAP_IDX; i++)
idc_port.sram_table_usage[i] = 1;
#endif
// Init IDC_UART
drv_idc_init_uart();
MD_TRC(IDC_SLEEP_MSG,sleep_mode);
// 93IDC_UART is in AO, IDC_UART & ISR & callback functions do not need init after sleep
if (!sleep_mode)
{
/*#ifndef ATEST_DRV_ENABLE
idc_sleep_handle = SleepDrv_GetHandle(SMP);
#endif*/
// Init callback functions
for (i = 0; i < IDC_PM_NUM; ++i)
idc_port.pm_cb_handle[i].callback_func = NULL;
// Register UART, PM ISR, then unmask IRQ
drv_idc_init_isr();
idc_port.intr_en = KAL_TRUE;
// Enable TX Count of IDC_CTRL
DRV_WriteReg32(IDC_CTRL_DATA_CNT_CTRL, 0x1);
}
idc_port.main_state = IDC_IN_USE;
MM_Sync();
#else
//IDC should not be initialized if __4G_IDC__ was not defined
ASSERT(0);
#endif
//idc_ctrl_leave(IDC_CTRL_LOCK);//remove because LTE/NR RAT flow
return;
}
/* Enable and configure the M2C (modem-to-CONSYS) IDC-to-PTA bridge:
 * program the SPM ack / enable / timing fields, switch the GPIO pinmux to
 * the m2c functions, and fix the pull configuration of wlan_act.
 * NOTE(review): the 0xC0005000/0xC1D1xxxx addresses are raw GPIO/IOCFG
 * registers with no named macros — confirm against the chip register map
 * before touching the magic offsets. */
void drv_idc_init_m2c_bridge(void)
{
kal_uint32 tmp;
//Enable m2c bridge feature
//M2C_IDC2PTA_BRIDGE + 0xF00[4:2] = 3'b111 ((Bit[4:2] : infra request))
tmp = DRV_Reg32(M2C_IDC2PTA_BRIDGE_SPM_ACK);
tmp &= (~0x1C);
tmp |= M2C_SPM_ACK;
DRV_WriteReg32(M2C_IDC2PTA_BRIDGE_SPM_ACK, tmp);
//M2C_IDC2PTA_BRIDGE + 0x500[10:0] = 11'h7FF
tmp = DRV_Reg32(M2C_IDC2PTA_BRIDGE_M2C_EN);
tmp &= (~0x7FF);
tmp |= M2C_EN;
DRV_WriteReg32(M2C_IDC2PTA_BRIDGE_M2C_EN, tmp);
//M2C_IDC2PTA_BRIDGE + 0x504[23:0] = 11'h40404
tmp = DRV_Reg32(M2C_IDC2PTA_BRIDGE_M2C_TIME);
tmp &= (~0xFFFFFF);
tmp |= M2C_TIME;
DRV_WriteReg32(M2C_IDC2PTA_BRIDGE_M2C_TIME, tmp);
//switch pinmux & GPIO
//GPIO 131 => m2c_bt_act, GPIO 132 => m2c_bt_pri, GPIO 133 => m2c_wlan_act, GPIO 134 => m2c_uart_tx, GPIO 135 => m2c_uart_rx
DRV_WriteReg32(0xC0005000 + 0x408, 0xFFF<<12);
DRV_WriteReg32(0xC0005000 + 0x404, 0x111<<12);
//disable pull up for wlan_act, Write 0x11D10130[2] = 0x0
tmp = DRV_Reg32(0xC1D10130);
tmp &= (~0x4);
DRV_WriteReg32(0xC1D10130, tmp);
//enable pull down for wlan_act, Write 0x11D100F0[2] = 0x1
tmp = DRV_Reg32(0xC1D100F0);
tmp |= (0x4);
DRV_WriteReg32(0xC1D100F0, tmp);
MO_Sync(); // flush the register writes before returning
return;
}
/* Report which IDC transports this build supports.
 * GPIO-based IDC is never supported; IDC and UART support both depend on
 * the __4G_IDC__ feature switch. */
void drv_idc_get_support(IDC_SUPPORT_T *support)
{
#if defined(__4G_IDC__)
    const kal_bool idc_uart_supported = KAL_TRUE;
#else // !defined(__4G_IDC__)
    const kal_bool idc_uart_supported = KAL_FALSE;
#endif
    support->idc = idc_uart_supported;
    support->gpio = KAL_FALSE;
    support->uart = idc_uart_supported;
}
/* Open the IDC port for module mod_id: record the owner and move the main
 * state to IDC_OPEN. No hardware is touched here. */
void drv_idc_open(kal_uint32 mod_id)
{
idc_port.owner_id = mod_id;
idc_port.main_state = IDC_OPEN;
return;
}
/* Shut down the IDC port: stop all scheduled events, disable the UART
 * interrupts, wipe the four pattern-match slots (and the PM remapping on
 * MD95/97/97P), then reset the whole software state block to zero and mark
 * the port IDC_CLOSED.
 * Fix: declared `(void)` instead of the old-style unprototyped `()`. */
void drv_idc_close(void)
{
    kal_uint32 i = 0;
    // stop all events before closing IDC
    DRV_WriteReg32(IDC_CTRL_SCH_STOP, 0xFFFF);
    // disable interrupt; mask the IRQ around the IER write so the ISR never
    // observes a half-updated enable mask
    IRQMask(MD_IRQID_IDC_UART_IRQ);
    DRV_WriteReg8(IDC_UART_IER, IDC_UART_IER_ALLOFF);
    IRQUnmask(MD_IRQID_IDC_UART_IRQ);
    idc_port.intr_en = KAL_FALSE;
    idc_port.rx_buf = 0;
    // clear all four pattern-match slots
    for (i = 0; i < 4; ++i)
    {
        DRV_WriteReg32(IDC_PRI(i), 0);
        DRV_WriteReg32(IDC_PRI_BITEN(i), 0);
        DRV_WriteReg32(IDC_PAT(i), 0);
        DRV_WriteReg32(IDC_PAT_BITEN(i), 0);
    }
#if defined(__MD95__) || defined(__MD97__) || defined(__MD97P__)
    DRV_WriteReg32(IDC_REMAPPING_EN, 0);
#endif
    // reset the software state last, then publish the closed state
    kal_mem_set(&idc_port, 0, sizeof(idc_port));
    idc_port.main_state = IDC_CLOSED;
    idc_in_close = KAL_FALSE;
    return;
}
/* Apply a DCB (baud / data bits / stop bits / parity) configuration.
 * Interrupts are disabled (IER saved) while the line parameters change and
 * restored afterwards; the new config is cached in idc_port.DCB.
 * NOTE(review): tmp_lcr is kal_uint8 but loaded from a 32-bit LCR read, so
 * any LCR bits above [7:0] are dropped on write-back — confirm the LCR only
 * uses its low byte. */
void drv_idc_set_dcb_config(IDC_CTRL_DCB_CONFIG_T idc_config)
{
kal_uint8 tmp_lcr, tmp_ier;
IRQMask(MD_IRQID_IDC_UART_IRQ);
tmp_ier = DRV_Reg8(IDC_UART_IER); // save current interrupt enables
DRV_WriteReg8(IDC_UART_IER, IDC_UART_IER_ALLOFF);
IRQUnmask(MD_IRQID_IDC_UART_IRQ);
// set baud rate
drv_idc_set_baudrate(idc_config.u4Baud);
tmp_lcr = DRV_Reg32(IDC_UART_LCR);
tmp_lcr &= ~0x3F; // clear word-length / stop / parity fields
// set number of bits per character
switch(idc_config.u1DataBits)
{
case len_5:
tmp_lcr |= IDC_UART_WLS_5;
break;
case len_6:
tmp_lcr |= IDC_UART_WLS_6;
break;
case len_7:
tmp_lcr |= IDC_UART_WLS_7;
break;
case len_8:
tmp_lcr |= IDC_UART_WLS_8;
break;
default:
break; // unknown value: leave field cleared
}
// set number of stop bits
switch(idc_config.u1StopBits)
{
case sb_1:
tmp_lcr |= IDC_UART_1_STOP;
break;
case sb_2:
tmp_lcr |= IDC_UART_2_STOP;
break;
case sb_1_5:
tmp_lcr |= IDC_UART_1_5_STOP;
break;
default:
break;
}
// set parity mode
switch(idc_config.u1Parity)
{
case pa_none:
tmp_lcr |= IDC_UART_NONE_PARITY;
break;
case pa_odd:
tmp_lcr |= IDC_UART_ODD_PARITY;
break;
case pa_even:
tmp_lcr |= IDC_UART_EVEN_PARITY;
break;
default:
break;
}
DRV_WriteReg32(IDC_UART_LCR, tmp_lcr);
// cache the applied configuration for drv_idc_get_dcb_config
kal_mem_cpy((void *) &idc_port.DCB, (void *) &idc_config, sizeof(IDC_CTRL_DCB_CONFIG_T));
IRQMask(MD_IRQID_IDC_UART_IRQ);
DRV_WriteReg8(IDC_UART_IER, tmp_ier); // restore saved interrupt enables
IRQUnmask(MD_IRQID_IDC_UART_IRQ);
}
/* Copy the cached DCB (line parameter) settings out to the caller. */
void drv_idc_get_dcb_config(IDC_CTRL_DCB_CONFIG_T *DCB)
{
    const void *cached_cfg = (const void *) &idc_port.DCB;
    kal_mem_cpy((void *) DCB, (void *) cached_cfg, sizeof(IDC_CTRL_DCB_CONFIG_T));
}
/* Program the IDC UART for the requested baud rate. Only 4 Mbaud is
 * supported (asserted); the divisor/sample-count registers are programmed
 * per baud_rate = sysclk / (SAMPLE_COUNT + 1) / {DLM,DLL}, with chip-specific
 * fractional-divider tweaks to track the CONSYS UART's bit-shift behavior. */
void drv_idc_set_baudrate(kal_uint32 baudrate)
{
kal_uint8 tmp_lcr = 0;
kal_uint32 sample_count = 0;
idc_port.DCB.u4Baud = baudrate; // cache the requested rate
// Only 4M baudrate is used in IDC now
ASSERT(baudrate == 4000000);
sample_count = 6;
// configure register
// based on sample_count * baud_pulse, baud_rate = system clock frequency / (SAMPLE_COUNT + 1) / {DLM, DLL}
DRV_WriteReg32(IDC_UART_HIGHSPEED, IDC_UART_HIGHSPEED_X);
DRV_WriteReg32(IDC_UART_FEATURE_SEL, 0x1);
// -- FEATURE_SEL start -- (DLL/DLM only reachable while FEATURE_SEL=1)
DRV_WriteReg32(IDC_UART_DLL_backup, 0x1);
DRV_WriteReg32(IDC_UART_DLM_backup, 0x0);
DRV_WriteReg32(IDC_UART_FEATURE_SEL, 0x0);
// -- FEATURE_SEL end --
DRV_WriteReg32(IDC_UART_SAMPLE_COUNT, sample_count - 1);
DRV_WriteReg32(IDC_UART_SAMPLE_POINT, (sample_count - 1) >> 1);
// Set Guard time en & cnt = 2
DRV_WriteReg32(IDC_UART_GUARD, 0x12);
#if defined(__MD97__) || defined(__MD97P__)
DRV_WriteReg32(IDC_UART_FRACDIV_L_TX, 0x55);
DRV_WriteReg32(IDC_UART_FRACDIV_L_RX, 0x55);
DRV_WriteReg32(IDC_UART_FRACDIV_M_TX, 0x2);
DRV_WriteReg32(IDC_UART_FRACDIV_M_RX, 0x2);
#else
// orig: Add SAMPLE_COUNT by 1 for bit[0], bit[2], bit[4], bit[6] (8'b01010101)
// align CONSYS UART shift issue, add SAMPLE_COUNT by 1 for bit[0], bit[2], bit[4], bit[5], bit[6] (8'b01110101)
DRV_WriteReg32(IDC_UART_FRACDIV_L_TX, 0x75);
DRV_WriteReg32(IDC_UART_FRACDIV_L_RX, 0x75);
// orig: Add SAMPLE_COUNT by 1 for STOP bits (2'b10)
// align CONSYS UART shift issue, no add SAMPLE_COUNT by 1 for STOP bits (2'b00)
DRV_WriteReg32(IDC_UART_FRACDIV_M_TX, 0x0);
DRV_WriteReg32(IDC_UART_FRACDIV_M_RX, 0x0);
#endif
tmp_lcr = DRV_Reg32(IDC_UART_LCR);
DRV_WriteReg32_NPW(IDC_UART_LCR, tmp_lcr | 0x3); // set 8-bit word length field
}
/* Set the RX FIFO interrupt trigger threshold (bytes). */
void drv_idc_set_fifo_trigger(kal_uint8 rx_threshold)
{
DRV_WriteReg32_NPW(IDC_UART_RXTRIG, rx_threshold);
return;
}
/* Program pattern-match slot pm_idx. PRI_BITEN is cleared first to disable
 * matching while the pattern registers change, and written last to re-arm —
 * preserving this write order is required for glitch-free reconfiguration. */
void drv_idc_set_pm_config(kal_uint8 pm_idx, kal_uint8 priority, kal_uint8 priority_bit_en, kal_uint8 pattern, kal_uint8 pattern_bit_en)
{
IRQMask(MD_IRQID_IDC_PM_INT);
// Use DSB to make sure that pattern match is turned off before setting pattern
DRV_WriteReg32_NPW(IDC_PRI_BITEN(pm_idx), 0);
//Data_Sync_Barrier();
//Data_Sync_Barrier();
// PRI_BITEN should be set lastly
DRV_WriteReg32(IDC_PAT(pm_idx), pattern);
DRV_WriteReg32(IDC_PAT_BITEN(pm_idx), pattern_bit_en);
DRV_WriteReg32(IDC_PRI(pm_idx), priority);
DRV_WriteReg32_NPW(IDC_PRI_BITEN(pm_idx), priority_bit_en);
IRQUnmask(MD_IRQID_IDC_PM_INT);
}
/* Read back the four pattern-match configuration registers of slot pm_idx. */
void drv_idc_get_pm_config(kal_uint8 pm_idx, kal_uint8 *priority, kal_uint8 *priority_bit_en, kal_uint8 *pattern, kal_uint8 *pattern_bit_en)
{
    kal_uint8 pri_val = DRV_Reg8(IDC_PRI(pm_idx));
    kal_uint8 pri_en_val = DRV_Reg8(IDC_PRI_BITEN(pm_idx));
    kal_uint8 pat_val = DRV_Reg8(IDC_PAT(pm_idx));
    kal_uint8 pat_en_val = DRV_Reg8(IDC_PAT_BITEN(pm_idx));

    *priority = pri_val;
    *priority_bit_en = pri_en_val;
    *pattern = pat_val;
    *pattern_bit_en = pat_en_val;
}
/* Program the two "new PM" wake patterns. PM_INIT_FLAG (under the ITC lock)
 * rejects a concurrent caller while the registers are being written, and is
 * cleared again once the update completes. */
void drv_idc_set_new_pm_config(kal_uint8 pattern0, kal_uint8 pattern1)
{
kal_hrt_take_itc_lock(KAL_ITC_IDC, KAL_INFINITE_WAIT);
if(PM_INIT_FLAG == KAL_TRUE){
kal_hrt_give_itc_lock(KAL_ITC_IDC);
MD_TRC(IDC_TAKE_FLAG_FAIL_MSG, __FUNCTION__);
return;
}
else{
PM_INIT_FLAG = KAL_TRUE;
MM_Sync();
kal_hrt_give_itc_lock(KAL_ITC_IDC);
}
// write both pattern registers with the PM interrupt masked
IRQMask(MD_IRQID_IDC_PM_INT);
DRV_WriteReg32(IDC_NEW_PAT0, pattern0);
DRV_WriteReg32_NPW(IDC_NEW_PAT1, pattern1);
IRQUnmask(MD_IRQID_IDC_PM_INT);
// release the one-shot guard
kal_hrt_take_itc_lock(KAL_ITC_IDC, KAL_INFINITE_WAIT);
PM_INIT_FLAG = KAL_FALSE;
MM_Sync();
kal_hrt_give_itc_lock(KAL_ITC_IDC);
MD_TRC(IDC_IDC_SET_NEW_PM_MSG, pattern0, pattern1);
}
/* Read back the two "new PM" wake pattern registers for the caller. */
void drv_idc_get_new_pm_config(kal_uint8 *pattern0, kal_uint8 *pattern1)
{
    kal_uint8 pat0_val = DRV_Reg8(IDC_NEW_PAT0);
    kal_uint8 pat1_val = DRV_Reg8(IDC_NEW_PAT1);

    *pattern0 = pat0_val;
    *pattern1 = pat1_val;
}
#if defined(__MD93__)
/* MD93: send a two-byte immediate event over the IDC UART.
 * In sleep_mode the scheduler is drained first, sleep is locked, the PM
 * configuration is cleared, and afterwards the UART switches to TX-only
 * interrupts so the event can flush out before sleeping. */
void drv_idc_send_event(IDC_EVENT_T event, kal_bool sleep_mode)
{
kal_uint32 i = 0;
#ifdef ATEST_DRV_ENABLE
// DT_IDC_PRINTF("drv_idc: send event\n\r");
#endif
if (sleep_mode)
{
// Clear scheduled events
drv_idc_stop_event(0xFFFF);
#ifndef ATEST_DRV_ENABLE
SleepDrv_LockSleep(SLEEP_CTL_IDC, SMP);
#endif
IRQMask(MD_IRQID_IDC_PM_INT);
// clear PM configuration
for (i = 0; i < 4; ++i)
{
DRV_WriteReg32(IDC_PRI_BITEN(i), 0);
}
}
kal_hrt_take_itc_lock(KAL_ITC_IDC, KAL_INFINITE_WAIT);
before_poll_time_SE = ust_get_current_time();
//********protect critical section*******
// push both event bytes into the UART TX FIFO back-to-back
DRV_WriteReg32(IDC_UART_BASE, event.data[0]);
DRV_WriteReg32(IDC_UART_BASE, event.data[1]);
//********protect critical section*******
after_poll_time_SE = ust_get_current_time();
kal_hrt_give_itc_lock(KAL_ITC_IDC);
if (sleep_mode)
{
// Turn off ERBFI and turn on ETBEI
DRV_WriteReg32_NPW(IDC_UART_IER, IDC_UART_IER_ETBEI);
}
return;
}
#elif defined(__MD95__)
/* MD95: send an event (up to event.num bytes) over the IDC UART.
 * Non-sleep path: all scheduled events are stopped; if any scheduled event
 * would fire within ~100ms of (phy_time + 61440 ticks) the whole scheduler
 * state is dropped (returns KAL_FALSE), otherwise the stopped events are
 * re-armed after the send. Sleep path: sleep is locked, the new-PM remapping
 * is cleared and the UART switches to TX-only interrupts.
 * Returns KAL_TRUE when no scheduled events were dropped. */
kal_bool drv_idc_send_event_95(IDC_EVENT_T event, kal_bool sleep_mode)
{
kal_uint8 num = 0, i = 0, DROP = 0;
kal_uint32 reschedule_event_offset = 0;
#ifdef ATEST_DRV_ENABLE
// DT_IDC_PRINTF("drv_idc: send event\n\r");
#endif
MD_TRC(IDC_SEND_EVENT_SLEEP_MODE_STS_MSG, sleep_mode);
// Stop all scheduled events
drv_idc_stop_event(0xFFFF);
if(!sleep_mode)
{
// Check drop behavior
for(i = 0; i < 16; i++)
{
if((idc_port.event_usage_bit_map >> i) & 0x1)
{
//if(idc_port.event_offset_table[i] <= (idc_port.phy_time + 61440))
if(TIME_DIFF_WITHIN((idc_port.phy_time + 61440), idc_port.event_offset_table[i], 614400))
{
//clear all event
idc_port.event_cnt = 0;
idc_port.event_pending_cnt = 0;
idc_port.event_longest_index = 0;
idc_port.event_longest_time = 0;
//idc_port.phy_time = 0;
idc_port.event_w_index = 0;
idc_port.event_usage_bit_map = 0x0;
idc_port.rx_buf = 0;
idc_port.sram_w_index = 0;
kal_mem_set(idc_port.event_offset_table, 0, sizeof(idc_port.event_offset_table));
kal_mem_set(idc_port.event_data_table, 0, sizeof(idc_port.event_data_table));
kal_mem_set(idc_port.sram_table_usage, 0, sizeof(idc_port.sram_table_usage));
//DROP flag
DROP = 1;
MD_TRC(IDC_SEND_EVENT_DROP_MSG,);
break;
}
}
}
}
#ifndef ATEST_DRV_ENABLE
//Lock sleep if sleep_mode is KAL_TRUE
if (sleep_mode)
{
SleepDrv_LockSleep(SLEEP_CTL_IDC, SMP);
}
#endif
kal_hrt_take_itc_lock(KAL_ITC_IDC, KAL_INFINITE_WAIT);
before_poll_time_SE = ust_get_current_time();
//********protect critical section*******
// push all event bytes into the UART TX FIFO back-to-back
for(num = 0; num < event.num; num++)
{
DRV_WriteReg32(IDC_UART_BASE, event.data[num]);
}
//********protect critical section*******
after_poll_time_SE = ust_get_current_time();
kal_hrt_give_itc_lock(KAL_ITC_IDC);
MD_TRC(IDC_SEND_EVENT_MSG, event.num,event.data[0], event.data[1], event.data[2], event.data[3], event.data[4], event.data[5], event.data[6], event.data[7], event.data[8]);
if (sleep_mode)
{
//clear new PM configuration
DRV_WriteReg32(IDC_REMAPPING_EN, 0);
// Turn off ERBFI and turn on ETBEI
DRV_WriteReg32_NPW(IDC_UART_IER, IDC_UART_IER_ETBEI);
IRQUnmask(MD_IRQID_IDC_UART_IRQ);
}
if(!sleep_mode)
{
//Re-Start scheduler if no drop behavior occur
if(!DROP)
{
for(i = 0; i < 16; i++)
{
if((idc_port.event_usage_bit_map >> i) & 0x1)
{
// re-arm the stopped event: bit31 of the setting register triggers it
reschedule_event_offset = DRV_Reg32(IDC_CTRL_EVENT_SETETING(i));
DRV_WriteReg32(IDC_CTRL_EVENT_SETETING(i), (reschedule_event_offset | (1 << 31)));
}
}
MD_TRC(IDC_SEND_EVENT_RESCHEDULE_MSG,);
}
}
if(DROP)
return KAL_FALSE;
else
return KAL_TRUE;
}
#elif defined(__MD97__) || defined(__MD97P__)
/* MD97/MD97P: send an immediate event through scheduler slot 0.
 * The send only proceeds when the immediate-event slot is idle (low 2 bits
 * of IDC_CTRL_SCH_STATUS clear); otherwise it is reported as BUSY and the
 * function returns KAL_FALSE. In sleep_mode with only one RAT active, sleep
 * is locked and the UART switches to TX-only interrupts (and the lock is
 * given back if the slot was busy). Non-sleep sends also sanity-check the
 * TX FIFO level, dumping UART state at >=30 bytes and asserting at >=126.
 * Returns KAL_TRUE when the event was queued. */
kal_bool drv_idc_send_event_97(IDC_EVENT_T event, kal_bool sleep_mode)
{
kal_uint8 BUSY = 0;
kal_bool LTE_STS = KAL_FALSE, NR_STS = KAL_FALSE;
MD_TRC(IDC_SEND_EVENT_SLEEP_MODE_STS_MSG, sleep_mode);
if(sleep_mode){
// snapshot the RAT flags under the ITC lock
kal_hrt_take_itc_lock(KAL_ITC_IDC, KAL_INFINITE_WAIT);
LTE_STS = LTE_FLAG;
NR_STS = NR_FLAG;
kal_hrt_give_itc_lock(KAL_ITC_IDC);
//polling immediate_event is idle
if((DRV_Reg32(IDC_CTRL_SCH_STATUS)& 0x3) != 0x0){
BUSY = 1;
MD_TRC(IDC_SEND_EVENT_FAIL_MSG, event.num, event.data[0], event.data[1], event.data[2], event.data[3], event.data[4], event.data[5], event.data[6], event.data[7], event.data[8]);
}
else{
//set_immediate_event
idc_set_immediate_event(0, event.data, event.num, 0, (event.num-1));
//MD_TRC(IDC_SEND_EVENT_MSG, event.num, event.data[0], event.data[1], event.data[2], event.data[3], event.data[4], event.data[5], event.data[6], event.data[7], event.data[8]);
}
// both RATs active: another RAT still needs the port, skip the sleep prep
if((LTE_STS == KAL_TRUE) && (NR_STS == KAL_TRUE)){
if(BUSY)
return KAL_FALSE;
else
return KAL_TRUE;
}
else{
#ifndef ATEST_DRV_ENABLE
//Lock sleep if sleep_mode is KAL_TRUE
SleepDrv_LockSleep(SLEEP_CTL_IDC, SMP);
#endif
#if defined(MT6297)
//disable new PM
DRV_WriteReg32(IDC_REMAPPING_EN, 0);
#endif
// Turn off ERBFI and turn on ETBEI
DRV_WriteReg32_NPW(IDC_UART_IER, IDC_UART_IER_ETBEI);
IRQUnmask(MD_IRQID_IDC_UART_IRQ);
if(BUSY){
#ifndef ATEST_DRV_ENABLE
// the event was not queued: undo the sleep lock taken above
SleepDrv_UnlockSleep(SLEEP_CTL_IDC, SMP);
#endif
return KAL_FALSE;
}
else
return KAL_TRUE;
}
}
//sleep_mode is KAL_FALSE, send immediate event only
else{
kal_uint8 txfifo_cnt = 0;
txfifo_cnt = DRV_Reg8(IDC_TX_WOFFSET);
//MD_TRC(IDC_TXFIFO_CNT_MSG, txfifo_cnt);
//log IDC_UART status & MD_L1_AO
if(txfifo_cnt >= 30){
DRV_WriteReg32_NPW(IDC_UART_FEATURE_SEL, 0x1);
MD_TRC(IDC_SEND_EVENT_DUMP_MSG, DRV_Reg8(IDC_UART_SAMPLE_COUNT), DRV_Reg8(IDC_UART_SAMPLE_POINT), DRV_Reg8(IDC_UART_GUARD), DRV_Reg8(IDC_UART_ESCAPE_EN), DRV_Reg8(IDC_UART_SLEEP_EN), DRV_Reg8(IDC_UART_DEBUG_1), DRV_Reg8(IDC_UART_DEBUG_8), DRV_Reg8(IDC_UART_SLEEP_CTRL), DRV_Reg8(IDC_UART_MISC_CTRL), DRV_Reg8(IDC_UART_DEBUG_10), DRV_Reg8(IDC_UART_DLL_backup), DRV_Reg8(IDC_UART_DLM_backup), DRV_Reg8(IDC_UART_EFR_backup), DRV_Reg8(IDC_UART_FEATURE_SEL), DRV_Reg32(MDL1AO_CLK_STA), DRV_Reg32(MDL1AO_PDN_STA), DRV_Reg32(IDC_CTRL_FRC_REG));
DRV_WriteReg32_NPW(IDC_UART_FEATURE_SEL, 0x0);
}
//TXFIFO threshold check (keep 10 byte margin)
if(txfifo_cnt >= 126){
MD_TRC(IDC_SEND_EVENT_FAIL_MSG, event.num, event.data[0], event.data[1], event.data[2], event.data[3], event.data[4], event.data[5], event.data[6], event.data[7], event.data[8]);
IDC_ASSERT(0);
//return KAL_FALSE;
}
//polling immediate_event is idle
if((DRV_Reg32(IDC_CTRL_SCH_STATUS)& 0x3) != 0x0){
BUSY = 1;
MD_TRC(IDC_SEND_EVENT_FAIL_MSG, event.num, event.data[0], event.data[1], event.data[2], event.data[3], event.data[4], event.data[5], event.data[6], event.data[7], event.data[8]);
}
else{
//set_immediate_event
idc_set_immediate_event(0, event.data, event.num, 0, (event.num-1));
//MD_TRC(IDC_SEND_EVENT_MSG, event.num, event.data[0], event.data[1], event.data[2], event.data[3], event.data[4], event.data[5], event.data[6], event.data[7], event.data[8]);
}
if(BUSY)
return KAL_FALSE;
else
return KAL_TRUE;
}
}
#endif
#if defined(__MD93__)
void drv_idc_schedule_event(IDC_EVENT_T event)
{
kal_uint32 i = 0, sram_full_flag = 0, buf_displace_happen_flag = 0, put_in_buf_flag = 0;
kal_hrt_take_itc_lock(KAL_ITC_IDC, KAL_INFINITE_WAIT);
before_poll_time_SCH = ust_get_current_time();
//********protect critical section*******
idc_ctrl_enter(IDC_CTRL_LOCK);
idc_port.schedule_state = IDC_RUN;
event.offset = event.offset & 0xFFFFF;
/* if(((event.offset > idc_port.phy_time)
&& ((event.offset - idc_port.phy_time) > 614400))
|| ((event.offset < idc_port.phy_time)
&& ((1048575 - idc_port.phy_time + event.offset) > 614400)))*/
if(TIME_DIFF_EXCEED(event.offset, idc_port.phy_time, 614400))
{
kal_hrt_give_itc_lock(KAL_ITC_IDC);
MD_TRC(IDC_SCHEDULE_OVER_10MS_MSG,idc_port.phy_time, event.offset, event.data[0], event.data[1]);
IDC_ASSERT(0); // Time offset must not larger than 10ms
}
if(idc_port.event_cnt < 32)
{
idc_port.event_cnt++;
if(idc_port.event_usage_bit_map != 0xFFFF) // SRAM isn't full
{
// Find empty event
while (1)
{
if ((1 << idc_port.event_w_index) & idc_port.event_usage_bit_map)
{
idc_port.event_w_index++;
if (idc_port.event_w_index == 16) idc_port.event_w_index = 0;
}
else
{
break;
}
}
// Set event data
DRV_WriteReg32(IDC_CTRL_EVT_DATA(idc_port.event_w_index), (event.data[0] + (event.data[1] << 8)));
// Set time stamps para & trigger event
DRV_WriteReg32(IDC_CTRL_EVENT_SETETING(idc_port.event_w_index), event.offset + (1 << 31));
// Log TX information
//MD_TRC(IDC_SCHEDULE_2_MSG, event.offset, event.data[0], event.data[1]);
// Record event_offset & event_data in the table
idc_port.event_offset_table[idc_port.event_w_index] = event.offset;
idc_port.event_data_table[idc_port.event_w_index] = (event.data[0] + (event.data[1] << 8));
// Record the number and usage bitmap for the scheduler
idc_port.event_usage_bit_map |= (1 << idc_port.event_w_index);
// Update idc_port.event_longest_time
if(idc_port.event_cnt == 1)
{
idc_port.event_longest_index = idc_port.event_w_index;
idc_port.event_longest_time = event.offset;
}
if(TIME_DIFF_WITHIN(event.offset, idc_port.event_longest_time, 614400))
/* if(((event.offset > idc_port.event_longest_time)
&& ((event.offset - idc_port.event_longest_time) <=614400))
|| (((event.offset < idc_port.event_longest_time)
&& ((0xFFFFF - idc_port.event_longest_time + event.offset) <= 614400))))*/
{
idc_port.event_longest_index = idc_port.event_w_index;
idc_port.event_longest_time = event.offset;
}
idc_port.event_w_index++;
if (idc_port.event_w_index == 16) idc_port.event_w_index = 0;
}
else //SRAM is full, we need to put event into event_buffer
{
sram_full_flag = 1;
//Log SRAM is FULL information
//MD_TRC(IDC_SRAM_FULL_DATA_IN_EVENT_BUFFER);
// Replace event in SRAM with incoming event, and put event into event_buffer
if(TIME_DIFF_WITHIN(idc_port.event_longest_time, event.offset, 614400))
/* if(((idc_port.event_longest_time > event.offset)
&&((idc_port.event_longest_time - event.offset) <= 614400))
|| ((idc_port.event_longest_time < event.offset)
&& ((1048575 - event.offset + idc_port.event_longest_time) <= 614400)))*/
{
// If event is triggered and being processing, just wait...
while(((DRV_Reg32(IDC_CTRL_SCH_STATUS) >> (idc_port.event_longest_index * 2)) & 0x3) == 0x3);
// Longest_offset_evt haven't been triggered, we need to stop evt before we set data&time stamps
if(((DRV_Reg32(IDC_CTRL_SCH_STATUS) >> (idc_port.event_longest_index * 2)) & 0x3) == 0x2)
{
// Stop longest_offset_evt
drv_idc_stop_event((0x1 << idc_port.event_longest_index));
//put event into event_buffer
idc_port.event_pending_offset_table[idc_port.event_pending_cnt] = idc_port.event_longest_time;
idc_port.event_pending_data_table[idc_port.event_pending_cnt] = idc_port.event_data_table[idc_port.event_longest_index];
idc_port.event_pending_cnt++;
//Log information of event in event buffer
buf_displace_happen_flag = 1;
//MD_TRC(IDC_DATA_IN_EVENT_BUFFER_DISPLACE_HAPPEN, idc_port.event_longest_time, (idc_port.event_data_table[idc_port.event_longest_index] & 0xFF), (idc_port.event_data_table[idc_port.event_longest_index] >> 8));
// Set event data
DRV_WriteReg32(IDC_CTRL_EVT_DATA(idc_port.event_longest_index), (event.data[0] + (event.data[1] << 8)));
// Set time stamps para & trigger event
DRV_WriteReg32(IDC_CTRL_EVENT_SETETING(idc_port.event_longest_index), event.offset + (1 << 31));
// Record event_offset & event_data in the table
idc_port.event_offset_table[idc_port.event_longest_index] = event.offset;
idc_port.event_data_table[idc_port.event_longest_index] = event.data[0] + (event.data[1] << 8);
//update idc_port.event_longest_time
idc_port.event_longest_time = event.offset;
for(i = 0; i<16; i++)
{
if(TIME_DIFF_WITHIN(idc_port.event_offset_table[i], idc_port.event_longest_time, 614400))
/* if((idc_port.event_offset_table[i] > idc_port.event_longest_time) ||
((idc_port.event_offset_table[i] < idc_port.event_longest_time)
&& ((1048575 - idc_port.event_longest_time + idc_port.event_offset_table[i]) <= 614400)))*/
{
idc_port.event_longest_time = idc_port.event_offset_table[i];
idc_port.event_longest_index = i;
}
}
}
// Longest_offset_evt have been triggered, we reset the usage_bit_map & directly set data&time stamps
else if(((DRV_Reg32(IDC_CTRL_SCH_STATUS) >> (idc_port.event_longest_index * 2)) & 0x3) == 0x0)
{
idc_port.event_cnt--;
// Set event data
DRV_WriteReg32(IDC_CTRL_EVT_DATA(idc_port.event_longest_index), (event.data[0] + (event.data[1] << 8)));
// Set time stamps para
DRV_WriteReg32(IDC_CTRL_EVENT_SETETING(idc_port.event_longest_index), event.offset + (1 << 31));
// Record event_offset & event_data in the table
idc_port.event_offset_table[idc_port.event_longest_index] = event.offset;
idc_port.event_data_table[idc_port.event_longest_index] = event.data[0] + (event.data[1] << 8);
//update idc_port.event_longest_time
idc_port.event_longest_time = event.offset;
}
}
//Incoming event is large than idc_port.event_longest_time
else
{
//Whether incoming event's offset is over 1ms than idc_port.phy_time
if(TIME_DIFF_WITHIN(event.offset, idc_port.phy_time, 61440))
/* if(((event.offset > idc_port.phy_time)
&& ((event.offset - idc_port.phy_time) <= 61440))
|| ((event.offset < idc_port.phy_time)
&& (((1048575 - idc_port.phy_time) + event.offset) <= 61440)))*/
IDC_ASSERT(0); // Set over 16 events in 1ms
// Put incoming event into event_buffer
idc_port.event_pending_offset_table[idc_port.event_pending_cnt] = event.offset;
idc_port.event_pending_data_table[idc_port.event_pending_cnt] = event.data[0] + (event.data[1] << 8);
idc_port.event_pending_cnt++;
//Log information of event in event_buffer
put_in_buf_flag = 1;
//MD_TRC(IDC_DATA_IN_EVENT_BUFFER, event.offset, event.data[0], event.data[1]);
}
}
}
else //el1c put over 32 events to drv_idc
IDC_ASSERT(0);
idc_ctrl_leave(IDC_CTRL_LOCK);
//********protect critical section*******
after_poll_time_SCH = ust_get_current_time();
kal_hrt_give_itc_lock(KAL_ITC_IDC);
if(sram_full_flag == 1)
MD_TRC(IDC_SRAM_FULL_DATA_IN_EVENT_BUFFER,);
if(buf_displace_happen_flag == 1)
MD_TRC(IDC_DATA_IN_EVENT_BUFFER_DISPLACE_HAPPEN,idc_port.event_longest_time, (idc_port.event_data_table[idc_port.event_longest_index] & 0xFF), (idc_port.event_data_table[idc_port.event_longest_index] >> 8));
if(put_in_buf_flag == 1)
MD_TRC(IDC_DATA_IN_EVENT_BUFFER,event.offset, event.data[0], event.data[1]);
return;
}
#elif defined(__MD95__)
// Schedule one IDC TX event into the HW scheduler (MD95 variant).
// Packs event.data (event.num bytes) into the scheduler SRAM, programs the
// event's memory position and time stamp, and arms the event.
//
// Parameters:
//   event    - event to schedule; event.offset is a 20-bit phy-time offset,
//              event.data/event.num carry the payload bytes.
//   drop_cmd - [out] filled via drv_idc_return_drop_cmd() when the event
//              cannot be scheduled.
// Returns KAL_TRUE on success, KAL_FALSE when the event is dropped
// (offset too far in the future, SRAM full, or no contiguous SRAM).
// Locking: holds the KAL_ITC_IDC inter-core lock plus IDC_CTRL_LOCK around
// all idc_port/HW accesses.
kal_bool drv_idc_schedule_event_95(IDC_EVENT_T event, IDC_CTRL_DROP_CMD_T *drop_cmd)
{
kal_uint32 i = 0, drop_cmd_flag = 0, mask = 0xFFFFFFFF;
kal_uint32 w_data = 0, w_idx = 0, value = 0, tmp_sram_idx = 0;
kal_hrt_take_itc_lock(KAL_ITC_IDC, KAL_INFINITE_WAIT);
before_poll_time_SCH = ust_get_current_time();
//********protect critical section*******
idc_ctrl_enter(IDC_CTRL_LOCK);
idc_port.schedule_state = IDC_RUN;
// Keep only the 20-bit wrapping phy-time offset
event.offset = event.offset & 0xFFFFF;
/* if(((event.offset > idc_port.phy_time)
&& ((event.offset - idc_port.phy_time) > 614400))
|| ((event.offset < idc_port.phy_time)
&& ((1048575 - idc_port.phy_time + event.offset) > 614400)))*/
// Reject events scheduled more than 10ms (614400 ticks) ahead of phy_time
if(TIME_DIFF_EXCEED(event.offset, idc_port.phy_time, 614400))
{
// NOTE(review): this early-exit releases the ITC lock BEFORE idc_ctrl_leave
// and skips the after_poll_time_SCH update, unlike every other exit path
// below -- confirm the unlock ordering is intentional.
kal_hrt_give_itc_lock(KAL_ITC_IDC);
MD_TRC(IDC_SCHEDULE_OVER_10MS_MSG,idc_port.phy_time, event.offset, event.data[0], event.data[1]);
//IDC_ASSERT(0); // Time offset must not larger than 10ms
idc_ctrl_leave(IDC_CTRL_LOCK);
//********protect critical section*******
return KAL_FALSE;
}
if(idc_port.event_usage_bit_map != 0xFFFF) // SRAM isn't full
{
// Find empty event slot (16 HW event slots, round-robin from event_w_index)
while (1)
{
if ((1 << idc_port.event_w_index) & idc_port.event_usage_bit_map)
{
idc_port.event_w_index++;
if (idc_port.event_w_index == 16) idc_port.event_w_index = 0;
}
else
{
break;
}
}
// Find empty sram_w_idx (78 SRAM byte slots); sram_wrap guards against
// scanning the table more than once
sram_wrap = 0;
while (1)
{
if (idc_port.sram_table_usage[idc_port.sram_w_index] == 1)
{
idc_port.sram_w_index++;
if (idc_port.sram_w_index == 78)
{
if(sram_wrap == 1)
{
//if sram full, return drop_cmd
idc_ctrl_leave(IDC_CTRL_LOCK);
//********protect critical section*******
after_poll_time_SCH = ust_get_current_time();
kal_hrt_give_itc_lock(KAL_ITC_IDC);
MD_TRC(IDC_SCHEDULE_SRAM_FULL_MSG,);
drv_idc_return_drop_cmd(event, drop_cmd);
return KAL_FALSE;
}
idc_port.sram_w_index = 0;
sram_wrap = 1;
}
}
else
{
break;
}
}
//If there no sequential sram space enough, return drop_cmd
for(i = 0; i < event.num; i++)
{
tmp_sram_idx = idc_port.sram_w_index + i;
//wrap case: restart the contiguity scan from SRAM index 0
if(tmp_sram_idx > 75)
{
idc_port.sram_w_index = 0;
tmp_sram_idx = 0;
i = 0;
}
//DT_IDC_PRINTF("*** sram_idx : %d***", tmp_sram_idx);
if(idc_port.sram_table_usage[tmp_sram_idx] == 1)
{
//return drop_cmd
idc_ctrl_leave(IDC_CTRL_LOCK);
//********protect critical section*******
after_poll_time_SCH = ust_get_current_time();
kal_hrt_give_itc_lock(KAL_ITC_IDC);
MD_TRC(IDC_SCHEDULE_NO_SEQUENTIAL_SRAM_MSG,);
drv_idc_return_drop_cmd(event, drop_cmd);
return KAL_FALSE;
}
}
// Set event data: pack payload bytes into 32-bit EVT_DATA words using a
// read-modify-write so neighbouring events' bytes are preserved
w_idx = idc_port.sram_w_index;
for(i = 0; i < event.num; i++)
{
w_data = w_data | (event.data[i] << (8 * (w_idx % 4)));
mask &= ~(0xFF << (8 * (w_idx % 4)));
w_idx++;
// Flush the accumulated word when it is full or on the last payload byte
if((w_idx % 4 == 0) || (i == event.num - 1))
{
if(w_idx % 4 == 0)
value = DRV_Reg32(IDC_CTRL_EVT_DATA((w_idx/4) - 1));
else
value = DRV_Reg32(IDC_CTRL_EVT_DATA(w_idx/4));
value &= mask;
value |= w_data;
if(w_idx % 4 == 0)
DRV_WriteReg32(IDC_CTRL_EVT_DATA((w_idx/4) - 1), value);
else
DRV_WriteReg32(IDC_CTRL_EVT_DATA(w_idx/4), value);
w_data = 0;
mask = 0xFFFFFFFF;
}
idc_port.sram_table_usage[idc_port.sram_w_index + i] = 1;
}
// Set event memory position: start index in bits[15:8], end index in bits[7:0]
DRV_WriteReg32(IDC_CTRL_EVENT_MEM_POS(idc_port.event_w_index), (idc_port.sram_w_index << 8) + (idc_port.sram_w_index + event.num - 1));
// Set time stamps para & trigger event (bit31 = arm/trigger)
DRV_WriteReg32(IDC_CTRL_EVENT_SETETING(idc_port.event_w_index), event.offset + (1 << 31));
//DT_IDC_PRINTF("*** schedule event done \r\n***");
// Record event_offset & event_data in the table (SW mirror of HW state)
idc_port.event_offset_table[idc_port.event_w_index] = event.offset;
kal_mem_cpy(idc_port.event_data_table[idc_port.event_w_index], event.data, sizeof(event.data));
idc_port.event_byte_num[idc_port.event_w_index] = event.num;
idc_port.event_sram_sta_idx[idc_port.event_w_index] = idc_port.sram_w_index;
// Record the number and usage bitmap for the scheduler
idc_port.event_usage_bit_map |= (1 << idc_port.event_w_index);
// Add event_cnt
idc_port.event_cnt++;
// Add sram_w_idx
idc_port.sram_w_index += event.num;
idc_port.event_w_index++;
if (idc_port.event_w_index == 16) idc_port.event_w_index = 0;
}
else //SRAM is full, we need to return drop_cmd
{
//return drop_cmd (deferred until after the locks are released)
drop_cmd_flag = 1;
}
idc_ctrl_leave(IDC_CTRL_LOCK);
//********protect critical section*******
after_poll_time_SCH = ust_get_current_time();
kal_hrt_give_itc_lock(KAL_ITC_IDC);
// Log TX information
//MD_TRC(IDC_SCHEDULE_2_MSG, event.offset, event.data[0], event.data[1]);
if(drop_cmd_flag == 1)
{
MD_TRC(IDC_SCHEDULE_EVENT_FULL_MSG,);
drv_idc_return_drop_cmd(event, drop_cmd);
return KAL_FALSE;
}
return KAL_TRUE;
}
#elif defined(__MD97__) || defined(__MD97P__)
// Schedule one IDC TX event into the HW scheduler (MD97/MD97P variant).
// Differs from the MD95 variant: 30-bit FRC timestamps, IDC_MAX_EVENT_NUM
// event slots with slot 0 and indices below IDC_SRAM_WRAP_IDX reserved
// (wrap targets IDC_SRAM_WRAP_IDX, event index wraps to 1), and no
// IDC_CTRL_LOCK (removed for the LTE/NR RAT flow -- see comments below).
//
// Parameters:
//   event    - event to schedule; event.offset is a 30-bit FRC offset,
//              event.data/event.num carry the payload bytes.
//   drop_cmd - [out] filled via drv_idc_return_drop_cmd() when dropped.
// Returns KAL_TRUE on success, KAL_FALSE when the event is dropped.
kal_bool drv_idc_schedule_event_97(IDC_EVENT_T event, IDC_CTRL_DROP_CMD_T *drop_cmd)
{
kal_uint32 i = 0, drop_cmd_flag = 0, mask = 0xFFFFFFFF;
kal_uint32 w_data = 0, w_idx = 0, value = 0, tmp_sram_idx = 0, end_sram_idx = 0;
kal_hrt_take_itc_lock(KAL_ITC_IDC, KAL_INFINITE_WAIT);
before_poll_time_SCH = ust_get_current_time();
//********protect critical section*******
//idc_ctrl_enter(IDC_CTRL_LOCK); //remove because LTE/NR RAT flow
idc_port.schedule_state = IDC_RUN;
// Keep only the 30-bit wrapping FRC offset
event.offset = event.offset & 0x3FFFFFFF;
// Reject events scheduled more than 10000 ticks ahead of the last known FRC
if(TIME_DIFF_EXCEED(event.offset, idc_port.frc_time, 10000))
{
kal_hrt_give_itc_lock(KAL_ITC_IDC);
MD_TRC(IDC_SCHEDULE_OVER_10MS_MSG, idc_port.frc_time, event.offset, event.data[0], event.data[1]);
//idc_ctrl_leave(IDC_CTRL_LOCK);
//********protect critical section*******
return KAL_FALSE;
}
if(idc_port.event_usage_bit_map != 0xFFFFFFFF) // SRAM isn't full
{
// Find empty event slot; slot 0 is reserved, so the index wraps to 1
while (1)
{
if ((1 << idc_port.event_w_index) & idc_port.event_usage_bit_map)
{
idc_port.event_w_index++;
if (idc_port.event_w_index == IDC_MAX_EVENT_NUM) idc_port.event_w_index = 1;
}
else
{
break;
}
}
// Find empty sram_w_idx; sram_wrap guards against scanning more than once
sram_wrap = 0;
while (1)
{
if (idc_port.sram_table_usage[idc_port.sram_w_index] == 1)
{
idc_port.sram_w_index++;
if (idc_port.sram_w_index == IDC_MAX_SRAM_SIZE)
{
if(sram_wrap == 1)
{
//if sram full, return drop_cmd
//idc_ctrl_leave(IDC_CTRL_LOCK);
//********protect critical section*******
after_poll_time_SCH = ust_get_current_time();
kal_hrt_give_itc_lock(KAL_ITC_IDC);
MD_TRC(IDC_SCHEDULE_SRAM_FULL_MSG);
drv_idc_return_drop_cmd(event, drop_cmd);
return KAL_FALSE;
}
idc_port.sram_w_index = IDC_SRAM_WRAP_IDX;
sram_wrap = 1;
}
}
else
{
break;
}
}
//If there no sequential sram space enough, return drop_cmd
for(i = 0; i < event.num; i++)
{
tmp_sram_idx = idc_port.sram_w_index + i;
//wrap case: SRAM indices wrap back to IDC_SRAM_WRAP_IDX, not 0
if(tmp_sram_idx >= IDC_MAX_SRAM_SIZE)
{
idc_port.sram_w_index = IDC_SRAM_WRAP_IDX;
tmp_sram_idx = IDC_SRAM_WRAP_IDX;
}
//DT_IDC_PRINTF("*** sram_idx : %d***", tmp_sram_idx);
if(idc_port.sram_table_usage[tmp_sram_idx] == 1)
{
//return drop_cmd
//idc_ctrl_leave(IDC_CTRL_LOCK);
//********protect critical section*******
after_poll_time_SCH = ust_get_current_time();
kal_hrt_give_itc_lock(KAL_ITC_IDC);
MD_TRC(IDC_SCHEDULE_NO_SEQUENTIAL_SRAM_MSG);
drv_idc_return_drop_cmd(event, drop_cmd);
return KAL_FALSE;
}
}
// Pre-compute the (possibly wrapped) end index for the MEM_POS register
end_sram_idx = idc_port.sram_w_index + event.num - 1;
if(end_sram_idx >= IDC_MAX_SRAM_SIZE)
end_sram_idx = end_sram_idx - IDC_MAX_SRAM_SIZE + IDC_SRAM_WRAP_IDX;
// Set event data: pack payload bytes into 32-bit EVT_DATA words using a
// read-modify-write so neighbouring events' bytes are preserved
w_idx = idc_port.sram_w_index;
for(i = 0; i < event.num; i++)
{
w_data = w_data | (event.data[i] << (8 * (w_idx % 4)));
mask &= ~(0xFF << (8 * (w_idx % 4)));
w_idx++;
// Flush the accumulated word when it is full or on the last payload byte
if((w_idx % 4 == 0) || (i == event.num - 1))
{
if(w_idx % 4 == 0)
value = DRV_Reg32(IDC_CTRL_EVT_DATA((w_idx/4) - 1));
else
value = DRV_Reg32(IDC_CTRL_EVT_DATA(w_idx/4));
value &= mask;
value |= w_data;
if(w_idx % 4 == 0)
DRV_WriteReg32(IDC_CTRL_EVT_DATA((w_idx/4) - 1), value);
else
DRV_WriteReg32(IDC_CTRL_EVT_DATA(w_idx/4), value);
w_data = 0;
mask = 0xFFFFFFFF;
}
if(w_idx == IDC_MAX_SRAM_SIZE)
w_idx = IDC_SRAM_WRAP_IDX;
// Mark SRAM usage, translating wrapped indices into table positions
if((idc_port.sram_w_index + i) >= IDC_MAX_SRAM_SIZE)
idc_port.sram_table_usage[idc_port.sram_w_index + i - IDC_MAX_SRAM_SIZE + IDC_SRAM_WRAP_IDX] = 1;
else
idc_port.sram_table_usage[idc_port.sram_w_index + i] = 1;
}
// Set event memory position: start index in bits[15:8], end index in bits[7:0]
DRV_WriteReg32(IDC_CTRL_EVENT_MEM_POS(idc_port.event_w_index), (idc_port.sram_w_index << 8) + end_sram_idx);
// Set time stamps para & trigger event (bit31 = arm/trigger)
DRV_WriteReg32(IDC_CTRL_EVENT_SETETING(idc_port.event_w_index), event.offset + (1 << 31));
//DT_IDC_PRINTF("*** schedule event done \r\n***");
// Record event_offset & event_data in the table (SW mirror of HW state)
idc_port.event_offset_table[idc_port.event_w_index] = event.offset;
kal_mem_cpy(idc_port.event_data_table[idc_port.event_w_index], event.data, sizeof(event.data));
idc_port.event_byte_num[idc_port.event_w_index] = event.num;
idc_port.event_sram_sta_idx[idc_port.event_w_index] = idc_port.sram_w_index;
// Record the number and usage bitmap for the scheduler
idc_port.event_usage_bit_map |= (1 << idc_port.event_w_index);
// Add event_cnt
idc_port.event_cnt++;
// Add sram_w_idx
idc_port.sram_w_index += event.num;
if (idc_port.sram_w_index == IDC_MAX_SRAM_SIZE)
idc_port.sram_w_index = idc_port.sram_w_index - IDC_MAX_SRAM_SIZE + IDC_SRAM_WRAP_IDX;
idc_port.event_w_index++;
if (idc_port.event_w_index == IDC_MAX_EVENT_NUM)
idc_port.event_w_index = 1;
}
else //SRAM is full, we need to return drop_cmd
{
//return drop_cmd (deferred until after the lock is released)
drop_cmd_flag = 1;
}
//idc_ctrl_leave(IDC_CTRL_LOCK);
//********protect critical section*******
after_poll_time_SCH = ust_get_current_time();
kal_hrt_give_itc_lock(KAL_ITC_IDC);
// Log TX information
MD_TRC(IDC_SCHEDULE_2_MSG, event.offset, event.data[0], event.data[1]);
if(drop_cmd_flag == 1)
{
MD_TRC(IDC_SCHEDULE_EVENT_FULL_MSG);
drv_idc_return_drop_cmd(event, drop_cmd);
return KAL_FALSE;
}
return KAL_TRUE;
}
#endif
// Translate an event that could not be scheduled into a drop command
// reported back to the caller, and trace the failure.
//
// Parameters:
//   event    - the event that was dropped; offset and the first two payload
//              bytes are decoded into the drop command fields.
//   drop_cmd - [out] receives phytime, type and (for extended commands)
//              sub-type of the dropped command.
void drv_idc_return_drop_cmd(IDC_EVENT_T event, IDC_CTRL_DROP_CMD_T *drop_cmd)
{
    drop_cmd->cmd_phytime = event.offset;
    // Command type is encoded in bits [4:1] of the first payload byte
    drop_cmd->cmd_type = (event.data[0] & 0x1E) >> 1;
    if(drop_cmd->cmd_type == 0xF)
    {
        // Extended command: sub-type lives in bits [7:2] of the second byte
        drop_cmd->cmd_sub_type = (event.data[1] & 0xFC) >> 2;
    }
    else
    {
        // BUGFIX: cmd_sub_type was previously left unwritten on this path,
        // so the trace below (and the caller) could read a stale or
        // uninitialized value. Define it as 0 for non-extended commands.
        drop_cmd->cmd_sub_type = 0;
    }
    //DT_IDC_PRINTF("drop cmd happen!!!!!!!!!!!!\n\r");
    //Log schedule fail info.
    MD_TRC(IDC_SCHEDULE_FAIL_MSG,drop_cmd->cmd_phytime, drop_cmd->cmd_type, drop_cmd->cmd_sub_type);
    return;
}
// Schedule a single-band GPS blanking on/off event at the given FRC time
// (non-CHIP10992 builds; on CHIP10992 this is compiled out and fails).
// LTE and NR each own one bit of the aggregate bitmap GPS_LTE_NR_ALL_BM
// (LTE=bit0, NR=bit1); the HW "off"/"on" events are only programmed when
// the aggregate state actually changes.
//
// Parameters:
//   rat_status - IDC_RAT_LTE or IDC_RAT_NR; any other value fails.
//   gps_mode   - KAL_TRUE to request blanking on for that RAT, else off.
//   frc_time   - 30-bit FRC timestamp at which the event fires.
// Returns KAL_TRUE on success, KAL_FALSE on bad RAT, out-of-window time,
// or a still-busy HW event slot.
// Locking: idc_drv_atom_lock / IDC_ATLOCK_SINGLE_GPS around the whole body.
kal_bool drv_idc_schedule_gps_blank_event(kal_uint8 rat_status, kal_bool gps_mode, kal_uint32 frc_time)
{
#if !defined(CHIP10992)
kal_uint8 gps_bm = 0;
MD_TRC(IDC_GPS_BLANK_MSG, rat_status, gps_mode, frc_time);
//********protect critical section*******
_idc_atomic_lock(&idc_drv_atom_lock,IDC_ATLOCK_SINGLE_GPS);
MD_TRC(IDC_GPS_BLANK_DETAIL_MSG, GPS_LTE_NR_ALL_BM);
// Keep only the 30-bit wrapping FRC value
frc_time= frc_time & 0x3FFFFFFF;
//LTE
if(rat_status == IDC_RAT_LTE){
if(gps_mode == KAL_TRUE){//LTE using bit0
gps_bm = GPS_LTE_NR_ALL_BM|(0x1<<0);
}
else {
gps_bm = GPS_LTE_NR_ALL_BM& 0xFFFFFFFE;
}
}
else if(rat_status == IDC_RAT_NR){//NR using bit1
if(gps_mode == KAL_TRUE){
gps_bm = GPS_LTE_NR_ALL_BM|(0x1<<1);
}
else {
gps_bm = GPS_LTE_NR_ALL_BM& 0xFFFFFFFD;
}
}
else{
_idc_atomic_unlock(&idc_drv_atom_lock,IDC_ATLOCK_SINGLE_GPS);
//gps l1 l5 fail
MD_TRC(IDC_GPS_BLANK_FAIL_MSG, __LINE__,0,0);
return KAL_FALSE;
}
// NOTE(review): TIME_DIFF_WITHIN/EXCEED are macros that expand each
// argument several times, so ust_get_current_time() is sampled more than
// once per comparison below -- confirm the small skew is acceptable.
if(TIME_DIFF_WITHIN(frc_time, ust_get_current_time(), 50)){//drop the event frc < current frc+50us
MD_TRC(IDC_GPS_BLANK_DROP0_MSG, __LINE__,frc_time,ust_get_current_time());//drop this event
_idc_atomic_unlock(&idc_drv_atom_lock,IDC_ATLOCK_SINGLE_GPS);
return KAL_FALSE;
}
if(TIME_DIFF_EXCEED(frc_time, ust_get_current_time(), 10000)){//drop the event frc > current frc+10ms
MD_TRC(IDC_GPS_BLANK_DROP1_MSG, __LINE__,frc_time,ust_get_current_time());//drop this event
_idc_atomic_unlock(&idc_drv_atom_lock,IDC_ATLOCK_SINGLE_GPS);
return KAL_FALSE;
}
// Only touch the HW when the aggregate blanking state changes
if(gps_bm != GPS_LTE_NR_ALL_BM){
if((gps_bm & 0x3) ==0x0){
//gps_off: both RATs released blanking -> arm the OFF event
if((DRV_Reg32(IDC_CTRL_GPS_EVENT_STATUS) & (0x1 << 1))){
//event busy: previous OFF event still pending in HW
MD_TRC(IDC_GPS_BLANK_FAIL_MSG, __LINE__,(DRV_Reg32(IDC_CTRL_GPS_EVENT_STATUS) ),ust_get_current_time());
_idc_atomic_unlock(&idc_drv_atom_lock,IDC_ATLOCK_SINGLE_GPS);
return KAL_FALSE;
}
else{
// bit31 arms/triggers the event at frc_time
DRV_WriteReg32(IDC_CTRL_GPS_EVENT_OFF, frc_time + (0x1 << 31));
MD_TRC(IDC_GPS_BLANK_TRIG_MSG, ust_get_current_time(), 0x0);
}
}else{
// At least one RAT requests blanking; arm the ON event only on the
// transition from "no RAT blanking" (previous aggregate == 0)
if(GPS_LTE_NR_ALL_BM == 0x0){
//gps_on
if((DRV_Reg32(IDC_CTRL_GPS_EVENT_STATUS) & (0x1 << 2))){
//event busy: previous ON event still pending in HW
MD_TRC(IDC_GPS_BLANK_FAIL_MSG, __LINE__,(DRV_Reg32(IDC_CTRL_GPS_EVENT_STATUS) ),ust_get_current_time());
_idc_atomic_unlock(&idc_drv_atom_lock,IDC_ATLOCK_SINGLE_GPS);
return KAL_FALSE;
}
else{
DRV_WriteReg32(IDC_CTRL_GPS_EVENT_ON, frc_time + (0x1 << 31));
MD_TRC(IDC_GPS_BLANK_TRIG_MSG, ust_get_current_time(), 0x1);
}
}
}
}
// Commit the new aggregate state and make it visible to other cores
GPS_LTE_NR_ALL_BM = gps_bm;
MM_Sync();
//********protect critical section********
_idc_atomic_unlock(&idc_drv_atom_lock,IDC_ATLOCK_SINGLE_GPS);
return KAL_TRUE;
#else
return KAL_FALSE;
#endif
}
// Schedule GPS L1/L5 blanking on/off events at the given FRC time
// (CHIP10992 / MT6833 / MT6877 builds only; otherwise compiled out).
// raw_data carries the requesting RAT's desired bitmap (bit0 = L1 blanking,
// bit1 = L5 blanking); it is OR-merged with the other RAT's last request and
// each band's HW event is programmed only when its aggregate state changes.
//
// Parameters:
//   rat_status - IDC_RAT_LTE or IDC_RAT_NR; any other value fails.
//   raw_data   - requested L1/L5 blanking bitmap for that RAT.
//   frc_time   - 30-bit FRC timestamp at which the event(s) fire.
// Returns KAL_TRUE on success, KAL_FALSE on bad RAT or a busy HW event slot.
// Locking: idc_drv_atom_lock / IDC_ATLOCK_L1_L5_GPS around the whole body.
kal_bool drv_idc_schedule_gps_l1_l5_blank_event(kal_uint8 rat_status, kal_uint8 raw_data, kal_uint32 frc_time)
{
#if defined(CHIP10992)||defined(MT6833)||defined(MT6877)
    kal_uint8 gps_l1_l5_bm = 0;
    MD_TRC(IDC_GPS_L1_L5_MSG, rat_status, raw_data, frc_time);
    //********protect critical section*******
    _idc_atomic_lock(&idc_drv_atom_lock,IDC_ATLOCK_L1_L5_GPS);
    MD_TRC(IDC_GPS_L1_L5_DETAIL_MSG, GPS_L1_L5_ALL_BM, GPS_L1_L5_LTE_BM, GPS_L1_L5_NR_BM);
    // Keep only the 30-bit wrapping FRC value
    frc_time= frc_time & 0x3FFFFFFF;
    //LTE: merge this request with NR's last-known bitmap (and vice versa)
    if(rat_status == IDC_RAT_LTE){
        gps_l1_l5_bm = raw_data | GPS_L1_L5_NR_BM;
    }
    else if(rat_status == IDC_RAT_NR){
        gps_l1_l5_bm = raw_data | GPS_L1_L5_LTE_BM;
    }
    else{
        _idc_atomic_unlock(&idc_drv_atom_lock,IDC_ATLOCK_L1_L5_GPS);
        //gps l1 l5 fail
        MD_TRC(IDC_GPS_L1_L5_FAIL_MSG, __LINE__);
        return KAL_FALSE;
    }
    // Only touch the HW when the aggregate state changes
    if(gps_l1_l5_bm != GPS_L1_L5_ALL_BM){
        //L1 status check (bit0)
        if((gps_l1_l5_bm & (1 << 0)) != (GPS_L1_L5_ALL_BM & (1 << 0))){
            if((gps_l1_l5_bm & (1 << 0)) == 0x0){
                //gps_l1_off
                if((DRV_Reg32(IDC_CTRL_GPS_EVENT_STATUS) & (1 << 1))){
                    //event busy
                    MD_TRC(IDC_GPS_L1_L5_FAIL_MSG, __LINE__);
                    _idc_atomic_unlock(&idc_drv_atom_lock,IDC_ATLOCK_L1_L5_GPS);
                    return KAL_FALSE;
                }
                else{
                    // bit31 arms/triggers the event at frc_time
                    DRV_WriteReg32(IDC_CTRL_GPS_EVENT_OFF, frc_time + (1 << 31));
                    MD_TRC(IDC_GPS_L1_L5_TRIG_MSG, 0x0, 0x0);
                }
            }
            else if((gps_l1_l5_bm & (1 << 0)) == 0x1){
                //gps_l1_on
                if((DRV_Reg32(IDC_CTRL_GPS_EVENT_STATUS) & (1 << 2))){
                    //event busy
                    MD_TRC(IDC_GPS_L1_L5_FAIL_MSG, __LINE__);
                    _idc_atomic_unlock(&idc_drv_atom_lock,IDC_ATLOCK_L1_L5_GPS);
                    return KAL_FALSE;
                }
                else{
                    DRV_WriteReg32(IDC_CTRL_GPS_EVENT_ON, frc_time + (1 << 31));
                    MD_TRC(IDC_GPS_L1_L5_TRIG_MSG, 0x0, 0x1);
                }
            }
        }
        //L5 status check (bit1)
        if((gps_l1_l5_bm & (1 << 1)) != (GPS_L1_L5_ALL_BM & (1 << 1))){
            if((gps_l1_l5_bm & (1 << 1)) == 0x0){
                //gps_l5_off
                if((DRV_Reg32(IDC_CTRL_GPS_EVENT_L5_STATUS) & (1 << 1))){
                    //event busy
                    MD_TRC(IDC_GPS_L1_L5_FAIL_MSG, __LINE__);
                    _idc_atomic_unlock(&idc_drv_atom_lock,IDC_ATLOCK_L1_L5_GPS);
                    return KAL_FALSE;
                }
                else{
                    DRV_WriteReg32(IDC_CTRL_GPS_EVENT_L5_OFF, frc_time + (1 << 31));
                    MD_TRC(IDC_GPS_L1_L5_TRIG_MSG, 0x1, 0x0);
                }
            }
            // BUGFIX: this branch previously tested == 0x1, but
            // (gps_l1_l5_bm & (1 << 1)) can only be 0x0 or 0x2, so the
            // L5 "on" event was never programmed. Compare against the
            // masked bit value, mirroring the L1 branch above.
            else if((gps_l1_l5_bm & (1 << 1)) == (1 << 1)){
                //gps_l5_on
                if((DRV_Reg32(IDC_CTRL_GPS_EVENT_L5_STATUS) & (1 << 2))){
                    //event busy
                    MD_TRC(IDC_GPS_L1_L5_FAIL_MSG, __LINE__);
                    _idc_atomic_unlock(&idc_drv_atom_lock,IDC_ATLOCK_L1_L5_GPS);
                    return KAL_FALSE;
                }
                else{
                    DRV_WriteReg32(IDC_CTRL_GPS_EVENT_L5_ON, frc_time + (1 << 31));
                    MD_TRC(IDC_GPS_L1_L5_TRIG_MSG, 0x1, 0x1);
                }
            }
        }
    }
    //update bitmap: remember this RAT's request and the merged state
    if(rat_status == IDC_RAT_LTE){
        GPS_L1_L5_LTE_BM = raw_data;
    }
    else if(rat_status == IDC_RAT_NR){
        GPS_L1_L5_NR_BM = raw_data;
    }
    GPS_L1_L5_ALL_BM = gps_l1_l5_bm;
    MM_Sync();
    //********protect critical section********
    _idc_atomic_unlock(&idc_drv_atom_lock,IDC_ATLOCK_L1_L5_GPS);
    return KAL_TRUE;
#else
    return KAL_FALSE;
#endif
}
#if defined(__MD93__)
// Periodic scheduler maintenance (MD93 variant), driven by EL1C with the
// current phy time: retires events whose offset has passed, re-arms events
// parked in the pending buffer, and handles the CONSYS RX-flood workaround.
//
// Parameters:
//   time - current phy time from EL1C; only the low 20 bits are used.
// Locking: KAL_ITC_IDC inter-core lock for the whole body;
// IDC_ATLOCK_PWR_UPDATE guards the sleep/suspend state checks.
// NOTE(review): idc_ctrl_enter(IDC_CTRL_LOCK) below has no matching active
// idc_ctrl_leave() in this function (the leave near the end is commented
// out) -- confirm the lock is a no-op here or released elsewhere.
void drv_idc_schedule_update(kal_uint32 time)
{
kal_uint32 i = 0, j = 0;
kal_uint32 evt_pend_cnt = 0;
kal_uint32 bitmap32 = 0, scheduler_status = 0, expired_evt_status = 0, expired_evt_flag = 0;
#if !defined(__MAUI_BASIC__)
kal_uint32 tx_cnt = 0;
#endif
kal_hrt_take_itc_lock(KAL_ITC_IDC, KAL_INFINITE_WAIT);
before_poll_time_U = ust_get_current_time();
//********protect critical section*******
// Get phy_time from EL1C & fetch the first 20bits
idc_port.phy_time = time & 0x000FFFFF;
// Skip the update entirely if the power-update atomic lock is contended
if(!(_idc_atomic_try_lock(&idc_drv_atom_lock,IDC_ATLOCK_PWR_UPDATE)))
{
after_poll_time_U = ust_get_current_time();
kal_hrt_give_itc_lock(KAL_ITC_IDC);
return;
}
//directly unlock atomic&return and do nothing if schedule_update after slp_ntf
if(idc_port.main_state == IDC_IN_SLEEP)
{
_idc_atomic_unlock(&idc_drv_atom_lock,IDC_ATLOCK_PWR_UPDATE);
after_poll_time_U = ust_get_current_time();
kal_hrt_give_itc_lock(KAL_ITC_IDC);
return;
}
idc_ctrl_enter(IDC_CTRL_LOCK);
idc_in_workaround = 0;
idc_in_eventpending = 0;
// Workaround for CONSYS send too many msg: after ~10 update ticks in
// suspend, flush the RX FIFO and re-enable the RX interrupt
#if !defined(ATEST_DRV_ENABLE)
if (idc_rx_suspend == KAL_TRUE)
{
idc_in_workaround = 1;
if (idc_port.main_state == IDC_SUSPEND)
{
if (++idc_rx_suspend_timer >= 10)
{
idc_rx_suspend = KAL_FALSE;
// Clear RX FIFO and enalbe RX interrupt
IRQMask(MD_IRQID_IDC_UART_IRQ);
DRV_WriteReg32_NPW(IDC_UART_FCR, IDC_UART_FCR_RXTRIG | IDC_UART_FCR_CLRR | IDC_UART_FCR_FIFOEN);
DRV_WriteReg32(IDC_UART_IER, IDC_UART_IER_ERBFI);
// Change idc_port state
idc_port.main_state = IDC_IN_USE;
IRQUnmask(MD_IRQID_IDC_UART_IRQ);
}
}
else
{
idc_rx_suspend = KAL_FALSE;
}
}
#endif
_idc_atomic_unlock(&idc_drv_atom_lock,IDC_ATLOCK_PWR_UPDATE);
// Stop uncompleted events: retire every tracked event whose offset is now
// in the past (within 10ms behind phy_time, wrap-aware)
if(idc_port.event_usage_bit_map != 0x0)
{
for(i = 0; i<16; i++)
{
if ((1 << i) & idc_port.event_usage_bit_map)
{
if(TIME_DIFF_WITHIN(idc_port.phy_time, idc_port.event_offset_table[i], 614400))
/*if (((idc_port.phy_time > idc_port.event_offset_table[i])
&& ((idc_port.phy_time - idc_port.event_offset_table[i]) <= 614400))
|| (idc_port.phy_time < idc_port.event_offset_table[i])
&& ((idc_port.event_offset_table[i] - idc_port.phy_time) > 614400))*/
{
/*wrap_case = 0;
if ((idc_port.phy_time < idc_port.event_offset_table[i])&&((idc_port.event_offset_table[i] - idc_port.phy_time) <= 614400))
wrap_case = 1;
MD_TRC(IDC_STOP_AND_FLUSH_EVENT_MSG,idc_port.event_data_table[i], idc_port.event_offset_table[i], idc_port.phy_time, current_phy_time, wrap_case);*/
idc_port.event_cnt--;
idc_port.event_usage_bit_map &= ~(0x1 << i);
idc_port.event_data_table[i] = 0;
idc_port.event_offset_table[i] = 0;
}
}
}
// Expand every now-unused event slot into its 2-bit status field mask
for(j = 0; j < 16; j++)
{
if((~(idc_port.event_usage_bit_map) >> j) & 0x1)
bitmap32 = (bitmap32 | (0x3 << (j*2)));
}
scheduler_status = DRV_Reg32(IDC_CTRL_SCH_STATUS);
// Any retired slot still reporting busy in HW means an event expired
// inside the scheduler; flag it for the trace after unlock
expired_evt_status = bitmap32 & scheduler_status;
if(expired_evt_status)
{
#ifdef ATEST_DRV_ENABLE
DT_IDC_PRINTF("drv_idc: [Warning] Some events are expired in scheduler & stopped. Status = %x \n\r", expired_evt_status);
while(1);
#endif
expired_evt_flag = 1;
}
drv_idc_stop_event(~(idc_port.event_usage_bit_map));
}
if(idc_port.event_usage_bit_map == 0)
idc_port.schedule_state = IDC_PLAN;
// Check if there are pending events in event buffer and re-arm them into
// the freed HW slots; leftovers are compacted back to the buffer front
if(idc_port.event_pending_cnt > 0)
{
idc_in_eventpending = 1;
j = 0;
evt_pend_cnt = idc_port.event_pending_cnt;
idc_port.schedule_state = IDC_RUN;
for(i = 0; i< evt_pend_cnt; i++)
{
// Start the pending events
if(idc_port.event_usage_bit_map != 0xFFFF) // SRAM isn't full
{
// Find empty event slot (round-robin over the 16 slots)
while (1)
{
if ((1 << idc_port.event_w_index) & idc_port.event_usage_bit_map)
{
idc_port.event_w_index++;
if (idc_port.event_w_index == 16) idc_port.event_w_index = 0;
}
else
{
break;
}
}
// Set event data
DRV_WriteReg32(IDC_CTRL_EVT_DATA(idc_port.event_w_index), idc_port.event_pending_data_table[i]);
// Set time stamps para (bit31 = arm/trigger)
DRV_WriteReg32(IDC_CTRL_EVENT_SETETING(idc_port.event_w_index), idc_port.event_pending_offset_table[i] + (1 << 31));
// Record event_offset & event_data in the table
idc_port.event_offset_table[idc_port.event_w_index] = idc_port.event_pending_offset_table[i];
idc_port.event_data_table[idc_port.event_w_index] = idc_port.event_pending_data_table[i];
// Update usage bitmap for the scheduler
idc_port.event_usage_bit_map |= (1 << idc_port.event_w_index);
// Update idc_port.event_longest_time (farthest-in-future event)
if(TIME_DIFF_WITHIN(idc_port.event_offset_table[idc_port.event_w_index], idc_port.event_longest_time, 614400))
/*if(((idc_port.event_offset_table[idc_port.event_w_index] > idc_port.event_longest_time)
&& ((idc_port.event_offset_table[idc_port.event_w_index] - idc_port.event_longest_time) <= 614400))
|| ((idc_port.event_offset_table[idc_port.event_w_index] < idc_port.event_longest_time)
&& (idc_port.event_longest_time - idc_port.event_offset_table[idc_port.event_w_index] > 614400)))*/
{
idc_port.event_longest_index = idc_port.event_w_index;
idc_port.event_longest_time = idc_port.event_offset_table[idc_port.event_w_index];
}
idc_port.event_w_index++;
if (idc_port.event_w_index == 16) idc_port.event_w_index = 0;
idc_port.event_pending_cnt--;
}
// SRAM is full, check pending event offset sanity check
else
{
if(TIME_DIFF_WITHIN(idc_port.event_pending_offset_table[i], idc_port.phy_time, 61440))
/*if(((idc_port.event_pending_offset_table[i] > idc_port.phy_time)
&& (idc_port.event_pending_offset_table[i] - idc_port.phy_time <= 61440))
|| ((idc_port.event_pending_offset_table[i] < idc_port.phy_time)
&& (((0xFFFFF - idc_port.phy_time) + idc_port.event_pending_offset_table[i]) <= 61440)))*/
IDC_ASSERT(0); // Set over 16 events in 1ms
//re-arrange event buffer: compact unscheduled entries to the front
idc_port.event_pending_data_table[j] = idc_port.event_pending_data_table[i];
idc_port.event_pending_offset_table[j++] = idc_port.event_pending_offset_table[i];
}
}
}
//idc_ctrl_leave(IDC_CTRL_LOCK);
//********protect critical section*******
after_poll_time_U = ust_get_current_time();
kal_hrt_give_itc_lock(KAL_ITC_IDC);
#if !defined(__MAUI_BASIC__)
//get tx count
tx_cnt = DRV_Reg32(IDC_CTRL_DATA_CNT) & 0xFFFF;
//print tx_count log
if(idc_port.schedule_state == IDC_RUN)
MD_TRC(IDC_TX_COUNT_MSG,tx_cnt);
#endif
if((idc_in_workaround == 1) && (idc_rx_suspend == KAL_FALSE))
MD_TRC(IDC_RX_RESUME_MSG,);
if(expired_evt_flag == 1)
MD_TRC(IDC_EVENTS_STILL_BUSY_MSG,expired_evt_status, scheduler_status);
return;
}
#elif defined(__MD95__)
// Periodic scheduler maintenance (MD95 variant): retires events whose
// offset has passed and releases their SRAM, plus the CONSYS RX-flood
// workaround. Unlike the MD93 variant there is no pending-event replay.
//
// Parameters:
//   time - current phy time from EL1C; only the low 20 bits are used.
// Locking: KAL_ITC_IDC inter-core lock for the whole body;
// IDC_ATLOCK_PWR_UPDATE guards the sleep/suspend state checks.
// NOTE(review): idc_ctrl_enter(IDC_CTRL_LOCK) below has no matching active
// idc_ctrl_leave() in this function (the leave near the end is commented
// out) -- confirm the lock is a no-op here or released elsewhere.
void drv_idc_schedule_update_95(kal_uint32 time)
{
kal_uint32 i = 0, j = 0;
kal_uint32 bitmap32 = 0, scheduler_status = 0, expired_evt_status = 0, expired_evt_flag = 0;
#if !defined(__MAUI_BASIC__)
kal_uint32 tx_cnt = 0;
#endif
kal_hrt_take_itc_lock(KAL_ITC_IDC, KAL_INFINITE_WAIT);
before_poll_time_U = ust_get_current_time();
//********protect critical section*******
// Get phy_time from EL1C & fetch the first 20bits
idc_port.phy_time = time & 0x000FFFFF;
// Log PHY_TIME information
//MD_TRC(IDC_SCHEDULE_UPDATE_MSG, idc_port.phy_time);
// Skip the update entirely if the power-update atomic lock is contended
if(!(_idc_atomic_try_lock(&idc_drv_atom_lock,IDC_ATLOCK_PWR_UPDATE)))
{
after_poll_time_U = ust_get_current_time();
kal_hrt_give_itc_lock(KAL_ITC_IDC);
return;
}
//directly unlock atomic&return and do nothing if schedule_update after slp_ntf
if(idc_port.main_state == IDC_IN_SLEEP)
{
_idc_atomic_unlock(&idc_drv_atom_lock,IDC_ATLOCK_PWR_UPDATE);
after_poll_time_U = ust_get_current_time();
kal_hrt_give_itc_lock(KAL_ITC_IDC);
return;
}
idc_ctrl_enter(IDC_CTRL_LOCK);
idc_in_workaround = 0;
// Workaround for CONSYS send too many msg: after ~10 update ticks in
// suspend, flush the RX FIFO and re-enable the RX interrupt
#if !defined(ATEST_DRV_ENABLE)
if (idc_rx_suspend == KAL_TRUE)
{
if (idc_port.main_state == IDC_SUSPEND)
{
if (++idc_rx_suspend_timer >= 10)
{
idc_rx_suspend = KAL_FALSE;
// Clear RX FIFO and enalbe RX interrupt
IRQMask(MD_IRQID_IDC_UART_IRQ);
DRV_WriteReg32_NPW(IDC_UART_FCR, IDC_UART_FCR_RXTRIG | IDC_UART_FCR_CLRR | IDC_UART_FCR_FIFOEN);
DRV_WriteReg32(IDC_UART_IER, IDC_UART_IER_ERBFI);
// Change idc_port state
idc_port.main_state = IDC_IN_USE;
IRQUnmask(MD_IRQID_IDC_UART_IRQ);
}
}
else
{
idc_rx_suspend = KAL_FALSE;
}
}
#endif
_idc_atomic_unlock(&idc_drv_atom_lock,IDC_ATLOCK_PWR_UPDATE);
// Stop uncompleted events: retire every tracked event whose offset is now
// in the past (within 10ms behind phy_time, wrap-aware)
if(idc_port.event_usage_bit_map != 0x0)
{
for(i = 0; i < 16; i++)
{
if ((1 << i) & idc_port.event_usage_bit_map)
{
if(TIME_DIFF_WITHIN(idc_port.phy_time, idc_port.event_offset_table[i], 614400))
/* if (((idc_port.phy_time > idc_port.event_offset_table[i])
&& ((idc_port.phy_time - idc_port.event_offset_table[i]) <= 614400))
|| (idc_port.phy_time < idc_port.event_offset_table[i])
&& ((idc_port.event_offset_table[i] - idc_port.phy_time) > 614400))*/
{
//clear all event state and release this event's SRAM bytes
idc_port.event_cnt--;
idc_port.event_usage_bit_map &= ~(0x1 << i);
idc_port.event_offset_table[i] = 0;
for(j = 0; j < 9; j++)
{
idc_port.event_data_table[i][j] = 0;
}
for(j = 0; j < idc_port.event_byte_num[i]; j++)
{
idc_port.sram_table_usage[idc_port.event_sram_sta_idx[i] + j] = 0;
}
}
}
}
// Expand every now-unused event slot into its 2-bit status field mask
for(j = 0; j < 16; j++)
{
if((~(idc_port.event_usage_bit_map) >> j) & 0x1)
bitmap32 = (bitmap32 | (0x3 << (j*2)));
}
scheduler_status = DRV_Reg32(IDC_CTRL_SCH_STATUS);
// Any retired slot still reporting busy in HW means an event expired
// inside the scheduler; flag it for the trace after unlock
expired_evt_status = bitmap32 & scheduler_status;
if(expired_evt_status)
{
#ifdef ATEST_DRV_ENABLE
DT_IDC_PRINTF("drv_idc: [Warning] Some events are expired in scheduler & stopped. Status = %x \n\r", expired_evt_status);
while(1);
#endif
expired_evt_flag = 1;
//MD_TRC(IDC_EVENTS_STILL_BUSY_MSG, expired_evt_status);
}
drv_idc_stop_event(~(idc_port.event_usage_bit_map));
}
if(idc_port.event_usage_bit_map == 0)
idc_port.schedule_state = IDC_PLAN;
//idc_ctrl_leave(IDC_CTRL_LOCK);
//********protect critical section*******
after_poll_time_U = ust_get_current_time();
kal_hrt_give_itc_lock(KAL_ITC_IDC);
#if !defined(__MAUI_BASIC__)
//get tx count
tx_cnt = DRV_Reg32(IDC_CTRL_DATA_CNT) & 0xFFFF;
//print tx_count log
if(idc_port.schedule_state == IDC_RUN)
MD_TRC(IDC_TX_COUNT_MSG,tx_cnt);
#endif
if((idc_in_workaround == 1) && (idc_rx_suspend == KAL_FALSE))
MD_TRC(IDC_RX_RESUME_MSG,);
if(expired_evt_flag == 1)
MD_TRC(IDC_EVENTS_STILL_BUSY_MSG,expired_evt_status, scheduler_status);
return;
}
#elif defined(__MD97__) || defined(__MD97P__)
// Periodic scheduler maintenance (MD97/MD97P variant): retires events whose
// FRC offset has passed and releases their SRAM. Uses 30-bit FRC time,
// IDC_MAX_EVENT_NUM slots with slot 0 reserved (usage bitmap idles at 0x1),
// and two HW status registers covering events 0-15 and 16-31.
//
// Parameters:
//   time - current FRC from EL1C; only the low 30 bits are used.
// Locking: KAL_ITC_IDC inter-core lock only (IDC_CTRL_LOCK removed for the
// LTE/NR RAT flow, see commented calls below).
void drv_idc_schedule_update_97(kal_uint32 time)
{
kal_uint32 i = 0, j = 0;
kal_uint32 bitmap32 = 0, bitmap32_2 = 0, scheduler_status = 0, scheduler_status_2 = 0, expired_evt_status = 0, expired_evt_status_2 = 0, expired_evt_flag = 0;
#if !defined(__MAUI_BASIC__)
kal_uint32 tx_cnt = 0;
#endif
kal_hrt_take_itc_lock(KAL_ITC_IDC, KAL_INFINITE_WAIT);
before_poll_time_U = ust_get_current_time();
//********protect critical section*******
// Get FRC from EL1C & fetch the first 30bits
idc_port.frc_time = time & 0x3FFFFFFF;
// Log FRC_TIME information
MD_TRC(IDC_SCHEDULE_UPDATE_MSG, idc_port.frc_time);
//directly unlock atomic&return and do nothing if schedule_update after slp_ntf
if(idc_port.main_state == IDC_IN_SLEEP)
{
after_poll_time_U = ust_get_current_time();
kal_hrt_give_itc_lock(KAL_ITC_IDC);
return;
}
//idc_ctrl_enter(IDC_CTRL_LOCK);//remove because LTE/NR RAT flow
// Stop uncompleted events: retire every tracked event whose offset is now
// in the past (within 10000 ticks behind frc_time, wrap-aware).
// Slot 0 is reserved, hence the != 0x1 idle check and i starting at 1.
if(idc_port.event_usage_bit_map != 0x1)
{
for(i = 1; i < IDC_MAX_EVENT_NUM; i++)
{
if ((1 << i) & idc_port.event_usage_bit_map)
{
if(TIME_DIFF_WITHIN(idc_port.frc_time, idc_port.event_offset_table[i], 10000))
{
//clear all event state and release this event's SRAM bytes
idc_port.event_cnt--;
idc_port.event_usage_bit_map &= ~(0x1 << i);
idc_port.event_offset_table[i] = 0;
for(j = 0; j < 9; j++)
{
idc_port.event_data_table[i][j] = 0;
}
for(j = 0; j < idc_port.event_byte_num[i]; j++)
{
idc_port.sram_table_usage[idc_port.event_sram_sta_idx[i] + j] = 0;
}
}
}
}
// Expand every now-unused slot into its 2-bit status field mask,
// split across the two status registers (0-15 / 16-31)
for(j = 1; j < IDC_MAX_EVENT_NUM; j++)
{
if((~(idc_port.event_usage_bit_map) >> j) & 0x1){
if(j < 16)
bitmap32 = (bitmap32 | (0x3 << (j*2)));
else
bitmap32_2 = (bitmap32_2 | (0x3 << ((j-16)*2)));
}
}
scheduler_status = DRV_Reg32(IDC_CTRL_SCH_STATUS);
scheduler_status_2 = DRV_Reg32(IDC_CTRL_SCH_STATUS2);
// Any retired slot still reporting busy in HW means an event expired
// inside the scheduler; flag it for the trace after unlock
expired_evt_status = bitmap32 & scheduler_status;
expired_evt_status_2 = bitmap32_2 & scheduler_status_2;
if(expired_evt_status | expired_evt_status_2)
{
#ifdef ATEST_DRV_ENABLE
DT_IDC_PRINTF("drv_idc: [Warning] Some events are expired in scheduler & stopped. Status = %x \n\r", expired_evt_status);
while(1);
#endif
expired_evt_flag = 1;
}
// NOTE(review): this calls drv_idc_stop_event(), whose status mask only
// covers 16 events -- see the note on that function; confirm whether
// drv_idc_stop_event_97() was intended here.
drv_idc_stop_event(~(idc_port.event_usage_bit_map));
}
if(idc_port.event_usage_bit_map == 0x1)
idc_port.schedule_state = IDC_PLAN;
//idc_ctrl_leave(IDC_CTRL_LOCK);
//********protect critical section*******
after_poll_time_U = ust_get_current_time();
kal_hrt_give_itc_lock(KAL_ITC_IDC);
#if !defined(__MAUI_BASIC__)
//get tx count
tx_cnt = DRV_Reg32(IDC_CTRL_DATA_CNT) & 0xFFFF;
//print tx_count log
if(idc_port.schedule_state == IDC_RUN)
MD_TRC(IDC_TX_COUNT_MSG, tx_cnt);
#endif
if(expired_evt_flag == 1)
MD_TRC(IDC_EVENTS_STILL_BUSY_2_MSG, expired_evt_status, scheduler_status, expired_evt_status_2, scheduler_status_2);
return;
}
#endif
// Request the HW scheduler to stop the events selected in 'bitmap', then
// busy-wait until each selected event's 2-bit status field reads idle.
// NOTE(review): bitmap32 is a single 32-bit mask built with (0x3 << (j*2)),
// which only represents events 0-15; if IDC_MAX_EVENT_NUM is 32 (as the
// _97 variant below implies), iterations with j >= 16 shift by 32 or more
// bits (undefined behavior in C, CERT INT34-C) and events 16-31 are never
// polled here -- confirm this function is only reached on 16-event builds.
void drv_idc_stop_event(kal_uint32 bitmap)
{
kal_uint32 bitmap32 = 0 , j = 0;
// Issue the stop request and read it back for debug visibility
DRV_WriteReg32_NPW(IDC_CTRL_SCH_STOP, bitmap);
stop_status_check = DRV_Reg32(IDC_CTRL_SCH_STOP);
// Expand each selected event bit into its 2-bit status field position
for(j = 0; j < IDC_MAX_EVENT_NUM; j++)
{
if((bitmap >> j) & 0x1)
bitmap32 = (bitmap32 | (0x3 << (j*2)));
}
// Poll until all selected status fields report stopped
POLL_STATUS(DRV_Reg32(IDC_CTRL_SCH_STATUS) & bitmap32);
return;
}
/* MD97(+) variant of drv_idc_stop_event(): supports up to 32 events by
 * splitting the 2-bit-per-event status across two registers -- events
 * 0..15 in IDC_CTRL_SCH_STATUS, events 16..31 in IDC_CTRL_SCH_STATUS2.
 * The intermediate bitmaps and the read-back of the stop register are
 * kept in globals for post-mortem debugging. */
void drv_idc_stop_event_97(kal_uint32 bitmap)
{
    kal_uint32 bitmap32_1 = 0 , bitmap32_2 = 0, j = 0;
    stop_event_bitmap_debug = bitmap;
    /* Request HW to stop the selected events; MO_Sync() orders the
     * write before the read-back below. */
    DRV_WriteReg32(IDC_CTRL_SCH_STOP, bitmap);
    MO_Sync();
    stop_status_check = DRV_Reg32(IDC_CTRL_SCH_STOP);
    /* Expand 1 bit per event into 2-bit-per-event masks, split at 16. */
    for(j = 0; j < IDC_MAX_EVENT_NUM; j++)
    {
        if(j < 16){
            if((bitmap >> j) & 0x1)
                bitmap32_1 = (bitmap32_1 | (0x3 << (j*2)));
        }
        else{
            if((bitmap >> j) & 0x1)
                bitmap32_2 = (bitmap32_2 | (0x3 << ((j-16)*2)));
        }
    }
    stop_event_bitmap32_0_15_debug = bitmap32_1;
    stop_event_bitmap32_16_31_debug = bitmap32_2;
    /* Poll both status registers until all selected events are idle. */
    POLL_STATUS(DRV_Reg32(IDC_CTRL_SCH_STATUS) & bitmap32_1);
    POLL_STATUS(DRV_Reg32(IDC_CTRL_SCH_STATUS2) & bitmap32_2);
    return;
}
/* Program the IDC remapping table value, then its enable bits.
 * The enable register is written with a non-posted write (NPW) so the
 * write has completed before this function returns. */
void drv_idc_set_remapping_config(kal_uint8 remapping_table, kal_uint8 remapping_table_en)
{
    DRV_WriteReg32(IDC_REMAPPING_CFG, remapping_table);
    DRV_WriteReg32_NPW(IDC_REMAPPING_EN, remapping_table_en);
    return;
}
/* Force-trigger the RF path(s) selected by 'rf_path' via the
 * IDC_FORCE_TRIGGER_RF register, and trace the request. */
void drv_idc_force_on_rf(kal_uint8 rf_path)
{
    DRV_WriteReg8(IDC_FORCE_TRIGGER_RF, rf_path);
    MD_TRC(IDC_FORCE_ON_TX_MSG, rf_path);
    return;
}
/* Purge the IDC UART FIFO selected by 'dir'.
 * Intentionally a no-op: the FIFO-clear writes were removed because of a
 * HW limitation (the RX FIFO cannot be cleared simultaneously); the old
 * implementation is kept below for reference. 'dir' is currently unused. */
void drv_idc_purge(UART_buffer dir)
{
    //removed because HW limitation can't CLR RXFIFO simultaneously
    /* if(dir == TX_BUF)
    DRV_WriteReg32(IDC_UART_FCR, IDC_UART_FCR_RXTRIG | IDC_UART_FCR_CLRT | IDC_UART_FCR_FIFOEN);
    else
    DRV_WriteReg32(IDC_UART_FCR, IDC_UART_FCR_RXTRIG | IDC_UART_FCR_CLRR | IDC_UART_FCR_FIFOEN);*/
}
/* Read the scheduler status register for events 0..15 and (in ATEST
 * builds) trace it.
 * NOTE(review): 'schedule_status' is passed BY VALUE, so the value read
 * here never reaches the caller -- as written this function is
 * trace-only. Callers that need the status must read
 * IDC_CTRL_SCH_STATUS themselves (see drv_idc_check_event_send_out). */
void drv_idc_get_schedule_status(kal_uint32 schedule_status)
{
    schedule_status = DRV_Reg32(IDC_CTRL_SCH_STATUS);
#ifdef ATEST_DRV_ENABLE
    MD_TRC(IDC_SCHEDULE_STATUS_MSG,schedule_status);
#endif
    return;
}
/* Read the scheduler status register for events 16..31 and (in ATEST
 * builds) trace it.
 * NOTE(review): same by-value issue as drv_idc_get_schedule_status() --
 * the read value never reaches the caller; trace-only as written. */
void drv_idc_get_schedule_status_2(kal_uint32 schedule_status_2)
{
    schedule_status_2 = DRV_Reg32(IDC_CTRL_SCH_STATUS2);
#ifdef ATEST_DRV_ENABLE
    MD_TRC(IDC_SCHEDULE_STATUS_2_MSG, schedule_status_2);
#endif
    return;
}
kal_bool drv_idc_check_event_send_out(void)
{
kal_uint32 schedule_status = 0;
// check that schedule is busy or not
drv_idc_get_schedule_status(schedule_status);
if(schedule_status) return KAL_FALSE;
drv_idc_get_schedule_status_2(schedule_status);
if(schedule_status) return KAL_FALSE;
// check that IDC TX FIFO has data or not
if(DRV_Reg32(IDC_UART_LSR) & IDC_UART_LSR_TEMT) return KAL_TRUE;
return KAL_FALSE;
}
/* Route the IDC (PTA) UART to internal or external pins.
 * Idempotent via SET_PIN_FLAG: a second call returns STATUS_OK without
 * touching any register. Per-chip GPIO mode registers are programmed
 * under #if for each platform; the UART IRQ is unmasked on exit.
 * NOTE(review): return_value is only set to STATUS_OK on some chips
 * (e.g. internal-pin MT3967, external-pin MT6763/MT6739/...) -- on the
 * others the function returns STATUS_FAIL even after programming the
 * registers. Looks inconsistent; confirm intended per-chip behavior. */
DCL_STATUS drv_idc_set_pin_config(IDC_PIN_MODE_T pin_mode)
{
    DCL_STATUS return_value = STATUS_FAIL;
    /* Pin mode may only be configured once per boot. */
    if(SET_PIN_FLAG == KAL_TRUE){
        MD_TRC(IDC_ALREADY_SET_PIN_MSG,);
        return_value = STATUS_OK;
        return return_value;
    }
    else{
        SET_PIN_FLAG = KAL_TRUE;
        MM_Sync();
    }
    if (pin_mode == IDC_INTERNAL_PIN)
    {
#if !defined(ATEST_DRV_ENABLE)
        MD_TRC(IDC_INTERNAL_PIN_MSG,);
#else
        kal_sprintf(idc_dbg_str, "drv_idc: Switch to internal pins\n\r");
        DT_IDC_PRINTF(idc_dbg_str);
#endif
        idc_port.pin_mode = IDC_INTERNAL_PIN;
        // Switch to internal pins, use NPW
#if defined(MT6763)
        DRV_WriteReg32_NPW(0xC0005000 + 0x6C0, DRV_Reg32(0xC0005000+ 0x6C0) & ~(0x10));
#elif defined(MT6739)|| defined(MT6765)||defined(MT6761)
        DRV_WriteReg32_NPW(0xC0005000 + 0x600, DRV_Reg32(0xC0005000+ 0x600) & ~(0x02));
#elif defined(MT6771)
        DRV_WriteReg32_NPW(0xC0005000 + 0x6C0, DRV_Reg32(0xC0005000+ 0x6C0) & ~(0x10));
#elif defined(MT3967)
        DRV_WriteReg32_NPW(0xC0005000 + 0x6C0, DRV_Reg32(0xC0005000+ 0x6C0) & ~(0x10));
        return_value = STATUS_OK;
#endif
    }
    else if (pin_mode == IDC_EXTERNAL_PIN)
    {
#if !defined(ATEST_DRV_ENABLE)
        MD_TRC(IDC_EXTERNAL_PIN_MSG,);
#else
        kal_sprintf(idc_dbg_str, "drv_idc: Switch to external pins\n\r");
        DT_IDC_PRINTF(idc_dbg_str);
#endif
        idc_port.pin_mode = IDC_EXTERNAL_PIN;
        // Switch to external pins, use NPW; then select the PTA function
        // on the chip-specific GPIO mode register (clear field, then set).
#if defined(MT6763)
        DRV_WriteReg32_NPW(0xC0005000 + 0x6C0, DRV_Reg32(0xC0005000+ 0x6C0) | 0x10);
        //GPIO 111 => PTA_RX, GPIO110 => PTA_TX
        DRV_WriteReg32(0xC0005000 + 0x3D0, DRV_Reg32(0xC0005000 + 0x3D0) & ~(0xFF000000));
        DRV_WriteReg32(0xC0005000 + 0x3D0, DRV_Reg32(0xC0005000 + 0x3D0) | 0x22000000);
        return_value = STATUS_OK;
#elif defined(MT6739)
        DRV_WriteReg32_NPW(0xC0005000 + 0x600, DRV_Reg32(0xC0005000+ 0x600) | 0x02);
        //GPIO 1 => PTA_RX, GPIO 2 => PTA_TX
        DRV_WriteReg32(0xC0005000 + 0x300, DRV_Reg32(0xC0005000 + 0x300) & ~(0x0FF0));
        DRV_WriteReg32(0xC0005000 + 0x300, DRV_Reg32(0xC0005000 + 0x300) | 0x0220);
        return_value = STATUS_OK;
#elif defined(MT6765)||defined(MT6761)
        DRV_WriteReg32_NPW(0xC0005000 + 0x600, DRV_Reg32(0xC0005000+ 0x600) | 0x02);
        //GPIO 105 => PTA_RX, GPIO 106 => PTA_TX
        DRV_WriteReg32(0xC0005000 + 0x3D0, DRV_Reg32(0xC0005000 + 0x3D0) & ~(0x0FF0));
        DRV_WriteReg32(0xC0005000 + 0x3D0, DRV_Reg32(0xC0005000 + 0x3D0) | 0x0660);
        return_value = STATUS_OK;
#elif defined(MT6771)
        DRV_WriteReg32_NPW(0xC0005000 + 0x6C0, DRV_Reg32(0xC0005000+ 0x6C0) | 0x10);
        //GPIO 90 => PTA_RX, GPIO91 => PTA_TX
        DRV_WriteReg32(0xC0005000 + 0x3B0, DRV_Reg32(0xC0005000 + 0x3B0) & ~(0xFF00));
        DRV_WriteReg32(0xC0005000 + 0x3B0, DRV_Reg32(0xC0005000 + 0x3B0) | 0x6600);
#elif defined(MT3967)
        DRV_WriteReg32_NPW(0xC0005000 + 0x6C0, DRV_Reg32(0xC0005000 + 0x6C0) | 0x10);
        //GPIO 45 => PTA_RX, GPIO 46 => PTA_TX
        DRV_WriteReg32(0xC0005000 + 0x350, DRV_Reg32(0xC0005000 + 0x350) & ~(0xFF00000));
        DRV_WriteReg32(0xC0005000 + 0x350, DRV_Reg32(0xC0005000 + 0x350) | 0x6600000);
#elif defined(MT6885)
        // Switch LTE IDC UART to PAD -- register writes disabled on this
        // chip; only the status is reported.
        //DRV_WriteReg32(0xC0005000 + 0x600, DRV_Reg(0xC0005000 + 0x600) | 0x20);
        //GPIO 43 => PTA_RX, GPIO 44 => PTA_TX
        //DRV_WriteReg32(0xC0005000 + 0x350, DRV_Reg(0xC0005000 + 0x350) & ~(0xFF000));
        //DRV_WriteReg32(0xC0005000 + 0x350, DRV_Reg(0xC0005000 + 0x350) | 0x55000);
        //GPIO 158 => PTA_RX, GPIO 159 => PTA_TX
        //DRV_WriteReg32(0xC0005000 + 0x430, DRV_Reg32(0xC0005000 + 0x430) & ~(0xFF000000));
        //DRV_WriteReg32(0xC0005000 + 0x430, DRV_Reg32(0xC0005000 + 0x430) | 0x44000000);
        return_value = STATUS_OK;
#elif defined(MT6833)
        //unmodified
        //DRV_WriteReg32(0xC0005000 + 0x600, DRV_Reg(0xC0005000 + 0x600) | 0x20);
        //GPIO 27 => PTA_RX, GPIO 28 => PTA_TX
        //DRV_WriteReg32(0xC0005000 + 0x338, 0x77<<12);
        //DRV_WriteReg32(0xC0005000 + 0x334, 0x44<<12);
        return_value = STATUS_OK;
#elif defined(MT6893)
        //unmodified
        //DRV_WriteReg32(0xC0005000 + 0x600, DRV_Reg(0xC0005000 + 0x600) | 0x20);
        //GPIO 27 => PTA_RX, GPIO 28 => PTA_TX
        //DRV_WriteReg32(0xC0005000 + 0x338, 0x77<<12);
        //DRV_WriteReg32(0xC0005000 + 0x334, 0x44<<12);
        return_value = STATUS_OK;
#elif defined(MT6877)
        //unmodified
        //DRV_WriteReg32(0xC0005000 + 0x600, DRV_Reg(0xC0005000 + 0x600) | 0x20);
        //GPIO 27 => PTA_RX, GPIO 28 => PTA_TX
        //DRV_WriteReg32(0xC0005000 + 0x338, 0x77<<12);
        //DRV_WriteReg32(0xC0005000 + 0x334, 0x44<<12);
        return_value = STATUS_OK;
#endif
    }
    // Clear TX/RX FIFO , removed because HW limitation can't CLR RXFIFO simultaneously
    //DRV_WriteReg32_NPW(IDC_UART_FCR, IDC_UART_FCR_RXTRIG | IDC_UART_FCR_FIFOINI);
    IRQUnmask(MD_IRQID_IDC_UART_IRQ);
    return return_value;
}
/* Report the currently configured IDC pin mode.
 * On supported chips *pin_mode is filled from the driver state and
 * STATUS_OK is returned; on other chips *pin_mode is left untouched
 * and STATUS_FAIL is returned. */
DCL_STATUS drv_idc_get_pin_config(IDC_PIN_MODE_T *pin_mode)
{
#if defined(MT6763) || defined(MT6739)|| defined(MT6765) || defined(MT6771)||defined(MT6761)||defined(MT6893)||defined(MT6833)||defined(MT6877)
    *pin_mode = idc_port.pin_mode;
    return STATUS_OK;
#else
    return STATUS_FAIL;
#endif
}
/* Enable or disable forwarding of received IDC commands to EL1 via ILM.
 *   drv_idc_set_ilm(KAL_TRUE)  -> enable
 *   drv_idc_set_ilm(KAL_FALSE) -> disable
 * The choice is recorded in the global 'idc_ilm_on' and traced. */
void drv_idc_set_ilm(kal_bool ilm_mode)
{
    idc_ilm_on = ilm_mode;
    if (ilm_mode == KAL_TRUE)
    {
        MD_TRC(IDC_ILM_ENABLE_MSG,);
    }
    else
    {
        MD_TRC(IDC_ILM_DISABLE_MSG,);
    }
}
kal_uint32 idc_isr_count = 0; /* debug counter; only referenced by commented-out ISR trace prints */
/* LISR (low-level ISR) for the IDC UART interrupt.
 * Snapshots IER/IIR/LSR into globals for debugging, masks all UART
 * interrupts, then tries to take the driver atomic lock:
 *  - lock not taken (power flow owns it): handle only the TX-empty case
 *    (release the sleep lock, re-enable RX interrupt if a RAT is active)
 *    and return;
 *  - lock taken: filter out TX-empty, sleeping-state and invalid
 *    interrupts here; anything else is deferred to idc_uart_hisr().
 * 'vector' is the IRQ vector number (unused in the body). */
void idc_uart_lisr(kal_uint32 vector)
{
    kal_uint32 mask;
    /* Snapshot interrupt-related registers for debugging/tracing. */
    IER_L = DRV_Reg(IDC_UART_IER) & IDC_UART_IER_INT_MASK;
    IIR_L = DRV_Reg(IDC_UART_IIR) & IDC_UART_IIR_INT_MASK;
    LSR_L = DRV_Reg(IDC_UART_LSR);
    // Turn off all INT
    DRV_WriteReg32_NPW(IDC_UART_IER, IDC_UART_IER_ALLOFF);
    mask = kal_hrt_SaveAndSetIRQMask();
    if(!(_idc_atomic_try_lock(&idc_drv_atom_lock,IDC_ATLOCK_PWR_LISR)))
    {
        /* Lock held elsewhere: only service TX-empty, then bail out. */
        if(IIR_L & IDC_UART_IIR_THR_EMPTY)
        {
#ifndef ATEST_DRV_ENABLE
            SleepDrv_UnlockSleep(SLEEP_CTL_IDC, SMP);
#endif
            if((LTE_FLAG == KAL_TRUE) || (NR_FLAG == KAL_TRUE))
                DRV_WriteReg32_NPW(IDC_UART_IER, IDC_UART_IER_ERBFI);// Enable RX interrupt
        }
        kal_hrt_RestoreIRQMask(mask);
        return;
    }
    IRQMask(MD_IRQID_IDC_UART_IRQ);
    if (IIR_L & IDC_UART_IIR_THR_EMPTY)
    {
        /* TX FIFO drained: allow sleep again and re-arm RX. */
#ifndef ATEST_DRV_ENABLE
        SleepDrv_UnlockSleep(SLEEP_CTL_IDC, SMP);
#endif
        if((LTE_FLAG == KAL_TRUE) || (NR_FLAG == KAL_TRUE))
            DRV_WriteReg32_NPW(IDC_UART_IER, IDC_UART_IER_ERBFI);// Enable RX interrupt
        IRQUnmask(MD_IRQID_IDC_UART_IRQ);
        _idc_atomic_unlock(&idc_drv_atom_lock,IDC_ATLOCK_PWR_LISR);
        kal_hrt_RestoreIRQMask(mask);
        return;
    }
    else if (idc_port.main_state == IDC_IN_SLEEP)
    {
        /* Port is sleeping: drop the interrupt without activating HISR. */
        IRQUnmask(MD_IRQID_IDC_UART_IRQ);
        _idc_atomic_unlock(&idc_drv_atom_lock,IDC_ATLOCK_PWR_LISR);
        kal_hrt_RestoreIRQMask(mask);
        return;
    }
    else if (IIR_L & IDC_UART_IIR_INT_INVALID)
    {
        /* Spurious interrupt: restore the IER snapshot and return. */
        // Restore enable interrupt
        DRV_WriteReg32_NPW(IDC_UART_IER, IER_L);
        IRQUnmask(MD_IRQID_IDC_UART_IRQ);
        _idc_atomic_unlock(&idc_drv_atom_lock,IDC_ATLOCK_PWR_LISR);
        kal_hrt_RestoreIRQMask(mask);
        return;
    }
    /* Genuine RX work: count it (20-entry ring) and defer to the HISR.
     * Note: IRQ remains masked here; idc_uart_hisr() unmasks it. */
    idc_lisr_count++;
    if (idc_lisr_count == 20)
    {
        idc_lisr_count = 0;
    }
    _idc_atomic_unlock(&idc_drv_atom_lock,IDC_ATLOCK_PWR_LISR);
    kal_hrt_RestoreIRQMask(mask);
    kal_activate_hisr_index(IDC_UART_HISR);
    return;
}
/* HISR (deferred ISR) for the IDC UART: drains the RX FIFO through the
 * platform-specific parser (idc_send_rx_data_by_ilm[_95]), logs a debug
 * dump if the RBR was never read, re-enables the RX interrupt, and
 * finally -- outside the lock/IRQ-masked section -- forwards any parsed
 * command(s) to EL1 as ILM messages. */
void idc_uart_hisr(void)
{
    kal_uint32 mask;
#if defined(__MD93__)
    kal_uint32 i = 0;
#endif
    idc_in_hisr = KAL_TRUE;
    /* Snapshot IIR/LSR as seen by the HISR (the LISR kept its own copies). */
    IIR_H = DRV_Reg(IDC_UART_IIR) & IDC_UART_IIR_INT_MASK;
    LSR_H = DRV_Reg(IDC_UART_LSR);
#if !defined(__MAUI_BASIC__)
    kal_uint8 r_offset, w_offset, op_rx_req, fcr_rd, scr;
    kal_uint32 clk1, clk2;
    kal_uint16 RXTRIG;
    RXTRIG = DRV_Reg(IDC_UART_RXTRIG);
#endif
#if defined(ATEST_DRV_ENABLE)
    // kal_sprintf(idc_dbg_str, "drv_idc: HISR %d, IIR = %x\n\r", ++idc_isr_count, IIR);
    // DT_IDC_PRINTF(idc_dbg_str);
#endif
    /* Record HISR activation times in a 20-entry ring for debugging. */
    idc_hisr_time[idc_hisr_count] = ust_get_current_time();
    idc_hisr_count++;
    if (idc_hisr_count == 20)
    {
        idc_hisr_count = 0;
    }
    mask = kal_hrt_SaveAndSetIRQMask();
    before_poll_time_UART_HISR = ust_get_current_time();
    if(!(_idc_atomic_try_lock(&idc_drv_atom_lock,IDC_ATLOCK_PWR_HISR)))
    {
        /* Power flow owns the lock: skip this round. */
        after_poll_time_UART_HISR = ust_get_current_time();
        kal_hrt_RestoreIRQMask(mask);
        return;
    }
    if(idc_port.main_state == IDC_IN_SLEEP)
    {
        /* Port went to sleep between LISR and HISR: nothing to do. */
        _idc_atomic_unlock(&idc_drv_atom_lock,IDC_ATLOCK_PWR_HISR);
        after_poll_time_UART_HISR = ust_get_current_time();
        kal_hrt_RestoreIRQMask(mask);
        return;
    }
    /* Drain and parse the RX FIFO (platform-specific format). */
#if defined(__MD93__)
    idc_send_rx_data_by_ilm();
#elif defined(__MD95__) || defined(__MD97__) || defined(__MD97P__)
    idc_send_rx_data_by_ilm_95();
#endif
    if (KAL_FALSE == idc_read_RBR)
    {
        /* HISR fired but no byte was read from the RBR: dump debug state
         * and detect the known "DR issue" signature (data ready but the
         * LSR says the FIFO is empty). */
#if !defined(ATEST_DRV_ENABLE)
        MD_TRC(IDC_RX_LISR_MSG,IIR_L, IER_L, LSR_L);
#if !defined(__MAUI_BASIC__)
        fcr_rd = DRV_Reg8(IDC_UART_FCR_RD);
        op_rx_req = DRV_Reg8(IDC_OP_RX_REQ);
        r_offset = DRV_Reg8(IDC_RX_ROFFSET);
        w_offset = DRV_Reg8(IDC_RX_WOFFSET);
        clk1 = DRV_Reg32(0xB0820018);
        clk2 = DRV_Reg32(0xB0820050);
        DRV_WriteReg32_NPW(IDC_UART_SCR, 0x5A);
        scr = DRV_Reg8(IDC_UART_SCR);
        MD_TRC(IDC_RX_HISR_MSG,IIR_H, LSR_H, RXTRIG, idc_port.main_state, idc_port.owner_id, idc_port.intr_en);
        MD_TRC(IDC_BEFORE_CLR_RX_FIFO_MSG, fcr_rd, op_rx_req, r_offset, w_offset, clk1, clk2, scr);
        if((IIR_L == 0x4) && (LSR_L == 0x60) && (r_offset != w_offset)){
            MD_TRC(IDC_DR_ISSUE_HIT_MSG);
            DR_ISSUE_FLAG = KAL_TRUE;
        }
#endif
#else
#if !defined(__MAUI_BASIC__)
        kal_sprintf(idc_dbg_str, "drv_idc: HISR without Read Data, IIR = %x, LSR = %x, RXTRIG = %x, (%d, %d, %d)\n\r",
        IIR_H, LSR_H, RXTRIG, idc_port.main_state, idc_port.owner_id, idc_port.intr_en);
        DT_IDC_PRINTF(idc_dbg_str);
#endif
#endif
        /* Assert after 10 consecutive failed recoveries of the DR issue. */
        if(DR_ISSUE_FLAG == KAL_TRUE){
            DR_ISSUE_FAIL_CNT++;
            if(DR_ISSUE_FAIL_CNT >= 10){
                MD_TRC(IDC_DR_ISSUE_RECOVER_FAIL_MSG);
                ASSERT(0);
            }
        }
    }
    /* Re-arm: the LISR left the IRQ masked for the deferred path. */
    IRQUnmask(MD_IRQID_IDC_UART_IRQ);
    // Enable RX interrupt
    DRV_WriteReg32_NPW(IDC_UART_IER, IDC_UART_IER_ERBFI);
    _idc_atomic_unlock(&idc_drv_atom_lock,IDC_ATLOCK_PWR_HISR);
    after_poll_time_UART_HISR = ust_get_current_time();
    kal_hrt_RestoreIRQMask(mask);
    idc_read_RBR = KAL_FALSE;
    idc_in_hisr = KAL_FALSE;
    MM_Sync();
    /* Deliver parsed commands to EL1 outside the critical section. */
#if defined(__MD93__)
    if(idc_ilm_on == KAL_TRUE)
    {
        i = 0;
        while(ilm_num > 0)
        {
            ilm_num--;
            msg_send_inline6(MOD_IDC_UART_HISR, idc_port.owner_id, DRIVER_PS_SAP | INLINE_ILM_FLAG_SAP, MSG_ID_IDC_RX_DATA, (void *) &ilm_buf[i++], sizeof(IDC_ILM_MSG_T));
        }
    }
    else
        ilm_num = 0;
#elif defined(__MD95__) || defined(__MD97__) || defined(__MD97P__)
    if(idc_ilm_trigger == KAL_TRUE)
    {
        if(idc_ilm_on == KAL_TRUE)
        {
            msg_send_inline6(MOD_IDC_UART_HISR, idc_port.owner_id, DRIVER_PS_SAP | INLINE_ILM_FLAG_SAP, MSG_ID_IDC_RX_DATA, (void *) &ilm, sizeof(IDC_ILM_MSG_T));
            //DT_IDC_PRINTF("ILM actually send done\n\r");
        }
        idc_ilm_trigger = KAL_FALSE;
        MM_Sync();
    }
#endif
    return;
}
#if defined(__MD93__)
/* MD93 RX parser: drains up to 8 bytes from the IDC UART RX FIFO,
 * accumulates them into idc_port.rx_buf (16 bits per byte, tagged with a
 * validity bit at bit 8), and when a complete valid two-byte command is
 * assembled either counts it (802.11 RX/TX types 0 and 1) or queues it
 * in ilm_buf[] for idc_uart_hisr() to forward to EL1.
 * Also implements an RX flood guard: if 20 bytes arrive within 100 us,
 * RX is shut off and the port enters IDC_SUSPEND. */
void idc_send_rx_data_by_ilm(void)
{
    kal_uint32 max_rx_count = 8; /* drain at most 8 bytes per HISR round */
#if !defined(__MAUI_BASIC__)
    kal_uint32 count = 0;
#endif
    /* Queue should be empty on entry; trace if a stale entry remains. */
    if(ilm_num !=0)
        MD_TRC(IDC_ILMNUM_ABNORMAL_MSG,__LINE__);
    while(DRV_Reg(IDC_UART_LSR) & IDC_UART_LSR_DR)
    {
        max_rx_count--;
        // read bytes from IDC UART FIFO to SW buffer
        /* Shift previous byte up 16 bits; bit 8 marks "byte present". */
        idc_port.rx_buf = (idc_port.rx_buf << 16) | (1 << 8) | DRV_Reg8(IDC_UART_RBR);
        idc_read_RBR = KAL_TRUE;
        /* 20-entry RX history ring (value + timestamp) for debugging. */
        idc_rx_history[idc_rx_count] = (kal_uint8)(idc_port.rx_buf & 0xFF);
        idc_rx_history_time[idc_rx_count] = ust_get_current_time();
        /*#if !defined(ATEST_DRV_ENABLE)
        MD_TRC(IDC_RX_MSG,idc_port.rx_buf & 0xFF);
        #endif*/
#if defined(ATEST_DRV_ENABLE)
        // DT_IDC_PRINTF("drv_idc: receive %x\n\r", idc_port.rx_buf & 0xFF);
#endif
        idc_rx_count++;
        if (idc_rx_count == 20)
        {
            idc_rx_count = 0;
#if !defined(ATEST_DRV_ENABLE)
            MD_TRC(IDC_RX_HISTORY_MSG,
            idc_rx_history[0], idc_rx_history[1], idc_rx_history[2], idc_rx_history[3], idc_rx_history[4],
            idc_rx_history[5], idc_rx_history[6], idc_rx_history[7], idc_rx_history[8], idc_rx_history[9],
            idc_rx_history[10], idc_rx_history[11], idc_rx_history[12], idc_rx_history[13], idc_rx_history[14],
            idc_rx_history[15], idc_rx_history[16], idc_rx_history[17], idc_rx_history[18], idc_rx_history[19]
            );
#else
            //kal_sprintf(idc_dbg_str, "drv_idc: Receive %x %x %x %x %x %x %x %x %x %x %x %x %x %x %x %x %x %x %x %x\n\r",
            // idc_rx_history[0], idc_rx_history[1], idc_rx_history[2], idc_rx_history[3], idc_rx_history[4],
            // idc_rx_history[5], idc_rx_history[6], idc_rx_history[7], idc_rx_history[8], idc_rx_history[9],
            // idc_rx_history[10], idc_rx_history[11], idc_rx_history[12], idc_rx_history[13], idc_rx_history[14],
            // idc_rx_history[15], idc_rx_history[16], idc_rx_history[17], idc_rx_history[18], idc_rx_history[19]
            // );
            DT_IDC_PRINTF(idc_dbg_str);
#endif
#if !defined(ATEST_DRV_ENABLE)
            // Check if the IDC commands receive too frequently
            if ((idc_rx_history_time[19] - idc_rx_history_time[0]) < 100)
            {
                idc_rx_suspend = KAL_TRUE;
                // Reset timer and start to wait 10 ms (EL1 will call event_start for each 1 ms)
                idc_rx_suspend_timer = 0;
                //IRQMask(MD_IRQID_IDC_UART_IRQ);
                // Disable RX interrupt
                DRV_WriteReg32(IDC_UART_IER, IDC_UART_IER_ALLOFF);
                // Check writing register is finished
                while (DRV_Reg32(IDC_UART_IER) != IDC_UART_IER_ALLOFF)
                {
#if !defined(__MAUI_BASIC__)
                    MD_TRC(IDC_WAIT_IER_OFF_MSG,count++);
#endif
                }
                // Clean RX FIFO
                DRV_WriteReg32_NPW(IDC_UART_FCR, IDC_UART_FCR_RXTRIG | IDC_UART_FCR_FIFOINI);
                MD_TRC(IDC_CLEAN_RXFIFO_MSG,__FUNCTION__);
                // Change idc_port state
                idc_port.main_state = IDC_SUSPEND;
                // Use DSB to ensure that the interrupt is disabled before leaving HISR
                MM_Sync();
                // Print warining message
                MD_TRC(IDC_RX_SUSPEND_MSG,);
                // Call CONSYS RX_OFF callback for EL1C request
                if (idc_port.pm_cb_handle[1].callback_func)
                    idc_port.pm_cb_handle[1].callback_func(idc_port.pm_cb_handle[1].private_data);
                return;
            }
#endif
        }
        // if there are two-byte data in fifo and data is valid, send them by ilm
        /* Valid pattern: older byte has LSB 1 (first of pair), newer byte
         * has LSB 0, and both carry the bit-8 "present" tag. */
        if((idc_port.rx_buf & 0x01010101) == 0x01000101)
        {
            IDC_ILM_MSG_T tmp;
            tmp.type = (idc_port.rx_buf & 0x001E0000) >> 17;
            tmp.msg = ((idc_port.rx_buf & 0x00E00000) >> 21) | ((idc_port.rx_buf & 0x000000FE) << 2);
#if defined(ATEST_DRV_ENABLE)
            DT_IDC_PRINTF("%x %x\n\r", tmp.type, tmp.msg);
#endif
            if (tmp.type == 0)
            {
                /* 802.11 RX indication: counted only, no ILM. */
                if (KAL_TRUE == idc_count_start)
                {
                    idc_80211_rx_count++;
                    MD_TRC(IDC_RX_80211_RX_MSG,tmp.type, tmp.msg);
                }
            }
            else if (tmp.type == 1)
            {
                /* 802.11 TX indication: counted only, no ILM. */
                if (KAL_TRUE == idc_count_start)
                {
                    idc_80211_tx_count++;
                    MD_TRC(IDC_RX_80211_TX_MSG,tmp.type, tmp.msg);
                }
            }
            else
            {
                /* All other types are queued for EL1 (sent from HISR tail). */
                if(idc_ilm_on == KAL_TRUE)
                {
#if !defined(ATEST_DRV_ENABLE)
                    MD_TRC(IDC_RX_SEND_ILM_MSG,idc_port.rx_buf, tmp.type, tmp.msg);
#else
                    kal_sprintf(idc_dbg_str, "drv_idc: MSG Send to EL1: %x, type:%x, msg:%x\n\r", idc_port.rx_buf, tmp.type, tmp.msg);
                    DT_IDC_PRINTF(idc_dbg_str);
#endif
                    ilm_buf[ilm_num++] = tmp;
                    if(ilm_num > 4)
                        MD_TRC(IDC_ILMNUM_ABNORMAL_MSG,__LINE__);
                    //msg_send_inline6(MOD_IDC_UART_HISR, idc_port.owner_id, DRIVER_PS_SAP | INLINE_ILM_FLAG_SAP, MSG_ID_IDC_RX_DATA, (void *) &tmp, sizeof(IDC_ILM_MSG_T));
                }
                else
                {
#if !defined(ATEST_DRV_ENABLE)
                    MD_TRC(IDC_RX_NOT_SEND_ILM_MSG,idc_port.rx_buf, tmp.type, tmp.msg);
#else
                    //kal_sprintf(idc_dbg_str, "drv_idc: MSG Not Send to EL1: %x, type:%x, msg:%x\n\r", idc_port.rx_buf, tmp.type, tmp.msg);
                    DT_IDC_PRINTF(idc_dbg_str);
#endif
                }
            }
            idc_port.rx_buf = 0;
        }
        //rx_buf receive 2 abnormal byte
        else if((idc_port.rx_buf & 0x01010101) == 0x01000100)
        {
            MD_TRC(IDC_RX_ABNORMAL_MSG,idc_port.rx_buf);
        }
        else if((idc_port.rx_buf & 0x01010101) == 0x01010101)
        {
            MD_TRC(IDC_RX_ABNORMAL_MSG,idc_port.rx_buf);
        }
        if (max_rx_count == 0)
            return;
    }
    return;
}
#elif defined(__MD95__) || defined(__MD97__) || defined(__MD97P__)
/* MD95/MD97(+) RX parser: drains the IDC UART RX FIFO one byte at a
 * time and runs a byte-stream state machine over two command formats:
 *  - "old" 2-byte commands: header byte (LSB 0) with type/msg bits,
 *    then one payload byte with LSB 1;
 *  - "new" variable-length commands: header byte matching xxx11110b
 *    carrying type and ELEN; payload bytes alternate LSB-01 / LSB-11
 *    markers, packed 6 bits at a time into ilm.msg1/msg2.
 * State lives in the globals new_cmd_flag / old_cmd_flag /
 * idc_cmd_byte_count / ilm; a completed command sets ilm_stage and is
 * dispatched at the bottom of the loop: types 0/1 and (0xF, subtype 1)
 * are counted locally, everything else sets idc_ilm_trigger so that
 * idc_uart_hisr() forwards 'ilm' to EL1 (at most one ILM per call --
 * the function returns as soon as idc_ilm_trigger is set). */
void idc_send_rx_data_by_ilm_95(void)
{
    //kal_uint32 count = 0;
    kal_uint32 msg_tmp = 0;
    kal_uint8 rx_data = 0, rx_count_log = 0;
    while(DRV_Reg(IDC_UART_LSR) & IDC_UART_LSR_DR)
    {
        // read bytes from IDC UART FIFO to SW buffer
        idc_port.rx_buf = DRV_Reg8(IDC_UART_RBR);
        rx_data = (kal_uint8)(idc_port.rx_buf & 0xFF);
        //idc_port.rx_buf = (idc_port.rx_buf << 16) | (1 << 8) | DRV_Reg8(IDC_UART_RBR);
        idc_read_RBR = KAL_TRUE;
        /* 20-entry RX history ring (value + timestamp) for debugging. */
        idc_rx_history[idc_rx_count] = (kal_uint8)(idc_port.rx_buf & 0xFF);
        idc_rx_history_time[idc_rx_count] = ust_get_current_time();
#if defined(ATEST_DRV_ENABLE)
        //DT_IDC_PRINTF("drv_idc: receive %x\n\r", idc_port.rx_buf & 0xFF);
#endif
        idc_rx_count++;
        if (idc_rx_count == 20)
        {
            idc_rx_count = 0;
#if !defined(ATEST_DRV_ENABLE)
            MD_TRC(IDC_RX_HISTORY_MSG,
            idc_rx_history[0], idc_rx_history[1], idc_rx_history[2], idc_rx_history[3], idc_rx_history[4],
            idc_rx_history[5], idc_rx_history[6], idc_rx_history[7], idc_rx_history[8], idc_rx_history[9],
            idc_rx_history[10], idc_rx_history[11], idc_rx_history[12], idc_rx_history[13], idc_rx_history[14],
            idc_rx_history[15], idc_rx_history[16], idc_rx_history[17], idc_rx_history[18], idc_rx_history[19]
            );
#else
            //kal_sprintf(idc_dbg_str, "drv_idc: Receive %x %x %x %x %x %x %x %x %x %x %x %x %x %x %x %x %x %x %x %x\n\r",
            // idc_rx_history[0], idc_rx_history[1], idc_rx_history[2], idc_rx_history[3], idc_rx_history[4],
            // idc_rx_history[5], idc_rx_history[6], idc_rx_history[7], idc_rx_history[8], idc_rx_history[9],
            // idc_rx_history[10], idc_rx_history[11], idc_rx_history[12], idc_rx_history[13], idc_rx_history[14],
            // idc_rx_history[15], idc_rx_history[16], idc_rx_history[17], idc_rx_history[18], idc_rx_history[19]
            // );
            //DT_IDC_PRINTF(idc_dbg_str);
#endif
        }
        rx_count_log = idc_rx_count;
        if(new_cmd_flag)
        {
            /* Mid-parse of a new-format command. */
            if(idc_cmd_byte_count == 0)
            {
                /* Inconsistent state: flag set but no bytes counted. */
                if(rx_count_log == 0)
                    rx_count_log = 20;
                MD_TRC(IDC_NEW_CMD_ERROR_MSG, idc_new_cmd_error_cnt, (idc_cmd_byte_count), rx_data, (rx_count_log-1));
                MD_TRC(IDC_RX_HISTORY_MSG,
                idc_rx_history[0], idc_rx_history[1], idc_rx_history[2], idc_rx_history[3], idc_rx_history[4],
                idc_rx_history[5], idc_rx_history[6], idc_rx_history[7], idc_rx_history[8], idc_rx_history[9],
                idc_rx_history[10], idc_rx_history[11], idc_rx_history[12], idc_rx_history[13], idc_rx_history[14],
                idc_rx_history[15], idc_rx_history[16], idc_rx_history[17], idc_rx_history[18], idc_rx_history[19]
                );
            }
            else if(idc_cmd_byte_count == 1)
            {
                /* Second byte: must carry the 01b marker; holds sub_type. */
                if((rx_data & 0x3) == 0x1)
                {
                    ilm.sub_type = (rx_data & 0xFC) >> 2;
                    idc_cmd_byte_count ++;
                    if(idc_cmd_byte_count == (ilm.elen + 2))
                    {
                        ilm_stage = KAL_TRUE;
                        //DT_IDC_PRINTF("new cmd sent to ilm\n\r");
                    }
                }
                else
                {
                    /* Marker mismatch: abort the command and log. */
                    new_cmd_flag = KAL_FALSE;
                    kal_mem_set(&ilm, 0, sizeof(IDC_ILM_MSG_T));
                    //log error info. for debug
                    idc_new_cmd_error_cnt++;
                    if(rx_count_log == 0)
                        rx_count_log = 20;
                    MD_TRC(IDC_NEW_CMD_ERROR_MSG, idc_new_cmd_error_cnt, (idc_cmd_byte_count+1), rx_data, (rx_count_log-1));
                    MD_TRC(IDC_RX_HISTORY_MSG,
                    idc_rx_history[0], idc_rx_history[1], idc_rx_history[2], idc_rx_history[3], idc_rx_history[4],
                    idc_rx_history[5], idc_rx_history[6], idc_rx_history[7], idc_rx_history[8], idc_rx_history[9],
                    idc_rx_history[10], idc_rx_history[11], idc_rx_history[12], idc_rx_history[13], idc_rx_history[14],
                    idc_rx_history[15], idc_rx_history[16], idc_rx_history[17], idc_rx_history[18], idc_rx_history[19]
                    );
                    //DT_IDC_PRINTF("new cmd format error, count: %d, rx_data : %x\n\r", count , idc_port.rx_buf);
                }
            }
            else
            {
                //byte is even number
                /* Odd byte index: expects the 01b marker; 6 payload bits. */
                if((idc_cmd_byte_count % 2) == 1)
                {
                    if((rx_data & 0x3) == 0x1)
                    {
                        if(idc_cmd_byte_count != 7)
                        {
                            msg_tmp = (rx_data & 0xFC) >> 2;
                            ilm.msg1 = ilm.msg1 | (msg_tmp << (6 * (idc_cmd_byte_count - 2)));
                        }
                        else
                        {
                            /* Byte 7 straddles msg1/msg2: 2 bits to msg1,
                             * upper nibble starts msg2. */
                            msg_tmp = (rx_data & 0xC) >> 2;
                            ilm.msg2 = (rx_data & 0xF0) >> 4;
                            ilm.msg1 = ilm.msg1 | (msg_tmp << (6 * (idc_cmd_byte_count - 2)));
                        }
                        idc_cmd_byte_count ++;
                        if(idc_cmd_byte_count == (ilm.elen + 2))
                        {
                            ilm_stage = KAL_TRUE;
                            //DT_IDC_PRINTF("new cmd sent to ilm\n\r");
                        }
                    }
                    else
                    {
                        new_cmd_flag = KAL_FALSE;
                        kal_mem_set(&ilm, 0, sizeof(IDC_ILM_MSG_T));
                        //log error info. for debug
                        idc_new_cmd_error_cnt++;
                        if(rx_count_log == 0)
                            rx_count_log = 20;
                        MD_TRC(IDC_NEW_CMD_ERROR_MSG, idc_new_cmd_error_cnt, (idc_cmd_byte_count+1), rx_data, (rx_count_log-1));
                        MD_TRC(IDC_RX_HISTORY_MSG,
                        idc_rx_history[0], idc_rx_history[1], idc_rx_history[2], idc_rx_history[3], idc_rx_history[4],
                        idc_rx_history[5], idc_rx_history[6], idc_rx_history[7], idc_rx_history[8], idc_rx_history[9],
                        idc_rx_history[10], idc_rx_history[11], idc_rx_history[12], idc_rx_history[13], idc_rx_history[14],
                        idc_rx_history[15], idc_rx_history[16], idc_rx_history[17], idc_rx_history[18], idc_rx_history[19]
                        );
                        //DT_IDC_PRINTF("new cmd format error, count: %d, rx_data : %x\n\r", count , idc_port.rx_buf);
                    }
                }
                //byte is odd number
                /* Even byte index: expects the 11b marker; 6 payload bits. */
                else
                {
                    if((rx_data & 0x3) == 0x3)
                    {
                        if(idc_cmd_byte_count != 8)
                        {
                            msg_tmp = (rx_data & 0xFC) >> 2;
                            ilm.msg1 = ilm.msg1 | (msg_tmp << (6 * (idc_cmd_byte_count - 2)));
                        }
                        else
                        {
                            msg_tmp = (rx_data & 0xFC) >> 2;
                            ilm.msg2 = ilm.msg2 | (msg_tmp << 4);
                        }
                        idc_cmd_byte_count ++;
                        if(idc_cmd_byte_count ==(ilm.elen + 2))
                        {
                            ilm_stage = KAL_TRUE;
                            //DT_IDC_PRINTF("new cmd sent to ilm\n\r");
                        }
                    }
                    else
                    {
                        new_cmd_flag = KAL_FALSE;
                        kal_mem_set(&ilm, 0, sizeof(IDC_ILM_MSG_T));
                        //log error info. for debug
                        idc_new_cmd_error_cnt++;
                        if(rx_count_log == 0)
                            rx_count_log = 20;
                        MD_TRC(IDC_NEW_CMD_ERROR_MSG, idc_new_cmd_error_cnt, (idc_cmd_byte_count+1), rx_data, (rx_count_log-1));
                        MD_TRC(IDC_RX_HISTORY_MSG,
                        idc_rx_history[0], idc_rx_history[1], idc_rx_history[2], idc_rx_history[3], idc_rx_history[4],
                        idc_rx_history[5], idc_rx_history[6], idc_rx_history[7], idc_rx_history[8], idc_rx_history[9],
                        idc_rx_history[10], idc_rx_history[11], idc_rx_history[12], idc_rx_history[13], idc_rx_history[14],
                        idc_rx_history[15], idc_rx_history[16], idc_rx_history[17], idc_rx_history[18], idc_rx_history[19]
                        );
                        //DT_IDC_PRINTF("new cmd format error, count: %d, rx_data : %x\n\r", count , idc_port.rx_buf);
                    }
                }
            }
        }
        else if(old_cmd_flag)
        {
            /* Expecting the second byte of an old-format command. */
            if((rx_data & 0x1) == 0x1)
            {
                msg_tmp = (rx_data & 0xFE) >> 1;
                ilm.msg1 = ilm.msg1 | (msg_tmp << 3);
                old_cmd_flag = KAL_FALSE;
                ilm_stage = KAL_TRUE;
                //DT_IDC_PRINTF("old cmd sent to ilm: msg = %x\n\r", ilm.msg1);
            }
            else if((rx_data & 0x1) == 0x0)
            {
                /* A fresh header byte interrupts the old command. */
                // if there is new format of IDC_CMD
                if((rx_data & 0x1F) == 0x1E)
                {
                    ilm.type = (rx_data & 0x1E) >> 1;
                    ilm.elen = (rx_data & 0xE0) >> 5;
                    new_cmd_flag = KAL_TRUE;
                    old_cmd_flag = KAL_FALSE;
                    idc_cmd_byte_count = 1;
                    //DT_IDC_PRINTF("new cmd : type = %x, ELEN = %d\n\r", ilm.type, ilm.elen);
                }
                else
                {
                    ilm.type = (rx_data & 0x1E) >> 1;
                    ilm.msg1 = (rx_data & 0xE0) >> 5;
                    old_cmd_flag = KAL_TRUE;
                    //DT_IDC_PRINTF("old cmd : type = %x\n\r", ilm.type);
                }
            }
            else
                old_cmd_flag = KAL_FALSE;
        }
        else
        {
            /* Idle state: look for the start of a command. */
            // if there is new format of IDC_CMD
            if((rx_data & 0x1F) == 0x1E)
            {
                kal_mem_set(&ilm, 0, sizeof(IDC_ILM_MSG_T));
                ilm.type = (rx_data & 0x1E) >> 1;
                ilm.elen = (rx_data & 0xE0) >> 5;
                new_cmd_flag = KAL_TRUE;
                idc_cmd_byte_count = 1;
                //DT_IDC_PRINTF("new cmd : type = %x, ELEN = %d\n\r", ilm.type, ilm.elen);
            }
            else if((rx_data & 0x1) == 0x0)
            {
                kal_mem_set(&ilm, 0, sizeof(IDC_ILM_MSG_T));
                ilm.type = (rx_data & 0x1E) >> 1;
                ilm.msg1 = (rx_data & 0xE0) >> 5;
                old_cmd_flag = KAL_TRUE;
                //DT_IDC_PRINTF("old cmd : type = %x\n\r", ilm.type);
            }
        }
        if(ilm_stage)
        {
            /* A complete command was assembled: dispatch it. */
            MD_TRC(IDC_RX_FIFO_DATA_CNT_MSG, DRV_Reg8(IDC_RX_WOFFSET));
            if (ilm.type == 0)
            {
                /* 802.11 RX indication: counted only, no ILM. */
                idc_80211_rx_count++;
                MD_TRC(IDC_RX_80211_RX_MSG,ilm.type, ilm.msg1);
                //DT_IDC_PRINTF("80211_rx cmd , no ILM\n\r");
            }
            else if (ilm.type == 1)
            {
                /* 802.11 TX indication: counted only, no ILM. */
                idc_80211_tx_count++;
                MD_TRC(IDC_RX_80211_TX_MSG,ilm.type, ilm.msg1);
                //DT_IDC_PRINTF("80211_tx cmd , no ILM\n\r");
            }
            else if ((ilm.type == 0xF) && (ilm.sub_type == 1))
            {
                /* CONNSYS TX grant notification: counted only, no ILM. */
                idc_consys_tx_grant_ntf++;
                MD_TRC(IDC_CONSYS_TX_GRANT_NTF_MSG,ilm.type, ilm.sub_type, ilm.msg1);
                //DT_IDC_PRINTF("CONNSYS_TX_GRANT_NTF cmd , no ILM\n\r");
            }
            else
            {
                /* Mark for delivery to EL1 from idc_uart_hisr(). */
                idc_ilm_trigger = KAL_TRUE;
                if(idc_ilm_on == KAL_TRUE)
                {
#if !defined(ATEST_DRV_ENABLE)
                    MD_TRC(IDC_RX_95_SEND_ILM_MSG, rx_data, ilm.type, ilm.sub_type, ilm.msg1, ilm.msg2, (ilm.elen+2));
#else
                    //kal_sprintf(idc_dbg_str, "drv_idc: MSG Send to EL1: %x, type:%x, msg:%x, msg2:%x\n\r", idc_port.rx_buf, ilm.type, ilm.msg1, ilm.msg2);
                    //DT_IDC_PRINTF(idc_dbg_str);
#endif
                    //msg_send_inline6(MOD_IDC_UART_HISR, idc_port.owner_id, DRIVER_PS_SAP | INLINE_ILM_FLAG_SAP, MSG_ID_IDC_RX_DATA, (void *) &ilm, sizeof(IDC_ILM_MSG_T));
                }
                else
                {
#if !defined(ATEST_DRV_ENABLE)
                    MD_TRC(IDC_RX_95_NOT_SEND_ILM_MSG, rx_data, ilm.type, ilm.sub_type, ilm.msg1, ilm.msg2, (ilm.elen+2));
#else
                    kal_sprintf(idc_dbg_str, "drv_idc: MSG Not Send to EL1: %x, type:%x, msg1:%x, msg2:%x\n\r", idc_port.rx_buf, ilm.type, ilm.msg1, ilm.msg2);
                    DT_IDC_PRINTF(idc_dbg_str);
#endif
                }
            }
            /* Reset parser state for the next command. */
            idc_port.rx_buf = 0;
            rx_data = 0;
            new_cmd_flag = KAL_FALSE;
            old_cmd_flag = KAL_FALSE;
            ilm_stage = KAL_FALSE;
            idc_cmd_byte_count = 0;
            IDC_CMD_SUCCESS_CNT[IDC_CMD_SUCCESS_CNT_IDX]++;
        }
        if(idc_ilm_trigger == KAL_TRUE)
        {
            /* At most one pending ILM: stop draining until it is sent. */
            MM_Sync();
            return;
        }
    }
    MM_Sync();
    return;
}
#endif
/* LISR for the IDC pattern-matcher (PM) interrupt.
 * MD95/MD97(+): handled entirely in LISR context -- dispatch the old PM
 * status bits 0..3 to the registered EL1 callbacks, handle the new PM
 * status (bit 4) with combined CC/RF-path status, trace error statuses,
 * then write-1-clear everything and unmask the IRQ.
 * Other platforms: defer all handling to idc_pm_hisr().
 * 'vector' is the IRQ vector number (unused in the body). */
void idc_pm_lisr(kal_uint32 vector)
{
    IRQMask(MD_IRQID_IDC_PM_INT);
#if defined(__MD95__) || defined(__MD97__) || defined(__MD97P__)
    kal_uint32 i, pm_status;
    kal_uint16 status = 0, cc_status, rf_path_status;
    pm_status = DRV_Reg32(IDC_PM_STATUS);
    //old PM status
    for (i = 0; i < 4; ++i)
    {
        if (pm_status & (1 << i))
        {
            // Callback to EL1
            if (idc_port.pm_cb_handle[i].callback_func)
                idc_port.pm_cb_handle[i].callback_func(idc_port.pm_cb_handle[i].private_data);
        }
    }
#if defined(__MD95__)
    //new PM status
    if (pm_status & (1 << 4))
    {
        /* Pack CC status (low byte) and RF path status (high byte) for
         * the single new-PM callback slot [4]. */
        cc_status = DRV_Reg8(IDC_CC_STATUS);
        rf_path_status = DRV_Reg8(IDC_NEW_PM_DEBUG);
        status = cc_status | (rf_path_status << 8);
        // Callback to EL1
        if (idc_port.pm_cb_handle[4].callback_func)
        {
            idc_port.pm_cb_handle[4].callback_func(&status);
        }
    }
    MD_TRC(IDC_IDC_PM_LISR_STS_MSG, status);
#if !defined(__MAUI_BASIC__)
    kal_uint8 err_rx_buf;
    //new PM error status (bits 5..7 of PM status)
    for (i = 5; i < 8; ++i)
    {
        err_rx_buf = DRV_Reg8(IDC_NEW_PM_ERR_RX_BUFF);
        if (pm_status & (1 << i))
        {
            MD_TRC(IDC_NEW_PM_ERROR_MSG, pm_status, err_rx_buf);
        }
    }
#endif
#elif defined(__MD97__) || defined(__MD97P__)
    kal_uint16 cc_status_2 = 0;
    //new PM status
    if (pm_status & (1 << 4))
    {
        /* MD97 packs CC status (8b) + CC4 status (4b) + RF path (4b). */
        cc_status = DRV_Reg8(IDC_CC_STATUS);
        cc_status_2 = DRV_Reg8(IDC_CC4_STATUS);
        rf_path_status = DRV_Reg8(IDC_NEW_PM_DEBUG);
        status = cc_status | ((cc_status_2 & 0xF) << 8)| ((rf_path_status & 0xF) << 12);
        // Callback to EL1
        if (idc_port.pm_cb_handle[4].callback_func)
        {
            idc_port.pm_cb_handle[4].callback_func(&status);
        }
    }
    MD_TRC(IDC_IDC_PM_LISR_STS_MSG, status);
#if !defined(MT6297)
#if !defined(__MAUI_BASIC__)
    kal_uint8 err_rx_buf;
    kal_uint8 err_status = 0;
    err_status = DRV_Reg8(IDC_STATUS_1);
    // new PM error status (dedicated error register on these chips)
    for(i = 0; i < IDC_NEW_PM_ERR_NUM; i++)
    {
        err_rx_buf = DRV_Reg8(IDC_NEW_PM_ERR_RX_BUFF);
        if (err_status & (1 << i))
        {
            MD_TRC(IDC_NEW_PM_ERROR_MSG, err_status, err_rx_buf);
        }
    }
#endif
    // Write 1 to clear ERR status
    DRV_WriteReg8(IDC_STATUS_1, 0xff);
    MO_Sync();
#else
#if !defined(__MAUI_BASIC__)
    kal_uint8 err_rx_buf;
    //new PM error status (bits 5..7 of PM status on MT6297)
    for (i = 5; i < 8; ++i)
    {
        err_rx_buf = DRV_Reg8(IDC_NEW_PM_ERR_RX_BUFF);
        if (pm_status & (1 << i))
        {
            MD_TRC(IDC_NEW_PM_ERROR_MSG,pm_status, err_rx_buf);
        }
    }
#endif
#endif
#endif
    // Write 1 to clear all status
    DRV_WriteReg8(IDC_PM_STATUS, 0xff);
    MO_Sync();
    IRQUnmask(MD_IRQID_IDC_PM_INT);
#else
    /* Legacy platforms: defer to HISR (it unmasks the IRQ when done). */
    kal_activate_hisr_index(IDC_PM_HISR);
#endif
    return;
}
/* HISR for the IDC pattern-matcher interrupt on legacy platforms
 * (anything that is not MD95/MD97/MD97P, where idc_pm_lisr() handles
 * everything in LISR context): dispatch PM status bits 0..3 to the
 * registered EL1 callbacks, write-1-clear the status, and unmask the
 * IRQ that idc_pm_lisr() masked. */
void idc_pm_hisr(void)
{
    idc_in_pm_hisr = KAL_TRUE;
#if (!defined(__MD95__)) && (!defined(__MD97__)) && (!defined(__MD97P__))
    kal_uint32 i, pm_status;
    pm_status = DRV_Reg32(IDC_PM_STATUS);
#ifdef ATEST_DRV_ENABLE
    // DT_IDC_PRINTF("drv_idc: PM HISR %d, pm_idx = %x\n\r", ++idc_isr_count, pm_status);
#endif
    //old PM status
    for (i = 0; i < 4; ++i)
    {
        if (pm_status & (1 << i))
        {
            // Callback to EL1
            if (idc_port.pm_cb_handle[i].callback_func)
                idc_port.pm_cb_handle[i].callback_func(idc_port.pm_cb_handle[i].private_data);
        }
    }
    // Write 1 to clear all status
    DRV_WriteReg32_NPW(IDC_PM_STATUS, 0xff);
#endif
    idc_in_pm_hisr = KAL_FALSE;
#ifdef ATEST_DRV_ENABLE
    MM_Sync();
#endif
    IRQUnmask(MD_IRQID_IDC_PM_INT);
    return;
}
/* MD97-style scheduler tick: update the driver's FRC snapshot from
 * EL1C's 'time', stop every scheduled event whose offset falls within
 * 10000 FRC ticks of the new time (clearing its bookkeeping: usage
 * bitmap, offset/data tables, SRAM slots), and return the current RF
 * path status through 'rf_path'. The whole update runs under the
 * KAL_ITC_IDC inter-core lock; if the port is asleep only the FRC and
 * rf_path are touched. Several globals record timing and the stop
 * bitmaps for post-mortem debugging. */
void drv_idc_schedule_update_n_return_rftx(kal_uint32 time, kal_uint8 *rf_path)
{
    kal_uint32 i = 0, j = 0;
    kal_uint32 expired_evt_flag = 0, stop_event_flag = 0;
#if !defined(__MAUI_BASIC__)
    kal_uint32 tx_cnt = 0;
#endif
    //clear stop_event_bitmap
    stop_event_bitmap = 0x0;
    stop_event_bitmap32_0_15 = 0x0;
    stop_event_bitmap32_16_31 = 0x0;
    // Log FRC_TIME information
    //MD_TRC(IDC_SCHEDULE_UPDATE_MSG, (time & 0x3FFFFFFF));
    kal_hrt_take_itc_lock(KAL_ITC_IDC, KAL_INFINITE_WAIT);
    before_poll_time_U = ust_get_current_time();
    //********protect critical section*******
    // Get FRC from EL1C & fetch the first 30bits
    idc_port.frc_time = time & 0x3FFFFFFF;
    //directly unlock atomic&return and do nothing if schedule_update after slp_ntf
    if(idc_port.main_state == IDC_IN_SLEEP)
    {
        after_poll_time_U = ust_get_current_time();
        kal_hrt_give_itc_lock(KAL_ITC_IDC);
        //return rf_path to MD_IDC
        *rf_path = DRV_Reg8(IDC_NEW_PM_DEBUG);
        return;
    }
    //idc_ctrl_enter(IDC_CTRL_LOCK);//remove because LTE/NR RAT flow
    // Stop uncompleted events
    /* Bit 0 is reserved (always set); any other bit means an event is
     * still scheduled. */
    if(idc_port.event_usage_bit_map != 0x1)
    {
        stop_event_flag = 1;
        for(i = 1; i < IDC_MAX_EVENT_NUM; i++)
        {
            if ((1 << i) & idc_port.event_usage_bit_map)
            {
                /* Event offset within 10000 ticks of now (wrap-aware):
                 * treat as uncompleted and tear it down. */
                if(TIME_DIFF_WITHIN(idc_port.frc_time, idc_port.event_offset_table[i], 10000))
                {
                    //clear all event state
                    stop_event_bitmap = (stop_event_bitmap | (0x1 << i));
                    idc_port.event_cnt--;
                    idc_port.event_usage_bit_map &= ~(0x1 << i);
                    idc_port.event_offset_table[i] = 0;
                    for(j = 0; j < 9; j++)
                    {
                        idc_port.event_data_table[i][j] = 0;
                    }
                    /* Release the SRAM slots this event occupied. */
                    for(j = 0; j < idc_port.event_byte_num[i]; j++)
                    {
                        idc_port.sram_table_usage[idc_port.event_sram_sta_idx[i] + j] = 0;
                    }
                }
            }
        }
        /* Expand to the 2-bit-per-event masks used by the status regs. */
        for(j = 1; j < IDC_MAX_EVENT_NUM; j++)
        {
            if((stop_event_bitmap >> j) & 0x1){
                if(j < 16)
                    stop_event_bitmap32_0_15 = (stop_event_bitmap32_0_15 | (0x3 << (j*2)));
                else
                    stop_event_bitmap32_16_31 = (stop_event_bitmap32_16_31 | (0x3 << ((j-16)*2)));
            }
        }
    }
    if(idc_port.event_usage_bit_map == 0x1)
        idc_port.schedule_state = IDC_PLAN;
    if(stop_event_flag == 1){
        /* Detect events that were still busy in HW when we stopped them. */
        event_status_0_15 = DRV_Reg32(IDC_CTRL_SCH_STATUS);
        event_status_16_31 = DRV_Reg32(IDC_CTRL_SCH_STATUS2);
        expire_event_status_0_15 = stop_event_bitmap32_0_15 & event_status_0_15;
        expire_event_status_16_31 = stop_event_bitmap32_16_31 & event_status_16_31;
        if(expire_event_status_0_15 | expire_event_status_16_31)
        {
#ifdef ATEST_DRV_ENABLE
            DT_IDC_PRINTF("drv_idc: [Warning] Some events are expired in scheduler & stopped. Status = %x \n\r", expire_event_status_0_15);
            while(1);
#endif
            expired_evt_flag = 1;
        }
        before_poll_time_STOP_EVENT = ust_get_current_time();
        if(stop_event_bitmap != 0x0)
            drv_idc_stop_event_97(stop_event_bitmap);
        after_poll_time_STOP_EVENT = ust_get_current_time();
    }
    MM_Sync();
    //idc_ctrl_leave(IDC_CTRL_LOCK);
    //********protect critical section*******
    after_poll_time_U = ust_get_current_time();
    kal_hrt_give_itc_lock(KAL_ITC_IDC);
#if !defined(__MAUI_BASIC__)
    //get tx count
    tx_cnt = DRV_Reg32(IDC_CTRL_DATA_CNT) & 0xFFFF;
    //print tx_count log
    if(idc_port.schedule_state == IDC_RUN)
        MD_TRC(IDC_TX_COUNT_MSG, tx_cnt);
#endif
    if(expired_evt_flag == 1)
        MD_TRC(IDC_EVENTS_STILL_BUSY_2_MSG, expire_event_status_0_15, event_status_0_15, expire_event_status_16_31, event_status_16_31);
    //return rf_path to MD_IDC
    *rf_path = DRV_Reg8(IDC_NEW_PM_DEBUG);
    return;
}
/*
 * Schedule one timed IDC TX event on the HW scheduler.
 *
 * event      : payload bytes (event.data / event.num) plus the PHY-time
 *              offset at which the HW should emit them.
 * event_type : EVENT_LTE, EVENT_NR, or EVENT_COMMON; selects which pool of
 *              scheduler event indices is used:
 *                immediate event -> bit  0          (0x00000001)
 *                LTE events      -> bits 1..15      (0x0000FFFE)
 *                common event    -> bit  16         (0x00010000)
 *                NR events       -> bits 17..31     (0xFFFE0000)
 * drop_cmd   : filled via drv_idc_return_drop_cmd_lte_nr() whenever the
 *              event cannot be scheduled, so the caller can report the drop.
 *
 * Returns KAL_TRUE when the event was programmed into HW, KAL_FALSE when it
 * was dropped (RAT asleep, offset too far in the future, no free event
 * index, or no free/sequential SRAM).
 *
 * The whole body runs under the KAL_ITC_IDC inter-core lock; every early
 * return must release it first.
 *
 * Fix vs. previous revision: all bit shifts that can reach bit 31 now use
 * unsigned operands.  `1 << 31`, `0xFF << 24` and `(int)data << 24` are
 * undefined behavior on 32-bit int (shifting into the sign bit); the NR
 * event indices go up to 31, so `1 << idc_port.event_w_index_nr` hit this
 * in practice.  Behavior on common compilers is unchanged, but it is now
 * well-defined C.
 */
kal_bool drv_idc_schedule_event_lte_nr(IDC_EVENT_T event, kal_uint8 event_type, IDC_CTRL_DROP_CMD_T *drop_cmd)
{
    kal_uint32 i = 0, drop_cmd_flag = 0, mask = 0xFFFFFFFF;
    kal_uint32 w_data = 0, w_idx = 0, value = 0, tmp_sram_idx = 0, end_sram_idx = 0;
    kal_hrt_take_itc_lock(KAL_ITC_IDC, KAL_INFINITE_WAIT);
    /* If the owning RAT is asleep, always just return (no drop_cmd here). */
    if(event_type == EVENT_LTE){
        if(LTE_FLAG == KAL_FALSE){
            kal_hrt_give_itc_lock(KAL_ITC_IDC);
            MD_TRC(IDC_SCHEDULE_IN_SLEEP_MSG, event_type);
#ifdef ATEST_DRV_ENABLE
            DT_IDC_PRINTF("drv_idc: schedule when LTE RAT DISABLE\n\r");
#endif
            return KAL_FALSE;
        }
    }
    else if(event_type == EVENT_NR){
        if(NR_FLAG == KAL_FALSE){
            kal_hrt_give_itc_lock(KAL_ITC_IDC);
            MD_TRC(IDC_SCHEDULE_IN_SLEEP_MSG, event_type);
#ifdef ATEST_DRV_ENABLE
            DT_IDC_PRINTF("drv_idc: schedule when NR RAT DISABLE\n\r");
#endif
            return KAL_FALSE;
        }
    }
    else if(event_type == EVENT_COMMON){
        if((LTE_FLAG == KAL_FALSE) && (NR_FLAG == KAL_FALSE)){
            kal_hrt_give_itc_lock(KAL_ITC_IDC);
            MD_TRC(IDC_SCHEDULE_IN_SLEEP_MSG, event_type);
#ifdef ATEST_DRV_ENABLE
            DT_IDC_PRINTF("drv_idc: schedule when LTE AND NR RAT DISABLE\n\r");
#endif
            return KAL_FALSE;
        }
    }
    before_poll_time_SCH = ust_get_current_time();
    //********protect critical section*******
    //idc_ctrl_enter(IDC_CTRL_LOCK);//remove because LTE/NR RAT flow
    idc_port.schedule_state = IDC_RUN;
    /* PHY time wraps at 30 bits on MD97/MD97P (IDC_PHYTIME_WRAPPING). */
    event.offset = event.offset & 0x3FFFFFFF;
    /* Reject offsets more than 10 ms ahead of the current FRC time. */
    if(TIME_DIFF_EXCEED(event.offset, idc_port.frc_time, 10000))
    {
        kal_hrt_give_itc_lock(KAL_ITC_IDC);
        MD_TRC(IDC_SCHEDULE_OVER_10MS_MSG, idc_port.frc_time, event.offset, event.data[0], event.data[1]);
        //idc_ctrl_leave(IDC_CTRL_LOCK);
        //********protect critical section*******
        return KAL_FALSE;
    }
    /* ---- event configuration: pick a free event index + SRAM range ---- */
    if(event_type == EVENT_LTE){
        if((idc_port.event_usage_bit_map & (0x0000FFFE)) != 0x0000FFFE) // LTE events not full
        {
            event_idx_wrap = 0;
            /* Find an unused LTE event index, wrapping once at most. */
            while (1)
            {
                if ((1U << idc_port.event_w_index_lte) & idc_port.event_usage_bit_map)
                {
                    idc_port.event_w_index_lte++;
                    if (idc_port.event_w_index_lte > IDC_LTE_MAX_EVENT_IDX)
                    {
                        if(event_idx_wrap == 1)
                        {
                            /* Searched the whole LTE range twice: no slot. */
                            //********protect critical section*******
                            after_poll_time_SCH = ust_get_current_time();
                            kal_hrt_give_itc_lock(KAL_ITC_IDC);
                            MD_TRC(IDC_SCHEDULE_EVENT_IDX_NOT_FOUND_MSG);
                            drv_idc_return_drop_cmd_lte_nr(event, drop_cmd, event_type);
                            return KAL_FALSE;
                        }
                        idc_port.event_w_index_lte = IDC_LTE_STA_EVENT_IDX;
                        event_idx_wrap = 1;
                    }
                }
                else
                {
                    break;
                }
            }
            /* Find a free SRAM write index, wrapping once at most. */
            sram_wrap = 0;
            while (1)
            {
                if (idc_port.sram_table_usage[idc_port.sram_w_index] == 1)
                {
                    idc_port.sram_w_index++;
                    if (idc_port.sram_w_index == IDC_MAX_SRAM_SIZE)
                    {
                        if(sram_wrap == 1)
                        {
                            /* SRAM completely occupied: drop. */
                            //idc_ctrl_leave(IDC_CTRL_LOCK);
                            //********protect critical section*******
                            after_poll_time_SCH = ust_get_current_time();
                            kal_hrt_give_itc_lock(KAL_ITC_IDC);
                            MD_TRC(IDC_SCHEDULE_SRAM_FULL_MSG);
                            drv_idc_return_drop_cmd_lte_nr(event, drop_cmd, event_type);
                            return KAL_FALSE;
                        }
                        idc_port.sram_w_index = IDC_SRAM_WRAP_IDX;
                        sram_wrap = 1;
                    }
                }
                else
                {
                    break;
                }
            }
            /* Require event.num sequential free bytes (drop otherwise). */
            for(i = 0; i < event.num; i++)
            {
                tmp_sram_idx = idc_port.sram_w_index + i;
                //wrap case
                if(tmp_sram_idx >= IDC_MAX_SRAM_SIZE)
                {
                    idc_port.sram_w_index = IDC_SRAM_WRAP_IDX;
                    tmp_sram_idx = IDC_SRAM_WRAP_IDX;
                }
                //DT_IDC_PRINTF("*** sram_idx : %d***", tmp_sram_idx);
                if(idc_port.sram_table_usage[tmp_sram_idx] == 1)
                {
                    //return drop_cmd
                    //idc_ctrl_leave(IDC_CTRL_LOCK);
                    //********protect critical section*******
                    after_poll_time_SCH = ust_get_current_time();
                    kal_hrt_give_itc_lock(KAL_ITC_IDC);
                    MD_TRC(IDC_SCHEDULE_NO_SEQUENTIAL_SRAM_MSG);
                    drv_idc_return_drop_cmd_lte_nr(event, drop_cmd, event_type);
                    return KAL_FALSE;
                }
            }
            end_sram_idx = idc_port.sram_w_index + event.num - 1;
            if(end_sram_idx >= IDC_MAX_SRAM_SIZE)
                end_sram_idx = end_sram_idx - IDC_MAX_SRAM_SIZE + IDC_SRAM_WRAP_IDX;
            /* Pack payload bytes into the 32-bit EVT_DATA registers
             * (read-modify-write so neighbouring events are preserved). */
            w_idx = idc_port.sram_w_index;
            for(i = 0; i < event.num; i++)
            {
                w_data = w_data | ((kal_uint32)event.data[i] << (8 * (w_idx % 4)));
                mask &= ~(0xFFu << (8 * (w_idx % 4)));
                w_idx++;
                if((w_idx % 4 == 0) || (i == event.num - 1))
                {
                    if(w_idx % 4 == 0)
                        value = DRV_Reg32(IDC_CTRL_EVT_DATA((w_idx/4) - 1));
                    else
                        value = DRV_Reg32(IDC_CTRL_EVT_DATA(w_idx/4));
                    value &= mask;
                    value |= w_data;
                    if(w_idx % 4 == 0)
                        DRV_WriteReg32(IDC_CTRL_EVT_DATA((w_idx/4) - 1), value);
                    else
                        DRV_WriteReg32(IDC_CTRL_EVT_DATA(w_idx/4), value);
                    w_data = 0;
                    mask = 0xFFFFFFFF;
                }
                if(w_idx == IDC_MAX_SRAM_SIZE)
                    w_idx = IDC_SRAM_WRAP_IDX;
                if((idc_port.sram_w_index + i) >= IDC_MAX_SRAM_SIZE)
                    idc_port.sram_table_usage[idc_port.sram_w_index + i - IDC_MAX_SRAM_SIZE + IDC_SRAM_WRAP_IDX] = 1;
                else
                    idc_port.sram_table_usage[idc_port.sram_w_index + i] = 1;
            }
            // Set event memory position
            DRV_WriteReg32(IDC_CTRL_EVENT_MEM_POS(idc_port.event_w_index_lte), (idc_port.sram_w_index << 8) + end_sram_idx);
            // Set time stamp para & trigger event (bit 31 = start)
            DRV_WriteReg32(IDC_CTRL_EVENT_SETETING(idc_port.event_w_index_lte), event.offset + (1U << 31));
            // Record event_offset & event_data in the SW shadow tables
            idc_port.event_offset_table[idc_port.event_w_index_lte] = event.offset;
            kal_mem_cpy(idc_port.event_data_table[idc_port.event_w_index_lte], event.data, sizeof(event.data));
            idc_port.event_byte_num[idc_port.event_w_index_lte] = event.num;
            idc_port.event_sram_sta_idx[idc_port.event_w_index_lte] = idc_port.sram_w_index;
            // Record the number and usage bitmap for the scheduler
            idc_port.event_usage_bit_map |= (1U << idc_port.event_w_index_lte);
            // Add event_cnt
            idc_port.event_cnt++;
            // Advance sram_w_idx past the consumed bytes
            idc_port.sram_w_index += event.num;
            if (idc_port.sram_w_index == IDC_MAX_SRAM_SIZE)
                idc_port.sram_w_index = idc_port.sram_w_index - IDC_MAX_SRAM_SIZE + IDC_SRAM_WRAP_IDX;
            idc_port.event_w_index_lte++;
            if (idc_port.event_w_index_lte > IDC_LTE_MAX_EVENT_IDX)
                idc_port.event_w_index_lte = IDC_LTE_STA_EVENT_IDX;
        }
        else
        {
            //all LTE event indices busy: return drop_cmd after unlock
            drop_cmd_flag = 1;
        }
    }
    else if(event_type == EVENT_NR){
        if((idc_port.event_usage_bit_map & (0xFFFE0000)) != 0xFFFE0000) // NR events not full
        {
            event_idx_wrap = 0;
            /* Find an unused NR event index, wrapping once at most.
             * NR indices reach 31, hence the 1U (1 << 31 would be UB). */
            while (1)
            {
                if ((1U << idc_port.event_w_index_nr) & idc_port.event_usage_bit_map)
                {
                    idc_port.event_w_index_nr++;
                    if (idc_port.event_w_index_nr > IDC_NR_MAX_EVENT_IDX)
                    {
                        if(event_idx_wrap == 1)
                        {
                            /* Searched the whole NR range twice: no slot. */
                            //********protect critical section*******
                            after_poll_time_SCH = ust_get_current_time();
                            kal_hrt_give_itc_lock(KAL_ITC_IDC);
                            MD_TRC(IDC_SCHEDULE_EVENT_IDX_NOT_FOUND_MSG);
                            drv_idc_return_drop_cmd_lte_nr(event, drop_cmd, event_type);
                            return KAL_FALSE;
                        }
                        idc_port.event_w_index_nr = IDC_NR_STA_EVENT_IDX;
                        event_idx_wrap = 1;
                    }
                }
                else
                {
                    break;
                }
            }
            /* Find a free SRAM write index, wrapping once at most. */
            sram_wrap = 0;
            while (1)
            {
                if (idc_port.sram_table_usage[idc_port.sram_w_index] == 1)
                {
                    idc_port.sram_w_index++;
                    if (idc_port.sram_w_index == IDC_MAX_SRAM_SIZE)
                    {
                        if(sram_wrap == 1)
                        {
                            /* SRAM completely occupied: drop. */
                            //idc_ctrl_leave(IDC_CTRL_LOCK);
                            //********protect critical section*******
                            after_poll_time_SCH = ust_get_current_time();
                            kal_hrt_give_itc_lock(KAL_ITC_IDC);
                            MD_TRC(IDC_SCHEDULE_SRAM_FULL_MSG);
                            drv_idc_return_drop_cmd_lte_nr(event, drop_cmd, event_type);
                            return KAL_FALSE;
                        }
                        idc_port.sram_w_index = IDC_SRAM_WRAP_IDX;
                        sram_wrap = 1;
                    }
                }
                else
                {
                    break;
                }
            }
            /* Require event.num sequential free bytes (drop otherwise). */
            for(i = 0; i < event.num; i++)
            {
                tmp_sram_idx = idc_port.sram_w_index + i;
                //wrap case
                if(tmp_sram_idx >= IDC_MAX_SRAM_SIZE)
                {
                    idc_port.sram_w_index = IDC_SRAM_WRAP_IDX;
                    tmp_sram_idx = IDC_SRAM_WRAP_IDX;
                }
                //DT_IDC_PRINTF("*** sram_idx : %d***", tmp_sram_idx);
                if(idc_port.sram_table_usage[tmp_sram_idx] == 1)
                {
                    //return drop_cmd
                    //idc_ctrl_leave(IDC_CTRL_LOCK);
                    //********protect critical section*******
                    after_poll_time_SCH = ust_get_current_time();
                    kal_hrt_give_itc_lock(KAL_ITC_IDC);
                    MD_TRC(IDC_SCHEDULE_NO_SEQUENTIAL_SRAM_MSG);
                    drv_idc_return_drop_cmd_lte_nr(event, drop_cmd, event_type);
                    return KAL_FALSE;
                }
            }
            end_sram_idx = idc_port.sram_w_index + event.num - 1;
            if(end_sram_idx >= IDC_MAX_SRAM_SIZE)
                end_sram_idx = end_sram_idx - IDC_MAX_SRAM_SIZE + IDC_SRAM_WRAP_IDX;
            /* Pack payload bytes into the 32-bit EVT_DATA registers. */
            w_idx = idc_port.sram_w_index;
            for(i = 0; i < event.num; i++)
            {
                w_data = w_data | ((kal_uint32)event.data[i] << (8 * (w_idx % 4)));
                mask &= ~(0xFFu << (8 * (w_idx % 4)));
                w_idx++;
                if((w_idx % 4 == 0) || (i == event.num - 1))
                {
                    if(w_idx % 4 == 0)
                        value = DRV_Reg32(IDC_CTRL_EVT_DATA((w_idx/4) - 1));
                    else
                        value = DRV_Reg32(IDC_CTRL_EVT_DATA(w_idx/4));
                    value &= mask;
                    value |= w_data;
                    if(w_idx % 4 == 0)
                        DRV_WriteReg32(IDC_CTRL_EVT_DATA((w_idx/4) - 1), value);
                    else
                        DRV_WriteReg32(IDC_CTRL_EVT_DATA(w_idx/4), value);
                    w_data = 0;
                    mask = 0xFFFFFFFF;
                }
                if(w_idx == IDC_MAX_SRAM_SIZE)
                    w_idx = IDC_SRAM_WRAP_IDX;
                if((idc_port.sram_w_index + i) >= IDC_MAX_SRAM_SIZE)
                    idc_port.sram_table_usage[idc_port.sram_w_index + i - IDC_MAX_SRAM_SIZE + IDC_SRAM_WRAP_IDX] = 1;
                else
                    idc_port.sram_table_usage[idc_port.sram_w_index + i] = 1;
            }
            // Set event memory position
            DRV_WriteReg32(IDC_CTRL_EVENT_MEM_POS(idc_port.event_w_index_nr), (idc_port.sram_w_index << 8) + end_sram_idx);
            // Set time stamp para & trigger event (bit 31 = start)
            DRV_WriteReg32(IDC_CTRL_EVENT_SETETING(idc_port.event_w_index_nr), event.offset + (1U << 31));
            // Record event_offset & event_data in the SW shadow tables
            idc_port.event_offset_table[idc_port.event_w_index_nr] = event.offset;
            kal_mem_cpy(idc_port.event_data_table[idc_port.event_w_index_nr], event.data, sizeof(event.data));
            idc_port.event_byte_num[idc_port.event_w_index_nr] = event.num;
            idc_port.event_sram_sta_idx[idc_port.event_w_index_nr] = idc_port.sram_w_index;
            // Record the number and usage bitmap for the scheduler
            idc_port.event_usage_bit_map |= (1U << idc_port.event_w_index_nr);
            // Add event_cnt
            idc_port.event_cnt++;
            // Advance sram_w_idx past the consumed bytes
            idc_port.sram_w_index += event.num;
            if (idc_port.sram_w_index == IDC_MAX_SRAM_SIZE)
                idc_port.sram_w_index = idc_port.sram_w_index - IDC_MAX_SRAM_SIZE + IDC_SRAM_WRAP_IDX;
            idc_port.event_w_index_nr++;
            if (idc_port.event_w_index_nr > IDC_NR_MAX_EVENT_IDX)
                idc_port.event_w_index_nr = IDC_NR_STA_EVENT_IDX;
        }
        else
        {
            //all NR event indices busy: return drop_cmd after unlock
            drop_cmd_flag = 1;
        }
    }
    else if(event_type == EVENT_COMMON){
        /* Exactly one common slot (bit 16); no index search needed. */
        idc_port.event_w_index_com = IDC_COMMON_STA_EVENT_IDX;
        if((idc_port.event_usage_bit_map & (0x00010000)) != 0x00010000) // common event not busy
        {
            /* Find a free SRAM write index, wrapping once at most. */
            sram_wrap = 0;
            while (1)
            {
                if (idc_port.sram_table_usage[idc_port.sram_w_index] == 1)
                {
                    idc_port.sram_w_index++;
                    if (idc_port.sram_w_index == IDC_MAX_SRAM_SIZE)
                    {
                        if(sram_wrap == 1)
                        {
                            /* SRAM completely occupied: drop. */
                            //idc_ctrl_leave(IDC_CTRL_LOCK);
                            //********protect critical section*******
                            after_poll_time_SCH = ust_get_current_time();
                            kal_hrt_give_itc_lock(KAL_ITC_IDC);
                            MD_TRC(IDC_SCHEDULE_SRAM_FULL_MSG);
                            drv_idc_return_drop_cmd_lte_nr(event, drop_cmd, event_type);
                            return KAL_FALSE;
                        }
                        idc_port.sram_w_index = IDC_SRAM_WRAP_IDX;
                        sram_wrap = 1;
                    }
                }
                else
                {
                    break;
                }
            }
            /* Require event.num sequential free bytes (drop otherwise). */
            for(i = 0; i < event.num; i++)
            {
                tmp_sram_idx = idc_port.sram_w_index + i;
                //wrap case
                if(tmp_sram_idx >= IDC_MAX_SRAM_SIZE)
                {
                    idc_port.sram_w_index = IDC_SRAM_WRAP_IDX;
                    tmp_sram_idx = IDC_SRAM_WRAP_IDX;
                }
                //DT_IDC_PRINTF("*** sram_idx : %d***", tmp_sram_idx);
                if(idc_port.sram_table_usage[tmp_sram_idx] == 1)
                {
                    //return drop_cmd
                    //idc_ctrl_leave(IDC_CTRL_LOCK);
                    //********protect critical section*******
                    after_poll_time_SCH = ust_get_current_time();
                    kal_hrt_give_itc_lock(KAL_ITC_IDC);
                    MD_TRC(IDC_SCHEDULE_NO_SEQUENTIAL_SRAM_MSG);
                    drv_idc_return_drop_cmd_lte_nr(event, drop_cmd, event_type);
                    return KAL_FALSE;
                }
            }
            end_sram_idx = idc_port.sram_w_index + event.num - 1;
            if(end_sram_idx >= IDC_MAX_SRAM_SIZE)
                end_sram_idx = end_sram_idx - IDC_MAX_SRAM_SIZE + IDC_SRAM_WRAP_IDX;
            /* Pack payload bytes into the 32-bit EVT_DATA registers. */
            w_idx = idc_port.sram_w_index;
            for(i = 0; i < event.num; i++)
            {
                w_data = w_data | ((kal_uint32)event.data[i] << (8 * (w_idx % 4)));
                mask &= ~(0xFFu << (8 * (w_idx % 4)));
                w_idx++;
                if((w_idx % 4 == 0) || (i == event.num - 1))
                {
                    if(w_idx % 4 == 0)
                        value = DRV_Reg32(IDC_CTRL_EVT_DATA((w_idx/4) - 1));
                    else
                        value = DRV_Reg32(IDC_CTRL_EVT_DATA(w_idx/4));
                    value &= mask;
                    value |= w_data;
                    if(w_idx % 4 == 0)
                        DRV_WriteReg32(IDC_CTRL_EVT_DATA((w_idx/4) - 1), value);
                    else
                        DRV_WriteReg32(IDC_CTRL_EVT_DATA(w_idx/4), value);
                    w_data = 0;
                    mask = 0xFFFFFFFF;
                }
                if(w_idx == IDC_MAX_SRAM_SIZE)
                    w_idx = IDC_SRAM_WRAP_IDX;
                if((idc_port.sram_w_index + i) >= IDC_MAX_SRAM_SIZE)
                    idc_port.sram_table_usage[idc_port.sram_w_index + i - IDC_MAX_SRAM_SIZE + IDC_SRAM_WRAP_IDX] = 1;
                else
                    idc_port.sram_table_usage[idc_port.sram_w_index + i] = 1;
            }
            // Set event memory position
            DRV_WriteReg32(IDC_CTRL_EVENT_MEM_POS(idc_port.event_w_index_com), (idc_port.sram_w_index << 8) + end_sram_idx);
            // Set time stamp para & trigger event (bit 31 = start)
            DRV_WriteReg32(IDC_CTRL_EVENT_SETETING(idc_port.event_w_index_com), event.offset + (1U << 31));
            // Record event_offset & event_data in the SW shadow tables
            idc_port.event_offset_table[idc_port.event_w_index_com] = event.offset;
            kal_mem_cpy(idc_port.event_data_table[idc_port.event_w_index_com], event.data, sizeof(event.data));
            idc_port.event_byte_num[idc_port.event_w_index_com] = event.num;
            idc_port.event_sram_sta_idx[idc_port.event_w_index_com] = idc_port.sram_w_index;
            // Record the number and usage bitmap for the scheduler
            idc_port.event_usage_bit_map |= (1U << idc_port.event_w_index_com);
            // Add event_cnt
            idc_port.event_cnt++;
            // Advance sram_w_idx past the consumed bytes
            idc_port.sram_w_index += event.num;
            if (idc_port.sram_w_index == IDC_MAX_SRAM_SIZE)
                idc_port.sram_w_index = idc_port.sram_w_index - IDC_MAX_SRAM_SIZE + IDC_SRAM_WRAP_IDX;
        }
        else
        {
            //common event busy: return drop_cmd after unlock
            drop_cmd_flag = 1;
        }
    }
    else{
        // Error : Not legal event_type
        MD_TRC(IDC_EVNET_TYPE_ERR_MSG, event_type);
    }
    //idc_ctrl_leave(IDC_CTRL_LOCK);
    //********protect critical section*******
    MM_Sync();
    after_poll_time_SCH = ust_get_current_time();
    kal_hrt_give_itc_lock(KAL_ITC_IDC);
    // Log TX information
    MD_TRC(IDC_SCHEDULE_LTE_NR_MSG, event.offset, event.data[0], event.data[1], event_type);
    if(drop_cmd_flag == 1)
    {
        MD_TRC(IDC_SCHEDULE_EVENT_FULL_MSG);
        drv_idc_return_drop_cmd_lte_nr(event, drop_cmd, event_type);
        return KAL_FALSE;
    }
    return KAL_TRUE;
}
/*
 * Fill *drop_cmd with the identity of an event that could not be scheduled,
 * so the caller can report the drop back to MD_IDC, and trace the failure.
 */
void drv_idc_return_drop_cmd_lte_nr(IDC_EVENT_T event, IDC_CTRL_DROP_CMD_T *drop_cmd, kal_uint8 event_type)
{
    /* Command type is carried in bits [4:1] of the first payload byte. */
    kal_uint8 raw_type = (kal_uint8)((event.data[0] & 0x1E) >> 1);

    drop_cmd->cmd_phytime = event.offset;
    drop_cmd->cmd_type = raw_type;
    /* Extended commands (type 0xF) carry a sub-type in bits [7:2] of byte 1;
     * for all other types cmd_sub_type is left untouched. */
    if (raw_type == 0xF)
    {
        drop_cmd->cmd_sub_type = (event.data[1] & 0xFC) >> 2;
    }
    drop_cmd->event_type = event_type;
    //DT_IDC_PRINTF("drop cmd happen!!!!!!!!!!!!\n\r");
    //Log schedule fail info.
    MD_TRC(IDC_SCHEDULE_FAIL_LTE_NR_MSG, drop_cmd->cmd_phytime, drop_cmd->cmd_type, drop_cmd->cmd_sub_type, drop_cmd->event_type);
    return;
}
// LISR for the IDC UART "TX force on" interrupt: read and trace the
// TX-suspend / reset interrupt status, then clear the pending bits.
// No-op on MT6297. The IRQ line is masked for the duration of the handler
// so the status read and the clear write are not re-entered.
void idc_auto_tx_lisr(kal_uint32 vector)
{
#if !defined(MT6297)
IRQMask(MD_IRQID_IDC_UART_TX_FORCE_ON);
#if !defined(__MAUI_BASIC__)
kal_uint8 auto_tx_status = 0, tx_susp_int = 0, reset_int = 0;
auto_tx_status = DRV_Reg32(IDC_HW_TX_FORCE_ON);
// bits [3:2]: TX-suspend interrupt status; bits [7:6]: reset interrupt status
tx_susp_int = (auto_tx_status >> 2) & 0x3;
reset_int = (auto_tx_status >> 6) & 0x3;
MD_TRC(IDC_AUTO_TX_LISR_MSG, tx_susp_int, reset_int);
#endif
// Clear all four pending force-on interrupt bits
// (presumably write-1-to-clear semantics -- confirm against the register spec).
DRV_WriteReg8(IDC_HW_IDC_FORCFE_ON_CLR, 0xF);
IRQUnmask(MD_IRQID_IDC_UART_TX_FORCE_ON);
#endif
return;
}
// Program the Auto-denial TX quota registers (TX-suspend quota and
// reset/calibration-window quota).
// AUTO_TX_CON_INIT_FLAG acts as a busy flag guarded by the KAL_ITC_IDC
// inter-core lock: if another caller is already configuring, this call
// traces a failure and returns without touching the hardware. The flag is
// set and cleared under the lock, but the register writes themselves happen
// outside it, with MO_Sync() ordering them before the flag release.
void drv_idc_auto_tx_config(kal_uint8 tx_susp_quota, kal_uint8 reset_quota)
{
kal_hrt_take_itc_lock(KAL_ITC_IDC, KAL_INFINITE_WAIT);
if(AUTO_TX_CON_INIT_FLAG == KAL_TRUE){
kal_hrt_give_itc_lock(KAL_ITC_IDC);
MD_TRC(IDC_TAKE_FLAG_FAIL_MSG, __FUNCTION__);
return;
}
else{
AUTO_TX_CON_INIT_FLAG = KAL_TRUE;
MM_Sync();
kal_hrt_give_itc_lock(KAL_ITC_IDC);
}
DRV_WriteReg8(IDC_TX_SUSP_QUOTA_CFG, tx_susp_quota);
DRV_WriteReg8(IDC_CAL_WINDOW_CFG, reset_quota);
MO_Sync();
// Release the busy flag under the lock again.
kal_hrt_take_itc_lock(KAL_ITC_IDC, KAL_INFINITE_WAIT);
AUTO_TX_CON_INIT_FLAG = KAL_FALSE;
MM_Sync();
kal_hrt_give_itc_lock(KAL_ITC_IDC);
MD_TRC(IDC_AUTO_TX_CONFIG_MSG, tx_susp_quota, reset_quota);
return;
}
// Enable or disable the Auto-denial TX feature.
// AUTO_TX_EN_INIT_FLAG is a busy flag guarded by the KAL_ITC_IDC lock
// (same handshake as drv_idc_auto_tx_config): a concurrent caller traces
// a failure and returns without touching the hardware.
// Note the register write ORDER is deliberately opposite in the two
// branches: enable unmasks force-on before enabling auto TX, disable
// turns auto TX off before masking -- keep this ordering.
void drv_idc_auto_tx_en(kal_uint8 auto_tx_en)
{
kal_hrt_take_itc_lock(KAL_ITC_IDC, KAL_INFINITE_WAIT);
if(AUTO_TX_EN_INIT_FLAG == KAL_TRUE){
kal_hrt_give_itc_lock(KAL_ITC_IDC);
MD_TRC(IDC_TAKE_FLAG_FAIL_MSG, __FUNCTION__);
return;
}
else{
AUTO_TX_EN_INIT_FLAG = KAL_TRUE;
MM_Sync();
kal_hrt_give_itc_lock(KAL_ITC_IDC);
}
if(auto_tx_en == KAL_TRUE){
DRV_WriteReg8(IDC_HW_TX_FORCFE_ON_MASK, 0x0);
DRV_WriteReg8(IDC_TX_AUTO_DIS, 0x0);
}
else{
DRV_WriteReg8(IDC_TX_AUTO_DIS, 0x1);
DRV_WriteReg8(IDC_HW_TX_FORCFE_ON_MASK, 0x3);
}
MO_Sync();
// Release the busy flag under the lock again.
kal_hrt_take_itc_lock(KAL_ITC_IDC, KAL_INFINITE_WAIT);
AUTO_TX_EN_INIT_FLAG = KAL_FALSE;
MM_Sync();
kal_hrt_give_itc_lock(KAL_ITC_IDC);
MD_TRC(IDC_AUTO_TX_EN_MSG, auto_tx_en);
return;
}
// Unconditionally disable Auto-denial TX: turn auto TX off, then mask both
// force-on interrupt sources (same ordering as the disable branch of
// drv_idc_auto_tx_en). No lock/flag handshake here -- callers such as
// drv_idc_sleep_notify invoke this while already holding KAL_ITC_IDC.
void drv_idc_auto_tx_dis(void)
{
DRV_WriteReg8(IDC_TX_AUTO_DIS, 0x1);
DRV_WriteReg8(IDC_HW_TX_FORCFE_ON_MASK, 0x3);
MO_Sync();
return;
}
/*
 * Mark a RAT (LTE or NR) as enabled for IDC scheduling.
 * The RAT flags are shared across cores, so the update is done under the
 * KAL_ITC_IDC lock followed by a memory barrier; an unknown rat_status is
 * only traced as an error.
 */
void drv_idc_set_enable_rat(kal_uint8 rat_status)
{
    kal_bool err_flag = KAL_FALSE;

    kal_hrt_take_itc_lock(KAL_ITC_IDC, KAL_INFINITE_WAIT);
    switch (rat_status)
    {
    case IDC_RAT_LTE:
        LTE_FLAG = KAL_TRUE;
        break;
    case IDC_RAT_NR:
        NR_FLAG = KAL_TRUE;
        break;
    default:
        err_flag = KAL_TRUE;
        break;
    }
    MM_Sync();
    kal_hrt_give_itc_lock(KAL_ITC_IDC);

    /* Trace outside the lock. */
    if (err_flag == KAL_TRUE)
        MD_TRC(IDC_SET_RAT_ERR_MSG, rat_status);
    else
        MD_TRC(IDC_SET_RAT_MSG, rat_status);
    return;
}
/*
 * Mark a RAT (LTE or NR) as disabled for IDC scheduling.
 * When BOTH RATs end up disabled, all module init flags are cleared as well
 * so the next enable goes through full re-initialization. All flag updates
 * happen under the KAL_ITC_IDC lock; tracing is done after release.
 */
void drv_idc_set_disable_rat(kal_uint8 rat_status)
{
    kal_bool err_flag = KAL_FALSE, clr_flag = KAL_FALSE;

    kal_hrt_take_itc_lock(KAL_ITC_IDC, KAL_INFINITE_WAIT);
    switch (rat_status)
    {
    case IDC_RAT_LTE:
        LTE_FLAG = KAL_FALSE;
        break;
    case IDC_RAT_NR:
        NR_FLAG = KAL_FALSE;
        break;
    default:
        err_flag = KAL_TRUE;
        break;
    }
    /* Last RAT going down: reset every init flag. */
    if ((LTE_FLAG == KAL_FALSE) && (NR_FLAG == KAL_FALSE))
    {
        IDC_INIT_FLAG = KAL_FALSE;
        ACTIVATE_FLAG = KAL_FALSE;
        PM_INIT_FLAG = KAL_FALSE;
        AUTO_TX_CON_INIT_FLAG = KAL_FALSE;
        AUTO_TX_EN_INIT_FLAG = KAL_FALSE;
        clr_flag = KAL_TRUE;
    }
    MM_Sync();
    kal_hrt_give_itc_lock(KAL_ITC_IDC);

    /* Trace outside the lock. */
    if (err_flag == KAL_TRUE)
        MD_TRC(IDC_SET_RAT_ERR_MSG, rat_status);
    else
        MD_TRC(IDC_DIS_RAT_MSG, rat_status);
    if (clr_flag == KAL_TRUE)
        MD_TRC(IDC_CLR_IDC_INIT_FLAG_MSG, __FUNCTION__);
    return;
}
/*
 * Wake-up notification from a RAT: mark it active again.
 * Same flag update as drv_idc_set_enable_rat, but traced with the
 * dedicated wake-up message.
 */
void drv_idc_wakeup_notify(kal_uint8 rat_status)
{
    kal_bool err_flag = KAL_FALSE;

    kal_hrt_take_itc_lock(KAL_ITC_IDC, KAL_INFINITE_WAIT);
    switch (rat_status)
    {
    case IDC_RAT_LTE:
        LTE_FLAG = KAL_TRUE;
        break;
    case IDC_RAT_NR:
        NR_FLAG = KAL_TRUE;
        break;
    default:
        err_flag = KAL_TRUE;
        break;
    }
    MM_Sync();
    kal_hrt_give_itc_lock(KAL_ITC_IDC);

    /* Trace outside the lock. */
    if (err_flag == KAL_TRUE)
        MD_TRC(IDC_SET_RAT_ERR_MSG, rat_status);
    else
        MD_TRC(IDC_SET_RAT_WAKEUP_MSG, rat_status);
    return;
}
void drv_idc_sleep_notify(kal_uint8 rat_status)
{
kal_bool err_flag = KAL_FALSE, clr_flag = KAL_FALSE;
/***itc protect region***/
//update LTE/NR FLAG...
kal_hrt_take_itc_lock(KAL_ITC_IDC, KAL_INFINITE_WAIT);
before_poll_time_SLP_NTY = ust_get_current_time();
if(rat_status == IDC_RAT_LTE){
LTE_FLAG = KAL_FALSE;
if((LTE_FLAG == KAL_FALSE) && (NR_FLAG == KAL_FALSE)){
//stop all events
drv_idc_stop_event_97(0xFFFFFFFE);
//disable new PM
DRV_WriteReg32(IDC_REMAPPING_EN, 0);
//disable Auto-denial Tx
drv_idc_auto_tx_dis();
//turn off rx int.
_idc_atomic_lock(&idc_drv_atom_lock,IDC_ATLOCK_PWR_LISR);
_idc_atomic_lock(&idc_drv_atom_lock,IDC_ATLOCK_PWR_HISR);
IRQMask(MD_IRQID_IDC_UART_IRQ);
// Turn off RX INT, turn on TX INT
DRV_WriteReg32_NPW(IDC_UART_IER, IDC_UART_IER_ETBEI);
IRQUnmask(MD_IRQID_IDC_UART_IRQ);
//modify idc_port main state
idc_port.main_state = IDC_IN_SLEEP;
//INIT_FlAG_CLR
IDC_INIT_FLAG = KAL_FALSE;
ACTIVATE_FLAG = KAL_FALSE;
PM_INIT_FLAG = KAL_FALSE;
AUTO_TX_CON_INIT_FLAG = KAL_FALSE;
AUTO_TX_EN_INIT_FLAG = KAL_FALSE;
clr_flag = KAL_TRUE;
_idc_atomic_unlock(&idc_drv_atom_lock,IDC_ATLOCK_PWR_HISR);
_idc_atomic_unlock(&idc_drv_atom_lock,IDC_ATLOCK_PWR_LISR);
}
else{
//stop LTE events
drv_idc_stop_event_97(0x0000FFFE);
}
}
else if (rat_status == IDC_RAT_NR){
NR_FLAG = KAL_FALSE;
if((LTE_FLAG == KAL_FALSE) && (NR_FLAG == KAL_FALSE)){
//stop all events
drv_idc_stop_event_97(0xFFFFFFFE);
//disable new PM
DRV_WriteReg32(IDC_REMAPPING_EN, 0);
//disable Auto-denial Tx
drv_idc_auto_tx_dis();
//turn off rx int.
_idc_atomic_lock(&idc_drv_atom_lock,IDC_ATLOCK_PWR_LISR);
_idc_atomic_lock(&idc_drv_atom_lock,IDC_ATLOCK_PWR_HISR);
IRQMask(MD_IRQID_IDC_UART_IRQ);
// Turn off RX INT, turn on TX INT
DRV_WriteReg32_NPW(IDC_UART_IER, IDC_UART_IER_ETBEI);
IRQUnmask(MD_IRQID_IDC_UART_IRQ);
//modify idc_port main state
idc_port.main_state = IDC_IN_SLEEP;
//INIT_FlAG_CLR
IDC_INIT_FLAG = KAL_FALSE;
ACTIVATE_FLAG = KAL_FALSE;
PM_INIT_FLAG = KAL_FALSE;
AUTO_TX_CON_INIT_FLAG = KAL_FALSE;
AUTO_TX_EN_INIT_FLAG = KAL_FALSE;
clr_flag = KAL_TRUE;
_idc_atomic_unlock(&idc_drv_atom_lock,IDC_ATLOCK_PWR_HISR);
_idc_atomic_unlock(&idc_drv_atom_lock,IDC_ATLOCK_PWR_LISR);
}
else{
drv_idc_stop_event_97(0xFFFE0000);
}
}
else
err_flag = KAL_TRUE;
MM_Sync();
after_poll_time_SLP_NTY = ust_get_current_time();
kal_hrt_give_itc_lock(KAL_ITC_IDC);
/***itc protect region***/
if(err_flag == KAL_TRUE){
MD_TRC(IDC_SET_RAT_ERR_MSG, rat_status);
return;
}
else
MD_TRC(IDC_SET_RAT_SLEEP_NOTIFY_MSG, rat_status);
if(clr_flag == KAL_TRUE){
MD_TRC(IDC_CLR_IDC_INIT_FLAG_MSG,__FUNCTION__);
}
return;
}
// Program the SRAM index at which the HW scheduler's circular data buffer
// wraps. start_idx is written verbatim into IDC_CTRL_WRAP_REG (should match
// the IDC_SRAM_WRAP_IDX value used by the SW bookkeeping above).
void drv_idc_set_sram_wrap_idx(kal_uint32 start_idx)
{
DRV_WriteReg32_NPW(IDC_CTRL_WRAP_REG, start_idx);
return;
}
/*
 * Program an IMMEDIATE event (sent as soon as possible, no timestamp):
 * pack byte_num payload bytes from buf into the 32-bit EVT_DATA registers
 * starting at start_sram_idx, set the event's SRAM window, then kick it
 * off with the immediate-type/start bits (31:30 = 0b11).
 *
 * Unlike the scheduled path, the caller supplies both SRAM endpoints and no
 * SW bookkeeping tables are updated. ASSERTs if the write index runs past
 * IDC_MAX_NUM_BYTE.
 *
 * Fix vs. previous revision: the byte and mask shifts now use unsigned
 * operands. `(int)byte << 24` with byte >= 0x80, `0xFF << 24`, and
 * `0x3 << 30` all shift into the sign bit of a 32-bit int, which is
 * undefined behavior; the emitted values are unchanged on common compilers.
 */
void idc_set_immediate_event(kal_uint32 event_idx, kal_uint8* buf, kal_uint32 byte_num, kal_uint32 start_sram_idx, kal_uint32 end_sram_idx)
{
    kal_uint32 i = 0, w_data = 0, w_idx = 0, mask = 0xFFFFFFFF, value = 0;
    w_idx = start_sram_idx;
    /* Pack bytes into 32-bit words; read-modify-write each EVT_DATA register
     * so bytes belonging to neighbouring events are preserved. */
    for(i = 0; i < byte_num; i++)
    {
        w_data = w_data | ((kal_uint32)(*(buf + i)) << (8 * (w_idx % 4)));
        mask &= ~(0xFFu << (8 * (w_idx % 4)));
        w_idx++;
        /* Flush on each word boundary and after the final byte. */
        if((w_idx % 4 == 0) || (i == byte_num - 1))
        {
            if(w_idx % 4 == 0)
                value = DRV_Reg32(IDC_CTRL_EVT_DATA((w_idx/4) - 1));
            else
                value = DRV_Reg32(IDC_CTRL_EVT_DATA(w_idx/4));
            value &= mask;
            value |= w_data;
            if(w_idx % 4 == 0)
                DRV_WriteReg32(IDC_CTRL_EVT_DATA((w_idx/4) - 1), value);
            else
                DRV_WriteReg32(IDC_CTRL_EVT_DATA(w_idx/4), value);
            w_data = 0;
            mask = 0xFFFFFFFF;
        }
        if(w_idx > IDC_MAX_NUM_BYTE)
            ASSERT(0);
    }
    // Set event memory position
    DRV_WriteReg32(IDC_CTRL_EVENT_MEM_POS(event_idx), (start_sram_idx << 8) + end_sram_idx);
    //set immediate type & start (bits 31:30)
    DRV_WriteReg32(IDC_CTRL_EVENT_SETETING(event_idx), (0x3u << 30));
    return;
}
/*
 * Update the per-RAT field of the GPS B13/B14 register.
 * Read-modify-write: keep the other RAT's bits (GPS_*_MASK), merge in the
 * caller's bits (raw_data limited by GPS_*_OFS), write back, then barrier.
 * An unknown rat_status is traced as a failure and the register is not
 * touched.
 */
void drv_idc_gps_b13_b14_set(kal_uint8 rat_status, kal_uint16 raw_data)
{
    kal_uint16 tmp = 0;
    kal_uint32 keep_mask;
    kal_uint16 new_bits;

    /* Select which RAT field to replace. */
    if (rat_status == IDC_RAT_LTE)
    {
        keep_mask = GPS_LTE_MASK;
        new_bits = (kal_uint16)(raw_data & GPS_LTE_OFS);
    }
    else if (rat_status == IDC_RAT_NR)
    {
        keep_mask = GPS_NR_MASK;
        new_bits = (kal_uint16)(raw_data & GPS_NR_OFS);
    }
    else
    {
        //set gps fail, wrong rat_sts
        MD_TRC(IDC_GPS_B13_B14_SET_MSG, KAL_FALSE, rat_status, raw_data);
        return;
    }

    tmp = (DRV_Reg32(GPS_B13_B14_REG) & keep_mask);
    tmp |= new_bits;
    DRV_WriteReg32(GPS_B13_B14_REG, tmp);
    MO_Sync();
    //set gps success
    MD_TRC(IDC_GPS_B13_B14_SET_MSG, KAL_TRUE, rat_status, raw_data);
    return;
}
#if defined(__MD93__)
// Register an EL1 callback for one pattern-match (PM) slot (MD93 variant:
// private_data is a kal_bool). Asserts pm_idx is in range, stores the
// handler + context in idc_port.pm_cb_handle, and flushes with MM_Sync()
// so the PM HISR on another core sees a consistent pair.
// NOTE(review): func_ptr and private_data are written as two separate
// non-atomic stores -- callers presumably only register while PM dispatch
// is inactive; confirm.
int drv_idc_register_pm_callback(kal_uint8 pm_idx, IDC_DRV_TO_EL1_CALLBACK func_ptr , kal_bool private_data)
{
ASSERT(pm_idx < IDC_PM_NUM) ;
#ifdef ATEST_DRV_ENABLE
DT_IDC_PRINTF("Register PM Callback, pm_idx = %d\n\r", pm_idx);
#endif
idc_port.pm_cb_handle[pm_idx].callback_func = func_ptr;
idc_port.pm_cb_handle[pm_idx].private_data = private_data;
MM_Sync();
return KAL_SUCCESS ;
}
#elif defined(__MD95__) || defined(__MD97__) || defined(__MD97P__)
// MD95/MD97/MD97P variant: identical registration logic, but private_data
// is an opaque pointer handed back to the callback.
int drv_idc_register_pm_callback_95(kal_uint8 pm_idx, IDC_DRV_TO_EL1_CALLBACK func_ptr , void *private_data)
{
ASSERT(pm_idx < IDC_PM_NUM) ;
#ifdef ATEST_DRV_ENABLE
DT_IDC_PRINTF("Register PM Callback, pm_idx = %d\n\r", pm_idx);
#endif
idc_port.pm_cb_handle[pm_idx].callback_func = func_ptr;
idc_port.pm_cb_handle[pm_idx].private_data = private_data;
MM_Sync();
return KAL_SUCCESS ;
}
#endif
/*
 * Unregister the EL1 callback of one pattern-match (PM) slot.
 * Asserts pm_idx is in range, clears both the handler and its context,
 * then flushes with MM_Sync(). Always returns KAL_SUCCESS.
 */
int drv_idc_unregister_pm_callback(kal_uint8 pm_idx)
{
    ASSERT(pm_idx < IDC_PM_NUM) ;
#ifdef ATEST_DRV_ENABLE
    DT_IDC_PRINTF("Unregister PM Callback, pm_idx = %d\n\r", pm_idx);
#endif
    /* Drop the context first, then the function pointer. */
    idc_port.pm_cb_handle[pm_idx].private_data = 0;
    idc_port.pm_cb_handle[pm_idx].callback_func = NULL;
    MM_Sync();
    return KAL_SUCCESS;
}