1#include "kal_general_types.h"
2#include "kal_public_api.h"
3#include "kal_hrt_api.h"
4#include "kal_public_defs.h"
5#include "us_timer.h"
6#include "sync_data.h"
7#include "sleepdrv_interface.h"
8
9#include "drv_features.h"
10#include "drv_comm.h"
11
12#include "drv_msgid.h"
13#include "stack_msgs.h"
14#include "stack_ltlcom.h"
15
16#include "intrCtrl.h"
17
18#include "idc_internal.h"
19#include "dcl_idc.h"
20#include "drv_idc.h"
21
22#include "idc_reg.h"
23#include "svc_sap.h"
24
25#define __SMP_LL(LL_PTR) ({kal_uint32 __ll_ret;void *__ll_p=(void*)(LL_PTR);\
26 __asm__ __volatile__("ll %0, (%1)" \
27 : "=&d" (__ll_ret): "d" (__ll_p):);__ll_ret;})
28#define __SMP_SC(LL_PTR, SC_V) ({kal_uint32 __sc_ret=(SC_V);void *__ll_p=(void*)(LL_PTR);\
29 __asm__ __volatile__(\
30 "sc %0, (%2)\n"\
31 : "=d" (__sc_ret):"0" (__sc_ret), "d" (__ll_p): "cc","memory");__sc_ret;})
32
33
34#define LTE_TIMER_MHZ (61.44)
35#define LTE_CLOCK_MHZ (52)
36
37#if defined(__MD97__) || defined(__MD97P__)
38#define IDC_PHYTIME_WRAPPING 0x3FFFFFFF //frc wrap
39#else
40#define IDC_PHYTIME_WRAPPING 0xFFFFF
41#endif
42#define TIME_DIFF_WITHIN(latter_offset, previous_offset, time)\
43 (((latter_offset > previous_offset) \
44 &&((latter_offset - previous_offset) <= time)) \
45 ||((latter_offset < previous_offset) \
46 &&((IDC_PHYTIME_WRAPPING - previous_offset + latter_offset) <= time)))
47
48#define TIME_DIFF_EXCEED(latter_offset, previous_offset, time)\
49 (((latter_offset > previous_offset) \
50 &&((latter_offset - previous_offset) > time)) \
51 ||((latter_offset < previous_offset) \
52 &&((IDC_PHYTIME_WRAPPING - previous_offset + latter_offset) > time)))
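/*
 * TIME_DIFF_WITHIN / TIME_DIFF_EXCEED compare two counter offsets while
 * handling wrap-around at IDC_PHYTIME_WRAPPING. Worked example with the
 * 20-bit wrap (0xFFFFF): previous_offset = 0xFFF00 and latter_offset = 0x00100
 * give 0xFFFFF - 0xFFF00 + 0x00100 = 0x1FF (~511 ticks), so
 * TIME_DIFF_WITHIN(latter, previous, 614400) is true. At the 61.44 MHz LTE
 * timer (LTE_TIMER_MHZ), 61440 ticks correspond to 1 ms and 614400 ticks to
 * 10 ms. Note that equal offsets satisfy neither macro.
 */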
53
54
55#ifdef ATEST_DRV_ENABLE
 56 #define IDC_ASSERT(statement) \
 57 do { if(!(statement)) { \
 58 dbg_print("%s: line %d, statement = %d", __FUNCTION__, __LINE__, (statement));\
 59 dbg_flush();\
 60 while(1);\
 61 }\
 62 }while(0)
63#else
64 #define IDC_ASSERT(statement) ASSERT(statement)
65#endif
66
67
68#ifdef ATEST_DRV_ENABLE
69#define __4G_IDC__
70#define dhl_trace(...)
71#define DT_IDC_PRINTF(x...) \
72do{ \
73 dbg_print(x); \
74 dbg_flush(); \
75}while(0)
76#else /*ATEST_DRV_ENABLE*/
77#include "dhl_trace.h"
78#include "idc_trace.h"
79#endif
80
81#define POLL_STATUS(poll_statement) do{kal_uint32 i=0, __cnt=0;while(poll_statement){for(i=0;i<10;i++){__cnt++;}if(100000000<__cnt){IDC_ASSERT(0);}}}while(0)
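/*
 * POLL_STATUS busy-waits while poll_statement stays true; the inner loop only
 * burns cycles, and IDC_ASSERT(0) fires once roughly 10^8 iterations have
 * elapsed so a stuck hardware status bit cannot hang the caller silently.
 */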
82
83
84#if defined(__MD93__)
85 idc_struct_t idc_port = {0, IDC_CLOSED, KAL_FALSE, IDC_PLAN, 0, 0, {0}, {0}, 0, 0, 0, 0, {0}, {0}, 0, 0, {0, len_8, sb_1, pa_none}, IDC_INTERNAL_PIN};
86#elif defined(__MD95__) || defined(__MD97__) || defined(__MD97P__)
87 idc_struct_t idc_port = {0, IDC_CLOSED, KAL_FALSE, IDC_PLAN, 0, 0, {0}, {{0}}, {0}, {0}, {0}, 0, 0, 0, 0, 0, {0}, {0}, 0, 0, 0, 0, 0, 0, {0, len_8, sb_1, pa_none}, IDC_INTERNAL_PIN};
88#endif
89
90IDC_ILM_MSG_T ilm_buf[4] = {{0}, {0}};
91kal_uint32 ilm_num = 0;
92kal_char idc_dbg_str[100];
93kal_bool idc_read_RBR = KAL_FALSE;
94kal_uint8 idc_rx_count = 0;
95kal_uint32 idc_new_cmd_error_cnt = 0;
96kal_uint8 idc_rx_history[20] = {0};
97kal_uint32 idc_rx_history_time[20] = {0};
98kal_uint8 idc_lisr_count, idc_hisr_count = 0;
99kal_uint32 idc_hisr_time[20] = {0};
100kal_uint16 IIR_L, IIR_H, IER_L, LSR_L, LSR_H;
101
102kal_bool idc_rx_suspend = KAL_FALSE;
103kal_uint8 idc_rx_suspend_timer = 0;
104kal_bool idc_count_start = KAL_FALSE;
105kal_bool idc_ilm_on = KAL_TRUE;
106kal_bool idc_ilm_trigger = KAL_FALSE;
107kal_uint32 idc_cmd_byte_count = 0;
108volatile kal_bool idc_in_hisr = KAL_FALSE, idc_in_pm_hisr = KAL_FALSE;
109volatile kal_bool idc_in_close = KAL_FALSE;
110kal_uint32 idc_80211_tx_count = 0;
111kal_uint32 idc_80211_rx_count = 0;
112kal_uint32 idc_consys_tx_grant_ntf = 0;
113volatile kal_uint32 stop_status_check = 0;
114kal_uint32 poll_time = 0;
115kal_uint32 poll_time_E = 0, before_poll_time_E = 0, after_poll_time_E = 0;
116kal_uint32 poll_time_S = 0, before_poll_time_S = 0, after_poll_time_S = 0;
117kal_uint32 idc_in_workaround = 0, idc_in_eventpending = 0;
118kal_uint32 before_poll_time_U = 0, after_poll_time_U = 0;
119kal_uint32 before_poll_time_SE = 0, after_poll_time_SE = 0;
120kal_uint32 before_poll_time_SCH = 0, after_poll_time_SCH = 0;
121kal_uint32 before_poll_time_GPS = 0, after_poll_time_GPS = 0;
122kal_uint32 before_poll_time_UART_HISR = 0, after_poll_time_UART_HISR = 0;
123kal_uint32 before_poll_time_STOP_EVENT = 0, after_poll_time_STOP_EVENT = 0;
124kal_uint32 before_poll_time_SLP_NTY = 0, after_poll_time_SLP_NTY = 0;
125kal_uint32 stop_event_bitmap = 0, stop_event_bitmap32_0_15 = 0, stop_event_bitmap32_16_31 = 0;
126kal_uint32 event_status_0_15 = 0, event_status_16_31 = 0, expire_event_status_0_15 = 0, expire_event_status_16_31 = 0;
127kal_uint32 stop_event_bitmap_debug = 0, stop_event_bitmap32_0_15_debug = 0, stop_event_bitmap32_16_31_debug = 0;
128kal_uint8 sram_wrap = 0, event_idx_wrap = 0;
129IDC_ILM_MSG_T ilm;
130kal_bool new_cmd_flag = KAL_FALSE, old_cmd_flag = KAL_FALSE, ilm_stage = KAL_FALSE;
131
132/**********Colgin**************/
133kal_bool GPS_L1_LTE_FLAG = KAL_FALSE;
134kal_bool GPS_L5_LTE_FLAG = KAL_FALSE;
135kal_bool GPS_L1_NR_FLAG = KAL_FALSE;
136kal_bool GPS_L5_NR_FLAG = KAL_FALSE;
137
138kal_uint8 GPS_L1_L5_LTE_BM = 0;
139kal_uint8 GPS_L1_L5_NR_BM = 0;
140kal_uint8 GPS_L1_L5_ALL_BM = 0;
141kal_uint8 GPS_LTE_NR_ALL_BM = 0;
142
143/**********MD97_PETRUS*********/
144kal_bool SET_PIN_FLAG = KAL_FALSE;
145kal_bool LTE_FLAG = KAL_FALSE;
146kal_bool NR_FLAG = KAL_FALSE;
147kal_bool IDC_INIT_FLAG = KAL_FALSE;
148kal_bool ACTIVATE_FLAG = KAL_FALSE;
149kal_bool PM_INIT_FLAG = KAL_FALSE;
150kal_bool AUTO_TX_CON_INIT_FLAG = KAL_FALSE;
151kal_bool AUTO_TX_EN_INIT_FLAG = KAL_FALSE;
152kal_bool DR_ISSUE_FLAG = KAL_FALSE;
153
154kal_uint32 DR_ISSUE_FAIL_CNT = 0;
155kal_uint32 IDC_CMD_SUCCESS_CNT[10] = {0};
156kal_uint32 IDC_CMD_SUCCESS_CNT_IDX = 0;
157
158kal_uint8 idc_sleep_handle;
159
160#define LTE_TIMER_BASE 0xA6090000
161#define LTE_TIMER_START LTE_TIMER_BASE
162#define LTE_TIMER_FN_READ0 (LTE_TIMER_BASE + 0x10)
163#define LTE_TIMER_STIME_READ0 (LTE_TIMER_BASE + 0x14)
164
165static kal_atomic_uint32 idc_flag = 0;
166#define IDC_CTRL_LOCK 0xFFFFFFFF
167static volatile kal_atomic_uint32 idc_drv_atom_lock=0;
168
169enum{
170 IDC_ATLOCK_PWR_UPDATE,//= 0
171 IDC_ATLOCK_PWR_LISR, //= 1
172 IDC_ATLOCK_PWR_HISR, //= 2
173 IDC_ATLOCK_ISR_PINCFG, //= 3
174 IDC_ATLOCK_L1_L5_GPS, //= 4
175 IDC_ATLOCK_SINGLE_GPS // = 5
176};
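/* Each enumerator above is a bit index into idc_drv_atom_lock (bit 0 = PWR_UPDATE ... bit 5 = SINGLE_GPS), not a bit mask. */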
177
178//return the previous state of the requested lock bit
179//return 0 : lock acquired successfully
180//return non-zero : lock already held, acquisition failed
181static inline kal_uint32 _do_atomic_try_lock(volatile kal_atomic_uint32 *lock_v, kal_uint8 lock_idx){
182
183 const kal_uint32 lock_map=1<<lock_idx;
184 kal_uint32 old_v = 0;
185
186 do{
187 old_v=__SMP_LL(lock_v);
188 if(old_v&lock_map){
189 return lock_map;
190 }
191 old_v|=lock_map;
192 }while(0==__SMP_SC(lock_v,old_v));
193
194 return 0;
195
196}
197
198kal_bool _idc_atomic_try_lock(volatile kal_atomic_uint32 *lock_v, kal_uint8 lock_idx){
199 if(0==((*lock_v)&(1<<lock_idx))){
200 if(0==_do_atomic_try_lock(lock_v,lock_idx)){return KAL_TRUE;}
201 }
202 return KAL_FALSE;
203}
204
205void _idc_atomic_lock(volatile kal_atomic_uint32 *lock_v, kal_uint8 lock_idx){
206 while(_do_atomic_try_lock(lock_v,lock_idx)){
207 __asm__ __volatile__("pause \n");
208 }
209}
210void _idc_atomic_unlock(volatile kal_atomic_uint32 *lock_v, kal_uint8 lock_idx){
211 kal_uint32 old_v,lock_map=1<<lock_idx,tmp;
212
213 miu_syncn(4);
214
215 do{
216 old_v=__SMP_LL(lock_v);
217 tmp=(~lock_map)&old_v;
218 old_v&=lock_map;
219 if(0==old_v){break;}
220 }while(0==__SMP_SC(lock_v,tmp));
221}
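/*
 * Illustrative pairing only (this is how the lock helpers are used further
 * down in this file, e.g. in drv_sleep_notify):
 *   _idc_atomic_lock(&idc_drv_atom_lock, IDC_ATLOCK_PWR_LISR);
 *   ... critical section ...
 *   _idc_atomic_unlock(&idc_drv_atom_lock, IDC_ATLOCK_PWR_LISR);
 * _idc_atomic_try_lock() is the non-blocking variant and returns KAL_TRUE
 * only when the bit was actually acquired.
 */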
222
223
224void idc_ctrl_enter(kal_uint32 func_flag){
225 kal_uint32 old_flag = kal_atomic_set_bitmask_return(&idc_flag, func_flag);
226 if(old_flag & func_flag){
227 #if !defined(ATEST_DRV_ENABLE)
228 MD_TRC(IDC_FUN_ENTER_CONCURRENTLY_MSG,old_flag, func_flag);
229 #else
230 kal_sprintf(idc_dbg_str, "drv_idc: the idc ctrl func entering concurrently(%X, %X)\r\n", old_flag, func_flag);
231 DT_IDC_PRINTF(idc_dbg_str);
232 #endif
233 IDC_ASSERT(0);
234 }
235}
236
237void idc_ctrl_leave(kal_uint32 func_flag){
238 kal_uint32 old_flag = kal_atomic_clear_bitmask_return(&idc_flag, func_flag);
239 if(((func_flag == IDC_CTRL_LOCK) || (old_flag == IDC_CTRL_LOCK))?
240 (func_flag != old_flag): //main enter should match main leave
 241 ((func_flag & old_flag) != func_flag)){ //a sub-func leave must have a matching enter
242 #if !defined(ATEST_DRV_ENABLE)
243 MD_TRC(IDC_LEAVE_FUN_NOT_MATCH_MSG,func_flag, old_flag);
244 #else
 245 kal_sprintf(idc_dbg_str, "drv_idc: the power ctrl leave func(%x) doesn't match enter func(%x)\r\n", func_flag, old_flag);
246 DT_IDC_PRINTF(idc_dbg_str);
247 #endif
248 IDC_ASSERT(0);
249 }
250}
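/*
 * idc_ctrl_enter()/idc_ctrl_leave() act as a re-entrancy guard rather than a
 * lock: each caller sets and clears its own bit in idc_flag (IDC_CTRL_LOCK
 * marks the whole-driver critical path), and any mismatch asserts.
 * Typical use, as in drv_idc_schedule_event():
 *   idc_ctrl_enter(IDC_CTRL_LOCK);
 *   ... scheduler bookkeeping ...
 *   idc_ctrl_leave(IDC_CTRL_LOCK);
 */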
251
252void drv_idc_conn_txrx_count(kal_bool is_start)
253{
254 if (KAL_TRUE == is_start)
255 {
256 MD_TRC(IDC_START_COUNT_MSG,);
257 idc_count_start = KAL_TRUE;
258 idc_80211_tx_count = 0;
259 idc_80211_rx_count = 0;
260 }
261 else
262 {
263 idc_count_start = KAL_FALSE;
264 MD_TRC(IDC_END_COUNT_MSG,idc_80211_tx_count, idc_80211_rx_count);
265 }
266}
267
268void drv_idc_init_uart(void)
269{
270
271#if !defined(ATEST_DRV_ENABLE)
272 MD_TRC(IDC_UART_INIT_MSG,);
273#else
274 kal_sprintf(idc_dbg_str, "drv_idc: IDC UART Init\n\r");
275 //DT_IDC_PRINTF(idc_dbg_str);
276#endif
277
278#if (!defined(__MD97__)) && (!defined(__MD97P__))
279 // Open Clock Gating of LTE_TIMER
280 DRV_WriteReg32(MDL1AO_PDN_CLR, PDN_LTE_TMR_MASK);
281#endif
282
283 // Open Clock Gating of IDC_UART
284 DRV_WriteReg32(MDL1AO_PDN_CLR, PDN_IDC_UART_MASK);
285
286 // Open Clock Gating of IDC_CTRL
287 DRV_WriteReg32_NPW(MDL1AO_PDN_CLR, PDN_IDC_CTRL_MASK);
288
289 // Initialize baud rate
290 drv_idc_set_baudrate(4000000);
291
292 // Initialize IDC UART FIFO threshold
293 drv_idc_set_fifo_trigger(1);
294
 295 //removed because a HW limitation prevents clearing the RX FIFO simultaneously
296 //DRV_WriteReg32_NPW(IDC_UART_FCR, IDC_UART_FCR_RXTRIG | IDC_UART_FCR_FIFOINI);
297 //MD_TRC(IDC_CLEAN_RXFIFO_MSG,__FUNCTION__);
298
299#if !defined(MT6297)
300 //Init SRAM wrap start_idx, idx 0~11 for immediate_event, rest for schedule_event
301 drv_idc_set_sram_wrap_idx(IDC_SRAM_WRAP_IDX);
302#endif
303 // Enable RX interrupt
304 //DRV_WriteReg32(IDC_UART_IER, IDC_UART_IER_ERBFI);
305#if defined(CHIP10992)
306 // Initialize m2c bridge
307 drv_idc_init_m2c_bridge();
308#endif
309 return;
310}
311
312void drv_idc_init_isr(void)
313{
314 //IRQ_Register_LISR(MD_IRQID_IDC_UART_IRQ, idc_uart_lisr, "IDC_UART");
315 //IRQSensitivity(MD_IRQID_IDC_UART_IRQ, KAL_FALSE);// KAL_TRUE :pulse trigger KAL_FALSE: level trigger
316
317 //IRQ_Register_LISR(MD_IRQID_IDC_PM_INT, idc_pm_lisr, "IDC_PM");
318 //IRQSensitivity(MD_IRQID_IDC_PM_INT, KAL_FALSE);
319 IRQUnmask(MD_IRQID_IDC_PM_INT);
320
321#if !defined(MT6297)
322 //IRQ_Register_LISR(MD_IRQID_IDC_UART_TX_FORCE_ON, idc_auto_tx_lisr, "IDC_AUTO_TX");
323 //IRQSensitivity(MD_IRQID_IDC_UART_TX_FORCE_ON, KAL_FALSE);
324 IRQUnmask(MD_IRQID_IDC_UART_TX_FORCE_ON);
325#endif
326}
327
328//Before entering sleep mode, EL1 calls this function to let the IDC driver know we are in sleep mode
329void drv_sleep_notify(void)
330{
331
332 _idc_atomic_lock(&idc_drv_atom_lock,IDC_ATLOCK_PWR_LISR);
333 _idc_atomic_lock(&idc_drv_atom_lock,IDC_ATLOCK_PWR_HISR);
334#if (!defined(__MD97__)) && (!defined(__MD97P__))
335 _idc_atomic_lock(&idc_drv_atom_lock,IDC_ATLOCK_PWR_UPDATE);
336#endif
337
338 IRQMask(MD_IRQID_IDC_UART_IRQ);
339 // Turn off RX INT, turn on TX INT
340 DRV_WriteReg32_NPW(IDC_UART_IER, IDC_UART_IER_ETBEI);
341 IRQUnmask(MD_IRQID_IDC_UART_IRQ);
342
343 //modify idc_port main state
344 idc_port.main_state = IDC_IN_SLEEP;
345 MM_Sync();
346
347#if (!defined(__MD97__)) && (!defined(__MD97P__))
348 _idc_atomic_unlock(&idc_drv_atom_lock,IDC_ATLOCK_PWR_UPDATE);
349#endif
350 _idc_atomic_unlock(&idc_drv_atom_lock,IDC_ATLOCK_PWR_HISR);
351 _idc_atomic_unlock(&idc_drv_atom_lock,IDC_ATLOCK_PWR_LISR);
352
353}
354
355// After wake-up, the IRQ stays masked until the first SF tick in order to prevent RX data errors
356void drv_idc_uart_activate(void)
357{
358 MD_TRC(IDC_ACTIVATE_MSG);
359
360 kal_hrt_take_itc_lock(KAL_ITC_IDC, KAL_INFINITE_WAIT);
361 if(ACTIVATE_FLAG == KAL_TRUE){
362 kal_hrt_give_itc_lock(KAL_ITC_IDC);
363 MD_TRC(IDC_TAKE_FLAG_FAIL_MSG, __FUNCTION__);
364 return;
365 }
366 else{
367 ACTIVATE_FLAG = KAL_TRUE;
368 MM_Sync();
369 kal_hrt_give_itc_lock(KAL_ITC_IDC);
370 }
371
372 IRQMask(MD_IRQID_IDC_UART_IRQ);
 373 // Clear the IDC UART FIFO; HW limitation: the RX FIFO can't be cleared simultaneously
374 DRV_WriteReg32_NPW(IDC_UART_FCR, IDC_UART_FCR_RXTRIG | IDC_UART_FCR_FIFOINI);
375 MD_TRC(IDC_CLEAN_RXFIFO_MSG , __FUNCTION__);
376
377 // Enable RX interrupt
378 DRV_WriteReg32(IDC_UART_IER, IDC_UART_IER_ERBFI);
379
380 //Unmask IRQ
381 IRQUnmask(MD_IRQID_IDC_UART_IRQ);
382 MD_TRC(IDC_UNMASK_UART_ISR_MSG , __FUNCTION__);
383}
384
385// Bootup init: drv_idc_init(KAL_FALSE)
386// Reinit after sleep: drv_idc_init(KAL_TRUE) -> drv_idc_uart_activate
387void drv_idc_init(kal_bool sleep_mode)
388{
389#if defined(__4G_IDC__)
390 kal_uint32 i = 0;
391
392 kal_hrt_take_itc_lock(KAL_ITC_IDC, KAL_INFINITE_WAIT);
393 if(IDC_INIT_FLAG == KAL_TRUE){
394 kal_hrt_give_itc_lock(KAL_ITC_IDC);
395 MD_TRC(IDC_NONSLEEP_MSG, sleep_mode, LTE_FLAG, NR_FLAG, IDC_INIT_FLAG);
396 return;
397 }
398 else{
399 IDC_INIT_FLAG = KAL_TRUE;
400 MM_Sync();
401 kal_hrt_give_itc_lock(KAL_ITC_IDC);
402 MD_TRC(IDC_SET_IDC_INIT_FLAG_MSG,__FUNCTION__);
403 }
404
405 // Mask IRQs before init
406 IRQMask(MD_IRQID_IDC_UART_IRQ);
407 IRQMask(MD_IRQID_IDC_PM_INT);
408
 409 // Initialize flags/config of the IDC driver
410// kal_mem_set(&idc_port, 0, sizeof(idc_port));
411// idc_ctrl_enter(IDC_CTRL_LOCK); //remove because LTE/NR RAT flow
412 idc_port.schedule_state = IDC_PLAN;
413 idc_port.event_cnt = 0;
414 idc_port.event_pending_cnt = 0;
415 idc_port.event_longest_index = 0;
416 idc_port.event_longest_time = 0;
417 idc_port.phy_time = 0;
418 idc_port.frc_time = 0;
419 idc_port.rx_buf = 0;
420#if (!defined(__MD97__)) && (!defined(__MD97P__))
421 idc_port.event_w_index = 0;
422 idc_port.event_usage_bit_map = 0;
423#else
424 //event_idx 0 for immediate_event.
425 idc_port.event_w_index = 1;
426 idc_port.event_usage_bit_map = 0x1;
427#if !defined(MT6297)
428 idc_port.event_w_index_lte = IDC_LTE_STA_EVENT_IDX;
429 idc_port.event_w_index_nr = IDC_NR_STA_EVENT_IDX;
430 idc_port.event_w_index_com = IDC_COMMON_STA_EVENT_IDX;
431 /***record IDC_CMD_CNT***/
432 IDC_CMD_SUCCESS_CNT_IDX++;
433 if(IDC_CMD_SUCCESS_CNT_IDX >= 10)
434 IDC_CMD_SUCCESS_CNT_IDX = 0;
435
436 IDC_CMD_SUCCESS_CNT[IDC_CMD_SUCCESS_CNT_IDX] = 0;
437
438#endif
439#endif
440#if defined(__MD95__) || defined(__MD97__) || defined(__MD97P__)
441 idc_port.sram_w_index = 0;
442 kal_mem_set(idc_port.event_offset_table, 0, sizeof(idc_port.event_offset_table));
443 kal_mem_set(idc_port.event_data_table, 0, sizeof(idc_port.event_data_table));
444 kal_mem_set(idc_port.sram_table_usage, 0, sizeof(idc_port.sram_table_usage));
445#endif
446#if !defined(MT6297)
447 //SRAM_idx 0~11 for immediate_event.
448 idc_port.sram_w_index = IDC_SRAM_WRAP_IDX;
449 for(i = 0; i < IDC_SRAM_WRAP_IDX; i++)
450 idc_port.sram_table_usage[i] = 1;
451#endif
452
453 // Init IDC_UART
454 drv_idc_init_uart();
455 MD_TRC(IDC_SLEEP_MSG,sleep_mode);
456
 457 // MD93: IDC_UART is in the AO domain, so IDC_UART, ISR & callback functions do not need re-init after sleep
458 if (!sleep_mode)
459 {
460/*#ifndef ATEST_DRV_ENABLE
461 idc_sleep_handle = SleepDrv_GetHandle(SMP);
462#endif*/
463
464
465 // Init callback functions
466 for (i = 0; i < IDC_PM_NUM; ++i)
467 idc_port.pm_cb_handle[i].callback_func = NULL;
468
469 // Register UART, PM ISR, then unmask IRQ
470 drv_idc_init_isr();
471 idc_port.intr_en = KAL_TRUE;
472
473 // Enable TX Count of IDC_CTRL
474 DRV_WriteReg32(IDC_CTRL_DATA_CNT_CTRL, 0x1);
475 }
476
477 idc_port.main_state = IDC_IN_USE;
478 MM_Sync();
479#else
 480 //IDC should not be initialized if __4G_IDC__ is not defined
481 ASSERT(0);
482#endif
483 //idc_ctrl_leave(IDC_CTRL_LOCK);//remove because LTE/NR RAT flow
484 return;
485}
486void drv_idc_init_m2c_bridge(void)
487{
488 kal_uint32 tmp;
489 //Enable m2c bridge feature
490 //M2C_IDC2PTA_BRIDGE + 0xF00[4:2] = 3'b111 ((Bit[4:2] : infra request))
491 tmp = DRV_Reg32(M2C_IDC2PTA_BRIDGE_SPM_ACK);
492 tmp &= (~0x1C);
493 tmp |= M2C_SPM_ACK;
494 DRV_WriteReg32(M2C_IDC2PTA_BRIDGE_SPM_ACK, tmp);
495 //M2C_IDC2PTA_BRIDGE + 0x500[10:0] = 11'h7FF
496 tmp = DRV_Reg32(M2C_IDC2PTA_BRIDGE_M2C_EN);
497 tmp &= (~0x7FF);
498 tmp |= M2C_EN;
499 DRV_WriteReg32(M2C_IDC2PTA_BRIDGE_M2C_EN, tmp);
 500 //M2C_IDC2PTA_BRIDGE + 0x504[23:0] = 24'h040404
501 tmp = DRV_Reg32(M2C_IDC2PTA_BRIDGE_M2C_TIME);
502 tmp &= (~0xFFFFFF);
503 tmp |= M2C_TIME;
504 DRV_WriteReg32(M2C_IDC2PTA_BRIDGE_M2C_TIME, tmp);
505
506 //switch pinmux & GPIO
507 //GPIO 131 => m2c_bt_act, GPIO 132 => m2c_bt_pri, GPIO 133 => m2c_wlan_act, GPIO 134 => m2c_uart_tx, GPIO 135 => m2c_uart_rx
508 DRV_WriteReg32(0xC0005000 + 0x408, 0xFFF<<12);
509 DRV_WriteReg32(0xC0005000 + 0x404, 0x111<<12);
510
511 //disable pull up for wlan_act, Write 0x11D10130[2] = 0x0
512 tmp = DRV_Reg32(0xC1D10130);
513 tmp &= (~0x4);
514 DRV_WriteReg32(0xC1D10130, tmp);
515 //enable pull down for wlan_act, Write 0x11D100F0[2] = 0x1
516 tmp = DRV_Reg32(0xC1D100F0);
517 tmp |= (0x4);
518 DRV_WriteReg32(0xC1D100F0, tmp);
519 MO_Sync();
520 return;
521}
522
523void drv_idc_get_support(IDC_SUPPORT_T *support)
524{
525#if defined(__4G_IDC__)
526 support->idc = KAL_TRUE;
527 support->gpio = KAL_FALSE;
528 support->uart = KAL_TRUE;
529
530#else // !defined(__4G_IDC__)
531 support->idc = KAL_FALSE;
532 support->gpio = KAL_FALSE;
533 support->uart = KAL_FALSE;
534#endif
535 return;
536}
537
538void drv_idc_open(kal_uint32 mod_id)
539{
540 idc_port.owner_id = mod_id;
541 idc_port.main_state = IDC_OPEN;
542
543 return;
544}
545
546void drv_idc_close(void)
547{
548 kal_uint32 i = 0;
549
550 // stop all events before closing IDC
551 DRV_WriteReg32(IDC_CTRL_SCH_STOP, 0xFFFF);
552
553 // disable interrupt
554 IRQMask(MD_IRQID_IDC_UART_IRQ);
555 DRV_WriteReg8(IDC_UART_IER, IDC_UART_IER_ALLOFF);
556 IRQUnmask(MD_IRQID_IDC_UART_IRQ);
557
558 idc_port.intr_en = KAL_FALSE;
559 idc_port.rx_buf = 0;
560
561 for (i = 0; i < 4; ++i)
562 {
563 DRV_WriteReg32(IDC_PRI(i), 0);
564 DRV_WriteReg32(IDC_PRI_BITEN(i), 0);
565 DRV_WriteReg32(IDC_PAT(i), 0);
566 DRV_WriteReg32(IDC_PAT_BITEN(i), 0);
567 }
568
569#if defined(__MD95__) || defined(__MD97__) || defined(__MD97P__)
570 DRV_WriteReg32(IDC_REMAPPING_EN, 0);
571#endif
572
573 kal_mem_set(&idc_port, 0, sizeof(idc_port));
574
575 idc_port.main_state = IDC_CLOSED;
576 idc_in_close = KAL_FALSE;
577
578 return;
579}
580
581void drv_idc_set_dcb_config(IDC_CTRL_DCB_CONFIG_T idc_config)
582{
583 kal_uint8 tmp_lcr, tmp_ier;
584
585 IRQMask(MD_IRQID_IDC_UART_IRQ);
586 tmp_ier = DRV_Reg8(IDC_UART_IER);
587 DRV_WriteReg8(IDC_UART_IER, IDC_UART_IER_ALLOFF);
588 IRQUnmask(MD_IRQID_IDC_UART_IRQ);
589
590 // set baud rate
591 drv_idc_set_baudrate(idc_config.u4Baud);
592
593 tmp_lcr = DRV_Reg32(IDC_UART_LCR);
594 tmp_lcr &= ~0x3F;
595 // set number of bits per character
596 switch(idc_config.u1DataBits)
597 {
598 case len_5:
599 tmp_lcr |= IDC_UART_WLS_5;
600 break;
601 case len_6:
602 tmp_lcr |= IDC_UART_WLS_6;
603 break;
604 case len_7:
605 tmp_lcr |= IDC_UART_WLS_7;
606 break;
607 case len_8:
608 tmp_lcr |= IDC_UART_WLS_8;
609 break;
610 default:
611 break;
612 }
613
614 switch(idc_config.u1StopBits)
615 {
616 case sb_1:
617 tmp_lcr |= IDC_UART_1_STOP;
618 break;
619 case sb_2:
620 tmp_lcr |= IDC_UART_2_STOP;
621 break;
622 case sb_1_5:
623 tmp_lcr |= IDC_UART_1_5_STOP;
624 break;
625 default:
626 break;
627 }
628
629 switch(idc_config.u1Parity)
630 {
631 case pa_none:
632 tmp_lcr |= IDC_UART_NONE_PARITY;
633 break;
634 case pa_odd:
635 tmp_lcr |= IDC_UART_ODD_PARITY;
636 break;
637 case pa_even:
638 tmp_lcr |= IDC_UART_EVEN_PARITY;
639 break;
640 default:
641 break;
642 }
643
644 DRV_WriteReg32(IDC_UART_LCR, tmp_lcr);
645
646 kal_mem_cpy((void *) &idc_port.DCB, (void *) &idc_config, sizeof(IDC_CTRL_DCB_CONFIG_T));
647
648 IRQMask(MD_IRQID_IDC_UART_IRQ);
649 DRV_WriteReg8(IDC_UART_IER, tmp_ier);
650 IRQUnmask(MD_IRQID_IDC_UART_IRQ);
651}
652
653
654void drv_idc_get_dcb_config(IDC_CTRL_DCB_CONFIG_T *DCB)
655{
656 kal_mem_cpy((void *) DCB, (void *) &idc_port.DCB, sizeof(IDC_CTRL_DCB_CONFIG_T));
657
658 return;
659}
660void drv_idc_set_baudrate(kal_uint32 baudrate)
661{
662 kal_uint8 tmp_lcr = 0;
663 kal_uint32 sample_count = 0;
664
665 idc_port.DCB.u4Baud = baudrate;
666
667 // Only 4M baudrate is used in IDC now
668 ASSERT(baudrate == 4000000);
669
670 sample_count = 6;
671
672 // configure register
673 // based on sample_count * baud_pulse, baud_rate = system clock frequency / (SAMPLE_COUNT + 1) / {DLM, DLL}
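 // Illustrative arithmetic only (the UART source clock is not stated in this file):
 // with sample_count = 6 and divisor {DLM, DLL} = {0, 1} as programmed here, each
 // nominal bit lasts 6 UART-clock cycles; the FRACDIV patterns below then stretch
 // selected bits by one extra cycle (e.g. 0x55 stretches four of the eight data
 // bits, pushing the average divisor toward 6.5), which is presumably how the
 // fixed source clock is matched to the 4 Mbps target.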
674 DRV_WriteReg32(IDC_UART_HIGHSPEED, IDC_UART_HIGHSPEED_X);
675
676 DRV_WriteReg32(IDC_UART_FEATURE_SEL, 0x1);
677 // -- FEATURE_SEL start --
678
679 DRV_WriteReg32(IDC_UART_DLL_backup, 0x1);
680 DRV_WriteReg32(IDC_UART_DLM_backup, 0x0);
681
682 DRV_WriteReg32(IDC_UART_FEATURE_SEL, 0x0);
683 // -- FEATURE_SEL end --
684
685 DRV_WriteReg32(IDC_UART_SAMPLE_COUNT, sample_count - 1);
686 DRV_WriteReg32(IDC_UART_SAMPLE_POINT, (sample_count - 1) >> 1);
687 // Set Guard time en & cnt = 2
688 DRV_WriteReg32(IDC_UART_GUARD, 0x12);
689
690#if defined(__MD97__) || defined(__MD97P__)
691 DRV_WriteReg32(IDC_UART_FRACDIV_L_TX, 0x55);
692 DRV_WriteReg32(IDC_UART_FRACDIV_L_RX, 0x55);
693 DRV_WriteReg32(IDC_UART_FRACDIV_M_TX, 0x2);
694 DRV_WriteReg32(IDC_UART_FRACDIV_M_RX, 0x2);
695#else
 696 // orig: increase SAMPLE_COUNT by 1 for bit[0], bit[2], bit[4], bit[6] (8'b01010101)
 697 // to align with the CONSYS UART shift issue, increase SAMPLE_COUNT by 1 for bit[0], bit[2], bit[4], bit[5], bit[6] (8'b01110101)
698 DRV_WriteReg32(IDC_UART_FRACDIV_L_TX, 0x75);
699 DRV_WriteReg32(IDC_UART_FRACDIV_L_RX, 0x75);
 700 // orig: increase SAMPLE_COUNT by 1 for the STOP bits (2'b10)
 701 // to align with the CONSYS UART shift issue, do not increase SAMPLE_COUNT for the STOP bits (2'b00)
702 DRV_WriteReg32(IDC_UART_FRACDIV_M_TX, 0x0);
703 DRV_WriteReg32(IDC_UART_FRACDIV_M_RX, 0x0);
704#endif
705
706 tmp_lcr = DRV_Reg32(IDC_UART_LCR);
707 DRV_WriteReg32_NPW(IDC_UART_LCR, tmp_lcr | 0x3);
708}
709
710
711void drv_idc_set_fifo_trigger(kal_uint8 rx_threshold)
712{
713 DRV_WriteReg32_NPW(IDC_UART_RXTRIG, rx_threshold);
714 return;
715}
716
717void drv_idc_set_pm_config(kal_uint8 pm_idx, kal_uint8 priority, kal_uint8 priority_bit_en, kal_uint8 pattern, kal_uint8 pattern_bit_en)
718{
719 IRQMask(MD_IRQID_IDC_PM_INT);
720
721 // Use DSB to make sure that pattern match is turned off before setting pattern
722 DRV_WriteReg32_NPW(IDC_PRI_BITEN(pm_idx), 0);
723 //Data_Sync_Barrier();
724
725 //Data_Sync_Barrier();
726
 727 // PRI_BITEN must be written last
728 DRV_WriteReg32(IDC_PAT(pm_idx), pattern);
729 DRV_WriteReg32(IDC_PAT_BITEN(pm_idx), pattern_bit_en);
730 DRV_WriteReg32(IDC_PRI(pm_idx), priority);
731 DRV_WriteReg32_NPW(IDC_PRI_BITEN(pm_idx), priority_bit_en);
732
733 IRQUnmask(MD_IRQID_IDC_PM_INT);
734}
735
736void drv_idc_get_pm_config(kal_uint8 pm_idx, kal_uint8 *priority, kal_uint8 *priority_bit_en, kal_uint8 *pattern, kal_uint8 *pattern_bit_en)
737{
738 *priority = DRV_Reg8(IDC_PRI(pm_idx));
739 *priority_bit_en = DRV_Reg8(IDC_PRI_BITEN(pm_idx));
740 *pattern = DRV_Reg8(IDC_PAT(pm_idx));
741 *pattern_bit_en = DRV_Reg8(IDC_PAT_BITEN(pm_idx));
742}
743
744void drv_idc_set_new_pm_config(kal_uint8 pattern0, kal_uint8 pattern1)
745{
746 kal_hrt_take_itc_lock(KAL_ITC_IDC, KAL_INFINITE_WAIT);
747 if(PM_INIT_FLAG == KAL_TRUE){
748 kal_hrt_give_itc_lock(KAL_ITC_IDC);
749 MD_TRC(IDC_TAKE_FLAG_FAIL_MSG, __FUNCTION__);
750 return;
751 }
752 else{
753 PM_INIT_FLAG = KAL_TRUE;
754 MM_Sync();
755 kal_hrt_give_itc_lock(KAL_ITC_IDC);
756 }
757
758 IRQMask(MD_IRQID_IDC_PM_INT);
759
760 DRV_WriteReg32(IDC_NEW_PAT0, pattern0);
761 DRV_WriteReg32_NPW(IDC_NEW_PAT1, pattern1);
762
763 IRQUnmask(MD_IRQID_IDC_PM_INT);
764
765 kal_hrt_take_itc_lock(KAL_ITC_IDC, KAL_INFINITE_WAIT);
766 PM_INIT_FLAG = KAL_FALSE;
767 MM_Sync();
768 kal_hrt_give_itc_lock(KAL_ITC_IDC);
769
770 MD_TRC(IDC_IDC_SET_NEW_PM_MSG, pattern0, pattern1);
771}
772
773void drv_idc_get_new_pm_config(kal_uint8 *pattern0, kal_uint8 *pattern1)
774{
775 *pattern0 = DRV_Reg8(IDC_NEW_PAT0);
776 *pattern1 = DRV_Reg8(IDC_NEW_PAT1);
777}
778
779#if defined(__MD93__)
780void drv_idc_send_event(IDC_EVENT_T event, kal_bool sleep_mode)
781{
782 kal_uint32 i = 0;
783
784#ifdef ATEST_DRV_ENABLE
785// DT_IDC_PRINTF("drv_idc: send event\n\r");
786#endif
787
788 if (sleep_mode)
789 {
790 // Clear scheduled events
791 drv_idc_stop_event(0xFFFF);
792
793#ifndef ATEST_DRV_ENABLE
794 SleepDrv_LockSleep(SLEEP_CTL_IDC, SMP);
795#endif
796
797 IRQMask(MD_IRQID_IDC_PM_INT);
798 // clear PM configuration
799 for (i = 0; i < 4; ++i)
800 {
801 DRV_WriteReg32(IDC_PRI_BITEN(i), 0);
802 }
803 }
804
805 kal_hrt_take_itc_lock(KAL_ITC_IDC, KAL_INFINITE_WAIT);
806 before_poll_time_SE = ust_get_current_time();
807 //********protect critical section*******
808
809 DRV_WriteReg32(IDC_UART_BASE, event.data[0]);
810 DRV_WriteReg32(IDC_UART_BASE, event.data[1]);
811
812 //********protect critical section*******
813 after_poll_time_SE = ust_get_current_time();
814 kal_hrt_give_itc_lock(KAL_ITC_IDC);
815
816 if (sleep_mode)
817 {
818 // Turn off ERBFI and turn on ETBEI
819 DRV_WriteReg32_NPW(IDC_UART_IER, IDC_UART_IER_ETBEI);
820 }
821
822 return;
823}
824#elif defined(__MD95__)
825kal_bool drv_idc_send_event_95(IDC_EVENT_T event, kal_bool sleep_mode)
826{
827 kal_uint8 num = 0, i = 0, DROP = 0;
828 kal_uint32 reschedule_event_offset = 0;
829
830#ifdef ATEST_DRV_ENABLE
831// DT_IDC_PRINTF("drv_idc: send event\n\r");
832#endif
833
834 MD_TRC(IDC_SEND_EVENT_SLEEP_MODE_STS_MSG, sleep_mode);
835 // Stop all scheduled events
836 drv_idc_stop_event(0xFFFF);
837
838 if(!sleep_mode)
839 {
840 // Check drop behavior
841 for(i = 0; i < 16; i++)
842 {
843 if((idc_port.event_usage_bit_map >> i) & 0x1)
844 {
845 //if(idc_port.event_offset_table[i] <= (idc_port.phy_time + 61440))
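 // 61440 ticks = 1 ms and 614400 ticks = 10 ms at the 61.44 MHz LTE timer:
 // if any scheduled event is due within ~1 ms of the current phy_time, all
 // pending events are dropped below and the DROP flag is raised.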
846 if(TIME_DIFF_WITHIN((idc_port.phy_time + 61440), idc_port.event_offset_table[i], 614400))
847 {
 848 //clear all events
849 idc_port.event_cnt = 0;
850 idc_port.event_pending_cnt = 0;
851 idc_port.event_longest_index = 0;
852 idc_port.event_longest_time = 0;
853 //idc_port.phy_time = 0;
854 idc_port.event_w_index = 0;
855 idc_port.event_usage_bit_map = 0x0;
856 idc_port.rx_buf = 0;
857 idc_port.sram_w_index = 0;
858 kal_mem_set(idc_port.event_offset_table, 0, sizeof(idc_port.event_offset_table));
859 kal_mem_set(idc_port.event_data_table, 0, sizeof(idc_port.event_data_table));
860 kal_mem_set(idc_port.sram_table_usage, 0, sizeof(idc_port.sram_table_usage));
861
862 //DROP flag
863 DROP = 1;
864 MD_TRC(IDC_SEND_EVENT_DROP_MSG,);
865 break;
866 }
867
868 }
869
870 }
871
872 }
873
874
875#ifndef ATEST_DRV_ENABLE
876 //Lock sleep if sleep_mode is KAL_TRUE
877 if (sleep_mode)
878 {
879 SleepDrv_LockSleep(SLEEP_CTL_IDC, SMP);
880 }
881#endif
882
883 kal_hrt_take_itc_lock(KAL_ITC_IDC, KAL_INFINITE_WAIT);
884 before_poll_time_SE = ust_get_current_time();
885 //********protect critical section*******
886
887 for(num = 0; num < event.num; num++)
888 {
889 DRV_WriteReg32(IDC_UART_BASE, event.data[num]);
890 }
891
892
893 //********protect critical section*******
894 after_poll_time_SE = ust_get_current_time();
895 kal_hrt_give_itc_lock(KAL_ITC_IDC);
896
897 MD_TRC(IDC_SEND_EVENT_MSG, event.num,event.data[0], event.data[1], event.data[2], event.data[3], event.data[4], event.data[5], event.data[6], event.data[7], event.data[8]);
898
899 if (sleep_mode)
900 {
901 //clear new PM configuration
902 DRV_WriteReg32(IDC_REMAPPING_EN, 0);
903 // Turn off ERBFI and turn on ETBEI
904 DRV_WriteReg32_NPW(IDC_UART_IER, IDC_UART_IER_ETBEI);
905 IRQUnmask(MD_IRQID_IDC_UART_IRQ);
906 }
907
908 if(!sleep_mode)
909 {
 910 //Restart the scheduler if no drop occurred
911 if(!DROP)
912 {
913 for(i = 0; i < 16; i++)
914 {
915 if((idc_port.event_usage_bit_map >> i) & 0x1)
916 {
917 reschedule_event_offset = DRV_Reg32(IDC_CTRL_EVENT_SETETING(i));
918 DRV_WriteReg32(IDC_CTRL_EVENT_SETETING(i), (reschedule_event_offset | (1 << 31)));
919 }
920 }
921 MD_TRC(IDC_SEND_EVENT_RESCHEDULE_MSG,);
922 }
923
924 }
925
926 if(DROP)
927 return KAL_FALSE;
928 else
929 return KAL_TRUE;
930}
931#elif defined(__MD97__) || defined(__MD97P__)
932kal_bool drv_idc_send_event_97(IDC_EVENT_T event, kal_bool sleep_mode)
933{
934 kal_uint8 BUSY = 0;
935 kal_bool LTE_STS = KAL_FALSE, NR_STS = KAL_FALSE;
936
937 MD_TRC(IDC_SEND_EVENT_SLEEP_MODE_STS_MSG, sleep_mode);
938
939 if(sleep_mode){
940
941 kal_hrt_take_itc_lock(KAL_ITC_IDC, KAL_INFINITE_WAIT);
942 LTE_STS = LTE_FLAG;
943 NR_STS = NR_FLAG;
944 kal_hrt_give_itc_lock(KAL_ITC_IDC);
945
 946 //check that the immediate_event slot is idle
947 if((DRV_Reg32(IDC_CTRL_SCH_STATUS)& 0x3) != 0x0){
948 BUSY = 1;
949 MD_TRC(IDC_SEND_EVENT_FAIL_MSG, event.num, event.data[0], event.data[1], event.data[2], event.data[3], event.data[4], event.data[5], event.data[6], event.data[7], event.data[8]);
950 }
951 else{
952 //set_immediate_event
953 idc_set_immediate_event(0, event.data, event.num, 0, (event.num-1));
954 //MD_TRC(IDC_SEND_EVENT_MSG, event.num, event.data[0], event.data[1], event.data[2], event.data[3], event.data[4], event.data[5], event.data[6], event.data[7], event.data[8]);
955 }
956
957 if((LTE_STS == KAL_TRUE) && (NR_STS == KAL_TRUE)){
958 if(BUSY)
959 return KAL_FALSE;
960 else
961 return KAL_TRUE;
962 }
963 else{
964#ifndef ATEST_DRV_ENABLE
965 //Lock sleep if sleep_mode is KAL_TRUE
966 SleepDrv_LockSleep(SLEEP_CTL_IDC, SMP);
967#endif
968#if defined(MT6297)
969 //disable new PM
970 DRV_WriteReg32(IDC_REMAPPING_EN, 0);
971#endif
972 // Turn off ERBFI and turn on ETBEI
973 DRV_WriteReg32_NPW(IDC_UART_IER, IDC_UART_IER_ETBEI);
974 IRQUnmask(MD_IRQID_IDC_UART_IRQ);
975
976
977 if(BUSY){
978#ifndef ATEST_DRV_ENABLE
979 SleepDrv_UnlockSleep(SLEEP_CTL_IDC, SMP);
980#endif
981 return KAL_FALSE;
982 }
983 else
984 return KAL_TRUE;
985 }
986 }
987 //sleep_mode is KAL_FALSE, send immediate event only
988 else{
989 kal_uint8 txfifo_cnt = 0;
990
991 txfifo_cnt = DRV_Reg8(IDC_TX_WOFFSET);
992 //MD_TRC(IDC_TXFIFO_CNT_MSG, txfifo_cnt);
993
994 //log IDC_UART status & MD_L1_AO
995 if(txfifo_cnt >= 30){
996 DRV_WriteReg32_NPW(IDC_UART_FEATURE_SEL, 0x1);
997 MD_TRC(IDC_SEND_EVENT_DUMP_MSG, DRV_Reg8(IDC_UART_SAMPLE_COUNT), DRV_Reg8(IDC_UART_SAMPLE_POINT), DRV_Reg8(IDC_UART_GUARD), DRV_Reg8(IDC_UART_ESCAPE_EN), DRV_Reg8(IDC_UART_SLEEP_EN), DRV_Reg8(IDC_UART_DEBUG_1), DRV_Reg8(IDC_UART_DEBUG_8), DRV_Reg8(IDC_UART_SLEEP_CTRL), DRV_Reg8(IDC_UART_MISC_CTRL), DRV_Reg8(IDC_UART_DEBUG_10), DRV_Reg8(IDC_UART_DLL_backup), DRV_Reg8(IDC_UART_DLM_backup), DRV_Reg8(IDC_UART_EFR_backup), DRV_Reg8(IDC_UART_FEATURE_SEL), DRV_Reg32(MDL1AO_CLK_STA), DRV_Reg32(MDL1AO_PDN_STA), DRV_Reg32(IDC_CTRL_FRC_REG));
998 DRV_WriteReg32_NPW(IDC_UART_FEATURE_SEL, 0x0);
999 }
1000
1001 //TXFIFO threshold check (keep 10 byte margin)
1002 if(txfifo_cnt >= 126){
1003 MD_TRC(IDC_SEND_EVENT_FAIL_MSG, event.num, event.data[0], event.data[1], event.data[2], event.data[3], event.data[4], event.data[5], event.data[6], event.data[7], event.data[8]);
1004 IDC_ASSERT(0);
1005 //return KAL_FALSE;
1006 }
 1007 //check that the immediate_event slot is idle
1008 if((DRV_Reg32(IDC_CTRL_SCH_STATUS)& 0x3) != 0x0){
1009 BUSY = 1;
1010 MD_TRC(IDC_SEND_EVENT_FAIL_MSG, event.num, event.data[0], event.data[1], event.data[2], event.data[3], event.data[4], event.data[5], event.data[6], event.data[7], event.data[8]);
1011 }
1012 else{
1013 //set_immediate_event
1014 idc_set_immediate_event(0, event.data, event.num, 0, (event.num-1));
1015 //MD_TRC(IDC_SEND_EVENT_MSG, event.num, event.data[0], event.data[1], event.data[2], event.data[3], event.data[4], event.data[5], event.data[6], event.data[7], event.data[8]);
1016 }
1017
1018 if(BUSY)
1019 return KAL_FALSE;
1020 else
1021 return KAL_TRUE;
1022 }
1023}
1024#endif
1025
1026#if defined(__MD93__)
1027void drv_idc_schedule_event(IDC_EVENT_T event)
1028{
1029 kal_uint32 i = 0, sram_full_flag = 0, buf_displace_happen_flag = 0, put_in_buf_flag = 0;
1030
1031 kal_hrt_take_itc_lock(KAL_ITC_IDC, KAL_INFINITE_WAIT);
1032 before_poll_time_SCH = ust_get_current_time();
1033 //********protect critical section*******
1034 idc_ctrl_enter(IDC_CTRL_LOCK);
1035 idc_port.schedule_state = IDC_RUN;
1036
1037 event.offset = event.offset & 0xFFFFF;
1038
1039 /* if(((event.offset > idc_port.phy_time)
1040 && ((event.offset - idc_port.phy_time) > 614400))
1041 || ((event.offset < idc_port.phy_time)
1042 && ((1048575 - idc_port.phy_time + event.offset) > 614400)))*/
1043 if(TIME_DIFF_EXCEED(event.offset, idc_port.phy_time, 614400))
1044 {
1045 kal_hrt_give_itc_lock(KAL_ITC_IDC);
1046 MD_TRC(IDC_SCHEDULE_OVER_10MS_MSG,idc_port.phy_time, event.offset, event.data[0], event.data[1]);
 1047 IDC_ASSERT(0); // Time offset must not be larger than 10 ms
1048 }
1049
1050 if(idc_port.event_cnt < 32)
1051 {
1052 idc_port.event_cnt++;
1053 if(idc_port.event_usage_bit_map != 0xFFFF) // SRAM isn't full
1054 {
1055 // Find empty event
1056 while (1)
1057 {
1058 if ((1 << idc_port.event_w_index) & idc_port.event_usage_bit_map)
1059 {
1060 idc_port.event_w_index++;
1061 if (idc_port.event_w_index == 16) idc_port.event_w_index = 0;
1062 }
1063 else
1064 {
1065 break;
1066 }
1067 }
1068
1069 // Set event data
1070 DRV_WriteReg32(IDC_CTRL_EVT_DATA(idc_port.event_w_index), (event.data[0] + (event.data[1] << 8)));
1071
1072 // Set time stamps para & trigger event
1073 DRV_WriteReg32(IDC_CTRL_EVENT_SETETING(idc_port.event_w_index), event.offset + (1 << 31));
1074
1075 // Log TX information
1076 //MD_TRC(IDC_SCHEDULE_2_MSG, event.offset, event.data[0], event.data[1]);
1077
1078 // Record event_offset & event_data in the table
1079 idc_port.event_offset_table[idc_port.event_w_index] = event.offset;
1080 idc_port.event_data_table[idc_port.event_w_index] = (event.data[0] + (event.data[1] << 8));
1081
1082 // Record the number and usage bitmap for the scheduler
1083 idc_port.event_usage_bit_map |= (1 << idc_port.event_w_index);
1084
1085 // Update idc_port.event_longest_time
1086 if(idc_port.event_cnt == 1)
1087 {
1088 idc_port.event_longest_index = idc_port.event_w_index;
1089 idc_port.event_longest_time = event.offset;
1090 }
1091 if(TIME_DIFF_WITHIN(event.offset, idc_port.event_longest_time, 614400))
1092/* if(((event.offset > idc_port.event_longest_time)
1093 && ((event.offset - idc_port.event_longest_time) <=614400))
1094 || (((event.offset < idc_port.event_longest_time)
1095 && ((0xFFFFF - idc_port.event_longest_time + event.offset) <= 614400))))*/
1096 {
1097 idc_port.event_longest_index = idc_port.event_w_index;
1098 idc_port.event_longest_time = event.offset;
1099 }
1100
1101 idc_port.event_w_index++;
1102 if (idc_port.event_w_index == 16) idc_port.event_w_index = 0;
1103 }
1104 else //SRAM is full, we need to put event into event_buffer
1105 {
1106 sram_full_flag = 1;
1107 //Log SRAM is FULL information
1108 //MD_TRC(IDC_SRAM_FULL_DATA_IN_EVENT_BUFFER);
1109
1110 // Replace event in SRAM with incoming event, and put event into event_buffer
1111 if(TIME_DIFF_WITHIN(idc_port.event_longest_time, event.offset, 614400))
1112/* if(((idc_port.event_longest_time > event.offset)
1113 &&((idc_port.event_longest_time - event.offset) <= 614400))
1114 || ((idc_port.event_longest_time < event.offset)
1115 && ((1048575 - event.offset + idc_port.event_longest_time) <= 614400)))*/
1116 {
 1117 // If the event has been triggered and is being processed, just wait...
1118 while(((DRV_Reg32(IDC_CTRL_SCH_STATUS) >> (idc_port.event_longest_index * 2)) & 0x3) == 0x3);
 1119 // Longest_offset_evt hasn't been triggered yet; stop the event before setting its data & time stamps
1120 if(((DRV_Reg32(IDC_CTRL_SCH_STATUS) >> (idc_port.event_longest_index * 2)) & 0x3) == 0x2)
1121 {
1122 // Stop longest_offset_evt
1123 drv_idc_stop_event((0x1 << idc_port.event_longest_index));
1124
1125 //put event into event_buffer
1126 idc_port.event_pending_offset_table[idc_port.event_pending_cnt] = idc_port.event_longest_time;
1127 idc_port.event_pending_data_table[idc_port.event_pending_cnt] = idc_port.event_data_table[idc_port.event_longest_index];
1128 idc_port.event_pending_cnt++;
1129 //Log information of event in event buffer
1130 buf_displace_happen_flag = 1;
1131 //MD_TRC(IDC_DATA_IN_EVENT_BUFFER_DISPLACE_HAPPEN, idc_port.event_longest_time, (idc_port.event_data_table[idc_port.event_longest_index] & 0xFF), (idc_port.event_data_table[idc_port.event_longest_index] >> 8));
1132
1133
1134 // Set event data
1135 DRV_WriteReg32(IDC_CTRL_EVT_DATA(idc_port.event_longest_index), (event.data[0] + (event.data[1] << 8)));
1136 // Set time stamps para & trigger event
1137 DRV_WriteReg32(IDC_CTRL_EVENT_SETETING(idc_port.event_longest_index), event.offset + (1 << 31));
1138
1139 // Record event_offset & event_data in the table
1140 idc_port.event_offset_table[idc_port.event_longest_index] = event.offset;
1141 idc_port.event_data_table[idc_port.event_longest_index] = event.data[0] + (event.data[1] << 8);
1142
1143 //update idc_port.event_longest_time
1144 idc_port.event_longest_time = event.offset;
1145 for(i = 0; i<16; i++)
1146 {
1147 if(TIME_DIFF_WITHIN(idc_port.event_offset_table[i], idc_port.event_longest_time, 614400))
1148/* if((idc_port.event_offset_table[i] > idc_port.event_longest_time) ||
1149 ((idc_port.event_offset_table[i] < idc_port.event_longest_time)
1150 && ((1048575 - idc_port.event_longest_time + idc_port.event_offset_table[i]) <= 614400)))*/
1151 {
1152 idc_port.event_longest_time = idc_port.event_offset_table[i];
1153 idc_port.event_longest_index = i;
1154 }
1155 }
1156 }
 1157 // Longest_offset_evt has already been triggered; we reset the usage_bit_map & directly set the data & time stamps
1158 else if(((DRV_Reg32(IDC_CTRL_SCH_STATUS) >> (idc_port.event_longest_index * 2)) & 0x3) == 0x0)
1159 {
1160 idc_port.event_cnt--;
1161 // Set event data
1162 DRV_WriteReg32(IDC_CTRL_EVT_DATA(idc_port.event_longest_index), (event.data[0] + (event.data[1] << 8)));
1163
1164 // Set time stamps para
1165 DRV_WriteReg32(IDC_CTRL_EVENT_SETETING(idc_port.event_longest_index), event.offset + (1 << 31));
1166
1167 // Record event_offset & event_data in the table
1168 idc_port.event_offset_table[idc_port.event_longest_index] = event.offset;
1169 idc_port.event_data_table[idc_port.event_longest_index] = event.data[0] + (event.data[1] << 8);
1170
1171 //update idc_port.event_longest_time
1172 idc_port.event_longest_time = event.offset;
1173 }
1174 }
1175
 1176 //Incoming event is later than idc_port.event_longest_time
1177 else
1178 {
 1179 //Check whether the incoming event's offset is within 1 ms of idc_port.phy_time (if so, more than 16 events were scheduled within 1 ms)
1180 if(TIME_DIFF_WITHIN(event.offset, idc_port.phy_time, 61440))
1181/* if(((event.offset > idc_port.phy_time)
1182 && ((event.offset - idc_port.phy_time) <= 61440))
1183 || ((event.offset < idc_port.phy_time)
1184 && (((1048575 - idc_port.phy_time) + event.offset) <= 61440)))*/
1185 IDC_ASSERT(0); // Set over 16 events in 1ms
1186 // Put incoming event into event_buffer
1187 idc_port.event_pending_offset_table[idc_port.event_pending_cnt] = event.offset;
1188 idc_port.event_pending_data_table[idc_port.event_pending_cnt] = event.data[0] + (event.data[1] << 8);
1189 idc_port.event_pending_cnt++;
1190 //Log information of event in event_buffer
1191 put_in_buf_flag = 1;
1192 //MD_TRC(IDC_DATA_IN_EVENT_BUFFER, event.offset, event.data[0], event.data[1]);
1193 }
1194 }
1195 }
1196
 1197 else //EL1C scheduled more than 32 events to drv_idc
1198 IDC_ASSERT(0);
1199
1200 idc_ctrl_leave(IDC_CTRL_LOCK);
1201
1202 //********protect critical section*******
1203 after_poll_time_SCH = ust_get_current_time();
1204 kal_hrt_give_itc_lock(KAL_ITC_IDC);
1205
1206 if(sram_full_flag == 1)
1207 MD_TRC(IDC_SRAM_FULL_DATA_IN_EVENT_BUFFER,);
1208 if(buf_displace_happen_flag == 1)
1209 MD_TRC(IDC_DATA_IN_EVENT_BUFFER_DISPLACE_HAPPEN,idc_port.event_longest_time, (idc_port.event_data_table[idc_port.event_longest_index] & 0xFF), (idc_port.event_data_table[idc_port.event_longest_index] >> 8));
1210 if(put_in_buf_flag == 1)
1211 MD_TRC(IDC_DATA_IN_EVENT_BUFFER,event.offset, event.data[0], event.data[1]);
1212
1213 return;
1214}
1215
1216#elif defined(__MD95__)
1217kal_bool drv_idc_schedule_event_95(IDC_EVENT_T event, IDC_CTRL_DROP_CMD_T *drop_cmd)
1218{
1219 kal_uint32 i = 0, drop_cmd_flag = 0, mask = 0xFFFFFFFF;
1220 kal_uint32 w_data = 0, w_idx = 0, value = 0, tmp_sram_idx = 0;
1221
1222 kal_hrt_take_itc_lock(KAL_ITC_IDC, KAL_INFINITE_WAIT);
1223 before_poll_time_SCH = ust_get_current_time();
1224 //********protect critical section*******
1225 idc_ctrl_enter(IDC_CTRL_LOCK);
1226 idc_port.schedule_state = IDC_RUN;
1227
1228 event.offset = event.offset & 0xFFFFF;
1229
1230 /* if(((event.offset > idc_port.phy_time)
1231 && ((event.offset - idc_port.phy_time) > 614400))
1232 || ((event.offset < idc_port.phy_time)
1233 && ((1048575 - idc_port.phy_time + event.offset) > 614400)))*/
1234
1235 if(TIME_DIFF_EXCEED(event.offset, idc_port.phy_time, 614400))
1236 {
1237 kal_hrt_give_itc_lock(KAL_ITC_IDC);
1238 MD_TRC(IDC_SCHEDULE_OVER_10MS_MSG,idc_port.phy_time, event.offset, event.data[0], event.data[1]);
1239 //IDC_ASSERT(0); // Time offset must not larger than 10ms
1240 idc_ctrl_leave(IDC_CTRL_LOCK);
1241 //********protect critical section*******
1242 return KAL_FALSE;
1243 }
1244
1245 if(idc_port.event_usage_bit_map != 0xFFFF) // SRAM isn't full
1246 {
1247 // Find empty event
1248 while (1)
1249 {
1250 if ((1 << idc_port.event_w_index) & idc_port.event_usage_bit_map)
1251 {
1252 idc_port.event_w_index++;
1253 if (idc_port.event_w_index == 16) idc_port.event_w_index = 0;
1254 }
1255 else
1256 {
1257 break;
1258 }
1259 }
1260
1261 // Find empty sram_w_idx
1262 sram_wrap = 0;
1263 while (1)
1264 {
1265 if (idc_port.sram_table_usage[idc_port.sram_w_index] == 1)
1266 {
1267 idc_port.sram_w_index++;
1268 if (idc_port.sram_w_index == 78)
1269 {
1270 if(sram_wrap == 1)
1271 {
1272 //if sram full, return drop_cmd
1273 idc_ctrl_leave(IDC_CTRL_LOCK);
1274 //********protect critical section*******
1275 after_poll_time_SCH = ust_get_current_time();
1276 kal_hrt_give_itc_lock(KAL_ITC_IDC);
1277
1278 MD_TRC(IDC_SCHEDULE_SRAM_FULL_MSG,);
1279 drv_idc_return_drop_cmd(event, drop_cmd);
1280
1281 return KAL_FALSE;
1282 }
1283
1284
1285 idc_port.sram_w_index = 0;
1286 sram_wrap = 1;
1287 }
1288 }
1289 else
1290 {
1291 break;
1292 }
1293 }
1294
 1295 //If there is not enough contiguous SRAM space, return drop_cmd
1296 for(i = 0; i < event.num; i++)
1297 {
1298 tmp_sram_idx = idc_port.sram_w_index + i;
1299 //wrap case
1300 if(tmp_sram_idx > 75)
1301 {
1302 idc_port.sram_w_index = 0;
1303 tmp_sram_idx = 0;
1304 i = 0;
1305 }
1306 //DT_IDC_PRINTF("*** sram_idx : %d***", tmp_sram_idx);
1307 if(idc_port.sram_table_usage[tmp_sram_idx] == 1)
1308 {
1309 //return drop_cmd
1310 idc_ctrl_leave(IDC_CTRL_LOCK);
1311 //********protect critical section*******
1312 after_poll_time_SCH = ust_get_current_time();
1313 kal_hrt_give_itc_lock(KAL_ITC_IDC);
1314
1315 MD_TRC(IDC_SCHEDULE_NO_SEQUENTIAL_SRAM_MSG,);
1316 drv_idc_return_drop_cmd(event, drop_cmd);
1317
1318 return KAL_FALSE;
1319 }
1320
1321 }
1322
1323 // Set event data
1324 w_idx = idc_port.sram_w_index;
1325
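 /*
 * The event payload lives in byte-wide SRAM exposed through 32-bit
 * IDC_CTRL_EVT_DATA registers, so the loop below packs event.data[] four
 * bytes per register using a read-modify-write (the mask preserves
 * neighbouring bytes that belong to other events).
 */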
1326 for(i = 0; i < event.num; i++)
1327 {
1328 w_data = w_data | (event.data[i] << (8 * (w_idx % 4)));
1329 mask &= ~(0xFF << (8 * (w_idx % 4)));
1330 w_idx++;
1331 if((w_idx % 4 == 0) || (i == event.num - 1))
1332 {
1333 if(w_idx % 4 == 0)
1334 value = DRV_Reg32(IDC_CTRL_EVT_DATA((w_idx/4) - 1));
1335 else
1336 value = DRV_Reg32(IDC_CTRL_EVT_DATA(w_idx/4));
1337 value &= mask;
1338 value |= w_data;
1339 if(w_idx % 4 == 0)
1340 DRV_WriteReg32(IDC_CTRL_EVT_DATA((w_idx/4) - 1), value);
1341 else
1342 DRV_WriteReg32(IDC_CTRL_EVT_DATA(w_idx/4), value);
1343 w_data = 0;
1344
1345 mask = 0xFFFFFFFF;
1346 }
1347 idc_port.sram_table_usage[idc_port.sram_w_index + i] = 1;
1348 }
1349
1350 // Set event memory position
1351 DRV_WriteReg32(IDC_CTRL_EVENT_MEM_POS(idc_port.event_w_index), (idc_port.sram_w_index << 8) + (idc_port.sram_w_index + event.num - 1));
1352
1353 // Set time stamps para & trigger event
1354 DRV_WriteReg32(IDC_CTRL_EVENT_SETETING(idc_port.event_w_index), event.offset + (1 << 31));
1355
1356 //DT_IDC_PRINTF("*** schedule event done \r\n***");
1357 // Record event_offset & event_data in the table
1358 idc_port.event_offset_table[idc_port.event_w_index] = event.offset;
1359 kal_mem_cpy(idc_port.event_data_table[idc_port.event_w_index], event.data, sizeof(event.data));
1360 idc_port.event_byte_num[idc_port.event_w_index] = event.num;
1361 idc_port.event_sram_sta_idx[idc_port.event_w_index] = idc_port.sram_w_index;
1362 // Record the number and usage bitmap for the scheduler
1363 idc_port.event_usage_bit_map |= (1 << idc_port.event_w_index);
1364 // Add event_cnt
1365 idc_port.event_cnt++;
1366
1367 // Add sram_w_idx
1368 idc_port.sram_w_index += event.num;
1369
1370 idc_port.event_w_index++;
1371 if (idc_port.event_w_index == 16) idc_port.event_w_index = 0;
1372 }
1373 else //SRAM is full, we need to return drop_cmd
1374 {
1375 //return drop_cmd
1376 drop_cmd_flag = 1;
1377 }
1378
1379 idc_ctrl_leave(IDC_CTRL_LOCK);
1380
1381 //********protect critical section*******
1382 after_poll_time_SCH = ust_get_current_time();
1383 kal_hrt_give_itc_lock(KAL_ITC_IDC);
1384
1385 // Log TX information
1386 //MD_TRC(IDC_SCHEDULE_2_MSG, event.offset, event.data[0], event.data[1]);
1387
1388 if(drop_cmd_flag == 1)
1389 {
1390 MD_TRC(IDC_SCHEDULE_EVENT_FULL_MSG,);
1391 drv_idc_return_drop_cmd(event, drop_cmd);
1392 return KAL_FALSE;
1393 }
1394
1395 return KAL_TRUE;
1396}
1397#elif defined(__MD97__) || defined(__MD97P__)
1398kal_bool drv_idc_schedule_event_97(IDC_EVENT_T event, IDC_CTRL_DROP_CMD_T *drop_cmd)
1399{
1400 kal_uint32 i = 0, drop_cmd_flag = 0, mask = 0xFFFFFFFF;
1401 kal_uint32 w_data = 0, w_idx = 0, value = 0, tmp_sram_idx = 0, end_sram_idx = 0;
1402
1403 kal_hrt_take_itc_lock(KAL_ITC_IDC, KAL_INFINITE_WAIT);
1404 before_poll_time_SCH = ust_get_current_time();
1405 //********protect critical section*******
1406 //idc_ctrl_enter(IDC_CTRL_LOCK); //remove because LTE/NR RAT flow
1407 idc_port.schedule_state = IDC_RUN;
1408
1409 event.offset = event.offset & 0x3FFFFFFF;
1410
1411 if(TIME_DIFF_EXCEED(event.offset, idc_port.frc_time, 10000))
1412 {
1413 kal_hrt_give_itc_lock(KAL_ITC_IDC);
1414 MD_TRC(IDC_SCHEDULE_OVER_10MS_MSG, idc_port.frc_time, event.offset, event.data[0], event.data[1]);
1415 //idc_ctrl_leave(IDC_CTRL_LOCK);
1416 //********protect critical section*******
1417 return KAL_FALSE;
1418 }
1419
1420 if(idc_port.event_usage_bit_map != 0xFFFFFFFF) // SRAM isn't full
1421 {
1422 // Find empty event
1423 while (1)
1424 {
1425 if ((1 << idc_port.event_w_index) & idc_port.event_usage_bit_map)
1426 {
1427 idc_port.event_w_index++;
1428 if (idc_port.event_w_index == IDC_MAX_EVENT_NUM) idc_port.event_w_index = 1;
1429 }
1430 else
1431 {
1432 break;
1433 }
1434 }
1435
1436 // Find empty sram_w_idx
1437 sram_wrap = 0;
1438 while (1)
1439 {
1440 if (idc_port.sram_table_usage[idc_port.sram_w_index] == 1)
1441 {
1442 idc_port.sram_w_index++;
1443 if (idc_port.sram_w_index == IDC_MAX_SRAM_SIZE)
1444 {
1445 if(sram_wrap == 1)
1446 {
1447 //if sram full, return drop_cmd
1448 //idc_ctrl_leave(IDC_CTRL_LOCK);
1449 //********protect critical section*******
1450 after_poll_time_SCH = ust_get_current_time();
1451 kal_hrt_give_itc_lock(KAL_ITC_IDC);
1452
1453 MD_TRC(IDC_SCHEDULE_SRAM_FULL_MSG);
1454 drv_idc_return_drop_cmd(event, drop_cmd);
1455
1456 return KAL_FALSE;
1457 }
1458
1459
1460 idc_port.sram_w_index = IDC_SRAM_WRAP_IDX;
1461 sram_wrap = 1;
1462 }
1463 }
1464 else
1465 {
1466 break;
1467 }
1468 }
1469
 1470 //If there is not enough contiguous SRAM space, return drop_cmd
1471 for(i = 0; i < event.num; i++)
1472 {
1473 tmp_sram_idx = idc_port.sram_w_index + i;
1474 //wrap case
1475 if(tmp_sram_idx >= IDC_MAX_SRAM_SIZE)
1476 {
1477 idc_port.sram_w_index = IDC_SRAM_WRAP_IDX;
1478 tmp_sram_idx = IDC_SRAM_WRAP_IDX;
1479 }
1480 //DT_IDC_PRINTF("*** sram_idx : %d***", tmp_sram_idx);
1481 if(idc_port.sram_table_usage[tmp_sram_idx] == 1)
1482 {
1483 //return drop_cmd
1484 //idc_ctrl_leave(IDC_CTRL_LOCK);
1485 //********protect critical section*******
1486 after_poll_time_SCH = ust_get_current_time();
1487 kal_hrt_give_itc_lock(KAL_ITC_IDC);
1488
1489 MD_TRC(IDC_SCHEDULE_NO_SEQUENTIAL_SRAM_MSG);
1490 drv_idc_return_drop_cmd(event, drop_cmd);
1491
1492 return KAL_FALSE;
1493 }
1494
1495 }
1496
1497 end_sram_idx = idc_port.sram_w_index + event.num - 1;
1498 if(end_sram_idx >= IDC_MAX_SRAM_SIZE)
1499 end_sram_idx = end_sram_idx - IDC_MAX_SRAM_SIZE + IDC_SRAM_WRAP_IDX;
1500
1501 // Set event data
1502 w_idx = idc_port.sram_w_index;
1503
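 // Same byte-packing into 32-bit IDC_CTRL_EVT_DATA registers as the MD95 path,
 // plus wrap handling: once an index reaches IDC_MAX_SRAM_SIZE it continues
 // from IDC_SRAM_WRAP_IDX (indices below the wrap point are reserved for
 // immediate events).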
1504 for(i = 0; i < event.num; i++)
1505 {
1506 w_data = w_data | (event.data[i] << (8 * (w_idx % 4)));
1507 mask &= ~(0xFF << (8 * (w_idx % 4)));
1508 w_idx++;
1509 if((w_idx % 4 == 0) || (i == event.num - 1))
1510 {
1511 if(w_idx % 4 == 0)
1512 value = DRV_Reg32(IDC_CTRL_EVT_DATA((w_idx/4) - 1));
1513 else
1514 value = DRV_Reg32(IDC_CTRL_EVT_DATA(w_idx/4));
1515 value &= mask;
1516 value |= w_data;
1517 if(w_idx % 4 == 0)
1518 DRV_WriteReg32(IDC_CTRL_EVT_DATA((w_idx/4) - 1), value);
1519 else
1520 DRV_WriteReg32(IDC_CTRL_EVT_DATA(w_idx/4), value);
1521 w_data = 0;
1522
1523 mask = 0xFFFFFFFF;
1524 }
1525
1526 if(w_idx == IDC_MAX_SRAM_SIZE)
1527 w_idx = IDC_SRAM_WRAP_IDX;
1528
1529 if((idc_port.sram_w_index + i) >= IDC_MAX_SRAM_SIZE)
1530 idc_port.sram_table_usage[idc_port.sram_w_index + i - IDC_MAX_SRAM_SIZE + IDC_SRAM_WRAP_IDX] = 1;
1531 else
1532 idc_port.sram_table_usage[idc_port.sram_w_index + i] = 1;
1533 }
1534
1535 // Set event memory position
1536 DRV_WriteReg32(IDC_CTRL_EVENT_MEM_POS(idc_port.event_w_index), (idc_port.sram_w_index << 8) + end_sram_idx);
1537
1538 // Set time stamps para & trigger event
1539 DRV_WriteReg32(IDC_CTRL_EVENT_SETETING(idc_port.event_w_index), event.offset + (1 << 31));
1540
1541 //DT_IDC_PRINTF("*** schedule event done \r\n***");
1542 // Record event_offset & event_data in the table
1543 idc_port.event_offset_table[idc_port.event_w_index] = event.offset;
1544 kal_mem_cpy(idc_port.event_data_table[idc_port.event_w_index], event.data, sizeof(event.data));
1545 idc_port.event_byte_num[idc_port.event_w_index] = event.num;
1546 idc_port.event_sram_sta_idx[idc_port.event_w_index] = idc_port.sram_w_index;
1547 // Record the number and usage bitmap for the scheduler
1548 idc_port.event_usage_bit_map |= (1 << idc_port.event_w_index);
1549 // Add event_cnt
1550 idc_port.event_cnt++;
1551
1552 // Add sram_w_idx
1553 idc_port.sram_w_index += event.num;
1554 if (idc_port.sram_w_index == IDC_MAX_SRAM_SIZE)
1555 idc_port.sram_w_index = idc_port.sram_w_index - IDC_MAX_SRAM_SIZE + IDC_SRAM_WRAP_IDX;
1556
1557 idc_port.event_w_index++;
1558 if (idc_port.event_w_index == IDC_MAX_EVENT_NUM)
1559 idc_port.event_w_index = 1;
1560 }
1561 else //SRAM is full, we need to return drop_cmd
1562 {
1563 //return drop_cmd
1564 drop_cmd_flag = 1;
1565 }
1566
1567 //idc_ctrl_leave(IDC_CTRL_LOCK);
1568
1569 //********protect critical section*******
1570 after_poll_time_SCH = ust_get_current_time();
1571 kal_hrt_give_itc_lock(KAL_ITC_IDC);
1572
1573 // Log TX information
1574 MD_TRC(IDC_SCHEDULE_2_MSG, event.offset, event.data[0], event.data[1]);
1575
1576 if(drop_cmd_flag == 1)
1577 {
1578 MD_TRC(IDC_SCHEDULE_EVENT_FULL_MSG);
1579 drv_idc_return_drop_cmd(event, drop_cmd);
1580 return KAL_FALSE;
1581 }
1582
1583 return KAL_TRUE;
1584}
1585#endif
1586
1587void drv_idc_return_drop_cmd(IDC_EVENT_T event, IDC_CTRL_DROP_CMD_T *drop_cmd)
1588{
1589
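 // Decode the dropped command header for the caller: bits [4:1] of data[0]
 // carry the command type, and type 0xF appears to be an extended command
 // whose sub-type sits in bits [7:2] of data[1].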
1590 drop_cmd->cmd_phytime = event.offset;
1591 drop_cmd->cmd_type = (event.data[0] & 0x1E) >> 1;
1592 if(drop_cmd->cmd_type == 0xF)
1593 drop_cmd->cmd_sub_type = (event.data[1] & 0xFC) >> 2;
1594
1595
1596
1597 //DT_IDC_PRINTF("drop cmd happen!!!!!!!!!!!!\n\r");
1598 //Log schedule fail info.
1599 MD_TRC(IDC_SCHEDULE_FAIL_MSG,drop_cmd->cmd_phytime, drop_cmd->cmd_type, drop_cmd->cmd_sub_type);
1600
1601 return;
1602}
1603
1604kal_bool drv_idc_schedule_gps_blank_event(kal_uint8 rat_status, kal_bool gps_mode, kal_uint32 frc_time)
1605{
1606#if !defined(CHIP10992)
1607
1608 kal_uint8 gps_bm = 0;
1609
1610 MD_TRC(IDC_GPS_BLANK_MSG, rat_status, gps_mode, frc_time);
1611 //********protect critical section*******
1612 _idc_atomic_lock(&idc_drv_atom_lock,IDC_ATLOCK_SINGLE_GPS);
1613
1614 MD_TRC(IDC_GPS_BLANK_DETAIL_MSG, GPS_LTE_NR_ALL_BM);
1615
1616 frc_time= frc_time & 0x3FFFFFFF;
1617
1618 //LTE
1619 if(rat_status == IDC_RAT_LTE){
1620 if(gps_mode == KAL_TRUE){//LTE using bit0
1621 gps_bm = GPS_LTE_NR_ALL_BM|(0x1<<0);
1622 }
1623 else {
1624 gps_bm = GPS_LTE_NR_ALL_BM& 0xFFFFFFFE;
1625 }
1626 }
1627 else if(rat_status == IDC_RAT_NR){//NR using bit1
1628 if(gps_mode == KAL_TRUE){
1629 gps_bm = GPS_LTE_NR_ALL_BM|(0x1<<1);
1630 }
1631 else {
1632 gps_bm = GPS_LTE_NR_ALL_BM& 0xFFFFFFFD;
1633 }
1634 }
1635 else{
1636 _idc_atomic_unlock(&idc_drv_atom_lock,IDC_ATLOCK_SINGLE_GPS);
1637 //gps l1 l5 fail
1638 MD_TRC(IDC_GPS_BLANK_FAIL_MSG, __LINE__,0,0);
1639 return KAL_FALSE;
1640 }
1641
1642 if(TIME_DIFF_WITHIN(frc_time, ust_get_current_time(), 50)){//drop the event frc < current frc+50us
1643 MD_TRC(IDC_GPS_BLANK_DROP0_MSG, __LINE__,frc_time,ust_get_current_time());//drop this event
1644 _idc_atomic_unlock(&idc_drv_atom_lock,IDC_ATLOCK_SINGLE_GPS);
1645 return KAL_FALSE;
1646 }
1647
1648 if(TIME_DIFF_EXCEED(frc_time, ust_get_current_time(), 10000)){//drop the event frc > current frc+10ms
1649 MD_TRC(IDC_GPS_BLANK_DROP1_MSG, __LINE__,frc_time,ust_get_current_time());//drop this event
1650 _idc_atomic_unlock(&idc_drv_atom_lock,IDC_ATLOCK_SINGLE_GPS);
1651 return KAL_FALSE;
1652 }
1653
1654 if(gps_bm != GPS_LTE_NR_ALL_BM){
1655 if((gps_bm & 0x3) ==0x0){
1656 //gps_off
1657 if((DRV_Reg32(IDC_CTRL_GPS_EVENT_STATUS) & (0x1 << 1))){
1658 //event busy
1659 MD_TRC(IDC_GPS_BLANK_FAIL_MSG, __LINE__,(DRV_Reg32(IDC_CTRL_GPS_EVENT_STATUS) ),ust_get_current_time());
1660 _idc_atomic_unlock(&idc_drv_atom_lock,IDC_ATLOCK_SINGLE_GPS);
1661 return KAL_FALSE;
1662 }
1663 else{
1664 DRV_WriteReg32(IDC_CTRL_GPS_EVENT_OFF, frc_time + (0x1 << 31));
1665 MD_TRC(IDC_GPS_BLANK_TRIG_MSG, ust_get_current_time(), 0x0);
1666 }
1667 }else{
1668 if(GPS_LTE_NR_ALL_BM == 0x0){
1669 //gps_on
1670 if((DRV_Reg32(IDC_CTRL_GPS_EVENT_STATUS) & (0x1 << 2))){
1671 //event busy
1672 MD_TRC(IDC_GPS_BLANK_FAIL_MSG, __LINE__,(DRV_Reg32(IDC_CTRL_GPS_EVENT_STATUS) ),ust_get_current_time());
1673 _idc_atomic_unlock(&idc_drv_atom_lock,IDC_ATLOCK_SINGLE_GPS);
1674 return KAL_FALSE;
1675 }
1676 else{
1677 DRV_WriteReg32(IDC_CTRL_GPS_EVENT_ON, frc_time + (0x1 << 31));
1678 MD_TRC(IDC_GPS_BLANK_TRIG_MSG, ust_get_current_time(), 0x1);
1679 }
1680 }
1681 }
1682 }
1683
1684 GPS_LTE_NR_ALL_BM = gps_bm;
1685
1686 MM_Sync();
1687 //********protect critical section********
1688 _idc_atomic_unlock(&idc_drv_atom_lock,IDC_ATLOCK_SINGLE_GPS);
1689
1690 return KAL_TRUE;
1691#else
1692 return KAL_FALSE;
1693#endif
1694}
1695
1696kal_bool drv_idc_schedule_gps_l1_l5_blank_event(kal_uint8 rat_status, kal_uint8 raw_data, kal_uint32 frc_time)
1697{
1698#if defined(CHIP10992)||defined(MT6833)||defined(MT6877)
1699
1700 kal_uint8 gps_l1_l5_bm = 0;
1701
1702 MD_TRC(IDC_GPS_L1_L5_MSG, rat_status, raw_data, frc_time);
1703 //********protect critical section*******
1704 _idc_atomic_lock(&idc_drv_atom_lock,IDC_ATLOCK_L1_L5_GPS);
1705
1706 MD_TRC(IDC_GPS_L1_L5_DETAIL_MSG, GPS_L1_L5_ALL_BM, GPS_L1_L5_LTE_BM, GPS_L1_L5_NR_BM);
1707
1708 frc_time= frc_time & 0x3FFFFFFF;
1709
1710 //LTE
1711 if(rat_status == IDC_RAT_LTE){
1712 gps_l1_l5_bm = raw_data | GPS_L1_L5_NR_BM;
1713 }
1714 else if(rat_status == IDC_RAT_NR){
1715 gps_l1_l5_bm = raw_data | GPS_L1_L5_LTE_BM;
1716 }
1717 else{
1718 _idc_atomic_unlock(&idc_drv_atom_lock,IDC_ATLOCK_L1_L5_GPS);
1719 //gps l1 l5 fail
1720 MD_TRC(IDC_GPS_L1_L5_FAIL_MSG, __LINE__);
1721 return KAL_FALSE;
1722 }
1723
1724 if(gps_l1_l5_bm != GPS_L1_L5_ALL_BM){
1725 //L1 status check
1726 if((gps_l1_l5_bm & (1 << 0)) != (GPS_L1_L5_ALL_BM & (1 << 0))){
1727 if((gps_l1_l5_bm & (1 << 0)) == 0x0){
1728 //gps_l1_off
1729 if((DRV_Reg32(IDC_CTRL_GPS_EVENT_STATUS) & (1 << 1))){
1730 //event busy
1731 MD_TRC(IDC_GPS_L1_L5_FAIL_MSG, __LINE__);
1732 _idc_atomic_unlock(&idc_drv_atom_lock,IDC_ATLOCK_L1_L5_GPS);
1733 return KAL_FALSE;
1734 }
1735 else{
1736 DRV_WriteReg32(IDC_CTRL_GPS_EVENT_OFF, frc_time + (1 << 31));
1737 MD_TRC(IDC_GPS_L1_L5_TRIG_MSG, 0x0, 0x0);
1738 }
1739 }
1740 else if((gps_l1_l5_bm & (1 << 0)) == 0x1){
1741 //gps_l1_on
1742 if((DRV_Reg32(IDC_CTRL_GPS_EVENT_STATUS) & (1 << 2))){
1743 //event busy
1744 MD_TRC(IDC_GPS_L1_L5_FAIL_MSG, __LINE__);
1745 _idc_atomic_unlock(&idc_drv_atom_lock,IDC_ATLOCK_L1_L5_GPS);
1746 return KAL_FALSE;
1747 }
1748 else{
1749 DRV_WriteReg32(IDC_CTRL_GPS_EVENT_ON, frc_time + (1 << 31));
1750 MD_TRC(IDC_GPS_L1_L5_TRIG_MSG, 0x0, 0x1);
1751 }
1752 }
1753 }
1754
1755 //L5 status check
1756 if((gps_l1_l5_bm & (1 << 1)) != (GPS_L1_L5_ALL_BM & (1 << 1))){
1757 if((gps_l1_l5_bm & (1 << 1)) == 0x0){
1758 //gps_l5_off
1759 if((DRV_Reg32(IDC_CTRL_GPS_EVENT_L5_STATUS) & (1 << 1))){
1760 //event busy
1761 MD_TRC(IDC_GPS_L1_L5_FAIL_MSG, __LINE__);
1762 _idc_atomic_unlock(&idc_drv_atom_lock,IDC_ATLOCK_L1_L5_GPS);
1763 return KAL_FALSE;
1764 }
1765 else{
1766 DRV_WriteReg32(IDC_CTRL_GPS_EVENT_L5_OFF, frc_time + (1 << 31));
1767 MD_TRC(IDC_GPS_L1_L5_TRIG_MSG, 0x1, 0x0);
1768 }
1769 }
1770 else if((gps_l1_l5_bm & (1 << 1)) == 0x1){
1771 //gps_l5_on
1772 if((DRV_Reg32(IDC_CTRL_GPS_EVENT_L5_STATUS) & (1 << 2))){
1773 //event busy
1774 MD_TRC(IDC_GPS_L1_L5_FAIL_MSG, __LINE__);
1775 _idc_atomic_unlock(&idc_drv_atom_lock,IDC_ATLOCK_L1_L5_GPS);
1776 return KAL_FALSE;
1777 }
1778 else{
1779 DRV_WriteReg32(IDC_CTRL_GPS_EVENT_L5_ON, frc_time + (1 << 31));
1780 MD_TRC(IDC_GPS_L1_L5_TRIG_MSG, 0x1, 0x1);
1781 }
1782 }
1783
1784 }
1785
1786 }
1787
1788 //update bitmap
1789 if(rat_status == IDC_RAT_LTE){
1790 GPS_L1_L5_LTE_BM = raw_data;
1791 }
1792 else if(rat_status == IDC_RAT_NR){
1793 GPS_L1_L5_NR_BM = raw_data;
1794 }
1795 GPS_L1_L5_ALL_BM = gps_l1_l5_bm;
1796
1797 MM_Sync();
1798 //********protect critical section********
1799 _idc_atomic_unlock(&idc_drv_atom_lock,IDC_ATLOCK_L1_L5_GPS);
1800
1801 return KAL_TRUE;
1802#else
1803 return KAL_FALSE;
1804#endif
1805}
1806
1807#if defined(__MD93__)
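/*
 * drv_idc_schedule_update() - MD93 variant.
 * Called periodically by EL1C with the current PHY time (lower 20 bits are kept).
 * It resumes RX after the CONSYS flood workaround has been active for 10 further
 * calls, flushes events whose offsets have already passed (wrap-aware compare over
 * a 614400-tick window), stops the matching HW scheduler slots, and then moves any
 * software-pending events into free slots, asserting if more than 16 events are
 * requested inside one 61440-tick window (the "over 16 events in 1 ms" check below).
 */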
1808void drv_idc_schedule_update(kal_uint32 time)
1809{
1810 kal_uint32 i = 0, j = 0;
1811 kal_uint32 evt_pend_cnt = 0;
1812 kal_uint32 bitmap32 = 0, scheduler_status = 0, expired_evt_status = 0, expired_evt_flag = 0;
1813#if !defined(__MAUI_BASIC__)
1814 kal_uint32 tx_cnt = 0;
1815#endif
1816
1817 kal_hrt_take_itc_lock(KAL_ITC_IDC, KAL_INFINITE_WAIT);
1818 before_poll_time_U = ust_get_current_time();
1819 //********protect critical section*******
1820
1821 // Get phy_time from EL1C and keep the lower 20 bits
1822 idc_port.phy_time = time & 0x000FFFFF;
1823
1824 if(!(_idc_atomic_try_lock(&idc_drv_atom_lock,IDC_ATLOCK_PWR_UPDATE)))
1825 {
1826 after_poll_time_U = ust_get_current_time();
1827 kal_hrt_give_itc_lock(KAL_ITC_IDC);
1828 return;
1829 }
1830
1831 //unlock the atomic lock and return without doing anything if schedule_update arrives after slp_ntf
1832 if(idc_port.main_state == IDC_IN_SLEEP)
1833 {
1834 _idc_atomic_unlock(&idc_drv_atom_lock,IDC_ATLOCK_PWR_UPDATE);
1835 after_poll_time_U = ust_get_current_time();
1836 kal_hrt_give_itc_lock(KAL_ITC_IDC);
1837 return;
1838 }
1839
1840 idc_ctrl_enter(IDC_CTRL_LOCK);
1841 idc_in_workaround = 0;
1842 idc_in_eventpending = 0;
1843
1844 // Workaround for CONSYS sending too many messages
1845#if !defined(ATEST_DRV_ENABLE)
1846
1847 if (idc_rx_suspend == KAL_TRUE)
1848 {
1849 idc_in_workaround = 1;
1850 if (idc_port.main_state == IDC_SUSPEND)
1851 {
1852 if (++idc_rx_suspend_timer >= 10)
1853 {
1854 idc_rx_suspend = KAL_FALSE;
1855 // Clear RX FIFO and enable RX interrupt
1856 IRQMask(MD_IRQID_IDC_UART_IRQ);
1857 DRV_WriteReg32_NPW(IDC_UART_FCR, IDC_UART_FCR_RXTRIG | IDC_UART_FCR_CLRR | IDC_UART_FCR_FIFOEN);
1858 DRV_WriteReg32(IDC_UART_IER, IDC_UART_IER_ERBFI);
1859 // Change idc_port state
1860 idc_port.main_state = IDC_IN_USE;
1861 IRQUnmask(MD_IRQID_IDC_UART_IRQ);
1862 }
1863 }
1864 else
1865 {
1866 idc_rx_suspend = KAL_FALSE;
1867 }
1868 }
1869#endif
1870
1871 _idc_atomic_unlock(&idc_drv_atom_lock,IDC_ATLOCK_PWR_UPDATE);
1872
1873 // Stop uncompleted events
1874 if(idc_port.event_usage_bit_map != 0x0)
1875 {
1876 for(i = 0; i<16; i++)
1877 {
1878 if ((1 << i) & idc_port.event_usage_bit_map)
1879 {
1880 if(TIME_DIFF_WITHIN(idc_port.phy_time, idc_port.event_offset_table[i], 614400))
1881 /*if (((idc_port.phy_time > idc_port.event_offset_table[i])
1882 && ((idc_port.phy_time - idc_port.event_offset_table[i]) <= 614400))
1883 || (idc_port.phy_time < idc_port.event_offset_table[i])
1884 && ((idc_port.event_offset_table[i] - idc_port.phy_time) > 614400))*/
1885 {
1886 /*wrap_case = 0;
1887 if ((idc_port.phy_time < idc_port.event_offset_table[i])&&((idc_port.event_offset_table[i] - idc_port.phy_time) <= 614400))
1888 wrap_case = 1;
1889 MD_TRC(IDC_STOP_AND_FLUSH_EVENT_MSG,idc_port.event_data_table[i], idc_port.event_offset_table[i], idc_port.phy_time, current_phy_time, wrap_case);*/
1890 idc_port.event_cnt--;
1891 idc_port.event_usage_bit_map &= ~(0x1 << i);
1892 idc_port.event_data_table[i] = 0;
1893 idc_port.event_offset_table[i] = 0;
1894 }
1895 }
1896 }
1897
1898 for(j = 0; j < 16; j++)
1899 {
1900 if((~(idc_port.event_usage_bit_map) >> j) & 0x1)
1901 bitmap32 = (bitmap32 | (0x3 << (j*2)));
1902 }
1903 scheduler_status = DRV_Reg32(IDC_CTRL_SCH_STATUS);
1904 expired_evt_status = bitmap32 & scheduler_status;
1905 if(expired_evt_status)
1906 {
1907 #ifdef ATEST_DRV_ENABLE
1908 DT_IDC_PRINTF("drv_idc: [Warning] Some events are expired in scheduler & stopped. Status = %x \n\r", expired_evt_status);
1909 while(1);
1910 #endif
1911 expired_evt_flag = 1;
1912 }
1913 drv_idc_stop_event(~(idc_port.event_usage_bit_map));
1914
1915 }
1916
1917 if(idc_port.event_usage_bit_map == 0)
1918 idc_port.schedule_state = IDC_PLAN;
1919
1920 // Check if there are pending events in event buffer
1921 if(idc_port.event_pending_cnt > 0)
1922 {
1923 idc_in_eventpending = 1;
1924 j = 0;
1925 evt_pend_cnt = idc_port.event_pending_cnt;
1926 idc_port.schedule_state = IDC_RUN;
1927 for(i = 0; i< evt_pend_cnt; i++)
1928 {
1929 // Start the pending events
1930 if(idc_port.event_usage_bit_map != 0xFFFF) // SRAM isn't full
1931 {
1932 // Find empty event
1933 while (1)
1934 {
1935 if ((1 << idc_port.event_w_index) & idc_port.event_usage_bit_map)
1936 {
1937 idc_port.event_w_index++;
1938 if (idc_port.event_w_index == 16) idc_port.event_w_index = 0;
1939 }
1940 else
1941 {
1942 break;
1943 }
1944 }
1945
1946 // Set event data
1947 DRV_WriteReg32(IDC_CTRL_EVT_DATA(idc_port.event_w_index), idc_port.event_pending_data_table[i]);
1948
1949 // Set time stamp parameters
1950 DRV_WriteReg32(IDC_CTRL_EVENT_SETETING(idc_port.event_w_index), idc_port.event_pending_offset_table[i] + (1 << 31));
1951
1952 // Record event_offset & event_data in the table
1953 idc_port.event_offset_table[idc_port.event_w_index] = idc_port.event_pending_offset_table[i];
1954 idc_port.event_data_table[idc_port.event_w_index] = idc_port.event_pending_data_table[i];
1955
1956 // Update usage bitmap for the scheduler
1957 idc_port.event_usage_bit_map |= (1 << idc_port.event_w_index);
1958
1959 // Update idc_port.event_longest_time
1960 if(TIME_DIFF_WITHIN(idc_port.event_offset_table[idc_port.event_w_index], idc_port.event_longest_time, 614400))
1961 /*if(((idc_port.event_offset_table[idc_port.event_w_index] > idc_port.event_longest_time)
1962 && ((idc_port.event_offset_table[idc_port.event_w_index] - idc_port.event_longest_time) <= 614400))
1963 || ((idc_port.event_offset_table[idc_port.event_w_index] < idc_port.event_longest_time)
1964 && (idc_port.event_longest_time - idc_port.event_offset_table[idc_port.event_w_index] > 614400)))*/
1965 {
1966 idc_port.event_longest_index = idc_port.event_w_index;
1967 idc_port.event_longest_time = idc_port.event_offset_table[idc_port.event_w_index];
1968 }
1969 idc_port.event_w_index++;
1970 if (idc_port.event_w_index == 16) idc_port.event_w_index = 0;
1971
1972 idc_port.event_pending_cnt--;
1973 }
1974 // SRAM is full; sanity-check the pending event offsets
1975 else
1976 {
1977 if(TIME_DIFF_WITHIN(idc_port.event_pending_offset_table[i], idc_port.phy_time, 61440))
1978 /*if(((idc_port.event_pending_offset_table[i] > idc_port.phy_time)
1979 && (idc_port.event_pending_offset_table[i] - idc_port.phy_time <= 61440))
1980 || ((idc_port.event_pending_offset_table[i] < idc_port.phy_time)
1981 && (((0xFFFFF - idc_port.phy_time) + idc_port.event_pending_offset_table[i]) <= 61440)))*/
1982 IDC_ASSERT(0); // Set over 16 events in 1ms
1983 //re-arrange event buffer
1984 idc_port.event_pending_data_table[j] = idc_port.event_pending_data_table[i];
1985 idc_port.event_pending_offset_table[j++] = idc_port.event_pending_offset_table[i];
1986
1987 }
1988
1989 }
1990 }
1991
1992 //idc_ctrl_leave(IDC_CTRL_LOCK);
1993 //********protect critical section*******
1994 after_poll_time_U = ust_get_current_time();
1995 kal_hrt_give_itc_lock(KAL_ITC_IDC);
1996
1997 #if !defined(__MAUI_BASIC__)
1998 //get tx count
1999 tx_cnt = DRV_Reg32(IDC_CTRL_DATA_CNT) & 0xFFFF;
2000
2001 //print tx_count log
2002 if(idc_port.schedule_state == IDC_RUN)
2003 MD_TRC(IDC_TX_COUNT_MSG,tx_cnt);
2004 #endif
2005
2006 if((idc_in_workaround == 1) && (idc_rx_suspend == KAL_FALSE))
2007 MD_TRC(IDC_RX_RESUME_MSG,);
2008 if(expired_evt_flag == 1)
2009 MD_TRC(IDC_EVENTS_STILL_BUSY_MSG,expired_evt_status, scheduler_status);
2010
2011 return;
2012}
2013
2014#elif defined(__MD95__)
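/*
 * drv_idc_schedule_update_95() - MD95 variant.
 * Same periodic cleanup as the MD93 version, but without a software pending
 * queue: stale events are simply flushed (clearing their multi-byte data table
 * entries and the SRAM usage slots they occupied) and the corresponding HW
 * scheduler slots are stopped via drv_idc_stop_event().
 */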
2015void drv_idc_schedule_update_95(kal_uint32 time)
2016{
2017 kal_uint32 i = 0, j = 0;
2018 kal_uint32 bitmap32 = 0, scheduler_status = 0, expired_evt_status = 0, expired_evt_flag = 0;
2019
2020#if !defined(__MAUI_BASIC__)
2021 kal_uint32 tx_cnt = 0;
2022#endif
2023
2024 kal_hrt_take_itc_lock(KAL_ITC_IDC, KAL_INFINITE_WAIT);
2025 before_poll_time_U = ust_get_current_time();
2026 //********protect critical section*******
2027
2028 // Get phy_time from EL1C and keep the lower 20 bits
2029 idc_port.phy_time = time & 0x000FFFFF;
2030
2031 // Log PHY_TIME information
2032 //MD_TRC(IDC_SCHEDULE_UPDATE_MSG, idc_port.phy_time);
2033
2034 if(!(_idc_atomic_try_lock(&idc_drv_atom_lock,IDC_ATLOCK_PWR_UPDATE)))
2035 {
2036 after_poll_time_U = ust_get_current_time();
2037 kal_hrt_give_itc_lock(KAL_ITC_IDC);
2038 return;
2039 }
2040
2041 //unlock the atomic lock and return without doing anything if schedule_update arrives after slp_ntf
2042 if(idc_port.main_state == IDC_IN_SLEEP)
2043 {
2044 _idc_atomic_unlock(&idc_drv_atom_lock,IDC_ATLOCK_PWR_UPDATE);
2045 after_poll_time_U = ust_get_current_time();
2046 kal_hrt_give_itc_lock(KAL_ITC_IDC);
2047 return;
2048 }
2049
2050 idc_ctrl_enter(IDC_CTRL_LOCK);
2051 idc_in_workaround = 0;
2052
2053 // Workaround for CONSYS sending too many messages
2054#if !defined(ATEST_DRV_ENABLE)
2055 if (idc_rx_suspend == KAL_TRUE)
2056 {
2057 if (idc_port.main_state == IDC_SUSPEND)
2058 {
2059 if (++idc_rx_suspend_timer >= 10)
2060 {
2061 idc_rx_suspend = KAL_FALSE;
2062 // Clear RX FIFO and enable RX interrupt
2063 IRQMask(MD_IRQID_IDC_UART_IRQ);
2064 DRV_WriteReg32_NPW(IDC_UART_FCR, IDC_UART_FCR_RXTRIG | IDC_UART_FCR_CLRR | IDC_UART_FCR_FIFOEN);
2065 DRV_WriteReg32(IDC_UART_IER, IDC_UART_IER_ERBFI);
2066 // Change idc_port state
2067 idc_port.main_state = IDC_IN_USE;
2068 IRQUnmask(MD_IRQID_IDC_UART_IRQ);
2069 }
2070 }
2071 else
2072 {
2073 idc_rx_suspend = KAL_FALSE;
2074 }
2075 }
2076#endif
2077
2078 _idc_atomic_unlock(&idc_drv_atom_lock,IDC_ATLOCK_PWR_UPDATE);
2079
2080 // Stop uncompleted events
2081 if(idc_port.event_usage_bit_map != 0x0)
2082 {
2083 for(i = 0; i < 16; i++)
2084 {
2085 if ((1 << i) & idc_port.event_usage_bit_map)
2086 {
2087 if(TIME_DIFF_WITHIN(idc_port.phy_time, idc_port.event_offset_table[i], 614400))
2088/* if (((idc_port.phy_time > idc_port.event_offset_table[i])
2089 && ((idc_port.phy_time - idc_port.event_offset_table[i]) <= 614400))
2090 || (idc_port.phy_time < idc_port.event_offset_table[i])
2091 && ((idc_port.event_offset_table[i] - idc_port.phy_time) > 614400))*/
2092 {
2093 //clear all event state
2094 idc_port.event_cnt--;
2095 idc_port.event_usage_bit_map &= ~(0x1 << i);
2096 idc_port.event_offset_table[i] = 0;
2097 for(j = 0; j < 9; j++)
2098 {
2099 idc_port.event_data_table[i][j] = 0;
2100 }
2101 for(j = 0; j < idc_port.event_byte_num[i]; j++)
2102 {
2103 idc_port.sram_table_usage[idc_port.event_sram_sta_idx[i] + j] = 0;
2104 }
2105 }
2106 }
2107 }
2108
2109 for(j = 0; j < 16; j++)
2110 {
2111 if((~(idc_port.event_usage_bit_map) >> j) & 0x1)
2112 bitmap32 = (bitmap32 | (0x3 << (j*2)));
2113 }
2114 scheduler_status = DRV_Reg32(IDC_CTRL_SCH_STATUS);
2115 expired_evt_status = bitmap32 & scheduler_status;
2116 if(expired_evt_status)
2117 {
2118 #ifdef ATEST_DRV_ENABLE
2119 DT_IDC_PRINTF("drv_idc: [Warning] Some events are expired in scheduler & stopped. Status = %x \n\r", expired_evt_status);
2120 while(1);
2121 #endif
2122 expired_evt_flag = 1;
2123 //MD_TRC(IDC_EVENTS_STILL_BUSY_MSG, expired_evt_status);
2124 }
2125 drv_idc_stop_event(~(idc_port.event_usage_bit_map));
2126
2127 }
2128
2129 if(idc_port.event_usage_bit_map == 0)
2130 idc_port.schedule_state = IDC_PLAN;
2131
2132 //idc_ctrl_leave(IDC_CTRL_LOCK);
2133 //********protect critical section*******
2134 after_poll_time_U = ust_get_current_time();
2135 kal_hrt_give_itc_lock(KAL_ITC_IDC);
2136
2137#if !defined(__MAUI_BASIC__)
2138 //get tx count
2139 tx_cnt = DRV_Reg32(IDC_CTRL_DATA_CNT) & 0xFFFF;
2140
2141 //print tx_count log
2142 if(idc_port.schedule_state == IDC_RUN)
2143 MD_TRC(IDC_TX_COUNT_MSG,tx_cnt);
2144#endif
2145
2146 if((idc_in_workaround == 1) && (idc_rx_suspend == KAL_FALSE))
2147 MD_TRC(IDC_RX_RESUME_MSG,);
2148 if(expired_evt_flag == 1)
2149 MD_TRC(IDC_EVENTS_STILL_BUSY_MSG,expired_evt_status, scheduler_status);
2150
2151 return;
2152
2153}
2154#elif defined(__MD97__) || defined(__MD97P__)
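/*
 * drv_idc_schedule_update_97() - MD97/MD97P variant.
 * Uses the 30-bit FRC time; event index 0 appears to be reserved (the usage
 * bitmap idles at 0x1). Up to IDC_MAX_EVENT_NUM slots are tracked across the two
 * scheduler status registers (2 status bits per event), and events whose offsets
 * fall within 10000 FRC ticks behind the current time (10 ms per the earlier
 * drop-check comments) are flushed and stopped.
 */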
2155void drv_idc_schedule_update_97(kal_uint32 time)
2156{
2157 kal_uint32 i = 0, j = 0;
2158 kal_uint32 bitmap32 = 0, bitmap32_2 = 0, scheduler_status = 0, scheduler_status_2 = 0, expired_evt_status = 0, expired_evt_status_2 = 0, expired_evt_flag = 0;
2159
2160#if !defined(__MAUI_BASIC__)
2161 kal_uint32 tx_cnt = 0;
2162#endif
2163
2164 kal_hrt_take_itc_lock(KAL_ITC_IDC, KAL_INFINITE_WAIT);
2165 before_poll_time_U = ust_get_current_time();
2166 //********protect critical section*******
2167
2168 // Get the FRC from EL1C and keep the lower 30 bits
2169 idc_port.frc_time = time & 0x3FFFFFFF;
2170
2171 // Log FRC_TIME information
2172 MD_TRC(IDC_SCHEDULE_UPDATE_MSG, idc_port.frc_time);
2173
2174 //return without doing anything if schedule_update arrives after slp_ntf
2175 if(idc_port.main_state == IDC_IN_SLEEP)
2176 {
2177 after_poll_time_U = ust_get_current_time();
2178 kal_hrt_give_itc_lock(KAL_ITC_IDC);
2179 return;
2180 }
2181
2182 //idc_ctrl_enter(IDC_CTRL_LOCK);//remove because LTE/NR RAT flow
2183
2184 // Stop uncompleted events
2185 if(idc_port.event_usage_bit_map != 0x1)
2186 {
2187 for(i = 1; i < IDC_MAX_EVENT_NUM; i++)
2188 {
2189 if ((1 << i) & idc_port.event_usage_bit_map)
2190 {
2191 if(TIME_DIFF_WITHIN(idc_port.frc_time, idc_port.event_offset_table[i], 10000))
2192 {
2193 //clear all event state
2194 idc_port.event_cnt--;
2195 idc_port.event_usage_bit_map &= ~(0x1 << i);
2196 idc_port.event_offset_table[i] = 0;
2197 for(j = 0; j < 9; j++)
2198 {
2199 idc_port.event_data_table[i][j] = 0;
2200 }
2201 for(j = 0; j < idc_port.event_byte_num[i]; j++)
2202 {
2203 idc_port.sram_table_usage[idc_port.event_sram_sta_idx[i] + j] = 0;
2204 }
2205 }
2206 }
2207 }
2208
2209 for(j = 1; j < IDC_MAX_EVENT_NUM; j++)
2210 {
2211 if((~(idc_port.event_usage_bit_map) >> j) & 0x1){
2212 if(j < 16)
2213 bitmap32 = (bitmap32 | (0x3 << (j*2)));
2214 else
2215 bitmap32_2 = (bitmap32_2 | (0x3 << ((j-16)*2)));
2216 }
2217 }
2218 scheduler_status = DRV_Reg32(IDC_CTRL_SCH_STATUS);
2219 scheduler_status_2 = DRV_Reg32(IDC_CTRL_SCH_STATUS2);
2220
2221 expired_evt_status = bitmap32 & scheduler_status;
2222 expired_evt_status_2 = bitmap32_2 & scheduler_status_2;
2223
2224 if(expired_evt_status | expired_evt_status_2)
2225 {
2226 #ifdef ATEST_DRV_ENABLE
2227 DT_IDC_PRINTF("drv_idc: [Warning] Some events are expired in scheduler & stopped. Status = %x \n\r", expired_evt_status);
2228 while(1);
2229 #endif
2230 expired_evt_flag = 1;
2231 }
2232 drv_idc_stop_event(~(idc_port.event_usage_bit_map));
2233
2234 }
2235
2236 if(idc_port.event_usage_bit_map == 0x1)
2237 idc_port.schedule_state = IDC_PLAN;
2238
2239 //idc_ctrl_leave(IDC_CTRL_LOCK);
2240 //********protect critical section*******
2241 after_poll_time_U = ust_get_current_time();
2242 kal_hrt_give_itc_lock(KAL_ITC_IDC);
2243
2244#if !defined(__MAUI_BASIC__)
2245 //get tx count
2246 tx_cnt = DRV_Reg32(IDC_CTRL_DATA_CNT) & 0xFFFF;
2247
2248 //print tx_count log
2249 if(idc_port.schedule_state == IDC_RUN)
2250 MD_TRC(IDC_TX_COUNT_MSG, tx_cnt);
2251#endif
2252
2253 if(expired_evt_flag == 1)
2254 MD_TRC(IDC_EVENTS_STILL_BUSY_2_MSG, expired_evt_status, scheduler_status, expired_evt_status_2, scheduler_status_2);
2255
2256 return;
2257
2258}
2259#endif
2260
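/*
 * drv_idc_stop_event(): writes the per-event stop bitmap to IDC_CTRL_SCH_STOP,
 * expands it to the 2-bits-per-event layout of IDC_CTRL_SCH_STATUS, and polls
 * until the targeted slots report idle. drv_idc_stop_event_97() below does the
 * same for 32 slots split across IDC_CTRL_SCH_STATUS/STATUS2 and keeps debug
 * copies of the bitmaps for post-mortem traces.
 */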
2261void drv_idc_stop_event(kal_uint32 bitmap)
2262{
2263 kal_uint32 bitmap32 = 0 , j = 0;
2264
2265 DRV_WriteReg32_NPW(IDC_CTRL_SCH_STOP, bitmap);
2266 stop_status_check = DRV_Reg32(IDC_CTRL_SCH_STOP);
2267
2268 for(j = 0; j < IDC_MAX_EVENT_NUM; j++)
2269 {
2270 if((bitmap >> j) & 0x1)
2271 bitmap32 = (bitmap32 | (0x3 << (j*2)));
2272 }
2273 POLL_STATUS(DRV_Reg32(IDC_CTRL_SCH_STATUS) & bitmap32);
2274
2275 return;
2276}
2277
2278void drv_idc_stop_event_97(kal_uint32 bitmap)
2279{
2280 kal_uint32 bitmap32_1 = 0 , bitmap32_2 = 0, j = 0;
2281
2282 stop_event_bitmap_debug = bitmap;
2283 DRV_WriteReg32(IDC_CTRL_SCH_STOP, bitmap);
2284 MO_Sync();
2285 stop_status_check = DRV_Reg32(IDC_CTRL_SCH_STOP);
2286
2287 for(j = 0; j < IDC_MAX_EVENT_NUM; j++)
2288 {
2289 if(j < 16){
2290 if((bitmap >> j) & 0x1)
2291 bitmap32_1 = (bitmap32_1 | (0x3 << (j*2)));
2292 }
2293 else{
2294 if((bitmap >> j) & 0x1)
2295 bitmap32_2 = (bitmap32_2 | (0x3 << ((j-16)*2)));
2296 }
2297 }
2298 stop_event_bitmap32_0_15_debug = bitmap32_1;
2299 stop_event_bitmap32_16_31_debug = bitmap32_2;
2300
2301 POLL_STATUS(DRV_Reg32(IDC_CTRL_SCH_STATUS) & bitmap32_1);
2302 POLL_STATUS(DRV_Reg32(IDC_CTRL_SCH_STATUS2) & bitmap32_2);
2303
2304 return;
2305}
2306
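/*
 * drv_idc_set_remapping_config() programs the IDC event remapping table and its
 * enable bit. drv_idc_force_on_rf() writes the requested path to
 * IDC_FORCE_TRIGGER_RF, which (per the IDC_FORCE_ON_TX trace) appears to force
 * TX on for that RF path. drv_idc_purge() is intentionally empty; see the
 * comment inside about the RX FIFO clear limitation.
 */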
2307void drv_idc_set_remapping_config(kal_uint8 remapping_table, kal_uint8 remapping_table_en)
2308{
2309 DRV_WriteReg32(IDC_REMAPPING_CFG, remapping_table);
2310 DRV_WriteReg32_NPW(IDC_REMAPPING_EN, remapping_table_en);
2311
2312 return;
2313}
2314void drv_idc_force_on_rf(kal_uint8 rf_path)
2315{
2316 DRV_WriteReg8(IDC_FORCE_TRIGGER_RF, rf_path);
2317
2318 MD_TRC(IDC_FORCE_ON_TX_MSG, rf_path);
2319
2320 return;
2321}
2322
2323void drv_idc_purge(UART_buffer dir)
2324{
2325 //removed because of a HW limitation: the RX FIFO cannot be cleared simultaneously
2326/* if(dir == TX_BUF)
2327 DRV_WriteReg32(IDC_UART_FCR, IDC_UART_FCR_RXTRIG | IDC_UART_FCR_CLRT | IDC_UART_FCR_FIFOEN);
2328 else
2329 DRV_WriteReg32(IDC_UART_FCR, IDC_UART_FCR_RXTRIG | IDC_UART_FCR_CLRR | IDC_UART_FCR_FIFOEN);*/
2330}
2331
2332void drv_idc_get_schedule_status(kal_uint32 schedule_status)
2333{
2334 schedule_status = DRV_Reg32(IDC_CTRL_SCH_STATUS);
2335
2336 #ifdef ATEST_DRV_ENABLE
2337 MD_TRC(IDC_SCHEDULE_STATUS_MSG,schedule_status);
2338 #endif
2339
2340 return;
2341}
2342void drv_idc_get_schedule_status_2(kal_uint32 schedule_status_2)
2343{
2344 schedule_status_2 = DRV_Reg32(IDC_CTRL_SCH_STATUS2);
2345
2346 #ifdef ATEST_DRV_ENABLE
2347 MD_TRC(IDC_SCHEDULE_STATUS_2_MSG, schedule_status_2);
2348 #endif
2349
2350 return;
2351}
2352
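/*
 * drv_idc_check_event_send_out(): returns KAL_TRUE only when the HW scheduler
 * reports no busy event in either status register and the UART LSR TEMT bit
 * shows the transmitter (FIFO and shift register) completely empty, i.e. every
 * scheduled event has actually been sent out.
 */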
2353kal_bool drv_idc_check_event_send_out(void)
2354{
2355 kal_uint32 schedule_status = 0;
2356
2357 // check whether the HW scheduler is still busy; read the status registers directly since drv_idc_get_schedule_status() takes its argument by value and cannot return it
2358 schedule_status = DRV_Reg32(IDC_CTRL_SCH_STATUS);
2359 if(schedule_status) return KAL_FALSE;
2360 schedule_status = DRV_Reg32(IDC_CTRL_SCH_STATUS2);
2361 if(schedule_status) return KAL_FALSE;
2362
2363 // check whether the IDC TX FIFO still holds data (TEMT set means it is empty)
2364 if(DRV_Reg32(IDC_UART_LSR) & IDC_UART_LSR_TEMT) return KAL_TRUE;
2365
2366 return KAL_FALSE;
2367}
2368
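/*
 * drv_idc_set_pin_config(): one-shot switch of the IDC/PTA UART between the
 * internal pins and the external GPIO pads, guarded by SET_PIN_FLAG. The
 * per-chip writes below target the GPIO block at 0xC0005000 (mode/selection
 * offsets differ per chip); on the newer chips the muxing is commented out and
 * appears to be handled elsewhere, so only the status is returned.
 */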
2369DCL_STATUS drv_idc_set_pin_config(IDC_PIN_MODE_T pin_mode)
2370{
2371 DCL_STATUS return_value = STATUS_FAIL;
2372
2373 if(SET_PIN_FLAG == KAL_TRUE){
2374 MD_TRC(IDC_ALREADY_SET_PIN_MSG,);
2375 return_value = STATUS_OK;
2376 return return_value;
2377 }
2378 else{
2379 SET_PIN_FLAG = KAL_TRUE;
2380 MM_Sync();
2381 }
2382
2383 if (pin_mode == IDC_INTERNAL_PIN)
2384 {
2385 #if !defined(ATEST_DRV_ENABLE)
2386 MD_TRC(IDC_INTERNAL_PIN_MSG,);
2387 #else
2388 kal_sprintf(idc_dbg_str, "drv_idc: Switch to internal pins\n\r");
2389 DT_IDC_PRINTF(idc_dbg_str);
2390 #endif
2391
2392 idc_port.pin_mode = IDC_INTERNAL_PIN;
2393
2394 // Switch to internal pins, use NPW
2395 #if defined(MT6763)
2396 DRV_WriteReg32_NPW(0xC0005000 + 0x6C0, DRV_Reg32(0xC0005000+ 0x6C0) & ~(0x10));
2397 #elif defined(MT6739)|| defined(MT6765)||defined(MT6761)
2398 DRV_WriteReg32_NPW(0xC0005000 + 0x600, DRV_Reg32(0xC0005000+ 0x600) & ~(0x02));
2399 #elif defined(MT6771)
2400 DRV_WriteReg32_NPW(0xC0005000 + 0x6C0, DRV_Reg32(0xC0005000+ 0x6C0) & ~(0x10));
2401 #elif defined(MT3967)
2402 DRV_WriteReg32_NPW(0xC0005000 + 0x6C0, DRV_Reg32(0xC0005000+ 0x6C0) & ~(0x10));
2403
2404 return_value = STATUS_OK;
2405 #endif
2406 }
2407 else if (pin_mode == IDC_EXTERNAL_PIN)
2408 {
2409 #if !defined(ATEST_DRV_ENABLE)
2410 MD_TRC(IDC_EXTERNAL_PIN_MSG,);
2411 #else
2412 kal_sprintf(idc_dbg_str, "drv_idc: Switch to external pins\n\r");
2413 DT_IDC_PRINTF(idc_dbg_str);
2414 #endif
2415
2416 idc_port.pin_mode = IDC_EXTERNAL_PIN;
2417
2418 // Switch to external pins, use NPW
2419 #if defined(MT6763)
2420 DRV_WriteReg32_NPW(0xC0005000 + 0x6C0, DRV_Reg32(0xC0005000+ 0x6C0) | 0x10);
2421 //GPIO 111 => PTA_RX, GPIO110 => PTA_TX
2422 DRV_WriteReg32(0xC0005000 + 0x3D0, DRV_Reg32(0xC0005000 + 0x3D0) & ~(0xFF000000));
2423 DRV_WriteReg32(0xC0005000 + 0x3D0, DRV_Reg32(0xC0005000 + 0x3D0) | 0x22000000);
2424 return_value = STATUS_OK;
2425
2426 #elif defined(MT6739)
2427 DRV_WriteReg32_NPW(0xC0005000 + 0x600, DRV_Reg32(0xC0005000+ 0x600) | 0x02);
2428 //GPIO 1 => PTA_RX, GPIO 2 => PTA_TX
2429 DRV_WriteReg32(0xC0005000 + 0x300, DRV_Reg32(0xC0005000 + 0x300) & ~(0x0FF0));
2430 DRV_WriteReg32(0xC0005000 + 0x300, DRV_Reg32(0xC0005000 + 0x300) | 0x0220);
2431 return_value = STATUS_OK;
2432
2433 #elif defined(MT6765)||defined(MT6761)
2434 DRV_WriteReg32_NPW(0xC0005000 + 0x600, DRV_Reg32(0xC0005000+ 0x600) | 0x02);
2435 //GPIO 105 => PTA_RX, GPIO 106 => PTA_TX
2436 DRV_WriteReg32(0xC0005000 + 0x3D0, DRV_Reg32(0xC0005000 + 0x3D0) & ~(0x0FF0));
2437 DRV_WriteReg32(0xC0005000 + 0x3D0, DRV_Reg32(0xC0005000 + 0x3D0) | 0x0660);
2438 return_value = STATUS_OK;
2439 #elif defined(MT6771)
2440 DRV_WriteReg32_NPW(0xC0005000 + 0x6C0, DRV_Reg32(0xC0005000+ 0x6C0) | 0x10);
2441 //GPIO 90 => PTA_RX, GPIO91 => PTA_TX
2442 DRV_WriteReg32(0xC0005000 + 0x3B0, DRV_Reg32(0xC0005000 + 0x3B0) & ~(0xFF00));
2443 DRV_WriteReg32(0xC0005000 + 0x3B0, DRV_Reg32(0xC0005000 + 0x3B0) | 0x6600);
2444 #elif defined(MT3967)
2445 DRV_WriteReg32_NPW(0xC0005000 + 0x6C0, DRV_Reg32(0xC0005000 + 0x6C0) | 0x10);
2446 //GPIO 45 => PTA_RX, GPIO 46 => PTA_TX
2447 DRV_WriteReg32(0xC0005000 + 0x350, DRV_Reg32(0xC0005000 + 0x350) & ~(0xFF00000));
2448 DRV_WriteReg32(0xC0005000 + 0x350, DRV_Reg32(0xC0005000 + 0x350) | 0x6600000);
2449 #elif defined(MT6885)
2450 // Switch LTE IDC UART to PAD
2451 //DRV_WriteReg32(0xC0005000 + 0x600, DRV_Reg(0xC0005000 + 0x600) | 0x20);
2452 //GPIO 43 => PTA_RX, GPIO 44 => PTA_TX
2453 //DRV_WriteReg32(0xC0005000 + 0x350, DRV_Reg(0xC0005000 + 0x350) & ~(0xFF000));
2454 //DRV_WriteReg32(0xC0005000 + 0x350, DRV_Reg(0xC0005000 + 0x350) | 0x55000);
2455 //GPIO 158 => PTA_RX, GPIO 159 => PTA_TX
2456 //DRV_WriteReg32(0xC0005000 + 0x430, DRV_Reg32(0xC0005000 + 0x430) & ~(0xFF000000));
2457 //DRV_WriteReg32(0xC0005000 + 0x430, DRV_Reg32(0xC0005000 + 0x430) | 0x44000000);
2458
2459 return_value = STATUS_OK;
2460 #elif defined(MT6833)
2461 //unmodified
2462 //DRV_WriteReg32(0xC0005000 + 0x600, DRV_Reg(0xC0005000 + 0x600) | 0x20);
2463 //GPIO 27 => PTA_RX, GPIO 28 => PTA_TX
2464 //DRV_WriteReg32(0xC0005000 + 0x338, 0x77<<12);
2465 //DRV_WriteReg32(0xC0005000 + 0x334, 0x44<<12);
2466 return_value = STATUS_OK;
2467
2468 #elif defined(MT6893)
2469 //unmodified
2470 //DRV_WriteReg32(0xC0005000 + 0x600, DRV_Reg(0xC0005000 + 0x600) | 0x20);
2471 //GPIO 27 => PTA_RX, GPIO 28 => PTA_TX
2472 //DRV_WriteReg32(0xC0005000 + 0x338, 0x77<<12);
2473 //DRV_WriteReg32(0xC0005000 + 0x334, 0x44<<12);
2474 return_value = STATUS_OK;
2475
2476 #elif defined(MT6877)
2477 //unmodified
2478 //DRV_WriteReg32(0xC0005000 + 0x600, DRV_Reg(0xC0005000 + 0x600) | 0x20);
2479 //GPIO 27 => PTA_RX, GPIO 28 => PTA_TX
2480 //DRV_WriteReg32(0xC0005000 + 0x338, 0x77<<12);
2481 //DRV_WriteReg32(0xC0005000 + 0x334, 0x44<<12);
2482 return_value = STATUS_OK;
2483 #endif
2484
2485 }
2486
2487 // Clear TX/RX FIFO; removed because of a HW limitation: the RX FIFO cannot be cleared simultaneously
2488 //DRV_WriteReg32_NPW(IDC_UART_FCR, IDC_UART_FCR_RXTRIG | IDC_UART_FCR_FIFOINI);
2489
2490 IRQUnmask(MD_IRQID_IDC_UART_IRQ);
2491
2492 return return_value;
2493}
2494
2495DCL_STATUS drv_idc_get_pin_config(IDC_PIN_MODE_T *pin_mode)
2496{
2497 DCL_STATUS return_value = STATUS_FAIL;
2498
2499 #if defined(MT6763) || defined(MT6739)|| defined(MT6765) || defined(MT6771)||defined(MT6761)||defined(MT6893)||defined(MT6833)||defined(MT6877)
2500 *pin_mode = idc_port.pin_mode;
2501 return_value = STATUS_OK;
2502 #endif
2503
2504 return return_value;
2505}
2506
2507// Enable ilm: drv_idc_set_ilm(KAL_TRUE)
2508// Disable ilm: drv_idc_set_ilm(KAL_FALSE)
2509void drv_idc_set_ilm(kal_bool ilm_mode)
2510{
2511 idc_ilm_on = ilm_mode;
2512 if(ilm_mode == KAL_TRUE)
2513 {
2514 MD_TRC(IDC_ILM_ENABLE_MSG,);
2515
2516 return;
2517 }
2518 else
2519 {
2520 MD_TRC(IDC_ILM_DISABLE_MSG,);
2521
2522 return;
2523 }
2524}
2525
2526kal_uint32 idc_isr_count = 0;
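/*
 * idc_uart_lisr(): IDC UART low-level ISR. It snapshots IER/IIR/LSR, masks all
 * UART interrupts and tries the power-management atomic lock. THR-empty
 * interrupts release the sleep lock and re-enable RX when either RAT flag is
 * set; sleep-state or invalid interrupts are simply acknowledged. Anything
 * else (i.e. received data) is deferred to idc_uart_hisr() via the HISR index.
 */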
2527void idc_uart_lisr(kal_uint32 vector)
2528{
2529 kal_uint32 mask;
2530
2531 IER_L = DRV_Reg(IDC_UART_IER) & IDC_UART_IER_INT_MASK;
2532 IIR_L = DRV_Reg(IDC_UART_IIR) & IDC_UART_IIR_INT_MASK;
2533 LSR_L = DRV_Reg(IDC_UART_LSR);
2534
2535 // Turn off all INT
2536 DRV_WriteReg32_NPW(IDC_UART_IER, IDC_UART_IER_ALLOFF);
2537
2538 mask = kal_hrt_SaveAndSetIRQMask();
2539 if(!(_idc_atomic_try_lock(&idc_drv_atom_lock,IDC_ATLOCK_PWR_LISR)))
2540 {
2541
2542 if(IIR_L & IDC_UART_IIR_THR_EMPTY)
2543 {
2544 #ifndef ATEST_DRV_ENABLE
2545 SleepDrv_UnlockSleep(SLEEP_CTL_IDC, SMP);
2546 #endif
2547 if((LTE_FLAG == KAL_TRUE) || (NR_FLAG == KAL_TRUE))
2548 DRV_WriteReg32_NPW(IDC_UART_IER, IDC_UART_IER_ERBFI);// Enable RX interrupt
2549 }
2550 kal_hrt_RestoreIRQMask(mask);
2551 return;
2552 }
2553
2554 IRQMask(MD_IRQID_IDC_UART_IRQ);
2555
2556 if (IIR_L & IDC_UART_IIR_THR_EMPTY)
2557 {
2558
2559#ifndef ATEST_DRV_ENABLE
2560 SleepDrv_UnlockSleep(SLEEP_CTL_IDC, SMP);
2561#endif
2562 if((LTE_FLAG == KAL_TRUE) || (NR_FLAG == KAL_TRUE))
2563 DRV_WriteReg32_NPW(IDC_UART_IER, IDC_UART_IER_ERBFI);// Enable RX interrupt
2564
2565 IRQUnmask(MD_IRQID_IDC_UART_IRQ);
2566 _idc_atomic_unlock(&idc_drv_atom_lock,IDC_ATLOCK_PWR_LISR);
2567 kal_hrt_RestoreIRQMask(mask);
2568 return;
2569 }
2570 else if (idc_port.main_state == IDC_IN_SLEEP)
2571 {
2572 IRQUnmask(MD_IRQID_IDC_UART_IRQ);
2573 _idc_atomic_unlock(&idc_drv_atom_lock,IDC_ATLOCK_PWR_LISR);
2574 kal_hrt_RestoreIRQMask(mask);
2575 return;
2576 }
2577 else if (IIR_L & IDC_UART_IIR_INT_INVALID)
2578 {
2579 // Restore enable interrupt
2580 DRV_WriteReg32_NPW(IDC_UART_IER, IER_L);
2581 IRQUnmask(MD_IRQID_IDC_UART_IRQ);
2582 _idc_atomic_unlock(&idc_drv_atom_lock,IDC_ATLOCK_PWR_LISR);
2583 kal_hrt_RestoreIRQMask(mask);
2584 return;
2585 }
2586
2587 idc_lisr_count++;
2588 if (idc_lisr_count == 20)
2589 {
2590 idc_lisr_count = 0;
2591 }
2592
2593 _idc_atomic_unlock(&idc_drv_atom_lock,IDC_ATLOCK_PWR_LISR);
2594 kal_hrt_RestoreIRQMask(mask);
2595 kal_activate_hisr_index(IDC_UART_HISR);
2596 return;
2597}
2598
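/*
 * idc_uart_hisr(): drains the RX FIFO under the PWR HISR atomic lock by calling
 * idc_send_rx_data_by_ilm()/_95(), logs a detailed register snapshot when the
 * interrupt fired without any data byte being read (the "DR issue" case, which
 * asserts after 10 consecutive hits), re-enables the RX interrupt, and finally
 * dispatches the queued ILM message(s) to the owner module outside the locked
 * region.
 */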
2599void idc_uart_hisr(void)
2600{
2601 kal_uint32 mask;
2602#if defined(__MD93__)
2603 kal_uint32 i = 0;
2604#endif
2605
2606 idc_in_hisr = KAL_TRUE;
2607
2608 IIR_H = DRV_Reg(IDC_UART_IIR) & IDC_UART_IIR_INT_MASK;
2609 LSR_H = DRV_Reg(IDC_UART_LSR);
2610
2611#if !defined(__MAUI_BASIC__)
2612 kal_uint8 r_offset, w_offset, op_rx_req, fcr_rd, scr;
2613 kal_uint32 clk1, clk2;
2614 kal_uint16 RXTRIG;
2615 RXTRIG = DRV_Reg(IDC_UART_RXTRIG);
2616#endif
2617
2618#if defined(ATEST_DRV_ENABLE)
2619// kal_sprintf(idc_dbg_str, "drv_idc: HISR %d, IIR = %x\n\r", ++idc_isr_count, IIR);
2620// DT_IDC_PRINTF(idc_dbg_str);
2621#endif
2622
2623 idc_hisr_time[idc_hisr_count] = ust_get_current_time();
2624 idc_hisr_count++;
2625 if (idc_hisr_count == 20)
2626 {
2627 idc_hisr_count = 0;
2628 }
2629
2630 mask = kal_hrt_SaveAndSetIRQMask();
2631 before_poll_time_UART_HISR = ust_get_current_time();
2632 if(!(_idc_atomic_try_lock(&idc_drv_atom_lock,IDC_ATLOCK_PWR_HISR)))
2633 {
2634 after_poll_time_UART_HISR = ust_get_current_time();
2635 kal_hrt_RestoreIRQMask(mask);
2636 return;
2637 }
2638
2639 if(idc_port.main_state == IDC_IN_SLEEP)
2640 {
2641 _idc_atomic_unlock(&idc_drv_atom_lock,IDC_ATLOCK_PWR_HISR);
2642 after_poll_time_UART_HISR = ust_get_current_time();
2643 kal_hrt_RestoreIRQMask(mask);
2644
2645 return;
2646 }
2647
2648#if defined(__MD93__)
2649 idc_send_rx_data_by_ilm();
2650#elif defined(__MD95__) || defined(__MD97__) || defined(__MD97P__)
2651 idc_send_rx_data_by_ilm_95();
2652#endif
2653
2654 if (KAL_FALSE == idc_read_RBR)
2655 {
2656 #if !defined(ATEST_DRV_ENABLE)
2657 MD_TRC(IDC_RX_LISR_MSG,IIR_L, IER_L, LSR_L);
2658 #if !defined(__MAUI_BASIC__)
2659 fcr_rd = DRV_Reg8(IDC_UART_FCR_RD);
2660 op_rx_req = DRV_Reg8(IDC_OP_RX_REQ);
2661 r_offset = DRV_Reg8(IDC_RX_ROFFSET);
2662 w_offset = DRV_Reg8(IDC_RX_WOFFSET);
2663 clk1 = DRV_Reg32(0xB0820018);
2664 clk2 = DRV_Reg32(0xB0820050);
2665 DRV_WriteReg32_NPW(IDC_UART_SCR, 0x5A);
2666 scr = DRV_Reg8(IDC_UART_SCR);
2667 MD_TRC(IDC_RX_HISR_MSG,IIR_H, LSR_H, RXTRIG, idc_port.main_state, idc_port.owner_id, idc_port.intr_en);
2668 MD_TRC(IDC_BEFORE_CLR_RX_FIFO_MSG, fcr_rd, op_rx_req, r_offset, w_offset, clk1, clk2, scr);
2669
2670 if((IIR_L == 0x4) && (LSR_L == 0x60) && (r_offset != w_offset)){
2671 MD_TRC(IDC_DR_ISSUE_HIT_MSG);
2672 DR_ISSUE_FLAG = KAL_TRUE;
2673 }
2674 #endif
2675 #else
2676 #if !defined(__MAUI_BASIC__)
2677 kal_sprintf(idc_dbg_str, "drv_idc: HISR without Read Data, IIR = %x, LSR = %x, RXTRIG = %x, (%d, %d, %d)\n\r",
2678 IIR_H, LSR_H, RXTRIG, idc_port.main_state, idc_port.owner_id, idc_port.intr_en);
2679 DT_IDC_PRINTF(idc_dbg_str);
2680 #endif
2681 #endif
2682
2683 if(DR_ISSUE_FLAG == KAL_TRUE){
2684 DR_ISSUE_FAIL_CNT++;
2685 if(DR_ISSUE_FAIL_CNT >= 10){
2686 MD_TRC(IDC_DR_ISSUE_RECOVER_FAIL_MSG);
2687 ASSERT(0);
2688 }
2689 }
2690
2691 }
2692
2693 IRQUnmask(MD_IRQID_IDC_UART_IRQ);
2694
2695 // Enable RX interrupt
2696 DRV_WriteReg32_NPW(IDC_UART_IER, IDC_UART_IER_ERBFI);
2697 _idc_atomic_unlock(&idc_drv_atom_lock,IDC_ATLOCK_PWR_HISR);
2698 after_poll_time_UART_HISR = ust_get_current_time();
2699 kal_hrt_RestoreIRQMask(mask);
2700
2701 idc_read_RBR = KAL_FALSE;
2702 idc_in_hisr = KAL_FALSE;
2703 MM_Sync();
2704
2705#if defined(__MD93__)
2706 if(idc_ilm_on == KAL_TRUE)
2707 {
2708 i = 0;
2709 while(ilm_num > 0)
2710 {
2711 ilm_num--;
2712 msg_send_inline6(MOD_IDC_UART_HISR, idc_port.owner_id, DRIVER_PS_SAP | INLINE_ILM_FLAG_SAP, MSG_ID_IDC_RX_DATA, (void *) &ilm_buf[i++], sizeof(IDC_ILM_MSG_T));
2713 }
2714 }
2715 else
2716 ilm_num = 0;
2717#elif defined(__MD95__) || defined(__MD97__) || defined(__MD97P__)
2718 if(idc_ilm_trigger == KAL_TRUE)
2719 {
2720 if(idc_ilm_on == KAL_TRUE)
2721 {
2722 msg_send_inline6(MOD_IDC_UART_HISR, idc_port.owner_id, DRIVER_PS_SAP | INLINE_ILM_FLAG_SAP, MSG_ID_IDC_RX_DATA, (void *) &ilm, sizeof(IDC_ILM_MSG_T));
2723 //DT_IDC_PRINTF("ILM actually send done\n\r");
2724 }
2725
2726 idc_ilm_trigger = KAL_FALSE;
2727 MM_Sync();
2728 }
2729#endif
2730
2731
2732 return;
2733}
2734
2735#if defined(__MD93__)
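/*
 * idc_send_rx_data_by_ilm() - MD93 RX path, at most 8 bytes per HISR.
 * Each command is two bytes: the first byte (LSB == 0) carries the 4-bit type in
 * bits[4:1] and msg[2:0] in bits[7:5]; the second byte (LSB == 1) carries
 * msg[9:3] in bits[7:1]. rx_buf accumulates both bytes together with presence
 * markers, and only the pattern 0x01000101 is decoded; the two LSB-mismatch
 * patterns are logged as abnormal. Types 0/1 just bump the 802.11 RX/TX
 * counters; other types are queued in ilm_buf for delivery after the HISR.
 * If 20 bytes arrive within 100 time units (ust ticks, presumably microseconds),
 * RX is suspended, the FIFO is cleared and the CONSYS RX_OFF callback is invoked.
 */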
2736void idc_send_rx_data_by_ilm(void)
2737{
2738
2739 kal_uint32 max_rx_count = 8;
2740#if !defined(__MAUI_BASIC__)
2741 kal_uint32 count = 0;
2742#endif
2743 if(ilm_num !=0)
2744 MD_TRC(IDC_ILMNUM_ABNORMAL_MSG,__LINE__);
2745 while(DRV_Reg(IDC_UART_LSR) & IDC_UART_LSR_DR)
2746 {
2747 max_rx_count--;
2748 // read bytes from IDC UART FIFO to SW buffer
2749 idc_port.rx_buf = (idc_port.rx_buf << 16) | (1 << 8) | DRV_Reg8(IDC_UART_RBR);
2750 idc_read_RBR = KAL_TRUE;
2751
2752 idc_rx_history[idc_rx_count] = (kal_uint8)(idc_port.rx_buf & 0xFF);
2753 idc_rx_history_time[idc_rx_count] = ust_get_current_time();
2754
2755 /*#if !defined(ATEST_DRV_ENABLE)
2756 MD_TRC(IDC_RX_MSG,idc_port.rx_buf & 0xFF);
2757 #endif*/
2758
2759#if defined(ATEST_DRV_ENABLE)
2760// DT_IDC_PRINTF("drv_idc: receive %x\n\r", idc_port.rx_buf & 0xFF);
2761#endif
2762 idc_rx_count++;
2763 if (idc_rx_count == 20)
2764 {
2765 idc_rx_count = 0;
2766 #if !defined(ATEST_DRV_ENABLE)
2767 MD_TRC(IDC_RX_HISTORY_MSG,
2768 idc_rx_history[0], idc_rx_history[1], idc_rx_history[2], idc_rx_history[3], idc_rx_history[4],
2769 idc_rx_history[5], idc_rx_history[6], idc_rx_history[7], idc_rx_history[8], idc_rx_history[9],
2770 idc_rx_history[10], idc_rx_history[11], idc_rx_history[12], idc_rx_history[13], idc_rx_history[14],
2771 idc_rx_history[15], idc_rx_history[16], idc_rx_history[17], idc_rx_history[18], idc_rx_history[19]
2772 );
2773 #else
2774 //kal_sprintf(idc_dbg_str, "drv_idc: Receive %x %x %x %x %x %x %x %x %x %x %x %x %x %x %x %x %x %x %x %x\n\r",
2775 // idc_rx_history[0], idc_rx_history[1], idc_rx_history[2], idc_rx_history[3], idc_rx_history[4],
2776 // idc_rx_history[5], idc_rx_history[6], idc_rx_history[7], idc_rx_history[8], idc_rx_history[9],
2777 // idc_rx_history[10], idc_rx_history[11], idc_rx_history[12], idc_rx_history[13], idc_rx_history[14],
2778 // idc_rx_history[15], idc_rx_history[16], idc_rx_history[17], idc_rx_history[18], idc_rx_history[19]
2779 // );
2780 DT_IDC_PRINTF(idc_dbg_str);
2781 #endif
2782
2783#if !defined(ATEST_DRV_ENABLE)
2784 // Check whether IDC commands are being received too frequently
2785 if ((idc_rx_history_time[19] - idc_rx_history_time[0]) < 100)
2786 {
2787 idc_rx_suspend = KAL_TRUE;
2788 // Reset the timer and start to wait 10 ms (EL1 will call event_start every 1 ms)
2789 idc_rx_suspend_timer = 0;
2790 //IRQMask(MD_IRQID_IDC_UART_IRQ);
2791 // Disable RX interrupt
2792 DRV_WriteReg32(IDC_UART_IER, IDC_UART_IER_ALLOFF);
2793 // Check writing register is finished
2794 while (DRV_Reg32(IDC_UART_IER) != IDC_UART_IER_ALLOFF)
2795 {
2796 #if !defined(__MAUI_BASIC__)
2797 MD_TRC(IDC_WAIT_IER_OFF_MSG,count++);
2798 #endif
2799 }
2800 // Clean RX FIFO
2801 DRV_WriteReg32_NPW(IDC_UART_FCR, IDC_UART_FCR_RXTRIG | IDC_UART_FCR_FIFOINI);
2802 MD_TRC(IDC_CLEAN_RXFIFO_MSG,__FUNCTION__);
2803 // Change idc_port state
2804 idc_port.main_state = IDC_SUSPEND;
2805 // Use DSB to ensure that the interrupt is disabled before leaving HISR
2806 MM_Sync();
2807 // Print warning message
2808 MD_TRC(IDC_RX_SUSPEND_MSG,);
2809 // Call CONSYS RX_OFF callback for EL1C request
2810 if (idc_port.pm_cb_handle[1].callback_func)
2811 idc_port.pm_cb_handle[1].callback_func(idc_port.pm_cb_handle[1].private_data);
2812 return;
2813 }
2814#endif
2815 }
2816
2817 // if a valid two-byte command has been assembled, send it by ILM
2818 if((idc_port.rx_buf & 0x01010101) == 0x01000101)
2819 {
2820 IDC_ILM_MSG_T tmp;
2821 tmp.type = (idc_port.rx_buf & 0x001E0000) >> 17;
2822 tmp.msg = ((idc_port.rx_buf & 0x00E00000) >> 21) | ((idc_port.rx_buf & 0x000000FE) << 2);
2823 #if defined(ATEST_DRV_ENABLE)
2824 DT_IDC_PRINTF("%x %x\n\r", tmp.type, tmp.msg);
2825 #endif
2826 if (tmp.type == 0)
2827 {
2828 if (KAL_TRUE == idc_count_start)
2829 {
2830 idc_80211_rx_count++;
2831 MD_TRC(IDC_RX_80211_RX_MSG,tmp.type, tmp.msg);
2832 }
2833 }
2834 else if (tmp.type == 1)
2835 {
2836 if (KAL_TRUE == idc_count_start)
2837 {
2838 idc_80211_tx_count++;
2839 MD_TRC(IDC_RX_80211_TX_MSG,tmp.type, tmp.msg);
2840 }
2841 }
2842 else
2843 {
2844 if(idc_ilm_on == KAL_TRUE)
2845 {
2846 #if !defined(ATEST_DRV_ENABLE)
2847 MD_TRC(IDC_RX_SEND_ILM_MSG,idc_port.rx_buf, tmp.type, tmp.msg);
2848 #else
2849 kal_sprintf(idc_dbg_str, "drv_idc: MSG Send to EL1: %x, type:%x, msg:%x\n\r", idc_port.rx_buf, tmp.type, tmp.msg);
2850 DT_IDC_PRINTF(idc_dbg_str);
2851 #endif
2852 ilm_buf[ilm_num++] = tmp;
2853 if(ilm_num > 4)
2854 MD_TRC(IDC_ILMNUM_ABNORMAL_MSG,__LINE__);
2855 //msg_send_inline6(MOD_IDC_UART_HISR, idc_port.owner_id, DRIVER_PS_SAP | INLINE_ILM_FLAG_SAP, MSG_ID_IDC_RX_DATA, (void *) &tmp, sizeof(IDC_ILM_MSG_T));
2856 }
2857 else
2858 {
2859 #if !defined(ATEST_DRV_ENABLE)
2860 MD_TRC(IDC_RX_NOT_SEND_ILM_MSG,idc_port.rx_buf, tmp.type, tmp.msg);
2861 #else
2862 //kal_sprintf(idc_dbg_str, "drv_idc: MSG Not Send to EL1: %x, type:%x, msg:%x\n\r", idc_port.rx_buf, tmp.type, tmp.msg);
2863 DT_IDC_PRINTF(idc_dbg_str);
2864 #endif
2865 }
2866 }
2867
2868 idc_port.rx_buf = 0;
2869 }
2870 //rx_buf received 2 abnormal bytes
2871 else if((idc_port.rx_buf & 0x01010101) == 0x01000100)
2872 {
2873 MD_TRC(IDC_RX_ABNORMAL_MSG,idc_port.rx_buf);
2874 }
2875 else if((idc_port.rx_buf & 0x01010101) == 0x01010101)
2876 {
2877 MD_TRC(IDC_RX_ABNORMAL_MSG,idc_port.rx_buf);
2878 }
2879 if (max_rx_count == 0)
2880 return;
2881 }
2882
2883 return;
2884}
2885
2886#elif defined(__MD95__) || defined(__MD97__) || defined(__MD97P__)
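/*
 * idc_send_rx_data_by_ilm_95() - MD95/MD97 RX path.
 * Runs a small state machine over the RX bytes (see the framing notes inside the
 * loop), keeps a 20-entry RX history for debugging and assembles one ilm at a
 * time. Completed commands of type 0/1 only bump the 802.11 RX/TX counters, and
 * type 0xF with sub_type 1 only bumps the CONSYS TX grant counter; everything
 * else sets idc_ilm_trigger so that idc_uart_hisr() sends the ilm after unlocking.
 */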
2887void idc_send_rx_data_by_ilm_95(void)
2888{
2889 //kal_uint32 count = 0;
2890 kal_uint32 msg_tmp = 0;
2891 kal_uint8 rx_data = 0, rx_count_log = 0;
2892 while(DRV_Reg(IDC_UART_LSR) & IDC_UART_LSR_DR)
2893 {
2894 // read bytes from IDC UART FIFO to SW buffer
2895 idc_port.rx_buf = DRV_Reg8(IDC_UART_RBR);
2896 rx_data = (kal_uint8)(idc_port.rx_buf & 0xFF);
2897 //idc_port.rx_buf = (idc_port.rx_buf << 16) | (1 << 8) | DRV_Reg8(IDC_UART_RBR);
2898 idc_read_RBR = KAL_TRUE;
2899
2900 idc_rx_history[idc_rx_count] = (kal_uint8)(idc_port.rx_buf & 0xFF);
2901 idc_rx_history_time[idc_rx_count] = ust_get_current_time();
2902#if defined(ATEST_DRV_ENABLE)
2903 //DT_IDC_PRINTF("drv_idc: receive %x\n\r", idc_port.rx_buf & 0xFF);
2904#endif
2905 idc_rx_count++;
2906 if (idc_rx_count == 20)
2907 {
2908 idc_rx_count = 0;
2909 #if !defined(ATEST_DRV_ENABLE)
2910 MD_TRC(IDC_RX_HISTORY_MSG,
2911 idc_rx_history[0], idc_rx_history[1], idc_rx_history[2], idc_rx_history[3], idc_rx_history[4],
2912 idc_rx_history[5], idc_rx_history[6], idc_rx_history[7], idc_rx_history[8], idc_rx_history[9],
2913 idc_rx_history[10], idc_rx_history[11], idc_rx_history[12], idc_rx_history[13], idc_rx_history[14],
2914 idc_rx_history[15], idc_rx_history[16], idc_rx_history[17], idc_rx_history[18], idc_rx_history[19]
2915 );
2916 #else
2917 //kal_sprintf(idc_dbg_str, "drv_idc: Receive %x %x %x %x %x %x %x %x %x %x %x %x %x %x %x %x %x %x %x %x\n\r",
2918 // idc_rx_history[0], idc_rx_history[1], idc_rx_history[2], idc_rx_history[3], idc_rx_history[4],
2919 // idc_rx_history[5], idc_rx_history[6], idc_rx_history[7], idc_rx_history[8], idc_rx_history[9],
2920 // idc_rx_history[10], idc_rx_history[11], idc_rx_history[12], idc_rx_history[13], idc_rx_history[14],
2921 // idc_rx_history[15], idc_rx_history[16], idc_rx_history[17], idc_rx_history[18], idc_rx_history[19]
2922 // );
2923 //DT_IDC_PRINTF(idc_dbg_str);
2924 #endif
2925 }
2926 rx_count_log = idc_rx_count;
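        /*
         * Command framing, as implemented below:
         *  - a byte with bit0 == 0 starts a command; if its low five bits are
         *    0x1E it is the "new" multi-byte format (type = bits[4:1] = 0xF,
         *    ELEN = bits[7:5], total length ELEN + 2 bytes), otherwise it is
         *    the "old" two-byte format (type = bits[4:1], msg1[2:0] = bits[7:5]).
         *  - old format: the second byte must have bit0 == 1 and supplies
         *    msg1[9:3] from bits[7:1].
         *  - new format: bytes with an odd idc_cmd_byte_count must carry marker
         *    bits[1:0] == 0x1 and bytes with an even count must carry 0x3; each
         *    adds 6 payload bits (sub_type at byte count 1, then msg1, spilling
         *    into msg2 at counts 7 and 8). A marker mismatch resets the parser
         *    and increments idc_new_cmd_error_cnt.
         */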
2927 if(new_cmd_flag)
2928 {
2929 if(idc_cmd_byte_count == 0)
2930 {
2931 if(rx_count_log == 0)
2932 rx_count_log = 20;
2933 MD_TRC(IDC_NEW_CMD_ERROR_MSG, idc_new_cmd_error_cnt, (idc_cmd_byte_count), rx_data, (rx_count_log-1));
2934 MD_TRC(IDC_RX_HISTORY_MSG,
2935 idc_rx_history[0], idc_rx_history[1], idc_rx_history[2], idc_rx_history[3], idc_rx_history[4],
2936 idc_rx_history[5], idc_rx_history[6], idc_rx_history[7], idc_rx_history[8], idc_rx_history[9],
2937 idc_rx_history[10], idc_rx_history[11], idc_rx_history[12], idc_rx_history[13], idc_rx_history[14],
2938 idc_rx_history[15], idc_rx_history[16], idc_rx_history[17], idc_rx_history[18], idc_rx_history[19]
2939 );
2940
2941 }
2942 else if(idc_cmd_byte_count == 1)
2943 {
2944 if((rx_data & 0x3) == 0x1)
2945 {
2946 ilm.sub_type = (rx_data & 0xFC) >> 2;
2947 idc_cmd_byte_count ++;
2948 if(idc_cmd_byte_count == (ilm.elen + 2))
2949 {
2950 ilm_stage = KAL_TRUE;
2951 //DT_IDC_PRINTF("new cmd sent to ilm\n\r");
2952 }
2953
2954 }
2955 else
2956 {
2957 new_cmd_flag = KAL_FALSE;
2958 kal_mem_set(&ilm, 0, sizeof(IDC_ILM_MSG_T));
2959 //log error info. for debug
2960 idc_new_cmd_error_cnt++;
2961
2962 if(rx_count_log == 0)
2963 rx_count_log = 20;
2964 MD_TRC(IDC_NEW_CMD_ERROR_MSG, idc_new_cmd_error_cnt, (idc_cmd_byte_count+1), rx_data, (rx_count_log-1));
2965 MD_TRC(IDC_RX_HISTORY_MSG,
2966 idc_rx_history[0], idc_rx_history[1], idc_rx_history[2], idc_rx_history[3], idc_rx_history[4],
2967 idc_rx_history[5], idc_rx_history[6], idc_rx_history[7], idc_rx_history[8], idc_rx_history[9],
2968 idc_rx_history[10], idc_rx_history[11], idc_rx_history[12], idc_rx_history[13], idc_rx_history[14],
2969 idc_rx_history[15], idc_rx_history[16], idc_rx_history[17], idc_rx_history[18], idc_rx_history[19]
2970 );
2971 //DT_IDC_PRINTF("new cmd format error, count: %d, rx_data : %x\n\r", count , idc_port.rx_buf);
2972 }
2973
2974 }
2975 else
2976 {
2977 //even-numbered byte
2978 if((idc_cmd_byte_count % 2) == 1)
2979 {
2980 if((rx_data & 0x3) == 0x1)
2981 {
2982 if(idc_cmd_byte_count != 7)
2983 {
2984 msg_tmp = (rx_data & 0xFC) >> 2;
2985 ilm.msg1 = ilm.msg1 | (msg_tmp << (6 * (idc_cmd_byte_count - 2)));
2986 }
2987 else
2988 {
2989 msg_tmp = (rx_data & 0xC) >> 2;
2990 ilm.msg2 = (rx_data & 0xF0) >> 4;
2991
2992 ilm.msg1 = ilm.msg1 | (msg_tmp << (6 * (idc_cmd_byte_count - 2)));
2993 }
2994 idc_cmd_byte_count ++;
2995 if(idc_cmd_byte_count == (ilm.elen + 2))
2996 {
2997 ilm_stage = KAL_TRUE;
2998 //DT_IDC_PRINTF("new cmd sent to ilm\n\r");
2999 }
3000 }
3001 else
3002 {
3003 new_cmd_flag = KAL_FALSE;
3004 kal_mem_set(&ilm, 0, sizeof(IDC_ILM_MSG_T));
3005
3006 //log error info. for debug
3007 idc_new_cmd_error_cnt++;
3008
3009 if(rx_count_log == 0)
3010 rx_count_log = 20;
3011 MD_TRC(IDC_NEW_CMD_ERROR_MSG, idc_new_cmd_error_cnt, (idc_cmd_byte_count+1), rx_data, (rx_count_log-1));
3012 MD_TRC(IDC_RX_HISTORY_MSG,
3013 idc_rx_history[0], idc_rx_history[1], idc_rx_history[2], idc_rx_history[3], idc_rx_history[4],
3014 idc_rx_history[5], idc_rx_history[6], idc_rx_history[7], idc_rx_history[8], idc_rx_history[9],
3015 idc_rx_history[10], idc_rx_history[11], idc_rx_history[12], idc_rx_history[13], idc_rx_history[14],
3016 idc_rx_history[15], idc_rx_history[16], idc_rx_history[17], idc_rx_history[18], idc_rx_history[19]
3017 );
3018 //DT_IDC_PRINTF("new cmd format error, count: %d, rx_data : %x\n\r", count , idc_port.rx_buf);
3019 }
3020 }
3021 //odd-numbered byte
3022 else
3023 {
3024 if((rx_data & 0x3) == 0x3)
3025 {
3026 if(idc_cmd_byte_count != 8)
3027 {
3028 msg_tmp = (rx_data & 0xFC) >> 2;
3029 ilm.msg1 = ilm.msg1 | (msg_tmp << (6 * (idc_cmd_byte_count - 2)));
3030 }
3031 else
3032 {
3033 msg_tmp = (rx_data & 0xFC) >> 2;
3034 ilm.msg2 = ilm.msg2 | (msg_tmp << 4);
3035 }
3036 idc_cmd_byte_count ++;
3037 if(idc_cmd_byte_count ==(ilm.elen + 2))
3038 {
3039 ilm_stage = KAL_TRUE;
3040 //DT_IDC_PRINTF("new cmd sent to ilm\n\r");
3041 }
3042 }
3043 else
3044 {
3045 new_cmd_flag = KAL_FALSE;
3046 kal_mem_set(&ilm, 0, sizeof(IDC_ILM_MSG_T));
3047
3048 //log error info. for debug
3049 idc_new_cmd_error_cnt++;
3050
3051 if(rx_count_log == 0)
3052 rx_count_log = 20;
3053 MD_TRC(IDC_NEW_CMD_ERROR_MSG, idc_new_cmd_error_cnt, (idc_cmd_byte_count+1), rx_data, (rx_count_log-1));
3054 MD_TRC(IDC_RX_HISTORY_MSG,
3055 idc_rx_history[0], idc_rx_history[1], idc_rx_history[2], idc_rx_history[3], idc_rx_history[4],
3056 idc_rx_history[5], idc_rx_history[6], idc_rx_history[7], idc_rx_history[8], idc_rx_history[9],
3057 idc_rx_history[10], idc_rx_history[11], idc_rx_history[12], idc_rx_history[13], idc_rx_history[14],
3058 idc_rx_history[15], idc_rx_history[16], idc_rx_history[17], idc_rx_history[18], idc_rx_history[19]
3059 );
3060 //DT_IDC_PRINTF("new cmd format error, count: %d, rx_data : %x\n\r", count , idc_port.rx_buf);
3061 }
3062 }
3063 }
3064
3065 }
3066 else if(old_cmd_flag)
3067 {
3068 if((rx_data & 0x1) == 0x1)
3069 {
3070 msg_tmp = (rx_data & 0xFE) >> 1;
3071 ilm.msg1 = ilm.msg1 | (msg_tmp << 3);
3072 old_cmd_flag = KAL_FALSE;
3073 ilm_stage = KAL_TRUE;
3074 //DT_IDC_PRINTF("old cmd sent to ilm: msg = %x\n\r", ilm.msg1);
3075 }
3076 else if((rx_data & 0x1) == 0x0)
3077 {
3078 // if there is new format of IDC_CMD
3079 if((rx_data & 0x1F) == 0x1E)
3080 {
3081 ilm.type = (rx_data & 0x1E) >> 1;
3082 ilm.elen = (rx_data & 0xE0) >> 5;
3083 new_cmd_flag = KAL_TRUE;
3084 old_cmd_flag = KAL_FALSE;
3085 idc_cmd_byte_count = 1;
3086 //DT_IDC_PRINTF("new cmd : type = %x, ELEN = %d\n\r", ilm.type, ilm.elen);
3087 }
3088 else
3089 {
3090 ilm.type = (rx_data & 0x1E) >> 1;
3091 ilm.msg1 = (rx_data & 0xE0) >> 5;
3092 old_cmd_flag = KAL_TRUE;
3093 //DT_IDC_PRINTF("old cmd : type = %x\n\r", ilm.type);
3094 }
3095 }
3096 else
3097 old_cmd_flag = KAL_FALSE;
3098 }
3099 else
3100 {
3101 // if there is new format of IDC_CMD
3102 if((rx_data & 0x1F) == 0x1E)
3103 {
3104 kal_mem_set(&ilm, 0, sizeof(IDC_ILM_MSG_T));
3105 ilm.type = (rx_data & 0x1E) >> 1;
3106 ilm.elen = (rx_data & 0xE0) >> 5;
3107 new_cmd_flag = KAL_TRUE;
3108 idc_cmd_byte_count = 1;
3109 //DT_IDC_PRINTF("new cmd : type = %x, ELEN = %d\n\r", ilm.type, ilm.elen);
3110 }
3111 else if((rx_data & 0x1) == 0x0)
3112 {
3113 kal_mem_set(&ilm, 0, sizeof(IDC_ILM_MSG_T));
3114 ilm.type = (rx_data & 0x1E) >> 1;
3115 ilm.msg1 = (rx_data & 0xE0) >> 5;
3116 old_cmd_flag = KAL_TRUE;
3117 //DT_IDC_PRINTF("old cmd : type = %x\n\r", ilm.type);
3118 }
3119 }
3120
3121 if(ilm_stage)
3122 {
3123 MD_TRC(IDC_RX_FIFO_DATA_CNT_MSG, DRV_Reg8(IDC_RX_WOFFSET));
3124 if (ilm.type == 0)
3125 {
3126 idc_80211_rx_count++;
3127 MD_TRC(IDC_RX_80211_RX_MSG,ilm.type, ilm.msg1);
3128 //DT_IDC_PRINTF("80211_rx cmd , no ILM\n\r");
3129 }
3130 else if (ilm.type == 1)
3131 {
3132 idc_80211_tx_count++;
3133 MD_TRC(IDC_RX_80211_TX_MSG,ilm.type, ilm.msg1);
3134 //DT_IDC_PRINTF("80211_tx cmd , no ILM\n\r");
3135 }
3136 else if ((ilm.type == 0xF) && (ilm.sub_type == 1))
3137 {
3138 idc_consys_tx_grant_ntf++;
3139 MD_TRC(IDC_CONSYS_TX_GRANT_NTF_MSG,ilm.type, ilm.sub_type, ilm.msg1);
3140 //DT_IDC_PRINTF("CONNSYS_TX_GRANT_NTF cmd , no ILM\n\r");
3141 }
3142 else
3143 {
3144 idc_ilm_trigger = KAL_TRUE;
3145 if(idc_ilm_on == KAL_TRUE)
3146 {
3147 #if !defined(ATEST_DRV_ENABLE)
3148 MD_TRC(IDC_RX_95_SEND_ILM_MSG, rx_data, ilm.type, ilm.sub_type, ilm.msg1, ilm.msg2, (ilm.elen+2));
3149 #else
3150 //kal_sprintf(idc_dbg_str, "drv_idc: MSG Send to EL1: %x, type:%x, msg:%x, msg2:%x\n\r", idc_port.rx_buf, ilm.type, ilm.msg1, ilm.msg2);
3151 //DT_IDC_PRINTF(idc_dbg_str);
3152 #endif
3153 //msg_send_inline6(MOD_IDC_UART_HISR, idc_port.owner_id, DRIVER_PS_SAP | INLINE_ILM_FLAG_SAP, MSG_ID_IDC_RX_DATA, (void *) &ilm, sizeof(IDC_ILM_MSG_T));
3154 }
3155 else
3156 {
3157 #if !defined(ATEST_DRV_ENABLE)
3158 MD_TRC(IDC_RX_95_NOT_SEND_ILM_MSG, rx_data, ilm.type, ilm.sub_type, ilm.msg1, ilm.msg2, (ilm.elen+2));
3159 #else
3160 kal_sprintf(idc_dbg_str, "drv_idc: MSG Not Send to EL1: %x, type:%x, msg1:%x, msg2:%x\n\r", idc_port.rx_buf, ilm.type, ilm.msg1, ilm.msg2);
3161 DT_IDC_PRINTF(idc_dbg_str);
3162 #endif
3163 }
3164 }
3165
3166 idc_port.rx_buf = 0;
3167 rx_data = 0;
3168 new_cmd_flag = KAL_FALSE;
3169 old_cmd_flag = KAL_FALSE;
3170 ilm_stage = KAL_FALSE;
3171 idc_cmd_byte_count = 0;
3172 IDC_CMD_SUCCESS_CNT[IDC_CMD_SUCCESS_CNT_IDX]++;
3173 }
3174 if(idc_ilm_trigger == KAL_TRUE)
3175 {
3176 MM_Sync();
3177 return;
3178 }
3179
3180 }
3181 MM_Sync();
3182 return;
3183}
3184#endif
3185
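/*
 * idc_pm_lisr(): IDC power-management interrupt. On MD95/MD97 everything is
 * handled in LISR context: status bits 0-3 fan out to the registered EL1
 * callbacks, bit 4 assembles the "new PM" status word (CC status, CC4 on MD97,
 * plus the RF-path debug bits) for callback slot 4, error statuses are logged
 * together with the error RX buffer, and the status registers are
 * write-1-cleared. Older chips just activate IDC_PM_HISR and do the callback
 * fan-out there.
 */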
3186void idc_pm_lisr(kal_uint32 vector)
3187{
3188 IRQMask(MD_IRQID_IDC_PM_INT);
3189
3190#if defined(__MD95__) || defined(__MD97__) || defined(__MD97P__)
3191 kal_uint32 i, pm_status;
3192 kal_uint16 status = 0, cc_status, rf_path_status;
3193
3194 pm_status = DRV_Reg32(IDC_PM_STATUS);
3195
3196 //old PM status
3197 for (i = 0; i < 4; ++i)
3198 {
3199 if (pm_status & (1 << i))
3200 {
3201 // Callback to EL1
3202 if (idc_port.pm_cb_handle[i].callback_func)
3203 idc_port.pm_cb_handle[i].callback_func(idc_port.pm_cb_handle[i].private_data);
3204 }
3205 }
3206
3207#if defined(__MD95__)
3208 //new PM status
3209 if (pm_status & (1 << 4))
3210 {
3211 cc_status = DRV_Reg8(IDC_CC_STATUS);
3212 rf_path_status = DRV_Reg8(IDC_NEW_PM_DEBUG);
3213 status = cc_status | (rf_path_status << 8);
3214 // Callback to EL1
3215 if (idc_port.pm_cb_handle[4].callback_func)
3216 {
3217 idc_port.pm_cb_handle[4].callback_func(&status);
3218 }
3219
3220 }
3221 MD_TRC(IDC_IDC_PM_LISR_STS_MSG, status);
3222#if !defined(__MAUI_BASIC__)
3223 kal_uint8 err_rx_buf;
3224 //new PM error status
3225 for (i = 5; i < 8; ++i)
3226 {
3227 err_rx_buf = DRV_Reg8(IDC_NEW_PM_ERR_RX_BUFF);
3228 if (pm_status & (1 << i))
3229 {
3230 MD_TRC(IDC_NEW_PM_ERROR_MSG, pm_status, err_rx_buf);
3231 }
3232 }
3233#endif
3234
3235#elif defined(__MD97__) || defined(__MD97P__)
3236 kal_uint16 cc_status_2 = 0;
3237 //new PM status
3238 if (pm_status & (1 << 4))
3239 {
3240 cc_status = DRV_Reg8(IDC_CC_STATUS);
3241 cc_status_2 = DRV_Reg8(IDC_CC4_STATUS);
3242 rf_path_status = DRV_Reg8(IDC_NEW_PM_DEBUG);
3243 status = cc_status | ((cc_status_2 & 0xF) << 8)| ((rf_path_status & 0xF) << 12);
3244 // Callback to EL1
3245 if (idc_port.pm_cb_handle[4].callback_func)
3246 {
3247 idc_port.pm_cb_handle[4].callback_func(&status);
3248 }
3249
3250 }
3251 MD_TRC(IDC_IDC_PM_LISR_STS_MSG, status);
3252#if !defined(MT6297)
3253#if !defined(__MAUI_BASIC__)
3254 kal_uint8 err_rx_buf;
3255 kal_uint8 err_status = 0;
3256
3257 err_status = DRV_Reg8(IDC_STATUS_1);
3258
3259 // new PM error status
3260 for(i = 0; i < IDC_NEW_PM_ERR_NUM; i++)
3261 {
3262 err_rx_buf = DRV_Reg8(IDC_NEW_PM_ERR_RX_BUFF);
3263 if (err_status & (1 << i))
3264 {
3265 MD_TRC(IDC_NEW_PM_ERROR_MSG, err_status, err_rx_buf);
3266 }
3267 }
3268#endif
3269 // Write 1 to clear ERR status
3270 DRV_WriteReg8(IDC_STATUS_1, 0xff);
3271 MO_Sync();
3272
3273#else
3274#if !defined(__MAUI_BASIC__)
3275 kal_uint8 err_rx_buf;
3276 //new PM error status
3277 for (i = 5; i < 8; ++i)
3278 {
3279 err_rx_buf = DRV_Reg8(IDC_NEW_PM_ERR_RX_BUFF);
3280 if (pm_status & (1 << i))
3281 {
3282 MD_TRC(IDC_NEW_PM_ERROR_MSG,pm_status, err_rx_buf);
3283 }
3284 }
3285#endif
3286#endif
3287
3288#endif
3289 // Write 1 to clear all status
3290 DRV_WriteReg8(IDC_PM_STATUS, 0xff);
3291 MO_Sync();
3292
3293 IRQUnmask(MD_IRQID_IDC_PM_INT);
3294#else
3295 kal_activate_hisr_index(IDC_PM_HISR);
3296#endif
3297
3298 return;
3299}
3300
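/*
 * idc_pm_hisr(): legacy (pre-MD95) PM handler; fans PM status bits 0-3 out to
 * the registered callbacks, write-1-clears IDC_PM_STATUS and unmasks the PM IRQ.
 */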
3301void idc_pm_hisr(void)
3302{
3303 idc_in_pm_hisr = KAL_TRUE;
3304
3305#if (!defined(__MD95__)) && (!defined(__MD97__)) && (!defined(__MD97P__))
3306
3307 kal_uint32 i, pm_status;
3308 pm_status = DRV_Reg32(IDC_PM_STATUS);
3309#ifdef ATEST_DRV_ENABLE
3310// DT_IDC_PRINTF("drv_idc: PM HISR %d, pm_idx = %x\n\r", ++idc_isr_count, pm_status);
3311#endif
3312 //old PM status
3313 for (i = 0; i < 4; ++i)
3314 {
3315 if (pm_status & (1 << i))
3316 {
3317 // Callback to EL1
3318 if (idc_port.pm_cb_handle[i].callback_func)
3319 idc_port.pm_cb_handle[i].callback_func(idc_port.pm_cb_handle[i].private_data);
3320 }
3321 }
3322
3323 // Write 1 to clear all status
3324 DRV_WriteReg32_NPW(IDC_PM_STATUS, 0xff);
3325#endif
3326 idc_in_pm_hisr = KAL_FALSE;
3327#ifdef ATEST_DRV_ENABLE
3328 MM_Sync();
3329#endif
3330 IRQUnmask(MD_IRQID_IDC_PM_INT);
3331
3332 return;
3333}
3334
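/*
 * drv_idc_schedule_update_n_return_rftx(): MD97-style schedule update that also
 * hands the current RF TX path (IDC_NEW_PM_DEBUG) back to the caller. Compared
 * with drv_idc_schedule_update_97() it builds an explicit stop_event_bitmap,
 * only reads the scheduler status and calls drv_idc_stop_event_97() when
 * something actually has to be stopped, and keeps debug copies of the bitmaps
 * and status words for later traces.
 */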
3335void drv_idc_schedule_update_n_return_rftx(kal_uint32 time, kal_uint8 *rf_path)
3336{
3337 kal_uint32 i = 0, j = 0;
3338 kal_uint32 expired_evt_flag = 0, stop_event_flag = 0;
3339
3340#if !defined(__MAUI_BASIC__)
3341 kal_uint32 tx_cnt = 0;
3342#endif
3343
3344 //clear stop_event_bitmap
3345 stop_event_bitmap = 0x0;
3346 stop_event_bitmap32_0_15 = 0x0;
3347 stop_event_bitmap32_16_31 = 0x0;
3348
3349 // Log FRC_TIME information
3350 //MD_TRC(IDC_SCHEDULE_UPDATE_MSG, (time & 0x3FFFFFFF));
3351
3352 kal_hrt_take_itc_lock(KAL_ITC_IDC, KAL_INFINITE_WAIT);
3353 before_poll_time_U = ust_get_current_time();
3354 //********protect critical section*******
3355
3356 // Get the FRC from EL1C and keep the lower 30 bits
3357 idc_port.frc_time = time & 0x3FFFFFFF;
3358
3359
3360 //return without doing anything if schedule_update arrives after slp_ntf
3361 if(idc_port.main_state == IDC_IN_SLEEP)
3362 {
3363 after_poll_time_U = ust_get_current_time();
3364 kal_hrt_give_itc_lock(KAL_ITC_IDC);
3365
3366 //return rf_path to MD_IDC
3367 *rf_path = DRV_Reg8(IDC_NEW_PM_DEBUG);
3368
3369 return;
3370 }
3371
3372 //idc_ctrl_enter(IDC_CTRL_LOCK);//remove because LTE/NR RAT flow
3373
3374 // Stop uncompleted events
3375 if(idc_port.event_usage_bit_map != 0x1)
3376 {
3377 stop_event_flag = 1;
3378 for(i = 1; i < IDC_MAX_EVENT_NUM; i++)
3379 {
3380 if ((1 << i) & idc_port.event_usage_bit_map)
3381 {
3382 if(TIME_DIFF_WITHIN(idc_port.frc_time, idc_port.event_offset_table[i], 10000))
3383 {
3384 //clear all event state
3385 stop_event_bitmap = (stop_event_bitmap | (0x1 << i));
3386 idc_port.event_cnt--;
3387 idc_port.event_usage_bit_map &= ~(0x1 << i);
3388 idc_port.event_offset_table[i] = 0;
3389 for(j = 0; j < 9; j++)
3390 {
3391 idc_port.event_data_table[i][j] = 0;
3392 }
3393 for(j = 0; j < idc_port.event_byte_num[i]; j++)
3394 {
3395 idc_port.sram_table_usage[idc_port.event_sram_sta_idx[i] + j] = 0;
3396 }
3397 }
3398 }
3399 }
3400
3401 for(j = 1; j < IDC_MAX_EVENT_NUM; j++)
3402 {
3403 if((stop_event_bitmap >> j) & 0x1){
3404 if(j < 16)
3405 stop_event_bitmap32_0_15 = (stop_event_bitmap32_0_15 | (0x3 << (j*2)));
3406 else
3407 stop_event_bitmap32_16_31 = (stop_event_bitmap32_16_31 | (0x3 << ((j-16)*2)));
3408 }
3409 }
3410 }
3411
3412 if(idc_port.event_usage_bit_map == 0x1)
3413 idc_port.schedule_state = IDC_PLAN;
3414
3415 if(stop_event_flag == 1){
3416 event_status_0_15 = DRV_Reg32(IDC_CTRL_SCH_STATUS);
3417 event_status_16_31 = DRV_Reg32(IDC_CTRL_SCH_STATUS2);
3418
3419 expire_event_status_0_15 = stop_event_bitmap32_0_15 & event_status_0_15;
3420 expire_event_status_16_31 = stop_event_bitmap32_16_31 & event_status_16_31;
3421
3422 if(expire_event_status_0_15 | expire_event_status_16_31)
3423 {
3424#ifdef ATEST_DRV_ENABLE
3425 DT_IDC_PRINTF("drv_idc: [Warning] Some events are expired in scheduler & stopped. Status = %x \n\r", expire_event_status_0_15);
3426 while(1);
3427#endif
3428 expired_evt_flag = 1;
3429 }
3430 before_poll_time_STOP_EVENT = ust_get_current_time();
3431 if(stop_event_bitmap != 0x0)
3432 drv_idc_stop_event_97(stop_event_bitmap);
3433 after_poll_time_STOP_EVENT = ust_get_current_time();
3434 }
3435
3436 MM_Sync();
3437
3438 //idc_ctrl_leave(IDC_CTRL_LOCK);
3439 //********protect critical section*******
3440 after_poll_time_U = ust_get_current_time();
3441 kal_hrt_give_itc_lock(KAL_ITC_IDC);
3442
3443#if !defined(__MAUI_BASIC__)
3444 //get tx count
3445 tx_cnt = DRV_Reg32(IDC_CTRL_DATA_CNT) & 0xFFFF;
3446
3447 //print tx_count log
3448 if(idc_port.schedule_state == IDC_RUN)
3449 MD_TRC(IDC_TX_COUNT_MSG, tx_cnt);
3450#endif
3451
3452 if(expired_evt_flag == 1)
3453 MD_TRC(IDC_EVENTS_STILL_BUSY_2_MSG, expire_event_status_0_15, event_status_0_15, expire_event_status_16_31, event_status_16_31);
3454
3455 //return rf_path to MD_IDC
3456 *rf_path = DRV_Reg8(IDC_NEW_PM_DEBUG);
3457
3458 return;
3459
3460}
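/*
 * drv_idc_schedule_event_lte_nr(): schedules one multi-byte IDC event for LTE,
 * NR or the common pool (see the bitmap layout in the "event configuration"
 * comment below). After the RAT-asleep and more-than-10-ms-ahead checks it looks
 * for a free event index in the requested partition and a contiguous run of
 * event.num bytes in the data SRAM, packs the payload into the 32-bit
 * IDC_CTRL_EVT_DATA registers, and falls back to drv_idc_return_drop_cmd_lte_nr()
 * whenever no index or SRAM space is available.
 */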
3461kal_bool drv_idc_schedule_event_lte_nr(IDC_EVENT_T event, kal_uint8 event_type,IDC_CTRL_DROP_CMD_T *drop_cmd)
3462{
3463 kal_uint32 i = 0, drop_cmd_flag = 0, mask = 0xFFFFFFFF;
3464 kal_uint32 w_data = 0, w_idx = 0, value = 0, tmp_sram_idx = 0, end_sram_idx = 0;
3465
3466 kal_hrt_take_itc_lock(KAL_ITC_IDC, KAL_INFINITE_WAIT);
3467
3468 //If the RAT is asleep, always just return.
3469 if(event_type == EVENT_LTE){
3470 if(LTE_FLAG == KAL_FALSE){
3471 kal_hrt_give_itc_lock(KAL_ITC_IDC);
3472 MD_TRC(IDC_SCHEDULE_IN_SLEEP_MSG, event_type);
3473
3474#ifdef ATEST_DRV_ENABLE
3475 DT_IDC_PRINTF("drv_idc: schedule when LTE RAT DISABLE\n\r");
3476#endif
3477 return KAL_FALSE;
3478 }
3479 }
3480
3481 else if(event_type == EVENT_NR){
3482 if(NR_FLAG == KAL_FALSE){
3483 kal_hrt_give_itc_lock(KAL_ITC_IDC);
3484 MD_TRC(IDC_SCHEDULE_IN_SLEEP_MSG, event_type);
3485
3486#ifdef ATEST_DRV_ENABLE
3487 DT_IDC_PRINTF("drv_idc: schedule when NR RAT DISABLE\n\r");
3488#endif
3489 return KAL_FALSE;
3490 }
3491 }
3492
3493 else if(event_type == EVENT_COMMON){
3494 if((LTE_FLAG == KAL_FALSE) && (NR_FLAG == KAL_FALSE)){
3495 kal_hrt_give_itc_lock(KAL_ITC_IDC);
3496 MD_TRC(IDC_SCHEDULE_IN_SLEEP_MSG, event_type);
3497
3498#ifdef ATEST_DRV_ENABLE
3499 DT_IDC_PRINTF("drv_idc: schedule when LTE AND NR RAT DISABLE\n\r");
3500#endif
3501 return KAL_FALSE;
3502 }
3503 }
3504
3505 before_poll_time_SCH = ust_get_current_time();
3506 //********protect critical section*******
3507 //idc_ctrl_enter(IDC_CTRL_LOCK);//remove because LTE/NR RAT flow
3508 idc_port.schedule_state = IDC_RUN;
3509
3510 event.offset = event.offset & 0x3FFFFFFF;
3511
3512 if(TIME_DIFF_EXCEED(event.offset, idc_port.frc_time, 10000))
3513 {
3514 kal_hrt_give_itc_lock(KAL_ITC_IDC);
3515 MD_TRC(IDC_SCHEDULE_OVER_10MS_MSG, idc_port.frc_time, event.offset, event.data[0], event.data[1]);
3516 //idc_ctrl_leave(IDC_CTRL_LOCK);
3517 //********protect critical section*******
3518 return KAL_FALSE;
3519 }
3520
3521 /************** event configuration **************/
3523 //immediate event -> 0x00000001
3524 //LTE event -> 0x0000FFFE
3525 //Common event -> 0x00010000
3526 //NR event -> 0xFFFE0000
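 //
 // The bit indices behind those masks (the actual start/end values come
 // from the IDC_*_STA_EVENT_IDX / IDC_*_MAX_EVENT_IDX macros, presumably
 // defined in the IDC headers):
 //   bit 0       : immediate event  (0x00000001)
 //   bits 1..15  : LTE events       (0x0000FFFE)
 //   bit 16      : common event     (0x00010000)
 //   bits 17..31 : NR events        (0xFFFE0000)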
3527
3528 //event type switch LTE/NR/Common
3529 if(event_type == EVENT_LTE){
3530
3531 if((idc_port.event_usage_bit_map & (0x0000FFFE)) != 0x0000FFFE) // LTE events not full
3532 {
3533 event_idx_wrap = 0;
3534 // Find empty event
3535 while (1)
3536 {
3537 if ((1 << idc_port.event_w_index_lte) & idc_port.event_usage_bit_map)
3538 {
3539 idc_port.event_w_index_lte++;
3540 if (idc_port.event_w_index_lte > IDC_LTE_MAX_EVENT_IDX)
3541 {
3542 if(event_idx_wrap == 1)
3543 {
3544 //if EVENT_IDX_NOT_FOUND, return drop_cmd
3545 //********protect critical section*******
3546 after_poll_time_SCH = ust_get_current_time();
3547 kal_hrt_give_itc_lock(KAL_ITC_IDC);
3548
3549 MD_TRC(IDC_SCHEDULE_EVENT_IDX_NOT_FOUND_MSG);
3550 drv_idc_return_drop_cmd_lte_nr(event, drop_cmd, event_type);
3551
3552 return KAL_FALSE;
3553
3554 }
3555 idc_port.event_w_index_lte = IDC_LTE_STA_EVENT_IDX;
3556 event_idx_wrap = 1;
3557 }
3558 }
3559 else
3560 {
3561 break;
3562 }
3563 }
3564
3565 // Find empty sram_w_idx
3566 sram_wrap = 0;
3567 while (1)
3568 {
3569 if (idc_port.sram_table_usage[idc_port.sram_w_index] == 1)
3570 {
3571 idc_port.sram_w_index++;
3572 if (idc_port.sram_w_index == IDC_MAX_SRAM_SIZE)
3573 {
3574 if(sram_wrap == 1)
3575 {
3576 //if sram full, return drop_cmd
3577 //idc_ctrl_leave(IDC_CTRL_LOCK);
3578 //********protect critical section*******
3579 after_poll_time_SCH = ust_get_current_time();
3580 kal_hrt_give_itc_lock(KAL_ITC_IDC);
3581
3582 MD_TRC(IDC_SCHEDULE_SRAM_FULL_MSG);
3583 drv_idc_return_drop_cmd_lte_nr(event, drop_cmd, event_type);
3584
3585 return KAL_FALSE;
3586 }
3587
3588
3589 idc_port.sram_w_index = IDC_SRAM_WRAP_IDX;
3590 sram_wrap = 1;
3591 }
3592 }
3593 else
3594 {
3595 break;
3596 }
3597 }
3598
3599 //If there is not enough contiguous SRAM space, return drop_cmd
3600 for(i = 0; i < event.num; i++)
3601 {
3602 tmp_sram_idx = idc_port.sram_w_index + i;
3603 //wrap case
3604 if(tmp_sram_idx >= IDC_MAX_SRAM_SIZE)
3605 {
3606 idc_port.sram_w_index = IDC_SRAM_WRAP_IDX;
3607 tmp_sram_idx = IDC_SRAM_WRAP_IDX;
3608 }
3609 //DT_IDC_PRINTF("*** sram_idx : %d***", tmp_sram_idx);
3610 if(idc_port.sram_table_usage[tmp_sram_idx] == 1)
3611 {
3612 //return drop_cmd
3613 //idc_ctrl_leave(IDC_CTRL_LOCK);
3614 //********protect critical section*******
3615 after_poll_time_SCH = ust_get_current_time();
3616 kal_hrt_give_itc_lock(KAL_ITC_IDC);
3617
3618 MD_TRC(IDC_SCHEDULE_NO_SEQUENTIAL_SRAM_MSG);
3619 drv_idc_return_drop_cmd_lte_nr(event, drop_cmd, event_type);
3620
3621 return KAL_FALSE;
3622 }
3623
3624 }
3625
3626 end_sram_idx = idc_port.sram_w_index + event.num - 1;
3627 if(end_sram_idx >= IDC_MAX_SRAM_SIZE)
3628 end_sram_idx = end_sram_idx - IDC_MAX_SRAM_SIZE + IDC_SRAM_WRAP_IDX;
3629
3630 // Set event data
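 // Packing scheme used below: the event byte at SRAM index n ends up in
 // byte lane (n % 4) of the 32-bit word IDC_CTRL_EVT_DATA(n / 4). The loop
 // accumulates lanes in w_data/mask and flushes with a read-modify-write at
 // each word boundary (and for the final byte), so bytes already written
 // for neighbouring events in the same word are preserved.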
3631 w_idx = idc_port.sram_w_index;
3632
3633 for(i = 0; i < event.num; i++)
3634 {
3635 w_data = w_data | (event.data[i] << (8 * (w_idx % 4)));
3636 mask &= ~(0xFF << (8 * (w_idx % 4)));
3637 w_idx++;
3638 if((w_idx % 4 == 0) || (i == event.num - 1))
3639 {
3640 if(w_idx % 4 == 0)
3641 value = DRV_Reg32(IDC_CTRL_EVT_DATA((w_idx/4) - 1));
3642 else
3643 value = DRV_Reg32(IDC_CTRL_EVT_DATA(w_idx/4));
3644 value &= mask;
3645 value |= w_data;
3646 if(w_idx % 4 == 0)
3647 DRV_WriteReg32(IDC_CTRL_EVT_DATA((w_idx/4) - 1), value);
3648 else
3649 DRV_WriteReg32(IDC_CTRL_EVT_DATA(w_idx/4), value);
3650 w_data = 0;
3651
3652 mask = 0xFFFFFFFF;
3653 }
3654
3655 if(w_idx == IDC_MAX_SRAM_SIZE)
3656 w_idx = IDC_SRAM_WRAP_IDX;
3657
3658 if((idc_port.sram_w_index + i) >= IDC_MAX_SRAM_SIZE)
3659 idc_port.sram_table_usage[idc_port.sram_w_index + i - IDC_MAX_SRAM_SIZE + IDC_SRAM_WRAP_IDX] = 1;
3660 else
3661 idc_port.sram_table_usage[idc_port.sram_w_index + i] = 1;
3662 }
3663
3664 // Set event memory position
3665 DRV_WriteReg32(IDC_CTRL_EVENT_MEM_POS(idc_port.event_w_index_lte), (idc_port.sram_w_index << 8) + end_sram_idx);
3666
3667 // Set time stamps para & trigger event
3668 DRV_WriteReg32(IDC_CTRL_EVENT_SETETING(idc_port.event_w_index_lte), event.offset + (1 << 31));
3669
3670 // Record event_offset & event_data in the table
3671 idc_port.event_offset_table[idc_port.event_w_index_lte] = event.offset;
3672 kal_mem_cpy(idc_port.event_data_table[idc_port.event_w_index_lte], event.data, sizeof(event.data));
3673 idc_port.event_byte_num[idc_port.event_w_index_lte] = event.num;
3674 idc_port.event_sram_sta_idx[idc_port.event_w_index_lte] = idc_port.sram_w_index;
3675 // Record the number and usage bitmap for the scheduler
3676 idc_port.event_usage_bit_map |= (1 << idc_port.event_w_index_lte);
3677 // Add event_cnt
3678 idc_port.event_cnt++;
3679
3680 // Add sram_w_idx
3681 idc_port.sram_w_index += event.num;
3682 if (idc_port.sram_w_index == IDC_MAX_SRAM_SIZE)
3683 idc_port.sram_w_index = idc_port.sram_w_index - IDC_MAX_SRAM_SIZE + IDC_SRAM_WRAP_IDX;
3684
3685 idc_port.event_w_index_lte++;
3686 if (idc_port.event_w_index_lte > IDC_LTE_MAX_EVENT_IDX)
3687 idc_port.event_w_index_lte = IDC_LTE_STA_EVENT_IDX;
3688
3689
3690 }
3691 else
3692 {
3693 //return drop_cmd
3694 drop_cmd_flag = 1;
3695 }
3696 }
3697
3698 else if(event_type == EVENT_NR){
3699 if((idc_port.event_usage_bit_map & (0xFFFE0000)) != 0xFFFE0000) // NR events not full
3700 {
3701 event_idx_wrap = 0;
3702 // Find empty event
3703 while (1)
3704 {
3705 if ((1 << idc_port.event_w_index_nr) & idc_port.event_usage_bit_map)
3706 {
3707 idc_port.event_w_index_nr++;
3708 if (idc_port.event_w_index_nr > IDC_NR_MAX_EVENT_IDX)
3709 {
3710 if(event_idx_wrap == 1)
3711 {
3712 //if EVENT_IDX_NOT_FOUND, return drop_cmd
3713 //********protect critical section*******
3714 after_poll_time_SCH = ust_get_current_time();
3715 kal_hrt_give_itc_lock(KAL_ITC_IDC);
3716
3717 MD_TRC(IDC_SCHEDULE_EVENT_IDX_NOT_FOUND_MSG);
3718 drv_idc_return_drop_cmd_lte_nr(event, drop_cmd, event_type);
3719
3720 return KAL_FALSE;
3721
3722 }
3723 idc_port.event_w_index_nr = IDC_NR_STA_EVENT_IDX;
3724 event_idx_wrap = 1;
3725 }
3726 }
3727 else
3728 {
3729 break;
3730 }
3731 }
3732
3733 // Find empty sram_w_idx
3734 sram_wrap = 0;
3735 while (1)
3736 {
3737 if (idc_port.sram_table_usage[idc_port.sram_w_index] == 1)
3738 {
3739 idc_port.sram_w_index++;
3740 if (idc_port.sram_w_index == IDC_MAX_SRAM_SIZE)
3741 {
3742 if(sram_wrap == 1)
3743 {
3744 //if sram full, return drop_cmd
3745 //idc_ctrl_leave(IDC_CTRL_LOCK);
3746 //********protect critical section*******
3747 after_poll_time_SCH = ust_get_current_time();
3748 kal_hrt_give_itc_lock(KAL_ITC_IDC);
3749
3750 MD_TRC(IDC_SCHEDULE_SRAM_FULL_MSG);
3751 drv_idc_return_drop_cmd_lte_nr(event, drop_cmd, event_type);
3752
3753 return KAL_FALSE;
3754 }
3755
3756
3757 idc_port.sram_w_index = IDC_SRAM_WRAP_IDX;
3758 sram_wrap = 1;
3759 }
3760 }
3761 else
3762 {
3763 break;
3764 }
3765 }
3766
3767 //If there is not enough contiguous SRAM space, return drop_cmd
3768 for(i = 0; i < event.num; i++)
3769 {
3770 tmp_sram_idx = idc_port.sram_w_index + i;
3771 //wrap case
3772 if(tmp_sram_idx >= IDC_MAX_SRAM_SIZE)
3773 {
3774 idc_port.sram_w_index = IDC_SRAM_WRAP_IDX;
3775 tmp_sram_idx = IDC_SRAM_WRAP_IDX;
3776 }
3777 //DT_IDC_PRINTF("*** sram_idx : %d***", tmp_sram_idx);
3778 if(idc_port.sram_table_usage[tmp_sram_idx] == 1)
3779 {
3780 //return drop_cmd
3781 //idc_ctrl_leave(IDC_CTRL_LOCK);
3782 //********protect critical section*******
3783 after_poll_time_SCH = ust_get_current_time();
3784 kal_hrt_give_itc_lock(KAL_ITC_IDC);
3785
3786 MD_TRC(IDC_SCHEDULE_NO_SEQUENTIAL_SRAM_MSG);
3787 drv_idc_return_drop_cmd_lte_nr(event, drop_cmd, event_type);
3788
3789 return KAL_FALSE;
3790 }
3791
3792 }
3793
3794 end_sram_idx = idc_port.sram_w_index + event.num - 1;
3795 if(end_sram_idx >= IDC_MAX_SRAM_SIZE)
3796 end_sram_idx = end_sram_idx - IDC_MAX_SRAM_SIZE + IDC_SRAM_WRAP_IDX;
3797
3798 // Set event data
3799 w_idx = idc_port.sram_w_index;
3800
3801 for(i = 0; i < event.num; i++)
3802 {
3803 w_data = w_data | (event.data[i] << (8 * (w_idx % 4)));
3804 mask &= ~(0xFF << (8 * (w_idx % 4)));
3805 w_idx++;
3806 if((w_idx % 4 == 0) || (i == event.num - 1))
3807 {
3808 if(w_idx % 4 == 0)
3809 value = DRV_Reg32(IDC_CTRL_EVT_DATA((w_idx/4) - 1));
3810 else
3811 value = DRV_Reg32(IDC_CTRL_EVT_DATA(w_idx/4));
3812 value &= mask;
3813 value |= w_data;
3814 if(w_idx % 4 == 0)
3815 DRV_WriteReg32(IDC_CTRL_EVT_DATA((w_idx/4) - 1), value);
3816 else
3817 DRV_WriteReg32(IDC_CTRL_EVT_DATA(w_idx/4), value);
3818 w_data = 0;
3819
3820 mask = 0xFFFFFFFF;
3821 }
3822
3823 if(w_idx == IDC_MAX_SRAM_SIZE)
3824 w_idx = IDC_SRAM_WRAP_IDX;
3825
3826 if((idc_port.sram_w_index + i) >= IDC_MAX_SRAM_SIZE)
3827 idc_port.sram_table_usage[idc_port.sram_w_index + i - IDC_MAX_SRAM_SIZE + IDC_SRAM_WRAP_IDX] = 1;
3828 else
3829 idc_port.sram_table_usage[idc_port.sram_w_index + i] = 1;
3830 }
3831
3832 // Set event memory position
3833 DRV_WriteReg32(IDC_CTRL_EVENT_MEM_POS(idc_port.event_w_index_nr), (idc_port.sram_w_index << 8) + end_sram_idx);
3834
3835 // Set time stamps para & trigger event
3836 DRV_WriteReg32(IDC_CTRL_EVENT_SETETING(idc_port.event_w_index_nr), event.offset + (1 << 31));
3837
3838 // Record event_offset & event_data in the table
3839 idc_port.event_offset_table[idc_port.event_w_index_nr] = event.offset;
3840 kal_mem_cpy(idc_port.event_data_table[idc_port.event_w_index_nr], event.data, sizeof(event.data));
3841 idc_port.event_byte_num[idc_port.event_w_index_nr] = event.num;
3842 idc_port.event_sram_sta_idx[idc_port.event_w_index_nr] = idc_port.sram_w_index;
3843 // Record the number and usage bitmap for the scheduler
3844 idc_port.event_usage_bit_map |= (1 << idc_port.event_w_index_nr);
3845 // Add event_cnt
3846 idc_port.event_cnt++;
3847
3848 // Add sram_w_idx
3849 idc_port.sram_w_index += event.num;
3850 if (idc_port.sram_w_index == IDC_MAX_SRAM_SIZE)
3851 idc_port.sram_w_index = idc_port.sram_w_index - IDC_MAX_SRAM_SIZE + IDC_SRAM_WRAP_IDX;
3852
3853 idc_port.event_w_index_nr++;
3854 if (idc_port.event_w_index_nr > IDC_NR_MAX_EVENT_IDX)
3855 idc_port.event_w_index_nr = IDC_NR_STA_EVENT_IDX;
3856
3857
3858 }
3859 else
3860 {
3861 //return drop_cmd
3862 drop_cmd_flag = 1;
3863 }
3864
3865 }
3866 else if(event_type == EVENT_COMMON){
3867
3868 idc_port.event_w_index_com = IDC_COMMON_STA_EVENT_IDX;
3869 if((idc_port.event_usage_bit_map & (0x00010000)) != 0x00010000) // Common event not in use
3870 {
3871 // Find empty sram_w_idx
3872 sram_wrap = 0;
3873 while (1)
3874 {
3875 if (idc_port.sram_table_usage[idc_port.sram_w_index] == 1)
3876 {
3877 idc_port.sram_w_index++;
3878 if (idc_port.sram_w_index == IDC_MAX_SRAM_SIZE)
3879 {
3880 if(sram_wrap == 1)
3881 {
3882 //if sram full, return drop_cmd
3883 //idc_ctrl_leave(IDC_CTRL_LOCK);
3884 //********protect critical section*******
3885 after_poll_time_SCH = ust_get_current_time();
3886 kal_hrt_give_itc_lock(KAL_ITC_IDC);
3887
3888 MD_TRC(IDC_SCHEDULE_SRAM_FULL_MSG);
3889 drv_idc_return_drop_cmd_lte_nr(event, drop_cmd, event_type);
3890
3891 return KAL_FALSE;
3892 }
3893
3894
3895 idc_port.sram_w_index = IDC_SRAM_WRAP_IDX;
3896 sram_wrap = 1;
3897 }
3898 }
3899 else
3900 {
3901 break;
3902 }
3903 }
3904
3905 //If there is not enough contiguous SRAM space, return drop_cmd
3906 for(i = 0; i < event.num; i++)
3907 {
3908 tmp_sram_idx = idc_port.sram_w_index + i;
3909 //wrap case
3910 if(tmp_sram_idx >= IDC_MAX_SRAM_SIZE)
3911 {
3912 idc_port.sram_w_index = IDC_SRAM_WRAP_IDX;
3913 tmp_sram_idx = IDC_SRAM_WRAP_IDX;
3914 }
3915 //DT_IDC_PRINTF("*** sram_idx : %d***", tmp_sram_idx);
3916 if(idc_port.sram_table_usage[tmp_sram_idx] == 1)
3917 {
3918 //return drop_cmd
3919 //idc_ctrl_leave(IDC_CTRL_LOCK);
3920 //********protect critical section*******
3921 after_poll_time_SCH = ust_get_current_time();
3922 kal_hrt_give_itc_lock(KAL_ITC_IDC);
3923
3924 MD_TRC(IDC_SCHEDULE_NO_SEQUENTIAL_SRAM_MSG);
3925 drv_idc_return_drop_cmd_lte_nr(event, drop_cmd, event_type);
3926
3927 return KAL_FALSE;
3928 }
3929
3930 }
3931
3932 end_sram_idx = idc_port.sram_w_index + event.num - 1;
3933 if(end_sram_idx >= IDC_MAX_SRAM_SIZE)
3934 end_sram_idx = end_sram_idx - IDC_MAX_SRAM_SIZE + IDC_SRAM_WRAP_IDX;
3935
3936 // Set event data
3937 w_idx = idc_port.sram_w_index;
3938
3939 for(i = 0; i < event.num; i++)
3940 {
3941 w_data = w_data | (event.data[i] << (8 * (w_idx % 4)));
3942 mask &= ~(0xFF << (8 * (w_idx % 4)));
3943 w_idx++;
3944 if((w_idx % 4 == 0) || (i == event.num - 1))
3945 {
3946 if(w_idx % 4 == 0)
3947 value = DRV_Reg32(IDC_CTRL_EVT_DATA((w_idx/4) - 1));
3948 else
3949 value = DRV_Reg32(IDC_CTRL_EVT_DATA(w_idx/4));
3950 value &= mask;
3951 value |= w_data;
3952 if(w_idx % 4 == 0)
3953 DRV_WriteReg32(IDC_CTRL_EVT_DATA((w_idx/4) - 1), value);
3954 else
3955 DRV_WriteReg32(IDC_CTRL_EVT_DATA(w_idx/4), value);
3956 w_data = 0;
3957
3958 mask = 0xFFFFFFFF;
3959 }
3960
3961 if(w_idx == IDC_MAX_SRAM_SIZE)
3962 w_idx = IDC_SRAM_WRAP_IDX;
3963
3964 if((idc_port.sram_w_index + i) >= IDC_MAX_SRAM_SIZE)
3965 idc_port.sram_table_usage[idc_port.sram_w_index + i - IDC_MAX_SRAM_SIZE + IDC_SRAM_WRAP_IDX] = 1;
3966 else
3967 idc_port.sram_table_usage[idc_port.sram_w_index + i] = 1;
3968 }
3969
3970 // Set event memory position
3971 DRV_WriteReg32(IDC_CTRL_EVENT_MEM_POS(idc_port.event_w_index_com), (idc_port.sram_w_index << 8) + end_sram_idx);
3972
3973 // Set time stamps para & trigger event
3974 DRV_WriteReg32(IDC_CTRL_EVENT_SETETING(idc_port.event_w_index_com), event.offset + (1 << 31));
3975
3976 // Record event_offset & event_data in the table
3977 idc_port.event_offset_table[idc_port.event_w_index_com] = event.offset;
3978 kal_mem_cpy(idc_port.event_data_table[idc_port.event_w_index_com], event.data, sizeof(event.data));
3979 idc_port.event_byte_num[idc_port.event_w_index_com] = event.num;
3980 idc_port.event_sram_sta_idx[idc_port.event_w_index_com] = idc_port.sram_w_index;
3981 // Record the number and usage bitmap for the scheduler
3982 idc_port.event_usage_bit_map |= (1 << idc_port.event_w_index_com);
3983 // Add event_cnt
3984 idc_port.event_cnt++;
3985
3986 // Add sram_w_idx
3987 idc_port.sram_w_index += event.num;
3988 if (idc_port.sram_w_index == IDC_MAX_SRAM_SIZE)
3989 idc_port.sram_w_index = idc_port.sram_w_index - IDC_MAX_SRAM_SIZE + IDC_SRAM_WRAP_IDX;
3990
3991 }
3992 else
3993 {
3994 //return drop_cmd
3995 drop_cmd_flag = 1;
3996 }
3997 }
3998 else{
3999 // Error: invalid event_type
4000 MD_TRC(IDC_EVNET_TYPE_ERR_MSG, event_type);
4001 }
4002
4003 //idc_ctrl_leave(IDC_CTRL_LOCK);
4004
4005 //********protect critical section*******
4006 MM_Sync();
4007 after_poll_time_SCH = ust_get_current_time();
4008 kal_hrt_give_itc_lock(KAL_ITC_IDC);
4009
4010 // Log TX information
4011 MD_TRC(IDC_SCHEDULE_LTE_NR_MSG, event.offset, event.data[0], event.data[1], event_type);
4012
4013 if(drop_cmd_flag == 1)
4014 {
4015 MD_TRC(IDC_SCHEDULE_EVENT_FULL_MSG);
4016 drv_idc_return_drop_cmd_lte_nr(event, drop_cmd, event_type);
4017 return KAL_FALSE;
4018 }
4019
4020 return KAL_TRUE;
4021}
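/*
 * drv_idc_return_drop_cmd_lte_nr()
 * Fills *drop_cmd for a command that could not be scheduled: the target
 * phytime, the command type decoded from event.data[0] bits [4:1], the
 * sub-type from event.data[1] bits [7:2] when the type is 0xF, and the
 * originating event_type; the drop is then traced with
 * IDC_SCHEDULE_FAIL_LTE_NR_MSG.
 */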
4022void drv_idc_return_drop_cmd_lte_nr(IDC_EVENT_T event, IDC_CTRL_DROP_CMD_T *drop_cmd, kal_uint8 event_type)
4023{
4024
4025 drop_cmd->cmd_phytime = event.offset;
4026 drop_cmd->cmd_type = (event.data[0] & 0x1E) >> 1;
4027 if(drop_cmd->cmd_type == 0xF)
4028 drop_cmd->cmd_sub_type = (event.data[1] & 0xFC) >> 2;
4029
4030 drop_cmd->event_type = event_type;
4031
4032
4033 //DT_IDC_PRINTF("drop cmd happen!!!!!!!!!!!!\n\r");
4034 //Log schedule fail info.
4035 MD_TRC(IDC_SCHEDULE_FAIL_LTE_NR_MSG, drop_cmd->cmd_phytime, drop_cmd->cmd_type, drop_cmd->cmd_sub_type, drop_cmd->event_type);
4036
4037 return;
4038}
4039
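/*
 * idc_auto_tx_lisr()
 * LISR for MD_IRQID_IDC_UART_TX_FORCE_ON (compiled out on MT6297): masks
 * the IRQ, reads IDC_HW_TX_FORCE_ON to trace the TX-suspend and reset
 * interrupt status bits, clears the interrupt via IDC_HW_IDC_FORCFE_ON_CLR
 * and unmasks the IRQ again.
 */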
4040void idc_auto_tx_lisr(kal_uint32 vector)
4041{
4042#if !defined(MT6297)
4043 IRQMask(MD_IRQID_IDC_UART_TX_FORCE_ON);
4044
4045#if !defined(__MAUI_BASIC__)
4046 kal_uint8 auto_tx_status = 0, tx_susp_int = 0, reset_int = 0;
4047
4048 auto_tx_status = DRV_Reg32(IDC_HW_TX_FORCE_ON);
4049
4050 tx_susp_int = (auto_tx_status >> 2) & 0x3;
4051 reset_int = (auto_tx_status >> 6) & 0x3;
4052
4053 MD_TRC(IDC_AUTO_TX_LISR_MSG, tx_susp_int, reset_int);
4054#endif
4055 DRV_WriteReg8(IDC_HW_IDC_FORCFE_ON_CLR, 0xF);
4056 IRQUnmask(MD_IRQID_IDC_UART_TX_FORCE_ON);
4057#endif
4058 return;
4059}
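/*
 * drv_idc_auto_tx_config()
 * Programs the auto-TX suspend quota (IDC_TX_SUSP_QUOTA_CFG) and the
 * calibration-window / reset quota (IDC_CAL_WINDOW_CFG).
 * AUTO_TX_CON_INIT_FLAG acts as a re-entrancy guard under the KAL_ITC_IDC
 * lock: if the flag is already set the call is dropped and
 * IDC_TAKE_FLAG_FAIL_MSG is traced.
 */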
4060void drv_idc_auto_tx_config(kal_uint8 tx_susp_quota, kal_uint8 reset_quota)
4061{
4062 kal_hrt_take_itc_lock(KAL_ITC_IDC, KAL_INFINITE_WAIT);
4063 if(AUTO_TX_CON_INIT_FLAG == KAL_TRUE){
4064 kal_hrt_give_itc_lock(KAL_ITC_IDC);
4065 MD_TRC(IDC_TAKE_FLAG_FAIL_MSG, __FUNCTION__);
4066 return;
4067 }
4068 else{
4069 AUTO_TX_CON_INIT_FLAG = KAL_TRUE;
4070 MM_Sync();
4071 kal_hrt_give_itc_lock(KAL_ITC_IDC);
4072 }
4073
4074 DRV_WriteReg8(IDC_TX_SUSP_QUOTA_CFG, tx_susp_quota);
4075 DRV_WriteReg8(IDC_CAL_WINDOW_CFG, reset_quota);
4076 MO_Sync();
4077
4078 kal_hrt_take_itc_lock(KAL_ITC_IDC, KAL_INFINITE_WAIT);
4079 AUTO_TX_CON_INIT_FLAG = KAL_FALSE;
4080 MM_Sync();
4081 kal_hrt_give_itc_lock(KAL_ITC_IDC);
4082
4083 MD_TRC(IDC_AUTO_TX_CONFIG_MSG, tx_susp_quota, reset_quota);
4084
4085 return;
4086}
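/*
 * drv_idc_auto_tx_en()
 * Enables or disables the HW auto-denial TX path under the
 * AUTO_TX_EN_INIT_FLAG re-entrancy guard: enable clears IDC_TX_AUTO_DIS and
 * unmasks IDC_HW_TX_FORCFE_ON_MASK; disable sets IDC_TX_AUTO_DIS and masks
 * both force-on interrupt bits.
 */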
4087void drv_idc_auto_tx_en(kal_uint8 auto_tx_en)
4088{
4089 kal_hrt_take_itc_lock(KAL_ITC_IDC, KAL_INFINITE_WAIT);
4090 if(AUTO_TX_EN_INIT_FLAG == KAL_TRUE){
4091 kal_hrt_give_itc_lock(KAL_ITC_IDC);
4092 MD_TRC(IDC_TAKE_FLAG_FAIL_MSG, __FUNCTION__);
4093 return;
4094 }
4095 else{
4096 AUTO_TX_EN_INIT_FLAG = KAL_TRUE;
4097 MM_Sync();
4098 kal_hrt_give_itc_lock(KAL_ITC_IDC);
4099 }
4100
4101 if(auto_tx_en == KAL_TRUE){
4102 DRV_WriteReg8(IDC_HW_TX_FORCFE_ON_MASK, 0x0);
4103 DRV_WriteReg8(IDC_TX_AUTO_DIS, 0x0);
4104 }
4105 else{
4106 DRV_WriteReg8(IDC_TX_AUTO_DIS, 0x1);
4107 DRV_WriteReg8(IDC_HW_TX_FORCFE_ON_MASK, 0x3);
4108 }
4109
4110 MO_Sync();
4111
4112 kal_hrt_take_itc_lock(KAL_ITC_IDC, KAL_INFINITE_WAIT);
4113 AUTO_TX_EN_INIT_FLAG = KAL_FALSE;
4114 MM_Sync();
4115 kal_hrt_give_itc_lock(KAL_ITC_IDC);
4116
4117 MD_TRC(IDC_AUTO_TX_EN_MSG, auto_tx_en);
4118
4119 return;
4120}
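/*
 * drv_idc_auto_tx_dis()
 * Unconditionally disables auto-denial TX: sets IDC_TX_AUTO_DIS and masks
 * both IDC_HW_TX_FORCFE_ON_MASK bits. Used on the sleep path, where the
 * caller already holds the KAL_ITC_IDC lock, so no guard is taken here.
 */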
4121void drv_idc_auto_tx_dis(void)
4122{
4123 DRV_WriteReg8(IDC_TX_AUTO_DIS, 0x1);
4124 DRV_WriteReg8(IDC_HW_TX_FORCFE_ON_MASK, 0x3);
4125 MO_Sync();
4126
4127 return;
4128}
4129
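/*
 * drv_idc_set_enable_rat()
 * Marks a RAT as active for IDC scheduling: sets LTE_FLAG or NR_FLAG under
 * the KAL_ITC_IDC lock and traces either IDC_SET_RAT_MSG or, for an unknown
 * rat_status, IDC_SET_RAT_ERR_MSG.
 */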
4130void drv_idc_set_enable_rat(kal_uint8 rat_status)
4131{
4132 kal_bool err_flag = KAL_FALSE;
4133
4134 //update FLAG...
4135 kal_hrt_take_itc_lock(KAL_ITC_IDC, KAL_INFINITE_WAIT);
4136
4137 if(rat_status == IDC_RAT_LTE)
4138 LTE_FLAG = KAL_TRUE;
4139 else if (rat_status == IDC_RAT_NR)
4140 NR_FLAG = KAL_TRUE;
4141 else
4142 err_flag = KAL_TRUE;
4143
4144 MM_Sync();
4145
4146 kal_hrt_give_itc_lock(KAL_ITC_IDC);
4147
4148 if(err_flag == KAL_TRUE)
4149 MD_TRC(IDC_SET_RAT_ERR_MSG, rat_status);
4150 else
4151 MD_TRC(IDC_SET_RAT_MSG, rat_status);
4152 return;
4153}
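/*
 * drv_idc_set_disable_rat()
 * Clears LTE_FLAG or NR_FLAG; once both RATs are disabled it also clears
 * the IDC/PM/auto-TX init flags so the next activation re-initializes the
 * block. Unlike drv_idc_sleep_notify(), no scheduled events are stopped
 * here.
 */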
4154void drv_idc_set_disable_rat(kal_uint8 rat_status)
4155{
4156 kal_bool err_flag = KAL_FALSE, clr_flag = KAL_FALSE;
4157
4158 //update FLAG...
4159 kal_hrt_take_itc_lock(KAL_ITC_IDC, KAL_INFINITE_WAIT);
4160
4161 if(rat_status == IDC_RAT_LTE)
4162 LTE_FLAG = KAL_FALSE;
4163 else if (rat_status == IDC_RAT_NR)
4164 NR_FLAG = KAL_FALSE;
4165 else
4166 err_flag = KAL_TRUE;
4167
4168 if((LTE_FLAG == KAL_FALSE) && (NR_FLAG == KAL_FALSE)){
4169 //INIT_FLAG_CLR
4170 IDC_INIT_FLAG = KAL_FALSE;
4171 ACTIVATE_FLAG = KAL_FALSE;
4172 PM_INIT_FLAG = KAL_FALSE;
4173 AUTO_TX_CON_INIT_FLAG = KAL_FALSE;
4174 AUTO_TX_EN_INIT_FLAG = KAL_FALSE;
4175 clr_flag = KAL_TRUE;
4176 }
4177
4178 MM_Sync();
4179
4180 kal_hrt_give_itc_lock(KAL_ITC_IDC);
4181
4182 if(err_flag == KAL_TRUE)
4183 MD_TRC(IDC_SET_RAT_ERR_MSG, rat_status);
4184 else
4185 MD_TRC(IDC_DIS_RAT_MSG, rat_status);
4186
4187 if(clr_flag == KAL_TRUE)
4188 MD_TRC(IDC_CLR_IDC_INIT_FLAG_MSG,__FUNCTION__);
4189
4190 return;
4191}
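/*
 * drv_idc_wakeup_notify()
 * Wake-up counterpart of drv_idc_sleep_notify(): re-asserts LTE_FLAG or
 * NR_FLAG under the KAL_ITC_IDC lock so new events are accepted again, and
 * traces IDC_SET_RAT_WAKEUP_MSG.
 */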
4192void drv_idc_wakeup_notify(kal_uint8 rat_status)
4193{
4194 kal_bool err_flag = KAL_FALSE;
4195
4196 //update FLAG...
4197 kal_hrt_take_itc_lock(KAL_ITC_IDC, KAL_INFINITE_WAIT);
4198
4199 if(rat_status == IDC_RAT_LTE)
4200 LTE_FLAG = KAL_TRUE;
4201 else if (rat_status == IDC_RAT_NR)
4202 NR_FLAG = KAL_TRUE;
4203 else
4204 err_flag = KAL_TRUE;
4205
4206 MM_Sync();
4207
4208 kal_hrt_give_itc_lock(KAL_ITC_IDC);
4209
4210 if(err_flag == KAL_TRUE)
4211 MD_TRC(IDC_SET_RAT_ERR_MSG, rat_status);
4212 else
4213 MD_TRC(IDC_SET_RAT_WAKEUP_MSG, rat_status);
4214
4215 return;
4216}
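/*
 * drv_idc_sleep_notify()
 * Per-RAT sleep handler: clears the RAT flag, then either stops only that
 * RAT's events (0x0000FFFE for LTE, 0xFFFE0000 for NR) or, when both RATs
 * are now asleep, stops all scheduled events (0xFFFFFFFE), disables the new
 * PM remapping and auto-denial TX, leaves only the UART TX interrupt
 * enabled, moves the port to IDC_IN_SLEEP and clears the init flags.
 */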
4217void drv_idc_sleep_notify(kal_uint8 rat_status)
4218{
4219 kal_bool err_flag = KAL_FALSE, clr_flag = KAL_FALSE;
4220
4221 /***itc protect region***/
4222 //update LTE/NR FLAG...
4223 kal_hrt_take_itc_lock(KAL_ITC_IDC, KAL_INFINITE_WAIT);
4224 before_poll_time_SLP_NTY = ust_get_current_time();
4225
4226 if(rat_status == IDC_RAT_LTE){
4227 LTE_FLAG = KAL_FALSE;
4228
4229 if((LTE_FLAG == KAL_FALSE) && (NR_FLAG == KAL_FALSE)){
4230 //stop all events
4231 drv_idc_stop_event_97(0xFFFFFFFE);
4232 //disable new PM
4233 DRV_WriteReg32(IDC_REMAPPING_EN, 0);
4234 //disable Auto-denial Tx
4235 drv_idc_auto_tx_dis();
4236 //turn off rx int.
4237 _idc_atomic_lock(&idc_drv_atom_lock,IDC_ATLOCK_PWR_LISR);
4238 _idc_atomic_lock(&idc_drv_atom_lock,IDC_ATLOCK_PWR_HISR);
4239
4240 IRQMask(MD_IRQID_IDC_UART_IRQ);
4241 // Turn off RX INT, turn on TX INT
4242 DRV_WriteReg32_NPW(IDC_UART_IER, IDC_UART_IER_ETBEI);
4243 IRQUnmask(MD_IRQID_IDC_UART_IRQ);
4244
4245 //modify idc_port main state
4246 idc_port.main_state = IDC_IN_SLEEP;
4247
4248 //INIT_FLAG_CLR
4249 IDC_INIT_FLAG = KAL_FALSE;
4250 ACTIVATE_FLAG = KAL_FALSE;
4251 PM_INIT_FLAG = KAL_FALSE;
4252 AUTO_TX_CON_INIT_FLAG = KAL_FALSE;
4253 AUTO_TX_EN_INIT_FLAG = KAL_FALSE;
4254 clr_flag = KAL_TRUE;
4255
4256 _idc_atomic_unlock(&idc_drv_atom_lock,IDC_ATLOCK_PWR_HISR);
4257 _idc_atomic_unlock(&idc_drv_atom_lock,IDC_ATLOCK_PWR_LISR);
4258
4259 }
4260 else{
4261 //stop LTE events
4262 drv_idc_stop_event_97(0x0000FFFE);
4263
4264 }
4265 }
4266 else if (rat_status == IDC_RAT_NR){
4267 NR_FLAG = KAL_FALSE;
4268
4269 if((LTE_FLAG == KAL_FALSE) && (NR_FLAG == KAL_FALSE)){
4270 //stop all events
4271 drv_idc_stop_event_97(0xFFFFFFFE);
4272 //disable new PM
4273 DRV_WriteReg32(IDC_REMAPPING_EN, 0);
4274 //disable Auto-denial Tx
4275 drv_idc_auto_tx_dis();
4276 //turn off rx int.
4277 _idc_atomic_lock(&idc_drv_atom_lock,IDC_ATLOCK_PWR_LISR);
4278 _idc_atomic_lock(&idc_drv_atom_lock,IDC_ATLOCK_PWR_HISR);
4279
4280 IRQMask(MD_IRQID_IDC_UART_IRQ);
4281 // Turn off RX INT, turn on TX INT
4282 DRV_WriteReg32_NPW(IDC_UART_IER, IDC_UART_IER_ETBEI);
4283 IRQUnmask(MD_IRQID_IDC_UART_IRQ);
4284
4285 //modify idc_port main state
4286 idc_port.main_state = IDC_IN_SLEEP;
4287
4288 //INIT_FLAG_CLR
4289 IDC_INIT_FLAG = KAL_FALSE;
4290 ACTIVATE_FLAG = KAL_FALSE;
4291 PM_INIT_FLAG = KAL_FALSE;
4292 AUTO_TX_CON_INIT_FLAG = KAL_FALSE;
4293 AUTO_TX_EN_INIT_FLAG = KAL_FALSE;
4294 clr_flag = KAL_TRUE;
4295
4296 _idc_atomic_unlock(&idc_drv_atom_lock,IDC_ATLOCK_PWR_HISR);
4297 _idc_atomic_unlock(&idc_drv_atom_lock,IDC_ATLOCK_PWR_LISR);
4298
4299 }
4300 else{
4301 drv_idc_stop_event_97(0xFFFE0000);
4302 }
4303 }
4304 else
4305 err_flag = KAL_TRUE;
4306
4307 MM_Sync();
4308 after_poll_time_SLP_NTY = ust_get_current_time();
4309 kal_hrt_give_itc_lock(KAL_ITC_IDC);
4310 /***itc protect region***/
4311 if(err_flag == KAL_TRUE){
4312 MD_TRC(IDC_SET_RAT_ERR_MSG, rat_status);
4313 return;
4314 }
4315 else
4316 MD_TRC(IDC_SET_RAT_SLEEP_NOTIFY_MSG, rat_status);
4317 if(clr_flag == KAL_TRUE){
4318 MD_TRC(IDC_CLR_IDC_INIT_FLAG_MSG,__FUNCTION__);
4319 }
4320
4321 return;
4322}
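/*
 * drv_idc_set_sram_wrap_idx()
 * Writes start_idx to IDC_CTRL_WRAP_REG; presumably this tells the HW at
 * which event-SRAM index the circular data buffer wraps back, matching the
 * IDC_SRAM_WRAP_IDX value used by the scheduling code above.
 */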
4323void drv_idc_set_sram_wrap_idx(kal_uint32 start_idx)
4324{
4325
4326 DRV_WriteReg32_NPW(IDC_CTRL_WRAP_REG, start_idx);
4327 return;
4328}
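/*
 * idc_set_immediate_event()
 * Packs byte_num bytes from buf into the event-SRAM data words (same
 * byte-lane packing as the scheduler path), programs the memory position
 * for event_idx, and writes (0x3 << 30) to IDC_CTRL_EVENT_SETETING to set
 * the immediate type and start the event right away.
 */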
4329void idc_set_immediate_event(kal_uint32 event_idx, kal_uint8* buf, kal_uint32 byte_num, kal_uint32 start_sram_idx, kal_uint32 end_sram_idx)
4330{
4331
4332 kal_uint32 i = 0, w_data = 0, w_idx = 0, mask = 0xFFFFFFFF, value = 0;
4333
4334 w_idx = start_sram_idx;
4335 for(i = 0; i < byte_num; i++)
4336 {
4337 w_data = w_data | ((*(buf + i)) << (8 * (w_idx % 4)));
4338 mask &= ~(0xFF << (8 * (w_idx % 4)));
4339
4340 w_idx++;
4341
4342 if((w_idx % 4 == 0) || (i == byte_num - 1))
4343 {
4344 if(w_idx % 4 == 0)
4345 value = DRV_Reg32(IDC_CTRL_EVT_DATA((w_idx/4) - 1));
4346 else
4347 value = DRV_Reg32(IDC_CTRL_EVT_DATA(w_idx/4));
4348 value &= mask;
4349 value |= w_data;
4350 if(w_idx % 4 == 0)
4351 DRV_WriteReg32(IDC_CTRL_EVT_DATA((w_idx/4) - 1), value);
4352 else
4353 DRV_WriteReg32(IDC_CTRL_EVT_DATA(w_idx/4), value);
4354 w_data = 0;
4355
4356 mask = 0xFFFFFFFF;
4357 }
4358
4359 if(w_idx > IDC_MAX_NUM_BYTE)
4360 ASSERT(0);
4361
4362 }
4363 // Set event memory position
4364 DRV_WriteReg32(IDC_CTRL_EVENT_MEM_POS(event_idx), (start_sram_idx << 8) + end_sram_idx);
4365 //set immediate type & start
4366 DRV_WriteReg32(IDC_CTRL_EVENT_SETETING(event_idx), (0x3 << 30));
4367
4368 return;
4369}
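/*
 * drv_idc_gps_b13_b14_set()
 * Read-modify-writes the per-RAT field of GPS_B13_B14_REG: the kept half is
 * selected with GPS_LTE_MASK/GPS_NR_MASK and the new raw_data is merged in
 * through GPS_LTE_OFS/GPS_NR_OFS. Success or a bad rat_status is traced via
 * IDC_GPS_B13_B14_SET_MSG.
 */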
4370void drv_idc_gps_b13_b14_set(kal_uint8 rat_status, kal_uint16 raw_data)
4371{
4372 kal_uint16 tmp = 0;
4373
4374 if(rat_status == IDC_RAT_LTE){
4375 tmp = (DRV_Reg32(GPS_B13_B14_REG) & GPS_LTE_MASK);
4376 tmp |= (raw_data & GPS_LTE_OFS);
4377 DRV_WriteReg32(GPS_B13_B14_REG, tmp);
4378 MO_Sync();
4379 }
4380 else if(rat_status == IDC_RAT_NR){
4381 tmp = (DRV_Reg32(GPS_B13_B14_REG) & GPS_NR_MASK);
4382 tmp |= (raw_data & GPS_NR_OFS);
4383 DRV_WriteReg32(GPS_B13_B14_REG, tmp);
4384 MO_Sync();
4385 }
4386 else{
4387 //set gps fail, wrong rat_sts
4388 MD_TRC(IDC_GPS_B13_B14_SET_MSG, KAL_FALSE, rat_status, raw_data);
4389 return;
4390 }
4391
4392 //set gps success
4393 MD_TRC(IDC_GPS_B13_B14_SET_MSG, KAL_TRUE, rat_status, raw_data);
4394 return;
4395}
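/*
 * PM callback registration: EL1 registers one callback per PM index
 * (pm_idx < IDC_PM_NUM). On MD93 private_data is a kal_bool flag; on
 * MD95/MD97/MD97P it is an opaque void*, presumably handed back when the
 * callback is invoked from the IDC PM handling path.
 * drv_idc_unregister_pm_callback() clears both fields again.
 */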
4396#if defined(__MD93__)
4397int drv_idc_register_pm_callback(kal_uint8 pm_idx, IDC_DRV_TO_EL1_CALLBACK func_ptr , kal_bool private_data)
4398{
4399 ASSERT(pm_idx < IDC_PM_NUM) ;
4400
4401#ifdef ATEST_DRV_ENABLE
4402 DT_IDC_PRINTF("Register PM Callback, pm_idx = %d\n\r", pm_idx);
4403#endif
4404 idc_port.pm_cb_handle[pm_idx].callback_func = func_ptr;
4405 idc_port.pm_cb_handle[pm_idx].private_data = private_data;
4406 MM_Sync();
4407 return KAL_SUCCESS ;
4408}
4409
4410#elif defined(__MD95__) || defined(__MD97__) || defined(__MD97P__)
4411int drv_idc_register_pm_callback_95(kal_uint8 pm_idx, IDC_DRV_TO_EL1_CALLBACK func_ptr , void *private_data)
4412{
4413 ASSERT(pm_idx < IDC_PM_NUM) ;
4414
4415#ifdef ATEST_DRV_ENABLE
4416 DT_IDC_PRINTF("Register PM Callback, pm_idx = %d\n\r", pm_idx);
4417#endif
4418 idc_port.pm_cb_handle[pm_idx].callback_func = func_ptr;
4419 idc_port.pm_cb_handle[pm_idx].private_data = private_data;
4420 MM_Sync();
4421 return KAL_SUCCESS ;
4422}
4423#endif
4424
4425int drv_idc_unregister_pm_callback(kal_uint8 pm_idx)
4426{
4427 ASSERT(pm_idx < IDC_PM_NUM) ;
4428
4429#ifdef ATEST_DRV_ENABLE
4430 DT_IDC_PRINTF("Unregister PM Callback, pm_idx = %d\n\r", pm_idx);
4431#endif
4432 idc_port.pm_cb_handle[pm_idx].callback_func = NULL;
4433 idc_port.pm_cb_handle[pm_idx].private_data= 0;
4434 MM_Sync();
4435 return KAL_SUCCESS ;
4436}