#include <asm/asm.h>
#include <asm/regdef.h>

#define KSEG0			0x80000000

#define C0_CONFIG		$16
#define C0_TAGLO		$28
#define C0_TAGHI		$29

#define CONF1_DA_SHIFT		7	/* D$ associativity */
#define CONF1_DA_MASK		0x00000380
#define CONF1_DA_BASE		1
#define CONF1_DL_SHIFT		10	/* D$ line size */
#define CONF1_DL_MASK		0x00001c00
#define CONF1_DL_BASE		2
#define CONF1_DS_SHIFT		13	/* D$ sets/way */
#define CONF1_DS_MASK		0x0000e000
#define CONF1_DS_BASE		64
#define CONF1_IA_SHIFT		16	/* I$ associativity */
#define CONF1_IA_MASK		0x00070000
#define CONF1_IA_BASE		1
#define CONF1_IL_SHIFT		19	/* I$ line size */
#define CONF1_IL_MASK		0x00380000
#define CONF1_IL_BASE		2
#define CONF1_IS_SHIFT		22	/* I$ sets/way */
#define CONF1_IS_MASK		0x01c00000
#define CONF1_IS_BASE		64
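
/*
 * Config1 describes each cache's geometry; the code below decodes it as
 * line size = CONF1_xL_BASE << xL field (0 in the field meaning no cache),
 * sets per way = CONF1_xS_BASE << xS field, and associativity =
 * CONF1_xA_BASE + xA field.  The total cache size is then
 * line size * sets/way * associativity.
 */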

#define Index_Invalidate_I	0x00
#define Index_Writeback_Inv_D	0x01

LEAF(_start)

	.set	mips32
	.set	noreorder

	/* save argument registers */
	move	t4, a0
	move	t5, a1
	move	t6, a2
	move	t7, a3

	/* set up stack */
	li	sp, RAMSTART + RAMSIZE - 16
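	/* The 16 bytes left free below the top of RAM are presumably the
	 * o32 argument save area for the first call frame (assumption). */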

#ifdef IMAGE_COPY
	/* Copy the decompressor code to LOADADDR */
	li	t2, LOADADDR
	add	a0, t2, 0
	la	a1, code_start
	la	a2, code_stop
$L1:
	lw	t0, 0(a1)
	sw	t0, 0(a0)
	add	a1, 4
	add	a0, 4
	blt	a1, a2, $L1
	nop				/* branch delay slot */
#endif

	/* At this point we need to invalidate the dcache and */
	/* icache before jumping to the new code */
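	/* Writing back the D$ makes any data stored above (e.g. the copied
	 * decompressor) visible in RAM, and invalidating the I$ keeps stale
	 * instructions from being fetched once we jump to it. */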

1:	/* Get cache sizes */
	mfc0	s0,C0_CONFIG,1

	li	s1,CONF1_DL_MASK
	and	s1,s0
	beq	s1,zero,nodc
	nop

	srl	s1,CONF1_DL_SHIFT
	li	t0,CONF1_DL_BASE
	sll	s1,t0,s1		/* s1 has D$ cache line size */

	li	s2,CONF1_DA_MASK
	and	s2,s0
	srl	s2,CONF1_DA_SHIFT
	addiu	s2,CONF1_DA_BASE	/* s2 now has D$ associativity */

	li	t0,CONF1_DS_MASK
	and	t0,s0
	srl	t0,CONF1_DS_SHIFT
	li	s3,CONF1_DS_BASE
	sll	s3,s3,t0		/* s3 has D$ sets per way */

	multu	s2,s3			/* sets/way * associativity */
	mflo	t0			/* total cache lines */

	multu	s1,t0			/* D$ line size * lines */
	mflo	s2			/* s2 is now D$ size in bytes */
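	/* For example, a 4-way D$ with 32-byte lines and 256 sets per way
	 * works out to 4 * 32 * 256 = 32 KB. */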

	/* Initialize the D$: */
	mtc0	zero,C0_TAGLO
	mtc0	zero,C0_TAGHI

	li	t0,KSEG0		/* Just an address for the first $ line */
	addu	t1,t0,s2		/* + size of cache == end */

1:	cache	Index_Writeback_Inv_D,0(t0)
	bne	t0,t1,1b
	addu	t0,s1			/* delay slot: step to the next line */

nodc:
	/* Now we get to do it all again for the I$ */

	move	s3,zero			/* just in case there is no icache */
	move	s4,zero

	li	t0,CONF1_IL_MASK
	and	t0,s0
	beq	t0,zero,noic
	nop

	srl	t0,CONF1_IL_SHIFT
	li	s3,CONF1_IL_BASE
	sll	s3,t0			/* s3 has I$ cache line size */

	li	t0,CONF1_IA_MASK
	and	t0,s0
	srl	t0,CONF1_IA_SHIFT
	addiu	s4,t0,CONF1_IA_BASE	/* s4 now has I$ associativity */

	li	t0,CONF1_IS_MASK
	and	t0,s0
	srl	t0,CONF1_IS_SHIFT
	li	s5,CONF1_IS_BASE
	sll	s5,t0			/* s5 has I$ sets per way */

	multu	s4,s5			/* sets/way * associativity */
	mflo	t0			/* t0 is now total cache lines */

	multu	s3,t0			/* I$ line size * lines */
	mflo	s4			/* s4 is now I$ size in bytes */

	/* Initialize the I$: */
	mtc0	zero,C0_TAGLO
	mtc0	zero,C0_TAGHI

	li	t0,KSEG0		/* Just an address for the first $ line */
	addu	t1,t0,s4		/* + size of cache == end */

1:	cache	Index_Invalidate_I,0(t0)
	bne	t0,t1,1b
	addu	t0,s3			/* delay slot: step to the next line */
noic:
	/* jump to main */
	move	a0,s4			/* icache size */
	move	a1,s3			/* icache line size */
	move	a2,s2			/* dcache size */
#ifdef IMAGE_COPY
	jal	t2			/* call the copy at LOADADDR */
#else
	jal	entry
#endif
	move	a3,s1			/* dcache line size (branch delay slot) */
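	/* a0-a3 carry the I$/D$ sizes and line sizes into the entry point,
	 * presumably so the decompressor can repeat the cache writeback and
	 * invalidate after it has written the kernel image (assumption). */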

	.set	reorder
	END(_start)