lh | 9ed821d | 2023-04-07 01:36:19 -0700 | [diff] [blame] | 1 | /*******************************************************************************
|
| 2 | * Copyright (C) 2007, ZTE Corporation.
|
| 3 | *
|
| 4 | * File Name:
|
| 5 | * File Mark:
|
| 6 | * Description:
|
| 7 | * Others:
|
| 8 | * Version: 1.0
|
| 9 | * Author: geanfeng
|
| 10 | * Date: 2013-09-25
|
| 11 | * History 1:
|
| 12 | * Date:
|
| 13 | * Version:
|
| 14 | * Author:
|
| 15 | * Modification:
|
| 16 | * History 2:
|
| 17 | ********************************************************************************/
|
| 18 |
|
| 19 | #ifndef _DRVS_BITOPS_H_
|
| 20 | #define _DRVS_BITOPS_H_
|
| 21 |
|
| 22 | #ifdef __cplusplus
|
| 23 | extern "C"
|
| 24 | {
|
| 25 | #endif
|
| 26 |
|
| 27 | /****************************************************************************
|
| 28 | * Include files
|
| 29 | ****************************************************************************/
|
| 30 |
|
| 31 | /****************************************************************************
|
| 32 | * Macros
|
| 33 | ****************************************************************************/
|
| 34 |
|
| 35 |
|
| 36 | /****************************************************************************
|
| 37 | * Types
|
| 38 | ****************************************************************************/
|
| 39 |
|
| 40 | /****************************************************************************
|
| 41 | * Constants
|
| 42 | ****************************************************************************/
|
| 43 |
|
| 44 | /****************************************************************************
|
| 45 | * Global Variables
|
| 46 | ****************************************************************************/
|
| 47 |
|
| 48 | /****************************************************************************
|
| 49 | * Function Prototypes
|
| 50 | ****************************************************************************/
|
| 51 | #ifndef set_bit
|
| 52 | /*
|
| 53 | * These functions are the basis of our bit ops.
|
| 54 | *
|
| 55 | * First, the atomic bitops. These use native endian.
|
| 56 | */
|
/*
 * Atomically set bit 'bitnum' (native endian) in the bitmap at 'p'.
 * Words are treated as 32-bit: bitnum selects word (bitnum >> 5) and
 * bit (bitnum & 31). Atomicity comes from LOCK_SAVE/LOCK_RESTORE
 * around the read-modify-write.
 */
static inline void ____atomic_set_bit(unsigned int bitnum, volatile unsigned long *p)
{
    volatile unsigned long *word = p + (bitnum >> 5);
    unsigned long bit = 1UL << (bitnum & 31);
    unsigned long flags;

    LOCK_SAVE(flags);
    *word |= bit;
    LOCK_RESTORE(flags);
}
|
| 68 |
|
/*
 * Atomically clear bit 'bitnum' (native endian) in the bitmap at 'p'.
 * Word/bit selection as in ____atomic_set_bit; protected by
 * LOCK_SAVE/LOCK_RESTORE.
 */
static inline void ____atomic_clear_bit(unsigned int bitnum, volatile unsigned long *p)
{
    volatile unsigned long *word = p + (bitnum >> 5);
    unsigned long bit = 1UL << (bitnum & 31);
    unsigned long flags;

    LOCK_SAVE(flags);
    *word &= ~bit;
    LOCK_RESTORE(flags);
}
|
| 80 |
|
/*
 * Atomically toggle bit 'bitnum' (native endian) in the bitmap at 'p'.
 * Word/bit selection as in ____atomic_set_bit; protected by
 * LOCK_SAVE/LOCK_RESTORE.
 */
static inline void ____atomic_change_bit(unsigned int bitnum, volatile unsigned long *p)
{
    volatile unsigned long *word = p + (bitnum >> 5);
    unsigned long bit = 1UL << (bitnum & 31);
    unsigned long flags;

    LOCK_SAVE(flags);
    *word ^= bit;
    LOCK_RESTORE(flags);
}
|
| 92 |
|
/*
 * Atomically set bit 'bitnum' in the bitmap at 'p' and return its
 * previous value (0 or 1). Protected by LOCK_SAVE/LOCK_RESTORE.
 *
 * Fix: the snapshot of the word was held in an 'unsigned int'. Where
 * 'unsigned long' is wider than 32 bits (LP64) that truncated the word
 * and the write-back zeroed its upper bits, corrupting neighbouring
 * state. Hold the snapshot in 'unsigned long' instead.
 */
static inline int
____atomic_test_and_set_bit(unsigned int bitnum, volatile unsigned long *p)
{
    unsigned long flags;
    unsigned long res;
    unsigned long mask = 1UL << (bitnum & 31);

    p += bitnum >> 5;

    LOCK_SAVE(flags);
    res = *p;
    *p = res | mask;
    LOCK_RESTORE(flags);

    return (res & mask) != 0;
}
|
| 109 |
|
/*
 * Atomically clear bit 'bitnum' in the bitmap at 'p' and return its
 * previous value (0 or 1). Protected by LOCK_SAVE/LOCK_RESTORE.
 *
 * Fix: the word snapshot was an 'unsigned int', which truncates the
 * word where 'unsigned long' is 64-bit and zeroes the upper bits on
 * write-back. Use 'unsigned long' for the snapshot.
 */
static inline int
____atomic_test_and_clear_bit(unsigned int bitnum, volatile unsigned long *p)
{
    unsigned long flags;
    unsigned long res;
    unsigned long mask = 1UL << (bitnum & 31);

    p += bitnum >> 5;

    LOCK_SAVE(flags);
    res = *p;
    *p = res & ~mask;
    LOCK_RESTORE(flags);

    return (res & mask) != 0;
}
|
| 126 |
|
/*
 * Atomically toggle bit 'bitnum' in the bitmap at 'p' and return its
 * previous value (0 or 1). Protected by LOCK_SAVE/LOCK_RESTORE.
 *
 * Fix: the word snapshot was an 'unsigned int', which truncates the
 * word where 'unsigned long' is 64-bit and zeroes the upper bits on
 * write-back. Use 'unsigned long' for the snapshot.
 */
static inline int
____atomic_test_and_change_bit(unsigned int bitnum, volatile unsigned long *p)
{
    unsigned long flags;
    unsigned long res;
    unsigned long mask = 1UL << (bitnum & 31);

    p += bitnum >> 5;

    LOCK_SAVE(flags);
    res = *p;
    *p = res ^ mask;
    LOCK_RESTORE(flags);

    return (res & mask) != 0;
}
|
| 143 |
|
| 144 |
|
/* Route a native-endian bit operation to its locked helper above. */
#define ATOMIC_BITOP(name,nr,p) ____atomic_##name(nr, p)

/*
 * Native endian atomic definitions.
 * Only installed when the platform has not already provided set_bit
 * (see the enclosing #ifndef set_bit guard).
 */
#define set_bit(nr,p) ATOMIC_BITOP(set_bit,nr,p)
#define clear_bit(nr,p) ATOMIC_BITOP(clear_bit,nr,p)
#define change_bit(nr,p) ATOMIC_BITOP(change_bit,nr,p)
#define test_and_set_bit(nr,p) ATOMIC_BITOP(test_and_set_bit,nr,p)
#define test_and_clear_bit(nr,p) ATOMIC_BITOP(test_and_clear_bit,nr,p)
#define test_and_change_bit(nr,p) ATOMIC_BITOP(test_and_change_bit,nr,p)
|
| 156 |
|
/*
 * Non-atomic ("relaxed") set of bit 'bitnum' in the bitmap at 'p'.
 * No locking is performed; the caller must guarantee exclusive access.
 */
static inline void ____relax_set_bit(unsigned int bitnum, volatile unsigned long *p)
{
    volatile unsigned long *word = p + (bitnum >> 5);

    *word |= 1UL << (bitnum & 31);
}
|
| 164 |
|
/*
 * Non-atomic ("relaxed") clear of bit 'bitnum' in the bitmap at 'p'.
 * No locking is performed; the caller must guarantee exclusive access.
 */
static inline void ____relax_clear_bit(unsigned int bitnum, volatile unsigned long *p)
{
    volatile unsigned long *word = p + (bitnum >> 5);

    *word &= ~(1UL << (bitnum & 31));
}
|
| 172 |
|
/*
 * Non-atomic ("relaxed") toggle of bit 'bitnum' in the bitmap at 'p'.
 * No locking is performed; the caller must guarantee exclusive access.
 */
static inline void ____relax_change_bit(unsigned int bitnum, volatile unsigned long *p)
{
    volatile unsigned long *word = p + (bitnum >> 5);

    *word ^= 1UL << (bitnum & 31);
}
|
| 180 |
|
/*
 * Non-atomic test-and-set: set bit 'bitnum' in the bitmap at 'p' and
 * return its previous value (0 or 1). No locking; caller must
 * guarantee exclusive access.
 *
 * Fix: the word snapshot was an 'unsigned int', which truncates the
 * word where 'unsigned long' is 64-bit and zeroes the upper bits on
 * write-back. Use 'unsigned long' for the snapshot.
 */
static inline int
____relax_test_and_set_bit(unsigned int bitnum, volatile unsigned long *p)
{
    unsigned long res;
    unsigned long mask = 1UL << (bitnum & 31);

    p += bitnum >> 5;

    res = *p;
    *p = res | mask;

    return (res & mask) != 0;
}
|
| 194 |
|
/*
 * Non-atomic test-and-clear: clear bit 'bitnum' in the bitmap at 'p'
 * and return its previous value (0 or 1). No locking; caller must
 * guarantee exclusive access.
 *
 * Fix: the word snapshot was an 'unsigned int', which truncates the
 * word where 'unsigned long' is 64-bit and zeroes the upper bits on
 * write-back. Use 'unsigned long' for the snapshot.
 */
static inline int
____relax_test_and_clear_bit(unsigned int bitnum, volatile unsigned long *p)
{
    unsigned long res;
    unsigned long mask = 1UL << (bitnum & 31);

    p += bitnum >> 5;

    res = *p;
    *p = res & ~mask;

    return (res & mask) != 0;
}
|
| 208 |
|
/*
 * Non-atomic test-and-toggle: toggle bit 'bitnum' in the bitmap at 'p'
 * and return its previous value (0 or 1). No locking; caller must
 * guarantee exclusive access.
 *
 * Fix: the word snapshot was an 'unsigned int', which truncates the
 * word where 'unsigned long' is 64-bit and zeroes the upper bits on
 * write-back. Use 'unsigned long' for the snapshot.
 */
static inline int
____relax_test_and_change_bit(unsigned int bitnum, volatile unsigned long *p)
{
    unsigned long res;
    unsigned long mask = 1UL << (bitnum & 31);

    p += bitnum >> 5;

    res = *p;
    *p = res ^ mask;

    return (res & mask) != 0;
}
|
| 222 |
|
/* Route a native-endian bit operation to its lock-free helper above. */
#define RELAX_BITOP(name,nr,p) ____relax_##name(nr, p)

/*
 * Native endian relax definitions.
 * Double-underscore variants perform no locking; callers must
 * guarantee exclusive access to the bitmap.
 */
#define __set_bit(nr,p) RELAX_BITOP(set_bit,nr,p)
#define __clear_bit(nr,p) RELAX_BITOP(clear_bit,nr,p)
#define __change_bit(nr,p) RELAX_BITOP(change_bit,nr,p)
#define __test_and_set_bit(nr,p) RELAX_BITOP(test_and_set_bit,nr,p)
#define __test_and_clear_bit(nr,p) RELAX_BITOP(test_and_clear_bit,nr,p)
#define __test_and_change_bit(nr,p) RELAX_BITOP(test_and_change_bit,nr,p)
|
| 234 |
|
| 235 |
|
| 236 | /*
|
| 237 | * This routine doesn't need to be atomic.
|
| 238 | */
|
/*
 * Non-atomic bit test. Returns nonzero (not necessarily 1) when bit
 * 'nr' of the bitmap at 'addr' is set.
 *
 * NOTE(review): this addresses the bitmap byte-wise, which matches the
 * unsigned-long-based ops above only on little-endian targets — confirm
 * the target endianness.
 */
static inline int test_bit(int nr, const void * addr)
{
    const unsigned char *bytes = (const unsigned char *) addr;

    return bytes[nr >> 3] & (1U << (nr & 7));
}
|
| 243 |
|
| 244 | #endif
|
| 245 |
|
/*
 * Atomically replace the 'nr'-bit wide field starting at bit 'start'
 * (both taken modulo 32) of the word at 'p' with the low bits of
 * 'val'. Protected by LOCK_SAVE/LOCK_RESTORE.
 *
 * NOTE(review): 'p' is not advanced by start >> 5, so the field must
 * lie entirely within the single word *p; nr == 32 degenerates to a
 * 0-bit field because of the '& 31' — confirm with callers.
 *
 * Fix: the field mask was built with signed '1 << n'. For n == 31 the
 * shift overflows a 32-bit int (undefined behavior), and the signed
 * intermediate sign-extends where 'unsigned long' is 64-bit. Build all
 * masks with 1UL.
 */
static inline void ____atomic_set_bits_u32(unsigned int start, unsigned int nr,
                                           unsigned int val, volatile unsigned long *p)
{
    unsigned long flags;
    unsigned long start_u32 = start & 31;
    unsigned long nr_u32 = nr & 31;
    unsigned long field = (1UL << nr_u32) - 1;   /* nr_u32 ones */
    unsigned long mask = ~(field << start_u32);  /* clears the field */
    unsigned long val_u32 = (val & field) << start_u32;
    unsigned long tmp;

    LOCK_SAVE(flags);
    tmp = *p;
    tmp &= mask;
    tmp |= val_u32;
    *p = tmp;
    LOCK_RESTORE(flags);
}
|
| 268 |
|
/*
 * Non-atomic variant of ____atomic_set_bits_u32: replace the 'nr'-bit
 * wide field starting at bit 'start' (both taken modulo 32) of the
 * word at 'p' with the low bits of 'val'. No locking; caller must
 * guarantee exclusive access.
 *
 * NOTE(review): 'p' is not advanced by start >> 5, so the field must
 * lie entirely within the single word *p; nr == 32 degenerates to a
 * 0-bit field because of the '& 31' — confirm with callers.
 *
 * Fix: the field mask was built with signed '1 << n'. For n == 31 the
 * shift overflows a 32-bit int (undefined behavior), and the signed
 * intermediate sign-extends where 'unsigned long' is 64-bit. Build all
 * masks with 1UL.
 */
static inline void ____relax_set_bits_u32(unsigned int start, unsigned int nr,
                                          unsigned int val, volatile unsigned long *p)
{
    unsigned long start_u32 = start & 31;
    unsigned long nr_u32 = nr & 31;
    unsigned long field = (1UL << nr_u32) - 1;   /* nr_u32 ones */
    unsigned long mask = ~(field << start_u32);  /* clears the field */
    unsigned long val_u32 = (val & field) << start_u32;
    unsigned long tmp;

    tmp = *p;
    tmp &= mask;
    tmp |= val_u32;
    *p = tmp;
}
|
/* Route a multi-bit field update to its locked implementation above. */
#define ATOMIC_BITSOP(name,start,nr,val,p) ____atomic_##name(start,nr,val,p)
/*
 * Fix: RELAX_BITSOP previously expanded to ____atomic_##name, so the
 * "relaxed" __set_bits_u32 silently took the lock; it must dispatch to
 * the lock-free ____relax_* helpers.
 */
#define RELAX_BITSOP(name,start,nr,val,p) ____relax_##name(start,nr,val,p)

#define set_bits_u32(start,nr,val,p) ATOMIC_BITSOP(set_bits_u32,start,nr,val,p)
#define __set_bits_u32(start,nr,val,p) RELAX_BITSOP(set_bits_u32,start,nr,val,p)
|
| 293 |
|
| 294 |
|
| 295 | #ifdef __cplusplus
|
| 296 | }
|
| 297 | #endif
|
| 298 |
|
| 299 | #endif
|