/******************************************************************************* | |
* Copyright (C) 2007, ZTE Corporation. | |
* | |
* File Name: | |
* File Mark: | |
* Description: | |
* Others: | |
* Version: 1.0 | |
* Author: geanfeng | |
* Date: 2013-09-25 | |
* History 1: | |
* Date: | |
* Version: | |
* Author: | |
* Modification: | |
* History 2: | |
********************************************************************************/ | |
#ifndef _DRVS_BITOPS_H_ | |
#define _DRVS_BITOPS_H_ | |
#ifdef __cplusplus | |
extern "C" | |
{ | |
#endif | |
/**************************************************************************** | |
* Include files | |
****************************************************************************/ | |
/**************************************************************************** | |
* Macros | |
****************************************************************************/ | |
/**************************************************************************** | |
* Types | |
****************************************************************************/ | |
/**************************************************************************** | |
* Constants | |
****************************************************************************/ | |
/**************************************************************************** | |
* Global Variables | |
****************************************************************************/ | |
/**************************************************************************** | |
* Function Prototypes | |
****************************************************************************/ | |
#ifndef set_bit | |
/* | |
* These functions are the basis of our bit ops. | |
* | |
* First, the atomic bitops. These use native endian. | |
*/ | |
/*
 * Atomically set bit 'bitnum' in the bit array at 'p'.
 * Word layout assumes 32-bit granularity: word index = bitnum / 32,
 * bit position = bitnum % 32.
 * Atomicity is provided by the project-wide LOCK_SAVE/LOCK_RESTORE
 * macros (defined elsewhere; presumably interrupt lock — confirm).
 */
static inline void ____atomic_set_bit(unsigned int bitnum, volatile unsigned long *p)
{
    volatile unsigned long *word = p + (bitnum >> 5);
    unsigned long bit = 1UL << (bitnum & 31);
    unsigned long irq_state;

    LOCK_SAVE(irq_state);
    *word |= bit;
    LOCK_RESTORE(irq_state);
}
/*
 * Atomically clear bit 'bitnum' in the bit array at 'p'.
 * Same 32-bit word addressing as ____atomic_set_bit; the critical
 * section is guarded by LOCK_SAVE/LOCK_RESTORE (defined elsewhere).
 */
static inline void ____atomic_clear_bit(unsigned int bitnum, volatile unsigned long *p)
{
    volatile unsigned long *word = p + (bitnum >> 5);
    unsigned long bit = 1UL << (bitnum & 31);
    unsigned long irq_state;

    LOCK_SAVE(irq_state);
    *word &= ~bit;
    LOCK_RESTORE(irq_state);
}
/*
 * Atomically toggle bit 'bitnum' in the bit array at 'p'.
 * Same 32-bit word addressing as ____atomic_set_bit; the critical
 * section is guarded by LOCK_SAVE/LOCK_RESTORE (defined elsewhere).
 */
static inline void ____atomic_change_bit(unsigned int bitnum, volatile unsigned long *p)
{
    volatile unsigned long *word = p + (bitnum >> 5);
    unsigned long bit = 1UL << (bitnum & 31);
    unsigned long irq_state;

    LOCK_SAVE(irq_state);
    *word ^= bit;
    LOCK_RESTORE(irq_state);
}
/*
 * Atomically set bit 'bitnum' and return its previous value
 * (1 if it was already set, 0 otherwise).
 * Read-modify-write is made atomic by LOCK_SAVE/LOCK_RESTORE.
 */
static inline int
____atomic_test_and_set_bit(unsigned int bitnum, volatile unsigned long *p)
{
    volatile unsigned long *word = p + (bitnum >> 5);
    unsigned long bit = 1UL << (bitnum & 31);
    unsigned long irq_state;
    unsigned int old;    /* 32-bit snapshot; assumes 32-bit words — as elsewhere in this file */

    LOCK_SAVE(irq_state);
    old = *word;
    *word = old | bit;
    LOCK_RESTORE(irq_state);
    return (old & bit) != 0;
}
/*
 * Atomically clear bit 'bitnum' and return its previous value
 * (1 if it was set, 0 otherwise).
 * Read-modify-write is made atomic by LOCK_SAVE/LOCK_RESTORE.
 */
static inline int
____atomic_test_and_clear_bit(unsigned int bitnum, volatile unsigned long *p)
{
    volatile unsigned long *word = p + (bitnum >> 5);
    unsigned long bit = 1UL << (bitnum & 31);
    unsigned long irq_state;
    unsigned int old;    /* 32-bit snapshot; assumes 32-bit words — as elsewhere in this file */

    LOCK_SAVE(irq_state);
    old = *word;
    *word = old & ~bit;
    LOCK_RESTORE(irq_state);
    return (old & bit) != 0;
}
/*
 * Atomically toggle bit 'bitnum' and return its previous value
 * (1 if it was set, 0 otherwise).
 * Read-modify-write is made atomic by LOCK_SAVE/LOCK_RESTORE.
 */
static inline int
____atomic_test_and_change_bit(unsigned int bitnum, volatile unsigned long *p)
{
    volatile unsigned long *word = p + (bitnum >> 5);
    unsigned long bit = 1UL << (bitnum & 31);
    unsigned long irq_state;
    unsigned int old;    /* 32-bit snapshot; assumes 32-bit words — as elsewhere in this file */

    LOCK_SAVE(irq_state);
    old = *word;
    *word = old ^ bit;
    LOCK_RESTORE(irq_state);
    return (old & bit) != 0;
}
/* Dispatch helper: pastes the op name onto the ____atomic_ prefix. */
#define ATOMIC_BITOP(name,nr,p) ____atomic_##name(nr, p)
/*
 * Native endian atomic definitions.
 * These are the public, interrupt-safe entry points (Linux-style names);
 * use the double-underscore __set_bit family below when the caller already
 * holds exclusion and locking overhead is unwanted.
 */
#define set_bit(nr,p) ATOMIC_BITOP(set_bit,nr,p)
#define clear_bit(nr,p) ATOMIC_BITOP(clear_bit,nr,p)
#define change_bit(nr,p) ATOMIC_BITOP(change_bit,nr,p)
#define test_and_set_bit(nr,p) ATOMIC_BITOP(test_and_set_bit,nr,p)
#define test_and_clear_bit(nr,p) ATOMIC_BITOP(test_and_clear_bit,nr,p)
#define test_and_change_bit(nr,p) ATOMIC_BITOP(test_and_change_bit,nr,p)
/*
 * Non-atomic (relaxed) variant of set_bit: no locking, so the caller
 * must guarantee exclusive access to the bit array.
 */
static inline void ____relax_set_bit(unsigned int bitnum, volatile unsigned long *p)
{
    volatile unsigned long *word = p + (bitnum >> 5);

    *word |= 1UL << (bitnum & 31);
}
/*
 * Non-atomic (relaxed) variant of clear_bit: no locking, so the caller
 * must guarantee exclusive access to the bit array.
 */
static inline void ____relax_clear_bit(unsigned int bitnum, volatile unsigned long *p)
{
    volatile unsigned long *word = p + (bitnum >> 5);

    *word &= ~(1UL << (bitnum & 31));
}
/*
 * Non-atomic (relaxed) variant of change_bit: toggles the bit without
 * locking; caller must guarantee exclusive access.
 */
static inline void ____relax_change_bit(unsigned int bitnum, volatile unsigned long *p)
{
    volatile unsigned long *word = p + (bitnum >> 5);

    *word ^= 1UL << (bitnum & 31);
}
/*
 * Non-atomic test-and-set: sets the bit and returns its previous value
 * (1 if it was already set, 0 otherwise). No locking — caller must
 * guarantee exclusive access.
 */
static inline int
____relax_test_and_set_bit(unsigned int bitnum, volatile unsigned long *p)
{
    volatile unsigned long *word = p + (bitnum >> 5);
    unsigned long bit = 1UL << (bitnum & 31);
    unsigned int old = *word;    /* 32-bit snapshot, matching the atomic variants */

    *word = old | bit;
    return (old & bit) != 0;
}
/*
 * Non-atomic test-and-clear: clears the bit and returns its previous
 * value (1 if it was set, 0 otherwise). No locking — caller must
 * guarantee exclusive access.
 */
static inline int
____relax_test_and_clear_bit(unsigned int bitnum, volatile unsigned long *p)
{
    volatile unsigned long *word = p + (bitnum >> 5);
    unsigned long bit = 1UL << (bitnum & 31);
    unsigned int old = *word;    /* 32-bit snapshot, matching the atomic variants */

    *word = old & ~bit;
    return (old & bit) != 0;
}
/*
 * Non-atomic test-and-toggle: flips the bit and returns its previous
 * value (1 if it was set, 0 otherwise). No locking — caller must
 * guarantee exclusive access.
 */
static inline int
____relax_test_and_change_bit(unsigned int bitnum, volatile unsigned long *p)
{
    volatile unsigned long *word = p + (bitnum >> 5);
    unsigned long bit = 1UL << (bitnum & 31);
    unsigned int old = *word;    /* 32-bit snapshot, matching the atomic variants */

    *word = old ^ bit;
    return (old & bit) != 0;
}
/* Dispatch helper: pastes the op name onto the ____relax_ prefix. */
#define RELAX_BITOP(name,nr,p) ____relax_##name(nr, p)
/*
 * Native endian relax definitions.
 * Double-underscore variants are NOT atomic: use them only when the
 * caller already guarantees exclusive access to the bit array.
 */
#define __set_bit(nr,p) RELAX_BITOP(set_bit,nr,p)
#define __clear_bit(nr,p) RELAX_BITOP(clear_bit,nr,p)
#define __change_bit(nr,p) RELAX_BITOP(change_bit,nr,p)
#define __test_and_set_bit(nr,p) RELAX_BITOP(test_and_set_bit,nr,p)
#define __test_and_clear_bit(nr,p) RELAX_BITOP(test_and_clear_bit,nr,p)
#define __test_and_change_bit(nr,p) RELAX_BITOP(test_and_change_bit,nr,p)
/*
 * This routine doesn't need to be atomic: a plain read of one byte.
 * Returns non-zero (the bit's mask value, not necessarily 1) when bit
 * 'nr' is set, 0 when clear.
 * NOTE(review): byte-granular addressing here agrees with the 32-bit
 * word addressing of the ops above only on little-endian targets —
 * presumably this header is LE-only; confirm before porting.
 */
static inline int test_bit(int nr, const void * addr)
{
    const unsigned char *bytes = (const unsigned char *) addr;
    unsigned int bit_mask = 1U << (nr & 7);

    return bytes[nr >> 3] & bit_mask;
}
#endif | |
/*
 * Atomically replace the 'nr'-bit field starting at bit 'start' of the
 * 32-bit word *p with 'val' (excess high bits of val are discarded).
 * Guarded by LOCK_SAVE/LOCK_RESTORE (defined elsewhere).
 *
 * NOTE(review): unlike the single-bit ops above, p is NOT advanced by
 * start/32 — the caller passes a pointer to the exact word, and only
 * start%32 and nr%32 are used. nr==32 therefore degenerates to a
 * zero-width field (writes nothing) — confirm callers never pass nr==32
 * expecting a full-word store.
 *
 * FIX: shifts now use 1UL. The original '(1<<nr_u32)' left-shifted a
 * signed int by up to 31 and '((1<<nr)-1)<<start' could overflow int —
 * both undefined behavior (CERT INT32-C / INT34-C). Practical results
 * on 32-bit targets are unchanged.
 */
static inline void ____atomic_set_bits_u32(unsigned int start, unsigned int nr,
    unsigned int val, volatile unsigned long *p)
{
    unsigned long flags;
    unsigned long start_u32 = start & 31;
    unsigned long nr_u32 = nr & 31;
    unsigned long field_mask = ((1UL << nr_u32) - 1UL) << start_u32;
    unsigned long val_u32 = ((unsigned long)val << start_u32) & field_mask;
    unsigned long tmp;

    LOCK_SAVE(flags);
    tmp = *p;
    tmp &= ~field_mask;   /* clear the target field */
    tmp |= val_u32;       /* insert the new value */
    *p = tmp;
    LOCK_RESTORE(flags);
}
/*
 * Non-atomic variant of set_bits_u32: replace the 'nr'-bit field
 * starting at bit 'start' of the 32-bit word *p with 'val' (excess high
 * bits of val are discarded). No locking — caller must guarantee
 * exclusive access.
 *
 * NOTE(review): p is NOT advanced by start/32; only start%32 and nr%32
 * are used, so nr==32 degenerates to a zero-width field — confirm
 * callers never rely on a full-word store.
 *
 * FIX: shifts now use 1UL. The original '(1<<nr_u32)' left-shifted a
 * signed int by up to 31 and '((1<<nr)-1)<<start' could overflow int —
 * both undefined behavior (CERT INT32-C / INT34-C). Practical results
 * on 32-bit targets are unchanged.
 */
static inline void ____relax_set_bits_u32(unsigned int start, unsigned int nr,
    unsigned int val, volatile unsigned long *p)
{
    unsigned long start_u32 = start & 31;
    unsigned long nr_u32 = nr & 31;
    unsigned long field_mask = ((1UL << nr_u32) - 1UL) << start_u32;
    unsigned long val_u32 = ((unsigned long)val << start_u32) & field_mask;
    unsigned long tmp;

    tmp = *p;
    tmp &= ~field_mask;   /* clear the target field */
    tmp |= val_u32;       /* insert the new value */
    *p = tmp;
}
/* Dispatch helpers for the multi-bit field ops. */
#define ATOMIC_BITSOP(name,start,nr,val,p) ____atomic_##name(start,nr,val,p)
/*
 * FIX: RELAX_BITSOP previously pasted ____atomic_##name, so the
 * "relaxed" __set_bits_u32 silently took the lock. It now dispatches to
 * the lock-free ____relax_ variant, consistent with RELAX_BITOP above.
 */
#define RELAX_BITSOP(name,start,nr,val,p) ____relax_##name(start,nr,val,p)
#define set_bits_u32(start,nr,val,p) ATOMIC_BITSOP(set_bits_u32,start,nr,val,p)
#define __set_bits_u32(start,nr,val,p) RELAX_BITSOP(set_bits_u32,start,nr,val,p)
#ifdef __cplusplus | |
} | |
#endif | |
#endif |