Mirror of https://github.com/AetherDroid/android_kernel_samsung_on5xelte.git
Synced 2025-10-31 08:08:51 +01:00

Fixed MTP to work with TWRP
Commit f6dfaef42e: 50820 changed files with 20846062 additions and 0 deletions
							
								
								
									
include/asm-generic/bitops/__ffs.h (new file, 43 lines)
@@ -0,0 +1,43 @@
#ifndef _ASM_GENERIC_BITOPS___FFS_H_
#define _ASM_GENERIC_BITOPS___FFS_H_

#include <asm/types.h>

/**
 * __ffs - find first bit in word.
 * @word: The word to search
 *
 * Undefined if no bit exists, so code should check against 0 first.
 */
static __always_inline unsigned long __ffs(unsigned long word)
{
	int num = 0;

#if BITS_PER_LONG == 64
	if ((word & 0xffffffff) == 0) {
		num += 32;
		word >>= 32;
	}
#endif
	if ((word & 0xffff) == 0) {
		num += 16;
		word >>= 16;
	}
	if ((word & 0xff) == 0) {
		num += 8;
		word >>= 8;
	}
	if ((word & 0xf) == 0) {
		num += 4;
		word >>= 4;
	}
	if ((word & 0x3) == 0) {
		num += 2;
		word >>= 2;
	}
	if ((word & 0x1) == 0)
		num += 1;
	return num;
}

#endif /* _ASM_GENERIC_BITOPS___FFS_H_ */
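
To see the halving cascade above in action, here is a small stand-alone sketch in hosted C; it is not part of the header, demo_ffs is an illustrative name, and the cross-check assumes the GCC/Clang __builtin_ctzl builtin.

#include <assert.h>
#include <limits.h>
#include <stdio.h>

/* Same halving cascade as __ffs(), written for a hosted build. */
static unsigned long demo_ffs(unsigned long word)
{
	unsigned long num = 0;

#if ULONG_MAX > 0xffffffffUL	/* 64-bit long, like BITS_PER_LONG == 64 */
	if ((word & 0xffffffffUL) == 0) {
		num += 32;
		word >>= 32;
	}
#endif
	if ((word & 0xffff) == 0) { num += 16; word >>= 16; }
	if ((word & 0xff) == 0)   { num += 8;  word >>= 8;  }
	if ((word & 0xf) == 0)    { num += 4;  word >>= 4;  }
	if ((word & 0x3) == 0)    { num += 2;  word >>= 2;  }
	if ((word & 0x1) == 0)    { num += 1; }
	return num;
}

int main(void)
{
	assert(demo_ffs(1UL) == 0);	/* bit 0 is the lowest set bit */
	assert(demo_ffs(0x40UL) == 6);	/* bit 6 is the lowest set bit */
	assert(demo_ffs(0x40UL) == (unsigned long)__builtin_ctzl(0x40UL));
	printf("lowest set bit of 0x40 is %lu\n", demo_ffs(0x40UL));
	return 0;
}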
							
								
								
									
include/asm-generic/bitops/__fls.h (new file, 43 lines)
@@ -0,0 +1,43 @@
#ifndef _ASM_GENERIC_BITOPS___FLS_H_
#define _ASM_GENERIC_BITOPS___FLS_H_

#include <asm/types.h>

/**
 * __fls - find last (most-significant) set bit in a long word
 * @word: the word to search
 *
 * Undefined if no set bit exists, so code should check against 0 first.
 */
static __always_inline unsigned long __fls(unsigned long word)
{
	int num = BITS_PER_LONG - 1;

#if BITS_PER_LONG == 64
	if (!(word & (~0ul << 32))) {
		num -= 32;
		word <<= 32;
	}
#endif
	if (!(word & (~0ul << (BITS_PER_LONG-16)))) {
		num -= 16;
		word <<= 16;
	}
	if (!(word & (~0ul << (BITS_PER_LONG-8)))) {
		num -= 8;
		word <<= 8;
	}
	if (!(word & (~0ul << (BITS_PER_LONG-4)))) {
		num -= 4;
		word <<= 4;
	}
	if (!(word & (~0ul << (BITS_PER_LONG-2)))) {
		num -= 2;
		word <<= 2;
	}
	if (!(word & (~0ul << (BITS_PER_LONG-1))))
		num -= 1;
	return num;
}

#endif /* _ASM_GENERIC_BITOPS___FLS_H_ */
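
The same idea works from the other end of the word: __fls() reports the highest set bit (0-based), and the fls()-style result is one higher, with 0 reserved for a zero argument. A quick hosted-C check of those conventions, assuming the GCC/Clang __builtin_clzl builtin:

#include <assert.h>
#include <limits.h>

int main(void)
{
	unsigned long x = 0x90UL;		/* bits 4 and 7 set */
	unsigned int bits = sizeof(x) * CHAR_BIT;

	/* __fls()-style: index of the highest set bit, here bit 7. */
	assert(bits - 1 - (unsigned int)__builtin_clzl(x) == 7);

	/* fls()-style: one-based position, so 8 for the same value. */
	assert(bits - (unsigned int)__builtin_clzl(x) == 8);
	return 0;
}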
							
								
								
									
include/asm-generic/bitops/arch_hweight.h (new file, 25 lines)
@@ -0,0 +1,25 @@
#ifndef _ASM_GENERIC_BITOPS_ARCH_HWEIGHT_H_
#define _ASM_GENERIC_BITOPS_ARCH_HWEIGHT_H_

#include <asm/types.h>

static inline unsigned int __arch_hweight32(unsigned int w)
{
	return __sw_hweight32(w);
}

static inline unsigned int __arch_hweight16(unsigned int w)
{
	return __sw_hweight16(w);
}

static inline unsigned int __arch_hweight8(unsigned int w)
{
	return __sw_hweight8(w);
}

static inline unsigned long __arch_hweight64(__u64 w)
{
	return __sw_hweight64(w);
}
#endif /* _ASM_GENERIC_BITOPS_HWEIGHT_H_ */
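
The __sw_hweight*() helpers these wrappers call are defined elsewhere in the kernel (lib/hweight.c), not in this header. For a rough idea of what a software population count looks like, here is a stand-alone sketch of the classic SWAR reduction for a 32-bit word; it is illustrative only, not the kernel's exact implementation.

#include <assert.h>
#include <stdint.h>

/* Fold pair counts, then nibble counts, then byte counts, then sum the bytes. */
static unsigned int demo_hweight32(uint32_t w)
{
	w = w - ((w >> 1) & 0x55555555u);			/* 2-bit counts  */
	w = (w & 0x33333333u) + ((w >> 2) & 0x33333333u);	/* 4-bit counts  */
	w = (w + (w >> 4)) & 0x0f0f0f0fu;			/* 8-bit counts  */
	return (w * 0x01010101u) >> 24;				/* add the bytes */
}

int main(void)
{
	assert(demo_hweight32(0) == 0);
	assert(demo_hweight32(0xffffffffu) == 32);
	assert(demo_hweight32(0xf0f00001u) == 9);
	return 0;
}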
							
								
								
									
include/asm-generic/bitops/atomic.h (new file, 189 lines)
@@ -0,0 +1,189 @@
#ifndef _ASM_GENERIC_BITOPS_ATOMIC_H_
#define _ASM_GENERIC_BITOPS_ATOMIC_H_

#include <asm/types.h>
#include <linux/irqflags.h>

#ifdef CONFIG_SMP
#include <asm/spinlock.h>
#include <asm/cache.h>		/* we use L1_CACHE_BYTES */

/* Use an array of spinlocks for our atomic_ts.
 * Hash function to index into a different SPINLOCK.
 * Since "a" is usually an address, use one spinlock per cacheline.
 */
#  define ATOMIC_HASH_SIZE 4
#  define ATOMIC_HASH(a) (&(__atomic_hash[ (((unsigned long) a)/L1_CACHE_BYTES) & (ATOMIC_HASH_SIZE-1) ]))

extern arch_spinlock_t __atomic_hash[ATOMIC_HASH_SIZE] __lock_aligned;

/* Can't use raw_spin_lock_irq because of #include problems, so
 * this is the substitute */
#define _atomic_spin_lock_irqsave(l,f) do {	\
	arch_spinlock_t *s = ATOMIC_HASH(l);	\
	local_irq_save(f);			\
	arch_spin_lock(s);			\
} while(0)

#define _atomic_spin_unlock_irqrestore(l,f) do {	\
	arch_spinlock_t *s = ATOMIC_HASH(l);		\
	arch_spin_unlock(s);				\
	local_irq_restore(f);				\
} while(0)


#else
#  define _atomic_spin_lock_irqsave(l,f) do { local_irq_save(f); } while (0)
#  define _atomic_spin_unlock_irqrestore(l,f) do { local_irq_restore(f); } while (0)
#endif

/*
 * NMI events can occur at any time, including when interrupts have been
 * disabled by *_irqsave().  So you can get NMI events occurring while a
 * *_bit function is holding a spin lock.  If the NMI handler also wants
 * to do bit manipulation (and they do) then you can get a deadlock
 * between the original caller of *_bit() and the NMI handler.
 *
 * by Keith Owens
 */

/**
 * set_bit - Atomically set a bit in memory
 * @nr: the bit to set
 * @addr: the address to start counting from
 *
 * This function is atomic and may not be reordered.  See __set_bit()
 * if you do not require the atomic guarantees.
 *
 * Note: there are no guarantees that this function will not be reordered
 * on non x86 architectures, so if you are writing portable code,
 * make sure not to rely on its reordering guarantees.
 *
 * Note that @nr may be almost arbitrarily large; this function is not
 * restricted to acting on a single-word quantity.
 */
static inline void set_bit(int nr, volatile unsigned long *addr)
{
	unsigned long mask = BIT_MASK(nr);
	unsigned long *p = ((unsigned long *)addr) + BIT_WORD(nr);
	unsigned long flags;

	_atomic_spin_lock_irqsave(p, flags);
	*p  |= mask;
	_atomic_spin_unlock_irqrestore(p, flags);
}

/**
 * clear_bit - Clears a bit in memory
 * @nr: Bit to clear
 * @addr: Address to start counting from
 *
 * clear_bit() is atomic and may not be reordered.  However, it does
 * not contain a memory barrier, so if it is used for locking purposes,
 * you should call smp_mb__before_atomic() and/or smp_mb__after_atomic()
 * in order to ensure changes are visible on other processors.
 */
static inline void clear_bit(int nr, volatile unsigned long *addr)
{
	unsigned long mask = BIT_MASK(nr);
	unsigned long *p = ((unsigned long *)addr) + BIT_WORD(nr);
	unsigned long flags;

	_atomic_spin_lock_irqsave(p, flags);
	*p &= ~mask;
	_atomic_spin_unlock_irqrestore(p, flags);
}

/**
 * change_bit - Toggle a bit in memory
 * @nr: Bit to change
 * @addr: Address to start counting from
 *
 * change_bit() is atomic and may not be reordered. It may be
 * reordered on other architectures than x86.
 * Note that @nr may be almost arbitrarily large; this function is not
 * restricted to acting on a single-word quantity.
 */
static inline void change_bit(int nr, volatile unsigned long *addr)
{
	unsigned long mask = BIT_MASK(nr);
	unsigned long *p = ((unsigned long *)addr) + BIT_WORD(nr);
	unsigned long flags;

	_atomic_spin_lock_irqsave(p, flags);
	*p ^= mask;
	_atomic_spin_unlock_irqrestore(p, flags);
}

/**
 * test_and_set_bit - Set a bit and return its old value
 * @nr: Bit to set
 * @addr: Address to count from
 *
 * This operation is atomic and cannot be reordered.
 * It may be reordered on other architectures than x86.
 * It also implies a memory barrier.
 */
static inline int test_and_set_bit(int nr, volatile unsigned long *addr)
{
	unsigned long mask = BIT_MASK(nr);
	unsigned long *p = ((unsigned long *)addr) + BIT_WORD(nr);
	unsigned long old;
	unsigned long flags;

	_atomic_spin_lock_irqsave(p, flags);
	old = *p;
	*p = old | mask;
	_atomic_spin_unlock_irqrestore(p, flags);

	return (old & mask) != 0;
}

/**
 * test_and_clear_bit - Clear a bit and return its old value
 * @nr: Bit to clear
 * @addr: Address to count from
 *
 * This operation is atomic and cannot be reordered.
 * It can be reordered on architectures other than x86.
 * It also implies a memory barrier.
 */
static inline int test_and_clear_bit(int nr, volatile unsigned long *addr)
{
	unsigned long mask = BIT_MASK(nr);
	unsigned long *p = ((unsigned long *)addr) + BIT_WORD(nr);
	unsigned long old;
	unsigned long flags;

	_atomic_spin_lock_irqsave(p, flags);
	old = *p;
	*p = old & ~mask;
	_atomic_spin_unlock_irqrestore(p, flags);

	return (old & mask) != 0;
}

/**
 * test_and_change_bit - Change a bit and return its old value
 * @nr: Bit to change
 * @addr: Address to count from
 *
 * This operation is atomic and cannot be reordered.
 * It also implies a memory barrier.
 */
static inline int test_and_change_bit(int nr, volatile unsigned long *addr)
{
	unsigned long mask = BIT_MASK(nr);
	unsigned long *p = ((unsigned long *)addr) + BIT_WORD(nr);
	unsigned long old;
	unsigned long flags;

	_atomic_spin_lock_irqsave(p, flags);
	old = *p;
	*p = old ^ mask;
	_atomic_spin_unlock_irqrestore(p, flags);

	return (old & mask) != 0;
}

#endif /* _ASM_GENERIC_BITOPS_ATOMIC_H */
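
The ATOMIC_HASH() scheme above (one lock per cache line of the target address, so unrelated bitmaps rarely contend) can be mimicked in user space. Below is a hedged sketch with pthread mutexes standing in for arch_spinlock_t and the IRQ handling left out; all demo_* names and the 64-byte cache-line guess are illustrative (build with -pthread).

#include <assert.h>
#include <pthread.h>

#define DEMO_HASH_SIZE	4
#define DEMO_CACHELINE	64

static pthread_mutex_t demo_hash[DEMO_HASH_SIZE] = {
	PTHREAD_MUTEX_INITIALIZER, PTHREAD_MUTEX_INITIALIZER,
	PTHREAD_MUTEX_INITIALIZER, PTHREAD_MUTEX_INITIALIZER,
};

/* Pick one lock per cache line of the target address, as ATOMIC_HASH() does. */
static pthread_mutex_t *demo_lock_for(const void *addr)
{
	return &demo_hash[((unsigned long)addr / DEMO_CACHELINE) % DEMO_HASH_SIZE];
}

static int demo_test_and_set_bit(int nr, unsigned long *addr)
{
	unsigned long bits = 8 * sizeof(unsigned long);
	unsigned long mask = 1UL << (nr % bits);
	unsigned long *p = addr + nr / bits;
	pthread_mutex_t *lock = demo_lock_for(p);
	unsigned long old;

	pthread_mutex_lock(lock);
	old = *p;
	*p = old | mask;
	pthread_mutex_unlock(lock);
	return (old & mask) != 0;
}

int main(void)
{
	unsigned long map[2] = { 0, 0 };

	assert(demo_test_and_set_bit(40, map) == 0);	/* was clear */
	assert(demo_test_and_set_bit(40, map) == 1);	/* now set   */
	return 0;
}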
							
								
								
									
include/asm-generic/bitops/builtin-__ffs.h (new file, 15 lines)
@@ -0,0 +1,15 @@
#ifndef _ASM_GENERIC_BITOPS_BUILTIN___FFS_H_
#define _ASM_GENERIC_BITOPS_BUILTIN___FFS_H_

/**
 * __ffs - find first bit in word.
 * @word: The word to search
 *
 * Undefined if no bit exists, so code should check against 0 first.
 */
static __always_inline unsigned long __ffs(unsigned long word)
{
	return __builtin_ctzl(word);
}

#endif
							
								
								
									
include/asm-generic/bitops/builtin-__fls.h (new file, 15 lines)
@@ -0,0 +1,15 @@
#ifndef _ASM_GENERIC_BITOPS_BUILTIN___FLS_H_
#define _ASM_GENERIC_BITOPS_BUILTIN___FLS_H_

/**
 * __fls - find last (most-significant) set bit in a long word
 * @word: the word to search
 *
 * Undefined if no set bit exists, so code should check against 0 first.
 */
static __always_inline unsigned long __fls(unsigned long word)
{
	return (sizeof(word) * 8) - 1 - __builtin_clzl(word);
}

#endif
							
								
								
									
include/asm-generic/bitops/builtin-ffs.h (new file, 17 lines)
@@ -0,0 +1,17 @@
#ifndef _ASM_GENERIC_BITOPS_BUILTIN_FFS_H_
#define _ASM_GENERIC_BITOPS_BUILTIN_FFS_H_

/**
 * ffs - find first bit set
 * @x: the word to search
 *
 * This is defined the same way as
 * the libc and compiler builtin ffs routines, therefore
 * differs in spirit from the above ffz (man ffs).
 */
static __always_inline int ffs(int x)
{
	return __builtin_ffs(x);
}

#endif
							
								
								
									
include/asm-generic/bitops/builtin-fls.h (new file, 16 lines)
@@ -0,0 +1,16 @@
#ifndef _ASM_GENERIC_BITOPS_BUILTIN_FLS_H_
#define _ASM_GENERIC_BITOPS_BUILTIN_FLS_H_

/**
 * fls - find last (most-significant) bit set
 * @x: the word to search
 *
 * This is defined the same way as ffs.
 * Note fls(0) = 0, fls(1) = 1, fls(0x80000000) = 32.
 */
static __always_inline int fls(int x)
{
	return x ? sizeof(x) * 8 - __builtin_clz(x) : 0;
}

#endif
							
								
								
									
include/asm-generic/bitops/const_hweight.h (new file, 43 lines)
@@ -0,0 +1,43 @@
#ifndef _ASM_GENERIC_BITOPS_CONST_HWEIGHT_H_
#define _ASM_GENERIC_BITOPS_CONST_HWEIGHT_H_

/*
 * Compile time versions of __arch_hweightN()
 */
#define __const_hweight8(w)		\
	((unsigned int)			\
	 ((!!((w) & (1ULL << 0))) +	\
	  (!!((w) & (1ULL << 1))) +	\
	  (!!((w) & (1ULL << 2))) +	\
	  (!!((w) & (1ULL << 3))) +	\
	  (!!((w) & (1ULL << 4))) +	\
	  (!!((w) & (1ULL << 5))) +	\
	  (!!((w) & (1ULL << 6))) +	\
	  (!!((w) & (1ULL << 7)))))

#define __const_hweight16(w) (__const_hweight8(w)  + __const_hweight8((w)  >> 8 ))
#define __const_hweight32(w) (__const_hweight16(w) + __const_hweight16((w) >> 16))
#define __const_hweight64(w) (__const_hweight32(w) + __const_hweight32((w) >> 32))

/*
 * Generic interface.
 */
#define hweight8(w)  (__builtin_constant_p(w) ? __const_hweight8(w)  : __arch_hweight8(w))
#define hweight16(w) (__builtin_constant_p(w) ? __const_hweight16(w) : __arch_hweight16(w))
#define hweight32(w) (__builtin_constant_p(w) ? __const_hweight32(w) : __arch_hweight32(w))
#define hweight64(w) (__builtin_constant_p(w) ? __const_hweight64(w) : __arch_hweight64(w))

/*
 * Interface for known constant arguments
 */
#define HWEIGHT8(w)  (BUILD_BUG_ON_ZERO(!__builtin_constant_p(w)) + __const_hweight8(w))
#define HWEIGHT16(w) (BUILD_BUG_ON_ZERO(!__builtin_constant_p(w)) + __const_hweight16(w))
#define HWEIGHT32(w) (BUILD_BUG_ON_ZERO(!__builtin_constant_p(w)) + __const_hweight32(w))
#define HWEIGHT64(w) (BUILD_BUG_ON_ZERO(!__builtin_constant_p(w)) + __const_hweight64(w))

/*
 * Type invariant interface to the compile time constant hweight functions.
 */
#define HWEIGHT(w)   HWEIGHT64((u64)w)

#endif /* _ASM_GENERIC_BITOPS_CONST_HWEIGHT_H_ */
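
As a worked example, __const_hweight8(0xb1) folds to 4 at compile time (bits 0, 4, 5 and 7 are set), which is what lets hweight8() cost nothing for literal arguments. A stand-alone check using a user-space copy of the 8-bit macro (the DEMO_ name is illustrative):

/* User-space copy of the 8-bit population-count macro, checked at compile time. */
#define DEMO_HWEIGHT8(w)		\
	((unsigned int)			\
	 ((!!((w) & (1ULL << 0))) +	\
	  (!!((w) & (1ULL << 1))) +	\
	  (!!((w) & (1ULL << 2))) +	\
	  (!!((w) & (1ULL << 3))) +	\
	  (!!((w) & (1ULL << 4))) +	\
	  (!!((w) & (1ULL << 5))) +	\
	  (!!((w) & (1ULL << 6))) +	\
	  (!!((w) & (1ULL << 7)))))

_Static_assert(DEMO_HWEIGHT8(0xb1) == 4, "0xb1 has four set bits");
_Static_assert(DEMO_HWEIGHT8(0xff) == 8, "0xff has eight set bits");

int main(void)
{
	return 0;
}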
							
								
								
									
include/asm-generic/bitops/count_zeros.h (new file, 57 lines)
@@ -0,0 +1,57 @@
/* Count leading and trailing zeros functions
 *
 * Copyright (C) 2012 Red Hat, Inc. All Rights Reserved.
 * Written by David Howells (dhowells@redhat.com)
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public Licence
 * as published by the Free Software Foundation; either version
 * 2 of the Licence, or (at your option) any later version.
 */

#ifndef _ASM_GENERIC_BITOPS_COUNT_ZEROS_H_
#define _ASM_GENERIC_BITOPS_COUNT_ZEROS_H_

#include <asm/bitops.h>

/**
 * count_leading_zeros - Count the number of zeros from the MSB back
 * @x: The value
 *
 * Count the number of leading zeros from the MSB going towards the LSB in @x.
 *
 * If the MSB of @x is set, the result is 0.
 * If only the LSB of @x is set, then the result is BITS_PER_LONG-1.
 * If @x is 0 then the result is COUNT_LEADING_ZEROS_0.
 */
static inline int count_leading_zeros(unsigned long x)
{
	if (sizeof(x) == 4)
		return BITS_PER_LONG - fls(x);
	else
		return BITS_PER_LONG - fls64(x);
}

#define COUNT_LEADING_ZEROS_0 BITS_PER_LONG

/**
 * count_trailing_zeros - Count the number of zeros from the LSB forwards
 * @x: The value
 *
 * Count the number of trailing zeros from the LSB going towards the MSB in @x.
 *
 * If the LSB of @x is set, the result is 0.
 * If only the MSB of @x is set, then the result is BITS_PER_LONG-1.
 * If @x is 0 then the result is COUNT_TRAILING_ZEROS_0.
 */
static inline int count_trailing_zeros(unsigned long x)
{
#define COUNT_TRAILING_ZEROS_0 (-1)

	if (sizeof(x) == 4)
		return ffs(x);
	else
		return (x != 0) ? __ffs(x) : COUNT_TRAILING_ZEROS_0;
}

#endif /* _ASM_GENERIC_BITOPS_COUNT_ZEROS_H_ */
							
								
								
									
include/asm-generic/bitops/ext2-atomic-setbit.h (new file, 11 lines)
@@ -0,0 +1,11 @@
#ifndef _ASM_GENERIC_BITOPS_EXT2_ATOMIC_SETBIT_H_
#define _ASM_GENERIC_BITOPS_EXT2_ATOMIC_SETBIT_H_

/*
 * Atomic bitops based version of ext2 atomic bitops
 */

#define ext2_set_bit_atomic(l, nr, addr)	test_and_set_bit_le(nr, addr)
#define ext2_clear_bit_atomic(l, nr, addr)	test_and_clear_bit_le(nr, addr)

#endif /* _ASM_GENERIC_BITOPS_EXT2_ATOMIC_SETBIT_H_ */
							
								
								
									
include/asm-generic/bitops/ext2-atomic.h (new file, 26 lines)
@@ -0,0 +1,26 @@
#ifndef _ASM_GENERIC_BITOPS_EXT2_ATOMIC_H_
#define _ASM_GENERIC_BITOPS_EXT2_ATOMIC_H_

/*
 * Spinlock based version of ext2 atomic bitops
 */

#define ext2_set_bit_atomic(lock, nr, addr)		\
	({						\
		int ret;				\
		spin_lock(lock);			\
		ret = __test_and_set_bit_le(nr, addr);	\
		spin_unlock(lock);			\
		ret;					\
	})

#define ext2_clear_bit_atomic(lock, nr, addr)		\
	({						\
		int ret;				\
		spin_lock(lock);			\
		ret = __test_and_clear_bit_le(nr, addr);	\
		spin_unlock(lock);			\
		ret;					\
	})

#endif /* _ASM_GENERIC_BITOPS_EXT2_ATOMIC_H_ */
							
								
								
									
include/asm-generic/bitops/ffs.h (new file, 41 lines)
@@ -0,0 +1,41 @@
#ifndef _ASM_GENERIC_BITOPS_FFS_H_
#define _ASM_GENERIC_BITOPS_FFS_H_

/**
 * ffs - find first bit set
 * @x: the word to search
 *
 * This is defined the same way as
 * the libc and compiler builtin ffs routines, therefore
 * differs in spirit from the above ffz (man ffs).
 */
static inline int ffs(int x)
{
	int r = 1;

	if (!x)
		return 0;
	if (!(x & 0xffff)) {
		x >>= 16;
		r += 16;
	}
	if (!(x & 0xff)) {
		x >>= 8;
		r += 8;
	}
	if (!(x & 0xf)) {
		x >>= 4;
		r += 4;
	}
	if (!(x & 3)) {
		x >>= 2;
		r += 2;
	}
	if (!(x & 1)) {
		x >>= 1;
		r += 1;
	}
	return r;
}

#endif /* _ASM_GENERIC_BITOPS_FFS_H_ */
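
The two conventions are easy to mix up: ffs() here is 1-based and returns 0 for a zero argument, while __ffs() is 0-based and undefined for 0. A small comparison using the corresponding compiler builtins (assumed GCC/Clang):

#include <assert.h>

int main(void)
{
	/* libc/builtin convention, matching the ffs() above: 1-based. */
	assert(__builtin_ffs(0) == 0);
	assert(__builtin_ffs(0x40) == 7);

	/* __ffs() convention: 0-based, caller must reject 0 first. */
	assert(__builtin_ctz(0x40) == 6);
	return 0;
}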
							
								
								
									
include/asm-generic/bitops/ffz.h (new file, 12 lines)
@@ -0,0 +1,12 @@
#ifndef _ASM_GENERIC_BITOPS_FFZ_H_
#define _ASM_GENERIC_BITOPS_FFZ_H_

/*
 * ffz - find first zero in word.
 * @word: The word to search
 *
 * Undefined if no zero exists, so code should check against ~0UL first.
 */
#define ffz(x)  __ffs(~(x))

#endif /* _ASM_GENERIC_BITOPS_FFZ_H_ */
							
								
								
									
include/asm-generic/bitops/find.h (new file, 62 lines)
@@ -0,0 +1,62 @@
#ifndef _ASM_GENERIC_BITOPS_FIND_H_
#define _ASM_GENERIC_BITOPS_FIND_H_

#ifndef find_next_bit
/**
 * find_next_bit - find the next set bit in a memory region
 * @addr: The address to base the search on
 * @offset: The bitnumber to start searching at
 * @size: The bitmap size in bits
 *
 * Returns the bit number for the next set bit
 * If no bits are set, returns @size.
 */
extern unsigned long find_next_bit(const unsigned long *addr, unsigned long
		size, unsigned long offset);
#endif

#ifndef find_next_zero_bit
/**
 * find_next_zero_bit - find the next cleared bit in a memory region
 * @addr: The address to base the search on
 * @offset: The bitnumber to start searching at
 * @size: The bitmap size in bits
 *
 * Returns the bit number of the next zero bit
 * If no bits are zero, returns @size.
 */
extern unsigned long find_next_zero_bit(const unsigned long *addr, unsigned
		long size, unsigned long offset);
#endif

#ifdef CONFIG_GENERIC_FIND_FIRST_BIT

/**
 * find_first_bit - find the first set bit in a memory region
 * @addr: The address to start the search at
 * @size: The maximum number of bits to search
 *
 * Returns the bit number of the first set bit.
 * If no bits are set, returns @size.
 */
extern unsigned long find_first_bit(const unsigned long *addr,
				    unsigned long size);

/**
 * find_first_zero_bit - find the first cleared bit in a memory region
 * @addr: The address to start the search at
 * @size: The maximum number of bits to search
 *
 * Returns the bit number of the first cleared bit.
 * If no bits are zero, returns @size.
 */
extern unsigned long find_first_zero_bit(const unsigned long *addr,
					 unsigned long size);
#else /* CONFIG_GENERIC_FIND_FIRST_BIT */

#define find_first_bit(addr, size) find_next_bit((addr), (size), 0)
#define find_first_zero_bit(addr, size) find_next_zero_bit((addr), (size), 0)

#endif /* CONFIG_GENERIC_FIND_FIRST_BIT */

#endif /*_ASM_GENERIC_BITOPS_FIND_H_ */
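
Callers usually walk a bitmap by chaining find_next_bit() calls, which is roughly what the kernel's for_each_set_bit() macro expands to. Here is a stand-alone sketch of that loop with a naive bit-at-a-time stand-in for the real search routine; the demo_* names are illustrative.

#include <assert.h>
#include <limits.h>
#include <stdio.h>

#define DEMO_BITS_PER_LONG (sizeof(unsigned long) * CHAR_BIT)

/* Naive reference version of find_next_bit(): test one bit at a time. */
static unsigned long demo_find_next_bit(const unsigned long *addr,
					unsigned long size, unsigned long offset)
{
	unsigned long bit;

	for (bit = offset; bit < size; bit++)
		if (addr[bit / DEMO_BITS_PER_LONG] &
		    (1UL << (bit % DEMO_BITS_PER_LONG)))
			return bit;
	return size;			/* nothing found: return @size */
}

int main(void)
{
	unsigned long map[2] = { 0, 0 };
	unsigned long nbits = 2 * DEMO_BITS_PER_LONG;
	unsigned long bit;

	map[0] |= 1UL << 3;
	map[1] |= 1UL << 1;		/* bit DEMO_BITS_PER_LONG + 1 */

	/* The usual iteration pattern behind for_each_set_bit(). */
	for (bit = demo_find_next_bit(map, nbits, 0);
	     bit < nbits;
	     bit = demo_find_next_bit(map, nbits, bit + 1))
		printf("set bit at %lu\n", bit);

	assert(demo_find_next_bit(map, nbits, 4) == DEMO_BITS_PER_LONG + 1);
	return 0;
}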
							
								
								
									
include/asm-generic/bitops/fls.h (new file, 41 lines)
@@ -0,0 +1,41 @@
#ifndef _ASM_GENERIC_BITOPS_FLS_H_
#define _ASM_GENERIC_BITOPS_FLS_H_

/**
 * fls - find last (most-significant) bit set
 * @x: the word to search
 *
 * This is defined the same way as ffs.
 * Note fls(0) = 0, fls(1) = 1, fls(0x80000000) = 32.
 */

static __always_inline int fls(int x)
{
	int r = 32;

	if (!x)
		return 0;
	if (!(x & 0xffff0000u)) {
		x <<= 16;
		r -= 16;
	}
	if (!(x & 0xff000000u)) {
		x <<= 8;
		r -= 8;
	}
	if (!(x & 0xf0000000u)) {
		x <<= 4;
		r -= 4;
	}
	if (!(x & 0xc0000000u)) {
		x <<= 2;
		r -= 2;
	}
	if (!(x & 0x80000000u)) {
		x <<= 1;
		r -= 1;
	}
	return r;
}

#endif /* _ASM_GENERIC_BITOPS_FLS_H_ */
							
								
								
									
include/asm-generic/bitops/fls64.h (new file, 36 lines)
@@ -0,0 +1,36 @@
#ifndef _ASM_GENERIC_BITOPS_FLS64_H_
#define _ASM_GENERIC_BITOPS_FLS64_H_

#include <asm/types.h>

/**
 * fls64 - find last set bit in a 64-bit word
 * @x: the word to search
 *
 * This is defined in a similar way as the libc and compiler builtin
 * ffsll, but returns the position of the most significant set bit.
 *
 * fls64(value) returns 0 if value is 0 or the position of the last
 * set bit if value is nonzero. The last (most significant) bit is
 * at position 64.
 */
#if BITS_PER_LONG == 32
static __always_inline int fls64(__u64 x)
{
	__u32 h = x >> 32;
	if (h)
		return fls(h) + 32;
	return fls(x);
}
#elif BITS_PER_LONG == 64
static __always_inline int fls64(__u64 x)
{
	if (x == 0)
		return 0;
	return __fls(x) + 1;
}
#else
#error BITS_PER_LONG not 32 or 64
#endif

#endif /* _ASM_GENERIC_BITOPS_FLS64_H_ */
							
								
								
									
include/asm-generic/bitops/hweight.h (new file, 7 lines)
@@ -0,0 +1,7 @@
#ifndef _ASM_GENERIC_BITOPS_HWEIGHT_H_
#define _ASM_GENERIC_BITOPS_HWEIGHT_H_

#include <asm-generic/bitops/arch_hweight.h>
#include <asm-generic/bitops/const_hweight.h>

#endif /* _ASM_GENERIC_BITOPS_HWEIGHT_H_ */
							
								
								
									
include/asm-generic/bitops/le.h (new file, 97 lines)
@@ -0,0 +1,97 @@
#ifndef _ASM_GENERIC_BITOPS_LE_H_
#define _ASM_GENERIC_BITOPS_LE_H_

#include <asm/types.h>
#include <asm/byteorder.h>

#if defined(__LITTLE_ENDIAN)

#define BITOP_LE_SWIZZLE	0

static inline unsigned long find_next_zero_bit_le(const void *addr,
		unsigned long size, unsigned long offset)
{
	return find_next_zero_bit(addr, size, offset);
}

static inline unsigned long find_next_bit_le(const void *addr,
		unsigned long size, unsigned long offset)
{
	return find_next_bit(addr, size, offset);
}

static inline unsigned long find_first_zero_bit_le(const void *addr,
		unsigned long size)
{
	return find_first_zero_bit(addr, size);
}

#elif defined(__BIG_ENDIAN)

#define BITOP_LE_SWIZZLE	((BITS_PER_LONG-1) & ~0x7)

#ifndef find_next_zero_bit_le
extern unsigned long find_next_zero_bit_le(const void *addr,
		unsigned long size, unsigned long offset);
#endif

#ifndef find_next_bit_le
extern unsigned long find_next_bit_le(const void *addr,
		unsigned long size, unsigned long offset);
#endif

#ifndef find_first_zero_bit_le
#define find_first_zero_bit_le(addr, size) \
	find_next_zero_bit_le((addr), (size), 0)
#endif

#else
#error "Please fix <asm/byteorder.h>"
#endif

static inline int test_bit_le(int nr, const void *addr)
{
	return test_bit(nr ^ BITOP_LE_SWIZZLE, addr);
}

static inline void set_bit_le(int nr, void *addr)
{
	set_bit(nr ^ BITOP_LE_SWIZZLE, addr);
}

static inline void clear_bit_le(int nr, void *addr)
{
	clear_bit(nr ^ BITOP_LE_SWIZZLE, addr);
}

static inline void __set_bit_le(int nr, void *addr)
{
	__set_bit(nr ^ BITOP_LE_SWIZZLE, addr);
}

static inline void __clear_bit_le(int nr, void *addr)
{
	__clear_bit(nr ^ BITOP_LE_SWIZZLE, addr);
}

static inline int test_and_set_bit_le(int nr, void *addr)
{
	return test_and_set_bit(nr ^ BITOP_LE_SWIZZLE, addr);
}

static inline int test_and_clear_bit_le(int nr, void *addr)
{
	return test_and_clear_bit(nr ^ BITOP_LE_SWIZZLE, addr);
}

static inline int __test_and_set_bit_le(int nr, void *addr)
{
	return __test_and_set_bit(nr ^ BITOP_LE_SWIZZLE, addr);
}

static inline int __test_and_clear_bit_le(int nr, void *addr)
{
	return __test_and_clear_bit(nr ^ BITOP_LE_SWIZZLE, addr);
}

#endif /* _ASM_GENERIC_BITOPS_LE_H_ */
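
On a big-endian 64-bit machine, BITOP_LE_SWIZZLE works out to 56, and XOR-ing the bit number with it flips which byte of the long is addressed while keeping the bit position inside that byte, so both endiannesses end up touching the same storage byte. A small check of that arithmetic (the layout claim itself assumes the usual big-endian byte order):

#include <assert.h>

int main(void)
{
	/* Big-endian, 64-bit case: (BITS_PER_LONG - 1) & ~0x7 == 56. */
	int swizzle = (64 - 1) & ~0x7;

	assert(swizzle == 56);
	/* Little-endian bit 0 (byte 0, bit 0) maps to native bit 56: the
	 * lowest bit of the most-significant byte, which big-endian storage
	 * places first in memory, i.e. the same byte. */
	assert((0 ^ swizzle) == 56);
	assert((9 ^ swizzle) == 49);	/* byte 1, bit 1: one byte later */
	return 0;
}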
							
								
								
									
include/asm-generic/bitops/lock.h (new file, 45 lines)
@@ -0,0 +1,45 @@
#ifndef _ASM_GENERIC_BITOPS_LOCK_H_
#define _ASM_GENERIC_BITOPS_LOCK_H_

/**
 * test_and_set_bit_lock - Set a bit and return its old value, for lock
 * @nr: Bit to set
 * @addr: Address to count from
 *
 * This operation is atomic and provides acquire barrier semantics.
 * It can be used to implement bit locks.
 */
#define test_and_set_bit_lock(nr, addr)	test_and_set_bit(nr, addr)

/**
 * clear_bit_unlock - Clear a bit in memory, for unlock
 * @nr: the bit to set
 * @addr: the address to start counting from
 *
 * This operation is atomic and provides release barrier semantics.
 */
#define clear_bit_unlock(nr, addr)	\
do {					\
	smp_mb__before_atomic();	\
	clear_bit(nr, addr);		\
} while (0)

/**
 * __clear_bit_unlock - Clear a bit in memory, for unlock
 * @nr: the bit to set
 * @addr: the address to start counting from
 *
 * This operation is like clear_bit_unlock, however it is not atomic.
 * It does provide release barrier semantics so it can be used to unlock
 * a bit lock, however it would only be used if no other CPU can modify
 * any bits in the memory until the lock is released (a good example is
 * if the bit lock itself protects access to the other bits in the word).
 */
#define __clear_bit_unlock(nr, addr)	\
do {					\
	smp_mb();			\
	__clear_bit(nr, addr);		\
} while (0)

#endif /* _ASM_GENERIC_BITOPS_LOCK_H_ */
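
These two macros are the building blocks of bit spinlocks: spin until test_and_set_bit_lock() returns 0, then release with clear_bit_unlock(). Below is a user-space sketch of that pattern with C11 atomics standing in for the kernel primitives; it is illustrative only, and the demo_* names are not kernel APIs.

#include <assert.h>
#include <stdatomic.h>

#define DEMO_LOCK_BIT 0

/* Acquire: retry until the bit's previous value was 0 (acquire ordering). */
static void demo_bit_lock(atomic_ulong *word)
{
	while (atomic_fetch_or_explicit(word, 1UL << DEMO_LOCK_BIT,
					memory_order_acquire) &
	       (1UL << DEMO_LOCK_BIT))
		;	/* spin: somebody else holds the bit */
}

/* Release: clear the bit with release ordering, like clear_bit_unlock(). */
static void demo_bit_unlock(atomic_ulong *word)
{
	atomic_fetch_and_explicit(word, ~(1UL << DEMO_LOCK_BIT),
				  memory_order_release);
}

int main(void)
{
	atomic_ulong flags = 0;

	demo_bit_lock(&flags);
	assert(atomic_load(&flags) & (1UL << DEMO_LOCK_BIT));
	demo_bit_unlock(&flags);
	assert(!(atomic_load(&flags) & (1UL << DEMO_LOCK_BIT)));
	return 0;
}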
							
								
								
									
include/asm-generic/bitops/non-atomic.h (new file, 108 lines)
@@ -0,0 +1,108 @@
#ifndef _ASM_GENERIC_BITOPS_NON_ATOMIC_H_
#define _ASM_GENERIC_BITOPS_NON_ATOMIC_H_

#include <asm/types.h>

/**
 * __set_bit - Set a bit in memory
 * @nr: the bit to set
 * @addr: the address to start counting from
 *
 * Unlike set_bit(), this function is non-atomic and may be reordered.
 * If it's called on the same region of memory simultaneously, the effect
 * may be that only one operation succeeds.
 */
static inline void __set_bit(int nr, volatile unsigned long *addr)
{
	unsigned long mask = BIT_MASK(nr);
	unsigned long *p = ((unsigned long *)addr) + BIT_WORD(nr);

	*p  |= mask;
}

static inline void __clear_bit(int nr, volatile unsigned long *addr)
{
	unsigned long mask = BIT_MASK(nr);
	unsigned long *p = ((unsigned long *)addr) + BIT_WORD(nr);

	*p &= ~mask;
}

/**
 * __change_bit - Toggle a bit in memory
 * @nr: the bit to change
 * @addr: the address to start counting from
 *
 * Unlike change_bit(), this function is non-atomic and may be reordered.
 * If it's called on the same region of memory simultaneously, the effect
 * may be that only one operation succeeds.
 */
static inline void __change_bit(int nr, volatile unsigned long *addr)
{
	unsigned long mask = BIT_MASK(nr);
	unsigned long *p = ((unsigned long *)addr) + BIT_WORD(nr);

	*p ^= mask;
}

/**
 * __test_and_set_bit - Set a bit and return its old value
 * @nr: Bit to set
 * @addr: Address to count from
 *
 * This operation is non-atomic and can be reordered.
 * If two examples of this operation race, one can appear to succeed
 * but actually fail.  You must protect multiple accesses with a lock.
 */
static inline int __test_and_set_bit(int nr, volatile unsigned long *addr)
{
	unsigned long mask = BIT_MASK(nr);
	unsigned long *p = ((unsigned long *)addr) + BIT_WORD(nr);
	unsigned long old = *p;

	*p = old | mask;
	return (old & mask) != 0;
}

/**
 * __test_and_clear_bit - Clear a bit and return its old value
 * @nr: Bit to clear
 * @addr: Address to count from
 *
 * This operation is non-atomic and can be reordered.
 * If two examples of this operation race, one can appear to succeed
 * but actually fail.  You must protect multiple accesses with a lock.
 */
static inline int __test_and_clear_bit(int nr, volatile unsigned long *addr)
{
	unsigned long mask = BIT_MASK(nr);
	unsigned long *p = ((unsigned long *)addr) + BIT_WORD(nr);
	unsigned long old = *p;

	*p = old & ~mask;
	return (old & mask) != 0;
}

/* WARNING: non atomic and it can be reordered! */
static inline int __test_and_change_bit(int nr,
					    volatile unsigned long *addr)
{
	unsigned long mask = BIT_MASK(nr);
	unsigned long *p = ((unsigned long *)addr) + BIT_WORD(nr);
	unsigned long old = *p;

	*p = old ^ mask;
	return (old & mask) != 0;
}

/**
 * test_bit - Determine whether a bit is set
 * @nr: bit number to test
 * @addr: Address to start counting from
 */
static inline int test_bit(int nr, const volatile unsigned long *addr)
{
	return 1UL & (addr[BIT_WORD(nr)] >> (nr & (BITS_PER_LONG-1)));
}

#endif /* _ASM_GENERIC_BITOPS_NON_ATOMIC_H_ */
							
								
								
									
include/asm-generic/bitops/sched.h (new file, 31 lines)
@@ -0,0 +1,31 @@
#ifndef _ASM_GENERIC_BITOPS_SCHED_H_
#define _ASM_GENERIC_BITOPS_SCHED_H_

#include <linux/compiler.h>	/* unlikely() */
#include <asm/types.h>

/*
 * Every architecture must define this function. It's the fastest
 * way of searching a 100-bit bitmap.  It's guaranteed that at least
 * one of the 100 bits is cleared.
 */
static inline int sched_find_first_bit(const unsigned long *b)
{
#if BITS_PER_LONG == 64
	if (b[0])
		return __ffs(b[0]);
	return __ffs(b[1]) + 64;
#elif BITS_PER_LONG == 32
	if (b[0])
		return __ffs(b[0]);
	if (b[1])
		return __ffs(b[1]) + 32;
	if (b[2])
		return __ffs(b[2]) + 64;
	return __ffs(b[3]) + 96;
#else
#error BITS_PER_LONG not defined
#endif
}

#endif /* _ASM_GENERIC_BITOPS_SCHED_H_ */
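
sched_find_first_bit() is just __ffs() applied word by word over the scheduler's priority bitmap, and it relies on at least one bit being set (otherwise __ffs() is undefined). A hosted-C sketch of the 64-bit branch, assuming a 64-bit unsigned long and using the GCC/Clang builtin in place of __ffs():

#include <assert.h>

/* 64-bit branch of sched_find_first_bit(), with __builtin_ctzl as __ffs(). */
static int demo_sched_find_first_bit(const unsigned long b[2])
{
	if (b[0])
		return __builtin_ctzl(b[0]);
	return __builtin_ctzl(b[1]) + 64;
}

int main(void)
{
	unsigned long prio[2] = { 0, 0 };

	prio[1] = 1UL << 6;	/* only bit 70 of the map is set */
	assert(demo_sched_find_first_bit(prio) == 70);

	prio[0] = 1UL << 12;	/* bit 12 is set, and comes first */
	assert(demo_sched_find_first_bit(prio) == 12);
	return 0;
}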