Diffstat (limited to 'arch/powerpc/include/asm/bitops.h')
 -rw-r--r--  arch/powerpc/include/asm/bitops.h | 23
 1 file changed, 21 insertions(+), 2 deletions(-)
diff --git a/arch/powerpc/include/asm/bitops.h b/arch/powerpc/include/asm/bitops.h
index 4a4d3afd5340..299ab33505a6 100644
--- a/arch/powerpc/include/asm/bitops.h
+++ b/arch/powerpc/include/asm/bitops.h
@@ -216,15 +216,34 @@ static inline void arch___clear_bit_unlock(int nr, volatile unsigned long *addr)
  */
 static inline int fls(unsigned int x)
 {
-	return 32 - __builtin_clz(x);
+	int lz;
+
+	if (__builtin_constant_p(x))
+		return x ? 32 - __builtin_clz(x) : 0;
+	asm("cntlzw %0,%1" : "=r" (lz) : "r" (x));
+	return 32 - lz;
 }
 
 #include <asm-generic/bitops/builtin-__fls.h>
 
+/*
+ * 64-bit can do this using one cntlzd (count leading zeroes doubleword)
+ * instruction; for 32-bit we use the generic version, which does two
+ * 32-bit fls calls.
+ */
+#ifdef CONFIG_PPC64
 static inline int fls64(__u64 x)
 {
-	return 64 - __builtin_clzll(x);
+	int lz;
+
+	if (__builtin_constant_p(x))
+		return x ? 64 - __builtin_clzll(x) : 0;
+	asm("cntlzd %0,%1" : "=r" (lz) : "r" (x));
+	return 64 - lz;
 }
+#else
+#include <asm-generic/bitops/fls64.h>
+#endif
 
 #ifdef CONFIG_PPC64
 unsigned int __arch_hweight8(unsigned int w);
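
For context (not part of the patch itself): GCC documents __builtin_clz() and __builtin_clzll() as undefined when the argument is zero, whereas the PowerPC cntlzw/cntlzd instructions are architecturally defined to return 32 and 64 for a zero input. The new code therefore yields fls(0) == 0 and fls64(0) == 0 on both the constant-folded path and the inline-asm path. The user-space sketch below (my own illustration; ref_fls/ref_fls64 are made-up names, not kernel code) mirrors the intended semantics, including the zero case:

/*
 * User-space sketch (illustration only, not kernel code) of the
 * semantics fls()/fls64() are expected to provide: return the 1-based
 * index of the most significant set bit, or 0 if no bit is set.
 */
#include <assert.h>
#include <stdint.h>
#include <stdio.h>

static int ref_fls(uint32_t x)
{
	/*
	 * __builtin_clz(0) is undefined behaviour, which is why the patch
	 * guards the constant path with "x ? ... : 0"; at run time the
	 * kernel instead uses cntlzw, which returns 32 for a zero input.
	 */
	return x ? 32 - __builtin_clz(x) : 0;
}

static int ref_fls64(uint64_t x)
{
	return x ? 64 - (int)__builtin_clzll(x) : 0;
}

int main(void)
{
	assert(ref_fls(0) == 0);
	assert(ref_fls(1) == 1);
	assert(ref_fls(0x80000000u) == 32);
	assert(ref_fls64(0) == 0);
	assert(ref_fls64(1ULL << 63) == 64);
	printf("fls(0x00ff0000) = %d\n", ref_fls(0x00ff0000)); /* prints 24 */
	return 0;
}

The __builtin_constant_p() branch in the patch keeps the benefit of compile-time folding for constant arguments, while runtime values go through the count-leading-zeros instruction, whose zero-input behaviour is well defined by the ISA.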