Diffstat (limited to 'include')
-rw-r--r--  include/asm-alpha/bitops.h     | 37
-rw-r--r--  include/asm-alpha/byteorder.h  |  8
-rw-r--r--  include/asm-alpha/compiler.h   | 54
3 files changed, 53 insertions(+), 46 deletions(-)
diff --git a/include/asm-alpha/bitops.h b/include/asm-alpha/bitops.h
index 1e1e49eccaa7..c2013d1d8a3a 100644
--- a/include/asm-alpha/bitops.h
+++ b/include/asm-alpha/bitops.h
@@ -264,13 +264,11 @@ static inline unsigned long ffz(unsigned long word)
{
#if defined(__alpha_cix__) && defined(__alpha_fix__)
/* Whee. EV67 can calculate it directly. */
- unsigned long result;
- __asm__("cttz %1,%0" : "=r"(result) : "r"(~word));
- return result;
+ return __kernel_cttz(~word);
#else
unsigned long bits, qofs, bofs;
- __asm__("cmpbge %1,%2,%0" : "=r"(bits) : "r"(word), "r"(~0UL));
+ bits = __kernel_cmpbge(word, ~0UL);
qofs = ffz_b(bits);
bits = __kernel_extbl(word, qofs);
bofs = ffz_b(bits);
@@ -286,13 +284,11 @@ static inline unsigned long __ffs(unsigned long word)
{
#if defined(__alpha_cix__) && defined(__alpha_fix__)
/* Whee. EV67 can calculate it directly. */
- unsigned long result;
- __asm__("cttz %1,%0" : "=r"(result) : "r"(word));
- return result;
+ return __kernel_cttz(word);
#else
unsigned long bits, qofs, bofs;
- __asm__("cmpbge $31,%1,%0" : "=r"(bits) : "r"(word));
+ bits = __kernel_cmpbge(word, 0);
qofs = ffz_b(bits);
bits = __kernel_extbl(word, qofs);
bofs = ffz_b(~bits);
@@ -311,8 +307,8 @@ static inline unsigned long __ffs(unsigned long word)
static inline int ffs(int word)
{
- int result = __ffs(word);
- return word ? result+1 : 0;
+ int result = __ffs(word) + 1;
+ return word ? result : 0;
}
/*
@@ -321,9 +317,7 @@ static inline int ffs(int word)
#if defined(__alpha_cix__) && defined(__alpha_fix__)
static inline int fls(int word)
{
- long result;
- __asm__("ctlz %1,%0" : "=r"(result) : "r"(word & 0xffffffff));
- return 64 - result;
+ return 64 - __kernel_ctlz(word & 0xffffffff);
}
#else
#define fls generic_fls
@@ -332,11 +326,10 @@ static inline int fls(int word)
/* Compute powers of two for the given integer. */
static inline int floor_log2(unsigned long word)
{
- long bit;
#if defined(__alpha_cix__) && defined(__alpha_fix__)
- __asm__("ctlz %1,%0" : "=r"(bit) : "r"(word));
- return 63 - bit;
+ return 63 - __kernel_ctlz(word);
#else
+ long bit;
for (bit = -1; word ; bit++)
word >>= 1;
return bit;
@@ -358,9 +351,7 @@ static inline int ceil_log2(unsigned int word)
/* Whee. EV67 can calculate it directly. */
static inline unsigned long hweight64(unsigned long w)
{
- unsigned long result;
- __asm__("ctpop %1,%0" : "=r"(result) : "r"(w));
- return result;
+ return __kernel_ctpop(w);
}
#define hweight32(x) hweight64((x) & 0xfffffffful)
@@ -415,11 +406,11 @@ find_next_zero_bit(void * addr, unsigned long size, unsigned long offset)
if (!size)
return result;
tmp = *p;
-found_first:
+ found_first:
tmp |= ~0UL << size;
if (tmp == ~0UL) /* Are any bits zero? */
return result + size; /* Nope. */
-found_middle:
+ found_middle:
return result + ffz(tmp);
}
@@ -456,11 +447,11 @@ find_next_bit(void * addr, unsigned long size, unsigned long offset)
if (!size)
return result;
tmp = *p;
-found_first:
+ found_first:
tmp &= ~0UL >> (64 - size);
if (!tmp)
return result + size;
-found_middle:
+ found_middle:
return result + __ffs(tmp);
}
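For readers without an Alpha cross-compiler, the cmpbge/extbl-based ffz() fallback above can be modelled in portable C. This is a minimal sketch, not the kernel code: cmpbge_model(), extbl_model() and ffz_b_model() are hypothetical stand-ins for the CMPBGE and EXTBL instructions and for the ffz_b() helper defined earlier in bitops.h. The idea is a two-level search: CMPBGE names the first byte that contains a zero (one result bit per byte), then the zero bit is located inside that byte.

#include <assert.h>

static unsigned long cmpbge_model(unsigned long a, unsigned long b)
{
	/* Bit i of the result is set iff byte i of a >= byte i of b (unsigned). */
	unsigned long mask = 0;
	for (int i = 0; i < 8; i++)
		if (((a >> (i * 8)) & 0xff) >= ((b >> (i * 8)) & 0xff))
			mask |= 1UL << i;
	return mask;
}

static unsigned long extbl_model(unsigned long val, unsigned long idx)
{
	return (val >> ((idx & 7) * 8)) & 0xff;		/* extract byte idx */
}

static unsigned long ffz_b_model(unsigned long b)
{
	unsigned long n = 0;				/* first zero bit in a byte */
	while (b & 1)
		b >>= 1, n++;
	return n;
}

static unsigned long ffz_model(unsigned long word)
{
	/* Byte i of the mask is set iff byte i of word is 0xff, so the first
	 * clear mask bit names the first byte that still has a zero in it. */
	unsigned long bits = cmpbge_model(word, ~0UL);
	unsigned long qofs = ffz_b_model(bits);
	unsigned long bofs = ffz_b_model(extbl_model(word, qofs));
	return qofs * 8 + bofs;
}

int main(void)
{
	assert(ffz_model(0xff) == 8);			/* low byte all ones */
	assert(ffz_model(~0x100UL) == 8);		/* only bit 8 is clear */
	return 0;
}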
diff --git a/include/asm-alpha/byteorder.h b/include/asm-alpha/byteorder.h
index 91b55ea3e754..ddf4e740e72a 100644
--- a/include/asm-alpha/byteorder.h
+++ b/include/asm-alpha/byteorder.h
@@ -2,6 +2,7 @@
#define _ALPHA_BYTEORDER_H
#include <asm/types.h>
+#include <asm/compiler.h>
#ifdef __GNUC__
@@ -23,11 +24,8 @@ static __inline __u32 __attribute__((__const)) __arch__swab32(__u32 x)
__u64 t0, t1, t2, t3;
- __asm__("inslh %1, 7, %0" /* t0 : 0000000000AABBCC */
- : "=r"(t0) : "r"(x));
- __asm__("inswl %1, 3, %0" /* t1 : 000000CCDD000000 */
- : "=r"(t1) : "r"(x));
-
+ t0 = __kernel_inslh(x, 7); /* t0 : 0000000000AABBCC */
+ t1 = __kernel_inswl(x, 3); /* t1 : 000000CCDD000000 */
t1 |= t0; /* t1 : 000000CCDDAABBCC */
t2 = t1 >> 16; /* t2 : 0000000000CCDDAA */
t0 = t1 & 0xFF00FF00; /* t0 : 00000000DD00BB00 */
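The byte-layout comments above can be checked on any host. Below is a minimal, hedged sketch of the swab32 sequence: inslh_7_model() and inswl_3_model() are hypothetical helpers that model only the two specific calls __kernel_inslh(x, 7) and __kernel_inswl(x, 3) used here (keep the top three source bytes; plant the low halfword 24 bits up), not the general INSLH/INSWL semantics.

#include <assert.h>
#include <stdint.h>

static uint64_t inslh_7_model(uint32_t x) { return (uint64_t)x >> 8; }
static uint64_t inswl_3_model(uint32_t x) { return (uint64_t)(x & 0xffff) << 24; }

static uint32_t swab32_model(uint32_t x)	/* x  : AABBCCDD */
{
	uint64_t t0, t1, t2, t3;

	t0 = inslh_7_model(x);			/* t0 : 0000000000AABBCC */
	t1 = inswl_3_model(x);			/* t1 : 000000CCDD000000 */
	t1 |= t0;				/* t1 : 000000CCDDAABBCC */
	t2 = t1 >> 16;				/* t2 : 0000000000CCDDAA */
	t0 = t1 & 0xFF00FF00;			/* t0 : 00000000DD00BB00 */
	t3 = t2 & 0x00FF00FF;			/* t3 : 0000000000CC00AA */
	return (uint32_t)(t0 + t3);		/*      DDCCBBAA */
}

int main(void)
{
	assert(swab32_model(0xAABBCCDD) == 0xDDCCBBAA);
	assert(swab32_model(0x12345678) == 0x78563412);
	return 0;
}

The final mask-and-add interleaves the two halves back into DDCCBBAA, matching a conventional 32-bit byte swap.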
diff --git a/include/asm-alpha/compiler.h b/include/asm-alpha/compiler.h
index 70d6ce7750dc..4d2bf568d9f7 100644
--- a/include/asm-alpha/compiler.h
+++ b/include/asm-alpha/compiler.h
@@ -9,40 +9,58 @@
* these tests and macros.
*/
-#if 0
-#define __kernel_insbl(val, shift) \
- (((unsigned long)(val) & 0xfful) << ((shift) * 8))
-#define __kernel_inswl(val, shift) \
- (((unsigned long)(val) & 0xfffful) << ((shift) * 8))
-#define __kernel_insql(val, shift) \
- ((unsigned long)(val) << ((shift) * 8))
+#if __GNUC__ == 3 && __GNUC_MINOR__ >= 4 || __GNUC__ > 3
+# define __kernel_insbl(val, shift) __builtin_alpha_insbl(val, shift)
+# define __kernel_inswl(val, shift) __builtin_alpha_inswl(val, shift)
+# define __kernel_insql(val, shift) __builtin_alpha_insql(val, shift)
+# define __kernel_inslh(val, shift) __builtin_alpha_inslh(val, shift)
+# define __kernel_extbl(val, shift) __builtin_alpha_extbl(val, shift)
+# define __kernel_extwl(val, shift) __builtin_alpha_extwl(val, shift)
+# define __kernel_cmpbge(a, b) __builtin_alpha_cmpbge(a, b)
+# define __kernel_cttz(x) __builtin_ctzl(x)
+# define __kernel_ctlz(x) __builtin_clzl(x)
+# define __kernel_ctpop(x) __builtin_popcountl(x)
#else
-#define __kernel_insbl(val, shift) \
+# define __kernel_insbl(val, shift) \
({ unsigned long __kir; \
__asm__("insbl %2,%1,%0" : "=r"(__kir) : "rI"(shift), "r"(val)); \
__kir; })
-#define __kernel_inswl(val, shift) \
+# define __kernel_inswl(val, shift) \
({ unsigned long __kir; \
__asm__("inswl %2,%1,%0" : "=r"(__kir) : "rI"(shift), "r"(val)); \
__kir; })
-#define __kernel_insql(val, shift) \
+# define __kernel_insql(val, shift) \
({ unsigned long __kir; \
__asm__("insql %2,%1,%0" : "=r"(__kir) : "rI"(shift), "r"(val)); \
__kir; })
-#endif
-
-#if 0 && (__GNUC__ > 2 || __GNUC_MINOR__ >= 92)
-#define __kernel_extbl(val, shift) (((val) >> (((shift) & 7) * 8)) & 0xfful)
-#define __kernel_extwl(val, shift) (((val) >> (((shift) & 7) * 8)) & 0xfffful)
-#else
-#define __kernel_extbl(val, shift) \
+# define __kernel_inslh(val, shift) \
+ ({ unsigned long __kir; \
+ __asm__("inslh %2,%1,%0" : "=r"(__kir) : "rI"(shift), "r"(val)); \
+ __kir; })
+# define __kernel_extbl(val, shift) \
({ unsigned long __kir; \
__asm__("extbl %2,%1,%0" : "=r"(__kir) : "rI"(shift), "r"(val)); \
__kir; })
-#define __kernel_extwl(val, shift) \
+# define __kernel_extwl(val, shift) \
({ unsigned long __kir; \
__asm__("extwl %2,%1,%0" : "=r"(__kir) : "rI"(shift), "r"(val)); \
__kir; })
+# define __kernel_cmpbge(a, b) \
+ ({ unsigned long __kir; \
+ __asm__("cmpbge %r2,%1,%0" : "=r"(__kir) : "rI"(b), "rJ"(val)); \
+ __kir; })
+# define __kernel_cttz(x) \
+ ({ unsigned long __kir; \
+ __asm__("cttz %1,%0" : "=r"(__kir) : "r"(x)); \
+ __kir; })
+# define __kernel_ctlz(x) \
+ ({ unsigned long __kir; \
+ __asm__("ctlz %1,%0" : "=r"(__kir) : "r"(x)); \
+ __kir; })
+# define __kernel_ctpop(x) \
+ ({ unsigned long __kir; \
+ __asm__("ctpop %1,%0" : "=r"(__kir) : "r"(x)); \
+ __kir; })
#endif
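One note on the builtin mapping above: callers such as ffz(), hweight64() and floor_log2() pass full 64-bit unsigned long values, so the long-operand builtins are the right fit; the int-sized __builtin_ctz/__builtin_clz/__builtin_popcount would truncate the argument. A quick host-side sanity check (a sketch, assuming a 64-bit unsigned long as on Alpha):

#include <assert.h>

int main(void)
{
	unsigned long w = 1UL << 40;		/* a bit above the low 32 bits */

	assert(__builtin_ctzl(w) == 40);	/* counts over all 64 bits */
	assert(__builtin_clzl(1UL) == 63);
	assert(__builtin_popcountl(~0UL) == 64);
	return 0;
}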