PERFORCE change 72648 for review
John Baldwin
jhb at FreeBSD.org
Mon Mar 7 11:43:09 PST 2005
http://perforce.freebsd.org/chv.cgi?CH=72648
Change 72648 by jhb at jhb_slimer on 2005/03/07 19:42:43
- Make MPLOCKED a string and stop using __XSTRING().
- Add MP[LS]FENCE macros that expand to fence instructions on SMP and to
  empty strings on UP, a la MPLOCKED, for use in memory barriers.
- Add real acq and rel variants of the atomic ops that use MP[LS]FENCE
  (see the sketch following this list).  The acq variants also clobber
  "memory".
- Add *dst as an output to atomic_cmpset() (before it was only an input).
- Now that the fence handling is abstracted via macros, collapse the
  largely identical UP and SMP load/store primitives.
- Add the addr memory operand as an output to atomic_readandclear()
  (before it was only an input).
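For reference, here is a rough, hand-expanded sketch (not compiler
output) of what ATOMIC_ASM(add, int, "addl %1,%0", "ir", v) is meant to
generate for the acq variant on an SMP kernel once MPLOCKED and MPLFENCE
are plain string literals; the function name follows the ##TYPE##_acq
spelling used in the diff:

static __inline void
atomic_add_int_acq(volatile u_int *p, u_int v)
{
	/* "lfence ; " "lock ; " "addl %1,%0" concatenate into one template. */
	__asm __volatile("lfence ; lock ; addl %1,%0"
	    : "=m" (*p)			/* 0: target word (output) */
	    : "ir" (v), "m" (*p)	/* 1: addend, 2: target word (input) */
	    : "memory");		/* acq also acts as a compiler barrier */
}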
Affected files ...
.. //depot/projects/smpng/sys/amd64/include/atomic.h#7 edit
Differences ...
==== //depot/projects/smpng/sys/amd64/include/atomic.h#7 (text+ko) ====
@@ -64,8 +64,10 @@
* This allows kernel modules to be portable between UP and SMP systems.
*/
#if defined(KLD_MODULE)
-#define ATOMIC_ASM(NAME, TYPE, OP, CONS, V) \
-void atomic_##NAME##_##TYPE(volatile u_##TYPE *p, u_##TYPE v)
+#define ATOMIC_ASM(NAME, TYPE, OP, CONS, V) \
+void atomic_##NAME##_##TYPE(volatile u_##TYPE *p, u_##TYPE v); \
+void atomic_##NAME##_##TYPE##_acq(volatile u_##TYPE *p, u_##TYPE v); \
+void atomic_##NAME##_##TYPE##_rel(volatile u_##TYPE *p, u_##TYPE v)
int atomic_cmpset_int(volatile u_int *dst, u_int exp, u_int src);
int atomic_cmpset_long(volatile u_long *dst, u_long exp, u_long src);
@@ -83,9 +85,13 @@
* the binaries will run on both types of systems.
*/
#if defined(SMP) || !defined(_KERNEL)
-#define MPLOCKED lock ;
+#define MPLOCKED "lock ; "
+#define MPLFENCE "lfence ; "
+#define MPSFENCE "sfence ; "
#else
-#define MPLOCKED
+#define MPLOCKED ""
+#define MPLFENCE ""
+#define MPSFENCE ""
#endif
/*
@@ -96,9 +102,26 @@
static __inline void \
atomic_##NAME##_##TYPE(volatile u_##TYPE *p, u_##TYPE v)\
{ \
- __asm __volatile(__XSTRING(MPLOCKED) OP \
- : "+m" (*p) \
- : CONS (V)); \
+ __asm __volatile(MPLOCKED OP \
+ : "=m" (*p) \
+ : CONS (V), "m" (*p)); \
+} \
+ \
+static __inline void \
+atomic_##NAME##_##TYPE##_acq(volatile u_##TYPE *p, u_##TYPE v)\
+{ \
+ __asm __volatile(MPLFENCE MPLOCKED OP \
+ : "=m" (*p) \
+ : CONS (V), "m" (*p) \
+ : "memory"); \
+} \
+ \
+static __inline void \
+atomic_##NAME##_##TYPE##_rel(volatile u_##TYPE *p, u_##TYPE v)\
+{ \
+ __asm __volatile(MPSFENCE MPLOCKED OP \
+ : "=m" (*p) \
+ : CONS (V), "m" (*p)); \
} \
struct __hack
@@ -125,16 +148,16 @@
int res = exp;
__asm __volatile (
- " " __XSTRING(MPLOCKED) " "
- " cmpxchgl %1,%2 ; "
+ " " MPLOCKED " "
+ " cmpxchgl %2,%1 ; "
" setz %%al ; "
" movzbl %%al,%0 ; "
"1: "
"# atomic_cmpset_int"
- : "+a" (res) /* 0 (result) */
- : "r" (src), /* 1 */
- "m" (*(dst)) /* 2 */
- : "memory");
+ : "+a" (res), /* 0 (result) */
+ "=m" (*dst) /* 1 */
+ : "r" (src), /* 2 */
+ "m" (*dst)); /* 3 */
return (res);
}
@@ -145,44 +168,36 @@
long res = exp;
__asm __volatile (
- " " __XSTRING(MPLOCKED) " "
- " cmpxchgq %1,%2 ; "
+ " " MPLOCKED " "
+ " cmpxchgq %2,%1 ; "
" setz %%al ; "
" movzbq %%al,%0 ; "
"1: "
"# atomic_cmpset_long"
- : "+a" (res) /* 0 (result) */
- : "r" (src), /* 1 */
- "m" (*(dst)) /* 2 */
- : "memory");
+ : "+a" (res), /* 0 (result) */
+ "=m" (*dst) /* 1 */
+ : "r" (src), /* 2 */
+ "m" (*dst)); /* 3 */
return (res);
}
-#endif /* defined(__GNUC__) */
-#if defined(__GNUC__)
-
-#if defined(_KERNEL) && !defined(SMP)
-
-#define ATOMIC_STORE_LOAD(TYPE) \
-static __inline u_##TYPE \
-atomic_load_acq_##TYPE(volatile u_##TYPE *p) \
+#define ATOMIC_CMPSET(TYPE) \
+static __inline int \
+atomic_cmpset_acq_##TYPE(volatile u_##TYPE *dst, u_##TYPE exp, u_##TYPE src)\
{ \
- u_##TYPE v; \
- \
- v = *p; \
- return (v); \
+ __asm __volatile(MPLFENCE ::: "memory"); \
+ return (atomic_cmpset_##TYPE(dst, exp, src)); \
} \
\
-static __inline void \
-atomic_store_rel_##TYPE(volatile u_##TYPE *p, u_##TYPE v)\
+static __inline int \
+atomic_cmpset_rel_##TYPE(volatile u_##TYPE *dst, u_##TYPE exp, u_##TYPE src)\
{ \
- *p = v; \
+ __asm __volatile(MPSFENCE); \
+ return (atomic_cmpset_##TYPE(dst, exp, src)); \
} \
struct __hack
-#else /* defined(SMP) */
-
#define ATOMIC_STORE_LOAD(TYPE) \
static __inline u_##TYPE \
atomic_load_acq_##TYPE(volatile u_##TYPE *p) \
@@ -190,25 +205,29 @@
u_##TYPE v; \
\
v = *p; \
- __asm __volatile("lfence" ::: "memory"); \
+ __asm __volatile(MPLFENCE ::: "memory"); \
return (v); \
} \
\
static __inline void \
atomic_store_rel_##TYPE(volatile u_##TYPE *p, u_##TYPE v)\
{ \
- __asm __volatile("sfence" ::: "memory"); \
+ __asm __volatile(MPSFENCE); \
*p = v; \
} \
struct __hack
-#endif /* !define(SMP) */
-
#else /* !defined(__GNUC__) */
extern int atomic_cmpset_int(volatile u_int *, u_int, u_int);
extern int atomic_cmpset_long(volatile u_long *, u_long, u_long);
+#define ATOMIC_CMPSET(TYPE) \
+extern int atomic_cmpset_acq_##TYPE(volatile u_##TYPE *dst, u_##TYPE exp,\
+ u_##TYPE src); \
+extern int atomic_cmpset_rel_##TYPE(volatile u_##TYPE *dst, u_##TYPE exp,\
+ u_##TYPE src)
+
#define ATOMIC_STORE_LOAD(TYPE) \
extern u_##TYPE atomic_load_acq_##TYPE(volatile u_##TYPE *p); \
extern void atomic_store_rel_##TYPE(volatile u_##TYPE *p, u_##TYPE v)
@@ -237,6 +256,9 @@
ATOMIC_ASM(add, long, "addq %1,%0", "ir", v);
ATOMIC_ASM(subtract, long, "subq %1,%0", "ir", v);
+ATOMIC_CMPSET(int);
+ATOMIC_CMPSET(long);
+
ATOMIC_STORE_LOAD(char);
ATOMIC_STORE_LOAD(short);
ATOMIC_STORE_LOAD(int);
@@ -245,47 +267,6 @@
#undef ATOMIC_ASM
#undef ATOMIC_STORE_LOAD
-#define atomic_set_acq_char atomic_set_char
-#define atomic_set_rel_char atomic_set_char
-#define atomic_clear_acq_char atomic_clear_char
-#define atomic_clear_rel_char atomic_clear_char
-#define atomic_add_acq_char atomic_add_char
-#define atomic_add_rel_char atomic_add_char
-#define atomic_subtract_acq_char atomic_subtract_char
-#define atomic_subtract_rel_char atomic_subtract_char
-
-#define atomic_set_acq_short atomic_set_short
-#define atomic_set_rel_short atomic_set_short
-#define atomic_clear_acq_short atomic_clear_short
-#define atomic_clear_rel_short atomic_clear_short
-#define atomic_add_acq_short atomic_add_short
-#define atomic_add_rel_short atomic_add_short
-#define atomic_subtract_acq_short atomic_subtract_short
-#define atomic_subtract_rel_short atomic_subtract_short
-
-#define atomic_set_acq_int atomic_set_int
-#define atomic_set_rel_int atomic_set_int
-#define atomic_clear_acq_int atomic_clear_int
-#define atomic_clear_rel_int atomic_clear_int
-#define atomic_add_acq_int atomic_add_int
-#define atomic_add_rel_int atomic_add_int
-#define atomic_subtract_acq_int atomic_subtract_int
-#define atomic_subtract_rel_int atomic_subtract_int
-#define atomic_cmpset_acq_int atomic_cmpset_int
-#define atomic_cmpset_rel_int atomic_cmpset_int
-
-#define atomic_set_acq_long atomic_set_long
-#define atomic_set_rel_long atomic_set_long
-#define atomic_clear_acq_long atomic_clear_long
-#define atomic_clear_rel_long atomic_clear_long
-#define atomic_add_acq_long atomic_add_long
-#define atomic_add_rel_long atomic_add_long
-#define atomic_subtract_acq_long atomic_subtract_long
-#define atomic_subtract_rel_long atomic_subtract_long
-
-#define atomic_cmpset_acq_ptr atomic_cmpset_ptr
-#define atomic_cmpset_rel_ptr atomic_cmpset_ptr
-
#define atomic_set_8 atomic_set_char
#define atomic_set_acq_8 atomic_set_acq_char
#define atomic_set_rel_8 atomic_set_rel_char
@@ -344,6 +325,22 @@
(u_long)exp, (u_long)src));
}
+static __inline int
+atomic_cmpset_acq_ptr(volatile void *dst, void *exp, void *src)
+{
+
+ return (atomic_cmpset_acq_long((volatile u_long *)dst,
+ (u_long)exp, (u_long)src));
+}
+
+static __inline int
+atomic_cmpset_rel_ptr(volatile void *dst, void *exp, void *src)
+{
+
+ return (atomic_cmpset_rel_long((volatile u_long *)dst,
+ (u_long)exp, (u_long)src));
+}
+
static __inline void *
atomic_load_acq_ptr(volatile void *p)
{
@@ -397,8 +394,9 @@
" xorl %0,%0 ; "
" xchgl %1,%0 ; "
"# atomic_readandclear_int"
- : "=&r" (result) /* 0 (result) */
- : "m" (*addr)); /* 1 (addr) */
+ : "=&r" (result), /* 0 (result) */
+ "=m" (*addr) /* 1 (addr) */
+ : "m" (*addr)); /* 2 (addr) */
return (result);
}
@@ -412,8 +410,9 @@
" xorq %0,%0 ; "
" xchgq %1,%0 ; "
"# atomic_readandclear_int"
- : "=&r" (result) /* 0 (result) */
- : "m" (*addr)); /* 1 (addr) */
+ : "=&r" (result), /* 0 (result) */
+ "=m" (*addr) /* 1 (addr) */
+ : "m" (*addr)); /* 2 (addr) */
return (result);
}
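As a usage sketch only (not part of this change), the acq/rel primitives
are intended for lock-like code along the following lines; the
my_spinlock type and function names are hypothetical and assume
<sys/types.h> and <machine/atomic.h>:

#include <sys/types.h>
#include <machine/atomic.h>

/* Hypothetical toy lock, for illustration only. */
struct my_spinlock {
	volatile u_int	ms_owned;	/* 0 == free, 1 == held */
};

static __inline void
my_spin_lock(struct my_spinlock *m)
{

	/* Acquire: later loads/stores may not be reordered above this. */
	while (atomic_cmpset_acq_int(&m->ms_owned, 0, 1) == 0)
		;	/* spin until the lock is free */
}

static __inline void
my_spin_unlock(struct my_spinlock *m)
{

	/* Release: earlier loads/stores may not be reordered below this. */
	atomic_store_rel_int(&m->ms_owned, 0);
}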