svn commit: r234581 - in head/sys/powerpc: aim include

Nathan Whitehorn nwhitehorn at FreeBSD.org
Sun Apr 22 19:00:52 UTC 2012


Author: nwhitehorn
Date: Sun Apr 22 19:00:51 2012
New Revision: 234581
URL: http://svn.freebsd.org/changeset/base/234581

Log:
  Use lwsync instead of sync to provide memory barriers on systems that
  support it (on systems that do not, lwsync is an alternate encoding of
  sync, so it falls back gracefully). This reduces the time required to
  acquire or release a mutex by more than an order of magnitude.
  
  MFC after:	2 months
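
As a rough illustration of why this matters for mutexes, here is a toy
spin lock built directly on the acquire/release primitives touched
below (toy_lock, toy_unlock and lockword are illustrative names only;
real kernel code uses mtx(9)). Every lock and unlock operation executes
exactly one memory barrier, which this commit turns from a full sync
into the much cheaper lwsync:

    #include <sys/types.h>
    #include <machine/atomic.h>

    static volatile u_int lockword;     /* 0 = free, 1 = held */

    static void
    toy_lock(void)
    {
            /* Spin until the compare-and-swap succeeds; the acquire
             * barrier that follows the CAS keeps loads and stores in
             * the critical section from being performed early. */
            while (atomic_cmpset_acq_int(&lockword, 0, 1) == 0)
                    ;
    }

    static void
    toy_unlock(void)
    {
            /* The release barrier before the store makes every write
             * from the critical section visible before the lock is
             * seen as free again. */
            atomic_store_rel_int(&lockword, 0);
    }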

Modified:
  head/sys/powerpc/aim/slb.c
  head/sys/powerpc/include/atomic.h

Modified: head/sys/powerpc/aim/slb.c
==============================================================================
--- head/sys/powerpc/aim/slb.c	Sun Apr 22 18:56:56 2012	(r234580)
+++ head/sys/powerpc/aim/slb.c	Sun Apr 22 19:00:51 2012	(r234581)
@@ -139,7 +139,7 @@ make_new_leaf(uint64_t esid, uint64_t sl
 	 * that a lockless searcher always sees a valid path through
 	 * the tree.
 	 */
-	powerpc_sync();
+	mb();
 
 	idx = esid2idx(esid, parent->ua_level);
 	parent->u.ua_child[idx] = child;
@@ -187,7 +187,7 @@ make_intermediate(uint64_t esid, struct 
 	idx = esid2idx(child->ua_base, inter->ua_level);
 	inter->u.ua_child[idx] = child;
 	setbit(&inter->ua_alloc, idx);
-	powerpc_sync();
+	mb();
 
 	/* Set up parent to point to intermediate node ... */
 	idx = esid2idx(inter->ua_base, parent->ua_level);

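The slb.c hunks rely on store-store ordering, which lwsync also provides:
the new node must be fully initialized before the store that links it
into its parent, so a lockless searcher that sees the link also sees
valid contents. A minimal sketch of that publication pattern, using
illustrative names (node, publish_child) rather than the kernel's SLB
tree types, with mb() as redefined in atomic.h below:

    #include <sys/types.h>
    #include <stdint.h>
    #include <machine/atomic.h>         /* mb() */

    struct node {
            uint64_t         key;
            struct node     *child[16];
    };

    static void
    publish_child(struct node *parent, int idx, struct node *child,
        uint64_t key)
    {
            child->key = key;           /* initialize the new node first */
            mb();                       /* lwsync: initializing stores are
                                         * ordered before the linking store */
            parent->child[idx] = child; /* now safe for lockless readers */
    }
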
Modified: head/sys/powerpc/include/atomic.h
==============================================================================
--- head/sys/powerpc/include/atomic.h	Sun Apr 22 18:56:56 2012	(r234580)
+++ head/sys/powerpc/include/atomic.h	Sun Apr 22 19:00:51 2012	(r234581)
@@ -36,12 +36,10 @@
 #error this file needs sys/cdefs.h as a prerequisite
 #endif
 
-#define	__ATOMIC_BARRIER					\
-    __asm __volatile("sync" : : : "memory")
-
-#define mb()	__ATOMIC_BARRIER
-#define	wmb()	mb()
-#define	rmb()	mb()
+/* NOTE: lwsync is equivalent to sync on systems without lwsync */
+#define mb()	__asm __volatile("lwsync" : : : "memory")
+#define wmb()	__asm __volatile("lwsync" : : : "memory")
+#define rmb()	__asm __volatile("lwsync" : : : "memory")
 
 /*
  * atomic_add(p, v)
@@ -94,13 +92,13 @@
     atomic_add_acq_##type(volatile u_##type *p, u_##type v) {	\
 	u_##type t;						\
 	__atomic_add_##type(p, v, t);				\
-	__ATOMIC_BARRIER;					\
+	rmb();							\
     }								\
 								\
     static __inline void					\
     atomic_add_rel_##type(volatile u_##type *p, u_##type v) {	\
 	u_##type t;						\
-	__ATOMIC_BARRIER;					\
+	wmb();							\
 	__atomic_add_##type(p, v, t);				\
     }								\
     /* _ATOMIC_ADD */
@@ -180,13 +178,13 @@ _ATOMIC_ADD(long)
     atomic_clear_acq_##type(volatile u_##type *p, u_##type v) {	\
 	u_##type t;						\
 	__atomic_clear_##type(p, v, t);				\
-	__ATOMIC_BARRIER;					\
+	rmb();							\
     }								\
 								\
     static __inline void					\
     atomic_clear_rel_##type(volatile u_##type *p, u_##type v) {	\
 	u_##type t;						\
-	__ATOMIC_BARRIER;					\
+	wmb();							\
 	__atomic_clear_##type(p, v, t);				\
     }								\
     /* _ATOMIC_CLEAR */
@@ -282,13 +280,13 @@ _ATOMIC_CLEAR(long)
     atomic_set_acq_##type(volatile u_##type *p, u_##type v) {	\
 	u_##type t;						\
 	__atomic_set_##type(p, v, t);				\
-	__ATOMIC_BARRIER;					\
+	rmb();							\
     }								\
 								\
     static __inline void					\
     atomic_set_rel_##type(volatile u_##type *p, u_##type v) {	\
 	u_##type t;						\
-	__ATOMIC_BARRIER;					\
+	wmb();							\
 	__atomic_set_##type(p, v, t);				\
     }								\
     /* _ATOMIC_SET */
@@ -368,13 +366,13 @@ _ATOMIC_SET(long)
     atomic_subtract_acq_##type(volatile u_##type *p, u_##type v) {	\
 	u_##type t;							\
 	__atomic_subtract_##type(p, v, t);				\
-	__ATOMIC_BARRIER;						\
+	rmb();								\
     }									\
 									\
     static __inline void						\
     atomic_subtract_rel_##type(volatile u_##type *p, u_##type v) {	\
 	u_##type t;							\
-	__ATOMIC_BARRIER;						\
+	wmb();								\
 	__atomic_subtract_##type(p, v, t);				\
     }									\
     /* _ATOMIC_SUBTRACT */
@@ -481,14 +479,14 @@ atomic_load_acq_##TYPE(volatile u_##TYPE
 	u_##TYPE v;						\
 								\
 	v = *p;							\
-	__ATOMIC_BARRIER;					\
+	rmb();							\
 	return (v);						\
 }								\
 								\
 static __inline void						\
 atomic_store_rel_##TYPE(volatile u_##TYPE *p, u_##TYPE v)	\
 {								\
-	__ATOMIC_BARRIER;					\
+	wmb();							\
 	*p = v;							\
 }
 
@@ -598,14 +596,14 @@ atomic_cmpset_acq_int(volatile u_int *p,
 	int retval;
 
 	retval = atomic_cmpset_int(p, cmpval, newval);
-	__ATOMIC_BARRIER;
+	rmb();
 	return (retval);
 }
 
 static __inline int
 atomic_cmpset_rel_int(volatile u_int *p, u_int cmpval, u_int newval)
 {
-	__ATOMIC_BARRIER;
+	wmb();
 	return (atomic_cmpset_int(p, cmpval, newval));
 }
 
@@ -615,14 +613,14 @@ atomic_cmpset_acq_long(volatile u_long *
 	u_long retval;
 
 	retval = atomic_cmpset_long(p, cmpval, newval);
-	__ATOMIC_BARRIER;
+	rmb();
 	return (retval);
 }
 
 static __inline int
 atomic_cmpset_rel_long(volatile u_long *p, u_long cmpval, u_long newval)
 {
-	__ATOMIC_BARRIER;
+	wmb();
 	return (atomic_cmpset_long(p, cmpval, newval));
 }
 

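A hedged note on the "alternate encoding" fallback mentioned in the log:
lwsync is the sync opcode with its L field set to 1 (0x7c2004ac, versus
0x7c0004ac for plain sync), and implementations without the lightweight
form simply execute it as a full sync, so the new macros stay correct
everywhere. On toolchains whose assembler predates the mnemonic, the
same instruction can be emitted as raw machine code:

    /* Same instruction as "lwsync", spelled out for old assemblers. */
    #define lwsync_raw()    __asm __volatile(".long 0x7c2004ac" : : : "memory")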