svn commit: r328515 - in stable/11/sys: conf contrib/ck contrib/ck/include contrib/ck/include/gcc contrib/ck/include/gcc/aarch64 contrib/ck/include/gcc/arm contrib/ck/include/gcc/ppc64 contrib/ck/i...

Olivier Houchard cognet at FreeBSD.org
Sun Jan 28 18:38:18 UTC 2018


Author: cognet
Date: Sun Jan 28 18:38:17 2018
New Revision: 328515
URL: https://svnweb.freebsd.org/changeset/base/328515

Log:
  MFC revisions r309268, r309260, r309264, r309266, r309267, r309270, r310846,
  r314435, r314564, r316665, r316691, r316702.
  These revisions import Concurrency Kit into the FreeBSD kernel.
  A few people have shown interest in this being MFC'd, so here we go.

Added:
  stable/11/sys/contrib/ck/
     - copied from r309266, head/sys/contrib/ck/
  stable/11/sys/contrib/ck/FREEBSD-Xlist
     - copied, changed from r309270, head/sys/contrib/ck/FREEBSD-Xlist
  stable/11/sys/contrib/ck/include/ck_md.h
     - copied, changed from r309267, head/sys/contrib/ck/include/ck_md.h
  stable/11/sys/contrib/ck/include/gcc/aarch64/ck_pr_llsc.h
     - copied unchanged from r310846, head/sys/contrib/ck/include/gcc/aarch64/ck_pr_llsc.h
  stable/11/sys/contrib/ck/include/gcc/aarch64/ck_pr_lse.h
     - copied unchanged from r310846, head/sys/contrib/ck/include/gcc/aarch64/ck_pr_lse.h
  stable/11/sys/contrib/ck/include/gcc/arm/ck_pr_armv4.h
     - copied unchanged from r309267, head/sys/contrib/ck/include/gcc/arm/ck_pr_armv4.h
Deleted:
  stable/11/sys/contrib/ck/src/Makefile.in
Modified:
  stable/11/sys/conf/files
  stable/11/sys/contrib/ck/include/ck_epoch.h
  stable/11/sys/contrib/ck/include/ck_pr.h
  stable/11/sys/contrib/ck/include/gcc/aarch64/ck_pr.h
  stable/11/sys/contrib/ck/include/gcc/arm/ck_pr.h
  stable/11/sys/contrib/ck/include/gcc/ck_pr.h
  stable/11/sys/contrib/ck/include/gcc/ppc64/ck_pr.h
  stable/11/sys/contrib/ck/include/gcc/x86_64/ck_pr.h
  stable/11/sys/contrib/ck/src/ck_epoch.c
  stable/11/sys/mips/mips/stdatomic.c
Directory Properties:
  stable/11/   (props changed)

Modified: stable/11/sys/conf/files
==============================================================================
--- stable/11/sys/conf/files	Sun Jan 28 18:18:03 2018	(r328514)
+++ stable/11/sys/conf/files	Sun Jan 28 18:38:17 2018	(r328515)
@@ -338,6 +338,17 @@ compat/freebsd32/freebsd32_ioctl.c	optional compat_fre
 compat/freebsd32/freebsd32_misc.c	optional compat_freebsd32
 compat/freebsd32/freebsd32_syscalls.c	optional compat_freebsd32
 compat/freebsd32/freebsd32_sysent.c	optional compat_freebsd32
+contrib/ck/src/ck_array.c				standard compile-with "${NORMAL_C} -I$S/contrib/ck/include"
+contrib/ck/src/ck_barrier_centralized.c			standard compile-with "${NORMAL_C} -I$S/contrib/ck/include"
+contrib/ck/src/ck_barrier_combining.c			standard compile-with "${NORMAL_C} -I$S/contrib/ck/include"
+contrib/ck/src/ck_barrier_dissemination.c		standard compile-with "${NORMAL_C} -I$S/contrib/ck/include"
+contrib/ck/src/ck_barrier_mcs.c				standard compile-with "${NORMAL_C} -I$S/contrib/ck/include"
+contrib/ck/src/ck_barrier_tournament.c			standard compile-with "${NORMAL_C} -I$S/contrib/ck/include"
+contrib/ck/src/ck_epoch.c				standard compile-with "${NORMAL_C} -I$S/contrib/ck/include"
+contrib/ck/src/ck_hp.c					standard compile-with "${NORMAL_C} -I$S/contrib/ck/include"
+contrib/ck/src/ck_hs.c					standard compile-with "${NORMAL_C} -I$S/contrib/ck/include"
+contrib/ck/src/ck_ht.c					standard compile-with "${NORMAL_C} -I$S/contrib/ck/include"
+contrib/ck/src/ck_rhs.c					standard compile-with "${NORMAL_C} -I$S/contrib/ck/include"
 contrib/dev/acpica/common/ahids.c			optional acpi acpi_debug
 contrib/dev/acpica/common/ahuuids.c			optional acpi acpi_debug
 contrib/dev/acpica/components/debugger/dbcmds.c		optional acpi acpi_debug

Copied and modified: stable/11/sys/contrib/ck/FREEBSD-Xlist (from r309270, head/sys/contrib/ck/FREEBSD-Xlist)
==============================================================================
--- head/sys/contrib/ck/FREEBSD-Xlist	Mon Nov 28 21:16:03 2016	(r309270, copy source)
+++ stable/11/sys/contrib/ck/FREEBSD-Xlist	Sun Jan 28 18:38:17 2018	(r328515)
@@ -8,3 +8,4 @@
 */regressions
 */tools
 */include/ck_md.h.in
+*/src/Makefile.in

Modified: stable/11/sys/contrib/ck/include/ck_epoch.h
==============================================================================
--- head/sys/contrib/ck/include/ck_epoch.h	Mon Nov 28 20:27:58 2016	(r309266)
+++ stable/11/sys/contrib/ck/include/ck_epoch.h	Sun Jan 28 18:38:17 2018	(r328515)
@@ -83,6 +83,7 @@ struct ck_epoch_ref {
 };
 
 struct ck_epoch_record {
+	ck_stack_entry_t record_next;
 	struct ck_epoch *global;
 	unsigned int state;
 	unsigned int epoch;
@@ -92,17 +93,16 @@ struct ck_epoch_record {
 	} local CK_CC_CACHELINE;
 	unsigned int n_pending;
 	unsigned int n_peak;
-	unsigned long n_dispatch;
+	unsigned int n_dispatch;
+	void *ct;
 	ck_stack_t pending[CK_EPOCH_LENGTH];
-	ck_stack_entry_t record_next;
 } CK_CC_CACHELINE;
 typedef struct ck_epoch_record ck_epoch_record_t;
 
 struct ck_epoch {
 	unsigned int epoch;
-	char pad[CK_MD_CACHELINE - sizeof(unsigned int)];
-	ck_stack_t records;
 	unsigned int n_free;
+	ck_stack_t records;
 };
 typedef struct ck_epoch ck_epoch_t;
 
@@ -110,8 +110,15 @@ typedef struct ck_epoch ck_epoch_t;
  * Internal functions.
  */
 void _ck_epoch_addref(ck_epoch_record_t *, ck_epoch_section_t *);
-void _ck_epoch_delref(ck_epoch_record_t *, ck_epoch_section_t *);
+bool _ck_epoch_delref(ck_epoch_record_t *, ck_epoch_section_t *);
 
+CK_CC_FORCE_INLINE static void *
+ck_epoch_record_ct(const ck_epoch_record_t *record)
+{
+
+	return ck_pr_load_ptr(&record->ct);
+}
+
 /*
  * Marks the beginning of an epoch-protected section.
  */
@@ -160,9 +167,10 @@ ck_epoch_begin(ck_epoch_record_t *record, ck_epoch_sec
 }
 
 /*
- * Marks the end of an epoch-protected section.
+ * Marks the end of an epoch-protected section. Returns true if no more
+ * sections exist for the caller.
  */
-CK_CC_FORCE_INLINE static void
+CK_CC_FORCE_INLINE static bool
 ck_epoch_end(ck_epoch_record_t *record, ck_epoch_section_t *section)
 {
 
@@ -170,15 +178,19 @@ ck_epoch_end(ck_epoch_record_t *record, ck_epoch_secti
 	ck_pr_store_uint(&record->active, record->active - 1);
 
 	if (section != NULL)
-		_ck_epoch_delref(record, section);
+		return _ck_epoch_delref(record, section);
 
-	return;
+	return record->active == 0;
 }
 
 /*
  * Defers the execution of the function pointed to by the "cb"
  * argument until an epoch counter loop. This allows for a
  * non-blocking deferral.
+ *
+ * We can get away without a fence here due to the monotonic nature
+ * of the epoch counter. Worst case, this will result in some delays
+ * before object destruction.
  */
 CK_CC_FORCE_INLINE static void
 ck_epoch_call(ck_epoch_record_t *record,
@@ -195,13 +207,74 @@ ck_epoch_call(ck_epoch_record_t *record,
 	return;
 }
 
+/*
+ * Same as ck_epoch_call, but allows for records to be shared and is reentrant.
+ */
+CK_CC_FORCE_INLINE static void
+ck_epoch_call_strict(ck_epoch_record_t *record,
+	      ck_epoch_entry_t *entry,
+	      ck_epoch_cb_t *function)
+{
+	struct ck_epoch *epoch = record->global;
+	unsigned int e = ck_pr_load_uint(&epoch->epoch);
+	unsigned int offset = e & (CK_EPOCH_LENGTH - 1);
+
+	ck_pr_inc_uint(&record->n_pending);
+	entry->function = function;
+
+	/* Store fence is implied by push operation. */
+	ck_stack_push_upmc(&record->pending[offset], &entry->stack_entry);
+	return;
+}
+
+/*
+ * This callback is used for synchronize_wait to allow for custom blocking
+ * behavior.
+ */
+typedef void ck_epoch_wait_cb_t(ck_epoch_t *, ck_epoch_record_t *,
+    void *);
+
+/*
+ * Return latest epoch value. This operation provides load ordering.
+ */
+CK_CC_FORCE_INLINE static unsigned int
+ck_epoch_value(const ck_epoch_t *ep)
+{
+
+	ck_pr_fence_load();
+	return ck_pr_load_uint(&ep->epoch);
+}
+
 void ck_epoch_init(ck_epoch_t *);
-ck_epoch_record_t *ck_epoch_recycle(ck_epoch_t *);
-void ck_epoch_register(ck_epoch_t *, ck_epoch_record_t *);
+
+/*
+ * Attempts to recycle an unused epoch record. If one is successfully
+ * allocated, the record context pointer is also updated.
+ */
+ck_epoch_record_t *ck_epoch_recycle(ck_epoch_t *, void *);
+
+/*
+ * Registers an epoch record. An optional context pointer may be passed that
+ * is retrievable with ck_epoch_record_ct.
+ */
+void ck_epoch_register(ck_epoch_t *, ck_epoch_record_t *, void *);
+
+/*
+ * Marks a record as available for re-use by a subsequent recycle operation.
+ * Note that the record cannot be physically destroyed.
+ */
 void ck_epoch_unregister(ck_epoch_record_t *);
+
 bool ck_epoch_poll(ck_epoch_record_t *);
 void ck_epoch_synchronize(ck_epoch_record_t *);
+void ck_epoch_synchronize_wait(ck_epoch_t *, ck_epoch_wait_cb_t *, void *);
 void ck_epoch_barrier(ck_epoch_record_t *);
+void ck_epoch_barrier_wait(ck_epoch_record_t *, ck_epoch_wait_cb_t *, void *);
+
+/*
+ * Reclaim entries associated with a record. This is safe to call only on
+ * the caller's record or records that are using call_strict.
+ */
 void ck_epoch_reclaim(ck_epoch_record_t *);
 
 #endif /* CK_EPOCH_H */
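
For illustration only (not part of this commit's diff), a userspace-flavoured
sketch of the epoch API as imported here.  "struct node" and its epoch_entry
member are invented for the example; real consumers keep one ck_epoch_record_t
per thread rather than the single static record used to keep the sketch short.

#include <stddef.h>
#include <stdlib.h>

#include <ck_epoch.h>

static ck_epoch_t epoch;
static ck_epoch_record_t record;

struct node {
	int value;
	ck_epoch_entry_t epoch_entry;
};

static void
node_destroy(ck_epoch_entry_t *e)
{
	/* Recover the enclosing node and release it. */
	struct node *n;

	n = (struct node *)((char *)e - offsetof(struct node, epoch_entry));
	free(n);
}

static void
setup(void)
{

	ck_epoch_init(&epoch);
	/* The third argument is the new per-record context pointer. */
	ck_epoch_register(&epoch, &record, NULL);
}

static int
reader(struct node *n)
{
	int v;

	ck_epoch_begin(&record, NULL);
	v = n->value;		/* n cannot be reclaimed while we are inside. */
	ck_epoch_end(&record, NULL);
	return (v);
}

static void
retire(struct node *n)
{

	/* Defer node_destroy() until no reader can still hold a reference. */
	ck_epoch_call(&record, &n->epoch_entry, node_destroy);
	(void)ck_epoch_poll(&record);	/* Opportunistically run expired callbacks. */
}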

Copied and modified: stable/11/sys/contrib/ck/include/ck_md.h (from r309267, head/sys/contrib/ck/include/ck_md.h)
==============================================================================
--- head/sys/contrib/ck/include/ck_md.h	Mon Nov 28 20:33:30 2016	(r309267, copy source)
+++ stable/11/sys/contrib/ck/include/ck_md.h	Sun Jan 28 18:38:17 2018	(r328515)
@@ -49,11 +49,15 @@
 #define CK_MD_VMA_BITS_UNKNOWN 
 #endif /* CK_MD_VMA_BITS_UNKNOWN */
 
+#ifndef CK_PR_DISABLE_DOUBLE
+#define CK_PR_DISABLE_DOUBLE
+#endif /* CK_PR_DISABLE_DOUBLE */
+
 #ifndef CK_MD_RMO
 #define CK_MD_RMO
 #endif /* CK_MD_RMO */
 
-#define CK_VERSION "0.5.2"
+#define CK_VERSION "0.6.0"
 #define CK_GIT_SHA ""
 
 /*

Modified: stable/11/sys/contrib/ck/include/ck_pr.h
==============================================================================
--- head/sys/contrib/ck/include/ck_pr.h	Mon Nov 28 20:27:58 2016	(r309266)
+++ stable/11/sys/contrib/ck/include/ck_pr.h	Sun Jan 28 18:38:17 2018	(r328515)
@@ -46,7 +46,11 @@
 #elif defined(__ppc__)
 #include "gcc/ppc/ck_pr.h"
 #elif defined(__arm__)
+#if __ARM_ARCH >= 6
 #include "gcc/arm/ck_pr.h"
+#else
+#include "gcc/arm/ck_pr_armv4.h"
+#endif
 #elif defined(__aarch64__)
 #include "gcc/aarch64/ck_pr.h"
 #elif !defined(__GNUC__)
@@ -169,7 +173,9 @@ ck_pr_rfo(const void *m)
 
 #define ck_pr_store_ptr(DST, VAL) CK_PR_STORE_SAFE((DST), (VAL), ptr)
 #define ck_pr_store_char(DST, VAL) CK_PR_STORE_SAFE((DST), (VAL), char)
+#ifndef CK_PR_DISABLE_DOUBLE
 #define ck_pr_store_double(DST, VAL) CK_PR_STORE_SAFE((DST), (VAL), double)
+#endif
 #define ck_pr_store_uint(DST, VAL) CK_PR_STORE_SAFE((DST), (VAL), uint)
 #define ck_pr_store_int(DST, VAL) CK_PR_STORE_SAFE((DST), (VAL), int)
 #define ck_pr_store_32(DST, VAL) CK_PR_STORE_SAFE((DST), (VAL), 32)
@@ -187,7 +193,9 @@ ck_pr_rfo(const void *m)
 
 #define CK_PR_LOAD_SAFE(SRC, TYPE) ck_pr_md_load_##TYPE((SRC))
 #define ck_pr_load_char(SRC) CK_PR_LOAD_SAFE((SRC), char)
+#ifndef CK_PR_DISABLE_DOUBLE
 #define ck_pr_load_double(SRC) CK_PR_LOAD_SAFE((SRC), double)
+#endif
 #define ck_pr_load_uint(SRC) CK_PR_LOAD_SAFE((SRC), uint)
 #define ck_pr_load_int(SRC) CK_PR_LOAD_SAFE((SRC), int)
 #define ck_pr_load_32(SRC) CK_PR_LOAD_SAFE((SRC), 32)
@@ -275,7 +283,8 @@ CK_PR_BIN_S(or, int, int, |)
 
 #endif /* CK_F_PR_LOAD_INT && CK_F_PR_CAS_INT_VALUE */
 
-#if defined(CK_F_PR_LOAD_DOUBLE) && defined(CK_F_PR_CAS_DOUBLE_VALUE)
+#if defined(CK_F_PR_LOAD_DOUBLE) && defined(CK_F_PR_CAS_DOUBLE_VALUE) && \
+	    !defined(CK_PR_DISABLE_DOUBLE)
 
 #ifndef CK_F_PR_ADD_DOUBLE
 #define CK_F_PR_ADD_DOUBLE
@@ -287,7 +296,7 @@ CK_PR_BIN_S(add, double, double, +)
 CK_PR_BIN_S(sub, double, double, -)
 #endif /* CK_F_PR_SUB_DOUBLE */
 
-#endif /* CK_F_PR_LOAD_DOUBLE && CK_F_PR_CAS_DOUBLE_VALUE */
+#endif /* CK_F_PR_LOAD_DOUBLE && CK_F_PR_CAS_DOUBLE_VALUE && !CK_PR_DISABLE_DOUBLE */
 
 #if defined(CK_F_PR_LOAD_UINT) && defined(CK_F_PR_CAS_UINT_VALUE)
 
@@ -675,7 +684,8 @@ CK_PR_UNARY_Z_S(dec, int, int, -, 1)
 
 #endif /* CK_F_PR_LOAD_INT && CK_F_PR_CAS_INT_VALUE */
 
-#if defined(CK_F_PR_LOAD_DOUBLE) && defined(CK_F_PR_CAS_DOUBLE_VALUE)
+#if defined(CK_F_PR_LOAD_DOUBLE) && defined(CK_F_PR_CAS_DOUBLE_VALUE) && \
+	    !defined(CK_PR_DISABLE_DOUBLE)
 
 #ifndef CK_F_PR_INC_DOUBLE
 #define CK_F_PR_INC_DOUBLE
@@ -687,7 +697,7 @@ CK_PR_UNARY_S(inc, add, double, double)
 CK_PR_UNARY_S(dec, sub, double, double)
 #endif /* CK_F_PR_DEC_DOUBLE */
 
-#endif /* CK_F_PR_LOAD_DOUBLE && CK_F_PR_CAS_DOUBLE_VALUE */
+#endif /* CK_F_PR_LOAD_DOUBLE && CK_F_PR_CAS_DOUBLE_VALUE && !CK_PR_DISABLE_DOUBLE */
 
 #if defined(CK_F_PR_LOAD_UINT) && defined(CK_F_PR_CAS_UINT_VALUE)
 
@@ -914,14 +924,15 @@ CK_PR_N_Z_S(int, int)
 
 #endif /* CK_F_PR_LOAD_INT && CK_F_PR_CAS_INT_VALUE */
 
-#if defined(CK_F_PR_LOAD_DOUBLE) && defined(CK_F_PR_CAS_DOUBLE_VALUE)
+#if defined(CK_F_PR_LOAD_DOUBLE) && defined(CK_F_PR_CAS_DOUBLE_VALUE) && \
+	    !defined(CK_PR_DISABLE_DOUBLE)
 
 #ifndef CK_F_PR_NEG_DOUBLE
 #define CK_F_PR_NEG_DOUBLE
 CK_PR_N_S(neg, double, double, -)
 #endif /* CK_F_PR_NEG_DOUBLE */
 
-#endif /* CK_F_PR_LOAD_DOUBLE && CK_F_PR_CAS_DOUBLE_VALUE */
+#endif /* CK_F_PR_LOAD_DOUBLE && CK_F_PR_CAS_DOUBLE_VALUE && !CK_PR_DISABLE_DOUBLE */
 
 #if defined(CK_F_PR_LOAD_UINT) && defined(CK_F_PR_CAS_UINT_VALUE)
 
@@ -1105,7 +1116,8 @@ CK_PR_FAS_S(int, int)
 
 #endif /* CK_F_PR_LOAD_INT && CK_F_PR_CAS_INT_VALUE */
 
-#if defined(CK_F_PR_LOAD_DOUBLE) && defined(CK_F_PR_CAS_DOUBLE_VALUE)
+#if defined(CK_F_PR_LOAD_DOUBLE) && defined(CK_F_PR_CAS_DOUBLE_VALUE) && \
+	    !defined(CK_PR_DISABLE_DOUBLE)
 
 #ifndef CK_F_PR_FAA_DOUBLE
 #define CK_F_PR_FAA_DOUBLE
@@ -1117,7 +1129,7 @@ CK_PR_FAA_S(double, double)
 CK_PR_FAS_S(double, double)
 #endif /* CK_F_PR_FAS_DOUBLE */
 
-#endif /* CK_F_PR_LOAD_DOUBLE && CK_F_PR_CAS_DOUBLE_VALUE */
+#endif /* CK_F_PR_LOAD_DOUBLE && CK_F_PR_CAS_DOUBLE_VALUE && !CK_PR_DISABLE_DOUBLE */
 
 #if defined(CK_F_PR_LOAD_UINT) && defined(CK_F_PR_CAS_UINT_VALUE)
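
The effect of the CK_PR_DISABLE_DOUBLE guards above is easiest to see from the
consumer side.  A minimal sketch (not part of the diff), assuming ck_pr.h is on
the include path:

#include <ck_pr.h>

static unsigned int packets;

static void
count_packet(void)
{

	ck_pr_inc_uint(&packets);	/* Integer atomics remain available. */
}

#ifndef CK_PR_DISABLE_DOUBLE
/* Compiled out in the kernel build, where ck_md.h now defines the guard. */
static double bytes;

static void
count_bytes(double n)
{

	ck_pr_add_double(&bytes, n);
}
#endif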
 

Modified: stable/11/sys/contrib/ck/include/gcc/aarch64/ck_pr.h
==============================================================================
--- head/sys/contrib/ck/include/gcc/aarch64/ck_pr.h	Mon Nov 28 20:27:58 2016	(r309266)
+++ stable/11/sys/contrib/ck/include/gcc/aarch64/ck_pr.h	Sun Jan 28 18:38:17 2018	(r328515)
@@ -124,7 +124,9 @@ CK_PR_LOAD_S(uint, unsigned int, "ldr")
 CK_PR_LOAD_S(int, int, "ldr")
 CK_PR_LOAD_S(short, short, "ldrh")
 CK_PR_LOAD_S(char, char, "ldrb")
+#ifndef CK_PR_DISABLE_DOUBLE
 CK_PR_LOAD_S_64(double, double, "ldr")
+#endif
 
 #undef CK_PR_LOAD_S
 #undef CK_PR_LOAD_S_64
@@ -167,326 +169,59 @@ CK_PR_STORE_S(uint, unsigned int, "str")
 CK_PR_STORE_S(int, int, "str")
 CK_PR_STORE_S(short, short, "strh")
 CK_PR_STORE_S(char, char, "strb")
+#ifndef CK_PR_DISABLE_DOUBLE
 CK_PR_STORE_S_64(double, double, "str")
+#endif
 
 #undef CK_PR_STORE_S
 #undef CK_PR_STORE_S_64
 #undef CK_PR_STORE
 #undef CK_PR_STORE_64
 
-CK_CC_INLINE static bool
-ck_pr_cas_64_2_value(uint64_t target[2], uint64_t compare[2], uint64_t set[2], uint64_t value[2])
-{
-	uint64_t tmp1, tmp2;
-	__asm__ __volatile__("1:"
-	   		     "ldxp %0, %1, [%4];"
-			     "mov %2, %0;"
-			     "mov %3, %1;"
-			     "eor %0, %0, %5;"
-			     "eor %1, %1, %6;"
-			     "orr %1, %0, %1;"
-			     "mov %w0, #0;"
-			     "cbnz %1, 2f;"
-			     "stxp %w0, %7, %8, [%4];"
-			     "cbnz %w0, 1b;"
-			     "mov %w0, #1;"
-			     "2:"
-			     : "=&r" (tmp1), "=&r" (tmp2), "=&r" (value[0]), "=&r" (value[1]) 
-			     : "r" (target), "r" (compare[0]), "r" (compare[1]), "r" (set[0]), "r" (set[1]));
+#ifdef CK_MD_LSE_ENABLE
+#include "ck_pr_lse.h"
+#else
+#include "ck_pr_llsc.h"
+#endif
 
-	return (tmp1);
-}
+/*
+ * ck_pr_neg_*() functions can only be implemented via LL/SC, as there are no
+ * LSE alternatives.
+ */
+#define CK_PR_NEG(N, M, T, W, R)				\
+        CK_CC_INLINE static void				\
+        ck_pr_neg_##N(M *target)				\
+        {							\
+                T previous = 0;					\
+                T tmp = 0;					\
+                __asm__ __volatile__("1:"			\
+                                     "ldxr" W " %" R "0, [%2];"	\
+                                     "neg %" R "0, %" R "0;"	\
+                                     "stxr" W " %w1, %" R "0, [%2];"	\
+                                     "cbnz %w1, 1b;"		\
+                                        : "=&r" (previous),	\
+                                          "=&r" (tmp)		\
+                                        : "r"   (target)	\
+                                        : "memory", "cc");	\
+                return;						\
+        }
 
-CK_CC_INLINE static bool
-ck_pr_cas_ptr_2_value(void *target, void *compare, void *set, void *value)
-{
-	return (ck_pr_cas_64_2_value(CK_CPP_CAST(uint64_t *, target),
-				   CK_CPP_CAST(uint64_t *, compare),
-				   CK_CPP_CAST(uint64_t *, set),
-				   CK_CPP_CAST(uint64_t *, value)));
-}
+CK_PR_NEG(ptr, void, void *, "", "")
+CK_PR_NEG(64, uint64_t, uint64_t, "", "")
 
-CK_CC_INLINE static bool
-ck_pr_cas_64_2(uint64_t target[2], uint64_t compare[2], uint64_t set[2])
-{
-	uint64_t tmp1, tmp2;
-	__asm__ __volatile__("1:"
-	   		     "ldxp %0, %1, [%2];"
-			     "eor %0, %0, %3;"
-			     "eor %1, %1, %4;"
-			     "orr %1, %0, %1;"
-			     "mov %w0, #0;"
-			     "cbnz %1, 2f;"
-			     "stxp %w0, %5, %6, [%2];"
-			     "cbnz %w0, 1b;"
-			     "mov %w0, #1;"
-			     "2:"
-			     : "=&r" (tmp1), "=&r" (tmp2) 
-			     : "r" (target), "r" (compare[0]), "r" (compare[1]), "r" (set[0]), "r" (set[1]));
+#define CK_PR_NEG_S(S, T, W)					\
+        CK_PR_NEG(S, T, T, W, "w")				\
 
-	return (tmp1);
-}
-CK_CC_INLINE static bool
-ck_pr_cas_ptr_2(void *target, void *compare, void *set)
-{
-	return (ck_pr_cas_64_2(CK_CPP_CAST(uint64_t *, target),
-			     CK_CPP_CAST(uint64_t *, compare),
-			     CK_CPP_CAST(uint64_t *, set)));
-}
+CK_PR_NEG_S(32, uint32_t, "")
+CK_PR_NEG_S(uint, unsigned int, "")
+CK_PR_NEG_S(int, int, "")
+CK_PR_NEG_S(16, uint16_t, "h")
+CK_PR_NEG_S(8, uint8_t, "b")
+CK_PR_NEG_S(short, short, "h")
+CK_PR_NEG_S(char, char, "b")
 
-
-#define CK_PR_CAS(N, M, T, W, R)					\
-	CK_CC_INLINE static bool					\
-	ck_pr_cas_##N##_value(M *target, T compare, T set, M *value)	\
-	{								\
-		T previous;						\
-		T tmp;							\
-		__asm__ __volatile__("1:"				\
-		                     "ldxr" W " %" R "0, [%2];"		\
-		    		     "cmp  %" R "0, %" R "4;"		\
-		    		     "b.ne 2f;"				\
-		                     "stxr" W " %w1, %" R "3, [%2];"	\
-		    		     "cbnz %w1, 1b;"			\
-	    			     "2:"				\
-		    : "=&r" (previous),					\
-		    "=&r" (tmp)						\
-		    : "r"   (target),					\
-		    "r"   (set),					\
-		    "r"   (compare)					\
-		    : "memory", "cc");					\
-		*(T *)value = previous;					\
-		return (previous == compare);				\
-	}								\
-	CK_CC_INLINE static bool					\
-	ck_pr_cas_##N(M *target, T compare, T set)			\
-	{								\
-		T previous;						\
-		T tmp;							\
-		__asm__ __volatile__(					\
-		    		     "1:"				\
-		                     "ldxr" W " %" R "0, [%2];"		\
-		    		     "cmp  %" R "0, %" R "4;"		\
-		    		     "b.ne 2f;"				\
-		                     "stxr" W " %w1, %" R "3, [%2];"	\
-		    		     "cbnz %w1, 1b;"			\
-	    			     "2:"				\
-		    : "=&r" (previous),					\
-		    "=&r" (tmp)						\
-		    : "r"   (target),					\
-		    "r"   (set),					\
-		    "r"   (compare)					\
-		    : "memory", "cc");					\
-		return (previous == compare);				\
-	}
-
-CK_PR_CAS(ptr, void, void *, "", "")
-
-#define CK_PR_CAS_S(N, M, W, R)	CK_PR_CAS(N, M, M, W, R)
-CK_PR_CAS_S(64, uint64_t, "", "")
-CK_PR_CAS_S(double, double, "", "")
-CK_PR_CAS_S(32, uint32_t, "", "w")
-CK_PR_CAS_S(uint, unsigned int, "", "w")
-CK_PR_CAS_S(int, int, "", "w")
-CK_PR_CAS_S(16, uint16_t, "h", "w")
-CK_PR_CAS_S(8, uint8_t, "b", "w")
-CK_PR_CAS_S(short, short, "h", "w")
-CK_PR_CAS_S(char, char, "b", "w")
-
-
-#undef CK_PR_CAS_S
-#undef CK_PR_CAS
-
-#define CK_PR_FAS(N, M, T, W, R)				\
-	CK_CC_INLINE static T					\
-	ck_pr_fas_##N(M *target, T v)				\
-	{							\
-		T previous;					\
-		T tmp;						\
-		__asm__ __volatile__("1:"			\
-				     "ldxr" W " %" R "0, [%2];"	\
-				     "stxr" W " %w1, %" R "3, [%2];"\
-		    		     "cbnz %w1, 1b;"		\
-					: "=&r" (previous),	\
-		    			  "=&r" (tmp) 		\
-					: "r"   (target),	\
-					  "r"   (v)		\
-					: "memory", "cc");	\
-		return (previous);				\
-	}
-
-CK_PR_FAS(64, uint64_t, uint64_t, "", "")
-CK_PR_FAS(32, uint32_t, uint32_t, "", "w")
-CK_PR_FAS(ptr, void, void *, "", "")
-CK_PR_FAS(int, int, int, "", "w")
-CK_PR_FAS(uint, unsigned int, unsigned int, "", "w")
-CK_PR_FAS(16, uint16_t, uint16_t, "h", "w")
-CK_PR_FAS(8, uint8_t, uint8_t, "b", "w")
-CK_PR_FAS(short, short, short, "h", "w")
-CK_PR_FAS(char, char, char, "b", "w")
-
-
-#undef CK_PR_FAS
-
-#define CK_PR_UNARY(O, N, M, T, I, W, R)			\
-	CK_CC_INLINE static void				\
-	ck_pr_##O##_##N(M *target)				\
-	{							\
-		T previous = 0;					\
-		T tmp = 0;					\
-		__asm__ __volatile__("1:"			\
-				     "ldxr" W " %" R "0, [%2];"	\
-				      I ";"			\
-				     "stxr" W " %w1, %" R "0, [%2];"	\
-				     "cbnz %w1, 1b;"		\
-					: "=&r" (previous),	\
-		    			  "=&r" (tmp)		\
-					: "r"   (target)	\
-					: "memory", "cc");	\
-		return;						\
-	}
-
-CK_PR_UNARY(inc, ptr, void, void *, "add %0, %0, #1", "", "")
-CK_PR_UNARY(dec, ptr, void, void *, "sub %0, %0, #1", "", "")
-CK_PR_UNARY(not, ptr, void, void *, "mvn %0, %0", "", "")
-CK_PR_UNARY(neg, ptr, void, void *, "neg %0, %0", "", "")
-CK_PR_UNARY(inc, 64, uint64_t, uint64_t, "add %0, %0, #1", "", "")
-CK_PR_UNARY(dec, 64, uint64_t, uint64_t, "sub %0, %0, #1", "", "")
-CK_PR_UNARY(not, 64, uint64_t, uint64_t, "mvn %0, %0", "", "")
-CK_PR_UNARY(neg, 64, uint64_t, uint64_t, "neg %0, %0", "", "")
-
-#define CK_PR_UNARY_S(S, T, W)					\
-	CK_PR_UNARY(inc, S, T, T, "add %w0, %w0, #1", W, "w")	\
-	CK_PR_UNARY(dec, S, T, T, "sub %w0, %w0, #1", W, "w")	\
-	CK_PR_UNARY(not, S, T, T, "mvn %w0, %w0", W, "w")	\
-	CK_PR_UNARY(neg, S, T, T, "neg %w0, %w0", W, "w")	\
-
-CK_PR_UNARY_S(32, uint32_t, "")
-CK_PR_UNARY_S(uint, unsigned int, "")
-CK_PR_UNARY_S(int, int, "")
-CK_PR_UNARY_S(16, uint16_t, "h")
-CK_PR_UNARY_S(8, uint8_t, "b")
-CK_PR_UNARY_S(short, short, "h")
-CK_PR_UNARY_S(char, char, "b")
-
-#undef CK_PR_UNARY_S
-#undef CK_PR_UNARY
-
-#define CK_PR_BINARY(O, N, M, T, I, W, R)			\
-	CK_CC_INLINE static void				\
-	ck_pr_##O##_##N(M *target, T delta)			\
-	{							\
-		T previous;					\
-		T tmp;						\
-		__asm__ __volatile__("1:"			\
-				     "ldxr" W " %" R "0, [%2];"\
-				      I " %" R "0, %" R "0, %" R "3;"	\
-				     "stxr" W " %w1, %" R "0, [%2];"	\
-				     "cbnz %w1, 1b;"		\
-					: "=&r" (previous),	\
-		    			  "=&r" (tmp)		\
-					: "r"   (target),	\
-					  "r"   (delta)		\
-					: "memory", "cc");	\
-		return;						\
-	}
-
-CK_PR_BINARY(and, ptr, void, uintptr_t, "and", "", "")
-CK_PR_BINARY(add, ptr, void, uintptr_t, "add", "", "")
-CK_PR_BINARY(or, ptr, void, uintptr_t, "orr", "", "")
-CK_PR_BINARY(sub, ptr, void, uintptr_t, "sub", "", "")
-CK_PR_BINARY(xor, ptr, void, uintptr_t, "eor", "", "")
-CK_PR_BINARY(and, 64, uint64_t, uint64_t, "and", "", "")
-CK_PR_BINARY(add, 64, uint64_t, uint64_t, "add", "", "")
-CK_PR_BINARY(or, 64, uint64_t, uint64_t, "orr", "", "")
-CK_PR_BINARY(sub, 64, uint64_t, uint64_t, "sub", "", "")
-CK_PR_BINARY(xor, 64, uint64_t, uint64_t, "eor", "", "")
-
-#define CK_PR_BINARY_S(S, T, W)				\
-	CK_PR_BINARY(and, S, T, T, "and", W, "w")	\
-	CK_PR_BINARY(add, S, T, T, "add", W, "w")	\
-	CK_PR_BINARY(or, S, T, T, "orr", W, "w")	\
-	CK_PR_BINARY(sub, S, T, T, "sub", W, "w")	\
-	CK_PR_BINARY(xor, S, T, T, "eor", W, "w")
-
-CK_PR_BINARY_S(32, uint32_t, "")
-CK_PR_BINARY_S(uint, unsigned int, "")
-CK_PR_BINARY_S(int, int, "")
-CK_PR_BINARY_S(16, uint16_t, "h")
-CK_PR_BINARY_S(8, uint8_t, "b")
-CK_PR_BINARY_S(short, short, "h")
-CK_PR_BINARY_S(char, char, "b")
-
-#undef CK_PR_BINARY_S
-#undef CK_PR_BINARY
-
-CK_CC_INLINE static void *
-ck_pr_faa_ptr(void *target, uintptr_t delta)
-{
-	uintptr_t previous, r, tmp;
-
-	__asm__ __volatile__("1:"
-			     "ldxr %0, [%3];"
-			     "add %1, %4, %0;"
-			     "stxr %w2, %1, [%3];"
-			     "cbnz %w2, 1b;"
-				: "=&r" (previous),
-				  "=&r" (r),
-				  "=&r" (tmp)
-				: "r"   (target),
-				  "r"   (delta)
-				: "memory", "cc");
-
-	return (void *)(previous);
-}
-
-CK_CC_INLINE static uint64_t
-ck_pr_faa_64(uint64_t *target, uint64_t delta)
-{
-        uint64_t previous, r, tmp;
-
-        __asm__ __volatile__("1:"
-                             "ldxr %0, [%3];"
-                             "add %1, %4, %0;"
-                             "stxr %w2, %1, [%3];"
-                             "cbnz %w2, 1b;"
-                                : "=&r" (previous),
-                                  "=&r" (r),
-                                  "=&r" (tmp)
-                                : "r"   (target),
-                                  "r"   (delta)
-                                : "memory", "cc");
-
-        return (previous);
-}
-
-#define CK_PR_FAA(S, T, W)						\
-	CK_CC_INLINE static T						\
-	ck_pr_faa_##S(T *target, T delta)				\
-	{								\
-		T previous, r, tmp;					\
-		__asm__ __volatile__("1:"				\
-				     "ldxr" W " %w0, [%3];"		\
-				     "add %w1, %w4, %w0;"		\
-				     "stxr" W " %w2, %w1, [%3];"	\
-		    		     "cbnz %w2, 1b;"			\
-					: "=&r" (previous),		\
-					  "=&r" (r),			\
-		    			  "=&r" (tmp)			\
-					: "r"   (target),		\
-					  "r"   (delta)			\
-					: "memory", "cc");		\
-		return (previous);					\
-	}
-
-CK_PR_FAA(32, uint32_t, "")
-CK_PR_FAA(uint, unsigned int, "")
-CK_PR_FAA(int, int, "")
-CK_PR_FAA(16, uint16_t, "h")
-CK_PR_FAA(8, uint8_t, "b")
-CK_PR_FAA(short, short, "h")
-CK_PR_FAA(char, char, "b")
-
-#undef CK_PR_FAA
+#undef CK_PR_NEG_S
+#undef CK_PR_NEG
 
 #endif /* CK_PR_AARCH64_H */
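
For reference, an illustrative expansion (not in the diff) of
CK_PR_NEG_S(int, int, ""), i.e. CK_PR_NEG(int, int, int, "", "w"): the W
argument ("") selects the plain ldxr/stxr width suffix and R ("w") selects
w-register operands, yielding a 32-bit LL/SC loop:

CK_CC_INLINE static void
ck_pr_neg_int(int *target)
{
	int previous = 0;
	int tmp = 0;

	__asm__ __volatile__("1:"
			     "ldxr %w0, [%2];"
			     "neg %w0, %w0;"
			     "stxr %w1, %w0, [%2];"
			     "cbnz %w1, 1b;"
				: "=&r" (previous), "=&r" (tmp)
				: "r" (target)
				: "memory", "cc");
	return;
}

Building with CK_MD_LSE_ENABLE instead pulls in ck_pr_lse.h for the CAS/FAA
families, but as the comment above notes, ck_pr_neg_*() always uses the LL/SC
form.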
 

Copied: stable/11/sys/contrib/ck/include/gcc/aarch64/ck_pr_llsc.h (from r310846, head/sys/contrib/ck/include/gcc/aarch64/ck_pr_llsc.h)
==============================================================================
--- /dev/null	00:00:00 1970	(empty, because file is newly added)
+++ stable/11/sys/contrib/ck/include/gcc/aarch64/ck_pr_llsc.h	Sun Jan 28 18:38:17 2018	(r328515, copy of r310846, head/sys/contrib/ck/include/gcc/aarch64/ck_pr_llsc.h)
@@ -0,0 +1,352 @@
+/*
+ * Copyright 2009-2016 Samy Al Bahra.
+ * Copyright 2013-2016 Olivier Houchard.
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ *    notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ *    notice, this list of conditions and the following disclaimer in the
+ *    documentation and/or other materials provided with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
+ * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED.  IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
+ * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+ * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
+ * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
+ * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
+ * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
+ * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
+ * SUCH DAMAGE.
+ */
+
+#ifndef CK_PR_AARCH64_LLSC_H
+#define CK_PR_AARCH64_LLSC_H
+
+#ifndef CK_PR_H
+#error Do not include this file directly, use ck_pr.h
+#endif
+
+CK_CC_INLINE static bool
+ck_pr_cas_64_2_value(uint64_t target[2], uint64_t compare[2], uint64_t set[2], uint64_t value[2])
+{
+        uint64_t tmp1, tmp2;
+
+        __asm__ __volatile__("1:"
+                             "ldxp %0, %1, [%4];"
+                             "mov %2, %0;"
+                             "mov %3, %1;"
+                             "eor %0, %0, %5;"
+                             "eor %1, %1, %6;"
+                             "orr %1, %0, %1;"
+                             "mov %w0, #0;"
+                             "cbnz %1, 2f;"
+                             "stxp %w0, %7, %8, [%4];"
+                             "cbnz %w0, 1b;"
+                             "mov %w0, #1;"
+                             "2:"
+                             : "=&r" (tmp1), "=&r" (tmp2), "=&r" (value[0]), "=&r" (value[1])
+                             : "r" (target), "r" (compare[0]), "r" (compare[1]), "r" (set[0]), "r" (set[1])
+                             : "cc", "memory");
+
+        return (tmp1);
+}
+
+CK_CC_INLINE static bool
+ck_pr_cas_ptr_2_value(void *target, void *compare, void *set, void *value)
+{
+        return (ck_pr_cas_64_2_value(CK_CPP_CAST(uint64_t *, target),
+                                   CK_CPP_CAST(uint64_t *, compare),
+                                   CK_CPP_CAST(uint64_t *, set),
+                                   CK_CPP_CAST(uint64_t *, value)));
+}
+
+CK_CC_INLINE static bool
+ck_pr_cas_64_2(uint64_t target[2], uint64_t compare[2], uint64_t set[2])
+{
+        uint64_t tmp1, tmp2;
+
+        __asm__ __volatile__("1:"
+                             "ldxp %0, %1, [%2];"
+                             "eor %0, %0, %3;"
+                             "eor %1, %1, %4;"
+                             "orr %1, %0, %1;"
+                             "mov %w0, #0;"
+                             "cbnz %1, 2f;"
+                             "stxp %w0, %5, %6, [%2];"
+                             "cbnz %w0, 1b;"
+                             "mov %w0, #1;"
+                             "2:"
+                             : "=&r" (tmp1), "=&r" (tmp2)
+                             : "r" (target), "r" (compare[0]), "r" (compare[1]), "r" (set[0]), "r" (set[1])
+                             : "cc", "memory");
+
+        return (tmp1);
+}
+CK_CC_INLINE static bool
+ck_pr_cas_ptr_2(void *target, void *compare, void *set)
+{
+        return (ck_pr_cas_64_2(CK_CPP_CAST(uint64_t *, target),
+                             CK_CPP_CAST(uint64_t *, compare),
+                             CK_CPP_CAST(uint64_t *, set)));
+}
+
+
+#define CK_PR_CAS(N, M, T, W, R)					\
+        CK_CC_INLINE static bool					\
+        ck_pr_cas_##N##_value(M *target, T compare, T set, M *value)	\
+        {								\
+                T previous;						\
+                T tmp;							\
+                __asm__ __volatile__("1:"				\
+                                     "ldxr" W " %" R "0, [%2];"		\
+                                     "cmp  %" R "0, %" R "4;"		\
+                                     "b.ne 2f;"				\
+                                     "stxr" W " %w1, %" R "3, [%2];"	\
+                                     "cbnz %w1, 1b;"			\
+                                     "2:"				\
+                    : "=&r" (previous),					\
+                    "=&r" (tmp)						\
+                    : "r"   (target),					\
+                    "r"   (set),					\
+                    "r"   (compare)					\
+                    : "memory", "cc");					\
+                *(T *)value = previous;					\
+                return (previous == compare);				\
+        }								\
+        CK_CC_INLINE static bool					\
+        ck_pr_cas_##N(M *target, T compare, T set)			\
+        {								\
+                T previous;						\
+                T tmp;							\
+                __asm__ __volatile__(					\
+                                     "1:"				\
+                                     "ldxr" W " %" R "0, [%2];"		\
+                                     "cmp  %" R "0, %" R "4;"		\
+                                     "b.ne 2f;"				\
+                                     "stxr" W " %w1, %" R "3, [%2];"	\
+                                     "cbnz %w1, 1b;"			\
+                                     "2:"				\
+                    : "=&r" (previous),					\
+                    "=&r" (tmp)						\
+                    : "r"   (target),					\
+                    "r"   (set),					\
+                    "r"   (compare)					\
+                    : "memory", "cc");					\
+                return (previous == compare);				\
+        }
+
+CK_PR_CAS(ptr, void, void *, "", "")
+
+#define CK_PR_CAS_S(N, M, W, R)	CK_PR_CAS(N, M, M, W, R)
+CK_PR_CAS_S(64, uint64_t, "", "")
+#ifndef CK_PR_DISABLE_DOUBLE
+CK_PR_CAS_S(double, double, "", "")
+#endif
+CK_PR_CAS_S(32, uint32_t, "", "w")
+CK_PR_CAS_S(uint, unsigned int, "", "w")
+CK_PR_CAS_S(int, int, "", "w")
+CK_PR_CAS_S(16, uint16_t, "h", "w")
+CK_PR_CAS_S(8, uint8_t, "b", "w")
+CK_PR_CAS_S(short, short, "h", "w")
+CK_PR_CAS_S(char, char, "b", "w")
+
+
+#undef CK_PR_CAS_S
+#undef CK_PR_CAS
+
+#define CK_PR_FAS(N, M, T, W, R)				\
+        CK_CC_INLINE static T					\
+        ck_pr_fas_##N(M *target, T v)				\
+        {							\
+                T previous;					\
+                T tmp;						\
+                __asm__ __volatile__("1:"			\
+                                     "ldxr" W " %" R "0, [%2];"	\
+                                     "stxr" W " %w1, %" R "3, [%2];"\
+                                     "cbnz %w1, 1b;"		\
+                                        : "=&r" (previous),	\
+                                          "=&r" (tmp) 		\
+                                        : "r"   (target),	\
+                                          "r"   (v)		\
+                                        : "memory", "cc");	\
+                return (previous);				\
+        }
+
+CK_PR_FAS(64, uint64_t, uint64_t, "", "")
+CK_PR_FAS(32, uint32_t, uint32_t, "", "w")
+CK_PR_FAS(ptr, void, void *, "", "")
+CK_PR_FAS(int, int, int, "", "w")
+CK_PR_FAS(uint, unsigned int, unsigned int, "", "w")
+CK_PR_FAS(16, uint16_t, uint16_t, "h", "w")
+CK_PR_FAS(8, uint8_t, uint8_t, "b", "w")
+CK_PR_FAS(short, short, short, "h", "w")
+CK_PR_FAS(char, char, char, "b", "w")
+
+
+#undef CK_PR_FAS
+
+#define CK_PR_UNARY(O, N, M, T, I, W, R)			\
+        CK_CC_INLINE static void				\
+        ck_pr_##O##_##N(M *target)				\
+        {							\
+                T previous = 0;					\
+                T tmp = 0;					\
+                __asm__ __volatile__("1:"			\
+                                     "ldxr" W " %" R "0, [%2];"	\
+                                      I ";"			\
+                                     "stxr" W " %w1, %" R "0, [%2];"	\
+                                     "cbnz %w1, 1b;"		\
+                                        : "=&r" (previous),	\
+                                          "=&r" (tmp)		\
+                                        : "r"   (target)	\
+                                        : "memory", "cc");	\
+                return;						\
+        }
+
+CK_PR_UNARY(inc, ptr, void, void *, "add %0, %0, #1", "", "")
+CK_PR_UNARY(dec, ptr, void, void *, "sub %0, %0, #1", "", "")
+CK_PR_UNARY(not, ptr, void, void *, "mvn %0, %0", "", "")
+CK_PR_UNARY(inc, 64, uint64_t, uint64_t, "add %0, %0, #1", "", "")
+CK_PR_UNARY(dec, 64, uint64_t, uint64_t, "sub %0, %0, #1", "", "")
+CK_PR_UNARY(not, 64, uint64_t, uint64_t, "mvn %0, %0", "", "")
+
+#define CK_PR_UNARY_S(S, T, W)					\
+        CK_PR_UNARY(inc, S, T, T, "add %w0, %w0, #1", W, "w")	\
+        CK_PR_UNARY(dec, S, T, T, "sub %w0, %w0, #1", W, "w")	\
+        CK_PR_UNARY(not, S, T, T, "mvn %w0, %w0", W, "w")	\
+
+CK_PR_UNARY_S(32, uint32_t, "")
+CK_PR_UNARY_S(uint, unsigned int, "")
+CK_PR_UNARY_S(int, int, "")
+CK_PR_UNARY_S(16, uint16_t, "h")
+CK_PR_UNARY_S(8, uint8_t, "b")
+CK_PR_UNARY_S(short, short, "h")
+CK_PR_UNARY_S(char, char, "b")
+
+#undef CK_PR_UNARY_S
+#undef CK_PR_UNARY
+
+#define CK_PR_BINARY(O, N, M, T, I, W, R)			\
+        CK_CC_INLINE static void				\
+        ck_pr_##O##_##N(M *target, T delta)			\
+        {							\
+                T previous;					\
+                T tmp;						\
+                __asm__ __volatile__("1:"			\
+                                     "ldxr" W " %" R "0, [%2];"\
+                                      I " %" R "0, %" R "0, %" R "3;"	\
+                                     "stxr" W " %w1, %" R "0, [%2];"	\
+                                     "cbnz %w1, 1b;"		\
+                                        : "=&r" (previous),	\
+                                          "=&r" (tmp)		\
+                                        : "r"   (target),	\
+                                          "r"   (delta)		\
+                                        : "memory", "cc");	\
+                return;						\
+        }
+
+CK_PR_BINARY(and, ptr, void, uintptr_t, "and", "", "")
+CK_PR_BINARY(add, ptr, void, uintptr_t, "add", "", "")
+CK_PR_BINARY(or, ptr, void, uintptr_t, "orr", "", "")
+CK_PR_BINARY(sub, ptr, void, uintptr_t, "sub", "", "")
+CK_PR_BINARY(xor, ptr, void, uintptr_t, "eor", "", "")
+CK_PR_BINARY(and, 64, uint64_t, uint64_t, "and", "", "")
+CK_PR_BINARY(add, 64, uint64_t, uint64_t, "add", "", "")
+CK_PR_BINARY(or, 64, uint64_t, uint64_t, "orr", "", "")
+CK_PR_BINARY(sub, 64, uint64_t, uint64_t, "sub", "", "")
+CK_PR_BINARY(xor, 64, uint64_t, uint64_t, "eor", "", "")
+
+#define CK_PR_BINARY_S(S, T, W)				\
+        CK_PR_BINARY(and, S, T, T, "and", W, "w")	\
+        CK_PR_BINARY(add, S, T, T, "add", W, "w")	\
+        CK_PR_BINARY(or, S, T, T, "orr", W, "w")	\
+        CK_PR_BINARY(sub, S, T, T, "sub", W, "w")	\
+        CK_PR_BINARY(xor, S, T, T, "eor", W, "w")
+
+CK_PR_BINARY_S(32, uint32_t, "")
+CK_PR_BINARY_S(uint, unsigned int, "")
+CK_PR_BINARY_S(int, int, "")
+CK_PR_BINARY_S(16, uint16_t, "h")
+CK_PR_BINARY_S(8, uint8_t, "b")
+CK_PR_BINARY_S(short, short, "h")
+CK_PR_BINARY_S(char, char, "b")
+
+#undef CK_PR_BINARY_S
+#undef CK_PR_BINARY
+
+CK_CC_INLINE static void *
+ck_pr_faa_ptr(void *target, uintptr_t delta)
+{
+        uintptr_t previous, r, tmp;
+
+        __asm__ __volatile__("1:"
+                             "ldxr %0, [%3];"
+                             "add %1, %4, %0;"
+                             "stxr %w2, %1, [%3];"
+                             "cbnz %w2, 1b;"
+                                : "=&r" (previous),
+                                  "=&r" (r),
+                                  "=&r" (tmp)
+                                : "r"   (target),
+                                  "r"   (delta)
+                                : "memory", "cc");
+
+        return (void *)(previous);
+}
+

*** DIFF OUTPUT TRUNCATED AT 1000 LINES ***

