[PATCH 2/4] Change the atomic implementation to C11 stdatomic.h and add a generic stdatomic.h wrapper implementation for supported architectures

WeiY wei.a.yang at gmail.com
Sat Jun 22 13:24:40 UTC 2013
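
For reference, a minimal usage sketch of the new wrappers (hypothetical
caller code; counter, counter_increment and counter_snapshot are only
illustrations, and Atomic_Int32 from genericcpuatomic.h is assumed to be
a C11-compatible atomic 32-bit type as used by the operations below):

    #include <stdint.h>
    #include <rtems/score/genericstdatomicops.h>

    static volatile Atomic_Int32 counter;

    /* Relaxed increment; the previous value is discarded. */
    void counter_increment( void )
    {
      _CPU_Atomic_Fetch_add_32( &counter, 1 );
    }

    /* Acquire load, for when the count gates access to other data. */
    uint32_t counter_snapshot( void )
    {
      return _CPU_Atomic_Load_acq_32( &counter );
    }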


---
 cpukit/score/Makefile.am                           |    1 +
 .../include/rtems/score/genericstdatomicops.h      |  238 ++++++++++++++++++++
 cpukit/score/preinstall.am                         |    4 +
 3 files changed, 243 insertions(+)
 create mode 100644 cpukit/score/include/rtems/score/genericstdatomicops.h

diff --git a/cpukit/score/Makefile.am b/cpukit/score/Makefile.am
index 3f6e686..f246905 100644
--- a/cpukit/score/Makefile.am
+++ b/cpukit/score/Makefile.am
@@ -62,6 +62,7 @@ include_rtems_score_HEADERS += include/rtems/score/cpuopts.h
 include_rtems_score_HEADERS += include/rtems/score/basedefs.h
 include_rtems_score_HEADERS += include/rtems/score/atomic.h
 include_rtems_score_HEADERS += include/rtems/score/genericcpuatomic.h
+include_rtems_score_HEADERS += include/rtems/score/genericstdatomicops.h
 include_rtems_score_HEADERS += include/rtems/score/genericatomicops.h
 
 if HAS_PTHREADS
diff --git a/cpukit/score/include/rtems/score/genericstdatomicops.h b/cpukit/score/include/rtems/score/genericstdatomicops.h
new file mode 100644
index 0000000..b844f8f
--- /dev/null
+++ b/cpukit/score/include/rtems/score/genericstdatomicops.h
@@ -0,0 +1,238 @@
+/**
+ * @file  rtems/score/genericstdatomicops.h
+ *
+ * This include file provides the generic atomic operations for SMP
+ * processors, or for architectures running in SMP mode.  All of the
+ * operations are based on the C11 <stdatomic.h> interface; architectures
+ * supported by <stdatomic.h> will work, others will fail to compile.
+ * SMP mode is assumed to be in effect when the macro "RTEMS_SMP" is
+ * defined.
+ */
+
+/*
+ * COPYRIGHT (c) 2013 Deng Hengyi.
+ *
+ * The license and distribution terms for this file may be
+ * found in the file LICENSE in this distribution or at
+ */
+
+#ifndef _RTEMS_SCORE_GENERAL_STD_ATOMIC_OPS_H
+#define _RTEMS_SCORE_GENERAL_STD_ATOMIC_OPS_H
+
+#include <stdatomic.h>
+#include <rtems/score/genericcpuatomic.h>
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+/**
+ * @defgroup RTEMS generic SMP stdatomic implementation
+ *
+ */
+
+/**@{*/
+
+/**
+ * @brief Atomically load an atomic type value from address @a address.
+ */
+#define ATOMIC_LOAD(NAME, TYPE, R_TYPE, ORDER)                       \
+RTEMS_INLINE_ROUTINE R_TYPE _CPU_Atomic_Load_##NAME(                 \
+  volatile Atomic_##TYPE *address                                    \
+)                                                                    \
+{                                                                    \
+  return atomic_load_explicit(address, ORDER);                       \
+}
+
+ATOMIC_LOAD(int, Int, unsigned int, memory_order_relaxed);
+ATOMIC_LOAD(acq_int, Int, unsigned int, memory_order_acquire);
+ATOMIC_LOAD(long, Long, unsigned long, memory_order_relaxed);
+ATOMIC_LOAD(acq_long, Long, unsigned long, memory_order_acquire);
+ATOMIC_LOAD(ptr, Pointer, uintptr_t, memory_order_relaxed);
+ATOMIC_LOAD(acq_ptr, Pointer, uintptr_t, memory_order_acquire);
+ATOMIC_LOAD(32, Int32, uint32_t, memory_order_relaxed);
+ATOMIC_LOAD(acq_32, Int32, uint32_t, memory_order_acquire);
+ATOMIC_LOAD(64, Int64, uint64_t, memory_order_relaxed);
+ATOMIC_LOAD(acq_64, Int64, uint64_t, memory_order_acquire);
+
+/**
+ * @brief Atomically store an atomic type value @a value into address @a
+ * address.
+ */
+#define ATOMIC_STORE(NAME, TYPE, R_TYPE, ORDER)                      \
+RTEMS_INLINE_ROUTINE void _CPU_Atomic_Store_##NAME(	                 \
+  volatile Atomic_##TYPE *address,                                   \
+  R_TYPE value                                                       \
+)                                                                    \
+{                                                                    \
+  atomic_store_explicit(address, value, ORDER);                      \
+}                                       
+
+ATOMIC_STORE(int, Int, unsigned int, memory_order_relaxed);
+ATOMIC_STORE(rel_int, Int, unsigned int, memory_order_release);
+ATOMIC_STORE(long, Long, unsigned long, memory_order_relaxed);
+ATOMIC_STORE(rel_long, Long, unsigned long, memory_order_release);
+ATOMIC_STORE(ptr, Pointer, uintptr_t, memory_order_relaxed);
+ATOMIC_STORE(rel_ptr, Pointer, uintptr_t, memory_order_release);
+ATOMIC_STORE(32, Int32, uint32_t, memory_order_relaxed);
+ATOMIC_STORE(rel_32, Int32, uint32_t, memory_order_release);
+ATOMIC_STORE(64, Int64, uint64_t, memory_order_relaxed);
+ATOMIC_STORE(rel_64, Int64, uint64_t, memory_order_release);
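+
+/*
+ * Usage sketch (hypothetical producer/consumer code, not part of this
+ * API):  pairing a release store with an acquire load makes the
+ * producer's earlier writes visible to the consumer once it observes
+ * the stored flag:
+ *
+ *   producer:  _CPU_Atomic_Store_rel_32( &ready, 1 );
+ *   consumer:  while ( _CPU_Atomic_Load_acq_32( &ready ) == 0 ) ;
+ */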
+
+/**
+ * @brief Atomically load-add-store an atomic type value @a value into address
+ * @a address.
+ */
+#define ATOMIC_FETCH_ADD(NAME, TYPE, R_TYPE, ORDER)                  \
+RTEMS_INLINE_ROUTINE void _CPU_Atomic_Fetch_add_##NAME(	             \
+  volatile Atomic_##TYPE *address,                                   \
+  R_TYPE value                                                       \
+)                                                                    \
+{                                                                    \
+  atomic_fetch_add_explicit(address, value, ORDER);                  \
+}                                                                    
+
+ATOMIC_FETCH_ADD(int, Int, unsigned int, memory_order_relaxed);
+ATOMIC_FETCH_ADD(acq_int, Int, unsigned int, memory_order_acquire);
+ATOMIC_FETCH_ADD(rel_int, Int, unsigned int, memory_order_release);
+ATOMIC_FETCH_ADD(long, Long, unsigned long, memory_order_relaxed);
+ATOMIC_FETCH_ADD(acq_long, Long, unsigned long, memory_order_acquire);
+ATOMIC_FETCH_ADD(rel_long, Long, unsigned long, memory_order_release);
+ATOMIC_FETCH_ADD(ptr, Pointer, uintptr_t, memory_order_relaxed);
+ATOMIC_FETCH_ADD(acq_ptr, Pointer, uintptr_t, memory_order_acquire);
+ATOMIC_FETCH_ADD(rel_ptr, Pointer, uintptr_t, memory_order_release);
+ATOMIC_FETCH_ADD(32, Int32, uint32_t, memory_order_relaxed);
+ATOMIC_FETCH_ADD(acq_32, Int32, uint32_t, memory_order_acquire);
+ATOMIC_FETCH_ADD(rel_32, Int32, uint32_t, memory_order_release);
+ATOMIC_FETCH_ADD(64, Int64, uint64_t, memory_order_relaxed);
+ATOMIC_FETCH_ADD(acq_64, Int64, uint64_t, memory_order_acquire);
+ATOMIC_FETCH_ADD(rel_64, Int64, uint64_t, memory_order_release);
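+
+/*
+ * Usage sketch (hypothetical counter):  unlike the underlying
+ * atomic_fetch_add_explicit(), these wrappers return void, so the
+ * previous value is discarded:
+ *
+ *   _CPU_Atomic_Fetch_add_32( &counter, 1 );
+ *   _CPU_Atomic_Fetch_sub_32( &counter, 1 );
+ */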
+
+/**
+ * @brief Atomically load-sub-store an atomic type value @a value into address
+ * @a address.
+ */
+#define ATOMIC_FETCH_SUB(NAME, TYPE, R_TYPE, ORDER)                  \
+RTEMS_INLINE_ROUTINE void _CPU_Atomic_Fetch_sub_##NAME(	             \
+  volatile Atomic_##TYPE *address,                                   \
+  R_TYPE value                                                       \
+)                                                                    \
+{                                                                    \
+  atomic_fetch_sub_explicit(address, value, ORDER);                  \
+}                                                                    
+
+ATOMIC_FETCH_SUB(int, Int, unsigned int, memory_order_relaxed);
+ATOMIC_FETCH_SUB(acq_int, Int, unsigned int, memory_order_acquire);
+ATOMIC_FETCH_SUB(rel_int, Int, unsigned int, memory_order_release);
+ATOMIC_FETCH_SUB(long, Long, unsigned long, memory_order_relaxed);
+ATOMIC_FETCH_SUB(acq_long, Long, unsigned long, memory_order_acquire);
+ATOMIC_FETCH_SUB(rel_long, Long, unsigned long, memory_order_release);
+ATOMIC_FETCH_SUB(ptr, Pointer, uintptr_t, memory_order_relaxed);
+ATOMIC_FETCH_SUB(acq_ptr, Pointer, uintptr_t, memory_order_acquire);
+ATOMIC_FETCH_SUB(rel_ptr, Pointer, uintptr_t, memory_order_release);
+ATOMIC_FETCH_SUB(32, Int32, uint32_t, memory_order_relaxed);
+ATOMIC_FETCH_SUB(acq_32, Int32, uint32_t, memory_order_acquire);
+ATOMIC_FETCH_SUB(rel_32, Int32, uint32_t, memory_order_release);
+ATOMIC_FETCH_SUB(64, Int64, uint64_t, memory_order_relaxed);
+ATOMIC_FETCH_SUB(acq_64, Int64, uint64_t, memory_order_acquire);
+ATOMIC_FETCH_SUB(rel_64, Int64, uint64_t, memory_order_release);
+
+/**
+ * @brief Atomically load-or-store an atomic type value @a value into address
+ * @a address.
+ */
+#define ATOMIC_FETCH_OR(NAME, TYPE, R_TYPE, ORDER)                   \
+RTEMS_INLINE_ROUTINE void _CPU_Atomic_Fetch_or_##NAME(	             \
+  volatile Atomic_##TYPE *address,                                   \
+  R_TYPE value                                                       \
+)                                                                    \
+{                                                                    \
+  atomic_fetch_or_explicit(address, value, ORDER);                   \
+}                                                                    
+
+ATOMIC_FETCH_OR(int, Int, unsigned int, memory_order_relaxed);
+ATOMIC_FETCH_OR(acq_int, Int, unsigned int, memory_order_acquire);
+ATOMIC_FETCH_OR(rel_int, Int, unsigned int, memory_order_release);
+ATOMIC_FETCH_OR(long, Long, unsigned long, memory_order_relaxed);
+ATOMIC_FETCH_OR(acq_long, Long, unsigned long, memory_order_acquire);
+ATOMIC_FETCH_OR(rel_long, Long, unsigned long, memory_order_release);
+ATOMIC_FETCH_OR(ptr, Pointer, uintptr_t, memory_order_relaxed);
+ATOMIC_FETCH_OR(acq_ptr, Pointer, uintptr_t, memory_order_acquire);
+ATOMIC_FETCH_OR(rel_ptr, Pointer, uintptr_t, memory_order_release);
+ATOMIC_FETCH_OR(32, Int32, uint32_t, memory_order_relaxed);
+ATOMIC_FETCH_OR(acq_32, Int32, uint32_t, memory_order_acquire);
+ATOMIC_FETCH_OR(rel_32, Int32, uint32_t, memory_order_release);
+ATOMIC_FETCH_OR(64, Int64, uint64_t, memory_order_relaxed);
+ATOMIC_FETCH_OR(acq_64, Int64, uint64_t, memory_order_acquire);
+ATOMIC_FETCH_OR(rel_64, Int64, uint64_t, memory_order_release);
+
+/**
+ * @brief Atomically load-and-store an atomic type value @a value into address
+ * @a address.
+ */
+#define ATOMIC_FETCH_AND(NAME, TYPE, R_TYPE, ORDER)                  \
+RTEMS_INLINE_ROUTINE void _CPU_Atomic_Fetch_and_##NAME(	             \
+  volatile Atomic_##TYPE *address,                                   \
+  R_TYPE value                                                       \
+)                                                                    \
+{                                                                    \
+  atomic_fetch_and_explicit(address, value, ORDER);                  \
+}                                                                    
+
+ATOMIC_FETCH_AND(int, Int, unsigned int, memory_order_relaxed);
+ATOMIC_FETCH_AND(acq_int, Int, unsigned int, memory_order_acquire);
+ATOMIC_FETCH_AND(rel_int, Int, unsigned int, memory_order_release);
+ATOMIC_FETCH_AND(long, Long, unsigned long, memory_order_relaxed);
+ATOMIC_FETCH_AND(acq_long, Long, unsigned long, memory_order_acquire);
+ATOMIC_FETCH_AND(rel_long, Long, unsigned long, memory_order_release);
+ATOMIC_FETCH_AND(ptr, Pointer, uintptr_t, memory_order_relaxed);
+ATOMIC_FETCH_AND(acq_ptr, Pointer, uintptr_t, memory_order_acquire);
+ATOMIC_FETCH_AND(rel_ptr, Pointer, uintptr_t, memory_order_release);
+ATOMIC_FETCH_AND(32, Int32, uint32_t, memory_order_relaxed);
+ATOMIC_FETCH_AND(acq_32, Int32, uint32_t, memory_order_acquire);
+ATOMIC_FETCH_AND(rel_32, Int32, uint32_t, memory_order_release);
+ATOMIC_FETCH_AND(64, Int64, uint64_t, memory_order_relaxed);
+ATOMIC_FETCH_AND(acq_64, Int64, uint64_t, memory_order_acquire);
+ATOMIC_FETCH_AND(rel_64, Int64, uint64_t, memory_order_release);
+
+/**
+ * @brief Atomically compare the value stored at @a address with @a
+ * old_value and, if the two values are equal, update the value of @a
+ * address with @a new_value.  Returns false if the compare failed,
+ * true otherwise; on failure the current value is written back to
+ * @a old_value.
+ */
+#define ATOMIC_COMPARE_EXCHANGE(NAME, TYPE, R_TYPE, ORDER_SUC, ORDER_FAI)    \
+RTEMS_INLINE_ROUTINE bool _CPU_Atomic_Compare_exchange_##NAME(       \
+  volatile Atomic_##TYPE *address,                                   \
+  R_TYPE *old_value,                                                 \
+  R_TYPE new_value                                                   \
+)                                                                    \
+{                                                                    \
+  return atomic_compare_exchange_weak_explicit(address,              \
+         old_value, new_value,                                       \
+         ORDER_SUC, ORDER_FAI);                                      \
+}                                                                    
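+
+/*
+ * Usage sketch (hypothetical caller updating a shared maximum):  the
+ * weak compare and exchange may fail spuriously, so callers normally
+ * retry in a loop:
+ *
+ *   uint32_t old = _CPU_Atomic_Load_32( &level );
+ *   while ( new_level > old &&
+ *           !_CPU_Atomic_Compare_exchange_32( &level, &old, new_level ) )
+ *     ;
+ */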
+
+ATOMIC_COMPARE_EXCHANGE(int, Int, unsigned int, memory_order_relaxed, memory_order_relaxed);
+ATOMIC_COMPARE_EXCHANGE(acq_int, Int, unsigned int, memory_order_acquire, memory_order_relaxed);
+ATOMIC_COMPARE_EXCHANGE(rel_int, Int, unsigned int, memory_order_release, memory_order_relaxed);
+ATOMIC_COMPARE_EXCHANGE(long, Long, unsigned long, memory_order_relaxed, memory_order_relaxed);
+ATOMIC_COMPARE_EXCHANGE(acq_long, Long, unsigned long, memory_order_acquire, memory_order_relaxed);
+ATOMIC_COMPARE_EXCHANGE(rel_long, Long, unsigned long, memory_order_release, memory_order_relaxed);
+ATOMIC_COMPARE_EXCHANGE(ptr, Pointer, uintptr_t, memory_order_relaxed, memory_order_relaxed);
+ATOMIC_COMPARE_EXCHANGE(acq_ptr, Pointer, uintptr_t, memory_order_acquire, memory_order_relaxed);
+ATOMIC_COMPARE_EXCHANGE(rel_ptr, Pointer, uintptr_t, memory_order_release, memory_order_relaxed);
+ATOMIC_COMPARE_EXCHANGE(32, Int32, uint32_t, memory_order_relaxed, memory_order_relaxed);
+ATOMIC_COMPARE_EXCHANGE(acq_32, Int32, uint32_t, memory_order_acquire, memory_order_relaxed);
+ATOMIC_COMPARE_EXCHANGE(rel_32, Int32, uint32_t, memory_order_release, memory_order_relaxed);
+ATOMIC_COMPARE_EXCHANGE(64, Int64, uint64_t, memory_order_relaxed, memory_order_relaxed);
+ATOMIC_COMPARE_EXCHANGE(acq_64, Int64, uint64_t, memory_order_acquire, memory_order_relaxed);
+ATOMIC_COMPARE_EXCHANGE(rel_64, Int64, uint64_t, memory_order_release, memory_order_relaxed);
+
+#ifdef __cplusplus
+}
+#endif
+
+/**@}*/
+#endif
+/*  end of include file */
diff --git a/cpukit/score/preinstall.am b/cpukit/score/preinstall.am
index dc84b21..0053784 100644
--- a/cpukit/score/preinstall.am
+++ b/cpukit/score/preinstall.am
@@ -235,6 +235,10 @@ $(PROJECT_INCLUDE)/rtems/score/genericatomicops.h: include/rtems/score/genericat
 	$(INSTALL_DATA) $< $(PROJECT_INCLUDE)/rtems/score/genericatomicops.h
 PREINSTALL_FILES += $(PROJECT_INCLUDE)/rtems/score/genericatomicops.h
 
+$(PROJECT_INCLUDE)/rtems/score/genericstdatomicops.h: include/rtems/score/genericstdatomicops.h $(PROJECT_INCLUDE)/rtems/score/$(dirstamp)
+	$(INSTALL_DATA) $< $(PROJECT_INCLUDE)/rtems/score/genericstdatomicops.h
+PREINSTALL_FILES += $(PROJECT_INCLUDE)/rtems/score/genericstdatomicops.h
+
 if HAS_PTHREADS
 $(PROJECT_INCLUDE)/rtems/score/corespinlock.h: include/rtems/score/corespinlock.h $(PROJECT_INCLUDE)/rtems/score/$(dirstamp)
 	$(INSTALL_DATA) $< $(PROJECT_INCLUDE)/rtems/score/corespinlock.h
-- 
1.7.9.5