[PATCH 1/4] Modify the atomic API to be compatible with C11 stdatomic.h

WeiY wei.a.yang at gmail.com
Sat Jun 22 13:24:39 UTC 2013

The atomic API functions now take and return plain C types (unsigned int,
unsigned long, uintptr_t, uint32_t, uint64_t) instead of the Atomic_* typedefs,
and the Atomic_* typedefs themselves become the C11 <stdatomic.h> atomic types
(atomic_uint, atomic_ulong, atomic_uintptr_t, atomic_uint_least32_t,
atomic_uint_least64_t). The compare-exchange operations now take the expected
old value by pointer, matching the C11 atomic_compare_exchange_* semantics.
The generic (non-SMP) implementations are updated accordingly, and the SMP
build includes the new genericstdatomicops.h.

---
 cpukit/score/cpu/arm/rtems/score/cpuatomic.h       |    1 +
 cpukit/score/include/rtems/score/atomic.h          |   80 +++----
 .../score/include/rtems/score/genericatomicops.h   |  240 ++++++++++----------
 .../score/include/rtems/score/genericcpuatomic.h   |   21 +-
 cpukit/score/inline/rtems/score/atomic.inl         |   80 +++----
 5 files changed, 217 insertions(+), 205 deletions(-)

diff --git a/cpukit/score/cpu/arm/rtems/score/cpuatomic.h b/cpukit/score/cpu/arm/rtems/score/cpuatomic.h
index 227b3ce..5680c3c 100644
--- a/cpukit/score/cpu/arm/rtems/score/cpuatomic.h
+++ b/cpukit/score/cpu/arm/rtems/score/cpuatomic.h
@@ -29,6 +29,7 @@ extern "C" {
 #if !defined(RTEMS_SMP)
 #include <rtems/score/genericatomicops.h>
 #else
+#include <rtems/score/genericstdatomicops.h>
 #endif
 
 #ifdef __cplusplus
diff --git a/cpukit/score/include/rtems/score/atomic.h b/cpukit/score/include/rtems/score/atomic.h
index 3b62cb1..a2126be 100644
--- a/cpukit/score/include/rtems/score/atomic.h
+++ b/cpukit/score/include/rtems/score/atomic.h
@@ -56,23 +56,23 @@ typedef enum {
  * a type of Atomic_Memory_barrier @a memory_barrier. The @a memory_barrier
  * shall not be ATOMIC_RELEASE_BARRIER.
  */
-RTEMS_INLINE_ROUTINE Atomic_Int _Atomic_Load_int(
+RTEMS_INLINE_ROUTINE unsigned int _Atomic_Load_int(
   volatile Atomic_Int *address,
   Atomic_Memory_barrier memory_barrier
 );
-RTEMS_INLINE_ROUTINE Atomic_Long _Atomic_Load_long(
+RTEMS_INLINE_ROUTINE unsigned long _Atomic_Load_long(
   volatile Atomic_Long *address,
   Atomic_Memory_barrier memory_barrier
 );
-RTEMS_INLINE_ROUTINE Atomic_Pointer _Atomic_Load_ptr(
+RTEMS_INLINE_ROUTINE uintptr_t _Atomic_Load_ptr(
   volatile Atomic_Pointer *address,
   Atomic_Memory_barrier memory_barrier
 );
-RTEMS_INLINE_ROUTINE Atomic_Int32 _Atomic_Load_32(
+RTEMS_INLINE_ROUTINE uint32_t _Atomic_Load_32(
   volatile Atomic_Int32 *address,
   Atomic_Memory_barrier memory_barrier
 );
-RTEMS_INLINE_ROUTINE Atomic_Int64 _Atomic_Load_64(
+RTEMS_INLINE_ROUTINE uint64_t _Atomic_Load_64(
   volatile Atomic_Int64 *address,
   Atomic_Memory_barrier memory_barrier
 );
@@ -84,27 +84,27 @@ RTEMS_INLINE_ROUTINE Atomic_Int64 _Atomic_Load_64(
  */
 RTEMS_INLINE_ROUTINE void _Atomic_Store_int(
   volatile Atomic_Int *address,
-  Atomic_Int value,
+  unsigned int value,
   Atomic_Memory_barrier memory_barrier
 );
 RTEMS_INLINE_ROUTINE void _Atomic_Store_long(
   volatile Atomic_Long *address,
-  Atomic_Long value,
+  unsigned long value,
   Atomic_Memory_barrier memory_barrier
 );
 RTEMS_INLINE_ROUTINE void _Atomic_Store_ptr(
   volatile Atomic_Pointer *address,
-  Atomic_Pointer value,
+  uintptr_t value,
   Atomic_Memory_barrier memory_barrier
 );
 RTEMS_INLINE_ROUTINE void _Atomic_Store_32(
   volatile Atomic_Int32 *address,
-  Atomic_Int32 value,
+  uint32_t value,
   Atomic_Memory_barrier memory_barrier
 );
 RTEMS_INLINE_ROUTINE void _Atomic_Store_64(
   volatile Atomic_Int64 *address,
-  Atomic_Int64 value,
+  uint64_t value,
   Atomic_Memory_barrier memory_barrier
 );
 
@@ -114,27 +114,27 @@ RTEMS_INLINE_ROUTINE void _Atomic_Store_64(
  */
 RTEMS_INLINE_ROUTINE void _Atomic_Fetch_add_int(
   volatile Atomic_Int *address,
-  Atomic_Int value,
+  unsigned int value,
   Atomic_Memory_barrier memory_barrier
 );
 RTEMS_INLINE_ROUTINE void _Atomic_Fetch_add_long(
   volatile Atomic_Long *address,
-  Atomic_Long value,
+  unsigned long value,
   Atomic_Memory_barrier memory_barrier
 );
 RTEMS_INLINE_ROUTINE void _Atomic_Fetch_add_ptr(
   volatile Atomic_Pointer *address,
-  Atomic_Pointer value,
+  uintptr_t value,
   Atomic_Memory_barrier memory_barrier
 );
 RTEMS_INLINE_ROUTINE void _Atomic_Fetch_add_32(
   volatile Atomic_Int32 *address,
-  Atomic_Int32 value,
+  uint32_t value,
   Atomic_Memory_barrier memory_barrier
 );
 RTEMS_INLINE_ROUTINE void _Atomic_Fetch_add_64(
   volatile Atomic_Int64 *address,
-  Atomic_Int64 value,
+  uint64_t value,
   Atomic_Memory_barrier memory_barrier
 );
 
@@ -144,27 +144,27 @@ RTEMS_INLINE_ROUTINE void _Atomic_Fetch_add_64(
  */
 RTEMS_INLINE_ROUTINE void _Atomic_Fetch_sub_int(
   volatile Atomic_Int *address,
-  Atomic_Int value,
+  unsigned int value,
   Atomic_Memory_barrier memory_barrier
 );
 RTEMS_INLINE_ROUTINE void _Atomic_Fetch_sub_long(
   volatile Atomic_Long *address,
-  Atomic_Long value,
+  unsigned long value,
   Atomic_Memory_barrier memory_barrier
 );
 RTEMS_INLINE_ROUTINE void _Atomic_Fetch_sub_ptr(
   volatile Atomic_Pointer *address,
-  Atomic_Pointer value,
+  uintptr_t value,
   Atomic_Memory_barrier memory_barrier
 );
 RTEMS_INLINE_ROUTINE void _Atomic_Fetch_sub_32(
   volatile Atomic_Int32 *address,
-  Atomic_Int32 value,
+  uint32_t value,
   Atomic_Memory_barrier memory_barrier
 );
 RTEMS_INLINE_ROUTINE void _Atomic_Fetch_sub_64(
   volatile Atomic_Int64 *address,
-  Atomic_Int64 value,
+  uint64_t value,
   Atomic_Memory_barrier memory_barrier
 );
 
@@ -174,27 +174,27 @@ RTEMS_INLINE_ROUTINE void _Atomic_Fetch_sub_64(
  */
 RTEMS_INLINE_ROUTINE void _Atomic_Fetch_or_int(
   volatile Atomic_Int *address,
-  Atomic_Int value,
+  unsigned int value,
   Atomic_Memory_barrier memory_barrier
 );
 RTEMS_INLINE_ROUTINE void _Atomic_Fetch_or_long(
   volatile Atomic_Long *address,
-  Atomic_Long value,
+  unsigned long value,
   Atomic_Memory_barrier memory_barrier
 );
 RTEMS_INLINE_ROUTINE void _Atomic_Fetch_or_ptr(
   volatile Atomic_Pointer *address,
-  Atomic_Pointer value,
+  uintptr_t value,
   Atomic_Memory_barrier memory_barrier
 );
 RTEMS_INLINE_ROUTINE void _Atomic_Fetch_or_32(
   volatile Atomic_Int32 *address,
-  Atomic_Int32 value,
+  uint32_t value,
   Atomic_Memory_barrier memory_barrier
 );
 RTEMS_INLINE_ROUTINE void _Atomic_Fetch_or_64(
   volatile Atomic_Int64 *address,
-  Atomic_Int64 value,
+  uint64_t value,
   Atomic_Memory_barrier memory_barrier
 );
 
@@ -204,27 +204,27 @@ RTEMS_INLINE_ROUTINE void _Atomic_Fetch_or_64(
  */
 RTEMS_INLINE_ROUTINE void _Atomic_Fetch_and_int(
   volatile Atomic_Int *address,
-  Atomic_Int value,
+  unsigned int value,
   Atomic_Memory_barrier memory_barrier
 );
 RTEMS_INLINE_ROUTINE void _Atomic_Fetch_and_long(
   volatile Atomic_Long *address,
-  Atomic_Long value,
+  unsigned long value,
   Atomic_Memory_barrier memory_barrier
 );
 RTEMS_INLINE_ROUTINE void _Atomic_Fetch_and_ptr(
   volatile Atomic_Pointer *address,
-  Atomic_Pointer value,
+  uintptr_t value,
   Atomic_Memory_barrier memory_barrier
 );
 RTEMS_INLINE_ROUTINE void _Atomic_Fetch_and_32(
   volatile Atomic_Int32 *address,
-  Atomic_Int32 value,
+  uint32_t value,
   Atomic_Memory_barrier memory_barrier
 );
 RTEMS_INLINE_ROUTINE void _Atomic_Fetch_and_64(
   volatile Atomic_Int64 *address,
-  Atomic_Int64 value,
+  uint64_t value,
   Atomic_Memory_barrier memory_barrier
 );
 
@@ -237,32 +237,32 @@ RTEMS_INLINE_ROUTINE void _Atomic_Fetch_and_64(
  */
 RTEMS_INLINE_ROUTINE int _Atomic_Compare_exchange_int(
   volatile Atomic_Int *address,
-  Atomic_Int old_value,
-  Atomic_Int new_value,
+  unsigned int *old_value,
+  unsigned int new_value,
   Atomic_Memory_barrier memory_barrier
 );
 RTEMS_INLINE_ROUTINE int _Atomic_Compare_exchange_long(
   volatile Atomic_Long *address,
-  Atomic_Long old_value,
-  Atomic_Long new_value,
+  unsigned long *old_value,
+  unsigned long new_value,
   Atomic_Memory_barrier memory_barrier
 );
 RTEMS_INLINE_ROUTINE int _Atomic_Compare_exchange_ptr(
   volatile Atomic_Pointer *address,
-  Atomic_Pointer old_value,
-  Atomic_Pointer new_value,
+  uintptr_t *old_value,
+  uintptr_t new_value,
   Atomic_Memory_barrier memory_barrier  
 );
 RTEMS_INLINE_ROUTINE int _Atomic_Compare_exchange_32(
   volatile Atomic_Int32 *address,
-  Atomic_Int32 old_value,
-  Atomic_Int32 new_value,
+  uint32_t *old_value,
+  uint32_t new_value,
   Atomic_Memory_barrier memory_barrier
 );
 RTEMS_INLINE_ROUTINE int _Atomic_Compare_exchange_64(
   volatile Atomic_Int64 *address,
-  Atomic_Int64 old_value,
-  Atomic_Int64 new_value,
+  uint64_t *old_value,
+  uint64_t new_value,
   Atomic_Memory_barrier memory_barrier
 );
 
diff --git a/cpukit/score/include/rtems/score/genericatomicops.h b/cpukit/score/include/rtems/score/genericatomicops.h
index a6c09d3..271089f 100644
--- a/cpukit/score/include/rtems/score/genericatomicops.h
+++ b/cpukit/score/include/rtems/score/genericatomicops.h
@@ -36,190 +36,190 @@ extern "C" {
 /**
  * @brief Atomically load an atomic type value from address @a address.
  */
-#define ATOMIC_LOAD(NAME, TYPE)                                      \
-RTEMS_INLINE_ROUTINE Atomic_##TYPE _CPU_Atomic_Load_##NAME(	     \
+#define ATOMIC_LOAD(NAME, TYPE, R_TYPE)                              \
+RTEMS_INLINE_ROUTINE R_TYPE _CPU_Atomic_Load_##NAME(	               \
   volatile Atomic_##TYPE *address                                    \
 )                                                                    \
 {                                                                    \
-  Atomic_##TYPE  tmp;                                                \
+  R_TYPE  tmp;                                                       \
   ISR_Level   level;                                                 \
                                                                      \
   _ISR_Disable( level );                                             \
-  tmp = *address;                                                    \
+  tmp = address->__val;                                              \
   _ISR_Enable( level );                                              \
   return tmp;                                                        \
 }                                                                    \
 
-ATOMIC_LOAD(int, Int);
-ATOMIC_LOAD(acq_int, Int);
-ATOMIC_LOAD(long, Long);
-ATOMIC_LOAD(acq_long, Long);
-ATOMIC_LOAD(ptr, Pointer);
-ATOMIC_LOAD(acq_ptr, Pointer);
-ATOMIC_LOAD(32, Int32);
-ATOMIC_LOAD(acq_32, Int32);
-ATOMIC_LOAD(64, Int64);
-ATOMIC_LOAD(acq_64, Int64);
+ATOMIC_LOAD(int, Int, unsigned int);
+ATOMIC_LOAD(acq_int, Int, unsigned int);
+ATOMIC_LOAD(long, Long, unsigned long);
+ATOMIC_LOAD(acq_long, Long, unsigned long);
+ATOMIC_LOAD(ptr, Pointer, uintptr_t);
+ATOMIC_LOAD(acq_ptr, Pointer, uintptr_t);
+ATOMIC_LOAD(32, Int32, uint32_t);
+ATOMIC_LOAD(acq_32, Int32, uint32_t);
+ATOMIC_LOAD(64, Int64, uint64_t);
+ATOMIC_LOAD(acq_64, Int64, uint64_t);
 
 /**
  * @brief Atomically store an atomic type value @a value into address @a
  * address.
  */
-#define ATOMIC_STORE(NAME, TYPE)                                     \
-RTEMS_INLINE_ROUTINE void _CPU_Atomic_Store_##NAME(	             \
+#define ATOMIC_STORE(NAME, TYPE, R_TYPE)                             \
+RTEMS_INLINE_ROUTINE void _CPU_Atomic_Store_##NAME(	                 \
   volatile Atomic_##TYPE *address,                                   \
-  Atomic_##TYPE value                                                \
+  R_TYPE value                                                       \
 )                                                                    \
 {                                                                    \
   ISR_Level   level;                                                 \
                                                                      \
   _ISR_Disable( level );                                             \
-  *address = value;                                                  \
+  address->__val = value;                                            \
   _ISR_Enable( level );                                              \
 }                                                                    \
 
-ATOMIC_STORE(int, Int);
-ATOMIC_STORE(rel_int, Int);
-ATOMIC_STORE(long, Long);
-ATOMIC_STORE(rel_long, Long);
-ATOMIC_STORE(ptr, Pointer);
-ATOMIC_STORE(rel_ptr, Pointer);
-ATOMIC_STORE(32, Int32);
-ATOMIC_STORE(rel_32, Int32);
-ATOMIC_STORE(64, Int64);
-ATOMIC_STORE(rel_64, Int64);
+ATOMIC_STORE(int, Int, unsigned int);
+ATOMIC_STORE(rel_int, Int, unsigned int);
+ATOMIC_STORE(long, Long, unsigned long);
+ATOMIC_STORE(rel_long, Long, unsigned long);
+ATOMIC_STORE(ptr, Pointer, uintptr_t);
+ATOMIC_STORE(rel_ptr, Pointer, uintptr_t);
+ATOMIC_STORE(32, Int32, uint32_t);
+ATOMIC_STORE(rel_32, Int32, uint32_t);
+ATOMIC_STORE(64, Int64, uint64_t);
+ATOMIC_STORE(rel_64, Int64, uint64_t);
 
 /**
  * @brief Atomically load-add-store an atomic type value @a value into address
  * @a address.
  */
-#define ATOMIC_FETCH_ADD(NAME, TYPE)                                 \
+#define ATOMIC_FETCH_ADD(NAME, TYPE, R_TYPE)                         \
 RTEMS_INLINE_ROUTINE void _CPU_Atomic_Fetch_add_##NAME(	             \
   volatile Atomic_##TYPE *address,                                   \
-  Atomic_##TYPE value                                                \
+  R_TYPE value                                                       \
 )                                                                    \
 {                                                                    \
   ISR_Level   level;                                                 \
                                                                      \
   _ISR_Disable( level );                                             \
-  *address += value;                                                 \
+  address->__val += value;                                           \
   _ISR_Enable( level );                                              \
 }                                                                    \
 
-ATOMIC_FETCH_ADD(int, Int);
-ATOMIC_FETCH_ADD(acq_int, Int);
-ATOMIC_FETCH_ADD(rel_int, Int);
-ATOMIC_FETCH_ADD(long, Long);
-ATOMIC_FETCH_ADD(acq_long, Long);
-ATOMIC_FETCH_ADD(rel_long, Long);
-ATOMIC_FETCH_ADD(ptr, Pointer);
-ATOMIC_FETCH_ADD(acq_ptr, Pointer);
-ATOMIC_FETCH_ADD(rel_ptr, Pointer);
-ATOMIC_FETCH_ADD(32, Int32);
-ATOMIC_FETCH_ADD(acq_32, Int32);
-ATOMIC_FETCH_ADD(rel_32, Int32);
-ATOMIC_FETCH_ADD(64, Int64);
-ATOMIC_FETCH_ADD(acq_64, Int64);
-ATOMIC_FETCH_ADD(rel_64, Int64);
+ATOMIC_FETCH_ADD(int, Int, unsigned int);
+ATOMIC_FETCH_ADD(acq_int, Int, unsigned int);
+ATOMIC_FETCH_ADD(rel_int, Int, unsigned int);
+ATOMIC_FETCH_ADD(long, Long, unsigned long);
+ATOMIC_FETCH_ADD(acq_long, Long, unsigned long);
+ATOMIC_FETCH_ADD(rel_long, Long, unsigned long);
+ATOMIC_FETCH_ADD(ptr, Pointer, uintptr_t);
+ATOMIC_FETCH_ADD(acq_ptr, Pointer, uintptr_t);
+ATOMIC_FETCH_ADD(rel_ptr, Pointer, uintptr_t);
+ATOMIC_FETCH_ADD(32, Int32, uint32_t);
+ATOMIC_FETCH_ADD(acq_32, Int32, uint32_t);
+ATOMIC_FETCH_ADD(rel_32, Int32, uint32_t);
+ATOMIC_FETCH_ADD(64, Int64, uint64_t);
+ATOMIC_FETCH_ADD(acq_64, Int64, uint64_t);
+ATOMIC_FETCH_ADD(rel_64, Int64, uint64_t);
 
 /**
  * @brief Atomically load-sub-store an atomic type value @a value into address
  * @a address.
  */
-#define ATOMIC_FETCH_SUB(NAME, TYPE)                                 \
+#define ATOMIC_FETCH_SUB(NAME, TYPE, R_TYPE)                         \
 RTEMS_INLINE_ROUTINE void _CPU_Atomic_Fetch_sub_##NAME(	             \
   volatile Atomic_##TYPE *address,                                   \
-  Atomic_##TYPE value                                                \
+  R_TYPE value                                                       \
 )                                                                    \
 {                                                                    \
   ISR_Level   level;                                                 \
                                                                      \
   _ISR_Disable( level );                                             \
-  *address -= value;                                                 \
+  address->__val -= value;                                           \
   _ISR_Enable( level );                                              \
 }                                                                    \
 
-ATOMIC_FETCH_SUB(int, Int);
-ATOMIC_FETCH_SUB(acq_int, Int);
-ATOMIC_FETCH_SUB(rel_int, Int);
-ATOMIC_FETCH_SUB(long, Long);
-ATOMIC_FETCH_SUB(acq_long, Long);
-ATOMIC_FETCH_SUB(rel_long, Long);
-ATOMIC_FETCH_SUB(ptr, Pointer);
-ATOMIC_FETCH_SUB(acq_ptr, Pointer);
-ATOMIC_FETCH_SUB(rel_ptr, Pointer);
-ATOMIC_FETCH_SUB(32, Int32);
-ATOMIC_FETCH_SUB(acq_32, Int32);
-ATOMIC_FETCH_SUB(rel_32, Int32);
-ATOMIC_FETCH_SUB(64, Int64);
-ATOMIC_FETCH_SUB(acq_64, Int64);
-ATOMIC_FETCH_SUB(rel_64, Int64);
+ATOMIC_FETCH_SUB(int, Int, unsigned int);
+ATOMIC_FETCH_SUB(acq_int, Int, unsigned int);
+ATOMIC_FETCH_SUB(rel_int, Int, unsigned int);
+ATOMIC_FETCH_SUB(long, Long, unsigned long);
+ATOMIC_FETCH_SUB(acq_long, Long, unsigned long);
+ATOMIC_FETCH_SUB(rel_long, Long, unsigned long);
+ATOMIC_FETCH_SUB(ptr, Pointer, uintptr_t);
+ATOMIC_FETCH_SUB(acq_ptr, Pointer, uintptr_t);
+ATOMIC_FETCH_SUB(rel_ptr, Pointer, uintptr_t);
+ATOMIC_FETCH_SUB(32, Int32, uint32_t);
+ATOMIC_FETCH_SUB(acq_32, Int32, uint32_t);
+ATOMIC_FETCH_SUB(rel_32, Int32, uint32_t);
+ATOMIC_FETCH_SUB(64, Int64, uint64_t);
+ATOMIC_FETCH_SUB(acq_64, Int64, uint64_t);
+ATOMIC_FETCH_SUB(rel_64, Int64, uint64_t);
 
 /**
  * @brief Atomically load-or-store an atomic type value @a value into address
  * @a address.
  */
-#define ATOMIC_FETCH_OR(NAME, TYPE)                                  \
+#define ATOMIC_FETCH_OR(NAME, TYPE, R_TYPE)                          \
 RTEMS_INLINE_ROUTINE void _CPU_Atomic_Fetch_or_##NAME(	             \
   volatile Atomic_##TYPE *address,                                   \
-  Atomic_##TYPE value                                                \
+  R_TYPE value                                                       \
 )                                                                    \
 {                                                                    \
   ISR_Level   level;                                                 \
                                                                      \
   _ISR_Disable( level );                                             \
-  *address |= value;                                                 \
+  address->__val |= value;                                           \
   _ISR_Enable( level );                                              \
 }                                                                    \
 
-ATOMIC_FETCH_OR(int, Int);
-ATOMIC_FETCH_OR(acq_int, Int);
-ATOMIC_FETCH_OR(rel_int, Int);
-ATOMIC_FETCH_OR(long, Long);
-ATOMIC_FETCH_OR(acq_long, Long);
-ATOMIC_FETCH_OR(rel_long, Long);
-ATOMIC_FETCH_OR(ptr, Pointer);
-ATOMIC_FETCH_OR(acq_ptr, Pointer);
-ATOMIC_FETCH_OR(rel_ptr, Pointer);
-ATOMIC_FETCH_OR(32, Int32);
-ATOMIC_FETCH_OR(acq_32, Int32);
-ATOMIC_FETCH_OR(rel_32, Int32);
-ATOMIC_FETCH_OR(64, Int64);
-ATOMIC_FETCH_OR(acq_64, Int64);
-ATOMIC_FETCH_OR(rel_64, Int64);
+ATOMIC_FETCH_OR(int, Int, unsigned int);
+ATOMIC_FETCH_OR(acq_int, Int, unsigned int);
+ATOMIC_FETCH_OR(rel_int, Int, unsigned int);
+ATOMIC_FETCH_OR(long, Long, unsigned long);
+ATOMIC_FETCH_OR(acq_long, Long, unsigned long);
+ATOMIC_FETCH_OR(rel_long, Long, unsigned long);
+ATOMIC_FETCH_OR(ptr, Pointer, uintptr_t);
+ATOMIC_FETCH_OR(acq_ptr, Pointer, uintptr_t);
+ATOMIC_FETCH_OR(rel_ptr, Pointer, uintptr_t);
+ATOMIC_FETCH_OR(32, Int32, uint32_t);
+ATOMIC_FETCH_OR(acq_32, Int32, uint32_t);
+ATOMIC_FETCH_OR(rel_32, Int32, uint32_t);
+ATOMIC_FETCH_OR(64, Int64, uint64_t);
+ATOMIC_FETCH_OR(acq_64, Int64, uint64_t);
+ATOMIC_FETCH_OR(rel_64, Int64, uint64_t);
 
 /**
  * @brief Atomically load-and-store an atomic type value @a value into address
  * @a address.
  */
-#define ATOMIC_FETCH_AND(NAME, TYPE)                                 \
+#define ATOMIC_FETCH_AND(NAME, TYPE, R_TYPE)                         \
 RTEMS_INLINE_ROUTINE void _CPU_Atomic_Fetch_and_##NAME(	             \
   volatile Atomic_##TYPE *address,                                   \
-  Atomic_##TYPE value                                                \
+  R_TYPE value                                                       \
 )                                                                    \
 {                                                                    \
   ISR_Level   level;                                                 \
                                                                      \
   _ISR_Disable( level );                                             \
-  *address &= value;                                                 \
+  address->__val &= value;                                           \
   _ISR_Enable( level );                                              \
 }                                                                    \
 
-ATOMIC_FETCH_AND(int, Int);
-ATOMIC_FETCH_AND(acq_int, Int);
-ATOMIC_FETCH_AND(rel_int, Int);
-ATOMIC_FETCH_AND(long, Long);
-ATOMIC_FETCH_AND(acq_long, Long);
-ATOMIC_FETCH_AND(rel_long, Long);
-ATOMIC_FETCH_AND(ptr, Pointer);
-ATOMIC_FETCH_AND(acq_ptr, Pointer);
-ATOMIC_FETCH_AND(rel_ptr, Pointer);
-ATOMIC_FETCH_AND(32, Int32);
-ATOMIC_FETCH_AND(acq_32, Int32);
-ATOMIC_FETCH_AND(rel_32, Int32);
-ATOMIC_FETCH_AND(64, Int64);
-ATOMIC_FETCH_AND(acq_64, Int64);
-ATOMIC_FETCH_AND(rel_64, Int64);
+ATOMIC_FETCH_AND(int, Int, unsigned int);
+ATOMIC_FETCH_AND(acq_int, Int, unsigned int);
+ATOMIC_FETCH_AND(rel_int, Int, unsigned int);
+ATOMIC_FETCH_AND(long, Long, unsigned long);
+ATOMIC_FETCH_AND(acq_long, Long, unsigned long);
+ATOMIC_FETCH_AND(rel_long, Long, unsigned long);
+ATOMIC_FETCH_AND(ptr, Pointer, uintptr_t);
+ATOMIC_FETCH_AND(acq_ptr, Pointer, uintptr_t);
+ATOMIC_FETCH_AND(rel_ptr, Pointer, uintptr_t);
+ATOMIC_FETCH_AND(32, Int32, uint32_t);
+ATOMIC_FETCH_AND(acq_32, Int32, uint32_t);
+ATOMIC_FETCH_AND(rel_32, Int32, uint32_t);
+ATOMIC_FETCH_AND(64, Int64, uint64_t);
+ATOMIC_FETCH_AND(acq_64, Int64, uint64_t);
+ATOMIC_FETCH_AND(rel_64, Int64, uint64_t);
 
 /**
  * @brief Atomically compare the value stored at @a address with @a
@@ -227,19 +227,19 @@ ATOMIC_FETCH_AND(rel_64, Int64);
  * address with @a new_value. Returns zero if the compare failed,
  * nonzero otherwise.
  */
-#define ATOMIC_COMPARE_EXCHANGE(NAME, TYPE)                          \
+#define ATOMIC_COMPARE_EXCHANGE(NAME, TYPE, R_TYPE)                  \
 RTEMS_INLINE_ROUTINE int _CPU_Atomic_Compare_exchange_##NAME(        \
   volatile Atomic_##TYPE *address,                                   \
-  Atomic_##TYPE old_value,                                           \
-  Atomic_##TYPE new_value                                            \
+  R_TYPE *old_value,                                                 \
+  R_TYPE new_value                                                   \
 )                                                                    \
 {                                                                    \
   ISR_Level   level;                                                 \
   int ret;                                                           \
                                                                      \
   _ISR_Disable( level );                                             \
-  if (*address == old_value) {                                       \
-    *address = new_value;                                            \
+  if (address->__val == *old_value) {                                \
+    address->__val = new_value;                                      \
     ret = TRUE;                                                      \
   } else {                                                           \
     ret = FALSE;                                                     \
@@ -249,21 +249,21 @@ RTEMS_INLINE_ROUTINE int _CPU_Atomic_Compare_exchange_##NAME(        \
   return ret;                                                        \
 }                                                                    \
 
-ATOMIC_COMPARE_EXCHANGE(int, Int);
-ATOMIC_COMPARE_EXCHANGE(acq_int, Int);
-ATOMIC_COMPARE_EXCHANGE(rel_int, Int);
-ATOMIC_COMPARE_EXCHANGE(long, Long);
-ATOMIC_COMPARE_EXCHANGE(acq_long, Long);
-ATOMIC_COMPARE_EXCHANGE(rel_long, Long);
-ATOMIC_COMPARE_EXCHANGE(ptr, Pointer);
-ATOMIC_COMPARE_EXCHANGE(acq_ptr, Pointer);
-ATOMIC_COMPARE_EXCHANGE(rel_ptr, Pointer);
-ATOMIC_COMPARE_EXCHANGE(32, Int32);
-ATOMIC_COMPARE_EXCHANGE(acq_32, Int32);
-ATOMIC_COMPARE_EXCHANGE(rel_32, Int32);
-ATOMIC_COMPARE_EXCHANGE(64, Int64);
-ATOMIC_COMPARE_EXCHANGE(acq_64, Int64);
-ATOMIC_COMPARE_EXCHANGE(rel_64, Int64);
+ATOMIC_COMPARE_EXCHANGE(int, Int, unsigned int);
+ATOMIC_COMPARE_EXCHANGE(acq_int, Int, unsigned int);
+ATOMIC_COMPARE_EXCHANGE(rel_int, Int, unsigned int);
+ATOMIC_COMPARE_EXCHANGE(long, Long, unsigned long);
+ATOMIC_COMPARE_EXCHANGE(acq_long, Long, unsigned long);
+ATOMIC_COMPARE_EXCHANGE(rel_long, Long, unsigned long);
+ATOMIC_COMPARE_EXCHANGE(ptr, Pointer, uintptr_t);
+ATOMIC_COMPARE_EXCHANGE(acq_ptr, Pointer, uintptr_t);
+ATOMIC_COMPARE_EXCHANGE(rel_ptr, Pointer, uintptr_t);
+ATOMIC_COMPARE_EXCHANGE(32, Int32, uint32_t);
+ATOMIC_COMPARE_EXCHANGE(acq_32, Int32, uint32_t);
+ATOMIC_COMPARE_EXCHANGE(rel_32, Int32, uint32_t);
+ATOMIC_COMPARE_EXCHANGE(64, Int64, uint64_t);
+ATOMIC_COMPARE_EXCHANGE(acq_64, Int64, uint64_t);
+ATOMIC_COMPARE_EXCHANGE(rel_64, Int64, uint64_t);
 
 #ifdef __cplusplus
 }
diff --git a/cpukit/score/include/rtems/score/genericcpuatomic.h b/cpukit/score/include/rtems/score/genericcpuatomic.h
index 2599d0d..ed5868a 100644
--- a/cpukit/score/include/rtems/score/genericcpuatomic.h
+++ b/cpukit/score/include/rtems/score/genericcpuatomic.h
@@ -16,6 +16,7 @@
 #define _RTEMS_SCORE_GEMERAL_ATOMIC_CPU_H
 
 #include <stdint.h>
+#include <stdatomic.h>
 
 #ifdef __cplusplus
 extern "C" {
@@ -31,27 +32,37 @@ extern "C" {
 /**
  * @brief atomic operation unsigned integer type
  */
-typedef unsigned int Atomic_Int;
+//typedef unsigned int Atomic_Int;
+//typedef _Atomic(unsigned int) Atomic_Int;
+typedef atomic_uint Atomic_Int;
 
 /**
  * @brief atomic operation unsigned long integer type
  */
-typedef unsigned long Atomic_Long;
+//typedef unsigned long Atomic_Long;
+//typedef _Atomic(unsigned long) Atomic_Long;
+typedef atomic_ulong Atomic_Long;
 
 /**
  * @brief atomic operation unsigned 32-bit integer type
  */
-typedef uint32_t Atomic_Int32;
+//typedef uint32_t Atomic_Int32;
+//typedef _Atomic(uint32_t) Atomic_Int32;
+typedef atomic_uint_least32_t Atomic_Int32;
 
 /**
  * @brief atomic operation unsigned 64-bit integer type
  */
-typedef uint64_t Atomic_Int64;
+//typedef uint64_t Atomic_Int64;
+//typedef _Atomic(uint64_t) Atomic_Int64;
+typedef atomic_uint_least64_t Atomic_Int64;
 
 /**
  * @brief atomic operation unsigned integer the size of a pointer type
  */
-typedef uintptr_t Atomic_Pointer;
+//typedef uintptr_t Atomic_Pointer;
+//typedef _Atomic(uintptr_t) Atomic_Pointer;
+typedef atomic_uintptr_t Atomic_Pointer;
 
 #ifdef __cplusplus
 }
diff --git a/cpukit/score/inline/rtems/score/atomic.inl b/cpukit/score/inline/rtems/score/atomic.inl
index 313366c..3486924 100644
--- a/cpukit/score/inline/rtems/score/atomic.inl
+++ b/cpukit/score/inline/rtems/score/atomic.inl
@@ -31,7 +31,7 @@
 #ifndef _RTEMS_SCORE_ATOMIC_INL
 #define _RTEMS_SCORE_ATOMIC_INL
 
-RTEMS_INLINE_ROUTINE Atomic_Int _Atomic_Load_int(
+RTEMS_INLINE_ROUTINE unsigned int _Atomic_Load_int(
   volatile Atomic_Int *address,
   Atomic_Memory_barrier memory_barrier
 )
@@ -41,7 +41,7 @@ RTEMS_INLINE_ROUTINE Atomic_Int _Atomic_Load_int(
   return _CPU_Atomic_Load_int(address);
 }
 
-RTEMS_INLINE_ROUTINE Atomic_Long _Atomic_Load_long(
+RTEMS_INLINE_ROUTINE unsigned long _Atomic_Load_long(
   volatile Atomic_Long *address,
   Atomic_Memory_barrier memory_barrier
 )
@@ -51,7 +51,7 @@ RTEMS_INLINE_ROUTINE Atomic_Long _Atomic_Load_long(
   return _CPU_Atomic_Load_long(address);
 }
 
-RTEMS_INLINE_ROUTINE Atomic_Pointer _Atomic_Load_ptr(
+RTEMS_INLINE_ROUTINE uintptr_t _Atomic_Load_ptr(
   volatile Atomic_Pointer *address,
   Atomic_Memory_barrier memory_barrier
 )
@@ -61,7 +61,7 @@ RTEMS_INLINE_ROUTINE Atomic_Pointer _Atomic_Load_ptr(
   return _CPU_Atomic_Load_ptr(address);
 }
 
-RTEMS_INLINE_ROUTINE Atomic_Int32 _Atomic_Load_32(
+RTEMS_INLINE_ROUTINE uint32_t _Atomic_Load_32(
   volatile Atomic_Int32 *address,
   Atomic_Memory_barrier memory_barrier
 )
@@ -71,7 +71,7 @@ RTEMS_INLINE_ROUTINE Atomic_Int32 _Atomic_Load_32(
   return _CPU_Atomic_Load_32(address);
 }
 
-RTEMS_INLINE_ROUTINE Atomic_Int64 _Atomic_Load_64(
+RTEMS_INLINE_ROUTINE uint64_t _Atomic_Load_64(
   volatile Atomic_Int64 *address,
   Atomic_Memory_barrier memory_barrier
 )
@@ -84,7 +84,7 @@ RTEMS_INLINE_ROUTINE Atomic_Int64 _Atomic_Load_64(
 
 RTEMS_INLINE_ROUTINE void _Atomic_Store_int(
   volatile Atomic_Int *address,
-  Atomic_Int value,
+  unsigned int value,
   Atomic_Memory_barrier memory_barrier
 )
 {
@@ -95,7 +95,7 @@ RTEMS_INLINE_ROUTINE void _Atomic_Store_int(
 
 RTEMS_INLINE_ROUTINE void _Atomic_Store_long(
   volatile Atomic_Long *address,
-  Atomic_Long value,
+  unsigned long value,
   Atomic_Memory_barrier memory_barrier
 )
 {
@@ -106,7 +106,7 @@ RTEMS_INLINE_ROUTINE void _Atomic_Store_long(
 
 RTEMS_INLINE_ROUTINE void _Atomic_Store_ptr(
   volatile Atomic_Pointer *address,
-  Atomic_Pointer value,
+  uintptr_t value,
   Atomic_Memory_barrier memory_barrier
 )
 {
@@ -117,7 +117,7 @@ RTEMS_INLINE_ROUTINE void _Atomic_Store_ptr(
 
 RTEMS_INLINE_ROUTINE void _Atomic_Store_32(
   volatile Atomic_Int32 *address,
-  Atomic_Int32 value,
+  uint32_t value,
   Atomic_Memory_barrier memory_barrier
 )
 {
@@ -128,7 +128,7 @@ RTEMS_INLINE_ROUTINE void _Atomic_Store_32(
 
 RTEMS_INLINE_ROUTINE void _Atomic_Store_64(
   volatile Atomic_Int64 *address,
-  Atomic_Int64 value,
+  uint64_t value,
   Atomic_Memory_barrier memory_barrier
 )
 {
@@ -139,7 +139,7 @@ RTEMS_INLINE_ROUTINE void _Atomic_Store_64(
 
 RTEMS_INLINE_ROUTINE void _Atomic_Fetch_add_int(
   volatile Atomic_Int *address,
-  Atomic_Int value,
+  unsigned int value,
   Atomic_Memory_barrier memory_barrier
 )
 {
@@ -153,7 +153,7 @@ RTEMS_INLINE_ROUTINE void _Atomic_Fetch_add_int(
 
 RTEMS_INLINE_ROUTINE void _Atomic_Fetch_add_long(
   volatile Atomic_Long *address,
-  Atomic_Long value,
+  unsigned long value,
   Atomic_Memory_barrier memory_barrier
 )
 {
@@ -167,7 +167,7 @@ RTEMS_INLINE_ROUTINE void _Atomic_Fetch_add_long(
 
 RTEMS_INLINE_ROUTINE void _Atomic_Fetch_add_ptr(
   volatile Atomic_Pointer *address,
-  Atomic_Pointer value,
+  uintptr_t value,
   Atomic_Memory_barrier memory_barrier
 )
 {
@@ -181,7 +181,7 @@ RTEMS_INLINE_ROUTINE void _Atomic_Fetch_add_ptr(
 
 RTEMS_INLINE_ROUTINE void _Atomic_Fetch_add_32(
   volatile Atomic_Int32 *address,
-  Atomic_Int32 value,
+  uint32_t value,
   Atomic_Memory_barrier memory_barrier
 )
 {
@@ -195,7 +195,7 @@ RTEMS_INLINE_ROUTINE void _Atomic_Fetch_add_32(
 
 RTEMS_INLINE_ROUTINE void _Atomic_Fetch_add_64(
   volatile Atomic_Int64 *address,
-  Atomic_Int64 value,
+  uint64_t value,
   Atomic_Memory_barrier memory_barrier
 )
 {
@@ -209,7 +209,7 @@ RTEMS_INLINE_ROUTINE void _Atomic_Fetch_add_64(
 
 RTEMS_INLINE_ROUTINE void _Atomic_Fetch_sub_int(
   volatile Atomic_Int *address,
-  Atomic_Int value,
+  unsigned int value,
   Atomic_Memory_barrier memory_barrier
 )
 {
@@ -223,7 +223,7 @@ RTEMS_INLINE_ROUTINE void _Atomic_Fetch_sub_int(
 
 RTEMS_INLINE_ROUTINE void _Atomic_Fetch_sub_long(
   volatile Atomic_Long *address,
-  Atomic_Long value,
+  unsigned long value,
   Atomic_Memory_barrier memory_barrier
 )
 {
@@ -237,7 +237,7 @@ RTEMS_INLINE_ROUTINE void _Atomic_Fetch_sub_long(
 
 RTEMS_INLINE_ROUTINE void _Atomic_Fetch_sub_ptr(
   volatile Atomic_Pointer *address,
-  Atomic_Pointer value,
+  uintptr_t value,
   Atomic_Memory_barrier memory_barrier
 )
 {
@@ -251,7 +251,7 @@ RTEMS_INLINE_ROUTINE void _Atomic_Fetch_sub_ptr(
 
 RTEMS_INLINE_ROUTINE void _Atomic_Fetch_sub_32(
   volatile Atomic_Int32 *address,
-  Atomic_Int32 value,
+  uint32_t value,
   Atomic_Memory_barrier memory_barrier
 )
 {
@@ -265,7 +265,7 @@ RTEMS_INLINE_ROUTINE void _Atomic_Fetch_sub_32(
 
 RTEMS_INLINE_ROUTINE void _Atomic_Fetch_sub_64(
   volatile Atomic_Int64 *address,
-  Atomic_Int64 value,
+  uint64_t value,
   Atomic_Memory_barrier memory_barrier
 )
 {
@@ -279,7 +279,7 @@ RTEMS_INLINE_ROUTINE void _Atomic_Fetch_sub_64(
 
 RTEMS_INLINE_ROUTINE void _Atomic_Fetch_or_int(
   volatile Atomic_Int *address,
-  Atomic_Int value,
+  unsigned int value,
   Atomic_Memory_barrier memory_barrier
 )
 {
@@ -293,7 +293,7 @@ RTEMS_INLINE_ROUTINE void _Atomic_Fetch_or_int(
 
 RTEMS_INLINE_ROUTINE void _Atomic_Fetch_or_long(
   volatile Atomic_Long *address,
-  Atomic_Long value,
+  unsigned long value,
   Atomic_Memory_barrier memory_barrier
 )
 {
@@ -307,7 +307,7 @@ RTEMS_INLINE_ROUTINE void _Atomic_Fetch_or_long(
 
 RTEMS_INLINE_ROUTINE void _Atomic_Fetch_or_ptr(
   volatile Atomic_Pointer *address,
-  Atomic_Pointer value,
+  uintptr_t value,
   Atomic_Memory_barrier memory_barrier
 )
 {
@@ -321,7 +321,7 @@ RTEMS_INLINE_ROUTINE void _Atomic_Fetch_or_ptr(
 
 RTEMS_INLINE_ROUTINE void _Atomic_Fetch_or_32(
   volatile Atomic_Int32 *address,
-  Atomic_Int32 value,
+  uint32_t value,
   Atomic_Memory_barrier memory_barrier
 )
 {
@@ -335,7 +335,7 @@ RTEMS_INLINE_ROUTINE void _Atomic_Fetch_or_32(
 
 RTEMS_INLINE_ROUTINE void _Atomic_Fetch_or_64(
   volatile Atomic_Int64 *address,
-  Atomic_Int64 value,
+  uint64_t value,
   Atomic_Memory_barrier memory_barrier
 )
 {
@@ -349,7 +349,7 @@ RTEMS_INLINE_ROUTINE void _Atomic_Fetch_or_64(
 
 RTEMS_INLINE_ROUTINE void _Atomic_Fetch_and_int(
   volatile Atomic_Int *address,
-  Atomic_Int value,
+  unsigned int value,
   Atomic_Memory_barrier memory_barrier
 )
 {
@@ -363,7 +363,7 @@ RTEMS_INLINE_ROUTINE void _Atomic_Fetch_and_int(
 
 RTEMS_INLINE_ROUTINE void _Atomic_Fetch_and_long(
   volatile Atomic_Long *address,
-  Atomic_Long value,
+  unsigned long value,
   Atomic_Memory_barrier memory_barrier
 )
 {
@@ -377,7 +377,7 @@ RTEMS_INLINE_ROUTINE void _Atomic_Fetch_and_long(
 
 RTEMS_INLINE_ROUTINE void _Atomic_Fetch_and_ptr(
   volatile Atomic_Pointer *address,
-  Atomic_Pointer value,
+  uintptr_t value,
   Atomic_Memory_barrier memory_barrier
 )
 {
@@ -391,7 +391,7 @@ RTEMS_INLINE_ROUTINE void _Atomic_Fetch_and_ptr(
 
 RTEMS_INLINE_ROUTINE void _Atomic_Fetch_and_32(
   volatile Atomic_Int32 *address,
-  Atomic_Int32 value,
+  uint32_t value,
   Atomic_Memory_barrier memory_barrier
 )
 {
@@ -405,7 +405,7 @@ RTEMS_INLINE_ROUTINE void _Atomic_Fetch_and_32(
 
 RTEMS_INLINE_ROUTINE void _Atomic_Fetch_and_64(
   volatile Atomic_Int64 *address,
-  Atomic_Int64 value,
+  uint64_t value,
   Atomic_Memory_barrier memory_barrier
 )
 {
@@ -419,8 +419,8 @@ RTEMS_INLINE_ROUTINE void _Atomic_Fetch_and_64(
 
 RTEMS_INLINE_ROUTINE int _Atomic_Compare_exchange_int(
   volatile Atomic_Int *address,
-  Atomic_Int old_value,
-  Atomic_Int new_value,
+  unsigned int *old_value,
+  unsigned int new_value,
   Atomic_Memory_barrier memory_barrier
 )
 {
@@ -434,8 +434,8 @@ RTEMS_INLINE_ROUTINE int _Atomic_Compare_exchange_int(
 
 RTEMS_INLINE_ROUTINE int _Atomic_Compare_exchange_long(
   volatile Atomic_Long *address,
-  Atomic_Long old_value,
-  Atomic_Long new_value,
+  unsigned long *old_value,
+  unsigned long new_value,
   Atomic_Memory_barrier memory_barrier
 )
 {
@@ -449,8 +449,8 @@ RTEMS_INLINE_ROUTINE int _Atomic_Compare_exchange_long(
 
 RTEMS_INLINE_ROUTINE int _Atomic_Compare_exchange_ptr(
   volatile Atomic_Pointer *address,
-  Atomic_Pointer old_value,
-  Atomic_Pointer new_value,
+  uintptr_t *old_value,
+  uintptr_t new_value,
   Atomic_Memory_barrier memory_barrier
 )
 {
@@ -464,8 +464,8 @@ RTEMS_INLINE_ROUTINE int _Atomic_Compare_exchange_ptr(
 
 RTEMS_INLINE_ROUTINE int _Atomic_Compare_exchange_32(
   volatile Atomic_Int32 *address,
-  Atomic_Int32 old_value,  
-  Atomic_Int32 new_value,
+  uint32_t *old_value,
+  uint32_t new_value,
   Atomic_Memory_barrier memory_barrier
 )
 {
@@ -479,8 +479,8 @@ RTEMS_INLINE_ROUTINE int _Atomic_Compare_exchange_32(
 
 RTEMS_INLINE_ROUTINE int _Atomic_Compare_exchange_64(
   volatile Atomic_Int64 *address,
-  Atomic_Int64 old_value,
-  Atomic_Int64 new_value,
+  uint64_t *old_value,
+  uint64_t new_value,
   Atomic_Memory_barrier memory_barrier
 )
 {
-- 
1.7.9.5




More information about the devel mailing list