[PATCH 2/7] score: atomic support for RTEMS. Generic atomic operations API definition.

Gedare Bloom gedare at rtems.org
Mon Feb 4 19:33:33 UTC 2013


From: WeiY <wei.a.yang at gmail.com>

---
 cpukit/rtems/include/rtems/rtems/atomic.h          |   34 ++
 cpukit/score/include/rtems/score/atomic.h          |  277 +++++++++++
 .../score/include/rtems/score/genericcpuatomic.h   |   61 +++
 cpukit/score/inline/rtems/score/atomic.inl         |  504 ++++++++++++++++++++
 4 files changed, 876 insertions(+), 0 deletions(-)
 create mode 100644 cpukit/rtems/include/rtems/rtems/atomic.h
 create mode 100644 cpukit/score/include/rtems/score/atomic.h
 create mode 100644 cpukit/score/include/rtems/score/genericcpuatomic.h
 create mode 100644 cpukit/score/inline/rtems/score/atomic.inl

diff --git a/cpukit/rtems/include/rtems/rtems/atomic.h b/cpukit/rtems/include/rtems/rtems/atomic.h
new file mode 100644
index 0000000..0686d60
--- /dev/null
+++ b/cpukit/rtems/include/rtems/rtems/atomic.h
@@ -0,0 +1,34 @@
+/**
+ * @file rtems/rtems/atomic.h
+ *
+ *  This include file provides the application interface
+ *  to atomic operations.
+ *
+ *  The low-level layer of the atomic interface is hidden from the application
+ *  and exists between the BSP and RTEMS.
+ */
+
+/*  COPYRIGHT (c) 1989-2011.
+ *  On-Line Applications Research Corporation (OAR).
+ *
+ *  The license and distribution terms for this file may be
+ *  found in the file LICENSE in this distribution or at
+ *  http://www.rtems.com/license/LICENSE.
+ */
+
+#ifndef _RTEMS_RTEMS_ATOMIC_H
+#define _RTEMS_RTEMS_ATOMIC_H
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+#include <rtems/score/atomic.h>
+
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif
+/* end of include file */
diff --git a/cpukit/score/include/rtems/score/atomic.h b/cpukit/score/include/rtems/score/atomic.h
new file mode 100644
index 0000000..3b62cb1
--- /dev/null
+++ b/cpukit/score/include/rtems/score/atomic.h
@@ -0,0 +1,277 @@
+/**
+ * @file  rtems/score/atomic.h
+ *
+ * This include file defines the interface for all of the atomic
+ * operations that can be used in synchronization primitives or in
+ * lock-less algorithms. These APIs should not be used directly by
+ * other components.
+ */
+
+/*
+ * COPYRIGHT (c) 2012 Deng Hengyi.
+ *
+ * The license and distribution terms for this file may be
+ * found in the file LICENSE in this distribution or at
+ * http://www.rtems.com/license/LICENSE.
+ */
+
+#ifndef _RTEMS_SCORE_ATOMIC_H
+#define _RTEMS_SCORE_ATOMIC_H
+
+#include <rtems/score/cpuatomic.h>
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+/**
+ * @defgroup ScoreAtomic RTEMS Atomic Interface
+ *
+ */
+
+/**@{*/
+
+/**
+ * @brief The Atomic_Memory_barrier enumeration specifies the memory
+ * synchronization behavior used in the atomic operation API
+ * definitions.
+ */
+typedef enum {
+  /** No operation orders memory. */
+  ATOMIC_RELAXED_BARRIER,
+  /** A load operation performs an acquire operation on the affected memory
+   * location. This flag guarantees that the effects of the load operation
+   * are completed before the effects of any later data accesses.
+   */
+  ATOMIC_ACQUIRE_BARRIER,
+  /** A store operation performs a release operation on the affected memory
+   * location. This flag guarantees that the effects of all previous data
+   * accesses are completed before the store operation takes place.
+   */
+  ATOMIC_RELEASE_BARRIER
+} Atomic_Memory_barrier;
+
+/**
+ * @brief Atomically load the value stored at address @a address, using the
+ * memory barrier @a memory_barrier. The @a memory_barrier shall not be
+ * ATOMIC_RELEASE_BARRIER.
+ */
+RTEMS_INLINE_ROUTINE Atomic_Int _Atomic_Load_int(
+  volatile Atomic_Int *address,
+  Atomic_Memory_barrier memory_barrier
+);
+RTEMS_INLINE_ROUTINE Atomic_Long _Atomic_Load_long(
+  volatile Atomic_Long *address,
+  Atomic_Memory_barrier memory_barrier
+);
+RTEMS_INLINE_ROUTINE Atomic_Pointer _Atomic_Load_ptr(
+  volatile Atomic_Pointer *address,
+  Atomic_Memory_barrier memory_barrier
+);
+RTEMS_INLINE_ROUTINE Atomic_Int32 _Atomic_Load_32(
+  volatile Atomic_Int32 *address,
+  Atomic_Memory_barrier memory_barrier
+);
+RTEMS_INLINE_ROUTINE Atomic_Int64 _Atomic_Load_64(
+  volatile Atomic_Int64 *address,
+  Atomic_Memory_barrier memory_barrier
+);
+
+/**
+ * @brief Atomically store the value @a value at address @a address, using
+ * the memory barrier @a memory_barrier. The @a memory_barrier shall not be
+ * ATOMIC_ACQUIRE_BARRIER.
+ */
+RTEMS_INLINE_ROUTINE void _Atomic_Store_int(
+  volatile Atomic_Int *address,
+  Atomic_Int value,
+  Atomic_Memory_barrier memory_barrier
+);
+RTEMS_INLINE_ROUTINE void _Atomic_Store_long(
+  volatile Atomic_Long *address,
+  Atomic_Long value,
+  Atomic_Memory_barrier memory_barrier
+);
+RTEMS_INLINE_ROUTINE void _Atomic_Store_ptr(
+  volatile Atomic_Pointer *address,
+  Atomic_Pointer value,
+  Atomic_Memory_barrier memory_barrier
+);
+RTEMS_INLINE_ROUTINE void _Atomic_Store_32(
+  volatile Atomic_Int32 *address,
+  Atomic_Int32 value,
+  Atomic_Memory_barrier memory_barrier
+);
+RTEMS_INLINE_ROUTINE void _Atomic_Store_64(
+  volatile Atomic_Int64 *address,
+  Atomic_Int64 value,
+  Atomic_Memory_barrier memory_barrier
+);
+
+/**
+ * @brief Atomically add @a value to the value stored at address @a address
+ * (load-add-store), using the memory barrier @a memory_barrier.
+ */
+RTEMS_INLINE_ROUTINE void _Atomic_Fetch_add_int(
+  volatile Atomic_Int *address,
+  Atomic_Int value,
+  Atomic_Memory_barrier memory_barrier
+);
+RTEMS_INLINE_ROUTINE void _Atomic_Fetch_add_long(
+  volatile Atomic_Long *address,
+  Atomic_Long value,
+  Atomic_Memory_barrier memory_barrier
+);
+RTEMS_INLINE_ROUTINE void _Atomic_Fetch_add_ptr(
+  volatile Atomic_Pointer *address,
+  Atomic_Pointer value,
+  Atomic_Memory_barrier memory_barrier
+);
+RTEMS_INLINE_ROUTINE void _Atomic_Fetch_add_32(
+  volatile Atomic_Int32 *address,
+  Atomic_Int32 value,
+  Atomic_Memory_barrier memory_barrier
+);
+RTEMS_INLINE_ROUTINE void _Atomic_Fetch_add_64(
+  volatile Atomic_Int64 *address,
+  Atomic_Int64 value,
+  Atomic_Memory_barrier memory_barrier
+);
+
+/**
+ * @brief Atomically subtract @a value from the value stored at address @a
+ * address (load-sub-store), using the memory barrier @a memory_barrier.
+ */
+RTEMS_INLINE_ROUTINE void _Atomic_Fetch_sub_int(
+  volatile Atomic_Int *address,
+  Atomic_Int value,
+  Atomic_Memory_barrier memory_barrier
+);
+RTEMS_INLINE_ROUTINE void _Atomic_Fetch_sub_long(
+  volatile Atomic_Long *address,
+  Atomic_Long value,
+  Atomic_Memory_barrier memory_barrier
+);
+RTEMS_INLINE_ROUTINE void _Atomic_Fetch_sub_ptr(
+  volatile Atomic_Pointer *address,
+  Atomic_Pointer value,
+  Atomic_Memory_barrier memory_barrier
+);
+RTEMS_INLINE_ROUTINE void _Atomic_Fetch_sub_32(
+  volatile Atomic_Int32 *address,
+  Atomic_Int32 value,
+  Atomic_Memory_barrier memory_barrier
+);
+RTEMS_INLINE_ROUTINE void _Atomic_Fetch_sub_64(
+  volatile Atomic_Int64 *address,
+  Atomic_Int64 value,
+  Atomic_Memory_barrier memory_barrier
+);
+
+/**
+ * @brief Atomically OR @a value into the value stored at address @a address
+ * (load-OR-store), using the memory barrier @a memory_barrier.
+ */
+RTEMS_INLINE_ROUTINE void _Atomic_Fetch_or_int(
+  volatile Atomic_Int *address,
+  Atomic_Int value,
+  Atomic_Memory_barrier memory_barrier
+);
+RTEMS_INLINE_ROUTINE void _Atomic_Fetch_or_long(
+  volatile Atomic_Long *address,
+  Atomic_Long value,
+  Atomic_Memory_barrier memory_barrier
+);
+RTEMS_INLINE_ROUTINE void _Atomic_Fetch_or_ptr(
+  volatile Atomic_Pointer *address,
+  Atomic_Pointer value,
+  Atomic_Memory_barrier memory_barrier
+);
+RTEMS_INLINE_ROUTINE void _Atomic_Fetch_or_32(
+  volatile Atomic_Int32 *address,
+  Atomic_Int32 value,
+  Atomic_Memory_barrier memory_barrier
+);
+RTEMS_INLINE_ROUTINE void _Atomic_Fetch_or_64(
+  volatile Atomic_Int64 *address,
+  Atomic_Int64 value,
+  Atomic_Memory_barrier memory_barrier
+);
+
+/**
+ * @brief Atomically AND @a value into the value stored at address @a address
+ * (load-AND-store), using the memory barrier @a memory_barrier.
+ */
+RTEMS_INLINE_ROUTINE void _Atomic_Fetch_and_int(
+  volatile Atomic_Int *address,
+  Atomic_Int value,
+  Atomic_Memory_barrier memory_barrier
+);
+RTEMS_INLINE_ROUTINE void _Atomic_Fetch_and_long(
+  volatile Atomic_Long *address,
+  Atomic_Long value,
+  Atomic_Memory_barrier memory_barrier
+);
+RTEMS_INLINE_ROUTINE void _Atomic_Fetch_and_ptr(
+  volatile Atomic_Pointer *address,
+  Atomic_Pointer value,
+  Atomic_Memory_barrier memory_barrier
+);
+RTEMS_INLINE_ROUTINE void _Atomic_Fetch_and_32(
+  volatile Atomic_Int32 *address,
+  Atomic_Int32 value,
+  Atomic_Memory_barrier memory_barrier
+);
+RTEMS_INLINE_ROUTINE void _Atomic_Fetch_and_64(
+  volatile Atomic_Int64 *address,
+  Atomic_Int64 value,
+  Atomic_Memory_barrier memory_barrier
+);
+
+/**
+ * @brief Atomically compare the value stored at @a address with
+ * @a old_value and, if the two values are equal, update the value at
+ * @a address to @a new_value. Returns zero if the compare failed,
+ * nonzero otherwise. The operation uses the memory barrier
+ * @a memory_barrier.
+ */
+RTEMS_INLINE_ROUTINE int _Atomic_Compare_exchange_int(
+  volatile Atomic_Int *address,
+  Atomic_Int old_value,
+  Atomic_Int new_value,
+  Atomic_Memory_barrier memory_barrier
+);
+RTEMS_INLINE_ROUTINE int _Atomic_Compare_exchange_long(
+  volatile Atomic_Long *address,
+  Atomic_Long old_value,
+  Atomic_Long new_value,
+  Atomic_Memory_barrier memory_barrier
+);
+RTEMS_INLINE_ROUTINE int _Atomic_Compare_exchange_ptr(
+  volatile Atomic_Pointer *address,
+  Atomic_Pointer old_value,
+  Atomic_Pointer new_value,
+  Atomic_Memory_barrier memory_barrier
+);
+RTEMS_INLINE_ROUTINE int _Atomic_Compare_exchange_32(
+  volatile Atomic_Int32 *address,
+  Atomic_Int32 old_value,
+  Atomic_Int32 new_value,
+  Atomic_Memory_barrier memory_barrier
+);
+RTEMS_INLINE_ROUTINE int _Atomic_Compare_exchange_64(
+  volatile Atomic_Int64 *address,
+  Atomic_Int64 old_value,
+  Atomic_Int64 new_value,
+  Atomic_Memory_barrier memory_barrier
+);
+
+#include <rtems/score/atomic.inl>
+
+#ifdef __cplusplus
+}
+#endif
+
+/**@}*/
+#endif
+/*  end of include file */
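
A minimal usage sketch of the interface above, assuming only the
signatures declared in this header; the reference counter and flag
names are hypothetical, and the barrier arguments follow the
Atomic_Memory_barrier semantics documented in the enum:

  #include <rtems/score/atomic.h>

  /* Hypothetical counter and flag; names are illustrative only. */
  static volatile Atomic_Int reference_count;

  static void reference_acquire( void )
  {
    /* Taking a reference needs no ordering: relaxed is sufficient. */
    _Atomic_Fetch_add_int( &reference_count, 1, ATOMIC_RELAXED_BARRIER );
  }

  static int try_take_flag( volatile Atomic_Int *flag )
  {
    /* Acquire: accesses after a successful CAS cannot move before it.
     * Returns nonzero if the flag changed from 0 to 1, zero otherwise. */
    return _Atomic_Compare_exchange_int( flag, 0, 1, ATOMIC_ACQUIRE_BARRIER );
  }
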
diff --git a/cpukit/score/include/rtems/score/genericcpuatomic.h b/cpukit/score/include/rtems/score/genericcpuatomic.h
new file mode 100644
index 0000000..2b49472
--- /dev/null
+++ b/cpukit/score/include/rtems/score/genericcpuatomic.h
@@ -0,0 +1,61 @@
+/**
+ * @file  rtems/score/genericcpuatomic.h
+ *
+ * This include file defines the generic atomic data types
+ * used by all architectures.
+ */
+
+/*
+ * COPYRIGHT (c) 2012 Deng Hengyi.
+ *
+ * The license and distribution terms for this file may be
+ * found in the file LICENSE in this distribution or at
+ * http://www.rtems.com/license/LICENSE.
+ */
+
+#ifndef _RTEMS_SCORE_GENERAL_ATOMIC_CPU_H
+#define _RTEMS_SCORE_GENERAL_ATOMIC_CPU_H
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+/**
+ * @defgroup ScoreAtomicCPU RTEMS General Atomic Data Types
+ *
+ */
+
+/**@{*/
+
+/**
+ * @brief atomic operation unsigned integer type
+ */
+typedef unsigned int Atomic_Int;
+
+/**
+ * @brief atomic operation unsigned long integer type
+ */
+typedef unsigned long Atomic_Long;
+
+/**
+ * @brief atomic operation unsigned 32-bit integer type
+ */
+typedef uint32_t Atomic_Int32;
+
+/**
+ * @brief atomic operation unsigned 64-bit integer type
+ */
+typedef uint64_t Atomic_Int64;
+
+/**
+ * @brief atomic operation unsigned integer type wide enough to hold a pointer
+ */
+typedef uintptr_t Atomic_Pointer;
+
+#ifdef __cplusplus
+}
+#endif
+
+/**@}*/
+#endif
+/*  end of include file */
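
Because the pointer type above is the integer uintptr_t rather than
void *, callers cast to and from Atomic_Pointer; a minimal sketch with
hypothetical names, assuming the load/store operations declared in
<rtems/score/atomic.h>:

  #include <rtems/score/atomic.h>

  struct node { struct node *next; };  /* hypothetical payload type */

  static volatile Atomic_Pointer queue_head;

  static void set_head( struct node *n )
  {
    /* Release: earlier initialization of *n is visible before the publish. */
    _Atomic_Store_ptr( &queue_head, (Atomic_Pointer) n, ATOMIC_RELEASE_BARRIER );
  }

  static struct node *get_head( void )
  {
    /* Acquire: reads through the returned pointer see the published data. */
    return (struct node *) _Atomic_Load_ptr( &queue_head, ATOMIC_ACQUIRE_BARRIER );
  }
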
diff --git a/cpukit/score/inline/rtems/score/atomic.inl b/cpukit/score/inline/rtems/score/atomic.inl
new file mode 100644
index 0000000..7deec01
--- /dev/null
+++ b/cpukit/score/inline/rtems/score/atomic.inl
@@ -0,0 +1,504 @@
+/*
+ *  Atomic Manager
+ *
+ *  COPYRIGHT (c) 2012 Deng Hengyi.
+ *
+ *  The license and distribution terms for this file may be
+ *  found in the file LICENSE in this distribution or at
+ *  http://www.rtems.com/license/LICENSE.
+ *
+ *
+ *  The functions in this file implement the API of the RTEMS Atomic Manager.
+ *  The API is designed to be compatible with the C1X atomic definitions as
+ *  far as possible, and its implementation reuses the FreeBSD kernel atomic
+ *  operations. The functions below are implemented with CPU dependent inline
+ *  routines found in the path
+ *
+ *  rtems/cpukit/score/cpu/xxx/rtems/score/cpuatomic.h
+ *
+ *  In the event that a CPU does not support a specific atomic function, the
+ *  CPU dependent routine does nothing (but does exist).
+ */
+
+#ifndef _RTEMS_SCORE_ATOMIC_H
+# error "Never use <rtems/score/atomic.inl> directly; include <rtems/score/atomic.h> instead."
+#endif
+
+#include <rtems/score/types.h>
+
+#ifndef _RTEMS_SCORE_ATOMIC_INL
+#define _RTEMS_SCORE_ATOMIC_INL
+
+RTEMS_INLINE_ROUTINE Atomic_Int _Atomic_Load_int(
+  volatile Atomic_Int *address,
+  Atomic_Memory_barrier memory_barrier
+)
+{
+  if(ATOMIC_ACQUIRE_BARRIER == memory_barrier)
+    return _CPU_Atomic_Load_acq_int(address);
+  else
+    return _CPU_Atomic_Load_int(address);
+}
+
+RTEMS_INLINE_ROUTINE Atomic_Long _Atomic_Load_long(
+  volatile Atomic_Long *address,
+  Atomic_Memory_barrier memory_barrier
+)
+{
+  if(ATOMIC_ACQUIRE_BARRIER == memory_barrier)
+    return _CPU_Atomic_Load_acq_long(address);
+  else
+    return _CPU_Atomic_Load_long(address);
+}
+
+RTEMS_INLINE_ROUTINE Atomic_Pointer _Atomic_Load_ptr(
+  volatile Atomic_Pointer *address,
+  Atomic_Memory_barrier memory_barrier
+)
+{
+  if(ATOMIC_ACQUIRE_BARRIER == memory_barrier)
+    return _CPU_Atomic_Load_acq_ptr(address);
+  else
+    return _CPU_Atomic_Load_ptr(address);
+}
+
+RTEMS_INLINE_ROUTINE Atomic_Int32 _Atomic_Load_32(
+  volatile Atomic_Int32 *address,
+  Atomic_Memory_barrier memory_barrier
+)
+{
+  if(ATOMIC_ACQUIRE_BARRIER == memory_barrier)
+    return _CPU_Atomic_Load_acq_32(address);
+  else
+    return _CPU_Atomic_Load_32(address);
+}
+
+RTEMS_INLINE_ROUTINE Atomic_Int64 _Atomic_Load_64(
+  volatile Atomic_Int64 *address,
+  Atomic_Memory_barrier memory_barrier
+)
+{
+  if(ATOMIC_ACQUIRE_BARRIER == memory_barrier)
+    return _CPU_Atomic_Load_acq_64(address);
+  else
+    return _CPU_Atomic_Load_64(address);
+}
+
+
+RTEMS_INLINE_ROUTINE void _Atomic_Store_int(
+  volatile Atomic_Int *address,
+  Atomic_Int value,
+  Atomic_Memory_barrier memory_barrier
+)
+{
+  if(ATOMIC_RELEASE_BARRIER == memory_barrier)
+    return _CPU_Atomic_Store_rel_int(address, value);
+  else
+    return _CPU_Atomic_Store_int(address, value);
+}
+
+RTEMS_INLINE_ROUTINE void _Atomic_Store_long(
+  volatile Atomic_Long *address,
+  Atomic_Long value,
+  Atomic_Memory_barrier memory_barrier
+)
+{
+  if(ATOMIC_RELEASE_BARRIER == memory_barrier)
+    return _CPU_Atomic_Store_rel_long(address, value);
+  else
+    return _CPU_Atomic_Store_long(address, value);
+}
+
+RTEMS_INLINE_ROUTINE void _Atomic_Store_ptr(
+  volatile Atomic_Pointer *address,
+  Atomic_Pointer value,
+  Atomic_Memory_barrier memory_barrier
+)
+{
+  if(ATOMIC_RELEASE_BARRIER == memory_barrier)
+    return _CPU_Atomic_Store_rel_ptr(address, value);
+  else
+    return _CPU_Atomic_Store_ptr(address, value);
+}
+
+RTEMS_INLINE_ROUTINE void _Atomic_Store_32(
+  volatile Atomic_Int32 *address,
+  Atomic_Int32 value,
+  Atomic_Memory_barrier memory_barrier
+)
+{
+  if(ATOMIC_RELEASE_BARRIER == memory_barrier)
+    return _CPU_Atomic_Store_rel_32(address, value);
+  else
+    return _CPU_Atomic_Store_32(address, value);
+}
+
+RTEMS_INLINE_ROUTINE void _Atomic_Store_64(
+  volatile Atomic_Int64 *address,
+  Atomic_Int64 value,
+  Atomic_Memory_barrier memory_barrier
+)
+{
+  if(ATOMIC_RELEASE_BARRIER == memory_barrier)
+    return _CPU_Atomic_Store_rel_64(address, value);
+  else
+    return _CPU_Atomic_Store_64(address, value);
+}
+
+RTEMS_INLINE_ROUTINE void _Atomic_Fetch_add_int(
+  volatile Atomic_Int *address,
+  Atomic_Int value,
+  Atomic_Memory_barrier memory_barrier
+)
+{
+  if(ATOMIC_ACQUIRE_BARRIER == memory_barrier)
+    return _CPU_Atomic_Fetch_add_acq_int(address, value);
+  else if(ATOMIC_RELEASE_BARRIER == memory_barrier)
+    return _CPU_Atomic_Fetch_add_rel_int(address, value);
+  else
+    return _CPU_Atomic_Fetch_add_int(address, value);
+}
+
+RTEMS_INLINE_ROUTINE void _Atomic_Fetch_add_long(
+  volatile Atomic_Long *address,
+  Atomic_Long value,
+  Atomic_Memory_barrier memory_barrier
+)
+{
+  if(ATOMIC_ACQUIRE_BARRIER == memory_barrier)
+    return _CPU_Atomic_Fetch_add_acq_long(address, value);
+  else if(ATOMIC_RELEASE_BARRIER == memory_barrier)
+    return _CPU_Atomic_Fetch_add_rel_long(address, value);
+  else
+    return _CPU_Atomic_Fetch_add_long(address, value);
+}
+
+RTEMS_INLINE_ROUTINE void _Atomic_Fetch_add_ptr(
+  volatile Atomic_Pointer *address,
+  Atomic_Pointer value,
+  Atomic_Memory_barrier memory_barrier
+)
+{
+  if(ATOMIC_ACQUIRE_BARRIER == memory_barrier)
+    return _CPU_Atomic_Fetch_add_acq_ptr(address, value);
+  else if(ATOMIC_RELEASE_BARRIER == memory_barrier)
+    return _CPU_Atomic_Fetch_add_rel_ptr(address, value);
+  else
+    return _CPU_Atomic_Fetch_add_ptr(address, value);
+}
+
+RTEMS_INLINE_ROUTINE void _Atomic_Fetch_add_32(
+  volatile Atomic_Int32 *address,
+  Atomic_Int32 value,
+  Atomic_Memory_barrier memory_barrier
+)
+{
+  if(ATOMIC_ACQUIRE_BARRIER == memory_barrier)
+    return _CPU_Atomic_Fetch_add_acq_32(address, value);
+  else if(ATOMIC_RELEASE_BARRIER == memory_barrier)
+    return _CPU_Atomic_Fetch_add_rel_32(address, value);
+  else
+    return _CPU_Atomic_Fetch_add_32(address, value);
+}
+
+RTEMS_INLINE_ROUTINE void _Atomic_Fetch_add_64(
+  volatile Atomic_Int64 *address,
+  Atomic_Int64 value,
+  Atomic_Memory_barrier memory_barrier
+)
+{
+  if(ATOMIC_ACQUIRE_BARRIER == memory_barrier)
+    return _CPU_Atomic_Fetch_add_acq_64(address, value);
+  else if(ATOMIC_RELEASE_BARRIER == memory_barrier)
+    return _CPU_Atomic_Fetch_add_rel_64(address, value);
+  else
+    return _CPU_Atomic_Fetch_add_64(address, value);
+}
+
+RTEMS_INLINE_ROUTINE void _Atomic_Fetch_sub_int(
+  volatile Atomic_Int *address,
+  Atomic_Int value,
+  Atomic_Memory_barrier memory_barrier
+)
+{
+  if(ATOMIC_ACQUIRE_BARRIER == memory_barrier)
+    return _CPU_Atomic_Fetch_sub_acq_int(address, value);
+  else if(ATOMIC_RELEASE_BARRIER == memory_barrier)
+    return _CPU_Atomic_Fetch_sub_rel_int(address, value);
+  else
+    return _CPU_Atomic_Fetch_sub_int(address, value);
+}
+
+RTEMS_INLINE_ROUTINE void _Atomic_Fetch_sub_long(
+  volatile Atomic_Long *address,
+  Atomic_Long value,
+  Atomic_Memory_barrier memory_barrier
+)
+{
+  if(ATOMIC_ACQUIRE_BARRIER == memory_barrier)
+    return _CPU_Atomic_Fetch_sub_acq_long(address, value);
+  else if(ATOMIC_RELEASE_BARRIER == memory_barrier)
+    return _CPU_Atomic_Fetch_sub_rel_long(address, value);
+  else
+    return _CPU_Atomic_Fetch_sub_long(address, value);
+}
+
+RTEMS_INLINE_ROUTINE void _Atomic_Fetch_sub_ptr(
+  volatile Atomic_Pointer *address,
+  Atomic_Pointer value,
+  Atomic_Memory_barrier memory_barrier
+)
+{
+  if(ATOMIC_ACQUIRE_BARRIER == memory_barrier)
+    return _CPU_Atomic_Fetch_sub_acq_ptr(address, value);
+  else if(ATOMIC_RELEASE_BARRIER == memory_barrier)
+    return _CPU_Atomic_Fetch_sub_rel_ptr(address, value);
+  else
+    return _CPU_Atomic_Fetch_sub_ptr(address, value);
+}
+
+RTEMS_INLINE_ROUTINE void _Atomic_Fetch_sub_32(
+  volatile Atomic_Int32 *address,
+  Atomic_Int32 value,
+  Atomic_Memory_barrier memory_barrier
+)
+{
+  if(ATOMIC_ACQUIRE_BARRIER == memory_barrier)
+    return _CPU_Atomic_Fetch_sub_acq_32(address, value);
+  else if(ATOMIC_RELEASE_BARRIER == memory_barrier)
+    return _CPU_Atomic_Fetch_sub_rel_32(address, value);
+  else
+    return _CPU_Atomic_Fetch_sub_32(address, value);
+}
+
+RTEMS_INLINE_ROUTINE void _Atomic_Fetch_sub_64(
+  volatile Atomic_Int64 *address,
+  Atomic_Int64 value,
+  Atomic_Memory_barrier memory_barrier
+)
+{
+  if(ATOMIC_ACQUIRE_BARRIER == memory_barrier)
+    return _CPU_Atomic_Fetch_sub_acq_64(address, value);
+  else if(ATOMIC_RELEASE_BARRIER == memory_barrier)
+    return _CPU_Atomic_Fetch_sub_rel_64(address, value);
+  else
+    return _CPU_Atomic_Fetch_sub_64(address, value);
+}
+
+RTEMS_INLINE_ROUTINE void _Atomic_Fetch_or_int(
+  volatile Atomic_Int *address,
+  Atomic_Int value,
+  Atomic_Memory_barrier memory_barrier
+)
+{
+  if(ATOMIC_ACQUIRE_BARRIER == memory_barrier)
+    return _CPU_Atomic_Fetch_or_acq_int(address, value);
+  else if(ATOMIC_RELEASE_BARRIER == memory_barrier)
+    return _CPU_Atomic_Fetch_or_rel_int(address, value);
+  else
+    return _CPU_Atomic_Fetch_or_int(address, value);
+}
+
+RTEMS_INLINE_ROUTINE void _Atomic_Fetch_or_long(
+  volatile Atomic_Long *address,
+  Atomic_Long value,
+  Atomic_Memory_barrier memory_barrier
+)
+{
+  if(ATOMIC_ACQUIRE_BARRIER == memory_barrier)
+    return _CPU_Atomic_Fetch_or_acq_long(address, value);
+  else if(ATOMIC_RELEASE_BARRIER == memory_barrier)
+    return _CPU_Atomic_Fetch_or_rel_long(address, value);
+  else
+    return _CPU_Atomic_Fetch_or_long(address, value);
+}
+
+RTEMS_INLINE_ROUTINE void _Atomic_Fetch_or_ptr(
+  volatile Atomic_Pointer *address,
+  Atomic_Pointer value,
+  Atomic_Memory_barrier memory_barrier
+)
+{
+  if(ATOMIC_ACQUIRE_BARRIER == memory_barrier)
+    return _CPU_Atomic_Fetch_or_acq_ptr(address, value);
+  else if(ATOMIC_RELEASE_BARRIER == memory_barrier)
+    return _CPU_Atomic_Fetch_or_rel_ptr(address, value);
+  else
+    return _CPU_Atomic_Fetch_or_ptr(address, value);
+}
+
+RTEMS_INLINE_ROUTINE void _Atomic_Fetch_or_32(
+  volatile Atomic_Int32 *address,
+  Atomic_Int32 value,
+  Atomic_Memory_barrier memory_barrier
+)
+{
+  if(ATOMIC_ACQUIRE_BARRIER == memory_barrier)
+    return _CPU_Atomic_Fetch_or_acq_32(address, value);
+  else if(ATOMIC_RELEASE_BARRIER == memory_barrier)
+    return _CPU_Atomic_Fetch_or_rel_32(address, value);
+  else
+    return _CPU_Atomic_Fetch_or_32(address, value);
+}
+
+RTEMS_INLINE_ROUTINE void _Atomic_Fetch_or_64(
+  volatile Atomic_Int64 *address,
+  Atomic_Int64 value,
+  Atomic_Memory_barrier memory_barrier
+)
+{
+  if(ATOMIC_ACQUIRE_BARRIER == memory_barrier)
+    return _CPU_Atomic_Fetch_or_acq_64(address, value);
+  else if(ATOMIC_RELEASE_BARRIER == memory_barrier)
+    return _CPU_Atomic_Fetch_or_rel_64(address, value);
+  else
+    return _CPU_Atomic_Fetch_or_64(address, value);
+}
+
+RTEMS_INLINE_ROUTINE void _Atomic_Fetch_and_int(
+  volatile Atomic_Int *address,
+  Atomic_Int value,
+  Atomic_Memory_barrier memory_barrier
+)
+{
+  if(ATOMIC_ACQUIRE_BARRIER == memory_barrier)
+    return _CPU_Atomic_Fetch_and_acq_int(address, value);
+  else if(ATOMIC_RELEASE_BARRIER == memory_barrier)
+    return _CPU_Atomic_Fetch_and_rel_int(address, value);
+  else
+    return _CPU_Atomic_Fetch_and_int(address, value);
+}
+
+RTEMS_INLINE_ROUTINE void _Atomic_Fetch_and_long(
+  volatile Atomic_Long *address,
+  Atomic_Long value,
+  Atomic_Memory_barrier memory_barrier
+)
+{
+  if(ATOMIC_ACQUIRE_BARRIER == memory_barrier)
+    return _CPU_Atomic_Fetch_and_acq_long(address, value);
+  else if(ATOMIC_RELEASE_BARRIER == memory_barrier)
+    return _CPU_Atomic_Fetch_and_rel_long(address, value);
+  else
+    return _CPU_Atomic_Fetch_and_long(address, value);
+}
+
+RTEMS_INLINE_ROUTINE void _Atomic_Fetch_and_ptr(
+  volatile Atomic_Pointer *address,
+  Atomic_Pointer value,
+  Atomic_Memory_barrier memory_barrier
+)
+{
+  if(ATOMIC_ACQUIRE_BARRIER == memory_barrier)
+    return _CPU_Atomic_Fetch_and_acq_ptr(address, value);
+  else if(ATOMIC_RELEASE_BARRIER == memory_barrier)
+    return _CPU_Atomic_Fetch_and_rel_ptr(address, value);
+  else
+    return _CPU_Atomic_Fetch_and_ptr(address, value);
+}
+
+RTEMS_INLINE_ROUTINE void _Atomic_Fetch_and_32(
+  volatile Atomic_Int32 *address,
+  Atomic_Int32 value,
+  Atomic_Memory_barrier memory_barrier
+)
+{
+  if(ATOMIC_ACQUIRE_BARRIER == memory_barrier)
+    return _CPU_Atomic_Fetch_and_acq_32(address, value);
+  else if(ATOMIC_RELEASE_BARRIER == memory_barrier)
+    return _CPU_Atomic_Fetch_and_rel_32(address, value);
+  else
+    return _CPU_Atomic_Fetch_and_32(address, value);
+}
+
+RTEMS_INLINE_ROUTINE void _Atomic_Fetch_and_64(
+  volatile Atomic_Int64 *address,
+  Atomic_Int64 value,
+  Atomic_Memory_barrier memory_barrier
+)
+{
+  if(ATOMIC_ACQUIRE_BARRIER == memory_barrier)
+    return _CPU_Atomic_Fetch_and_acq_64(address, value);
+  else if(ATOMIC_RELEASE_BARRIER == memory_barrier)
+    return _CPU_Atomic_Fetch_and_rel_64(address, value);
+  else
+    return _CPU_Atomic_Fetch_and_64(address, value);
+}
+
+RTEMS_INLINE_ROUTINE int _Atomic_Compare_exchange_int(
+  volatile Atomic_Int *address,
+  Atomic_Int old_value,
+  Atomic_Int new_value,
+  Atomic_Memory_barrier memory_barrier
+)
+{
+  if(ATOMIC_ACQUIRE_BARRIER == memory_barrier)
+    return _CPU_Atomic_Compare_exchange_acq_int(address, old_value, new_value);
+  else if(ATOMIC_RELEASE_BARRIER == memory_barrier)
+    return _CPU_Atomic_Compare_exchange_rel_int(address, old_value, new_value);
+  else
+    return _CPU_Atomic_Compare_exchange_int(address, old_value, new_value);
+}
+
+RTEMS_INLINE_ROUTINE int _Atomic_Compare_exchange_long(
+  volatile Atomic_Long *address,
+  Atomic_Long old_value,
+  Atomic_Long new_value,
+  Atomic_Memory_barrier memory_barrier
+)
+{
+  if(ATOMIC_ACQUIRE_BARRIER == memory_barrier)
+    return _CPU_Atomic_Compare_exchange_acq_long(address, old_value, new_value);
+  else if(ATOMIC_RELEASE_BARRIER == memory_barrier)
+    return _CPU_Atomic_Compare_exchange_rel_long(address, old_value, new_value);
+  else
+    return _CPU_Atomic_Compare_exchange_long(address, old_value, new_value);
+}
+
+RTEMS_INLINE_ROUTINE int _Atomic_Compare_exchange_ptr(
+  volatile Atomic_Pointer *address,
+  Atomic_Pointer old_value,
+  Atomic_Pointer new_value,
+  Atomic_Memory_barrier memory_barrier
+)
+{
+  if(ATOMIC_ACQUIRE_BARRIER == memory_barrier)
+    return _CPU_Atomic_Compare_exchange_acq_ptr(address, old_value, new_value);
+  else if(ATOMIC_RELEASE_BARRIER == memory_barrier)
+    return _CPU_Atomic_Compare_exchange_rel_ptr(address, old_value, new_value);
+  else
+    return _CPU_Atomic_Compare_exchange_ptr(address, old_value, new_value);
+}
+
+RTEMS_INLINE_ROUTINE int _Atomic_Compare_exchange_32(
+  volatile Atomic_Int32 *address,
+  Atomic_Int32 old_value,
+  Atomic_Int32 new_value,
+  Atomic_Memory_barrier memory_barrier
+)
+{
+  if(ATOMIC_ACQUIRE_BARRIER == memory_barrier)
+    return _CPU_Atomic_Compare_exchange_acq_32(address, old_value, new_value);
+  else if(ATOMIC_RELEASE_BARRIER == memory_barrier)
+    return _CPU_Atomic_Compare_exchange_rel_32(address, old_value, new_value);
+  else
+    return _CPU_Atomic_Compare_exchange_32(address, old_value, new_value);
+}
+
+RTEMS_INLINE_ROUTINE int _Atomic_Compare_exchange_64(
+  volatile Atomic_Int64 *address,
+  Atomic_Int64 old_value,
+  Atomic_Int64 new_value,
+  Atomic_Memory_barrier memory_barrier
+)
+{
+  if(ATOMIC_ACQUIRE_BARRIER == memory_barrier)
+    return _CPU_Atomic_Compare_exchange_acq_64(address, old_value, new_value);
+  else if(ATOMIC_RELEASE_BARRIER == memory_barrier)
+    return _CPU_Atomic_Compare_exchange_rel_64(address, old_value, new_value);
+  else
+    return _CPU_Atomic_Compare_exchange_64(address, old_value, new_value);
+}
+
+#endif
+/* end of include file */
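
For orientation, a sketch of the shape of the CPU-dependent layer this
file dispatches to, as it might appear inside a port's cpuatomic.h.
Only three of the required routines are shown, with GCC __sync builtins
as stand-ins; real ports under cpukit/score/cpu/xxx/rtems/score/cpuatomic.h
implement these (and the _acq_/_rel_ variants) with architecture-specific
instructions:

  RTEMS_INLINE_ROUTINE Atomic_Int _CPU_Atomic_Load_int(
    volatile Atomic_Int *address
  )
  {
    /* A plain load is atomic for naturally aligned word-sized objects. */
    return *address;
  }

  RTEMS_INLINE_ROUTINE void _CPU_Atomic_Fetch_add_int(
    volatile Atomic_Int *address,
    Atomic_Int value
  )
  {
    __sync_fetch_and_add( address, value );
  }

  RTEMS_INLINE_ROUTINE int _CPU_Atomic_Compare_exchange_int(
    volatile Atomic_Int *address,
    Atomic_Int old_value,
    Atomic_Int new_value
  )
  {
    return __sync_bool_compare_and_swap( address, old_value, new_value );
  }
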
-- 
1.7.1