[PATCH 2/5] A generic atomic implementation for smp architectures based on stdatomic.h

WeiY wei.a.yang at gmail.com
Sun Jul 7 15:52:10 UTC 2013


Signed-off-by: WeiY <wei.a.yang at gmail.com>
---
 cpukit/score/Makefile.am                        |    1 +
 cpukit/score/include/rtems/score/cpustdatomic.h |  350 +++++++++++++++++++++++
 cpukit/score/preinstall.am                      |    4 +
 3 files changed, 355 insertions(+)
 create mode 100644 cpukit/score/include/rtems/score/cpustdatomic.h

diff --git a/cpukit/score/Makefile.am b/cpukit/score/Makefile.am
index 3f6e686..82bf26d 100644
--- a/cpukit/score/Makefile.am
+++ b/cpukit/score/Makefile.am
@@ -63,6 +63,7 @@ include_rtems_score_HEADERS += include/rtems/score/basedefs.h
 include_rtems_score_HEADERS += include/rtems/score/atomic.h
 include_rtems_score_HEADERS += include/rtems/score/genericcpuatomic.h
 include_rtems_score_HEADERS += include/rtems/score/genericatomicops.h
+include_rtems_score_HEADERS += include/rtems/score/cpustdatomic.h
 
 if HAS_PTHREADS
 include_rtems_score_HEADERS += include/rtems/score/corespinlock.h
diff --git a/cpukit/score/include/rtems/score/cpustdatomic.h b/cpukit/score/include/rtems/score/cpustdatomic.h
new file mode 100644
index 0000000..7dcc9d8
--- /dev/null
+++ b/cpukit/score/include/rtems/score/cpustdatomic.h
@@ -0,0 +1,350 @@
+/**
+ * @file  rtems/score/cpustdatomic.h
+ * 
+ * This include file defines the generic data structures and implementation
+ * based on stdatomic.h for all supported architectures. You should not
+ * include this header file directly, because it will be used by atomic.h,
+ * which should be included by score components.
+ */
+
+/*
+ * COPYRIGHT (c) 2013 Deng Hengyi.
+ *
+ * The license and distribution terms for this file may be
+ * found in the file LICENSE in this distribution or at
+ * http://www.rtems.com/license/LICENSE.
+ */
+
+#ifndef _RTEMS_SCORE_GENERAL_STDATOMIC_CPU_H_
+#define _RTEMS_SCORE_GENERAL_STDATOMIC_CPU_H_
+
+#include <stdatomic.h>
+#include <rtems/score/types.h>
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+/**
+ * @defgroup GeneralStdAtomic RTEMS general stdatomic data types and implementation.
+ *
+ */
+
+/**@{*/
+
+/**
+ * @brief Atomic operation unsigned integer type (at least 32 bits wide).
+ */
+typedef atomic_uint_fast32_t Atomic_Uint;
+
+/**
+ * @brief Atomic operation unsigned integer type wide enough to hold a
+ * pointer value.
+ */
+typedef atomic_uintptr_t Atomic_Pointer;
+
+/**
+ * @brief Atomic operation flag type.
+ *
+ * NOTE(review): C11 defines only test-and-set and clear for atomic_flag
+ * (7.17.8); the "_flag" load/store/fetch/exchange routines below pass it
+ * to the generic atomic functions -- confirm this builds on all intended
+ * toolchains.
+ */
+typedef atomic_flag Atomic_Flag;
+
+/**
+ * @brief The enumeration Atomic_Order specifies the detailed regular
+ * memory synchronization operations used in the atomic operation API
+ * definitions.
+ */
+typedef enum {
+  /** no operation orders memory. */
+  ATOMIC_ORDER_RELAXED = memory_order_relaxed,
+  /** a load operation performs an acquire operation on the affected memory
+  * location. This flag guarantees that the effects of the load operation are
+  * completed before the effects of any later data accesses.
+  */
+  ATOMIC_ORDER_ACQUIRE = memory_order_acquire,
+  /** a store operation performs a release operation on the affected memory
+  * location. This flag guarantees that all effects of all previous data
+  * accesses are completed before the store operation takes place.
+  */
+  ATOMIC_ORDER_RELEASE = memory_order_release
+} Atomic_Order;
+
+/**
+ * @brief Atomically load an atomic type value from object @a object with
+ * a memory order of Atomic_Order @a order. The @a order shall not be
+ * ATOMIC_ORDER_RELEASE.
+ */
+RTEMS_INLINE_ROUTINE uint_fast32_t _CPU_Atomic_Load_uint(
+  volatile Atomic_Uint *object,
+  Atomic_Order order
+)
+{
+  return atomic_load_explicit(object, order);
+}
+RTEMS_INLINE_ROUTINE uintptr_t _CPU_Atomic_Load_ptr(
+  volatile Atomic_Pointer *object,
+  Atomic_Order order
+)
+{
+  return atomic_load_explicit(object, order);
+}
+/* NOTE(review): C11 atomic_flag supports only test-and-set and clear;
+ * atomic_load_explicit() on an atomic_flag is non-conforming -- verify
+ * this compiles on all intended toolchains.
+ */
+RTEMS_INLINE_ROUTINE bool _CPU_Atomic_Load_flag(
+  volatile Atomic_Flag *object,
+  Atomic_Order order
+)
+{
+  return atomic_load_explicit(object, order);
+}
+
+/**
+ * @brief Atomically store an atomic type value @a value into object @a
+ * object with a memory order of Atomic_Order @a order. The @a
+ * order shall not be ATOMIC_ORDER_ACQUIRE.
+ */
+RTEMS_INLINE_ROUTINE void _CPU_Atomic_Store_uint(
+  volatile Atomic_Uint *object,
+  uint_fast32_t value,
+  Atomic_Order order
+)
+{
+  atomic_store_explicit(object, value, order);
+}
+RTEMS_INLINE_ROUTINE void _CPU_Atomic_Store_ptr(
+  volatile Atomic_Pointer *object,
+  uintptr_t value,
+  Atomic_Order order
+)
+{
+  atomic_store_explicit(object, value, order);
+}
+/* NOTE(review): C11 atomic_flag supports only test-and-set and clear;
+ * atomic_store_explicit() on an atomic_flag is non-conforming -- verify
+ * this compiles on all intended toolchains.
+ */
+RTEMS_INLINE_ROUTINE void _CPU_Atomic_Store_flag(
+  volatile Atomic_Flag *object,
+  bool value,
+  Atomic_Order order
+)
+{
+  atomic_store_explicit(object, value, order);
+}
+
+/**
+ * @brief Atomically load-add-store an atomic type value @a value into object
+ * @a object with a memory order of Atomic_Order @a order. Returns the value
+ * of @a object before the atomic operation.
+ */
+RTEMS_INLINE_ROUTINE uint_fast32_t _CPU_Atomic_Fetch_add_uint(
+  volatile Atomic_Uint *object,
+  uint_fast32_t value,
+  Atomic_Order order
+)
+{
+  return atomic_fetch_add_explicit(object, value, order);
+}
+RTEMS_INLINE_ROUTINE uintptr_t _CPU_Atomic_Fetch_add_ptr(
+  volatile Atomic_Pointer *object,
+  uintptr_t value,
+  Atomic_Order order
+)
+{
+  return atomic_fetch_add_explicit(object, value, order);
+}
+/* NOTE(review): C11 atomic_flag supports only test-and-set and clear;
+ * atomic_fetch_add_explicit() on an atomic_flag is non-conforming -- verify
+ * this compiles on all intended toolchains.
+ */
+RTEMS_INLINE_ROUTINE bool _CPU_Atomic_Fetch_add_flag(
+  volatile Atomic_Flag *object,
+  bool value,
+  Atomic_Order order
+)
+{
+  return atomic_fetch_add_explicit(object, value, order);
+}
+
+/**
+ * @brief Atomically load-sub-store an atomic type value @a value into object
+ * @a object with a memory order of Atomic_Order @a order. Returns the value
+ * of @a object before the atomic operation.
+ */
+RTEMS_INLINE_ROUTINE uint_fast32_t _CPU_Atomic_Fetch_sub_uint(
+  volatile Atomic_Uint *object,
+  uint_fast32_t value,
+  Atomic_Order order
+)
+{
+  return atomic_fetch_sub_explicit(object, value, order);
+}
+RTEMS_INLINE_ROUTINE uintptr_t _CPU_Atomic_Fetch_sub_ptr(
+  volatile Atomic_Pointer *object,
+  uintptr_t value,
+  Atomic_Order order
+)
+{
+  return atomic_fetch_sub_explicit(object, value, order);
+}
+/* NOTE(review): C11 atomic_flag supports only test-and-set and clear;
+ * atomic_fetch_sub_explicit() on an atomic_flag is non-conforming -- verify
+ * this compiles on all intended toolchains.
+ */
+RTEMS_INLINE_ROUTINE bool _CPU_Atomic_Fetch_sub_flag(
+  volatile Atomic_Flag *object,
+  bool value,
+  Atomic_Order order
+)
+{
+  return atomic_fetch_sub_explicit(object, value, order);
+}
+
+/**
+ * @brief Atomically load-or-store an atomic type value @a value into object
+ * @a object with a memory order of Atomic_Order @a order. Returns the value
+ * of @a object before the atomic operation.
+ */
+RTEMS_INLINE_ROUTINE uint_fast32_t _CPU_Atomic_Fetch_or_uint(
+  volatile Atomic_Uint *object,
+  uint_fast32_t value,
+  Atomic_Order order
+)
+{
+  return atomic_fetch_or_explicit(object, value, order);
+}
+RTEMS_INLINE_ROUTINE uintptr_t _CPU_Atomic_Fetch_or_ptr(
+  volatile Atomic_Pointer *object,
+  uintptr_t value,
+  Atomic_Order order
+)
+{
+  return atomic_fetch_or_explicit(object, value, order);
+}
+/* NOTE(review): C11 atomic_flag supports only test-and-set and clear;
+ * atomic_fetch_or_explicit() on an atomic_flag is non-conforming -- verify
+ * this compiles on all intended toolchains.
+ */
+RTEMS_INLINE_ROUTINE bool _CPU_Atomic_Fetch_or_flag(
+  volatile Atomic_Flag *object,
+  bool value,
+  Atomic_Order order
+)
+{
+  return atomic_fetch_or_explicit(object, value, order);
+}
+
+/**
+ * @brief Atomically load-and-store an atomic type value @a value into object
+ * @a object with a memory order of Atomic_Order @a order. Returns the value
+ * of @a object before the atomic operation.
+ */
+RTEMS_INLINE_ROUTINE uint_fast32_t _CPU_Atomic_Fetch_and_uint(
+  volatile Atomic_Uint *object,
+  uint_fast32_t value,
+  Atomic_Order order
+)
+{
+  return atomic_fetch_and_explicit(object, value, order);
+}
+RTEMS_INLINE_ROUTINE uintptr_t _CPU_Atomic_Fetch_and_ptr(
+  volatile Atomic_Pointer *object,
+  uintptr_t value,
+  Atomic_Order order
+)
+{
+  return atomic_fetch_and_explicit(object, value, order);
+}
+/* NOTE(review): C11 atomic_flag supports only test-and-set and clear;
+ * atomic_fetch_and_explicit() on an atomic_flag is non-conforming -- verify
+ * this compiles on all intended toolchains.
+ */
+RTEMS_INLINE_ROUTINE bool _CPU_Atomic_Fetch_and_flag(
+  volatile Atomic_Flag *object,
+  bool value,
+  Atomic_Order order
+)
+{
+  return atomic_fetch_and_explicit(object, value, order);
+}
+
+/**
+ * @brief Atomically exchange the value of @a object with @a value.
+ * Returns the value before the exchange.
+ * The operation uses a memory order of Atomic_Order @a order.
+ */
+RTEMS_INLINE_ROUTINE uint_fast32_t _CPU_Atomic_Exchange_uint(
+  volatile Atomic_Uint *object,
+  uint_fast32_t value,
+  Atomic_Order order
+)
+{
+  return atomic_exchange_explicit(object, value, order);
+}
+RTEMS_INLINE_ROUTINE uintptr_t _CPU_Atomic_Exchange_ptr(
+  volatile Atomic_Pointer *object,
+  uintptr_t value,
+  Atomic_Order order
+)
+{
+  return atomic_exchange_explicit(object, value, order);
+}
+/* NOTE(review): C11 atomic_flag supports only test-and-set and clear;
+ * atomic_exchange_explicit() on an atomic_flag is non-conforming -- verify
+ * this compiles on all intended toolchains.
+ */
+RTEMS_INLINE_ROUTINE bool _CPU_Atomic_Exchange_flag(
+  volatile Atomic_Flag *object,
+  bool value,
+  Atomic_Order order
+)
+{
+  return atomic_exchange_explicit(object, value, order);
+}
+
+/**
+ * @brief Atomically compare the value stored at @a object with @a
+ * old_value and if the two values are equal, update the value of @a
+ * address with @a new_value. Returns false if the compare failed,
+ * true otherwise. The operation uses a memory order of Atomic_Order
+ * @a order_succ for the successful case and a memory order of Atomic_Order
+ * @a order_fail for the failed case.
+ */
+RTEMS_INLINE_ROUTINE bool _CPU_Atomic_Compare_exchange_uint(
+  volatile Atomic_Uint *object,
+  uint_fast32_t *old_value,
+  uint_fast32_t new_value,
+  Atomic_Order order_succ,
+  Atomic_Order order_fail
+)
+{
+  return atomic_compare_exchange_strong_explicit(object, old_value,
+    new_value, order_succ, order_fail);
+}
+RTEMS_INLINE_ROUTINE bool _CPU_Atomic_Compare_exchange_ptr(
+  volatile Atomic_Pointer *object,
+  uintptr_t *old_value,
+  uintptr_t new_value,
+  Atomic_Order order_succ,
+  Atomic_Order order_fail
+)
+{
+  return atomic_compare_exchange_strong_explicit(object, old_value,
+    new_value, order_succ, order_fail);
+}
+/* NOTE(review): C11 atomic_flag supports only test-and-set and clear;
+ * atomic_compare_exchange_strong_explicit() expects the expected-value
+ * pointer to match the atomic object's value type, so passing a bool* for
+ * an atomic_flag is non-conforming -- verify on all intended toolchains.
+ */
+RTEMS_INLINE_ROUTINE bool _CPU_Atomic_Compare_exchange_flag(
+  volatile Atomic_Flag *object,
+  bool *old_value,
+  bool new_value,
+  Atomic_Order order_succ,
+  Atomic_Order order_fail
+)
+{
+  return atomic_compare_exchange_strong_explicit(object, old_value,
+    new_value, order_succ, order_fail);
+}
+
+/**
+ * @brief Atomically clear the value of an atomic flag type object
+ * @a object with a memory order of Atomic_Order @a order.
+ */
+RTEMS_INLINE_ROUTINE void _CPU_Atomic_Clear_flag(
+  volatile Atomic_Flag *object,
+  Atomic_Order order
+)
+{
+  /* atomic_flag_clear_explicit() returns void; a return statement with an
+   * expression in a void function is a constraint violation (C99 6.8.6.4).
+   */
+  atomic_flag_clear_explicit(object, order);
+}
+
+/**
+ * @brief Atomically test and set the value of an atomic flag type
+ * object @a object with a memory order of Atomic_Order @a order.
+ *
+ * Returns the previous state of the flag: true if the flag was already
+ * set, false otherwise.
+ */
+RTEMS_INLINE_ROUTINE bool _CPU_Atomic_Test_set_flag(
+  volatile Atomic_Flag *object,
+  Atomic_Order order
+)
+{
+  return atomic_flag_test_and_set_explicit(object, order);
+}
+
+#ifdef __cplusplus
+}
+#endif
+
+/**@}*/
+#endif
+/*  end of include file */
diff --git a/cpukit/score/preinstall.am b/cpukit/score/preinstall.am
index dc84b21..1b7955e 100644
--- a/cpukit/score/preinstall.am
+++ b/cpukit/score/preinstall.am
@@ -235,6 +235,10 @@ $(PROJECT_INCLUDE)/rtems/score/genericatomicops.h: include/rtems/score/genericat
 	$(INSTALL_DATA) $< $(PROJECT_INCLUDE)/rtems/score/genericatomicops.h
 PREINSTALL_FILES += $(PROJECT_INCLUDE)/rtems/score/genericatomicops.h
 
+$(PROJECT_INCLUDE)/rtems/score/cpustdatomic.h: include/rtems/score/cpustdatomic.h $(PROJECT_INCLUDE)/rtems/score/$(dirstamp)
+	$(INSTALL_DATA) $< $(PROJECT_INCLUDE)/rtems/score/cpustdatomic.h
+PREINSTALL_FILES += $(PROJECT_INCLUDE)/rtems/score/cpustdatomic.h
+
 if HAS_PTHREADS
 $(PROJECT_INCLUDE)/rtems/score/corespinlock.h: include/rtems/score/corespinlock.h $(PROJECT_INCLUDE)/rtems/score/$(dirstamp)
 	$(INSTALL_DATA) $< $(PROJECT_INCLUDE)/rtems/score/corespinlock.h
-- 
1.7.9.5




More information about the devel mailing list