[rtems commit] score: Add Atomic_Uint

Sebastian Huber sebh at rtems.org
Fri Feb 14 09:22:33 UTC 2014


Module:    rtems
Branch:    master
Commit:    945853b7cf35f55fc8a749aa106bd77a491fe125
Changeset: http://git.rtems.org/rtems/commit/?id=945853b7cf35f55fc8a749aa106bd77a491fe125

Author:    Sebastian Huber <sebastian.huber at embedded-brains.de>
Date:      Thu Feb 13 15:38:52 2014 +0100

score: Add Atomic_Uint

---

 cpukit/score/include/rtems/score/atomic.h       |   83 +++++++++++++++++++++
 cpukit/score/include/rtems/score/cpustdatomic.h |   88 +++++++++++++++++++++++
 testsuites/smptests/smpatomic01/init.c          |   54 ++++++++++++++
 3 files changed, 225 insertions(+), 0 deletions(-)

diff --git a/cpukit/score/include/rtems/score/atomic.h b/cpukit/score/include/rtems/score/atomic.h
index cdb75f8..757eaf7 100644
--- a/cpukit/score/include/rtems/score/atomic.h
+++ b/cpukit/score/include/rtems/score/atomic.h
@@ -34,6 +34,7 @@ extern "C" {
 /**
  * @brief atomic data initializer for static initialization.
  */
+#define ATOMIC_INITIALIZER_UINT(value) CPU_ATOMIC_INITIALIZER_UINT(value)
 #define ATOMIC_INITIALIZER_ULONG(value) CPU_ATOMIC_INITIALIZER_ULONG(value)
 #define ATOMIC_INITIALIZER_PTR(value) CPU_ATOMIC_INITIALIZER_PTR(value)
 
@@ -48,6 +49,14 @@ extern "C" {
  * @param object an atomic type pointer of object.
 * @param value a value to be stored into object.
  */
+static inline void _Atomic_Init_uint(
+  volatile Atomic_Uint *object,
+  unsigned int value
+)
+{
+  _CPU_atomic_Init_uint(object, value);
+}
+
 static inline void _Atomic_Init_ulong(
   volatile Atomic_Ulong *object,
   unsigned long value
@@ -72,6 +81,14 @@ static inline void _Atomic_Init_ptr(
  * 
  * The order shall not be ATOMIC_ORDER_RELEASE.
  */
+static inline unsigned int _Atomic_Load_uint(
+  volatile Atomic_Uint *object,
+  Atomic_Order order
+)
+{
+  return _CPU_atomic_Load_uint( object, order );
+}
+
 static inline unsigned long _Atomic_Load_ulong(
   volatile Atomic_Ulong *object,
   Atomic_Order order
@@ -97,6 +114,15 @@ static inline void *_Atomic_Load_ptr(
  * 
  * The order shall not be ATOMIC_ORDER_ACQUIRE.
  */
+static inline void _Atomic_Store_uint(
+  volatile Atomic_Uint *object,
+  unsigned int value,
+  Atomic_Order order
+)
+{
+  _CPU_atomic_Store_uint( object, value, order );
+}
+
 static inline void _Atomic_Store_ulong(
   volatile Atomic_Ulong *object,
   unsigned long value,
@@ -124,6 +150,15 @@ static inline void _Atomic_Store_ptr(
  * 
  * @retval a result value before add ops.
  */
+static inline unsigned int _Atomic_Fetch_add_uint(
+  volatile Atomic_Uint *object,
+  unsigned int value,
+  Atomic_Order order
+)
+{
+  return _CPU_atomic_Fetch_add_uint( object, value, order );
+}
+
 static inline unsigned long _Atomic_Fetch_add_ulong(
   volatile Atomic_Ulong *object,
   unsigned long value,
@@ -151,6 +186,15 @@ static inline uintptr_t _Atomic_Fetch_add_ptr(
  * 
  * @retval a result value before sub ops.
  */
+static inline unsigned int _Atomic_Fetch_sub_uint(
+  volatile Atomic_Uint *object,
+  unsigned int value,
+  Atomic_Order order
+)
+{
+  return _CPU_atomic_Fetch_sub_uint( object, value, order );
+}
+
 static inline unsigned long _Atomic_Fetch_sub_ulong(
   volatile Atomic_Ulong *object,
   unsigned long value,
@@ -178,6 +222,15 @@ static inline uintptr_t _Atomic_Fetch_sub_ptr(
  * 
  * @retval a result value before or ops.
  */
+static inline unsigned int _Atomic_Fetch_or_uint(
+  volatile Atomic_Uint *object,
+  unsigned int value,
+  Atomic_Order order
+)
+{
+  return _CPU_atomic_Fetch_or_uint( object, value, order );
+}
+
 static inline unsigned long _Atomic_Fetch_or_ulong(
   volatile Atomic_Ulong *object,
   unsigned long value,
@@ -205,6 +258,15 @@ static inline uintptr_t _Atomic_Fetch_or_ptr(
  * 
  * @retval a result value before and ops.
  */
+static inline unsigned int _Atomic_Fetch_and_uint(
+  volatile Atomic_Uint *object,
+  unsigned int value,
+  Atomic_Order order
+)
+{
+  return _CPU_atomic_Fetch_and_uint( object, value, order );
+}
+
 static inline unsigned long _Atomic_Fetch_and_ulong(
   volatile Atomic_Ulong *object,
   unsigned long value,
@@ -232,6 +294,15 @@ static inline uintptr_t _Atomic_Fetch_and_ptr(
  * 
  * @retval a result value before exchange ops.
  */
+static inline unsigned int _Atomic_Exchange_uint(
+ volatile Atomic_Uint *object,
+ unsigned int value,
+ Atomic_Order order
+)
+{
+  return _CPU_atomic_Exchange_uint( object, value, order );
+}
+
 static inline unsigned long _Atomic_Exchange_ulong(
  volatile Atomic_Ulong *object,
  unsigned long value,
@@ -264,6 +335,18 @@ static inline void *_Atomic_Exchange_ptr(
 * @retval true if the compare exchange succeeded.
  * @retval false if the compare exchange failed.
  */
+static inline bool _Atomic_Compare_exchange_uint(
+  volatile Atomic_Uint *object,
+  unsigned int *old_value,
+  unsigned int new_value,
+  Atomic_Order order_succ,
+  Atomic_Order order_fail
+)
+{
+  return _CPU_atomic_Compare_exchange_uint( object, old_value, new_value,
+    order_succ, order_fail );
+}
+
 static inline bool _Atomic_Compare_exchange_ulong(
   volatile Atomic_Ulong *object,
   unsigned long *old_value,
diff --git a/cpukit/score/include/rtems/score/cpustdatomic.h b/cpukit/score/include/rtems/score/cpustdatomic.h
index e545dc0..e364eb9 100644
--- a/cpukit/score/include/rtems/score/cpustdatomic.h
+++ b/cpukit/score/include/rtems/score/cpustdatomic.h
@@ -35,6 +35,11 @@ extern "C" {
 /**
  * @brief atomic operation unsigned integer type
  */
+typedef atomic_uint Atomic_Uint;
+
+/**
+ * @brief atomic operation unsigned long integer type
+ */
 typedef atomic_ulong Atomic_Ulong;
 
 /**
@@ -71,6 +76,7 @@ typedef enum {
 /**
  * @brief atomic data initializer for static initialization.
  */
+#define CPU_ATOMIC_INITIALIZER_UINT(value) ATOMIC_VAR_INIT(value)
 #define CPU_ATOMIC_INITIALIZER_ULONG(value) ATOMIC_VAR_INIT(value)
 #define CPU_ATOMIC_INITIALIZER_PTR(pointer) \
   ATOMIC_VAR_INIT((uintptr_t) pointer)
@@ -83,6 +89,14 @@ typedef enum {
  * @param object an atomic type pointer of object.
  * @param value a value to be stored into object.
  */
+static inline void _CPU_atomic_Init_uint(
+  volatile Atomic_Uint *object,
+  unsigned int value
+)
+{
+  atomic_init( object, value );
+}
+
 static inline void _CPU_atomic_Init_ulong(
   volatile Atomic_Ulong *object,
   unsigned long value
@@ -107,6 +121,14 @@ static inline void _CPU_atomic_Init_ptr(
  * 
  * The order shall not be ATOMIC_ORDER_RELEASE.
  */
+static inline unsigned int _CPU_atomic_Load_uint(
+  volatile Atomic_Uint *object,
+  Atomic_Order order
+)
+{
+  return atomic_load_explicit( object, (memory_order) order );
+}
+
 static inline unsigned long _CPU_atomic_Load_ulong(
   volatile Atomic_Ulong *object,
   Atomic_Order order
@@ -132,6 +154,15 @@ static inline void *_CPU_atomic_Load_ptr(
  * 
  * The order shall not be ATOMIC_ORDER_ACQUIRE.
  */
+static inline void _CPU_atomic_Store_uint(
+  volatile Atomic_Uint *object,
+  unsigned int value,
+  Atomic_Order order
+)
+{
+  atomic_store_explicit( object, value, (memory_order) order );
+}
+
 static inline void _CPU_atomic_Store_ulong(
   volatile Atomic_Ulong *object,
   unsigned long value,
@@ -159,6 +190,15 @@ static inline void _CPU_atomic_Store_ptr(
  * 
  * @retval a result value before add ops.
  */
+static inline unsigned int _CPU_atomic_Fetch_add_uint(
+  volatile Atomic_Uint *object,
+  unsigned int value,
+  Atomic_Order order
+)
+{
+  return atomic_fetch_add_explicit( object, value, (memory_order) order );
+}
+
 static inline unsigned long _CPU_atomic_Fetch_add_ulong(
   volatile Atomic_Ulong *object,
   unsigned long value,
@@ -186,6 +226,15 @@ static inline uintptr_t _CPU_atomic_Fetch_add_ptr(
  * 
  * @retval a result value before sub ops.
  */
+static inline unsigned int _CPU_atomic_Fetch_sub_uint(
+  volatile Atomic_Uint *object,
+  unsigned int value,
+  Atomic_Order order
+)
+{
+  return atomic_fetch_sub_explicit( object, value, (memory_order) order );
+}
+
 static inline unsigned long _CPU_atomic_Fetch_sub_ulong(
   volatile Atomic_Ulong *object,
   unsigned long value,
@@ -213,6 +262,15 @@ static inline uintptr_t _CPU_atomic_Fetch_sub_ptr(
  * 
  * @retval a result value before or ops.
  */
+static inline unsigned int _CPU_atomic_Fetch_or_uint(
+  volatile Atomic_Uint *object,
+  unsigned int value,
+  Atomic_Order order
+)
+{
+  return atomic_fetch_or_explicit( object, value, (memory_order) order );
+}
+
 static inline unsigned long _CPU_atomic_Fetch_or_ulong(
   volatile Atomic_Ulong *object,
   unsigned long value,
@@ -240,6 +298,15 @@ static inline uintptr_t _CPU_atomic_Fetch_or_ptr(
  * 
  * @retval a result value before and ops.
  */
+static inline unsigned int _CPU_atomic_Fetch_and_uint(
+  volatile Atomic_Uint *object,
+  unsigned int value,
+  Atomic_Order order
+)
+{
+  return atomic_fetch_and_explicit( object, value, (memory_order) order );
+}
+
 static inline unsigned long _CPU_atomic_Fetch_and_ulong(
   volatile Atomic_Ulong *object,
   unsigned long value,
@@ -267,6 +334,15 @@ static inline uintptr_t _CPU_atomic_Fetch_and_ptr(
  * 
  * @retval a result value before exchange ops.
  */
+static inline unsigned int _CPU_atomic_Exchange_uint(
+ volatile Atomic_Uint *object,
+ unsigned int value,
+ Atomic_Order order
+)
+{
+  return atomic_exchange_explicit( object, value, (memory_order) order );
+}
+
 static inline unsigned long _CPU_atomic_Exchange_ulong(
  volatile Atomic_Ulong *object,
  unsigned long value,
@@ -303,6 +379,18 @@ static inline void *_CPU_atomic_Exchange_ptr(
 * @retval true if the compare exchange succeeded.
  * @retval false if the compare exchange failed.
  */
+static inline bool _CPU_atomic_Compare_exchange_uint(
+  volatile Atomic_Uint *object,
+  unsigned int *old_value,
+  unsigned int new_value,
+  Atomic_Order order_succ,
+  Atomic_Order order_fail
+)
+{
+  return atomic_compare_exchange_strong_explicit( object, old_value,
+    new_value, order_succ, order_fail );
+}
+
 static inline bool _CPU_atomic_Compare_exchange_ulong(
   volatile Atomic_Ulong *object,
   unsigned long *old_value,
diff --git a/testsuites/smptests/smpatomic01/init.c b/testsuites/smptests/smpatomic01/init.c
index feffe5e..2901a98 100644
--- a/testsuites/smptests/smpatomic01/init.c
+++ b/testsuites/smptests/smpatomic01/init.c
@@ -80,6 +80,7 @@ typedef struct {
   SMP_barrier_Control barrier;
   size_t worker_count;
   rtems_id stop_worker_timer_id;
+  Atomic_Uint atomic_int_value;
   Atomic_Ulong atomic_value;
   unsigned long per_worker_value[CPU_COUNT];
   unsigned long normal_value;
@@ -383,23 +384,30 @@ static void worker_task(size_t worker_index)
 
 static void test_static_and_dynamic_initialization(void)
 {
+  static Atomic_Uint static_uint =
+    ATOMIC_INITIALIZER_UINT(0xc01dc0feU);
   static Atomic_Ulong static_ulong =
     ATOMIC_INITIALIZER_ULONG(0xdeadbeefUL);
   static Atomic_Pointer static_ptr =
     ATOMIC_INITIALIZER_PTR(&static_ptr);
   static Atomic_Flag static_flag = ATOMIC_INITIALIZER_FLAG;
 
+  Atomic_Uint stack_uint;
   Atomic_Ulong stack_ulong;
   Atomic_Pointer stack_ptr;
   Atomic_Flag stack_flag;
 
   puts("=== static and dynamic initialization test case ===");
 
+  _Atomic_Init_uint(&stack_uint, 0xc01dc0feU);
   _Atomic_Init_ulong(&stack_ulong, 0xdeadbeefUL);
   _Atomic_Init_ptr(&stack_ptr, &static_ptr);
   _Atomic_Flag_clear(&stack_flag, ATOMIC_ORDER_RELAXED);
 
   rtems_test_assert(
+    memcmp(&stack_uint, &static_uint, sizeof(stack_uint)) == 0
+  );
+  rtems_test_assert(
     memcmp(&stack_ulong, &static_ulong, sizeof(stack_ulong)) == 0
   );
   rtems_test_assert(
@@ -410,6 +418,9 @@ static void test_static_and_dynamic_initialization(void)
   );
 
   rtems_test_assert(
+    _Atomic_Load_uint(&stack_uint, ATOMIC_ORDER_RELAXED) == 0xc01dc0feU
+  );
+  rtems_test_assert(
     _Atomic_Load_ulong(&stack_ulong, ATOMIC_ORDER_RELAXED) == 0xdeadbeefUL
   );
   rtems_test_assert(
@@ -460,11 +471,18 @@ typedef void (*simple_test_body)(test_context *ctx);
 
 static void test_simple_atomic_add_body(test_context *ctx)
 {
+  unsigned int ia = 8, ib = 4;
+  unsigned int ic;
   unsigned long a = 2, b = 1;
   unsigned long c;
 
   puts("=== atomic simple add test case ==\n");
 
+  _Atomic_Store_uint(&ctx->atomic_int_value, ia, ATOMIC_ORDER_RELAXED);
+  _Atomic_Fetch_add_uint(&ctx->atomic_int_value, ib, ATOMIC_ORDER_RELAXED);
+  ic = _Atomic_Load_uint(&ctx->atomic_int_value, ATOMIC_ORDER_RELAXED);
+  rtems_test_assert(ic == (ia + ib));
+
   _Atomic_Store_ulong(&ctx->atomic_value, a, ATOMIC_ORDER_RELAXED);
   _Atomic_Fetch_add_ulong(&ctx->atomic_value, b, ATOMIC_ORDER_RELAXED);
   c = _Atomic_Load_ulong(&ctx->atomic_value, ATOMIC_ORDER_RELAXED);
@@ -473,11 +491,18 @@ static void test_simple_atomic_add_body(test_context *ctx)
 
 static void test_simple_atomic_sub_body(test_context *ctx)
 {
+  unsigned int ia = 8, ib = 4;
+  unsigned int ic;
   unsigned long a = 2, b = 1;
   unsigned long c;
 
   puts("=== atomic simple sub test case ==\n");
 
+  _Atomic_Store_uint(&ctx->atomic_int_value, ia, ATOMIC_ORDER_RELAXED);
+  _Atomic_Fetch_sub_uint(&ctx->atomic_int_value, ib, ATOMIC_ORDER_RELAXED);
+  ic = _Atomic_Load_uint(&ctx->atomic_int_value, ATOMIC_ORDER_RELAXED);
+  rtems_test_assert(ic == (ia - ib));
+
   _Atomic_Store_ulong(&ctx->atomic_value, a, ATOMIC_ORDER_RELAXED);
   _Atomic_Fetch_sub_ulong(&ctx->atomic_value, b, ATOMIC_ORDER_RELAXED);
   c = _Atomic_Load_ulong(&ctx->atomic_value, ATOMIC_ORDER_RELAXED);
@@ -486,11 +511,18 @@ static void test_simple_atomic_sub_body(test_context *ctx)
 
 static void test_simple_atomic_or_body(test_context *ctx)
 {
+  unsigned int ia = 8, ib = 4;
+  unsigned int ic;
   unsigned long a = 2, b = 1;
   unsigned long c;
 
   puts("=== atomic simple or test case ==\n");
 
+  _Atomic_Store_uint(&ctx->atomic_int_value, ia, ATOMIC_ORDER_RELAXED);
+  _Atomic_Fetch_or_uint(&ctx->atomic_int_value, ib, ATOMIC_ORDER_RELAXED);
+  ic = _Atomic_Load_uint(&ctx->atomic_int_value, ATOMIC_ORDER_RELAXED);
+  rtems_test_assert(ic == (ia | ib));
+
   _Atomic_Store_ulong(&ctx->atomic_value, a, ATOMIC_ORDER_RELAXED);
   _Atomic_Fetch_or_ulong(&ctx->atomic_value, b, ATOMIC_ORDER_RELAXED);
   c = _Atomic_Load_ulong(&ctx->atomic_value, ATOMIC_ORDER_RELAXED);
@@ -499,11 +531,18 @@ static void test_simple_atomic_or_body(test_context *ctx)
 
 static void test_simple_atomic_and_body(test_context *ctx)
 {
+  unsigned int ia = 8, ib = 4;
+  unsigned int ic;
   unsigned long a = 2, b = 1;
   unsigned long c;
 
   puts("=== atomic simple and test case ==\n");
 
+  _Atomic_Store_uint(&ctx->atomic_int_value, ia, ATOMIC_ORDER_RELAXED);
+  _Atomic_Fetch_and_uint(&ctx->atomic_int_value, ib, ATOMIC_ORDER_RELAXED);
+  ic = _Atomic_Load_uint(&ctx->atomic_int_value, ATOMIC_ORDER_RELAXED);
+  rtems_test_assert(ic == (ia & ib));
+
   _Atomic_Store_ulong(&ctx->atomic_value, a, ATOMIC_ORDER_RELAXED);
   _Atomic_Fetch_and_ulong(&ctx->atomic_value, b, ATOMIC_ORDER_RELAXED);
   c = _Atomic_Load_ulong(&ctx->atomic_value, ATOMIC_ORDER_RELAXED);
@@ -512,11 +551,18 @@ static void test_simple_atomic_and_body(test_context *ctx)
 
 static void test_simple_atomic_exchange_body(test_context *ctx)
 {
+  unsigned int ia = 8, ib = 4;
+  unsigned int ic;
   unsigned long a = 2, b = 1;
   unsigned long c;
 
   puts("=== atomic simple exchange test case ==\n");
 
+  _Atomic_Store_uint(&ctx->atomic_int_value, ia, ATOMIC_ORDER_RELAXED);
+  _Atomic_Exchange_uint(&ctx->atomic_int_value, ib, ATOMIC_ORDER_RELAXED);
+  ic = _Atomic_Load_uint(&ctx->atomic_int_value, ATOMIC_ORDER_RELAXED);
+  rtems_test_assert(ic == ib);
+
   _Atomic_Store_ulong(&ctx->atomic_value, a, ATOMIC_ORDER_RELAXED);
   _Atomic_Exchange_ulong(&ctx->atomic_value, b, ATOMIC_ORDER_RELAXED);
   c = _Atomic_Load_ulong(&ctx->atomic_value, ATOMIC_ORDER_RELAXED);
@@ -525,11 +571,19 @@ static void test_simple_atomic_exchange_body(test_context *ctx)
 
 static void test_simple_atomic_compare_exchange_body(test_context *ctx)
 {
+  unsigned int ia = 8, ib = 4;
+  unsigned int ic;
   unsigned long a = 2, b = 1;
   unsigned long c;
 
   puts("=== atomic simple compare exchange test case ==\n");
 
+  _Atomic_Store_uint(&ctx->atomic_int_value, ia, ATOMIC_ORDER_RELAXED);
+  _Atomic_Compare_exchange_uint(&ctx->atomic_int_value, &ia, ib,
+    ATOMIC_ORDER_RELAXED, ATOMIC_ORDER_RELAXED);
+  ic = _Atomic_Load_uint(&ctx->atomic_int_value, ATOMIC_ORDER_RELAXED);
+  rtems_test_assert(ic == ib);
+
   _Atomic_Store_ulong(&ctx->atomic_value, a, ATOMIC_ORDER_RELAXED);
   _Atomic_Compare_exchange_ulong(&ctx->atomic_value, &a, b,
     ATOMIC_ORDER_RELAXED, ATOMIC_ORDER_RELAXED);




More information about the vc mailing list