[PATCH] score: Use unsigned long for atomic integers
Sebastian Huber
sebastian.huber at embedded-brains.de
Mon Sep 2 13:11:46 UTC 2013
Use unsigned long instead of uint_fast32_t since C11 provides only an
ATOMIC_LONG_LOCK_FREE macro constant. This also makes it possible to
use properly typed integer literals like 123UL. It is now clear which
compatible type should be used for the atomic integer.
---
cpukit/score/include/rtems/score/atomic.h | 72 +++++++++++-----------
cpukit/score/include/rtems/score/cpustdatomic.h | 56 ++++++++--------
testsuites/smptests/smpatomic01/tasks.c | 4 +-
testsuites/smptests/smpatomic02/tasks.c | 4 +-
testsuites/smptests/smpatomic03/tasks.c | 6 +-
testsuites/smptests/smpatomic04/tasks.c | 6 +-
testsuites/smptests/smpatomic05/tasks.c | 6 +-
testsuites/smptests/smpatomic06/tasks.c | 6 +-
testsuites/smptests/smpatomic07/tasks.c | 6 +-
testsuites/smptests/smpatomic08/init.c | 78 +++++++++++-----------
10 files changed, 122 insertions(+), 122 deletions(-)
diff --git a/cpukit/score/include/rtems/score/atomic.h b/cpukit/score/include/rtems/score/atomic.h
index e085dea..b4e7a76 100644
--- a/cpukit/score/include/rtems/score/atomic.h
+++ b/cpukit/score/include/rtems/score/atomic.h
@@ -48,12 +48,12 @@ extern "C" {
* @param object an atomic type pointer of object.
* @param value a value to be stored into object.
*/
-static inline void _Atomic_Init_uint(
- volatile Atomic_Uint *object,
- uint_fast32_t value
+static inline void _Atomic_Init_ulong(
+ volatile Atomic_Ulong *object,
+ unsigned long value
)
{
- _CPU_atomic_Init_uint(object, value);
+ _CPU_atomic_Init_ulong(object, value);
}
static inline void _Atomic_Init_ptr(
@@ -72,12 +72,12 @@ static inline void _Atomic_Init_ptr(
*
* The order shall not be ATOMIC_ORDER_RELEASE.
*/
-static inline uint_fast32_t _Atomic_Load_uint(
- volatile Atomic_Uint *object,
+static inline unsigned long _Atomic_Load_ulong(
+ volatile Atomic_Ulong *object,
Atomic_Order order
)
{
- return _CPU_atomic_Load_uint( object, order );
+ return _CPU_atomic_Load_ulong( object, order );
}
static inline uintptr_t _Atomic_Load_ptr(
@@ -97,13 +97,13 @@ static inline uintptr_t _Atomic_Load_ptr(
*
* The order shall not be ATOMIC_ORDER_ACQUIRE.
*/
-static inline void _Atomic_Store_uint(
- volatile Atomic_Uint *object,
- uint_fast32_t value,
+static inline void _Atomic_Store_ulong(
+ volatile Atomic_Ulong *object,
+ unsigned long value,
Atomic_Order order
)
{
- _CPU_atomic_Store_uint( object, value, order );
+ _CPU_atomic_Store_ulong( object, value, order );
}
static inline void _Atomic_Store_ptr(
@@ -124,13 +124,13 @@ static inline void _Atomic_Store_ptr(
*
* @retval a result value before add ops.
*/
-static inline uint_fast32_t _Atomic_Fetch_add_uint(
- volatile Atomic_Uint *object,
- uint_fast32_t value,
+static inline unsigned long _Atomic_Fetch_add_ulong(
+ volatile Atomic_Ulong *object,
+ unsigned long value,
Atomic_Order order
)
{
- return _CPU_atomic_Fetch_add_uint( object, value, order );
+ return _CPU_atomic_Fetch_add_ulong( object, value, order );
}
static inline uintptr_t _Atomic_Fetch_add_ptr(
@@ -151,13 +151,13 @@ static inline uintptr_t _Atomic_Fetch_add_ptr(
*
* @retval a result value before sub ops.
*/
-static inline uint_fast32_t _Atomic_Fetch_sub_uint(
- volatile Atomic_Uint *object,
- uint_fast32_t value,
+static inline unsigned long _Atomic_Fetch_sub_ulong(
+ volatile Atomic_Ulong *object,
+ unsigned long value,
Atomic_Order order
)
{
- return _CPU_atomic_Fetch_sub_uint( object, value, order );
+ return _CPU_atomic_Fetch_sub_ulong( object, value, order );
}
static inline uintptr_t _Atomic_Fetch_sub_ptr(
@@ -178,13 +178,13 @@ static inline uintptr_t _Atomic_Fetch_sub_ptr(
*
* @retval a result value before or ops.
*/
-static inline uint_fast32_t _Atomic_Fetch_or_uint(
- volatile Atomic_Uint *object,
- uint_fast32_t value,
+static inline unsigned long _Atomic_Fetch_or_ulong(
+ volatile Atomic_Ulong *object,
+ unsigned long value,
Atomic_Order order
)
{
- return _CPU_atomic_Fetch_or_uint( object, value, order );
+ return _CPU_atomic_Fetch_or_ulong( object, value, order );
}
static inline uintptr_t _Atomic_Fetch_or_ptr(
@@ -205,13 +205,13 @@ static inline uintptr_t _Atomic_Fetch_or_ptr(
*
* @retval a result value before and ops.
*/
-static inline uint_fast32_t _Atomic_Fetch_and_uint(
- volatile Atomic_Uint *object,
- uint_fast32_t value,
+static inline unsigned long _Atomic_Fetch_and_ulong(
+ volatile Atomic_Ulong *object,
+ unsigned long value,
Atomic_Order order
)
{
- return _CPU_atomic_Fetch_and_uint( object, value, order );
+ return _CPU_atomic_Fetch_and_ulong( object, value, order );
}
static inline uintptr_t _Atomic_Fetch_and_ptr(
@@ -232,13 +232,13 @@ static inline uintptr_t _Atomic_Fetch_and_ptr(
*
* @retval a result value before exchange ops.
*/
-static inline uint_fast32_t _Atomic_Exchange_uint(
- volatile Atomic_Uint *object,
- uint_fast32_t value,
+static inline unsigned long _Atomic_Exchange_ulong(
+ volatile Atomic_Ulong *object,
+ unsigned long value,
Atomic_Order order
)
{
- return _CPU_atomic_Exchange_uint( object, value, order );
+ return _CPU_atomic_Exchange_ulong( object, value, order );
}
static inline uintptr_t _Atomic_Exchange_ptr(
@@ -264,15 +264,15 @@ static inline uintptr_t _Atomic_Exchange_ptr(
* @retval true if the compare exchange succeeded.
* @retval false if the compare exchange failed.
*/
-static inline bool _Atomic_Compare_exchange_uint(
- volatile Atomic_Uint *object,
- uint_fast32_t *old_value,
- uint_fast32_t new_value,
+static inline bool _Atomic_Compare_exchange_ulong(
+ volatile Atomic_Ulong *object,
+ unsigned long *old_value,
+ unsigned long new_value,
Atomic_Order order_succ,
Atomic_Order order_fail
)
{
- return _CPU_atomic_Compare_exchange_uint( object, old_value, new_value,
+ return _CPU_atomic_Compare_exchange_ulong( object, old_value, new_value,
order_succ, order_fail );
}
diff --git a/cpukit/score/include/rtems/score/cpustdatomic.h b/cpukit/score/include/rtems/score/cpustdatomic.h
index 6ec5828..d5c2142 100644
--- a/cpukit/score/include/rtems/score/cpustdatomic.h
+++ b/cpukit/score/include/rtems/score/cpustdatomic.h
@@ -35,7 +35,7 @@ extern "C" {
/**
* @brief atomic operation unsigned integer type
*/
-typedef atomic_uint_fast32_t Atomic_Uint;
+typedef atomic_ulong Atomic_Ulong;
/**
* @brief atomic operation unsigned integer the size of a pointer type
@@ -82,9 +82,9 @@ typedef enum {
* @param object an atomic type pointer of object.
* @param value a value to be stored into object.
*/
-static inline void _CPU_atomic_Init_uint(
- volatile Atomic_Uint *object,
- uint_fast32_t value
+static inline void _CPU_atomic_Init_ulong(
+ volatile Atomic_Ulong *object,
+ unsigned long value
)
{
atomic_init(object, value);
@@ -106,8 +106,8 @@ static inline void _CPU_atomic_Init_ptr(
*
* The order shall not be ATOMIC_ORDER_RELEASE.
*/
-static inline uint_fast32_t _CPU_atomic_Load_uint(
- volatile Atomic_Uint *object,
+static inline unsigned long _CPU_atomic_Load_ulong(
+ volatile Atomic_Ulong *object,
Atomic_Order order
)
{
@@ -131,9 +131,9 @@ static inline uintptr_t _CPU_atomic_Load_ptr(
*
* The order shall not be ATOMIC_ORDER_ACQUIRE.
*/
-static inline void _CPU_atomic_Store_uint(
- volatile Atomic_Uint *object,
- uint_fast32_t value,
+static inline void _CPU_atomic_Store_ulong(
+ volatile Atomic_Ulong *object,
+ unsigned long value,
Atomic_Order order
)
{
@@ -158,9 +158,9 @@ static inline void _CPU_atomic_Store_ptr(
*
* @retval a result value before add ops.
*/
-static inline uint_fast32_t _CPU_atomic_Fetch_add_uint(
- volatile Atomic_Uint *object,
- uint_fast32_t value,
+static inline unsigned long _CPU_atomic_Fetch_add_ulong(
+ volatile Atomic_Ulong *object,
+ unsigned long value,
Atomic_Order order
)
{
@@ -185,9 +185,9 @@ static inline uintptr_t _CPU_atomic_Fetch_add_ptr(
*
* @retval a result value before sub ops.
*/
-static inline uint_fast32_t _CPU_atomic_Fetch_sub_uint(
- volatile Atomic_Uint *object,
- uint_fast32_t value,
+static inline unsigned long _CPU_atomic_Fetch_sub_ulong(
+ volatile Atomic_Ulong *object,
+ unsigned long value,
Atomic_Order order
)
{
@@ -212,9 +212,9 @@ static inline uintptr_t _CPU_atomic_Fetch_sub_ptr(
*
* @retval a result value before or ops.
*/
-static inline uint_fast32_t _CPU_atomic_Fetch_or_uint(
- volatile Atomic_Uint *object,
- uint_fast32_t value,
+static inline unsigned long _CPU_atomic_Fetch_or_ulong(
+ volatile Atomic_Ulong *object,
+ unsigned long value,
Atomic_Order order
)
{
@@ -239,9 +239,9 @@ static inline uintptr_t _CPU_atomic_Fetch_or_ptr(
*
* @retval a result value before and ops.
*/
-static inline uint_fast32_t _CPU_atomic_Fetch_and_uint(
- volatile Atomic_Uint *object,
- uint_fast32_t value,
+static inline unsigned long _CPU_atomic_Fetch_and_ulong(
+ volatile Atomic_Ulong *object,
+ unsigned long value,
Atomic_Order order
)
{
@@ -266,9 +266,9 @@ static inline uintptr_t _CPU_atomic_Fetch_and_ptr(
*
* @retval a result value before exchange ops.
*/
-static inline uint_fast32_t _CPU_atomic_Exchange_uint(
- volatile Atomic_Uint *object,
- uint_fast32_t value,
+static inline unsigned long _CPU_atomic_Exchange_ulong(
+ volatile Atomic_Ulong *object,
+ unsigned long value,
Atomic_Order order
)
{
@@ -298,10 +298,10 @@ static inline uintptr_t _CPU_atomic_Exchange_ptr(
* @retval true if the compare exchange succeeded.
* @retval false if the compare exchange failed.
*/
-static inline bool _CPU_atomic_Compare_exchange_uint(
- volatile Atomic_Uint *object,
- uint_fast32_t *old_value,
- uint_fast32_t new_value,
+static inline bool _CPU_atomic_Compare_exchange_ulong(
+ volatile Atomic_Ulong *object,
+ unsigned long *old_value,
+ unsigned long new_value,
Atomic_Order order_succ,
Atomic_Order order_fail
)
diff --git a/testsuites/smptests/smpatomic01/tasks.c b/testsuites/smptests/smpatomic01/tasks.c
index 65ad81b..4dd2ff6 100644
--- a/testsuites/smptests/smpatomic01/tasks.c
+++ b/testsuites/smptests/smpatomic01/tasks.c
@@ -52,12 +52,12 @@ rtems_task Test_task(
/* Print that the task is up and running. */
/* test relaxed barrier */
- ATOMIC_LOAD_NO_BARRIER(uint, Uint, uint_fast32_t, cpu_num, ATOMIC_ORDER_RELAXED);
+ ATOMIC_LOAD_NO_BARRIER(ulong, Ulong, unsigned long, cpu_num, ATOMIC_ORDER_RELAXED);
ATOMIC_LOAD_NO_BARRIER(ptr, Pointer, uintptr_t, cpu_num, ATOMIC_ORDER_RELAXED);
/* test acquire barrier */
- ATOMIC_LOAD_NO_BARRIER(uint, Uint, uint_fast32_t, cpu_num, ATOMIC_ORDER_ACQUIRE);
+ ATOMIC_LOAD_NO_BARRIER(ulong, Ulong, unsigned long, cpu_num, ATOMIC_ORDER_ACQUIRE);
ATOMIC_LOAD_NO_BARRIER(ptr, Pointer, unsigned long, cpu_num, ATOMIC_ORDER_ACQUIRE);
diff --git a/testsuites/smptests/smpatomic02/tasks.c b/testsuites/smptests/smpatomic02/tasks.c
index 22de08c..c5c9ba5 100644
--- a/testsuites/smptests/smpatomic02/tasks.c
+++ b/testsuites/smptests/smpatomic02/tasks.c
@@ -52,12 +52,12 @@ rtems_task Test_task(
/* Print that the task is up and running. */
/* test relaxed barrier */
- ATOMIC_STORE_NO_BARRIER(uint, Uint, uint_fast32_t, cpu_num, ATOMIC_ORDER_RELAXED);
+ ATOMIC_STORE_NO_BARRIER(ulong, Ulong, unsigned long, cpu_num, ATOMIC_ORDER_RELAXED);
ATOMIC_STORE_NO_BARRIER(ptr, Pointer, uintptr_t, cpu_num, ATOMIC_ORDER_RELAXED);
/* test release barrier */
- ATOMIC_STORE_NO_BARRIER(uint, Uint, uint_fast32_t, cpu_num, ATOMIC_ORDER_RELEASE);
+ ATOMIC_STORE_NO_BARRIER(ulong, Ulong, unsigned long, cpu_num, ATOMIC_ORDER_RELEASE);
ATOMIC_STORE_NO_BARRIER(ptr, Pointer, uintptr_t, cpu_num, ATOMIC_ORDER_RELEASE);
diff --git a/testsuites/smptests/smpatomic03/tasks.c b/testsuites/smptests/smpatomic03/tasks.c
index 2c45a00..5010549 100644
--- a/testsuites/smptests/smpatomic03/tasks.c
+++ b/testsuites/smptests/smpatomic03/tasks.c
@@ -55,17 +55,17 @@ rtems_task Test_task(
/* Print that the task is up and running. */
/* test relaxed barrier */
- ATOMIC_FETCH_ADD_NO_BARRIER(uint, Uint, uint_fast32_t, cpu_num, ATOMIC_ORDER_RELAXED);
+ ATOMIC_FETCH_ADD_NO_BARRIER(ulong, Ulong, unsigned long, cpu_num, ATOMIC_ORDER_RELAXED);
ATOMIC_FETCH_ADD_NO_BARRIER(ptr, Pointer, uintptr_t, cpu_num, ATOMIC_ORDER_RELAXED);
/* test acquire barrier */
- ATOMIC_FETCH_ADD_NO_BARRIER(uint, Uint, uint_fast32_t, cpu_num, ATOMIC_ORDER_ACQUIRE);
+ ATOMIC_FETCH_ADD_NO_BARRIER(ulong, Ulong, unsigned long, cpu_num, ATOMIC_ORDER_ACQUIRE);
ATOMIC_FETCH_ADD_NO_BARRIER(ptr, Pointer, uintptr_t, cpu_num, ATOMIC_ORDER_ACQUIRE);
/* test release barrier */
- ATOMIC_FETCH_ADD_NO_BARRIER(uint, Uint, uint_fast32_t, cpu_num, ATOMIC_ORDER_RELEASE);
+ ATOMIC_FETCH_ADD_NO_BARRIER(ulong, Ulong, unsigned long, cpu_num, ATOMIC_ORDER_RELEASE);
ATOMIC_FETCH_ADD_NO_BARRIER(ptr, Pointer, uintptr_t, cpu_num, ATOMIC_ORDER_RELEASE);
diff --git a/testsuites/smptests/smpatomic04/tasks.c b/testsuites/smptests/smpatomic04/tasks.c
index 3630eb2..18632cd 100644
--- a/testsuites/smptests/smpatomic04/tasks.c
+++ b/testsuites/smptests/smpatomic04/tasks.c
@@ -55,17 +55,17 @@ rtems_task Test_task(
/* Print that the task is up and running. */
/* test relaxed barrier */
- ATOMIC_FETCH_SUB_NO_BARRIER(uint, Uint, uint_fast32_t, cpu_num, ATOMIC_ORDER_RELAXED);
+ ATOMIC_FETCH_SUB_NO_BARRIER(ulong, Ulong, unsigned long, cpu_num, ATOMIC_ORDER_RELAXED);
ATOMIC_FETCH_SUB_NO_BARRIER(ptr, Pointer, uintptr_t, cpu_num, ATOMIC_ORDER_RELAXED);
/* test acquire barrier */
- ATOMIC_FETCH_SUB_NO_BARRIER(uint, Uint, uint_fast32_t, cpu_num, ATOMIC_ORDER_ACQUIRE);
+ ATOMIC_FETCH_SUB_NO_BARRIER(ulong, Ulong, unsigned long, cpu_num, ATOMIC_ORDER_ACQUIRE);
ATOMIC_FETCH_SUB_NO_BARRIER(ptr, Pointer, uintptr_t, cpu_num, ATOMIC_ORDER_ACQUIRE);
/* test release barrier */
- ATOMIC_FETCH_SUB_NO_BARRIER(uint, Uint, uint_fast32_t, cpu_num, ATOMIC_ORDER_RELEASE);
+ ATOMIC_FETCH_SUB_NO_BARRIER(ulong, Ulong, unsigned long, cpu_num, ATOMIC_ORDER_RELEASE);
ATOMIC_FETCH_SUB_NO_BARRIER(ptr, Pointer, uintptr_t, cpu_num, ATOMIC_ORDER_RELEASE);
diff --git a/testsuites/smptests/smpatomic05/tasks.c b/testsuites/smptests/smpatomic05/tasks.c
index 5e7da77..cf41cc1 100644
--- a/testsuites/smptests/smpatomic05/tasks.c
+++ b/testsuites/smptests/smpatomic05/tasks.c
@@ -55,17 +55,17 @@ rtems_task Test_task(
/* Print that the task is up and running. */
/* test relaxed barrier */
- ATOMIC_FETCH_AND_NO_BARRIER(uint, Uint, uint_fast32_t, cpu_num, ATOMIC_ORDER_RELAXED);
+ ATOMIC_FETCH_AND_NO_BARRIER(ulong, Ulong, unsigned long, cpu_num, ATOMIC_ORDER_RELAXED);
ATOMIC_FETCH_AND_NO_BARRIER(ptr, Pointer, uintptr_t, cpu_num, ATOMIC_ORDER_RELAXED);
/* test acquire barrier */
- ATOMIC_FETCH_AND_NO_BARRIER(uint, Uint, uint_fast32_t, cpu_num, ATOMIC_ORDER_ACQUIRE);
+ ATOMIC_FETCH_AND_NO_BARRIER(ulong, Ulong, unsigned long, cpu_num, ATOMIC_ORDER_ACQUIRE);
ATOMIC_FETCH_AND_NO_BARRIER(ptr, Pointer, uintptr_t, cpu_num, ATOMIC_ORDER_ACQUIRE);
/* test release barrier */
- ATOMIC_FETCH_AND_NO_BARRIER(uint, Uint, uint_fast32_t, cpu_num, ATOMIC_ORDER_RELEASE);
+ ATOMIC_FETCH_AND_NO_BARRIER(ulong, Ulong, unsigned long, cpu_num, ATOMIC_ORDER_RELEASE);
ATOMIC_FETCH_AND_NO_BARRIER(ptr, Pointer, uintptr_t, cpu_num, ATOMIC_ORDER_RELEASE);
diff --git a/testsuites/smptests/smpatomic06/tasks.c b/testsuites/smptests/smpatomic06/tasks.c
index 772d745..a0409c7 100644
--- a/testsuites/smptests/smpatomic06/tasks.c
+++ b/testsuites/smptests/smpatomic06/tasks.c
@@ -55,17 +55,17 @@ rtems_task Test_task(
/* Print that the task is up and running. */
/* test relaxed barrier */
- ATOMIC_FETCH_OR_NO_BARRIER(uint, Uint, uint_fast32_t, cpu_num, ATOMIC_ORDER_RELAXED);
+ ATOMIC_FETCH_OR_NO_BARRIER(ulong, Ulong, unsigned long, cpu_num, ATOMIC_ORDER_RELAXED);
ATOMIC_FETCH_OR_NO_BARRIER(ptr, Pointer, uintptr_t, cpu_num, ATOMIC_ORDER_RELAXED);
/* test acquire barrier */
- ATOMIC_FETCH_OR_NO_BARRIER(uint, Uint, uint_fast32_t, cpu_num, ATOMIC_ORDER_ACQUIRE);
+ ATOMIC_FETCH_OR_NO_BARRIER(ulong, Ulong, unsigned long, cpu_num, ATOMIC_ORDER_ACQUIRE);
ATOMIC_FETCH_OR_NO_BARRIER(ptr, Pointer, uintptr_t, cpu_num, ATOMIC_ORDER_ACQUIRE);
/* test release barrier */
- ATOMIC_FETCH_OR_NO_BARRIER(uint, Uint, uint_fast32_t, cpu_num, ATOMIC_ORDER_RELEASE);
+ ATOMIC_FETCH_OR_NO_BARRIER(ulong, Ulong, unsigned long, cpu_num, ATOMIC_ORDER_RELEASE);
ATOMIC_FETCH_OR_NO_BARRIER(ptr, Pointer, uintptr_t, cpu_num, ATOMIC_ORDER_RELEASE);
diff --git a/testsuites/smptests/smpatomic07/tasks.c b/testsuites/smptests/smpatomic07/tasks.c
index ab1b51c..345439b 100644
--- a/testsuites/smptests/smpatomic07/tasks.c
+++ b/testsuites/smptests/smpatomic07/tasks.c
@@ -71,17 +71,17 @@ rtems_task Test_task(
/* Print that the task is up and running. */
/* test relaxed barrier */
- ATOMIC_CAS_NO_BARRIER(uint, Uint, uint_fast32_t, cpu_num, ATOMIC_ORDER_RELAXED);
+ ATOMIC_CAS_NO_BARRIER(ulong, Ulong, unsigned long, cpu_num, ATOMIC_ORDER_RELAXED);
ATOMIC_CAS_NO_BARRIER(ptr, Pointer, uintptr_t, cpu_num, ATOMIC_ORDER_RELAXED);
/* test acquire barrier */
- ATOMIC_CAS_NO_BARRIER(uint, Uint, uint_fast32_t, cpu_num, ATOMIC_ORDER_ACQUIRE);
+ ATOMIC_CAS_NO_BARRIER(ulong, Ulong, unsigned long, cpu_num, ATOMIC_ORDER_ACQUIRE);
ATOMIC_CAS_NO_BARRIER(ptr, Pointer, uintptr_t, cpu_num, ATOMIC_ORDER_ACQUIRE);
/* test release barrier */
- ATOMIC_CAS_NO_BARRIER(uint, Uint, uint_fast32_t, cpu_num, ATOMIC_ORDER_RELEASE);
+ ATOMIC_CAS_NO_BARRIER(ulong, Ulong, unsigned long, cpu_num, ATOMIC_ORDER_RELEASE);
ATOMIC_CAS_NO_BARRIER(ptr, Pointer, uintptr_t, cpu_num, ATOMIC_ORDER_RELEASE);
diff --git a/testsuites/smptests/smpatomic08/init.c b/testsuites/smptests/smpatomic08/init.c
index 031f7c9..5e6b648 100644
--- a/testsuites/smptests/smpatomic08/init.c
+++ b/testsuites/smptests/smpatomic08/init.c
@@ -24,12 +24,12 @@
/* FIXME: Add barrier to Score */
typedef struct {
- Atomic_Uint value;
- Atomic_Uint sense;
+ Atomic_Ulong value;
+ Atomic_Ulong sense;
} SMP_barrier_Control;
typedef struct {
- uint_fast32_t sense;
+ unsigned long sense;
} SMP_barrier_State;
#define SMP_BARRIER_CONTROL_INITIALIZER \
@@ -40,26 +40,26 @@ typedef struct {
static void _SMP_barrier_Wait(
SMP_barrier_Control *control,
SMP_barrier_State *state,
- uint_fast32_t count
+ unsigned long count
)
{
- uint_fast32_t sense = ~state->sense;
- uint_fast32_t previous_value;
+ unsigned long sense = ~state->sense;
+ unsigned long previous_value;
state->sense = sense;
- previous_value = _Atomic_Fetch_add_uint(
+ previous_value = _Atomic_Fetch_add_ulong(
&control->value,
1,
ATOMIC_ORDER_RELAXED
);
if ( previous_value + 1 == count ) {
- _Atomic_Store_uint( &control->value, 0, ATOMIC_ORDER_RELAXED );
- _Atomic_Store_uint( &control->sense, sense, ATOMIC_ORDER_RELEASE );
+ _Atomic_Store_ulong( &control->value, 0, ATOMIC_ORDER_RELAXED );
+ _Atomic_Store_ulong( &control->sense, sense, ATOMIC_ORDER_RELEASE );
} else {
while (
- _Atomic_Load_uint( &control->sense, ATOMIC_ORDER_ACQUIRE ) != sense
+ _Atomic_Load_ulong( &control->sense, ATOMIC_ORDER_ACQUIRE ) != sense
) {
/* Wait */
}
@@ -73,13 +73,13 @@ static void _SMP_barrier_Wait(
#define CPU_COUNT 32
typedef struct {
- Atomic_Uint stop;
+ Atomic_Ulong stop;
SMP_barrier_Control barrier;
size_t worker_count;
rtems_id stop_worker_timer_id;
- Atomic_Uint atomic_value;
- uint_fast32_t per_worker_value[CPU_COUNT];
- uint32_t normal_value;
+ Atomic_Ulong atomic_value;
+ unsigned long per_worker_value[CPU_COUNT];
+ unsigned long normal_value;
Atomic_Flag global_flag;
} test_context;
@@ -96,7 +96,7 @@ static test_context test_instance = {
static bool stop(test_context *ctx)
{
- return _Atomic_Load_uint(&ctx->stop, ATOMIC_ORDER_RELAXED) != 0;
+ return _Atomic_Load_ulong(&ctx->stop, ATOMIC_ORDER_RELAXED) != 0;
}
static bool is_master_worker(size_t worker_index)
@@ -110,14 +110,14 @@ static void test_fini(
bool atomic
)
{
- uint_fast32_t expected_value = 0;
- uint_fast32_t actual_value;
+ unsigned long expected_value = 0;
+ unsigned long actual_value;
size_t worker_index;
printf("=== atomic %s test case ==\n", test);
for (worker_index = 0; worker_index < ctx->worker_count; ++worker_index) {
- uint_fast32_t worker_value = ctx->per_worker_value[worker_index];
+ unsigned long worker_value = ctx->per_worker_value[worker_index];
expected_value += worker_value;
@@ -129,7 +129,7 @@ static void test_fini(
}
if (atomic) {
- actual_value = _Atomic_Load_uint(&ctx->atomic_value, ATOMIC_ORDER_RELAXED);
+ actual_value = _Atomic_Load_ulong(&ctx->atomic_value, ATOMIC_ORDER_RELAXED);
} else {
actual_value = ctx->normal_value;
}
@@ -145,16 +145,16 @@ static void test_fini(
static void test_atomic_add_init(test_context *ctx)
{
- _Atomic_Init_uint(&ctx->atomic_value, 0);
+ _Atomic_Init_ulong(&ctx->atomic_value, 0);
}
static void test_atomic_add_body(test_context *ctx, size_t worker_index)
{
- uint_fast32_t counter = 0;
+ unsigned long counter = 0;
while (!stop(ctx)) {
++counter;
- _Atomic_Fetch_add_uint(&ctx->atomic_value, 1, ATOMIC_ORDER_RELAXED);
+ _Atomic_Fetch_add_ulong(&ctx->atomic_value, 1, ATOMIC_ORDER_RELAXED);
}
ctx->per_worker_value[worker_index] = counter;
@@ -173,7 +173,7 @@ static void test_atomic_flag_init(test_context *ctx)
static void test_atomic_flag_body(test_context *ctx, size_t worker_index)
{
- uint_fast32_t counter = 0;
+ unsigned long counter = 0;
while (!stop(ctx)) {
while (_Atomic_Flag_test_and_set(&ctx->global_flag, ATOMIC_ORDER_ACQUIRE)) {
@@ -196,16 +196,16 @@ static void test_atomic_flag_fini(test_context *ctx)
static void test_atomic_sub_init(test_context *ctx)
{
- _Atomic_Init_uint(&ctx->atomic_value, 0);
+ _Atomic_Init_ulong(&ctx->atomic_value, 0);
}
static void test_atomic_sub_body(test_context *ctx, size_t worker_index)
{
- uint_fast32_t counter = 0;
+ unsigned long counter = 0;
while (!stop(ctx)) {
--counter;
- _Atomic_Fetch_sub_uint(&ctx->atomic_value, 1, ATOMIC_ORDER_RELAXED);
+ _Atomic_Fetch_sub_ulong(&ctx->atomic_value, 1, ATOMIC_ORDER_RELAXED);
}
ctx->per_worker_value[worker_index] = counter;
@@ -218,21 +218,21 @@ static void test_atomic_sub_fini(test_context *ctx)
static void test_atomic_compare_exchange_init(test_context *ctx)
{
- _Atomic_Init_uint(&ctx->atomic_value, 0);
+ _Atomic_Init_ulong(&ctx->atomic_value, 0);
ctx->normal_value = 0;
}
static void test_atomic_compare_exchange_body(test_context *ctx, size_t worker_index)
{
- uint_fast32_t counter = 0;
+ unsigned long counter = 0;
while (!stop(ctx)) {
bool success;
do {
- uint_fast32_t zero = 0;
+ unsigned long zero = 0;
- success = _Atomic_Compare_exchange_uint(
+ success = _Atomic_Compare_exchange_ulong(
&ctx->atomic_value,
&zero,
1,
@@ -244,7 +244,7 @@ static void test_atomic_compare_exchange_body(test_context *ctx, size_t worker_i
++counter;
++ctx->normal_value;
- _Atomic_Store_uint(&ctx->atomic_value, 0, ATOMIC_ORDER_RELEASE);
+ _Atomic_Store_ulong(&ctx->atomic_value, 0, ATOMIC_ORDER_RELEASE);
}
ctx->per_worker_value[worker_index] = counter;
@@ -257,26 +257,26 @@ static void test_atomic_compare_exchange_fini(test_context *ctx)
static void test_atomic_or_and_init(test_context *ctx)
{
- _Atomic_Init_uint(&ctx->atomic_value, 0);
+ _Atomic_Init_ulong(&ctx->atomic_value, 0);
}
static void test_atomic_or_and_body(test_context *ctx, size_t worker_index)
{
- uint_fast32_t the_bit = 1UL << worker_index;
- uint_fast32_t current_bit = 0;
+ unsigned long the_bit = 1UL << worker_index;
+ unsigned long current_bit = 0;
while (!stop(ctx)) {
- uint_fast32_t previous;
+ unsigned long previous;
if (current_bit != 0) {
- previous = _Atomic_Fetch_and_uint(
+ previous = _Atomic_Fetch_and_ulong(
&ctx->atomic_value,
~the_bit,
ATOMIC_ORDER_RELAXED
);
current_bit = 0;
} else {
- previous = _Atomic_Fetch_or_uint(
+ previous = _Atomic_Fetch_or_ulong(
&ctx->atomic_value,
the_bit,
ATOMIC_ORDER_RELAXED
@@ -325,14 +325,14 @@ static void stop_worker_timer(rtems_id timer_id, void *arg)
{
test_context *ctx = arg;
- _Atomic_Store_uint(&ctx->stop, 1, ATOMIC_ORDER_RELAXED);
+ _Atomic_Store_ulong(&ctx->stop, 1, ATOMIC_ORDER_RELAXED);
}
static void start_worker_stop_timer(test_context *ctx)
{
rtems_status_code sc;
- _Atomic_Store_uint(&ctx->stop, 0, ATOMIC_ORDER_RELEASE);
+ _Atomic_Store_ulong(&ctx->stop, 0, ATOMIC_ORDER_RELEASE);
sc = rtems_timer_fire_after(
ctx->stop_worker_timer_id,
--
1.7.7
More information about the devel
mailing list