[PATCH 7/7] score: atomic support for RTEMS. Whitespace and style cleanup.

Gedare Bloom gedare at rtems.org
Mon Feb 4 19:33:38 UTC 2013


From: WeiY <wei.a.yang at gmail.com>
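
Whitespace and style cleanup: realign the macro continuation backslashes
and asm operand comments in the i386 and PowerPC cpuatomic.h headers, add
the missing Atomic_##TYPE return type to the i386 _CPU_Atomic_Load_acq
definition, separate the license block from the file description comment
(fixing typos in the latter), and drop the redundant else branches after
return in atomic.inl.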

---
 cpukit/score/cpu/i386/rtems/score/cpuatomic.h    |  212 +++++++++++-----------
 cpukit/score/cpu/powerpc/rtems/score/cpuatomic.h |   76 ++++----
 cpukit/score/inline/rtems/score/atomic.inl       |  156 ++++++++---------
 3 files changed, 218 insertions(+), 226 deletions(-)
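
Note for reviewers: a minimal sketch of what the uniprocessor i386
ATOMIC_STORE_LOAD expansion boils down to for the int variant (assuming
Atomic_Int is the header's 32-bit integer type). On UP i386 an aligned
word access is already atomic, so a compiler barrier is all that is
needed:

static inline Atomic_Int
_CPU_Atomic_Load_int(volatile Atomic_Int *p)
{
  Atomic_Int tmp;

  tmp = *p;
  __asm __volatile("" : : : "memory"); /* compiler barrier, no fence */
  return (tmp);
}

static inline void
_CPU_Atomic_Store_int(volatile Atomic_Int *p, Atomic_Int v)
{
  __asm __volatile("" : : : "memory"); /* compiler barrier, no fence */
  *p = v;
}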

diff --git a/cpukit/score/cpu/i386/rtems/score/cpuatomic.h b/cpukit/score/cpu/i386/rtems/score/cpuatomic.h
index ae57584..eff6a35 100644
--- a/cpukit/score/cpu/i386/rtems/score/cpuatomic.h
+++ b/cpukit/score/cpu/i386/rtems/score/cpuatomic.h
@@ -67,91 +67,92 @@ extern "C" {
  * is always consistent, so we only need to take care of compiler.
  */
 #define	ATOMIC_STORE_LOAD(NAME, TYPE, LOP, SOP)               \
-static inline Atomic_##TYPE                           \
-_CPU_Atomic_Load_##NAME(volatile Atomic_##TYPE *p)      \
-{                                                       \
-  Atomic_##TYPE tmp;                                    \
-                                                        \
-  tmp = *p;                                             \
-  __asm __volatile("" : : : "memory");                  \
-  return (tmp);                                         \
-}                                                       \
-                                                        \
+static inline Atomic_##TYPE                                   \
+_CPU_Atomic_Load_##NAME(volatile Atomic_##TYPE *p)            \
+{                                                             \
+  Atomic_##TYPE tmp;                                          \
+                                                              \
+  tmp = *p;                                                   \
+  __asm __volatile("" : : : "memory");                        \
+  return (tmp);                                               \
+}                                                             \
+                                                              \
-static inline _CPU_Atomic_Load_acq_##NAME(volatile Atomic_##TYPE *p)  \
+static inline Atomic_##TYPE                                   \
+_CPU_Atomic_Load_acq_##NAME(volatile Atomic_##TYPE *p)        \
-{                                                       \
-  Atomic_##TYPE tmp;                                    \
-                                                        \
-  tmp = *p;                                             \
-  __asm __volatile("" : : : "memory");                  \
-  return (tmp);                                         \
-}                                                       \
-                                                        \
-static inline void                                    \
+{                                                             \
+  Atomic_##TYPE tmp;                                          \
+                                                              \
+  tmp = *p;                                                   \
+  __asm __volatile("" : : : "memory");                        \
+  return (tmp);                                               \
+}                                                             \
+                                                              \
+static inline void                                            \
 _CPU_Atomic_Store_##NAME(volatile Atomic_##TYPE *p, Atomic_##TYPE v) \
-{                                                                    \
-  __asm __volatile("" : : : "memory");                               \
-  *p = v;                                                            \
-}                                                                    \
-                                                        \
-static inline void                                    \
+{                                                             \
+  __asm __volatile("" : : : "memory");                        \
+  *p = v;                                                     \
+}                                                             \
+                                                              \
+static inline void                                            \
 _CPU_Atomic_Store_rel_##NAME(volatile Atomic_##TYPE *p, Atomic_##TYPE v) \
-{                                                                        \
-  __asm __volatile("" : : : "memory");                                   \
-  *p = v;                                                                \
-}                                                                        \
+{                                                             \
+  __asm __volatile("" : : : "memory");                        \
+  *p = v;                                                     \
+}                                                             \
 
 #else /* !(!SMP) */
 
 #define	ATOMIC_STORE_LOAD(NAME, TYPE, LOP, SOP)               \
-static inline Atomic_##TYPE                           \
-_CPU_Atomic_Load_##NAME(volatile Atomic_##TYPE *p)      \
-{                                                       \
-  Atomic_##TYPE res;                                    \
-                                                        \
-  __asm __volatile(MPLOCKED LOP                         \
-  : "=a" (res),                 /* 0 */                 \
-  "=m" (*p)                     /* 1 */                 \
-  : "m" (*p)                    /* 2 */                 \
-  : "memory", "cc");                                    \
-                                                        \
-  return (res);                                         \
-}                                                       \
-                                                        \
-static inline Atomic_##TYPE                           \
-_CPU_Atomic_Load_acq_##NAME(volatile Atomic_##TYPE *p)  \
-{                                                       \
-  Atomic_##TYPE res;                                    \
-                                                        \
-  __asm __volatile(MPLOCKED LOP                         \
-  : "=a" (res),			/* 0 */                 \
-  "=m" (*p)			/* 1 */                 \
-  : "m" (*p)			/* 2 */                 \
-  : "memory", "cc");                                    \
-                                                        \
-  return (res);                                         \
-}							\
-							\
-/*							\
- * The XCHG instruction asserts LOCK automagically.	\
- */							\
-static inline void                                    \
+static inline Atomic_##TYPE                                   \
+_CPU_Atomic_Load_##NAME(volatile Atomic_##TYPE *p)            \
+{                                                             \
+  Atomic_##TYPE res;                                          \
+                                                              \
+  __asm __volatile(MPLOCKED LOP                               \
+  : "=a" (res),                 /* 0 */                       \
+  "=m" (*p)                     /* 1 */                       \
+  : "m" (*p)                    /* 2 */                       \
+  : "memory", "cc");                                          \
+                                                              \
+  return (res);                                               \
+}                                                             \
+                                                              \
+static inline Atomic_##TYPE                                   \
+_CPU_Atomic_Load_acq_##NAME(volatile Atomic_##TYPE *p)        \
+{                                                             \
+  Atomic_##TYPE res;                                          \
+                                                              \
+  __asm __volatile(MPLOCKED LOP                               \
+  : "=a" (res),			/* 0 */                       \
+  "=m" (*p)			/* 1 */                       \
+  : "m" (*p)			/* 2 */                       \
+  : "memory", "cc");                                          \
+                                                              \
+  return (res);                                               \
+}                                                             \
+                                                              \
+/*                                                            \
+ * The XCHG instruction asserts LOCK automagically.           \
+ */                                                           \
+static inline void                                            \
 _CPU_Atomic_Store_##NAME(volatile Atomic_##TYPE *p, Atomic_##TYPE v) \
-{                                                                    \
-  __asm __volatile(SOP                                               \
-  : "=m" (*p),                  /* 0 */                              \
-  "+r" (v)                      /* 1 */                              \
-  : "m" (*p)                    /* 2 */                              \
-  : "memory");                                                       \
-}                                                                    \
-static inline void					             \
+{                                                             \
+  __asm __volatile(SOP                                        \
+  : "=m" (*p),                  /* 0 */                       \
+  "+r" (v)                      /* 1 */                       \
+  : "m" (*p)                    /* 2 */                       \
+  : "memory");                                                \
+}                                                             \
+static inline void                                            \
 _CPU_Atomic_Store_rel_##NAME(volatile Atomic_##TYPE *p, Atomic_##TYPE v) \
-{                                                                        \
-  __asm __volatile(SOP                                                   \
-  : "=m" (*p),			/* 0 */                                  \
-  "+r" (v)			/* 1 */		                         \
-  : "m" (*p)			/* 2 */	                                 \
-  : "memory");                                                           \
-}                                                                        \
+{                                                             \
+  __asm __volatile(SOP                                        \
+  : "=m" (*p),			/* 0 */                       \
+  "+r" (v)			/* 1 */		              \
+  : "m" (*p)			/* 2 */	                      \
+  : "memory");                                                \
+}                                                             \
 
 #endif /* !SMP */
 
@@ -160,8 +160,8 @@ _CPU_Atomic_Store_rel_##NAME(volatile Atomic_##TYPE *p, Atomic_##TYPE v) \
  * GCC aggressively reorders operations and memory clobbering is necessary
  * in order to avoid that for memory barriers.
  */
-#define	ATOMIC_FETCH_GENERIC(NAME, TYPENAME, TYPE, OP, CONS, V)                         \
-static inline void                                                                      \
+#define	ATOMIC_FETCH_GENERIC(NAME, TYPENAME, TYPE, OP, CONS, V)               \
+static inline void                                                            \
 _CPU_Atomic_Fetch_##NAME##_##TYPENAME(volatile Atomic_##TYPE *p, Atomic_##TYPE v) \
 {                                                                             \
   __asm __volatile(MPLOCKED OP                                                \
@@ -251,37 +251,37 @@ ATOMIC_FETCH_GENERIC(and, long, Long, "andl %1,%0", "ir", v);
 #define	_CPU_Atomic_Compare_exchange_rel_long _CPU_Atomic_Compare_exchange_long
 
 /* Operations on 32-bit double words. */
-#define	_CPU_Atomic_Fetch_or_32(p, v)  \
+#define	_CPU_Atomic_Fetch_or_32(p, v)      \
     _CPU_Atomic_Fetch_or_int((volatile Atomic_Int *)(p), (Atomic_Int)(v))
 #define	_CPU_Atomic_Fetch_or_acq_32(p, v)  \
     _CPU_Atomic_Fetch_or_acq_int((volatile Atomic_Int *)(p), (Atomic_Int)(v))
 #define	_CPU_Atomic_Fetch_or_rel_32(p, v)  \
     _CPU_Atomic_Fetch_or_rel_int((volatile Atomic_Int *)(p), (Atomic_Int)(v))
-#define	_CPU_Atomic_Fetch_and_32(p, v)  \
+#define	_CPU_Atomic_Fetch_and_32(p, v)     \
     _CPU_Atomic_Fetch_and_int((volatile Atomic_Int *)(p), (Atomic_Int)(v))
-#define	_CPU_Atomic_Fetch_and_acq_32(p, v)  \
+#define	_CPU_Atomic_Fetch_and_acq_32(p, v) \
     _CPU_Atomic_Fetch_and_acq_int((volatile Atomic_Int *)(p), (Atomic_Int)(v))
-#define	_CPU_Atomic_Fetch_and_rel_32(p, v)  \
+#define	_CPU_Atomic_Fetch_and_rel_32(p, v) \
     _CPU_Atomic_Fetch_and_rel_int((volatile Atomic_Int *)(p), (Atomic_Int)(v))
-#define	_CPU_Atomic_Fetch_add_32(p, v)  \
+#define	_CPU_Atomic_Fetch_add_32(p, v)     \
     _CPU_Atomic_Fetch_add_int((volatile Atomic_Int *)(p), (Atomic_Int)(v))
-#define	_CPU_Atomic_Fetch_add_acq_32(p, v)  \
+#define	_CPU_Atomic_Fetch_add_acq_32(p, v) \
     _CPU_Atomic_Fetch_add_acq_int((volatile Atomic_Int *)(p), (Atomic_Int)(v))
-#define	_CPU_Atomic_Fetch_add_rel_32(p, v)  \
+#define	_CPU_Atomic_Fetch_add_rel_32(p, v) \
     _CPU_Atomic_Fetch_add_rel_int((volatile Atomic_Int *)(p), (Atomic_Int)(v))
-#define	_CPU_Atomic_Fetch_sub_32(p, v)  \
+#define	_CPU_Atomic_Fetch_sub_32(p, v)     \
     _CPU_Atomic_Fetch_sub_int((volatile Atomic_Int *)(p), (Atomic_Int)(v))
-#define	_CPU_Atomic_Fetch_sub_acq_32(p, v)  \
+#define	_CPU_Atomic_Fetch_sub_acq_32(p, v) \
     _CPU_Atomic_Fetch_sub_acq_int((volatile Atomic_Int *)(p), (Atomic_Int)(v))
-#define	_CPU_Atomic_Fetch_sub_rel_32(p, v)  \
+#define	_CPU_Atomic_Fetch_sub_rel_32(p, v) \
     _CPU_Atomic_Fetch_sub_rel_int((volatile Atomic_Int *)(p), (Atomic_Int)(v))
-#define _CPU_Atomic_Load_32(p)  \
+#define _CPU_Atomic_Load_32(p)             \
     _CPU_Atomic_Load_int((volatile Atomic_Int *)(p))
-#define	_CPU_Atomic_Load_acq_32(p)  \
+#define	_CPU_Atomic_Load_acq_32(p)         \
     _CPU_Atomic_Load_acq_int((volatile Atomic_Int *)(p))
-#define _CPU_Atomic_Store_32(p, v)  \
+#define _CPU_Atomic_Store_32(p, v)         \
     _CPU_Atomic_Store_int((volatile Atomic_Int *)(p), (Atomic_Int)(v))
-#define	_CPU_Atomic_Store_rel_32(p, v)  \
+#define	_CPU_Atomic_Store_rel_32(p, v)     \
     _CPU_Atomic_Store_rel_int((volatile Atomic_Int *)(p), (Atomic_Int)(v))
 #define	_CPU_Atomic_Compare_exchange_32(dst, old, new)  \
     _CPU_Atomic_Compare_exchange_int((volatile Atomic_Int *)(dst), (Atomic_Int)(old), (Atomic_Int)(new))
@@ -291,37 +291,37 @@ ATOMIC_FETCH_GENERIC(and, long, Long, "andl %1,%0", "ir", v);
     _CPU_Atomic_Compare_exchange_rel_int((volatile Atomic_Int *)(dst), (Atomic_Int)(old), (Atomic_Int)(new))
 
 /* Operations on pointers. */
-#define	_CPU_Atomic_Fetch_or_ptr(p, v) \
+#define	_CPU_Atomic_Fetch_or_ptr(p, v)     \
     _CPU_Atomic_Fetch_or_int((volatile Atomic_Int *)(p), (Atomic_Int)(v))
 #define	_CPU_Atomic_Fetch_or_acq_ptr(p, v) \
     _CPU_Atomic_Fetch_or_acq_int((volatile Atomic_Int *)(p), (Atomic_Int)(v))
 #define	_CPU_Atomic_Fetch_or_rel_ptr(p, v) \
     _CPU_Atomic_Fetch_or_rel_int((volatile Atomic_Int *)(p), (Atomic_Int)(v))
-#define	_CPU_Atomic_Fetch_and_ptr(p, v) \
+#define	_CPU_Atomic_Fetch_and_ptr(p, v)    \
     _CPU_Atomic_Fetch_and_int((volatile Atomic_Int *)(p), (Atomic_Int)(v))
-#define	_CPU_Atomic_Fetch_and_acq_ptr(p, v) \
+#define	_CPU_Atomic_Fetch_and_acq_ptr(p, v)\
     _CPU_Atomic_Fetch_and_acq_int((volatile Atomic_Int *)(p), (Atomic_Int)(v))
-#define	_CPU_Atomic_Fetch_and_rel_ptr(p, v) \
+#define	_CPU_Atomic_Fetch_and_rel_ptr(p, v)\
     _CPU_Atomic_Fetch_and_rel_int((volatile Atomic_Int *)(p), (Atomic_Int)(v))
-#define	_CPU_Atomic_Fetch_add_ptr(p, v) \
+#define	_CPU_Atomic_Fetch_add_ptr(p, v)    \
     _CPU_Atomic_Fetch_add_int((volatile Atomic_Int *)(p), (Atomic_Int)(v))
-#define	_CPU_Atomic_Fetch_add_acq_ptr(p, v) \
+#define	_CPU_Atomic_Fetch_add_acq_ptr(p, v)\
     _CPU_Atomic_Fetch_add_acq_int((volatile Atomic_Int *)(p), (Atomic_Int)(v))
-#define	_CPU_Atomic_Fetch_add_rel_ptr(p, v) \
+#define	_CPU_Atomic_Fetch_add_rel_ptr(p, v)\
     _CPU_Atomic_Fetch_add_rel_int((volatile Atomic_Int *)(p), (Atomic_Int)(v))
-#define	_CPU_Atomic_Fetch_sub_ptr(p, v) \
+#define	_CPU_Atomic_Fetch_sub_ptr(p, v)    \
     _CPU_Atomic_Fetch_sub_int((volatile Atomic_Int *)(p), (Atomic_Int)(v))
-#define	_CPU_Atomic_Fetch_sub_acq_ptr(p, v) \
+#define	_CPU_Atomic_Fetch_sub_acq_ptr(p, v)\
     _CPU_Atomic_Fetch_sub_acq_int((volatile Atomic_Int *)(p), (Atomic_Int)(v))
-#define	_CPU_Atomic_Fetch_sub_rel_ptr(p, v) \
+#define	_CPU_Atomic_Fetch_sub_rel_ptr(p, v)\
     _CPU_Atomic_Fetch_sub_rel_int((volatile Atomic_Int *)(p), (Atomic_Int)(v))
-#define _CPU_Atomic_Load_ptr(p) \
+#define _CPU_Atomic_Load_ptr(p)            \
     _CPU_Atomic_Load_int((volatile Atomic_Int *)(p))
-#define	_CPU_Atomic_Load_acq_ptr(p) \
+#define	_CPU_Atomic_Load_acq_ptr(p)        \
     _CPU_Atomic_Load_acq_int((volatile Atomic_Int *)(p))
-#define _CPU_Atomic_Store_ptr(p, v) \
+#define _CPU_Atomic_Store_ptr(p, v)        \
     _CPU_Atomic_Store_int((volatile Atomic_Int *)(p), (v))
-#define	_CPU_Atomic_Store_rel_ptr(p, v) \
+#define	_CPU_Atomic_Store_rel_ptr(p, v)    \
     _CPU_Atomic_Store_rel_int((volatile Atomic_Int *)(p), (v))
 #define	_CPU_Atomic_Compare_exchange_ptr(dst, old, new) \
     _CPU_Atomic_Compare_exchange_int((volatile Atomic_Int *)(dst), (Atomic_Int)(old), (Atomic_Int)(new))
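
On SMP the same macros emit lock-prefixed read-modify-write instructions.
Assuming MPLOCKED expands to "lock ; " (as in the FreeBSD atomic.h this
header is apparently modeled on), ATOMIC_FETCH_GENERIC(add, int, Int,
"addl %1,%0", "ir", v) comes out roughly as:

static inline void
_CPU_Atomic_Fetch_add_int(volatile Atomic_Int *p, Atomic_Int v)
{
  __asm __volatile("lock ; addl %1,%0"
  : "=m" (*p)                   /* 0 */
  : "ir" (v),                   /* 1 */
    "m" (*p)                    /* 2 */
  : "memory", "cc");
}

The SMP stores go through SOP, which per the comment above is an xchg and
therefore needs no explicit lock prefix.
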
diff --git a/cpukit/score/cpu/powerpc/rtems/score/cpuatomic.h b/cpukit/score/cpu/powerpc/rtems/score/cpuatomic.h
index eeefa36..0ffb447 100644
--- a/cpukit/score/cpu/powerpc/rtems/score/cpuatomic.h
+++ b/cpukit/score/cpu/powerpc/rtems/score/cpuatomic.h
@@ -88,39 +88,39 @@ extern "C" {
      : "cc", "memory")                              \
      /* __CPU_Atomic_Fetch_add_long */
 
-#define	_ATOMIC_ADD(typename, type)                           \
+#define	_ATOMIC_ADD(typename, type)                 \
   static __inline void                              \
   _CPU_Atomic_Fetch_add_##typename(volatile Atomic_##type *p, Atomic_##type v) {  \
     Atomic_##type t;                                \
-    __CPU_Atomic_Fetch_add_##typename(p, v, t);         \
+    __CPU_Atomic_Fetch_add_##typename(p, v, t);     \
   }                                                 \
                                                     \
   static __inline void                              \
   _CPU_Atomic_Fetch_add_acq_##typename(volatile Atomic_##type *p, Atomic_##type v) { \
-    Atomic_##type t;                                            \
-   __CPU_Atomic_Fetch_add_##typename(p, v, t);                      \
-   __ATOMIC_BARRIER;                                            \
-  }                                                             \
-                                                                \
-  static __inline void                                          \
+    Atomic_##type t;                                \
+   __CPU_Atomic_Fetch_add_##typename(p, v, t);      \
+   __ATOMIC_BARRIER;                                \
+  }                                                 \
+                                                    \
+  static __inline void                              \
   _CPU_Atomic_Fetch_add_rel_##typename(volatile Atomic_##type *p, Atomic_##type v) { \
-    Atomic_##type t;                                            \
-    __ATOMIC_BARRIER;                                           \
-    __CPU_Atomic_Fetch_add_##typename(p, v, t);                     \
-  }                                                             \
+    Atomic_##type t;                                \
+    __ATOMIC_BARRIER;                               \
+    __CPU_Atomic_Fetch_add_##typename(p, v, t);     \
+  }                                                 \
   /* _ATOMIC_ADD */
 
 _ATOMIC_ADD(int, Int)
 _ATOMIC_ADD(long, Long)
 
-#define _CPU_Atomic_Fetch_add_32(p, v)  \
+#define _CPU_Atomic_Fetch_add_32(p, v)      \
     _CPU_Atomic_Fetch_add_int((volatile Atomic_Int *)(p), (Atomic_Int)(v))
 #define _CPU_Atomic_Fetch_add_acq_32(p, v)  \
     _CPU_Atomic_Fetch_add_acq_int((volatile Atomic_Int *)(p), (Atomic_Int)(v))
 #define _CPU_Atomic_Fetch_add_rel_32(p, v)  \
     _CPU_Atomic_Fetch_add_rel_int((volatile Atomic_Int *)(p), (Atomic_Int)(v))
 
-#define _CPU_Atomic_Fetch_add_ptr(p, v) \
+#define _CPU_Atomic_Fetch_add_ptr(p, v)     \
     _CPU_Atomic_Fetch_add_int((volatile Atomic_Int *)(p), (Atomic_Int)(v))
 #define _CPU_Atomic_Fetch_add_acq_ptr(p, v) \
     _CPU_Atomic_Fetch_add_acq_int((volatile Atomic_Int *)(p), (Atomic_Int)(v))
@@ -158,17 +158,17 @@ _ATOMIC_ADD(long, Long)
   : "cc", "memory")                                             \
   /* _CPU_Atomic_Fetch_and_long */
 
-#define	_ATOMIC_AND(typename, type)                                       \
+#define	_ATOMIC_AND(typename, type)                             \
   static __inline void                                          \
   _CPU_Atomic_Fetch_and_##typename(volatile Atomic_##type *p, Atomic_##type v) {  \
     Atomic_##type t;                                            \
-    __CPU_Atomic_Fetch_and_##typename(p, v, t);                     \
+    __CPU_Atomic_Fetch_and_##typename(p, v, t);                 \
   }                                                             \
                                                                 \
   static __inline void                                          \
   _CPU_Atomic_Fetch_and_acq_##typename(volatile Atomic_##type *p, Atomic_##type v) {  \
     Atomic_##type t;                                            \
-    __CPU_Atomic_Fetch_and_##typename(p, v, t);                     \
+    __CPU_Atomic_Fetch_and_##typename(p, v, t);                 \
     __ATOMIC_BARRIER;                                           \
   }                                                             \
                                                                 \
@@ -176,7 +176,7 @@ _ATOMIC_ADD(long, Long)
   _CPU_Atomic_Fetch_and_rel_##typename(volatile Atomic_##type *p, Atomic_##type v) {  \
     Atomic_##type t;                                            \
     __ATOMIC_BARRIER;                                           \
-    __CPU_Atomic_Fetch_and_##typename(p, v, t);                     \
+    __CPU_Atomic_Fetch_and_##typename(p, v, t);                 \
   }                                                             \
   /* _ATOMIC_AND */
 
@@ -184,14 +184,14 @@ _ATOMIC_ADD(long, Long)
 _ATOMIC_AND(int, Int)
 _ATOMIC_AND(long, Long)
 
-#define _CPU_Atomic_Fetch_and_32(p, v)  \
+#define _CPU_Atomic_Fetch_and_32(p, v)      \
     _CPU_Atomic_Fetch_and_int((volatile Atomic_Int *)(p), (Atomic_Int)(v))
 #define _CPU_Atomic_Fetch_and_acq_32(p, v)  \
     _CPU_Atomic_Fetch_and_acq_int((volatile Atomic_Int *)(p), (Atomic_Int)(v))
 #define _CPU_Atomic_Fetch_and_rel_32(p, v)  \
     _CPU_Atomic_Fetch_and_rel_int((volatile Atomic_Int *)(p), (Atomic_Int)(v))
 
-#define _CPU_Atomic_Fetch_and_ptr(p, v) \
+#define _CPU_Atomic_Fetch_and_ptr(p, v)     \
     _CPU_Atomic_Fetch_and_int((volatile Atomic_Int *)(p), (Atomic_Int)(v))
 #define _CPU_Atomic_Fetch_and_acq_ptr(p, v) \
     _CPU_Atomic_Fetch_and_acq_int((volatile Atomic_Int *)(p), (Atomic_Int)(v))
@@ -230,17 +230,17 @@ _ATOMIC_AND(long, Long)
   : "cc", "memory")                                     \
   /* __CPU_Atomic_Fetch_or_long */
 
-#define	_ATOMIC_OR(typename, type)                                \
+#define	_ATOMIC_OR(typename, type)                      \
   static __inline void                                  \
   _CPU_Atomic_Fetch_or_##typename(volatile Atomic_##type *p, Atomic_##type v) {  \
     Atomic_##type t;                                    \
-    __CPU_Atomic_Fetch_or_##typename(p, v, t);              \
+    __CPU_Atomic_Fetch_or_##typename(p, v, t);          \
   }                                                     \
                                                         \
   static __inline void                                  \
   _CPU_Atomic_Fetch_or_acq_##typename(volatile Atomic_##type *p, Atomic_##type v) { \
     Atomic_##type t;                                    \
-    __CPU_Atomic_Fetch_or_##typename(p, v, t);              \
+    __CPU_Atomic_Fetch_or_##typename(p, v, t);          \
     __ATOMIC_BARRIER;                                   \
   }                                                     \
                                                         \
@@ -248,21 +248,21 @@ _ATOMIC_AND(long, Long)
   _CPU_Atomic_Fetch_or_rel_##typename(volatile Atomic_##type *p, Atomic_##type v) {	\
     Atomic_##type t;                                    \
     __ATOMIC_BARRIER;                                   \
-    __CPU_Atomic_Fetch_or_##typename(p, v, t);              \
+    __CPU_Atomic_Fetch_or_##typename(p, v, t);          \
   }                                                     \
   /* _ATOMIC_OR */
 
 _ATOMIC_OR(int, Int)
 _ATOMIC_OR(long, Long)
 
-#define _CPU_Atomic_Fetch_or_32(p, v)  \
+#define _CPU_Atomic_Fetch_or_32(p, v)      \
     _CPU_Atomic_Fetch_or_int((volatile Atomic_Int *)(p), (Atomic_Int)(v))
 #define _CPU_Atomic_Fetch_or_acq_32(p, v)  \
     _CPU_Atomic_Fetch_or_acq_int((volatile Atomic_Int *)(p), (Atomic_Int)(v))
 #define _CPU_Atomic_Fetch_or_rel_32(p, v)  \
     _CPU_Atomic_Fetch_or_rel_int((volatile Atomic_Int *)(p), (Atomic_Int)(v))
 
-#define _CPU_Atomic_Fetch_or_ptr(p, v) \
+#define _CPU_Atomic_Fetch_or_ptr(p, v)     \
     _CPU_Atomic_Fetch_or_int((volatile Atomic_Int *)(p), (Atomic_Int)(v))
 #define _CPU_Atomic_Fetch_or_acq_ptr(p, v) \
     _CPU_Atomic_Fetch_or_acq_int((volatile Atomic_Int *)(p), (Atomic_Int)(v))
@@ -300,17 +300,17 @@ _ATOMIC_OR(long, Long)
   : "cc", "memory")                                     \
   /* __CPU_Atomic_Fetch_sub_long */
 
-#define	_ATOMIC_SUB(typename, type)                               \
+#define	_ATOMIC_SUB(typename, type)                     \
   static __inline void                                  \
-  _CPU_Atomic_Fetch_sub_##typename(volatile Atomic_##type *p, Atomic_##type v) { \
+  _CPU_Atomic_Fetch_sub_##typename(volatile Atomic_##type *p, Atomic_##type v) {     \
     Atomic_##type t;                                    \
-    __CPU_Atomic_Fetch_sub_##typename(p, v, t);             \
+    __CPU_Atomic_Fetch_sub_##typename(p, v, t);         \
   }                                                     \
                                                         \
   static __inline void                                  \
   _CPU_Atomic_Fetch_sub_acq_##typename(volatile Atomic_##type *p, Atomic_##type v) { \
     Atomic_##type t;                                    \
-    __CPU_Atomic_Fetch_sub_##typename(p, v, t);             \
+    __CPU_Atomic_Fetch_sub_##typename(p, v, t);         \
     __ATOMIC_BARRIER;                                   \
   }                                                     \
                                                         \
@@ -318,7 +318,7 @@ _ATOMIC_OR(long, Long)
   _CPU_Atomic_Fetch_sub_rel_##typename(volatile Atomic_##type *p, Atomic_##type v) { \
     Atomic_##type t;                                    \
     __ATOMIC_BARRIER;                                   \
-    __CPU_Atomic_Fetch_sub_##typename(p, v, t);             \
+    __CPU_Atomic_Fetch_sub_##typename(p, v, t);         \
   }                                                     \
   /* _ATOMIC_SUB */
 
@@ -326,14 +326,14 @@ _ATOMIC_OR(long, Long)
 _ATOMIC_SUB(int, Int)
 _ATOMIC_SUB(long, Long)
 
-#define _CPU_Atomic_Fetch_sub_32(p, v)  \
+#define _CPU_Atomic_Fetch_sub_32(p, v)      \
     _CPU_Atomic_Fetch_sub_int((volatile Atomic_Int *)(p), (Atomic_Int)(v))
 #define _CPU_Atomic_Fetch_sub_acq_32(p, v)  \
     _CPU_Atomic_Fetch_sub_acq_int((volatile Atomic_Int *)(p), (Atomic_Int)(v))
 #define _CPU_Atomic_Fetch_sub_rel_32(p, v)  \
     _CPU_Atomic_Fetch_sub_rel_int((volatile Atomic_Int *)(p), (Atomic_Int)(v))
 
-#define _CPU_Atomic_Fetch_sub_ptr(p, v) \
+#define _CPU_Atomic_Fetch_sub_ptr(p, v)     \
     _CPU_Atomic_Fetch_sub_int((volatile Atomic_Int *)(p), (Atomic_Int)(v))
 #define _CPU_Atomic_Fetch_sub_acq_ptr(p, v) \
     _CPU_Atomic_Fetch_sub_acq_int((volatile Atomic_Int *)(p), (Atomic_Int)(v))
@@ -367,7 +367,7 @@ _CPU_Atomic_Load_acq_##TYPENAME(volatile Atomic_##TYPE *p)      \
 }                                                               \
                                                                 \
 static __inline void                                            \
-_CPU_Atomic_Store_##TYPENAME(volatile Atomic_##TYPE *p, Atomic_##TYPE v)  \
+_CPU_Atomic_Store_##TYPENAME(volatile Atomic_##TYPE *p, Atomic_##TYPE v)      \
 {                                                               \
   *p = v;                                                       \
 }                                                               \
@@ -380,7 +380,7 @@ _CPU_Atomic_Store_rel_##TYPENAME(volatile Atomic_##TYPE *p, Atomic_##TYPE v)  \
 
 ATOMIC_STORE_LOAD(int, Int)
 
-#define _CPU_Atomic_Load_32(p)  \
+#define _CPU_Atomic_Load_32(p)      \
     _CPU_Atomic_Load_int((volatile Atomic_Int *)(p))
 #define _CPU_Atomic_Load_acq_32(p)  \
     _CPU_Atomic_Load_acq_int((volatile Atomic_Int *)(p))
@@ -413,7 +413,7 @@ _CPU_Atomic_Store_rel_long(volatile Atomic_Long *addr, Atomic_Long val)
   _CPU_Atomic_Store_rel_int((volatile Atomic_Int *)addr, (Atomic_Int)val);
 }
 
-#define _CPU_Atomic_Load_ptr(p) \
+#define _CPU_Atomic_Load_ptr(p)     \
     _CPU_Atomic_Load_int((volatile Atomic_Int *)(p))
 #define _CPU_Atomic_Load_acq_ptr(p) \
     _CPU_Atomic_Load_acq_int((volatile Atomic_Int *)(p))
@@ -511,14 +511,14 @@ _CPU_Atomic_Compare_exchange_rel_long(volatile Atomic_Long *p, Atomic_Long cmpva
   return (_CPU_Atomic_Compare_exchange_long(p, cmpval, newval));
 }
 
-#define _CPU_Atomic_Compare_exchange_32(dst, old, new)  \
+#define _CPU_Atomic_Compare_exchange_32(dst, old, new)      \
     _CPU_Atomic_Compare_exchange_int((volatile Atomic_Int *)(dst), (Atomic_Int)(old), (Atomic_Int)(new))
 #define _CPU_Atomic_Compare_exchange_acq_32(dst, old, new)  \
     _CPU_Atomic_Compare_exchange_acq_int((volatile Atomic_Int *)(dst), (Atomic_Int)(old), (Atomic_Int)(new))
 #define _CPU_Atomic_Compare_exchange_rel_32(dst, old, new)  \
     _CPU_Atomic_Compare_exchange_rel_int((volatile Atomic_Int *)(dst), (Atomic_Int)(old), (Atomic_Int)(new))
 
-#define _CPU_Atomic_Compare_exchange_ptr(dst, old, new) \
+#define _CPU_Atomic_Compare_exchange_ptr(dst, old, new)     \
 		_CPU_Atomic_Compare_exchange_int((volatile Atomic_Int *)(dst), (Atomic_Int)(old), (Atomic_Int)(new))
 #define _CPU_Atomic_Compare_exchange_acq_ptr(dst, old, new) \
 		_CPU_Atomic_Compare_exchange_acq_int((volatile Atomic_Int *)(dst), (Atomic_Int)(old), \
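
The __CPU_Atomic_Fetch_add/and/or/sub primitives wrapped above live
outside these hunks; on PowerPC such a fetch-op is conventionally a
lwarx/stwcx. retry loop, along these lines (a sketch, not the exact macro
body, with __ATOMIC_BARRIER presumably a sync-style instruction plus
memory clobber):

#define __CPU_Atomic_Fetch_add_int(p, v, t)     \
  __asm __volatile(                             \
    "1: lwarx  %0, 0, %2\n"                     \
    "   add    %0, %3, %0\n"                    \
    "   stwcx. %0, 0, %2\n"                     \
    "   bne-   1b\n"                            \
    : "=&r" (t), "=m" (*p)                      \
    : "r" (p), "r" (v), "m" (*p)                \
    : "cc", "memory")
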
diff --git a/cpukit/score/inline/rtems/score/atomic.inl b/cpukit/score/inline/rtems/score/atomic.inl
index 7deec01..313366c 100644
--- a/cpukit/score/inline/rtems/score/atomic.inl
+++ b/cpukit/score/inline/rtems/score/atomic.inl
@@ -6,7 +6,9 @@
  *  The license and distribution terms for this file may be
  *  found in the file LICENSE in this distribution or at
  *  http://www.rtems.com/license/LICENSE.
- *
+ */
+
+/*
  *
- *  The functions in this file implement the API to the RTEMS Atomic Manager and
- *  The API is designed to be compatable with C1X atomic definition as far as
+ *  The functions in this file implement the API to the RTEMS Atomic Manager.
+ *  The API is designed to be compatible with the C11 atomic definition as far as
@@ -16,8 +18,8 @@
  *
  *  rtems/cpukit/score/cpu/xxx/rtems/score/cpuatomic.h
  *
- *  In the event that a CPU does not support a specific atomic function it has, the 
- *  CPU dependent routine does nothing (but does exist).
+ *  In the event that a CPU does not support a specific atomic function, the
+ *  CPU dependent routine does nothing (but does exist).
  */
 
 #ifndef _RTEMS_SCORE_ATOMIC_H
@@ -36,8 +38,7 @@ RTEMS_INLINE_ROUTINE Atomic_Int _Atomic_Load_int(
 {
   if(ATOMIC_ACQUIRE_BARRIER == memory_barrier)
     return _CPU_Atomic_Load_acq_int(address);
-  else
-    return _CPU_Atomic_Load_int(address);
+  return _CPU_Atomic_Load_int(address);
 }
 
 RTEMS_INLINE_ROUTINE Atomic_Long _Atomic_Load_long(
@@ -45,10 +46,9 @@ RTEMS_INLINE_ROUTINE Atomic_Long _Atomic_Load_long(
   Atomic_Memory_barrier memory_barrier
 )
 {
-  if(ATOMIC_ACQUIRE_BARRIER == memory_barrier)
+  if (ATOMIC_ACQUIRE_BARRIER == memory_barrier)
     return _CPU_Atomic_Load_acq_long(address);
-  else
-    return _CPU_Atomic_Load_long(address);
+  return _CPU_Atomic_Load_long(address);
 }
 
 RTEMS_INLINE_ROUTINE Atomic_Pointer _Atomic_Load_ptr(
@@ -56,10 +56,9 @@ RTEMS_INLINE_ROUTINE Atomic_Pointer _Atomic_Load_ptr(
   Atomic_Memory_barrier memory_barrier
 )
 {
-  if(ATOMIC_ACQUIRE_BARRIER == memory_barrier)
+  if (ATOMIC_ACQUIRE_BARRIER == memory_barrier)
     return _CPU_Atomic_Load_acq_ptr(address);
-  else
-    return _CPU_Atomic_Load_ptr(address);
+  return _CPU_Atomic_Load_ptr(address);
 }
 
 RTEMS_INLINE_ROUTINE Atomic_Int32 _Atomic_Load_32(
@@ -67,10 +66,9 @@ RTEMS_INLINE_ROUTINE Atomic_Int32 _Atomic_Load_32(
   Atomic_Memory_barrier memory_barrier
 )
 {
-  if(ATOMIC_ACQUIRE_BARRIER == memory_barrier)
+  if (ATOMIC_ACQUIRE_BARRIER == memory_barrier)
     return _CPU_Atomic_Load_acq_32(address);
-  else
-    return _CPU_Atomic_Load_32(address);
+  return _CPU_Atomic_Load_32(address);
 }
 
 RTEMS_INLINE_ROUTINE Atomic_Int64 _Atomic_Load_64(
@@ -78,10 +76,9 @@ RTEMS_INLINE_ROUTINE Atomic_Int64 _Atomic_Load_64(
   Atomic_Memory_barrier memory_barrier
 )
 {
-  if(ATOMIC_ACQUIRE_BARRIER == memory_barrier)
+  if (ATOMIC_ACQUIRE_BARRIER == memory_barrier)
     return _CPU_Atomic_Load_acq_64(address);
-  else
-    return _CPU_Atomic_Load_64(address);
+  return _CPU_Atomic_Load_64(address);
 }
 
 
@@ -91,10 +88,9 @@ RTEMS_INLINE_ROUTINE void _Atomic_Store_int(
   Atomic_Memory_barrier memory_barrier
 )
 {
-  if(ATOMIC_RELEASE_BARRIER == memory_barrier)
+  if (ATOMIC_RELEASE_BARRIER == memory_barrier)
     return _CPU_Atomic_Store_rel_int(address, value);
-  else
-    return _CPU_Atomic_Store_int(address, value);
+  return _CPU_Atomic_Store_int(address, value);
 }
 
 RTEMS_INLINE_ROUTINE void _Atomic_Store_long(
@@ -103,10 +99,9 @@ RTEMS_INLINE_ROUTINE void _Atomic_Store_long(
   Atomic_Memory_barrier memory_barrier
 )
 {
-  if(ATOMIC_RELEASE_BARRIER == memory_barrier)
+  if (ATOMIC_RELEASE_BARRIER == memory_barrier)
     return _CPU_Atomic_Store_rel_long(address, value);
-  else
-    return _CPU_Atomic_Store_long(address, value);
+  return _CPU_Atomic_Store_long(address, value);
 }
 
 RTEMS_INLINE_ROUTINE void _Atomic_Store_ptr(
@@ -115,10 +110,9 @@ RTEMS_INLINE_ROUTINE void _Atomic_Store_ptr(
   Atomic_Memory_barrier memory_barrier
 )
 {
-  if(ATOMIC_RELEASE_BARRIER == memory_barrier)
+  if (ATOMIC_RELEASE_BARRIER == memory_barrier)
     return _CPU_Atomic_Store_rel_ptr(address, value);
-  else
-    return _CPU_Atomic_Store_ptr(address, value);
+  return _CPU_Atomic_Store_ptr(address, value);
 }
 
 RTEMS_INLINE_ROUTINE void _Atomic_Store_32(
@@ -127,10 +121,9 @@ RTEMS_INLINE_ROUTINE void _Atomic_Store_32(
   Atomic_Memory_barrier memory_barrier
 )
 {
-  if(ATOMIC_RELEASE_BARRIER == memory_barrier)
+  if (ATOMIC_RELEASE_BARRIER == memory_barrier)
     return _CPU_Atomic_Store_rel_32(address, value);
-  else
-    return _CPU_Atomic_Store_32(address, value);
+  return _CPU_Atomic_Store_32(address, value);
 }
 
 RTEMS_INLINE_ROUTINE void _Atomic_Store_64(
@@ -139,10 +132,9 @@ RTEMS_INLINE_ROUTINE void _Atomic_Store_64(
   Atomic_Memory_barrier memory_barrier
 )
 {
-  if(ATOMIC_RELEASE_BARRIER == memory_barrier)
+  if (ATOMIC_RELEASE_BARRIER == memory_barrier)
     return _CPU_Atomic_Store_rel_64(address, value);
-  else
-    return _CPU_Atomic_Store_64(address, value);
+  return _CPU_Atomic_Store_64(address, value);
 }
 
 RTEMS_INLINE_ROUTINE void _Atomic_Fetch_add_int(
@@ -151,9 +143,9 @@ RTEMS_INLINE_ROUTINE void _Atomic_Fetch_add_int(
   Atomic_Memory_barrier memory_barrier
 )
 {
-  if(ATOMIC_ACQUIRE_BARRIER == memory_barrier)
+  if (ATOMIC_ACQUIRE_BARRIER == memory_barrier)
     return _CPU_Atomic_Fetch_add_acq_int(address, value);
-  else if(ATOMIC_RELEASE_BARRIER == memory_barrier)
+  else if (ATOMIC_RELEASE_BARRIER == memory_barrier)
     return _CPU_Atomic_Fetch_add_rel_int(address, value);
   else
     return _CPU_Atomic_Fetch_add_int(address, value);
@@ -165,9 +157,9 @@ RTEMS_INLINE_ROUTINE void _Atomic_Fetch_add_long(
   Atomic_Memory_barrier memory_barrier
 )
 {
-  if(ATOMIC_ACQUIRE_BARRIER == memory_barrier)
+  if (ATOMIC_ACQUIRE_BARRIER == memory_barrier)
     return _CPU_Atomic_Fetch_add_acq_long(address, value);
-  else if(ATOMIC_RELEASE_BARRIER == memory_barrier)
+  else if (ATOMIC_RELEASE_BARRIER == memory_barrier)
     return _CPU_Atomic_Fetch_add_rel_long(address, value);
   else
     return _CPU_Atomic_Fetch_add_long(address, value);
@@ -179,9 +171,9 @@ RTEMS_INLINE_ROUTINE void _Atomic_Fetch_add_ptr(
   Atomic_Memory_barrier memory_barrier
 )
 {
-  if(ATOMIC_ACQUIRE_BARRIER == memory_barrier)
+  if (ATOMIC_ACQUIRE_BARRIER == memory_barrier)
     return _CPU_Atomic_Fetch_add_acq_ptr(address, value);
-  else if(ATOMIC_RELEASE_BARRIER == memory_barrier)
+  else if (ATOMIC_RELEASE_BARRIER == memory_barrier)
     return _CPU_Atomic_Fetch_add_rel_ptr(address, value);
   else
     return _CPU_Atomic_Fetch_add_ptr(address, value);
@@ -193,9 +185,9 @@ RTEMS_INLINE_ROUTINE void _Atomic_Fetch_add_32(
   Atomic_Memory_barrier memory_barrier
 )
 {
-  if(ATOMIC_ACQUIRE_BARRIER == memory_barrier)
+  if (ATOMIC_ACQUIRE_BARRIER == memory_barrier)
     return _CPU_Atomic_Fetch_add_acq_32(address, value);
-  else if(ATOMIC_RELEASE_BARRIER == memory_barrier)
+  else if (ATOMIC_RELEASE_BARRIER == memory_barrier)
     return _CPU_Atomic_Fetch_add_rel_32(address, value);
   else
     return _CPU_Atomic_Fetch_add_32(address, value);
@@ -207,9 +199,9 @@ RTEMS_INLINE_ROUTINE void _Atomic_Fetch_add_64(
   Atomic_Memory_barrier memory_barrier
 )
 {
-  if(ATOMIC_ACQUIRE_BARRIER == memory_barrier)
+  if (ATOMIC_ACQUIRE_BARRIER == memory_barrier)
     return _CPU_Atomic_Fetch_add_acq_64(address, value);
-  else if(ATOMIC_RELEASE_BARRIER == memory_barrier)
+  else if (ATOMIC_RELEASE_BARRIER == memory_barrier)
     return _CPU_Atomic_Fetch_add_rel_64(address, value);
   else
     return _CPU_Atomic_Fetch_add_64(address, value);
@@ -221,9 +213,9 @@ RTEMS_INLINE_ROUTINE void _Atomic_Fetch_sub_int(
   Atomic_Memory_barrier memory_barrier
 )
 {
-  if(ATOMIC_ACQUIRE_BARRIER == memory_barrier)
+  if (ATOMIC_ACQUIRE_BARRIER == memory_barrier)
     return _CPU_Atomic_Fetch_sub_acq_int(address, value);
-  else if(ATOMIC_RELEASE_BARRIER == memory_barrier)
+  else if (ATOMIC_RELEASE_BARRIER == memory_barrier)
     return _CPU_Atomic_Fetch_sub_rel_int(address, value);
   else
     return _CPU_Atomic_Fetch_sub_int(address, value);
@@ -235,9 +227,9 @@ RTEMS_INLINE_ROUTINE void _Atomic_Fetch_sub_long(
   Atomic_Memory_barrier memory_barrier
 )
 {
-  if(ATOMIC_ACQUIRE_BARRIER == memory_barrier)
+  if (ATOMIC_ACQUIRE_BARRIER == memory_barrier)
     return _CPU_Atomic_Fetch_sub_acq_long(address, value);
-  else if(ATOMIC_RELEASE_BARRIER == memory_barrier)
+  else if (ATOMIC_RELEASE_BARRIER == memory_barrier)
     return _CPU_Atomic_Fetch_sub_rel_long(address, value);
   else
     return _CPU_Atomic_Fetch_sub_long(address, value);
@@ -249,9 +241,9 @@ RTEMS_INLINE_ROUTINE void _Atomic_Fetch_sub_ptr(
   Atomic_Memory_barrier memory_barrier
 )
 {
-  if(ATOMIC_ACQUIRE_BARRIER == memory_barrier)
+  if (ATOMIC_ACQUIRE_BARRIER == memory_barrier)
     return _CPU_Atomic_Fetch_sub_acq_ptr(address, value);
-  else if(ATOMIC_RELEASE_BARRIER == memory_barrier)
+  else if (ATOMIC_RELEASE_BARRIER == memory_barrier)
     return _CPU_Atomic_Fetch_sub_rel_ptr(address, value);
   else
     return _CPU_Atomic_Fetch_sub_ptr(address, value);
@@ -263,9 +255,9 @@ RTEMS_INLINE_ROUTINE void _Atomic_Fetch_sub_32(
   Atomic_Memory_barrier memory_barrier
 )
 {
-  if(ATOMIC_ACQUIRE_BARRIER == memory_barrier)
+  if (ATOMIC_ACQUIRE_BARRIER == memory_barrier)
     return _CPU_Atomic_Fetch_sub_acq_32(address, value);
-  else if(ATOMIC_RELEASE_BARRIER == memory_barrier)
+  else if (ATOMIC_RELEASE_BARRIER == memory_barrier)
     return _CPU_Atomic_Fetch_sub_rel_32(address, value);
   else
     return _CPU_Atomic_Fetch_sub_32(address, value);
@@ -277,9 +269,9 @@ RTEMS_INLINE_ROUTINE void _Atomic_Fetch_sub_64(
   Atomic_Memory_barrier memory_barrier
 )
 {
-  if(ATOMIC_ACQUIRE_BARRIER == memory_barrier)
+  if (ATOMIC_ACQUIRE_BARRIER == memory_barrier)
     return _CPU_Atomic_Fetch_sub_acq_64(address, value);
-  else if(ATOMIC_RELEASE_BARRIER == memory_barrier)
+  else if (ATOMIC_RELEASE_BARRIER == memory_barrier)
     return _CPU_Atomic_Fetch_sub_rel_64(address, value);
   else
     return _CPU_Atomic_Fetch_sub_64(address, value);
@@ -291,9 +283,9 @@ RTEMS_INLINE_ROUTINE void _Atomic_Fetch_or_int(
   Atomic_Memory_barrier memory_barrier
 )
 {
-  if(ATOMIC_ACQUIRE_BARRIER == memory_barrier)
+  if (ATOMIC_ACQUIRE_BARRIER == memory_barrier)
     return _CPU_Atomic_Fetch_or_acq_int(address, value);
-  else if(ATOMIC_RELEASE_BARRIER == memory_barrier)
+  else if (ATOMIC_RELEASE_BARRIER == memory_barrier)
     return _CPU_Atomic_Fetch_or_rel_int(address, value);
   else
     return _CPU_Atomic_Fetch_or_int(address, value);
@@ -305,9 +297,9 @@ RTEMS_INLINE_ROUTINE void _Atomic_Fetch_or_long(
   Atomic_Memory_barrier memory_barrier
 )
 {
-  if(ATOMIC_ACQUIRE_BARRIER == memory_barrier)
+  if (ATOMIC_ACQUIRE_BARRIER == memory_barrier)
     return _CPU_Atomic_Fetch_or_acq_long(address, value);
-  else if(ATOMIC_RELEASE_BARRIER == memory_barrier)
+  else if (ATOMIC_RELEASE_BARRIER == memory_barrier)
     return _CPU_Atomic_Fetch_or_rel_long(address, value);
   else
     return _CPU_Atomic_Fetch_or_long(address, value);
@@ -319,9 +311,9 @@ RTEMS_INLINE_ROUTINE void _Atomic_Fetch_or_ptr(
   Atomic_Memory_barrier memory_barrier
 )
 {
-  if(ATOMIC_ACQUIRE_BARRIER == memory_barrier)
+  if (ATOMIC_ACQUIRE_BARRIER == memory_barrier)
     return _CPU_Atomic_Fetch_or_acq_ptr(address, value);
-  else if(ATOMIC_RELEASE_BARRIER == memory_barrier)
+  else if (ATOMIC_RELEASE_BARRIER == memory_barrier)
     return _CPU_Atomic_Fetch_or_rel_ptr(address, value);
   else
     return _CPU_Atomic_Fetch_or_ptr(address, value);
@@ -333,9 +325,9 @@ RTEMS_INLINE_ROUTINE void _Atomic_Fetch_or_32(
   Atomic_Memory_barrier memory_barrier
 )
 {
-  if(ATOMIC_ACQUIRE_BARRIER == memory_barrier)
+  if (ATOMIC_ACQUIRE_BARRIER == memory_barrier)
     return _CPU_Atomic_Fetch_or_acq_32(address, value);
-  else if(ATOMIC_RELEASE_BARRIER == memory_barrier)
+  else if (ATOMIC_RELEASE_BARRIER == memory_barrier)
     return _CPU_Atomic_Fetch_or_rel_32(address, value);
   else
     return _CPU_Atomic_Fetch_or_32(address, value);
@@ -347,9 +339,9 @@ RTEMS_INLINE_ROUTINE void _Atomic_Fetch_or_64(
   Atomic_Memory_barrier memory_barrier
 )
 {
-  if(ATOMIC_ACQUIRE_BARRIER == memory_barrier)
+  if (ATOMIC_ACQUIRE_BARRIER == memory_barrier)
     return _CPU_Atomic_Fetch_or_acq_64(address, value);
-  else if(ATOMIC_RELEASE_BARRIER == memory_barrier)
+  else if (ATOMIC_RELEASE_BARRIER == memory_barrier)
     return _CPU_Atomic_Fetch_or_rel_64(address, value);
   else
     return _CPU_Atomic_Fetch_or_64(address, value);
@@ -361,9 +353,9 @@ RTEMS_INLINE_ROUTINE void _Atomic_Fetch_and_int(
   Atomic_Memory_barrier memory_barrier
 )
 {
-  if(ATOMIC_ACQUIRE_BARRIER == memory_barrier)
+  if (ATOMIC_ACQUIRE_BARRIER == memory_barrier)
     return _CPU_Atomic_Fetch_and_acq_int(address, value);
-  else if(ATOMIC_RELEASE_BARRIER == memory_barrier)
+  else if (ATOMIC_RELEASE_BARRIER == memory_barrier)
     return _CPU_Atomic_Fetch_and_rel_int(address, value);
   else
     return _CPU_Atomic_Fetch_and_int(address, value);
@@ -375,9 +367,9 @@ RTEMS_INLINE_ROUTINE void _Atomic_Fetch_and_long(
   Atomic_Memory_barrier memory_barrier
 )
 {
-  if(ATOMIC_ACQUIRE_BARRIER == memory_barrier)
+  if (ATOMIC_ACQUIRE_BARRIER == memory_barrier)
     return _CPU_Atomic_Fetch_and_acq_long(address, value);
-  else if(ATOMIC_RELEASE_BARRIER == memory_barrier)
+  else if (ATOMIC_RELEASE_BARRIER == memory_barrier)
     return _CPU_Atomic_Fetch_and_rel_long(address, value);
   else
     return _CPU_Atomic_Fetch_and_long(address, value);
@@ -389,9 +381,9 @@ RTEMS_INLINE_ROUTINE void _Atomic_Fetch_and_ptr(
   Atomic_Memory_barrier memory_barrier
 )
 {
-  if(ATOMIC_ACQUIRE_BARRIER == memory_barrier)
+  if (ATOMIC_ACQUIRE_BARRIER == memory_barrier)
     return _CPU_Atomic_Fetch_and_acq_ptr(address, value);
-  else if(ATOMIC_RELEASE_BARRIER == memory_barrier)
+  else if (ATOMIC_RELEASE_BARRIER == memory_barrier)
     return _CPU_Atomic_Fetch_and_rel_ptr(address, value);
   else
     return _CPU_Atomic_Fetch_and_ptr(address, value);
@@ -403,9 +395,9 @@ RTEMS_INLINE_ROUTINE void _Atomic_Fetch_and_32(
   Atomic_Memory_barrier memory_barrier
 )
 {
-  if(ATOMIC_ACQUIRE_BARRIER == memory_barrier)
+  if (ATOMIC_ACQUIRE_BARRIER == memory_barrier)
     return _CPU_Atomic_Fetch_and_acq_32(address, value);
-  else if(ATOMIC_RELEASE_BARRIER == memory_barrier)
+  else if (ATOMIC_RELEASE_BARRIER == memory_barrier)
     return _CPU_Atomic_Fetch_and_rel_32(address, value);
   else
     return _CPU_Atomic_Fetch_and_32(address, value);
@@ -417,9 +409,9 @@ RTEMS_INLINE_ROUTINE void _Atomic_Fetch_and_64(
   Atomic_Memory_barrier memory_barrier
 )
 {
-  if(ATOMIC_ACQUIRE_BARRIER == memory_barrier)
+  if (ATOMIC_ACQUIRE_BARRIER == memory_barrier)
     return _CPU_Atomic_Fetch_and_acq_64(address, value);
-  else if(ATOMIC_RELEASE_BARRIER == memory_barrier)
+  else if (ATOMIC_RELEASE_BARRIER == memory_barrier)
     return _CPU_Atomic_Fetch_and_rel_64(address, value);
   else
     return _CPU_Atomic_Fetch_and_64(address, value);
@@ -432,9 +424,9 @@ RTEMS_INLINE_ROUTINE int _Atomic_Compare_exchange_int(
   Atomic_Memory_barrier memory_barrier
 )
 {
-  if(ATOMIC_ACQUIRE_BARRIER == memory_barrier)
+  if (ATOMIC_ACQUIRE_BARRIER == memory_barrier)
     return _CPU_Atomic_Compare_exchange_acq_int(address, old_value, new_value);
-  else if(ATOMIC_RELEASE_BARRIER == memory_barrier)
+  else if (ATOMIC_RELEASE_BARRIER == memory_barrier)
     return _CPU_Atomic_Compare_exchange_rel_int(address, old_value, new_value);
   else
     return _CPU_Atomic_Compare_exchange_int(address, old_value, new_value);
@@ -447,9 +439,9 @@ RTEMS_INLINE_ROUTINE int _Atomic_Compare_exchange_long(
   Atomic_Memory_barrier memory_barrier
 )
 {
-  if(ATOMIC_ACQUIRE_BARRIER == memory_barrier)
+  if (ATOMIC_ACQUIRE_BARRIER == memory_barrier)
     return _CPU_Atomic_Compare_exchange_acq_long(address, old_value, new_value);
-  else if(ATOMIC_RELEASE_BARRIER == memory_barrier)
+  else if (ATOMIC_RELEASE_BARRIER == memory_barrier)
     return _CPU_Atomic_Compare_exchange_rel_long(address, old_value, new_value);
   else
     return _CPU_Atomic_Compare_exchange_long(address, old_value, new_value);
@@ -462,9 +454,9 @@ RTEMS_INLINE_ROUTINE int _Atomic_Compare_exchange_ptr(
   Atomic_Memory_barrier memory_barrier
 )
 {
-  if(ATOMIC_ACQUIRE_BARRIER == memory_barrier)
+  if (ATOMIC_ACQUIRE_BARRIER == memory_barrier)
     return _CPU_Atomic_Compare_exchange_acq_ptr(address, old_value, new_value);
-  else if(ATOMIC_RELEASE_BARRIER == memory_barrier)
+  else if (ATOMIC_RELEASE_BARRIER == memory_barrier)
     return _CPU_Atomic_Compare_exchange_rel_ptr(address, old_value, new_value);
   else
     return _CPU_Atomic_Compare_exchange_ptr(address, old_value, new_value);
@@ -477,9 +469,9 @@ RTEMS_INLINE_ROUTINE int _Atomic_Compare_exchange_32(
   Atomic_Memory_barrier memory_barrier
 )
 {
-  if(ATOMIC_ACQUIRE_BARRIER == memory_barrier)
+  if (ATOMIC_ACQUIRE_BARRIER == memory_barrier)
     return _CPU_Atomic_Compare_exchange_acq_32(address, old_value, new_value);
-  else if(ATOMIC_RELEASE_BARRIER == memory_barrier)
+  else if (ATOMIC_RELEASE_BARRIER == memory_barrier)
     return _CPU_Atomic_Compare_exchange_rel_32(address, old_value, new_value);
   else
     return _CPU_Atomic_Compare_exchange_32(address, old_value, new_value);
@@ -492,9 +484,9 @@ RTEMS_INLINE_ROUTINE int _Atomic_Compare_exchange_64(
   Atomic_Memory_barrier memory_barrier
 )
 {
-  if(ATOMIC_ACQUIRE_BARRIER == memory_barrier)
+  if (ATOMIC_ACQUIRE_BARRIER == memory_barrier)
     return _CPU_Atomic_Compare_exchange_acq_64(address, old_value, new_value);
-  else if(ATOMIC_RELEASE_BARRIER == memory_barrier)
+  else if (ATOMIC_RELEASE_BARRIER == memory_barrier)
     return _CPU_Atomic_Compare_exchange_rel_64(address, old_value, new_value);
   else
     return _CPU_Atomic_Compare_exchange_64(address, old_value, new_value);
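
For context, these score-level wrappers dispatch on the barrier argument
at the call site; a hypothetical caller, using only the enumerators
visible in this patch, would look like:

#include <rtems/score/atomic.h>

volatile Atomic_Int flag;

/* Publish data, then release-store the flag. */
void producer(void)
{
  _Atomic_Store_int(&flag, 1, ATOMIC_RELEASE_BARRIER);
}

/* Acquire-load the flag before consuming the data. */
Atomic_Int consumer(void)
{
  return _Atomic_Load_int(&flag, ATOMIC_ACQUIRE_BARRIER);
}
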
-- 
1.7.1