[rtems commit] bsps/powerpc: Fix AltiVec context switch

Sebastian Huber sebh at rtems.org
Tue Jul 19 06:15:09 UTC 2016


Module:    rtems
Branch:    4.11
Commit:    dc0f537b27ec5ca221ea67fde1d262c26ca3ec2e
Changeset: http://git.rtems.org/rtems/commit/?id=dc0f537b27ec5ca221ea67fde1d262c26ca3ec2e

Author:    Sebastian Huber <sebastian.huber at embedded-brains.de>
Date:      Tue Jul 19 06:43:10 2016 +0200

bsps/powerpc: Fix AltiVec context switch

Properly pass the stack-aligned context to _CPU_Context_switch_altivec(),
since _CPU_altivec_ctxt_off is defined relative to the ppc_context structure.

Update #2761.

---

 c/src/lib/libcpu/powerpc/mpc6xx/altivec/vec_sup.c |   6 ++
 c/src/lib/libcpu/powerpc/new-exceptions/cpu_asm.S | 124 +++++++++++-----------
 2 files changed, 68 insertions(+), 62 deletions(-)

diff --git a/c/src/lib/libcpu/powerpc/mpc6xx/altivec/vec_sup.c b/c/src/lib/libcpu/powerpc/mpc6xx/altivec/vec_sup.c
index 07b9fd2..141779c 100644
--- a/c/src/lib/libcpu/powerpc/mpc6xx/altivec/vec_sup.c
+++ b/c/src/lib/libcpu/powerpc/mpc6xx/altivec/vec_sup.c
@@ -234,6 +234,12 @@ unsigned          pvr;
 	 * Therefore, we compute it here and store it in memory...
 	 */
 	_CPU_altivec_ctxt_off  = offsetof(ppc_context, altivec);
+
+	/*
+	 * See ppc_get_context() and PPC_CONTEXT_OFFSET_GPR1
+	 */
+	_CPU_altivec_ctxt_off += PPC_DEFAULT_CACHE_LINE_SIZE;
+
 	/* 
 	 * Add space possibly needed for alignment
 	 */
diff --git a/c/src/lib/libcpu/powerpc/new-exceptions/cpu_asm.S b/c/src/lib/libcpu/powerpc/new-exceptions/cpu_asm.S
index 5d8c70d..ff8651a 100644
--- a/c/src/lib/libcpu/powerpc/new-exceptions/cpu_asm.S
+++ b/c/src/lib/libcpu/powerpc/new-exceptions/cpu_asm.S
@@ -23,7 +23,7 @@
  *  COPYRIGHT (c) 1989-1997.
  *  On-Line Applications Research Corporation (OAR).
  *
- *  Copyright (c) 2011-2015 embedded brains GmbH
+ *  Copyright (c) 2011, 2016 embedded brains GmbH
  *
  *  The license and distribution terms for this file may in
  *  the file LICENSE in this distribution or at
@@ -255,7 +255,7 @@ PROC (_CPU_Context_switch):
 
 	/* Align to a cache line */
 	clrrwi	r3, r3, PPC_DEFAULT_CACHE_LINE_POWER
-	clrrwi	r5, r4, PPC_DEFAULT_CACHE_LINE_POWER
+	clrrwi	r4, r4, PPC_DEFAULT_CACHE_LINE_POWER
 
 	DATA_CACHE_ZERO_AND_TOUCH(r10, PPC_CONTEXT_CACHE_LINE_0)
 
@@ -410,7 +410,7 @@ PROC (_CPU_Context_switch):
 check_is_executing:
 
 	/* Check the is executing indicator of the heir context */
-	addi	r6, r5, PPC_CONTEXT_OFFSET_IS_EXECUTING
+	addi	r6, r4, PPC_CONTEXT_OFFSET_IS_EXECUTING
 	lwarx	r7, r0, r6
 	cmpwi	r7, 0
 	bne	check_thread_dispatch_necessary
@@ -422,96 +422,96 @@ check_is_executing:
 	isync
 #endif
 
-	/* Restore context from r5 */
+	/* Restore context from r4 */
 restore_context:
 
 #if defined(__ALTIVEC__) && !defined(PPC_MULTILIB_ALTIVEC)
-	mr	r14, r5
+	mr	r14, r4
 	.extern	_CPU_Context_switch_altivec
 	bl	_CPU_Context_switch_altivec
-	mr	r5, r14
+	mr	r4, r14
 #endif
 
-	lwz	r1, PPC_CONTEXT_OFFSET_GPR1(r5)
-	lwz	r6, PPC_CONTEXT_OFFSET_MSR(r5)
-	lwz	r7, PPC_CONTEXT_OFFSET_LR(r5)
-	lwz	r8, PPC_CONTEXT_OFFSET_CR(r5)
+	lwz	r1, PPC_CONTEXT_OFFSET_GPR1(r4)
+	lwz	r6, PPC_CONTEXT_OFFSET_MSR(r4)
+	lwz	r7, PPC_CONTEXT_OFFSET_LR(r4)
+	lwz	r8, PPC_CONTEXT_OFFSET_CR(r4)
 
-	PPC_GPR_LOAD	r14, PPC_CONTEXT_OFFSET_GPR14(r5)
-	PPC_GPR_LOAD	r15, PPC_CONTEXT_OFFSET_GPR15(r5)
+	PPC_GPR_LOAD	r14, PPC_CONTEXT_OFFSET_GPR14(r4)
+	PPC_GPR_LOAD	r15, PPC_CONTEXT_OFFSET_GPR15(r4)
 
 	DATA_CACHE_TOUCH(r0, r1)
 
-	PPC_GPR_LOAD	r16, PPC_CONTEXT_OFFSET_GPR16(r5)
-	PPC_GPR_LOAD	r17, PPC_CONTEXT_OFFSET_GPR17(r5)
-	PPC_GPR_LOAD	r18, PPC_CONTEXT_OFFSET_GPR18(r5)
-	PPC_GPR_LOAD	r19, PPC_CONTEXT_OFFSET_GPR19(r5)
+	PPC_GPR_LOAD	r16, PPC_CONTEXT_OFFSET_GPR16(r4)
+	PPC_GPR_LOAD	r17, PPC_CONTEXT_OFFSET_GPR17(r4)
+	PPC_GPR_LOAD	r18, PPC_CONTEXT_OFFSET_GPR18(r4)
+	PPC_GPR_LOAD	r19, PPC_CONTEXT_OFFSET_GPR19(r4)
 
-	PPC_GPR_LOAD	r20, PPC_CONTEXT_OFFSET_GPR20(r5)
-	PPC_GPR_LOAD	r21, PPC_CONTEXT_OFFSET_GPR21(r5)
-	PPC_GPR_LOAD	r22, PPC_CONTEXT_OFFSET_GPR22(r5)
-	PPC_GPR_LOAD	r23, PPC_CONTEXT_OFFSET_GPR23(r5)
+	PPC_GPR_LOAD	r20, PPC_CONTEXT_OFFSET_GPR20(r4)
+	PPC_GPR_LOAD	r21, PPC_CONTEXT_OFFSET_GPR21(r4)
+	PPC_GPR_LOAD	r22, PPC_CONTEXT_OFFSET_GPR22(r4)
+	PPC_GPR_LOAD	r23, PPC_CONTEXT_OFFSET_GPR23(r4)
 
-	PPC_GPR_LOAD	r24, PPC_CONTEXT_OFFSET_GPR24(r5)
-	PPC_GPR_LOAD	r25, PPC_CONTEXT_OFFSET_GPR25(r5)
-	PPC_GPR_LOAD	r26, PPC_CONTEXT_OFFSET_GPR26(r5)
-	PPC_GPR_LOAD	r27, PPC_CONTEXT_OFFSET_GPR27(r5)
+	PPC_GPR_LOAD	r24, PPC_CONTEXT_OFFSET_GPR24(r4)
+	PPC_GPR_LOAD	r25, PPC_CONTEXT_OFFSET_GPR25(r4)
+	PPC_GPR_LOAD	r26, PPC_CONTEXT_OFFSET_GPR26(r4)
+	PPC_GPR_LOAD	r27, PPC_CONTEXT_OFFSET_GPR27(r4)
 
-	PPC_GPR_LOAD	r28, PPC_CONTEXT_OFFSET_GPR28(r5)
-	PPC_GPR_LOAD	r29, PPC_CONTEXT_OFFSET_GPR29(r5)
-	PPC_GPR_LOAD	r30, PPC_CONTEXT_OFFSET_GPR30(r5)
-	PPC_GPR_LOAD	r31, PPC_CONTEXT_OFFSET_GPR31(r5)
+	PPC_GPR_LOAD	r28, PPC_CONTEXT_OFFSET_GPR28(r4)
+	PPC_GPR_LOAD	r29, PPC_CONTEXT_OFFSET_GPR29(r4)
+	PPC_GPR_LOAD	r30, PPC_CONTEXT_OFFSET_GPR30(r4)
+	PPC_GPR_LOAD	r31, PPC_CONTEXT_OFFSET_GPR31(r4)
 
-	lwz	r2, PPC_CONTEXT_OFFSET_GPR2(r5)
+	lwz	r2, PPC_CONTEXT_OFFSET_GPR2(r4)
 
 #ifdef PPC_MULTILIB_ALTIVEC
 	li	r9, PPC_CONTEXT_OFFSET_V20
-	lvx	v20, r5, r9
+	lvx	v20, r4, r9
 	li	r9, PPC_CONTEXT_OFFSET_V21
-	lvx	v21, r5, r9
+	lvx	v21, r4, r9
 	li	r9, PPC_CONTEXT_OFFSET_V22
-	lvx	v22, r5, r9
+	lvx	v22, r4, r9
 	li	r9, PPC_CONTEXT_OFFSET_V23
-	lvx	v23, r5, r9
+	lvx	v23, r4, r9
 	li	r9, PPC_CONTEXT_OFFSET_V24
-	lvx	v24, r5, r9
+	lvx	v24, r4, r9
 	li	r9, PPC_CONTEXT_OFFSET_V25
-	lvx	v25, r5, r9
+	lvx	v25, r4, r9
 	li	r9, PPC_CONTEXT_OFFSET_V26
-	lvx	v26, r5, r9
+	lvx	v26, r4, r9
 	li	r9, PPC_CONTEXT_OFFSET_V27
-	lvx	v27, r5, r9
+	lvx	v27, r4, r9
 	li	r9, PPC_CONTEXT_OFFSET_V28
-	lvx	v28, r5, r9
+	lvx	v28, r4, r9
 	li	r9, PPC_CONTEXT_OFFSET_V29
-	lvx	v29, r5, r9
+	lvx	v29, r4, r9
 	li	r9, PPC_CONTEXT_OFFSET_V30
-	lvx	v30, r5, r9
+	lvx	v30, r4, r9
 	li	r9, PPC_CONTEXT_OFFSET_V31
-	lvx	v31, r5, r9
-	lwz	r9, PPC_CONTEXT_OFFSET_VRSAVE(r5)
+	lvx	v31, r4, r9
+	lwz	r9, PPC_CONTEXT_OFFSET_VRSAVE(r4)
 	mtvrsave	r9
 #endif
 
 #ifdef PPC_MULTILIB_FPU
-	lfd	f14, PPC_CONTEXT_OFFSET_F14(r5)
-	lfd	f15, PPC_CONTEXT_OFFSET_F15(r5)
-	lfd	f16, PPC_CONTEXT_OFFSET_F16(r5)
-	lfd	f17, PPC_CONTEXT_OFFSET_F17(r5)
-	lfd	f18, PPC_CONTEXT_OFFSET_F18(r5)
-	lfd	f19, PPC_CONTEXT_OFFSET_F19(r5)
-	lfd	f20, PPC_CONTEXT_OFFSET_F20(r5)
-	lfd	f21, PPC_CONTEXT_OFFSET_F21(r5)
-	lfd	f22, PPC_CONTEXT_OFFSET_F22(r5)
-	lfd	f23, PPC_CONTEXT_OFFSET_F23(r5)
-	lfd	f24, PPC_CONTEXT_OFFSET_F24(r5)
-	lfd	f25, PPC_CONTEXT_OFFSET_F25(r5)
-	lfd	f26, PPC_CONTEXT_OFFSET_F26(r5)
-	lfd	f27, PPC_CONTEXT_OFFSET_F27(r5)
-	lfd	f28, PPC_CONTEXT_OFFSET_F28(r5)
-	lfd	f29, PPC_CONTEXT_OFFSET_F29(r5)
-	lfd	f30, PPC_CONTEXT_OFFSET_F30(r5)
-	lfd	f31, PPC_CONTEXT_OFFSET_F31(r5)
+	lfd	f14, PPC_CONTEXT_OFFSET_F14(r4)
+	lfd	f15, PPC_CONTEXT_OFFSET_F15(r4)
+	lfd	f16, PPC_CONTEXT_OFFSET_F16(r4)
+	lfd	f17, PPC_CONTEXT_OFFSET_F17(r4)
+	lfd	f18, PPC_CONTEXT_OFFSET_F18(r4)
+	lfd	f19, PPC_CONTEXT_OFFSET_F19(r4)
+	lfd	f20, PPC_CONTEXT_OFFSET_F20(r4)
+	lfd	f21, PPC_CONTEXT_OFFSET_F21(r4)
+	lfd	f22, PPC_CONTEXT_OFFSET_F22(r4)
+	lfd	f23, PPC_CONTEXT_OFFSET_F23(r4)
+	lfd	f24, PPC_CONTEXT_OFFSET_F24(r4)
+	lfd	f25, PPC_CONTEXT_OFFSET_F25(r4)
+	lfd	f26, PPC_CONTEXT_OFFSET_F26(r4)
+	lfd	f27, PPC_CONTEXT_OFFSET_F27(r4)
+	lfd	f28, PPC_CONTEXT_OFFSET_F28(r4)
+	lfd	f29, PPC_CONTEXT_OFFSET_F29(r4)
+	lfd	f30, PPC_CONTEXT_OFFSET_F30(r4)
+	lfd	f31, PPC_CONTEXT_OFFSET_F31(r4)
 #endif
 
 	mtcr	r8
@@ -527,7 +527,7 @@ restore_context:
 	PUBLIC_PROC (_CPU_Context_restore)
 PROC (_CPU_Context_restore):
 	/* Align to a cache line */
-	clrrwi	r5, r3, PPC_DEFAULT_CACHE_LINE_POWER
+	clrrwi	r4, r3, PPC_DEFAULT_CACHE_LINE_POWER
 
 #if defined(__ALTIVEC__) && !defined(PPC_MULTILIB_ALTIVEC)
 	li	r3, 0
@@ -559,7 +559,7 @@ check_thread_dispatch_necessary:
 	/* Calculate the heir context pointer */
 	sub	r7, r4, r7
 	add	r4, r8, r7
-	clrrwi	r5, r4, PPC_DEFAULT_CACHE_LINE_POWER
+	clrrwi	r4, r4, PPC_DEFAULT_CACHE_LINE_POWER
 
 	/* Update the executing */
 	stw	r8, PER_CPU_OFFSET_EXECUTING(r6)



More information about the vc mailing list