fix asm_switch_context() for the x86 and x86_64 archs
author    arighi <arighi@38d2e660-2303-0410-9eaa-f027e97ec537>
Wed, 17 Mar 2010 11:53:05 +0000 (11:53 +0000)
committer arighi <arighi@38d2e660-2303-0410-9eaa-f027e97ec537>
Wed, 17 Mar 2010 11:53:05 +0000 (11:53 +0000)
Save and restore all the callee-clobbered registers except the return
registers, as required by the x86 and x86_64 ABIs.
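
For reference, the primitive keeps the prototype documented in switch_x86_64.s:
void asm_switch_context(void **new_sp, void **save_sp). A minimal sketch of how
a scheduler might drive it, assuming a hypothetical process descriptor (the
names below are illustrative, not the actual BeRTOS scheduler code):

    /* Prototype as documented in bertos/emul/switch_x86_64.s. */
    void asm_switch_context(void **new_sp, void **save_sp);

    /* Hypothetical process descriptor: only the saved stack pointer matters here. */
    struct proc
    {
        void *sp;   /* Stack pointer stored/loaded by asm_switch_context(). */
    };

    /* Hypothetical scheduler step: save the outgoing context through &prev->sp,
     * then resume the context previously saved through &next->sp. */
    static void switch_to(struct proc *next, struct proc *prev)
    {
        asm_switch_context(&next->sp, &prev->sp);
    }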

git-svn-id: https://src.develer.com/svnoss/bertos/trunk@3216 38d2e660-2303-0410-9eaa-f027e97ec537

bertos/cpu/frame.h
bertos/emul/switch_i386.S
bertos/emul/switch_x86_64.s

diff --git a/bertos/cpu/frame.h b/bertos/cpu/frame.h
index dbb20a1ec67aa6692d504f2fb83b3d9e57024887..90995a8a7c13c652d859532852a68487968b53aa 100644
--- a/bertos/cpu/frame.h
+++ b/bertos/cpu/frame.h
@@ -53,7 +53,7 @@
 #if CPU_X86
        #if CPU_X86_32
 
-               #define CPU_SAVED_REGS_CNT      4
+               #define CPU_SAVED_REGS_CNT      2
                #define CPU_STACK_GROWS_UPWARD  0
                #define CPU_SP_ON_EMPTY_SLOT    0
 
diff --git a/bertos/emul/switch_i386.S b/bertos/emul/switch_i386.S
index b605c15edde69ee29af960b6f9ce9bdcf4f76fe2..abcb487b61d64e5e79f3491741b192bd90de1c45 100644
--- a/bertos/emul/switch_i386.S
+++ b/bertos/emul/switch_i386.S
  * \author Bernie Innocenti <bernie@codewiz.org>
  *
  * \brief i386 context switch
+ *
+ * x86 function calling convention:
+ * --------------------------------
+ *  arguments         | callee-saved        | extra caller-saved | return
+ * [callee-clobbered] |                     | [callee-clobbered] |
+ * -------------------------------------------------------------------------
+ * eax edx ecx        | ebx edi esi ebp [*] | <none>             | eax, edx
+ *
+ *  [*]  In the frame-pointer case, ebp must hold the base address of the
+ *       current stack frame.
+ *
+ * asm_switch_context() can be considered as a normal function call, so we need
+ * to save all the callee-clobbered registers minus the return registers.
  */
 
 #ifdef __APPLE__
 .globl SWITCH_CONTEXT
 SWITCH_CONTEXT:
        pushl   %ebp
-       pushl   %edi
-       pushl   %esi
-       pushl   %ebx
-       movl    0x24(%esp),%ebp         /* ebp = save_sp */
+       pushl   %ecx
+       movl    0x10(%esp),%ebp         /* ebp = save_sp */
        movl    %esp,(%ebp)             /* *save_sp = esp */
-       movl    0x20(%esp),%ebp         /* ebp = new_sp */
+       movl    0x0c(%esp),%ebp         /* ebp = new_sp */
        movl    (%ebp),%esp             /* esp = *new_sp */
-       popl    %ebx
-       popl    %esi
-       popl    %edi
+       popl    %ecx
        popl    %ebp
        ret
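
To make the new 0x0c/0x10 argument offsets easy to verify, here is an
illustrative C view (not part of the commit) of the stack seen by the i386
asm_switch_context() right after its two pushes: the call instruction left the
return address on top of the two arguments, and the function then pushed %ebp
and %ecx.

    #include <stdint.h>

    /* Illustrative only: the i386 stack frame after "pushl %ebp; pushl %ecx",
     * lowest address (the current %esp) first; all slots are 4 bytes wide. */
    struct i386_switch_frame
    {
        uint32_t ecx;        /* 0x00: pushed by asm_switch_context() */
        uint32_t ebp;        /* 0x04: pushed by asm_switch_context() */
        uint32_t ret_addr;   /* 0x08: pushed by the call instruction */
        void   **new_sp;     /* 0x0c: first argument */
        void   **save_sp;    /* 0x10: second argument */
    };
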
diff --git a/bertos/emul/switch_x86_64.s b/bertos/emul/switch_x86_64.s
index de0f8f65ad4315cb77974ddf67718233fed2c8de..df12bed17dffe6f3e1fc3d3a9e5ebc7ab9d7181f 100644
--- a/bertos/emul/switch_x86_64.s
+++ b/bertos/emul/switch_x86_64.s
  * \version $Id$
  * \author Bernie Innocenti <bernie@codewiz.org>
  *
- * \brief i386 context switch
+ * \brief x86_64 context switch
+ *
+ * x86_64 function calling convention:
+ * -----------------------------------
+ *  arguments           |  callee-saved      | extra caller-saved | return
+ * [callee-clobbered]   |                    | [callee-clobbered] |
+ * -------------------------------------------------------------------------
+ * rdi rsi rdx rcx r8-9 | rbx rbp [*] r12-15 | r10-11             | rax, rdx
+ *
+ *  [*]  In the frame-pointer case, rbp must hold the base address of the
+ *       current stack frame.
+ *
+ * asm_switch_context() can be considered as a normal function call, so we need
+ * to save all the callee-clobbered registers minus the return registers.
  */
 
 /* void asm_switch_context(void **new_sp [%rdi], void **save_sp [%rsi]) */
@@ -42,7 +55,7 @@ asm_switch_context:
        pushq   %rbp
        pushq   %rdi
        pushq   %rsi
-       pushq   %rbx
+       pushq   %rcx
        pushq   %r8
        pushq   %r9
        pushq   %r10
@@ -53,7 +66,7 @@ asm_switch_context:
        popq    %r10
        popq    %r9
        popq    %r8
-       popq    %rbx
+       popq    %rcx
        popq    %rsi
        popq    %rdi
        popq    %rbp
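
The CPU_SAVED_REGS_CNT update in frame.h has to stay in sync with these pushes,
because a freshly created stack must look exactly like one that
asm_switch_context() just saved: CPU_SAVED_REGS_CNT dummy register slots topped
by a "return address" pointing at the thread entry point. A minimal i386 sketch
of that idea (the helper name and the no-argument entry function are
assumptions, not the actual BeRTOS stack-initialization code):

    #include <stdint.h>

    #define CPU_SAVED_REGS_CNT  2   /* bertos/cpu/frame.h, CPU_X86_32, after this change */

    /* Hypothetical helper: lay out a new, downward-growing i386 stack so that
     * the first asm_switch_context() into it pops CPU_SAVED_REGS_CNT dummy
     * words (%ecx and %ebp) and then "returns" into the thread entry point. */
    static void *init_switch_stack(void *stack_top, void (*entry)(void))
    {
        uint32_t *sp = (uint32_t *)stack_top;

        *--sp = (uint32_t)(uintptr_t)entry;     /* Return address slot: thread entry. */
        for (int i = 0; i < CPU_SAVED_REGS_CNT; i++)
            *--sp = 0;                          /* Dummy values for the saved registers. */

        return sp;                              /* Value to store as the process stack pointer. */
    }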