| 1 | /* | 
| 2 |  * This file is part of the MicroPython project, http://micropython.org/ | 
| 3 |  * | 
| 4 |  * The MIT License (MIT) | 
| 5 |  * | 
| 6 |  * Copyright (c) 2013-2017 Damien P. George | 
| 7 |  * | 
| 8 |  * Permission is hereby granted, free of charge, to any person obtaining a copy | 
| 9 |  * of this software and associated documentation files (the "Software"), to deal | 
| 10 |  * in the Software without restriction, including without limitation the rights | 
| 11 |  * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell | 
| 12 |  * copies of the Software, and to permit persons to whom the Software is | 
| 13 |  * furnished to do so, subject to the following conditions: | 
| 14 |  * | 
| 15 |  * The above copyright notice and this permission notice shall be included in | 
| 16 |  * all copies or substantial portions of the Software. | 
| 17 |  * | 
| 18 |  * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR | 
| 19 |  * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, | 
| 20 |  * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE | 
| 21 |  * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER | 
| 22 |  * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, | 
| 23 |  * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN | 
| 24 |  * THE SOFTWARE. | 
| 25 |  */ | 
| 26 |  | 
| 27 | #include "py/mpstate.h" | 
| 28 |  | 
| 29 | #if MICROPY_NLR_X64 | 
| 30 |  | 
| 31 | #undef nlr_push | 
| 32 |  | 
| 33 | // x86-64 callee-save registers are: | 
| 34 | //  rbx, rbp, rsp, r12, r13, r14, r15 | 
| 35 |  | 
| 36 | __attribute__((used)) unsigned int nlr_push_tail(nlr_buf_t *nlr); | 
| 37 |  | 
// Save the current execution state into `nlr` so that a later nlr_jump() can
// resume here.  The callee-save registers and the return address are stored
// with inline asm, then control tail-jumps into nlr_push_tail() to finish
// (presumably linking the buffer into the NLR chain) in C.  Returns 0 on this
// initial call; when nlr_jump() later "returns" through the saved state, the
// caller observes a second return from nlr_push with value 1.
// NOTE(review): stores start at offset 16, so nlr_buf_t presumably begins
// with two pointer-sized fields before the register save area -- confirm
// against py/nlr.h.
unsigned int nlr_push(nlr_buf_t *nlr) {
    (void)nlr; // not referenced from C; the asm reads it from the argument register

    #if MICROPY_NLR_OS_WINDOWS

    // Windows x64 ABI: the first argument (nlr) arrives in %rcx, and %rdi/%rsi
    // are additional callee-save registers, so they must be saved here too.
    __asm volatile (
        "movq   (%rsp), %rax        \n"  // load return %rip
        "movq   %rax, 16(%rcx)      \n"  // store %rip into nlr_buf
        "movq   %rbp, 24(%rcx)      \n"  // store %rbp into nlr_buf
        "movq   %rsp, 32(%rcx)      \n"  // store %rsp into nlr_buf
        "movq   %rbx, 40(%rcx)      \n"  // store %rbx into nlr_buf
        "movq   %r12, 48(%rcx)      \n"  // store %r12 into nlr_buf
        "movq   %r13, 56(%rcx)      \n"  // store %r13 into nlr_buf
        "movq   %r14, 64(%rcx)      \n"  // store %r14 into nlr_buf
        "movq   %r15, 72(%rcx)      \n"  // store %r15 into nlr_buf
        "movq   %rdi, 80(%rcx)      \n"  // store %rdi into nlr_buf
        "movq   %rsi, 88(%rcx)      \n"  // store %rsi into nlr_buf
        "jmp    nlr_push_tail       \n"  // do the rest in C
        );

    #else

    // System V x64 ABI: the first argument (nlr) arrives in %rdi.
    __asm volatile (
        #if defined(__APPLE__) || defined(__MACH__)
        // Apple's compiler emits a frame-pointer push in the prologue even for
        // this function; pop it so (%rsp) is the return address, as the code
        // below expects.
        "pop    %rbp                \n"  // undo function's prelude
        #endif
        "movq   (%rsp), %rax        \n"  // load return %rip
        "movq   %rax, 16(%rdi)      \n"  // store %rip into nlr_buf
        "movq   %rbp, 24(%rdi)      \n"  // store %rbp into nlr_buf
        "movq   %rsp, 32(%rdi)      \n"  // store %rsp into nlr_buf
        "movq   %rbx, 40(%rdi)      \n"  // store %rbx into nlr_buf
        "movq   %r12, 48(%rdi)      \n"  // store %r12 into nlr_buf
        "movq   %r13, 56(%rdi)      \n"  // store %r13 into nlr_buf
        "movq   %r14, 64(%rdi)      \n"  // store %r14 into nlr_buf
        "movq   %r15, 72(%rdi)      \n"  // store %r15 into nlr_buf
        #if defined(__APPLE__) || defined(__MACH__)
        // Mach-O C symbols carry a leading underscore, hence _nlr_push_tail.
        "jmp    _nlr_push_tail      \n"  // do the rest in C
        #else
        "jmp    nlr_push_tail       \n"  // do the rest in C
        #endif
        );

    #endif

    return 0; // unreachable (the asm jumps away); needed to silence compiler warning
}
| 84 |  | 
// Perform the non-local jump: restore the register state saved by the
// corresponding nlr_push() and resume execution just after that nlr_push
// call, making it appear to return a second time with value 1 (the direct
// return path yields 0).  Never returns to this function's caller.
NORETURN void nlr_jump(void *val) {
    // MP_NLR_JUMP_HEAD presumably pops the innermost nlr_buf off the NLR
    // chain into `top` and records `val` in it -- see py/mpstate.h.
    MP_NLR_JUMP_HEAD(val, top)

    __asm volatile (
        "movq   %0, %%rcx           \n"  // %rcx points to nlr_buf
        #if MICROPY_NLR_OS_WINDOWS
        // %rdi/%rsi are callee-save in the Windows x64 ABI, so nlr_push
        // saved them; restore them here.
        "movq   88(%%rcx), %%rsi    \n"  // load saved %rsi
        "movq   80(%%rcx), %%rdi    \n"  // load saved %rdi
        #endif
        "movq   72(%%rcx), %%r15    \n"  // load saved %r15
        "movq   64(%%rcx), %%r14    \n"  // load saved %r14
        "movq   56(%%rcx), %%r13    \n"  // load saved %r13
        "movq   48(%%rcx), %%r12    \n"  // load saved %r12
        "movq   40(%%rcx), %%rbx    \n"  // load saved %rbx
        "movq   32(%%rcx), %%rsp    \n"  // load saved %rsp (switches back to pusher's stack)
        "movq   24(%%rcx), %%rbp    \n"  // load saved %rbp
        "movq   16(%%rcx), %%rax    \n"  // load saved %rip
        "movq   %%rax, (%%rsp)      \n"  // store saved %rip to stack as the return address
        "xorq   %%rax, %%rax        \n"  // clear return register
        "inc    %%al                \n"  // %rax = 1: signals the non-local return to nlr_push's caller
        "ret                        \n"  // "return" from the original nlr_push call
        :                           // output operands
        : "r"  (top)                 // input operands
        :                           // clobbered registers (none declared; control never falls through this asm)
        );

    MP_UNREACHABLE
}
| 113 |  | 
| 114 | #endif // MICROPY_NLR_X64 | 
| 115 |  |