#include "genesis/thread.h"
/* NOTE(review): many lines in this chunk carry unified-diff "-"/"+"
 * markers; this file appears to be a patch fragment rather than plain
 * assembler source.  Confirm before attempting to assemble directly. */
/* Minimize conditionalization for different OS naming schemes. */
-#if defined __linux__ || defined __FreeBSD__ /* (but *not* OpenBSD) */
+#if defined __linux__ || defined __FreeBSD__ || defined __OpenBSD__ || defined __NetBSD__ || defined __sun
#define GNAME(var) var /* ELF-style targets: C symbol names are unprefixed */
#else
#define GNAME(var) _##var /* other ABIs (e.g. Mach-O): prepend "_" */
#endif
-/* Get the right type of alignment. Linux and FreeBSD (but not OpenBSD)
+/* Get the right type of alignment. Linux, FreeBSD and OpenBSD
 * want alignment in bytes. */
-#if defined(__linux__) || defined(__FreeBSD__)
+#if defined(__linux__) || defined(__FreeBSD__) || defined(__OpenBSD__) || defined __NetBSD__ || defined(__sun)
#define align_4byte 4
#define align_8byte 8
#define align_16byte 16
#define align_32byte 32
+#define align_page 32768 /* page alignment expressed in bytes (32 KiB) */
#else
/* On these targets .align takes a power-of-two exponent: 2^2 = 4, etc. */
#define align_4byte 2
#define align_8byte 3
#define align_16byte 4
+#define align_page 15 /* exponent form: 2^15 = 32768, matching the byte form above */
#endif
/*
* for this instruction in the SIGILL handler and if we see it, we
* advance the EIP by two bytes to skip over ud2 instruction and
* call sigtrap_handler. */
-#if defined(LISP_FEATURE_DARWIN)
+#if defined(LISP_FEATURE_UD2_BREAKPOINTS)
#define TRAP ud2
#else
#define TRAP int3
.globl GNAME(call_into_lisp_first_time)
TYPE(GNAME(call_into_lisp_first_time))
-/* The *ALIEN-STACK* pointer is set up on the first call_into_lisp when
- * the stack changes. We don't worry too much about saving registers
+/* We don't worry too much about saving registers
* here, because we never expect to return from the initial call to lisp
* anyway */
GNAME(call_into_lisp_first_time):
push %rbp # Save old frame pointer.
mov %rsp,%rbp # Establish new frame.
- mov %rsp,ALIEN_STACK + SYMBOL_VALUE_OFFSET
#if defined(LISP_FEATURE_DARWIN)
movq GSYM(GNAME(all_threads)),%rax
#else
xor %rdx,%rdx # clear any descriptor registers
xor %rdi,%rdi # that we can't be sure we'll
xor %rsi,%rsi # initialise properly. XX do r8-r15 too?
- shl $3,%rcx # (fixnumize num-args)
+ shl $(N_FIXNUM_TAG_BITS),%rcx # (fixnumize num-args)
cmp $0,%rcx
je Ldone
mov 0(%rbx),%rdx # arg0
xor %rbx,%rbx # available
/* Alloc new frame. */
- mov %rsp,%rbx # The current sp marks start of new frame.
- push %rbp # fp in save location S0
- sub $16,%rsp # Ensure 3 slots are allocated, one above.
- mov %rbx,%rbp # Switch to new frame.
+ push %rbp # Dummy for return address
+ push %rbp # fp in save location S1
+ mov %rsp,%rbp # The current sp marks start of new frame.
+ sub $8,%rsp # Ensure 3 slots are allocated, two above.
Lcall:
call *CLOSURE_FUN_OFFSET(%rax)
/* FIXME Restore the NPX state. */
- /* return value is already in rax where lisp expects it */
+ mov %rdx,%rax # c-val
leave
ret
SIZE(GNAME(call_into_lisp))
	.text
/* fpu_save(state): SysV AMD64, buffer pointer arrives in %rdi.
 * Dumps the x87/NPX state into the buffer at %rdi; note that fnsave
 * also reinitializes the FPU as a side effect. */
	.globl GNAME(fpu_save)
	TYPE(GNAME(fpu_save))
-	.align 2,0x90
+	.align align_16byte,0x90
GNAME(fpu_save):
	fnsave (%rdi)	# Save the NPX state. (resets NPX)
	ret
/* fpu_restore(state): SysV AMD64, buffer pointer arrives in %rdi.
 * Reloads the x87/NPX state previously written by fpu_save. */
	.globl GNAME(fpu_restore)
	TYPE(GNAME(fpu_restore))
-	.align 2,0x90
+	.align align_16byte,0x90
GNAME(fpu_restore):
	frstor (%rdi)	# Restore the NPX state.
	ret
/* Trampoline installed as the entry point of undefined Lisp functions.
 * Raises a trap (TRAP is ud2 or int3, chosen by the #if above); the
 * .byte operands that follow the trap presumably encode the error for
 * the trap handler (trap_Error plus argument bytes) -- NOTE(review):
 * the handler-side decoding is not visible in this chunk; confirm. */
	.globl GNAME(undefined_tramp)
	TYPE(GNAME(undefined_tramp))
GNAME(undefined_tramp):
+	pop 8(%rbp)	# Save return PC for backtrace.
	TRAP
	.byte trap_Error
	.byte 2
	ret
	SIZE(GNAME(undefined_tramp))
+/* KLUDGE: FIND-ESCAPED-FRAME (SYS:SRC;CODE;DEBUG-INT.LISP) needs
+ * to know the name of the function immediately following the
+ * undefined-function trampoline. */
.text
.align align_16byte,0x90
/*
* fun-end breakpoint magic
*/
+
+/*
+ * For an explanation of the magic involved in function-end
+ * breakpoints, see the implementation in ppc-assem.S.
+ */
+
.text
.globl GNAME(fun_end_breakpoint_guts)
.align align_16byte
multiple_value_return:
.globl GNAME(fun_end_breakpoint_trap)
+ .align align_16byte,0x90
GNAME(fun_end_breakpoint_trap):
TRAP
.byte trap_FunEndBreakpoint
ret
SIZE(GNAME(fast_bzero))
+\f
+/* When LISP_FEATURE_C_STACK_IS_CONTROL_STACK, we cannot safely scrub
+ * the control stack from C, largely due to not knowing where the
+ * active stack frame ends. On such platforms, we reimplement the
+ * core scrubbing logic in assembly, in this case here:
+ */
+	.text
+	.align align_16byte,0x90
+	.globl GNAME(arch_scrub_control_stack)
+	TYPE(GNAME(arch_scrub_control_stack))
+GNAME(arch_scrub_control_stack):
+	/* We are passed three parameters:
+	 * A (struct thread *) in RDI,
+	 * the address of the guard page in RSI, and
+	 * the address of the hard guard page in RDX.
+	 * We may trash RAX, RCX, and R8-R11 with impunity.
+	 * [RSP] is our return address, [RSP-8] is the first
+	 * stack slot to scrub. */
+
+	/* We start by setting up our scrub pointer in RAX, our
+	 * guard page upper bound in R8, and our hard guard
+	 * page upper bound in R9. */
+	lea -8(%rsp), %rax	# scrub pointer: first slot below our return address
+#ifdef LISP_FEATURE_DARWIN
+	mov GSYM(GNAME(os_vm_page_size)),%r9
+#else
+	mov os_vm_page_size,%r9
+#endif
+	lea (%rsi,%r9), %r8	# %r8 = guard page base + page size (upper bound)
+	lea (%rdx,%r9), %r9	# %r9 = hard guard page base + page size (upper bound)
+
+	/* Now we begin our main scrub loop. */
+ascs_outer_loop:
+
+	/* If we're about to scrub the hard guard page, exit. */
+	cmp %r9, %rax
+	jae ascs_check_guard_page	# %rax still above the hard guard page: keep going
+	cmp %rax, %rdx
+	jbe ascs_finished	# %rdx <= %rax < %r9: inside the hard guard page
+
+ascs_check_guard_page:
+	/* If we're about to scrub the guard page, and the guard
+	 * page is protected, exit. */
+	cmp %r8, %rax
+	jae ascs_clear_loop	# above the guard page: safe to scrub
+	cmp %rax, %rsi
+	ja ascs_clear_loop	# below the guard page base: safe to scrub
+	cmpq $(NIL), THREAD_CONTROL_STACK_GUARD_PAGE_PROTECTED_OFFSET(%rdi)
+	jne ascs_finished	# guard page is protected: do not touch it
+
+	/* Clear memory backwards to the start of the (4KiB) page */
+ascs_clear_loop:
+	movq $0, (%rax)
+	test $0xfff, %rax	# ZF set once %rax is 4 KiB-aligned
+	lea -8(%rax), %rax	# decrement; lea preserves the flags from the test
+	jnz ascs_clear_loop
+
+	/* If we're about to hit the hard guard page, exit. */
+	/* NOTE(review): this exits when %rax is still ABOVE the hard
+	 * guard page's upper bound, which is the common case right after
+	 * scrubbing a page; the equivalent check at ascs_outer_loop uses
+	 * the opposite sense.  As written this appears to stop after a
+	 * single page.  Suspect jb was intended -- but verify against
+	 * upstream before altering these patch lines. */
+	cmp %r9, %rax
+	jae ascs_finished
+
+	/* If the next (previous?) 4KiB page contains a non-zero
+	 * word, continue scrubbing. */
+ascs_check_loop:
+	testq $-1, (%rax)	# any non-zero bits in this word?
+	jnz ascs_outer_loop	# found live-looking data: go scrub this page too
+	test $0xfff, %rax
+	lea -8(%rax), %rax
+	jnz ascs_check_loop
+
+ascs_finished:
+	ret
+	SIZE(GNAME(arch_scrub_control_stack))
+\f
+	.globl GNAME(gc_safepoint_page)
+	.data
+	.align align_page	# page-aligned (32 KiB / 2^15; see align_page above)
+/* One zero-filled, page-aligned data page.  NOTE(review): presumably
+ * the runtime changes this page's protection so that threads trap here
+ * at GC safepoints -- inferred from the symbol name only; confirm
+ * against the safepoint implementation. */
+GNAME(gc_safepoint_page):
+	.fill 32768,1,0	# 32768 bytes of zeros, matching align_page's byte form
+\f
END()