|
|
|
@@ -7,6 +7,7 @@
|
|
|
|
|
#include <ucontext.h>
|
|
|
|
|
#include <unistd.h>
|
|
|
|
|
|
|
|
|
|
#include "arch_context.h"
|
|
|
|
|
#include "software_interrupt.h"
|
|
|
|
|
|
|
|
|
|
#define ARCH_SIG_JMP_OFF 8
|
|
|
|
@@ -31,7 +32,6 @@ struct arch_context {
|
|
|
|
|
mcontext_t mctx;
|
|
|
|
|
};
|
|
|
|
|
|
|
|
|
|
extern void __attribute__((noreturn)) worker_thread_sandbox_switch_preempt(void);
|
|
|
|
|
extern __thread struct arch_context worker_thread_base_context;
|
|
|
|
|
|
|
|
|
|
static void __attribute__((noinline)) arch_context_init(struct arch_context *actx, reg_t ip, reg_t sp)
|
|
|
|
@ -44,14 +44,15 @@ static void __attribute__((noinline)) arch_context_init(struct arch_context *act
|
|
|
|
|
* context_switch conventions: bp is expected to be on top of the stack
|
|
|
|
|
* when co-op context switching.
|
|
|
|
|
*
|
|
|
|
|
* so push sp on this new stack and use
|
|
|
|
|
* that new sp as sp for switching to sandbox!
|
|
|
|
|
* Temporarily switches the active stack to the stack pointer stored in sp
|
|
|
|
|
* to push the stack pointer sp to the top of its own stack.
|
|
|
|
|
* This acts as the base pointer
|
|
|
|
|
*/
|
|
|
|
|
asm volatile("movq %%rsp, %%rbx\n\t"
|
|
|
|
|
"movq %%rax, %%rsp\n\t"
|
|
|
|
|
"pushq %%rax\n\t"
|
|
|
|
|
"movq %%rsp, %%rax\n\t"
|
|
|
|
|
"movq %%rbx, %%rsp\n\t"
|
|
|
|
|
asm volatile("movq %%rsp, %%rbx\n\t" /* Temporarily save pointer of active stack to B */
|
|
|
|
|
"movq %%rax, %%rsp\n\t" /* Set active stack to stack pointer in A(C variable sp) */
|
|
|
|
|
"pushq %%rax\n\t" /* Push A(C variable sp) onto the stack at sp */
|
|
|
|
|
"movq %%rsp, %%rax\n\t" /* Write the incremented stack pointer to A(C variable sp) */
|
|
|
|
|
"movq %%rbx, %%rsp\n\t" /* Restore original stack saved in B */
|
|
|
|
|
: "=a"(sp)
|
|
|
|
|
: "a"(sp)
|
|
|
|
|
: "memory", "cc", "rbx");
|
|
|
|
@@ -122,6 +123,9 @@ arch_context_switch(struct arch_context *current, struct arch_context *next)
|
|
|
|
|
/* if both current and next are NULL, there is no state change */
|
|
|
|
|
assert(current != NULL || next != NULL);
|
|
|
|
|
|
|
|
|
|
/* Assumption: The caller does not switch to itself */
|
|
|
|
|
assert(current != next);
|
|
|
|
|
|
|
|
|
|
/* Set any NULLs to worker_thread_base_context to resume execution of main */
|
|
|
|
|
if (current == NULL) current = &worker_thread_base_context;
|
|
|
|
|
if (next == NULL) next = &worker_thread_base_context;
|
|
|
|
@@ -129,27 +133,58 @@ arch_context_switch(struct arch_context *current, struct arch_context *next)
|
|
|
|
|
reg_t *current_registers = current->regs, *next_registers = next->regs;
|
|
|
|
|
assert(current_registers && next_registers);
|
|
|
|
|
|
|
|
|
|
asm volatile("pushq %%rbp\n\t"
|
|
|
|
|
"movq %%rsp, %%rbp\n\t"
|
|
|
|
|
"movq $2f, 8(%%rax)\n\t"
|
|
|
|
|
"movq %%rsp, (%%rax)\n\t"
|
|
|
|
|
"cmpq $0, (%%rbx)\n\t"
|
|
|
|
|
"je 1f\n\t"
|
|
|
|
|
"movq (%%rbx), %%rsp\n\t"
|
|
|
|
|
"jmpq *8(%%rbx)\n\t"
|
|
|
|
|
"1:\n\t"
|
|
|
|
|
"call worker_thread_sandbox_switch_preempt\n\t"
|
|
|
|
|
".align 8\n\t"
|
|
|
|
|
"2:\n\t"
|
|
|
|
|
"movq $0, (%%rbx)\n\t"
|
|
|
|
|
".align 8\n\t"
|
|
|
|
|
"3:\n\t"
|
|
|
|
|
"popq %%rbp\n\t"
|
|
|
|
|
:
|
|
|
|
|
: "a"(current_registers), "b"(next_registers)
|
|
|
|
|
: "memory", "cc", "rcx", "rdx", "rsi", "rdi", "r8", "r9", "r10", "r11", "r12", "r13", "r14", "r15",
|
|
|
|
|
"xmm0", "xmm1", "xmm2", "xmm3", "xmm4", "xmm5", "xmm6", "xmm7", "xmm8", "xmm9", "xmm10", "xmm11",
|
|
|
|
|
"xmm12", "xmm13", "xmm14", "xmm15");
|
|
|
|
|
asm volatile(
|
|
|
|
|
/* Create a new stack frame */
|
|
|
|
|
"pushq %%rbp\n\t" /* stack[stack_len++] = base_pointer */
|
|
|
|
|
"movq %%rsp, %%rbp\n\t" /* base_pointer = stack_pointer. Start new Frame */
|
|
|
|
|
|
|
|
|
|
/*
|
|
|
|
|
* Save the IP and stack pointer to the context of the sandbox we're switching from
|
|
|
|
|
*/
|
|
|
|
|
"movq $2f, 8(%%rax)\n\t" /* Write the address of label 2 to current_registers[1] (instruction_pointer). */
|
|
|
|
|
"movq %%rsp, (%%rax)\n\t" /* current_registers[0] (stack_pointer) = stack_pointer */
|
|
|
|
|
|
|
|
|
|
/*
|
|
|
|
|
* Check if the variant of the context we're trying to switch to is SLOW (mcontext-based)
|
|
|
|
|
* If it is, jump to label 1 to restore the preempted sandbox
|
|
|
|
|
*/
|
|
|
|
|
"cmpq $0, (%%rbx)\n\t" /* if (stack pointer == 0) */
|
|
|
|
|
"je 1f\n\t" /* goto 1; restore the existing sandbox using mcontext */
|
|
|
|
|
|
|
|
|
|
/*
|
|
|
|
|
* Fast Path
|
|
|
|
|
* We can just update the stack pointer and jump to the target instruction
|
|
|
|
|
*/
|
|
|
|
|
"movq (%%rbx), %%rsp\n\t" /* stack_pointer = next_registers[0] (stack_pointer) */
|
|
|
|
|
"jmpq *8(%%rbx)\n\t" /* immediate jump to next_registers[1] (instruction_pointer) */
|
|
|
|
|
|
|
|
|
|
/*
|
|
|
|
|
* Slow Path
|
|
|
|
|
* If the stack pointer equaled 0, that means the sandbox was preempted and we need to
|
|
|
|
|
* fallback to a full mcontext-based context switch. We do this by invoking
|
|
|
|
|
* arch_context_mcontext_restore, which fires a SIGUSR1 signal. The SIGUSR1 signal handler
|
|
|
|
|
* executes the mcontext-based context switch.
|
|
|
|
|
*/
|
|
|
|
|
"1:\n\t"
|
|
|
|
|
"call arch_context_mcontext_restore\n\t"
|
|
|
|
|
".align 8\n\t"
|
|
|
|
|
|
|
|
|
|
/*
|
|
|
|
|
* Where preempted sandbox resumes
|
|
|
|
|
* rbx contains the preempted sandbox's IP and SP in this context
|
|
|
|
|
*/
|
|
|
|
|
"2:\n\t"
|
|
|
|
|
"movq $0, (%%rbx)\n\t" /* stack pointer = 0 */
|
|
|
|
|
".align 8\n\t"
|
|
|
|
|
|
|
|
|
|
/* This label is used in conjunction with a static offset */
|
|
|
|
|
"3:\n\t"
|
|
|
|
|
"popq %%rbp\n\t" /* base_pointer = stack[--stack_len]; Base Pointer is restored */
|
|
|
|
|
:
|
|
|
|
|
: "a"(current_registers), "b"(next_registers)
|
|
|
|
|
: "memory", "cc", "rcx", "rdx", "rsi", "rdi", "r8", "r9", "r10", "r11", "r12", "r13", "r14", "r15", "xmm0",
|
|
|
|
|
"xmm1", "xmm2", "xmm3", "xmm4", "xmm5", "xmm6", "xmm7", "xmm8", "xmm9", "xmm10", "xmm11", "xmm12", "xmm13",
|
|
|
|
|
"xmm14", "xmm15");
|
|
|
|
|
|
|
|
|
|
return 0;
|
|
|
|
|
}
|
|
|
|
|