@@ -50,12 +50,14 @@ NEVER_INLINE bool safe_memcpy(void* dest_ptr, const void* src_ptr, size_t n, voi
if (!(dest & 0x3) && !(src & 0x3) && n >= 12) {
size_t size_ts = n / sizeof(size_t);
asm volatile(
+ ".globl safe_memcpy_ins_1 \n"
"safe_memcpy_ins_1: \n"
#if ARCH(I386)
"rep movsl \n"
#else
"rep movsq \n"
#endif
+ ".globl safe_memcpy_1_faulted \n"
"safe_memcpy_1_faulted: \n" // handle_safe_access_fault() set edx/rdx to the fault address!
: "=S"(src),
"=D"(dest),
@@ -74,8 +76,10 @@ NEVER_INLINE bool safe_memcpy(void* dest_ptr, const void* src_ptr, size_t n, voi
}
}
asm volatile(
+ ".globl safe_memcpy_ins_2 \n"
"safe_memcpy_ins_2: \n"
"rep movsb \n"
+ ".globl safe_memcpy_2_faulted \n"
"safe_memcpy_2_faulted: \n" // handle_safe_access_fault() set edx/rdx to the fault address!
: "=c"(remainder),
[fault_at] "=d"(fault_at)
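
The point of exporting these label pairs: when the guarded instruction faults, the page-fault handler can check whether the saved instruction pointer lies between a *_ins label and its *_faulted partner, and if so resume execution at the latter with the fault address in edx/rdx, exactly as the comments above describe. A minimal sketch of that redirection, assuming the kernel's RegisterState, FlatPtr and u8 types and i386 register names (rip/rdx on x86_64):

    extern "C" u8 safe_memcpy_ins_1[];
    extern "C" u8 safe_memcpy_1_faulted[];

    // Sketch only, not the kernel's actual handler.
    bool handle_safe_access_fault(RegisterState& regs, FlatPtr fault_address)
    {
        if (regs.eip >= (FlatPtr)safe_memcpy_ins_1 && regs.eip < (FlatPtr)safe_memcpy_1_faulted) {
            regs.eip = (FlatPtr)safe_memcpy_1_faulted; // skip out of the rep movs
            regs.edx = fault_address;                  // picked up by the "=d"(fault_at) output
            return true;
        }
        return false; // not in a guarded region; let the fault escalate
    }
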
@@ -99,11 +103,13 @@ NEVER_INLINE ssize_t safe_strnlen(const char* str, size_t max_n, void*& fault_at
"test %[max_n], %[max_n] \n"
"je 2f \n"
"dec %[max_n] \n"
+ ".globl safe_strnlen_ins \n"
"safe_strnlen_ins: \n"
"cmpb $0,(%[str], %[count], 1) \n"
"je 2f \n"
"inc %[count] \n"
"jmp 1b \n"
+ ".globl safe_strnlen_faulted \n"
"safe_strnlen_faulted: \n" // handle_safe_access_fault() set edx/rdx to the fault address!
"xor %[count_on_error], %[count_on_error] \n"
"dec %[count_on_error] \n" // return -1 on fault
@@ -131,12 +137,14 @@ NEVER_INLINE bool safe_memset(void* dest_ptr, int c, size_t n, void*& fault_at)
expanded_c |= expanded_c << 8;
expanded_c |= expanded_c << 16;
asm volatile(
+ ".globl safe_memset_ins_1 \n"
"safe_memset_ins_1: \n"
#if ARCH(I386)
"rep stosl \n"
#else
"rep stosq \n"
#endif
+ ".globl safe_memset_1_faulted \n"
"safe_memset_1_faulted: \n" // handle_safe_access_fault() set edx/rdx to the fault address!
: "=D"(dest),
"=c"(remainder),
@@ -154,8 +162,10 @@ NEVER_INLINE bool safe_memset(void* dest_ptr, int c, size_t n, void*& fault_at)
}
}
asm volatile(
+ ".globl safe_memset_ins_2 \n"
"safe_memset_ins_2: \n"
"rep stosb \n"
+ ".globl safe_memset_2_faulted \n"
"safe_memset_2_faulted: \n" // handle_safe_access_fault() set edx/rdx to the fault address!
: "=D"(dest),
"=c"(remainder),
@@ -177,8 +187,10 @@ NEVER_INLINE Optional<u32> safe_atomic_fetch_add_relaxed(volatile u32* var, u32
bool error;
asm volatile(
"xor %[error], %[error] \n"
+ ".globl safe_atomic_fetch_add_relaxed_ins \n"
"safe_atomic_fetch_add_relaxed_ins: \n"
"lock xadd %[result], %[var] \n"
+ ".globl safe_atomic_fetch_add_relaxed_faulted \n"
"safe_atomic_fetch_add_relaxed_faulted: \n"
: [error] "=d"(error), [result] "=a"(result), [var] "=m"(*var)
: [val] "a"(val)
@@ -195,8 +207,10 @@ NEVER_INLINE Optional<u32> safe_atomic_exchange_relaxed(volatile u32* var, u32 v
bool error;
asm volatile(
"xor %[error], %[error] \n"
+ ".globl safe_atomic_exchange_relaxed_ins \n"
"safe_atomic_exchange_relaxed_ins: \n"
"xchg %[val], %[var] \n"
+ ".globl safe_atomic_exchange_relaxed_faulted \n"
"safe_atomic_exchange_relaxed_faulted: \n"
: [error] "=d"(error), "=a"(result), [var] "=m"(*var)
: [val] "a"(val)
@@ -213,8 +227,10 @@ NEVER_INLINE Optional<u32> safe_atomic_load_relaxed(volatile u32* var)
bool error;
asm volatile(
"xor %[error], %[error] \n"
+ ".globl safe_atomic_load_relaxed_ins \n"
"safe_atomic_load_relaxed_ins: \n"
"mov (%[var]), %[result] \n"
+ ".globl safe_atomic_load_relaxed_faulted \n"
"safe_atomic_load_relaxed_faulted: \n"
: [error] "=d"(error), [result] "=c"(result)
: [var] "b"(var)
@@ -230,8 +246,10 @@ NEVER_INLINE bool safe_atomic_store_relaxed(volatile u32* var, u32 val)
bool error;
asm volatile(
"xor %[error], %[error] \n"
+ ".globl safe_atomic_store_relaxed_ins \n"
"safe_atomic_store_relaxed_ins: \n"
"xchg %[val], %[var] \n"
+ ".globl safe_atomic_store_relaxed_faulted \n"
"safe_atomic_store_relaxed_faulted: \n"
: [error] "=d"(error), [var] "=m"(*var)
: [val] "r"(val)
@@ -248,8 +266,10 @@ NEVER_INLINE Optional<bool> safe_atomic_compare_exchange_relaxed(volatile u32* v
bool did_exchange;
asm volatile(
"xor %[error], %[error] \n"
+ ".globl safe_atomic_compare_exchange_relaxed_ins \n"
"safe_atomic_compare_exchange_relaxed_ins: \n"
"lock cmpxchg %[val], %[var] \n"
+ ".globl safe_atomic_compare_exchange_relaxed_faulted \n"
"safe_atomic_compare_exchange_relaxed_faulted: \n"
: [error] "=d"(error), "=a"(expected), [var] "=m"(*var), "=@ccz"(did_exchange)
: "a"(expected), [val] "b"(val)