#include "asan_mapping.h"
#include "sanitizer_common/sanitizer_asm.h"

#if defined(__x86_64__)
#include "sanitizer_common/sanitizer_platform.h"

.file "asan_rtl_x86_64.S"

#define NAME(n, reg, op, s, i) n##_##op##_##i##_##s##_##reg

#define FNAME(reg, op, s, i) NAME(__asan_check, reg, op, s, i)
#define RLABEL(reg, op, s, i) NAME(.return, reg, op, s, i)
#define CLABEL(reg, op, s, i) NAME(.check, reg, op, s, i)
#define FLABEL(reg, op, s, i) NAME(.fail, reg, op, s, i)

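// Each callback is emitted under a name of the form
// __asan_check_<op>_<variant>_<size>_<REG>; e.g. FNAME(RAX, load, 4, add)
// expands to __asan_check_load_add_4_RAX.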
#define BEGINF(reg, op, s, i) \
.section .text.FNAME(reg, op, s, i),"ax",@progbits ;\
.globl FNAME(reg, op, s, i) ;\
.hidden FNAME(reg, op, s, i) ;\
ASM_TYPE_FUNCTION(FNAME(reg, op, s, i)) ;\
.cfi_startproc ;\
FNAME(reg, op, s, i): ;\

#define ENDF .cfi_endproc ;\

// Access check functions for 1, 2 and 4 byte types, which require extra checks.
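// The shadow byte for an address is loaded from (addr >> 3) + ASAN_SHADOW_OFFSET_CONST.
// A zero shadow byte means the whole 8-byte granule is addressable, so the fast
// path returns immediately; any other value falls through to the per-size check.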
#define ASAN_MEMORY_ACCESS_INITIAL_CHECK_ADD(reg, op, s) \
        mov     %##reg,%r10 ;\
        shr     $0x3,%r10 ;\
.if ASAN_SHADOW_OFFSET_CONST < 0x80000000 ;\
        movsbl  ASAN_SHADOW_OFFSET_CONST(%r10),%r10d ;\
.else ;\
        movabsq $ASAN_SHADOW_OFFSET_CONST,%r11 ;\
        movsbl  (%r10,%r11),%r10d ;\
.endif ;\
        test    %r10d,%r10d ;\
        jne     CLABEL(reg, op, s, add) ;\
RLABEL(reg, op, s, add): ;\
        retq ;\

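// Slow path for 1/2/4-byte accesses: a positive shadow value k means only the
// first k bytes of the granule are addressable, so the access is valid iff
// (addr & 7) + access_size - 1 < k; otherwise the faulting address is passed
// in %rdi and control tail-jumps to the matching __asan_report_*_asm routine.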
#define ASAN_MEMORY_ACCESS_EXTRA_CHECK_1(reg, op, i) \
CLABEL(reg, op, 1, i): ;\
        mov     %##reg,%r11 ;\
        and     $0x7,%r11d ;\
        cmp     %r10d,%r11d ;\
        jl      RLABEL(reg, op, 1, i) ;\
        mov     %##reg,%rdi ;\
        jmp     __asan_report_##op##1_asm ;\

#define ASAN_MEMORY_ACCESS_EXTRA_CHECK_2(reg, op, i) \
CLABEL(reg, op, 2, i): ;\
        mov     %##reg,%r11 ;\
        and     $0x7,%r11d ;\
        add     $0x1,%r11d ;\
        cmp     %r10d,%r11d ;\
        jl      RLABEL(reg, op, 2, i) ;\
        mov     %##reg,%rdi ;\
        jmp     __asan_report_##op##2_asm ;\

#define ASAN_MEMORY_ACCESS_EXTRA_CHECK_4(reg, op, i) \
CLABEL(reg, op, 4, i): ;\
        mov     %##reg,%r11 ;\
        and     $0x7,%r11d ;\
        add     $0x3,%r11d ;\
        cmp     %r10d,%r11d ;\
        jl      RLABEL(reg, op, 4, i) ;\
        mov     %##reg,%rdi ;\
        jmp     __asan_report_##op##4_asm ;\

#define ASAN_MEMORY_ACCESS_CALLBACK_ADD_1(reg, op) \
BEGINF(reg, op, 1, add) ;\
ASAN_MEMORY_ACCESS_INITIAL_CHECK_ADD(reg, op, 1) ;\
ASAN_MEMORY_ACCESS_EXTRA_CHECK_1(reg, op, add) ;\
ENDF

#define ASAN_MEMORY_ACCESS_CALLBACK_ADD_2(reg, op) \
BEGINF(reg, op, 2, add) ;\
ASAN_MEMORY_ACCESS_INITIAL_CHECK_ADD(reg, op, 2) ;\
ASAN_MEMORY_ACCESS_EXTRA_CHECK_2(reg, op, add) ;\
ENDF

#define ASAN_MEMORY_ACCESS_CALLBACK_ADD_4(reg, op) \
BEGINF(reg, op, 4, add) ;\
ASAN_MEMORY_ACCESS_INITIAL_CHECK_ADD(reg, op, 4) ;\
ASAN_MEMORY_ACCESS_EXTRA_CHECK_4(reg, op, add) ;\
ENDF

// Access check functions for 8 and 16 byte types: no extra checks required.
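// An 8-byte access covers exactly one shadow byte (checked with cmpb against 0);
// a 16-byte access covers two adjacent shadow bytes (checked with cmpw). Any
// non-zero shadow value is a failure.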
#define ASAN_MEMORY_ACCESS_CHECK_ADD(reg, op, s, c) \
        mov     %##reg,%r10 ;\
        shr     $0x3,%r10 ;\
.if ASAN_SHADOW_OFFSET_CONST < 0x80000000 ;\
        ##c     $0x0,ASAN_SHADOW_OFFSET_CONST(%r10) ;\
.else ;\
        movabsq $ASAN_SHADOW_OFFSET_CONST,%r11 ;\
        ##c     $0x0,(%r10,%r11) ;\
.endif ;\
        jne     FLABEL(reg, op, s, add) ;\
        retq ;\

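// Failure path shared by the 8- and 16-byte callbacks: pass the faulting address
// in %rdi and tail-jump to the matching __asan_report_<op><size>_asm routine.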
#define ASAN_MEMORY_ACCESS_FAIL(reg, op, s, i) \
FLABEL(reg, op, s, i): ;\
        mov     %##reg,%rdi ;\
        jmp     __asan_report_##op##s##_asm ;\

#define ASAN_MEMORY_ACCESS_CALLBACK_ADD_8(reg, op) \
BEGINF(reg, op, 8, add) ;\
ASAN_MEMORY_ACCESS_CHECK_ADD(reg, op, 8, cmpb) ;\
ASAN_MEMORY_ACCESS_FAIL(reg, op, 8, add) ;\
ENDF

#define ASAN_MEMORY_ACCESS_CALLBACK_ADD_16(reg, op) \
BEGINF(reg, op, 16, add) ;\
ASAN_MEMORY_ACCESS_CHECK_ADD(reg, op, 16, cmpw) ;\
ASAN_MEMORY_ACCESS_FAIL(reg, op, 16, add) ;\
ENDF

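// Stamp out load and store callbacks for all access sizes (1, 2, 4, 8 and 16
// bytes) for the register that holds the address being checked.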
#define ASAN_MEMORY_ACCESS_CALLBACKS_ADD(reg) \
ASAN_MEMORY_ACCESS_CALLBACK_ADD_1(reg, load) \
ASAN_MEMORY_ACCESS_CALLBACK_ADD_1(reg, store) \
ASAN_MEMORY_ACCESS_CALLBACK_ADD_2(reg, load) \
ASAN_MEMORY_ACCESS_CALLBACK_ADD_2(reg, store) \
ASAN_MEMORY_ACCESS_CALLBACK_ADD_4(reg, load) \
ASAN_MEMORY_ACCESS_CALLBACK_ADD_4(reg, store) \
ASAN_MEMORY_ACCESS_CALLBACK_ADD_8(reg, load) \
ASAN_MEMORY_ACCESS_CALLBACK_ADD_8(reg, store) \
ASAN_MEMORY_ACCESS_CALLBACK_ADD_16(reg, load) \
ASAN_MEMORY_ACCESS_CALLBACK_ADD_16(reg, store) \


// Instantiate all callbacks except those for R10 and R11. We are using the
// PLTSafe class with the intrinsic, which guarantees that code generation will
// never emit an R10 or R11 callback.
ASAN_MEMORY_ACCESS_CALLBACKS_ADD(RAX)
ASAN_MEMORY_ACCESS_CALLBACKS_ADD(RBX)
ASAN_MEMORY_ACCESS_CALLBACKS_ADD(RCX)
ASAN_MEMORY_ACCESS_CALLBACKS_ADD(RDX)
ASAN_MEMORY_ACCESS_CALLBACKS_ADD(RSI)
ASAN_MEMORY_ACCESS_CALLBACKS_ADD(RDI)
ASAN_MEMORY_ACCESS_CALLBACKS_ADD(RBP)
ASAN_MEMORY_ACCESS_CALLBACKS_ADD(R8)
ASAN_MEMORY_ACCESS_CALLBACKS_ADD(R9)
ASAN_MEMORY_ACCESS_CALLBACKS_ADD(R12)
ASAN_MEMORY_ACCESS_CALLBACKS_ADD(R13)
ASAN_MEMORY_ACCESS_CALLBACKS_ADD(R14)
ASAN_MEMORY_ACCESS_CALLBACKS_ADD(R15)

#endif

NO_EXEC_STACK_DIRECTIVE