#include "asan_mapping.h"
#include "sanitizer_common/sanitizer_asm.h"

#if defined(__x86_64__)
#include "sanitizer_common/sanitizer_platform.h"

.file "asan_rtl_x86_64.S"
.att_syntax

#define NAME(n, reg, op, s, i) n##_##op##_##i##_##s##_##reg

#define FNAME(reg, op, s, i) NAME(__asan_check, reg, op, s, i)
#define RLABEL(reg, op, s, i) NAME(.return, reg, op, s, i)
#define CLABEL(reg, op, s, i) NAME(.check, reg, op, s, i)
#define FLABEL(reg, op, s, i) NAME(.fail, reg, op, s, i)
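// For example, FNAME(RAX, load, 1, add) expands to __asan_check_load_add_1_RAX,
// and RLABEL(RAX, load, 1, add) expands to the local label .return_load_add_1_RAX.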

#define BEGINF(reg, op, s, i) \
.section .text.FNAME(reg, op, s, i),"ax",@progbits ;\
.globl FNAME(reg, op, s, i) ;\
.hidden FNAME(reg, op, s, i) ;\
ASM_TYPE_FUNCTION(FNAME(reg, op, s, i)) ;\
.cfi_startproc ;\
FNAME(reg, op, s, i): ;\

#define ENDF .cfi_endproc ;\

// Access check functions for 1, 2 and 4 byte types, which require extra checks.
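// Roughly, the checks below are equivalent to this C sketch (a sketch only,
// assuming the usual ASan shadow encoding, where a shadow byte of 0 marks a fully
// addressable 8-byte granule and a positive value k marks only its first k bytes
// as addressable):
//   s8 shadow = *(s8 *)((addr >> 3) + ASAN_SHADOW_OFFSET_CONST);
//   if (shadow != 0 && (s8)((addr & 7) + kAccessSize - 1) >= shadow)
//     __asan_report_*(addr);   // kAccessSize is 1, 2 or 4 here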
#define ASAN_MEMORY_ACCESS_INITIAL_CHECK_ADD(reg, op, s) \
        mov    %##reg,%r10 ;\
        shr    $0x3,%r10 ;\
.if ASAN_SHADOW_OFFSET_CONST < 0x80000000 ;\
        movsbl ASAN_SHADOW_OFFSET_CONST(%r10),%r10d ;\
.else ;\
        movabsq $ASAN_SHADOW_OFFSET_CONST,%r11 ;\
        movsbl (%r10,%r11),%r10d ;\
.endif ;\
        test   %r10d,%r10d ;\
        jne    CLABEL(reg, op, s, add) ;\
RLABEL(reg, op, s, add): ;\
        retq ;\

#define ASAN_MEMORY_ACCESS_EXTRA_CHECK_1(reg, op, i) \
CLABEL(reg, op, 1, i): ;\
        mov    %##reg,%r11 ;\
        and    $0x7,%r11d ;\
        cmp    %r10d,%r11d ;\
        jl     RLABEL(reg, op, 1, i) ;\
        mov    %##reg,%rdi ;\
        jmp    __asan_report_##op##1_asm ;\

#define ASAN_MEMORY_ACCESS_EXTRA_CHECK_2(reg, op, i) \
CLABEL(reg, op, 2, i): ;\
        mov    %##reg,%r11 ;\
        and    $0x7,%r11d ;\
        add    $0x1,%r11d ;\
        cmp    %r10d,%r11d ;\
        jl     RLABEL(reg, op, 2, i) ;\
        mov    %##reg,%rdi ;\
        jmp    __asan_report_##op##2_asm ;\

#define ASAN_MEMORY_ACCESS_EXTRA_CHECK_4(reg, op, i) \
CLABEL(reg, op, 4, i): ;\
        mov    %##reg,%r11 ;\
        and    $0x7,%r11d ;\
        add    $0x3,%r11d ;\
        cmp    %r10d,%r11d ;\
        jl     RLABEL(reg, op, 4, i) ;\
        mov    %##reg,%rdi ;\
        jmp    __asan_report_##op##4_asm ;\

#define ASAN_MEMORY_ACCESS_CALLBACK_ADD_1(reg, op) \
BEGINF(reg, op, 1, add) ;\
ASAN_MEMORY_ACCESS_INITIAL_CHECK_ADD(reg, op, 1) ;\
ASAN_MEMORY_ACCESS_EXTRA_CHECK_1(reg, op, add) ;\
ENDF

#define ASAN_MEMORY_ACCESS_CALLBACK_ADD_2(reg, op) \
BEGINF(reg, op, 2, add) ;\
ASAN_MEMORY_ACCESS_INITIAL_CHECK_ADD(reg, op, 2) ;\
ASAN_MEMORY_ACCESS_EXTRA_CHECK_2(reg, op, add) ;\
ENDF

#define ASAN_MEMORY_ACCESS_CALLBACK_ADD_4(reg, op) \
BEGINF(reg, op, 4, add) ;\
ASAN_MEMORY_ACCESS_INITIAL_CHECK_ADD(reg, op, 4) ;\
ASAN_MEMORY_ACCESS_EXTRA_CHECK_4(reg, op, add) ;\
ENDF

// Access check functions for 8 and 16 byte types: no extra checks required.
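// Since each shadow byte covers an 8-byte granule, an 8-byte access only needs its
// single shadow byte to be zero (cmpb), and a 16-byte access needs two consecutive
// shadow bytes to be zero (cmpw). A rough C sketch of the idea (illustrative only,
// with T being s8 for size 8 and s16 for size 16):
//   if (*(T *)((addr >> 3) + ASAN_SHADOW_OFFSET_CONST) != 0)
//     __asan_report_*(addr);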
#define ASAN_MEMORY_ACCESS_CHECK_ADD(reg, op, s, c) \
        mov    %##reg,%r10 ;\
        shr    $0x3,%r10 ;\
.if ASAN_SHADOW_OFFSET_CONST < 0x80000000 ;\
        ##c    $0x0,ASAN_SHADOW_OFFSET_CONST(%r10) ;\
.else ;\
        movabsq $ASAN_SHADOW_OFFSET_CONST,%r11 ;\
        ##c    $0x0,(%r10,%r11) ;\
.endif ;\
        jne    FLABEL(reg, op, s, add) ;\
        retq ;\

#define ASAN_MEMORY_ACCESS_FAIL(reg, op, s, i) \
FLABEL(reg, op, s, i): ;\
        mov    %##reg,%rdi ;\
        jmp    __asan_report_##op##s##_asm ;\

#define ASAN_MEMORY_ACCESS_CALLBACK_ADD_8(reg, op) \
BEGINF(reg, op, 8, add) ;\
ASAN_MEMORY_ACCESS_CHECK_ADD(reg, op, 8, cmpb) ;\
ASAN_MEMORY_ACCESS_FAIL(reg, op, 8, add) ;\
ENDF

#define ASAN_MEMORY_ACCESS_CALLBACK_ADD_16(reg, op) \
BEGINF(reg, op, 16, add) ;\
ASAN_MEMORY_ACCESS_CHECK_ADD(reg, op, 16, cmpw) ;\
ASAN_MEMORY_ACCESS_FAIL(reg, op, 16, add) ;\
ENDF

#define ASAN_MEMORY_ACCESS_CALLBACKS_ADD(reg) \
ASAN_MEMORY_ACCESS_CALLBACK_ADD_1(reg, load) \
ASAN_MEMORY_ACCESS_CALLBACK_ADD_1(reg, store) \
ASAN_MEMORY_ACCESS_CALLBACK_ADD_2(reg, load) \
ASAN_MEMORY_ACCESS_CALLBACK_ADD_2(reg, store) \
ASAN_MEMORY_ACCESS_CALLBACK_ADD_4(reg, load) \
ASAN_MEMORY_ACCESS_CALLBACK_ADD_4(reg, store) \
ASAN_MEMORY_ACCESS_CALLBACK_ADD_8(reg, load) \
ASAN_MEMORY_ACCESS_CALLBACK_ADD_8(reg, store) \
ASAN_MEMORY_ACCESS_CALLBACK_ADD_16(reg, load) \
ASAN_MEMORY_ACCESS_CALLBACK_ADD_16(reg, store) \


// Instantiate all but the R10 and R11 callbacks. We are using the PLTSafe class
// with the intrinsic, which guarantees that code generation will never emit an
// R10 or R11 callback.
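// Each ASAN_MEMORY_ACCESS_CALLBACKS_ADD(reg) line below expands to ten functions
// for that register: load and store checks for access sizes 1, 2, 4, 8 and 16
// (e.g. __asan_check_load_add_1_RAX through __asan_check_store_add_16_RAX).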
ASAN_MEMORY_ACCESS_CALLBACKS_ADD(RAX)
ASAN_MEMORY_ACCESS_CALLBACKS_ADD(RBX)
ASAN_MEMORY_ACCESS_CALLBACKS_ADD(RCX)
ASAN_MEMORY_ACCESS_CALLBACKS_ADD(RDX)
ASAN_MEMORY_ACCESS_CALLBACKS_ADD(RSI)
ASAN_MEMORY_ACCESS_CALLBACKS_ADD(RDI)
ASAN_MEMORY_ACCESS_CALLBACKS_ADD(RBP)
ASAN_MEMORY_ACCESS_CALLBACKS_ADD(R8)
ASAN_MEMORY_ACCESS_CALLBACKS_ADD(R9)
ASAN_MEMORY_ACCESS_CALLBACKS_ADD(R12)
ASAN_MEMORY_ACCESS_CALLBACKS_ADD(R13)
ASAN_MEMORY_ACCESS_CALLBACKS_ADD(R14)
ASAN_MEMORY_ACCESS_CALLBACKS_ADD(R15)
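// Call-site sketch (illustrative only): instrumented code is expected to have the
// address under test already in the register named by the callback, e.g.
//     callq __asan_check_load_add_4_RAX   # check a 4-byte load at the address in %rax
// On a poisoned access the callback tail-jumps to the matching __asan_report_*_asm
// routine with the faulting address in %rdi.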

#endif

NO_EXEC_STACK_DIRECTIVE