//===- X86ManualFoldTables.def -----------------------------------*- C++ -*-===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
/// \file
/// This file defines all the entries in the X86 memory folding tables that
/// need special handling.
//===----------------------------------------------------------------------===//

#ifndef NOFOLD
#define NOFOLD(INSN)
#endif
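// How this list is consumed: a client #defines NOFOLD before including this
// file, so each NOFOLD(INSN) expands into the client's own representation of
// "never fold INSN". A minimal illustrative sketch (the array name below is
// hypothetical, not the actual consumer's):
//
//   #define NOFOLD(INSN) X86::INSN,
//   static const unsigned NoFoldOpcodes[] = {
//   #include "X86ManualFoldTables.def"
//   };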
NOFOLD(BTC16rr)
NOFOLD(BTC32rr)
NOFOLD(BTC64rr)
NOFOLD(BTR16rr)
NOFOLD(BTR32rr)
NOFOLD(BTR64rr)
NOFOLD(BTS16rr)
NOFOLD(BTS32rr)
NOFOLD(BTS64rr)
NOFOLD(VCOMPRESSPDZ128rrk)
NOFOLD(VCOMPRESSPDZ256rrk)
NOFOLD(VCOMPRESSPDZrrk)
NOFOLD(VCOMPRESSPSZ128rrk)
NOFOLD(VCOMPRESSPSZ256rrk)
NOFOLD(VCOMPRESSPSZrrk)
NOFOLD(VCVTPS2PHZ128rrk)
NOFOLD(VCVTPS2PHZ256rrk)
NOFOLD(VCVTPS2PHZrrk)
NOFOLD(VEXTRACTF32X4Z256rrik)
NOFOLD(VEXTRACTF32X4Zrrik)
NOFOLD(VEXTRACTF32X8Zrrik)
NOFOLD(VEXTRACTF64X2Z256rrik)
NOFOLD(VEXTRACTF64X2Zrrik)
NOFOLD(VEXTRACTF64X4Zrrik)
NOFOLD(VEXTRACTI32X4Z256rrik)
NOFOLD(VEXTRACTI32X4Zrrik)
NOFOLD(VEXTRACTI32X8Zrrik)
NOFOLD(VEXTRACTI64X2Z256rrik)
NOFOLD(VEXTRACTI64X2Zrrik)
NOFOLD(VEXTRACTI64X4Zrrik)
NOFOLD(VMOVAPDZ128mrk)
NOFOLD(VMOVAPDZ256mrk)
NOFOLD(VMOVAPDZmrk)
NOFOLD(VMOVAPSZ128mrk)
NOFOLD(VMOVAPSZ256mrk)
NOFOLD(VMOVAPSZmrk)
NOFOLD(VMOVDQA32Z128mrk)
NOFOLD(VMOVDQA32Z256mrk)
NOFOLD(VMOVDQA32Zmrk)
NOFOLD(VMOVDQA64Z128mrk)
NOFOLD(VMOVDQA64Z256mrk)
NOFOLD(VMOVDQA64Zmrk)
NOFOLD(VMOVDQU16Z128mrk)
NOFOLD(VMOVDQU16Z256mrk)
NOFOLD(VMOVDQU16Zmrk)
NOFOLD(VMOVDQU32Z128mrk)
NOFOLD(VMOVDQU32Z256mrk)
NOFOLD(VMOVDQU32Zmrk)
NOFOLD(VMOVDQU64Z128mrk)
NOFOLD(VMOVDQU64Z256mrk)
NOFOLD(VMOVDQU64Zmrk)
NOFOLD(VMOVDQU8Z128mrk)
NOFOLD(VMOVDQU8Z256mrk)
NOFOLD(VMOVDQU8Zmrk)
NOFOLD(VMOVUPDZ128mrk)
NOFOLD(VMOVUPDZ256mrk)
NOFOLD(VMOVUPDZmrk)
NOFOLD(VMOVUPSZ128mrk)
NOFOLD(VMOVUPSZ256mrk)
NOFOLD(VMOVUPSZmrk)
NOFOLD(VPCOMPRESSBZ128rrk)
NOFOLD(VPCOMPRESSBZ256rrk)
NOFOLD(VPCOMPRESSBZrrk)
NOFOLD(VPCOMPRESSDZ128rrk)
NOFOLD(VPCOMPRESSDZ256rrk)
NOFOLD(VPCOMPRESSDZrrk)
NOFOLD(VPCOMPRESSQZ128rrk)
NOFOLD(VPCOMPRESSQZ256rrk)
NOFOLD(VPCOMPRESSQZrrk)
NOFOLD(VPCOMPRESSWZ128rrk)
NOFOLD(VPCOMPRESSWZ256rrk)
NOFOLD(VPCOMPRESSWZrrk)
NOFOLD(VPMOVDBZ128rrk)
NOFOLD(VPMOVDBZ256rrk)
NOFOLD(VPMOVDBZrrk)
NOFOLD(VPMOVDWZ128rrk)
NOFOLD(VPMOVDWZ256rrk)
NOFOLD(VPMOVDWZrrk)
NOFOLD(VPMOVQBZ128rrk)
NOFOLD(VPMOVQBZ256rrk)
NOFOLD(VPMOVQBZrrk)
NOFOLD(VPMOVQDZ128rrk)
NOFOLD(VPMOVQDZ256rrk)
NOFOLD(VPMOVQDZrrk)
NOFOLD(VPMOVQWZ128rrk)
NOFOLD(VPMOVQWZ256rrk)
NOFOLD(VPMOVQWZrrk)
NOFOLD(VPMOVSDBZ128rrk)
NOFOLD(VPMOVSDBZ256rrk)
NOFOLD(VPMOVSDBZrrk)
NOFOLD(VPMOVSDWZ128rrk)
NOFOLD(VPMOVSDWZ256rrk)
NOFOLD(VPMOVSDWZrrk)
NOFOLD(VPMOVSQBZ128rrk)
NOFOLD(VPMOVSQBZ256rrk)
NOFOLD(VPMOVSQBZrrk)
NOFOLD(VPMOVSQDZ128rrk)
NOFOLD(VPMOVSQDZ256rrk)
NOFOLD(VPMOVSQDZrrk)
NOFOLD(VPMOVSQWZ128rrk)
NOFOLD(VPMOVSQWZ256rrk)
NOFOLD(VPMOVSQWZrrk)
NOFOLD(VPMOVSWBZ128rrk)
NOFOLD(VPMOVSWBZ256rrk)
NOFOLD(VPMOVSWBZrrk)
NOFOLD(VPMOVUSDBZ128rrk)
NOFOLD(VPMOVUSDBZ256rrk)
NOFOLD(VPMOVUSDBZrrk)
NOFOLD(VPMOVUSDWZ128rrk)
NOFOLD(VPMOVUSDWZ256rrk)
NOFOLD(VPMOVUSDWZrrk)
NOFOLD(VPMOVUSQBZ128rrk)
NOFOLD(VPMOVUSQBZ256rrk)
NOFOLD(VPMOVUSQBZrrk)
NOFOLD(VPMOVUSQDZ128rrk)
NOFOLD(VPMOVUSQDZ256rrk)
NOFOLD(VPMOVUSQDZrrk)
NOFOLD(VPMOVUSQWZ128rrk)
NOFOLD(VPMOVUSQWZ256rrk)
NOFOLD(VPMOVUSQWZrrk)
NOFOLD(VPMOVUSWBZ128rrk)
NOFOLD(VPMOVUSWBZ256rrk)
NOFOLD(VPMOVUSWBZrrk)
NOFOLD(VPMOVWBZ128rrk)
NOFOLD(VPMOVWBZ256rrk)
NOFOLD(VPMOVWBZrrk)
NOFOLD(ARPL16rr)
NOFOLD(BT16rr)
NOFOLD(BT32rr)
NOFOLD(BT64rr)
NOFOLD(CMPXCHG16rr)
NOFOLD(CMPXCHG32rr)
NOFOLD(CMPXCHG64rr)
NOFOLD(CMPXCHG8rr)
NOFOLD(LLDT16r)
NOFOLD(LMSW16r)
NOFOLD(LTRr)
NOFOLD(NOOPLr)
NOFOLD(NOOPQr)
NOFOLD(NOOPWr)
NOFOLD(POP16rmr)
NOFOLD(POP32rmr)
NOFOLD(POP64rmr)
NOFOLD(PUSH16rmr)
NOFOLD(PUSH32rmr)
NOFOLD(PUSH64rmr)
NOFOLD(VCOMPRESSPDZ128rr)
NOFOLD(VCOMPRESSPDZ256rr)
NOFOLD(VCOMPRESSPDZrr)
NOFOLD(VCOMPRESSPSZ128rr)
NOFOLD(VCOMPRESSPSZ256rr)
NOFOLD(VCOMPRESSPSZrr)
NOFOLD(VERRr)
NOFOLD(VERWr)
NOFOLD(VMREAD32rr)
NOFOLD(VMREAD64rr)
NOFOLD(VPCOMPRESSBZ128rr)
NOFOLD(VPCOMPRESSBZ256rr)
NOFOLD(VPCOMPRESSBZrr)
NOFOLD(VPCOMPRESSDZ128rr)
NOFOLD(VPCOMPRESSDZ256rr)
NOFOLD(VPCOMPRESSDZrr)
NOFOLD(VPCOMPRESSQZ128rr)
NOFOLD(VPCOMPRESSQZ256rr)
NOFOLD(VPCOMPRESSQZrr)
NOFOLD(VPCOMPRESSWZ128rr)
NOFOLD(VPCOMPRESSWZ256rr)
NOFOLD(VPCOMPRESSWZrr)
NOFOLD(LAR16rr)
NOFOLD(LAR32rr)
NOFOLD(LAR64rr)
NOFOLD(LSL16rr)
NOFOLD(LSL32rr)
NOFOLD(LSL64rr)
NOFOLD(MOVSX16rr16)
NOFOLD(MOVZX16rr16)
NOFOLD(VMWRITE32rr)
NOFOLD(VMWRITE64rr)
NOFOLD(VBLENDMPDZ128rrkz)
NOFOLD(VBLENDMPDZ256rrkz)
NOFOLD(VBLENDMPDZrrkz)
NOFOLD(VBLENDMPSZ128rrkz)
NOFOLD(VBLENDMPSZ256rrkz)
NOFOLD(VBLENDMPSZrrkz)
NOFOLD(VPBLENDMBZ128rrkz)
NOFOLD(VPBLENDMBZ256rrkz)
NOFOLD(VPBLENDMBZrrkz)
NOFOLD(VPBLENDMDZ128rrkz)
NOFOLD(VPBLENDMDZ256rrkz)
NOFOLD(VPBLENDMDZrrkz)
NOFOLD(VPBLENDMQZ128rrkz)
NOFOLD(VPBLENDMQZ256rrkz)
NOFOLD(VPBLENDMQZrrkz)
NOFOLD(VPBLENDMWZ128rrkz)
NOFOLD(VPBLENDMWZ256rrkz)
NOFOLD(VPBLENDMWZrrkz)
NOFOLD(UD1Lr)
NOFOLD(UD1Qr)
NOFOLD(UD1Wr)
// Exclude these two because they would conflict with
// {MMX_MOVD64from64rr, MMX_MOVQ64mr} in the unfolding table.
NOFOLD(MMX_MOVQ64rr)
NOFOLD(MMX_MOVQ64rr_REV)
// INSERTPSrmi has no count_s field, while INSERTPSrri does. count_s selects
// which element of the source vector is inserted; the memory form always uses
// the loaded 32-bit value as the source, so count_s is implicitly 0. When
// count_s != 0, INSERTPSrri therefore cannot be folded into INSERTPSrmi.
//
// The following folding is only legal when count_s == 0:
//   load xmm0, m32
//   INSERTPSrri xmm1, xmm0, imm
// =>
//   INSERTPSrmi xmm1, m32, imm
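// The count_s == 0 case can still be folded by custom C++ folding code that
// inspects the immediate; a hedged sketch of that guard (the operand indexing
// below is illustrative only, not a verbatim copy of the real special case):
//
//   unsigned Imm = MI.getOperand(MI.getNumOperands() - 1).getImm();
//   unsigned CountS = (Imm >> 6) & 0x3; // imm8[7:6] selects the src element.
//   if (CountS != 0)
//     return nullptr; // Memory form always reads element 0; cannot fold.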
NOFOLD(INSERTPSrri)
NOFOLD(VINSERTPSZrri)
NOFOLD(VINSERTPSrri)
// Memory faults are suppressed for CFCMOV with a memory operand: the memory
// access is not performed (and cannot fault) when the condition is false, so
// the memory form is not equivalent to a separate load plus the register form.
NOFOLD(CFCMOV16rr_REV)
NOFOLD(CFCMOV32rr_REV)
NOFOLD(CFCMOV64rr_REV)
NOFOLD(CFCMOV16rr_ND)
NOFOLD(CFCMOV32rr_ND)
NOFOLD(CFCMOV64rr_ND)
#undef NOFOLD

#ifndef ENTRY
#define ENTRY(REG, MEM, FLAGS)
#endif
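// Each ENTRY(REG, MEM, FLAGS) pairs a register-form opcode with its
// memory-form counterpart plus folding flags such as TB_FOLDED_LOAD,
// TB_FOLDED_STORE, TB_NO_REVERSE, and TB_NO_FORWARD. A minimal consumer
// sketch (the table name below is illustrative):
//
//   #define ENTRY(REG, MEM, FLAGS) {X86::REG, X86::MEM, FLAGS},
//   static const X86FoldTableEntry ManualMapSet[] = {
//   #include "X86ManualFoldTables.def"
//   };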
// The following entries are added manually because the encoding of the
// register form does not match the encoding of the memory form.
ENTRY(ADD16ri_DB, ADD16mi, TB_NO_REVERSE)
ENTRY(ADD16rr_DB, ADD16mr, TB_NO_REVERSE)
ENTRY(ADD32ri_DB, ADD32mi, TB_NO_REVERSE)
ENTRY(ADD32rr_DB, ADD32mr, TB_NO_REVERSE)
ENTRY(ADD64ri32_DB, ADD64mi32, TB_NO_REVERSE)
ENTRY(ADD64rr_DB, ADD64mr, TB_NO_REVERSE)
ENTRY(ADD8ri_DB, ADD8mi, TB_NO_REVERSE)
ENTRY(ADD8rr_DB, ADD8mr, TB_NO_REVERSE)
ENTRY(ADD16rr_DB, ADD16rm, TB_NO_REVERSE)
ENTRY(ADD32rr_DB, ADD32rm, TB_NO_REVERSE)
ENTRY(ADD64rr_DB, ADD64rm, TB_NO_REVERSE)
ENTRY(ADD8rr_DB, ADD8rm, TB_NO_REVERSE)
ENTRY(MMX_MOVD64from64rr, MMX_MOVQ64mr, TB_FOLDED_STORE)
ENTRY(MMX_MOVD64grr, MMX_MOVD64mr, TB_FOLDED_STORE)
ENTRY(MOV64toSDrr, MOV64mr, TB_FOLDED_STORE | TB_NO_REVERSE)
ENTRY(MOVDI2SSrr, MOV32mr, TB_FOLDED_STORE | TB_NO_REVERSE)
ENTRY(MOVPQIto64rr, MOVPQI2QImr, TB_FOLDED_STORE | TB_NO_REVERSE)
ENTRY(MOVSDto64rr, MOVSDmr, TB_FOLDED_STORE | TB_NO_REVERSE)
ENTRY(MOVSS2DIrr, MOVSSmr, TB_FOLDED_STORE)
ENTRY(MOVLHPSrr, MOVHPSrm, TB_NO_REVERSE)
ENTRY(PUSH16r, PUSH16rmm, TB_FOLDED_LOAD)
ENTRY(PUSH32r, PUSH32rmm, TB_FOLDED_LOAD)
ENTRY(PUSH64r, PUSH64rmm, TB_FOLDED_LOAD)
ENTRY(TAILJMPr, TAILJMPm, TB_FOLDED_LOAD)
ENTRY(TAILJMPr64, TAILJMPm64, TB_FOLDED_LOAD)
ENTRY(TAILJMPr64_REX, TAILJMPm64_REX, TB_FOLDED_LOAD)
ENTRY(TCRETURNri, TCRETURNmi, TB_FOLDED_LOAD | TB_NO_FORWARD)
ENTRY(TCRETURNri64, TCRETURNmi64, TB_FOLDED_LOAD | TB_NO_FORWARD)
ENTRY(VMOVLHPSZrr, VMOVHPSZ128rm, TB_NO_REVERSE)
ENTRY(VMOVLHPSrr, VMOVHPSrm, TB_NO_REVERSE)
ENTRY(VMOV64toSDZrr, MOV64mr, TB_FOLDED_STORE | TB_NO_REVERSE)
ENTRY(VMOV64toSDrr, MOV64mr, TB_FOLDED_STORE | TB_NO_REVERSE)
ENTRY(VMOVDI2SSZrr, MOV32mr, TB_FOLDED_STORE | TB_NO_REVERSE)
ENTRY(VMOVDI2SSrr, MOV32mr, TB_FOLDED_STORE | TB_NO_REVERSE)
ENTRY(VMOVPQIto64Zrr, VMOVPQI2QIZmr, TB_FOLDED_STORE | TB_NO_REVERSE)
ENTRY(VMOVPQIto64rr, VMOVPQI2QImr, TB_FOLDED_STORE | TB_NO_REVERSE)
ENTRY(VMOVSDto64Zrr, VMOVSDZmr, TB_FOLDED_STORE | TB_NO_REVERSE)
ENTRY(VMOVSDto64rr, VMOVSDmr, TB_FOLDED_STORE | TB_NO_REVERSE)
ENTRY(VMOVSS2DIZrr, VMOVSSZmr, TB_FOLDED_STORE)
ENTRY(VMOVSS2DIrr, VMOVSSmr, TB_FOLDED_STORE)
ENTRY(MMX_MOVD64to64rr, MMX_MOVQ64rm, 0)
ENTRY(MOV64toPQIrr, MOVQI2PQIrm, TB_NO_REVERSE)
ENTRY(MOV64toSDrr, MOVSDrm_alt, TB_NO_REVERSE)
ENTRY(MOVDI2SSrr, MOVSSrm_alt, 0)
ENTRY(VMOV64toPQIZrr, VMOVQI2PQIZrm, TB_NO_REVERSE)
ENTRY(VMOV64toPQIrr, VMOVQI2PQIrm, TB_NO_REVERSE)
ENTRY(VMOV64toSDZrr, VMOVSDZrm_alt, TB_NO_REVERSE)
ENTRY(VMOV64toSDrr, VMOVSDrm_alt, TB_NO_REVERSE)
ENTRY(VMOVDI2SSZrr, VMOVSSZrm_alt, 0)
ENTRY(VMOVDI2SSrr, VMOVSSrm_alt, 0)
ENTRY(MOVSDrr, MOVLPDrm, TB_NO_REVERSE)
ENTRY(VMOVSDZrr, VMOVLPDZ128rm, TB_NO_REVERSE)
ENTRY(VMOVSDrr, VMOVLPDrm, TB_NO_REVERSE)
#undef ENTRY