1//===- X86ManualFoldTables.def ----------------------------*- C++ -*-==//
2//
3// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
4// See https://llvm.org/LICENSE.txt for license information.
5// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
6//
7//===----------------------------------------------------------------------===//
8// \file
9// This file defines all the entries in X86 memory folding tables that need
10// special handling.
11//===----------------------------------------------------------------------===//
12
#ifndef NOFOLD
#define NOFOLD(INSN)
#endif
// Instructions that must never be folded into the auto-generated memory
// folding tables, even though their names/encodings would otherwise match.
//
// Bit test-and-modify (BTC/BTR/BTS) register forms.
// NOTE(review): presumably excluded because the memory forms use bit-string
// addressing for the offset operand -- confirm against the Intel SDM.
NOFOLD(BTC16rr)
NOFOLD(BTC32rr)
NOFOLD(BTC64rr)
NOFOLD(BTR16rr)
NOFOLD(BTR32rr)
NOFOLD(BTR64rr)
NOFOLD(BTS16rr)
NOFOLD(BTS32rr)
NOFOLD(BTS64rr)
// AVX-512 masked compress register forms (rrk).
NOFOLD(VCOMPRESSPDZ128rrk)
NOFOLD(VCOMPRESSPDZ256rrk)
NOFOLD(VCOMPRESSPDZrrk)
NOFOLD(VCOMPRESSPSZ128rrk)
NOFOLD(VCOMPRESSPSZ256rrk)
NOFOLD(VCOMPRESSPSZrrk)
// AVX-512 masked float-to-half conversion register forms.
NOFOLD(VCVTPS2PHZ128rrk)
NOFOLD(VCVTPS2PHZ256rrk)
NOFOLD(VCVTPS2PHZrrk)
// AVX-512 masked subvector extract register forms (rrik).
NOFOLD(VEXTRACTF32X4Z256rrik)
NOFOLD(VEXTRACTF32X4Zrrik)
NOFOLD(VEXTRACTF32X8Zrrik)
NOFOLD(VEXTRACTF64X2Z256rrik)
NOFOLD(VEXTRACTF64X2Zrrik)
NOFOLD(VEXTRACTF64X4Zrrik)
NOFOLD(VEXTRACTI32X4Z256rrik)
NOFOLD(VEXTRACTI32X4Zrrik)
NOFOLD(VEXTRACTI32X8Zrrik)
NOFOLD(VEXTRACTI64X2Z256rrik)
NOFOLD(VEXTRACTI64X2Zrrik)
NOFOLD(VEXTRACTI64X4Zrrik)
// AVX-512 masked register-to-memory store forms (mrk).
NOFOLD(VMOVAPDZ128mrk)
NOFOLD(VMOVAPDZ256mrk)
NOFOLD(VMOVAPDZmrk)
NOFOLD(VMOVAPSZ128mrk)
NOFOLD(VMOVAPSZ256mrk)
NOFOLD(VMOVAPSZmrk)
NOFOLD(VMOVDQA32Z128mrk)
NOFOLD(VMOVDQA32Z256mrk)
NOFOLD(VMOVDQA32Zmrk)
NOFOLD(VMOVDQA64Z128mrk)
NOFOLD(VMOVDQA64Z256mrk)
NOFOLD(VMOVDQA64Zmrk)
NOFOLD(VMOVDQU16Z128mrk)
NOFOLD(VMOVDQU16Z256mrk)
NOFOLD(VMOVDQU16Zmrk)
NOFOLD(VMOVDQU32Z128mrk)
NOFOLD(VMOVDQU32Z256mrk)
NOFOLD(VMOVDQU32Zmrk)
NOFOLD(VMOVDQU64Z128mrk)
NOFOLD(VMOVDQU64Z256mrk)
NOFOLD(VMOVDQU64Zmrk)
NOFOLD(VMOVDQU8Z128mrk)
NOFOLD(VMOVDQU8Z256mrk)
NOFOLD(VMOVDQU8Zmrk)
NOFOLD(VMOVUPDZ128mrk)
NOFOLD(VMOVUPDZ256mrk)
NOFOLD(VMOVUPDZmrk)
NOFOLD(VMOVUPSZ128mrk)
NOFOLD(VMOVUPSZ256mrk)
NOFOLD(VMOVUPSZmrk)
// AVX-512 masked integer compress register forms (rrk).
NOFOLD(VPCOMPRESSBZ128rrk)
NOFOLD(VPCOMPRESSBZ256rrk)
NOFOLD(VPCOMPRESSBZrrk)
NOFOLD(VPCOMPRESSDZ128rrk)
NOFOLD(VPCOMPRESSDZ256rrk)
NOFOLD(VPCOMPRESSDZrrk)
NOFOLD(VPCOMPRESSQZ128rrk)
NOFOLD(VPCOMPRESSQZ256rrk)
NOFOLD(VPCOMPRESSQZrrk)
NOFOLD(VPCOMPRESSWZ128rrk)
NOFOLD(VPCOMPRESSWZ256rrk)
NOFOLD(VPCOMPRESSWZrrk)
// AVX-512 masked truncating VPMOV* register forms (rrk).
NOFOLD(VPMOVDBZ128rrk)
NOFOLD(VPMOVDBZ256rrk)
NOFOLD(VPMOVDBZrrk)
NOFOLD(VPMOVDWZ128rrk)
NOFOLD(VPMOVDWZ256rrk)
NOFOLD(VPMOVDWZrrk)
NOFOLD(VPMOVQBZ128rrk)
NOFOLD(VPMOVQBZ256rrk)
NOFOLD(VPMOVQBZrrk)
NOFOLD(VPMOVQDZ128rrk)
NOFOLD(VPMOVQDZ256rrk)
NOFOLD(VPMOVQDZrrk)
NOFOLD(VPMOVQWZ128rrk)
NOFOLD(VPMOVQWZ256rrk)
NOFOLD(VPMOVQWZrrk)
NOFOLD(VPMOVSDBZ128rrk)
NOFOLD(VPMOVSDBZ256rrk)
NOFOLD(VPMOVSDBZrrk)
NOFOLD(VPMOVSDWZ128rrk)
NOFOLD(VPMOVSDWZ256rrk)
NOFOLD(VPMOVSDWZrrk)
NOFOLD(VPMOVSQBZ128rrk)
NOFOLD(VPMOVSQBZ256rrk)
NOFOLD(VPMOVSQBZrrk)
NOFOLD(VPMOVSQDZ128rrk)
NOFOLD(VPMOVSQDZ256rrk)
NOFOLD(VPMOVSQDZrrk)
NOFOLD(VPMOVSQWZ128rrk)
NOFOLD(VPMOVSQWZ256rrk)
NOFOLD(VPMOVSQWZrrk)
NOFOLD(VPMOVSWBZ128rrk)
NOFOLD(VPMOVSWBZ256rrk)
NOFOLD(VPMOVSWBZrrk)
NOFOLD(VPMOVUSDBZ128rrk)
NOFOLD(VPMOVUSDBZ256rrk)
NOFOLD(VPMOVUSDBZrrk)
NOFOLD(VPMOVUSDWZ128rrk)
NOFOLD(VPMOVUSDWZ256rrk)
NOFOLD(VPMOVUSDWZrrk)
NOFOLD(VPMOVUSQBZ128rrk)
NOFOLD(VPMOVUSQBZ256rrk)
NOFOLD(VPMOVUSQBZrrk)
NOFOLD(VPMOVUSQDZ128rrk)
NOFOLD(VPMOVUSQDZ256rrk)
NOFOLD(VPMOVUSQDZrrk)
NOFOLD(VPMOVUSQWZ128rrk)
NOFOLD(VPMOVUSQWZ256rrk)
NOFOLD(VPMOVUSQWZrrk)
NOFOLD(VPMOVUSWBZ128rrk)
NOFOLD(VPMOVUSWBZ256rrk)
NOFOLD(VPMOVUSWBZrrk)
NOFOLD(VPMOVWBZ128rrk)
NOFOLD(VPMOVWBZ256rrk)
NOFOLD(VPMOVWBZrrk)
// Miscellaneous system and legacy instructions.
NOFOLD(ARPL16rr)
NOFOLD(BT16rr)
NOFOLD(BT32rr)
NOFOLD(BT64rr)
NOFOLD(CMPXCHG16rr)
NOFOLD(CMPXCHG32rr)
NOFOLD(CMPXCHG64rr)
NOFOLD(CMPXCHG8rr)
NOFOLD(LLDT16r)
NOFOLD(LMSW16r)
NOFOLD(LTRr)
NOFOLD(NOOPLr)
NOFOLD(NOOPQr)
NOFOLD(NOOPWr)
NOFOLD(POP16rmr)
NOFOLD(POP32rmr)
NOFOLD(POP64rmr)
NOFOLD(PUSH16rmr)
NOFOLD(PUSH32rmr)
NOFOLD(PUSH64rmr)
// Unmasked compress register forms.
NOFOLD(VCOMPRESSPDZ128rr)
NOFOLD(VCOMPRESSPDZ256rr)
NOFOLD(VCOMPRESSPDZrr)
NOFOLD(VCOMPRESSPSZ128rr)
NOFOLD(VCOMPRESSPSZ256rr)
NOFOLD(VCOMPRESSPSZrr)
NOFOLD(VERRr)
NOFOLD(VERWr)
NOFOLD(VMREAD32rr)
NOFOLD(VMREAD64rr)
NOFOLD(VPCOMPRESSBZ128rr)
NOFOLD(VPCOMPRESSBZ256rr)
NOFOLD(VPCOMPRESSBZrr)
NOFOLD(VPCOMPRESSDZ128rr)
NOFOLD(VPCOMPRESSDZ256rr)
NOFOLD(VPCOMPRESSDZrr)
NOFOLD(VPCOMPRESSQZ128rr)
NOFOLD(VPCOMPRESSQZ256rr)
NOFOLD(VPCOMPRESSQZrr)
NOFOLD(VPCOMPRESSWZ128rr)
NOFOLD(VPCOMPRESSWZ256rr)
NOFOLD(VPCOMPRESSWZrr)
// LAR/LSL, 16-bit extension, and VMWRITE register forms.
NOFOLD(LAR16rr)
NOFOLD(LAR32rr)
NOFOLD(LAR64rr)
NOFOLD(LSL16rr)
NOFOLD(LSL32rr)
NOFOLD(LSL64rr)
NOFOLD(MOVSX16rr16)
NOFOLD(MOVZX16rr16)
NOFOLD(VMWRITE32rr)
NOFOLD(VMWRITE64rr)
// AVX-512 zero-masking blend register forms (rrkz).
NOFOLD(VBLENDMPDZ128rrkz)
NOFOLD(VBLENDMPDZ256rrkz)
NOFOLD(VBLENDMPDZrrkz)
NOFOLD(VBLENDMPSZ128rrkz)
NOFOLD(VBLENDMPSZ256rrkz)
NOFOLD(VBLENDMPSZrrkz)
NOFOLD(VPBLENDMBZ128rrkz)
NOFOLD(VPBLENDMBZ256rrkz)
NOFOLD(VPBLENDMBZrrkz)
NOFOLD(VPBLENDMDZ128rrkz)
NOFOLD(VPBLENDMDZ256rrkz)
NOFOLD(VPBLENDMDZrrkz)
NOFOLD(VPBLENDMQZ128rrkz)
NOFOLD(VPBLENDMQZ256rrkz)
NOFOLD(VPBLENDMQZrrkz)
NOFOLD(VPBLENDMWZ128rrkz)
NOFOLD(VPBLENDMWZ256rrkz)
NOFOLD(VPBLENDMWZrrkz)
NOFOLD(UD1Lr)
NOFOLD(UD1Qr)
NOFOLD(UD1Wr)
// Exclude these two because they would conflict with
// {MMX_MOVD64from64rr, MMX_MOVQ64mr} in the unfolding table.
NOFOLD(MMX_MOVQ64rr)
NOFOLD(MMX_MOVQ64rr_REV)
// INSERTPSrmi has no count_s while INSERTPSrri has count_s.
// count_s indicates which element in the dst vector is inserted.
// If count_s != 0, we cannot fold INSERTPSrri into INSERTPSrmi.
//
// The following folding can happen when count_s == 0:
//   load xmm0, m32
//   INSERTPSrri xmm1, xmm0, imm
// =>
//   INSERTPSrmi xmm1, m32, imm
NOFOLD(INSERTPSrri)
NOFOLD(VINSERTPSZrri)
NOFOLD(VINSERTPSrri)
// Memory faults are suppressed for CFCMOV with a memory operand, so the
// register forms must not be folded.
NOFOLD(CFCMOV16rr_REV)
NOFOLD(CFCMOV32rr_REV)
NOFOLD(CFCMOV64rr_REV)
NOFOLD(CFCMOV16rr_ND)
NOFOLD(CFCMOV32rr_ND)
NOFOLD(CFCMOV64rr_ND)
#undef NOFOLD
238
#ifndef ENTRY
#define ENTRY(REG, MEM, FLAGS)
#endif
// The following entries are added manually because the encoding of the
// register form does not match the encoding of the memory form, so the
// mapping cannot be derived automatically.
// FLAGS is a combination of TB_* folding-table flags (0 means no flags);
// the TB_* constants are defined in the folding-table infrastructure.
ENTRY(ADD16ri_DB, ADD16mi, TB_NO_REVERSE)
ENTRY(ADD16rr_DB, ADD16mr, TB_NO_REVERSE)
ENTRY(ADD32ri_DB, ADD32mi, TB_NO_REVERSE)
ENTRY(ADD32rr_DB, ADD32mr, TB_NO_REVERSE)
ENTRY(ADD64ri32_DB, ADD64mi32, TB_NO_REVERSE)
ENTRY(ADD64rr_DB, ADD64mr, TB_NO_REVERSE)
ENTRY(ADD8ri_DB, ADD8mi, TB_NO_REVERSE)
ENTRY(ADD8rr_DB, ADD8mr, TB_NO_REVERSE)
ENTRY(ADD16rr_DB, ADD16rm, TB_NO_REVERSE)
ENTRY(ADD32rr_DB, ADD32rm, TB_NO_REVERSE)
ENTRY(ADD64rr_DB, ADD64rm, TB_NO_REVERSE)
ENTRY(ADD8rr_DB, ADD8rm, TB_NO_REVERSE)
ENTRY(MMX_MOVD64from64rr, MMX_MOVQ64mr, TB_FOLDED_STORE)
ENTRY(MMX_MOVD64grr, MMX_MOVD64mr, TB_FOLDED_STORE)
ENTRY(MOV64toSDrr, MOV64mr, TB_FOLDED_STORE | TB_NO_REVERSE)
ENTRY(MOVDI2SSrr, MOV32mr, TB_FOLDED_STORE | TB_NO_REVERSE)
ENTRY(MOVPQIto64rr, MOVPQI2QImr, TB_FOLDED_STORE | TB_NO_REVERSE)
ENTRY(MOVSDto64rr, MOVSDmr, TB_FOLDED_STORE | TB_NO_REVERSE)
ENTRY(MOVSS2DIrr, MOVSSmr, TB_FOLDED_STORE)
ENTRY(MOVLHPSrr, MOVHPSrm, TB_NO_REVERSE)
ENTRY(PUSH16r, PUSH16rmm, TB_FOLDED_LOAD)
ENTRY(PUSH32r, PUSH32rmm, TB_FOLDED_LOAD)
ENTRY(PUSH64r, PUSH64rmm, TB_FOLDED_LOAD)
ENTRY(TAILJMPr, TAILJMPm, TB_FOLDED_LOAD)
ENTRY(TAILJMPr64, TAILJMPm64, TB_FOLDED_LOAD)
ENTRY(TAILJMPr64_REX, TAILJMPm64_REX, TB_FOLDED_LOAD)
ENTRY(TCRETURNri, TCRETURNmi, TB_FOLDED_LOAD | TB_NO_FORWARD)
ENTRY(TCRETURNri64, TCRETURNmi64, TB_FOLDED_LOAD | TB_NO_FORWARD)
ENTRY(VMOVLHPSZrr, VMOVHPSZ128rm, TB_NO_REVERSE)
ENTRY(VMOVLHPSrr, VMOVHPSrm, TB_NO_REVERSE)
ENTRY(VMOV64toSDZrr, MOV64mr, TB_FOLDED_STORE | TB_NO_REVERSE)
ENTRY(VMOV64toSDrr, MOV64mr, TB_FOLDED_STORE | TB_NO_REVERSE)
ENTRY(VMOVDI2SSZrr, MOV32mr, TB_FOLDED_STORE | TB_NO_REVERSE)
ENTRY(VMOVDI2SSrr, MOV32mr, TB_FOLDED_STORE | TB_NO_REVERSE)
ENTRY(VMOVPQIto64Zrr, VMOVPQI2QIZmr, TB_FOLDED_STORE | TB_NO_REVERSE)
ENTRY(VMOVPQIto64rr, VMOVPQI2QImr, TB_FOLDED_STORE | TB_NO_REVERSE)
ENTRY(VMOVSDto64Zrr, VMOVSDZmr, TB_FOLDED_STORE | TB_NO_REVERSE)
ENTRY(VMOVSDto64rr, VMOVSDmr, TB_FOLDED_STORE | TB_NO_REVERSE)
ENTRY(VMOVSS2DIZrr, VMOVSSZmr, TB_FOLDED_STORE)
ENTRY(VMOVSS2DIrr, VMOVSSmr, TB_FOLDED_STORE)
ENTRY(MMX_MOVD64to64rr, MMX_MOVQ64rm, 0)
ENTRY(MOV64toPQIrr, MOVQI2PQIrm, TB_NO_REVERSE)
ENTRY(MOV64toSDrr, MOVSDrm_alt, TB_NO_REVERSE)
ENTRY(MOVDI2SSrr, MOVSSrm_alt, 0)
ENTRY(VMOV64toPQIZrr, VMOVQI2PQIZrm, TB_NO_REVERSE)
ENTRY(VMOV64toPQIrr, VMOVQI2PQIrm, TB_NO_REVERSE)
ENTRY(VMOV64toSDZrr, VMOVSDZrm_alt, TB_NO_REVERSE)
ENTRY(VMOV64toSDrr, VMOVSDrm_alt, TB_NO_REVERSE)
ENTRY(VMOVDI2SSZrr, VMOVSSZrm_alt, 0)
ENTRY(VMOVDI2SSrr, VMOVSSrm_alt, 0)
ENTRY(MOVSDrr, MOVLPDrm, TB_NO_REVERSE)
ENTRY(VMOVSDZrr, VMOVLPDZ128rm, TB_NO_REVERSE)
ENTRY(VMOVSDrr, VMOVLPDrm, TB_NO_REVERSE)
#undef ENTRY
// Prefixes for instructions that are unsafe for masked-load folding.
// Folding with the same mask is only safe if every active destination
// element reads only from source elements that are also active under the
// same mask. These instructions perform element rearrangement/broadcasting
// that may cause active destination elements to read from masked-off
// source elements.
// Each PREFIX matches opcodes of the form:
//   PREFIX ["" | Z | Z128 | Z256] [rr | ri | rri] [k | kz]
#ifndef NOFOLD_SAME_MASK_PREFIX
#define NOFOLD_SAME_MASK_PREFIX(PREFIX)
#endif
NOFOLD_SAME_MASK_PREFIX(VALIGND)
NOFOLD_SAME_MASK_PREFIX(VALIGNQ)
NOFOLD_SAME_MASK_PREFIX(VBROADCASTF32X2)
NOFOLD_SAME_MASK_PREFIX(VBROADCASTI32X2)
NOFOLD_SAME_MASK_PREFIX(VBROADCASTSD)
NOFOLD_SAME_MASK_PREFIX(VBROADCASTSS)
NOFOLD_SAME_MASK_PREFIX(VDBPSADBW)
NOFOLD_SAME_MASK_PREFIX(VEXPANDPD)
NOFOLD_SAME_MASK_PREFIX(VEXPANDPS)
NOFOLD_SAME_MASK_PREFIX(VGF2P8AFFINEINVQB)
NOFOLD_SAME_MASK_PREFIX(VGF2P8AFFINEQB)
NOFOLD_SAME_MASK_PREFIX(VINSERTF32X4)
NOFOLD_SAME_MASK_PREFIX(VINSERTF32X8)
NOFOLD_SAME_MASK_PREFIX(VINSERTF64X2)
NOFOLD_SAME_MASK_PREFIX(VINSERTF64X4)
NOFOLD_SAME_MASK_PREFIX(VINSERTI32X4)
NOFOLD_SAME_MASK_PREFIX(VINSERTI32X8)
NOFOLD_SAME_MASK_PREFIX(VINSERTI64X2)
NOFOLD_SAME_MASK_PREFIX(VINSERTI64X4)
NOFOLD_SAME_MASK_PREFIX(VMOVDDUP)
NOFOLD_SAME_MASK_PREFIX(VMOVSHDUP)
NOFOLD_SAME_MASK_PREFIX(VMOVSLDUP)
NOFOLD_SAME_MASK_PREFIX(VMPSADBW)
NOFOLD_SAME_MASK_PREFIX(VPACKSSDW)
NOFOLD_SAME_MASK_PREFIX(VPACKSSWB)
NOFOLD_SAME_MASK_PREFIX(VPACKUSDW)
NOFOLD_SAME_MASK_PREFIX(VPACKUSWB)
NOFOLD_SAME_MASK_PREFIX(VPALIGNR)
NOFOLD_SAME_MASK_PREFIX(VPBROADCASTB)
NOFOLD_SAME_MASK_PREFIX(VPBROADCASTD)
NOFOLD_SAME_MASK_PREFIX(VPBROADCASTQ)
NOFOLD_SAME_MASK_PREFIX(VPBROADCASTW)
NOFOLD_SAME_MASK_PREFIX(VPCONFLICTD)
NOFOLD_SAME_MASK_PREFIX(VPCONFLICTQ)
NOFOLD_SAME_MASK_PREFIX(VPERMB)
NOFOLD_SAME_MASK_PREFIX(VPERMD)
NOFOLD_SAME_MASK_PREFIX(VPERMI2B)
NOFOLD_SAME_MASK_PREFIX(VPERMI2D)
NOFOLD_SAME_MASK_PREFIX(VPERMI2PD)
NOFOLD_SAME_MASK_PREFIX(VPERMI2PS)
NOFOLD_SAME_MASK_PREFIX(VPERMI2Q)
NOFOLD_SAME_MASK_PREFIX(VPERMI2W)
NOFOLD_SAME_MASK_PREFIX(VPERMPD)
NOFOLD_SAME_MASK_PREFIX(VPERMPS)
NOFOLD_SAME_MASK_PREFIX(VPERMQ)
NOFOLD_SAME_MASK_PREFIX(VPERMT2B)
NOFOLD_SAME_MASK_PREFIX(VPERMT2D)
NOFOLD_SAME_MASK_PREFIX(VPERMT2PD)
NOFOLD_SAME_MASK_PREFIX(VPERMT2PS)
NOFOLD_SAME_MASK_PREFIX(VPERMT2Q)
NOFOLD_SAME_MASK_PREFIX(VPERMT2W)
NOFOLD_SAME_MASK_PREFIX(VPERMW)
NOFOLD_SAME_MASK_PREFIX(VPEXPANDB)
NOFOLD_SAME_MASK_PREFIX(VPEXPANDD)
NOFOLD_SAME_MASK_PREFIX(VPEXPANDQ)
NOFOLD_SAME_MASK_PREFIX(VPEXPANDW)
NOFOLD_SAME_MASK_PREFIX(VPMULTISHIFTQB)
NOFOLD_SAME_MASK_PREFIX(VPSHUFD)
NOFOLD_SAME_MASK_PREFIX(VPSHUFHW)
NOFOLD_SAME_MASK_PREFIX(VPSHUFLW)
NOFOLD_SAME_MASK_PREFIX(VPUNPCKHBW)
NOFOLD_SAME_MASK_PREFIX(VPUNPCKHDQ)
NOFOLD_SAME_MASK_PREFIX(VPUNPCKHQDQ)
NOFOLD_SAME_MASK_PREFIX(VPUNPCKHWD)
NOFOLD_SAME_MASK_PREFIX(VPUNPCKLBW)
NOFOLD_SAME_MASK_PREFIX(VPUNPCKLDQ)
NOFOLD_SAME_MASK_PREFIX(VPUNPCKLQDQ)
NOFOLD_SAME_MASK_PREFIX(VPUNPCKLWD)
NOFOLD_SAME_MASK_PREFIX(VSHUFF32X4)
NOFOLD_SAME_MASK_PREFIX(VSHUFF64X2)
NOFOLD_SAME_MASK_PREFIX(VSHUFI32X4)
NOFOLD_SAME_MASK_PREFIX(VSHUFI64X2)
NOFOLD_SAME_MASK_PREFIX(VSHUFPD)
NOFOLD_SAME_MASK_PREFIX(VSHUFPS)
NOFOLD_SAME_MASK_PREFIX(VUNPCKHPD)
NOFOLD_SAME_MASK_PREFIX(VUNPCKHPS)
NOFOLD_SAME_MASK_PREFIX(VUNPCKLPD)
NOFOLD_SAME_MASK_PREFIX(VUNPCKLPS)
#undef NOFOLD_SAME_MASK_PREFIX
#ifndef NOFOLD_SAME_MASK
#define NOFOLD_SAME_MASK(INSN)
#endif
// Individual opcodes (not prefix-matched) that are unsafe for same-mask
// load folding.
// VPERMILPD/VPERMILPS immediate (rik) forms: only the rik forms are listed
// here; the rrk forms are NOT blocked.
NOFOLD_SAME_MASK(VPERMILPDZ128rik)
NOFOLD_SAME_MASK(VPERMILPDZ128rikz)
NOFOLD_SAME_MASK(VPERMILPDZ256rik)
NOFOLD_SAME_MASK(VPERMILPDZ256rikz)
NOFOLD_SAME_MASK(VPERMILPDZrik)
NOFOLD_SAME_MASK(VPERMILPDZrikz)
NOFOLD_SAME_MASK(VPERMILPSZ128rik)
NOFOLD_SAME_MASK(VPERMILPSZ128rikz)
NOFOLD_SAME_MASK(VPERMILPSZ256rik)
NOFOLD_SAME_MASK(VPERMILPSZ256rikz)
NOFOLD_SAME_MASK(VPERMILPSZrik)
NOFOLD_SAME_MASK(VPERMILPSZrikz)
#undef NOFOLD_SAME_MASK
403