source: clamav/trunk/libclamav/c++/X86GenFastISel.inc@319

Last change on this file was r319, checked in by Yuri Dario, 14 years ago:

clamav: update trunk to 0.97.

File size: 149.0 KB
1//===- TableGen'erated file -------------------------------------*- C++ -*-===//
2//
3// "Fast" Instruction Selector for the X86 target
4//
5// Automatically generated file, do not edit!
6//
7//===----------------------------------------------------------------------===//
8
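// The functions below follow a uniform three-level pattern: a top-level
// FastEmit_* entry point switches on the ISD/X86ISD opcode, per-opcode
// helpers switch on the source value type (VT) and, where needed, the
// return type (RetVT), and the leaf helpers emit one machine instruction
// through FastEmitInst_*. Each function returns the virtual register that
// holds the result, or 0 if fast instruction selection cannot handle the
// requested combination.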
9// FastEmit functions for ISD::Constant.
10
11unsigned FastEmit_ISD_Constant_MVT_i8_i(MVT RetVT, uint64_t imm0) {
12 if (RetVT.SimpleTy != MVT::i8)
13 return 0;
14 return FastEmitInst_i(X86::MOV8ri, X86::GR8RegisterClass, imm0);
15}
16
17unsigned FastEmit_ISD_Constant_MVT_i16_i(MVT RetVT, uint64_t imm0) {
18 if (RetVT.SimpleTy != MVT::i16)
19 return 0;
20 return FastEmitInst_i(X86::MOV16ri, X86::GR16RegisterClass, imm0);
21}
22
23unsigned FastEmit_ISD_Constant_MVT_i32_i(MVT RetVT, uint64_t imm0) {
24 if (RetVT.SimpleTy != MVT::i32)
25 return 0;
26 return FastEmitInst_i(X86::MOV32ri, X86::GR32RegisterClass, imm0);
27}
28
29unsigned FastEmit_ISD_Constant_MVT_i64_i(MVT RetVT, uint64_t imm0) {
30 if (RetVT.SimpleTy != MVT::i64)
31 return 0;
32 return FastEmitInst_i(X86::MOV64ri, X86::GR64RegisterClass, imm0);
33}
34
35unsigned FastEmit_ISD_Constant_i(MVT VT, MVT RetVT, uint64_t imm0) {
36 switch (VT.SimpleTy) {
37 case MVT::i8: return FastEmit_ISD_Constant_MVT_i8_i(RetVT, imm0);
38 case MVT::i16: return FastEmit_ISD_Constant_MVT_i16_i(RetVT, imm0);
39 case MVT::i32: return FastEmit_ISD_Constant_MVT_i32_i(RetVT, imm0);
40 case MVT::i64: return FastEmit_ISD_Constant_MVT_i64_i(RetVT, imm0);
41 default: return 0;
42 }
43}
44
45// Top-level FastEmit function.
46
47unsigned FastEmit_i(MVT VT, MVT RetVT, unsigned Opcode, uint64_t imm0) {
48 switch (Opcode) {
49 case ISD::Constant: return FastEmit_ISD_Constant_i(VT, RetVT, imm0);
50 default: return 0;
51 }
52}
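// Example (illustrative): FastEmit_i(MVT::i32, MVT::i32, ISD::Constant, 42)
// dispatches to FastEmit_ISD_Constant_MVT_i32_i, which emits X86::MOV32ri
// and returns the new virtual register; an unsupported opcode or type pair
// returns 0.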
53
54// FastEmit functions for ISD::ANY_EXTEND.
55
56unsigned FastEmit_ISD_ANY_EXTEND_MVT_i8_MVT_i16_r(unsigned Op0, bool Op0IsKill) {
57 return FastEmitInst_r(X86::MOVZX16rr8, X86::GR16RegisterClass, Op0, Op0IsKill);
58}
59
60unsigned FastEmit_ISD_ANY_EXTEND_MVT_i8_MVT_i32_r(unsigned Op0, bool Op0IsKill) {
61 return FastEmitInst_r(X86::MOVZX32rr8, X86::GR32RegisterClass, Op0, Op0IsKill);
62}
63
64unsigned FastEmit_ISD_ANY_EXTEND_MVT_i8_MVT_i64_r(unsigned Op0, bool Op0IsKill) {
65 return FastEmitInst_r(X86::MOVZX64rr8, X86::GR64RegisterClass, Op0, Op0IsKill);
66}
67
68unsigned FastEmit_ISD_ANY_EXTEND_MVT_i8_r(MVT RetVT, unsigned Op0, bool Op0IsKill) {
69switch (RetVT.SimpleTy) {
70 case MVT::i16: return FastEmit_ISD_ANY_EXTEND_MVT_i8_MVT_i16_r(Op0, Op0IsKill);
71 case MVT::i32: return FastEmit_ISD_ANY_EXTEND_MVT_i8_MVT_i32_r(Op0, Op0IsKill);
72 case MVT::i64: return FastEmit_ISD_ANY_EXTEND_MVT_i8_MVT_i64_r(Op0, Op0IsKill);
73 default: return 0;
74}
75}
76
77unsigned FastEmit_ISD_ANY_EXTEND_MVT_i16_r(MVT RetVT, unsigned Op0, bool Op0IsKill) {
78 if (RetVT.SimpleTy != MVT::i64)
79 return 0;
80 return FastEmitInst_r(X86::MOVZX64rr16, X86::GR64RegisterClass, Op0, Op0IsKill);
81}
82
83unsigned FastEmit_ISD_ANY_EXTEND_r(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill) {
84 switch (VT.SimpleTy) {
85 case MVT::i8: return FastEmit_ISD_ANY_EXTEND_MVT_i8_r(RetVT, Op0, Op0IsKill);
86 case MVT::i16: return FastEmit_ISD_ANY_EXTEND_MVT_i16_r(RetVT, Op0, Op0IsKill);
87 default: return 0;
88 }
89}
90
91// FastEmit functions for ISD::BIT_CONVERT.
92
93unsigned FastEmit_ISD_BIT_CONVERT_MVT_i32_r(MVT RetVT, unsigned Op0, bool Op0IsKill) {
94 if (RetVT.SimpleTy != MVT::f32)
95 return 0;
96 if ((Subtarget->hasAVX())) {
97 return FastEmitInst_r(X86::VMOVDI2SSrr, X86::FR32RegisterClass, Op0, Op0IsKill);
98 }
99 if ((Subtarget->hasSSE2() && !Subtarget->hasAVX())) {
100 return FastEmitInst_r(X86::MOVDI2SSrr, X86::FR32RegisterClass, Op0, Op0IsKill);
101 }
102 return 0;
103}
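// When several instructions implement the same pattern, the helpers test
// subtarget predicates from most to least specific: the AVX form
// (X86::VMOVDI2SSrr above) is tried before the plain SSE2 form, and the
// guards are mutually exclusive, so at most one instruction is emitted.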
104
105unsigned FastEmit_ISD_BIT_CONVERT_MVT_i64_MVT_f64_r(unsigned Op0, bool Op0IsKill) {
106 if ((Subtarget->hasSSE2() && !Subtarget->hasAVX())) {
107 return FastEmitInst_r(X86::MOV64toSDrr, X86::FR64RegisterClass, Op0, Op0IsKill);
108 }
109 return 0;
110}
111
112unsigned FastEmit_ISD_BIT_CONVERT_MVT_i64_MVT_v8i8_r(unsigned Op0, bool Op0IsKill) {
113 return FastEmitInst_r(X86::MMX_MOVD64to64rr, X86::VR64RegisterClass, Op0, Op0IsKill);
114}
115
116unsigned FastEmit_ISD_BIT_CONVERT_MVT_i64_MVT_v4i16_r(unsigned Op0, bool Op0IsKill) {
117 return FastEmitInst_r(X86::MMX_MOVD64to64rr, X86::VR64RegisterClass, Op0, Op0IsKill);
118}
119
120unsigned FastEmit_ISD_BIT_CONVERT_MVT_i64_MVT_v2i32_r(unsigned Op0, bool Op0IsKill) {
121 return FastEmitInst_r(X86::MMX_MOVD64to64rr, X86::VR64RegisterClass, Op0, Op0IsKill);
122}
123
124unsigned FastEmit_ISD_BIT_CONVERT_MVT_i64_MVT_v1i64_r(unsigned Op0, bool Op0IsKill) {
125 return FastEmitInst_r(X86::MMX_MOVD64to64rr, X86::VR64RegisterClass, Op0, Op0IsKill);
126}
127
128unsigned FastEmit_ISD_BIT_CONVERT_MVT_i64_r(MVT RetVT, unsigned Op0, bool Op0IsKill) {
129switch (RetVT.SimpleTy) {
130 case MVT::f64: return FastEmit_ISD_BIT_CONVERT_MVT_i64_MVT_f64_r(Op0, Op0IsKill);
131 case MVT::v8i8: return FastEmit_ISD_BIT_CONVERT_MVT_i64_MVT_v8i8_r(Op0, Op0IsKill);
132 case MVT::v4i16: return FastEmit_ISD_BIT_CONVERT_MVT_i64_MVT_v4i16_r(Op0, Op0IsKill);
133 case MVT::v2i32: return FastEmit_ISD_BIT_CONVERT_MVT_i64_MVT_v2i32_r(Op0, Op0IsKill);
134 case MVT::v1i64: return FastEmit_ISD_BIT_CONVERT_MVT_i64_MVT_v1i64_r(Op0, Op0IsKill);
135 default: return 0;
136}
137}
138
139unsigned FastEmit_ISD_BIT_CONVERT_MVT_f32_r(MVT RetVT, unsigned Op0, bool Op0IsKill) {
140 if (RetVT.SimpleTy != MVT::i32)
141 return 0;
142 if ((Subtarget->hasAVX())) {
143 return FastEmitInst_r(X86::VMOVSS2DIrr, X86::GR32RegisterClass, Op0, Op0IsKill);
144 }
145 if ((Subtarget->hasSSE2() && !Subtarget->hasAVX())) {
146 return FastEmitInst_r(X86::MOVSS2DIrr, X86::GR32RegisterClass, Op0, Op0IsKill);
147 }
148 return 0;
149}
150
151unsigned FastEmit_ISD_BIT_CONVERT_MVT_f64_MVT_i64_r(unsigned Op0, bool Op0IsKill) {
152 if ((Subtarget->hasSSE2() && !Subtarget->hasAVX())) {
153 return FastEmitInst_r(X86::MOVSDto64rr, X86::GR64RegisterClass, Op0, Op0IsKill);
154 }
155 return 0;
156}
157
158unsigned FastEmit_ISD_BIT_CONVERT_MVT_f64_MVT_v8i8_r(unsigned Op0, bool Op0IsKill) {
159 return FastEmitInst_r(X86::MMX_MOVFR642Qrr, X86::VR64RegisterClass, Op0, Op0IsKill);
160}
161
162unsigned FastEmit_ISD_BIT_CONVERT_MVT_f64_MVT_v4i16_r(unsigned Op0, bool Op0IsKill) {
163 return FastEmitInst_r(X86::MMX_MOVFR642Qrr, X86::VR64RegisterClass, Op0, Op0IsKill);
164}
165
166unsigned FastEmit_ISD_BIT_CONVERT_MVT_f64_MVT_v2i32_r(unsigned Op0, bool Op0IsKill) {
167 return FastEmitInst_r(X86::MMX_MOVFR642Qrr, X86::VR64RegisterClass, Op0, Op0IsKill);
168}
169
170unsigned FastEmit_ISD_BIT_CONVERT_MVT_f64_MVT_v1i64_r(unsigned Op0, bool Op0IsKill) {
171 return FastEmitInst_r(X86::MMX_MOVFR642Qrr, X86::VR64RegisterClass, Op0, Op0IsKill);
172}
173
174unsigned FastEmit_ISD_BIT_CONVERT_MVT_f64_r(MVT RetVT, unsigned Op0, bool Op0IsKill) {
175switch (RetVT.SimpleTy) {
176 case MVT::i64: return FastEmit_ISD_BIT_CONVERT_MVT_f64_MVT_i64_r(Op0, Op0IsKill);
177 case MVT::v8i8: return FastEmit_ISD_BIT_CONVERT_MVT_f64_MVT_v8i8_r(Op0, Op0IsKill);
178 case MVT::v4i16: return FastEmit_ISD_BIT_CONVERT_MVT_f64_MVT_v4i16_r(Op0, Op0IsKill);
179 case MVT::v2i32: return FastEmit_ISD_BIT_CONVERT_MVT_f64_MVT_v2i32_r(Op0, Op0IsKill);
180 case MVT::v1i64: return FastEmit_ISD_BIT_CONVERT_MVT_f64_MVT_v1i64_r(Op0, Op0IsKill);
181 default: return 0;
182}
183}
184
185unsigned FastEmit_ISD_BIT_CONVERT_MVT_v8i8_MVT_i64_r(unsigned Op0, bool Op0IsKill) {
186 return FastEmitInst_r(X86::MMX_MOVD64from64rr, X86::GR64RegisterClass, Op0, Op0IsKill);
187}
188
189unsigned FastEmit_ISD_BIT_CONVERT_MVT_v8i8_MVT_f64_r(unsigned Op0, bool Op0IsKill) {
190 return FastEmitInst_r(X86::MMX_MOVQ2FR64rr, X86::FR64RegisterClass, Op0, Op0IsKill);
191}
192
193unsigned FastEmit_ISD_BIT_CONVERT_MVT_v8i8_r(MVT RetVT, unsigned Op0, bool Op0IsKill) {
194switch (RetVT.SimpleTy) {
195 case MVT::i64: return FastEmit_ISD_BIT_CONVERT_MVT_v8i8_MVT_i64_r(Op0, Op0IsKill);
196 case MVT::f64: return FastEmit_ISD_BIT_CONVERT_MVT_v8i8_MVT_f64_r(Op0, Op0IsKill);
197 default: return 0;
198}
199}
200
201unsigned FastEmit_ISD_BIT_CONVERT_MVT_v4i16_MVT_i64_r(unsigned Op0, bool Op0IsKill) {
202 return FastEmitInst_r(X86::MMX_MOVD64from64rr, X86::GR64RegisterClass, Op0, Op0IsKill);
203}
204
205unsigned FastEmit_ISD_BIT_CONVERT_MVT_v4i16_MVT_f64_r(unsigned Op0, bool Op0IsKill) {
206 return FastEmitInst_r(X86::MMX_MOVQ2FR64rr, X86::FR64RegisterClass, Op0, Op0IsKill);
207}
208
209unsigned FastEmit_ISD_BIT_CONVERT_MVT_v4i16_r(MVT RetVT, unsigned Op0, bool Op0IsKill) {
210switch (RetVT.SimpleTy) {
211 case MVT::i64: return FastEmit_ISD_BIT_CONVERT_MVT_v4i16_MVT_i64_r(Op0, Op0IsKill);
212 case MVT::f64: return FastEmit_ISD_BIT_CONVERT_MVT_v4i16_MVT_f64_r(Op0, Op0IsKill);
213 default: return 0;
214}
215}
216
217unsigned FastEmit_ISD_BIT_CONVERT_MVT_v2i32_MVT_i64_r(unsigned Op0, bool Op0IsKill) {
218 return FastEmitInst_r(X86::MMX_MOVD64from64rr, X86::GR64RegisterClass, Op0, Op0IsKill);
219}
220
221unsigned FastEmit_ISD_BIT_CONVERT_MVT_v2i32_MVT_f64_r(unsigned Op0, bool Op0IsKill) {
222 return FastEmitInst_r(X86::MMX_MOVQ2FR64rr, X86::FR64RegisterClass, Op0, Op0IsKill);
223}
224
225unsigned FastEmit_ISD_BIT_CONVERT_MVT_v2i32_r(MVT RetVT, unsigned Op0, bool Op0IsKill) {
226switch (RetVT.SimpleTy) {
227 case MVT::i64: return FastEmit_ISD_BIT_CONVERT_MVT_v2i32_MVT_i64_r(Op0, Op0IsKill);
228 case MVT::f64: return FastEmit_ISD_BIT_CONVERT_MVT_v2i32_MVT_f64_r(Op0, Op0IsKill);
229 default: return 0;
230}
231}
232
233unsigned FastEmit_ISD_BIT_CONVERT_MVT_v1i64_MVT_i64_r(unsigned Op0, bool Op0IsKill) {
234 return FastEmitInst_r(X86::MMX_MOVD64from64rr, X86::GR64RegisterClass, Op0, Op0IsKill);
235}
236
237unsigned FastEmit_ISD_BIT_CONVERT_MVT_v1i64_MVT_f64_r(unsigned Op0, bool Op0IsKill) {
238 return FastEmitInst_r(X86::MMX_MOVQ2FR64rr, X86::FR64RegisterClass, Op0, Op0IsKill);
239}
240
241unsigned FastEmit_ISD_BIT_CONVERT_MVT_v1i64_r(MVT RetVT, unsigned Op0, bool Op0IsKill) {
242switch (RetVT.SimpleTy) {
243 case MVT::i64: return FastEmit_ISD_BIT_CONVERT_MVT_v1i64_MVT_i64_r(Op0, Op0IsKill);
244 case MVT::f64: return FastEmit_ISD_BIT_CONVERT_MVT_v1i64_MVT_f64_r(Op0, Op0IsKill);
245 default: return 0;
246}
247}
248
249unsigned FastEmit_ISD_BIT_CONVERT_r(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill) {
250 switch (VT.SimpleTy) {
251 case MVT::i32: return FastEmit_ISD_BIT_CONVERT_MVT_i32_r(RetVT, Op0, Op0IsKill);
252 case MVT::i64: return FastEmit_ISD_BIT_CONVERT_MVT_i64_r(RetVT, Op0, Op0IsKill);
253 case MVT::f32: return FastEmit_ISD_BIT_CONVERT_MVT_f32_r(RetVT, Op0, Op0IsKill);
254 case MVT::f64: return FastEmit_ISD_BIT_CONVERT_MVT_f64_r(RetVT, Op0, Op0IsKill);
255 case MVT::v8i8: return FastEmit_ISD_BIT_CONVERT_MVT_v8i8_r(RetVT, Op0, Op0IsKill);
256 case MVT::v4i16: return FastEmit_ISD_BIT_CONVERT_MVT_v4i16_r(RetVT, Op0, Op0IsKill);
257 case MVT::v2i32: return FastEmit_ISD_BIT_CONVERT_MVT_v2i32_r(RetVT, Op0, Op0IsKill);
258 case MVT::v1i64: return FastEmit_ISD_BIT_CONVERT_MVT_v1i64_r(RetVT, Op0, Op0IsKill);
259 default: return 0;
260 }
261}
262
263// FastEmit functions for ISD::BRIND.
264
265unsigned FastEmit_ISD_BRIND_MVT_i32_r(MVT RetVT, unsigned Op0, bool Op0IsKill) {
266 if (RetVT.SimpleTy != MVT::isVoid)
267 return 0;
268 if ((!Subtarget->is64Bit())) {
269 return FastEmitInst_r(X86::JMP32r, X86::GR32RegisterClass, Op0, Op0IsKill);
270 }
271 return 0;
272}
273
274unsigned FastEmit_ISD_BRIND_MVT_i64_r(MVT RetVT, unsigned Op0, bool Op0IsKill) {
275 if (RetVT.SimpleTy != MVT::isVoid)
276 return 0;
277 if ((Subtarget->is64Bit())) {
278 return FastEmitInst_r(X86::JMP64r, X86::GR64RegisterClass, Op0, Op0IsKill);
279 }
280 return 0;
281}
282
283unsigned FastEmit_ISD_BRIND_r(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill) {
284 switch (VT.SimpleTy) {
285 case MVT::i32: return FastEmit_ISD_BRIND_MVT_i32_r(RetVT, Op0, Op0IsKill);
286 case MVT::i64: return FastEmit_ISD_BRIND_MVT_i64_r(RetVT, Op0, Op0IsKill);
287 default: return 0;
288 }
289}
290
291// FastEmit functions for ISD::BSWAP.
292
293unsigned FastEmit_ISD_BSWAP_MVT_i32_r(MVT RetVT, unsigned Op0, bool Op0IsKill) {
294 if (RetVT.SimpleTy != MVT::i32)
295 return 0;
296 return FastEmitInst_r(X86::BSWAP32r, X86::GR32RegisterClass, Op0, Op0IsKill);
297}
298
299unsigned FastEmit_ISD_BSWAP_MVT_i64_r(MVT RetVT, unsigned Op0, bool Op0IsKill) {
300 if (RetVT.SimpleTy != MVT::i64)
301 return 0;
302 return FastEmitInst_r(X86::BSWAP64r, X86::GR64RegisterClass, Op0, Op0IsKill);
303}
304
305unsigned FastEmit_ISD_BSWAP_r(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill) {
306 switch (VT.SimpleTy) {
307 case MVT::i32: return FastEmit_ISD_BSWAP_MVT_i32_r(RetVT, Op0, Op0IsKill);
308 case MVT::i64: return FastEmit_ISD_BSWAP_MVT_i64_r(RetVT, Op0, Op0IsKill);
309 default: return 0;
310 }
311}
312
313// FastEmit functions for ISD::FABS.
314
315unsigned FastEmit_ISD_FABS_MVT_f32_r(MVT RetVT, unsigned Op0, bool Op0IsKill) {
316 if (RetVT.SimpleTy != MVT::f32)
317 return 0;
318 if ((!Subtarget->hasSSE1())) {
319 return FastEmitInst_r(X86::ABS_Fp32, X86::RFP32RegisterClass, Op0, Op0IsKill);
320 }
321 return 0;
322}
323
324unsigned FastEmit_ISD_FABS_MVT_f64_r(MVT RetVT, unsigned Op0, bool Op0IsKill) {
325 if (RetVT.SimpleTy != MVT::f64)
326 return 0;
327 if ((!Subtarget->hasSSE2())) {
328 return FastEmitInst_r(X86::ABS_Fp64, X86::RFP64RegisterClass, Op0, Op0IsKill);
329 }
330 return 0;
331}
332
333unsigned FastEmit_ISD_FABS_MVT_f80_r(MVT RetVT, unsigned Op0, bool Op0IsKill) {
334 if (RetVT.SimpleTy != MVT::f80)
335 return 0;
336 return FastEmitInst_r(X86::ABS_Fp80, X86::RFP80RegisterClass, Op0, Op0IsKill);
337}
338
339unsigned FastEmit_ISD_FABS_r(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill) {
340 switch (VT.SimpleTy) {
341 case MVT::f32: return FastEmit_ISD_FABS_MVT_f32_r(RetVT, Op0, Op0IsKill);
342 case MVT::f64: return FastEmit_ISD_FABS_MVT_f64_r(RetVT, Op0, Op0IsKill);
343 case MVT::f80: return FastEmit_ISD_FABS_MVT_f80_r(RetVT, Op0, Op0IsKill);
344 default: return 0;
345 }
346}
347
348// FastEmit functions for ISD::FCOS.
349
350unsigned FastEmit_ISD_FCOS_MVT_f32_r(MVT RetVT, unsigned Op0, bool Op0IsKill) {
351 if (RetVT.SimpleTy != MVT::f32)
352 return 0;
353 if ((!Subtarget->hasSSE1())) {
354 return FastEmitInst_r(X86::COS_Fp32, X86::RFP32RegisterClass, Op0, Op0IsKill);
355 }
356 return 0;
357}
358
359unsigned FastEmit_ISD_FCOS_MVT_f64_r(MVT RetVT, unsigned Op0, bool Op0IsKill) {
360 if (RetVT.SimpleTy != MVT::f64)
361 return 0;
362 if ((!Subtarget->hasSSE2())) {
363 return FastEmitInst_r(X86::COS_Fp64, X86::RFP64RegisterClass, Op0, Op0IsKill);
364 }
365 return 0;
366}
367
368unsigned FastEmit_ISD_FCOS_MVT_f80_r(MVT RetVT, unsigned Op0, bool Op0IsKill) {
369 if (RetVT.SimpleTy != MVT::f80)
370 return 0;
371 return FastEmitInst_r(X86::COS_Fp80, X86::RFP80RegisterClass, Op0, Op0IsKill);
372}
373
374unsigned FastEmit_ISD_FCOS_r(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill) {
375 switch (VT.SimpleTy) {
376 case MVT::f32: return FastEmit_ISD_FCOS_MVT_f32_r(RetVT, Op0, Op0IsKill);
377 case MVT::f64: return FastEmit_ISD_FCOS_MVT_f64_r(RetVT, Op0, Op0IsKill);
378 case MVT::f80: return FastEmit_ISD_FCOS_MVT_f80_r(RetVT, Op0, Op0IsKill);
379 default: return 0;
380 }
381}
382
383// FastEmit functions for ISD::FNEG.
384
385unsigned FastEmit_ISD_FNEG_MVT_f32_r(MVT RetVT, unsigned Op0, bool Op0IsKill) {
386 if (RetVT.SimpleTy != MVT::f32)
387 return 0;
388 if ((!Subtarget->hasSSE1())) {
389 return FastEmitInst_r(X86::CHS_Fp32, X86::RFP32RegisterClass, Op0, Op0IsKill);
390 }
391 return 0;
392}
393
394unsigned FastEmit_ISD_FNEG_MVT_f64_r(MVT RetVT, unsigned Op0, bool Op0IsKill) {
395 if (RetVT.SimpleTy != MVT::f64)
396 return 0;
397 if ((!Subtarget->hasSSE2())) {
398 return FastEmitInst_r(X86::CHS_Fp64, X86::RFP64RegisterClass, Op0, Op0IsKill);
399 }
400 return 0;
401}
402
403unsigned FastEmit_ISD_FNEG_MVT_f80_r(MVT RetVT, unsigned Op0, bool Op0IsKill) {
404 if (RetVT.SimpleTy != MVT::f80)
405 return 0;
406 return FastEmitInst_r(X86::CHS_Fp80, X86::RFP80RegisterClass, Op0, Op0IsKill);
407}
408
409unsigned FastEmit_ISD_FNEG_r(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill) {
410 switch (VT.SimpleTy) {
411 case MVT::f32: return FastEmit_ISD_FNEG_MVT_f32_r(RetVT, Op0, Op0IsKill);
412 case MVT::f64: return FastEmit_ISD_FNEG_MVT_f64_r(RetVT, Op0, Op0IsKill);
413 case MVT::f80: return FastEmit_ISD_FNEG_MVT_f80_r(RetVT, Op0, Op0IsKill);
414 default: return 0;
415 }
416}
417
418// FastEmit functions for ISD::FP_EXTEND.
419
420unsigned FastEmit_ISD_FP_EXTEND_MVT_f32_r(MVT RetVT, unsigned Op0, bool Op0IsKill) {
421 if (RetVT.SimpleTy != MVT::f64)
422 return 0;
423 if ((Subtarget->hasSSE2() && !Subtarget->hasAVX())) {
424 return FastEmitInst_r(X86::CVTSS2SDrr, X86::FR64RegisterClass, Op0, Op0IsKill);
425 }
426 return 0;
427}
428
429unsigned FastEmit_ISD_FP_EXTEND_r(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill) {
430 switch (VT.SimpleTy) {
431 case MVT::f32: return FastEmit_ISD_FP_EXTEND_MVT_f32_r(RetVT, Op0, Op0IsKill);
432 default: return 0;
433 }
434}
435
436// FastEmit functions for ISD::FP_ROUND.
437
438unsigned FastEmit_ISD_FP_ROUND_MVT_f64_r(MVT RetVT, unsigned Op0, bool Op0IsKill) {
439 if (RetVT.SimpleTy != MVT::f32)
440 return 0;
441 if ((Subtarget->hasSSE2() && !Subtarget->hasAVX())) {
442 return FastEmitInst_r(X86::CVTSD2SSrr, X86::FR32RegisterClass, Op0, Op0IsKill);
443 }
444 return 0;
445}
446
447unsigned FastEmit_ISD_FP_ROUND_r(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill) {
448 switch (VT.SimpleTy) {
449 case MVT::f64: return FastEmit_ISD_FP_ROUND_MVT_f64_r(RetVT, Op0, Op0IsKill);
450 default: return 0;
451 }
452}
453
454// FastEmit functions for ISD::FP_TO_SINT.
455
456unsigned FastEmit_ISD_FP_TO_SINT_MVT_f32_MVT_i32_r(unsigned Op0, bool Op0IsKill) {
457 if ((Subtarget->hasAVX())) {
458 return FastEmitInst_r(X86::VCVTTSS2SIrr, X86::GR32RegisterClass, Op0, Op0IsKill);
459 }
460 if ((Subtarget->hasSSE1() && !Subtarget->hasAVX())) {
461 return FastEmitInst_r(X86::CVTTSS2SIrr, X86::GR32RegisterClass, Op0, Op0IsKill);
462 }
463 return 0;
464}
465
466unsigned FastEmit_ISD_FP_TO_SINT_MVT_f32_MVT_i64_r(unsigned Op0, bool Op0IsKill) {
467 if ((Subtarget->hasAVX())) {
468 return FastEmitInst_r(X86::VCVTTSS2SI64rr, X86::GR64RegisterClass, Op0, Op0IsKill);
469 }
470 if ((Subtarget->hasSSE1() && !Subtarget->hasAVX())) {
471 return FastEmitInst_r(X86::CVTTSS2SI64rr, X86::GR64RegisterClass, Op0, Op0IsKill);
472 }
473 return 0;
474}
475
476unsigned FastEmit_ISD_FP_TO_SINT_MVT_f32_r(MVT RetVT, unsigned Op0, bool Op0IsKill) {
477switch (RetVT.SimpleTy) {
478 case MVT::i32: return FastEmit_ISD_FP_TO_SINT_MVT_f32_MVT_i32_r(Op0, Op0IsKill);
479 case MVT::i64: return FastEmit_ISD_FP_TO_SINT_MVT_f32_MVT_i64_r(Op0, Op0IsKill);
480 default: return 0;
481}
482}
483
484unsigned FastEmit_ISD_FP_TO_SINT_MVT_f64_MVT_i32_r(unsigned Op0, bool Op0IsKill) {
485 if ((Subtarget->hasAVX())) {
486 return FastEmitInst_r(X86::VCVTTSD2SIrr, X86::GR32RegisterClass, Op0, Op0IsKill);
487 }
488 if ((Subtarget->hasSSE2() && !Subtarget->hasAVX())) {
489 return FastEmitInst_r(X86::CVTTSD2SIrr, X86::GR32RegisterClass, Op0, Op0IsKill);
490 }
491 return 0;
492}
493
494unsigned FastEmit_ISD_FP_TO_SINT_MVT_f64_MVT_i64_r(unsigned Op0, bool Op0IsKill) {
495 if ((Subtarget->hasAVX())) {
496 return FastEmitInst_r(X86::VCVTTSD2SI64rr, X86::GR64RegisterClass, Op0, Op0IsKill);
497 }
498 if ((Subtarget->hasSSE2() && !Subtarget->hasAVX())) {
499 return FastEmitInst_r(X86::CVTTSD2SI64rr, X86::GR64RegisterClass, Op0, Op0IsKill);
500 }
501 return 0;
502}
503
504unsigned FastEmit_ISD_FP_TO_SINT_MVT_f64_r(MVT RetVT, unsigned Op0, bool Op0IsKill) {
505switch (RetVT.SimpleTy) {
506 case MVT::i32: return FastEmit_ISD_FP_TO_SINT_MVT_f64_MVT_i32_r(Op0, Op0IsKill);
507 case MVT::i64: return FastEmit_ISD_FP_TO_SINT_MVT_f64_MVT_i64_r(Op0, Op0IsKill);
508 default: return 0;
509}
510}
511
512unsigned FastEmit_ISD_FP_TO_SINT_MVT_v4f32_r(MVT RetVT, unsigned Op0, bool Op0IsKill) {
513 if (RetVT.SimpleTy != MVT::v4i32)
514 return 0;
515 if ((Subtarget->hasSSE2() && !Subtarget->hasAVX())) {
516 return FastEmitInst_r(X86::Int_CVTTPS2DQrr, X86::VR128RegisterClass, Op0, Op0IsKill);
517 }
518 return 0;
519}
520
521unsigned FastEmit_ISD_FP_TO_SINT_MVT_v2f64_r(MVT RetVT, unsigned Op0, bool Op0IsKill) {
522 if (RetVT.SimpleTy != MVT::v2i32)
523 return 0;
524 if ((Subtarget->hasSSE2() && !Subtarget->hasAVX())) {
525 return FastEmitInst_r(X86::Int_CVTTPD2PIrr, X86::VR64RegisterClass, Op0, Op0IsKill);
526 }
527 return 0;
528}
529
530unsigned FastEmit_ISD_FP_TO_SINT_r(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill) {
531 switch (VT.SimpleTy) {
532 case MVT::f32: return FastEmit_ISD_FP_TO_SINT_MVT_f32_r(RetVT, Op0, Op0IsKill);
533 case MVT::f64: return FastEmit_ISD_FP_TO_SINT_MVT_f64_r(RetVT, Op0, Op0IsKill);
534 case MVT::v4f32: return FastEmit_ISD_FP_TO_SINT_MVT_v4f32_r(RetVT, Op0, Op0IsKill);
535 case MVT::v2f64: return FastEmit_ISD_FP_TO_SINT_MVT_v2f64_r(RetVT, Op0, Op0IsKill);
536 default: return 0;
537 }
538}
539
540// FastEmit functions for ISD::FSIN.
541
542unsigned FastEmit_ISD_FSIN_MVT_f32_r(MVT RetVT, unsigned Op0, bool Op0IsKill) {
543 if (RetVT.SimpleTy != MVT::f32)
544 return 0;
545 if ((!Subtarget->hasSSE1())) {
546 return FastEmitInst_r(X86::SIN_Fp32, X86::RFP32RegisterClass, Op0, Op0IsKill);
547 }
548 return 0;
549}
550
551unsigned FastEmit_ISD_FSIN_MVT_f64_r(MVT RetVT, unsigned Op0, bool Op0IsKill) {
552 if (RetVT.SimpleTy != MVT::f64)
553 return 0;
554 if ((!Subtarget->hasSSE2())) {
555 return FastEmitInst_r(X86::SIN_Fp64, X86::RFP64RegisterClass, Op0, Op0IsKill);
556 }
557 return 0;
558}
559
560unsigned FastEmit_ISD_FSIN_MVT_f80_r(MVT RetVT, unsigned Op0, bool Op0IsKill) {
561 if (RetVT.SimpleTy != MVT::f80)
562 return 0;
563 return FastEmitInst_r(X86::SIN_Fp80, X86::RFP80RegisterClass, Op0, Op0IsKill);
564}
565
566unsigned FastEmit_ISD_FSIN_r(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill) {
567 switch (VT.SimpleTy) {
568 case MVT::f32: return FastEmit_ISD_FSIN_MVT_f32_r(RetVT, Op0, Op0IsKill);
569 case MVT::f64: return FastEmit_ISD_FSIN_MVT_f64_r(RetVT, Op0, Op0IsKill);
570 case MVT::f80: return FastEmit_ISD_FSIN_MVT_f80_r(RetVT, Op0, Op0IsKill);
571 default: return 0;
572 }
573}
574
575// FastEmit functions for ISD::FSQRT.
576
577unsigned FastEmit_ISD_FSQRT_MVT_f32_r(MVT RetVT, unsigned Op0, bool Op0IsKill) {
578 if (RetVT.SimpleTy != MVT::f32)
579 return 0;
580 if ((!Subtarget->hasSSE1())) {
581 return FastEmitInst_r(X86::SQRT_Fp32, X86::RFP32RegisterClass, Op0, Op0IsKill);
582 }
583 if ((Subtarget->hasSSE1() && !Subtarget->hasAVX())) {
584 return FastEmitInst_r(X86::SQRTSSr, X86::FR32RegisterClass, Op0, Op0IsKill);
585 }
586 return 0;
587}
588
589unsigned FastEmit_ISD_FSQRT_MVT_f64_r(MVT RetVT, unsigned Op0, bool Op0IsKill) {
590 if (RetVT.SimpleTy != MVT::f64)
591 return 0;
592 if ((!Subtarget->hasSSE2())) {
593 return FastEmitInst_r(X86::SQRT_Fp64, X86::RFP64RegisterClass, Op0, Op0IsKill);
594 }
595 if ((Subtarget->hasSSE2() && !Subtarget->hasAVX())) {
596 return FastEmitInst_r(X86::SQRTSDr, X86::FR64RegisterClass, Op0, Op0IsKill);
597 }
598 return 0;
599}
600
601unsigned FastEmit_ISD_FSQRT_MVT_f80_r(MVT RetVT, unsigned Op0, bool Op0IsKill) {
602 if (RetVT.SimpleTy != MVT::f80)
603 return 0;
604 return FastEmitInst_r(X86::SQRT_Fp80, X86::RFP80RegisterClass, Op0, Op0IsKill);
605}
606
607unsigned FastEmit_ISD_FSQRT_MVT_v4f32_r(MVT RetVT, unsigned Op0, bool Op0IsKill) {
608 if (RetVT.SimpleTy != MVT::v4f32)
609 return 0;
610 if ((Subtarget->hasAVX())) {
611 return FastEmitInst_r(X86::VSQRTPSr, X86::VR128RegisterClass, Op0, Op0IsKill);
612 }
613 if ((Subtarget->hasSSE1() && !Subtarget->hasAVX())) {
614 return FastEmitInst_r(X86::SQRTPSr, X86::VR128RegisterClass, Op0, Op0IsKill);
615 }
616 return 0;
617}
618
619unsigned FastEmit_ISD_FSQRT_MVT_v8f32_r(MVT RetVT, unsigned Op0, bool Op0IsKill) {
620 if (RetVT.SimpleTy != MVT::v8f32)
621 return 0;
622 if ((Subtarget->hasAVX())) {
623 return FastEmitInst_r(X86::VSQRTPSYr, X86::VR256RegisterClass, Op0, Op0IsKill);
624 }
625 return 0;
626}
627
628unsigned FastEmit_ISD_FSQRT_MVT_v2f64_r(MVT RetVT, unsigned Op0, bool Op0IsKill) {
629 if (RetVT.SimpleTy != MVT::v2f64)
630 return 0;
631 if ((Subtarget->hasAVX())) {
632 return FastEmitInst_r(X86::VSQRTPDr, X86::VR128RegisterClass, Op0, Op0IsKill);
633 }
634 if ((Subtarget->hasSSE2() && !Subtarget->hasAVX())) {
635 return FastEmitInst_r(X86::SQRTPDr, X86::VR128RegisterClass, Op0, Op0IsKill);
636 }
637 return 0;
638}
639
640unsigned FastEmit_ISD_FSQRT_MVT_v4f64_r(MVT RetVT, unsigned Op0, bool Op0IsKill) {
641 if (RetVT.SimpleTy != MVT::v4f64)
642 return 0;
643 if ((Subtarget->hasAVX())) {
644 return FastEmitInst_r(X86::VSQRTPDYr, X86::VR256RegisterClass, Op0, Op0IsKill);
645 }
646 return 0;
647}
648
649unsigned FastEmit_ISD_FSQRT_r(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill) {
650 switch (VT.SimpleTy) {
651 case MVT::f32: return FastEmit_ISD_FSQRT_MVT_f32_r(RetVT, Op0, Op0IsKill);
652 case MVT::f64: return FastEmit_ISD_FSQRT_MVT_f64_r(RetVT, Op0, Op0IsKill);
653 case MVT::f80: return FastEmit_ISD_FSQRT_MVT_f80_r(RetVT, Op0, Op0IsKill);
654 case MVT::v4f32: return FastEmit_ISD_FSQRT_MVT_v4f32_r(RetVT, Op0, Op0IsKill);
655 case MVT::v8f32: return FastEmit_ISD_FSQRT_MVT_v8f32_r(RetVT, Op0, Op0IsKill);
656 case MVT::v2f64: return FastEmit_ISD_FSQRT_MVT_v2f64_r(RetVT, Op0, Op0IsKill);
657 case MVT::v4f64: return FastEmit_ISD_FSQRT_MVT_v4f64_r(RetVT, Op0, Op0IsKill);
658 default: return 0;
659 }
660}
661
662// FastEmit functions for ISD::SCALAR_TO_VECTOR.
663
664unsigned FastEmit_ISD_SCALAR_TO_VECTOR_MVT_i32_MVT_v2i32_r(unsigned Op0, bool Op0IsKill) {
665 if ((Subtarget->hasMMX() && !Subtarget->hasAVX())) {
666 return FastEmitInst_r(X86::MMX_MOVD64rr, X86::VR64RegisterClass, Op0, Op0IsKill);
667 }
668 return 0;
669}
670
671unsigned FastEmit_ISD_SCALAR_TO_VECTOR_MVT_i32_MVT_v4i32_r(unsigned Op0, bool Op0IsKill) {
672 if ((Subtarget->hasAVX())) {
673 return FastEmitInst_r(X86::VMOVDI2PDIrr, X86::VR128RegisterClass, Op0, Op0IsKill);
674 }
675 if ((Subtarget->hasSSE2() && !Subtarget->hasAVX())) {
676 return FastEmitInst_r(X86::MOVDI2PDIrr, X86::VR128RegisterClass, Op0, Op0IsKill);
677 }
678 return 0;
679}
680
681unsigned FastEmit_ISD_SCALAR_TO_VECTOR_MVT_i32_r(MVT RetVT, unsigned Op0, bool Op0IsKill) {
682switch (RetVT.SimpleTy) {
683 case MVT::v2i32: return FastEmit_ISD_SCALAR_TO_VECTOR_MVT_i32_MVT_v2i32_r(Op0, Op0IsKill);
684 case MVT::v4i32: return FastEmit_ISD_SCALAR_TO_VECTOR_MVT_i32_MVT_v4i32_r(Op0, Op0IsKill);
685 default: return 0;
686}
687}
688
689unsigned FastEmit_ISD_SCALAR_TO_VECTOR_MVT_i64_MVT_v1i64_r(unsigned Op0, bool Op0IsKill) {
690 if ((Subtarget->hasMMX() && !Subtarget->hasAVX())) {
691 return FastEmitInst_r(X86::MMX_MOVD64rrv164, X86::VR64RegisterClass, Op0, Op0IsKill);
692 }
693 return 0;
694}
695
696unsigned FastEmit_ISD_SCALAR_TO_VECTOR_MVT_i64_MVT_v2i64_r(unsigned Op0, bool Op0IsKill) {
697 if ((Subtarget->hasSSE2() && !Subtarget->hasAVX())) {
698 return FastEmitInst_r(X86::MOV64toPQIrr, X86::VR128RegisterClass, Op0, Op0IsKill);
699 }
700 return 0;
701}
702
703unsigned FastEmit_ISD_SCALAR_TO_VECTOR_MVT_i64_r(MVT RetVT, unsigned Op0, bool Op0IsKill) {
704switch (RetVT.SimpleTy) {
705 case MVT::v1i64: return FastEmit_ISD_SCALAR_TO_VECTOR_MVT_i64_MVT_v1i64_r(Op0, Op0IsKill);
706 case MVT::v2i64: return FastEmit_ISD_SCALAR_TO_VECTOR_MVT_i64_MVT_v2i64_r(Op0, Op0IsKill);
707 default: return 0;
708}
709}
710
711unsigned FastEmit_ISD_SCALAR_TO_VECTOR_r(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill) {
712 switch (VT.SimpleTy) {
713 case MVT::i32: return FastEmit_ISD_SCALAR_TO_VECTOR_MVT_i32_r(RetVT, Op0, Op0IsKill);
714 case MVT::i64: return FastEmit_ISD_SCALAR_TO_VECTOR_MVT_i64_r(RetVT, Op0, Op0IsKill);
715 default: return 0;
716 }
717}
718
719// FastEmit functions for ISD::SIGN_EXTEND.
720
721unsigned FastEmit_ISD_SIGN_EXTEND_MVT_i8_MVT_i16_r(unsigned Op0, bool Op0IsKill) {
722 return FastEmitInst_r(X86::MOVSX16rr8, X86::GR16RegisterClass, Op0, Op0IsKill);
723}
724
725unsigned FastEmit_ISD_SIGN_EXTEND_MVT_i8_MVT_i32_r(unsigned Op0, bool Op0IsKill) {
726 return FastEmitInst_r(X86::MOVSX32rr8, X86::GR32RegisterClass, Op0, Op0IsKill);
727}
728
729unsigned FastEmit_ISD_SIGN_EXTEND_MVT_i8_MVT_i64_r(unsigned Op0, bool Op0IsKill) {
730 return FastEmitInst_r(X86::MOVSX64rr8, X86::GR64RegisterClass, Op0, Op0IsKill);
731}
732
733unsigned FastEmit_ISD_SIGN_EXTEND_MVT_i8_r(MVT RetVT, unsigned Op0, bool Op0IsKill) {
734switch (RetVT.SimpleTy) {
735 case MVT::i16: return FastEmit_ISD_SIGN_EXTEND_MVT_i8_MVT_i16_r(Op0, Op0IsKill);
736 case MVT::i32: return FastEmit_ISD_SIGN_EXTEND_MVT_i8_MVT_i32_r(Op0, Op0IsKill);
737 case MVT::i64: return FastEmit_ISD_SIGN_EXTEND_MVT_i8_MVT_i64_r(Op0, Op0IsKill);
738 default: return 0;
739}
740}
741
742unsigned FastEmit_ISD_SIGN_EXTEND_MVT_i16_MVT_i32_r(unsigned Op0, bool Op0IsKill) {
743 return FastEmitInst_r(X86::MOVSX32rr16, X86::GR32RegisterClass, Op0, Op0IsKill);
744}
745
746unsigned FastEmit_ISD_SIGN_EXTEND_MVT_i16_MVT_i64_r(unsigned Op0, bool Op0IsKill) {
747 return FastEmitInst_r(X86::MOVSX64rr16, X86::GR64RegisterClass, Op0, Op0IsKill);
748}
749
750unsigned FastEmit_ISD_SIGN_EXTEND_MVT_i16_r(MVT RetVT, unsigned Op0, bool Op0IsKill) {
751switch (RetVT.SimpleTy) {
752 case MVT::i32: return FastEmit_ISD_SIGN_EXTEND_MVT_i16_MVT_i32_r(Op0, Op0IsKill);
753 case MVT::i64: return FastEmit_ISD_SIGN_EXTEND_MVT_i16_MVT_i64_r(Op0, Op0IsKill);
754 default: return 0;
755}
756}
757
758unsigned FastEmit_ISD_SIGN_EXTEND_MVT_i32_r(MVT RetVT, unsigned Op0, bool Op0IsKill) {
759 if (RetVT.SimpleTy != MVT::i64)
760 return 0;
761 return FastEmitInst_r(X86::MOVSX64rr32, X86::GR64RegisterClass, Op0, Op0IsKill);
762}
763
764unsigned FastEmit_ISD_SIGN_EXTEND_r(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill) {
765 switch (VT.SimpleTy) {
766 case MVT::i8: return FastEmit_ISD_SIGN_EXTEND_MVT_i8_r(RetVT, Op0, Op0IsKill);
767 case MVT::i16: return FastEmit_ISD_SIGN_EXTEND_MVT_i16_r(RetVT, Op0, Op0IsKill);
768 case MVT::i32: return FastEmit_ISD_SIGN_EXTEND_MVT_i32_r(RetVT, Op0, Op0IsKill);
769 default: return 0;
770 }
771}
772
773// FastEmit functions for ISD::SINT_TO_FP.
774
775unsigned FastEmit_ISD_SINT_TO_FP_MVT_i32_MVT_f32_r(unsigned Op0, bool Op0IsKill) {
776 if ((Subtarget->hasSSE1() && !Subtarget->hasAVX())) {
777 return FastEmitInst_r(X86::CVTSI2SSrr, X86::FR32RegisterClass, Op0, Op0IsKill);
778 }
779 return 0;
780}
781
782unsigned FastEmit_ISD_SINT_TO_FP_MVT_i32_MVT_f64_r(unsigned Op0, bool Op0IsKill) {
783 if ((Subtarget->hasSSE2() && !Subtarget->hasAVX())) {
784 return FastEmitInst_r(X86::CVTSI2SDrr, X86::FR64RegisterClass, Op0, Op0IsKill);
785 }
786 return 0;
787}
788
789unsigned FastEmit_ISD_SINT_TO_FP_MVT_i32_r(MVT RetVT, unsigned Op0, bool Op0IsKill) {
790switch (RetVT.SimpleTy) {
791 case MVT::f32: return FastEmit_ISD_SINT_TO_FP_MVT_i32_MVT_f32_r(Op0, Op0IsKill);
792 case MVT::f64: return FastEmit_ISD_SINT_TO_FP_MVT_i32_MVT_f64_r(Op0, Op0IsKill);
793 default: return 0;
794}
795}
796
797unsigned FastEmit_ISD_SINT_TO_FP_MVT_i64_MVT_f32_r(unsigned Op0, bool Op0IsKill) {
798 if ((Subtarget->hasSSE1() && !Subtarget->hasAVX())) {
799 return FastEmitInst_r(X86::CVTSI2SS64rr, X86::FR32RegisterClass, Op0, Op0IsKill);
800 }
801 return 0;
802}
803
804unsigned FastEmit_ISD_SINT_TO_FP_MVT_i64_MVT_f64_r(unsigned Op0, bool Op0IsKill) {
805 if ((Subtarget->hasSSE2() && !Subtarget->hasAVX())) {
806 return FastEmitInst_r(X86::CVTSI2SD64rr, X86::FR64RegisterClass, Op0, Op0IsKill);
807 }
808 return 0;
809}
810
811unsigned FastEmit_ISD_SINT_TO_FP_MVT_i64_r(MVT RetVT, unsigned Op0, bool Op0IsKill) {
812switch (RetVT.SimpleTy) {
813 case MVT::f32: return FastEmit_ISD_SINT_TO_FP_MVT_i64_MVT_f32_r(Op0, Op0IsKill);
814 case MVT::f64: return FastEmit_ISD_SINT_TO_FP_MVT_i64_MVT_f64_r(Op0, Op0IsKill);
815 default: return 0;
816}
817}
818
819unsigned FastEmit_ISD_SINT_TO_FP_MVT_v2i32_r(MVT RetVT, unsigned Op0, bool Op0IsKill) {
820 if (RetVT.SimpleTy != MVT::v2f64)
821 return 0;
822 if ((Subtarget->hasSSE2() && !Subtarget->hasAVX())) {
823 return FastEmitInst_r(X86::Int_CVTPI2PDrr, X86::VR128RegisterClass, Op0, Op0IsKill);
824 }
825 return 0;
826}
827
828unsigned FastEmit_ISD_SINT_TO_FP_MVT_v4i32_r(MVT RetVT, unsigned Op0, bool Op0IsKill) {
829 if (RetVT.SimpleTy != MVT::v4f32)
830 return 0;
831 if ((Subtarget->hasSSE2() && !Subtarget->hasAVX())) {
832 return FastEmitInst_r(X86::Int_CVTDQ2PSrr, X86::VR128RegisterClass, Op0, Op0IsKill);
833 }
834 return 0;
835}
836
837unsigned FastEmit_ISD_SINT_TO_FP_r(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill) {
838 switch (VT.SimpleTy) {
839 case MVT::i32: return FastEmit_ISD_SINT_TO_FP_MVT_i32_r(RetVT, Op0, Op0IsKill);
840 case MVT::i64: return FastEmit_ISD_SINT_TO_FP_MVT_i64_r(RetVT, Op0, Op0IsKill);
841 case MVT::v2i32: return FastEmit_ISD_SINT_TO_FP_MVT_v2i32_r(RetVT, Op0, Op0IsKill);
842 case MVT::v4i32: return FastEmit_ISD_SINT_TO_FP_MVT_v4i32_r(RetVT, Op0, Op0IsKill);
843 default: return 0;
844 }
845}
846
847// FastEmit functions for ISD::TRUNCATE.
848
849unsigned FastEmit_ISD_TRUNCATE_MVT_i16_r(MVT RetVT, unsigned Op0, bool Op0IsKill) {
850 if (RetVT.SimpleTy != MVT::i8)
851 return 0;
852 if ((Subtarget->is64Bit())) {
853 return FastEmitInst_extractsubreg(RetVT, Op0, Op0IsKill, X86::sub_8bit);
854 }
855 return 0;
856}
857
858unsigned FastEmit_ISD_TRUNCATE_MVT_i32_MVT_i8_r(unsigned Op0, bool Op0IsKill) {
859 if ((Subtarget->is64Bit())) {
860 return FastEmitInst_extractsubreg(MVT::i8, Op0, Op0IsKill, X86::sub_8bit);
861 }
862 return 0;
863}
864
865unsigned FastEmit_ISD_TRUNCATE_MVT_i32_MVT_i16_r(unsigned Op0, bool Op0IsKill) {
866 return FastEmitInst_extractsubreg(MVT::i16, Op0, Op0IsKill, X86::sub_16bit);
867}
868
869unsigned FastEmit_ISD_TRUNCATE_MVT_i32_r(MVT RetVT, unsigned Op0, bool Op0IsKill) {
870switch (RetVT.SimpleTy) {
871 case MVT::i8: return FastEmit_ISD_TRUNCATE_MVT_i32_MVT_i8_r(Op0, Op0IsKill);
872 case MVT::i16: return FastEmit_ISD_TRUNCATE_MVT_i32_MVT_i16_r(Op0, Op0IsKill);
873 default: return 0;
874}
875}
876
877unsigned FastEmit_ISD_TRUNCATE_MVT_i64_MVT_i8_r(unsigned Op0, bool Op0IsKill) {
878 return FastEmitInst_extractsubreg(MVT::i8, Op0, Op0IsKill, X86::sub_8bit);
879}
880
881unsigned FastEmit_ISD_TRUNCATE_MVT_i64_MVT_i16_r(unsigned Op0, bool Op0IsKill) {
882 return FastEmitInst_extractsubreg(MVT::i16, Op0, Op0IsKill, X86::sub_16bit);
883}
884
885unsigned FastEmit_ISD_TRUNCATE_MVT_i64_MVT_i32_r(unsigned Op0, bool Op0IsKill) {
886 return FastEmitInst_extractsubreg(MVT::i32, Op0, Op0IsKill, X86::sub_32bit);
887}
888
889unsigned FastEmit_ISD_TRUNCATE_MVT_i64_r(MVT RetVT, unsigned Op0, bool Op0IsKill) {
890switch (RetVT.SimpleTy) {
891 case MVT::i8: return FastEmit_ISD_TRUNCATE_MVT_i64_MVT_i8_r(Op0, Op0IsKill);
892 case MVT::i16: return FastEmit_ISD_TRUNCATE_MVT_i64_MVT_i16_r(Op0, Op0IsKill);
893 case MVT::i32: return FastEmit_ISD_TRUNCATE_MVT_i64_MVT_i32_r(Op0, Op0IsKill);
894 default: return 0;
895}
896}
897
898unsigned FastEmit_ISD_TRUNCATE_r(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill) {
899 switch (VT.SimpleTy) {
900 case MVT::i16: return FastEmit_ISD_TRUNCATE_MVT_i16_r(RetVT, Op0, Op0IsKill);
901 case MVT::i32: return FastEmit_ISD_TRUNCATE_MVT_i32_r(RetVT, Op0, Op0IsKill);
902 case MVT::i64: return FastEmit_ISD_TRUNCATE_MVT_i64_r(RetVT, Op0, Op0IsKill);
903 default: return 0;
904 }
905}
906
907// FastEmit functions for ISD::ZERO_EXTEND.
908
909unsigned FastEmit_ISD_ZERO_EXTEND_MVT_i8_MVT_i16_r(unsigned Op0, bool Op0IsKill) {
910 return FastEmitInst_r(X86::MOVZX16rr8, X86::GR16RegisterClass, Op0, Op0IsKill);
911}
912
913unsigned FastEmit_ISD_ZERO_EXTEND_MVT_i8_MVT_i32_r(unsigned Op0, bool Op0IsKill) {
914 return FastEmitInst_r(X86::MOVZX32rr8, X86::GR32RegisterClass, Op0, Op0IsKill);
915}
916
917unsigned FastEmit_ISD_ZERO_EXTEND_MVT_i8_MVT_i64_r(unsigned Op0, bool Op0IsKill) {
918 return FastEmitInst_r(X86::MOVZX64rr8, X86::GR64RegisterClass, Op0, Op0IsKill);
919}
920
921unsigned FastEmit_ISD_ZERO_EXTEND_MVT_i8_r(MVT RetVT, unsigned Op0, bool Op0IsKill) {
922switch (RetVT.SimpleTy) {
923 case MVT::i16: return FastEmit_ISD_ZERO_EXTEND_MVT_i8_MVT_i16_r(Op0, Op0IsKill);
924 case MVT::i32: return FastEmit_ISD_ZERO_EXTEND_MVT_i8_MVT_i32_r(Op0, Op0IsKill);
925 case MVT::i64: return FastEmit_ISD_ZERO_EXTEND_MVT_i8_MVT_i64_r(Op0, Op0IsKill);
926 default: return 0;
927}
928}
929
930unsigned FastEmit_ISD_ZERO_EXTEND_MVT_i16_MVT_i32_r(unsigned Op0, bool Op0IsKill) {
931 return FastEmitInst_r(X86::MOVZX32rr16, X86::GR32RegisterClass, Op0, Op0IsKill);
932}
933
934unsigned FastEmit_ISD_ZERO_EXTEND_MVT_i16_MVT_i64_r(unsigned Op0, bool Op0IsKill) {
935 return FastEmitInst_r(X86::MOVZX64rr16, X86::GR64RegisterClass, Op0, Op0IsKill);
936}
937
938unsigned FastEmit_ISD_ZERO_EXTEND_MVT_i16_r(MVT RetVT, unsigned Op0, bool Op0IsKill) {
939switch (RetVT.SimpleTy) {
940 case MVT::i32: return FastEmit_ISD_ZERO_EXTEND_MVT_i16_MVT_i32_r(Op0, Op0IsKill);
941 case MVT::i64: return FastEmit_ISD_ZERO_EXTEND_MVT_i16_MVT_i64_r(Op0, Op0IsKill);
942 default: return 0;
943}
944}
945
946unsigned FastEmit_ISD_ZERO_EXTEND_MVT_i32_r(MVT RetVT, unsigned Op0, bool Op0IsKill) {
947 if (RetVT.SimpleTy != MVT::i64)
948 return 0;
949 return FastEmitInst_r(X86::MOVZX64rr32, X86::GR64RegisterClass, Op0, Op0IsKill);
950}
951
952unsigned FastEmit_ISD_ZERO_EXTEND_r(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill) {
953 switch (VT.SimpleTy) {
954 case MVT::i8: return FastEmit_ISD_ZERO_EXTEND_MVT_i8_r(RetVT, Op0, Op0IsKill);
955 case MVT::i16: return FastEmit_ISD_ZERO_EXTEND_MVT_i16_r(RetVT, Op0, Op0IsKill);
956 case MVT::i32: return FastEmit_ISD_ZERO_EXTEND_MVT_i32_r(RetVT, Op0, Op0IsKill);
957 default: return 0;
958 }
959}
960
961// FastEmit functions for X86ISD::CALL.
962
963unsigned FastEmit_X86ISD_CALL_MVT_i32_r(MVT RetVT, unsigned Op0, bool Op0IsKill) {
964 if (RetVT.SimpleTy != MVT::isVoid)
965 return 0;
966 return FastEmitInst_r(X86::CALL32r, X86::GR32RegisterClass, Op0, Op0IsKill);
967}
968
969unsigned FastEmit_X86ISD_CALL_MVT_i64_r(MVT RetVT, unsigned Op0, bool Op0IsKill) {
970 if (RetVT.SimpleTy != MVT::isVoid)
971 return 0;
972 if ((!Subtarget->isTargetWin64())) {
973 return FastEmitInst_r(X86::CALL64r, X86::GR64RegisterClass, Op0, Op0IsKill);
974 }
975 if ((Subtarget->isTargetWin64())) {
976 return FastEmitInst_r(X86::WINCALL64r, X86::GR64RegisterClass, Op0, Op0IsKill);
977 }
978 return 0;
979}
980
981unsigned FastEmit_X86ISD_CALL_r(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill) {
982 switch (VT.SimpleTy) {
983 case MVT::i32: return FastEmit_X86ISD_CALL_MVT_i32_r(RetVT, Op0, Op0IsKill);
984 case MVT::i64: return FastEmit_X86ISD_CALL_MVT_i64_r(RetVT, Op0, Op0IsKill);
985 default: return 0;
986 }
987}
988
989// FastEmit functions for X86ISD::EH_RETURN.
990
991unsigned FastEmit_X86ISD_EH_RETURN_MVT_i32_r(MVT RetVT, unsigned Op0, bool Op0IsKill) {
992 if (RetVT.SimpleTy != MVT::isVoid)
993 return 0;
994 return FastEmitInst_r(X86::EH_RETURN, X86::GR32RegisterClass, Op0, Op0IsKill);
995}
996
997unsigned FastEmit_X86ISD_EH_RETURN_MVT_i64_r(MVT RetVT, unsigned Op0, bool Op0IsKill) {
998 if (RetVT.SimpleTy != MVT::isVoid)
999 return 0;
1000 return FastEmitInst_r(X86::EH_RETURN64, X86::GR64RegisterClass, Op0, Op0IsKill);
1001}
1002
1003unsigned FastEmit_X86ISD_EH_RETURN_r(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill) {
1004 switch (VT.SimpleTy) {
1005 case MVT::i32: return FastEmit_X86ISD_EH_RETURN_MVT_i32_r(RetVT, Op0, Op0IsKill);
1006 case MVT::i64: return FastEmit_X86ISD_EH_RETURN_MVT_i64_r(RetVT, Op0, Op0IsKill);
1007 default: return 0;
1008 }
1009}
1010
1011// FastEmit functions for X86ISD::FRCP.
1012
1013unsigned FastEmit_X86ISD_FRCP_MVT_f32_r(MVT RetVT, unsigned Op0, bool Op0IsKill) {
1014 if (RetVT.SimpleTy != MVT::f32)
1015 return 0;
1016 if ((Subtarget->hasSSE1() && !Subtarget->hasAVX())) {
1017 return FastEmitInst_r(X86::RCPSSr, X86::FR32RegisterClass, Op0, Op0IsKill);
1018 }
1019 return 0;
1020}
1021
1022unsigned FastEmit_X86ISD_FRCP_MVT_v4f32_r(MVT RetVT, unsigned Op0, bool Op0IsKill) {
1023 if (RetVT.SimpleTy != MVT::v4f32)
1024 return 0;
1025 if ((Subtarget->hasAVX())) {
1026 return FastEmitInst_r(X86::VRCPPSr, X86::VR128RegisterClass, Op0, Op0IsKill);
1027 }
1028 if ((Subtarget->hasSSE1() && !Subtarget->hasAVX())) {
1029 return FastEmitInst_r(X86::RCPPSr, X86::VR128RegisterClass, Op0, Op0IsKill);
1030 }
1031 return 0;
1032}
1033
1034unsigned FastEmit_X86ISD_FRCP_MVT_v8f32_r(MVT RetVT, unsigned Op0, bool Op0IsKill) {
1035 if (RetVT.SimpleTy != MVT::v8f32)
1036 return 0;
1037 if ((Subtarget->hasAVX())) {
1038 return FastEmitInst_r(X86::VRCPPSYr, X86::VR256RegisterClass, Op0, Op0IsKill);
1039 }
1040 return 0;
1041}
1042
1043unsigned FastEmit_X86ISD_FRCP_r(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill) {
1044 switch (VT.SimpleTy) {
1045 case MVT::f32: return FastEmit_X86ISD_FRCP_MVT_f32_r(RetVT, Op0, Op0IsKill);
1046 case MVT::v4f32: return FastEmit_X86ISD_FRCP_MVT_v4f32_r(RetVT, Op0, Op0IsKill);
1047 case MVT::v8f32: return FastEmit_X86ISD_FRCP_MVT_v8f32_r(RetVT, Op0, Op0IsKill);
1048 default: return 0;
1049 }
1050}
1051
1052// FastEmit functions for X86ISD::FRSQRT.
1053
1054unsigned FastEmit_X86ISD_FRSQRT_MVT_f32_r(MVT RetVT, unsigned Op0, bool Op0IsKill) {
1055 if (RetVT.SimpleTy != MVT::f32)
1056 return 0;
1057 if ((Subtarget->hasSSE1() && !Subtarget->hasAVX())) {
1058 return FastEmitInst_r(X86::RSQRTSSr, X86::FR32RegisterClass, Op0, Op0IsKill);
1059 }
1060 return 0;
1061}
1062
1063unsigned FastEmit_X86ISD_FRSQRT_MVT_v4f32_r(MVT RetVT, unsigned Op0, bool Op0IsKill) {
1064 if (RetVT.SimpleTy != MVT::v4f32)
1065 return 0;
1066 if ((Subtarget->hasAVX())) {
1067 return FastEmitInst_r(X86::VRSQRTPSr, X86::VR128RegisterClass, Op0, Op0IsKill);
1068 }
1069 if ((Subtarget->hasSSE1() && !Subtarget->hasAVX())) {
1070 return FastEmitInst_r(X86::RSQRTPSr, X86::VR128RegisterClass, Op0, Op0IsKill);
1071 }
1072 return 0;
1073}
1074
1075unsigned FastEmit_X86ISD_FRSQRT_MVT_v8f32_r(MVT RetVT, unsigned Op0, bool Op0IsKill) {
1076 if (RetVT.SimpleTy != MVT::v8f32)
1077 return 0;
1078 if ((Subtarget->hasAVX())) {
1079 return FastEmitInst_r(X86::VRSQRTPSYr, X86::VR256RegisterClass, Op0, Op0IsKill);
1080 }
1081 return 0;
1082}
1083
1084unsigned FastEmit_X86ISD_FRSQRT_r(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill) {
1085 switch (VT.SimpleTy) {
1086 case MVT::f32: return FastEmit_X86ISD_FRSQRT_MVT_f32_r(RetVT, Op0, Op0IsKill);
1087 case MVT::v4f32: return FastEmit_X86ISD_FRSQRT_MVT_v4f32_r(RetVT, Op0, Op0IsKill);
1088 case MVT::v8f32: return FastEmit_X86ISD_FRSQRT_MVT_v8f32_r(RetVT, Op0, Op0IsKill);
1089 default: return 0;
1090 }
1091}
1092
1093// FastEmit functions for X86ISD::MEMBARRIER.
1094
1095unsigned FastEmit_X86ISD_MEMBARRIER_MVT_i64_r(MVT RetVT, unsigned Op0, bool Op0IsKill) {
1096 if (RetVT.SimpleTy != MVT::isVoid)
1097 return 0;
1098 if ((Subtarget->is64Bit())) {
1099 return FastEmitInst_r(X86::Int_MemBarrierNoSSE64, X86::GR64RegisterClass, Op0, Op0IsKill);
1100 }
1101 return 0;
1102}
1103
1104unsigned FastEmit_X86ISD_MEMBARRIER_r(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill) {
1105 switch (VT.SimpleTy) {
1106 case MVT::i64: return FastEmit_X86ISD_MEMBARRIER_MVT_i64_r(RetVT, Op0, Op0IsKill);
1107 default: return 0;
1108 }
1109}
1110
1111// FastEmit functions for X86ISD::MOVQ2DQ.
1112
1113unsigned FastEmit_X86ISD_MOVQ2DQ_MVT_v1i64_r(MVT RetVT, unsigned Op0, bool Op0IsKill) {
1114 if (RetVT.SimpleTy != MVT::v2i64)
1115 return 0;
1116 return FastEmitInst_r(X86::MMX_MOVQ2DQrr, X86::VR128RegisterClass, Op0, Op0IsKill);
1117}
1118
1119unsigned FastEmit_X86ISD_MOVQ2DQ_r(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill) {
1120 switch (VT.SimpleTy) {
1121 case MVT::v1i64: return FastEmit_X86ISD_MOVQ2DQ_MVT_v1i64_r(RetVT, Op0, Op0IsKill);
1122 default: return 0;
1123 }
1124}
1125
1126// FastEmit functions for X86ISD::MOVSHDUP.
1127
1128unsigned FastEmit_X86ISD_MOVSHDUP_MVT_v4i32_r(MVT RetVT, unsigned Op0, bool Op0IsKill) {
1129 if (RetVT.SimpleTy != MVT::v4i32)
1130 return 0;
1131 return FastEmitInst_r(X86::MOVSHDUPrr, X86::VR128RegisterClass, Op0, Op0IsKill);
1132}
1133
1134unsigned FastEmit_X86ISD_MOVSHDUP_MVT_v4f32_r(MVT RetVT, unsigned Op0, bool Op0IsKill) {
1135 if (RetVT.SimpleTy != MVT::v4f32)
1136 return 0;
1137 return FastEmitInst_r(X86::MOVSHDUPrr, X86::VR128RegisterClass, Op0, Op0IsKill);
1138}
1139
1140unsigned FastEmit_X86ISD_MOVSHDUP_r(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill) {
1141 switch (VT.SimpleTy) {
1142 case MVT::v4i32: return FastEmit_X86ISD_MOVSHDUP_MVT_v4i32_r(RetVT, Op0, Op0IsKill);
1143 case MVT::v4f32: return FastEmit_X86ISD_MOVSHDUP_MVT_v4f32_r(RetVT, Op0, Op0IsKill);
1144 default: return 0;
1145 }
1146}
1147
1148// FastEmit functions for X86ISD::MOVSLDUP.
1149
1150unsigned FastEmit_X86ISD_MOVSLDUP_MVT_v4i32_r(MVT RetVT, unsigned Op0, bool Op0IsKill) {
1151 if (RetVT.SimpleTy != MVT::v4i32)
1152 return 0;
1153 return FastEmitInst_r(X86::MOVSLDUPrr, X86::VR128RegisterClass, Op0, Op0IsKill);
1154}
1155
1156unsigned FastEmit_X86ISD_MOVSLDUP_MVT_v4f32_r(MVT RetVT, unsigned Op0, bool Op0IsKill) {
1157 if (RetVT.SimpleTy != MVT::v4f32)
1158 return 0;
1159 return FastEmitInst_r(X86::MOVSLDUPrr, X86::VR128RegisterClass, Op0, Op0IsKill);
1160}
1161
1162unsigned FastEmit_X86ISD_MOVSLDUP_r(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill) {
1163 switch (VT.SimpleTy) {
1164 case MVT::v4i32: return FastEmit_X86ISD_MOVSLDUP_MVT_v4i32_r(RetVT, Op0, Op0IsKill);
1165 case MVT::v4f32: return FastEmit_X86ISD_MOVSLDUP_MVT_v4f32_r(RetVT, Op0, Op0IsKill);
1166 default: return 0;
1167 }
1168}
1169
1170// FastEmit functions for X86ISD::VZEXT_MOVL.
1171
1172unsigned FastEmit_X86ISD_VZEXT_MOVL_MVT_v2i64_r(MVT RetVT, unsigned Op0, bool Op0IsKill) {
1173 if (RetVT.SimpleTy != MVT::v2i64)
1174 return 0;
1175 if ((Subtarget->hasAVX())) {
1176 return FastEmitInst_r(X86::VMOVZPQILo2PQIrr, X86::VR128RegisterClass, Op0, Op0IsKill);
1177 }
1178 if ((Subtarget->hasSSE2() && !Subtarget->hasAVX())) {
1179 return FastEmitInst_r(X86::MOVZPQILo2PQIrr, X86::VR128RegisterClass, Op0, Op0IsKill);
1180 }
1181 return 0;
1182}
1183
1184unsigned FastEmit_X86ISD_VZEXT_MOVL_MVT_v2f64_r(MVT RetVT, unsigned Op0, bool Op0IsKill) {
1185 if (RetVT.SimpleTy != MVT::v2f64)
1186 return 0;
1187 if ((Subtarget->hasSSE2() && !Subtarget->hasAVX())) {
1188 return FastEmitInst_r(X86::MOVZPQILo2PQIrr, X86::VR128RegisterClass, Op0, Op0IsKill);
1189 }
1190 return 0;
1191}
1192
1193unsigned FastEmit_X86ISD_VZEXT_MOVL_r(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill) {
1194 switch (VT.SimpleTy) {
1195 case MVT::v2i64: return FastEmit_X86ISD_VZEXT_MOVL_MVT_v2i64_r(RetVT, Op0, Op0IsKill);
1196 case MVT::v2f64: return FastEmit_X86ISD_VZEXT_MOVL_MVT_v2f64_r(RetVT, Op0, Op0IsKill);
1197 default: return 0;
1198 }
1199}
1200
1201// Top-level FastEmit function.
1202
1203unsigned FastEmit_r(MVT VT, MVT RetVT, unsigned Opcode, unsigned Op0, bool Op0IsKill) {
1204 switch (Opcode) {
1205 case ISD::ANY_EXTEND: return FastEmit_ISD_ANY_EXTEND_r(VT, RetVT, Op0, Op0IsKill);
1206 case ISD::BIT_CONVERT: return FastEmit_ISD_BIT_CONVERT_r(VT, RetVT, Op0, Op0IsKill);
1207 case ISD::BRIND: return FastEmit_ISD_BRIND_r(VT, RetVT, Op0, Op0IsKill);
1208 case ISD::BSWAP: return FastEmit_ISD_BSWAP_r(VT, RetVT, Op0, Op0IsKill);
1209 case ISD::FABS: return FastEmit_ISD_FABS_r(VT, RetVT, Op0, Op0IsKill);
1210 case ISD::FCOS: return FastEmit_ISD_FCOS_r(VT, RetVT, Op0, Op0IsKill);
1211 case ISD::FNEG: return FastEmit_ISD_FNEG_r(VT, RetVT, Op0, Op0IsKill);
1212 case ISD::FP_EXTEND: return FastEmit_ISD_FP_EXTEND_r(VT, RetVT, Op0, Op0IsKill);
1213 case ISD::FP_ROUND: return FastEmit_ISD_FP_ROUND_r(VT, RetVT, Op0, Op0IsKill);
1214 case ISD::FP_TO_SINT: return FastEmit_ISD_FP_TO_SINT_r(VT, RetVT, Op0, Op0IsKill);
1215 case ISD::FSIN: return FastEmit_ISD_FSIN_r(VT, RetVT, Op0, Op0IsKill);
1216 case ISD::FSQRT: return FastEmit_ISD_FSQRT_r(VT, RetVT, Op0, Op0IsKill);
1217 case ISD::SCALAR_TO_VECTOR: return FastEmit_ISD_SCALAR_TO_VECTOR_r(VT, RetVT, Op0, Op0IsKill);
1218 case ISD::SIGN_EXTEND: return FastEmit_ISD_SIGN_EXTEND_r(VT, RetVT, Op0, Op0IsKill);
1219 case ISD::SINT_TO_FP: return FastEmit_ISD_SINT_TO_FP_r(VT, RetVT, Op0, Op0IsKill);
1220 case ISD::TRUNCATE: return FastEmit_ISD_TRUNCATE_r(VT, RetVT, Op0, Op0IsKill);
1221 case ISD::ZERO_EXTEND: return FastEmit_ISD_ZERO_EXTEND_r(VT, RetVT, Op0, Op0IsKill);
1222 case X86ISD::CALL: return FastEmit_X86ISD_CALL_r(VT, RetVT, Op0, Op0IsKill);
1223 case X86ISD::EH_RETURN: return FastEmit_X86ISD_EH_RETURN_r(VT, RetVT, Op0, Op0IsKill);
1224 case X86ISD::FRCP: return FastEmit_X86ISD_FRCP_r(VT, RetVT, Op0, Op0IsKill);
1225 case X86ISD::FRSQRT: return FastEmit_X86ISD_FRSQRT_r(VT, RetVT, Op0, Op0IsKill);
1226 case X86ISD::MEMBARRIER: return FastEmit_X86ISD_MEMBARRIER_r(VT, RetVT, Op0, Op0IsKill);
1227 case X86ISD::MOVQ2DQ: return FastEmit_X86ISD_MOVQ2DQ_r(VT, RetVT, Op0, Op0IsKill);
1228 case X86ISD::MOVSHDUP: return FastEmit_X86ISD_MOVSHDUP_r(VT, RetVT, Op0, Op0IsKill);
1229 case X86ISD::MOVSLDUP: return FastEmit_X86ISD_MOVSLDUP_r(VT, RetVT, Op0, Op0IsKill);
1230 case X86ISD::VZEXT_MOVL: return FastEmit_X86ISD_VZEXT_MOVL_r(VT, RetVT, Op0, Op0IsKill);
1231 default: return 0;
1232 }
1233}
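// Example (illustrative): FastEmit_r(MVT::i8, MVT::i32, ISD::ZERO_EXTEND,
// Reg, RegIsKill) dispatches to FastEmit_ISD_ZERO_EXTEND_MVT_i8_MVT_i32_r
// and emits X86::MOVZX32rr8; here as elsewhere, a return value of 0 means
// that no fast-isel pattern matched.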
1234
1235// FastEmit functions for ISD::ADD.
1236
1237unsigned FastEmit_ISD_ADD_MVT_i8_ri(MVT RetVT, unsigned Op0, bool Op0IsKill, uint64_t imm1) {
1238 if (RetVT.SimpleTy != MVT::i8)
1239 return 0;
1240 return FastEmitInst_ri(X86::ADD8ri, X86::GR8RegisterClass, Op0, Op0IsKill, imm1);
1241}
1242
1243unsigned FastEmit_ISD_ADD_MVT_i16_ri(MVT RetVT, unsigned Op0, bool Op0IsKill, uint64_t imm1) {
1244 if (RetVT.SimpleTy != MVT::i16)
1245 return 0;
1246 return FastEmitInst_ri(X86::ADD16ri, X86::GR16RegisterClass, Op0, Op0IsKill, imm1);
1247}
1248
1249unsigned FastEmit_ISD_ADD_MVT_i32_ri(MVT RetVT, unsigned Op0, bool Op0IsKill, uint64_t imm1) {
1250 if (RetVT.SimpleTy != MVT::i32)
1251 return 0;
1252 return FastEmitInst_ri(X86::ADD32ri, X86::GR32RegisterClass, Op0, Op0IsKill, imm1);
1253}
1254
1255unsigned FastEmit_ISD_ADD_ri(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill, uint64_t imm1) {
1256 switch (VT.SimpleTy) {
1257 case MVT::i8: return FastEmit_ISD_ADD_MVT_i8_ri(RetVT, Op0, Op0IsKill, imm1);
1258 case MVT::i16: return FastEmit_ISD_ADD_MVT_i16_ri(RetVT, Op0, Op0IsKill, imm1);
1259 case MVT::i32: return FastEmit_ISD_ADD_MVT_i32_ri(RetVT, Op0, Op0IsKill, imm1);
1260 default: return 0;
1261 }
1262}
1263
1264// FastEmit functions for ISD::ADDC.
1265
1266unsigned FastEmit_ISD_ADDC_MVT_i32_ri(MVT RetVT, unsigned Op0, bool Op0IsKill, uint64_t imm1) {
1267 if (RetVT.SimpleTy != MVT::i32)
1268 return 0;
1269 return FastEmitInst_ri(X86::ADD32ri, X86::GR32RegisterClass, Op0, Op0IsKill, imm1);
1270}
1271
1272unsigned FastEmit_ISD_ADDC_ri(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill, uint64_t imm1) {
1273 switch (VT.SimpleTy) {
1274 case MVT::i32: return FastEmit_ISD_ADDC_MVT_i32_ri(RetVT, Op0, Op0IsKill, imm1);
1275 default: return 0;
1276 }
1277}
1278
1279// FastEmit functions for ISD::ADDE.
1280
1281unsigned FastEmit_ISD_ADDE_MVT_i8_ri(MVT RetVT, unsigned Op0, bool Op0IsKill, uint64_t imm1) {
1282 if (RetVT.SimpleTy != MVT::i8)
1283 return 0;
1284 return FastEmitInst_ri(X86::ADC8ri, X86::GR8RegisterClass, Op0, Op0IsKill, imm1);
1285}
1286
1287unsigned FastEmit_ISD_ADDE_MVT_i16_ri(MVT RetVT, unsigned Op0, bool Op0IsKill, uint64_t imm1) {
1288 if (RetVT.SimpleTy != MVT::i16)
1289 return 0;
1290 return FastEmitInst_ri(X86::ADC16ri, X86::GR16RegisterClass, Op0, Op0IsKill, imm1);
1291}
1292
1293unsigned FastEmit_ISD_ADDE_MVT_i32_ri(MVT RetVT, unsigned Op0, bool Op0IsKill, uint64_t imm1) {
1294 if (RetVT.SimpleTy != MVT::i32)
1295 return 0;
1296 return FastEmitInst_ri(X86::ADC32ri, X86::GR32RegisterClass, Op0, Op0IsKill, imm1);
1297}
1298
1299unsigned FastEmit_ISD_ADDE_ri(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill, uint64_t imm1) {
1300 switch (VT.SimpleTy) {
1301 case MVT::i8: return FastEmit_ISD_ADDE_MVT_i8_ri(RetVT, Op0, Op0IsKill, imm1);
1302 case MVT::i16: return FastEmit_ISD_ADDE_MVT_i16_ri(RetVT, Op0, Op0IsKill, imm1);
1303 case MVT::i32: return FastEmit_ISD_ADDE_MVT_i32_ri(RetVT, Op0, Op0IsKill, imm1);
1304 default: return 0;
1305 }
1306}
1307
1308// FastEmit functions for ISD::AND.
1309
1310unsigned FastEmit_ISD_AND_MVT_i8_ri(MVT RetVT, unsigned Op0, bool Op0IsKill, uint64_t imm1) {
1311 if (RetVT.SimpleTy != MVT::i8)
1312 return 0;
1313 return FastEmitInst_ri(X86::AND8ri, X86::GR8RegisterClass, Op0, Op0IsKill, imm1);
1314}
1315
1316unsigned FastEmit_ISD_AND_MVT_i16_ri(MVT RetVT, unsigned Op0, bool Op0IsKill, uint64_t imm1) {
1317 if (RetVT.SimpleTy != MVT::i16)
1318 return 0;
1319 return FastEmitInst_ri(X86::AND16ri, X86::GR16RegisterClass, Op0, Op0IsKill, imm1);
1320}
1321
1322unsigned FastEmit_ISD_AND_MVT_i32_ri(MVT RetVT, unsigned Op0, bool Op0IsKill, uint64_t imm1) {
1323 if (RetVT.SimpleTy != MVT::i32)
1324 return 0;
1325 return FastEmitInst_ri(X86::AND32ri, X86::GR32RegisterClass, Op0, Op0IsKill, imm1);
1326}
1327
1328unsigned FastEmit_ISD_AND_ri(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill, uint64_t imm1) {
1329 switch (VT.SimpleTy) {
1330 case MVT::i8: return FastEmit_ISD_AND_MVT_i8_ri(RetVT, Op0, Op0IsKill, imm1);
1331 case MVT::i16: return FastEmit_ISD_AND_MVT_i16_ri(RetVT, Op0, Op0IsKill, imm1);
1332 case MVT::i32: return FastEmit_ISD_AND_MVT_i32_ri(RetVT, Op0, Op0IsKill, imm1);
1333 default: return 0;
1334 }
1335}
1336
1337// FastEmit functions for ISD::MUL.
1338
1339unsigned FastEmit_ISD_MUL_MVT_i16_ri(MVT RetVT, unsigned Op0, bool Op0IsKill, uint64_t imm1) {
1340 if (RetVT.SimpleTy != MVT::i16)
1341 return 0;
1342 return FastEmitInst_ri(X86::IMUL16rri, X86::GR16RegisterClass, Op0, Op0IsKill, imm1);
1343}
1344
1345unsigned FastEmit_ISD_MUL_MVT_i32_ri(MVT RetVT, unsigned Op0, bool Op0IsKill, uint64_t imm1) {
1346 if (RetVT.SimpleTy != MVT::i32)
1347 return 0;
1348 return FastEmitInst_ri(X86::IMUL32rri, X86::GR32RegisterClass, Op0, Op0IsKill, imm1);
1349}
1350
1351unsigned FastEmit_ISD_MUL_ri(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill, uint64_t imm1) {
1352 switch (VT.SimpleTy) {
1353 case MVT::i16: return FastEmit_ISD_MUL_MVT_i16_ri(RetVT, Op0, Op0IsKill, imm1);
1354 case MVT::i32: return FastEmit_ISD_MUL_MVT_i32_ri(RetVT, Op0, Op0IsKill, imm1);
1355 default: return 0;
1356 }
1357}
1358
1359// FastEmit functions for ISD::OR.
1360
1361unsigned FastEmit_ISD_OR_MVT_i8_ri(MVT RetVT, unsigned Op0, bool Op0IsKill, uint64_t imm1) {
1362 if (RetVT.SimpleTy != MVT::i8)
1363 return 0;
1364 return FastEmitInst_ri(X86::OR8ri, X86::GR8RegisterClass, Op0, Op0IsKill, imm1);
1365}
1366
1367unsigned FastEmit_ISD_OR_MVT_i16_ri(MVT RetVT, unsigned Op0, bool Op0IsKill, uint64_t imm1) {
1368 if (RetVT.SimpleTy != MVT::i16)
1369 return 0;
1370 return FastEmitInst_ri(X86::OR16ri, X86::GR16RegisterClass, Op0, Op0IsKill, imm1);
1371}
1372
1373unsigned FastEmit_ISD_OR_MVT_i32_ri(MVT RetVT, unsigned Op0, bool Op0IsKill, uint64_t imm1) {
1374 if (RetVT.SimpleTy != MVT::i32)
1375 return 0;
1376 return FastEmitInst_ri(X86::OR32ri, X86::GR32RegisterClass, Op0, Op0IsKill, imm1);
1377}
1378
1379unsigned FastEmit_ISD_OR_ri(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill, uint64_t imm1) {
1380 switch (VT.SimpleTy) {
1381 case MVT::i8: return FastEmit_ISD_OR_MVT_i8_ri(RetVT, Op0, Op0IsKill, imm1);
1382 case MVT::i16: return FastEmit_ISD_OR_MVT_i16_ri(RetVT, Op0, Op0IsKill, imm1);
1383 case MVT::i32: return FastEmit_ISD_OR_MVT_i32_ri(RetVT, Op0, Op0IsKill, imm1);
1384 default: return 0;
1385 }
1386}
1387
1388// FastEmit functions for ISD::ROTL.
1389
1390unsigned FastEmit_ISD_ROTL_MVT_i8_ri(MVT RetVT, unsigned Op0, bool Op0IsKill, uint64_t imm1) {
1391 if (RetVT.SimpleTy != MVT::i8)
1392 return 0;
1393 return FastEmitInst_ri(X86::ROL8ri, X86::GR8RegisterClass, Op0, Op0IsKill, imm1);
1394}
1395
1396unsigned FastEmit_ISD_ROTL_ri(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill, uint64_t imm1) {
1397 switch (VT.SimpleTy) {
1398 case MVT::i8: return FastEmit_ISD_ROTL_MVT_i8_ri(RetVT, Op0, Op0IsKill, imm1);
1399 default: return 0;
1400 }
1401}
1402
1403// FastEmit functions for ISD::ROTR.
1404
1405unsigned FastEmit_ISD_ROTR_MVT_i8_ri(MVT RetVT, unsigned Op0, bool Op0IsKill, uint64_t imm1) {
1406 if (RetVT.SimpleTy != MVT::i8)
1407 return 0;
1408 return FastEmitInst_ri(X86::ROR8ri, X86::GR8RegisterClass, Op0, Op0IsKill, imm1);
1409}
1410
1411unsigned FastEmit_ISD_ROTR_ri(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill, uint64_t imm1) {
1412 switch (VT.SimpleTy) {
1413 case MVT::i8: return FastEmit_ISD_ROTR_MVT_i8_ri(RetVT, Op0, Op0IsKill, imm1);
1414 default: return 0;
1415 }
1416}
1417
1418// FastEmit functions for ISD::SHL.
1419
1420unsigned FastEmit_ISD_SHL_MVT_i8_ri(MVT RetVT, unsigned Op0, bool Op0IsKill, uint64_t imm1) {
1421 if (RetVT.SimpleTy != MVT::i8)
1422 return 0;
1423 return FastEmitInst_ri(X86::SHL8ri, X86::GR8RegisterClass, Op0, Op0IsKill, imm1);
1424}
1425
1426unsigned FastEmit_ISD_SHL_ri(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill, uint64_t imm1) {
1427 switch (VT.SimpleTy) {
1428 case MVT::i8: return FastEmit_ISD_SHL_MVT_i8_ri(RetVT, Op0, Op0IsKill, imm1);
1429 default: return 0;
1430 }
1431}
1432
1433// FastEmit functions for ISD::SRA.
1434
1435unsigned FastEmit_ISD_SRA_MVT_i8_ri(MVT RetVT, unsigned Op0, bool Op0IsKill, uint64_t imm1) {
1436 if (RetVT.SimpleTy != MVT::i8)
1437 return 0;
1438 return FastEmitInst_ri(X86::SAR8ri, X86::GR8RegisterClass, Op0, Op0IsKill, imm1);
1439}
1440
1441unsigned FastEmit_ISD_SRA_ri(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill, uint64_t imm1) {
1442 switch (VT.SimpleTy) {
1443 case MVT::i8: return FastEmit_ISD_SRA_MVT_i8_ri(RetVT, Op0, Op0IsKill, imm1);
1444 default: return 0;
1445 }
1446}
1447
1448// FastEmit functions for ISD::SRL.
1449
1450unsigned FastEmit_ISD_SRL_MVT_i8_ri(MVT RetVT, unsigned Op0, bool Op0IsKill, uint64_t imm1) {
1451 if (RetVT.SimpleTy != MVT::i8)
1452 return 0;
1453 return FastEmitInst_ri(X86::SHR8ri, X86::GR8RegisterClass, Op0, Op0IsKill, imm1);
1454}
1455
1456unsigned FastEmit_ISD_SRL_ri(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill, uint64_t imm1) {
1457 switch (VT.SimpleTy) {
1458 case MVT::i8: return FastEmit_ISD_SRL_MVT_i8_ri(RetVT, Op0, Op0IsKill, imm1);
1459 default: return 0;
1460 }
1461}
1462
1463// FastEmit functions for ISD::SUB.
1464
1465unsigned FastEmit_ISD_SUB_MVT_i8_ri(MVT RetVT, unsigned Op0, bool Op0IsKill, uint64_t imm1) {
1466 if (RetVT.SimpleTy != MVT::i8)
1467 return 0;
1468 return FastEmitInst_ri(X86::SUB8ri, X86::GR8RegisterClass, Op0, Op0IsKill, imm1);
1469}
1470
1471unsigned FastEmit_ISD_SUB_MVT_i16_ri(MVT RetVT, unsigned Op0, bool Op0IsKill, uint64_t imm1) {
1472 if (RetVT.SimpleTy != MVT::i16)
1473 return 0;
1474 return FastEmitInst_ri(X86::SUB16ri, X86::GR16RegisterClass, Op0, Op0IsKill, imm1);
1475}
1476
1477unsigned FastEmit_ISD_SUB_MVT_i32_ri(MVT RetVT, unsigned Op0, bool Op0IsKill, uint64_t imm1) {
1478 if (RetVT.SimpleTy != MVT::i32)
1479 return 0;
1480 return FastEmitInst_ri(X86::SUB32ri, X86::GR32RegisterClass, Op0, Op0IsKill, imm1);
1481}
1482
1483unsigned FastEmit_ISD_SUB_ri(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill, uint64_t imm1) {
1484 switch (VT.SimpleTy) {
1485 case MVT::i8: return FastEmit_ISD_SUB_MVT_i8_ri(RetVT, Op0, Op0IsKill, imm1);
1486 case MVT::i16: return FastEmit_ISD_SUB_MVT_i16_ri(RetVT, Op0, Op0IsKill, imm1);
1487 case MVT::i32: return FastEmit_ISD_SUB_MVT_i32_ri(RetVT, Op0, Op0IsKill, imm1);
1488 default: return 0;
1489 }
1490}
1491
1492// FastEmit functions for ISD::SUBC.
1493
1494unsigned FastEmit_ISD_SUBC_MVT_i32_ri(MVT RetVT, unsigned Op0, bool Op0IsKill, uint64_t imm1) {
1495 if (RetVT.SimpleTy != MVT::i32)
1496 return 0;
1497 return FastEmitInst_ri(X86::SUB32ri, X86::GR32RegisterClass, Op0, Op0IsKill, imm1);
1498}
1499
1500unsigned FastEmit_ISD_SUBC_MVT_i64_ri(MVT RetVT, unsigned Op0, bool Op0IsKill, uint64_t imm1) {
1501 if (RetVT.SimpleTy != MVT::i64)
1502 return 0;
1503 return FastEmitInst_ri(X86::SUB64ri32, X86::GR64RegisterClass, Op0, Op0IsKill, imm1);
1504}
1505
1506unsigned FastEmit_ISD_SUBC_ri(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill, uint64_t imm1) {
1507 switch (VT.SimpleTy) {
1508 case MVT::i32: return FastEmit_ISD_SUBC_MVT_i32_ri(RetVT, Op0, Op0IsKill, imm1);
1509 case MVT::i64: return FastEmit_ISD_SUBC_MVT_i64_ri(RetVT, Op0, Op0IsKill, imm1);
1510 default: return 0;
1511 }
1512}
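// ISD::SUBC maps onto the plain SUB instructions: x86 subtraction already
// leaves the borrow in EFLAGS for a following SBB. Note that SUB64ri32 only
// encodes a 32-bit immediate, which the hardware sign-extends to 64 bits.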
1513
1514// FastEmit functions for ISD::SUBE.
1515
1516unsigned FastEmit_ISD_SUBE_MVT_i8_ri(MVT RetVT, unsigned Op0, bool Op0IsKill, uint64_t imm1) {
1517 if (RetVT.SimpleTy != MVT::i8)
1518 return 0;
1519 return FastEmitInst_ri(X86::SBB8ri, X86::GR8RegisterClass, Op0, Op0IsKill, imm1);
1520}
1521
1522unsigned FastEmit_ISD_SUBE_MVT_i16_ri(MVT RetVT, unsigned Op0, bool Op0IsKill, uint64_t imm1) {
1523 if (RetVT.SimpleTy != MVT::i16)
1524 return 0;
1525 return FastEmitInst_ri(X86::SBB16ri, X86::GR16RegisterClass, Op0, Op0IsKill, imm1);
1526}
1527
1528unsigned FastEmit_ISD_SUBE_MVT_i32_ri(MVT RetVT, unsigned Op0, bool Op0IsKill, uint64_t imm1) {
1529 if (RetVT.SimpleTy != MVT::i32)
1530 return 0;
1531 return FastEmitInst_ri(X86::SBB32ri, X86::GR32RegisterClass, Op0, Op0IsKill, imm1);
1532}
1533
1534unsigned FastEmit_ISD_SUBE_ri(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill, uint64_t imm1) {
1535 switch (VT.SimpleTy) {
1536 case MVT::i8: return FastEmit_ISD_SUBE_MVT_i8_ri(RetVT, Op0, Op0IsKill, imm1);
1537 case MVT::i16: return FastEmit_ISD_SUBE_MVT_i16_ri(RetVT, Op0, Op0IsKill, imm1);
1538 case MVT::i32: return FastEmit_ISD_SUBE_MVT_i32_ri(RetVT, Op0, Op0IsKill, imm1);
1539 default: return 0;
1540 }
1541}
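// ISD::SUBE is the second half of a wide subtraction: SBB subtracts the
// immediate plus the carry (borrow) flag left by the preceding SUB/SUBC.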
1542
1543// FastEmit functions for ISD::XOR.
1544
1545unsigned FastEmit_ISD_XOR_MVT_i8_ri(MVT RetVT, unsigned Op0, bool Op0IsKill, uint64_t imm1) {
1546 if (RetVT.SimpleTy != MVT::i8)
1547 return 0;
1548 return FastEmitInst_ri(X86::XOR8ri, X86::GR8RegisterClass, Op0, Op0IsKill, imm1);
1549}
1550
1551unsigned FastEmit_ISD_XOR_MVT_i16_ri(MVT RetVT, unsigned Op0, bool Op0IsKill, uint64_t imm1) {
1552 if (RetVT.SimpleTy != MVT::i16)
1553 return 0;
1554 return FastEmitInst_ri(X86::XOR16ri, X86::GR16RegisterClass, Op0, Op0IsKill, imm1);
1555}
1556
1557unsigned FastEmit_ISD_XOR_MVT_i32_ri(MVT RetVT, unsigned Op0, bool Op0IsKill, uint64_t imm1) {
1558 if (RetVT.SimpleTy != MVT::i32)
1559 return 0;
1560 return FastEmitInst_ri(X86::XOR32ri, X86::GR32RegisterClass, Op0, Op0IsKill, imm1);
1561}
1562
1563unsigned FastEmit_ISD_XOR_ri(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill, uint64_t imm1) {
1564 switch (VT.SimpleTy) {
1565 case MVT::i8: return FastEmit_ISD_XOR_MVT_i8_ri(RetVT, Op0, Op0IsKill, imm1);
1566 case MVT::i16: return FastEmit_ISD_XOR_MVT_i16_ri(RetVT, Op0, Op0IsKill, imm1);
1567 case MVT::i32: return FastEmit_ISD_XOR_MVT_i32_ri(RetVT, Op0, Op0IsKill, imm1);
1568 default: return 0;
1569 }
1570}
1571
1572// FastEmit functions for X86ISD::CMP.
1573
1574unsigned FastEmit_X86ISD_CMP_MVT_i8_ri(MVT RetVT, unsigned Op0, bool Op0IsKill, uint64_t imm1) {
1575 if (RetVT.SimpleTy != MVT::i32)
1576 return 0;
1577 return FastEmitInst_ri(X86::CMP8ri, X86::GR8RegisterClass, Op0, Op0IsKill, imm1);
1578}
1579
1580unsigned FastEmit_X86ISD_CMP_MVT_i16_ri(MVT RetVT, unsigned Op0, bool Op0IsKill, uint64_t imm1) {
1581 if (RetVT.SimpleTy != MVT::i32)
1582 return 0;
1583 return FastEmitInst_ri(X86::CMP16ri, X86::GR16RegisterClass, Op0, Op0IsKill, imm1);
1584}
1585
1586unsigned FastEmit_X86ISD_CMP_MVT_i32_ri(MVT RetVT, unsigned Op0, bool Op0IsKill, uint64_t imm1) {
1587 if (RetVT.SimpleTy != MVT::i32)
1588 return 0;
1589 return FastEmitInst_ri(X86::CMP32ri, X86::GR32RegisterClass, Op0, Op0IsKill, imm1);
1590}
1591
1592unsigned FastEmit_X86ISD_CMP_ri(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill, uint64_t imm1) {
1593 switch (VT.SimpleTy) {
1594 case MVT::i8: return FastEmit_X86ISD_CMP_MVT_i8_ri(RetVT, Op0, Op0IsKill, imm1);
1595 case MVT::i16: return FastEmit_X86ISD_CMP_MVT_i16_ri(RetVT, Op0, Op0IsKill, imm1);
1596 case MVT::i32: return FastEmit_X86ISD_CMP_MVT_i32_ri(RetVT, Op0, Op0IsKill, imm1);
1597 default: return 0;
1598 }
1599}
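// For X86ISD::CMP the RetVT check against MVT::i32 refers to the node's
// flags result (modeled as an i32 value in this LLVM revision), not to the
// width of the comparison; the operand width picks CMP8ri/CMP16ri/CMP32ri.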
1600
1601// FastEmit functions for X86ISD::TC_RETURN.
1602
1603unsigned FastEmit_X86ISD_TC_RETURN_MVT_i32_ri(MVT RetVT, unsigned Op0, bool Op0IsKill, uint64_t imm1) {
1604 if (RetVT.SimpleTy != MVT::isVoid)
1605 return 0;
1606 if ((!Subtarget->is64Bit())) {
1607 return FastEmitInst_ri(X86::TCRETURNri, X86::GR32_TCRegisterClass, Op0, Op0IsKill, imm1);
1608 }
1609 return 0;
1610}
1611
1612unsigned FastEmit_X86ISD_TC_RETURN_ri(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill, uint64_t imm1) {
1613 switch (VT.SimpleTy) {
1614 case MVT::i32: return FastEmit_X86ISD_TC_RETURN_MVT_i32_ri(RetVT, Op0, Op0IsKill, imm1);
1615 default: return 0;
1616 }
1617}
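// TCRETURNri is the 32-bit tail-call pseudo, hence the !is64Bit() guard.
// The GR32_TC register class limits the callee-address register to ones
// that remain usable at the tail-call site.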
1618
1619// Top-level FastEmit function.
1620
1621unsigned FastEmit_ri(MVT VT, MVT RetVT, unsigned Opcode, unsigned Op0, bool Op0IsKill, uint64_t imm1) {
1622 switch (Opcode) {
1623 case ISD::ADD: return FastEmit_ISD_ADD_ri(VT, RetVT, Op0, Op0IsKill, imm1);
1624 case ISD::ADDC: return FastEmit_ISD_ADDC_ri(VT, RetVT, Op0, Op0IsKill, imm1);
1625 case ISD::ADDE: return FastEmit_ISD_ADDE_ri(VT, RetVT, Op0, Op0IsKill, imm1);
1626 case ISD::AND: return FastEmit_ISD_AND_ri(VT, RetVT, Op0, Op0IsKill, imm1);
1627 case ISD::MUL: return FastEmit_ISD_MUL_ri(VT, RetVT, Op0, Op0IsKill, imm1);
1628 case ISD::OR: return FastEmit_ISD_OR_ri(VT, RetVT, Op0, Op0IsKill, imm1);
1629 case ISD::ROTL: return FastEmit_ISD_ROTL_ri(VT, RetVT, Op0, Op0IsKill, imm1);
1630 case ISD::ROTR: return FastEmit_ISD_ROTR_ri(VT, RetVT, Op0, Op0IsKill, imm1);
1631 case ISD::SHL: return FastEmit_ISD_SHL_ri(VT, RetVT, Op0, Op0IsKill, imm1);
1632 case ISD::SRA: return FastEmit_ISD_SRA_ri(VT, RetVT, Op0, Op0IsKill, imm1);
1633 case ISD::SRL: return FastEmit_ISD_SRL_ri(VT, RetVT, Op0, Op0IsKill, imm1);
1634 case ISD::SUB: return FastEmit_ISD_SUB_ri(VT, RetVT, Op0, Op0IsKill, imm1);
1635 case ISD::SUBC: return FastEmit_ISD_SUBC_ri(VT, RetVT, Op0, Op0IsKill, imm1);
1636 case ISD::SUBE: return FastEmit_ISD_SUBE_ri(VT, RetVT, Op0, Op0IsKill, imm1);
1637 case ISD::XOR: return FastEmit_ISD_XOR_ri(VT, RetVT, Op0, Op0IsKill, imm1);
1638 case X86ISD::CMP: return FastEmit_X86ISD_CMP_ri(VT, RetVT, Op0, Op0IsKill, imm1);
1639 case X86ISD::TC_RETURN: return FastEmit_X86ISD_TC_RETURN_ri(VT, RetVT, Op0, Op0IsKill, imm1);
1640 default: return 0;
1641 }
1642}
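// Illustrative sketch (not part of the TableGen output): how the owning
// X86 fast instruction selector might drive this dispatcher. The register
// number, kill flag and fallback strategy below are assumptions made for
// the example only.
//
//   // Select "x + 42" where the i32 value x is already in Op0Reg:
//   unsigned ResultReg = FastEmit_ri(MVT::i32, MVT::i32, ISD::ADD,
//                                    Op0Reg, /*Op0IsKill=*/true,
//                                    /*imm1=*/42);
//   if (ResultReg == 0) {
//     // No register+immediate pattern matched for this opcode/type, so a
//     // caller would typically materialize the constant (FastEmit_i) and
//     // retry with the register+register form (FastEmit_rr).
//   }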
1643
1644// FastEmit functions for ISD::ADD.
1645
1646unsigned FastEmit_ISD_ADD_MVT_i8_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
1647 if (RetVT.SimpleTy != MVT::i8)
1648 return 0;
1649 return FastEmitInst_rr(X86::ADD8rr, X86::GR8RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
1650}
1651
1652unsigned FastEmit_ISD_ADD_MVT_i16_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
1653 if (RetVT.SimpleTy != MVT::i16)
1654 return 0;
1655 return FastEmitInst_rr(X86::ADD16rr, X86::GR16RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
1656}
1657
1658unsigned FastEmit_ISD_ADD_MVT_i32_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
1659 if (RetVT.SimpleTy != MVT::i32)
1660 return 0;
1661 return FastEmitInst_rr(X86::ADD32rr, X86::GR32RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
1662}
1663
1664unsigned FastEmit_ISD_ADD_MVT_i64_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
1665 if (RetVT.SimpleTy != MVT::i64)
1666 return 0;
1667 return FastEmitInst_rr(X86::ADD64rr, X86::GR64RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
1668}
1669
1670unsigned FastEmit_ISD_ADD_MVT_v8i8_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
1671 if (RetVT.SimpleTy != MVT::v8i8)
1672 return 0;
1673 if ((Subtarget->hasMMX() && !Subtarget->hasAVX())) {
1674 return FastEmitInst_rr(X86::MMX_PADDBrr, X86::VR64RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
1675 }
1676 return 0;
1677}
1678
1679unsigned FastEmit_ISD_ADD_MVT_v16i8_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
1680 if (RetVT.SimpleTy != MVT::v16i8)
1681 return 0;
1682 if ((Subtarget->hasAVX())) {
1683 return FastEmitInst_rr(X86::VPADDBrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
1684 }
1685 if ((Subtarget->hasSSE2() && !Subtarget->hasAVX())) {
1686 return FastEmitInst_rr(X86::PADDBrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
1687 }
1688 return 0;
1689}
1690
1691unsigned FastEmit_ISD_ADD_MVT_v4i16_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
1692 if (RetVT.SimpleTy != MVT::v4i16)
1693 return 0;
1694 if ((Subtarget->hasMMX() && !Subtarget->hasAVX())) {
1695 return FastEmitInst_rr(X86::MMX_PADDWrr, X86::VR64RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
1696 }
1697 return 0;
1698}
1699
1700unsigned FastEmit_ISD_ADD_MVT_v8i16_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
1701 if (RetVT.SimpleTy != MVT::v8i16)
1702 return 0;
1703 if ((Subtarget->hasAVX())) {
1704 return FastEmitInst_rr(X86::VPADDWrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
1705 }
1706 if ((Subtarget->hasSSE2() && !Subtarget->hasAVX())) {
1707 return FastEmitInst_rr(X86::PADDWrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
1708 }
1709 return 0;
1710}
1711
1712unsigned FastEmit_ISD_ADD_MVT_v2i32_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
1713 if (RetVT.SimpleTy != MVT::v2i32)
1714 return 0;
1715 if ((Subtarget->hasMMX() && !Subtarget->hasAVX())) {
1716 return FastEmitInst_rr(X86::MMX_PADDDrr, X86::VR64RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
1717 }
1718 return 0;
1719}
1720
1721unsigned FastEmit_ISD_ADD_MVT_v4i32_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
1722 if (RetVT.SimpleTy != MVT::v4i32)
1723 return 0;
1724 if ((Subtarget->hasAVX())) {
1725 return FastEmitInst_rr(X86::VPADDDrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
1726 }
1727 if ((Subtarget->hasSSE2() && !Subtarget->hasAVX())) {
1728 return FastEmitInst_rr(X86::PADDDrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
1729 }
1730 return 0;
1731}
1732
1733unsigned FastEmit_ISD_ADD_MVT_v1i64_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
1734 if (RetVT.SimpleTy != MVT::v1i64)
1735 return 0;
1736 if ((Subtarget->hasMMX() && !Subtarget->hasAVX())) {
1737 return FastEmitInst_rr(X86::MMX_PADDQrr, X86::VR64RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
1738 }
1739 return 0;
1740}
1741
1742unsigned FastEmit_ISD_ADD_MVT_v2i64_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
1743 if (RetVT.SimpleTy != MVT::v2i64)
1744 return 0;
1745 if ((Subtarget->hasAVX())) {
1746 return FastEmitInst_rr(X86::VPADDQrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
1747 }
1748 if ((Subtarget->hasSSE2() && !Subtarget->hasAVX())) {
1749 return FastEmitInst_rr(X86::PADDQrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
1750 }
1751 return 0;
1752}
1753
1754unsigned FastEmit_ISD_ADD_rr(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
1755 switch (VT.SimpleTy) {
1756 case MVT::i8: return FastEmit_ISD_ADD_MVT_i8_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
1757 case MVT::i16: return FastEmit_ISD_ADD_MVT_i16_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
1758 case MVT::i32: return FastEmit_ISD_ADD_MVT_i32_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
1759 case MVT::i64: return FastEmit_ISD_ADD_MVT_i64_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
1760 case MVT::v8i8: return FastEmit_ISD_ADD_MVT_v8i8_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
1761 case MVT::v16i8: return FastEmit_ISD_ADD_MVT_v16i8_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
1762 case MVT::v4i16: return FastEmit_ISD_ADD_MVT_v4i16_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
1763 case MVT::v8i16: return FastEmit_ISD_ADD_MVT_v8i16_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
1764 case MVT::v2i32: return FastEmit_ISD_ADD_MVT_v2i32_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
1765 case MVT::v4i32: return FastEmit_ISD_ADD_MVT_v4i32_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
1766 case MVT::v1i64: return FastEmit_ISD_ADD_MVT_v1i64_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
1767 case MVT::v2i64: return FastEmit_ISD_ADD_MVT_v2i64_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
1768 default: return 0;
1769 }
1770}
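// For the vector forms above, the AVX predicate is tested before the legacy
// SSE2 one, so the VEX-encoded VPADD* instructions are preferred whenever
// AVX is available; the MMX PADD* forms are only used when AVX is off.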
1771
1772// FastEmit functions for ISD::ADDC.
1773
1774unsigned FastEmit_ISD_ADDC_MVT_i32_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
1775 if (RetVT.SimpleTy != MVT::i32)
1776 return 0;
1777 return FastEmitInst_rr(X86::ADD32rr, X86::GR32RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
1778}
1779
1780unsigned FastEmit_ISD_ADDC_MVT_i64_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
1781 if (RetVT.SimpleTy != MVT::i64)
1782 return 0;
1783 return FastEmitInst_rr(X86::ADD64rr, X86::GR64RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
1784}
1785
1786unsigned FastEmit_ISD_ADDC_rr(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
1787 switch (VT.SimpleTy) {
1788 case MVT::i32: return FastEmit_ISD_ADDC_MVT_i32_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
1789 case MVT::i64: return FastEmit_ISD_ADDC_MVT_i64_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
1790 default: return 0;
1791 }
1792}
1793
1794// FastEmit functions for ISD::ADDE.
1795
1796unsigned FastEmit_ISD_ADDE_MVT_i8_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
1797 if (RetVT.SimpleTy != MVT::i8)
1798 return 0;
1799 return FastEmitInst_rr(X86::ADC8rr, X86::GR8RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
1800}
1801
1802unsigned FastEmit_ISD_ADDE_MVT_i16_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
1803 if (RetVT.SimpleTy != MVT::i16)
1804 return 0;
1805 return FastEmitInst_rr(X86::ADC16rr, X86::GR16RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
1806}
1807
1808unsigned FastEmit_ISD_ADDE_MVT_i32_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
1809 if (RetVT.SimpleTy != MVT::i32)
1810 return 0;
1811 return FastEmitInst_rr(X86::ADC32rr, X86::GR32RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
1812}
1813
1814unsigned FastEmit_ISD_ADDE_MVT_i64_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
1815 if (RetVT.SimpleTy != MVT::i64)
1816 return 0;
1817 return FastEmitInst_rr(X86::ADC64rr, X86::GR64RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
1818}
1819
1820unsigned FastEmit_ISD_ADDE_rr(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
1821 switch (VT.SimpleTy) {
1822 case MVT::i8: return FastEmit_ISD_ADDE_MVT_i8_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
1823 case MVT::i16: return FastEmit_ISD_ADDE_MVT_i16_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
1824 case MVT::i32: return FastEmit_ISD_ADDE_MVT_i32_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
1825 case MVT::i64: return FastEmit_ISD_ADDE_MVT_i64_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
1826 default: return 0;
1827 }
1828}
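// ISD::ADDC/ISD::ADDE mirror SUBC/SUBE: ADD produces the carry in EFLAGS and
// ADC consumes it, so multi-word additions are chained through the flags.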
1829
1830// FastEmit functions for ISD::AND.
1831
1832unsigned FastEmit_ISD_AND_MVT_i8_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
1833 if (RetVT.SimpleTy != MVT::i8)
1834 return 0;
1835 return FastEmitInst_rr(X86::AND8rr, X86::GR8RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
1836}
1837
1838unsigned FastEmit_ISD_AND_MVT_i16_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
1839 if (RetVT.SimpleTy != MVT::i16)
1840 return 0;
1841 return FastEmitInst_rr(X86::AND16rr, X86::GR16RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
1842}
1843
1844unsigned FastEmit_ISD_AND_MVT_i32_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
1845 if (RetVT.SimpleTy != MVT::i32)
1846 return 0;
1847 return FastEmitInst_rr(X86::AND32rr, X86::GR32RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
1848}
1849
1850unsigned FastEmit_ISD_AND_MVT_i64_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
1851 if (RetVT.SimpleTy != MVT::i64)
1852 return 0;
1853 return FastEmitInst_rr(X86::AND64rr, X86::GR64RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
1854}
1855
1856unsigned FastEmit_ISD_AND_MVT_v1i64_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
1857 if (RetVT.SimpleTy != MVT::v1i64)
1858 return 0;
1859 if ((Subtarget->hasMMX() && !Subtarget->hasAVX())) {
1860 return FastEmitInst_rr(X86::MMX_PANDrr, X86::VR64RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
1861 }
1862 return 0;
1863}
1864
1865unsigned FastEmit_ISD_AND_MVT_v2i64_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
1866 if (RetVT.SimpleTy != MVT::v2i64)
1867 return 0;
1868 if ((Subtarget->hasAVX())) {
1869 return FastEmitInst_rr(X86::VPANDrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
1870 }
1871 if ((Subtarget->hasSSE1() && !Subtarget->hasAVX())) {
1872 return FastEmitInst_rr(X86::ANDPSrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
1873 }
1874 if ((Subtarget->hasSSE2() && !Subtarget->hasAVX())) {
1875 return FastEmitInst_rr(X86::PANDrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
1876 }
1877 return 0;
1878}
1879
1880unsigned FastEmit_ISD_AND_rr(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
1881 switch (VT.SimpleTy) {
1882 case MVT::i8: return FastEmit_ISD_AND_MVT_i8_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
1883 case MVT::i16: return FastEmit_ISD_AND_MVT_i16_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
1884 case MVT::i32: return FastEmit_ISD_AND_MVT_i32_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
1885 case MVT::i64: return FastEmit_ISD_AND_MVT_i64_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
1886 case MVT::v1i64: return FastEmit_ISD_AND_MVT_v1i64_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
1887 case MVT::v2i64: return FastEmit_ISD_AND_MVT_v2i64_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
1888 default: return 0;
1889 }
1890}
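// For v2i64 the SSE1 branch (ANDPSrr, a bitwise AND in the floating-point
// domain) is checked before the SSE2 branch (PANDrr), so on a non-AVX SSE2
// target the ANDPS form is what this table returns; this appears to follow
// the pattern ordering in the .td files rather than a deliberate choice.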
1891
1892// FastEmit functions for ISD::FADD.
1893
1894unsigned FastEmit_ISD_FADD_MVT_f32_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
1895 if (RetVT.SimpleTy != MVT::f32)
1896 return 0;
1897 if ((!Subtarget->hasSSE1())) {
1898 return FastEmitInst_rr(X86::ADD_Fp32, X86::RFP32RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
1899 }
1900 if ((Subtarget->hasAVX())) {
1901 return FastEmitInst_rr(X86::VADDSSrr, X86::FR32RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
1902 }
1903 if ((Subtarget->hasSSE1() && !Subtarget->hasAVX())) {
1904 return FastEmitInst_rr(X86::ADDSSrr, X86::FR32RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
1905 }
1906 return 0;
1907}
1908
1909unsigned FastEmit_ISD_FADD_MVT_f64_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
1910 if (RetVT.SimpleTy != MVT::f64)
1911 return 0;
1912 if ((!Subtarget->hasSSE2())) {
1913 return FastEmitInst_rr(X86::ADD_Fp64, X86::RFP64RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
1914 }
1915 if ((Subtarget->hasAVX())) {
1916 return FastEmitInst_rr(X86::VADDSDrr, X86::FR64RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
1917 }
1918 if ((Subtarget->hasSSE2() && !Subtarget->hasAVX())) {
1919 return FastEmitInst_rr(X86::ADDSDrr, X86::FR64RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
1920 }
1921 return 0;
1922}
1923
1924unsigned FastEmit_ISD_FADD_MVT_f80_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
1925 if (RetVT.SimpleTy != MVT::f80)
1926 return 0;
1927 return FastEmitInst_rr(X86::ADD_Fp80, X86::RFP80RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
1928}
1929
1930unsigned FastEmit_ISD_FADD_MVT_v4f32_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
1931 if (RetVT.SimpleTy != MVT::v4f32)
1932 return 0;
1933 if ((Subtarget->hasAVX())) {
1934 return FastEmitInst_rr(X86::VADDPSrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
1935 }
1936 if ((Subtarget->hasSSE1() && !Subtarget->hasAVX())) {
1937 return FastEmitInst_rr(X86::ADDPSrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
1938 }
1939 return 0;
1940}
1941
1942unsigned FastEmit_ISD_FADD_MVT_v8f32_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
1943 if (RetVT.SimpleTy != MVT::v8f32)
1944 return 0;
1945 if ((Subtarget->hasAVX())) {
1946 return FastEmitInst_rr(X86::VADDPSYrr, X86::VR256RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
1947 }
1948 return 0;
1949}
1950
1951unsigned FastEmit_ISD_FADD_MVT_v2f64_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
1952 if (RetVT.SimpleTy != MVT::v2f64)
1953 return 0;
1954 if ((Subtarget->hasAVX())) {
1955 return FastEmitInst_rr(X86::VADDPDrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
1956 }
1957 if ((Subtarget->hasSSE2() && !Subtarget->hasAVX())) {
1958 return FastEmitInst_rr(X86::ADDPDrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
1959 }
1960 return 0;
1961}
1962
1963unsigned FastEmit_ISD_FADD_MVT_v4f64_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
1964 if (RetVT.SimpleTy != MVT::v4f64)
1965 return 0;
1966 if ((Subtarget->hasAVX())) {
1967 return FastEmitInst_rr(X86::VADDPDYrr, X86::VR256RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
1968 }
1969 return 0;
1970}
1971
1972unsigned FastEmit_ISD_FADD_rr(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
1973 switch (VT.SimpleTy) {
1974 case MVT::f32: return FastEmit_ISD_FADD_MVT_f32_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
1975 case MVT::f64: return FastEmit_ISD_FADD_MVT_f64_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
1976 case MVT::f80: return FastEmit_ISD_FADD_MVT_f80_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
1977 case MVT::v4f32: return FastEmit_ISD_FADD_MVT_v4f32_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
1978 case MVT::v8f32: return FastEmit_ISD_FADD_MVT_v8f32_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
1979 case MVT::v2f64: return FastEmit_ISD_FADD_MVT_v2f64_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
1980 case MVT::v4f64: return FastEmit_ISD_FADD_MVT_v4f64_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
1981 default: return 0;
1982 }
1983}
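// Scalar f32/f64 additions fall back to the x87 ADD_Fp pseudos only when the
// matching SSE level is unavailable; f80 always goes through x87, since SSE
// has no 80-bit type.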
1984
1985// FastEmit functions for ISD::FDIV.
1986
1987unsigned FastEmit_ISD_FDIV_MVT_f32_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
1988 if (RetVT.SimpleTy != MVT::f32)
1989 return 0;
1990 if ((!Subtarget->hasSSE1())) {
1991 return FastEmitInst_rr(X86::DIV_Fp32, X86::RFP32RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
1992 }
1993 if ((Subtarget->hasAVX())) {
1994 return FastEmitInst_rr(X86::VDIVSSrr, X86::FR32RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
1995 }
1996 if ((Subtarget->hasSSE1() && !Subtarget->hasAVX())) {
1997 return FastEmitInst_rr(X86::DIVSSrr, X86::FR32RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
1998 }
1999 return 0;
2000}
2001
2002unsigned FastEmit_ISD_FDIV_MVT_f64_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
2003 if (RetVT.SimpleTy != MVT::f64)
2004 return 0;
2005 if ((!Subtarget->hasSSE2())) {
2006 return FastEmitInst_rr(X86::DIV_Fp64, X86::RFP64RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2007 }
2008 if ((Subtarget->hasAVX())) {
2009 return FastEmitInst_rr(X86::VDIVSDrr, X86::FR64RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2010 }
2011 if ((Subtarget->hasSSE2() && !Subtarget->hasAVX())) {
2012 return FastEmitInst_rr(X86::DIVSDrr, X86::FR64RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2013 }
2014 return 0;
2015}
2016
2017unsigned FastEmit_ISD_FDIV_MVT_f80_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
2018 if (RetVT.SimpleTy != MVT::f80)
2019 return 0;
2020 return FastEmitInst_rr(X86::DIV_Fp80, X86::RFP80RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2021}
2022
2023unsigned FastEmit_ISD_FDIV_MVT_v4f32_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
2024 if (RetVT.SimpleTy != MVT::v4f32)
2025 return 0;
2026 if ((Subtarget->hasAVX())) {
2027 return FastEmitInst_rr(X86::VDIVPSrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2028 }
2029 if ((Subtarget->hasSSE1() && !Subtarget->hasAVX())) {
2030 return FastEmitInst_rr(X86::DIVPSrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2031 }
2032 return 0;
2033}
2034
2035unsigned FastEmit_ISD_FDIV_MVT_v8f32_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
2036 if (RetVT.SimpleTy != MVT::v8f32)
2037 return 0;
2038 if ((Subtarget->hasAVX())) {
2039 return FastEmitInst_rr(X86::VDIVPSYrr, X86::VR256RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2040 }
2041 return 0;
2042}
2043
2044unsigned FastEmit_ISD_FDIV_MVT_v2f64_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
2045 if (RetVT.SimpleTy != MVT::v2f64)
2046 return 0;
2047 if ((Subtarget->hasAVX())) {
2048 return FastEmitInst_rr(X86::VDIVPDrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2049 }
2050 if ((Subtarget->hasSSE2() && !Subtarget->hasAVX())) {
2051 return FastEmitInst_rr(X86::DIVPDrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2052 }
2053 return 0;
2054}
2055
2056unsigned FastEmit_ISD_FDIV_MVT_v4f64_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
2057 if (RetVT.SimpleTy != MVT::v4f64)
2058 return 0;
2059 if ((Subtarget->hasAVX())) {
2060 return FastEmitInst_rr(X86::VDIVPDYrr, X86::VR256RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2061 }
2062 return 0;
2063}
2064
2065unsigned FastEmit_ISD_FDIV_rr(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
2066 switch (VT.SimpleTy) {
2067 case MVT::f32: return FastEmit_ISD_FDIV_MVT_f32_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
2068 case MVT::f64: return FastEmit_ISD_FDIV_MVT_f64_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
2069 case MVT::f80: return FastEmit_ISD_FDIV_MVT_f80_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
2070 case MVT::v4f32: return FastEmit_ISD_FDIV_MVT_v4f32_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
2071 case MVT::v8f32: return FastEmit_ISD_FDIV_MVT_v8f32_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
2072 case MVT::v2f64: return FastEmit_ISD_FDIV_MVT_v2f64_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
2073 case MVT::v4f64: return FastEmit_ISD_FDIV_MVT_v4f64_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
2074 default: return 0;
2075 }
2076}
2077
2078// FastEmit functions for ISD::FMUL.
2079
2080unsigned FastEmit_ISD_FMUL_MVT_f32_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
2081 if (RetVT.SimpleTy != MVT::f32)
2082 return 0;
2083 if ((!Subtarget->hasSSE1())) {
2084 return FastEmitInst_rr(X86::MUL_Fp32, X86::RFP32RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2085 }
2086 if ((Subtarget->hasAVX())) {
2087 return FastEmitInst_rr(X86::VMULSSrr, X86::FR32RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2088 }
2089 if ((Subtarget->hasSSE1() && !Subtarget->hasAVX())) {
2090 return FastEmitInst_rr(X86::MULSSrr, X86::FR32RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2091 }
2092 return 0;
2093}
2094
2095unsigned FastEmit_ISD_FMUL_MVT_f64_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
2096 if (RetVT.SimpleTy != MVT::f64)
2097 return 0;
2098 if ((!Subtarget->hasSSE2())) {
2099 return FastEmitInst_rr(X86::MUL_Fp64, X86::RFP64RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2100 }
2101 if ((Subtarget->hasAVX())) {
2102 return FastEmitInst_rr(X86::VMULSDrr, X86::FR64RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2103 }
2104 if ((Subtarget->hasSSE2() && !Subtarget->hasAVX())) {
2105 return FastEmitInst_rr(X86::MULSDrr, X86::FR64RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2106 }
2107 return 0;
2108}
2109
2110unsigned FastEmit_ISD_FMUL_MVT_f80_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
2111 if (RetVT.SimpleTy != MVT::f80)
2112 return 0;
2113 return FastEmitInst_rr(X86::MUL_Fp80, X86::RFP80RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2114}
2115
2116unsigned FastEmit_ISD_FMUL_MVT_v4f32_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
2117 if (RetVT.SimpleTy != MVT::v4f32)
2118 return 0;
2119 if ((Subtarget->hasAVX())) {
2120 return FastEmitInst_rr(X86::VMULPSrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2121 }
2122 if ((Subtarget->hasSSE1() && !Subtarget->hasAVX())) {
2123 return FastEmitInst_rr(X86::MULPSrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2124 }
2125 return 0;
2126}
2127
2128unsigned FastEmit_ISD_FMUL_MVT_v8f32_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
2129 if (RetVT.SimpleTy != MVT::v8f32)
2130 return 0;
2131 if ((Subtarget->hasAVX())) {
2132 return FastEmitInst_rr(X86::VMULPSYrr, X86::VR256RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2133 }
2134 return 0;
2135}
2136
2137unsigned FastEmit_ISD_FMUL_MVT_v2f64_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
2138 if (RetVT.SimpleTy != MVT::v2f64)
2139 return 0;
2140 if ((Subtarget->hasAVX())) {
2141 return FastEmitInst_rr(X86::VMULPDrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2142 }
2143 if ((Subtarget->hasSSE2() && !Subtarget->hasAVX())) {
2144 return FastEmitInst_rr(X86::MULPDrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2145 }
2146 return 0;
2147}
2148
2149unsigned FastEmit_ISD_FMUL_MVT_v4f64_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
2150 if (RetVT.SimpleTy != MVT::v4f64)
2151 return 0;
2152 if ((Subtarget->hasAVX())) {
2153 return FastEmitInst_rr(X86::VMULPDYrr, X86::VR256RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2154 }
2155 return 0;
2156}
2157
2158unsigned FastEmit_ISD_FMUL_rr(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
2159 switch (VT.SimpleTy) {
2160 case MVT::f32: return FastEmit_ISD_FMUL_MVT_f32_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
2161 case MVT::f64: return FastEmit_ISD_FMUL_MVT_f64_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
2162 case MVT::f80: return FastEmit_ISD_FMUL_MVT_f80_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
2163 case MVT::v4f32: return FastEmit_ISD_FMUL_MVT_v4f32_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
2164 case MVT::v8f32: return FastEmit_ISD_FMUL_MVT_v8f32_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
2165 case MVT::v2f64: return FastEmit_ISD_FMUL_MVT_v2f64_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
2166 case MVT::v4f64: return FastEmit_ISD_FMUL_MVT_v4f64_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
2167 default: return 0;
2168 }
2169}
2170
2171// FastEmit functions for ISD::FSUB.
2172
2173unsigned FastEmit_ISD_FSUB_MVT_f32_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
2174 if (RetVT.SimpleTy != MVT::f32)
2175 return 0;
2176 if ((!Subtarget->hasSSE1())) {
2177 return FastEmitInst_rr(X86::SUB_Fp32, X86::RFP32RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2178 }
2179 if ((Subtarget->hasAVX())) {
2180 return FastEmitInst_rr(X86::VSUBSSrr, X86::FR32RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2181 }
2182 if ((Subtarget->hasSSE1() && !Subtarget->hasAVX())) {
2183 return FastEmitInst_rr(X86::SUBSSrr, X86::FR32RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2184 }
2185 return 0;
2186}
2187
2188unsigned FastEmit_ISD_FSUB_MVT_f64_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
2189 if (RetVT.SimpleTy != MVT::f64)
2190 return 0;
2191 if ((!Subtarget->hasSSE2())) {
2192 return FastEmitInst_rr(X86::SUB_Fp64, X86::RFP64RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2193 }
2194 if ((Subtarget->hasAVX())) {
2195 return FastEmitInst_rr(X86::VSUBSDrr, X86::FR64RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2196 }
2197 if ((Subtarget->hasSSE2() && !Subtarget->hasAVX())) {
2198 return FastEmitInst_rr(X86::SUBSDrr, X86::FR64RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2199 }
2200 return 0;
2201}
2202
2203unsigned FastEmit_ISD_FSUB_MVT_f80_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
2204 if (RetVT.SimpleTy != MVT::f80)
2205 return 0;
2206 return FastEmitInst_rr(X86::SUB_Fp80, X86::RFP80RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2207}
2208
2209unsigned FastEmit_ISD_FSUB_MVT_v4f32_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
2210 if (RetVT.SimpleTy != MVT::v4f32)
2211 return 0;
2212 if ((Subtarget->hasAVX())) {
2213 return FastEmitInst_rr(X86::VSUBPSrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2214 }
2215 if ((Subtarget->hasSSE1() && !Subtarget->hasAVX())) {
2216 return FastEmitInst_rr(X86::SUBPSrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2217 }
2218 return 0;
2219}
2220
2221unsigned FastEmit_ISD_FSUB_MVT_v8f32_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
2222 if (RetVT.SimpleTy != MVT::v8f32)
2223 return 0;
2224 if ((Subtarget->hasAVX())) {
2225 return FastEmitInst_rr(X86::VSUBPSYrr, X86::VR256RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2226 }
2227 return 0;
2228}
2229
2230unsigned FastEmit_ISD_FSUB_MVT_v2f64_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
2231 if (RetVT.SimpleTy != MVT::v2f64)
2232 return 0;
2233 if ((Subtarget->hasAVX())) {
2234 return FastEmitInst_rr(X86::VSUBPDrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2235 }
2236 if ((Subtarget->hasSSE2() && !Subtarget->hasAVX())) {
2237 return FastEmitInst_rr(X86::SUBPDrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2238 }
2239 return 0;
2240}
2241
2242unsigned FastEmit_ISD_FSUB_MVT_v4f64_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
2243 if (RetVT.SimpleTy != MVT::v4f64)
2244 return 0;
2245 if ((Subtarget->hasAVX())) {
2246 return FastEmitInst_rr(X86::VSUBPDYrr, X86::VR256RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2247 }
2248 return 0;
2249}
2250
2251unsigned FastEmit_ISD_FSUB_rr(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
2252 switch (VT.SimpleTy) {
2253 case MVT::f32: return FastEmit_ISD_FSUB_MVT_f32_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
2254 case MVT::f64: return FastEmit_ISD_FSUB_MVT_f64_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
2255 case MVT::f80: return FastEmit_ISD_FSUB_MVT_f80_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
2256 case MVT::v4f32: return FastEmit_ISD_FSUB_MVT_v4f32_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
2257 case MVT::v8f32: return FastEmit_ISD_FSUB_MVT_v8f32_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
2258 case MVT::v2f64: return FastEmit_ISD_FSUB_MVT_v2f64_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
2259 case MVT::v4f64: return FastEmit_ISD_FSUB_MVT_v4f64_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
2260 default: return 0;
2261 }
2262}
2263
2264// FastEmit functions for ISD::MUL.
2265
2266unsigned FastEmit_ISD_MUL_MVT_i8_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
2267 if (RetVT.SimpleTy != MVT::i8)
2268 return 0;
2269 BuildMI(*FuncInfo.MBB, FuncInfo.InsertPt, DL, TII.get(TargetOpcode::COPY), X86::AL).addReg(Op0);
2270 return FastEmitInst_r(X86::MUL8r, X86::GR8RegisterClass, Op1, Op1IsKill);
2271}
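// x86 has no two-register 8-bit IMUL, so the i8 multiply copies Op0 into AL
// and uses the one-operand MUL8r, which implicitly multiplies AL by Op1 and
// leaves the low byte of the product in AL.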
2272
2273unsigned FastEmit_ISD_MUL_MVT_i16_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
2274 if (RetVT.SimpleTy != MVT::i16)
2275 return 0;
2276 return FastEmitInst_rr(X86::IMUL16rr, X86::GR16RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2277}
2278
2279unsigned FastEmit_ISD_MUL_MVT_i32_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
2280 if (RetVT.SimpleTy != MVT::i32)
2281 return 0;
2282 return FastEmitInst_rr(X86::IMUL32rr, X86::GR32RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2283}
2284
2285unsigned FastEmit_ISD_MUL_MVT_i64_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
2286 if (RetVT.SimpleTy != MVT::i64)
2287 return 0;
2288 return FastEmitInst_rr(X86::IMUL64rr, X86::GR64RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2289}
2290
2291unsigned FastEmit_ISD_MUL_MVT_v4i16_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
2292 if (RetVT.SimpleTy != MVT::v4i16)
2293 return 0;
2294 if ((Subtarget->hasMMX() && !Subtarget->hasAVX())) {
2295 return FastEmitInst_rr(X86::MMX_PMULLWrr, X86::VR64RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2296 }
2297 return 0;
2298}
2299
2300unsigned FastEmit_ISD_MUL_MVT_v8i16_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
2301 if (RetVT.SimpleTy != MVT::v8i16)
2302 return 0;
2303 if ((Subtarget->hasAVX())) {
2304 return FastEmitInst_rr(X86::VPMULLWrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2305 }
2306 if ((Subtarget->hasSSE2() && !Subtarget->hasAVX())) {
2307 return FastEmitInst_rr(X86::PMULLWrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2308 }
2309 return 0;
2310}
2311
2312unsigned FastEmit_ISD_MUL_MVT_v4i32_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
2313 if (RetVT.SimpleTy != MVT::v4i32)
2314 return 0;
2315 if ((Subtarget->hasAVX())) {
2316 return FastEmitInst_rr(X86::VPMULLDrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2317 }
2318 if ((Subtarget->hasSSE41() && !Subtarget->hasAVX())) {
2319 return FastEmitInst_rr(X86::PMULLDrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2320 }
2321 return 0;
2322}
2323
2324unsigned FastEmit_ISD_MUL_rr(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
2325 switch (VT.SimpleTy) {
2326 case MVT::i8: return FastEmit_ISD_MUL_MVT_i8_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
2327 case MVT::i16: return FastEmit_ISD_MUL_MVT_i16_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
2328 case MVT::i32: return FastEmit_ISD_MUL_MVT_i32_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
2329 case MVT::i64: return FastEmit_ISD_MUL_MVT_i64_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
2330 case MVT::v4i16: return FastEmit_ISD_MUL_MVT_v4i16_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
2331 case MVT::v8i16: return FastEmit_ISD_MUL_MVT_v8i16_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
2332 case MVT::v4i32: return FastEmit_ISD_MUL_MVT_v4i32_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
2333 default: return 0;
2334 }
2335}
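// Among the vector multiplies, v8i16 (PMULLW) only needs SSE2, but v4i32
// requires SSE4.1's PMULLD, there being no 32-bit element multiply in
// earlier SSE levels.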
2336
2337// FastEmit functions for ISD::OR.
2338
2339unsigned FastEmit_ISD_OR_MVT_i8_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
2340 if (RetVT.SimpleTy != MVT::i8)
2341 return 0;
2342 return FastEmitInst_rr(X86::OR8rr, X86::GR8RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2343}
2344
2345unsigned FastEmit_ISD_OR_MVT_i16_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
2346 if (RetVT.SimpleTy != MVT::i16)
2347 return 0;
2348 return FastEmitInst_rr(X86::OR16rr, X86::GR16RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2349}
2350
2351unsigned FastEmit_ISD_OR_MVT_i32_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
2352 if (RetVT.SimpleTy != MVT::i32)
2353 return 0;
2354 return FastEmitInst_rr(X86::OR32rr, X86::GR32RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2355}
2356
2357unsigned FastEmit_ISD_OR_MVT_i64_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
2358 if (RetVT.SimpleTy != MVT::i64)
2359 return 0;
2360 return FastEmitInst_rr(X86::OR64rr, X86::GR64RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2361}
2362
2363unsigned FastEmit_ISD_OR_MVT_v1i64_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
2364 if (RetVT.SimpleTy != MVT::v1i64)
2365 return 0;
2366 if ((Subtarget->hasMMX() && !Subtarget->hasAVX())) {
2367 return FastEmitInst_rr(X86::MMX_PORrr, X86::VR64RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2368 }
2369 return 0;
2370}
2371
2372unsigned FastEmit_ISD_OR_MVT_v2i64_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
2373 if (RetVT.SimpleTy != MVT::v2i64)
2374 return 0;
2375 if ((Subtarget->hasAVX())) {
2376 return FastEmitInst_rr(X86::VPORrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2377 }
2378 if ((Subtarget->hasSSE1() && !Subtarget->hasAVX())) {
2379 return FastEmitInst_rr(X86::ORPSrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2380 }
2381 if ((Subtarget->hasSSE2() && !Subtarget->hasAVX())) {
2382 return FastEmitInst_rr(X86::PORrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2383 }
2384 return 0;
2385}
2386
2387unsigned FastEmit_ISD_OR_rr(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
2388 switch (VT.SimpleTy) {
2389 case MVT::i8: return FastEmit_ISD_OR_MVT_i8_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
2390 case MVT::i16: return FastEmit_ISD_OR_MVT_i16_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
2391 case MVT::i32: return FastEmit_ISD_OR_MVT_i32_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
2392 case MVT::i64: return FastEmit_ISD_OR_MVT_i64_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
2393 case MVT::v1i64: return FastEmit_ISD_OR_MVT_v1i64_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
2394 case MVT::v2i64: return FastEmit_ISD_OR_MVT_v2i64_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
2395 default: return 0;
2396 }
2397}
2398
2399// FastEmit functions for ISD::ROTL.
2400
2401unsigned FastEmit_ISD_ROTL_MVT_i8_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
2402 if (RetVT.SimpleTy != MVT::i8)
2403 return 0;
2404 BuildMI(*FuncInfo.MBB, FuncInfo.InsertPt, DL, TII.get(TargetOpcode::COPY), X86::CL).addReg(Op1);
2405 return FastEmitInst_r(X86::ROL8rCL, X86::GR8RegisterClass, Op0, Op0IsKill);
2406}
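// Variable rotate and shift counts must live in CL on x86, so the count in
// Op1 is copied into X86::CL before emitting the *rCL instruction; the
// ROTR/SHL/SRA/SRL helpers below follow the same pattern.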
2407
2408unsigned FastEmit_ISD_ROTL_rr(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
2409 switch (VT.SimpleTy) {
2410 case MVT::i8: return FastEmit_ISD_ROTL_MVT_i8_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
2411 default: return 0;
2412 }
2413}
2414
2415// FastEmit functions for ISD::ROTR.
2416
2417unsigned FastEmit_ISD_ROTR_MVT_i8_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
2418 if (RetVT.SimpleTy != MVT::i8)
2419 return 0;
2420 BuildMI(*FuncInfo.MBB, FuncInfo.InsertPt, DL, TII.get(TargetOpcode::COPY), X86::CL).addReg(Op1);
2421 return FastEmitInst_r(X86::ROR8rCL, X86::GR8RegisterClass, Op0, Op0IsKill);
2422}
2423
2424unsigned FastEmit_ISD_ROTR_rr(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
2425 switch (VT.SimpleTy) {
2426 case MVT::i8: return FastEmit_ISD_ROTR_MVT_i8_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
2427 default: return 0;
2428 }
2429}
2430
2431// FastEmit functions for ISD::SHL.
2432
2433unsigned FastEmit_ISD_SHL_MVT_i8_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
2434 if (RetVT.SimpleTy != MVT::i8)
2435 return 0;
2436 BuildMI(*FuncInfo.MBB, FuncInfo.InsertPt, DL, TII.get(TargetOpcode::COPY), X86::CL).addReg(Op1);
2437 return FastEmitInst_r(X86::SHL8rCL, X86::GR8RegisterClass, Op0, Op0IsKill);
2438}
2439
2440unsigned FastEmit_ISD_SHL_rr(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
2441 switch (VT.SimpleTy) {
2442 case MVT::i8: return FastEmit_ISD_SHL_MVT_i8_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
2443 default: return 0;
2444 }
2445}
2446
2447// FastEmit functions for ISD::SRA.
2448
2449unsigned FastEmit_ISD_SRA_MVT_i8_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
2450 if (RetVT.SimpleTy != MVT::i8)
2451 return 0;
2452 BuildMI(*FuncInfo.MBB, FuncInfo.InsertPt, DL, TII.get(TargetOpcode::COPY), X86::CL).addReg(Op1);
2453 return FastEmitInst_r(X86::SAR8rCL, X86::GR8RegisterClass, Op0, Op0IsKill);
2454}
2455
2456unsigned FastEmit_ISD_SRA_rr(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
2457 switch (VT.SimpleTy) {
2458 case MVT::i8: return FastEmit_ISD_SRA_MVT_i8_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
2459 default: return 0;
2460 }
2461}
2462
2463// FastEmit functions for ISD::SRL.
2464
2465unsigned FastEmit_ISD_SRL_MVT_i8_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
2466 if (RetVT.SimpleTy != MVT::i8)
2467 return 0;
2468 BuildMI(*FuncInfo.MBB, FuncInfo.InsertPt, DL, TII.get(TargetOpcode::COPY), X86::CL).addReg(Op1);
2469 return FastEmitInst_r(X86::SHR8rCL, X86::GR8RegisterClass, Op0, Op0IsKill);
2470}
2471
2472unsigned FastEmit_ISD_SRL_rr(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
2473 switch (VT.SimpleTy) {
2474 case MVT::i8: return FastEmit_ISD_SRL_MVT_i8_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
2475 default: return 0;
2476 }
2477}
2478
2479// FastEmit functions for ISD::SUB.
2480
2481unsigned FastEmit_ISD_SUB_MVT_i8_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
2482 if (RetVT.SimpleTy != MVT::i8)
2483 return 0;
2484 return FastEmitInst_rr(X86::SUB8rr, X86::GR8RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2485}
2486
2487unsigned FastEmit_ISD_SUB_MVT_i16_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
2488 if (RetVT.SimpleTy != MVT::i16)
2489 return 0;
2490 return FastEmitInst_rr(X86::SUB16rr, X86::GR16RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2491}
2492
2493unsigned FastEmit_ISD_SUB_MVT_i32_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
2494 if (RetVT.SimpleTy != MVT::i32)
2495 return 0;
2496 return FastEmitInst_rr(X86::SUB32rr, X86::GR32RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2497}
2498
2499unsigned FastEmit_ISD_SUB_MVT_i64_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
2500 if (RetVT.SimpleTy != MVT::i64)
2501 return 0;
2502 return FastEmitInst_rr(X86::SUB64rr, X86::GR64RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2503}
2504
2505unsigned FastEmit_ISD_SUB_MVT_v8i8_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
2506 if (RetVT.SimpleTy != MVT::v8i8)
2507 return 0;
2508 if ((Subtarget->hasMMX() && !Subtarget->hasAVX())) {
2509 return FastEmitInst_rr(X86::MMX_PSUBBrr, X86::VR64RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2510 }
2511 return 0;
2512}
2513
2514unsigned FastEmit_ISD_SUB_MVT_v16i8_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
2515 if (RetVT.SimpleTy != MVT::v16i8)
2516 return 0;
2517 if ((Subtarget->hasAVX())) {
2518 return FastEmitInst_rr(X86::VPSUBBrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2519 }
2520 if ((Subtarget->hasSSE2() && !Subtarget->hasAVX())) {
2521 return FastEmitInst_rr(X86::PSUBBrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2522 }
2523 return 0;
2524}
2525
2526unsigned FastEmit_ISD_SUB_MVT_v4i16_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
2527 if (RetVT.SimpleTy != MVT::v4i16)
2528 return 0;
2529 if ((Subtarget->hasMMX() && !Subtarget->hasAVX())) {
2530 return FastEmitInst_rr(X86::MMX_PSUBWrr, X86::VR64RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2531 }
2532 return 0;
2533}
2534
2535unsigned FastEmit_ISD_SUB_MVT_v8i16_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
2536 if (RetVT.SimpleTy != MVT::v8i16)
2537 return 0;
2538 if ((Subtarget->hasAVX())) {
2539 return FastEmitInst_rr(X86::VPSUBWrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2540 }
2541 if ((Subtarget->hasSSE2() && !Subtarget->hasAVX())) {
2542 return FastEmitInst_rr(X86::PSUBWrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2543 }
2544 return 0;
2545}
2546
2547unsigned FastEmit_ISD_SUB_MVT_v2i32_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
2548 if (RetVT.SimpleTy != MVT::v2i32)
2549 return 0;
2550 if ((Subtarget->hasMMX() && !Subtarget->hasAVX())) {
2551 return FastEmitInst_rr(X86::MMX_PSUBDrr, X86::VR64RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2552 }
2553 return 0;
2554}
2555
2556unsigned FastEmit_ISD_SUB_MVT_v4i32_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
2557 if (RetVT.SimpleTy != MVT::v4i32)
2558 return 0;
2559 if ((Subtarget->hasAVX())) {
2560 return FastEmitInst_rr(X86::VPSUBDrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2561 }
2562 if ((Subtarget->hasSSE2() && !Subtarget->hasAVX())) {
2563 return FastEmitInst_rr(X86::PSUBDrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2564 }
2565 return 0;
2566}
2567
2568unsigned FastEmit_ISD_SUB_MVT_v1i64_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
2569 if (RetVT.SimpleTy != MVT::v1i64)
2570 return 0;
2571 if ((Subtarget->hasMMX() && !Subtarget->hasAVX())) {
2572 return FastEmitInst_rr(X86::MMX_PSUBQrr, X86::VR64RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2573 }
2574 return 0;
2575}
2576
2577unsigned FastEmit_ISD_SUB_MVT_v2i64_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
2578 if (RetVT.SimpleTy != MVT::v2i64)
2579 return 0;
2580 if ((Subtarget->hasAVX())) {
2581 return FastEmitInst_rr(X86::VPSUBQrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2582 }
2583 if ((Subtarget->hasSSE2() && !Subtarget->hasAVX())) {
2584 return FastEmitInst_rr(X86::PSUBQrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2585 }
2586 return 0;
2587}
2588
2589unsigned FastEmit_ISD_SUB_rr(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
2590 switch (VT.SimpleTy) {
2591 case MVT::i8: return FastEmit_ISD_SUB_MVT_i8_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
2592 case MVT::i16: return FastEmit_ISD_SUB_MVT_i16_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
2593 case MVT::i32: return FastEmit_ISD_SUB_MVT_i32_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
2594 case MVT::i64: return FastEmit_ISD_SUB_MVT_i64_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
2595 case MVT::v8i8: return FastEmit_ISD_SUB_MVT_v8i8_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
2596 case MVT::v16i8: return FastEmit_ISD_SUB_MVT_v16i8_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
2597 case MVT::v4i16: return FastEmit_ISD_SUB_MVT_v4i16_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
2598 case MVT::v8i16: return FastEmit_ISD_SUB_MVT_v8i16_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
2599 case MVT::v2i32: return FastEmit_ISD_SUB_MVT_v2i32_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
2600 case MVT::v4i32: return FastEmit_ISD_SUB_MVT_v4i32_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
2601 case MVT::v1i64: return FastEmit_ISD_SUB_MVT_v1i64_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
2602 case MVT::v2i64: return FastEmit_ISD_SUB_MVT_v2i64_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
2603 default: return 0;
2604 }
2605}
2606
2607// FastEmit functions for ISD::SUBC.
2608
2609unsigned FastEmit_ISD_SUBC_MVT_i32_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
2610 if (RetVT.SimpleTy != MVT::i32)
2611 return 0;
2612 return FastEmitInst_rr(X86::SUB32rr, X86::GR32RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2613}
2614
2615unsigned FastEmit_ISD_SUBC_MVT_i64_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
2616 if (RetVT.SimpleTy != MVT::i64)
2617 return 0;
2618 return FastEmitInst_rr(X86::SUB64rr, X86::GR64RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2619}
2620
2621unsigned FastEmit_ISD_SUBC_rr(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
2622 switch (VT.SimpleTy) {
2623 case MVT::i32: return FastEmit_ISD_SUBC_MVT_i32_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
2624 case MVT::i64: return FastEmit_ISD_SUBC_MVT_i64_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
2625 default: return 0;
2626 }
2627}
2628
2629// FastEmit functions for ISD::SUBE.
2630
2631unsigned FastEmit_ISD_SUBE_MVT_i8_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
2632 if (RetVT.SimpleTy != MVT::i8)
2633 return 0;
2634 return FastEmitInst_rr(X86::SBB8rr, X86::GR8RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2635}
2636
2637unsigned FastEmit_ISD_SUBE_MVT_i16_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
2638 if (RetVT.SimpleTy != MVT::i16)
2639 return 0;
2640 return FastEmitInst_rr(X86::SBB16rr, X86::GR16RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2641}
2642
2643unsigned FastEmit_ISD_SUBE_MVT_i32_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
2644 if (RetVT.SimpleTy != MVT::i32)
2645 return 0;
2646 return FastEmitInst_rr(X86::SBB32rr, X86::GR32RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2647}
2648
2649unsigned FastEmit_ISD_SUBE_MVT_i64_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
2650 if (RetVT.SimpleTy != MVT::i64)
2651 return 0;
2652 return FastEmitInst_rr(X86::SBB64rr, X86::GR64RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2653}
2654
2655unsigned FastEmit_ISD_SUBE_rr(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
2656 switch (VT.SimpleTy) {
2657 case MVT::i8: return FastEmit_ISD_SUBE_MVT_i8_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
2658 case MVT::i16: return FastEmit_ISD_SUBE_MVT_i16_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
2659 case MVT::i32: return FastEmit_ISD_SUBE_MVT_i32_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
2660 case MVT::i64: return FastEmit_ISD_SUBE_MVT_i64_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
2661 default: return 0;
2662 }
2663}
2664
2665// FastEmit functions for ISD::XOR.
2666
2667unsigned FastEmit_ISD_XOR_MVT_i8_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
2668 if (RetVT.SimpleTy != MVT::i8)
2669 return 0;
2670 return FastEmitInst_rr(X86::XOR8rr, X86::GR8RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2671}
2672
2673unsigned FastEmit_ISD_XOR_MVT_i16_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
2674 if (RetVT.SimpleTy != MVT::i16)
2675 return 0;
2676 return FastEmitInst_rr(X86::XOR16rr, X86::GR16RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2677}
2678
2679unsigned FastEmit_ISD_XOR_MVT_i32_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
2680 if (RetVT.SimpleTy != MVT::i32)
2681 return 0;
2682 return FastEmitInst_rr(X86::XOR32rr, X86::GR32RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2683}
2684
2685unsigned FastEmit_ISD_XOR_MVT_i64_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
2686 if (RetVT.SimpleTy != MVT::i64)
2687 return 0;
2688 return FastEmitInst_rr(X86::XOR64rr, X86::GR64RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2689}
2690
2691unsigned FastEmit_ISD_XOR_MVT_v1i64_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
2692 if (RetVT.SimpleTy != MVT::v1i64)
2693 return 0;
2694 if ((Subtarget->hasMMX() && !Subtarget->hasAVX())) {
2695 return FastEmitInst_rr(X86::MMX_PXORrr, X86::VR64RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2696 }
2697 return 0;
2698}
2699
2700unsigned FastEmit_ISD_XOR_MVT_v2i64_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
2701 if (RetVT.SimpleTy != MVT::v2i64)
2702 return 0;
2703 if ((Subtarget->hasAVX())) {
2704 return FastEmitInst_rr(X86::VPXORrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2705 }
2706 if ((Subtarget->hasSSE1() && !Subtarget->hasAVX())) {
2707 return FastEmitInst_rr(X86::XORPSrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2708 }
2709 if ((Subtarget->hasSSE2() && !Subtarget->hasAVX())) {
2710 return FastEmitInst_rr(X86::PXORrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2711 }
2712 return 0;
2713}
2714
2715unsigned FastEmit_ISD_XOR_rr(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
2716 switch (VT.SimpleTy) {
2717 case MVT::i8: return FastEmit_ISD_XOR_MVT_i8_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
2718 case MVT::i16: return FastEmit_ISD_XOR_MVT_i16_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
2719 case MVT::i32: return FastEmit_ISD_XOR_MVT_i32_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
2720 case MVT::i64: return FastEmit_ISD_XOR_MVT_i64_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
2721 case MVT::v1i64: return FastEmit_ISD_XOR_MVT_v1i64_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
2722 case MVT::v2i64: return FastEmit_ISD_XOR_MVT_v2i64_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
2723 default: return 0;
2724 }
2725}
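
// In the vector helpers above the AVX form is tried first, so AVX subtargets
// get the VEX-encoded VPXORrr and the legacy forms are reachable only when
// hasAVX() is false.  Since hasSSE2() implies hasSSE1(), the XORPSrr arm is the
// one taken on non-AVX SSE targets; it computes the same 128-bit xor as PXORrr.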
2726
2727// FastEmit functions for X86ISD::BT.
2728
2729unsigned FastEmit_X86ISD_BT_MVT_i16_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
2730 if (RetVT.SimpleTy != MVT::i32)
2731 return 0;
2732 return FastEmitInst_rr(X86::BT16rr, X86::GR16RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2733}
2734
2735unsigned FastEmit_X86ISD_BT_MVT_i32_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
2736 if (RetVT.SimpleTy != MVT::i32)
2737 return 0;
2738 return FastEmitInst_rr(X86::BT32rr, X86::GR32RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2739}
2740
2741unsigned FastEmit_X86ISD_BT_MVT_i64_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
2742 if (RetVT.SimpleTy != MVT::i32)
2743 return 0;
2744 return FastEmitInst_rr(X86::BT64rr, X86::GR64RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2745}
2746
2747unsigned FastEmit_X86ISD_BT_rr(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
2748 switch (VT.SimpleTy) {
2749 case MVT::i16: return FastEmit_X86ISD_BT_MVT_i16_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
2750 case MVT::i32: return FastEmit_X86ISD_BT_MVT_i32_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
2751 case MVT::i64: return FastEmit_X86ISD_BT_MVT_i64_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
2752 default: return 0;
2753 }
2754}
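
// BT, like the CMP/COMI/UCOMI/PTEST/TESTP helpers further down, requires
// RetVT == MVT::i32: the i32 result stands for the EFLAGS value the instruction
// produces, while the operand VT only selects the width of the operation.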
2755
2756// FastEmit functions for X86ISD::CMP.
2757
2758unsigned FastEmit_X86ISD_CMP_MVT_i8_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
2759 if (RetVT.SimpleTy != MVT::i32)
2760 return 0;
2761 return FastEmitInst_rr(X86::CMP8rr, X86::GR8RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2762}
2763
2764unsigned FastEmit_X86ISD_CMP_MVT_i16_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
2765 if (RetVT.SimpleTy != MVT::i32)
2766 return 0;
2767 return FastEmitInst_rr(X86::CMP16rr, X86::GR16RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2768}
2769
2770unsigned FastEmit_X86ISD_CMP_MVT_i32_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
2771 if (RetVT.SimpleTy != MVT::i32)
2772 return 0;
2773 return FastEmitInst_rr(X86::CMP32rr, X86::GR32RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2774}
2775
2776unsigned FastEmit_X86ISD_CMP_MVT_i64_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
2777 if (RetVT.SimpleTy != MVT::i32)
2778 return 0;
2779 return FastEmitInst_rr(X86::CMP64rr, X86::GR64RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2780}
2781
2782unsigned FastEmit_X86ISD_CMP_MVT_f32_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
2783 if (RetVT.SimpleTy != MVT::i32)
2784 return 0;
2785 if ((!Subtarget->hasSSE1())) {
2786 return FastEmitInst_rr(X86::UCOM_FpIr32, X86::RFP32RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2787 }
2788 if ((Subtarget->hasAVX())) {
2789 return FastEmitInst_rr(X86::VUCOMISSrr, X86::FR32RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2790 }
2791 if ((Subtarget->hasSSE1() && !Subtarget->hasAVX())) {
2792 return FastEmitInst_rr(X86::UCOMISSrr, X86::FR32RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2793 }
2794 return 0;
2795}
2796
2797unsigned FastEmit_X86ISD_CMP_MVT_f64_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
2798 if (RetVT.SimpleTy != MVT::i32)
2799 return 0;
2800 if ((!Subtarget->hasSSE2())) {
2801 return FastEmitInst_rr(X86::UCOM_FpIr64, X86::RFP64RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2802 }
2803 if ((Subtarget->hasAVX())) {
2804 return FastEmitInst_rr(X86::VUCOMISDrr, X86::FR64RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2805 }
2806 if ((Subtarget->hasSSE2() && !Subtarget->hasAVX())) {
2807 return FastEmitInst_rr(X86::UCOMISDrr, X86::FR64RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2808 }
2809 return 0;
2810}
2811
2812unsigned FastEmit_X86ISD_CMP_MVT_f80_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
2813 if (RetVT.SimpleTy != MVT::i32)
2814 return 0;
2815 return FastEmitInst_rr(X86::UCOM_FpIr80, X86::RFP80RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2816}
2817
2818unsigned FastEmit_X86ISD_CMP_rr(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
2819 switch (VT.SimpleTy) {
2820 case MVT::i8: return FastEmit_X86ISD_CMP_MVT_i8_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
2821 case MVT::i16: return FastEmit_X86ISD_CMP_MVT_i16_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
2822 case MVT::i32: return FastEmit_X86ISD_CMP_MVT_i32_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
2823 case MVT::i64: return FastEmit_X86ISD_CMP_MVT_i64_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
2824 case MVT::f32: return FastEmit_X86ISD_CMP_MVT_f32_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
2825 case MVT::f64: return FastEmit_X86ISD_CMP_MVT_f64_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
2826 case MVT::f80: return FastEmit_X86ISD_CMP_MVT_f80_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
2827 default: return 0;
2828 }
2829}
2830
2831// FastEmit functions for X86ISD::COMI.
2832
2833unsigned FastEmit_X86ISD_COMI_MVT_v4f32_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
2834 if (RetVT.SimpleTy != MVT::i32)
2835 return 0;
2836 if ((Subtarget->hasAVX())) {
2837 return FastEmitInst_rr(X86::Int_VCOMISSrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2838 }
2839 if ((Subtarget->hasSSE1() && !Subtarget->hasAVX())) {
2840 return FastEmitInst_rr(X86::Int_COMISSrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2841 }
2842 return 0;
2843}
2844
2845unsigned FastEmit_X86ISD_COMI_MVT_v2f64_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
2846 if (RetVT.SimpleTy != MVT::i32)
2847 return 0;
2848 if ((Subtarget->hasAVX())) {
2849 return FastEmitInst_rr(X86::Int_VCOMISDrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2850 }
2851 if ((Subtarget->hasSSE2() && !Subtarget->hasAVX())) {
2852 return FastEmitInst_rr(X86::Int_COMISDrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2853 }
2854 return 0;
2855}
2856
2857unsigned FastEmit_X86ISD_COMI_rr(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
2858 switch (VT.SimpleTy) {
2859 case MVT::v4f32: return FastEmit_X86ISD_COMI_MVT_v4f32_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
2860 case MVT::v2f64: return FastEmit_X86ISD_COMI_MVT_v2f64_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
2861 default: return 0;
2862 }
2863}
2864
2865// FastEmit functions for X86ISD::FAND.
2866
2867unsigned FastEmit_X86ISD_FAND_MVT_f32_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
2868 if (RetVT.SimpleTy != MVT::f32)
2869 return 0;
2870 if ((Subtarget->hasAVX())) {
2871 return FastEmitInst_rr(X86::VFsANDPSrr, X86::FR32RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2872 }
2873 if ((Subtarget->hasSSE1() && !Subtarget->hasAVX())) {
2874 return FastEmitInst_rr(X86::FsANDPSrr, X86::FR32RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2875 }
2876 return 0;
2877}
2878
2879unsigned FastEmit_X86ISD_FAND_MVT_f64_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
2880 if (RetVT.SimpleTy != MVT::f64)
2881 return 0;
2882 if ((Subtarget->hasAVX())) {
2883 return FastEmitInst_rr(X86::VFsANDPDrr, X86::FR64RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2884 }
2885 if ((Subtarget->hasSSE2() && !Subtarget->hasAVX())) {
2886 return FastEmitInst_rr(X86::FsANDPDrr, X86::FR64RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2887 }
2888 return 0;
2889}
2890
2891unsigned FastEmit_X86ISD_FAND_rr(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
2892 switch (VT.SimpleTy) {
2893 case MVT::f32: return FastEmit_X86ISD_FAND_MVT_f32_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
2894 case MVT::f64: return FastEmit_X86ISD_FAND_MVT_f64_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
2895 default: return 0;
2896 }
2897}
2898
2899// FastEmit functions for X86ISD::FMAX.
2900
2901unsigned FastEmit_X86ISD_FMAX_MVT_f32_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
2902 if (RetVT.SimpleTy != MVT::f32)
2903 return 0;
2904 if ((Subtarget->hasAVX())) {
2905 return FastEmitInst_rr(X86::VMAXSSrr, X86::FR32RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2906 }
2907 if ((Subtarget->hasSSE1() && !Subtarget->hasAVX())) {
2908 return FastEmitInst_rr(X86::MAXSSrr, X86::FR32RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2909 }
2910 return 0;
2911}
2912
2913unsigned FastEmit_X86ISD_FMAX_MVT_f64_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
2914 if (RetVT.SimpleTy != MVT::f64)
2915 return 0;
2916 if ((Subtarget->hasAVX())) {
2917 return FastEmitInst_rr(X86::VMAXSDrr, X86::FR64RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2918 }
2919 if ((Subtarget->hasSSE2() && !Subtarget->hasAVX())) {
2920 return FastEmitInst_rr(X86::MAXSDrr, X86::FR64RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2921 }
2922 return 0;
2923}
2924
2925unsigned FastEmit_X86ISD_FMAX_MVT_v4f32_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
2926 if (RetVT.SimpleTy != MVT::v4f32)
2927 return 0;
2928 if ((Subtarget->hasAVX())) {
2929 return FastEmitInst_rr(X86::VMAXPSrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2930 }
2931 if ((Subtarget->hasSSE1() && !Subtarget->hasAVX())) {
2932 return FastEmitInst_rr(X86::MAXPSrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2933 }
2934 return 0;
2935}
2936
2937unsigned FastEmit_X86ISD_FMAX_MVT_v8f32_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
2938 if (RetVT.SimpleTy != MVT::v8f32)
2939 return 0;
2940 if ((Subtarget->hasAVX())) {
2941 return FastEmitInst_rr(X86::VMAXPSYrr, X86::VR256RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2942 }
2943 return 0;
2944}
2945
2946unsigned FastEmit_X86ISD_FMAX_MVT_v2f64_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
2947 if (RetVT.SimpleTy != MVT::v2f64)
2948 return 0;
2949 if ((Subtarget->hasAVX())) {
2950 return FastEmitInst_rr(X86::VMAXPDrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2951 }
2952 if ((Subtarget->hasSSE2() && !Subtarget->hasAVX())) {
2953 return FastEmitInst_rr(X86::MAXPDrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2954 }
2955 return 0;
2956}
2957
2958unsigned FastEmit_X86ISD_FMAX_MVT_v4f64_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
2959 if (RetVT.SimpleTy != MVT::v4f64)
2960 return 0;
2961 if ((Subtarget->hasAVX())) {
2962 return FastEmitInst_rr(X86::VMAXPDYrr, X86::VR256RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2963 }
2964 return 0;
2965}
2966
2967unsigned FastEmit_X86ISD_FMAX_rr(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
2968 switch (VT.SimpleTy) {
2969 case MVT::f32: return FastEmit_X86ISD_FMAX_MVT_f32_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
2970 case MVT::f64: return FastEmit_X86ISD_FMAX_MVT_f64_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
2971 case MVT::v4f32: return FastEmit_X86ISD_FMAX_MVT_v4f32_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
2972 case MVT::v8f32: return FastEmit_X86ISD_FMAX_MVT_v8f32_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
2973 case MVT::v2f64: return FastEmit_X86ISD_FMAX_MVT_v2f64_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
2974 case MVT::v4f64: return FastEmit_X86ISD_FMAX_MVT_v4f64_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
2975 default: return 0;
2976 }
2977}
2978
2979// FastEmit functions for X86ISD::FMIN.
2980
2981unsigned FastEmit_X86ISD_FMIN_MVT_f32_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
2982 if (RetVT.SimpleTy != MVT::f32)
2983 return 0;
2984 if ((Subtarget->hasAVX())) {
2985 return FastEmitInst_rr(X86::VMINSSrr, X86::FR32RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2986 }
2987 if ((Subtarget->hasSSE1() && !Subtarget->hasAVX())) {
2988 return FastEmitInst_rr(X86::MINSSrr, X86::FR32RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2989 }
2990 return 0;
2991}
2992
2993unsigned FastEmit_X86ISD_FMIN_MVT_f64_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
2994 if (RetVT.SimpleTy != MVT::f64)
2995 return 0;
2996 if ((Subtarget->hasAVX())) {
2997 return FastEmitInst_rr(X86::VMINSDrr, X86::FR64RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2998 }
2999 if ((Subtarget->hasSSE2() && !Subtarget->hasAVX())) {
3000 return FastEmitInst_rr(X86::MINSDrr, X86::FR64RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
3001 }
3002 return 0;
3003}
3004
3005unsigned FastEmit_X86ISD_FMIN_MVT_v4f32_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
3006 if (RetVT.SimpleTy != MVT::v4f32)
3007 return 0;
3008 if ((Subtarget->hasAVX())) {
3009 return FastEmitInst_rr(X86::VMINPSrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
3010 }
3011 if ((Subtarget->hasSSE1() && !Subtarget->hasAVX())) {
3012 return FastEmitInst_rr(X86::MINPSrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
3013 }
3014 return 0;
3015}
3016
3017unsigned FastEmit_X86ISD_FMIN_MVT_v8f32_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
3018 if (RetVT.SimpleTy != MVT::v8f32)
3019 return 0;
3020 if ((Subtarget->hasAVX())) {
3021 return FastEmitInst_rr(X86::VMINPSYrr, X86::VR256RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
3022 }
3023 return 0;
3024}
3025
3026unsigned FastEmit_X86ISD_FMIN_MVT_v2f64_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
3027 if (RetVT.SimpleTy != MVT::v2f64)
3028 return 0;
3029 if ((Subtarget->hasAVX())) {
3030 return FastEmitInst_rr(X86::VMINPDrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
3031 }
3032 if ((Subtarget->hasSSE2() && !Subtarget->hasAVX())) {
3033 return FastEmitInst_rr(X86::MINPDrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
3034 }
3035 return 0;
3036}
3037
3038unsigned FastEmit_X86ISD_FMIN_MVT_v4f64_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
3039 if (RetVT.SimpleTy != MVT::v4f64)
3040 return 0;
3041 if ((Subtarget->hasAVX())) {
3042 return FastEmitInst_rr(X86::VMINPDYrr, X86::VR256RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
3043 }
3044 return 0;
3045}
3046
3047unsigned FastEmit_X86ISD_FMIN_rr(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
3048 switch (VT.SimpleTy) {
3049 case MVT::f32: return FastEmit_X86ISD_FMIN_MVT_f32_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
3050 case MVT::f64: return FastEmit_X86ISD_FMIN_MVT_f64_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
3051 case MVT::v4f32: return FastEmit_X86ISD_FMIN_MVT_v4f32_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
3052 case MVT::v8f32: return FastEmit_X86ISD_FMIN_MVT_v8f32_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
3053 case MVT::v2f64: return FastEmit_X86ISD_FMIN_MVT_v2f64_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
3054 case MVT::v4f64: return FastEmit_X86ISD_FMIN_MVT_v4f64_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
3055 default: return 0;
3056 }
3057}
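
// Note that MIN*/MAX* (and their VEX forms) are not symmetric: when the
// operands are unordered (NaN) or are zeros of opposite sign, the result is the
// second source operand, which is presumably why X86ISD::FMIN/FMAX are only
// formed where that behaviour is acceptable.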
3058
3059// FastEmit functions for X86ISD::FOR.
3060
3061unsigned FastEmit_X86ISD_FOR_MVT_f32_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
3062 if (RetVT.SimpleTy != MVT::f32)
3063 return 0;
3064 if ((Subtarget->hasAVX())) {
3065 return FastEmitInst_rr(X86::VFsORPSrr, X86::FR32RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
3066 }
3067 if ((Subtarget->hasSSE1() && !Subtarget->hasAVX())) {
3068 return FastEmitInst_rr(X86::FsORPSrr, X86::FR32RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
3069 }
3070 return 0;
3071}
3072
3073unsigned FastEmit_X86ISD_FOR_MVT_f64_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
3074 if (RetVT.SimpleTy != MVT::f64)
3075 return 0;
3076 if ((Subtarget->hasAVX())) {
3077 return FastEmitInst_rr(X86::VFsORPDrr, X86::FR64RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
3078 }
3079 if ((Subtarget->hasSSE2() && !Subtarget->hasAVX())) {
3080 return FastEmitInst_rr(X86::FsORPDrr, X86::FR64RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
3081 }
3082 return 0;
3083}
3084
3085unsigned FastEmit_X86ISD_FOR_rr(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
3086 switch (VT.SimpleTy) {
3087 case MVT::f32: return FastEmit_X86ISD_FOR_MVT_f32_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
3088 case MVT::f64: return FastEmit_X86ISD_FOR_MVT_f64_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
3089 default: return 0;
3090 }
3091}
3092
3093// FastEmit functions for X86ISD::FXOR.
3094
3095unsigned FastEmit_X86ISD_FXOR_MVT_f32_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
3096 if (RetVT.SimpleTy != MVT::f32)
3097 return 0;
3098 if ((Subtarget->hasAVX())) {
3099 return FastEmitInst_rr(X86::VFsXORPSrr, X86::FR32RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
3100 }
3101 if ((Subtarget->hasSSE1() && !Subtarget->hasAVX())) {
3102 return FastEmitInst_rr(X86::FsXORPSrr, X86::FR32RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
3103 }
3104 return 0;
3105}
3106
3107unsigned FastEmit_X86ISD_FXOR_MVT_f64_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
3108 if (RetVT.SimpleTy != MVT::f64)
3109 return 0;
3110 if ((Subtarget->hasAVX())) {
3111 return FastEmitInst_rr(X86::VFsXORPDrr, X86::FR64RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
3112 }
3113 if ((Subtarget->hasSSE2() && !Subtarget->hasAVX())) {
3114 return FastEmitInst_rr(X86::FsXORPDrr, X86::FR64RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
3115 }
3116 return 0;
3117}
3118
3119unsigned FastEmit_X86ISD_FXOR_rr(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
3120 switch (VT.SimpleTy) {
3121 case MVT::f32: return FastEmit_X86ISD_FXOR_MVT_f32_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
3122 case MVT::f64: return FastEmit_X86ISD_FXOR_MVT_f64_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
3123 default: return 0;
3124 }
3125}
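
// FAND/FOR/FXOR operate on the bit patterns of scalar f32/f64 values; the
// backend uses them for sign-bit manipulation such as fabs (AND with a mask
// that clears the sign bit), fneg (XOR with a sign-bit mask) and copysign.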
3126
3127// FastEmit functions for X86ISD::MOVHLPS.
3128
3129unsigned FastEmit_X86ISD_MOVHLPS_MVT_v4i32_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
3130 if (RetVT.SimpleTy != MVT::v4i32)
3131 return 0;
3132 return FastEmitInst_rr(X86::MOVHLPSrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
3133}
3134
3135unsigned FastEmit_X86ISD_MOVHLPS_MVT_v4f32_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
3136 if (RetVT.SimpleTy != MVT::v4f32)
3137 return 0;
3138 return FastEmitInst_rr(X86::MOVHLPSrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
3139}
3140
3141unsigned FastEmit_X86ISD_MOVHLPS_rr(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
3142 switch (VT.SimpleTy) {
3143 case MVT::v4i32: return FastEmit_X86ISD_MOVHLPS_MVT_v4i32_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
3144 case MVT::v4f32: return FastEmit_X86ISD_MOVHLPS_MVT_v4f32_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
3145 default: return 0;
3146 }
3147}
3148
3149// FastEmit functions for X86ISD::MOVLHPS.
3150
3151unsigned FastEmit_X86ISD_MOVLHPS_MVT_v4i32_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
3152 if (RetVT.SimpleTy != MVT::v4i32)
3153 return 0;
3154 return FastEmitInst_rr(X86::MOVLHPSrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
3155}
3156
3157unsigned FastEmit_X86ISD_MOVLHPS_MVT_v2i64_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
3158 if (RetVT.SimpleTy != MVT::v2i64)
3159 return 0;
3160 return FastEmitInst_rr(X86::MOVLHPSrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
3161}
3162
3163unsigned FastEmit_X86ISD_MOVLHPS_MVT_v4f32_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
3164 if (RetVT.SimpleTy != MVT::v4f32)
3165 return 0;
3166 return FastEmitInst_rr(X86::MOVLHPSrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
3167}
3168
3169unsigned FastEmit_X86ISD_MOVLHPS_rr(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
3170 switch (VT.SimpleTy) {
3171 case MVT::v4i32: return FastEmit_X86ISD_MOVLHPS_MVT_v4i32_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
3172 case MVT::v2i64: return FastEmit_X86ISD_MOVLHPS_MVT_v2i64_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
3173 case MVT::v4f32: return FastEmit_X86ISD_MOVLHPS_MVT_v4f32_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
3174 default: return 0;
3175 }
3176}
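
// MOVHLPS writes the high 64 bits of the second source into the low 64 bits of
// the result; MOVLHPS writes the low 64 bits of the second source into the high
// 64 bits.  The remaining half comes from the first source operand.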
3177
3178// FastEmit functions for X86ISD::PCMPEQB.
3179
3180unsigned FastEmit_X86ISD_PCMPEQB_MVT_v8i8_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
3181 if (RetVT.SimpleTy != MVT::v8i8)
3182 return 0;
3183 return FastEmitInst_rr(X86::MMX_PCMPEQBrr, X86::VR64RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
3184}
3185
3186unsigned FastEmit_X86ISD_PCMPEQB_MVT_v16i8_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
3187 if (RetVT.SimpleTy != MVT::v16i8)
3188 return 0;
3189 return FastEmitInst_rr(X86::PCMPEQBrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
3190}
3191
3192unsigned FastEmit_X86ISD_PCMPEQB_rr(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
3193 switch (VT.SimpleTy) {
3194 case MVT::v8i8: return FastEmit_X86ISD_PCMPEQB_MVT_v8i8_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
3195 case MVT::v16i8: return FastEmit_X86ISD_PCMPEQB_MVT_v16i8_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
3196 default: return 0;
3197 }
3198}
3199
3200// FastEmit functions for X86ISD::PCMPEQD.
3201
3202unsigned FastEmit_X86ISD_PCMPEQD_MVT_v2i32_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
3203 if (RetVT.SimpleTy != MVT::v2i32)
3204 return 0;
3205 return FastEmitInst_rr(X86::MMX_PCMPEQDrr, X86::VR64RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
3206}
3207
3208unsigned FastEmit_X86ISD_PCMPEQD_MVT_v4i32_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
3209 if (RetVT.SimpleTy != MVT::v4i32)
3210 return 0;
3211 return FastEmitInst_rr(X86::PCMPEQDrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
3212}
3213
3214unsigned FastEmit_X86ISD_PCMPEQD_rr(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
3215 switch (VT.SimpleTy) {
3216 case MVT::v2i32: return FastEmit_X86ISD_PCMPEQD_MVT_v2i32_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
3217 case MVT::v4i32: return FastEmit_X86ISD_PCMPEQD_MVT_v4i32_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
3218 default: return 0;
3219 }
3220}
3221
3222// FastEmit functions for X86ISD::PCMPEQQ.
3223
3224unsigned FastEmit_X86ISD_PCMPEQQ_MVT_v2i64_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
3225 if (RetVT.SimpleTy != MVT::v2i64)
3226 return 0;
3227 return FastEmitInst_rr(X86::PCMPEQQrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
3228}
3229
3230unsigned FastEmit_X86ISD_PCMPEQQ_rr(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
3231 switch (VT.SimpleTy) {
3232 case MVT::v2i64: return FastEmit_X86ISD_PCMPEQQ_MVT_v2i64_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
3233 default: return 0;
3234 }
3235}
3236
3237// FastEmit functions for X86ISD::PCMPEQW.
3238
3239unsigned FastEmit_X86ISD_PCMPEQW_MVT_v4i16_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
3240 if (RetVT.SimpleTy != MVT::v4i16)
3241 return 0;
3242 return FastEmitInst_rr(X86::MMX_PCMPEQWrr, X86::VR64RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
3243}
3244
3245unsigned FastEmit_X86ISD_PCMPEQW_MVT_v8i16_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
3246 if (RetVT.SimpleTy != MVT::v8i16)
3247 return 0;
3248 return FastEmitInst_rr(X86::PCMPEQWrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
3249}
3250
3251unsigned FastEmit_X86ISD_PCMPEQW_rr(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
3252 switch (VT.SimpleTy) {
3253 case MVT::v4i16: return FastEmit_X86ISD_PCMPEQW_MVT_v4i16_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
3254 case MVT::v8i16: return FastEmit_X86ISD_PCMPEQW_MVT_v8i16_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
3255 default: return 0;
3256 }
3257}
3258
3259// FastEmit functions for X86ISD::PCMPGTB.
3260
3261unsigned FastEmit_X86ISD_PCMPGTB_MVT_v8i8_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
3262 if (RetVT.SimpleTy != MVT::v8i8)
3263 return 0;
3264 return FastEmitInst_rr(X86::MMX_PCMPGTBrr, X86::VR64RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
3265}
3266
3267unsigned FastEmit_X86ISD_PCMPGTB_MVT_v16i8_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
3268 if (RetVT.SimpleTy != MVT::v16i8)
3269 return 0;
3270 return FastEmitInst_rr(X86::PCMPGTBrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
3271}
3272
3273unsigned FastEmit_X86ISD_PCMPGTB_rr(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
3274 switch (VT.SimpleTy) {
3275 case MVT::v8i8: return FastEmit_X86ISD_PCMPGTB_MVT_v8i8_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
3276 case MVT::v16i8: return FastEmit_X86ISD_PCMPGTB_MVT_v16i8_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
3277 default: return 0;
3278 }
3279}
3280
3281// FastEmit functions for X86ISD::PCMPGTD.
3282
3283unsigned FastEmit_X86ISD_PCMPGTD_MVT_v2i32_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
3284 if (RetVT.SimpleTy != MVT::v2i32)
3285 return 0;
3286 return FastEmitInst_rr(X86::MMX_PCMPGTDrr, X86::VR64RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
3287}
3288
3289unsigned FastEmit_X86ISD_PCMPGTD_MVT_v4i32_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
3290 if (RetVT.SimpleTy != MVT::v4i32)
3291 return 0;
3292 return FastEmitInst_rr(X86::PCMPGTDrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
3293}
3294
3295unsigned FastEmit_X86ISD_PCMPGTD_rr(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
3296 switch (VT.SimpleTy) {
3297 case MVT::v2i32: return FastEmit_X86ISD_PCMPGTD_MVT_v2i32_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
3298 case MVT::v4i32: return FastEmit_X86ISD_PCMPGTD_MVT_v4i32_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
3299 default: return 0;
3300 }
3301}
3302
3303// FastEmit functions for X86ISD::PCMPGTQ.
3304
3305unsigned FastEmit_X86ISD_PCMPGTQ_MVT_v2i64_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
3306 if (RetVT.SimpleTy != MVT::v2i64)
3307 return 0;
3308 return FastEmitInst_rr(X86::PCMPGTQrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
3309}
3310
3311unsigned FastEmit_X86ISD_PCMPGTQ_rr(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
3312 switch (VT.SimpleTy) {
3313 case MVT::v2i64: return FastEmit_X86ISD_PCMPGTQ_MVT_v2i64_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
3314 default: return 0;
3315 }
3316}
3317
3318// FastEmit functions for X86ISD::PCMPGTW.
3319
3320unsigned FastEmit_X86ISD_PCMPGTW_MVT_v4i16_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
3321 if (RetVT.SimpleTy != MVT::v4i16)
3322 return 0;
3323 return FastEmitInst_rr(X86::MMX_PCMPGTWrr, X86::VR64RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
3324}
3325
3326unsigned FastEmit_X86ISD_PCMPGTW_MVT_v8i16_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
3327 if (RetVT.SimpleTy != MVT::v8i16)
3328 return 0;
3329 return FastEmitInst_rr(X86::PCMPGTWrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
3330}
3331
3332unsigned FastEmit_X86ISD_PCMPGTW_rr(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
3333 switch (VT.SimpleTy) {
3334 case MVT::v4i16: return FastEmit_X86ISD_PCMPGTW_MVT_v4i16_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
3335 case MVT::v8i16: return FastEmit_X86ISD_PCMPGTW_MVT_v8i16_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
3336 default: return 0;
3337 }
3338}
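
// The PCMPEQ*/PCMPGT* helpers above produce element-wise masks: each result
// element is all ones where the comparison holds (signed greater-than for the
// PCMPGT* forms) and all zeros otherwise.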
3339
3340// FastEmit functions for X86ISD::PSHUFB.
3341
3342unsigned FastEmit_X86ISD_PSHUFB_MVT_v16i8_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
3343 if (RetVT.SimpleTy != MVT::v16i8)
3344 return 0;
3345 if ((Subtarget->hasSSSE3() && !Subtarget->hasAVX())) {
3346 return FastEmitInst_rr(X86::PSHUFBrr128, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
3347 }
3348 return 0;
3349}
3350
3351unsigned FastEmit_X86ISD_PSHUFB_rr(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
3352 switch (VT.SimpleTy) {
3353 case MVT::v16i8: return FastEmit_X86ISD_PSHUFB_MVT_v16i8_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
3354 default: return 0;
3355 }
3356}
3357
3358// FastEmit functions for X86ISD::PTEST.
3359
3360unsigned FastEmit_X86ISD_PTEST_MVT_v4i64_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
3361 if (RetVT.SimpleTy != MVT::i32)
3362 return 0;
3363 if ((Subtarget->hasAVX())) {
3364 return FastEmitInst_rr(X86::VPTESTYrr, X86::VR256RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
3365 }
3366 return 0;
3367}
3368
3369unsigned FastEmit_X86ISD_PTEST_MVT_v4f32_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
3370 if (RetVT.SimpleTy != MVT::i32)
3371 return 0;
3372 if ((Subtarget->hasAVX())) {
3373 return FastEmitInst_rr(X86::VPTESTrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
3374 }
3375 if ((Subtarget->hasSSE41() && !Subtarget->hasAVX())) {
3376 return FastEmitInst_rr(X86::PTESTrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
3377 }
3378 return 0;
3379}
3380
3381unsigned FastEmit_X86ISD_PTEST_rr(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
3382 switch (VT.SimpleTy) {
3383 case MVT::v4i64: return FastEmit_X86ISD_PTEST_MVT_v4i64_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
3384 case MVT::v4f32: return FastEmit_X86ISD_PTEST_MVT_v4f32_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
3385 default: return 0;
3386 }
3387}
3388
3389// FastEmit functions for X86ISD::PUNPCKHBW.
3390
3391unsigned FastEmit_X86ISD_PUNPCKHBW_MVT_v16i8_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
3392 if (RetVT.SimpleTy != MVT::v16i8)
3393 return 0;
3394 return FastEmitInst_rr(X86::PUNPCKHBWrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
3395}
3396
3397unsigned FastEmit_X86ISD_PUNPCKHBW_rr(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
3398 switch (VT.SimpleTy) {
3399 case MVT::v16i8: return FastEmit_X86ISD_PUNPCKHBW_MVT_v16i8_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
3400 default: return 0;
3401 }
3402}
3403
3404// FastEmit functions for X86ISD::PUNPCKHDQ.
3405
3406unsigned FastEmit_X86ISD_PUNPCKHDQ_MVT_v4i32_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
3407 if (RetVT.SimpleTy != MVT::v4i32)
3408 return 0;
3409 return FastEmitInst_rr(X86::PUNPCKHDQrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
3410}
3411
3412unsigned FastEmit_X86ISD_PUNPCKHDQ_rr(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
3413 switch (VT.SimpleTy) {
3414 case MVT::v4i32: return FastEmit_X86ISD_PUNPCKHDQ_MVT_v4i32_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
3415 default: return 0;
3416 }
3417}
3418
3419// FastEmit functions for X86ISD::PUNPCKHQDQ.
3420
3421unsigned FastEmit_X86ISD_PUNPCKHQDQ_MVT_v2i64_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
3422 if (RetVT.SimpleTy != MVT::v2i64)
3423 return 0;
3424 return FastEmitInst_rr(X86::PUNPCKHQDQrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
3425}
3426
3427unsigned FastEmit_X86ISD_PUNPCKHQDQ_rr(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
3428 switch (VT.SimpleTy) {
3429 case MVT::v2i64: return FastEmit_X86ISD_PUNPCKHQDQ_MVT_v2i64_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
3430 default: return 0;
3431 }
3432}
3433
3434// FastEmit functions for X86ISD::PUNPCKHWD.
3435
3436unsigned FastEmit_X86ISD_PUNPCKHWD_MVT_v8i16_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
3437 if (RetVT.SimpleTy != MVT::v8i16)
3438 return 0;
3439 return FastEmitInst_rr(X86::PUNPCKHWDrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
3440}
3441
3442unsigned FastEmit_X86ISD_PUNPCKHWD_rr(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
3443 switch (VT.SimpleTy) {
3444 case MVT::v8i16: return FastEmit_X86ISD_PUNPCKHWD_MVT_v8i16_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
3445 default: return 0;
3446 }
3447}
3448
3449// FastEmit functions for X86ISD::PUNPCKLBW.
3450
3451unsigned FastEmit_X86ISD_PUNPCKLBW_MVT_v16i8_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
3452 if (RetVT.SimpleTy != MVT::v16i8)
3453 return 0;
3454 return FastEmitInst_rr(X86::PUNPCKLBWrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
3455}
3456
3457unsigned FastEmit_X86ISD_PUNPCKLBW_rr(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
3458 switch (VT.SimpleTy) {
3459 case MVT::v16i8: return FastEmit_X86ISD_PUNPCKLBW_MVT_v16i8_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
3460 default: return 0;
3461 }
3462}
3463
3464// FastEmit functions for X86ISD::PUNPCKLDQ.
3465
3466unsigned FastEmit_X86ISD_PUNPCKLDQ_MVT_v4i32_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
3467 if (RetVT.SimpleTy != MVT::v4i32)
3468 return 0;
3469 return FastEmitInst_rr(X86::PUNPCKLDQrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
3470}
3471
3472unsigned FastEmit_X86ISD_PUNPCKLDQ_rr(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
3473 switch (VT.SimpleTy) {
3474 case MVT::v4i32: return FastEmit_X86ISD_PUNPCKLDQ_MVT_v4i32_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
3475 default: return 0;
3476 }
3477}
3478
3479// FastEmit functions for X86ISD::PUNPCKLQDQ.
3480
3481unsigned FastEmit_X86ISD_PUNPCKLQDQ_MVT_v2i64_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
3482 if (RetVT.SimpleTy != MVT::v2i64)
3483 return 0;
3484 return FastEmitInst_rr(X86::PUNPCKLQDQrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
3485}
3486
3487unsigned FastEmit_X86ISD_PUNPCKLQDQ_rr(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
3488 switch (VT.SimpleTy) {
3489 case MVT::v2i64: return FastEmit_X86ISD_PUNPCKLQDQ_MVT_v2i64_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
3490 default: return 0;
3491 }
3492}
3493
3494// FastEmit functions for X86ISD::PUNPCKLWD.
3495
3496unsigned FastEmit_X86ISD_PUNPCKLWD_MVT_v8i16_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
3497 if (RetVT.SimpleTy != MVT::v8i16)
3498 return 0;
3499 return FastEmitInst_rr(X86::PUNPCKLWDrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
3500}
3501
3502unsigned FastEmit_X86ISD_PUNPCKLWD_rr(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
3503 switch (VT.SimpleTy) {
3504 case MVT::v8i16: return FastEmit_X86ISD_PUNPCKLWD_MVT_v8i16_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
3505 default: return 0;
3506 }
3507}
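
// The PUNPCKL* forms interleave elements taken from the low halves of the two
// sources and the PUNPCKH* forms from the high halves, alternating
// first-operand and second-operand elements in the result.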
3508
3509// FastEmit functions for X86ISD::TESTP.
3510
3511unsigned FastEmit_X86ISD_TESTP_MVT_v4f32_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
3512 if (RetVT.SimpleTy != MVT::i32)
3513 return 0;
3514 if ((Subtarget->hasAVX())) {
3515 return FastEmitInst_rr(X86::VTESTPSrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
3516 }
3517 return 0;
3518}
3519
3520unsigned FastEmit_X86ISD_TESTP_MVT_v8f32_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
3521 if (RetVT.SimpleTy != MVT::i32)
3522 return 0;
3523 if ((Subtarget->hasAVX())) {
3524 return FastEmitInst_rr(X86::VTESTPSYrr, X86::VR256RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
3525 }
3526 return 0;
3527}
3528
3529unsigned FastEmit_X86ISD_TESTP_MVT_v2f64_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
3530 if (RetVT.SimpleTy != MVT::i32)
3531 return 0;
3532 if ((Subtarget->hasAVX())) {
3533 return FastEmitInst_rr(X86::VTESTPDrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
3534 }
3535 return 0;
3536}
3537
3538unsigned FastEmit_X86ISD_TESTP_MVT_v4f64_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
3539 if (RetVT.SimpleTy != MVT::i32)
3540 return 0;
3541 if ((Subtarget->hasAVX())) {
3542 return FastEmitInst_rr(X86::VTESTPDYrr, X86::VR256RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
3543 }
3544 return 0;
3545}
3546
3547unsigned FastEmit_X86ISD_TESTP_rr(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
3548 switch (VT.SimpleTy) {
3549 case MVT::v4f32: return FastEmit_X86ISD_TESTP_MVT_v4f32_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
3550 case MVT::v8f32: return FastEmit_X86ISD_TESTP_MVT_v8f32_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
3551 case MVT::v2f64: return FastEmit_X86ISD_TESTP_MVT_v2f64_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
3552 case MVT::v4f64: return FastEmit_X86ISD_TESTP_MVT_v4f64_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
3553 default: return 0;
3554 }
3555}
3556
3557// FastEmit functions for X86ISD::UCOMI.
3558
3559unsigned FastEmit_X86ISD_UCOMI_MVT_v4f32_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
3560 if (RetVT.SimpleTy != MVT::i32)
3561 return 0;
3562 if ((Subtarget->hasAVX())) {
3563 return FastEmitInst_rr(X86::Int_VUCOMISSrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
3564 }
3565 if ((Subtarget->hasSSE1() && !Subtarget->hasAVX())) {
3566 return FastEmitInst_rr(X86::Int_UCOMISSrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
3567 }
3568 return 0;
3569}
3570
3571unsigned FastEmit_X86ISD_UCOMI_MVT_v2f64_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
3572 if (RetVT.SimpleTy != MVT::i32)
3573 return 0;
3574 if ((Subtarget->hasAVX())) {
3575 return FastEmitInst_rr(X86::Int_VUCOMISDrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
3576 }
3577 if ((Subtarget->hasSSE2() && !Subtarget->hasAVX())) {
3578 return FastEmitInst_rr(X86::Int_UCOMISDrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
3579 }
3580 return 0;
3581}
3582
3583unsigned FastEmit_X86ISD_UCOMI_rr(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
3584 switch (VT.SimpleTy) {
3585 case MVT::v4f32: return FastEmit_X86ISD_UCOMI_MVT_v4f32_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
3586 case MVT::v2f64: return FastEmit_X86ISD_UCOMI_MVT_v2f64_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
3587 default: return 0;
3588 }
3589}
3590
3591// FastEmit functions for X86ISD::UNPCKHPD.
3592
3593unsigned FastEmit_X86ISD_UNPCKHPD_MVT_v2f64_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
3594  if (RetVT.SimpleTy != MVT::v2f64)
3595    return 0;
3596  if ((Subtarget->hasAVX())) {
3597    return FastEmitInst_rr(X86::VUNPCKHPDrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
3598  }
3599  return FastEmitInst_rr(X86::UNPCKHPDrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
3600}
3602
3603unsigned FastEmit_X86ISD_UNPCKHPD_rr(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
3604 switch (VT.SimpleTy) {
3605 case MVT::v2f64: return FastEmit_X86ISD_UNPCKHPD_MVT_v2f64_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
3606 default: return 0;
3607 }
3608}
3609
3610// FastEmit functions for X86ISD::UNPCKHPS.
3611
3612unsigned FastEmit_X86ISD_UNPCKHPS_MVT_v4f32_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
3613  if (RetVT.SimpleTy != MVT::v4f32)
3614    return 0;
3615  if ((Subtarget->hasAVX())) {
3616    return FastEmitInst_rr(X86::VUNPCKHPSrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
3617  }
3618  return FastEmitInst_rr(X86::UNPCKHPSrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
3619}
3621
3622unsigned FastEmit_X86ISD_UNPCKHPS_rr(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
3623 switch (VT.SimpleTy) {
3624 case MVT::v4f32: return FastEmit_X86ISD_UNPCKHPS_MVT_v4f32_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
3625 default: return 0;
3626 }
3627}
3628
3629// FastEmit functions for X86ISD::UNPCKLPD.
3630
3631unsigned FastEmit_X86ISD_UNPCKLPD_MVT_v2f64_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
3632  if (RetVT.SimpleTy != MVT::v2f64)
3633    return 0;
3634  if ((Subtarget->hasAVX())) {
3635    return FastEmitInst_rr(X86::VUNPCKLPDrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
3636  }
3637  return FastEmitInst_rr(X86::UNPCKLPDrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
3638}
3640
3641unsigned FastEmit_X86ISD_UNPCKLPD_rr(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
3642 switch (VT.SimpleTy) {
3643 case MVT::v2f64: return FastEmit_X86ISD_UNPCKLPD_MVT_v2f64_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
3644 default: return 0;
3645 }
3646}
3647
3648// FastEmit functions for X86ISD::UNPCKLPS.
3649
3650unsigned FastEmit_X86ISD_UNPCKLPS_MVT_v4f32_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
3651  if (RetVT.SimpleTy != MVT::v4f32)
3652    return 0;
3653  if ((Subtarget->hasAVX())) {
3654    return FastEmitInst_rr(X86::VUNPCKLPSrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
3655  }
3656  return FastEmitInst_rr(X86::UNPCKLPSrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
3657}
3659
3660unsigned FastEmit_X86ISD_UNPCKLPS_rr(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
3661 switch (VT.SimpleTy) {
3662 case MVT::v4f32: return FastEmit_X86ISD_UNPCKLPS_MVT_v4f32_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
3663 default: return 0;
3664 }
3665}
3666
3667// Top-level FastEmit function.
3668
3669unsigned FastEmit_rr(MVT VT, MVT RetVT, unsigned Opcode, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
3670 switch (Opcode) {
3671 case ISD::ADD: return FastEmit_ISD_ADD_rr(VT, RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
3672 case ISD::ADDC: return FastEmit_ISD_ADDC_rr(VT, RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
3673 case ISD::ADDE: return FastEmit_ISD_ADDE_rr(VT, RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
3674 case ISD::AND: return FastEmit_ISD_AND_rr(VT, RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
3675 case ISD::FADD: return FastEmit_ISD_FADD_rr(VT, RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
3676 case ISD::FDIV: return FastEmit_ISD_FDIV_rr(VT, RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
3677 case ISD::FMUL: return FastEmit_ISD_FMUL_rr(VT, RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
3678 case ISD::FSUB: return FastEmit_ISD_FSUB_rr(VT, RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
3679 case ISD::MUL: return FastEmit_ISD_MUL_rr(VT, RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
3680 case ISD::OR: return FastEmit_ISD_OR_rr(VT, RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
3681 case ISD::ROTL: return FastEmit_ISD_ROTL_rr(VT, RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
3682 case ISD::ROTR: return FastEmit_ISD_ROTR_rr(VT, RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
3683 case ISD::SHL: return FastEmit_ISD_SHL_rr(VT, RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
3684 case ISD::SRA: return FastEmit_ISD_SRA_rr(VT, RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
3685 case ISD::SRL: return FastEmit_ISD_SRL_rr(VT, RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
3686 case ISD::SUB: return FastEmit_ISD_SUB_rr(VT, RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
3687 case ISD::SUBC: return FastEmit_ISD_SUBC_rr(VT, RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
3688 case ISD::SUBE: return FastEmit_ISD_SUBE_rr(VT, RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
3689 case ISD::XOR: return FastEmit_ISD_XOR_rr(VT, RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
3690 case X86ISD::BT: return FastEmit_X86ISD_BT_rr(VT, RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
3691 case X86ISD::CMP: return FastEmit_X86ISD_CMP_rr(VT, RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
3692 case X86ISD::COMI: return FastEmit_X86ISD_COMI_rr(VT, RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
3693 case X86ISD::FAND: return FastEmit_X86ISD_FAND_rr(VT, RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
3694 case X86ISD::FMAX: return FastEmit_X86ISD_FMAX_rr(VT, RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
3695 case X86ISD::FMIN: return FastEmit_X86ISD_FMIN_rr(VT, RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
3696 case X86ISD::FOR: return FastEmit_X86ISD_FOR_rr(VT, RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
3697 case X86ISD::FXOR: return FastEmit_X86ISD_FXOR_rr(VT, RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
3698 case X86ISD::MOVHLPS: return FastEmit_X86ISD_MOVHLPS_rr(VT, RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
3699 case X86ISD::MOVLHPS: return FastEmit_X86ISD_MOVLHPS_rr(VT, RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
3700 case X86ISD::PCMPEQB: return FastEmit_X86ISD_PCMPEQB_rr(VT, RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
3701 case X86ISD::PCMPEQD: return FastEmit_X86ISD_PCMPEQD_rr(VT, RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
3702 case X86ISD::PCMPEQQ: return FastEmit_X86ISD_PCMPEQQ_rr(VT, RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
3703 case X86ISD::PCMPEQW: return FastEmit_X86ISD_PCMPEQW_rr(VT, RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
3704 case X86ISD::PCMPGTB: return FastEmit_X86ISD_PCMPGTB_rr(VT, RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
3705 case X86ISD::PCMPGTD: return FastEmit_X86ISD_PCMPGTD_rr(VT, RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
3706 case X86ISD::PCMPGTQ: return FastEmit_X86ISD_PCMPGTQ_rr(VT, RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
3707 case X86ISD::PCMPGTW: return FastEmit_X86ISD_PCMPGTW_rr(VT, RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
3708 case X86ISD::PSHUFB: return FastEmit_X86ISD_PSHUFB_rr(VT, RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
3709 case X86ISD::PTEST: return FastEmit_X86ISD_PTEST_rr(VT, RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
3710 case X86ISD::PUNPCKHBW: return FastEmit_X86ISD_PUNPCKHBW_rr(VT, RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
3711 case X86ISD::PUNPCKHDQ: return FastEmit_X86ISD_PUNPCKHDQ_rr(VT, RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
3712 case X86ISD::PUNPCKHQDQ: return FastEmit_X86ISD_PUNPCKHQDQ_rr(VT, RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
3713 case X86ISD::PUNPCKHWD: return FastEmit_X86ISD_PUNPCKHWD_rr(VT, RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
3714 case X86ISD::PUNPCKLBW: return FastEmit_X86ISD_PUNPCKLBW_rr(VT, RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
3715 case X86ISD::PUNPCKLDQ: return FastEmit_X86ISD_PUNPCKLDQ_rr(VT, RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
3716 case X86ISD::PUNPCKLQDQ: return FastEmit_X86ISD_PUNPCKLQDQ_rr(VT, RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
3717 case X86ISD::PUNPCKLWD: return FastEmit_X86ISD_PUNPCKLWD_rr(VT, RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
3718 case X86ISD::TESTP: return FastEmit_X86ISD_TESTP_rr(VT, RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
3719 case X86ISD::UCOMI: return FastEmit_X86ISD_UCOMI_rr(VT, RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
3720 case X86ISD::UNPCKHPD: return FastEmit_X86ISD_UNPCKHPD_rr(VT, RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
3721 case X86ISD::UNPCKHPS: return FastEmit_X86ISD_UNPCKHPS_rr(VT, RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
3722 case X86ISD::UNPCKLPD: return FastEmit_X86ISD_UNPCKLPD_rr(VT, RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
3723 case X86ISD::UNPCKLPS: return FastEmit_X86ISD_UNPCKLPS_rr(VT, RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
3724 default: return 0;
3725 }
3726}
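
// Illustrative use (a sketch, not generated code): the target-independent
// FastISel driver reaches this entry point when selecting a two-register
// operation.  For example, a 32-bit xor of the hypothetical virtual registers
// LHSReg and RHSReg would be emitted roughly as:
//
//   unsigned ResultReg = FastEmit_rr(MVT::i32, MVT::i32, ISD::XOR,
//                                    LHSReg, /*Op0IsKill=*/true,
//                                    RHSReg, /*Op1IsKill=*/false);
//   if (ResultReg == 0) {
//     // No table entry matched; the caller falls back to the slower
//     // SelectionDAG path for this instruction.
//   }
//
// The register numbers and kill flags are placeholders for whatever the caller
// has on hand.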
3727