llvm.org GIT mirror llvm / 2926869
Fix a long-standing wart in the code generator: two-address instruction lowering actually *removes* one of the operands, instead of just assigning both operands the same register. This makes reasoning about instructions unnecessarily complex, because you need to know if you are before or after register allocation to match up operand #'s with the target description file. Changing this also gets rid of a bunch of hacky code in various places. This patch also includes changes to fold loads into cmp/test instructions in the X86 backend, along with a significant simplification to the X86 spill folding code. git-svn-id: https://llvm.org/svn/llvm-project/llvm/trunk@30108 91177308-0d34-0410-b5e6-96231b3b80d8 Chris Lattner 13 years ago
9 changed file(s) with 579 addition(s) and 606 deletion(s). Raw diff Collapse all Expand all
261261
262262 MachineInstr *MI = getInstructionFromIndex(index);
263263
264 // NewRegLiveIn - This instruction might have multiple uses of the spilled
265 // register. In this case, for the first use, keep track of the new vreg
266 // that we reload it into. If we see a second use, reuse this vreg
267 // instead of creating live ranges for two reloads.
268 unsigned NewRegLiveIn = 0;
269
270 for_operand:
264 RestartInstruction:
271265 for (unsigned i = 0; i != MI->getNumOperands(); ++i) {
272266 MachineOperand& mop = MI->getOperand(i);
273267 if (mop.isRegister() && mop.getReg() == li.reg) {
274 if (NewRegLiveIn && mop.isUse()) {
275 // We already emitted a reload of this value, reuse it for
276 // subsequent operands.
277 MI->getOperand(i).setReg(NewRegLiveIn);
278 DEBUG(std::cerr << "\t\t\t\treused reload into reg" << NewRegLiveIn
279 << " for operand #" << i << '\n');
280 } else if (MachineInstr* fmi = mri_->foldMemoryOperand(MI, i, slot)) {
268 if (MachineInstr *fmi = mri_->foldMemoryOperand(MI, i, slot)) {
281269 // Attempt to fold the memory reference into the instruction. If we
282270 // can do this, we don't need to insert spill code.
283271 if (lv_)
291279 ++numFolded;
292280 // Folding the load/store can completely change the instruction in
293281 // unpredictable ways, rescan it from the beginning.
294 goto for_operand;
282 goto RestartInstruction;
295283 } else {
296 // This is tricky. We need to add information in the interval about
297 // the spill code so we have to use our extra load/store slots.
284 // Create a new virtual register for the spill interval.
285 unsigned NewVReg = mf_->getSSARegMap()->createVirtualRegister(rc);
286
287 // Scan all of the operands of this instruction rewriting operands
288 // to use NewVReg instead of li.reg as appropriate. We do this for
289 // two reasons:
298290 //
299 // If we have a use we are going to have a load so we start the
300 // interval from the load slot onwards. Otherwise we start from the
301 // def slot.
302 unsigned start = (mop.isUse() ?
303 getLoadIndex(index) :
304 getDefIndex(index));
305 // If we have a def we are going to have a store right after it so
306 // we end the interval after the use of the next
307 // instruction. Otherwise we end after the use of this instruction.
308 unsigned end = 1 + (mop.isDef() ?
309 getStoreIndex(index) :
310 getUseIndex(index));
291 // 1. If the instr reads the same spilled vreg multiple times, we
292 // want to reuse the NewVReg.
293 // 2. If the instr is a two-addr instruction, we are required to
294 // keep the src/dst regs pinned.
295 //
296 // Keep track of whether we replace a use and/or def so that we can
297 // create the spill interval with the appropriate range.
298 mop.setReg(NewVReg);
299
300 bool HasUse = mop.isUse();
301 bool HasDef = mop.isDef();
302 for (unsigned j = i+1, e = MI->getNumOperands(); j != e; ++j) {
303 if (MI->getOperand(j).isReg() &&
304 MI->getOperand(j).getReg() == li.reg) {
305 MI->getOperand(j).setReg(NewVReg);
306 HasUse |= MI->getOperand(j).isUse();
307 HasDef |= MI->getOperand(j).isDef();
308 }
309 }
311310
312311 // create a new register for this spill
313 NewRegLiveIn = mf_->getSSARegMap()->createVirtualRegister(rc);
314 MI->getOperand(i).setReg(NewRegLiveIn);
315312 vrm.grow();
316 vrm.assignVirt2StackSlot(NewRegLiveIn, slot);
317 LiveInterval& nI = getOrCreateInterval(NewRegLiveIn);
313 vrm.assignVirt2StackSlot(NewVReg, slot);
314 LiveInterval &nI = getOrCreateInterval(NewVReg);
318315 assert(nI.empty());
319316
320317 // the spill weight is now infinity as it
321318 // cannot be spilled again
322319 nI.weight = float(HUGE_VAL);
323 LiveRange LR(start, end, nI.getNextValue(~0U, 0));
324 DEBUG(std::cerr << " +" << LR);
325 nI.addRange(LR);
320
321 if (HasUse) {
322 LiveRange LR(getLoadIndex(index), getUseIndex(index),
323 nI.getNextValue(~0U, 0));
324 DEBUG(std::cerr << " +" << LR);
325 nI.addRange(LR);
326 }
327 if (HasDef) {
328 LiveRange LR(getDefIndex(index), getStoreIndex(index),
329 nI.getNextValue(~0U, 0));
330 DEBUG(std::cerr << " +" << LR);
331 nI.addRange(LR);
332 }
333
326334 added.push_back(&nI);
327335
328336 // update live variables if it is available
329337 if (lv_)
330 lv_->addVirtualRegisterKilled(NewRegLiveIn, MI);
331
332 // If this is a live in, reuse it for subsequent live-ins. If it's
333 // a def, we can't do this.
334 if (!mop.isUse()) NewRegLiveIn = 0;
338 lv_->addVirtualRegisterKilled(NewVReg, MI);
335339
336340 DEBUG(std::cerr << "\t\t\t\tadded new interval: ";
337341 nI.print(std::cerr, mri_); std::cerr << '\n');
444448 // operand, and is a def-and-use.
445449 if (mi->getOperand(0).isRegister() &&
446450 mi->getOperand(0).getReg() == interval.reg &&
447 mi->getOperand(0).isDef() && mi->getOperand(0).isUse()) {
451 mi->getNumOperands() > 1 && mi->getOperand(1).isRegister() &&
452 mi->getOperand(1).getReg() == interval.reg &&
453 mi->getOperand(0).isDef() && mi->getOperand(1).isUse()) {
448454 // If this is a two-address definition, then we have already processed
449455 // the live range. The only problem is that we didn't realize there
450456 // are actually two values in the live interval. Because of this we
202202 physReg = getFreeReg(virtualReg);
203203 } else {
204204 // must be same register number as the first operand
205 // This maps a = b + c into b += c, and saves b into a's spot
205 // This maps a = b + c into b = b + c, and saves b into a's spot.
206206 assert(MI->getOperand(1).isRegister() &&
207207 MI->getOperand(1).getReg() &&
208208 MI->getOperand(1).isUse() &&
209209 "Two address instruction invalid!");
210210
211211 physReg = MI->getOperand(1).getReg();
212 spillVirtReg(MBB, next(MI), virtualReg, physReg);
213 MI->getOperand(1).setDef();
214 MI->RemoveOperand(0);
215 break; // This is the last operand to process
216212 }
217213 spillVirtReg(MBB, next(MI), virtualReg, physReg);
218214 } else {
205205 }
206206 }
207207
208 assert(mi->getOperand(0).isDef());
209 mi->getOperand(0).setUse();
210 mi->RemoveOperand(1);
208 assert(mi->getOperand(0).isDef() && mi->getOperand(1).isUse());
209 mi->getOperand(1).setReg(mi->getOperand(0).getReg());
211210 MadeChange = true;
212211
213212 DEBUG(std::cerr << "\t\trewrite to:\t"; mi->print(std::cerr, &TM));
5656 // VirtRegMap implementation
5757 //===----------------------------------------------------------------------===//
5858
59 VirtRegMap::VirtRegMap(MachineFunction &mf)
60 : TII(*mf.getTarget().getInstrInfo()), MF(mf),
61 Virt2PhysMap(NO_PHYS_REG), Virt2StackSlotMap(NO_STACK_SLOT) {
62 grow();
63 }
64
5965 void VirtRegMap::grow() {
6066 Virt2PhysMap.grow(MF.getSSARegMap()->getLastVirtReg());
6167 Virt2StackSlotMap.grow(MF.getSSARegMap()->getLastVirtReg());
9197 }
9298
9399 ModRef MRInfo;
94 if (!OldMI->getOperand(OpNo).isDef()) {
95 assert(OldMI->getOperand(OpNo).isUse() && "Operand is not use or def?");
100 if (OpNo < 2 && TII.isTwoAddrInstr(OldMI->getOpcode())) {
101 // Folded a two-address operand.
102 MRInfo = isModRef;
103 } else if (OldMI->getOperand(OpNo).isDef()) {
104 MRInfo = isMod;
105 } else {
96106 MRInfo = isRef;
97 } else {
98 MRInfo = OldMI->getOperand(OpNo).isUse() ? isModRef : isMod;
99107 }
100108
101109 // add new memory reference
491499 // that we can choose to reuse the physregs instead of emitting reloads.
492500 AvailableSpills Spills(MRI, TII);
493501
494 // DefAndUseVReg - When we see a def&use operand that is spilled, keep track
495 // of it. ".first" is the machine operand index (should always be 0 for now),
496 // and ".second" is the virtual register that is spilled.
497 std::vector<std::pair<unsigned, unsigned> > DefAndUseVReg;
498
499502 // MaybeDeadStores - When we need to write a value back into a stack slot,
500503 // keep track of the inserted store. If the stack slot value is never read
501504 // (because the value was used from some available register, for example), and
515518 /// reuse.
516519 ReuseInfo ReusedOperands(MI);
517520
518 DefAndUseVReg.clear();
519
520521 // Process all of the spilled uses and all non spilled reg references.
521522 for (unsigned i = 0, e = MI.getNumOperands(); i != e; ++i) {
522523 MachineOperand &MO = MI.getOperand(i);
546547 if (!MO.isUse())
547548 continue; // Handle defs in the loop below (handle use&def here though)
548549
549 // If this is both a def and a use, we need to emit a store to the
550 // stack slot after the instruction. Keep track of D&U operands
551 // because we are about to change it to a physreg here.
552 if (MO.isDef()) {
553 // Remember that this was a def-and-use operand, and that the
554 // stack slot is live after this instruction executes.
555 DefAndUseVReg.push_back(std::make_pair(i, VirtReg));
556 }
557
558550 int StackSlot = VRM.getStackSlot(VirtReg);
559551 unsigned PhysReg;
560552
561553 // Check to see if this stack slot is available.
562554 if ((PhysReg = Spills.getSpillSlotPhysReg(StackSlot))) {
563555
564 // Don't reuse it for a def&use operand if we aren't allowed to change
565 // the physreg!
566 if (!MO.isDef() || Spills.canClobberPhysReg(StackSlot)) {
556 // This spilled operand might be part of a two-address operand. If this
557 // is the case, then changing it will necessarily require changing the
558 // def part of the instruction as well. However, in some cases, we
559 // aren't allowed to modify the reused register. If none of these cases
560 // apply, reuse it.
561 bool CanReuse = true;
562 if (i == 1 && MI.getOperand(0).isReg() &&
563 MI.getOperand(0).getReg() == VirtReg &&
564 TII->isTwoAddrInstr(MI.getOpcode())) {
565 // Okay, we have a two address operand. We can reuse this physreg as
566 // long as we are allowed to clobber the value.
567 CanReuse = Spills.canClobberPhysReg(StackSlot);
568 }
569
570 if (CanReuse) {
567571 // If this stack slot value is already available, reuse it!
568572 DEBUG(std::cerr << "Reusing SS#" << StackSlot << " from physreg "
569573 << MRI->getName(PhysReg) << " for vreg"
776780 unsigned VirtReg = MO.getReg();
777781
778782 if (!MRegisterInfo::isVirtualRegister(VirtReg)) {
779 // Check to see if this is a def-and-use vreg operand that we do need
780 // to insert a store for.
781 bool OpTakenCareOf = false;
782 if (MO.isUse() && !DefAndUseVReg.empty()) {
783 for (unsigned dau = 0, e = DefAndUseVReg.size(); dau != e; ++dau)
784 if (DefAndUseVReg[dau].first == i) {
785 VirtReg = DefAndUseVReg[dau].second;
786 OpTakenCareOf = true;
787 break;
788 }
783 // Check to see if this is a noop copy. If so, eliminate the
784 // instruction before considering the dest reg to be changed.
785 unsigned Src, Dst;
786 if (TII->isMoveInstr(MI, Src, Dst) && Src == Dst) {
787 ++NumDCE;
788 DEBUG(std::cerr << "Removing now-noop copy: " << MI);
789 MBB.erase(&MI);
790 VRM.RemoveFromFoldedVirtMap(&MI);
791 goto ProcessNextInst;
789792 }
790
791 if (!OpTakenCareOf) {
792 // Check to see if this is a noop copy. If so, eliminate the
793 // instruction before considering the dest reg to be changed.
794 unsigned Src, Dst;
795 if (TII->isMoveInstr(MI, Src, Dst) && Src == Dst) {
796 ++NumDCE;
797 DEBUG(std::cerr << "Removing now-noop copy: " << MI);
798 MBB.erase(&MI);
799 VRM.RemoveFromFoldedVirtMap(&MI);
800 goto ProcessNextInst;
801 }
802 Spills.ClobberPhysReg(VirtReg);
803 continue;
804 }
793 Spills.ClobberPhysReg(VirtReg);
794 continue;
805795 }
806796
807797 // The only vregs left are stack slot definitions.
808798 int StackSlot = VRM.getStackSlot(VirtReg);
809799 const TargetRegisterClass *RC =
810800 MBB.getParent()->getSSARegMap()->getRegClass(VirtReg);
801
802 // If this def is part of a two-address operand, make sure to execute
803 // the store from the correct physical register.
811804 unsigned PhysReg;
812
813 // If this is a def&use operand, and we used a different physreg for
814 // it than the one assigned, make sure to execute the store from the
815 // correct physical register.
816 if (MO.getReg() == VirtReg)
805 if (i == 0 && TII->isTwoAddrInstr(MI.getOpcode()))
806 PhysReg = MI.getOperand(1).getReg();
807 else
817808 PhysReg = VRM.getPhys(VirtReg);
818 else
819 PhysReg = MO.getReg();
820809
821810 PhysRegsUsed[PhysReg] = true;
822811 MRI->storeRegToStackSlot(MBB, next(MII), PhysReg, StackSlot, RC);
2222
2323 namespace llvm {
2424 class MachineInstr;
25 class TargetInstrInfo;
2526
2627 class VirtRegMap {
2728 public:
3031 std::multimap<MachineInstr*, std::pair<unsigned, ModRef> > MI2VirtMapTy;
3132
3233 private:
34 const TargetInstrInfo &TII;
35
3336 MachineFunction &MF;
3437 /// Virt2PhysMap - This is a virtual to physical register
3538 /// mapping. Each virtual register is required to have an entry in
5760 };
5861
5962 public:
60 VirtRegMap(MachineFunction &mf)
61 : MF(mf), Virt2PhysMap(NO_PHYS_REG), Virt2StackSlotMap(NO_STACK_SLOT) {
62 grow();
63 }
63 VirtRegMap(MachineFunction &mf);
6464
6565 void grow();
6666
2121
2222 X86InstrInfo::X86InstrInfo(X86TargetMachine &tm)
2323 : TargetInstrInfo(X86Insts, sizeof(X86Insts)/sizeof(X86Insts[0])),
24 TM(tm) {
24 TM(tm), RI(*this) {
2525 }
2626
2727
4545 cl::Hidden);
4646 }
4747
48 X86RegisterInfo::X86RegisterInfo()
49 : X86GenRegisterInfo(X86::ADJCALLSTACKDOWN, X86::ADJCALLSTACKUP) {}
48 X86RegisterInfo::X86RegisterInfo(const TargetInstrInfo &tii)
49 : X86GenRegisterInfo(X86::ADJCALLSTACKDOWN, X86::ADJCALLSTACKUP), TII(tii) {}
5050
5151 void X86RegisterInfo::storeRegToStackSlot(MachineBasicBlock &MBB,
5252 MachineBasicBlock::iterator MI,
138138 BuildMI(MBB, MI, Opc, 1, DestReg).addReg(SrcReg);
139139 }
140140
141
142 static MachineInstr *MakeMInst(unsigned Opcode, unsigned FrameIndex,
143 MachineInstr *MI) {
144 return addFrameReference(BuildMI(Opcode, 4), FrameIndex);
145 }
146
147 static MachineInstr *MakeMRInst(unsigned Opcode, unsigned FrameIndex,
148 MachineInstr *MI) {
149 return addFrameReference(BuildMI(Opcode, 5), FrameIndex)
150 .addReg(MI->getOperand(1).getReg());
151 }
152
153 static MachineInstr *MakeMRIInst(unsigned Opcode, unsigned FrameIndex,
154 MachineInstr *MI) {
155 return addFrameReference(BuildMI(Opcode, 6), FrameIndex)
156 .addReg(MI->getOperand(1).getReg())
157 .addImm(MI->getOperand(2).getImmedValue());
158 }
159
160 static MachineInstr *MakeMIInst(unsigned Opcode, unsigned FrameIndex,
161 MachineInstr *MI) {
162 if (MI->getOperand(1).isImmediate())
163 return addFrameReference(BuildMI(Opcode, 5), FrameIndex)
164 .addImm(MI->getOperand(1).getImmedValue());
165 else if (MI->getOperand(1).isGlobalAddress())
166 return addFrameReference(BuildMI(Opcode, 5), FrameIndex)
167 .addGlobalAddress(MI->getOperand(1).getGlobal(),
168 MI->getOperand(1).getOffset());
169 else if (MI->getOperand(1).isJumpTableIndex())
170 return addFrameReference(BuildMI(Opcode, 5), FrameIndex)
171 .addJumpTableIndex(MI->getOperand(1).getJumpTableIndex());
172 assert(0 && "Unknown operand for MakeMI!");
173 return 0;
141 static MachineInstr *FuseTwoAddrInst(unsigned Opcode, unsigned FrameIndex,
142 MachineInstr *MI) {
143 unsigned NumOps = MI->getNumOperands()-2;
144 // Create the base instruction with the memory operand as the first part.
145 MachineInstrBuilder MIB = addFrameReference(BuildMI(Opcode, 4+NumOps),
146 FrameIndex);
147
148 // Loop over the rest of the ri operands, converting them over.
149 for (unsigned i = 0; i != NumOps; ++i) {
150 if (MI->getOperand(i+2).isReg())
151 MIB = MIB.addReg(MI->getOperand(i+2).getReg());
152 else {
153 assert(MI->getOperand(i+2).isImm() && "Unknown operand type!");
154 MIB = MIB.addImm(MI->getOperand(i+2).getImm());
155 }
156 }
157 return MIB;
158 }
159
160 static MachineInstr *FuseInst(unsigned Opcode, unsigned OpNo,
161 unsigned FrameIndex, MachineInstr *MI) {
162 MachineInstrBuilder MIB = BuildMI(Opcode, MI->getNumOperands()+3);
163
164 for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) {
165 MachineOperand &MO = MI->getOperand(i);
166 if (i == OpNo) {
167 assert(MO.isReg() && "Expected to fold into reg operand!");
168 MIB = addFrameReference(MIB, FrameIndex);
169 } else if (MO.isReg())
170 MIB = MIB.addReg(MO.getReg(), MO.getUseType());
171 else if (MO.isImm())
172 MIB = MIB.addImm(MO.getImm());
173 else if (MO.isGlobalAddress())
174 MIB = MIB.addGlobalAddress(MO.getGlobal(), MO.getOffset());
175 else if (MO.isJumpTableIndex())
176 MIB = MIB.addJumpTableIndex(MO.getJumpTableIndex());
177 else
178 assert(0 && "Unknown operand for FuseInst!");
179 }
180 return MIB;
174181 }
175182
176183 static MachineInstr *MakeM0Inst(unsigned Opcode, unsigned FrameIndex,
177184 MachineInstr *MI) {
178185 return addFrameReference(BuildMI(Opcode, 5), FrameIndex).addImm(0);
179 }
180
181 static MachineInstr *MakeRMInst(unsigned Opcode, unsigned FrameIndex,
182 MachineInstr *MI) {
183 const MachineOperand& op = MI->getOperand(0);
184 return addFrameReference(BuildMI(Opcode, 5, op.getReg(), op.getUseType()),
185 FrameIndex);
186 }
187
188 static MachineInstr *MakeRMIInst(unsigned Opcode, unsigned FrameIndex,
189 MachineInstr *MI) {
190 const MachineOperand& op = MI->getOperand(0);
191 return addFrameReference(BuildMI(Opcode, 6, op.getReg(), op.getUseType()),
192 FrameIndex).addImm(MI->getOperand(2).getImmedValue());
193186 }
194187
195188
203196 struct TableEntry {
204197 unsigned from; // Original opcode.
205198 unsigned to; // New opcode.
206 unsigned make; // Form of make required to produce the
207 // new instruction.
208199
209200 // less operators used by STL search.
210201 bool operator<(const TableEntry &TE) const { return from < TE.from; }
256247 #endif
257248
258249
259 MachineInstr* X86RegisterInfo::foldMemoryOperand(MachineInstr* MI,
250 MachineInstr* X86RegisterInfo::foldMemoryOperand(MachineInstr *MI,
260251 unsigned i,
261252 int FrameIndex) const {
262253 // Check switch flag
263254 if (NoFusing) return NULL;
264255
265 // Selection of instruction makes
266 enum {
267 makeM0Inst,
268 makeMIInst,
269 makeMInst,
270 makeMRIInst,
271 makeMRInst,
272 makeRMIInst,
273 makeRMInst
274 };
275
276256 // Table (and size) to search
277257 const TableEntry *OpcodeTablePtr = NULL;
278258 unsigned OpcodeTableSize = 0;
279
280 if (i == 0) { // If operand 0
259 bool isTwoAddrFold = false;
260
261 // Folding a memory location into the two-address part of a two-address
262 // instruction is different than folding it other places. It requires
263 // replacing the *two* registers with the memory location.
264 if (MI->getNumOperands() >= 2 && MI->getOperand(0).isReg() &&
265 MI->getOperand(1).isReg() && i < 2 &&
266 MI->getOperand(0).getReg() == MI->getOperand(1).getReg() &&
267 TII.isTwoAddrInstr(MI->getOpcode())) {
281268 static const TableEntry OpcodeTable[] = {
282 { X86::ADC32ri, X86::ADC32mi, makeMIInst },
283 { X86::ADC32ri8, X86::ADC32mi8, makeMIInst },
284 { X86::ADC32rr, X86::ADC32mr, makeMRInst },
285 { X86::ADD16ri, X86::ADD16mi, makeMIInst },
286 { X86::ADD16ri8, X86::ADD16mi8, makeMIInst },
287 { X86::ADD16rr, X86::ADD16mr, makeMRInst },
288 { X86::ADD32ri, X86::ADD32mi, makeMIInst },
289 { X86::ADD32ri8, X86::ADD32mi8, makeMIInst },
290 { X86::ADD32rr, X86::ADD32mr, makeMRInst },
291 { X86::ADD8ri, X86::ADD8mi, makeMIInst },
292 { X86::ADD8rr, X86::ADD8mr, makeMRInst },
293 { X86::AND16ri, X86::AND16mi, makeMIInst },
294 { X86::AND16ri8, X86::AND16mi8, makeMIInst },
295 { X86::AND16rr, X86::AND16mr, makeMRInst },
296 { X86::AND32ri, X86::AND32mi, makeMIInst },
297 { X86::AND32ri8, X86::AND32mi8, makeMIInst },
298 { X86::AND32rr, X86::AND32mr, makeMRInst },
299 { X86::AND8ri, X86::AND8mi, makeMIInst },
300 { X86::AND8rr, X86::AND8mr, makeMRInst },
301 { X86::DEC16r, X86::DEC16m, makeMInst },
302 { X86::DEC32r, X86::DEC32m, makeMInst },
303 { X86::DEC8r, X86::DEC8m, makeMInst },
304 { X86::DIV16r, X86::DIV16m, makeMInst },
305 { X86::DIV32r, X86::DIV32m, makeMInst },
306 { X86::DIV8r, X86::DIV8m, makeMInst },
307 { X86::FsMOVAPDrr, X86::MOVSDmr, makeMRInst },
308 { X86::FsMOVAPSrr, X86::MOVSSmr, makeMRInst },
309 { X86::IDIV16r, X86::IDIV16m, makeMInst },
310 { X86::IDIV32r, X86::IDIV32m, makeMInst },
311 { X86::IDIV8r, X86::IDIV8m, makeMInst },
312 { X86::IMUL16r, X86::IMUL16m, makeMInst },
313 { X86::IMUL32r, X86::IMUL32m, makeMInst },
314 { X86::IMUL8r, X86::IMUL8m, makeMInst },
315 { X86::INC16r, X86::INC16m, makeMInst },
316 { X86::INC32r, X86::INC32m, makeMInst },
317 { X86::INC8r, X86::INC8m, makeMInst },
318 { X86::MOV16r0, X86::MOV16mi, makeM0Inst },
319 { X86::MOV16ri, X86::MOV16mi, makeMIInst },
320 { X86::MOV16rr, X86::MOV16mr, makeMRInst },
321 { X86::MOV32r0, X86::MOV32mi, makeM0Inst },
322 { X86::MOV32ri, X86::MOV32mi, makeMIInst },
323 { X86::MOV32rr, X86::MOV32mr, makeMRInst },
324 { X86::MOV8r0, X86::MOV8mi, makeM0Inst },
325 { X86::MOV8ri, X86::MOV8mi, makeMIInst },
326 { X86::MOV8rr, X86::MOV8mr, makeMRInst },
327 { X86::MOVAPDrr, X86::MOVAPDmr, makeMRInst },
328 { X86::MOVAPSrr, X86::MOVAPSmr, makeMRInst },
329 { X86::MOVPDI2DIrr, X86::MOVPDI2DImr, makeMRInst },
330 { X86::MOVPS2SSrr, X86::MOVPS2SSmr, makeMRInst },
331 { X86::MOVSDrr, X86::MOVSDmr, makeMRInst },
332 { X86::MOVSSrr, X86::MOVSSmr, makeMRInst },
333 { X86::MOVUPDrr, X86::MOVUPDmr, makeMRInst },
334 { X86::MOVUPSrr, X86::MOVUPSmr, makeMRInst },
335 { X86::MUL16r, X86::MUL16m, makeMInst },
336 { X86::MUL32r, X86::MUL32m, makeMInst },
337 { X86::MUL8r, X86::MUL8m, makeMInst },
338 { X86::NEG16r, X86::NEG16m, makeMInst },
339 { X86::NEG32r, X86::NEG32m, makeMInst },
340 { X86::NEG8r, X86::NEG8m, makeMInst },
341 { X86::NOT16r, X86::NOT16m, makeMInst },
342 { X86::NOT32r, X86::NOT32m, makeMInst },
343 { X86::NOT8r, X86::NOT8m, makeMInst },
344 { X86::OR16ri, X86::OR16mi, makeMIInst },
345 { X86::OR16ri8, X86::OR16mi8, makeMIInst },
346 { X86::OR16rr, X86::OR16mr, makeMRInst },
347 { X86::OR32ri, X86::OR32mi, makeMIInst },
348 { X86::OR32ri8, X86::OR32mi8, makeMIInst },
349 { X86::OR32rr, X86::OR32mr, makeMRInst },
350 { X86::OR8ri, X86::OR8mi, makeMIInst },
351 { X86::OR8rr, X86::OR8mr, makeMRInst },
352 { X86::ROL16r1, X86::ROL16m1, makeMInst },
353 { X86::ROL16rCL, X86::ROL16mCL, makeMInst },
354 { X86::ROL16ri, X86::ROL16mi, makeMIInst },
355 { X86::ROL32r1, X86::ROL32m1, makeMInst },
356 { X86::ROL32rCL, X86::ROL32mCL, makeMInst },
357 { X86::ROL32ri, X86::ROL32mi, makeMIInst },
358 { X86::ROL8r1, X86::ROL8m1, makeMInst },
359 { X86::ROL8rCL, X86::ROL8mCL, makeMInst },
360 { X86::ROL8ri, X86::ROL8mi, makeMIInst },
361 { X86::ROR16r1, X86::ROR16m1, makeMInst },
362 { X86::ROR16rCL, X86::ROR16mCL, makeMInst },
363 { X86::ROR16ri, X86::ROR16mi, makeMIInst },
364 { X86::ROR32r1, X86::ROR32m1, makeMInst },
365 { X86::ROR32rCL, X86::ROR32mCL, makeMInst },
366 { X86::ROR32ri, X86::ROR32mi, makeMIInst },
367 { X86::ROR8r1, X86::ROR8m1, makeMInst },
368 { X86::ROR8rCL, X86::ROR8mCL, makeMInst },
369 { X86::ROR8ri, X86::ROR8mi, makeMIInst },
370 { X86::SAR16r1, X86::SAR16m1, makeMInst },
371 { X86::SAR16rCL, X86::SAR16mCL, makeMInst },
372 { X86::SAR16ri, X86::SAR16mi, makeMIInst },
373 { X86::SAR32r1, X86::SAR32m1, makeMInst },
374 { X86::SAR32rCL, X86::SAR32mCL, makeMInst },
375 { X86::SAR32ri, X86::SAR32mi, makeMIInst },
376 { X86::SAR8r1, X86::SAR8m1, makeMInst },
377 { X86::SAR8rCL, X86::SAR8mCL, makeMInst },
378 { X86::SAR8ri, X86::SAR8mi, makeMIInst },
379 { X86::SBB32ri, X86::SBB32mi, makeMIInst },
380 { X86::SBB32ri8, X86::SBB32mi8, makeMIInst },
381 { X86::SBB32rr, X86::SBB32mr, makeMRInst },
382 { X86::SETAEr, X86::SETAEm, makeMInst },
383 { X86::SETAr, X86::SETAm, makeMInst },
384 { X86::SETBEr, X86::SETBEm, makeMInst },
385 { X86::SETBr, X86::SETBm, makeMInst },
386 { X86::SETEr, X86::SETEm, makeMInst },
387 { X86::SETGEr, X86::SETGEm, makeMInst },
388 { X86::SETGr, X86::SETGm, makeMInst },
389 { X86::SETLEr, X86::SETLEm, makeMInst },
390 { X86::SETLr, X86::SETLm, makeMInst },
391 { X86::SETNEr, X86::SETNEm, makeMInst },
392 { X86::SETNPr, X86::SETNPm, makeMInst },
393 { X86::SETNSr, X86::SETNSm, makeMInst },
394 { X86::SETPr, X86::SETPm, makeMInst },
395 { X86::SETSr, X86::SETSm, makeMInst },
396 { X86::SHL16r1, X86::SHL16m1, makeMInst },
397 { X86::SHL16rCL, X86::SHL16mCL, makeMInst },
398 { X86::SHL16ri, X86::SHL16mi, makeMIInst },
399 { X86::SHL32r1, X86::SHL32m1, makeMInst },
400 { X86::SHL32rCL, X86::SHL32mCL, makeMInst },
401 { X86::SHL32ri, X86::SHL32mi, makeMIInst },
402 { X86::SHL8r1, X86::SHL8m1, makeMInst },
403 { X86::SHL8rCL, X86::SHL8mCL, makeMInst },
404 { X86::SHL8ri, X86::SHL8mi, makeMIInst },
405 { X86::SHLD16rrCL, X86::SHLD16mrCL, makeMRInst },
406 { X86::SHLD16rri8, X86::SHLD16mri8, makeMRIInst },
407 { X86::SHLD32rrCL, X86::SHLD32mrCL, makeMRInst },
408 { X86::SHLD32rri8, X86::SHLD32mri8, makeMRIInst },
409 { X86::SHR16r1, X86::SHR16m1, makeMInst },
410 { X86::SHR16rCL, X86::SHR16mCL, makeMInst },
411 { X86::SHR16ri, X86::SHR16mi, makeMIInst },
412 { X86::SHR32r1, X86::SHR32m1, makeMInst },
413 { X86::SHR32rCL, X86::SHR32mCL, makeMInst },
414 { X86::SHR32ri, X86::SHR32mi, makeMIInst },
415 { X86::SHR8r1, X86::SHR8m1, makeMInst },
416 { X86::SHR8rCL, X86::SHR8mCL, makeMInst },
417 { X86::SHR8ri, X86::SHR8mi, makeMIInst },
418 { X86::SHRD16rrCL, X86::SHRD16mrCL, makeMRInst },
419 { X86::SHRD16rri8, X86::SHRD16mri8, makeMRIInst },
420 { X86::SHRD32rrCL, X86::SHRD32mrCL, makeMRInst },
421 { X86::SHRD32rri8, X86::SHRD32mri8, makeMRIInst },
422 { X86::SUB16ri, X86::SUB16mi, makeMIInst },
423 { X86::SUB16ri8, X86::SUB16mi8, makeMIInst },
424 { X86::SUB16rr, X86::SUB16mr, makeMRInst },
425 { X86::SUB32ri, X86::SUB32mi, makeMIInst },
426 { X86::SUB32ri8, X86::SUB32mi8, makeMIInst },
427 { X86::SUB32rr, X86::SUB32mr, makeMRInst },
428 { X86::SUB8ri, X86::SUB8mi, makeMIInst },
429 { X86::SUB8rr, X86::SUB8mr, makeMRInst },
430 { X86::XCHG16rr, X86::XCHG16mr, makeMRInst },
431 { X86::XCHG32rr, X86::XCHG32mr, makeMRInst },
432 { X86::XCHG8rr, X86::XCHG8mr, makeMRInst },
433 { X86::XOR16ri, X86::XOR16mi, makeMIInst },
434 { X86::XOR16ri8, X86::XOR16mi8, makeMIInst },
435 { X86::XOR16rr, X86::XOR16mr, makeMRInst },
436 { X86::XOR32ri, X86::XOR32mi, makeMIInst },
437 { X86::XOR32ri8, X86::XOR32mi8, makeMIInst },
438 { X86::XOR32rr, X86::XOR32mr, makeMRInst },
439 { X86::XOR8ri, X86::XOR8mi, makeMIInst },
440 { X86::XOR8rr, X86::XOR8mr, makeMRInst }
269 { X86::ADC32ri, X86::ADC32mi },
270 { X86::ADC32ri8, X86::ADC32mi8 },
271 { X86::ADC32rr, X86::ADC32mr },
272 { X86::ADD16ri, X86::ADD16mi },
273 { X86::ADD16ri8, X86::ADD16mi8 },
274 { X86::ADD16rr, X86::ADD16mr },
275 { X86::ADD32ri, X86::ADD32mi },
276 { X86::ADD32ri8, X86::ADD32mi8 },
277 { X86::ADD32rr, X86::ADD32mr },
278 { X86::ADD8ri, X86::ADD8mi },
279 { X86::ADD8rr, X86::ADD8mr },
280 { X86::AND16ri, X86::AND16mi },
281 { X86::AND16ri8, X86::AND16mi8 },
282 { X86::AND16rr, X86::AND16mr },
283 { X86::AND32ri, X86::AND32mi },
284 { X86::AND32ri8, X86::AND32mi8 },
285 { X86::AND32rr, X86::AND32mr },
286 { X86::AND8ri, X86::AND8mi },
287 { X86::AND8rr, X86::AND8mr },
288 { X86::DEC16r, X86::DEC16m },
289 { X86::DEC32r, X86::DEC32m },
290 { X86::DEC8r, X86::DEC8m },
291 { X86::INC16r, X86::INC16m },
292 { X86::INC32r, X86::INC32m },
293 { X86::INC8r, X86::INC8m },
294 { X86::NEG16r, X86::NEG16m },
295 { X86::NEG32r, X86::NEG32m },
296 { X86::NEG8r, X86::NEG8m },
297 { X86::NOT16r, X86::NOT16m },
298 { X86::NOT32r, X86::NOT32m },
299 { X86::NOT8r, X86::NOT8m },
300 { X86::OR16ri, X86::OR16mi },
301 { X86::OR16ri8, X86::OR16mi8 },
302 { X86::OR16rr, X86::OR16mr },
303 { X86::OR32ri, X86::OR32mi },
304 { X86::OR32ri8, X86::OR32mi8 },
305 { X86::OR32rr, X86::OR32mr },
306 { X86::OR8ri, X86::OR8mi },
307 { X86::OR8rr, X86::OR8mr },
308 { X86::ROL16r1, X86::ROL16m1 },
309 { X86::ROL16rCL, X86::ROL16mCL },
310 { X86::ROL16ri, X86::ROL16mi },
311 { X86::ROL32r1, X86::ROL32m1 },
312 { X86::ROL32rCL, X86::ROL32mCL },
313 { X86::ROL32ri, X86::ROL32mi },
314 { X86::ROL8r1, X86::ROL8m1 },
315 { X86::ROL8rCL, X86::ROL8mCL },
316 { X86::ROL8ri, X86::ROL8mi },
317 { X86::ROR16r1, X86::ROR16m1 },
318 { X86::ROR16rCL, X86::ROR16mCL },
319 { X86::ROR16ri, X86::ROR16mi },
320 { X86::ROR32r1, X86::ROR32m1 },
321 { X86::ROR32rCL, X86::ROR32mCL },
322 { X86::ROR32ri, X86::ROR32mi },
323 { X86::ROR8r1, X86::ROR8m1 },
324 { X86::ROR8rCL, X86::ROR8mCL },
325 { X86::ROR8ri, X86::ROR8mi },
326 { X86::SAR16r1, X86::SAR16m1 },
327 { X86::SAR16rCL, X86::SAR16mCL },
328 { X86::SAR16ri, X86::SAR16mi },
329 { X86::SAR32r1, X86::SAR32m1 },
330 { X86::SAR32rCL, X86::SAR32mCL },
331 { X86::SAR32ri, X86::SAR32mi },
332 { X86::SAR8r1, X86::SAR8m1 },
333 { X86::SAR8rCL, X86::SAR8mCL },
334 { X86::SAR8ri, X86::SAR8mi },
335 { X86::SBB32ri, X86::SBB32mi },
336 { X86::SBB32ri8, X86::SBB32mi8 },
337 { X86::SBB32rr, X86::SBB32mr },
338 { X86::SHL16r1, X86::SHL16m1 },
339 { X86::SHL16rCL, X86::SHL16mCL },
340 { X86::SHL16ri, X86::SHL16mi },
341 { X86::SHL32r1, X86::SHL32m1 },
342 { X86::SHL32rCL, X86::SHL32mCL },
343 { X86::SHL32ri, X86::SHL32mi },
344 { X86::SHL8r1, X86::SHL8m1 },
345 { X86::SHL8rCL, X86::SHL8mCL },
346 { X86::SHL8ri, X86::SHL8mi },
347 { X86::SHLD16rrCL, X86::SHLD16mrCL },
348 { X86::SHLD16rri8, X86::SHLD16mri8 },
349 { X86::SHLD32rrCL, X86::SHLD32mrCL },
350 { X86::SHLD32rri8, X86::SHLD32mri8 },
351 { X86::SHR16r1, X86::SHR16m1 },
352 { X86::SHR16rCL, X86::SHR16mCL },
353 { X86::SHR16ri, X86::SHR16mi },
354 { X86::SHR32r1, X86::SHR32m1 },
355 { X86::SHR32rCL, X86::SHR32mCL },
356 { X86::SHR32ri, X86::SHR32mi },
357 { X86::SHR8r1, X86::SHR8m1 },
358 { X86::SHR8rCL, X86::SHR8mCL },
359 { X86::SHR8ri, X86::SHR8mi },
360 { X86::SHRD16rrCL, X86::SHRD16mrCL },
361 { X86::SHRD16rri8, X86::SHRD16mri8 },
362 { X86::SHRD32rrCL, X86::SHRD32mrCL },
363 { X86::SHRD32rri8, X86::SHRD32mri8 },
364 { X86::SUB16ri, X86::SUB16mi },
365 { X86::SUB16ri8, X86::SUB16mi8 },
366 { X86::SUB16rr, X86::SUB16mr },
367 { X86::SUB32ri, X86::SUB32mi },
368 { X86::SUB32ri8, X86::SUB32mi8 },
369 { X86::SUB32rr, X86::SUB32mr },
370 { X86::SUB8ri, X86::SUB8mi },
371 { X86::SUB8rr, X86::SUB8mr },
372 { X86::XOR16ri, X86::XOR16mi },
373 { X86::XOR16ri8, X86::XOR16mi8 },
374 { X86::XOR16rr, X86::XOR16mr },
375 { X86::XOR32ri, X86::XOR32mi },
376 { X86::XOR32ri8, X86::XOR32mi8 },
377 { X86::XOR32rr, X86::XOR32mr },
378 { X86::XOR8ri, X86::XOR8mi },
379 { X86::XOR8rr, X86::XOR8mr }
380 };
381 ASSERT_SORTED(OpcodeTable);
382 OpcodeTablePtr = OpcodeTable;
383 OpcodeTableSize = ARRAY_SIZE(OpcodeTable);
384 isTwoAddrFold = true;
385 } else if (i == 0) { // If operand 0
386 if (MI->getOpcode() == X86::MOV16r0)
387 return MakeM0Inst(X86::MOV16mi, FrameIndex, MI);
388 else if (MI->getOpcode() == X86::MOV32r0)
389 return MakeM0Inst(X86::MOV32mi, FrameIndex, MI);
390 else if (MI->getOpcode() == X86::MOV8r0)
391 return MakeM0Inst(X86::MOV8mi, FrameIndex, MI);
392
393 static const TableEntry OpcodeTable[] = {
394 { X86::CMP16ri, X86::CMP16mi },
395 { X86::CMP16ri8, X86::CMP16mi8 },
396 { X86::CMP32ri, X86::CMP32mi },
397 { X86::CMP32ri8, X86::CMP32mi8 },
398 { X86::CMP8ri, X86::CMP8mi },
399 { X86::DIV16r, X86::DIV16m },
400 { X86::DIV32r, X86::DIV32m },
401 { X86::DIV8r, X86::DIV8m },
402 { X86::FsMOVAPDrr, X86::MOVSDmr },
403 { X86::FsMOVAPSrr, X86::MOVSSmr },
404 { X86::IDIV16r, X86::IDIV16m },
405 { X86::IDIV32r, X86::IDIV32m },
406 { X86::IDIV8r, X86::IDIV8m },
407 { X86::IMUL16r, X86::IMUL16m },
408 { X86::IMUL32r, X86::IMUL32m },
409 { X86::IMUL8r, X86::IMUL8m },
410 { X86::MOV16ri, X86::MOV16mi },
411 { X86::MOV16rr, X86::MOV16mr },
412 { X86::MOV32ri, X86::MOV32mi },
413 { X86::MOV32rr, X86::MOV32mr },
414 { X86::MOV8ri, X86::MOV8mi },
415 { X86::MOV8rr, X86::MOV8mr },
416 { X86::MOVAPDrr, X86::MOVAPDmr },
417 { X86::MOVAPSrr, X86::MOVAPSmr },
418 { X86::MOVPDI2DIrr, X86::MOVPDI2DImr },
419 { X86::MOVPS2SSrr, X86::MOVPS2SSmr },
420 { X86::MOVSDrr, X86::MOVSDmr },
421 { X86::MOVSSrr, X86::MOVSSmr },
422 { X86::MOVUPDrr, X86::MOVUPDmr },
423 { X86::MOVUPSrr, X86::MOVUPSmr },
424 { X86::MUL16r, X86::MUL16m },
425 { X86::MUL32r, X86::MUL32m },
426 { X86::MUL8r, X86::MUL8m },
427 { X86::SETAEr, X86::SETAEm },
428 { X86::SETAr, X86::SETAm },
429 { X86::SETBEr, X86::SETBEm },
430 { X86::SETBr, X86::SETBm },
431 { X86::SETEr, X86::SETEm },
432 { X86::SETGEr, X86::SETGEm },
433 { X86::SETGr, X86::SETGm },
434 { X86::SETLEr, X86::SETLEm },
435 { X86::SETLr, X86::SETLm },
436 { X86::SETNEr, X86::SETNEm },
437 { X86::SETNPr, X86::SETNPm },
438 { X86::SETNSr, X86::SETNSm },
439 { X86::SETPr, X86::SETPm },
440 { X86::SETSr, X86::SETSm },
441 { X86::TEST16ri, X86::TEST16mi },
442 { X86::TEST32ri, X86::TEST32mi },
443 { X86::TEST8ri, X86::TEST8mi },
444 { X86::XCHG16rr, X86::XCHG16mr },
445 { X86::XCHG32rr, X86::XCHG32mr },
446 { X86::XCHG8rr, X86::XCHG8mr }
441447 };
442448 ASSERT_SORTED(OpcodeTable);
443449 OpcodeTablePtr = OpcodeTable;
444450 OpcodeTableSize = ARRAY_SIZE(OpcodeTable);
445451 } else if (i == 1) {
446452 static const TableEntry OpcodeTable[] = {
447 { X86::ADC32rr, X86::ADC32rm, makeRMInst },
448 { X86::ADD16rr, X86::ADD16rm, makeRMInst },
449 { X86::ADD32rr, X86::ADD32rm, makeRMInst },
450 { X86::ADD8rr, X86::ADD8rm, makeRMInst },
451 { X86::ADDPDrr, X86::ADDPDrm, makeRMInst },
452 { X86::ADDPSrr, X86::ADDPSrm, makeRMInst },
453 { X86::ADDSDrr, X86::ADDSDrm, makeRMInst },
454 { X86::ADDSSrr, X86::ADDSSrm, makeRMInst },
455 { X86::ADDSUBPDrr, X86::ADDSUBPDrm, makeRMInst },
456 { X86::ADDSUBPSrr, X86::ADDSUBPSrm, makeRMInst },
457 { X86::AND16rr, X86::AND16rm, makeRMInst },
458 { X86::AND32rr, X86::AND32rm, makeRMInst },
459 { X86::AND8rr, X86::AND8rm, makeRMInst },
460 { X86::ANDNPDrr, X86::ANDNPDrm, makeRMInst },
461 { X86::ANDNPSrr, X86::ANDNPSrm, makeRMInst },
462 { X86::ANDPDrr, X86::ANDPDrm, makeRMInst },
463 { X86::ANDPSrr, X86::ANDPSrm, makeRMInst },
464 { X86::CMOVA16rr, X86::CMOVA16rm, makeRMInst },
465 { X86::CMOVA32rr, X86::CMOVA32rm, makeRMInst },
466 { X86::CMOVAE16rr, X86::CMOVAE16rm, makeRMInst },
467 { X86::CMOVAE32rr, X86::CMOVAE32rm, makeRMInst },
468 { X86::CMOVB16rr, X86::CMOVB16rm, makeRMInst },
469 { X86::CMOVB32rr, X86::CMOVB32rm, makeRMInst },
470 { X86::CMOVBE16rr, X86::CMOVBE16rm, makeRMInst },
471 { X86::CMOVBE32rr, X86::CMOVBE32rm, makeRMInst },
472 { X86::CMOVE16rr, X86::CMOVE16rm, makeRMInst },
473 { X86::CMOVE32rr, X86::CMOVE32rm, makeRMInst },
474 { X86::CMOVG16rr, X86::CMOVG16rm, makeRMInst },
475 { X86::CMOVG32rr, X86::CMOVG32rm, makeRMInst },
476 { X86::CMOVGE16rr, X86::CMOVGE16rm, makeRMInst },
477 { X86::CMOVGE32rr, X86::CMOVGE32rm, makeRMInst },
478 { X86::CMOVL16rr, X86::CMOVL16rm, makeRMInst },
479 { X86::CMOVL32rr, X86::CMOVL32rm, makeRMInst },
480 { X86::CMOVLE16rr, X86::CMOVLE16rm, makeRMInst },
481 { X86::CMOVLE32rr, X86::CMOVLE32rm, makeRMInst },
482 { X86::CMOVNE16rr, X86::CMOVNE16rm, makeRMInst },
483 { X86::CMOVNE32rr, X86::CMOVNE32rm, makeRMInst },
484 { X86::CMOVNP16rr, X86::CMOVNP16rm, makeRMInst },
485 { X86::CMOVNP32rr, X86::CMOVNP32rm, makeRMInst },
486 { X86::CMOVNS16rr, X86::CMOVNS16rm, makeRMInst },
487 { X86::CMOVNS32rr, X86::CMOVNS32rm, makeRMInst },
488 { X86::CMOVP16rr, X86::CMOVP16rm, makeRMInst },
489 { X86::CMOVP32rr, X86::CMOVP32rm, makeRMInst },
490 { X86::CMOVS16rr, X86::CMOVS16rm, makeRMInst },
491 { X86::CMOVS32rr, X86::CMOVS32rm, makeRMInst },
492 { X86::CMP16ri, X86::CMP16mi, makeMIInst },
493 { X86::CMP16ri8, X86::CMP16mi8, makeMIInst },
494 { X86::CMP16rr, X86::CMP16rm, makeRMInst },
495 { X86::CMP32ri, X86::CMP32mi, makeMIInst },
496 { X86::CMP32ri8, X86::CMP32mi8, makeRMInst },
497 { X86::CMP32rr, X86::CMP32rm, makeRMInst },
498 { X86::CMP8ri, X86::CMP8mi, makeRMInst },
499 { X86::CMP8rr, X86::CMP8rm, makeRMInst },
500 { X86::CMPPDrri, X86::CMPPDrmi, makeRMIInst },
501 { X86::CMPPSrri, X86::CMPPSrmi, makeRMIInst },
502 { X86::CMPSDrr, X86::CMPSDrm, makeRMInst },
503 { X86::CMPSSrr, X86::CMPSSrm, makeRMInst },
504 { X86::CVTSD2SSrr, X86::CVTSD2SSrm, makeRMInst },
505 { X86::CVTSI2SDrr, X86::CVTSI2SDrm, makeRMInst },
506 { X86::CVTSI2SSrr, X86::CVTSI2SSrm, makeRMInst },
507 { X86::CVTSS2SDrr, X86::CVTSS2SDrm, makeRMInst },
508 { X86::CVTTSD2SIrr, X86::CVTTSD2SIrm, makeRMInst },
509 { X86::CVTTSS2SIrr, X86::CVTTSS2SIrm, makeRMInst },
510 { X86::DIVPDrr, X86::DIVPDrm, makeRMInst },
511 { X86::DIVPSrr, X86::DIVPSrm, makeRMInst },
512 { X86::DIVSDrr, X86::DIVSDrm, makeRMInst },
513 { X86::DIVSSrr, X86::DIVSSrm, makeRMInst },
514 { X86::FsMOVAPDrr, X86::MOVSDrm, makeRMInst },
515 { X86::FsMOVAPSrr, X86::MOVSSrm, makeRMInst },
516 { X86::HADDPDrr, X86::HADDPDrm, makeRMInst },
517 { X86::HADDPSrr, X86::HADDPSrm, makeRMInst },
518 { X86::HSUBPDrr, X86::HSUBPDrm, makeRMInst },
519 { X86::HSUBPSrr, X86::HSUBPSrm, makeRMInst },
520 { X86::IMUL16rr, X86::IMUL16rm, makeRMInst },
521 { X86::IMUL16rri, X86::IMUL16rmi, makeRMIInst },
522 { X86::IMUL16rri8, X86::IMUL16rmi8, makeRMIInst },
523 { X86::IMUL32rr, X86::IMUL32rm, makeRMInst },
524 { X86::IMUL32rri, X86::IMUL32rmi, makeRMIInst },
525 { X86::IMUL32rri8, X86::IMUL32rmi8, makeRMIInst },
526 { X86::Int_CMPSDrr, X86::Int_CMPSDrm, makeRMInst },
527 { X86::Int_CMPSSrr, X86::Int_CMPSSrm, makeRMInst },
528 { X86::Int_COMISDrr, X86::Int_COMISDrm, makeRMInst },
529 { X86::Int_COMISSrr, X86::Int_COMISSrm, makeRMInst },
530 { X86::Int_CVTDQ2PDrr, X86::Int_CVTDQ2PDrm, makeRMInst },
531 { X86::Int_CVTDQ2PSrr, X86::Int_CVTDQ2PSrm, makeRMInst },
532 { X86::Int_CVTPD2DQrr, X86::Int_CVTPD2DQrm, makeRMInst },
533 { X86::Int_CVTPD2PSrr, X86::Int_CVTPD2PSrm, makeRMInst },
534 { X86::Int_CVTPS2DQrr, X86::Int_CVTPS2DQrm, makeRMInst },
535 { X86::Int_CVTPS2PDrr, X86::Int_CVTPS2PDrm, makeRMInst },
536 { X86::Int_CVTSD2SIrr, X86::Int_CVTSD2SIrm, makeRMInst },
537 { X86::Int_CVTSD2SSrr, X86::Int_CVTSD2SSrm, makeRMInst },
538 { X86::Int_CVTSI2SDrr, X86::Int_CVTSI2SDrm, makeRMInst },
539 { X86::Int_CVTSI2SSrr, X86::Int_CVTSI2SSrm, makeRMInst },
540 { X86::Int_CVTSS2SDrr, X86::Int_CVTSS2SDrm, makeRMInst },
541 { X86::Int_CVTSS2SIrr, X86::Int_CVTSS2SIrm, makeRMInst },
542 { X86::Int_CVTTPD2DQrr, X86::Int_CVTTPD2DQrm, makeRMInst },
543 { X86::Int_CVTTPS2DQrr, X86::Int_CVTTPS2DQrm, makeRMInst },
544 { X86::Int_CVTTSD2SIrr, X86::Int_CVTTSD2SIrm, makeRMInst },
545 { X86::Int_CVTTSS2SIrr, X86::Int_CVTTSS2SIrm, makeRMInst },
546 { X86::Int_UCOMISDrr, X86::Int_UCOMISDrm, makeRMInst },
547 { X86::Int_UCOMISSrr, X86::Int_UCOMISSrm, makeRMInst },
548 { X86::MAXPDrr, X86::MAXPDrm, makeRMInst },
549 { X86::MAXPSrr, X86::MAXPSrm, makeRMInst },
550 { X86::MINPDrr, X86::MINPDrm, makeRMInst },
551 { X86::MINPSrr, X86::MINPSrm, makeRMInst },
552 { X86::MOV16rr, X86::MOV16rm, makeRMInst },
553 { X86::MOV32rr, X86::MOV32rm, makeRMInst },
554 { X86::MOV8rr, X86::MOV8rm, makeRMInst },
555 { X86::MOVAPDrr, X86::MOVAPDrm, makeRMInst },
556 { X86::MOVAPSrr, X86::MOVAPSrm, makeRMInst },
557 { X86::MOVDDUPrr, X86::MOVDDUPrm, makeRMInst },
558 { X86::MOVDI2PDIrr, X86::MOVDI2PDIrm, makeRMInst },
559 { X86::MOVQI2PQIrr, X86::MOVQI2PQIrm, makeRMInst },
560 { X86::MOVSD2PDrr, X86::MOVSD2PDrm, makeRMInst },
561 { X86::MOVSDrr, X86::MOVSDrm, makeRMInst },
562 { X86::MOVSHDUPrr, X86::MOVSHDUPrm, makeRMInst },
563 { X86::MOVSLDUPrr, X86::MOVSLDUPrm, makeRMInst },
564 { X86::MOVSS2PSrr, X86::MOVSS2PSrm, makeRMInst },
565 { X86::MOVSSrr, X86::MOVSSrm, makeRMInst },
566 { X86::MOVSX16rr8, X86::MOVSX16rm8, makeRMInst },
567 { X86::MOVSX32rr16, X86::MOVSX32rm16, makeRMInst },
568 { X86::MOVSX32rr8, X86::MOVSX32rm8, makeRMInst },
569 { X86::MOVUPDrr, X86::MOVUPDrm, makeRMInst },
570 { X86::MOVUPSrr, X86::MOVUPSrm, makeRMInst },
571 { X86::MOVZX16rr8, X86::MOVZX16rm8, makeRMInst },
572 { X86::MOVZX32rr16, X86::MOVZX32rm16, makeRMInst },
573 { X86::MOVZX32rr8, X86::MOVZX32rm8, makeRMInst },
574 { X86::MULPDrr, X86::MULPDrm, makeRMInst },
575 { X86::MULPSrr, X86::MULPSrm, makeRMInst },
576 { X86::MULSDrr, X86::MULSDrm, makeRMInst },
577 { X86::MULSSrr, X86::MULSSrm, makeRMInst },
578 { X86::OR16rr, X86::OR16rm, makeRMInst },
579 { X86::OR32rr, X86::OR32rm, makeRMInst },
580 { X86::OR8rr, X86::OR8rm, makeRMInst },
581 { X86::ORPDrr, X86::ORPDrm, makeRMInst },
582 { X86::ORPSrr, X86::ORPSrm, makeRMInst },
583 { X86::PACKSSDWrr, X86::PACKSSDWrm, makeRMInst },
584 { X86::PACKSSWBrr, X86::PACKSSWBrm, makeRMInst },
585 { X86::PACKUSWBrr, X86::PACKUSWBrm, makeRMInst },
586 { X86::PADDBrr, X86::PADDBrm, makeRMInst },
587 { X86::PADDDrr, X86::PADDDrm, makeRMInst },
588 { X86::PADDSBrr, X86::PADDSBrm, makeRMInst },
589 { X86::PADDSWrr, X86::PADDSWrm, makeRMInst },
590 { X86::PADDWrr, X86::PADDWrm, makeRMInst },
591 { X86::PANDNrr, X86::PANDNrm, makeRMInst },
592 { X86::PANDrr, X86::PANDrm, makeRMInst },
593 { X86::PAVGBrr, X86::PAVGBrm, makeRMInst },
594 { X86::PAVGWrr, X86::PAVGWrm, makeRMInst },
595 { X86::PCMPEQBrr, X86::PCMPEQBrm, makeRMInst },
596 { X86::PCMPEQDrr, X86::PCMPEQDrm, makeRMInst },
597 { X86::PCMPEQWrr, X86::PCMPEQWrm, makeRMInst },
598 { X86::PCMPGTBrr, X86::PCMPGTBrm, makeRMInst },
599 { X86::PCMPGTDrr, X86::PCMPGTDrm, makeRMInst },
600 { X86::PCMPGTWrr, X86::PCMPGTWrm, makeRMInst },
601 { X86::PINSRWrri, X86::PINSRWrmi, makeRMIInst },
602 { X86::PMADDWDrr, X86::PMADDWDrm, makeRMInst },
603 { X86::PMAXSWrr, X86::PMAXSWrm, makeRMInst },
604 { X86::PMAXUBrr, X86::PMAXUBrm, makeRMInst },
605 { X86::PMINSWrr, X86::PMINSWrm, makeRMInst },
606 { X86::PMINUBrr, X86::PMINUBrm, makeRMInst },
607 { X86::PMULHUWrr, X86::PMULHUWrm, makeRMInst },
608 { X86::PMULHWrr, X86::PMULHWrm, makeRMInst },
609 { X86::PMULLWrr, X86::PMULLWrm, makeRMInst },
610 { X86::PMULUDQrr, X86::PMULUDQrm, makeRMInst },
611 { X86::PORrr, X86::PORrm, makeRMInst },
612 { X86::PSADBWrr, X86::PSADBWrm, makeRMInst },
613 { X86::PSHUFDri, X86::PSHUFDmi, makeRMIInst },
614 { X86::PSHUFHWri, X86::PSHUFHWmi, makeRMIInst },
615 { X86::PSHUFLWri, X86::PSHUFLWmi, makeRMIInst },
616 { X86::PSLLDrr, X86::PSLLDrm, makeRMInst },
617 { X86::PSLLQrr, X86::PSLLQrm, makeRMInst },
618 { X86::PSLLWrr, X86::PSLLWrm, makeRMInst },
619 { X86::PSRADrr, X86::PSRADrm, makeRMInst },
620 { X86::PSRAWrr, X86::PSRAWrm, makeRMInst },
621 { X86::PSRLDrr, X86::PSRLDrm, makeRMInst },
622 { X86::PSRLQrr, X86::PSRLQrm, makeRMInst },
623 { X86::PSRLWrr, X86::PSRLWrm, makeRMInst },
624 { X86::PSUBBrr, X86::PSUBBrm, makeRMInst },
625 { X86::PSUBDrr, X86::PSUBDrm, makeRMInst },
626 { X86::PSUBSBrr, X86::PSUBSBrm, makeRMInst },
627 { X86::PSUBSWrr, X86::PSUBSWrm, makeRMInst },
628 { X86::PSUBWrr, X86::PSUBWrm, makeRMInst },
629 { X86::PUNPCKHBWrr, X86::PUNPCKHBWrm, makeRMInst },
630 { X86::PUNPCKHDQrr, X86::PUNPCKHDQrm, makeRMInst },
631 { X86::PUNPCKHQDQrr, X86::PUNPCKHQDQrm, makeRMInst },
632 { X86::PUNPCKHWDrr, X86::PUNPCKHWDrm, makeRMInst },
633 { X86::PUNPCKLBWrr, X86::PUNPCKLBWrm, makeRMInst },
634 { X86::PUNPCKLDQrr, X86::PUNPCKLDQrm, makeRMInst },
635 { X86::PUNPCKLQDQrr, X86::PUNPCKLQDQrm, makeRMInst },
636 { X86::PUNPCKLWDrr, X86::PUNPCKLWDrm, makeRMInst },
637 { X86::PXORrr, X86::PXORrm, makeRMInst },
638 { X86::RCPPSr, X86::RCPPSm, makeRMInst },
639 { X86::RSQRTPSr, X86::RSQRTPSm, makeRMInst },
640 { X86::SBB32rr, X86::SBB32rm, makeRMInst },
641 { X86::SHUFPDrri, X86::SHUFPDrmi, makeRMIInst },
642 { X86::SHUFPSrri, X86::SHUFPSrmi, makeRMIInst },
643 { X86::SQRTPDr, X86::SQRTPDm, makeRMInst },
644 { X86::SQRTPSr, X86::SQRTPSm, makeRMInst },
645 { X86::SQRTSDr, X86::SQRTSDm, makeRMInst },
646 { X86::SQRTSSr, X86::SQRTSSm, makeRMInst },
647 { X86::SUB16rr, X86::SUB16rm, makeRMInst },
648 { X86::SUB32rr, X86::SUB32rm, makeRMInst },
649 { X86::SUB8rr, X86::SUB8rm, makeRMInst },
650 { X86::SUBPDrr, X86::SUBPDrm, makeRMInst },
651 { X86::SUBPSrr, X86::SUBPSrm, makeRMInst },
652 { X86::SUBSDrr, X86::SUBSDrm, makeRMInst },
653 { X86::SUBSSrr, X86::SUBSSrm, makeRMInst },
654 { X86::TEST16ri, X86::TEST16mi, makeMIInst },
655 { X86::TEST16rr, X86::TEST16rm, makeRMInst },
656 { X86::TEST32ri, X86::TEST32mi, makeMIInst },
657 { X86::TEST32rr, X86::TEST32rm, makeRMInst },
658 { X86::TEST8ri, X86::TEST8mi, makeMIInst },
659 { X86::TEST8rr, X86::TEST8rm, makeRMInst },
660 { X86::UCOMISDrr, X86::UCOMISDrm, makeRMInst },
661 { X86::UCOMISSrr, X86::UCOMISSrm, makeRMInst },
662 { X86::UNPCKHPDrr, X86::UNPCKHPDrm, makeRMInst },
663 { X86::UNPCKHPSrr, X86::UNPCKHPSrm, makeRMInst },
664 { X86::UNPCKLPDrr, X86::UNPCKLPDrm, makeRMInst },
665 { X86::UNPCKLPSrr, X86::UNPCKLPSrm, makeRMInst },
666 { X86::XCHG16rr, X86::XCHG16rm, makeRMInst },
667 { X86::XCHG32rr, X86::XCHG32rm, makeRMInst },
668 { X86::XCHG8rr, X86::XCHG8rm, makeRMInst },
669 { X86::XOR16rr, X86::XOR16rm, makeRMInst },
670 { X86::XOR32rr, X86::XOR32rm, makeRMInst },
671 { X86::XOR8rr, X86::XOR8rm, makeRMInst },
672 { X86::XORPDrr, X86::XORPDrm, makeRMInst },
673 { X86::XORPSrr, X86::XORPSrm, makeRMInst }
453 { X86::CMP16rr, X86::CMP16rm },
454 { X86::CMP32rr, X86::CMP32rm },
455 { X86::CMP8rr, X86::CMP8rm },
456 { X86::CMPPDrri, X86::CMPPDrmi },
457 { X86::CMPPSrri, X86::CMPPSrmi },
458 { X86::CMPSDrr, X86::CMPSDrm },
459 { X86::CMPSSrr, X86::CMPSSrm },
460 { X86::CVTSD2SSrr, X86::CVTSD2SSrm },
461 { X86::CVTSI2SDrr, X86::CVTSI2SDrm },
462 { X86::CVTSI2SSrr, X86::CVTSI2SSrm },
463 { X86::CVTSS2SDrr, X86::CVTSS2SDrm },
464 { X86::CVTTSD2SIrr, X86::CVTTSD2SIrm },
465 { X86::CVTTSS2SIrr, X86::CVTTSS2SIrm },
466 { X86::FsMOVAPDrr, X86::MOVSDrm },
467 { X86::FsMOVAPSrr, X86::MOVSSrm },
468 { X86::IMUL16rri, X86::IMUL16rmi },
469 { X86::IMUL16rri8, X86::IMUL16rmi8 },
470 { X86::IMUL32rri, X86::IMUL32rmi },
471 { X86::IMUL32rri8, X86::IMUL32rmi8 },
472 { X86::Int_CMPSDrr, X86::Int_CMPSDrm },
473 { X86::Int_CMPSSrr, X86::Int_CMPSSrm },
474 { X86::Int_COMISDrr, X86::Int_COMISDrm },
475 { X86::Int_COMISSrr, X86::Int_COMISSrm },
476 { X86::Int_CVTDQ2PDrr, X86::Int_CVTDQ2PDrm },
477 { X86::Int_CVTDQ2PSrr, X86::Int_CVTDQ2PSrm },
478 { X86::Int_CVTPD2DQrr, X86::Int_CVTPD2DQrm },
479 { X86::Int_CVTPD2PSrr, X86::Int_CVTPD2PSrm },
480 { X86::Int_CVTPS2DQrr, X86::Int_CVTPS2DQrm },
481 { X86::Int_CVTPS2PDrr, X86::Int_CVTPS2PDrm },
482 { X86::Int_CVTSD2SIrr, X86::Int_CVTSD2SIrm },
483 { X86::Int_CVTSD2SSrr, X86::Int_CVTSD2SSrm },
484 { X86::Int_CVTSI2SDrr, X86::Int_CVTSI2SDrm },
485 { X86::Int_CVTSI2SSrr, X86::Int_CVTSI2SSrm },
486 { X86::Int_CVTSS2SDrr, X86::Int_CVTSS2SDrm },
487 { X86::Int_CVTSS2SIrr, X86::Int_CVTSS2SIrm },
488 { X86::Int_CVTTPD2DQrr, X86::Int_CVTTPD2DQrm },
489 { X86::Int_CVTTPS2DQrr, X86::Int_CVTTPS2DQrm },
490 { X86::Int_CVTTSD2SIrr, X86::Int_CVTTSD2SIrm },
491 { X86::Int_CVTTSS2SIrr, X86::Int_CVTTSS2SIrm },
492 { X86::Int_UCOMISDrr, X86::Int_UCOMISDrm },
493 { X86::Int_UCOMISSrr, X86::Int_UCOMISSrm },
494 { X86::MOV16rr, X86::MOV16rm },
495 { X86::MOV32rr, X86::MOV32rm },
496 { X86::MOV8rr, X86::MOV8rm },
497 { X86::MOVAPDrr, X86::MOVAPDrm },
498 { X86::MOVAPSrr, X86::MOVAPSrm },
499 { X86::MOVDDUPrr, X86::MOVDDUPrm },
500 { X86::MOVDI2PDIrr, X86::MOVDI2PDIrm },
501 { X86::MOVQI2PQIrr, X86::MOVQI2PQIrm },
502 { X86::MOVSD2PDrr, X86::MOVSD2PDrm },
503 { X86::MOVSDrr, X86::MOVSDrm },
504 { X86::MOVSHDUPrr, X86::MOVSHDUPrm },
505 { X86::MOVSLDUPrr, X86::MOVSLDUPrm },
506 { X86::MOVSS2PSrr, X86::MOVSS2PSrm },
507 { X86::MOVSSrr, X86::MOVSSrm },
508 { X86::MOVSX16rr8, X86::MOVSX16rm8 },
509 { X86::MOVSX32rr16, X86::MOVSX32rm16 },
510 { X86::MOVSX32rr8, X86::MOVSX32rm8 },
511 { X86::MOVUPDrr, X86::MOVUPDrm },
512 { X86::MOVUPSrr, X86::MOVUPSrm },
513 { X86::MOVZX16rr8, X86::MOVZX16rm8 },
514 { X86::MOVZX32rr16, X86::MOVZX32rm16 },
515 { X86::MOVZX32rr8, X86::MOVZX32rm8 },
516 { X86::PSHUFDri, X86::PSHUFDmi },
517 { X86::PSHUFHWri, X86::PSHUFHWmi },
518 { X86::PSHUFLWri, X86::PSHUFLWmi },
519 { X86::TEST16rr, X86::TEST16rm },
520 { X86::TEST32rr, X86::TEST32rm },
521 { X86::TEST8rr, X86::TEST8rm },
522 { X86::UCOMISDrr, X86::UCOMISDrm },
523 { X86::UCOMISSrr, X86::UCOMISSrm },
524 { X86::XCHG16rr, X86::XCHG16rm },
525 { X86::XCHG32rr, X86::XCHG32rm },
526 { X86::XCHG8rr, X86::XCHG8rm }
674527 };
675528 ASSERT_SORTED(OpcodeTable);
676529 OpcodeTablePtr = OpcodeTable;
677530 OpcodeTableSize = ARRAY_SIZE(OpcodeTable);
531 } else if (i == 2) {
532 static const TableEntry OpcodeTable[] = {
533 { X86::ADC32rr, X86::ADC32rm },
534 { X86::ADD16rr, X86::ADD16rm },
535 { X86::ADD32rr, X86::ADD32rm },
536 { X86::ADD8rr, X86::ADD8rm },
537 { X86::ADDPDrr, X86::ADDPDrm },
538 { X86::ADDPSrr, X86::ADDPSrm },
539 { X86::ADDSDrr, X86::ADDSDrm },
540 { X86::ADDSSrr, X86::ADDSSrm },
541 { X86::ADDSUBPDrr, X86::ADDSUBPDrm },
542 { X86::ADDSUBPSrr, X86::ADDSUBPSrm },
543 { X86::AND16rr, X86::AND16rm },
544 { X86::AND32rr, X86::AND32rm },
545 { X86::AND8rr, X86::AND8rm },
546 { X86::ANDNPDrr, X86::ANDNPDrm },
547 { X86::ANDNPSrr, X86::ANDNPSrm },
548 { X86::ANDPDrr, X86::ANDPDrm },
549 { X86::ANDPSrr, X86::ANDPSrm },
550 { X86::CMOVA16rr, X86::CMOVA16rm },
551 { X86::CMOVA32rr, X86::CMOVA32rm },
552 { X86::CMOVAE16rr, X86::CMOVAE16rm },
553 { X86::CMOVAE32rr, X86::CMOVAE32rm },
554 { X86::CMOVB16rr, X86::CMOVB16rm },
555 { X86::CMOVB32rr, X86::CMOVB32rm },
556 { X86::CMOVBE16rr, X86::CMOVBE16rm },
557 { X86::CMOVBE32rr, X86::CMOVBE32rm },
558 { X86::CMOVE16rr, X86::CMOVE16rm },
559 { X86::CMOVE32rr, X86::CMOVE32rm },
560 { X86::CMOVG16rr, X86::CMOVG16rm },
561 { X86::CMOVG32rr, X86::CMOVG32rm },
562 { X86::CMOVGE16rr, X86::CMOVGE16rm },
563 { X86::CMOVGE32rr, X86::CMOVGE32rm },
564 { X86::CMOVL16rr, X86::CMOVL16rm },
565 { X86::CMOVL32rr, X86::CMOVL32rm },
566 { X86::CMOVLE16rr, X86::CMOVLE16rm },
567 { X86::CMOVLE32rr, X86::CMOVLE32rm },
568 { X86::CMOVNE16rr, X86::CMOVNE16rm },
569 { X86::CMOVNE32rr, X86::CMOVNE32rm },
570 { X86::CMOVNP16rr, X86::CMOVNP16rm },
571 { X86::CMOVNP32rr, X86::CMOVNP32rm },
572 { X86::CMOVNS16rr, X86::CMOVNS16rm },
573 { X86::CMOVNS32rr, X86::CMOVNS32rm },
574 { X86::CMOVP16rr, X86::CMOVP16rm },
575 { X86::CMOVP32rr, X86::CMOVP32rm },
576 { X86::CMOVS16rr, X86::CMOVS16rm },
577 { X86::CMOVS32rr, X86::CMOVS32rm },
578 { X86::DIVPDrr, X86::DIVPDrm },
579 { X86::DIVPSrr, X86::DIVPSrm },
580 { X86::DIVSDrr, X86::DIVSDrm },
581 { X86::DIVSSrr, X86::DIVSSrm },
582 { X86::HADDPDrr, X86::HADDPDrm },
583 { X86::HADDPSrr, X86::HADDPSrm },
584 { X86::HSUBPDrr, X86::HSUBPDrm },
585 { X86::HSUBPSrr, X86::HSUBPSrm },
586 { X86::IMUL16rr, X86::IMUL16rm },
587 { X86::IMUL32rr, X86::IMUL32rm },
588 { X86::MAXPDrr, X86::MAXPDrm },
589 { X86::MAXPSrr, X86::MAXPSrm },
590 { X86::MINPDrr, X86::MINPDrm },
591 { X86::MINPSrr, X86::MINPSrm },
592 { X86::MULPDrr, X86::MULPDrm },
593 { X86::MULPSrr, X86::MULPSrm },
594 { X86::MULSDrr, X86::MULSDrm },
595 { X86::MULSSrr, X86::MULSSrm },
596 { X86::OR16rr, X86::OR16rm },
597 { X86::OR32rr, X86::OR32rm },
598 { X86::OR8rr, X86::OR8rm },
599 { X86::ORPDrr, X86::ORPDrm },
600 { X86::ORPSrr, X86::ORPSrm },
601 { X86::PACKSSDWrr, X86::PACKSSDWrm },
602 { X86::PACKSSWBrr, X86::PACKSSWBrm },
603 { X86::PACKUSWBrr, X86::PACKUSWBrm },
604 { X86::PADDBrr, X86::PADDBrm },
605 { X86::PADDDrr, X86::PADDDrm },
606 { X86::PADDSBrr, X86::PADDSBrm },
607 { X86::PADDSWrr, X86::PADDSWrm },
608 { X86::PADDWrr, X86::PADDWrm },
609 { X86::PANDNrr, X86::PANDNrm },
610 { X86::PANDrr, X86::PANDrm },
611 { X86::PAVGBrr, X86::PAVGBrm },
612 { X86::PAVGWrr, X86::PAVGWrm },
613 { X86::PCMPEQBrr, X86::PCMPEQBrm },
614 { X86::PCMPEQDrr, X86::PCMPEQDrm },
615 { X86::PCMPEQWrr, X86::PCMPEQWrm },
616 { X86::PCMPGTBrr, X86::PCMPGTBrm },
617 { X86::PCMPGTDrr, X86::PCMPGTDrm },
618 { X86::PCMPGTWrr, X86::PCMPGTWrm },
619 { X86::PINSRWrri, X86::PINSRWrmi },
620 { X86::PMADDWDrr, X86::PMADDWDrm },
621 { X86::PMAXSWrr, X86::PMAXSWrm },
622 { X86::PMAXUBrr, X86::PMAXUBrm },
623 { X86::PMINSWrr, X86::PMINSWrm },
624 { X86::PMINUBrr, X86::PMINUBrm },
625 { X86::PMULHUWrr, X86::PMULHUWrm },
626 { X86::PMULHWrr, X86::PMULHWrm },
627 { X86::PMULLWrr, X86::PMULLWrm },
628 { X86::PMULUDQrr, X86::PMULUDQrm },
629 { X86::PORrr, X86::PORrm },
630 { X86::PSADBWrr, X86::PSADBWrm },
631 { X86::PSLLDrr, X86::PSLLDrm },
632 { X86::PSLLQrr, X86::PSLLQrm },
633 { X86::PSLLWrr, X86::PSLLWrm },
634 { X86::PSRADrr, X86::PSRADrm },
635 { X86::PSRAWrr, X86::PSRAWrm },
636 { X86::PSRLDrr, X86::PSRLDrm },
637 { X86::PSRLQrr, X86::PSRLQrm },
638 { X86::PSRLWrr, X86::PSRLWrm },
639 { X86::PSUBBrr, X86::PSUBBrm },
640 { X86::PSUBDrr, X86::PSUBDrm },
641 { X86::PSUBSBrr, X86::PSUBSBrm },
642 { X86::PSUBSWrr, X86::PSUBSWrm },
643 { X86::PSUBWrr, X86::PSUBWrm },
644 { X86::PUNPCKHBWrr, X86::PUNPCKHBWrm },
645 { X86::PUNPCKHDQrr, X86::PUNPCKHDQrm },
646 { X86::PUNPCKHQDQrr, X86::PUNPCKHQDQrm },
647 { X86::PUNPCKHWDrr, X86::PUNPCKHWDrm },
648 { X86::PUNPCKLBWrr, X86::PUNPCKLBWrm },
649 { X86::PUNPCKLDQrr, X86::PUNPCKLDQrm },
650 { X86::PUNPCKLQDQrr, X86::PUNPCKLQDQrm },
651 { X86::PUNPCKLWDrr, X86::PUNPCKLWDrm },
652 { X86::PXORrr, X86::PXORrm },
653 { X86::RCPPSr, X86::RCPPSm },
654 { X86::RSQRTPSr, X86::RSQRTPSm },
655 { X86::SBB32rr, X86::SBB32rm },
656 { X86::SHUFPDrri, X86::SHUFPDrmi },
657 { X86::SHUFPSrri, X86::SHUFPSrmi },
658 { X86::SQRTPDr, X86::SQRTPDm },
659 { X86::SQRTPSr, X86::SQRTPSm },
660 { X86::SQRTSDr, X86::SQRTSDm },
661 { X86::SQRTSSr, X86::SQRTSSm },
662 { X86::SUB16rr, X86::SUB16rm },
663 { X86::SUB32rr, X86::SUB32rm },
664 { X86::SUB8rr, X86::SUB8rm },
665 { X86::SUBPDrr, X86::SUBPDrm },
666 { X86::SUBPSrr, X86::SUBPSrm },
667 { X86::SUBSDrr, X86::SUBSDrm },
668 { X86::SUBSSrr, X86::SUBSSrm },
669 { X86::UNPCKHPDrr, X86::UNPCKHPDrm },
670 { X86::UNPCKHPSrr, X86::UNPCKHPSrm },
671 { X86::UNPCKLPDrr, X86::UNPCKLPDrm },
672 { X86::UNPCKLPSrr, X86::UNPCKLPSrm },
673 { X86::XOR16rr, X86::XOR16rm },
674 { X86::XOR32rr, X86::XOR32rm },
675 { X86::XOR8rr, X86::XOR8rm },
676 { X86::XORPDrr, X86::XORPDrm },
677 { X86::XORPSrr, X86::XORPSrm }
678 };
679 ASSERT_SORTED(OpcodeTable);
680 OpcodeTablePtr = OpcodeTable;
681 OpcodeTableSize = ARRAY_SIZE(OpcodeTable);
678682 }
679683
680 // If table selected
684 // If table selected...
681685 if (OpcodeTablePtr) {
682 // Opcode to fuse
686 // Find the Opcode to fuse
683687 unsigned fromOpcode = MI->getOpcode();
684688 // Lookup fromOpcode in table
685 const TableEntry *entry = TableLookup(OpcodeTablePtr, OpcodeTableSize,
686 fromOpcode);
687
688 // If opcode found in table
689 if (entry) {
690 // Fused opcode
691 unsigned toOpcode = entry->to;
689 if (const TableEntry *Entry = TableLookup(OpcodeTablePtr, OpcodeTableSize,
690 fromOpcode)) {
691 if (isTwoAddrFold)
692 return FuseTwoAddrInst(Entry->to, FrameIndex, MI);
692693
693 // Make new instruction
694 switch (entry->make) {
695 case makeM0Inst: return MakeM0Inst(toOpcode, FrameIndex, MI);
696 case makeMIInst: return MakeMIInst(toOpcode, FrameIndex, MI);
697 case makeMInst: return MakeMInst(toOpcode, FrameIndex, MI);
698 case makeMRIInst: return MakeMRIInst(toOpcode, FrameIndex, MI);
699 case makeMRInst: return MakeMRInst(toOpcode, FrameIndex, MI);
700 case makeRMIInst: return MakeRMIInst(toOpcode, FrameIndex, MI);
701 case makeRMInst: return MakeRMInst(toOpcode, FrameIndex, MI);
702 default: assert(0 && "Unknown instruction make");
703 }
694 return FuseInst(Entry->to, i, FrameIndex, MI);
704695 }
705696 }
706697
760751
761752 MachineInstr *New = 0;
762753 if (Old->getOpcode() == X86::ADJCALLSTACKDOWN) {
763 New=BuildMI(X86::SUB32ri, 1, X86::ESP, MachineOperand::UseAndDef)
764 .addImm(Amount);
754 New=BuildMI(X86::SUB32ri, 2, X86::ESP).addReg(X86::ESP).addImm(Amount);
765755 } else {
766756 assert(Old->getOpcode() == X86::ADJCALLSTACKUP);
767757 // factor out the amount the callee already popped.
769759 Amount -= CalleeAmt;
770760 if (Amount) {
771761 unsigned Opc = Amount < 128 ? X86::ADD32ri8 : X86::ADD32ri;
772 New = BuildMI(Opc, 1, X86::ESP,
773 MachineOperand::UseAndDef).addImm(Amount);
762 New = BuildMI(Opc, 2, X86::ESP).addReg(X86::ESP).addImm(Amount);
774763 }
775764 }
776765
784773 if (unsigned CalleeAmt = I->getOperand(1).getImmedValue()) {
785774 unsigned Opc = CalleeAmt < 128 ? X86::SUB32ri8 : X86::SUB32ri;
786775 MachineInstr *New =
787 BuildMI(Opc, 1, X86::ESP,
788 MachineOperand::UseAndDef).addImm(CalleeAmt);
776 BuildMI(Opc, 1, X86::ESP).addReg(X86::ESP).addImm(CalleeAmt);
789777 MBB.insert(I, New);
790778 }
791779 }
869857 MBB.insert(MBBI, MI);
870858 } else {
871859 unsigned Opc = NumBytes < 128 ? X86::SUB32ri8 : X86::SUB32ri;
872 MI = BuildMI(Opc, 1, X86::ESP,MachineOperand::UseAndDef).addImm(NumBytes);
860 MI = BuildMI(Opc, 2, X86::ESP).addReg(X86::ESP).addImm(NumBytes);
873861 MBB.insert(MBBI, MI);
874862 }
875863 }
896884 // If it's main() on Cygwin\Mingw32 we should align stack as well
897885 if (Fn->hasExternalLinkage() && Fn->getName() == "main" &&
898886 Subtarget->TargetType == X86Subtarget::isCygwin) {
899 MI = BuildMI(X86::AND32ri, 2, X86::ESP).addImm(-Align);
887 MI = BuildMI(X86::AND32ri, 2, X86::ESP).addReg(X86::ESP).addImm(-Align);
900888 MBB.insert(MBBI, MI);
901889
902890 // Probe the stack
928916 int EBPOffset = MFI->getObjectOffset(MFI->getObjectIndexEnd()-1)+4;
929917
930918 // mov ESP, EBP
931 BuildMI(MBB, MBBI, X86::MOV32rr, 1,X86::ESP).addReg(X86::EBP);
919 BuildMI(MBB, MBBI, X86::MOV32rr, 1, X86::ESP).addReg(X86::EBP);
932920
933921 // pop EBP
934922 BuildMI(MBB, MBBI, X86::POP32r, 0, X86::EBP);
959947
960948 if (NumBytes > 0) {
961949 unsigned Opc = NumBytes < 128 ? X86::ADD32ri8 : X86::ADD32ri;
962 BuildMI(MBB, MBBI, Opc, 2)
963 .addReg(X86::ESP, MachineOperand::UseAndDef).addImm(NumBytes);
950 BuildMI(MBB, MBBI, Opc, 2, X86::ESP).addReg(X86::ESP).addImm(NumBytes);
964951 } else if ((int)NumBytes < 0) {
965952 unsigned Opc = -NumBytes < 128 ? X86::SUB32ri8 : X86::SUB32ri;
966 BuildMI(MBB, MBBI, Opc, 2)
967 .addReg(X86::ESP, MachineOperand::UseAndDef).addImm(-NumBytes);
953 BuildMI(MBB, MBBI, Opc, 2, X86::ESP).addReg(X86::ESP).addImm(-NumBytes);
968954 }
969955 }
970956 }
1414 #define X86REGISTERINFO_H
1515
1616 #include "llvm/Target/MRegisterInfo.h"
17
18 class llvm::Type;
19
2017 #include "X86GenRegisterInfo.h.inc"
2118
2219 namespace llvm {
20 class Type;
21 class TargetInstrInfo;
2322
2423 struct X86RegisterInfo : public X86GenRegisterInfo {
25 X86RegisterInfo();
24 const TargetInstrInfo &TII;
25 X86RegisterInfo(const TargetInstrInfo &tii);
2626
2727 /// Code Generation virtual methods...
2828 void storeRegToStackSlot(MachineBasicBlock &MBB,
210210 unsigned OpNo = CGI.getOperandNamed(VarName);
211211 CodeGenInstruction::OperandInfo OpInfo = CGI.OperandList[OpNo];
212212
213 // If this is a two-address instruction and we are not accessing the
214 // 0th operand, remove an operand.
213 // If this is a two-address instruction, verify the second operand isn't
214 // used.
215215 unsigned MIOp = OpInfo.MIOperandNo;
216 if (CGI.isTwoAddress && MIOp != 0) {
217 if (MIOp == 1)
218 throw "Should refer to operand #0 instead of #1 for two-address"
219 " instruction '" + CGI.TheDef->getName() + "'!";
220 --MIOp;
221 }
216 if (CGI.isTwoAddress && MIOp == 1)
217 throw "Should refer to operand #0 instead of #1 for two-address"
218 " instruction '" + CGI.TheDef->getName() + "'!";
222219
223220 if (CurVariant == Variant || CurVariant == ~0U)
224221 Operands.push_back(AsmWriterOperand(OpInfo.PrinterMethodName, MIOp,