v8 3.14.5 (node 0.10.28)
V8 is Google's open source JavaScript engine
assembler-x64.cc
1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are
4 // met:
5 //
6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided
11 // with the distribution.
12 // * Neither the name of Google Inc. nor the names of its
13 // contributors may be used to endorse or promote products derived
14 // from this software without specific prior written permission.
15 //
16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27 
28 #include "v8.h"
29 
30 #if defined(V8_TARGET_ARCH_X64)
31 
32 #include "macro-assembler.h"
33 #include "serialize.h"
34 
35 namespace v8 {
36 namespace internal {
37 
38 // -----------------------------------------------------------------------------
39 // Implementation of CpuFeatures
40 
41 
42 #ifdef DEBUG
43 bool CpuFeatures::initialized_ = false;
44 #endif
45 uint64_t CpuFeatures::supported_ = CpuFeatures::kDefaultCpuFeatures;
46 uint64_t CpuFeatures::found_by_runtime_probing_ = 0;
47 
48 
49 void CpuFeatures::Probe() {
50  ASSERT(supported_ == CpuFeatures::kDefaultCpuFeatures);
51 #ifdef DEBUG
52  initialized_ = true;
53 #endif
54  supported_ = kDefaultCpuFeatures;
55  if (Serializer::enabled()) {
56  supported_ |= OS::CpuFeaturesImpliedByPlatform();
57  return; // No features if we might serialize.
58  }
59 
60  const int kBufferSize = 4 * KB;
61  VirtualMemory* memory = new VirtualMemory(kBufferSize);
62  if (!memory->IsReserved()) {
63  delete memory;
64  return;
65  }
66  ASSERT(memory->size() >= static_cast<size_t>(kBufferSize));
67  if (!memory->Commit(memory->address(), kBufferSize, true/*executable*/)) {
68  delete memory;
69  return;
70  }
71 
72  Assembler assm(NULL, memory->address(), kBufferSize);
73  Label cpuid, done;
74 #define __ assm.
75  // Save old rsp, since we are going to modify the stack.
76  __ push(rbp);
77  __ pushfq();
78  __ push(rdi);
79  __ push(rcx);
80  __ push(rbx);
81  __ movq(rbp, rsp);
82 
83  // If we can modify bit 21 of the EFLAGS register, then CPUID is supported.
84  __ pushfq();
85  __ pop(rax);
86  __ movq(rdx, rax);
87  __ xor_(rax, Immediate(0x200000)); // Flip bit 21.
88  __ push(rax);
89  __ popfq();
90  __ pushfq();
91  __ pop(rax);
92  __ xor_(rax, rdx); // Different if CPUID is supported.
93  __ j(not_zero, &cpuid);
94 
95  // CPUID not supported. Clear the supported features in rax.
96  __ xor_(rax, rax);
97  __ jmp(&done);
98 
99  // Invoke CPUID with 1 in eax to get feature information in
100  // ecx:edx. Temporarily enable CPUID support because we know it's
101  // safe here.
102  __ bind(&cpuid);
103  __ movl(rax, Immediate(1));
104  supported_ = kDefaultCpuFeatures | (1 << CPUID);
105  { Scope fscope(CPUID);
106  __ cpuid();
107  // Move the result from ecx:edx to rdi.
108  __ movl(rdi, rdx); // Zero-extended to 64 bits.
109  __ shl(rcx, Immediate(32));
110  __ or_(rdi, rcx);
111 
112  // Get the SAHF supported flag from CPUID(0x80000001).
113  __ movq(rax, 0x80000001, RelocInfo::NONE);
114  __ cpuid();
115  }
116  supported_ = kDefaultCpuFeatures;
117 
118  // Put the CPU flags in rax.
119  // rax = (rcx & 1) | (rdi & ~1) | (1 << CPUID).
120  __ movl(rax, Immediate(1));
121  __ and_(rcx, rax); // Bit 0 is set if SAHF instruction supported.
122  __ not_(rax);
123  __ and_(rax, rdi);
124  __ or_(rax, rcx);
125  __ or_(rax, Immediate(1 << CPUID));
126 
127  // Done.
128  __ bind(&done);
129  __ movq(rsp, rbp);
130  __ pop(rbx);
131  __ pop(rcx);
132  __ pop(rdi);
133  __ popfq();
134  __ pop(rbp);
135  __ ret(0);
136 #undef __
137 
138  typedef uint64_t (*F0)();
139  F0 probe = FUNCTION_CAST<F0>(reinterpret_cast<Address>(memory->address()));
140  supported_ = probe();
141  found_by_runtime_probing_ = supported_;
142  found_by_runtime_probing_ &= ~kDefaultCpuFeatures;
143  uint64_t os_guarantees = OS::CpuFeaturesImpliedByPlatform();
144  supported_ |= os_guarantees;
145  found_by_runtime_probing_ &= ~os_guarantees;
146  // SSE2 and CMOV must be available on an X64 CPU.
147  ASSERT(IsSupported(SSE2));
148  ASSERT(IsSupported(CMOV));
150 
151  delete memory;
152 }
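// Illustrative note on the returned mask, assuming the standard CPUID(1)
// bit assignments: EDX lands in bits 0..31 and ECX in bits 32..63, with
// bit 0 reused for the SAHF flag from CPUID(0x80000001) and one extra bit
// set for CPUID itself. SSE2 is EDX bit 26 and CMOV is EDX bit 15, so
// IsSupported(SSE2) simply tests bit 26 of supported_.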
153 
154 
155 // -----------------------------------------------------------------------------
156 // Implementation of RelocInfo
157 
158 // Patch the code at the current PC with a call to the target address.
159 // Additional guard int3 instructions can be added if required.
160 void RelocInfo::PatchCodeWithCall(Address target, int guard_bytes) {
161  // Loading a register with a 64-bit immediate and calling through that
162  // register takes 13 bytes; an int3 takes one byte.
163  static const int kCallCodeSize = 13;
164  int code_size = kCallCodeSize + guard_bytes;
165 
166  // Create a code patcher.
167  CodePatcher patcher(pc_, code_size);
168 
169  // Add a label for checking the size of the code used for returning.
170 #ifdef DEBUG
171  Label check_codesize;
172  patcher.masm()->bind(&check_codesize);
173 #endif
174 
175  // Patch the code.
176  patcher.masm()->movq(r10, target, RelocInfo::NONE);
177  patcher.masm()->call(r10);
178 
179  // Check that the size of the code generated is as expected.
180  ASSERT_EQ(kCallCodeSize,
181  patcher.masm()->SizeOfCodeGeneratedSince(&check_codesize));
182 
183  // Add the requested number of int3 instructions after the call.
184  for (int i = 0; i < guard_bytes; i++) {
185  patcher.masm()->int3();
186  }
187 }
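// Worked example of the 13-byte figure, assuming the standard x64 encodings:
//   49 BA <8-byte imm>   movq r10, target   (REX.W+B, B8|r, imm64) = 10 bytes
//   41 FF D2             call r10           (REX.B, FF /2)         =  3 bytes
// Total: 13 bytes == kCallCodeSize; each trailing int3 (CC) adds one byte.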
188 
189 
190 void RelocInfo::PatchCode(byte* instructions, int instruction_count) {
191  // Patch the code at the current address with the supplied instructions.
192  for (int i = 0; i < instruction_count; i++) {
193  *(pc_ + i) = *(instructions + i);
194  }
195 
196  // Indicate that code has changed.
197  CPU::FlushICache(pc_, instruction_count);
198 }
199 
200 
201 // -----------------------------------------------------------------------------
202 // Register constants.
203 
204 const int Register::kRegisterCodeByAllocationIndex[kNumAllocatableRegisters] = {
205  // rax, rbx, rdx, rcx, rdi, r8, r9, r11, r14, r15
206  0, 3, 2, 1, 7, 8, 9, 11, 14, 15
207 };
208 
209 const int Register::kAllocationIndexByRegisterCode[kNumRegisters] = {
210  0, 3, 2, 1, -1, -1, -1, 4, 5, 6, -1, 7, -1, -1, 8, 9
211 };
212 
213 
214 // -----------------------------------------------------------------------------
215 // Implementation of Operand
216 
217 Operand::Operand(Register base, int32_t disp) : rex_(0) {
218  len_ = 1;
219  if (base.is(rsp) || base.is(r12)) {
220  // SIB byte is needed to encode (rsp + offset) or (r12 + offset).
221  set_sib(times_1, rsp, base);
222  }
223 
224  if (disp == 0 && !base.is(rbp) && !base.is(r13)) {
225  set_modrm(0, base);
226  } else if (is_int8(disp)) {
227  set_modrm(1, base);
228  set_disp8(disp);
229  } else {
230  set_modrm(2, base);
231  set_disp32(disp);
232  }
233 }
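// Illustrative encodings produced by this constructor (the reg field is
// still 0; it is filled in later by emit_operand):
//   Operand(rax, 0)  -> 00         ; mode 0, [rax]
//   Operand(rbp, 0)  -> 45 00      ; mode 1 + disp8, since mode 0 with
//                                  ; rbp/r13 would mean RIP-relative/disp32
//   Operand(rsp, 8)  -> 44 24 08   ; mode 1, rm=100 forces the SIB byte 24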
234 
235 
236 Operand::Operand(Register base,
237  Register index,
238  ScaleFactor scale,
239  int32_t disp) : rex_(0) {
240  ASSERT(!index.is(rsp));
241  len_ = 1;
242  set_sib(scale, index, base);
243  if (disp == 0 && !base.is(rbp) && !base.is(r13)) {
244  // This call to set_modrm doesn't overwrite the REX.B (or REX.X) bits
245  // possibly set by set_sib.
246  set_modrm(0, rsp);
247  } else if (is_int8(disp)) {
248  set_modrm(1, rsp);
249  set_disp8(disp);
250  } else {
251  set_modrm(2, rsp);
252  set_disp32(disp);
253  }
254 }
255 
256 
257 Operand::Operand(Register index,
258  ScaleFactor scale,
259  int32_t disp) : rex_(0) {
260  ASSERT(!index.is(rsp));
261  len_ = 1;
262  set_modrm(0, rsp);
263  set_sib(scale, index, rbp);
264  set_disp32(disp);
265 }
266 
267 
268 Operand::Operand(const Operand& operand, int32_t offset) {
269  ASSERT(operand.len_ >= 1);
270  // Operand encodes REX ModR/M [SIB] [Disp].
271  byte modrm = operand.buf_[0];
272  ASSERT(modrm < 0xC0); // Disallow mode 3 (register target).
273  bool has_sib = ((modrm & 0x07) == 0x04);
274  byte mode = modrm & 0xC0;
275  int disp_offset = has_sib ? 2 : 1;
276  int base_reg = (has_sib ? operand.buf_[1] : modrm) & 0x07;
277  // Mode 0 with rbp/r13 as ModR/M or SIB base register always has a 32-bit
278  // displacement.
279  bool is_baseless = (mode == 0) && (base_reg == 0x05); // No base or RIP base.
280  int32_t disp_value = 0;
281  if (mode == 0x80 || is_baseless) {
282  // Mode 2 or mode 0 with rbp/r13 as base: 32-bit displacement.
283  disp_value = *BitCast<const int32_t*>(&operand.buf_[disp_offset]);
284  } else if (mode == 0x40) {
285  // Mode 1: Byte displacement.
286  disp_value = static_cast<signed char>(operand.buf_[disp_offset]);
287  }
288 
289  // Write new operand with same registers, but with modified displacement.
290  ASSERT(offset >= 0 ? disp_value + offset > disp_value
291  : disp_value + offset < disp_value); // No overflow.
292  disp_value += offset;
293  rex_ = operand.rex_;
294  if (!is_int8(disp_value) || is_baseless) {
295  // Need 32 bits of displacement: mode 2, or mode 0 with register rbp/r13.
296  buf_[0] = (modrm & 0x3f) | (is_baseless ? 0x00 : 0x80);
297  len_ = disp_offset + 4;
298  Memory::int32_at(&buf_[disp_offset]) = disp_value;
299  } else if (disp_value != 0 || (base_reg == 0x05)) {
300  // Need 8 bits of displacement.
301  buf_[0] = (modrm & 0x3f) | 0x40; // Mode 1.
302  len_ = disp_offset + 1;
303  buf_[disp_offset] = static_cast<byte>(disp_value);
304  } else {
305  // Need no displacement.
306  buf_[0] = (modrm & 0x3f); // Mode 0.
307  len_ = disp_offset;
308  }
309  if (has_sib) {
310  buf_[1] = operand.buf_[1];
311  }
312 }
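// Illustrative example: Operand(rbx, 4) encodes as 43 04 (mode 1, disp8).
// Rewriting it with offset 0x100 gives a displacement of 0x104, which no
// longer fits in 8 bits, so the result is 83 04 01 00 00 (mode 2, disp32).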
313 
314 
315 bool Operand::AddressUsesRegister(Register reg) const {
316  int code = reg.code();
317  ASSERT((buf_[0] & 0xC0) != 0xC0); // Always a memory operand.
318  // Start with only low three bits of base register. Initial decoding doesn't
319  // distinguish on the REX.B bit.
320  int base_code = buf_[0] & 0x07;
321  if (base_code == rsp.code()) {
322  // SIB byte present in buf_[1].
323  // Check the index register from the SIB byte + REX.X prefix.
324  int index_code = ((buf_[1] >> 3) & 0x07) | ((rex_ & 0x02) << 2);
325  // Index code (including REX.X) of 0x04 (rsp) means no index register.
326  if (index_code != rsp.code() && index_code == code) return true;
327  // Add REX.B to get the full base register code.
328  base_code = (buf_[1] & 0x07) | ((rex_ & 0x01) << 3);
329  // A base register of 0x05 (rbp) with mod = 0 means no base register.
330  if (base_code == rbp.code() && ((buf_[0] & 0xC0) == 0)) return false;
331  return code == base_code;
332  } else {
333  // A base register with low bits of 0x05 (rbp or r13) and mod = 0 means
334  // no base register.
335  if (base_code == rbp.code() && ((buf_[0] & 0xC0) == 0)) return false;
336  base_code |= ((rex_ & 0x01) << 3);
337  return code == base_code;
338  }
339 }
340 
341 
342 // -----------------------------------------------------------------------------
343 // Implementation of Assembler.
344 
345 #ifdef GENERATED_CODE_COVERAGE
346 static void InitCoverageLog();
347 #endif
348 
349 Assembler::Assembler(Isolate* arg_isolate, void* buffer, int buffer_size)
350  : AssemblerBase(arg_isolate),
351  code_targets_(100),
352  positions_recorder_(this),
353  emit_debug_code_(FLAG_debug_code),
354  predictable_code_size_(false) {
355  if (buffer == NULL) {
356  // Do our own buffer management.
357  if (buffer_size <= kMinimalBufferSize) {
358  buffer_size = kMinimalBufferSize;
359 
360  if (isolate() != NULL && isolate()->assembler_spare_buffer() != NULL) {
361  buffer = isolate()->assembler_spare_buffer();
362  isolate()->set_assembler_spare_buffer(NULL);
363  }
364  }
365  if (buffer == NULL) {
366  buffer_ = NewArray<byte>(buffer_size);
367  } else {
368  buffer_ = static_cast<byte*>(buffer);
369  }
370  buffer_size_ = buffer_size;
371  own_buffer_ = true;
372  } else {
373  // Use externally provided buffer instead.
374  ASSERT(buffer_size > 0);
375  buffer_ = static_cast<byte*>(buffer);
376  buffer_size_ = buffer_size;
377  own_buffer_ = false;
378  }
379 
380  // Clear the buffer in debug mode unless it was provided by the
381  // caller in which case we can't be sure it's okay to overwrite
382  // existing code in it.
383 #ifdef DEBUG
384  if (own_buffer_) {
385  memset(buffer_, 0xCC, buffer_size); // int3
386  }
387 #endif
388 
389  // Set up buffer pointers.
390  ASSERT(buffer_ != NULL);
391  pc_ = buffer_;
392  reloc_info_writer.Reposition(buffer_ + buffer_size, pc_);
393 
394 
395 #ifdef GENERATED_CODE_COVERAGE
396  InitCoverageLog();
397 #endif
398 }
399 
400 
401 Assembler::~Assembler() {
402  if (own_buffer_) {
403  if (isolate() != NULL &&
404  isolate()->assembler_spare_buffer() == NULL &&
405  buffer_size_ == kMinimalBufferSize) {
406  isolate()->set_assembler_spare_buffer(buffer_);
407  } else {
408  DeleteArray(buffer_);
409  }
410  }
411 }
412 
413 
414 void Assembler::GetCode(CodeDesc* desc) {
415  // Finalize code (at this point overflow() may be true, but the gap ensures
416  // that we are still not overlapping instructions and relocation info).
417  ASSERT(pc_ <= reloc_info_writer.pos()); // No overlap.
418  // Set up code descriptor.
419  desc->buffer = buffer_;
420  desc->buffer_size = buffer_size_;
421  desc->instr_size = pc_offset();
422  ASSERT(desc->instr_size > 0); // Zero-size code objects upset the system.
423  desc->reloc_size =
424  static_cast<int>((buffer_ + buffer_size_) - reloc_info_writer.pos());
425  desc->origin = this;
426 }
427 
428 
429 void Assembler::Align(int m) {
430  ASSERT(IsPowerOf2(m));
431  int delta = (m - (pc_offset() & (m - 1))) & (m - 1);
432  Nop(delta);
433 }
434 
435 
436 void Assembler::CodeTargetAlign() {
437  Align(16); // Preferred alignment of jump targets on x64.
438 }
439 
440 
441 bool Assembler::IsNop(Address addr) {
442  Address a = addr;
443  while (*a == 0x66) a++;
444  if (*a == 0x90) return true;
445  if (a[0] == 0xf && a[1] == 0x1f) return true;
446  return false;
447 }
448 
449 
450 void Assembler::bind_to(Label* L, int pos) {
451  ASSERT(!L->is_bound()); // Label may only be bound once.
452  ASSERT(0 <= pos && pos <= pc_offset()); // Position must be valid.
453  if (L->is_linked()) {
454  int current = L->pos();
455  int next = long_at(current);
456  while (next != current) {
457  // Relative address, relative to point after address.
458  int imm32 = pos - (current + sizeof(int32_t));
459  long_at_put(current, imm32);
460  current = next;
461  next = long_at(next);
462  }
463  // Fix up last fixup on linked list.
464  int last_imm32 = pos - (current + sizeof(int32_t));
465  long_at_put(current, last_imm32);
466  }
467  while (L->is_near_linked()) {
468  int fixup_pos = L->near_link_pos();
469  int offset_to_next =
470  static_cast<int>(*reinterpret_cast<int8_t*>(addr_at(fixup_pos)));
471  ASSERT(offset_to_next <= 0);
472  int disp = pos - (fixup_pos + sizeof(int8_t));
473  CHECK(is_int8(disp));
474  set_byte_at(fixup_pos, disp);
475  if (offset_to_next < 0) {
476  L->link_to(fixup_pos + offset_to_next, Label::kNear);
477  } else {
478  L->UnuseNear();
479  }
480  }
481  L->bind_to(pos);
482 }
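// Illustrative example of the fixup chain patched above: suppose a label is
// referenced by calls whose 32-bit displacement fields start at offsets 10
// and 30, and is then bound at offset 50. The field at 10 stores 10 (a
// self-reference marks the end of the chain) and the field at 30 stores 10
// (the previous fixup). bind_to() walks 30 -> 10, writing the relative
// displacements 50 - (30 + 4) = 16 and 50 - (10 + 4) = 36.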
483 
484 
485 void Assembler::bind(Label* L) {
486  bind_to(L, pc_offset());
487 }
488 
489 
490 void Assembler::GrowBuffer() {
491  ASSERT(buffer_overflow());
492  if (!own_buffer_) FATAL("external code buffer is too small");
493 
494  // Compute new buffer size.
495  CodeDesc desc; // the new buffer
496  if (buffer_size_ < 4*KB) {
497  desc.buffer_size = 4*KB;
498  } else {
499  desc.buffer_size = 2*buffer_size_;
500  }
501  // Some internal data structures overflow for very large buffers;
502  // kMaximalBufferSize must be kept small enough to avoid this.
503  if ((desc.buffer_size > kMaximalBufferSize) ||
504  (desc.buffer_size > HEAP->MaxOldGenerationSize())) {
505  V8::FatalProcessOutOfMemory("Assembler::GrowBuffer");
506  }
507 
508  // Set up new buffer.
509  desc.buffer = NewArray<byte>(desc.buffer_size);
510  desc.instr_size = pc_offset();
511  desc.reloc_size =
512  static_cast<int>((buffer_ + buffer_size_) - (reloc_info_writer.pos()));
513 
514  // Clear the buffer in debug mode. Use 'int3' instructions to make
515  // sure we trap immediately if we ever run uninitialized code.
516 #ifdef DEBUG
517  memset(desc.buffer, 0xCC, desc.buffer_size);
518 #endif
519 
520  // Copy the data.
521  intptr_t pc_delta = desc.buffer - buffer_;
522  intptr_t rc_delta = (desc.buffer + desc.buffer_size) -
523  (buffer_ + buffer_size_);
524  memmove(desc.buffer, buffer_, desc.instr_size);
525  memmove(rc_delta + reloc_info_writer.pos(),
526  reloc_info_writer.pos(), desc.reloc_size);
527 
528  // Switch buffers.
529  if (isolate() != NULL &&
530  isolate()->assembler_spare_buffer() == NULL &&
531  buffer_size_ == kMinimalBufferSize) {
532  isolate()->set_assembler_spare_buffer(buffer_);
533  } else {
534  DeleteArray(buffer_);
535  }
536  buffer_ = desc.buffer;
537  buffer_size_ = desc.buffer_size;
538  pc_ += pc_delta;
539  reloc_info_writer.Reposition(reloc_info_writer.pos() + rc_delta,
540  reloc_info_writer.last_pc() + pc_delta);
541 
542  // Relocate runtime entries.
543  for (RelocIterator it(desc); !it.done(); it.next()) {
544  RelocInfo::Mode rmode = it.rinfo()->rmode();
545  if (rmode == RelocInfo::INTERNAL_REFERENCE) {
546  intptr_t* p = reinterpret_cast<intptr_t*>(it.rinfo()->pc());
547  if (*p != 0) { // 0 means uninitialized.
548  *p += pc_delta;
549  }
550  }
551  }
552 
553  ASSERT(!buffer_overflow());
554 }
555 
556 
557 void Assembler::emit_operand(int code, const Operand& adr) {
558  ASSERT(is_uint3(code));
559  const unsigned length = adr.len_;
560  ASSERT(length > 0);
561 
562  // Emit updated ModR/M byte containing the given register.
563  ASSERT((adr.buf_[0] & 0x38) == 0);
564  pc_[0] = adr.buf_[0] | code << 3;
565 
566  // Emit the rest of the encoded operand.
567  for (unsigned i = 1; i < length; i++) pc_[i] = adr.buf_[i];
568  pc_ += length;
569 }
570 
571 
572 // Assembler Instruction implementations.
573 
574 void Assembler::arithmetic_op(byte opcode, Register reg, const Operand& op) {
575  EnsureSpace ensure_space(this);
576  emit_rex_64(reg, op);
577  emit(opcode);
578  emit_operand(reg, op);
579 }
580 
581 
582 void Assembler::arithmetic_op(byte opcode, Register reg, Register rm_reg) {
583  EnsureSpace ensure_space(this);
584  ASSERT((opcode & 0xC6) == 2);
585  if (rm_reg.low_bits() == 4) { // Forces SIB byte.
586  // Swap reg and rm_reg and change opcode operand order.
587  emit_rex_64(rm_reg, reg);
588  emit(opcode ^ 0x02);
589  emit_modrm(rm_reg, reg);
590  } else {
591  emit_rex_64(reg, rm_reg);
592  emit(opcode);
593  emit_modrm(reg, rm_reg);
594  }
595 }
596 
597 
598 void Assembler::arithmetic_op_16(byte opcode, Register reg, Register rm_reg) {
599  EnsureSpace ensure_space(this);
600  ASSERT((opcode & 0xC6) == 2);
601  if (rm_reg.low_bits() == 4) { // Forces SIB byte.
602  // Swap reg and rm_reg and change opcode operand order.
603  emit(0x66);
604  emit_optional_rex_32(rm_reg, reg);
605  emit(opcode ^ 0x02);
606  emit_modrm(rm_reg, reg);
607  } else {
608  emit(0x66);
609  emit_optional_rex_32(reg, rm_reg);
610  emit(opcode);
611  emit_modrm(reg, rm_reg);
612  }
613 }
614 
615 
616 void Assembler::arithmetic_op_16(byte opcode,
617  Register reg,
618  const Operand& rm_reg) {
619  EnsureSpace ensure_space(this);
620  emit(0x66);
621  emit_optional_rex_32(reg, rm_reg);
622  emit(opcode);
623  emit_operand(reg, rm_reg);
624 }
625 
626 
627 void Assembler::arithmetic_op_32(byte opcode, Register reg, Register rm_reg) {
628  EnsureSpace ensure_space(this);
629  ASSERT((opcode & 0xC6) == 2);
630  if (rm_reg.low_bits() == 4) { // Forces SIB byte.
631  // Swap reg and rm_reg and change opcode operand order.
632  emit_optional_rex_32(rm_reg, reg);
633  emit(opcode ^ 0x02); // E.g. 0x03 -> 0x01 for ADD.
634  emit_modrm(rm_reg, reg);
635  } else {
636  emit_optional_rex_32(reg, rm_reg);
637  emit(opcode);
638  emit_modrm(reg, rm_reg);
639  }
640 }
641 
642 
643 void Assembler::arithmetic_op_32(byte opcode,
644  Register reg,
645  const Operand& rm_reg) {
646  EnsureSpace ensure_space(this);
647  emit_optional_rex_32(reg, rm_reg);
648  emit(opcode);
649  emit_operand(reg, rm_reg);
650 }
651 
652 
653 void Assembler::immediate_arithmetic_op(byte subcode,
654  Register dst,
655  Immediate src) {
656  EnsureSpace ensure_space(this);
657  emit_rex_64(dst);
658  if (is_int8(src.value_)) {
659  emit(0x83);
660  emit_modrm(subcode, dst);
661  emit(src.value_);
662  } else if (dst.is(rax)) {
663  emit(0x05 | (subcode << 3));
664  emitl(src.value_);
665  } else {
666  emit(0x81);
667  emit_modrm(subcode, dst);
668  emitl(src.value_);
669  }
670 }
671 
672 void Assembler::immediate_arithmetic_op(byte subcode,
673  const Operand& dst,
674  Immediate src) {
675  EnsureSpace ensure_space(this);
676  emit_rex_64(dst);
677  if (is_int8(src.value_)) {
678  emit(0x83);
679  emit_operand(subcode, dst);
680  emit(src.value_);
681  } else {
682  emit(0x81);
683  emit_operand(subcode, dst);
684  emitl(src.value_);
685  }
686 }
687 
688 
689 void Assembler::immediate_arithmetic_op_16(byte subcode,
690  Register dst,
691  Immediate src) {
692  EnsureSpace ensure_space(this);
693  emit(0x66); // Operand size override prefix.
694  emit_optional_rex_32(dst);
695  if (is_int8(src.value_)) {
696  emit(0x83);
697  emit_modrm(subcode, dst);
698  emit(src.value_);
699  } else if (dst.is(rax)) {
700  emit(0x05 | (subcode << 3));
701  emitw(src.value_);
702  } else {
703  emit(0x81);
704  emit_modrm(subcode, dst);
705  emitw(src.value_);
706  }
707 }
708 
709 
710 void Assembler::immediate_arithmetic_op_16(byte subcode,
711  const Operand& dst,
712  Immediate src) {
713  EnsureSpace ensure_space(this);
714  emit(0x66); // Operand size override prefix.
715  emit_optional_rex_32(dst);
716  if (is_int8(src.value_)) {
717  emit(0x83);
718  emit_operand(subcode, dst);
719  emit(src.value_);
720  } else {
721  emit(0x81);
722  emit_operand(subcode, dst);
723  emitw(src.value_);
724  }
725 }
726 
727 
728 void Assembler::immediate_arithmetic_op_32(byte subcode,
729  Register dst,
730  Immediate src) {
731  EnsureSpace ensure_space(this);
732  emit_optional_rex_32(dst);
733  if (is_int8(src.value_)) {
734  emit(0x83);
735  emit_modrm(subcode, dst);
736  emit(src.value_);
737  } else if (dst.is(rax)) {
738  emit(0x05 | (subcode << 3));
739  emitl(src.value_);
740  } else {
741  emit(0x81);
742  emit_modrm(subcode, dst);
743  emitl(src.value_);
744  }
745 }
746 
747 
748 void Assembler::immediate_arithmetic_op_32(byte subcode,
749  const Operand& dst,
750  Immediate src) {
751  EnsureSpace ensure_space(this);
752  emit_optional_rex_32(dst);
753  if (is_int8(src.value_)) {
754  emit(0x83);
755  emit_operand(subcode, dst);
756  emit(src.value_);
757  } else {
758  emit(0x81);
759  emit_operand(subcode, dst);
760  emitl(src.value_);
761  }
762 }
763 
764 
765 void Assembler::immediate_arithmetic_op_8(byte subcode,
766  const Operand& dst,
767  Immediate src) {
768  EnsureSpace ensure_space(this);
769  emit_optional_rex_32(dst);
770  ASSERT(is_int8(src.value_) || is_uint8(src.value_));
771  emit(0x80);
772  emit_operand(subcode, dst);
773  emit(src.value_);
774 }
775 
776 
777 void Assembler::immediate_arithmetic_op_8(byte subcode,
778  Register dst,
779  Immediate src) {
780  EnsureSpace ensure_space(this);
781  if (!dst.is_byte_register()) {
782  // Use 64-bit mode byte registers.
783  emit_rex_64(dst);
784  }
785  ASSERT(is_int8(src.value_) || is_uint8(src.value_));
786  emit(0x80);
787  emit_modrm(subcode, dst);
788  emit(src.value_);
789 }
790 
791 
792 void Assembler::shift(Register dst, Immediate shift_amount, int subcode) {
793  EnsureSpace ensure_space(this);
794  ASSERT(is_uint6(shift_amount.value_)); // illegal shift count
795  if (shift_amount.value_ == 1) {
796  emit_rex_64(dst);
797  emit(0xD1);
798  emit_modrm(subcode, dst);
799  } else {
800  emit_rex_64(dst);
801  emit(0xC1);
802  emit_modrm(subcode, dst);
803  emit(shift_amount.value_);
804  }
805 }
806 
807 
808 void Assembler::shift(Register dst, int subcode) {
809  EnsureSpace ensure_space(this);
810  emit_rex_64(dst);
811  emit(0xD3);
812  emit_modrm(subcode, dst);
813 }
814 
815 
816 void Assembler::shift_32(Register dst, int subcode) {
817  EnsureSpace ensure_space(this);
818  emit_optional_rex_32(dst);
819  emit(0xD3);
820  emit_modrm(subcode, dst);
821 }
822 
823 
824 void Assembler::shift_32(Register dst, Immediate shift_amount, int subcode) {
825  EnsureSpace ensure_space(this);
826  ASSERT(is_uint5(shift_amount.value_)); // illegal shift count
827  if (shift_amount.value_ == 1) {
828  emit_optional_rex_32(dst);
829  emit(0xD1);
830  emit_modrm(subcode, dst);
831  } else {
832  emit_optional_rex_32(dst);
833  emit(0xC1);
834  emit_modrm(subcode, dst);
835  emit(shift_amount.value_);
836  }
837 }
838 
839 
840 void Assembler::bt(const Operand& dst, Register src) {
841  EnsureSpace ensure_space(this);
842  emit_rex_64(src, dst);
843  emit(0x0F);
844  emit(0xA3);
845  emit_operand(src, dst);
846 }
847 
848 
849 void Assembler::bts(const Operand& dst, Register src) {
850  EnsureSpace ensure_space(this);
851  emit_rex_64(src, dst);
852  emit(0x0F);
853  emit(0xAB);
854  emit_operand(src, dst);
855 }
856 
857 
858 void Assembler::call(Label* L) {
859  positions_recorder()->WriteRecordedPositions();
860  EnsureSpace ensure_space(this);
861  // 1110 1000 #32-bit disp.
862  emit(0xE8);
863  if (L->is_bound()) {
864  int offset = L->pos() - pc_offset() - sizeof(int32_t);
865  ASSERT(offset <= 0);
866  emitl(offset);
867  } else if (L->is_linked()) {
868  emitl(L->pos());
869  L->link_to(pc_offset() - sizeof(int32_t));
870  } else {
871  ASSERT(L->is_unused());
872  int32_t current = pc_offset();
873  emitl(current);
874  L->link_to(current);
875  }
876 }
877 
878 
879 void Assembler::call(Handle<Code> target,
880  RelocInfo::Mode rmode,
881  TypeFeedbackId ast_id) {
882  positions_recorder()->WriteRecordedPositions();
883  EnsureSpace ensure_space(this);
884  // 1110 1000 #32-bit disp.
885  emit(0xE8);
886  emit_code_target(target, rmode, ast_id);
887 }
888 
889 
890 void Assembler::call(Register adr) {
891  positions_recorder()->WriteRecordedPositions();
892  EnsureSpace ensure_space(this);
893  // Opcode: FF /2 r64.
894  emit_optional_rex_32(adr);
895  emit(0xFF);
896  emit_modrm(0x2, adr);
897 }
898 
899 
900 void Assembler::call(const Operand& op) {
901  positions_recorder()->WriteRecordedPositions();
902  EnsureSpace ensure_space(this);
903  // Opcode: FF /2 m64.
904  emit_optional_rex_32(op);
905  emit(0xFF);
906  emit_operand(0x2, op);
907 }
908 
909 
910 // Calls directly to the given address using a relative offset.
911 // Should only ever be used in Code objects for calls within the
912 // same Code object. Should not be used when generating new code (use labels),
913 // but only when patching existing code.
914 void Assembler::call(Address target) {
915  positions_recorder()->WriteRecordedPositions();
916  EnsureSpace ensure_space(this);
917  // 1110 1000 #32-bit disp.
918  emit(0xE8);
919  Address source = pc_ + 4;
920  intptr_t displacement = target - source;
921  ASSERT(is_int32(displacement));
922  emitl(static_cast<int32_t>(displacement));
923 }
924 
925 
926 void Assembler::clc() {
927  EnsureSpace ensure_space(this);
928  emit(0xF8);
929 }
930 
931 void Assembler::cld() {
932  EnsureSpace ensure_space(this);
933  emit(0xFC);
934 }
935 
936 void Assembler::cdq() {
937  EnsureSpace ensure_space(this);
938  emit(0x99);
939 }
940 
941 
942 void Assembler::cmovq(Condition cc, Register dst, Register src) {
943  if (cc == always) {
944  movq(dst, src);
945  } else if (cc == never) {
946  return;
947  }
948  // No need to check CpuFeatures for CMOV support; it's a required part of
949  // the 64-bit architecture.
950  ASSERT(cc >= 0); // Use mov for unconditional moves.
951  EnsureSpace ensure_space(this);
952  // Opcode: REX.W 0f 40 + cc /r.
953  emit_rex_64(dst, src);
954  emit(0x0f);
955  emit(0x40 + cc);
956  emit_modrm(dst, src);
957 }
958 
959 
960 void Assembler::cmovq(Condition cc, Register dst, const Operand& src) {
961  if (cc == always) {
962  movq(dst, src);
963  } else if (cc == never) {
964  return;
965  }
966  ASSERT(cc >= 0);
967  EnsureSpace ensure_space(this);
968  // Opcode: REX.W 0f 40 + cc /r.
969  emit_rex_64(dst, src);
970  emit(0x0f);
971  emit(0x40 + cc);
972  emit_operand(dst, src);
973 }
974 
975 
976 void Assembler::cmovl(Condition cc, Register dst, Register src) {
977  if (cc == always) {
978  movl(dst, src);
979  } else if (cc == never) {
980  return;
981  }
982  ASSERT(cc >= 0);
983  EnsureSpace ensure_space(this);
984  // Opcode: 0f 40 + cc /r.
985  emit_optional_rex_32(dst, src);
986  emit(0x0f);
987  emit(0x40 + cc);
988  emit_modrm(dst, src);
989 }
990 
991 
992 void Assembler::cmovl(Condition cc, Register dst, const Operand& src) {
993  if (cc == always) {
994  movl(dst, src);
995  } else if (cc == never) {
996  return;
997  }
998  ASSERT(cc >= 0);
999  EnsureSpace ensure_space(this);
1000  // Opcode: 0f 40 + cc /r.
1001  emit_optional_rex_32(dst, src);
1002  emit(0x0f);
1003  emit(0x40 + cc);
1004  emit_operand(dst, src);
1005 }
1006 
1007 
1008 void Assembler::cmpb_al(Immediate imm8) {
1009  ASSERT(is_int8(imm8.value_) || is_uint8(imm8.value_));
1010  EnsureSpace ensure_space(this);
1011  emit(0x3c);
1012  emit(imm8.value_);
1013 }
1014 
1015 
1016 void Assembler::cpuid() {
1017  ASSERT(CpuFeatures::IsEnabled(CPUID));
1018  EnsureSpace ensure_space(this);
1019  emit(0x0F);
1020  emit(0xA2);
1021 }
1022 
1023 
1024 void Assembler::cqo() {
1025  EnsureSpace ensure_space(this);
1026  emit_rex_64();
1027  emit(0x99);
1028 }
1029 
1030 
1031 void Assembler::decq(Register dst) {
1032  EnsureSpace ensure_space(this);
1033  emit_rex_64(dst);
1034  emit(0xFF);
1035  emit_modrm(0x1, dst);
1036 }
1037 
1038 
1039 void Assembler::decq(const Operand& dst) {
1040  EnsureSpace ensure_space(this);
1041  emit_rex_64(dst);
1042  emit(0xFF);
1043  emit_operand(1, dst);
1044 }
1045 
1046 
1047 void Assembler::decl(Register dst) {
1048  EnsureSpace ensure_space(this);
1049  emit_optional_rex_32(dst);
1050  emit(0xFF);
1051  emit_modrm(0x1, dst);
1052 }
1053 
1054 
1055 void Assembler::decl(const Operand& dst) {
1056  EnsureSpace ensure_space(this);
1057  emit_optional_rex_32(dst);
1058  emit(0xFF);
1059  emit_operand(1, dst);
1060 }
1061 
1062 
1063 void Assembler::decb(Register dst) {
1064  EnsureSpace ensure_space(this);
1065  if (!dst.is_byte_register()) {
1066  // Register is not one of al, bl, cl, dl. Its encoding needs REX.
1067  emit_rex_32(dst);
1068  }
1069  emit(0xFE);
1070  emit_modrm(0x1, dst);
1071 }
1072 
1073 
1074 void Assembler::decb(const Operand& dst) {
1075  EnsureSpace ensure_space(this);
1076  emit_optional_rex_32(dst);
1077  emit(0xFE);
1078  emit_operand(1, dst);
1079 }
1080 
1081 
1082 void Assembler::enter(Immediate size) {
1083  EnsureSpace ensure_space(this);
1084  emit(0xC8);
1085  emitw(size.value_); // 16 bit operand, always.
1086  emit(0);
1087 }
1088 
1089 
1090 void Assembler::hlt() {
1091  EnsureSpace ensure_space(this);
1092  emit(0xF4);
1093 }
1094 
1095 
1096 void Assembler::idivq(Register src) {
1097  EnsureSpace ensure_space(this);
1098  emit_rex_64(src);
1099  emit(0xF7);
1100  emit_modrm(0x7, src);
1101 }
1102 
1103 
1104 void Assembler::idivl(Register src) {
1105  EnsureSpace ensure_space(this);
1106  emit_optional_rex_32(src);
1107  emit(0xF7);
1108  emit_modrm(0x7, src);
1109 }
1110 
1111 
1112 void Assembler::imul(Register src) {
1113  EnsureSpace ensure_space(this);
1114  emit_rex_64(src);
1115  emit(0xF7);
1116  emit_modrm(0x5, src);
1117 }
1118 
1119 
1120 void Assembler::imul(Register dst, Register src) {
1121  EnsureSpace ensure_space(this);
1122  emit_rex_64(dst, src);
1123  emit(0x0F);
1124  emit(0xAF);
1125  emit_modrm(dst, src);
1126 }
1127 
1128 
1129 void Assembler::imul(Register dst, const Operand& src) {
1130  EnsureSpace ensure_space(this);
1131  emit_rex_64(dst, src);
1132  emit(0x0F);
1133  emit(0xAF);
1134  emit_operand(dst, src);
1135 }
1136 
1137 
1138 void Assembler::imul(Register dst, Register src, Immediate imm) {
1139  EnsureSpace ensure_space(this);
1140  emit_rex_64(dst, src);
1141  if (is_int8(imm.value_)) {
1142  emit(0x6B);
1143  emit_modrm(dst, src);
1144  emit(imm.value_);
1145  } else {
1146  emit(0x69);
1147  emit_modrm(dst, src);
1148  emitl(imm.value_);
1149  }
1150 }
1151 
1152 
1153 void Assembler::imull(Register dst, Register src) {
1154  EnsureSpace ensure_space(this);
1155  emit_optional_rex_32(dst, src);
1156  emit(0x0F);
1157  emit(0xAF);
1158  emit_modrm(dst, src);
1159 }
1160 
1161 
1162 void Assembler::imull(Register dst, const Operand& src) {
1163  EnsureSpace ensure_space(this);
1164  emit_optional_rex_32(dst, src);
1165  emit(0x0F);
1166  emit(0xAF);
1167  emit_operand(dst, src);
1168 }
1169 
1170 
1171 void Assembler::imull(Register dst, Register src, Immediate imm) {
1172  EnsureSpace ensure_space(this);
1173  emit_optional_rex_32(dst, src);
1174  if (is_int8(imm.value_)) {
1175  emit(0x6B);
1176  emit_modrm(dst, src);
1177  emit(imm.value_);
1178  } else {
1179  emit(0x69);
1180  emit_modrm(dst, src);
1181  emitl(imm.value_);
1182  }
1183 }
1184 
1185 
1186 void Assembler::incq(Register dst) {
1187  EnsureSpace ensure_space(this);
1188  emit_rex_64(dst);
1189  emit(0xFF);
1190  emit_modrm(0x0, dst);
1191 }
1192 
1193 
1194 void Assembler::incq(const Operand& dst) {
1195  EnsureSpace ensure_space(this);
1196  emit_rex_64(dst);
1197  emit(0xFF);
1198  emit_operand(0, dst);
1199 }
1200 
1201 
1202 void Assembler::incl(const Operand& dst) {
1203  EnsureSpace ensure_space(this);
1204  emit_optional_rex_32(dst);
1205  emit(0xFF);
1206  emit_operand(0, dst);
1207 }
1208 
1209 
1210 void Assembler::incl(Register dst) {
1211  EnsureSpace ensure_space(this);
1212  emit_optional_rex_32(dst);
1213  emit(0xFF);
1214  emit_modrm(0, dst);
1215 }
1216 
1217 
1218 void Assembler::int3() {
1219  EnsureSpace ensure_space(this);
1220  emit(0xCC);
1221 }
1222 
1223 
1224 void Assembler::j(Condition cc, Label* L, Label::Distance distance) {
1225  if (cc == always) {
1226  jmp(L);
1227  return;
1228  } else if (cc == never) {
1229  return;
1230  }
1231  EnsureSpace ensure_space(this);
1232  ASSERT(is_uint4(cc));
1233  if (L->is_bound()) {
1234  const int short_size = 2;
1235  const int long_size = 6;
1236  int offs = L->pos() - pc_offset();
1237  ASSERT(offs <= 0);
1238  // Determine whether we can use 1-byte offsets for backwards branches,
1239  // which have a max range of 128 bytes.
1240 
1241  // We also need to check the predictable_code_size_ flag here, because
1242  // on x64, when the full code generator recompiles code for debugging, some
1243  // places need to be padded out to a certain size. The debugger is keeping
1244  // track of how often it did this so that it can adjust return addresses on
1245  // the stack, but if the size of jump instructions can also change, that's
1246  // not enough and the calculated offsets would be incorrect.
1247  if (is_int8(offs - short_size) && !predictable_code_size_) {
1248  // 0111 tttn #8-bit disp.
1249  emit(0x70 | cc);
1250  emit((offs - short_size) & 0xFF);
1251  } else {
1252  // 0000 1111 1000 tttn #32-bit disp.
1253  emit(0x0F);
1254  emit(0x80 | cc);
1255  emitl(offs - long_size);
1256  }
1257  } else if (distance == Label::kNear) {
1258  // 0111 tttn #8-bit disp
1259  emit(0x70 | cc);
1260  byte disp = 0x00;
1261  if (L->is_near_linked()) {
1262  int offset = L->near_link_pos() - pc_offset();
1263  ASSERT(is_int8(offset));
1264  disp = static_cast<byte>(offset & 0xFF);
1265  }
1266  L->link_to(pc_offset(), Label::kNear);
1267  emit(disp);
1268  } else if (L->is_linked()) {
1269  // 0000 1111 1000 tttn #32-bit disp.
1270  emit(0x0F);
1271  emit(0x80 | cc);
1272  emitl(L->pos());
1273  L->link_to(pc_offset() - sizeof(int32_t));
1274  } else {
1275  ASSERT(L->is_unused());
1276  emit(0x0F);
1277  emit(0x80 | cc);
1278  int32_t current = pc_offset();
1279  emitl(current);
1280  L->link_to(current);
1281  }
1282 }
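// Illustrative encodings, assuming V8's condition codes match the Intel
// condition numbering (equal == 4): a bound, nearby target emits the short
// form 74 <disp8> (0x70 | cc), while a far or unbound target emits the long
// form 0F 84 <disp32> (0x0F, 0x80 | cc).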
1283 
1284 
1285 void Assembler::j(Condition cc,
1286  Handle<Code> target,
1287  RelocInfo::Mode rmode) {
1288  EnsureSpace ensure_space(this);
1289  ASSERT(is_uint4(cc));
1290  // 0000 1111 1000 tttn #32-bit disp.
1291  emit(0x0F);
1292  emit(0x80 | cc);
1293  emit_code_target(target, rmode);
1294 }
1295 
1296 
1297 void Assembler::jmp(Label* L, Label::Distance distance) {
1298  EnsureSpace ensure_space(this);
1299  const int short_size = sizeof(int8_t);
1300  const int long_size = sizeof(int32_t);
1301  if (L->is_bound()) {
1302  int offs = L->pos() - pc_offset() - 1;
1303  ASSERT(offs <= 0);
1304  if (is_int8(offs - short_size) && !predictable_code_size_) {
1305  // 1110 1011 #8-bit disp.
1306  emit(0xEB);
1307  emit((offs - short_size) & 0xFF);
1308  } else {
1309  // 1110 1001 #32-bit disp.
1310  emit(0xE9);
1311  emitl(offs - long_size);
1312  }
1313  } else if (distance == Label::kNear) {
1314  emit(0xEB);
1315  byte disp = 0x00;
1316  if (L->is_near_linked()) {
1317  int offset = L->near_link_pos() - pc_offset();
1318  ASSERT(is_int8(offset));
1319  disp = static_cast<byte>(offset & 0xFF);
1320  }
1321  L->link_to(pc_offset(), Label::kNear);
1322  emit(disp);
1323  } else if (L->is_linked()) {
1324  // 1110 1001 #32-bit disp.
1325  emit(0xE9);
1326  emitl(L->pos());
1327  L->link_to(pc_offset() - long_size);
1328  } else {
1329  // 1110 1001 #32-bit disp.
1330  ASSERT(L->is_unused());
1331  emit(0xE9);
1332  int32_t current = pc_offset();
1333  emitl(current);
1334  L->link_to(current);
1335  }
1336 }
1337 
1338 
1339 void Assembler::jmp(Handle<Code> target, RelocInfo::Mode rmode) {
1340  EnsureSpace ensure_space(this);
1341  // 1110 1001 #32-bit disp.
1342  emit(0xE9);
1343  emit_code_target(target, rmode);
1344 }
1345 
1346 
1347 void Assembler::jmp(Register target) {
1348  EnsureSpace ensure_space(this);
1349  // Opcode FF/4 r64.
1350  emit_optional_rex_32(target);
1351  emit(0xFF);
1352  emit_modrm(0x4, target);
1353 }
1354 
1355 
1356 void Assembler::jmp(const Operand& src) {
1357  EnsureSpace ensure_space(this);
1358  // Opcode FF/4 m64.
1359  emit_optional_rex_32(src);
1360  emit(0xFF);
1361  emit_operand(0x4, src);
1362 }
1363 
1364 
1365 void Assembler::lea(Register dst, const Operand& src) {
1366  EnsureSpace ensure_space(this);
1367  emit_rex_64(dst, src);
1368  emit(0x8D);
1369  emit_operand(dst, src);
1370 }
1371 
1372 
1373 void Assembler::leal(Register dst, const Operand& src) {
1374  EnsureSpace ensure_space(this);
1375  emit_optional_rex_32(dst, src);
1376  emit(0x8D);
1377  emit_operand(dst, src);
1378 }
1379 
1380 
1381 void Assembler::load_rax(void* value, RelocInfo::Mode mode) {
1382  EnsureSpace ensure_space(this);
1383  emit(0x48); // REX.W
1384  emit(0xA1);
1385  emitq(reinterpret_cast<uintptr_t>(value), mode);
1386 }
1387 
1388 
1389 void Assembler::load_rax(ExternalReference ref) {
1390  load_rax(ref.address(), RelocInfo::EXTERNAL_REFERENCE);
1391 }
1392 
1393 
1394 void Assembler::leave() {
1395  EnsureSpace ensure_space(this);
1396  emit(0xC9);
1397 }
1398 
1399 
1400 void Assembler::movb(Register dst, const Operand& src) {
1401  EnsureSpace ensure_space(this);
1402  if (!dst.is_byte_register()) {
1403  // Register is not one of al, bl, cl, dl. Its encoding needs REX.
1404  emit_rex_32(dst, src);
1405  } else {
1406  emit_optional_rex_32(dst, src);
1407  }
1408  emit(0x8A);
1409  emit_operand(dst, src);
1410 }
1411 
1412 
1413 void Assembler::movb(Register dst, Immediate imm) {
1414  EnsureSpace ensure_space(this);
1415  if (!dst.is_byte_register()) {
1416  emit_rex_32(dst);
1417  }
1418  emit(0xB0 + dst.low_bits());
1419  emit(imm.value_);
1420 }
1421 
1422 
1423 void Assembler::movb(const Operand& dst, Register src) {
1424  EnsureSpace ensure_space(this);
1425  if (!src.is_byte_register()) {
1426  emit_rex_32(src, dst);
1427  } else {
1428  emit_optional_rex_32(src, dst);
1429  }
1430  emit(0x88);
1431  emit_operand(src, dst);
1432 }
1433 
1434 
1435 void Assembler::movw(const Operand& dst, Register src) {
1436  EnsureSpace ensure_space(this);
1437  emit(0x66);
1438  emit_optional_rex_32(src, dst);
1439  emit(0x89);
1440  emit_operand(src, dst);
1441 }
1442 
1443 
1444 void Assembler::movl(Register dst, const Operand& src) {
1445  EnsureSpace ensure_space(this);
1446  emit_optional_rex_32(dst, src);
1447  emit(0x8B);
1448  emit_operand(dst, src);
1449 }
1450 
1451 
1452 void Assembler::movl(Register dst, Register src) {
1453  EnsureSpace ensure_space(this);
1454  if (src.low_bits() == 4) {
1455  emit_optional_rex_32(src, dst);
1456  emit(0x89);
1457  emit_modrm(src, dst);
1458  } else {
1459  emit_optional_rex_32(dst, src);
1460  emit(0x8B);
1461  emit_modrm(dst, src);
1462  }
1463 }
1464 
1465 
1466 void Assembler::movl(const Operand& dst, Register src) {
1467  EnsureSpace ensure_space(this);
1468  emit_optional_rex_32(src, dst);
1469  emit(0x89);
1470  emit_operand(src, dst);
1471 }
1472 
1473 
1474 void Assembler::movl(const Operand& dst, Immediate value) {
1475  EnsureSpace ensure_space(this);
1476  emit_optional_rex_32(dst);
1477  emit(0xC7);
1478  emit_operand(0x0, dst);
1479  emit(value);
1480 }
1481 
1482 
1483 void Assembler::movl(Register dst, Immediate value) {
1484  EnsureSpace ensure_space(this);
1485  emit_optional_rex_32(dst);
1486  emit(0xB8 + dst.low_bits());
1487  emit(value);
1488 }
1489 
1490 
1491 void Assembler::movq(Register dst, const Operand& src) {
1492  EnsureSpace ensure_space(this);
1493  emit_rex_64(dst, src);
1494  emit(0x8B);
1495  emit_operand(dst, src);
1496 }
1497 
1498 
1499 void Assembler::movq(Register dst, Register src) {
1500  EnsureSpace ensure_space(this);
1501  if (src.low_bits() == 4) {
1502  emit_rex_64(src, dst);
1503  emit(0x89);
1504  emit_modrm(src, dst);
1505  } else {
1506  emit_rex_64(dst, src);
1507  emit(0x8B);
1508  emit_modrm(dst, src);
1509  }
1510 }
1511 
1512 
1513 void Assembler::movq(Register dst, Immediate value) {
1514  EnsureSpace ensure_space(this);
1515  emit_rex_64(dst);
1516  emit(0xC7);
1517  emit_modrm(0x0, dst);
1518  emit(value); // Only 32-bit immediates are possible, not 8-bit immediates.
1519 }
1520 
1521 
1522 void Assembler::movq(const Operand& dst, Register src) {
1523  EnsureSpace ensure_space(this);
1524  emit_rex_64(src, dst);
1525  emit(0x89);
1526  emit_operand(src, dst);
1527 }
1528 
1529 
1530 void Assembler::movq(Register dst, void* value, RelocInfo::Mode rmode) {
1531  // This method must not be used with heap object references. The stored
1532  // address is not GC safe. Use the handle version instead.
1533  ASSERT(rmode > RelocInfo::LAST_GCED_ENUM);
1534  EnsureSpace ensure_space(this);
1535  emit_rex_64(dst);
1536  emit(0xB8 | dst.low_bits());
1537  emitq(reinterpret_cast<uintptr_t>(value), rmode);
1538 }
1539 
1540 
1541 void Assembler::movq(Register dst, int64_t value, RelocInfo::Mode rmode) {
1542  // Non-relocatable values might not need a 64-bit representation.
1543  if (rmode == RelocInfo::NONE) {
1544  // Sadly, there is no zero or sign extending move for 8-bit immediates.
1545  if (is_int32(value)) {
1546  movq(dst, Immediate(static_cast<int32_t>(value)));
1547  return;
1548  } else if (is_uint32(value)) {
1549  movl(dst, Immediate(static_cast<int32_t>(value)));
1550  return;
1551  }
1552  // Value cannot be represented by 32 bits, so do a full 64 bit immediate
1553  // value.
1554  }
1555  EnsureSpace ensure_space(this);
1556  emit_rex_64(dst);
1557  emit(0xB8 | dst.low_bits());
1558  emitq(value, rmode);
1559 }
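// Illustrative example of the size optimization above: movq(rax, -1) fits
// in int32, so it becomes the 7-byte sign-extended form (48 C7 C0 FF FF FF FF),
// and movq(rax, 0xFFFFFFFF) fits in uint32, so it becomes the 5-byte movl
// form (B8 FF FF FF FF), relying on the implicit zero-extension of 32-bit
// writes. Only truly 64-bit values use the 10-byte REX.W B8|r imm64 form.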
1560 
1561 
1562 void Assembler::movq(Register dst, ExternalReference ref) {
1563  int64_t value = reinterpret_cast<int64_t>(ref.address());
1564  movq(dst, value, RelocInfo::EXTERNAL_REFERENCE);
1565 }
1566 
1567 
1568 void Assembler::movq(const Operand& dst, Immediate value) {
1569  EnsureSpace ensure_space(this);
1570  emit_rex_64(dst);
1571  emit(0xC7);
1572  emit_operand(0, dst);
1573  emit(value);
1574 }
1575 
1576 
1577 // Loads the ip-relative location of the src label into the target location
1578 // (as a 32-bit offset sign extended to 64-bit).
1579 void Assembler::movl(const Operand& dst, Label* src) {
1580  EnsureSpace ensure_space(this);
1581  emit_optional_rex_32(dst);
1582  emit(0xC7);
1583  emit_operand(0, dst);
1584  if (src->is_bound()) {
1585  int offset = src->pos() - pc_offset() - sizeof(int32_t);
1586  ASSERT(offset <= 0);
1587  emitl(offset);
1588  } else if (src->is_linked()) {
1589  emitl(src->pos());
1590  src->link_to(pc_offset() - sizeof(int32_t));
1591  } else {
1592  ASSERT(src->is_unused());
1593  int32_t current = pc_offset();
1594  emitl(current);
1595  src->link_to(current);
1596  }
1597 }
1598 
1599 
1600 void Assembler::movq(Register dst, Handle<Object> value, RelocInfo::Mode mode) {
1601  // If there is no relocation info, emit the value of the handle efficiently
1602  // (possibly using less than 8 bytes for the value).
1603  if (mode == RelocInfo::NONE) {
1604  // There is no possible reason to store a heap pointer without relocation
1605  // info, so it must be a smi.
1606  ASSERT(value->IsSmi());
1607  movq(dst, reinterpret_cast<int64_t>(*value), RelocInfo::NONE);
1608  } else {
1609  EnsureSpace ensure_space(this);
1610  ASSERT(value->IsHeapObject());
1611  ASSERT(!HEAP->InNewSpace(*value));
1612  emit_rex_64(dst);
1613  emit(0xB8 | dst.low_bits());
1614  emitq(reinterpret_cast<uintptr_t>(value.location()), mode);
1615  }
1616 }
1617 
1618 
1619 void Assembler::movsxbq(Register dst, const Operand& src) {
1620  EnsureSpace ensure_space(this);
1621  emit_rex_64(dst, src);
1622  emit(0x0F);
1623  emit(0xBE);
1624  emit_operand(dst, src);
1625 }
1626 
1627 
1628 void Assembler::movsxwq(Register dst, const Operand& src) {
1629  EnsureSpace ensure_space(this);
1630  emit_rex_64(dst, src);
1631  emit(0x0F);
1632  emit(0xBF);
1633  emit_operand(dst, src);
1634 }
1635 
1636 
1637 void Assembler::movsxlq(Register dst, Register src) {
1638  EnsureSpace ensure_space(this);
1639  emit_rex_64(dst, src);
1640  emit(0x63);
1641  emit_modrm(dst, src);
1642 }
1643 
1644 
1645 void Assembler::movsxlq(Register dst, const Operand& src) {
1646  EnsureSpace ensure_space(this);
1647  emit_rex_64(dst, src);
1648  emit(0x63);
1649  emit_operand(dst, src);
1650 }
1651 
1652 
1653 void Assembler::movzxbq(Register dst, const Operand& src) {
1654  EnsureSpace ensure_space(this);
1655  // 32 bit operations zero the top 32 bits of 64 bit registers. Therefore
1656  // there is no need to make this a 64 bit operation.
1657  emit_optional_rex_32(dst, src);
1658  emit(0x0F);
1659  emit(0xB6);
1660  emit_operand(dst, src);
1661 }
1662 
1663 
1664 void Assembler::movzxbl(Register dst, const Operand& src) {
1665  EnsureSpace ensure_space(this);
1666  emit_optional_rex_32(dst, src);
1667  emit(0x0F);
1668  emit(0xB6);
1669  emit_operand(dst, src);
1670 }
1671 
1672 
1673 void Assembler::movzxwq(Register dst, const Operand& src) {
1674  EnsureSpace ensure_space(this);
1675  emit_optional_rex_32(dst, src);
1676  emit(0x0F);
1677  emit(0xB7);
1678  emit_operand(dst, src);
1679 }
1680 
1681 
1682 void Assembler::movzxwl(Register dst, const Operand& src) {
1683  EnsureSpace ensure_space(this);
1684  emit_optional_rex_32(dst, src);
1685  emit(0x0F);
1686  emit(0xB7);
1687  emit_operand(dst, src);
1688 }
1689 
1690 
1691 void Assembler::repmovsb() {
1692  EnsureSpace ensure_space(this);
1693  emit(0xF3);
1694  emit(0xA4);
1695 }
1696 
1697 
1698 void Assembler::repmovsw() {
1699  EnsureSpace ensure_space(this);
1700  emit(0x66); // Operand size override.
1701  emit(0xF3);
1702  emit(0xA4);
1703 }
1704 
1705 
1706 void Assembler::repmovsl() {
1707  EnsureSpace ensure_space(this);
1708  emit(0xF3);
1709  emit(0xA5);
1710 }
1711 
1712 
1713 void Assembler::repmovsq() {
1714  EnsureSpace ensure_space(this);
1715  emit(0xF3);
1716  emit_rex_64();
1717  emit(0xA5);
1718 }
1719 
1720 
1721 void Assembler::mul(Register src) {
1722  EnsureSpace ensure_space(this);
1723  emit_rex_64(src);
1724  emit(0xF7);
1725  emit_modrm(0x4, src);
1726 }
1727 
1728 
1729 void Assembler::neg(Register dst) {
1730  EnsureSpace ensure_space(this);
1731  emit_rex_64(dst);
1732  emit(0xF7);
1733  emit_modrm(0x3, dst);
1734 }
1735 
1736 
1737 void Assembler::negl(Register dst) {
1738  EnsureSpace ensure_space(this);
1739  emit_optional_rex_32(dst);
1740  emit(0xF7);
1741  emit_modrm(0x3, dst);
1742 }
1743 
1744 
1745 void Assembler::neg(const Operand& dst) {
1746  EnsureSpace ensure_space(this);
1747  emit_rex_64(dst);
1748  emit(0xF7);
1749  emit_operand(3, dst);
1750 }
1751 
1752 
1753 void Assembler::nop() {
1754  EnsureSpace ensure_space(this);
1755  emit(0x90);
1756 }
1757 
1758 
1759 void Assembler::not_(Register dst) {
1760  EnsureSpace ensure_space(this);
1761  emit_rex_64(dst);
1762  emit(0xF7);
1763  emit_modrm(0x2, dst);
1764 }
1765 
1766 
1767 void Assembler::not_(const Operand& dst) {
1768  EnsureSpace ensure_space(this);
1769  emit_rex_64(dst);
1770  emit(0xF7);
1771  emit_operand(2, dst);
1772 }
1773 
1774 
1775 void Assembler::notl(Register dst) {
1776  EnsureSpace ensure_space(this);
1777  emit_optional_rex_32(dst);
1778  emit(0xF7);
1779  emit_modrm(0x2, dst);
1780 }
1781 
1782 
1783 void Assembler::Nop(int n) {
1784  // The recommended multi-byte sequences of NOP instructions from the Intel 64
1785  // and IA-32 Architectures Software Developer's Manual.
1786  //
1787  // Length Assembly Byte Sequence
1788  // 2 bytes 66 NOP 66 90H
1789  // 3 bytes NOP DWORD ptr [EAX] 0F 1F 00H
1790  // 4 bytes NOP DWORD ptr [EAX + 00H] 0F 1F 40 00H
1791  // 5 bytes NOP DWORD ptr [EAX + EAX*1 + 00H] 0F 1F 44 00 00H
1792  // 6 bytes 66 NOP DWORD ptr [EAX + EAX*1 + 00H] 66 0F 1F 44 00 00H
1793  // 7 bytes NOP DWORD ptr [EAX + 00000000H] 0F 1F 80 00 00 00 00H
1794  // 8 bytes NOP DWORD ptr [EAX + EAX*1 + 00000000H] 0F 1F 84 00 00 00 00 00H
1795  // 9 bytes 66 NOP DWORD ptr [EAX + EAX*1 + 66 0F 1F 84 00 00 00 00
1796  // 00000000H] 00H
1797 
1798  EnsureSpace ensure_space(this);
1799  while (n > 0) {
1800  switch (n) {
1801  case 2:
1802  emit(0x66);
1803  case 1:
1804  emit(0x90);
1805  return;
1806  case 3:
1807  emit(0x0f);
1808  emit(0x1f);
1809  emit(0x00);
1810  return;
1811  case 4:
1812  emit(0x0f);
1813  emit(0x1f);
1814  emit(0x40);
1815  emit(0x00);
1816  return;
1817  case 6:
1818  emit(0x66);
1819  case 5:
1820  emit(0x0f);
1821  emit(0x1f);
1822  emit(0x44);
1823  emit(0x00);
1824  emit(0x00);
1825  return;
1826  case 7:
1827  emit(0x0f);
1828  emit(0x1f);
1829  emit(0x80);
1830  emit(0x00);
1831  emit(0x00);
1832  emit(0x00);
1833  emit(0x00);
1834  return;
1835  default:
1836  case 11:
1837  emit(0x66);
1838  n--;
1839  case 10:
1840  emit(0x66);
1841  n--;
1842  case 9:
1843  emit(0x66);
1844  n--;
1845  case 8:
1846  emit(0x0f);
1847  emit(0x1f);
1848  emit(0x84);
1849  emit(0x00);
1850  emit(0x00);
1851  emit(0x00);
1852  emit(0x00);
1853  emit(0x00);
1854  n -= 8;
1855  }
1856  }
1857 }
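// Illustrative example: Nop(13) falls through the cases above and emits an
// 11-byte NOP (66 66 66 0F 1F 84 00 00 00 00 00) followed by a 2-byte NOP
// (66 90); the fall-throughs in the switch are intentional.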
1858 
1859 
1860 void Assembler::pop(Register dst) {
1861  EnsureSpace ensure_space(this);
1862  emit_optional_rex_32(dst);
1863  emit(0x58 | dst.low_bits());
1864 }
1865 
1866 
1867 void Assembler::pop(const Operand& dst) {
1868  EnsureSpace ensure_space(this);
1869  emit_optional_rex_32(dst);
1870  emit(0x8F);
1871  emit_operand(0, dst);
1872 }
1873 
1874 
1875 void Assembler::popfq() {
1876  EnsureSpace ensure_space(this);
1877  emit(0x9D);
1878 }
1879 
1880 
1881 void Assembler::push(Register src) {
1882  EnsureSpace ensure_space(this);
1883  emit_optional_rex_32(src);
1884  emit(0x50 | src.low_bits());
1885 }
1886 
1887 
1888 void Assembler::push(const Operand& src) {
1889  EnsureSpace ensure_space(this);
1890  emit_optional_rex_32(src);
1891  emit(0xFF);
1892  emit_operand(6, src);
1893 }
1894 
1895 
1896 void Assembler::push(Immediate value) {
1897  EnsureSpace ensure_space(this);
1898  if (is_int8(value.value_)) {
1899  emit(0x6A);
1900  emit(value.value_); // Emit low byte of value.
1901  } else {
1902  emit(0x68);
1903  emitl(value.value_);
1904  }
1905 }
1906 
1907 
1908 void Assembler::push_imm32(int32_t imm32) {
1909  EnsureSpace ensure_space(this);
1910  emit(0x68);
1911  emitl(imm32);
1912 }
1913 
1914 
1915 void Assembler::pushfq() {
1916  EnsureSpace ensure_space(this);
1917  emit(0x9C);
1918 }
1919 
1920 
1921 void Assembler::rdtsc() {
1922  EnsureSpace ensure_space(this);
1923  emit(0x0F);
1924  emit(0x31);
1925 }
1926 
1927 
1928 void Assembler::ret(int imm16) {
1929  EnsureSpace ensure_space(this);
1930  ASSERT(is_uint16(imm16));
1931  if (imm16 == 0) {
1932  emit(0xC3);
1933  } else {
1934  emit(0xC2);
1935  emit(imm16 & 0xFF);
1936  emit((imm16 >> 8) & 0xFF);
1937  }
1938 }
1939 
1940 
1941 void Assembler::setcc(Condition cc, Register reg) {
1942  if (cc > last_condition) {
1943  movb(reg, Immediate(cc == always ? 1 : 0));
1944  return;
1945  }
1946  EnsureSpace ensure_space(this);
1947  ASSERT(is_uint4(cc));
1948  if (!reg.is_byte_register()) { // Use x64 byte registers, where different.
1949  emit_rex_32(reg);
1950  }
1951  emit(0x0F);
1952  emit(0x90 | cc);
1953  emit_modrm(0x0, reg);
1954 }
1955 
1956 
1957 void Assembler::shld(Register dst, Register src) {
1958  EnsureSpace ensure_space(this);
1959  emit_rex_64(src, dst);
1960  emit(0x0F);
1961  emit(0xA5);
1962  emit_modrm(src, dst);
1963 }
1964 
1965 
1966 void Assembler::shrd(Register dst, Register src) {
1967  EnsureSpace ensure_space(this);
1968  emit_rex_64(src, dst);
1969  emit(0x0F);
1970  emit(0xAD);
1971  emit_modrm(src, dst);
1972 }
1973 
1974 
1975 void Assembler::xchg(Register dst, Register src) {
1976  EnsureSpace ensure_space(this);
1977  if (src.is(rax) || dst.is(rax)) { // Single-byte encoding
1978  Register other = src.is(rax) ? dst : src;
1979  emit_rex_64(other);
1980  emit(0x90 | other.low_bits());
1981  } else if (dst.low_bits() == 4) {
1982  emit_rex_64(dst, src);
1983  emit(0x87);
1984  emit_modrm(dst, src);
1985  } else {
1986  emit_rex_64(src, dst);
1987  emit(0x87);
1988  emit_modrm(src, dst);
1989  }
1990 }
1991 
1992 
1993 void Assembler::store_rax(void* dst, RelocInfo::Mode mode) {
1994  EnsureSpace ensure_space(this);
1995  emit(0x48); // REX.W
1996  emit(0xA3);
1997  emitq(reinterpret_cast<uintptr_t>(dst), mode);
1998 }
1999 
2000 
2001 void Assembler::store_rax(ExternalReference ref) {
2002  store_rax(ref.address(), RelocInfo::EXTERNAL_REFERENCE);
2003 }
2004 
2005 
2006 void Assembler::testb(Register dst, Register src) {
2007  EnsureSpace ensure_space(this);
2008  if (src.low_bits() == 4) {
2009  emit_rex_32(src, dst);
2010  emit(0x84);
2011  emit_modrm(src, dst);
2012  } else {
2013  if (!dst.is_byte_register() || !src.is_byte_register()) {
2014  // Register is not one of al, bl, cl, dl. Its encoding needs REX.
2015  emit_rex_32(dst, src);
2016  }
2017  emit(0x84);
2018  emit_modrm(dst, src);
2019  }
2020 }
2021 
2022 
2023 void Assembler::testb(Register reg, Immediate mask) {
2024  ASSERT(is_int8(mask.value_) || is_uint8(mask.value_));
2025  EnsureSpace ensure_space(this);
2026  if (reg.is(rax)) {
2027  emit(0xA8);
2028  emit(mask.value_); // Low byte emitted.
2029  } else {
2030  if (!reg.is_byte_register()) {
2031  // Register is not one of al, bl, cl, dl. Its encoding needs REX.
2032  emit_rex_32(reg);
2033  }
2034  emit(0xF6);
2035  emit_modrm(0x0, reg);
2036  emit(mask.value_); // Low byte emitted.
2037  }
2038 }
2039 
2040 
2041 void Assembler::testb(const Operand& op, Immediate mask) {
2042  ASSERT(is_int8(mask.value_) || is_uint8(mask.value_));
2043  EnsureSpace ensure_space(this);
2044  emit_optional_rex_32(rax, op);
2045  emit(0xF6);
2046  emit_operand(rax, op); // Operation code 0
2047  emit(mask.value_); // Low byte emitted.
2048 }
2049 
2050 
2051 void Assembler::testb(const Operand& op, Register reg) {
2052  EnsureSpace ensure_space(this);
2053  if (!reg.is_byte_register()) {
2054  // Register is not one of al, bl, cl, dl. Its encoding needs REX.
2055  emit_rex_32(reg, op);
2056  } else {
2057  emit_optional_rex_32(reg, op);
2058  }
2059  emit(0x84);
2060  emit_operand(reg, op);
2061 }
2062 
2063 
2064 void Assembler::testl(Register dst, Register src) {
2065  EnsureSpace ensure_space(this);
2066  if (src.low_bits() == 4) {
2067  emit_optional_rex_32(src, dst);
2068  emit(0x85);
2069  emit_modrm(src, dst);
2070  } else {
2071  emit_optional_rex_32(dst, src);
2072  emit(0x85);
2073  emit_modrm(dst, src);
2074  }
2075 }
2076 
2077 
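// For rax the short TEST EAX, imm32 form (0xA9) is used below; other
// registers take F7 /0 id.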
2078 void Assembler::testl(Register reg, Immediate mask) {
2079  // testl with a mask that fits in the low byte is exactly testb.
2080  if (is_uint8(mask.value_)) {
2081  testb(reg, mask);
2082  return;
2083  }
2084  EnsureSpace ensure_space(this);
2085  if (reg.is(rax)) {
2086  emit(0xA9);
2087  emit(mask);
2088  } else {
2089  emit_optional_rex_32(rax, reg);
2090  emit(0xF7);
2091  emit_modrm(0x0, reg);
2092  emit(mask);
2093  }
2094 }
2095 
2096 
2097 void Assembler::testl(const Operand& op, Immediate mask) {
2098  // testl with a mask that fits in the low byte is exactly testb.
2099  if (is_uint8(mask.value_)) {
2100  testb(op, mask);
2101  return;
2102  }
2103  EnsureSpace ensure_space(this);
2104  emit_optional_rex_32(rax, op);
2105  emit(0xF7);
2106  emit_operand(rax, op); // Operation code 0
2107  emit(mask);
2108 }
2109 
2110 
2111 void Assembler::testq(const Operand& op, Register reg) {
2112  EnsureSpace ensure_space(this);
2113  emit_rex_64(reg, op);
2114  emit(0x85);
2115  emit_operand(reg, op);
2116 }
2117 
2118 
2119 void Assembler::testq(Register dst, Register src) {
2120  EnsureSpace ensure_space(this);
2121  if (src.low_bits() == 4) {
2122  emit_rex_64(src, dst);
2123  emit(0x85);
2124  emit_modrm(src, dst);
2125  } else {
2126  emit_rex_64(dst, src);
2127  emit(0x85);
2128  emit_modrm(dst, src);
2129  }
2130 }
2131 
2132 
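// Same as testl above, but with a REX.W prefix; the 32-bit immediate is
// sign-extended to 64 bits by the CPU.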
2133 void Assembler::testq(Register dst, Immediate mask) {
2134  EnsureSpace ensure_space(this);
2135  if (dst.is(rax)) {
2136  emit_rex_64();
2137  emit(0xA9);
2138  emit(mask);
2139  } else {
2140  emit_rex_64(dst);
2141  emit(0xF7);
2142  emit_modrm(0, dst);
2143  emit(mask);
2144  }
2145 }
2146 
2147 
2148 // FPU instructions.
2149 
2150 
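// The x87 instructions below operate on the FPU register stack; an integer
// argument i addresses ST(i) relative to the top of the stack.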
2151 void Assembler::fld(int i) {
2152  EnsureSpace ensure_space(this);
2153  emit_farith(0xD9, 0xC0, i);
2154 }
2155 
2156 
2157 void Assembler::fld1() {
2158  EnsureSpace ensure_space(this);
2159  emit(0xD9);
2160  emit(0xE8);
2161 }
2162 
2163 
2164 void Assembler::fldz() {
2165  EnsureSpace ensure_space(this);
2166  emit(0xD9);
2167  emit(0xEE);
2168 }
2169 
2170 
2171 void Assembler::fldpi() {
2172  EnsureSpace ensure_space(this);
2173  emit(0xD9);
2174  emit(0xEB);
2175 }
2176 
2177 
2178 void Assembler::fldln2() {
2179  EnsureSpace ensure_space(this);
2180  emit(0xD9);
2181  emit(0xED);
2182 }
2183 
2184 
2185 void Assembler::fld_s(const Operand& adr) {
2186  EnsureSpace ensure_space(this);
2187  emit_optional_rex_32(adr);
2188  emit(0xD9);
2189  emit_operand(0, adr);
2190 }
2191 
2192 
2193 void Assembler::fld_d(const Operand& adr) {
2194  EnsureSpace ensure_space(this);
2195  emit_optional_rex_32(adr);
2196  emit(0xDD);
2197  emit_operand(0, adr);
2198 }
2199 
2200 
2201 void Assembler::fstp_s(const Operand& adr) {
2202  EnsureSpace ensure_space(this);
2203  emit_optional_rex_32(adr);
2204  emit(0xD9);
2205  emit_operand(3, adr);
2206 }
2207 
2208 
2209 void Assembler::fstp_d(const Operand& adr) {
2210  EnsureSpace ensure_space(this);
2211  emit_optional_rex_32(adr);
2212  emit(0xDD);
2213  emit_operand(3, adr);
2214 }
2215 
2216 
2217 void Assembler::fstp(int index) {
2218  ASSERT(is_uint3(index));
2219  EnsureSpace ensure_space(this);
2220  emit_farith(0xDD, 0xD8, index);
2221 }
2222 
2223 
2224 void Assembler::fild_s(const Operand& adr) {
2225  EnsureSpace ensure_space(this);
2226  emit_optional_rex_32(adr);
2227  emit(0xDB);
2228  emit_operand(0, adr);
2229 }
2230 
2231 
2232 void Assembler::fild_d(const Operand& adr) {
2233  EnsureSpace ensure_space(this);
2234  emit_optional_rex_32(adr);
2235  emit(0xDF);
2236  emit_operand(5, adr);
2237 }
2238 
2239 
2240 void Assembler::fistp_s(const Operand& adr) {
2241  EnsureSpace ensure_space(this);
2242  emit_optional_rex_32(adr);
2243  emit(0xDB);
2244  emit_operand(3, adr);
2245 }
2246 
2247 
2248 void Assembler::fisttp_s(const Operand& adr) {
2249  ASSERT(CpuFeatures::IsEnabled(SSE3));
2250  EnsureSpace ensure_space(this);
2251  emit_optional_rex_32(adr);
2252  emit(0xDB);
2253  emit_operand(1, adr);
2254 }
2255 
2256 
2257 void Assembler::fisttp_d(const Operand& adr) {
2258  ASSERT(CpuFeatures::IsEnabled(SSE3));
2259  EnsureSpace ensure_space(this);
2260  emit_optional_rex_32(adr);
2261  emit(0xDD);
2262  emit_operand(1, adr);
2263 }
2264 
2265 
2266 void Assembler::fist_s(const Operand& adr) {
2267  EnsureSpace ensure_space(this);
2268  emit_optional_rex_32(adr);
2269  emit(0xDB);
2270  emit_operand(2, adr);
2271 }
2272 
2273 
2274 void Assembler::fistp_d(const Operand& adr) {
2275  EnsureSpace ensure_space(this);
2276  emit_optional_rex_32(adr);
2277  emit(0xDF);
2278  emit_operand(7, adr);
2279 }
2280 
2281 
2282 void Assembler::fabs() {
2283  EnsureSpace ensure_space(this);
2284  emit(0xD9);
2285  emit(0xE1);
2286 }
2287 
2288 
2289 void Assembler::fchs() {
2290  EnsureSpace ensure_space(this);
2291  emit(0xD9);
2292  emit(0xE0);
2293 }
2294 
2295 
2296 void Assembler::fcos() {
2297  EnsureSpace ensure_space(this);
2298  emit(0xD9);
2299  emit(0xFF);
2300 }
2301 
2302 
2303 void Assembler::fsin() {
2304  EnsureSpace ensure_space(this);
2305  emit(0xD9);
2306  emit(0xFE);
2307 }
2308 
2309 
2310 void Assembler::fptan() {
2311  EnsureSpace ensure_space(this);
2312  emit(0xD9);
2313  emit(0xF2);
2314 }
2315 
2316 
2317 void Assembler::fyl2x() {
2318  EnsureSpace ensure_space(this);
2319  emit(0xD9);
2320  emit(0xF1);
2321 }
2322 
2323 
2324 void Assembler::f2xm1() {
2325  EnsureSpace ensure_space(this);
2326  emit(0xD9);
2327  emit(0xF0);
2328 }
2329 
2330 
2331 void Assembler::fscale() {
2332  EnsureSpace ensure_space(this);
2333  emit(0xD9);
2334  emit(0xFD);
2335 }
2336 
2337 
2338 void Assembler::fninit() {
2339  EnsureSpace ensure_space(this);
2340  emit(0xDB);
2341  emit(0xE3);
2342 }
2343 
2344 
2345 void Assembler::fadd(int i) {
2346  EnsureSpace ensure_space(this);
2347  emit_farith(0xDC, 0xC0, i);
2348 }
2349 
2350 
2351 void Assembler::fsub(int i) {
2352  EnsureSpace ensure_space(this);
2353  emit_farith(0xDC, 0xE8, i);
2354 }
2355 
2356 
2357 void Assembler::fisub_s(const Operand& adr) {
2358  EnsureSpace ensure_space(this);
2359  emit_optional_rex_32(adr);
2360  emit(0xDA);
2361  emit_operand(4, adr);
2362 }
2363 
2364 
2365 void Assembler::fmul(int i) {
2366  EnsureSpace ensure_space(this);
2367  emit_farith(0xDC, 0xC8, i);
2368 }
2369 
2370 
2371 void Assembler::fdiv(int i) {
2372  EnsureSpace ensure_space(this);
2373  emit_farith(0xDC, 0xF8, i);
2374 }
2375 
2376 
2377 void Assembler::faddp(int i) {
2378  EnsureSpace ensure_space(this);
2379  emit_farith(0xDE, 0xC0, i);
2380 }
2381 
2382 
2383 void Assembler::fsubp(int i) {
2384  EnsureSpace ensure_space(this);
2385  emit_farith(0xDE, 0xE8, i);
2386 }
2387 
2388 
2389 void Assembler::fsubrp(int i) {
2390  EnsureSpace ensure_space(this);
2391  emit_farith(0xDE, 0xE0, i);
2392 }
2393 
2394 
2395 void Assembler::fmulp(int i) {
2396  EnsureSpace ensure_space(this);
2397  emit_farith(0xDE, 0xC8, i);
2398 }
2399 
2400 
2401 void Assembler::fdivp(int i) {
2402  EnsureSpace ensure_space(this);
2403  emit_farith(0xDE, 0xF8, i);
2404 }
2405 
2406 
2407 void Assembler::fprem() {
2408  EnsureSpace ensure_space(this);
2409  emit(0xD9);
2410  emit(0xF8);
2411 }
2412 
2413 
2414 void Assembler::fprem1() {
2415  EnsureSpace ensure_space(this);
2416  emit(0xD9);
2417  emit(0xF5);
2418 }
2419 
2420 
2421 void Assembler::fxch(int i) {
2422  EnsureSpace ensure_space(this);
2423  emit_farith(0xD9, 0xC8, i);
2424 }
2425 
2426 
2427 void Assembler::fincstp() {
2428  EnsureSpace ensure_space(this);
2429  emit(0xD9);
2430  emit(0xF7);
2431 }
2432 
2433 
2434 void Assembler::ffree(int i) {
2435  EnsureSpace ensure_space(this);
2436  emit_farith(0xDD, 0xC0, i);
2437 }
2438 
2439 
2440 void Assembler::ftst() {
2441  EnsureSpace ensure_space(this);
2442  emit(0xD9);
2443  emit(0xE4);
2444 }
2445 
2446 
2447 void Assembler::fucomp(int i) {
2448  EnsureSpace ensure_space(this);
2449  emit_farith(0xDD, 0xE8, i);
2450 }
2451 
2452 
2453 void Assembler::fucompp() {
2454  EnsureSpace ensure_space(this);
2455  emit(0xDA);
2456  emit(0xE9);
2457 }
2458 
2459 
2460 void Assembler::fucomi(int i) {
2461  EnsureSpace ensure_space(this);
2462  emit(0xDB);
2463  emit(0xE8 + i);
2464 }
2465 
2466 
2467 void Assembler::fucomip() {
2468  EnsureSpace ensure_space(this);
2469  emit(0xDF);
2470  emit(0xE9);
2471 }
2472 
2473 
2474 void Assembler::fcompp() {
2475  EnsureSpace ensure_space(this);
2476  emit(0xDE);
2477  emit(0xD9);
2478 }
2479 
2480 
2481 void Assembler::fnstsw_ax() {
2482  EnsureSpace ensure_space(this);
2483  emit(0xDF);
2484  emit(0xE0);
2485 }
2486 
2487 
2488 void Assembler::fwait() {
2489  EnsureSpace ensure_space(this);
2490  emit(0x9B);
2491 }
2492 
2493 
2494 void Assembler::frndint() {
2495  EnsureSpace ensure_space(this);
2496  emit(0xD9);
2497  emit(0xFC);
2498 }
2499 
2500 
2501 void Assembler::fnclex() {
2502  EnsureSpace ensure_space(this);
2503  emit(0xDB);
2504  emit(0xE2);
2505 }
2506 
2507 
2508 void Assembler::sahf() {
2509  // TODO(X64): Test for presence. Not all 64-bit Intel CPUs have SAHF
2510  // in 64-bit mode. Test CPUID.
2511  EnsureSpace ensure_space(this);
2512  emit(0x9E);
2513 }
2514 
2515 
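// Emits the two-byte register form of an x87 instruction: the stack index
// is added to the second opcode byte, so e.g. emit_farith(0xDC, 0xC0, 3)
// produces DC C3 (FADD ST(3), ST(0)).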
2516 void Assembler::emit_farith(int b1, int b2, int i) {
2517  ASSERT(is_uint8(b1) && is_uint8(b2)); // wrong opcode
2518  ASSERT(is_uint3(i)); // illegal stack offset
2519  emit(b1);
2520  emit(b2 + i);
2521 }
2522 
2523 // SSE 2 operations.
2524 
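// Most instructions below follow the same pattern: a mandatory prefix
// (0x66, 0xF2 or 0xF3) selecting the operand type, an optional REX prefix,
// the 0x0F escape byte, the opcode, and a ModR/M byte produced by
// emit_sse_operand().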
2525 void Assembler::movd(XMMRegister dst, Register src) {
2526  EnsureSpace ensure_space(this);
2527  emit(0x66);
2528  emit_optional_rex_32(dst, src);
2529  emit(0x0F);
2530  emit(0x6E);
2531  emit_sse_operand(dst, src);
2532 }
2533 
2534 
2535 void Assembler::movd(Register dst, XMMRegister src) {
2536  EnsureSpace ensure_space(this);
2537  emit(0x66);
2538  emit_optional_rex_32(src, dst);
2539  emit(0x0F);
2540  emit(0x7E);
2541  emit_sse_operand(src, dst);
2542 }
2543 
2544 
2545 void Assembler::movq(XMMRegister dst, Register src) {
2546  EnsureSpace ensure_space(this);
2547  emit(0x66);
2548  emit_rex_64(dst, src);
2549  emit(0x0F);
2550  emit(0x6E);
2551  emit_sse_operand(dst, src);
2552 }
2553 
2554 
2555 void Assembler::movq(Register dst, XMMRegister src) {
2556  EnsureSpace ensure_space(this);
2557  emit(0x66);
2558  emit_rex_64(src, dst);
2559  emit(0x0F);
2560  emit(0x7E);
2561  emit_sse_operand(src, dst);
2562 }
2563 
2564 
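// Two equivalent encodings exist for a 64-bit XMM-to-XMM move: the load
// form F3 0F 7E and the store form 66 0F D6; which one is emitted depends
// on the destination register, see below.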
2565 void Assembler::movq(XMMRegister dst, XMMRegister src) {
2566  EnsureSpace ensure_space(this);
2567  if (dst.low_bits() == 4) {
2568  // Avoid unnecessary SIB byte.
2569  emit(0xf3);
2570  emit_optional_rex_32(dst, src);
2571  emit(0x0F);
2572  emit(0x7e);
2573  emit_sse_operand(dst, src);
2574  } else {
2575  emit(0x66);
2576  emit_optional_rex_32(src, dst);
2577  emit(0x0F);
2578  emit(0xD6);
2579  emit_sse_operand(src, dst);
2580  }
2581 }
2582 
2583 void Assembler::movdqa(const Operand& dst, XMMRegister src) {
2584  EnsureSpace ensure_space(this);
2585  emit(0x66);
2586  emit_rex_64(src, dst);
2587  emit(0x0F);
2588  emit(0x7F);
2589  emit_sse_operand(src, dst);
2590 }
2591 
2592 
2593 void Assembler::movdqa(XMMRegister dst, const Operand& src) {
2594  EnsureSpace ensure_space(this);
2595  emit(0x66);
2596  emit_rex_64(dst, src);
2597  emit(0x0F);
2598  emit(0x6F);
2599  emit_sse_operand(dst, src);
2600 }
2601 
2602 
2603 void Assembler::extractps(Register dst, XMMRegister src, byte imm8) {
2604  ASSERT(CpuFeatures::IsSupported(SSE4_1));
2605  ASSERT(is_uint8(imm8));
2606  EnsureSpace ensure_space(this);
2607  emit(0x66);
2608  emit_optional_rex_32(dst, src);
2609  emit(0x0F);
2610  emit(0x3A);
2611  emit(0x17);
2612  emit_sse_operand(dst, src);
2613  emit(imm8);
2614 }
2615 
2616 
2617 void Assembler::movsd(const Operand& dst, XMMRegister src) {
2618  EnsureSpace ensure_space(this);
2619  emit(0xF2); // double
2620  emit_optional_rex_32(src, dst);
2621  emit(0x0F);
2622  emit(0x11); // store
2623  emit_sse_operand(src, dst);
2624 }
2625 
2626 
2627 void Assembler::movsd(XMMRegister dst, XMMRegister src) {
2628  EnsureSpace ensure_space(this);
2629  emit(0xF2); // double
2630  emit_optional_rex_32(dst, src);
2631  emit(0x0F);
2632  emit(0x10); // load
2633  emit_sse_operand(dst, src);
2634 }
2635 
2636 
2637 void Assembler::movsd(XMMRegister dst, const Operand& src) {
2638  EnsureSpace ensure_space(this);
2639  emit(0xF2); // double
2640  emit_optional_rex_32(dst, src);
2641  emit(0x0F);
2642  emit(0x10); // load
2643  emit_sse_operand(dst, src);
2644 }
2645 
2646 
2647 void Assembler::movaps(XMMRegister dst, XMMRegister src) {
2648  EnsureSpace ensure_space(this);
2649  if (src.low_bits() == 4) {
2650  // Try to avoid an unnecessary SIB byte.
2651  emit_optional_rex_32(src, dst);
2652  emit(0x0F);
2653  emit(0x29);
2654  emit_sse_operand(src, dst);
2655  } else {
2656  emit_optional_rex_32(dst, src);
2657  emit(0x0F);
2658  emit(0x28);
2659  emit_sse_operand(dst, src);
2660  }
2661 }
2662 
2663 
2664 void Assembler::movapd(XMMRegister dst, XMMRegister src) {
2665  EnsureSpace ensure_space(this);
2666  if (src.low_bits() == 4) {
2667  // Try to avoid an unnecessary SIB byte.
2668  emit(0x66);
2669  emit_optional_rex_32(src, dst);
2670  emit(0x0F);
2671  emit(0x29);
2672  emit_sse_operand(src, dst);
2673  } else {
2674  emit(0x66);
2675  emit_optional_rex_32(dst, src);
2676  emit(0x0F);
2677  emit(0x28);
2678  emit_sse_operand(dst, src);
2679  }
2680 }
2681 
2682 
2683 void Assembler::movss(XMMRegister dst, const Operand& src) {
2684  EnsureSpace ensure_space(this);
2685  emit(0xF3); // single
2686  emit_optional_rex_32(dst, src);
2687  emit(0x0F);
2688  emit(0x10); // load
2689  emit_sse_operand(dst, src);
2690 }
2691 
2692 
2693 void Assembler::movss(const Operand& src, XMMRegister dst) {
2694  EnsureSpace ensure_space(this);
2695  emit(0xF3); // single
2696  emit_optional_rex_32(dst, src);
2697  emit(0x0F);
2698  emit(0x11); // store
2699  emit_sse_operand(dst, src);
2700 }
2701 
2702 
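// The cvtt* conversions truncate toward zero, whereas cvtsd2si further
// down rounds according to MXCSR.RC; the *siq and *qsi variants use REX.W
// for 64-bit integer operands.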
2703 void Assembler::cvttss2si(Register dst, const Operand& src) {
2704  EnsureSpace ensure_space(this);
2705  emit(0xF3);
2706  emit_optional_rex_32(dst, src);
2707  emit(0x0F);
2708  emit(0x2C);
2709  emit_operand(dst, src);
2710 }
2711 
2712 
2713 void Assembler::cvttss2si(Register dst, XMMRegister src) {
2714  EnsureSpace ensure_space(this);
2715  emit(0xF3);
2716  emit_optional_rex_32(dst, src);
2717  emit(0x0F);
2718  emit(0x2C);
2719  emit_sse_operand(dst, src);
2720 }
2721 
2722 
2723 void Assembler::cvttsd2si(Register dst, const Operand& src) {
2724  EnsureSpace ensure_space(this);
2725  emit(0xF2);
2726  emit_optional_rex_32(dst, src);
2727  emit(0x0F);
2728  emit(0x2C);
2729  emit_operand(dst, src);
2730 }
2731 
2732 
2733 void Assembler::cvttsd2si(Register dst, XMMRegister src) {
2734  EnsureSpace ensure_space(this);
2735  emit(0xF2);
2736  emit_optional_rex_32(dst, src);
2737  emit(0x0F);
2738  emit(0x2C);
2739  emit_sse_operand(dst, src);
2740 }
2741 
2742 
2743 void Assembler::cvttsd2siq(Register dst, XMMRegister src) {
2744  EnsureSpace ensure_space(this);
2745  emit(0xF2);
2746  emit_rex_64(dst, src);
2747  emit(0x0F);
2748  emit(0x2C);
2749  emit_sse_operand(dst, src);
2750 }
2751 
2752 
2753 void Assembler::cvtlsi2sd(XMMRegister dst, const Operand& src) {
2754  EnsureSpace ensure_space(this);
2755  emit(0xF2);
2756  emit_optional_rex_32(dst, src);
2757  emit(0x0F);
2758  emit(0x2A);
2759  emit_sse_operand(dst, src);
2760 }
2761 
2762 
2763 void Assembler::cvtlsi2sd(XMMRegister dst, Register src) {
2764  EnsureSpace ensure_space(this);
2765  emit(0xF2);
2766  emit_optional_rex_32(dst, src);
2767  emit(0x0F);
2768  emit(0x2A);
2769  emit_sse_operand(dst, src);
2770 }
2771 
2772 
2773 void Assembler::cvtlsi2ss(XMMRegister dst, Register src) {
2774  EnsureSpace ensure_space(this);
2775  emit(0xF3);
2776  emit_optional_rex_32(dst, src);
2777  emit(0x0F);
2778  emit(0x2A);
2779  emit_sse_operand(dst, src);
2780 }
2781 
2782 
2783 void Assembler::cvtqsi2sd(XMMRegister dst, Register src) {
2784  EnsureSpace ensure_space(this);
2785  emit(0xF2);
2786  emit_rex_64(dst, src);
2787  emit(0x0F);
2788  emit(0x2A);
2789  emit_sse_operand(dst, src);
2790 }
2791 
2792 
2793 void Assembler::cvtss2sd(XMMRegister dst, XMMRegister src) {
2794  EnsureSpace ensure_space(this);
2795  emit(0xF3);
2796  emit_optional_rex_32(dst, src);
2797  emit(0x0F);
2798  emit(0x5A);
2799  emit_sse_operand(dst, src);
2800 }
2801 
2802 
2803 void Assembler::cvtss2sd(XMMRegister dst, const Operand& src) {
2804  EnsureSpace ensure_space(this);
2805  emit(0xF3);
2806  emit_optional_rex_32(dst, src);
2807  emit(0x0F);
2808  emit(0x5A);
2809  emit_sse_operand(dst, src);
2810 }
2811 
2812 
2813 void Assembler::cvtsd2ss(XMMRegister dst, XMMRegister src) {
2814  EnsureSpace ensure_space(this);
2815  emit(0xF2);
2816  emit_optional_rex_32(dst, src);
2817  emit(0x0F);
2818  emit(0x5A);
2819  emit_sse_operand(dst, src);
2820 }
2821 
2822 
2823 void Assembler::cvtsd2si(Register dst, XMMRegister src) {
2824  EnsureSpace ensure_space(this);
2825  emit(0xF2);
2826  emit_optional_rex_32(dst, src);
2827  emit(0x0F);
2828  emit(0x2D);
2829  emit_sse_operand(dst, src);
2830 }
2831 
2832 
2833 void Assembler::cvtsd2siq(Register dst, XMMRegister src) {
2834  EnsureSpace ensure_space(this);
2835  emit(0xF2);
2836  emit_rex_64(dst, src);
2837  emit(0x0F);
2838  emit(0x2D);
2839  emit_sse_operand(dst, src);
2840 }
2841 
2842 
2843 void Assembler::addsd(XMMRegister dst, XMMRegister src) {
2844  EnsureSpace ensure_space(this);
2845  emit(0xF2);
2846  emit_optional_rex_32(dst, src);
2847  emit(0x0F);
2848  emit(0x58);
2849  emit_sse_operand(dst, src);
2850 }
2851 
2852 
2853 void Assembler::mulsd(XMMRegister dst, XMMRegister src) {
2854  EnsureSpace ensure_space(this);
2855  emit(0xF2);
2856  emit_optional_rex_32(dst, src);
2857  emit(0x0F);
2858  emit(0x59);
2859  emit_sse_operand(dst, src);
2860 }
2861 
2862 
2863 void Assembler::subsd(XMMRegister dst, XMMRegister src) {
2864  EnsureSpace ensure_space(this);
2865  emit(0xF2);
2866  emit_optional_rex_32(dst, src);
2867  emit(0x0F);
2868  emit(0x5C);
2869  emit_sse_operand(dst, src);
2870 }
2871 
2872 
2873 void Assembler::divsd(XMMRegister dst, XMMRegister src) {
2874  EnsureSpace ensure_space(this);
2875  emit(0xF2);
2876  emit_optional_rex_32(dst, src);
2877  emit(0x0F);
2878  emit(0x5E);
2879  emit_sse_operand(dst, src);
2880 }
2881 
2882 
2883 void Assembler::andpd(XMMRegister dst, XMMRegister src) {
2884  EnsureSpace ensure_space(this);
2885  emit(0x66);
2886  emit_optional_rex_32(dst, src);
2887  emit(0x0F);
2888  emit(0x54);
2889  emit_sse_operand(dst, src);
2890 }
2891 
2892 
2893 void Assembler::orpd(XMMRegister dst, XMMRegister src) {
2894  EnsureSpace ensure_space(this);
2895  emit(0x66);
2896  emit_optional_rex_32(dst, src);
2897  emit(0x0F);
2898  emit(0x56);
2899  emit_sse_operand(dst, src);
2900 }
2901 
2902 
2903 void Assembler::xorpd(XMMRegister dst, XMMRegister src) {
2904  EnsureSpace ensure_space(this);
2905  emit(0x66);
2906  emit_optional_rex_32(dst, src);
2907  emit(0x0F);
2908  emit(0x57);
2909  emit_sse_operand(dst, src);
2910 }
2911 
2912 
2913 void Assembler::xorps(XMMRegister dst, XMMRegister src) {
2914  EnsureSpace ensure_space(this);
2915  emit_optional_rex_32(dst, src);
2916  emit(0x0F);
2917  emit(0x57);
2918  emit_sse_operand(dst, src);
2919 }
2920 
2921 
2922 void Assembler::sqrtsd(XMMRegister dst, XMMRegister src) {
2923  EnsureSpace ensure_space(this);
2924  emit(0xF2);
2925  emit_optional_rex_32(dst, src);
2926  emit(0x0F);
2927  emit(0x51);
2928  emit_sse_operand(dst, src);
2929 }
2930 
2931 
2932 void Assembler::ucomisd(XMMRegister dst, XMMRegister src) {
2933  EnsureSpace ensure_space(this);
2934  emit(0x66);
2935  emit_optional_rex_32(dst, src);
2936  emit(0x0f);
2937  emit(0x2e);
2938  emit_sse_operand(dst, src);
2939 }
2940 
2941 
2942 void Assembler::ucomisd(XMMRegister dst, const Operand& src) {
2943  EnsureSpace ensure_space(this);
2944  emit(0x66);
2945  emit_optional_rex_32(dst, src);
2946  emit(0x0f);
2947  emit(0x2e);
2948  emit_sse_operand(dst, src);
2949 }
2950 
2951 
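// ROUNDSD (SSE4.1) is encoded as 66 0F 3A 0B /r ib; the low bits of the
// immediate select the rounding mode and bit 3 suppresses the precision
// (inexact) exception.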
2952 void Assembler::roundsd(XMMRegister dst, XMMRegister src,
2953  Assembler::RoundingMode mode) {
2954  ASSERT(CpuFeatures::IsEnabled(SSE4_1));
2955  EnsureSpace ensure_space(this);
2956  emit(0x66);
2957  emit_optional_rex_32(dst, src);
2958  emit(0x0f);
2959  emit(0x3a);
2960  emit(0x0b);
2961  emit_sse_operand(dst, src);
2962  // Mask precision exception.
2963  emit(static_cast<byte>(mode) | 0x8);
2964 }
2965 
2966 
2967 void Assembler::movmskpd(Register dst, XMMRegister src) {
2968  EnsureSpace ensure_space(this);
2969  emit(0x66);
2970  emit_optional_rex_32(dst, src);
2971  emit(0x0f);
2972  emit(0x50);
2973  emit_sse_operand(dst, src);
2974 }
2975 
2976 
2977 void Assembler::emit_sse_operand(XMMRegister reg, const Operand& adr) {
2978  Register ireg = { reg.code() };
2979  emit_operand(ireg, adr);
2980 }
2981 
2982 
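// Register-direct ModR/M byte: mod = 11, reg field = dst.low_bits(),
// rm field = src.low_bits(); the high register bits travel in the REX
// prefix.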
2983 void Assembler::emit_sse_operand(XMMRegister dst, XMMRegister src) {
2984  emit(0xC0 | (dst.low_bits() << 3) | src.low_bits());
2985 }
2986 
2987 void Assembler::emit_sse_operand(XMMRegister dst, Register src) {
2988  emit(0xC0 | (dst.low_bits() << 3) | src.low_bits());
2989 }
2990 
2991 void Assembler::emit_sse_operand(Register dst, XMMRegister src) {
2992  emit(0xC0 | (dst.low_bits() << 3) | src.low_bits());
2993 }
2994 
2995 
2996 void Assembler::db(uint8_t data) {
2997  EnsureSpace ensure_space(this);
2998  emit(data);
2999 }
3000 
3001 
3002 void Assembler::dd(uint32_t data) {
3003  EnsureSpace ensure_space(this);
3004  emitl(data);
3005 }
3006 
3007 
3008 // Relocation information implementations.
3009 
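// External references are only recorded when the heap will be serialized
// or when debug code is being emitted; see the early return below.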
3010 void Assembler::RecordRelocInfo(RelocInfo::Mode rmode, intptr_t data) {
3011  ASSERT(rmode != RelocInfo::NONE);
3012  // Don't record external references unless the heap will be serialized.
3013  if (rmode == RelocInfo::EXTERNAL_REFERENCE) {
3014 #ifdef DEBUG
3015  if (!Serializer::enabled()) {
3016  Serializer::TooLateToEnableNow();
3017  }
3018 #endif
3019  if (!Serializer::enabled() && !emit_debug_code()) {
3020  return;
3021  }
3022  }
3023  RelocInfo rinfo(pc_, rmode, data, NULL);
3024  reloc_info_writer.Write(&rinfo);
3025 }
3026 
3027 void Assembler::RecordJSReturn() {
3028  positions_recorder()->WriteRecordedPositions();
3029  EnsureSpace ensure_space(this);
3030  RecordRelocInfo(RelocInfo::JS_RETURN);
3031 }
3032 
3033 
3034 void Assembler::RecordDebugBreakSlot() {
3035  positions_recorder()->WriteRecordedPositions();
3036  EnsureSpace ensure_space(this);
3037  RecordRelocInfo(RelocInfo::DEBUG_BREAK_SLOT);
3038 }
3039 
3040 
3041 void Assembler::RecordComment(const char* msg, bool force) {
3042  if (FLAG_code_comments || force) {
3043  EnsureSpace ensure_space(this);
3044  RecordRelocInfo(RelocInfo::COMMENT, reinterpret_cast<intptr_t>(msg));
3045  }
3046 }
3047 
3048 
3049 const int RelocInfo::kApplyMask = RelocInfo::kCodeTargetMask |
3050  1 << RelocInfo::INTERNAL_REFERENCE;
3051 
3052 
3053 bool RelocInfo::IsCodedSpecially() {
3054  // The deserializer needs to know whether a pointer is specially coded. Being
3055  // specially coded on x64 means that it is a relative 32 bit address, as used
3056  // by branch instructions.
3057  return (1 << rmode_) & kApplyMask;
3058 }
3059 
3060 } } // namespace v8::internal
3061 
3062 #endif // V8_TARGET_ARCH_X64
Definition: assembler.h:847