v8 3.25.30 (node 0.11.13)
V8 is Google's open source JavaScript engine
assembler-x64.cc
1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are
4 // met:
5 //
6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided
11 // with the distribution.
12 // * Neither the name of Google Inc. nor the names of its
13 // contributors may be used to endorse or promote products derived
14 // from this software without specific prior written permission.
15 //
16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27 
28 #include "v8.h"
29 
30 #if V8_TARGET_ARCH_X64
31 
32 #include "macro-assembler.h"
33 #include "serialize.h"
34 
35 namespace v8 {
36 namespace internal {
37 
38 // -----------------------------------------------------------------------------
39 // Implementation of CpuFeatures
40 
41 
42 #ifdef DEBUG
43 bool CpuFeatures::initialized_ = false;
44 #endif
45 uint64_t CpuFeatures::supported_ = CpuFeatures::kDefaultCpuFeatures;
46 uint64_t CpuFeatures::found_by_runtime_probing_only_ = 0;
47 uint64_t CpuFeatures::cross_compile_ = 0;
48 
49 ExternalReference ExternalReference::cpu_features() {
50  ASSERT(CpuFeatures::initialized_);
51  return ExternalReference(&CpuFeatures::supported_);
52 }
53 
54 
55 void CpuFeatures::Probe() {
56  ASSERT(supported_ == CpuFeatures::kDefaultCpuFeatures);
57 #ifdef DEBUG
58  initialized_ = true;
59 #endif
60  supported_ = kDefaultCpuFeatures;
61  if (Serializer::enabled()) {
62  supported_ |= OS::CpuFeaturesImpliedByPlatform();
63  return; // No features if we might serialize.
64  }
65 
66  uint64_t probed_features = 0;
67  CPU cpu;
68  if (cpu.has_sse41()) {
69  probed_features |= static_cast<uint64_t>(1) << SSE4_1;
70  }
71  if (cpu.has_sse3()) {
72  probed_features |= static_cast<uint64_t>(1) << SSE3;
73  }
74 
75  // SSE2 must be available on every x64 CPU.
76  ASSERT(cpu.has_sse2());
77  probed_features |= static_cast<uint64_t>(1) << SSE2;
78 
79  // CMOV must be available on every x64 CPU.
80  ASSERT(cpu.has_cmov());
81  probed_features |= static_cast<uint64_t>(1) << CMOV;
82 
83  // SAHF is not generally available in long mode.
84  if (cpu.has_sahf()) {
85  probed_features |= static_cast<uint64_t>(1) << SAHF;
86  }
87 
88  uint64_t platform_features = OS::CpuFeaturesImpliedByPlatform();
89  supported_ = probed_features | platform_features;
90  found_by_runtime_probing_only_
91  = probed_features & ~kDefaultCpuFeatures & ~platform_features;
92 }
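// Rough illustration of the bit bookkeeping above, assuming kDefaultCpuFeatures
// already contains the SSE2 and CMOV bits on x64:
//   probed_features = (1 << SSE4_1) | (1 << SSE3) | (1 << SSE2) | (1 << CMOV)
//   supported_      = probed_features | platform_features
//   found_by_runtime_probing_only_
//                   = probed_features & ~kDefaultCpuFeatures & ~platform_features
// i.e. only the bits (SSE3, SSE4_1, SAHF) that were discovered by probing and
// are not already implied by the defaults or the platform.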
93 
94 
95 // -----------------------------------------------------------------------------
96 // Implementation of RelocInfo
97 
98 // Patch the code at the current PC with a call to the target address.
99 // Additional guard int3 instructions can be added if required.
100 void RelocInfo::PatchCodeWithCall(Address target, int guard_bytes) {
101  int code_size = Assembler::kCallSequenceLength + guard_bytes;
102 
103  // Create a code patcher.
104  CodePatcher patcher(pc_, code_size);
105 
106  // Add a label for checking the size of the code used for returning.
107 #ifdef DEBUG
108  Label check_codesize;
109  patcher.masm()->bind(&check_codesize);
110 #endif
111 
112  // Patch the code.
113  patcher.masm()->movp(kScratchRegister, reinterpret_cast<void*>(target),
114  Assembler::RelocInfoNone());
115  patcher.masm()->call(kScratchRegister);
116 
117  // Check that the size of the code generated is as expected.
118  ASSERT(Assembler::kCallSequenceLength ==
119  patcher.masm()->SizeOfCodeGeneratedSince(&check_codesize));
120 
121  // Add the requested number of int3 instructions after the call.
122  for (int i = 0; i < guard_bytes; i++) {
123  patcher.masm()->int3();
124  }
125 }
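// Illustrative layout of the patched sequence, assuming kScratchRegister is r10
// and kCallSequenceLength covers the movp + call pair (13 bytes on x64):
//   49 BA <8-byte target>   movabs r10, target
//   41 FF D2                call r10
//   CC ...                  guard_bytes trailing int3 instructions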
126 
127 
128 void RelocInfo::PatchCode(byte* instructions, int instruction_count) {
129  // Patch the code at the current address with the supplied instructions.
130  for (int i = 0; i < instruction_count; i++) {
131  *(pc_ + i) = *(instructions + i);
132  }
133 
134  // Indicate that code has changed.
135  CPU::FlushICache(pc_, instruction_count);
136 }
137 
138 
139 // -----------------------------------------------------------------------------
140 // Register constants.
141 
142 const int
143  Register::kRegisterCodeByAllocationIndex[kMaxNumAllocatableRegisters] = {
144  // rax, rbx, rdx, rcx, rsi, rdi, r8, r9, r11, r14, r15
145  0, 3, 2, 1, 6, 7, 8, 9, 11, 14, 15
146 };
147 
148 const int Register::kAllocationIndexByRegisterCode[kNumRegisters] = {
149  0, 3, 2, 1, -1, -1, 4, 5, 6, 7, -1, 8, -1, -1, 9, 10
150 };
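// Reading the two tables together: allocation index 1 maps to register code 3
// (rbx), and the inverse table maps code 3 back to index 1. Registers that are
// never allocated (rsp, rbp, r10, r12, r13) carry -1 in the inverse table.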
151 
152 
153 // -----------------------------------------------------------------------------
154 // Implementation of Operand
155 
156 Operand::Operand(Register base, int32_t disp) : rex_(0) {
157  len_ = 1;
158  if (base.is(rsp) || base.is(r12)) {
159  // SIB byte is needed to encode (rsp + offset) or (r12 + offset).
160  set_sib(times_1, rsp, base);
161  }
162 
163  if (disp == 0 && !base.is(rbp) && !base.is(r13)) {
164  set_modrm(0, base);
165  } else if (is_int8(disp)) {
166  set_modrm(1, base);
167  set_disp8(disp);
168  } else {
169  set_modrm(2, base);
170  set_disp32(disp);
171  }
172 }
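// Encoding example: Operand(rbx, 0x10) needs no SIB byte and an 8-bit
// displacement, so it stores buf_ = { 0x43, 0x10 } (mod=01, rm=rbx), len_ = 2.
// The reg field (bits 5..3) stays zero and is filled in later by
// emit_operand(). Operand(rsp, 0) must go through a SIB byte instead:
// buf_ = { 0x04, 0x24 }.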
173 
174 
175 Operand::Operand(Register base,
176  Register index,
177  ScaleFactor scale,
178  int32_t disp) : rex_(0) {
179  ASSERT(!index.is(rsp));
180  len_ = 1;
181  set_sib(scale, index, base);
182  if (disp == 0 && !base.is(rbp) && !base.is(r13)) {
183  // This call to set_modrm doesn't overwrite the REX.B (or REX.X) bits
184  // possibly set by set_sib.
185  set_modrm(0, rsp);
186  } else if (is_int8(disp)) {
187  set_modrm(1, rsp);
188  set_disp8(disp);
189  } else {
190  set_modrm(2, rsp);
191  set_disp32(disp);
192  }
193 }
194 
195 
196 Operand::Operand(Register index,
197  ScaleFactor scale,
198  int32_t disp) : rex_(0) {
199  ASSERT(!index.is(rsp));
200  len_ = 1;
201  set_modrm(0, rsp);
202  set_sib(scale, index, rbp);
203  set_disp32(disp);
204 }
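// Encoding example: Operand(rcx, times_4, 0x20), i.e. [rcx*4 + 0x20] with no
// base register, stores ModR/M 0x04, SIB 0x8D (scale=4, index=rcx, base=rbp
// meaning "no base" in mod 0) and a 32-bit displacement of 0x20.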
205 
206 
207 Operand::Operand(const Operand& operand, int32_t offset) {
208  ASSERT(operand.len_ >= 1);
209  // Operand encodes REX ModR/M [SIB] [Disp].
210  byte modrm = operand.buf_[0];
211  ASSERT(modrm < 0xC0); // Disallow mode 3 (register target).
212  bool has_sib = ((modrm & 0x07) == 0x04);
213  byte mode = modrm & 0xC0;
214  int disp_offset = has_sib ? 2 : 1;
215  int base_reg = (has_sib ? operand.buf_[1] : modrm) & 0x07;
216  // Mode 0 with rbp/r13 as ModR/M or SIB base register always has a 32-bit
217  // displacement.
218  bool is_baseless = (mode == 0) && (base_reg == 0x05); // No base or RIP base.
219  int32_t disp_value = 0;
220  if (mode == 0x80 || is_baseless) {
221  // Mode 2 or mode 0 with rbp/r13 as base: Word displacement.
222  disp_value = *BitCast<const int32_t*>(&operand.buf_[disp_offset]);
223  } else if (mode == 0x40) {
224  // Mode 1: Byte displacement.
225  disp_value = static_cast<signed char>(operand.buf_[disp_offset]);
226  }
227 
228  // Write new operand with same registers, but with modified displacement.
229  ASSERT(offset >= 0 ? disp_value + offset > disp_value
230  : disp_value + offset < disp_value); // No overflow.
231  disp_value += offset;
232  rex_ = operand.rex_;
233  if (!is_int8(disp_value) || is_baseless) {
234  // Need 32 bits of displacement, mode 2 or mode 1 with register rbp/r13.
235  buf_[0] = (modrm & 0x3f) | (is_baseless ? 0x00 : 0x80);
236  len_ = disp_offset + 4;
237  Memory::int32_at(&buf_[disp_offset]) = disp_value;
238  } else if (disp_value != 0 || (base_reg == 0x05)) {
239  // Need 8 bits of displacement.
240  buf_[0] = (modrm & 0x3f) | 0x40; // Mode 1.
241  len_ = disp_offset + 1;
242  buf_[disp_offset] = static_cast<byte>(disp_value);
243  } else {
244  // Need no displacement.
245  buf_[0] = (modrm & 0x3f); // Mode 0.
246  len_ = disp_offset;
247  }
248  if (has_sib) {
249  buf_[1] = operand.buf_[1];
250  }
251 }
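// Example of the rewrite: starting from Operand(rbx, 0x10) (mod=01, disp8),
// Operand(that, 0x100) computes disp_value = 0x110, which no longer fits in
// 8 bits, so the copy is re-encoded as mod=10 with a 32-bit displacement:
// buf_ = { 0x83, 0x10, 0x01, 0x00, 0x00 }, len_ = 5.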
252 
253 
254 bool Operand::AddressUsesRegister(Register reg) const {
255  int code = reg.code();
256  ASSERT((buf_[0] & 0xC0) != 0xC0); // Always a memory operand.
257  // Start with only low three bits of base register. Initial decoding doesn't
258  // distinguish on the REX.B bit.
259  int base_code = buf_[0] & 0x07;
260  if (base_code == rsp.code()) {
261  // SIB byte present in buf_[1].
262  // Check the index register from the SIB byte + REX.X prefix.
263  int index_code = ((buf_[1] >> 3) & 0x07) | ((rex_ & 0x02) << 2);
264  // Index code (including REX.X) of 0x04 (rsp) means no index register.
265  if (index_code != rsp.code() && index_code == code) return true;
266  // Add REX.B to get the full base register code.
267  base_code = (buf_[1] & 0x07) | ((rex_ & 0x01) << 3);
268  // A base register of 0x05 (rbp) with mod = 0 means no base register.
269  if (base_code == rbp.code() && ((buf_[0] & 0xC0) == 0)) return false;
270  return code == base_code;
271  } else {
272  // A base register with low bits of 0x05 (rbp or r13) and mod = 0 means
273  // no base register.
274  if (base_code == rbp.code() && ((buf_[0] & 0xC0) == 0)) return false;
275  base_code |= ((rex_ & 0x01) << 3);
276  return code == base_code;
277  }
278 }
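// Example: for Operand(rax, rcx, times_4, 0) the low bits of the ModR/M byte
// are 0x04, so the SIB byte is consulted: AddressUsesRegister(rcx) matches the
// index field and returns true, while AddressUsesRegister(rdx) matches neither
// the index nor the base (rax) and returns false.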
279 
280 
281 // -----------------------------------------------------------------------------
282 // Implementation of Assembler.
283 
284 #ifdef GENERATED_CODE_COVERAGE
285 static void InitCoverageLog();
286 #endif
287 
288 Assembler::Assembler(Isolate* isolate, void* buffer, int buffer_size)
289  : AssemblerBase(isolate, buffer, buffer_size),
290  code_targets_(100),
291  positions_recorder_(this) {
292  // Clear the buffer in debug mode unless it was provided by the
293  // caller in which case we can't be sure it's okay to overwrite
294  // existing code in it.
295 #ifdef DEBUG
296  if (own_buffer_) {
297  memset(buffer_, 0xCC, buffer_size_); // int3
298  }
299 #endif
300 
301  reloc_info_writer.Reposition(buffer_ + buffer_size_, pc_);
302 
303 
304 #ifdef GENERATED_CODE_COVERAGE
305  InitCoverageLog();
306 #endif
307 }
308 
309 
310 void Assembler::GetCode(CodeDesc* desc) {
311  // Finalize code (at this point overflow() may be true, but the gap ensures
312  // that we are still not overlapping instructions and relocation info).
313  ASSERT(pc_ <= reloc_info_writer.pos()); // No overlap.
314  // Set up code descriptor.
315  desc->buffer = buffer_;
316  desc->buffer_size = buffer_size_;
317  desc->instr_size = pc_offset();
318  ASSERT(desc->instr_size > 0); // Zero-size code objects upset the system.
319  desc->reloc_size =
320  static_cast<int>((buffer_ + buffer_size_) - reloc_info_writer.pos());
321  desc->origin = this;
322 }
323 
324 
325 void Assembler::Align(int m) {
326  ASSERT(IsPowerOf2(m));
327  int delta = (m - (pc_offset() & (m - 1))) & (m - 1);
328  Nop(delta);
329 }
330 
331 
332 void Assembler::CodeTargetAlign() {
333  Align(16); // Preferred alignment of jump targets on x64.
334 }
335 
336 
337 bool Assembler::IsNop(Address addr) {
338  Address a = addr;
339  while (*a == 0x66) a++;
340  if (*a == 0x90) return true;
341  if (a[0] == 0xf && a[1] == 0x1f) return true;
342  return false;
343 }
344 
345 
346 void Assembler::bind_to(Label* L, int pos) {
347  ASSERT(!L->is_bound()); // Label may only be bound once.
348  ASSERT(0 <= pos && pos <= pc_offset()); // Position must be valid.
349  if (L->is_linked()) {
350  int current = L->pos();
351  int next = long_at(current);
352  while (next != current) {
353  // Relative address, relative to point after address.
354  int imm32 = pos - (current + sizeof(int32_t));
355  long_at_put(current, imm32);
356  current = next;
357  next = long_at(next);
358  }
359  // Fix up last fixup on linked list.
360  int last_imm32 = pos - (current + sizeof(int32_t));
361  long_at_put(current, last_imm32);
362  }
363  while (L->is_near_linked()) {
364  int fixup_pos = L->near_link_pos();
365  int offset_to_next =
366  static_cast<int>(*reinterpret_cast<int8_t*>(addr_at(fixup_pos)));
367  ASSERT(offset_to_next <= 0);
368  int disp = pos - (fixup_pos + sizeof(int8_t));
369  CHECK(is_int8(disp));
370  set_byte_at(fixup_pos, disp);
371  if (offset_to_next < 0) {
372  L->link_to(fixup_pos + offset_to_next, Label::kNear);
373  } else {
374  L->UnuseNear();
375  }
376  }
377  L->bind_to(pos);
378 }
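// Sketch of the fixup chain walked above: each unresolved 32-bit displacement
// field temporarily stores the offset of the previous link (or its own offset
// when it is the last link). If a label was referenced at offsets 5 and 20 and
// is bound at pc_offset 40, bind_to patches offset 20 to 40 - (20 + 4) = 16
// and offset 5 to 40 - (5 + 4) = 31, turning both into real rel32 values.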
379 
380 
381 void Assembler::bind(Label* L) {
382  bind_to(L, pc_offset());
383 }
384 
385 
386 void Assembler::GrowBuffer() {
387  ASSERT(buffer_overflow());
388  if (!own_buffer_) FATAL("external code buffer is too small");
389 
390  // Compute new buffer size.
391  CodeDesc desc; // the new buffer
392  if (buffer_size_ < 4*KB) {
393  desc.buffer_size = 4*KB;
394  } else {
395  desc.buffer_size = 2*buffer_size_;
396  }
397  // Some internal data structures overflow for very large buffers,
398  // so kMaximalBufferSize must be kept small enough to avoid that.
399  if ((desc.buffer_size > kMaximalBufferSize) ||
400  (desc.buffer_size > isolate()->heap()->MaxOldGenerationSize())) {
401  V8::FatalProcessOutOfMemory("Assembler::GrowBuffer");
402  }
403 
404  // Set up new buffer.
405  desc.buffer = NewArray<byte>(desc.buffer_size);
406  desc.instr_size = pc_offset();
407  desc.reloc_size =
408  static_cast<int>((buffer_ + buffer_size_) - (reloc_info_writer.pos()));
409 
410  // Clear the buffer in debug mode. Use 'int3' instructions so that we
411  // trap immediately if we ever run uninitialized code.
412 #ifdef DEBUG
413  memset(desc.buffer, 0xCC, desc.buffer_size);
414 #endif
415 
416  // Copy the data.
417  intptr_t pc_delta = desc.buffer - buffer_;
418  intptr_t rc_delta = (desc.buffer + desc.buffer_size) -
419  (buffer_ + buffer_size_);
420  OS::MemMove(desc.buffer, buffer_, desc.instr_size);
421  OS::MemMove(rc_delta + reloc_info_writer.pos(),
422  reloc_info_writer.pos(), desc.reloc_size);
423 
424  // Switch buffers.
425  if (isolate() != NULL &&
426  isolate()->assembler_spare_buffer() == NULL &&
427  buffer_size_ == kMinimalBufferSize) {
428  isolate()->set_assembler_spare_buffer(buffer_);
429  } else {
430  DeleteArray(buffer_);
431  }
432  buffer_ = desc.buffer;
433  buffer_size_ = desc.buffer_size;
434  pc_ += pc_delta;
435  reloc_info_writer.Reposition(reloc_info_writer.pos() + rc_delta,
436  reloc_info_writer.last_pc() + pc_delta);
437 
438  // Relocate runtime entries.
439  for (RelocIterator it(desc); !it.done(); it.next()) {
440  RelocInfo::Mode rmode = it.rinfo()->rmode();
441  if (rmode == RelocInfo::INTERNAL_REFERENCE) {
442  intptr_t* p = reinterpret_cast<intptr_t*>(it.rinfo()->pc());
443  if (*p != 0) { // 0 means uninitialized.
444  *p += pc_delta;
445  }
446  }
447  }
448 
449  ASSERT(!buffer_overflow());
450 }
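// Illustration of the two deltas: if a 4 KB buffer at 0x10000 grows to 8 KB at
// 0x40000, pc_delta = 0x30000 (start-to-start) while rc_delta = 0x31000
// (end-to-end), because relocation info is written backwards from the end of
// the buffer and therefore moves with the buffer end, not its start.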
451 
452 
453 void Assembler::emit_operand(int code, const Operand& adr) {
454  ASSERT(is_uint3(code));
455  const unsigned length = adr.len_;
456  ASSERT(length > 0);
457 
458  // Emit updated ModR/M byte containing the given register.
459  ASSERT((adr.buf_[0] & 0x38) == 0);
460  pc_[0] = adr.buf_[0] | code << 3;
461 
462  // Emit the rest of the encoded operand.
463  for (unsigned i = 1; i < length; i++) pc_[i] = adr.buf_[i];
464  pc_ += length;
465 }
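// Example: encoding add rcx, [rbx+0x10] emits REX.W (0x48), opcode 0x03, then
// emit_operand(rcx, Operand(rbx, 0x10)), which ORs rcx's code into the reg
// field: 0x43 | (1 << 3) = 0x4B, followed by the disp8 0x10.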
466 
467 
468 // Assembler Instruction implementations.
469 
470 void Assembler::arithmetic_op(byte opcode, Register reg, const Operand& op) {
471  EnsureSpace ensure_space(this);
472  emit_rex_64(reg, op);
473  emit(opcode);
474  emit_operand(reg, op);
475 }
476 
477 
478 void Assembler::arithmetic_op(byte opcode, Register reg, Register rm_reg) {
479  EnsureSpace ensure_space(this);
480  ASSERT((opcode & 0xC6) == 2);
481  if (rm_reg.low_bits() == 4) { // Forces SIB byte.
482  // Swap reg and rm_reg and change opcode operand order.
483  emit_rex_64(rm_reg, reg);
484  emit(opcode ^ 0x02);
485  emit_modrm(rm_reg, reg);
486  } else {
487  emit_rex_64(reg, rm_reg);
488  emit(opcode);
489  emit_modrm(reg, rm_reg);
490  }
491 }
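// The opcode ^ 0x02 trick flips the x86 direction bit, so swapping the ModR/M
// operands preserves the meaning. For instance add rbx, rsp can be encoded as
// 48 03 DC (03 /r, reg=rbx, rm=rsp) or, as emitted here when rm_reg has low
// bits 4, as 48 01 E3 (01 /r, reg=rsp, rm=rbx); both add rsp into rbx.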
492 
493 
494 void Assembler::arithmetic_op_16(byte opcode, Register reg, Register rm_reg) {
495  EnsureSpace ensure_space(this);
496  ASSERT((opcode & 0xC6) == 2);
497  if (rm_reg.low_bits() == 4) { // Forces SIB byte.
498  // Swap reg and rm_reg and change opcode operand order.
499  emit(0x66);
500  emit_optional_rex_32(rm_reg, reg);
501  emit(opcode ^ 0x02);
502  emit_modrm(rm_reg, reg);
503  } else {
504  emit(0x66);
505  emit_optional_rex_32(reg, rm_reg);
506  emit(opcode);
507  emit_modrm(reg, rm_reg);
508  }
509 }
510 
511 
512 void Assembler::arithmetic_op_16(byte opcode,
513  Register reg,
514  const Operand& rm_reg) {
515  EnsureSpace ensure_space(this);
516  emit(0x66);
517  emit_optional_rex_32(reg, rm_reg);
518  emit(opcode);
519  emit_operand(reg, rm_reg);
520 }
521 
522 
523 void Assembler::arithmetic_op_32(byte opcode, Register reg, Register rm_reg) {
524  EnsureSpace ensure_space(this);
525  ASSERT((opcode & 0xC6) == 2);
526  if (rm_reg.low_bits() == 4) { // Forces SIB byte.
527  // Swap reg and rm_reg and change opcode operand order.
528  emit_optional_rex_32(rm_reg, reg);
529  emit(opcode ^ 0x02); // E.g. 0x03 -> 0x01 for ADD.
530  emit_modrm(rm_reg, reg);
531  } else {
532  emit_optional_rex_32(reg, rm_reg);
533  emit(opcode);
534  emit_modrm(reg, rm_reg);
535  }
536 }
537 
538 
539 void Assembler::arithmetic_op_32(byte opcode,
540  Register reg,
541  const Operand& rm_reg) {
542  EnsureSpace ensure_space(this);
543  emit_optional_rex_32(reg, rm_reg);
544  emit(opcode);
545  emit_operand(reg, rm_reg);
546 }
547 
548 
549 void Assembler::immediate_arithmetic_op(byte subcode,
550  Register dst,
551  Immediate src) {
552  EnsureSpace ensure_space(this);
553  emit_rex_64(dst);
554  if (is_int8(src.value_)) {
555  emit(0x83);
556  emit_modrm(subcode, dst);
557  emit(src.value_);
558  } else if (dst.is(rax)) {
559  emit(0x05 | (subcode << 3));
560  emitl(src.value_);
561  } else {
562  emit(0x81);
563  emit_modrm(subcode, dst);
564  emitl(src.value_);
565  }
566 }
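// The subcode is the /digit of the 0x81/0x83 opcode group (0=ADD, 1=OR, 2=ADC,
// 3=SBB, 4=AND, 5=SUB, 6=XOR, 7=CMP). For instance subcode 0 with dst=rbx and
// an 8-bit immediate 0x10 emits 48 83 C3 10 (add rbx, 0x10), while subcode 7
// with dst=rax and a 32-bit immediate uses the short rax form 48 3D imm32.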
567 
568 void Assembler::immediate_arithmetic_op(byte subcode,
569  const Operand& dst,
570  Immediate src) {
571  EnsureSpace ensure_space(this);
572  emit_rex_64(dst);
573  if (is_int8(src.value_)) {
574  emit(0x83);
575  emit_operand(subcode, dst);
576  emit(src.value_);
577  } else {
578  emit(0x81);
579  emit_operand(subcode, dst);
580  emitl(src.value_);
581  }
582 }
583 
584 
585 void Assembler::immediate_arithmetic_op_16(byte subcode,
586  Register dst,
587  Immediate src) {
588  EnsureSpace ensure_space(this);
589  emit(0x66); // Operand size override prefix.
590  emit_optional_rex_32(dst);
591  if (is_int8(src.value_)) {
592  emit(0x83);
593  emit_modrm(subcode, dst);
594  emit(src.value_);
595  } else if (dst.is(rax)) {
596  emit(0x05 | (subcode << 3));
597  emitw(src.value_);
598  } else {
599  emit(0x81);
600  emit_modrm(subcode, dst);
601  emitw(src.value_);
602  }
603 }
604 
605 
606 void Assembler::immediate_arithmetic_op_16(byte subcode,
607  const Operand& dst,
608  Immediate src) {
609  EnsureSpace ensure_space(this);
610  emit(0x66); // Operand size override prefix.
611  emit_optional_rex_32(dst);
612  if (is_int8(src.value_)) {
613  emit(0x83);
614  emit_operand(subcode, dst);
615  emit(src.value_);
616  } else {
617  emit(0x81);
618  emit_operand(subcode, dst);
619  emitw(src.value_);
620  }
621 }
622 
623 
624 void Assembler::immediate_arithmetic_op_32(byte subcode,
625  Register dst,
626  Immediate src) {
627  EnsureSpace ensure_space(this);
628  emit_optional_rex_32(dst);
629  if (is_int8(src.value_)) {
630  emit(0x83);
631  emit_modrm(subcode, dst);
632  emit(src.value_);
633  } else if (dst.is(rax)) {
634  emit(0x05 | (subcode << 3));
635  emitl(src.value_);
636  } else {
637  emit(0x81);
638  emit_modrm(subcode, dst);
639  emitl(src.value_);
640  }
641 }
642 
643 
644 void Assembler::immediate_arithmetic_op_32(byte subcode,
645  const Operand& dst,
646  Immediate src) {
647  EnsureSpace ensure_space(this);
648  emit_optional_rex_32(dst);
649  if (is_int8(src.value_)) {
650  emit(0x83);
651  emit_operand(subcode, dst);
652  emit(src.value_);
653  } else {
654  emit(0x81);
655  emit_operand(subcode, dst);
656  emitl(src.value_);
657  }
658 }
659 
660 
661 void Assembler::immediate_arithmetic_op_8(byte subcode,
662  const Operand& dst,
663  Immediate src) {
664  EnsureSpace ensure_space(this);
665  emit_optional_rex_32(dst);
666  ASSERT(is_int8(src.value_) || is_uint8(src.value_));
667  emit(0x80);
668  emit_operand(subcode, dst);
669  emit(src.value_);
670 }
671 
672 
673 void Assembler::immediate_arithmetic_op_8(byte subcode,
674  Register dst,
675  Immediate src) {
676  EnsureSpace ensure_space(this);
677  if (!dst.is_byte_register()) {
678  // Use 64-bit mode byte registers.
679  emit_rex_64(dst);
680  }
681  ASSERT(is_int8(src.value_) || is_uint8(src.value_));
682  emit(0x80);
683  emit_modrm(subcode, dst);
684  emit(src.value_);
685 }
686 
687 
688 void Assembler::shift(Register dst, Immediate shift_amount, int subcode) {
689  EnsureSpace ensure_space(this);
690  ASSERT(is_uint6(shift_amount.value_)); // illegal shift count
691  if (shift_amount.value_ == 1) {
692  emit_rex_64(dst);
693  emit(0xD1);
694  emit_modrm(subcode, dst);
695  } else {
696  emit_rex_64(dst);
697  emit(0xC1);
698  emit_modrm(subcode, dst);
699  emit(shift_amount.value_);
700  }
701 }
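// Here subcode is the /digit of the shift/rotate group (e.g. 4=SHL, 5=SHR,
// 7=SAR). shift(rax, Immediate(1), 0x4) emits 48 D1 E0 (shl rax, 1);
// shift(rax, Immediate(5), 0x4) emits 48 C1 E0 05.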
702 
703 
704 void Assembler::shift(Register dst, int subcode) {
705  EnsureSpace ensure_space(this);
706  emit_rex_64(dst);
707  emit(0xD3);
708  emit_modrm(subcode, dst);
709 }
710 
711 
712 void Assembler::shift_32(Register dst, int subcode) {
713  EnsureSpace ensure_space(this);
714  emit_optional_rex_32(dst);
715  emit(0xD3);
716  emit_modrm(subcode, dst);
717 }
718 
719 
720 void Assembler::shift_32(Register dst, Immediate shift_amount, int subcode) {
721  EnsureSpace ensure_space(this);
722  ASSERT(is_uint5(shift_amount.value_)); // illegal shift count
723  if (shift_amount.value_ == 1) {
724  emit_optional_rex_32(dst);
725  emit(0xD1);
726  emit_modrm(subcode, dst);
727  } else {
728  emit_optional_rex_32(dst);
729  emit(0xC1);
730  emit_modrm(subcode, dst);
731  emit(shift_amount.value_);
732  }
733 }
734 
735 
736 void Assembler::bt(const Operand& dst, Register src) {
737  EnsureSpace ensure_space(this);
738  emit_rex_64(src, dst);
739  emit(0x0F);
740  emit(0xA3);
741  emit_operand(src, dst);
742 }
743 
744 
745 void Assembler::bts(const Operand& dst, Register src) {
746  EnsureSpace ensure_space(this);
747  emit_rex_64(src, dst);
748  emit(0x0F);
749  emit(0xAB);
750  emit_operand(src, dst);
751 }
752 
753 
754 void Assembler::bsrl(Register dst, Register src) {
755  EnsureSpace ensure_space(this);
756  emit_optional_rex_32(dst, src);
757  emit(0x0F);
758  emit(0xBD);
759  emit_modrm(dst, src);
760 }
761 
762 
763 void Assembler::call(Label* L) {
764  positions_recorder()->WriteRecordedPositions();
765  EnsureSpace ensure_space(this);
766  // 1110 1000 #32-bit disp.
767  emit(0xE8);
768  if (L->is_bound()) {
769  int offset = L->pos() - pc_offset() - sizeof(int32_t);
770  ASSERT(offset <= 0);
771  emitl(offset);
772  } else if (L->is_linked()) {
773  emitl(L->pos());
774  L->link_to(pc_offset() - sizeof(int32_t));
775  } else {
776  ASSERT(L->is_unused());
777  int32_t current = pc_offset();
778  emitl(current);
779  L->link_to(current);
780  }
781 }
782 
783 
784 void Assembler::call(Address entry, RelocInfo::Mode rmode) {
785  ASSERT(RelocInfo::IsRuntimeEntry(rmode));
786  positions_recorder()->WriteRecordedPositions();
787  EnsureSpace ensure_space(this);
788  // 1110 1000 #32-bit disp.
789  emit(0xE8);
790  emit_runtime_entry(entry, rmode);
791 }
792 
793 
794 void Assembler::call(Handle<Code> target,
795  RelocInfo::Mode rmode,
796  TypeFeedbackId ast_id) {
797  positions_recorder()->WriteRecordedPositions();
798  EnsureSpace ensure_space(this);
799  // 1110 1000 #32-bit disp.
800  emit(0xE8);
801  emit_code_target(target, rmode, ast_id);
802 }
803 
804 
805 void Assembler::call(Register adr) {
806  positions_recorder()->WriteRecordedPositions();
807  EnsureSpace ensure_space(this);
808  // Opcode: FF /2 r64.
809  emit_optional_rex_32(adr);
810  emit(0xFF);
811  emit_modrm(0x2, adr);
812 }
813 
814 
815 void Assembler::call(const Operand& op) {
816  positions_recorder()->WriteRecordedPositions();
817  EnsureSpace ensure_space(this);
818  // Opcode: FF /2 m64.
819  emit_optional_rex_32(op);
820  emit(0xFF);
821  emit_operand(0x2, op);
822 }
823 
824 
825 // Calls directly to the given address using a relative offset.
826 // Should only ever be used in Code objects for calls within the
827 // same Code object. Should not be used when generating new code (use labels),
828 // but only when patching existing code.
829 void Assembler::call(Address target) {
830  positions_recorder()->WriteRecordedPositions();
831  EnsureSpace ensure_space(this);
832  // 1110 1000 #32-bit disp.
833  emit(0xE8);
834  Address source = pc_ + 4;
835  intptr_t displacement = target - source;
836  ASSERT(is_int32(displacement));
837  emitl(static_cast<int32_t>(displacement));
838 }
839 
840 
841 void Assembler::clc() {
842  EnsureSpace ensure_space(this);
843  emit(0xF8);
844 }
845 
846 
847 void Assembler::cld() {
848  EnsureSpace ensure_space(this);
849  emit(0xFC);
850 }
851 
852 
853 void Assembler::cdq() {
854  EnsureSpace ensure_space(this);
855  emit(0x99);
856 }
857 
858 
859 void Assembler::cmovq(Condition cc, Register dst, Register src) {
860  if (cc == always) {
861  movq(dst, src);
862  } else if (cc == never) {
863  return;
864  }
865  // No need to check CpuInfo for CMOV support, it's a required part of the
866  // 64-bit architecture.
867  ASSERT(cc >= 0); // Use mov for unconditional moves.
868  EnsureSpace ensure_space(this);
869  // Opcode: REX.W 0f 40 + cc /r.
870  emit_rex_64(dst, src);
871  emit(0x0f);
872  emit(0x40 + cc);
873  emit_modrm(dst, src);
874 }
875 
876 
877 void Assembler::cmovq(Condition cc, Register dst, const Operand& src) {
878  if (cc == always) {
879  movq(dst, src);
880  } else if (cc == never) {
881  return;
882  }
883  ASSERT(cc >= 0);
884  EnsureSpace ensure_space(this);
885  // Opcode: REX.W 0f 40 + cc /r.
886  emit_rex_64(dst, src);
887  emit(0x0f);
888  emit(0x40 + cc);
889  emit_operand(dst, src);
890 }
891 
892 
893 void Assembler::cmovl(Condition cc, Register dst, Register src) {
894  if (cc == always) {
895  movl(dst, src);
896  } else if (cc == never) {
897  return;
898  }
899  ASSERT(cc >= 0);
900  EnsureSpace ensure_space(this);
901  // Opcode: 0f 40 + cc /r.
902  emit_optional_rex_32(dst, src);
903  emit(0x0f);
904  emit(0x40 + cc);
905  emit_modrm(dst, src);
906 }
907 
908 
909 void Assembler::cmovl(Condition cc, Register dst, const Operand& src) {
910  if (cc == always) {
911  movl(dst, src);
912  } else if (cc == never) {
913  return;
914  }
915  ASSERT(cc >= 0);
916  EnsureSpace ensure_space(this);
917  // Opcode: 0f 40 + cc /r.
918  emit_optional_rex_32(dst, src);
919  emit(0x0f);
920  emit(0x40 + cc);
921  emit_operand(dst, src);
922 }
923 
924 
925 void Assembler::cmpb_al(Immediate imm8) {
926  ASSERT(is_int8(imm8.value_) || is_uint8(imm8.value_));
927  EnsureSpace ensure_space(this);
928  emit(0x3c);
929  emit(imm8.value_);
930 }
931 
932 
933 void Assembler::cpuid() {
934  EnsureSpace ensure_space(this);
935  emit(0x0F);
936  emit(0xA2);
937 }
938 
939 
940 void Assembler::cqo() {
941  EnsureSpace ensure_space(this);
942  emit_rex_64();
943  emit(0x99);
944 }
945 
946 
947 void Assembler::emit_dec(Register dst, int size) {
948  EnsureSpace ensure_space(this);
949  emit_rex(dst, size);
950  emit(0xFF);
951  emit_modrm(0x1, dst);
952 }
953 
954 
955 void Assembler::emit_dec(const Operand& dst, int size) {
956  EnsureSpace ensure_space(this);
957  emit_rex(dst, size);
958  emit(0xFF);
959  emit_operand(1, dst);
960 }
961 
962 
963 void Assembler::decb(Register dst) {
964  EnsureSpace ensure_space(this);
965  if (!dst.is_byte_register()) {
966  // Register is not one of al, bl, cl, dl. Its encoding needs REX.
967  emit_rex_32(dst);
968  }
969  emit(0xFE);
970  emit_modrm(0x1, dst);
971 }
972 
973 
974 void Assembler::decb(const Operand& dst) {
975  EnsureSpace ensure_space(this);
976  emit_optional_rex_32(dst);
977  emit(0xFE);
978  emit_operand(1, dst);
979 }
980 
981 
982 void Assembler::enter(Immediate size) {
983  EnsureSpace ensure_space(this);
984  emit(0xC8);
985  emitw(size.value_); // 16 bit operand, always.
986  emit(0);
987 }
988 
989 
990 void Assembler::hlt() {
991  EnsureSpace ensure_space(this);
992  emit(0xF4);
993 }
994 
995 
996 void Assembler::emit_idiv(Register src, int size) {
997  EnsureSpace ensure_space(this);
998  emit_rex(src, size);
999  emit(0xF7);
1000  emit_modrm(0x7, src);
1001 }
1002 
1003 
1004 void Assembler::emit_imul(Register src, int size) {
1005  EnsureSpace ensure_space(this);
1006  emit_rex(src, size);
1007  emit(0xF7);
1008  emit_modrm(0x5, src);
1009 }
1010 
1011 
1012 void Assembler::emit_imul(Register dst, Register src, int size) {
1013  EnsureSpace ensure_space(this);
1014  emit_rex(dst, src, size);
1015  emit(0x0F);
1016  emit(0xAF);
1017  emit_modrm(dst, src);
1018 }
1019 
1020 
1021 void Assembler::emit_imul(Register dst, const Operand& src, int size) {
1022  EnsureSpace ensure_space(this);
1023  emit_rex(dst, src, size);
1024  emit(0x0F);
1025  emit(0xAF);
1026  emit_operand(dst, src);
1027 }
1028 
1029 
1030 void Assembler::emit_imul(Register dst, Register src, Immediate imm, int size) {
1031  EnsureSpace ensure_space(this);
1032  emit_rex(dst, src, size);
1033  if (is_int8(imm.value_)) {
1034  emit(0x6B);
1035  emit_modrm(dst, src);
1036  emit(imm.value_);
1037  } else {
1038  emit(0x69);
1039  emit_modrm(dst, src);
1040  emitl(imm.value_);
1041  }
1042 }
1043 
1044 
1045 void Assembler::emit_inc(Register dst, int size) {
1046  EnsureSpace ensure_space(this);
1047  emit_rex(dst, size);
1048  emit(0xFF);
1049  emit_modrm(0x0, dst);
1050 }
1051 
1052 
1053 void Assembler::emit_inc(const Operand& dst, int size) {
1054  EnsureSpace ensure_space(this);
1055  emit_rex(dst, size);
1056  emit(0xFF);
1057  emit_operand(0, dst);
1058 }
1059 
1060 
1061 void Assembler::int3() {
1062  EnsureSpace ensure_space(this);
1063  emit(0xCC);
1064 }
1065 
1066 
1067 void Assembler::j(Condition cc, Label* L, Label::Distance distance) {
1068  if (cc == always) {
1069  jmp(L);
1070  return;
1071  } else if (cc == never) {
1072  return;
1073  }
1074  EnsureSpace ensure_space(this);
1075  ASSERT(is_uint4(cc));
1076  if (L->is_bound()) {
1077  const int short_size = 2;
1078  const int long_size = 6;
1079  int offs = L->pos() - pc_offset();
1080  ASSERT(offs <= 0);
1081  // Determine whether we can use 1-byte offsets for backwards branches,
1082  // which have a max range of 128 bytes.
1083 
1084  // We also need to check predictable_code_size() flag here, because on x64,
1085  // when the full code generator recompiles code for debugging, some places
1086  // need to be padded out to a certain size. The debugger is keeping track of
1087  // how often it did this so that it can adjust return addresses on the
1088  // stack, but if the size of jump instructions can also change, that's not
1089  // enough and the calculated offsets would be incorrect.
1090  if (is_int8(offs - short_size) && !predictable_code_size()) {
1091  // 0111 tttn #8-bit disp.
1092  emit(0x70 | cc);
1093  emit((offs - short_size) & 0xFF);
1094  } else {
1095  // 0000 1111 1000 tttn #32-bit disp.
1096  emit(0x0F);
1097  emit(0x80 | cc);
1098  emitl(offs - long_size);
1099  }
1100  } else if (distance == Label::kNear) {
1101  // 0111 tttn #8-bit disp
1102  emit(0x70 | cc);
1103  byte disp = 0x00;
1104  if (L->is_near_linked()) {
1105  int offset = L->near_link_pos() - pc_offset();
1106  ASSERT(is_int8(offset));
1107  disp = static_cast<byte>(offset & 0xFF);
1108  }
1109  L->link_to(pc_offset(), Label::kNear);
1110  emit(disp);
1111  } else if (L->is_linked()) {
1112  // 0000 1111 1000 tttn #32-bit disp.
1113  emit(0x0F);
1114  emit(0x80 | cc);
1115  emitl(L->pos());
1116  L->link_to(pc_offset() - sizeof(int32_t));
1117  } else {
1118  ASSERT(L->is_unused());
1119  emit(0x0F);
1120  emit(0x80 | cc);
1121  int32_t current = pc_offset();
1122  emitl(current);
1123  L->link_to(current);
1124  }
1125 }
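// Encoding example for a bound label: a je whose target lies 10 bytes before
// the branch gets offs = -10; offs - short_size = -12 fits in a byte, so the
// short form 74 F4 is emitted (disp8 is relative to the end of the 2-byte
// instruction). Otherwise the 6-byte form 0F 84 <rel32> is used with
// offs - long_size as the displacement.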
1126 
1127 
1128 void Assembler::j(Condition cc, Address entry, RelocInfo::Mode rmode) {
1129  ASSERT(RelocInfo::IsRuntimeEntry(rmode));
1130  EnsureSpace ensure_space(this);
1131  ASSERT(is_uint4(cc));
1132  emit(0x0F);
1133  emit(0x80 | cc);
1134  emit_runtime_entry(entry, rmode);
1135 }
1136 
1137 
1138 void Assembler::j(Condition cc,
1139  Handle<Code> target,
1140  RelocInfo::Mode rmode) {
1141  EnsureSpace ensure_space(this);
1142  ASSERT(is_uint4(cc));
1143  // 0000 1111 1000 tttn #32-bit disp.
1144  emit(0x0F);
1145  emit(0x80 | cc);
1146  emit_code_target(target, rmode);
1147 }
1148 
1149 
1150 void Assembler::jmp(Label* L, Label::Distance distance) {
1151  EnsureSpace ensure_space(this);
1152  const int short_size = sizeof(int8_t);
1153  const int long_size = sizeof(int32_t);
1154  if (L->is_bound()) {
1155  int offs = L->pos() - pc_offset() - 1;
1156  ASSERT(offs <= 0);
1157  if (is_int8(offs - short_size) && !predictable_code_size()) {
1158  // 1110 1011 #8-bit disp.
1159  emit(0xEB);
1160  emit((offs - short_size) & 0xFF);
1161  } else {
1162  // 1110 1001 #32-bit disp.
1163  emit(0xE9);
1164  emitl(offs - long_size);
1165  }
1166  } else if (distance == Label::kNear) {
1167  emit(0xEB);
1168  byte disp = 0x00;
1169  if (L->is_near_linked()) {
1170  int offset = L->near_link_pos() - pc_offset();
1171  ASSERT(is_int8(offset));
1172  disp = static_cast<byte>(offset & 0xFF);
1173  }
1174  L->link_to(pc_offset(), Label::kNear);
1175  emit(disp);
1176  } else if (L->is_linked()) {
1177  // 1110 1001 #32-bit disp.
1178  emit(0xE9);
1179  emitl(L->pos());
1180  L->link_to(pc_offset() - long_size);
1181  } else {
1182  // 1110 1001 #32-bit disp.
1183  ASSERT(L->is_unused());
1184  emit(0xE9);
1185  int32_t current = pc_offset();
1186  emitl(current);
1187  L->link_to(current);
1188  }
1189 }
1190 
1191 
1192 void Assembler::jmp(Handle<Code> target, RelocInfo::Mode rmode) {
1193  EnsureSpace ensure_space(this);
1194  // 1110 1001 #32-bit disp.
1195  emit(0xE9);
1196  emit_code_target(target, rmode);
1197 }
1198 
1199 
1200 void Assembler::jmp(Address entry, RelocInfo::Mode rmode) {
1201  ASSERT(RelocInfo::IsRuntimeEntry(rmode));
1202  EnsureSpace ensure_space(this);
1203  ASSERT(RelocInfo::IsRuntimeEntry(rmode));
1204  emit(0xE9);
1205  emit_runtime_entry(entry, rmode);
1206 }
1207 
1208 
1209 void Assembler::jmp(Register target) {
1210  EnsureSpace ensure_space(this);
1211  // Opcode FF/4 r64.
1212  emit_optional_rex_32(target);
1213  emit(0xFF);
1214  emit_modrm(0x4, target);
1215 }
1216 
1217 
1218 void Assembler::jmp(const Operand& src) {
1219  EnsureSpace ensure_space(this);
1220  // Opcode FF/4 m64.
1221  emit_optional_rex_32(src);
1222  emit(0xFF);
1223  emit_operand(0x4, src);
1224 }
1225 
1226 
1227 void Assembler::emit_lea(Register dst, const Operand& src, int size) {
1228  EnsureSpace ensure_space(this);
1229  emit_rex(dst, src, size);
1230  emit(0x8D);
1231  emit_operand(dst, src);
1232 }
1233 
1234 
1235 void Assembler::load_rax(void* value, RelocInfo::Mode mode) {
1236  EnsureSpace ensure_space(this);
1237  if (kPointerSize == kInt64Size) {
1238  emit(0x48); // REX.W
1239  emit(0xA1);
1240  emitp(value, mode);
1241  } else {
1242  ASSERT(kPointerSize == kInt32Size);
1243  emit(0xA1);
1244  emitp(value, mode);
1245  // In 64-bit mode, need to zero extend the operand to 8 bytes.
1246  // See 2.2.1.4 in Intel64 and IA32 Architectures Software
1247  // Developer's Manual Volume 2.
1248  emitl(0);
1249  }
1250 }
1251 
1252 
1253 void Assembler::load_rax(ExternalReference ref) {
1254  load_rax(ref.address(), RelocInfo::EXTERNAL_REFERENCE);
1255 }
1256 
1257 
1258 void Assembler::leave() {
1259  EnsureSpace ensure_space(this);
1260  emit(0xC9);
1261 }
1262 
1263 
1264 void Assembler::movb(Register dst, const Operand& src) {
1265  EnsureSpace ensure_space(this);
1266  if (!dst.is_byte_register()) {
1267  // Register is not one of al, bl, cl, dl. Its encoding needs REX.
1268  emit_rex_32(dst, src);
1269  } else {
1270  emit_optional_rex_32(dst, src);
1271  }
1272  emit(0x8A);
1273  emit_operand(dst, src);
1274 }
1275 
1276 
1277 void Assembler::movb(Register dst, Immediate imm) {
1278  EnsureSpace ensure_space(this);
1279  if (!dst.is_byte_register()) {
1280  emit_rex_32(dst);
1281  }
1282  emit(0xB0 + dst.low_bits());
1283  emit(imm.value_);
1284 }
1285 
1286 
1287 void Assembler::movb(const Operand& dst, Register src) {
1288  EnsureSpace ensure_space(this);
1289  if (!src.is_byte_register()) {
1290  emit_rex_32(src, dst);
1291  } else {
1292  emit_optional_rex_32(src, dst);
1293  }
1294  emit(0x88);
1295  emit_operand(src, dst);
1296 }
1297 
1298 
1299 void Assembler::movb(const Operand& dst, Immediate imm) {
1300  EnsureSpace ensure_space(this);
1301  emit_optional_rex_32(dst);
1302  emit(0xC6);
1303  emit_operand(0x0, dst);
1304  emit(static_cast<byte>(imm.value_));
1305 }
1306 
1307 
1308 void Assembler::movw(Register dst, const Operand& src) {
1309  EnsureSpace ensure_space(this);
1310  emit(0x66);
1311  emit_optional_rex_32(dst, src);
1312  emit(0x8B);
1313  emit_operand(dst, src);
1314 }
1315 
1316 
1317 void Assembler::movw(const Operand& dst, Register src) {
1318  EnsureSpace ensure_space(this);
1319  emit(0x66);
1320  emit_optional_rex_32(src, dst);
1321  emit(0x89);
1322  emit_operand(src, dst);
1323 }
1324 
1325 
1326 void Assembler::movw(const Operand& dst, Immediate imm) {
1327  EnsureSpace ensure_space(this);
1328  emit(0x66);
1329  emit_optional_rex_32(dst);
1330  emit(0xC7);
1331  emit_operand(0x0, dst);
1332  emit(static_cast<byte>(imm.value_ & 0xff));
1333  emit(static_cast<byte>(imm.value_ >> 8));
1334 }
1335 
1336 
1337 void Assembler::emit_mov(Register dst, const Operand& src, int size) {
1338  EnsureSpace ensure_space(this);
1339  emit_rex(dst, src, size);
1340  emit(0x8B);
1341  emit_operand(dst, src);
1342 }
1343 
1344 
1345 void Assembler::emit_mov(Register dst, Register src, int size) {
1346  EnsureSpace ensure_space(this);
1347  if (src.low_bits() == 4) {
1348  emit_rex(src, dst, size);
1349  emit(0x89);
1350  emit_modrm(src, dst);
1351  } else {
1352  emit_rex(dst, src, size);
1353  emit(0x8B);
1354  emit_modrm(dst, src);
1355  }
1356 }
1357 
1358 
1359 void Assembler::emit_mov(const Operand& dst, Register src, int size) {
1360  EnsureSpace ensure_space(this);
1361  emit_rex(src, dst, size);
1362  emit(0x89);
1363  emit_operand(src, dst);
1364 }
1365 
1366 
1367 void Assembler::emit_mov(Register dst, Immediate value, int size) {
1368  EnsureSpace ensure_space(this);
1369  emit_rex(dst, size);
1370  if (size == kInt64Size) {
1371  emit(0xC7);
1372  emit_modrm(0x0, dst);
1373  } else {
1374  ASSERT(size == kInt32Size);
1375  emit(0xB8 + dst.low_bits());
1376  }
1377  emit(value);
1378 }
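// Example: emit_mov(rax, Immediate(0x12345678), kInt64Size) emits
// 48 C7 C0 78 56 34 12 (the imm32 is sign-extended to 64 bits), while the
// kInt32Size variant emits B8 78 56 34 12. Constants that need all 64 bits go
// through movq(Register, int64_t) below, which uses REX.W B8+rd imm64.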
1379 
1380 
1381 void Assembler::emit_mov(const Operand& dst, Immediate value, int size) {
1382  EnsureSpace ensure_space(this);
1383  emit_rex(dst, size);
1384  emit(0xC7);
1385  emit_operand(0x0, dst);
1386  emit(value);
1387 }
1388 
1389 
1390 void Assembler::movp(Register dst, void* value, RelocInfo::Mode rmode) {
1391  EnsureSpace ensure_space(this);
1392  emit_rex(dst, kPointerSize);
1393  emit(0xB8 | dst.low_bits());
1394  emitp(value, rmode);
1395 }
1396 
1397 
1398 void Assembler::movq(Register dst, int64_t value) {
1399  EnsureSpace ensure_space(this);
1400  emit_rex_64(dst);
1401  emit(0xB8 | dst.low_bits());
1402  emitq(value);
1403 }
1404 
1405 
1406 void Assembler::movq(Register dst, uint64_t value) {
1407  movq(dst, static_cast<int64_t>(value));
1408 }
1409 
1410 
1411 // Loads the ip-relative location of the src label into the target location
1412 // (as a 32-bit offset sign extended to 64-bit).
1413 void Assembler::movl(const Operand& dst, Label* src) {
1414  EnsureSpace ensure_space(this);
1415  emit_optional_rex_32(dst);
1416  emit(0xC7);
1417  emit_operand(0, dst);
1418  if (src->is_bound()) {
1419  int offset = src->pos() - pc_offset() - sizeof(int32_t);
1420  ASSERT(offset <= 0);
1421  emitl(offset);
1422  } else if (src->is_linked()) {
1423  emitl(src->pos());
1424  src->link_to(pc_offset() - sizeof(int32_t));
1425  } else {
1426  ASSERT(src->is_unused());
1427  int32_t current = pc_offset();
1428  emitl(current);
1429  src->link_to(current);
1430  }
1431 }
1432 
1433 
1434 void Assembler::movsxbq(Register dst, const Operand& src) {
1435  EnsureSpace ensure_space(this);
1436  emit_rex_64(dst, src);
1437  emit(0x0F);
1438  emit(0xBE);
1439  emit_operand(dst, src);
1440 }
1441 
1442 
1443 void Assembler::movsxwq(Register dst, const Operand& src) {
1444  EnsureSpace ensure_space(this);
1445  emit_rex_64(dst, src);
1446  emit(0x0F);
1447  emit(0xBF);
1448  emit_operand(dst, src);
1449 }
1450 
1451 
1452 void Assembler::movsxlq(Register dst, Register src) {
1453  EnsureSpace ensure_space(this);
1454  emit_rex_64(dst, src);
1455  emit(0x63);
1456  emit_modrm(dst, src);
1457 }
1458 
1459 
1460 void Assembler::movsxlq(Register dst, const Operand& src) {
1461  EnsureSpace ensure_space(this);
1462  emit_rex_64(dst, src);
1463  emit(0x63);
1464  emit_operand(dst, src);
1465 }
1466 
1467 
1468 void Assembler::emit_movzxb(Register dst, const Operand& src, int size) {
1469  EnsureSpace ensure_space(this);
1470  // 32 bit operations zero the top 32 bits of 64 bit registers. Therefore
1471  // there is no need to make this a 64 bit operation.
1472  emit_optional_rex_32(dst, src);
1473  emit(0x0F);
1474  emit(0xB6);
1475  emit_operand(dst, src);
1476 }
1477 
1478 
1479 void Assembler::emit_movzxw(Register dst, const Operand& src, int size) {
1480  EnsureSpace ensure_space(this);
1481  // 32 bit operations zero the top 32 bits of 64 bit registers. Therefore
1482  // there is no need to make this a 64 bit operation.
1483  emit_optional_rex_32(dst, src);
1484  emit(0x0F);
1485  emit(0xB7);
1486  emit_operand(dst, src);
1487 }
1488 
1489 
1490 void Assembler::emit_movzxw(Register dst, Register src, int size) {
1491  EnsureSpace ensure_space(this);
1492  // 32 bit operations zero the top 32 bits of 64 bit registers. Therefore
1493  // there is no need to make this a 64 bit operation.
1494  emit_optional_rex_32(dst, src);
1495  emit(0x0F);
1496  emit(0xB7);
1497  emit_modrm(dst, src);
1498 }
1499 
1500 
1501 void Assembler::repmovsb() {
1502  EnsureSpace ensure_space(this);
1503  emit(0xF3);
1504  emit(0xA4);
1505 }
1506 
1507 
1508 void Assembler::repmovsw() {
1509  EnsureSpace ensure_space(this);
1510  emit(0x66); // Operand size override.
1511  emit(0xF3);
1512  emit(0xA4);
1513 }
1514 
1515 
1516 void Assembler::emit_repmovs(int size) {
1517  EnsureSpace ensure_space(this);
1518  emit(0xF3);
1519  emit_rex(size);
1520  emit(0xA5);
1521 }
1522 
1523 
1524 void Assembler::mul(Register src) {
1525  EnsureSpace ensure_space(this);
1526  emit_rex_64(src);
1527  emit(0xF7);
1528  emit_modrm(0x4, src);
1529 }
1530 
1531 
1532 void Assembler::emit_neg(Register dst, int size) {
1533  EnsureSpace ensure_space(this);
1534  emit_rex(dst, size);
1535  emit(0xF7);
1536  emit_modrm(0x3, dst);
1537 }
1538 
1539 
1540 void Assembler::emit_neg(const Operand& dst, int size) {
1541  EnsureSpace ensure_space(this);
1542  emit_rex_64(dst);
1543  emit(0xF7);
1544  emit_operand(3, dst);
1545 }
1546 
1547 
1548 void Assembler::nop() {
1549  EnsureSpace ensure_space(this);
1550  emit(0x90);
1551 }
1552 
1553 
1554 void Assembler::emit_not(Register dst, int size) {
1555  EnsureSpace ensure_space(this);
1556  emit_rex(dst, size);
1557  emit(0xF7);
1558  emit_modrm(0x2, dst);
1559 }
1560 
1561 
1562 void Assembler::emit_not(const Operand& dst, int size) {
1563  EnsureSpace ensure_space(this);
1564  emit_rex(dst, size);
1565  emit(0xF7);
1566  emit_operand(2, dst);
1567 }
1568 
1569 
1570 void Assembler::Nop(int n) {
1571  // The recommended multi-byte sequences of NOP instructions from the Intel 64
1572  // and IA-32 Architectures Software Developer's Manual.
1573  //
1574  // Length Assembly Byte Sequence
1575  // 2 bytes 66 NOP 66 90H
1576  // 3 bytes NOP DWORD ptr [EAX] 0F 1F 00H
1577  // 4 bytes NOP DWORD ptr [EAX + 00H] 0F 1F 40 00H
1578  // 5 bytes NOP DWORD ptr [EAX + EAX*1 + 00H] 0F 1F 44 00 00H
1579  // 6 bytes 66 NOP DWORD ptr [EAX + EAX*1 + 00H] 66 0F 1F 44 00 00H
1580  // 7 bytes NOP DWORD ptr [EAX + 00000000H] 0F 1F 80 00 00 00 00H
1581  // 8 bytes NOP DWORD ptr [EAX + EAX*1 + 00000000H] 0F 1F 84 00 00 00 00 00H
1582  // 9 bytes 66 NOP DWORD ptr [EAX + EAX*1 + 66 0F 1F 84 00 00 00 00
1583  // 00000000H] 00H
1584 
1585  EnsureSpace ensure_space(this);
1586  while (n > 0) {
1587  switch (n) {
1588  case 2:
1589  emit(0x66);
1590  case 1:
1591  emit(0x90);
1592  return;
1593  case 3:
1594  emit(0x0f);
1595  emit(0x1f);
1596  emit(0x00);
1597  return;
1598  case 4:
1599  emit(0x0f);
1600  emit(0x1f);
1601  emit(0x40);
1602  emit(0x00);
1603  return;
1604  case 6:
1605  emit(0x66);
1606  case 5:
1607  emit(0x0f);
1608  emit(0x1f);
1609  emit(0x44);
1610  emit(0x00);
1611  emit(0x00);
1612  return;
1613  case 7:
1614  emit(0x0f);
1615  emit(0x1f);
1616  emit(0x80);
1617  emit(0x00);
1618  emit(0x00);
1619  emit(0x00);
1620  emit(0x00);
1621  return;
1622  default:
1623  case 11:
1624  emit(0x66);
1625  n--;
1626  case 10:
1627  emit(0x66);
1628  n--;
1629  case 9:
1630  emit(0x66);
1631  n--;
1632  case 8:
1633  emit(0x0f);
1634  emit(0x1f);
1635  emit(0x84);
1636  emit(0x00);
1637  emit(0x00);
1638  emit(0x00);
1639  emit(0x00);
1640  emit(0x00);
1641  n -= 8;
1642  }
1643  }
1644 }
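// Example: Nop(13) takes the default path once, emitting three 0x66 prefixes
// and the 8-byte NOP (0F 1F 84 00 00 00 00 00), leaving n = 2; the second pass
// through the loop emits 66 90, for 13 bytes of padding in two instructions.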
1645 
1646 
1647 void Assembler::popq(Register dst) {
1648  EnsureSpace ensure_space(this);
1649  emit_optional_rex_32(dst);
1650  emit(0x58 | dst.low_bits());
1651 }
1652 
1653 
1654 void Assembler::popq(const Operand& dst) {
1655  EnsureSpace ensure_space(this);
1656  emit_optional_rex_32(dst);
1657  emit(0x8F);
1658  emit_operand(0, dst);
1659 }
1660 
1661 
1662 void Assembler::popfq() {
1663  EnsureSpace ensure_space(this);
1664  emit(0x9D);
1665 }
1666 
1667 
1668 void Assembler::pushq(Register src) {
1669  EnsureSpace ensure_space(this);
1670  emit_optional_rex_32(src);
1671  emit(0x50 | src.low_bits());
1672 }
1673 
1674 
1675 void Assembler::pushq(const Operand& src) {
1676  EnsureSpace ensure_space(this);
1677  emit_optional_rex_32(src);
1678  emit(0xFF);
1679  emit_operand(6, src);
1680 }
1681 
1682 
1683 void Assembler::pushq(Immediate value) {
1684  EnsureSpace ensure_space(this);
1685  if (is_int8(value.value_)) {
1686  emit(0x6A);
1687  emit(value.value_); // Emit low byte of value.
1688  } else {
1689  emit(0x68);
1690  emitl(value.value_);
1691  }
1692 }
1693 
1694 
1695 void Assembler::pushq_imm32(int32_t imm32) {
1696  EnsureSpace ensure_space(this);
1697  emit(0x68);
1698  emitl(imm32);
1699 }
1700 
1701 
1702 void Assembler::pushfq() {
1703  EnsureSpace ensure_space(this);
1704  emit(0x9C);
1705 }
1706 
1707 
1708 void Assembler::ret(int imm16) {
1709  EnsureSpace ensure_space(this);
1710  ASSERT(is_uint16(imm16));
1711  if (imm16 == 0) {
1712  emit(0xC3);
1713  } else {
1714  emit(0xC2);
1715  emit(imm16 & 0xFF);
1716  emit((imm16 >> 8) & 0xFF);
1717  }
1718 }
1719 
1720 
1721 void Assembler::setcc(Condition cc, Register reg) {
1722  if (cc > last_condition) {
1723  movb(reg, Immediate(cc == always ? 1 : 0));
1724  return;
1725  }
1726  EnsureSpace ensure_space(this);
1727  ASSERT(is_uint4(cc));
1728  if (!reg.is_byte_register()) { // Use x64 byte registers, where different.
1729  emit_rex_32(reg);
1730  }
1731  emit(0x0F);
1732  emit(0x90 | cc);
1733  emit_modrm(0x0, reg);
1734 }
1735 
1736 
1737 void Assembler::shld(Register dst, Register src) {
1738  EnsureSpace ensure_space(this);
1739  emit_rex_64(src, dst);
1740  emit(0x0F);
1741  emit(0xA5);
1742  emit_modrm(src, dst);
1743 }
1744 
1745 
1746 void Assembler::shrd(Register dst, Register src) {
1747  EnsureSpace ensure_space(this);
1748  emit_rex_64(src, dst);
1749  emit(0x0F);
1750  emit(0xAD);
1751  emit_modrm(src, dst);
1752 }
1753 
1754 
1755 void Assembler::emit_xchg(Register dst, Register src, int size) {
1756  EnsureSpace ensure_space(this);
1757  if (src.is(rax) || dst.is(rax)) { // Single-byte encoding
1758  Register other = src.is(rax) ? dst : src;
1759  emit_rex(other, size);
1760  emit(0x90 | other.low_bits());
1761  } else if (dst.low_bits() == 4) {
1762  emit_rex(dst, src, size);
1763  emit(0x87);
1764  emit_modrm(dst, src);
1765  } else {
1766  emit_rex(src, dst, size);
1767  emit(0x87);
1768  emit_modrm(src, dst);
1769  }
1770 }
1771 
1772 
1773 void Assembler::store_rax(void* dst, RelocInfo::Mode mode) {
1774  EnsureSpace ensure_space(this);
1775  if (kPointerSize == kInt64Size) {
1776  emit(0x48); // REX.W
1777  emit(0xA3);
1778  emitp(dst, mode);
1779  } else {
1780  ASSERT(kPointerSize == kInt32Size);
1781  emit(0xA3);
1782  emitp(dst, mode);
1783  // In 64-bit mode, need to zero extend the operand to 8 bytes.
1784  // See 2.2.1.4 in Intel64 and IA32 Architectures Software
1785  // Developer's Manual Volume 2.
1786  emitl(0);
1787  }
1788 }
1789 
1790 
1791 void Assembler::store_rax(ExternalReference ref) {
1792  store_rax(ref.address(), RelocInfo::EXTERNAL_REFERENCE);
1793 }
1794 
1795 
1796 void Assembler::testb(Register dst, Register src) {
1797  EnsureSpace ensure_space(this);
1798  if (src.low_bits() == 4) {
1799  emit_rex_32(src, dst);
1800  emit(0x84);
1801  emit_modrm(src, dst);
1802  } else {
1803  if (!dst.is_byte_register() || !src.is_byte_register()) {
1804  // Register is not one of al, bl, cl, dl. Its encoding needs REX.
1805  emit_rex_32(dst, src);
1806  }
1807  emit(0x84);
1808  emit_modrm(dst, src);
1809  }
1810 }
1811 
1812 
1813 void Assembler::testb(Register reg, Immediate mask) {
1814  ASSERT(is_int8(mask.value_) || is_uint8(mask.value_));
1815  EnsureSpace ensure_space(this);
1816  if (reg.is(rax)) {
1817  emit(0xA8);
1818  emit(mask.value_); // Low byte emitted.
1819  } else {
1820  if (!reg.is_byte_register()) {
1821  // Register is not one of al, bl, cl, dl. Its encoding needs REX.
1822  emit_rex_32(reg);
1823  }
1824  emit(0xF6);
1825  emit_modrm(0x0, reg);
1826  emit(mask.value_); // Low byte emitted.
1827  }
1828 }
1829 
1830 
1831 void Assembler::testb(const Operand& op, Immediate mask) {
1832  ASSERT(is_int8(mask.value_) || is_uint8(mask.value_));
1833  EnsureSpace ensure_space(this);
1834  emit_optional_rex_32(rax, op);
1835  emit(0xF6);
1836  emit_operand(rax, op); // Operation code 0
1837  emit(mask.value_); // Low byte emitted.
1838 }
1839 
1840 
1841 void Assembler::testb(const Operand& op, Register reg) {
1842  EnsureSpace ensure_space(this);
1843  if (!reg.is_byte_register()) {
1844  // Register is not one of al, bl, cl, dl. Its encoding needs REX.
1845  emit_rex_32(reg, op);
1846  } else {
1847  emit_optional_rex_32(reg, op);
1848  }
1849  emit(0x84);
1850  emit_operand(reg, op);
1851 }
1852 
1853 
1854 void Assembler::emit_test(Register dst, Register src, int size) {
1855  EnsureSpace ensure_space(this);
1856  if (src.low_bits() == 4) {
1857  emit_rex(src, dst, size);
1858  emit(0x85);
1859  emit_modrm(src, dst);
1860  } else {
1861  emit_rex(dst, src, size);
1862  emit(0x85);
1863  emit_modrm(dst, src);
1864  }
1865 }
1866 
1867 
1868 void Assembler::emit_test(Register reg, Immediate mask, int size) {
1869  // testl with a mask that fits in the low byte is exactly testb.
1870  if (is_uint8(mask.value_)) {
1871  testb(reg, mask);
1872  return;
1873  }
1874  EnsureSpace ensure_space(this);
1875  if (reg.is(rax)) {
1876  emit_rex(rax, size);
1877  emit(0xA9);
1878  emit(mask);
1879  } else {
1880  emit_rex(reg, size);
1881  emit(0xF7);
1882  emit_modrm(0x0, reg);
1883  emit(mask);
1884  }
1885 }
1886 
1887 
1888 void Assembler::emit_test(const Operand& op, Immediate mask, int size) {
1889  // testl with a mask that fits in the low byte is exactly testb.
1890  if (is_uint8(mask.value_)) {
1891  testb(op, mask);
1892  return;
1893  }
1894  EnsureSpace ensure_space(this);
1895  emit_rex(rax, op, size);
1896  emit(0xF7);
1897  emit_operand(rax, op); // Operation code 0
1898  emit(mask);
1899 }
1900 
1901 
1902 void Assembler::emit_test(const Operand& op, Register reg, int size) {
1903  EnsureSpace ensure_space(this);
1904  emit_rex(reg, op, size);
1905  emit(0x85);
1906  emit_operand(reg, op);
1907 }
1908 
1909 
1910 // FPU instructions.
1911 
1912 
1913 void Assembler::fld(int i) {
1914  EnsureSpace ensure_space(this);
1915  emit_farith(0xD9, 0xC0, i);
1916 }
1917 
1918 
1919 void Assembler::fld1() {
1920  EnsureSpace ensure_space(this);
1921  emit(0xD9);
1922  emit(0xE8);
1923 }
1924 
1925 
1926 void Assembler::fldz() {
1927  EnsureSpace ensure_space(this);
1928  emit(0xD9);
1929  emit(0xEE);
1930 }
1931 
1932 
1933 void Assembler::fldpi() {
1934  EnsureSpace ensure_space(this);
1935  emit(0xD9);
1936  emit(0xEB);
1937 }
1938 
1939 
1940 void Assembler::fldln2() {
1941  EnsureSpace ensure_space(this);
1942  emit(0xD9);
1943  emit(0xED);
1944 }
1945 
1946 
1947 void Assembler::fld_s(const Operand& adr) {
1948  EnsureSpace ensure_space(this);
1949  emit_optional_rex_32(adr);
1950  emit(0xD9);
1951  emit_operand(0, adr);
1952 }
1953 
1954 
1955 void Assembler::fld_d(const Operand& adr) {
1956  EnsureSpace ensure_space(this);
1957  emit_optional_rex_32(adr);
1958  emit(0xDD);
1959  emit_operand(0, adr);
1960 }
1961 
1962 
1963 void Assembler::fstp_s(const Operand& adr) {
1964  EnsureSpace ensure_space(this);
1965  emit_optional_rex_32(adr);
1966  emit(0xD9);
1967  emit_operand(3, adr);
1968 }
1969 
1970 
1971 void Assembler::fstp_d(const Operand& adr) {
1972  EnsureSpace ensure_space(this);
1973  emit_optional_rex_32(adr);
1974  emit(0xDD);
1975  emit_operand(3, adr);
1976 }
1977 
1978 
1979 void Assembler::fstp(int index) {
1980  ASSERT(is_uint3(index));
1981  EnsureSpace ensure_space(this);
1982  emit_farith(0xDD, 0xD8, index);
1983 }
1984 
1985 
1986 void Assembler::fild_s(const Operand& adr) {
1987  EnsureSpace ensure_space(this);
1988  emit_optional_rex_32(adr);
1989  emit(0xDB);
1990  emit_operand(0, adr);
1991 }
1992 
1993 
1994 void Assembler::fild_d(const Operand& adr) {
1995  EnsureSpace ensure_space(this);
1996  emit_optional_rex_32(adr);
1997  emit(0xDF);
1998  emit_operand(5, adr);
1999 }
2000 
2001 
2002 void Assembler::fistp_s(const Operand& adr) {
2003  EnsureSpace ensure_space(this);
2004  emit_optional_rex_32(adr);
2005  emit(0xDB);
2006  emit_operand(3, adr);
2007 }
2008 
2009 
2010 void Assembler::fisttp_s(const Operand& adr) {
2011  ASSERT(IsEnabled(SSE3));
2012  EnsureSpace ensure_space(this);
2013  emit_optional_rex_32(adr);
2014  emit(0xDB);
2015  emit_operand(1, adr);
2016 }
2017 
2018 
2019 void Assembler::fisttp_d(const Operand& adr) {
2020  ASSERT(IsEnabled(SSE3));
2021  EnsureSpace ensure_space(this);
2022  emit_optional_rex_32(adr);
2023  emit(0xDD);
2024  emit_operand(1, adr);
2025 }
2026 
2027 
2028 void Assembler::fist_s(const Operand& adr) {
2029  EnsureSpace ensure_space(this);
2030  emit_optional_rex_32(adr);
2031  emit(0xDB);
2032  emit_operand(2, adr);
2033 }
2034 
2035 
2036 void Assembler::fistp_d(const Operand& adr) {
2037  EnsureSpace ensure_space(this);
2038  emit_optional_rex_32(adr);
2039  emit(0xDF);
2040  emit_operand(7, adr);
2041 }
2042 
2043 
2044 void Assembler::fabs() {
2045  EnsureSpace ensure_space(this);
2046  emit(0xD9);
2047  emit(0xE1);
2048 }
2049 
2050 
2051 void Assembler::fchs() {
2052  EnsureSpace ensure_space(this);
2053  emit(0xD9);
2054  emit(0xE0);
2055 }
2056 
2057 
2058 void Assembler::fcos() {
2059  EnsureSpace ensure_space(this);
2060  emit(0xD9);
2061  emit(0xFF);
2062 }
2063 
2064 
2065 void Assembler::fsin() {
2066  EnsureSpace ensure_space(this);
2067  emit(0xD9);
2068  emit(0xFE);
2069 }
2070 
2071 
2072 void Assembler::fptan() {
2073  EnsureSpace ensure_space(this);
2074  emit(0xD9);
2075  emit(0xF2);
2076 }
2077 
2078 
2079 void Assembler::fyl2x() {
2080  EnsureSpace ensure_space(this);
2081  emit(0xD9);
2082  emit(0xF1);
2083 }
2084 
2085 
2086 void Assembler::f2xm1() {
2087  EnsureSpace ensure_space(this);
2088  emit(0xD9);
2089  emit(0xF0);
2090 }
2091 
2092 
2093 void Assembler::fscale() {
2094  EnsureSpace ensure_space(this);
2095  emit(0xD9);
2096  emit(0xFD);
2097 }
2098 
2099 
2100 void Assembler::fninit() {
2101  EnsureSpace ensure_space(this);
2102  emit(0xDB);
2103  emit(0xE3);
2104 }
2105 
2106 
2107 void Assembler::fadd(int i) {
2108  EnsureSpace ensure_space(this);
2109  emit_farith(0xDC, 0xC0, i);
2110 }
2111 
2112 
2113 void Assembler::fsub(int i) {
2114  EnsureSpace ensure_space(this);
2115  emit_farith(0xDC, 0xE8, i);
2116 }
2117 
2118 
2119 void Assembler::fisub_s(const Operand& adr) {
2120  EnsureSpace ensure_space(this);
2121  emit_optional_rex_32(adr);
2122  emit(0xDA);
2123  emit_operand(4, adr);
2124 }
2125 
2126 
2127 void Assembler::fmul(int i) {
2128  EnsureSpace ensure_space(this);
2129  emit_farith(0xDC, 0xC8, i);
2130 }
2131 
2132 
2133 void Assembler::fdiv(int i) {
2134  EnsureSpace ensure_space(this);
2135  emit_farith(0xDC, 0xF8, i);
2136 }
2137 
2138 
2139 void Assembler::faddp(int i) {
2140  EnsureSpace ensure_space(this);
2141  emit_farith(0xDE, 0xC0, i);
2142 }
2143 
2144 
2145 void Assembler::fsubp(int i) {
2146  EnsureSpace ensure_space(this);
2147  emit_farith(0xDE, 0xE8, i);
2148 }
2149 
2150 
2151 void Assembler::fsubrp(int i) {
2152  EnsureSpace ensure_space(this);
2153  emit_farith(0xDE, 0xE0, i);
2154 }
2155 
2156 
2157 void Assembler::fmulp(int i) {
2158  EnsureSpace ensure_space(this);
2159  emit_farith(0xDE, 0xC8, i);
2160 }
2161 
2162 
2163 void Assembler::fdivp(int i) {
2164  EnsureSpace ensure_space(this);
2165  emit_farith(0xDE, 0xF8, i);
2166 }
2167 
2168 
2169 void Assembler::fprem() {
2170  EnsureSpace ensure_space(this);
2171  emit(0xD9);
2172  emit(0xF8);
2173 }
2174 
2175 
2176 void Assembler::fprem1() {
2177  EnsureSpace ensure_space(this);
2178  emit(0xD9);
2179  emit(0xF5);
2180 }
2181 
2182 
2183 void Assembler::fxch(int i) {
2184  EnsureSpace ensure_space(this);
2185  emit_farith(0xD9, 0xC8, i);
2186 }
2187 
2188 
2189 void Assembler::fincstp() {
2190  EnsureSpace ensure_space(this);
2191  emit(0xD9);
2192  emit(0xF7);
2193 }
2194 
2195 
2196 void Assembler::ffree(int i) {
2197  EnsureSpace ensure_space(this);
2198  emit_farith(0xDD, 0xC0, i);
2199 }
2200 
2201 
2202 void Assembler::ftst() {
2203  EnsureSpace ensure_space(this);
2204  emit(0xD9);
2205  emit(0xE4);
2206 }
2207 
2208 
2209 void Assembler::fucomp(int i) {
2210  EnsureSpace ensure_space(this);
2211  emit_farith(0xDD, 0xE8, i);
2212 }
2213 
2214 
2215 void Assembler::fucompp() {
2216  EnsureSpace ensure_space(this);
2217  emit(0xDA);
2218  emit(0xE9);
2219 }
2220 
2221 
2222 void Assembler::fucomi(int i) {
2223  EnsureSpace ensure_space(this);
2224  emit(0xDB);
2225  emit(0xE8 + i);
2226 }
2227 
2228 
2229 void Assembler::fucomip() {
2230  EnsureSpace ensure_space(this);
2231  emit(0xDF);
2232  emit(0xE9);
2233 }
2234 
2235 
2236 void Assembler::fcompp() {
2237  EnsureSpace ensure_space(this);
2238  emit(0xDE);
2239  emit(0xD9);
2240 }
2241 
2242 
2243 void Assembler::fnstsw_ax() {
2244  EnsureSpace ensure_space(this);
2245  emit(0xDF);
2246  emit(0xE0);
2247 }
2248 
2249 
2250 void Assembler::fwait() {
2251  EnsureSpace ensure_space(this);
2252  emit(0x9B);
2253 }
2254 
2255 
2256 void Assembler::frndint() {
2257  EnsureSpace ensure_space(this);
2258  emit(0xD9);
2259  emit(0xFC);
2260 }
2261 
2262 
2263 void Assembler::fnclex() {
2264  EnsureSpace ensure_space(this);
2265  emit(0xDB);
2266  emit(0xE2);
2267 }
2268 
2269 
2270 void Assembler::sahf() {
2271  // TODO(X64): Test for presence. Not all 64-bit Intel CPUs have sahf
2272  // in 64-bit mode. Test via CPUID.
2273  EnsureSpace ensure_space(this);
2274  emit(0x9E);
2275 }
2276 
2277 
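// emit_farith encodes the register-stack forms of the x87 instructions above:
// it emits the first opcode byte b1 followed by b2 + i, where the low three
// bits of the second byte select FPU stack slot st(i). For example, fadd(1)
// calls emit_farith(0xDC, 0xC0, 1) and emits the bytes 0xDC 0xC1.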
2278 void Assembler::emit_farith(int b1, int b2, int i) {
2279  ASSERT(is_uint8(b1) && is_uint8(b2)); // wrong opcode
2280  ASSERT(is_uint3(i)); // illegal stack offset
2281  emit(b1);
2282  emit(b2 + i);
2283 }
2284 
2285 
2286 // SSE operations.
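// These are the packed-single (...ps) forms: an optional REX prefix for
// extended registers, the 0x0F escape byte, the opcode, then a ModRM byte
// produced by emit_sse_operand. No mandatory prefix byte is used.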
2287 
2288 void Assembler::andps(XMMRegister dst, XMMRegister src) {
2289  EnsureSpace ensure_space(this);
2290  emit_optional_rex_32(dst, src);
2291  emit(0x0F);
2292  emit(0x54);
2293  emit_sse_operand(dst, src);
2294 }
2295 
2296 
2297 void Assembler::andps(XMMRegister dst, const Operand& src) {
2298  EnsureSpace ensure_space(this);
2299  emit_optional_rex_32(dst, src);
2300  emit(0x0F);
2301  emit(0x54);
2302  emit_sse_operand(dst, src);
2303 }
2304 
2305 
2306 void Assembler::orps(XMMRegister dst, XMMRegister src) {
2307  EnsureSpace ensure_space(this);
2308  emit_optional_rex_32(dst, src);
2309  emit(0x0F);
2310  emit(0x56);
2311  emit_sse_operand(dst, src);
2312 }
2313 
2314 
2315 void Assembler::orps(XMMRegister dst, const Operand& src) {
2316  EnsureSpace ensure_space(this);
2317  emit_optional_rex_32(dst, src);
2318  emit(0x0F);
2319  emit(0x56);
2320  emit_sse_operand(dst, src);
2321 }
2322 
2323 
2324 void Assembler::xorps(XMMRegister dst, XMMRegister src) {
2325  EnsureSpace ensure_space(this);
2326  emit_optional_rex_32(dst, src);
2327  emit(0x0F);
2328  emit(0x57);
2329  emit_sse_operand(dst, src);
2330 }
2331 
2332 
2333 void Assembler::xorps(XMMRegister dst, const Operand& src) {
2334  EnsureSpace ensure_space(this);
2335  emit_optional_rex_32(dst, src);
2336  emit(0x0F);
2337  emit(0x57);
2338  emit_sse_operand(dst, src);
2339 }
2340 
2341 
2342 void Assembler::addps(XMMRegister dst, XMMRegister src) {
2343  EnsureSpace ensure_space(this);
2344  emit_optional_rex_32(dst, src);
2345  emit(0x0F);
2346  emit(0x58);
2347  emit_sse_operand(dst, src);
2348 }
2349 
2350 
2351 void Assembler::addps(XMMRegister dst, const Operand& src) {
2352  EnsureSpace ensure_space(this);
2353  emit_optional_rex_32(dst, src);
2354  emit(0x0F);
2355  emit(0x58);
2356  emit_sse_operand(dst, src);
2357 }
2358 
2359 
2360 void Assembler::subps(XMMRegister dst, XMMRegister src) {
2361  EnsureSpace ensure_space(this);
2362  emit_optional_rex_32(dst, src);
2363  emit(0x0F);
2364  emit(0x5C);
2365  emit_sse_operand(dst, src);
2366 }
2367 
2368 
2369 void Assembler::subps(XMMRegister dst, const Operand& src) {
2370  EnsureSpace ensure_space(this);
2371  emit_optional_rex_32(dst, src);
2372  emit(0x0F);
2373  emit(0x5C);
2374  emit_sse_operand(dst, src);
2375 }
2376 
2377 
2378 void Assembler::mulps(XMMRegister dst, XMMRegister src) {
2379  EnsureSpace ensure_space(this);
2380  emit_optional_rex_32(dst, src);
2381  emit(0x0F);
2382  emit(0x59);
2383  emit_sse_operand(dst, src);
2384 }
2385 
2386 
2387 void Assembler::mulps(XMMRegister dst, const Operand& src) {
2388  EnsureSpace ensure_space(this);
2389  emit_optional_rex_32(dst, src);
2390  emit(0x0F);
2391  emit(0x59);
2392  emit_sse_operand(dst, src);
2393 }
2394 
2395 
2396 void Assembler::divps(XMMRegister dst, XMMRegister src) {
2397  EnsureSpace ensure_space(this);
2398  emit_optional_rex_32(dst, src);
2399  emit(0x0F);
2400  emit(0x5E);
2401  emit_sse_operand(dst, src);
2402 }
2403 
2404 
2405 void Assembler::divps(XMMRegister dst, const Operand& src) {
2406  EnsureSpace ensure_space(this);
2407  emit_optional_rex_32(dst, src);
2408  emit(0x0F);
2409  emit(0x5E);
2410  emit_sse_operand(dst, src);
2411 }
2412 
2413 
2414 // SSE 2 operations.
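// The SSE2 and scalar emitters below select the instruction form with a
// mandatory prefix before the 0x0F escape: 0x66 for packed-double and integer
// moves, 0xF2 for scalar double (sd), and 0xF3 for scalar single (ss).
// For example, addsd(xmm1, xmm2) emits F2 0F 58 CA.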
2415 
2416 void Assembler::movd(XMMRegister dst, Register src) {
2417  EnsureSpace ensure_space(this);
2418  emit(0x66);
2419  emit_optional_rex_32(dst, src);
2420  emit(0x0F);
2421  emit(0x6E);
2422  emit_sse_operand(dst, src);
2423 }
2424 
2425 
2426 void Assembler::movd(Register dst, XMMRegister src) {
2427  EnsureSpace ensure_space(this);
2428  emit(0x66);
2429  emit_optional_rex_32(src, dst);
2430  emit(0x0F);
2431  emit(0x7E);
2432  emit_sse_operand(src, dst);
2433 }
2434 
2435 
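// movq between a general register and an XMM register uses the same
// 66 0F 6E/7E encoding as movd, widened to 64 bits by the REX.W prefix
// emitted by emit_rex_64.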
2436 void Assembler::movq(XMMRegister dst, Register src) {
2437  EnsureSpace ensure_space(this);
2438  emit(0x66);
2439  emit_rex_64(dst, src);
2440  emit(0x0F);
2441  emit(0x6E);
2442  emit_sse_operand(dst, src);
2443 }
2444 
2445 
2446 void Assembler::movq(Register dst, XMMRegister src) {
2447  EnsureSpace ensure_space(this);
2448  emit(0x66);
2449  emit_rex_64(src, dst);
2450  emit(0x0F);
2451  emit(0x7E);
2452  emit_sse_operand(src, dst);
2453 }
2454 
2455 
2456 void Assembler::movq(XMMRegister dst, XMMRegister src) {
2457  EnsureSpace ensure_space(this);
2458  if (dst.low_bits() == 4) {
2459  // Avoid unnecessary SIB byte.
2460  emit(0xf3);
2461  emit_optional_rex_32(dst, src);
2462  emit(0x0F);
2463  emit(0x7e);
2464  emit_sse_operand(dst, src);
2465  } else {
2466  emit(0x66);
2467  emit_optional_rex_32(src, dst);
2468  emit(0x0F);
2469  emit(0xD6);
2470  emit_sse_operand(src, dst);
2471  }
2472 }
2473 
2474 
2475 void Assembler::movdqa(const Operand& dst, XMMRegister src) {
2476  EnsureSpace ensure_space(this);
2477  emit(0x66);
2478  emit_rex_64(src, dst);
2479  emit(0x0F);
2480  emit(0x7F);
2481  emit_sse_operand(src, dst);
2482 }
2483 
2484 
2485 void Assembler::movdqa(XMMRegister dst, const Operand& src) {
2486  EnsureSpace ensure_space(this);
2487  emit(0x66);
2488  emit_rex_64(dst, src);
2489  emit(0x0F);
2490  emit(0x6F);
2491  emit_sse_operand(dst, src);
2492 }
2493 
2494 
2495 void Assembler::movdqu(const Operand& dst, XMMRegister src) {
2496  EnsureSpace ensure_space(this);
2497  emit(0xF3);
2498  emit_rex_64(src, dst);
2499  emit(0x0F);
2500  emit(0x7F);
2501  emit_sse_operand(src, dst);
2502 }
2503 
2504 
2505 void Assembler::movdqu(XMMRegister dst, const Operand& src) {
2506  EnsureSpace ensure_space(this);
2507  emit(0xF3);
2508  emit_rex_64(dst, src);
2509  emit(0x0F);
2510  emit(0x6F);
2511  emit_sse_operand(dst, src);
2512 }
2513 
2514 
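// extractps (66 0F 3A 17, SSE4.1) stores one 32-bit lane of src, selected by
// the low two bits of imm8, into the destination register.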
2515 void Assembler::extractps(Register dst, XMMRegister src, byte imm8) {
2516  ASSERT(IsEnabled(SSE4_1));
2517  ASSERT(is_uint8(imm8));
2518  EnsureSpace ensure_space(this);
2519  emit(0x66);
2520  emit_optional_rex_32(src, dst);
2521  emit(0x0F);
2522  emit(0x3A);
2523  emit(0x17);
2524  emit_sse_operand(src, dst);
2525  emit(imm8);
2526 }
2527 
2528 
2529 void Assembler::movsd(const Operand& dst, XMMRegister src) {
2530  EnsureSpace ensure_space(this);
2531  emit(0xF2); // double
2532  emit_optional_rex_32(src, dst);
2533  emit(0x0F);
2534  emit(0x11); // store
2535  emit_sse_operand(src, dst);
2536 }
2537 
2538 
2539 void Assembler::movsd(XMMRegister dst, XMMRegister src) {
2540  EnsureSpace ensure_space(this);
2541  emit(0xF2); // double
2542  emit_optional_rex_32(dst, src);
2543  emit(0x0F);
2544  emit(0x10); // load
2545  emit_sse_operand(dst, src);
2546 }
2547 
2548 
2549 void Assembler::movsd(XMMRegister dst, const Operand& src) {
2550  EnsureSpace ensure_space(this);
2551  emit(0xF2); // double
2552  emit_optional_rex_32(dst, src);
2553  emit(0x0F);
2554  emit(0x10); // load
2555  emit_sse_operand(dst, src);
2556 }
2557 
2558 
2559 void Assembler::movaps(XMMRegister dst, XMMRegister src) {
2560  EnsureSpace ensure_space(this);
2561  if (src.low_bits() == 4) {
2562  // Try to avoid an unnecessary SIB byte.
2563  emit_optional_rex_32(src, dst);
2564  emit(0x0F);
2565  emit(0x29);
2566  emit_sse_operand(src, dst);
2567  } else {
2568  emit_optional_rex_32(dst, src);
2569  emit(0x0F);
2570  emit(0x28);
2571  emit_sse_operand(dst, src);
2572  }
2573 }
2574 
2575 
2576 void Assembler::shufps(XMMRegister dst, XMMRegister src, byte imm8) {
2577  ASSERT(is_uint8(imm8));
2578  EnsureSpace ensure_space(this);
2579  emit_optional_rex_32(src, dst);
2580  emit(0x0F);
2581  emit(0xC6);
2582  emit_sse_operand(dst, src);
2583  emit(imm8);
2584 }
2585 
2586 
2587 void Assembler::movapd(XMMRegister dst, XMMRegister src) {
2588  EnsureSpace ensure_space(this);
2589  if (src.low_bits() == 4) {
2590  // Try to avoid an unnecessary SIB byte.
2591  emit(0x66);
2592  emit_optional_rex_32(src, dst);
2593  emit(0x0F);
2594  emit(0x29);
2595  emit_sse_operand(src, dst);
2596  } else {
2597  emit(0x66);
2598  emit_optional_rex_32(dst, src);
2599  emit(0x0F);
2600  emit(0x28);
2601  emit_sse_operand(dst, src);
2602  }
2603 }
2604 
2605 
2606 void Assembler::movss(XMMRegister dst, const Operand& src) {
2607  EnsureSpace ensure_space(this);
2608  emit(0xF3); // single
2609  emit_optional_rex_32(dst, src);
2610  emit(0x0F);
2611  emit(0x10); // load
2612  emit_sse_operand(dst, src);
2613 }
2614 
2615 
2616 void Assembler::movss(const Operand& src, XMMRegister dst) {
2617  EnsureSpace ensure_space(this);
2618  emit(0xF3); // single
2619  emit_optional_rex_32(dst, src);
2620  emit(0x0F);
2621  emit(0x11); // store
2622  emit_sse_operand(dst, src);
2623 }
2624 
2625 
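// psllq with an immediate shift uses the group opcode 66 0F 73 /6: the /6
// opcode extension is supplied by passing rsi (register code 6) as the "reg"
// field of the ModRM byte. For example, psllq(xmm3, 17) emits 66 0F 73 F3 11.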
2626 void Assembler::psllq(XMMRegister reg, byte imm8) {
2627  EnsureSpace ensure_space(this);
2628  emit(0x66);
2629  emit(0x0F);
2630  emit(0x73);
2631  emit_sse_operand(rsi, reg); // rsi == 6
2632  emit(imm8);
2633 }
2634 
2635 
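// Conversion helpers: the cvtt... forms (opcode 0F 2C) truncate toward zero,
// while cvtsd2si/cvtsd2siq (opcode 0F 2D) round according to MXCSR. The ...siq
// and ...qsi variants emit a REX.W prefix to select 64-bit integer operands.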
2636 void Assembler::cvttss2si(Register dst, const Operand& src) {
2637  EnsureSpace ensure_space(this);
2638  emit(0xF3);
2639  emit_optional_rex_32(dst, src);
2640  emit(0x0F);
2641  emit(0x2C);
2642  emit_operand(dst, src);
2643 }
2644 
2645 
2646 void Assembler::cvttss2si(Register dst, XMMRegister src) {
2647  EnsureSpace ensure_space(this);
2648  emit(0xF3);
2649  emit_optional_rex_32(dst, src);
2650  emit(0x0F);
2651  emit(0x2C);
2652  emit_sse_operand(dst, src);
2653 }
2654 
2655 
2656 void Assembler::cvttsd2si(Register dst, const Operand& src) {
2657  EnsureSpace ensure_space(this);
2658  emit(0xF2);
2659  emit_optional_rex_32(dst, src);
2660  emit(0x0F);
2661  emit(0x2C);
2662  emit_operand(dst, src);
2663 }
2664 
2665 
2666 void Assembler::cvttsd2si(Register dst, XMMRegister src) {
2667  EnsureSpace ensure_space(this);
2668  emit(0xF2);
2669  emit_optional_rex_32(dst, src);
2670  emit(0x0F);
2671  emit(0x2C);
2672  emit_sse_operand(dst, src);
2673 }
2674 
2675 
2676 void Assembler::cvttsd2siq(Register dst, XMMRegister src) {
2677  EnsureSpace ensure_space(this);
2678  emit(0xF2);
2679  emit_rex_64(dst, src);
2680  emit(0x0F);
2681  emit(0x2C);
2682  emit_sse_operand(dst, src);
2683 }
2684 
2685 
2686 void Assembler::cvtlsi2sd(XMMRegister dst, const Operand& src) {
2687  EnsureSpace ensure_space(this);
2688  emit(0xF2);
2689  emit_optional_rex_32(dst, src);
2690  emit(0x0F);
2691  emit(0x2A);
2692  emit_sse_operand(dst, src);
2693 }
2694 
2695 
2696 void Assembler::cvtlsi2sd(XMMRegister dst, Register src) {
2697  EnsureSpace ensure_space(this);
2698  emit(0xF2);
2699  emit_optional_rex_32(dst, src);
2700  emit(0x0F);
2701  emit(0x2A);
2702  emit_sse_operand(dst, src);
2703 }
2704 
2705 
2706 void Assembler::cvtlsi2ss(XMMRegister dst, Register src) {
2707  EnsureSpace ensure_space(this);
2708  emit(0xF3);
2709  emit_optional_rex_32(dst, src);
2710  emit(0x0F);
2711  emit(0x2A);
2712  emit_sse_operand(dst, src);
2713 }
2714 
2715 
2716 void Assembler::cvtqsi2sd(XMMRegister dst, Register src) {
2717  EnsureSpace ensure_space(this);
2718  emit(0xF2);
2719  emit_rex_64(dst, src);
2720  emit(0x0F);
2721  emit(0x2A);
2722  emit_sse_operand(dst, src);
2723 }
2724 
2725 
2726 void Assembler::cvtss2sd(XMMRegister dst, XMMRegister src) {
2727  EnsureSpace ensure_space(this);
2728  emit(0xF3);
2729  emit_optional_rex_32(dst, src);
2730  emit(0x0F);
2731  emit(0x5A);
2732  emit_sse_operand(dst, src);
2733 }
2734 
2735 
2736 void Assembler::cvtss2sd(XMMRegister dst, const Operand& src) {
2737  EnsureSpace ensure_space(this);
2738  emit(0xF3);
2739  emit_optional_rex_32(dst, src);
2740  emit(0x0F);
2741  emit(0x5A);
2742  emit_sse_operand(dst, src);
2743 }
2744 
2745 
2746 void Assembler::cvtsd2ss(XMMRegister dst, XMMRegister src) {
2747  EnsureSpace ensure_space(this);
2748  emit(0xF2);
2749  emit_optional_rex_32(dst, src);
2750  emit(0x0F);
2751  emit(0x5A);
2752  emit_sse_operand(dst, src);
2753 }
2754 
2755 
2756 void Assembler::cvtsd2si(Register dst, XMMRegister src) {
2757  EnsureSpace ensure_space(this);
2758  emit(0xF2);
2759  emit_optional_rex_32(dst, src);
2760  emit(0x0F);
2761  emit(0x2D);
2762  emit_sse_operand(dst, src);
2763 }
2764 
2765 
2766 void Assembler::cvtsd2siq(Register dst, XMMRegister src) {
2767  EnsureSpace ensure_space(this);
2768  emit(0xF2);
2769  emit_rex_64(dst, src);
2770  emit(0x0F);
2771  emit(0x2D);
2772  emit_sse_operand(dst, src);
2773 }
2774 
2775 
2776 void Assembler::addsd(XMMRegister dst, XMMRegister src) {
2777  EnsureSpace ensure_space(this);
2778  emit(0xF2);
2779  emit_optional_rex_32(dst, src);
2780  emit(0x0F);
2781  emit(0x58);
2782  emit_sse_operand(dst, src);
2783 }
2784 
2785 
2786 void Assembler::addsd(XMMRegister dst, const Operand& src) {
2787  EnsureSpace ensure_space(this);
2788  emit(0xF2);
2789  emit_optional_rex_32(dst, src);
2790  emit(0x0F);
2791  emit(0x58);
2792  emit_sse_operand(dst, src);
2793 }
2794 
2795 
2796 void Assembler::mulsd(XMMRegister dst, XMMRegister src) {
2797  EnsureSpace ensure_space(this);
2798  emit(0xF2);
2799  emit_optional_rex_32(dst, src);
2800  emit(0x0F);
2801  emit(0x59);
2802  emit_sse_operand(dst, src);
2803 }
2804 
2805 
2806 void Assembler::mulsd(XMMRegister dst, const Operand& src) {
2807  EnsureSpace ensure_space(this);
2808  emit(0xF2);
2809  emit_optional_rex_32(dst, src);
2810  emit(0x0F);
2811  emit(0x59);
2812  emit_sse_operand(dst, src);
2813 }
2814 
2815 
2816 void Assembler::subsd(XMMRegister dst, XMMRegister src) {
2817  EnsureSpace ensure_space(this);
2818  emit(0xF2);
2819  emit_optional_rex_32(dst, src);
2820  emit(0x0F);
2821  emit(0x5C);
2822  emit_sse_operand(dst, src);
2823 }
2824 
2825 
2826 void Assembler::divsd(XMMRegister dst, XMMRegister src) {
2827  EnsureSpace ensure_space(this);
2828  emit(0xF2);
2829  emit_optional_rex_32(dst, src);
2830  emit(0x0F);
2831  emit(0x5E);
2832  emit_sse_operand(dst, src);
2833 }
2834 
2835 
2836 void Assembler::andpd(XMMRegister dst, XMMRegister src) {
2837  EnsureSpace ensure_space(this);
2838  emit(0x66);
2839  emit_optional_rex_32(dst, src);
2840  emit(0x0F);
2841  emit(0x54);
2842  emit_sse_operand(dst, src);
2843 }
2844 
2845 
2846 void Assembler::orpd(XMMRegister dst, XMMRegister src) {
2847  EnsureSpace ensure_space(this);
2848  emit(0x66);
2849  emit_optional_rex_32(dst, src);
2850  emit(0x0F);
2851  emit(0x56);
2852  emit_sse_operand(dst, src);
2853 }
2854 
2855 
2856 void Assembler::xorpd(XMMRegister dst, XMMRegister src) {
2857  EnsureSpace ensure_space(this);
2858  emit(0x66);
2859  emit_optional_rex_32(dst, src);
2860  emit(0x0F);
2861  emit(0x57);
2862  emit_sse_operand(dst, src);
2863 }
2864 
2865 
2866 void Assembler::sqrtsd(XMMRegister dst, XMMRegister src) {
2867  EnsureSpace ensure_space(this);
2868  emit(0xF2);
2869  emit_optional_rex_32(dst, src);
2870  emit(0x0F);
2871  emit(0x51);
2872  emit_sse_operand(dst, src);
2873 }
2874 
2875 
2876 void Assembler::ucomisd(XMMRegister dst, XMMRegister src) {
2877  EnsureSpace ensure_space(this);
2878  emit(0x66);
2879  emit_optional_rex_32(dst, src);
2880  emit(0x0f);
2881  emit(0x2e);
2882  emit_sse_operand(dst, src);
2883 }
2884 
2885 
2886 void Assembler::ucomisd(XMMRegister dst, const Operand& src) {
2887  EnsureSpace ensure_space(this);
2888  emit(0x66);
2889  emit_optional_rex_32(dst, src);
2890  emit(0x0f);
2891  emit(0x2e);
2892  emit_sse_operand(dst, src);
2893 }
2894 
2895 
2896 void Assembler::cmpltsd(XMMRegister dst, XMMRegister src) {
2897  EnsureSpace ensure_space(this);
2898  emit(0xF2);
2899  emit_optional_rex_32(dst, src);
2900  emit(0x0F);
2901  emit(0xC2);
2902  emit_sse_operand(dst, src);
2903  emit(0x01); // LT == 1
2904 }
2905 
2906 
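// roundsd (66 0F 3A 0B, SSE4.1) takes an immediate control byte: bits 1:0
// hold the rounding mode and bit 3, set below, suppresses the precision
// exception.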
2907 void Assembler::roundsd(XMMRegister dst, XMMRegister src,
2908  Assembler::RoundingMode mode) {
2909  ASSERT(IsEnabled(SSE4_1));
2910  EnsureSpace ensure_space(this);
2911  emit(0x66);
2912  emit_optional_rex_32(dst, src);
2913  emit(0x0f);
2914  emit(0x3a);
2915  emit(0x0b);
2916  emit_sse_operand(dst, src);
2917  // Mask the precision exception.
2918  emit(static_cast<byte>(mode) | 0x8);
2919 }
2920 
2921 
2922 void Assembler::movmskpd(Register dst, XMMRegister src) {
2923  EnsureSpace ensure_space(this);
2924  emit(0x66);
2925  emit_optional_rex_32(dst, src);
2926  emit(0x0f);
2927  emit(0x50);
2928  emit_sse_operand(dst, src);
2929 }
2930 
2931 
2932 void Assembler::movmskps(Register dst, XMMRegister src) {
2933  EnsureSpace ensure_space(this);
2934  emit_optional_rex_32(dst, src);
2935  emit(0x0f);
2936  emit(0x50);
2937  emit_sse_operand(dst, src);
2938 }
2939 
2940 
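// The emit_sse_operand overloads below emit the ModRM byte. The register-to-
// register forms always use mod == 11 (0xC0) with the low three bits of each
// register code; the fourth bit, when needed, travels in the REX prefix
// emitted earlier.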
2941 void Assembler::emit_sse_operand(XMMRegister reg, const Operand& adr) {
2942  Register ireg = { reg.code() };
2943  emit_operand(ireg, adr);
2944 }
2945 
2946 
2947 void Assembler::emit_sse_operand(XMMRegister dst, XMMRegister src) {
2948  emit(0xC0 | (dst.low_bits() << 3) | src.low_bits());
2949 }
2950 
2951 
2952 void Assembler::emit_sse_operand(XMMRegister dst, Register src) {
2953  emit(0xC0 | (dst.low_bits() << 3) | src.low_bits());
2954 }
2955 
2956 
2957 void Assembler::emit_sse_operand(Register dst, XMMRegister src) {
2958  emit(0xC0 | (dst.low_bits() << 3) | src.low_bits());
2959 }
2960 
2961 
2962 void Assembler::db(uint8_t data) {
2963  EnsureSpace ensure_space(this);
2964  emit(data);
2965 }
2966 
2967 
2968 void Assembler::dd(uint32_t data) {
2969  EnsureSpace ensure_space(this);
2970  emitl(data);
2971 }
2972 
2973 
2974 // Relocation information implementations.
2975 
2976 void Assembler::RecordRelocInfo(RelocInfo::Mode rmode, intptr_t data) {
2977  ASSERT(!RelocInfo::IsNone(rmode));
2978  if (rmode == RelocInfo::EXTERNAL_REFERENCE) {
2979  // Don't record external references unless the heap will be serialized.
2980 #ifdef DEBUG
2981  if (!Serializer::enabled()) {
2982  Serializer::TooLateToEnableNow();
2983  }
2984 #endif
2985  if (!Serializer::enabled() && !emit_debug_code()) {
2986  return;
2987  }
2988  } else if (rmode == RelocInfo::CODE_AGE_SEQUENCE) {
2989  // Don't record pseudo relocation info for code age sequence mode.
2990  return;
2991  }
2992  RelocInfo rinfo(pc_, rmode, data, NULL);
2993  reloc_info_writer.Write(&rinfo);
2994 }
2995 
2996 
2997 void Assembler::RecordJSReturn() {
2998  positions_recorder()->WriteRecordedPositions();
2999  EnsureSpace ensure_space(this);
3000  RecordRelocInfo(RelocInfo::JS_RETURN);
3001 }
3002 
3003 
3004 void Assembler::RecordDebugBreakSlot() {
3005  positions_recorder()->WriteRecordedPositions();
3006  EnsureSpace ensure_space(this);
3007  RecordRelocInfo(RelocInfo::DEBUG_BREAK_SLOT);
3008 }
3009 
3010 
3011 void Assembler::RecordComment(const char* msg, bool force) {
3012  if (FLAG_code_comments || force) {
3013  EnsureSpace ensure_space(this);
3014  RecordRelocInfo(RelocInfo::COMMENT, reinterpret_cast<intptr_t>(msg));
3015  }
3016 }
3017 
3018 
3019 MaybeObject* Assembler::AllocateConstantPool(Heap* heap) {
3020  // No out-of-line constant pool support.
3021  UNREACHABLE();
3022  return NULL;
3023 }
3024 
3025 
3026 void Assembler::PopulateConstantPool(ConstantPoolArray* constant_pool) {
3027  // No out-of-line constant pool support.
3028  UNREACHABLE();
3029 }
3030 
3031 
3032 const int RelocInfo::kApplyMask = RelocInfo::kCodeTargetMask |
3033  1 << RelocInfo::RUNTIME_ENTRY |
3034  1 << RelocInfo::INTERNAL_REFERENCE |
3035  1 << RelocInfo::CODE_AGE_SEQUENCE;
3036 
3037 
3038 bool RelocInfo::IsCodedSpecially() {
3039  // The deserializer needs to know whether a pointer is specially coded. Being
3040  // specially coded on x64 means that it is a relative 32 bit address, as used
3041  // by branch instructions.
3042  return (1 << rmode_) & kApplyMask;
3043 }
3044 
3045 
3046 bool RelocInfo::IsInConstantPool() {
3047  return false;
3048 }
3049 
3050 
3051 } } // namespace v8::internal
3052 
3053 #endif // V8_TARGET_ARCH_X64