v8 3.14.5 (node 0.10.28)
V8 is Google's open source JavaScript engine
assembler-ia32.cc
1 // Copyright (c) 1994-2006 Sun Microsystems Inc.
2 // All Rights Reserved.
3 //
4 // Redistribution and use in source and binary forms, with or without
5 // modification, are permitted provided that the following conditions
6 // are met:
7 //
8 // - Redistributions of source code must retain the above copyright notice,
9 // this list of conditions and the following disclaimer.
10 //
11 // - Redistribution in binary form must reproduce the above copyright
12 // notice, this list of conditions and the following disclaimer in the
13 // documentation and/or other materials provided with the
14 // distribution.
15 //
16 // - Neither the name of Sun Microsystems or the names of contributors may
17 // be used to endorse or promote products derived from this software without
18 // specific prior written permission.
19 //
20 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
21 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
22 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
23 // FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
24 // COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
25 // INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
26 // (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
27 // SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
28 // HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
29 // STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
30 // ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
31 // OF THE POSSIBILITY OF SUCH DAMAGE.
32 
33 // The original source code covered by the above license has been modified
34 // significantly by Google Inc.
35 // Copyright 2012 the V8 project authors. All rights reserved.
36 
37 #include "v8.h"
38 
39 #if defined(V8_TARGET_ARCH_IA32)
40 
41 #include "disassembler.h"
42 #include "macro-assembler.h"
43 #include "serialize.h"
44 
45 namespace v8 {
46 namespace internal {
47 
48 // -----------------------------------------------------------------------------
49 // Implementation of CpuFeatures
50 
51 #ifdef DEBUG
52 bool CpuFeatures::initialized_ = false;
53 #endif
54 uint64_t CpuFeatures::supported_ = 0;
55 uint64_t CpuFeatures::found_by_runtime_probing_ = 0;
56 
57 
58 // The Probe method needs executable memory, so it reserves and commits an
59 // executable VirtualMemory buffer. Allocation failure is silent and leads to a safe default.
60 void CpuFeatures::Probe() {
61  ASSERT(!initialized_);
62  ASSERT(supported_ == 0);
63 #ifdef DEBUG
64  initialized_ = true;
65 #endif
66  if (Serializer::enabled()) {
67  supported_ |= OS::CpuFeaturesImpliedByPlatform();
68  return; // No features if we might serialize.
69  }
70 
71  const int kBufferSize = 4 * KB;
72  VirtualMemory* memory = new VirtualMemory(kBufferSize);
73  if (!memory->IsReserved()) {
74  delete memory;
75  return;
76  }
77  ASSERT(memory->size() >= static_cast<size_t>(kBufferSize));
78  if (!memory->Commit(memory->address(), kBufferSize, true/*executable*/)) {
79  delete memory;
80  return;
81  }
82 
83  Assembler assm(NULL, memory->address(), kBufferSize);
84  Label cpuid, done;
85 #define __ assm.
86  // Save old esp, since we are going to modify the stack.
87  __ push(ebp);
88  __ pushfd();
89  __ push(ecx);
90  __ push(ebx);
91  __ mov(ebp, esp);
92 
93  // If we can modify bit 21 of the EFLAGS register, then CPUID is supported.
94  __ pushfd();
95  __ pop(eax);
96  __ mov(edx, eax);
97  __ xor_(eax, 0x200000); // Flip bit 21.
98  __ push(eax);
99  __ popfd();
100  __ pushfd();
101  __ pop(eax);
102  __ xor_(eax, edx); // Different if CPUID is supported.
103  __ j(not_zero, &cpuid);
104 
105  // CPUID not supported. Clear the supported features in edx:eax.
106  __ xor_(eax, eax);
107  __ xor_(edx, edx);
108  __ jmp(&done);
109 
110  // Invoke CPUID with 1 in eax to get feature information in
111  // ecx:edx. Temporarily enable CPUID support because we know it's
112  // safe here.
113  __ bind(&cpuid);
114  __ mov(eax, 1);
115  supported_ = (1 << CPUID);
116  { Scope fscope(CPUID);
117  __ cpuid();
118  }
119  supported_ = 0;
120 
121  // Move the result from ecx:edx to edx:eax and make sure to mark the
122  // CPUID feature as supported.
123  __ mov(eax, edx);
124  __ or_(eax, 1 << CPUID);
125  __ mov(edx, ecx);
126 
127  // Done.
128  __ bind(&done);
129  __ mov(esp, ebp);
130  __ pop(ebx);
131  __ pop(ecx);
132  __ popfd();
133  __ pop(ebp);
134  __ ret(0);
135 #undef __
136 
137  typedef uint64_t (*F0)();
138  F0 probe = FUNCTION_CAST<F0>(reinterpret_cast<Address>(memory->address()));
139  supported_ = probe();
140  found_by_runtime_probing_ = supported_;
141  uint64_t os_guarantees = OS::CpuFeaturesImpliedByPlatform();
142  supported_ |= os_guarantees;
143  found_by_runtime_probing_ &= ~os_guarantees;
144 
145  delete memory;
146 }
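How callers are expected to consume the probed bits (a minimal sketch, assuming the CpuFeatures::IsSupported()/CpuFeatures::Scope interface declared in the matching assembler header; not part of this file):

  if (CpuFeatures::IsSupported(SSE2)) {
    CpuFeatures::Scope use_sse2(SSE2);   // marks SSE2 as enabled for the emits below
    // ... emit SSE2 instructions such as movsd/addsd ...
  }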
147 
148 
149 // -----------------------------------------------------------------------------
150 // Implementation of Displacement
151 
152 void Displacement::init(Label* L, Type type) {
153  ASSERT(!L->is_bound());
154  int next = 0;
155  if (L->is_linked()) {
156  next = L->pos();
157  ASSERT(next > 0); // Displacements must be at positions > 0
158  }
159  // Ensure that we _never_ overflow the next field.
160  ASSERT(NextField::is_valid(Assembler::kMaximalBufferSize));
161  data_ = NextField::encode(next) | TypeField::encode(type);
162 }
163 
164 
165 // -----------------------------------------------------------------------------
166 // Implementation of RelocInfo
167 
168 
169 const int RelocInfo::kApplyMask =
170  RelocInfo::kCodeTargetMask | 1 << RelocInfo::RUNTIME_ENTRY |
171  1 << RelocInfo::JS_RETURN | 1 << RelocInfo::INTERNAL_REFERENCE |
172  1 << RelocInfo::DEBUG_BREAK_SLOT;
173 
174 
175 bool RelocInfo::IsCodedSpecially() {
176  // The deserializer needs to know whether a pointer is specially coded. Being
177  // specially coded on IA32 means that it is a relative address, as used by
178  // branch instructions. These are also the ones that need changing when a
179  // code object moves.
180  return (1 << rmode_) & kApplyMask;
181 }
182 
183 
184 void RelocInfo::PatchCode(byte* instructions, int instruction_count) {
185  // Patch the code at the current address with the supplied instructions.
186  for (int i = 0; i < instruction_count; i++) {
187  *(pc_ + i) = *(instructions + i);
188  }
189 
190  // Indicate that code has changed.
191  CPU::FlushICache(pc_, instruction_count);
192 }
193 
194 
195 // Patch the code at the current PC with a call to the target address.
196 // Additional guard int3 instructions can be added if required.
197 void RelocInfo::PatchCodeWithCall(Address target, int guard_bytes) {
198  // Call instruction takes up 5 bytes and int3 takes up one byte.
199  static const int kCallCodeSize = 5;
200  int code_size = kCallCodeSize + guard_bytes;
201 
202  // Create a code patcher.
203  CodePatcher patcher(pc_, code_size);
204 
205  // Add a label for checking the size of the code used for returning.
206 #ifdef DEBUG
207  Label check_codesize;
208  patcher.masm()->bind(&check_codesize);
209 #endif
210 
211  // Patch the code.
212  patcher.masm()->call(target, RelocInfo::NONE);
213 
214  // Check that the size of the code generated is as expected.
215  ASSERT_EQ(kCallCodeSize,
216  patcher.masm()->SizeOfCodeGeneratedSince(&check_codesize));
217 
218  // Add the requested number of int3 instructions after the call.
219  ASSERT_GE(guard_bytes, 0);
220  for (int i = 0; i < guard_bytes; i++) {
221  patcher.masm()->int3();
222  }
223 }
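Worked size check for the patch above: with guard_bytes == 2 the patcher writes seven bytes in total, the five-byte near call (opcode E8 plus a 32-bit relative displacement) followed by two int3 breakpoints (CC CC).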
224 
225 
226 // -----------------------------------------------------------------------------
227 // Implementation of Operand
228 
229 Operand::Operand(Register base, int32_t disp, RelocInfo::Mode rmode) {
230  // [base + disp/r]
231  if (disp == 0 && rmode == RelocInfo::NONE && !base.is(ebp)) {
232  // [base]
233  set_modrm(0, base);
234  if (base.is(esp)) set_sib(times_1, esp, base);
235  } else if (is_int8(disp) && rmode == RelocInfo::NONE) {
236  // [base + disp8]
237  set_modrm(1, base);
238  if (base.is(esp)) set_sib(times_1, esp, base);
239  set_disp8(disp);
240  } else {
241  // [base + disp/r]
242  set_modrm(2, base);
243  if (base.is(esp)) set_sib(times_1, esp, base);
244  set_dispr(disp, rmode);
245  }
246 }
247 
248 
249 Operand::Operand(Register base,
250  Register index,
251  ScaleFactor scale,
252  int32_t disp,
253  RelocInfo::Mode rmode) {
254  ASSERT(!index.is(esp)); // illegal addressing mode
255  // [base + index*scale + disp/r]
256  if (disp == 0 && rmode == RelocInfo::NONE && !base.is(ebp)) {
257  // [base + index*scale]
258  set_modrm(0, esp);
259  set_sib(scale, index, base);
260  } else if (is_int8(disp) && rmode == RelocInfo::NONE) {
261  // [base + index*scale + disp8]
262  set_modrm(1, esp);
263  set_sib(scale, index, base);
264  set_disp8(disp);
265  } else {
266  // [base + index*scale + disp/r]
267  set_modrm(2, esp);
268  set_sib(scale, index, base);
269  set_dispr(disp, rmode);
270  }
271 }
272 
273 
274 Operand::Operand(Register index,
275  ScaleFactor scale,
276  int32_t disp,
277  RelocInfo::Mode rmode) {
278  ASSERT(!index.is(esp)); // illegal addressing mode
279  // [index*scale + disp/r]
280  set_modrm(0, esp);
281  set_sib(scale, index, ebp);
282  set_dispr(disp, rmode);
283 }
284 
285 
286 bool Operand::is_reg(Register reg) const {
287  return ((buf_[0] & 0xF8) == 0xC0) // addressing mode is register only.
288  && ((buf_[0] & 0x07) == reg.code()); // register codes match.
289 }
290 
291 
292 bool Operand::is_reg_only() const {
293  return (buf_[0] & 0xF8) == 0xC0; // Addressing mode is register only.
294 }
295 
296 
297 Register Operand::reg() const {
298  ASSERT(is_reg_only());
299  return Register::from_code(buf_[0] & 0x07);
300 }
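A worked encoding example for the constructors above (standard IA-32 ModR/M encoding, shown for illustration): with base ebp, displacement 8 and rmode RelocInfo::NONE, the disp8 branch runs set_modrm(1, ebp), giving ModR/M byte 0x45 once emit_operand() folds in the /r field for eax, so mov(eax, Operand(ebp, 8)) assembles to

  8B 45 08   ; mov eax, [ebp+8]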
301 
302 
303 // -----------------------------------------------------------------------------
304 // Implementation of Assembler.
305 
306 // Emit a single byte. Must always be inlined.
307 #define EMIT(x) \
308  *pc_++ = (x)
309 
310 
311 #ifdef GENERATED_CODE_COVERAGE
312 static void InitCoverageLog();
313 #endif
314 
315 Assembler::Assembler(Isolate* arg_isolate, void* buffer, int buffer_size)
316  : AssemblerBase(arg_isolate),
317  positions_recorder_(this),
318  emit_debug_code_(FLAG_debug_code) {
319  if (buffer == NULL) {
320  // Do our own buffer management.
321  if (buffer_size <= kMinimalBufferSize) {
322  buffer_size = kMinimalBufferSize;
323 
324  if (isolate()->assembler_spare_buffer() != NULL) {
325  buffer = isolate()->assembler_spare_buffer();
326  isolate()->set_assembler_spare_buffer(NULL);
327  }
328  }
329  if (buffer == NULL) {
330  buffer_ = NewArray<byte>(buffer_size);
331  } else {
332  buffer_ = static_cast<byte*>(buffer);
333  }
334  buffer_size_ = buffer_size;
335  own_buffer_ = true;
336  } else {
337  // Use externally provided buffer instead.
338  ASSERT(buffer_size > 0);
339  buffer_ = static_cast<byte*>(buffer);
340  buffer_size_ = buffer_size;
341  own_buffer_ = false;
342  }
343 
344  // Clear the buffer in debug mode unless it was provided by the
345  // caller in which case we can't be sure it's okay to overwrite
346  // existing code in it; see CodePatcher::CodePatcher(...).
347 #ifdef DEBUG
348  if (own_buffer_) {
349  memset(buffer_, 0xCC, buffer_size); // int3
350  }
351 #endif
352 
353  // Set up buffer pointers.
354  ASSERT(buffer_ != NULL);
355  pc_ = buffer_;
356  reloc_info_writer.Reposition(buffer_ + buffer_size, pc_);
357 
358 #ifdef GENERATED_CODE_COVERAGE
359  InitCoverageLog();
360 #endif
361 }
362 
363 
364 Assembler::~Assembler() {
365  if (own_buffer_) {
366  if (isolate()->assembler_spare_buffer() == NULL &&
367  buffer_size_ == kMinimalBufferSize) {
368  isolate()->set_assembler_spare_buffer(buffer_);
369  } else {
370  DeleteArray(buffer_);
371  }
372  }
373 }
374 
375 
376 void Assembler::GetCode(CodeDesc* desc) {
377  // Finalize code (at this point overflow() may be true, but the gap ensures
378  // that we are still not overlapping instructions and relocation info).
379  ASSERT(pc_ <= reloc_info_writer.pos()); // No overlap.
380  // Set up code descriptor.
381  desc->buffer = buffer_;
382  desc->buffer_size = buffer_size_;
383  desc->instr_size = pc_offset();
384  desc->reloc_size = (buffer_ + buffer_size_) - reloc_info_writer.pos();
385  desc->origin = this;
386 }
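A minimal end-to-end sketch of the buffer workflow above (assumes a valid Isolate* named isolate and the surrounding v8 internal headers; illustrative only):

  Assembler assm(isolate, NULL, 0);   // size 0 falls back to kMinimalBufferSize
  assm.mov(eax, 42);
  assm.ret(0);
  CodeDesc desc;
  assm.GetCode(&desc);                // desc.buffer / desc.instr_size describe the emitted bytes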
387 
388 
389 void Assembler::Align(int m) {
390  ASSERT(IsPowerOf2(m));
391  int mask = m - 1;
392  int addr = pc_offset();
393  Nop((m - (addr & mask)) & mask);
394 }
395 
396 
397 bool Assembler::IsNop(Address addr) {
398  Address a = addr;
399  while (*a == 0x66) a++;
400  if (*a == 0x90) return true;
401  if (a[0] == 0xf && a[1] == 0x1f) return true;
402  return false;
403 }
404 
405 
406 void Assembler::Nop(int bytes) {
407  EnsureSpace ensure_space(this);
408 
409  if (!CpuFeatures::IsSupported(SSE2)) {
410  // Older CPUs that do not support SSE2 may not support multibyte NOP
411  // instructions.
412  for (; bytes > 0; bytes--) {
413  EMIT(0x90);
414  }
415  return;
416  }
417 
418  // Multi byte nops from http://support.amd.com/us/Processor_TechDocs/40546.pdf
419  while (bytes > 0) {
420  switch (bytes) {
421  case 2:
422  EMIT(0x66);
423  case 1:
424  EMIT(0x90);
425  return;
426  case 3:
427  EMIT(0xf);
428  EMIT(0x1f);
429  EMIT(0);
430  return;
431  case 4:
432  EMIT(0xf);
433  EMIT(0x1f);
434  EMIT(0x40);
435  EMIT(0);
436  return;
437  case 6:
438  EMIT(0x66);
439  case 5:
440  EMIT(0xf);
441  EMIT(0x1f);
442  EMIT(0x44);
443  EMIT(0);
444  EMIT(0);
445  return;
446  case 7:
447  EMIT(0xf);
448  EMIT(0x1f);
449  EMIT(0x80);
450  EMIT(0);
451  EMIT(0);
452  EMIT(0);
453  EMIT(0);
454  return;
455  default:
456  case 11:
457  EMIT(0x66);
458  bytes--;
459  case 10:
460  EMIT(0x66);
461  bytes--;
462  case 9:
463  EMIT(0x66);
464  bytes--;
465  case 8:
466  EMIT(0xf);
467  EMIT(0x1f);
468  EMIT(0x84);
469  EMIT(0);
470  EMIT(0);
471  EMIT(0);
472  EMIT(0);
473  EMIT(0);
474  bytes -= 8;
475  }
476  }
477 }
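Worked example tying Align() above to this table: with pc_offset() == 13, Align(16) computes (16 - (13 & 15)) & 15 == 3 and calls Nop(3), which emits the three-byte form 0F 1F 00.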
478 
479 
480 void Assembler::CodeTargetAlign() {
481  Align(16); // Preferred alignment of jump targets on ia32.
482 }
483 
484 
485 void Assembler::cpuid() {
486  ASSERT(CpuFeatures::IsEnabled(CPUID));
487  EnsureSpace ensure_space(this);
488  EMIT(0x0F);
489  EMIT(0xA2);
490 }
491 
492 
493 void Assembler::pushad() {
494  EnsureSpace ensure_space(this);
495  EMIT(0x60);
496 }
497 
498 
499 void Assembler::popad() {
500  EnsureSpace ensure_space(this);
501  EMIT(0x61);
502 }
503 
504 
505 void Assembler::pushfd() {
506  EnsureSpace ensure_space(this);
507  EMIT(0x9C);
508 }
509 
510 
511 void Assembler::popfd() {
512  EnsureSpace ensure_space(this);
513  EMIT(0x9D);
514 }
515 
516 
517 void Assembler::push(const Immediate& x) {
518  EnsureSpace ensure_space(this);
519  if (x.is_int8()) {
520  EMIT(0x6a);
521  EMIT(x.x_);
522  } else {
523  EMIT(0x68);
524  emit(x);
525  }
526 }
527 
528 
529 void Assembler::push_imm32(int32_t imm32) {
530  EnsureSpace ensure_space(this);
531  EMIT(0x68);
532  emit(imm32);
533 }
534 
535 
536 void Assembler::push(Register src) {
537  EnsureSpace ensure_space(this);
538  EMIT(0x50 | src.code());
539 }
540 
541 
542 void Assembler::push(const Operand& src) {
543  EnsureSpace ensure_space(this);
544  EMIT(0xFF);
545  emit_operand(esi, src);
546 }
547 
548 
549 void Assembler::pop(Register dst) {
550  ASSERT(reloc_info_writer.last_pc() != NULL);
551  EnsureSpace ensure_space(this);
552  EMIT(0x58 | dst.code());
553 }
554 
555 
556 void Assembler::pop(const Operand& dst) {
557  EnsureSpace ensure_space(this);
558  EMIT(0x8F);
559  emit_operand(eax, dst);
560 }
561 
562 
563 void Assembler::enter(const Immediate& size) {
564  EnsureSpace ensure_space(this);
565  EMIT(0xC8);
566  emit_w(size);
567  EMIT(0);
568 }
569 
570 
571 void Assembler::leave() {
572  EnsureSpace ensure_space(this);
573  EMIT(0xC9);
574 }
575 
576 
577 void Assembler::mov_b(Register dst, const Operand& src) {
578  CHECK(dst.is_byte_register());
579  EnsureSpace ensure_space(this);
580  EMIT(0x8A);
581  emit_operand(dst, src);
582 }
583 
584 
585 void Assembler::mov_b(const Operand& dst, int8_t imm8) {
586  EnsureSpace ensure_space(this);
587  EMIT(0xC6);
588  emit_operand(eax, dst);
589  EMIT(imm8);
590 }
591 
592 
593 void Assembler::mov_b(const Operand& dst, Register src) {
594  CHECK(src.is_byte_register());
595  EnsureSpace ensure_space(this);
596  EMIT(0x88);
597  emit_operand(src, dst);
598 }
599 
600 
601 void Assembler::mov_w(Register dst, const Operand& src) {
602  EnsureSpace ensure_space(this);
603  EMIT(0x66);
604  EMIT(0x8B);
605  emit_operand(dst, src);
606 }
607 
608 
609 void Assembler::mov_w(const Operand& dst, Register src) {
610  EnsureSpace ensure_space(this);
611  EMIT(0x66);
612  EMIT(0x89);
613  emit_operand(src, dst);
614 }
615 
616 
617 void Assembler::mov(Register dst, int32_t imm32) {
618  EnsureSpace ensure_space(this);
619  EMIT(0xB8 | dst.code());
620  emit(imm32);
621 }
622 
623 
624 void Assembler::mov(Register dst, const Immediate& x) {
625  EnsureSpace ensure_space(this);
626  EMIT(0xB8 | dst.code());
627  emit(x);
628 }
629 
630 
631 void Assembler::mov(Register dst, Handle<Object> handle) {
632  EnsureSpace ensure_space(this);
633  EMIT(0xB8 | dst.code());
634  emit(handle);
635 }
636 
637 
638 void Assembler::mov(Register dst, const Operand& src) {
639  EnsureSpace ensure_space(this);
640  EMIT(0x8B);
641  emit_operand(dst, src);
642 }
643 
644 
645 void Assembler::mov(Register dst, Register src) {
646  EnsureSpace ensure_space(this);
647  EMIT(0x89);
648  EMIT(0xC0 | src.code() << 3 | dst.code());
649 }
650 
651 
652 void Assembler::mov(const Operand& dst, const Immediate& x) {
653  EnsureSpace ensure_space(this);
654  EMIT(0xC7);
655  emit_operand(eax, dst);
656  emit(x);
657 }
658 
659 
660 void Assembler::mov(const Operand& dst, Handle<Object> handle) {
661  EnsureSpace ensure_space(this);
662  EMIT(0xC7);
663  emit_operand(eax, dst);
664  emit(handle);
665 }
666 
667 
668 void Assembler::mov(const Operand& dst, Register src) {
669  EnsureSpace ensure_space(this);
670  EMIT(0x89);
671  emit_operand(src, dst);
672 }
673 
674 
675 void Assembler::movsx_b(Register dst, const Operand& src) {
676  EnsureSpace ensure_space(this);
677  EMIT(0x0F);
678  EMIT(0xBE);
679  emit_operand(dst, src);
680 }
681 
682 
683 void Assembler::movsx_w(Register dst, const Operand& src) {
684  EnsureSpace ensure_space(this);
685  EMIT(0x0F);
686  EMIT(0xBF);
687  emit_operand(dst, src);
688 }
689 
690 
691 void Assembler::movzx_b(Register dst, const Operand& src) {
692  EnsureSpace ensure_space(this);
693  EMIT(0x0F);
694  EMIT(0xB6);
695  emit_operand(dst, src);
696 }
697 
698 
699 void Assembler::movzx_w(Register dst, const Operand& src) {
700  EnsureSpace ensure_space(this);
701  EMIT(0x0F);
702  EMIT(0xB7);
703  emit_operand(dst, src);
704 }
705 
706 
707 void Assembler::cmov(Condition cc, Register dst, const Operand& src) {
708  ASSERT(CpuFeatures::IsEnabled(CMOV));
709  EnsureSpace ensure_space(this);
710  // Opcode: 0f 40 + cc /r.
711  EMIT(0x0F);
712  EMIT(0x40 + cc);
713  emit_operand(dst, src);
714 }
715 
716 
717 void Assembler::cld() {
718  EnsureSpace ensure_space(this);
719  EMIT(0xFC);
720 }
721 
722 
723 void Assembler::rep_movs() {
724  EnsureSpace ensure_space(this);
725  EMIT(0xF3);
726  EMIT(0xA5);
727 }
728 
729 
730 void Assembler::rep_stos() {
731  EnsureSpace ensure_space(this);
732  EMIT(0xF3);
733  EMIT(0xAB);
734 }
735 
736 
737 void Assembler::stos() {
738  EnsureSpace ensure_space(this);
739  EMIT(0xAB);
740 }
741 
742 
743 void Assembler::xchg(Register dst, Register src) {
744  EnsureSpace ensure_space(this);
745  if (src.is(eax) || dst.is(eax)) { // Single-byte encoding.
746  EMIT(0x90 | (src.is(eax) ? dst.code() : src.code()));
747  } else {
748  EMIT(0x87);
749  EMIT(0xC0 | src.code() << 3 | dst.code());
750  }
751 }
752 
753 
754 void Assembler::adc(Register dst, int32_t imm32) {
755  EnsureSpace ensure_space(this);
756  emit_arith(2, Operand(dst), Immediate(imm32));
757 }
758 
759 
760 void Assembler::adc(Register dst, const Operand& src) {
761  EnsureSpace ensure_space(this);
762  EMIT(0x13);
763  emit_operand(dst, src);
764 }
765 
766 
767 void Assembler::add(Register dst, const Operand& src) {
768  EnsureSpace ensure_space(this);
769  EMIT(0x03);
770  emit_operand(dst, src);
771 }
772 
773 
774 void Assembler::add(const Operand& dst, Register src) {
775  EnsureSpace ensure_space(this);
776  EMIT(0x01);
777  emit_operand(src, dst);
778 }
779 
780 
781 void Assembler::add(const Operand& dst, const Immediate& x) {
782  ASSERT(reloc_info_writer.last_pc() != NULL);
783  EnsureSpace ensure_space(this);
784  emit_arith(0, dst, x);
785 }
786 
787 
788 void Assembler::and_(Register dst, int32_t imm32) {
789  and_(dst, Immediate(imm32));
790 }
791 
792 
793 void Assembler::and_(Register dst, const Immediate& x) {
794  EnsureSpace ensure_space(this);
795  emit_arith(4, Operand(dst), x);
796 }
797 
798 
799 void Assembler::and_(Register dst, const Operand& src) {
800  EnsureSpace ensure_space(this);
801  EMIT(0x23);
802  emit_operand(dst, src);
803 }
804 
805 
806 void Assembler::and_(const Operand& dst, const Immediate& x) {
807  EnsureSpace ensure_space(this);
808  emit_arith(4, dst, x);
809 }
810 
811 
812 void Assembler::and_(const Operand& dst, Register src) {
813  EnsureSpace ensure_space(this);
814  EMIT(0x21);
815  emit_operand(src, dst);
816 }
817 
818 
819 void Assembler::cmpb(const Operand& op, int8_t imm8) {
820  EnsureSpace ensure_space(this);
821  if (op.is_reg(eax)) {
822  EMIT(0x3C);
823  } else {
824  EMIT(0x80);
825  emit_operand(edi, op); // edi == 7
826  }
827  EMIT(imm8);
828 }
829 
830 
831 void Assembler::cmpb(const Operand& op, Register reg) {
832  CHECK(reg.is_byte_register());
833  EnsureSpace ensure_space(this);
834  EMIT(0x38);
835  emit_operand(reg, op);
836 }
837 
838 
839 void Assembler::cmpb(Register reg, const Operand& op) {
840  CHECK(reg.is_byte_register());
841  EnsureSpace ensure_space(this);
842  EMIT(0x3A);
843  emit_operand(reg, op);
844 }
845 
846 
847 void Assembler::cmpw(const Operand& op, Immediate imm16) {
848  ASSERT(imm16.is_int16());
849  EnsureSpace ensure_space(this);
850  EMIT(0x66);
851  EMIT(0x81);
852  emit_operand(edi, op);
853  emit_w(imm16);
854 }
855 
856 
857 void Assembler::cmp(Register reg, int32_t imm32) {
858  EnsureSpace ensure_space(this);
859  emit_arith(7, Operand(reg), Immediate(imm32));
860 }
861 
862 
863 void Assembler::cmp(Register reg, Handle<Object> handle) {
864  EnsureSpace ensure_space(this);
865  emit_arith(7, Operand(reg), Immediate(handle));
866 }
867 
868 
869 void Assembler::cmp(Register reg, const Operand& op) {
870  EnsureSpace ensure_space(this);
871  EMIT(0x3B);
872  emit_operand(reg, op);
873 }
874 
875 
876 void Assembler::cmp(const Operand& op, const Immediate& imm) {
877  EnsureSpace ensure_space(this);
878  emit_arith(7, op, imm);
879 }
880 
881 
882 void Assembler::cmp(const Operand& op, Handle<Object> handle) {
883  EnsureSpace ensure_space(this);
884  emit_arith(7, op, Immediate(handle));
885 }
886 
887 
888 void Assembler::cmpb_al(const Operand& op) {
889  EnsureSpace ensure_space(this);
890  EMIT(0x38); // CMP r/m8, r8
891  emit_operand(eax, op); // eax has same code as register al.
892 }
893 
894 
895 void Assembler::cmpw_ax(const Operand& op) {
896  EnsureSpace ensure_space(this);
897  EMIT(0x66);
898  EMIT(0x39); // CMP r/m16, r16
899  emit_operand(eax, op); // eax has same code as register ax.
900 }
901 
902 
903 void Assembler::dec_b(Register dst) {
904  CHECK(dst.is_byte_register());
905  EnsureSpace ensure_space(this);
906  EMIT(0xFE);
907  EMIT(0xC8 | dst.code());
908 }
909 
910 
911 void Assembler::dec_b(const Operand& dst) {
912  EnsureSpace ensure_space(this);
913  EMIT(0xFE);
914  emit_operand(ecx, dst);
915 }
916 
917 
918 void Assembler::dec(Register dst) {
919  EnsureSpace ensure_space(this);
920  EMIT(0x48 | dst.code());
921 }
922 
923 
924 void Assembler::dec(const Operand& dst) {
925  EnsureSpace ensure_space(this);
926  EMIT(0xFF);
927  emit_operand(ecx, dst);
928 }
929 
930 
931 void Assembler::cdq() {
932  EnsureSpace ensure_space(this);
933  EMIT(0x99);
934 }
935 
936 
937 void Assembler::idiv(Register src) {
938  EnsureSpace ensure_space(this);
939  EMIT(0xF7);
940  EMIT(0xF8 | src.code());
941 }
942 
943 
944 void Assembler::imul(Register reg) {
945  EnsureSpace ensure_space(this);
946  EMIT(0xF7);
947  EMIT(0xE8 | reg.code());
948 }
949 
950 
951 void Assembler::imul(Register dst, const Operand& src) {
952  EnsureSpace ensure_space(this);
953  EMIT(0x0F);
954  EMIT(0xAF);
955  emit_operand(dst, src);
956 }
957 
958 
959 void Assembler::imul(Register dst, Register src, int32_t imm32) {
960  EnsureSpace ensure_space(this);
961  if (is_int8(imm32)) {
962  EMIT(0x6B);
963  EMIT(0xC0 | dst.code() << 3 | src.code());
964  EMIT(imm32);
965  } else {
966  EMIT(0x69);
967  EMIT(0xC0 | dst.code() << 3 | src.code());
968  emit(imm32);
969  }
970 }
971 
972 
973 void Assembler::inc(Register dst) {
974  EnsureSpace ensure_space(this);
975  EMIT(0x40 | dst.code());
976 }
977 
978 
979 void Assembler::inc(const Operand& dst) {
980  EnsureSpace ensure_space(this);
981  EMIT(0xFF);
982  emit_operand(eax, dst);
983 }
984 
985 
986 void Assembler::lea(Register dst, const Operand& src) {
987  EnsureSpace ensure_space(this);
988  EMIT(0x8D);
989  emit_operand(dst, src);
990 }
991 
992 
993 void Assembler::mul(Register src) {
994  EnsureSpace ensure_space(this);
995  EMIT(0xF7);
996  EMIT(0xE0 | src.code());
997 }
998 
999 
1000 void Assembler::neg(Register dst) {
1001  EnsureSpace ensure_space(this);
1002  EMIT(0xF7);
1003  EMIT(0xD8 | dst.code());
1004 }
1005 
1006 
1007 void Assembler::not_(Register dst) {
1008  EnsureSpace ensure_space(this);
1009  EMIT(0xF7);
1010  EMIT(0xD0 | dst.code());
1011 }
1012 
1013 
1014 void Assembler::or_(Register dst, int32_t imm32) {
1015  EnsureSpace ensure_space(this);
1016  emit_arith(1, Operand(dst), Immediate(imm32));
1017 }
1018 
1019 
1020 void Assembler::or_(Register dst, const Operand& src) {
1021  EnsureSpace ensure_space(this);
1022  EMIT(0x0B);
1023  emit_operand(dst, src);
1024 }
1025 
1026 
1027 void Assembler::or_(const Operand& dst, const Immediate& x) {
1028  EnsureSpace ensure_space(this);
1029  emit_arith(1, dst, x);
1030 }
1031 
1032 
1033 void Assembler::or_(const Operand& dst, Register src) {
1034  EnsureSpace ensure_space(this);
1035  EMIT(0x09);
1036  emit_operand(src, dst);
1037 }
1038 
1039 
1040 void Assembler::rcl(Register dst, uint8_t imm8) {
1041  EnsureSpace ensure_space(this);
1042  ASSERT(is_uint5(imm8)); // illegal shift count
1043  if (imm8 == 1) {
1044  EMIT(0xD1);
1045  EMIT(0xD0 | dst.code());
1046  } else {
1047  EMIT(0xC1);
1048  EMIT(0xD0 | dst.code());
1049  EMIT(imm8);
1050  }
1051 }
1052 
1053 
1054 void Assembler::rcr(Register dst, uint8_t imm8) {
1055  EnsureSpace ensure_space(this);
1056  ASSERT(is_uint5(imm8)); // illegal shift count
1057  if (imm8 == 1) {
1058  EMIT(0xD1);
1059  EMIT(0xD8 | dst.code());
1060  } else {
1061  EMIT(0xC1);
1062  EMIT(0xD8 | dst.code());
1063  EMIT(imm8);
1064  }
1065 }
1066 
1067 
1068 void Assembler::sar(Register dst, uint8_t imm8) {
1069  EnsureSpace ensure_space(this);
1070  ASSERT(is_uint5(imm8)); // illegal shift count
1071  if (imm8 == 1) {
1072  EMIT(0xD1);
1073  EMIT(0xF8 | dst.code());
1074  } else {
1075  EMIT(0xC1);
1076  EMIT(0xF8 | dst.code());
1077  EMIT(imm8);
1078  }
1079 }
1080 
1081 
1082 void Assembler::sar_cl(Register dst) {
1083  EnsureSpace ensure_space(this);
1084  EMIT(0xD3);
1085  EMIT(0xF8 | dst.code());
1086 }
1087 
1088 
1089 void Assembler::sbb(Register dst, const Operand& src) {
1090  EnsureSpace ensure_space(this);
1091  EMIT(0x1B);
1092  emit_operand(dst, src);
1093 }
1094 
1095 
1096 void Assembler::shld(Register dst, const Operand& src) {
1097  EnsureSpace ensure_space(this);
1098  EMIT(0x0F);
1099  EMIT(0xA5);
1100  emit_operand(dst, src);
1101 }
1102 
1103 
1104 void Assembler::shl(Register dst, uint8_t imm8) {
1105  EnsureSpace ensure_space(this);
1106  ASSERT(is_uint5(imm8)); // illegal shift count
1107  if (imm8 == 1) {
1108  EMIT(0xD1);
1109  EMIT(0xE0 | dst.code());
1110  } else {
1111  EMIT(0xC1);
1112  EMIT(0xE0 | dst.code());
1113  EMIT(imm8);
1114  }
1115 }
1116 
1117 
1118 void Assembler::shl_cl(Register dst) {
1119  EnsureSpace ensure_space(this);
1120  EMIT(0xD3);
1121  EMIT(0xE0 | dst.code());
1122 }
1123 
1124 
1125 void Assembler::shrd(Register dst, const Operand& src) {
1126  EnsureSpace ensure_space(this);
1127  EMIT(0x0F);
1128  EMIT(0xAD);
1129  emit_operand(dst, src);
1130 }
1131 
1132 
1133 void Assembler::shr(Register dst, uint8_t imm8) {
1134  EnsureSpace ensure_space(this);
1135  ASSERT(is_uint5(imm8)); // illegal shift count
1136  if (imm8 == 1) {
1137  EMIT(0xD1);
1138  EMIT(0xE8 | dst.code());
1139  } else {
1140  EMIT(0xC1);
1141  EMIT(0xE8 | dst.code());
1142  EMIT(imm8);
1143  }
1144 }
1145 
1146 
1147 void Assembler::shr_cl(Register dst) {
1148  EnsureSpace ensure_space(this);
1149  EMIT(0xD3);
1150  EMIT(0xE8 | dst.code());
1151 }
1152 
1153 
1154 void Assembler::sub(const Operand& dst, const Immediate& x) {
1155  EnsureSpace ensure_space(this);
1156  emit_arith(5, dst, x);
1157 }
1158 
1159 
1160 void Assembler::sub(Register dst, const Operand& src) {
1161  EnsureSpace ensure_space(this);
1162  EMIT(0x2B);
1163  emit_operand(dst, src);
1164 }
1165 
1166 
1167 void Assembler::sub(const Operand& dst, Register src) {
1168  EnsureSpace ensure_space(this);
1169  EMIT(0x29);
1170  emit_operand(src, dst);
1171 }
1172 
1173 
1174 void Assembler::test(Register reg, const Immediate& imm) {
1175  EnsureSpace ensure_space(this);
1176  // Only use test against byte for registers that have a byte
1177  // variant: eax, ebx, ecx, and edx.
1178  if (imm.rmode_ == RelocInfo::NONE &&
1179  is_uint8(imm.x_) &&
1180  reg.is_byte_register()) {
1181  uint8_t imm8 = imm.x_;
1182  if (reg.is(eax)) {
1183  EMIT(0xA8);
1184  EMIT(imm8);
1185  } else {
1186  emit_arith_b(0xF6, 0xC0, reg, imm8);
1187  }
1188  } else {
1189  // This is not using emit_arith because test doesn't support
1190  // sign-extension of 8-bit operands.
1191  if (reg.is(eax)) {
1192  EMIT(0xA9);
1193  } else {
1194  EMIT(0xF7);
1195  EMIT(0xC0 | reg.code());
1196  }
1197  emit(imm);
1198  }
1199 }
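For instance, test(eax, Immediate(0x10)) falls into the byte variant above and emits the two bytes A8 10 (test al, 0x10); a wider immediate or a register without a byte form takes the long encoding instead (A9 imm32 for eax, otherwise F7 /0 with a 32-bit immediate).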
1200 
1201 
1202 void Assembler::test(Register reg, const Operand& op) {
1203  EnsureSpace ensure_space(this);
1204  EMIT(0x85);
1205  emit_operand(reg, op);
1206 }
1207 
1208 
1209 void Assembler::test_b(Register reg, const Operand& op) {
1210  CHECK(reg.is_byte_register());
1211  EnsureSpace ensure_space(this);
1212  EMIT(0x84);
1213  emit_operand(reg, op);
1214 }
1215 
1216 
1217 void Assembler::test(const Operand& op, const Immediate& imm) {
1218  EnsureSpace ensure_space(this);
1219  EMIT(0xF7);
1220  emit_operand(eax, op);
1221  emit(imm);
1222 }
1223 
1224 
1225 void Assembler::test_b(const Operand& op, uint8_t imm8) {
1226  if (op.is_reg_only() && !op.reg().is_byte_register()) {
1227  test(op, Immediate(imm8));
1228  return;
1229  }
1230  EnsureSpace ensure_space(this);
1231  EMIT(0xF6);
1232  emit_operand(eax, op);
1233  EMIT(imm8);
1234 }
1235 
1236 
1237 void Assembler::xor_(Register dst, int32_t imm32) {
1238  EnsureSpace ensure_space(this);
1239  emit_arith(6, Operand(dst), Immediate(imm32));
1240 }
1241 
1242 
1243 void Assembler::xor_(Register dst, const Operand& src) {
1244  EnsureSpace ensure_space(this);
1245  EMIT(0x33);
1246  emit_operand(dst, src);
1247 }
1248 
1249 
1250 void Assembler::xor_(const Operand& dst, Register src) {
1251  EnsureSpace ensure_space(this);
1252  EMIT(0x31);
1253  emit_operand(src, dst);
1254 }
1255 
1256 
1257 void Assembler::xor_(const Operand& dst, const Immediate& x) {
1258  EnsureSpace ensure_space(this);
1259  emit_arith(6, dst, x);
1260 }
1261 
1262 
1263 void Assembler::bt(const Operand& dst, Register src) {
1264  EnsureSpace ensure_space(this);
1265  EMIT(0x0F);
1266  EMIT(0xA3);
1267  emit_operand(src, dst);
1268 }
1269 
1270 
1271 void Assembler::bts(const Operand& dst, Register src) {
1272  EnsureSpace ensure_space(this);
1273  EMIT(0x0F);
1274  EMIT(0xAB);
1275  emit_operand(src, dst);
1276 }
1277 
1278 
1279 void Assembler::hlt() {
1280  EnsureSpace ensure_space(this);
1281  EMIT(0xF4);
1282 }
1283 
1284 
1285 void Assembler::int3() {
1286  EnsureSpace ensure_space(this);
1287  EMIT(0xCC);
1288 }
1289 
1290 
1291 void Assembler::nop() {
1292  EnsureSpace ensure_space(this);
1293  EMIT(0x90);
1294 }
1295 
1296 
1297 void Assembler::rdtsc() {
1298  ASSERT(CpuFeatures::IsEnabled(RDTSC));
1299  EnsureSpace ensure_space(this);
1300  EMIT(0x0F);
1301  EMIT(0x31);
1302 }
1303 
1304 
1305 void Assembler::ret(int imm16) {
1306  EnsureSpace ensure_space(this);
1307  ASSERT(is_uint16(imm16));
1308  if (imm16 == 0) {
1309  EMIT(0xC3);
1310  } else {
1311  EMIT(0xC2);
1312  EMIT(imm16 & 0xFF);
1313  EMIT((imm16 >> 8) & 0xFF);
1314  }
1315 }
1316 
1317 
1318 // Labels refer to positions in the (to be) generated code.
1319 // There are bound, linked, and unused labels.
1320 //
1321 // Bound labels refer to known positions in the already
1322 // generated code. pos() is the position the label refers to.
1323 //
1324 // Linked labels refer to unknown positions in the code
1325 // to be generated; pos() is the position of the 32bit
1326 // Displacement of the last instruction using the label.
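A short sketch of these states in practice (illustrative only; assm is an Assembler as above and the Label::Distance arguments are left at their defaults):

  Label target;
  assm.jmp(&target);      // forward reference: target becomes linked
  assm.nop();
  assm.bind(&target);     // target is now bound; the linked displacement is patched
  assm.j(zero, &target);  // a reference to a bound label emits its final displacement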
1327 
1328 
1329 void Assembler::print(Label* L) {
1330  if (L->is_unused()) {
1331  PrintF("unused label\n");
1332  } else if (L->is_bound()) {
1333  PrintF("bound label to %d\n", L->pos());
1334  } else if (L->is_linked()) {
1335  Label l = *L;
1336  PrintF("unbound label");
1337  while (l.is_linked()) {
1338  Displacement disp = disp_at(&l);
1339  PrintF("@ %d ", l.pos());
1340  disp.print();
1341  PrintF("\n");
1342  disp.next(&l);
1343  }
1344  } else {
1345  PrintF("label in inconsistent state (pos = %d)\n", L->pos_);
1346  }
1347 }
1348 
1349 
1350 void Assembler::bind_to(Label* L, int pos) {
1351  EnsureSpace ensure_space(this);
1352  ASSERT(0 <= pos && pos <= pc_offset()); // must have a valid binding position
1353  while (L->is_linked()) {
1354  Displacement disp = disp_at(L);
1355  int fixup_pos = L->pos();
1356  if (disp.type() == Displacement::CODE_RELATIVE) {
1357  // Relative to Code* heap object pointer.
1358  long_at_put(fixup_pos, pos + Code::kHeaderSize - kHeapObjectTag);
1359  } else {
1360  if (disp.type() == Displacement::UNCONDITIONAL_JUMP) {
1361  ASSERT(byte_at(fixup_pos - 1) == 0xE9); // jmp expected
1362  }
1363  // Relative address, relative to point after address.
1364  int imm32 = pos - (fixup_pos + sizeof(int32_t));
1365  long_at_put(fixup_pos, imm32);
1366  }
1367  disp.next(L);
1368  }
1369  while (L->is_near_linked()) {
1370  int fixup_pos = L->near_link_pos();
1371  int offset_to_next =
1372  static_cast<int>(*reinterpret_cast<int8_t*>(addr_at(fixup_pos)));
1373  ASSERT(offset_to_next <= 0);
1374  // Relative address, relative to point after address.
1375  int disp = pos - fixup_pos - sizeof(int8_t);
1376  CHECK(0 <= disp && disp <= 127);
1377  set_byte_at(fixup_pos, disp);
1378  if (offset_to_next < 0) {
1379  L->link_to(fixup_pos + offset_to_next, Label::kNear);
1380  } else {
1381  L->UnuseNear();
1382  }
1383  }
1384  L->bind_to(pos);
1385 }
1386 
1387 
1388 void Assembler::bind(Label* L) {
1389  EnsureSpace ensure_space(this);
1390  ASSERT(!L->is_bound()); // label can only be bound once
1391  bind_to(L, pc_offset());
1392 }
1393 
1394 
1395 void Assembler::call(Label* L) {
1396  positions_recorder()->WriteRecordedPositions();
1397  EnsureSpace ensure_space(this);
1398  if (L->is_bound()) {
1399  const int long_size = 5;
1400  int offs = L->pos() - pc_offset();
1401  ASSERT(offs <= 0);
1402  // 1110 1000 #32-bit disp.
1403  EMIT(0xE8);
1404  emit(offs - long_size);
1405  } else {
1406  // 1110 1000 #32-bit disp.
1407  EMIT(0xE8);
1408  emit_disp(L, Displacement::OTHER);
1409  }
1410 }
1411 
1412 
1413 void Assembler::call(byte* entry, RelocInfo::Mode rmode) {
1414  positions_recorder()->WriteRecordedPositions();
1415  EnsureSpace ensure_space(this);
1416  ASSERT(!RelocInfo::IsCodeTarget(rmode));
1417  EMIT(0xE8);
1418  emit(entry - (pc_ + sizeof(int32_t)), rmode);
1419 }
1420 
1421 
1422 int Assembler::CallSize(const Operand& adr) {
1423  // Call size is 1 (opcode) + adr.len_ (operand).
1424  return 1 + adr.len_;
1425 }
1426 
1427 
1428 void Assembler::call(const Operand& adr) {
1429  positions_recorder()->WriteRecordedPositions();
1430  EnsureSpace ensure_space(this);
1431  EMIT(0xFF);
1432  emit_operand(edx, adr);
1433 }
1434 
1435 
1436 int Assembler::CallSize(Handle<Code> code, RelocInfo::Mode rmode) {
1437  return 1 /* EMIT */ + sizeof(uint32_t) /* emit */;
1438 }
1439 
1440 
1441 void Assembler::call(Handle<Code> code,
1442  RelocInfo::Mode rmode,
1443  TypeFeedbackId ast_id) {
1444  positions_recorder()->WriteRecordedPositions();
1445  EnsureSpace ensure_space(this);
1446  ASSERT(RelocInfo::IsCodeTarget(rmode));
1447  EMIT(0xE8);
1448  emit(reinterpret_cast<intptr_t>(code.location()), rmode, ast_id);
1449 }
1450 
1451 
1452 void Assembler::jmp(Label* L, Label::Distance distance) {
1453  EnsureSpace ensure_space(this);
1454  if (L->is_bound()) {
1455  const int short_size = 2;
1456  const int long_size = 5;
1457  int offs = L->pos() - pc_offset();
1458  ASSERT(offs <= 0);
1459  if (is_int8(offs - short_size)) {
1460  // 1110 1011 #8-bit disp.
1461  EMIT(0xEB);
1462  EMIT((offs - short_size) & 0xFF);
1463  } else {
1464  // 1110 1001 #32-bit disp.
1465  EMIT(0xE9);
1466  emit(offs - long_size);
1467  }
1468  } else if (distance == Label::kNear) {
1469  EMIT(0xEB);
1470  emit_near_disp(L);
1471  } else {
1472  // 1110 1001 #32-bit disp.
1473  EMIT(0xE9);
1474  emit_disp(L, Displacement::UNCONDITIONAL_JUMP);
1475  }
1476 }
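Worked example of the bound-label case above: for a label bound 10 bytes before the jump, offs == -10 and offs - short_size == -12 fits in a signed byte, so the two-byte short form EB F4 is emitted; otherwise the five-byte E9 rel32 form is used.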
1477 
1478 
1479 void Assembler::jmp(byte* entry, RelocInfo::Mode rmode) {
1480  EnsureSpace ensure_space(this);
1481  ASSERT(!RelocInfo::IsCodeTarget(rmode));
1482  EMIT(0xE9);
1483  emit(entry - (pc_ + sizeof(int32_t)), rmode);
1484 }
1485 
1486 
1487 void Assembler::jmp(const Operand& adr) {
1488  EnsureSpace ensure_space(this);
1489  EMIT(0xFF);
1490  emit_operand(esp, adr);
1491 }
1492 
1493 
1494 void Assembler::jmp(Handle<Code> code, RelocInfo::Mode rmode) {
1495  EnsureSpace ensure_space(this);
1496  ASSERT(RelocInfo::IsCodeTarget(rmode));
1497  EMIT(0xE9);
1498  emit(reinterpret_cast<intptr_t>(code.location()), rmode);
1499 }
1500 
1501 
1502 void Assembler::j(Condition cc, Label* L, Label::Distance distance) {
1503  EnsureSpace ensure_space(this);
1504  ASSERT(0 <= cc && cc < 16);
1505  if (L->is_bound()) {
1506  const int short_size = 2;
1507  const int long_size = 6;
1508  int offs = L->pos() - pc_offset();
1509  ASSERT(offs <= 0);
1510  if (is_int8(offs - short_size)) {
1511  // 0111 tttn #8-bit disp
1512  EMIT(0x70 | cc);
1513  EMIT((offs - short_size) & 0xFF);
1514  } else {
1515  // 0000 1111 1000 tttn #32-bit disp
1516  EMIT(0x0F);
1517  EMIT(0x80 | cc);
1518  emit(offs - long_size);
1519  }
1520  } else if (distance == Label::kNear) {
1521  EMIT(0x70 | cc);
1522  emit_near_disp(L);
1523  } else {
1524  // 0000 1111 1000 tttn #32-bit disp
1525 // Note: we could eliminate conditional jumps to this jump if the condition
1526 // is the same; however, that seems to be a rather unlikely case.
1527  EMIT(0x0F);
1528  EMIT(0x80 | cc);
1529  emit_disp(L, Displacement::OTHER);
1530  }
1531 }
1532 
1533 
1534 void Assembler::j(Condition cc, byte* entry, RelocInfo::Mode rmode) {
1535  EnsureSpace ensure_space(this);
1536  ASSERT((0 <= cc) && (cc < 16));
1537  // 0000 1111 1000 tttn #32-bit disp.
1538  EMIT(0x0F);
1539  EMIT(0x80 | cc);
1540  emit(entry - (pc_ + sizeof(int32_t)), rmode);
1541 }
1542 
1543 
1544 void Assembler::j(Condition cc, Handle<Code> code) {
1545  EnsureSpace ensure_space(this);
1546  // 0000 1111 1000 tttn #32-bit disp
1547  EMIT(0x0F);
1548  EMIT(0x80 | cc);
1549  emit(reinterpret_cast<intptr_t>(code.location()), RelocInfo::CODE_TARGET);
1550 }
1551 
1552 
1553 // FPU instructions.
1554 
1555 void Assembler::fld(int i) {
1556  EnsureSpace ensure_space(this);
1557  emit_farith(0xD9, 0xC0, i);
1558 }
1559 
1560 
1561 void Assembler::fstp(int i) {
1562  EnsureSpace ensure_space(this);
1563  emit_farith(0xDD, 0xD8, i);
1564 }
1565 
1566 
1567 void Assembler::fld1() {
1568  EnsureSpace ensure_space(this);
1569  EMIT(0xD9);
1570  EMIT(0xE8);
1571 }
1572 
1573 
1574 void Assembler::fldpi() {
1575  EnsureSpace ensure_space(this);
1576  EMIT(0xD9);
1577  EMIT(0xEB);
1578 }
1579 
1580 
1581 void Assembler::fldz() {
1582  EnsureSpace ensure_space(this);
1583  EMIT(0xD9);
1584  EMIT(0xEE);
1585 }
1586 
1587 
1588 void Assembler::fldln2() {
1589  EnsureSpace ensure_space(this);
1590  EMIT(0xD9);
1591  EMIT(0xED);
1592 }
1593 
1594 
1595 void Assembler::fld_s(const Operand& adr) {
1596  EnsureSpace ensure_space(this);
1597  EMIT(0xD9);
1598  emit_operand(eax, adr);
1599 }
1600 
1601 
1602 void Assembler::fld_d(const Operand& adr) {
1603  EnsureSpace ensure_space(this);
1604  EMIT(0xDD);
1605  emit_operand(eax, adr);
1606 }
1607 
1608 
1609 void Assembler::fstp_s(const Operand& adr) {
1610  EnsureSpace ensure_space(this);
1611  EMIT(0xD9);
1612  emit_operand(ebx, adr);
1613 }
1614 
1615 
1616 void Assembler::fstp_d(const Operand& adr) {
1617  EnsureSpace ensure_space(this);
1618  EMIT(0xDD);
1619  emit_operand(ebx, adr);
1620 }
1621 
1622 
1623 void Assembler::fst_d(const Operand& adr) {
1624  EnsureSpace ensure_space(this);
1625  EMIT(0xDD);
1626  emit_operand(edx, adr);
1627 }
1628 
1629 
1630 void Assembler::fild_s(const Operand& adr) {
1631  EnsureSpace ensure_space(this);
1632  EMIT(0xDB);
1633  emit_operand(eax, adr);
1634 }
1635 
1636 
1637 void Assembler::fild_d(const Operand& adr) {
1638  EnsureSpace ensure_space(this);
1639  EMIT(0xDF);
1640  emit_operand(ebp, adr);
1641 }
1642 
1643 
1644 void Assembler::fistp_s(const Operand& adr) {
1645  EnsureSpace ensure_space(this);
1646  EMIT(0xDB);
1647  emit_operand(ebx, adr);
1648 }
1649 
1650 
1651 void Assembler::fisttp_s(const Operand& adr) {
1652  ASSERT(CpuFeatures::IsEnabled(SSE3));
1653  EnsureSpace ensure_space(this);
1654  EMIT(0xDB);
1655  emit_operand(ecx, adr);
1656 }
1657 
1658 
1659 void Assembler::fisttp_d(const Operand& adr) {
1660  ASSERT(CpuFeatures::IsEnabled(SSE3));
1661  EnsureSpace ensure_space(this);
1662  EMIT(0xDD);
1663  emit_operand(ecx, adr);
1664 }
1665 
1666 
1667 void Assembler::fist_s(const Operand& adr) {
1668  EnsureSpace ensure_space(this);
1669  EMIT(0xDB);
1670  emit_operand(edx, adr);
1671 }
1672 
1673 
1674 void Assembler::fistp_d(const Operand& adr) {
1675  EnsureSpace ensure_space(this);
1676  EMIT(0xDF);
1677  emit_operand(edi, adr);
1678 }
1679 
1680 
1681 void Assembler::fabs() {
1682  EnsureSpace ensure_space(this);
1683  EMIT(0xD9);
1684  EMIT(0xE1);
1685 }
1686 
1687 
1688 void Assembler::fchs() {
1689  EnsureSpace ensure_space(this);
1690  EMIT(0xD9);
1691  EMIT(0xE0);
1692 }
1693 
1694 
1695 void Assembler::fcos() {
1696  EnsureSpace ensure_space(this);
1697  EMIT(0xD9);
1698  EMIT(0xFF);
1699 }
1700 
1701 
1702 void Assembler::fsin() {
1703  EnsureSpace ensure_space(this);
1704  EMIT(0xD9);
1705  EMIT(0xFE);
1706 }
1707 
1708 
1709 void Assembler::fptan() {
1710  EnsureSpace ensure_space(this);
1711  EMIT(0xD9);
1712  EMIT(0xF2);
1713 }
1714 
1715 
1716 void Assembler::fyl2x() {
1717  EnsureSpace ensure_space(this);
1718  EMIT(0xD9);
1719  EMIT(0xF1);
1720 }
1721 
1722 
1723 void Assembler::f2xm1() {
1724  EnsureSpace ensure_space(this);
1725  EMIT(0xD9);
1726  EMIT(0xF0);
1727 }
1728 
1729 
1730 void Assembler::fscale() {
1731  EnsureSpace ensure_space(this);
1732  EMIT(0xD9);
1733  EMIT(0xFD);
1734 }
1735 
1736 
1737 void Assembler::fninit() {
1738  EnsureSpace ensure_space(this);
1739  EMIT(0xDB);
1740  EMIT(0xE3);
1741 }
1742 
1743 
1744 void Assembler::fadd(int i) {
1745  EnsureSpace ensure_space(this);
1746  emit_farith(0xDC, 0xC0, i);
1747 }
1748 
1749 
1750 void Assembler::fsub(int i) {
1751  EnsureSpace ensure_space(this);
1752  emit_farith(0xDC, 0xE8, i);
1753 }
1754 
1755 
1756 void Assembler::fisub_s(const Operand& adr) {
1757  EnsureSpace ensure_space(this);
1758  EMIT(0xDA);
1759  emit_operand(esp, adr);
1760 }
1761 
1762 
1763 void Assembler::fmul(int i) {
1764  EnsureSpace ensure_space(this);
1765  emit_farith(0xDC, 0xC8, i);
1766 }
1767 
1768 
1769 void Assembler::fdiv(int i) {
1770  EnsureSpace ensure_space(this);
1771  emit_farith(0xDC, 0xF8, i);
1772 }
1773 
1774 
1775 void Assembler::faddp(int i) {
1776  EnsureSpace ensure_space(this);
1777  emit_farith(0xDE, 0xC0, i);
1778 }
1779 
1780 
1781 void Assembler::fsubp(int i) {
1782  EnsureSpace ensure_space(this);
1783  emit_farith(0xDE, 0xE8, i);
1784 }
1785 
1786 
1787 void Assembler::fsubrp(int i) {
1788  EnsureSpace ensure_space(this);
1789  emit_farith(0xDE, 0xE0, i);
1790 }
1791 
1792 
1793 void Assembler::fmulp(int i) {
1794  EnsureSpace ensure_space(this);
1795  emit_farith(0xDE, 0xC8, i);
1796 }
1797 
1798 
1799 void Assembler::fdivp(int i) {
1800  EnsureSpace ensure_space(this);
1801  emit_farith(0xDE, 0xF8, i);
1802 }
1803 
1804 
1805 void Assembler::fprem() {
1806  EnsureSpace ensure_space(this);
1807  EMIT(0xD9);
1808  EMIT(0xF8);
1809 }
1810 
1811 
1812 void Assembler::fprem1() {
1813  EnsureSpace ensure_space(this);
1814  EMIT(0xD9);
1815  EMIT(0xF5);
1816 }
1817 
1818 
1819 void Assembler::fxch(int i) {
1820  EnsureSpace ensure_space(this);
1821  emit_farith(0xD9, 0xC8, i);
1822 }
1823 
1824 
1825 void Assembler::fincstp() {
1826  EnsureSpace ensure_space(this);
1827  EMIT(0xD9);
1828  EMIT(0xF7);
1829 }
1830 
1831 
1832 void Assembler::ffree(int i) {
1833  EnsureSpace ensure_space(this);
1834  emit_farith(0xDD, 0xC0, i);
1835 }
1836 
1837 
1838 void Assembler::ftst() {
1839  EnsureSpace ensure_space(this);
1840  EMIT(0xD9);
1841  EMIT(0xE4);
1842 }
1843 
1844 
1845 void Assembler::fucomp(int i) {
1846  EnsureSpace ensure_space(this);
1847  emit_farith(0xDD, 0xE8, i);
1848 }
1849 
1850 
1851 void Assembler::fucompp() {
1852  EnsureSpace ensure_space(this);
1853  EMIT(0xDA);
1854  EMIT(0xE9);
1855 }
1856 
1857 
1858 void Assembler::fucomi(int i) {
1859  EnsureSpace ensure_space(this);
1860  EMIT(0xDB);
1861  EMIT(0xE8 + i);
1862 }
1863 
1864 
1865 void Assembler::fucomip() {
1866  EnsureSpace ensure_space(this);
1867  EMIT(0xDF);
1868  EMIT(0xE9);
1869 }
1870 
1871 
1872 void Assembler::fcompp() {
1873  EnsureSpace ensure_space(this);
1874  EMIT(0xDE);
1875  EMIT(0xD9);
1876 }
1877 
1878 
1879 void Assembler::fnstsw_ax() {
1880  EnsureSpace ensure_space(this);
1881  EMIT(0xDF);
1882  EMIT(0xE0);
1883 }
1884 
1885 
1886 void Assembler::fwait() {
1887  EnsureSpace ensure_space(this);
1888  EMIT(0x9B);
1889 }
1890 
1891 
1892 void Assembler::frndint() {
1893  EnsureSpace ensure_space(this);
1894  EMIT(0xD9);
1895  EMIT(0xFC);
1896 }
1897 
1898 
1899 void Assembler::fnclex() {
1900  EnsureSpace ensure_space(this);
1901  EMIT(0xDB);
1902  EMIT(0xE2);
1903 }
1904 
1905 
1906 void Assembler::sahf() {
1907  EnsureSpace ensure_space(this);
1908  EMIT(0x9E);
1909 }
1910 
1911 
1912 void Assembler::setcc(Condition cc, Register reg) {
1913  ASSERT(reg.is_byte_register());
1914  EnsureSpace ensure_space(this);
1915  EMIT(0x0F);
1916  EMIT(0x90 | cc);
1917  EMIT(0xC0 | reg.code());
1918 }
1919 
1920 
1921 void Assembler::cvttss2si(Register dst, const Operand& src) {
1922  ASSERT(CpuFeatures::IsEnabled(SSE2));
1923  EnsureSpace ensure_space(this);
1924  EMIT(0xF3);
1925  EMIT(0x0F);
1926  EMIT(0x2C);
1927  emit_operand(dst, src);
1928 }
1929 
1930 
1931 void Assembler::cvttsd2si(Register dst, const Operand& src) {
1932  ASSERT(CpuFeatures::IsEnabled(SSE2));
1933  EnsureSpace ensure_space(this);
1934  EMIT(0xF2);
1935  EMIT(0x0F);
1936  EMIT(0x2C);
1937  emit_operand(dst, src);
1938 }
1939 
1940 
1941 void Assembler::cvtsd2si(Register dst, XMMRegister src) {
1942  ASSERT(CpuFeatures::IsEnabled(SSE2));
1943  EnsureSpace ensure_space(this);
1944  EMIT(0xF2);
1945  EMIT(0x0F);
1946  EMIT(0x2D);
1947  emit_sse_operand(dst, src);
1948 }
1949 
1950 
1951 void Assembler::cvtsi2sd(XMMRegister dst, const Operand& src) {
1952  ASSERT(CpuFeatures::IsEnabled(SSE2));
1953  EnsureSpace ensure_space(this);
1954  EMIT(0xF2);
1955  EMIT(0x0F);
1956  EMIT(0x2A);
1957  emit_sse_operand(dst, src);
1958 }
1959 
1960 
1961 void Assembler::cvtss2sd(XMMRegister dst, XMMRegister src) {
1962  ASSERT(CpuFeatures::IsEnabled(SSE2));
1963  EnsureSpace ensure_space(this);
1964  EMIT(0xF3);
1965  EMIT(0x0F);
1966  EMIT(0x5A);
1967  emit_sse_operand(dst, src);
1968 }
1969 
1970 
1971 void Assembler::cvtsd2ss(XMMRegister dst, XMMRegister src) {
1972  ASSERT(CpuFeatures::IsEnabled(SSE2));
1973  EnsureSpace ensure_space(this);
1974  EMIT(0xF2);
1975  EMIT(0x0F);
1976  EMIT(0x5A);
1977  emit_sse_operand(dst, src);
1978 }
1979 
1980 
1981 void Assembler::addsd(XMMRegister dst, XMMRegister src) {
1982  ASSERT(CpuFeatures::IsEnabled(SSE2));
1983  EnsureSpace ensure_space(this);
1984  EMIT(0xF2);
1985  EMIT(0x0F);
1986  EMIT(0x58);
1987  emit_sse_operand(dst, src);
1988 }
1989 
1990 
1991 void Assembler::mulsd(XMMRegister dst, XMMRegister src) {
1992  ASSERT(CpuFeatures::IsEnabled(SSE2));
1993  EnsureSpace ensure_space(this);
1994  EMIT(0xF2);
1995  EMIT(0x0F);
1996  EMIT(0x59);
1997  emit_sse_operand(dst, src);
1998 }
1999 
2000 
2001 void Assembler::subsd(XMMRegister dst, XMMRegister src) {
2002  ASSERT(CpuFeatures::IsEnabled(SSE2));
2003  EnsureSpace ensure_space(this);
2004  EMIT(0xF2);
2005  EMIT(0x0F);
2006  EMIT(0x5C);
2007  emit_sse_operand(dst, src);
2008 }
2009 
2010 
2011 void Assembler::divsd(XMMRegister dst, XMMRegister src) {
2012  ASSERT(CpuFeatures::IsEnabled(SSE2));
2013  EnsureSpace ensure_space(this);
2014  EMIT(0xF2);
2015  EMIT(0x0F);
2016  EMIT(0x5E);
2017  emit_sse_operand(dst, src);
2018 }
2019 
2020 
2021 void Assembler::xorpd(XMMRegister dst, XMMRegister src) {
2022  ASSERT(CpuFeatures::IsEnabled(SSE2));
2023  EnsureSpace ensure_space(this);
2024  EMIT(0x66);
2025  EMIT(0x0F);
2026  EMIT(0x57);
2027  emit_sse_operand(dst, src);
2028 }
2029 
2030 
2031 void Assembler::xorps(XMMRegister dst, XMMRegister src) {
2032  EnsureSpace ensure_space(this);
2033  EMIT(0x0F);
2034  EMIT(0x57);
2035  emit_sse_operand(dst, src);
2036 }
2037 
2038 
2039 void Assembler::sqrtsd(XMMRegister dst, XMMRegister src) {
2040  EnsureSpace ensure_space(this);
2041  EMIT(0xF2);
2042  EMIT(0x0F);
2043  EMIT(0x51);
2044  emit_sse_operand(dst, src);
2045 }
2046 
2047 
2048 void Assembler::andpd(XMMRegister dst, XMMRegister src) {
2049  EnsureSpace ensure_space(this);
2050  EMIT(0x66);
2051  EMIT(0x0F);
2052  EMIT(0x54);
2053  emit_sse_operand(dst, src);
2054 }
2055 
2056 
2057 void Assembler::orpd(XMMRegister dst, XMMRegister src) {
2058  EnsureSpace ensure_space(this);
2059  EMIT(0x66);
2060  EMIT(0x0F);
2061  EMIT(0x56);
2062  emit_sse_operand(dst, src);
2063 }
2064 
2065 
2066 void Assembler::ucomisd(XMMRegister dst, XMMRegister src) {
2067  ASSERT(CpuFeatures::IsEnabled(SSE2));
2068  EnsureSpace ensure_space(this);
2069  EMIT(0x66);
2070  EMIT(0x0F);
2071  EMIT(0x2E);
2072  emit_sse_operand(dst, src);
2073 }
2074 
2075 
2076 void Assembler::ucomisd(XMMRegister dst, const Operand& src) {
2077  ASSERT(CpuFeatures::IsEnabled(SSE2));
2078  EnsureSpace ensure_space(this);
2079  EMIT(0x66);
2080  EMIT(0x0F);
2081  EMIT(0x2E);
2082  emit_sse_operand(dst, src);
2083 }
2084 
2085 
2086 void Assembler::roundsd(XMMRegister dst, XMMRegister src, RoundingMode mode) {
2087  ASSERT(CpuFeatures::IsEnabled(SSE4_1));
2088  EnsureSpace ensure_space(this);
2089  EMIT(0x66);
2090  EMIT(0x0F);
2091  EMIT(0x3A);
2092  EMIT(0x0B);
2093  emit_sse_operand(dst, src);
2094  // Mask precision exception.
2095  EMIT(static_cast<byte>(mode) | 0x8);
2096 }
2097 
2098 void Assembler::movmskpd(Register dst, XMMRegister src) {
2099  ASSERT(CpuFeatures::IsEnabled(SSE2));
2100  EnsureSpace ensure_space(this);
2101  EMIT(0x66);
2102  EMIT(0x0F);
2103  EMIT(0x50);
2104  emit_sse_operand(dst, src);
2105 }
2106 
2107 
2108 void Assembler::pcmpeqd(XMMRegister dst, XMMRegister src) {
2109  ASSERT(CpuFeatures::IsEnabled(SSE2));
2110  EnsureSpace ensure_space(this);
2111  EMIT(0x66);
2112  EMIT(0x0F);
2113  EMIT(0x76);
2114  emit_sse_operand(dst, src);
2115 }
2116 
2117 
2118 void Assembler::cmpltsd(XMMRegister dst, XMMRegister src) {
2119  ASSERT(CpuFeatures::IsEnabled(SSE2));
2120  EnsureSpace ensure_space(this);
2121  EMIT(0xF2);
2122  EMIT(0x0F);
2123  EMIT(0xC2);
2124  emit_sse_operand(dst, src);
2125  EMIT(1); // LT == 1
2126 }
2127 
2128 
2129 void Assembler::movaps(XMMRegister dst, XMMRegister src) {
2130  ASSERT(CpuFeatures::IsEnabled(SSE2));
2131  EnsureSpace ensure_space(this);
2132  EMIT(0x0F);
2133  EMIT(0x28);
2134  emit_sse_operand(dst, src);
2135 }
2136 
2137 
2138 void Assembler::movdqa(const Operand& dst, XMMRegister src) {
2139  ASSERT(CpuFeatures::IsEnabled(SSE2));
2140  EnsureSpace ensure_space(this);
2141  EMIT(0x66);
2142  EMIT(0x0F);
2143  EMIT(0x7F);
2144  emit_sse_operand(src, dst);
2145 }
2146 
2147 
2148 void Assembler::movdqa(XMMRegister dst, const Operand& src) {
2149  ASSERT(CpuFeatures::IsEnabled(SSE2));
2150  EnsureSpace ensure_space(this);
2151  EMIT(0x66);
2152  EMIT(0x0F);
2153  EMIT(0x6F);
2154  emit_sse_operand(dst, src);
2155 }
2156 
2157 
2158 void Assembler::movdqu(const Operand& dst, XMMRegister src ) {
2159  ASSERT(CpuFeatures::IsEnabled(SSE2));
2160  EnsureSpace ensure_space(this);
2161  EMIT(0xF3);
2162  EMIT(0x0F);
2163  EMIT(0x7F);
2164  emit_sse_operand(src, dst);
2165 }
2166 
2167 
2168 void Assembler::movdqu(XMMRegister dst, const Operand& src) {
2169  ASSERT(CpuFeatures::IsEnabled(SSE2));
2170  EnsureSpace ensure_space(this);
2171  EMIT(0xF3);
2172  EMIT(0x0F);
2173  EMIT(0x6F);
2174  emit_sse_operand(dst, src);
2175 }
2176 
2177 
2178 void Assembler::movntdqa(XMMRegister dst, const Operand& src) {
2179  ASSERT(CpuFeatures::IsEnabled(SSE4_1));
2180  EnsureSpace ensure_space(this);
2181  EMIT(0x66);
2182  EMIT(0x0F);
2183  EMIT(0x38);
2184  EMIT(0x2A);
2185  emit_sse_operand(dst, src);
2186 }
2187 
2188 
2189 void Assembler::movntdq(const Operand& dst, XMMRegister src) {
2190  ASSERT(CpuFeatures::IsEnabled(SSE2));
2191  EnsureSpace ensure_space(this);
2192  EMIT(0x66);
2193  EMIT(0x0F);
2194  EMIT(0xE7);
2195  emit_sse_operand(src, dst);
2196 }
2197 
2198 
2199 void Assembler::prefetch(const Operand& src, int level) {
2200  ASSERT(is_uint2(level));
2201  EnsureSpace ensure_space(this);
2202  EMIT(0x0F);
2203  EMIT(0x18);
2204  XMMRegister code = { level }; // Emit hint number in Reg position of RegR/M.
2205  emit_sse_operand(code, src);
2206 }
2207 
2208 
2209 void Assembler::movdbl(XMMRegister dst, const Operand& src) {
2210  EnsureSpace ensure_space(this);
2211  movsd(dst, src);
2212 }
2213 
2214 
2215 void Assembler::movdbl(const Operand& dst, XMMRegister src) {
2216  EnsureSpace ensure_space(this);
2217  movsd(dst, src);
2218 }
2219 
2220 
2221 void Assembler::movsd(const Operand& dst, XMMRegister src ) {
2222  ASSERT(CpuFeatures::IsEnabled(SSE2));
2223  EnsureSpace ensure_space(this);
2224  EMIT(0xF2); // double
2225  EMIT(0x0F);
2226  EMIT(0x11); // store
2227  emit_sse_operand(src, dst);
2228 }
2229 
2230 
2231 void Assembler::movsd(XMMRegister dst, const Operand& src) {
2232  ASSERT(CpuFeatures::IsEnabled(SSE2));
2233  EnsureSpace ensure_space(this);
2234  EMIT(0xF2); // double
2235  EMIT(0x0F);
2236  EMIT(0x10); // load
2237  emit_sse_operand(dst, src);
2238 }
2239 
2240 
2241 void Assembler::movsd(XMMRegister dst, XMMRegister src) {
2242  ASSERT(CpuFeatures::IsEnabled(SSE2));
2243  EnsureSpace ensure_space(this);
2244  EMIT(0xF2);
2245  EMIT(0x0F);
2246  EMIT(0x10);
2247  emit_sse_operand(dst, src);
2248 }
2249 
2250 
2251 void Assembler::movss(const Operand& dst, XMMRegister src ) {
2252  ASSERT(CpuFeatures::IsEnabled(SSE2));
2253  EnsureSpace ensure_space(this);
2254  EMIT(0xF3); // float
2255  EMIT(0x0F);
2256  EMIT(0x11); // store
2257  emit_sse_operand(src, dst);
2258 }
2259 
2260 
2261 void Assembler::movss(XMMRegister dst, const Operand& src) {
2262  ASSERT(CpuFeatures::IsEnabled(SSE2));
2263  EnsureSpace ensure_space(this);
2264  EMIT(0xF3); // float
2265  EMIT(0x0F);
2266  EMIT(0x10); // load
2267  emit_sse_operand(dst, src);
2268 }
2269 
2270 
2271 void Assembler::movss(XMMRegister dst, XMMRegister src) {
2272  ASSERT(CpuFeatures::IsEnabled(SSE2));
2273  EnsureSpace ensure_space(this);
2274  EMIT(0xF3);
2275  EMIT(0x0F);
2276  EMIT(0x10);
2277  emit_sse_operand(dst, src);
2278 }
2279 
2280 
2281 void Assembler::movd(XMMRegister dst, const Operand& src) {
2282  ASSERT(CpuFeatures::IsEnabled(SSE2));
2283  EnsureSpace ensure_space(this);
2284  EMIT(0x66);
2285  EMIT(0x0F);
2286  EMIT(0x6E);
2287  emit_sse_operand(dst, src);
2288 }
2289 
2290 
2291 void Assembler::movd(const Operand& dst, XMMRegister src) {
2292  ASSERT(CpuFeatures::IsEnabled(SSE2));
2293  EnsureSpace ensure_space(this);
2294  EMIT(0x66);
2295  EMIT(0x0F);
2296  EMIT(0x7E);
2297  emit_sse_operand(src, dst);
2298 }
2299 
2300 
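// EXTRACTPS (66 0F 3A 17, SSE4.1) copies the 32-bit lane of the source XMM
// register selected by the low two bits of imm8 into a general register.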
2301 void Assembler::extractps(Register dst, XMMRegister src, byte imm8) {
2302  ASSERT(CpuFeatures::IsSupported(SSE4_1));
2303  ASSERT(is_uint8(imm8));
2304  EnsureSpace ensure_space(this);
2305  EMIT(0x66);
2306  EMIT(0x0F);
2307  EMIT(0x3A);
2308  EMIT(0x17);
2309  emit_sse_operand(dst, src);
2310  EMIT(imm8);
2311 }
2312 
2313 
2314 void Assembler::pand(XMMRegister dst, XMMRegister src) {
2315  ASSERT(CpuFeatures::IsEnabled(SSE2));
2316  EnsureSpace ensure_space(this);
2317  EMIT(0x66);
2318  EMIT(0x0F);
2319  EMIT(0xDB);
2320  emit_sse_operand(dst, src);
2321 }
2322 
2323 
2324 void Assembler::pxor(XMMRegister dst, XMMRegister src) {
2325  ASSERT(CpuFeatures::IsEnabled(SSE2));
2326  EnsureSpace ensure_space(this);
2327  EMIT(0x66);
2328  EMIT(0x0F);
2329  EMIT(0xEF);
2330  emit_sse_operand(dst, src);
2331 }
2332 
2333 
2334 void Assembler::por(XMMRegister dst, XMMRegister src) {
2335  ASSERT(CpuFeatures::IsEnabled(SSE2));
2336  EnsureSpace ensure_space(this);
2337  EMIT(0x66);
2338  EMIT(0x0F);
2339  EMIT(0xEB);
2340  emit_sse_operand(dst, src);
2341 }
2342 
2343 
2344 void Assembler::ptest(XMMRegister dst, XMMRegister src) {
2345  ASSERT(CpuFeatures::IsEnabled(SSE4_1));
2346  EnsureSpace ensure_space(this);
2347  EMIT(0x66);
2348  EMIT(0x0F);
2349  EMIT(0x38);
2350  EMIT(0x17);
2351  emit_sse_operand(dst, src);
2352 }
2353 
2354 
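// PSLLQ xmm, imm8 is encoded as 66 0F 73 /6 ib: the /6 opcode extension is
// supplied through a register with code 6 (esi) in the ModR/M reg field.
// The immediate form of PSRLQ below uses the same opcode with extension /2.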
2355 void Assembler::psllq(XMMRegister reg, int8_t shift) {
2356  ASSERT(CpuFeatures::IsEnabled(SSE2));
2357  EnsureSpace ensure_space(this);
2358  EMIT(0x66);
2359  EMIT(0x0F);
2360  EMIT(0x73);
2361  emit_sse_operand(esi, reg); // esi == 6
2362  EMIT(shift);
2363 }
2364 
2365 
2366 void Assembler::psllq(XMMRegister dst, XMMRegister src) {
2367  ASSERT(CpuFeatures::IsEnabled(SSE2));
2368  EnsureSpace ensure_space(this);
2369  EMIT(0x66);
2370  EMIT(0x0F);
2371  EMIT(0xF3);
2372  emit_sse_operand(dst, src);
2373 }
2374 
2375 
2376 void Assembler::psrlq(XMMRegister reg, int8_t shift) {
2377  ASSERT(CpuFeatures::IsEnabled(SSE2));
2378  EnsureSpace ensure_space(this);
2379  EMIT(0x66);
2380  EMIT(0x0F);
2381  EMIT(0x73);
2382  emit_sse_operand(edx, reg); // edx == 2
2383  EMIT(shift);
2384 }
2385 
2386 
2387 void Assembler::psrlq(XMMRegister dst, XMMRegister src) {
2388  ASSERT(CpuFeatures::IsEnabled(SSE2));
2389  EnsureSpace ensure_space(this);
2390  EMIT(0x66);
2391  EMIT(0x0F);
2392  EMIT(0xD3);
2393  emit_sse_operand(dst, src);
2394 }
2395 
2396 
2397 void Assembler::pshufd(XMMRegister dst, XMMRegister src, int8_t shuffle) {
2398  ASSERT(CpuFeatures::IsEnabled(SSE2));
2399  EnsureSpace ensure_space(this);
2400  EMIT(0x66);
2401  EMIT(0x0F);
2402  EMIT(0x70);
2403  emit_sse_operand(dst, src);
2404  EMIT(shuffle);
2405 }
2406 
2407 
2408 void Assembler::pextrd(const Operand& dst, XMMRegister src, int8_t offset) {
2409  ASSERT(CpuFeatures::IsEnabled(SSE4_1));
2410  EnsureSpace ensure_space(this);
2411  EMIT(0x66);
2412  EMIT(0x0F);
2413  EMIT(0x3A);
2414  EMIT(0x16);
2415  emit_sse_operand(src, dst);
2416  EMIT(offset);
2417 }
2418 
2419 
2420 void Assembler::pinsrd(XMMRegister dst, const Operand& src, int8_t offset) {
2421  ASSERT(CpuFeatures::IsEnabled(SSE4_1));
2422  EnsureSpace ensure_space(this);
2423  EMIT(0x66);
2424  EMIT(0x0F);
2425  EMIT(0x3A);
2426  EMIT(0x22);
2427  emit_sse_operand(dst, src);
2428  EMIT(offset);
2429 }
2430 
2431 
2432 void Assembler::emit_sse_operand(XMMRegister reg, const Operand& adr) {
2433  Register ireg = { reg.code() };
2434  emit_operand(ireg, adr);
2435 }
2436 
2437 
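// Register-to-register operands use a ModR/M byte with mod = 11 (0xC0):
// bits 5..3 carry the destination register code, bits 2..0 the source.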
2438 void Assembler::emit_sse_operand(XMMRegister dst, XMMRegister src) {
2439  EMIT(0xC0 | dst.code() << 3 | src.code());
2440 }
2441 
2442 
2443 void Assembler::emit_sse_operand(Register dst, XMMRegister src) {
2444  EMIT(0xC0 | dst.code() << 3 | src.code());
2445 }
2446 
2447 
2448 void Assembler::Print() {
2449  Disassembler::Decode(stdout, buffer_, pc_);
2450 }
2451 
2452 
2453 void Assembler::RecordJSReturn() {
2454  positions_recorder()->WriteRecordedPositions();
2455  EnsureSpace ensure_space(this);
2456  RecordRelocInfo(RelocInfo::JS_RETURN);
2457 }
2458 
2459 
2460 void Assembler::RecordDebugBreakSlot() {
2461  positions_recorder()->WriteRecordedPositions();
2462  EnsureSpace ensure_space(this);
2463  RecordRelocInfo(RelocInfo::DEBUG_BREAK_SLOT);
2464 }
2465 
2466 
2467 void Assembler::RecordComment(const char* msg, bool force) {
2468  if (FLAG_code_comments || force) {
2469  EnsureSpace ensure_space(this);
2470  RecordRelocInfo(RelocInfo::COMMENT, reinterpret_cast<intptr_t>(msg));
2471  }
2472 }
2473 
2474 
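// GrowBuffer allocates a larger code buffer (at least 4KB, otherwise double
// the current size), copies the generated code to its start and the relocation
// info to its end, and then patches position-dependent entries for the move.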
2475 void Assembler::GrowBuffer() {
2476  ASSERT(overflow());
2477  if (!own_buffer_) FATAL("external code buffer is too small");
2478 
2479  // Compute new buffer size.
2480  CodeDesc desc; // the new buffer
2481  if (buffer_size_ < 4*KB) {
2482  desc.buffer_size = 4*KB;
2483  } else {
2484  desc.buffer_size = 2*buffer_size_;
2485  }
2486  // Some internal data structures overflow for very large buffers;
2487  // they rely on kMaximalBufferSize being kept small enough to avoid this.
2488  if ((desc.buffer_size > kMaximalBufferSize) ||
2489  (desc.buffer_size > isolate()->heap()->MaxOldGenerationSize())) {
2490  V8::FatalProcessOutOfMemory("Assembler::GrowBuffer");
2491  }
2492 
2493  // Set up new buffer.
2494  desc.buffer = NewArray<byte>(desc.buffer_size);
2495  desc.instr_size = pc_offset();
2496  desc.reloc_size = (buffer_ + buffer_size_) - (reloc_info_writer.pos());
2497 
2498  // Clear the buffer in debug mode. Use 'int3' instructions so that we
2499  // trap immediately if we ever run uninitialized code.
2500 #ifdef DEBUG
2501  memset(desc.buffer, 0xCC, desc.buffer_size);
2502 #endif
2503 
2504  // Copy the data.
2505  int pc_delta = desc.buffer - buffer_;
2506  int rc_delta = (desc.buffer + desc.buffer_size) - (buffer_ + buffer_size_);
2507  memmove(desc.buffer, buffer_, desc.instr_size);
2508  memmove(rc_delta + reloc_info_writer.pos(),
2509  reloc_info_writer.pos(), desc.reloc_size);
2510 
2511  // Switch buffers.
2512  if (isolate()->assembler_spare_buffer() == NULL &&
2513  buffer_size_ == kMinimalBufferSize) {
2514  isolate()->set_assembler_spare_buffer(buffer_);
2515  } else {
2516  DeleteArray(buffer_);
2517  }
2518  buffer_ = desc.buffer;
2519  buffer_size_ = desc.buffer_size;
2520  pc_ += pc_delta;
2521  reloc_info_writer.Reposition(reloc_info_writer.pos() + rc_delta,
2522  reloc_info_writer.last_pc() + pc_delta);
2523 
2524  // Relocate runtime entries.
2525  for (RelocIterator it(desc); !it.done(); it.next()) {
2526  RelocInfo::Mode rmode = it.rinfo()->rmode();
2527  if (rmode == RelocInfo::RUNTIME_ENTRY) {
2528  int32_t* p = reinterpret_cast<int32_t*>(it.rinfo()->pc());
2529  *p -= pc_delta; // relocate entry
2530  } else if (rmode == RelocInfo::INTERNAL_REFERENCE) {
2531  int32_t* p = reinterpret_cast<int32_t*>(it.rinfo()->pc());
2532  if (*p != 0) { // 0 means uninitialized.
2533  *p += pc_delta;
2534  }
2535  }
2536  }
2537 
2538  ASSERT(!overflow());
2539 }
2540 
2541 
2542 void Assembler::emit_arith_b(int op1, int op2, Register dst, int imm8) {
2543  ASSERT(is_uint8(op1) && is_uint8(op2)); // wrong opcode
2544  ASSERT(is_uint8(imm8));
2545  ASSERT((op1 & 0x01) == 0); // should be 8bit operation
2546  EMIT(op1);
2547  EMIT(op2 | dst.code());
2548  EMIT(imm8);
2549 }
2550 
2551 
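// 'sel' is the group-1 opcode extension placed in the ModR/M reg field
// (0=ADD, 1=OR, 2=ADC, 3=SBB, 4=AND, 5=SUB, 6=XOR, 7=CMP). Opcode 0x83 takes
// a sign-extended 8-bit immediate, 0x81 a full 32-bit one, and an eax
// destination uses the one-byte accumulator short form.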
2552 void Assembler::emit_arith(int sel, Operand dst, const Immediate& x) {
2553  ASSERT((0 <= sel) && (sel <= 7));
2554  Register ireg = { sel };
2555  if (x.is_int8()) {
2556  EMIT(0x83); // using a sign-extended 8-bit immediate.
2557  emit_operand(ireg, dst);
2558  EMIT(x.x_ & 0xFF);
2559  } else if (dst.is_reg(eax)) {
2560  EMIT((sel << 3) | 0x05); // short form if the destination is eax.
2561  emit(x);
2562  } else {
2563  EMIT(0x81); // using a literal 32-bit immediate.
2564  emit_operand(ireg, dst);
2565  emit(x);
2566  }
2567 }
2568 
2569 
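// The Operand already holds the encoded ModR/M byte (plus any SIB byte and
// displacement); only its reg field (bits 5..3) is patched in here with the
// register code or opcode extension of the current instruction.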
2570 void Assembler::emit_operand(Register reg, const Operand& adr) {
2571  const unsigned length = adr.len_;
2572  ASSERT(length > 0);
2573 
2574  // Emit updated ModRM byte containing the given register.
2575  pc_[0] = (adr.buf_[0] & ~0x38) | (reg.code() << 3);
2576 
2577  // Emit the rest of the encoded operand.
2578  for (unsigned i = 1; i < length; i++) pc_[i] = adr.buf_[i];
2579  pc_ += length;
2580 
2581  // Emit relocation information if necessary.
2582  if (length >= sizeof(int32_t) && adr.rmode_ != RelocInfo::NONE) {
2583  pc_ -= sizeof(int32_t); // pc_ must be *at* disp32
2584  RecordRelocInfo(adr.rmode_);
2585  pc_ += sizeof(int32_t);
2586  }
2587 }
2588 
2589 
2590 void Assembler::emit_farith(int b1, int b2, int i) {
2591  ASSERT(is_uint8(b1) && is_uint8(b2)); // wrong opcode
2592  ASSERT(0 <= i && i < 8); // illegal stack offset
2593  EMIT(b1);
2594  EMIT(b2 + i);
2595 }
2596 
2597 
2598 void Assembler::db(uint8_t data) {
2599  EnsureSpace ensure_space(this);
2600  EMIT(data);
2601 }
2602 
2603 
2604 void Assembler::dd(uint32_t data) {
2605  EnsureSpace ensure_space(this);
2606  emit(data);
2607 }
2608 
2609 
2610 void Assembler::RecordRelocInfo(RelocInfo::Mode rmode, intptr_t data) {
2611  ASSERT(rmode != RelocInfo::NONE);
2612  // Don't record external references unless the heap will be serialized.
2613  if (rmode == RelocInfo::EXTERNAL_REFERENCE) {
2614 #ifdef DEBUG
2615  if (!Serializer::enabled()) {
2616  Serializer::TooLateToEnableNow();
2617  }
2618 #endif
2619  if (!Serializer::enabled() && !emit_debug_code()) {
2620  return;
2621  }
2622  }
2623  RelocInfo rinfo(pc_, rmode, data, NULL);
2624  reloc_info_writer.Write(&rinfo);
2625 }
2626 
2627 
2628 #ifdef GENERATED_CODE_COVERAGE
2629 static FILE* coverage_log = NULL;
2630 
2631 
2632 static void InitCoverageLog() {
2633  char* file_name = getenv("V8_GENERATED_CODE_COVERAGE_LOG");
2634  if (file_name != NULL) {
2635  coverage_log = fopen(file_name, "aw+");
2636  }
2637 }
2638 
2639 
2640 void LogGeneratedCodeCoverage(const char* file_line) {
2641  const char* return_address = (&file_line)[-1];
2642  char* push_insn = const_cast<char*>(return_address - 12);
2643  push_insn[0] = 0xeb; // Relative branch insn.
2644  push_insn[1] = 13; // Skip over coverage insns.
2645  if (coverage_log != NULL) {
2646  fprintf(coverage_log, "%s\n", file_line);
2647  fflush(coverage_log);
2648  }
2649 }
2650 
2651 #endif
2652 
2653 } } // namespace v8::internal
2654 
2655 #endif // V8_TARGET_ARCH_IA32