v8 3.11.10 (node 0.8.26)
V8 is Google's open source JavaScript engine.
macro-assembler-ia32.cc
1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are
4 // met:
5 //
6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided
11 // with the distribution.
12 // * Neither the name of Google Inc. nor the names of its
13 // contributors may be used to endorse or promote products derived
14 // from this software without specific prior written permission.
15 //
16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27 
28 #include "v8.h"
29 
30 #if defined(V8_TARGET_ARCH_IA32)
31 
32 #include "bootstrapper.h"
33 #include "codegen.h"
34 #include "debug.h"
35 #include "runtime.h"
36 #include "serialize.h"
37 
38 namespace v8 {
39 namespace internal {
40 
41 // -------------------------------------------------------------------------
42 // MacroAssembler implementation.
43 
44 MacroAssembler::MacroAssembler(Isolate* arg_isolate, void* buffer, int size)
45  : Assembler(arg_isolate, buffer, size),
46  generating_stub_(false),
47  allow_stub_calls_(true),
48  has_frame_(false) {
49  if (isolate() != NULL) {
50  code_object_ = Handle<Object>(isolate()->heap()->undefined_value(),
51  isolate());
52  }
53 }
54 
55 
56 void MacroAssembler::InNewSpace(
57  Register object,
58  Register scratch,
59  Condition cc,
60  Label* condition_met,
61  Label::Distance condition_met_distance) {
62  ASSERT(cc == equal || cc == not_equal);
63  if (scratch.is(object)) {
64  and_(scratch, Immediate(~Page::kPageAlignmentMask));
65  } else {
66  mov(scratch, Immediate(~Page::kPageAlignmentMask));
67  and_(scratch, object);
68  }
69  // Check that we can use a test_b.
70  ASSERT(MemoryChunk::IN_FROM_SPACE < 8);
71  ASSERT(MemoryChunk::IN_TO_SPACE < 8);
72  int mask = (1 << MemoryChunk::IN_FROM_SPACE)
73  | (1 << MemoryChunk::IN_TO_SPACE);
74  // If non-zero, the page belongs to new-space.
75  test_b(Operand(scratch, MemoryChunk::kFlagsOffset),
76  static_cast<uint8_t>(mask));
77  j(cc, condition_met, condition_met_distance);
78 }
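// The masking above relies on pages being power-of-two aligned: clearing
// the low bits of any address inside a page yields the page start, where
// the MemoryChunk header and its flags word live. A rough C++ sketch of the
// check (the flags accessor name is assumed for illustration):
//   MemoryChunk* chunk = reinterpret_cast<MemoryChunk*>(
//       reinterpret_cast<uintptr_t>(addr) & ~Page::kPageAlignmentMask);
//   bool in_new_space = (chunk->flags() & mask) != 0;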
79 
80 
81 void MacroAssembler::RememberedSetHelper(
82  Register object, // Only used for debug checks.
83  Register addr,
84  Register scratch,
85  SaveFPRegsMode save_fp,
86  MacroAssembler::RememberedSetFinalAction and_then) {
87  Label done;
88  if (FLAG_debug_code) {
89  Label ok;
90  JumpIfNotInNewSpace(object, scratch, &ok, Label::kNear);
91  int3();
92  bind(&ok);
93  }
94  // Load store buffer top.
95  ExternalReference store_buffer =
96  ExternalReference::store_buffer_top(isolate());
97  mov(scratch, Operand::StaticVariable(store_buffer));
98  // Store pointer to buffer.
99  mov(Operand(scratch, 0), addr);
100  // Increment buffer top.
101  add(scratch, Immediate(kPointerSize));
102  // Write back new top of buffer.
103  mov(Operand::StaticVariable(store_buffer), scratch);
104  // Call stub on end of buffer.
105  // Check for end of buffer.
106  test(scratch, Immediate(StoreBuffer::kStoreBufferOverflowBit));
107  if (and_then == kReturnAtEnd) {
108  Label buffer_overflowed;
109  j(not_equal, &buffer_overflowed, Label::kNear);
110  ret(0);
111  bind(&buffer_overflowed);
112  } else {
113  ASSERT(and_then == kFallThroughAtEnd);
114  j(equal, &done, Label::kNear);
115  }
116  StoreBufferOverflowStub store_buffer_overflow =
117  StoreBufferOverflowStub(save_fp);
118  CallStub(&store_buffer_overflow);
119  if (and_then == kReturnAtEnd) {
120  ret(0);
121  } else {
122  ASSERT(and_then == kFallThroughAtEnd);
123  bind(&done);
124  }
125 }
126 
127 
128 void MacroAssembler::ClampDoubleToUint8(XMMRegister input_reg,
129  XMMRegister scratch_reg,
130  Register result_reg) {
131  Label done;
132  ExternalReference zero_ref = ExternalReference::address_of_zero();
133  movdbl(scratch_reg, Operand::StaticVariable(zero_ref));
134  Set(result_reg, Immediate(0));
135  ucomisd(input_reg, scratch_reg);
136  j(below, &done, Label::kNear);
137  ExternalReference half_ref = ExternalReference::address_of_one_half();
138  movdbl(scratch_reg, Operand::StaticVariable(half_ref));
139  addsd(scratch_reg, input_reg);
140  cvttsd2si(result_reg, Operand(scratch_reg));
141  test(result_reg, Immediate(0xFFFFFF00));
142  j(zero, &done, Label::kNear);
143  Set(result_reg, Immediate(255));
144  bind(&done);
145 }
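// Behavior sketch: NaN and negative inputs take the 'below' branch (ucomisd
// reports an unordered compare with the carry flag set) and yield 0.
// In-range inputs are rounded by adding 0.5 and truncating with cvttsd2si,
// so halves round upward. Any result with bits outside the low byte
// (including the 0x80000000 pattern cvttsd2si produces on overflow) fails
// the 0xFFFFFF00 test and is clamped to 255.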
146 
147 
148 void MacroAssembler::ClampUint8(Register reg) {
149  Label done;
150  test(reg, Immediate(0xFFFFFF00));
151  j(zero, &done, Label::kNear);
152  setcc(negative, reg); // 1 if negative, 0 if positive.
153  dec_b(reg); // 0 if negative, 255 if positive.
154  bind(&done);
155 }
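// The setcc/dec_b pair is a branchless clamp for the out-of-range case: a
// negative input gives setcc = 1, which dec_b turns into 0; an input above
// 255 gives setcc = 0, which dec_b wraps to 255 in the low byte. In-range
// values skip both instructions via the early jump.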
156 
157 
158 void MacroAssembler::RecordWriteArray(Register object,
159  Register value,
160  Register index,
161  SaveFPRegsMode save_fp,
162  RememberedSetAction remembered_set_action,
163  SmiCheck smi_check) {
164  // First, check if a write barrier is even needed. The tests below
165  // catch stores of Smis.
166  Label done;
167 
168  // Skip barrier if writing a smi.
169  if (smi_check == INLINE_SMI_CHECK) {
170  ASSERT_EQ(0, kSmiTag);
171  test(value, Immediate(kSmiTagMask));
172  j(zero, &done);
173  }
174 
175  // Array access: calculate the destination address in the same manner as
176  // KeyedStoreIC::GenerateGeneric. Multiply a smi by 2 to get an offset
177  // into an array of words.
178  Register dst = index;
179  lea(dst, Operand(object, index, times_half_pointer_size,
180  FixedArray::kHeaderSize - kHeapObjectTag));
181 
182  RecordWrite(
183  object, dst, value, save_fp, remembered_set_action, OMIT_SMI_CHECK);
184 
185  bind(&done);
186 
187  // Clobber clobbered input registers when running with the debug-code flag
188  // turned on to provoke errors.
189  if (emit_debug_code()) {
190  mov(value, Immediate(BitCast<int32_t>(kZapValue)));
191  mov(index, Immediate(BitCast<int32_t>(kZapValue)));
192  }
193 }
194 
195 
196 void MacroAssembler::RecordWriteField(
197  Register object,
198  int offset,
199  Register value,
200  Register dst,
201  SaveFPRegsMode save_fp,
202  RememberedSetAction remembered_set_action,
203  SmiCheck smi_check) {
204  // First, check if a write barrier is even needed. The tests below
205  // catch stores of Smis.
206  Label done;
207 
208  // Skip barrier if writing a smi.
209  if (smi_check == INLINE_SMI_CHECK) {
210  JumpIfSmi(value, &done, Label::kNear);
211  }
212 
213  // Although the object register is tagged, the offset is relative to the
214  // start of the object, so the offset must be a multiple of kPointerSize.
215  ASSERT(IsAligned(offset, kPointerSize));
216 
217  lea(dst, FieldOperand(object, offset));
218  if (emit_debug_code()) {
219  Label ok;
220  test_b(dst, (1 << kPointerSizeLog2) - 1);
221  j(zero, &ok, Label::kNear);
222  int3();
223  bind(&ok);
224  }
225 
226  RecordWrite(
227  object, dst, value, save_fp, remembered_set_action, OMIT_SMI_CHECK);
228 
229  bind(&done);
230 
231  // Clobber clobbered input registers when running with the debug-code flag
232  // turned on to provoke errors.
233  if (emit_debug_code()) {
234  mov(value, Immediate(BitCast<int32_t>(kZapValue)));
235  mov(dst, Immediate(BitCast<int32_t>(kZapValue)));
236  }
237 }
238 
239 
240 void MacroAssembler::RecordWriteForMap(
241  Register object,
242  Handle<Map> map,
243  Register scratch1,
244  Register scratch2,
245  SaveFPRegsMode save_fp) {
246  Label done;
247 
248  Register address = scratch1;
249  Register value = scratch2;
250  if (emit_debug_code()) {
251  Label ok;
252  lea(address, FieldOperand(object, HeapObject::kMapOffset));
253  test_b(address, (1 << kPointerSizeLog2) - 1);
254  j(zero, &ok, Label::kNear);
255  int3();
256  bind(&ok);
257  }
258 
259  ASSERT(!object.is(value));
260  ASSERT(!object.is(address));
261  ASSERT(!value.is(address));
262  if (emit_debug_code()) {
263  AbortIfSmi(object);
264  }
265 
266  if (!FLAG_incremental_marking) {
267  return;
268  }
269 
270  // A single check of the map's pages interesting flag suffices, since it is
271  // only set during incremental collection, and then it's also guaranteed that
272  // the from object's page's interesting flag is also set. This optimization
273  // relies on the fact that maps can never be in new space.
274  ASSERT(!isolate()->heap()->InNewSpace(*map));
275  CheckPageFlagForMap(map,
276  MemoryChunk::kPointersToHereAreInterestingMask,
277  zero,
278  &done,
279  Label::kNear);
280 
281  // Delay the initialization of |address| and |value| for the stub until it's
282  // known that they will be needed. Up until this point their values are not
283  // needed since they are embedded in the operands of instructions that need
284  // them.
285  lea(address, FieldOperand(object, HeapObject::kMapOffset));
286  mov(value, Immediate(map));
287  RecordWriteStub stub(object, value, address, OMIT_REMEMBERED_SET, save_fp);
288  CallStub(&stub);
289 
290  bind(&done);
291 
292  // Clobber clobbered input registers when running with the debug-code flag
293  // turned on to provoke errors.
294  if (emit_debug_code()) {
295  mov(value, Immediate(BitCast<int32_t>(kZapValue)));
296  mov(scratch1, Immediate(BitCast<int32_t>(kZapValue)));
297  mov(scratch2, Immediate(BitCast<int32_t>(kZapValue)));
298  }
299 }
300 
301 
302 void MacroAssembler::RecordWrite(Register object,
303  Register address,
304  Register value,
305  SaveFPRegsMode fp_mode,
306  RememberedSetAction remembered_set_action,
307  SmiCheck smi_check) {
308  ASSERT(!object.is(value));
309  ASSERT(!object.is(address));
310  ASSERT(!value.is(address));
311  if (emit_debug_code()) {
312  AbortIfSmi(object);
313  }
314 
315  if (remembered_set_action == OMIT_REMEMBERED_SET &&
316  !FLAG_incremental_marking) {
317  return;
318  }
319 
320  if (FLAG_debug_code) {
321  Label ok;
322  cmp(value, Operand(address, 0));
323  j(equal, &ok, Label::kNear);
324  int3();
325  bind(&ok);
326  }
327 
328  // First, check if a write barrier is even needed. The tests below
329  // catch stores of Smis and stores into young gen.
330  Label done;
331 
332  if (smi_check == INLINE_SMI_CHECK) {
333  // Skip barrier if writing a smi.
334  JumpIfSmi(value, &done, Label::kNear);
335  }
336 
337  CheckPageFlag(value,
338  value, // Used as scratch.
339  MemoryChunk::kPointersToHereAreInterestingMask,
340  zero,
341  &done,
342  Label::kNear);
343  CheckPageFlag(object,
344  value, // Used as scratch.
345  MemoryChunk::kPointersFromHereAreInterestingMask,
346  zero,
347  &done,
348  Label::kNear);
349 
350  RecordWriteStub stub(object, value, address, remembered_set_action, fp_mode);
351  CallStub(&stub);
352 
353  bind(&done);
354 
355  // Clobber clobbered registers when running with the debug-code flag
356  // turned on to provoke errors.
357  if (emit_debug_code()) {
358  mov(address, Immediate(BitCast<int32_t>(kZapValue)));
359  mov(value, Immediate(BitCast<int32_t>(kZapValue)));
360  }
361 }
362 
363 
364 #ifdef ENABLE_DEBUGGER_SUPPORT
365 void MacroAssembler::DebugBreak() {
366  Set(eax, Immediate(0));
367  mov(ebx, Immediate(ExternalReference(Runtime::kDebugBreak, isolate())));
368  CEntryStub ces(1);
369  call(ces.GetCode(), RelocInfo::DEBUG_BREAK);
370 }
371 #endif
372 
373 
374 void MacroAssembler::Set(Register dst, const Immediate& x) {
375  if (x.is_zero()) {
376  xor_(dst, dst); // Shorter than mov.
377  } else {
378  mov(dst, x);
379  }
380 }
381 
382 
383 void MacroAssembler::Set(const Operand& dst, const Immediate& x) {
384  mov(dst, x);
385 }
386 
387 
388 bool MacroAssembler::IsUnsafeImmediate(const Immediate& x) {
389  static const int kMaxImmediateBits = 17;
390  if (x.rmode_ != RelocInfo::NONE) return false;
391  return !is_intn(x.x_, kMaxImmediateBits);
392 }
393 
394 
395 void MacroAssembler::SafeSet(Register dst, const Immediate& x) {
396  if (IsUnsafeImmediate(x) && jit_cookie() != 0) {
397  Set(dst, Immediate(x.x_ ^ jit_cookie()));
398  xor_(dst, jit_cookie());
399  } else {
400  Set(dst, x);
401  }
402 }
403 
404 
405 void MacroAssembler::SafePush(const Immediate& x) {
406  if (IsUnsafeImmediate(x) && jit_cookie() != 0) {
407  push(Immediate(x.x_ ^ jit_cookie()));
408  xor_(Operand(esp, 0), Immediate(jit_cookie()));
409  } else {
410  push(x);
411  }
412 }
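// SafeSet and SafePush mitigate JIT spraying: a large immediate that might
// be attacker-chosen is emitted as (value ^ jit_cookie()) and re-XORed with
// the cookie at run time, so the raw constant never appears verbatim in the
// code stream. Immediates narrower than kMaxImmediateBits are treated as
// harmless and emitted directly.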
413 
414 
415 void MacroAssembler::CompareRoot(Register with, Heap::RootListIndex index) {
416  // see ROOT_ACCESSOR macro in factory.h
417  Handle<Object> value(&isolate()->heap()->roots_array_start()[index]);
418  cmp(with, value);
419 }
420 
421 
422 void MacroAssembler::CompareRoot(const Operand& with,
423  Heap::RootListIndex index) {
424  // see ROOT_ACCESSOR macro in factory.h
425  Handle<Object> value(&isolate()->heap()->roots_array_start()[index]);
426  cmp(with, value);
427 }
428 
429 
430 void MacroAssembler::CmpObjectType(Register heap_object,
431  InstanceType type,
432  Register map) {
433  mov(map, FieldOperand(heap_object, HeapObject::kMapOffset));
434  CmpInstanceType(map, type);
435 }
436 
437 
438 void MacroAssembler::CmpInstanceType(Register map, InstanceType type) {
439  cmpb(FieldOperand(map, Map::kInstanceTypeOffset),
440  static_cast<int8_t>(type));
441 }
442 
443 
444 void MacroAssembler::CheckFastElements(Register map,
445  Label* fail,
446  Label::Distance distance) {
447  STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
448  STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
449  STATIC_ASSERT(FAST_ELEMENTS == 2);
450  STATIC_ASSERT(FAST_HOLEY_ELEMENTS == 3);
451  cmpb(FieldOperand(map, Map::kBitField2Offset),
452  Map::kMaximumBitField2FastHoleyElementValue);
453  j(above, fail, distance);
454 }
455 
456 
457 void MacroAssembler::CheckFastObjectElements(Register map,
458  Label* fail,
459  Label::Distance distance) {
460  STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
461  STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
462  STATIC_ASSERT(FAST_ELEMENTS == 2);
463  STATIC_ASSERT(FAST_HOLEY_ELEMENTS == 3);
464  cmpb(FieldOperand(map, Map::kBitField2Offset),
465  Map::kMaximumBitField2FastHoleySmiElementValue);
466  j(below_equal, fail, distance);
467  cmpb(FieldOperand(map, Map::kBitField2Offset),
468  Map::kMaximumBitField2FastHoleyElementValue);
469  j(above, fail, distance);
470 }
471 
472 
473 void MacroAssembler::CheckFastSmiElements(Register map,
474  Label* fail,
475  Label::Distance distance) {
476  STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
477  STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
478  cmpb(FieldOperand(map, Map::kBitField2Offset),
479  Map::kMaximumBitField2FastHoleySmiElementValue);
480  j(above, fail, distance);
481 }
482 
483 
484 void MacroAssembler::StoreNumberToDoubleElements(
485  Register maybe_number,
486  Register elements,
487  Register key,
488  Register scratch1,
489  XMMRegister scratch2,
490  Label* fail,
491  bool specialize_for_processor) {
492  Label smi_value, done, maybe_nan, not_nan, is_nan, have_double_value;
493  JumpIfSmi(maybe_number, &smi_value, Label::kNear);
494 
495  CheckMap(maybe_number,
496  isolate()->factory()->heap_number_map(),
497  fail,
498  DONT_DO_SMI_CHECK);
499 
500  // Double value, canonicalize NaN.
501  uint32_t offset = HeapNumber::kValueOffset + sizeof(kHoleNanLower32);
502  cmp(FieldOperand(maybe_number, offset),
503  Immediate(kNaNOrInfinityLowerBoundUpper32));
504  j(greater_equal, &maybe_nan, Label::kNear);
505 
506  bind(&not_nan);
507  ExternalReference canonical_nan_reference =
508  ExternalReference::address_of_canonical_non_hole_nan();
509  if (CpuFeatures::IsSupported(SSE2) && specialize_for_processor) {
510  CpuFeatures::Scope use_sse2(SSE2);
511  movdbl(scratch2, FieldOperand(maybe_number, HeapNumber::kValueOffset));
512  bind(&have_double_value);
513  movdbl(FieldOperand(elements, key, times_4, FixedDoubleArray::kHeaderSize),
514  scratch2);
515  } else {
516  fld_d(FieldOperand(maybe_number, HeapNumber::kValueOffset));
517  bind(&have_double_value);
518  fstp_d(FieldOperand(elements, key, times_4, FixedDoubleArray::kHeaderSize));
519  }
520  jmp(&done);
521 
522  bind(&maybe_nan);
523  // Could be NaN or Infinity. If fraction is not zero, it's NaN, otherwise
524  // it's an Infinity, and the non-NaN code path applies.
525  j(greater, &is_nan, Label::kNear);
526  cmp(FieldOperand(maybe_number, HeapNumber::kValueOffset), Immediate(0));
527  j(zero, &not_nan);
528  bind(&is_nan);
529  if (CpuFeatures::IsSupported(SSE2) && specialize_for_processor) {
530  CpuFeatures::Scope use_sse2(SSE2);
531  movdbl(scratch2, Operand::StaticVariable(canonical_nan_reference));
532  } else {
533  fld_d(Operand::StaticVariable(canonical_nan_reference));
534  }
535  jmp(&have_double_value, Label::kNear);
536 
537  bind(&smi_value);
538  // Value is a smi. Convert to a double and store.
539  // Preserve original value.
540  mov(scratch1, maybe_number);
541  SmiUntag(scratch1);
542  if (CpuFeatures::IsSupported(SSE2) && specialize_for_processor) {
543  CpuFeatures::Scope fscope(SSE2);
544  cvtsi2sd(scratch2, scratch1);
545  movdbl(FieldOperand(elements, key, times_4, FixedDoubleArray::kHeaderSize),
546  scratch2);
547  } else {
548  push(scratch1);
549  fild_s(Operand(esp, 0));
550  pop(scratch1);
551  fstp_d(FieldOperand(elements, key, times_4, FixedDoubleArray::kHeaderSize));
552  }
553  bind(&done);
554 }
555 
556 
557 void MacroAssembler::CompareMap(Register obj,
558  Handle<Map> map,
559  Label* early_success,
560  CompareMapMode mode) {
561  cmp(FieldOperand(obj, HeapObject::kMapOffset), map);
562  if (mode == ALLOW_ELEMENT_TRANSITION_MAPS) {
563  ElementsKind kind = map->elements_kind();
564  if (IsFastElementsKind(kind)) {
565  bool packed = IsFastPackedElementsKind(kind);
566  Map* current_map = *map;
567  while (CanTransitionToMoreGeneralFastElementsKind(kind, packed)) {
568  kind = GetNextMoreGeneralFastElementsKind(kind, packed);
569  current_map = current_map->LookupElementsTransitionMap(kind);
570  if (!current_map) break;
571  j(equal, early_success, Label::kNear);
572  cmp(FieldOperand(obj, HeapObject::kMapOffset),
573  Handle<Map>(current_map));
574  }
575  }
576  }
577 }
578 
579 
580 void MacroAssembler::CheckMap(Register obj,
581  Handle<Map> map,
582  Label* fail,
583  SmiCheckType smi_check_type,
584  CompareMapMode mode) {
585  if (smi_check_type == DO_SMI_CHECK) {
586  JumpIfSmi(obj, fail);
587  }
588 
589  Label success;
590  CompareMap(obj, map, &success, mode);
591  j(not_equal, fail);
592  bind(&success);
593 }
594 
595 
596 void MacroAssembler::DispatchMap(Register obj,
597  Handle<Map> map,
598  Handle<Code> success,
599  SmiCheckType smi_check_type) {
600  Label fail;
601  if (smi_check_type == DO_SMI_CHECK) {
602  JumpIfSmi(obj, &fail);
603  }
604  cmp(FieldOperand(obj, HeapObject::kMapOffset), Immediate(map));
605  j(equal, success);
606 
607  bind(&fail);
608 }
609 
610 
611 Condition MacroAssembler::IsObjectStringType(Register heap_object,
612  Register map,
613  Register instance_type) {
614  mov(map, FieldOperand(heap_object, HeapObject::kMapOffset));
615  movzx_b(instance_type, FieldOperand(map, Map::kInstanceTypeOffset));
616  STATIC_ASSERT(kNotStringTag != 0);
617  test(instance_type, Immediate(kIsNotStringMask));
618  return zero;
619 }
620 
621 
622 void MacroAssembler::IsObjectJSObjectType(Register heap_object,
623  Register map,
624  Register scratch,
625  Label* fail) {
626  mov(map, FieldOperand(heap_object, HeapObject::kMapOffset));
627  IsInstanceJSObjectType(map, scratch, fail);
628 }
629 
630 
631 void MacroAssembler::IsInstanceJSObjectType(Register map,
632  Register scratch,
633  Label* fail) {
634  movzx_b(scratch, FieldOperand(map, Map::kInstanceTypeOffset));
635  sub(scratch, Immediate(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE));
636  cmp(scratch,
637  LAST_NONCALLABLE_SPEC_OBJECT_TYPE - FIRST_NONCALLABLE_SPEC_OBJECT_TYPE);
638  j(above, fail);
639 }
640 
641 
642 void MacroAssembler::FCmp() {
643  if (CpuFeatures::IsSupported(CMOV)) {
644  fucomip();
645  fstp(0);
646  } else {
647  fucompp();
648  push(eax);
649  fnstsw_ax();
650  sahf();
651  pop(eax);
652  }
653 }
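// FCmp compares the two values on top of the x87 stack and pops both. On
// CPUs with CMOV support, fucomip sets EFLAGS directly; otherwise the FPU
// status word is routed through ax (fnstsw_ax) and sahf to reach the same
// flag state, with eax saved and restored around the sequence.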
654 
655 
656 void MacroAssembler::AbortIfNotNumber(Register object) {
657  Label ok;
658  JumpIfSmi(object, &ok);
659  cmp(FieldOperand(object, HeapObject::kMapOffset),
660  isolate()->factory()->heap_number_map());
661  Assert(equal, "Operand not a number");
662  bind(&ok);
663 }
664 
665 
666 void MacroAssembler::AbortIfNotSmi(Register object) {
667  test(object, Immediate(kSmiTagMask));
668  Assert(equal, "Operand is not a smi");
669 }
670 
671 
672 void MacroAssembler::AbortIfNotString(Register object) {
673  test(object, Immediate(kSmiTagMask));
674  Assert(not_equal, "Operand is not a string");
675  push(object);
676  mov(object, FieldOperand(object, HeapObject::kMapOffset));
677  CmpInstanceType(object, FIRST_NONSTRING_TYPE);
678  pop(object);
679  Assert(below, "Operand is not a string");
680 }
681 
682 
683 void MacroAssembler::AbortIfSmi(Register object) {
684  test(object, Immediate(kSmiTagMask));
685  Assert(not_equal, "Operand is a smi");
686 }
687 
688 
689 void MacroAssembler::EnterFrame(StackFrame::Type type) {
690  push(ebp);
691  mov(ebp, esp);
692  push(esi);
693  push(Immediate(Smi::FromInt(type)));
694  push(Immediate(CodeObject()));
695  if (emit_debug_code()) {
696  cmp(Operand(esp, 0), Immediate(isolate()->factory()->undefined_value()));
697  Check(not_equal, "code object not properly patched");
698  }
699 }
700 
701 
702 void MacroAssembler::LeaveFrame(StackFrame::Type type) {
703  if (emit_debug_code()) {
704  cmp(Operand(ebp, StandardFrameConstants::kMarkerOffset),
705  Immediate(Smi::FromInt(type)));
706  Check(equal, "stack frame types must match");
707  }
708  leave();
709 }
710 
711 
712 void MacroAssembler::EnterExitFramePrologue() {
713  // Set up the frame structure on the stack.
714  ASSERT(ExitFrameConstants::kCallerSPDisplacement == +2 * kPointerSize);
715  ASSERT(ExitFrameConstants::kCallerPCOffset == +1 * kPointerSize);
716  ASSERT(ExitFrameConstants::kCallerFPOffset == 0 * kPointerSize);
717  push(ebp);
718  mov(ebp, esp);
719 
720  // Reserve room for entry stack pointer and push the code object.
721  ASSERT(ExitFrameConstants::kSPOffset == -1 * kPointerSize);
722  push(Immediate(0)); // Saved entry sp, patched before call.
723  push(Immediate(CodeObject())); // Accessed from ExitFrame::code_slot.
724 
725  // Save the frame pointer and the context in top.
726  ExternalReference c_entry_fp_address(Isolate::kCEntryFPAddress,
727  isolate());
728  ExternalReference context_address(Isolate::kContextAddress,
729  isolate());
730  mov(Operand::StaticVariable(c_entry_fp_address), ebp);
731  mov(Operand::StaticVariable(context_address), esi);
732 }
733 
734 
735 void MacroAssembler::EnterExitFrameEpilogue(int argc, bool save_doubles) {
736  // Optionally save all XMM registers.
737  if (save_doubles) {
738  CpuFeatures::Scope scope(SSE2);
739  int space = XMMRegister::kNumRegisters * kDoubleSize + argc * kPointerSize;
740  sub(esp, Immediate(space));
741  const int offset = -2 * kPointerSize;
742  for (int i = 0; i < XMMRegister::kNumRegisters; i++) {
743  XMMRegister reg = XMMRegister::from_code(i);
744  movdbl(Operand(ebp, offset - ((i + 1) * kDoubleSize)), reg);
745  }
746  } else {
747  sub(esp, Immediate(argc * kPointerSize));
748  }
749 
750  // Get the required frame alignment for the OS.
751  const int kFrameAlignment = OS::ActivationFrameAlignment();
752  if (kFrameAlignment > 0) {
753  ASSERT(IsPowerOf2(kFrameAlignment));
754  and_(esp, -kFrameAlignment);
755  }
756 
757  // Patch the saved entry sp.
758  mov(Operand(ebp, ExitFrameConstants::kSPOffset), esp);
759 }
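// Note on the alignment step above: kFrameAlignment is a power of two, so
// -kFrameAlignment is a mask with the low bits clear, and
// and_(esp, -kFrameAlignment) rounds esp down to the required boundary
// (for example, 'and esp, -16' clears the low four bits).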
760 
761 
762 void MacroAssembler::EnterExitFrame(bool save_doubles) {
763  EnterExitFramePrologue();
764 
765  // Set up argc and argv in callee-saved registers.
766  int offset = StandardFrameConstants::kCallerSPOffset - kPointerSize;
767  mov(edi, eax);
768  lea(esi, Operand(ebp, eax, times_4, offset));
769 
770  // Reserve space for argc, argv and isolate.
771  EnterExitFrameEpilogue(3, save_doubles);
772 }
773 
774 
775 void MacroAssembler::EnterApiExitFrame(int argc) {
776  EnterExitFramePrologue();
777  EnterExitFrameEpilogue(argc, false);
778 }
779 
780 
781 void MacroAssembler::LeaveExitFrame(bool save_doubles) {
782  // Optionally restore all XMM registers.
783  if (save_doubles) {
784  CpuFeatures::Scope scope(SSE2);
785  const int offset = -2 * kPointerSize;
786  for (int i = 0; i < XMMRegister::kNumRegisters; i++) {
787  XMMRegister reg = XMMRegister::from_code(i);
788  movdbl(reg, Operand(ebp, offset - ((i + 1) * kDoubleSize)));
789  }
790  }
791 
792  // Get the return address from the stack and restore the frame pointer.
793  mov(ecx, Operand(ebp, 1 * kPointerSize));
794  mov(ebp, Operand(ebp, 0 * kPointerSize));
795 
796  // Pop the arguments and the receiver from the caller stack.
797  lea(esp, Operand(esi, 1 * kPointerSize));
798 
799  // Push the return address to get ready to return.
800  push(ecx);
801 
802  LeaveExitFrameEpilogue();
803 }
804 
805 void MacroAssembler::LeaveExitFrameEpilogue() {
806  // Restore current context from top and clear it in debug mode.
807  ExternalReference context_address(Isolate::kContextAddress, isolate());
808  mov(esi, Operand::StaticVariable(context_address));
809 #ifdef DEBUG
810  mov(Operand::StaticVariable(context_address), Immediate(0));
811 #endif
812 
813  // Clear the top frame.
814  ExternalReference c_entry_fp_address(Isolate::kCEntryFPAddress,
815  isolate());
816  mov(Operand::StaticVariable(c_entry_fp_address), Immediate(0));
817 }
818 
819 
820 void MacroAssembler::LeaveApiExitFrame() {
821  mov(esp, ebp);
822  pop(ebp);
823 
824  LeaveExitFrameEpilogue();
825 }
826 
827 
828 void MacroAssembler::PushTryHandler(StackHandler::Kind kind,
829  int handler_index) {
830  // Adjust this code if not the case.
831  STATIC_ASSERT(StackHandlerConstants::kSize == 5 * kPointerSize);
832  STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
833  STATIC_ASSERT(StackHandlerConstants::kCodeOffset == 1 * kPointerSize);
834  STATIC_ASSERT(StackHandlerConstants::kStateOffset == 2 * kPointerSize);
835  STATIC_ASSERT(StackHandlerConstants::kContextOffset == 3 * kPointerSize);
836  STATIC_ASSERT(StackHandlerConstants::kFPOffset == 4 * kPointerSize);
837 
838  // We will build up the handler from the bottom by pushing on the stack.
839  // First push the frame pointer and context.
840  if (kind == StackHandler::JS_ENTRY) {
841  // The frame pointer does not point to a JS frame so we save NULL for
842  // ebp. We expect the code throwing an exception to check ebp before
843  // dereferencing it to restore the context.
844  push(Immediate(0)); // NULL frame pointer.
845  push(Immediate(Smi::FromInt(0))); // No context.
846  } else {
847  push(ebp);
848  push(esi);
849  }
850  // Push the state and the code object.
851  unsigned state =
852  StackHandler::IndexField::encode(handler_index) |
853  StackHandler::KindField::encode(kind);
854  push(Immediate(state));
855  Push(CodeObject());
856 
857  // Link the current handler as the next handler.
858  ExternalReference handler_address(Isolate::kHandlerAddress, isolate());
859  push(Operand::StaticVariable(handler_address));
860  // Set this new handler as the current one.
861  mov(Operand::StaticVariable(handler_address), esp);
862 }
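// After these pushes, the handler laid out on the stack matches the
// StackHandlerConstants offsets asserted at the top of this function:
//   [esp + 0 * kPointerSize]  next handler (link to the previous handler)
//   [esp + 1 * kPointerSize]  code object
//   [esp + 2 * kPointerSize]  state (handler index and kind)
//   [esp + 3 * kPointerSize]  context (esi, or Smi 0 for JS_ENTRY)
//   [esp + 4 * kPointerSize]  frame pointer (ebp, or NULL for JS_ENTRY)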
863 
864 
865 void MacroAssembler::PopTryHandler() {
866  STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
867  ExternalReference handler_address(Isolate::kHandlerAddress, isolate());
868  pop(Operand::StaticVariable(handler_address));
869  add(esp, Immediate(StackHandlerConstants::kSize - kPointerSize));
870 }
871 
872 
873 void MacroAssembler::JumpToHandlerEntry() {
874  // Compute the handler entry address and jump to it. The handler table is
875  // a fixed array of (smi-tagged) code offsets.
876  // eax = exception, edi = code object, edx = state.
877  mov(ebx, FieldOperand(edi, Code::kHandlerTableOffset));
878  shr(edx, StackHandler::kKindWidth);
879  mov(edx, FieldOperand(ebx, edx, times_4, FixedArray::kHeaderSize));
880  SmiUntag(edx);
881  lea(edi, FieldOperand(edi, edx, times_1, Code::kHeaderSize));
882  jmp(edi);
883 }
884 
885 
886 void MacroAssembler::Throw(Register value) {
887  // Adjust this code if not the case.
888  STATIC_ASSERT(StackHandlerConstants::kSize == 5 * kPointerSize);
889  STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
890  STATIC_ASSERT(StackHandlerConstants::kCodeOffset == 1 * kPointerSize);
891  STATIC_ASSERT(StackHandlerConstants::kStateOffset == 2 * kPointerSize);
892  STATIC_ASSERT(StackHandlerConstants::kContextOffset == 3 * kPointerSize);
893  STATIC_ASSERT(StackHandlerConstants::kFPOffset == 4 * kPointerSize);
894 
895  // The exception is expected in eax.
896  if (!value.is(eax)) {
897  mov(eax, value);
898  }
899  // Drop the stack pointer to the top of the top handler.
900  ExternalReference handler_address(Isolate::kHandlerAddress, isolate());
901  mov(esp, Operand::StaticVariable(handler_address));
902  // Restore the next handler.
903  pop(Operand::StaticVariable(handler_address));
904 
905  // Remove the code object and state, compute the handler address in edi.
906  pop(edi); // Code object.
907  pop(edx); // Index and state.
908 
909  // Restore the context and frame pointer.
910  pop(esi); // Context.
911  pop(ebp); // Frame pointer.
912 
913  // If the handler is a JS frame, restore the context to the frame.
914  // (kind == ENTRY) == (ebp == 0) == (esi == 0), so we could test either
915  // ebp or esi.
916  Label skip;
917  test(esi, esi);
918  j(zero, &skip, Label::kNear);
919  mov(Operand(ebp, StandardFrameConstants::kContextOffset), esi);
920  bind(&skip);
921 
922  JumpToHandlerEntry();
923 }
924 
925 
926 void MacroAssembler::ThrowUncatchable(Register value) {
927  // Adjust this code if not the case.
928  STATIC_ASSERT(StackHandlerConstants::kSize == 5 * kPointerSize);
929  STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
930  STATIC_ASSERT(StackHandlerConstants::kCodeOffset == 1 * kPointerSize);
931  STATIC_ASSERT(StackHandlerConstants::kStateOffset == 2 * kPointerSize);
932  STATIC_ASSERT(StackHandlerConstants::kContextOffset == 3 * kPointerSize);
933  STATIC_ASSERT(StackHandlerConstants::kFPOffset == 4 * kPointerSize);
934 
935  // The exception is expected in eax.
936  if (!value.is(eax)) {
937  mov(eax, value);
938  }
939  // Drop the stack pointer to the top of the top stack handler.
940  ExternalReference handler_address(Isolate::kHandlerAddress, isolate());
941  mov(esp, Operand::StaticVariable(handler_address));
942 
943  // Unwind the handlers until the top ENTRY handler is found.
944  Label fetch_next, check_kind;
945  jmp(&check_kind, Label::kNear);
946  bind(&fetch_next);
947  mov(esp, Operand(esp, StackHandlerConstants::kNextOffset));
948 
949  bind(&check_kind);
950  STATIC_ASSERT(StackHandler::JS_ENTRY == 0);
951  test(Operand(esp, StackHandlerConstants::kStateOffset),
952  Immediate(StackHandler::KindField::kMask));
953  j(not_zero, &fetch_next);
954 
955  // Set the top handler address to next handler past the top ENTRY handler.
956  pop(Operand::StaticVariable(handler_address));
957 
958  // Remove the code object and state, compute the handler address in edi.
959  pop(edi); // Code object.
960  pop(edx); // Index and state.
961 
962  // Clear the context pointer and frame pointer (0 was saved in the handler).
963  pop(esi);
964  pop(ebp);
965 
966  JumpToHandlerEntry();
967 }
968 
969 
970 void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg,
971  Register scratch,
972  Label* miss) {
973  Label same_contexts;
974 
975  ASSERT(!holder_reg.is(scratch));
976 
977  // Load current lexical context from the stack frame.
978  mov(scratch, Operand(ebp, StandardFrameConstants::kContextOffset));
979 
980  // When generating debug code, make sure the lexical context is set.
981  if (emit_debug_code()) {
982  cmp(scratch, Immediate(0));
983  Check(not_equal, "we should not have an empty lexical context");
984  }
985  // Load the global context of the current context.
986  int offset = Context::kHeaderSize + Context::GLOBAL_INDEX * kPointerSize;
987  mov(scratch, FieldOperand(scratch, offset));
988  mov(scratch, FieldOperand(scratch, GlobalObject::kGlobalContextOffset));
989 
990  // Check the context is a global context.
991  if (emit_debug_code()) {
992  push(scratch);
993  // Read the first word and compare to global_context_map.
994  mov(scratch, FieldOperand(scratch, HeapObject::kMapOffset));
995  cmp(scratch, isolate()->factory()->global_context_map());
996  Check(equal, "JSGlobalObject::global_context should be a global context.");
997  pop(scratch);
998  }
999 
1000  // Check if both contexts are the same.
1001  cmp(scratch, FieldOperand(holder_reg, JSGlobalProxy::kContextOffset));
1002  j(equal, &same_contexts);
1003 
1004  // Compare security tokens, save holder_reg on the stack so we can use it
1005  // as a temporary register.
1006  //
1007  // TODO(119): avoid push(holder_reg)/pop(holder_reg)
1008  push(holder_reg);
1009  // Check that the security token in the calling global object is
1010  // compatible with the security token in the receiving global
1011  // object.
1012  mov(holder_reg, FieldOperand(holder_reg, JSGlobalProxy::kContextOffset));
1013 
1014  // Check the context is a global context.
1015  if (emit_debug_code()) {
1016  cmp(holder_reg, isolate()->factory()->null_value());
1017  Check(not_equal, "JSGlobalProxy::context() should not be null.");
1018 
1019  push(holder_reg);
1020  // Read the first word and compare to global_context_map(),
1021  mov(holder_reg, FieldOperand(holder_reg, HeapObject::kMapOffset));
1022  cmp(holder_reg, isolate()->factory()->global_context_map());
1023  Check(equal, "JSGlobalObject::global_context should be a global context.");
1024  pop(holder_reg);
1025  }
1026 
1027  int token_offset = Context::kHeaderSize +
1028  Context::SECURITY_TOKEN_INDEX * kPointerSize;
1029  mov(scratch, FieldOperand(scratch, token_offset));
1030  cmp(scratch, FieldOperand(holder_reg, token_offset));
1031  pop(holder_reg);
1032  j(not_equal, miss);
1033 
1034  bind(&same_contexts);
1035 }
1036 
1037 
1038 // Compute the hash code from the untagged key. This must be kept in sync
1039 // with ComputeIntegerHash in utils.h.
1040 //
1041 // Note: r0 will contain hash code
1042 void MacroAssembler::GetNumberHash(Register r0, Register scratch) {
1043  // Xor original key with a seed.
1044  if (Serializer::enabled()) {
1045  ExternalReference roots_array_start =
1046  ExternalReference::roots_array_start(isolate());
1047  mov(scratch, Immediate(Heap::kHashSeedRootIndex));
1048  mov(scratch,
1049  Operand::StaticArray(scratch, times_pointer_size, roots_array_start));
1050  SmiUntag(scratch);
1051  xor_(r0, scratch);
1052  } else {
1053  int32_t seed = isolate()->heap()->HashSeed();
1054  xor_(r0, Immediate(seed));
1055  }
1056 
1057  // hash = ~hash + (hash << 15);
1058  mov(scratch, r0);
1059  not_(r0);
1060  shl(scratch, 15);
1061  add(r0, scratch);
1062  // hash = hash ^ (hash >> 12);
1063  mov(scratch, r0);
1064  shr(scratch, 12);
1065  xor_(r0, scratch);
1066  // hash = hash + (hash << 2);
1067  lea(r0, Operand(r0, r0, times_4, 0));
1068  // hash = hash ^ (hash >> 4);
1069  mov(scratch, r0);
1070  shr(scratch, 4);
1071  xor_(r0, scratch);
1072  // hash = hash * 2057;
1073  imul(r0, r0, 2057);
1074  // hash = hash ^ (hash >> 16);
1075  mov(scratch, r0);
1076  shr(scratch, 16);
1077  xor_(r0, scratch);
1078 }
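// For reference, the scalar computation mirrored above (a sketch of the
// ComputeIntegerHash routine this code must stay in sync with):
//   uint32_t hash = key ^ seed;
//   hash = ~hash + (hash << 15);
//   hash = hash ^ (hash >> 12);
//   hash = hash + (hash << 2);  // the lea above computes hash * 5
//   hash = hash ^ (hash >> 4);
//   hash = hash * 2057;
//   hash = hash ^ (hash >> 16);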
1079 
1080 
1081 
1082 void MacroAssembler::LoadFromNumberDictionary(Label* miss,
1083  Register elements,
1084  Register key,
1085  Register r0,
1086  Register r1,
1087  Register r2,
1088  Register result) {
1089  // Register use:
1090  //
1091  // elements - holds the slow-case elements of the receiver and is unchanged.
1092  //
1093  // key - holds the smi key on entry and is unchanged.
1094  //
1095  // Scratch registers:
1096  //
1097  // r0 - holds the untagged key on entry and holds the hash once computed.
1098  //
1099  // r1 - used to hold the capacity mask of the dictionary
1100  //
1101  // r2 - used for the index into the dictionary.
1102  //
1103  // result - holds the result on exit if the load succeeds and we fall through.
1104 
1105  Label done;
1106 
1107  GetNumberHash(r0, r1);
1108 
1109  // Compute capacity mask.
1110  mov(r1, FieldOperand(elements, SeededNumberDictionary::kCapacityOffset));
1111  shr(r1, kSmiTagSize); // convert smi to int
1112  dec(r1);
1113 
1114  // Generate an unrolled loop that performs a few probes before giving up.
1115  const int kProbes = 4;
1116  for (int i = 0; i < kProbes; i++) {
1117  // Use r2 for index calculations and keep the hash intact in r0.
1118  mov(r2, r0);
1119  // Compute the masked index: (hash + i + i * i) & mask.
1120  if (i > 0) {
1121  add(r2, Immediate(SeededNumberDictionary::GetProbeOffset(i)));
1122  }
1123  and_(r2, r1);
1124 
1125  // Scale the index by multiplying by the entry size.
1126  ASSERT(SeededNumberDictionary::kEntrySize == 3);
1127  lea(r2, Operand(r2, r2, times_2, 0)); // r2 = r2 * 3
1128 
1129  // Check if the key matches.
1130  cmp(key, FieldOperand(elements,
1131  r2,
1132  times_pointer_size,
1133  SeededNumberDictionary::kElementsStartOffset));
1134  if (i != (kProbes - 1)) {
1135  j(equal, &done);
1136  } else {
1137  j(not_equal, miss);
1138  }
1139  }
1140 
1141  bind(&done);
1142  // Check that the value is a normal property.
1143  const int kDetailsOffset =
1144  SeededNumberDictionary::kElementsStartOffset + 2 * kPointerSize;
1145  ASSERT_EQ(NORMAL, 0);
1146  test(FieldOperand(elements, r2, times_pointer_size, kDetailsOffset),
1147  Immediate(PropertyDetails::TypeField::kMask << kSmiTagSize));
1148  j(not_zero, miss);
1149 
1150  // Get the value at the masked, scaled index.
1151  const int kValueOffset =
1152  SeededNumberDictionary::kElementsStartOffset + kPointerSize;
1153  mov(result, FieldOperand(elements, r2, times_pointer_size, kValueOffset));
1154 }
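// The unrolled loop above performs at most kProbes rounds of the
// dictionary's quadratic probing, (hash + i + i*i) & mask. Rather than
// looping indefinitely, the final probe branches to the caller's miss
// label, leaving rare long probe sequences to the slow path.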
1155 
1156 
1157 void MacroAssembler::LoadAllocationTopHelper(Register result,
1158  Register scratch,
1159  AllocationFlags flags) {
1160  ExternalReference new_space_allocation_top =
1161  ExternalReference::new_space_allocation_top_address(isolate());
1162 
1163  // Just return if allocation top is already known.
1164  if ((flags & RESULT_CONTAINS_TOP) != 0) {
1165  // No use of scratch if allocation top is provided.
1166  ASSERT(scratch.is(no_reg));
1167 #ifdef DEBUG
1168  // Assert that result actually contains top on entry.
1169  cmp(result, Operand::StaticVariable(new_space_allocation_top));
1170  Check(equal, "Unexpected allocation top");
1171 #endif
1172  return;
1173  }
1174 
1175  // Move address of new object to result. Use scratch register if available.
1176  if (scratch.is(no_reg)) {
1177  mov(result, Operand::StaticVariable(new_space_allocation_top));
1178  } else {
1179  mov(scratch, Immediate(new_space_allocation_top));
1180  mov(result, Operand(scratch, 0));
1181  }
1182 }
1183 
1184 
1185 void MacroAssembler::UpdateAllocationTopHelper(Register result_end,
1186  Register scratch) {
1187  if (emit_debug_code()) {
1188  test(result_end, Immediate(kObjectAlignmentMask));
1189  Check(zero, "Unaligned allocation in new space");
1190  }
1191 
1192  ExternalReference new_space_allocation_top =
1193  ExternalReference::new_space_allocation_top_address(isolate());
1194 
1195  // Update new top. Use scratch if available.
1196  if (scratch.is(no_reg)) {
1197  mov(Operand::StaticVariable(new_space_allocation_top), result_end);
1198  } else {
1199  mov(Operand(scratch, 0), result_end);
1200  }
1201 }
1202 
1203 
1204 void MacroAssembler::AllocateInNewSpace(int object_size,
1205  Register result,
1206  Register result_end,
1207  Register scratch,
1208  Label* gc_required,
1209  AllocationFlags flags) {
1210  if (!FLAG_inline_new) {
1211  if (emit_debug_code()) {
1212  // Trash the registers to simulate an allocation failure.
1213  mov(result, Immediate(0x7091));
1214  if (result_end.is_valid()) {
1215  mov(result_end, Immediate(0x7191));
1216  }
1217  if (scratch.is_valid()) {
1218  mov(scratch, Immediate(0x7291));
1219  }
1220  }
1221  jmp(gc_required);
1222  return;
1223  }
1224  ASSERT(!result.is(result_end));
1225 
1226  // Load address of new object into result.
1227  LoadAllocationTopHelper(result, scratch, flags);
1228 
1229  Register top_reg = result_end.is_valid() ? result_end : result;
1230 
1231  // Calculate new top and bail out if new space is exhausted.
1232  ExternalReference new_space_allocation_limit =
1233  ExternalReference::new_space_allocation_limit_address(isolate());
1234 
1235  if (!top_reg.is(result)) {
1236  mov(top_reg, result);
1237  }
1238  add(top_reg, Immediate(object_size));
1239  j(carry, gc_required);
1240  cmp(top_reg, Operand::StaticVariable(new_space_allocation_limit));
1241  j(above, gc_required);
1242 
1243  // Update allocation top.
1244  UpdateAllocationTopHelper(top_reg, scratch);
1245 
1246  // Tag result if requested.
1247  if (top_reg.is(result)) {
1248  if ((flags & TAG_OBJECT) != 0) {
1249  sub(result, Immediate(object_size - kHeapObjectTag));
1250  } else {
1251  sub(result, Immediate(object_size));
1252  }
1253  } else if ((flags & TAG_OBJECT) != 0) {
1254  add(result, Immediate(kHeapObjectTag));
1255  }
1256 }
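// This is bump-pointer allocation; in outline (ignoring the case where
// top_reg aliases result):
//   top = *new_space_allocation_top;
//   new_top = top + object_size;
//   if (overflow || new_top > *new_space_allocation_limit) goto gc_required;
//   *new_space_allocation_top = new_top;
//   result = top, plus kHeapObjectTag if TAG_OBJECT was requested.
// When top_reg is result, the object size is subtracted back out at the end
// to recover the start address.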
1257 
1258 
1259 void MacroAssembler::AllocateInNewSpace(int header_size,
1260  ScaleFactor element_size,
1261  Register element_count,
1262  Register result,
1263  Register result_end,
1264  Register scratch,
1265  Label* gc_required,
1266  AllocationFlags flags) {
1267  if (!FLAG_inline_new) {
1268  if (emit_debug_code()) {
1269  // Trash the registers to simulate an allocation failure.
1270  mov(result, Immediate(0x7091));
1271  mov(result_end, Immediate(0x7191));
1272  if (scratch.is_valid()) {
1273  mov(scratch, Immediate(0x7291));
1274  }
1275  // Register element_count is not modified by the function.
1276  }
1277  jmp(gc_required);
1278  return;
1279  }
1280  ASSERT(!result.is(result_end));
1281 
1282  // Load address of new object into result.
1283  LoadAllocationTopHelper(result, scratch, flags);
1284 
1285  // Calculate new top and bail out if new space is exhausted.
1286  ExternalReference new_space_allocation_limit =
1287  ExternalReference::new_space_allocation_limit_address(isolate());
1288 
1289  // We assume that element_count*element_size + header_size does not
1290  // overflow.
1291  lea(result_end, Operand(element_count, element_size, header_size));
1292  add(result_end, result);
1293  j(carry, gc_required);
1294  cmp(result_end, Operand::StaticVariable(new_space_allocation_limit));
1295  j(above, gc_required);
1296 
1297  // Tag result if requested.
1298  if ((flags & TAG_OBJECT) != 0) {
1299  lea(result, Operand(result, kHeapObjectTag));
1300  }
1301 
1302  // Update allocation top.
1303  UpdateAllocationTopHelper(result_end, scratch);
1304 }
1305 
1306 
1307 void MacroAssembler::AllocateInNewSpace(Register object_size,
1308  Register result,
1309  Register result_end,
1310  Register scratch,
1311  Label* gc_required,
1312  AllocationFlags flags) {
1313  if (!FLAG_inline_new) {
1314  if (emit_debug_code()) {
1315  // Trash the registers to simulate an allocation failure.
1316  mov(result, Immediate(0x7091));
1317  mov(result_end, Immediate(0x7191));
1318  if (scratch.is_valid()) {
1319  mov(scratch, Immediate(0x7291));
1320  }
1321  // object_size is left unchanged by this function.
1322  }
1323  jmp(gc_required);
1324  return;
1325  }
1326  ASSERT(!result.is(result_end));
1327 
1328  // Load address of new object into result.
1329  LoadAllocationTopHelper(result, scratch, flags);
1330 
1331  // Calculate new top and bail out if new space is exhausted.
1332  ExternalReference new_space_allocation_limit =
1333  ExternalReference::new_space_allocation_limit_address(isolate());
1334  if (!object_size.is(result_end)) {
1335  mov(result_end, object_size);
1336  }
1337  add(result_end, result);
1338  j(carry, gc_required);
1339  cmp(result_end, Operand::StaticVariable(new_space_allocation_limit));
1340  j(above, gc_required);
1341 
1342  // Tag result if requested.
1343  if ((flags & TAG_OBJECT) != 0) {
1344  lea(result, Operand(result, kHeapObjectTag));
1345  }
1346 
1347  // Update allocation top.
1348  UpdateAllocationTopHelper(result_end, scratch);
1349 }
1350 
1351 
1352 void MacroAssembler::UndoAllocationInNewSpace(Register object) {
1353  ExternalReference new_space_allocation_top =
1354  ExternalReference::new_space_allocation_top_address(isolate());
1355 
1356  // Make sure the object has no tag before resetting top.
1357  and_(object, Immediate(~kHeapObjectTagMask));
1358 #ifdef DEBUG
1359  cmp(object, Operand::StaticVariable(new_space_allocation_top));
1360  Check(below, "Undo allocation of non allocated memory");
1361 #endif
1362  mov(Operand::StaticVariable(new_space_allocation_top), object);
1363 }
1364 
1365 
1366 void MacroAssembler::AllocateHeapNumber(Register result,
1367  Register scratch1,
1368  Register scratch2,
1369  Label* gc_required) {
1370  // Allocate heap number in new space.
1371  AllocateInNewSpace(HeapNumber::kSize,
1372  result,
1373  scratch1,
1374  scratch2,
1375  gc_required,
1376  TAG_OBJECT);
1377 
1378  // Set the map.
1379  mov(FieldOperand(result, HeapObject::kMapOffset),
1380  Immediate(isolate()->factory()->heap_number_map()));
1381 }
1382 
1383 
1384 void MacroAssembler::AllocateTwoByteString(Register result,
1385  Register length,
1386  Register scratch1,
1387  Register scratch2,
1388  Register scratch3,
1389  Label* gc_required) {
1390  // Calculate the number of bytes needed for the characters in the string while
1391  // observing object alignment.
1392  ASSERT((SeqTwoByteString::kHeaderSize & kObjectAlignmentMask) == 0);
1393  ASSERT(kShortSize == 2);
1394  // scratch1 = length * 2 + kObjectAlignmentMask.
1395  lea(scratch1, Operand(length, length, times_1, kObjectAlignmentMask));
1396  and_(scratch1, Immediate(~kObjectAlignmentMask));
1397 
1398  // Allocate two byte string in new space.
1399  AllocateInNewSpace(SeqTwoByteString::kHeaderSize,
1400  times_1,
1401  scratch1,
1402  result,
1403  scratch2,
1404  scratch3,
1405  gc_required,
1406  TAG_OBJECT);
1407 
1408  // Set the map, length and hash field.
1409  mov(FieldOperand(result, HeapObject::kMapOffset),
1410  Immediate(isolate()->factory()->string_map()));
1411  mov(scratch1, length);
1412  SmiTag(scratch1);
1413  mov(FieldOperand(result, String::kLengthOffset), scratch1);
1414  mov(FieldOperand(result, String::kHashFieldOffset),
1415  Immediate(String::kEmptyHashField));
1416 }
1417 
1418 
1419 void MacroAssembler::AllocateAsciiString(Register result,
1420  Register length,
1421  Register scratch1,
1422  Register scratch2,
1423  Register scratch3,
1424  Label* gc_required) {
1425  // Calculate the number of bytes needed for the characters in the string while
1426  // observing object alignment.
1427  ASSERT((SeqAsciiString::kHeaderSize & kObjectAlignmentMask) == 0);
1428  mov(scratch1, length);
1429  ASSERT(kCharSize == 1);
1430  add(scratch1, Immediate(kObjectAlignmentMask));
1431  and_(scratch1, Immediate(~kObjectAlignmentMask));
1432 
1433  // Allocate ASCII string in new space.
1434  AllocateInNewSpace(SeqAsciiString::kHeaderSize,
1435  times_1,
1436  scratch1,
1437  result,
1438  scratch2,
1439  scratch3,
1440  gc_required,
1441  TAG_OBJECT);
1442 
1443  // Set the map, length and hash field.
1444  mov(FieldOperand(result, HeapObject::kMapOffset),
1445  Immediate(isolate()->factory()->ascii_string_map()));
1446  mov(scratch1, length);
1447  SmiTag(scratch1);
1448  mov(FieldOperand(result, String::kLengthOffset), scratch1);
1449  mov(FieldOperand(result, String::kHashFieldOffset),
1450  Immediate(String::kEmptyHashField));
1451 }
1452 
1453 
1454 void MacroAssembler::AllocateAsciiString(Register result,
1455  int length,
1456  Register scratch1,
1457  Register scratch2,
1458  Label* gc_required) {
1459  ASSERT(length > 0);
1460 
1461  // Allocate ASCII string in new space.
1462  AllocateInNewSpace(SeqAsciiString::SizeFor(length),
1463  result,
1464  scratch1,
1465  scratch2,
1466  gc_required,
1467  TAG_OBJECT);
1468 
1469  // Set the map, length and hash field.
1470  mov(FieldOperand(result, HeapObject::kMapOffset),
1471  Immediate(isolate()->factory()->ascii_string_map()));
1472  mov(FieldOperand(result, String::kLengthOffset),
1473  Immediate(Smi::FromInt(length)));
1474  mov(FieldOperand(result, String::kHashFieldOffset),
1475  Immediate(String::kEmptyHashField));
1476 }
1477 
1478 
1479 void MacroAssembler::AllocateTwoByteConsString(Register result,
1480  Register scratch1,
1481  Register scratch2,
1482  Label* gc_required) {
1483  // Allocate heap number in new space.
1484  AllocateInNewSpace(ConsString::kSize,
1485  result,
1486  scratch1,
1487  scratch2,
1488  gc_required,
1489  TAG_OBJECT);
1490 
1491  // Set the map. The other fields are left uninitialized.
1492  mov(FieldOperand(result, HeapObject::kMapOffset),
1493  Immediate(isolate()->factory()->cons_string_map()));
1494 }
1495 
1496 
1497 void MacroAssembler::AllocateAsciiConsString(Register result,
1498  Register scratch1,
1499  Register scratch2,
1500  Label* gc_required) {
1501  // Allocate heap number in new space.
1502  AllocateInNewSpace(ConsString::kSize,
1503  result,
1504  scratch1,
1505  scratch2,
1506  gc_required,
1507  TAG_OBJECT);
1508 
1509  // Set the map. The other fields are left uninitialized.
1510  mov(FieldOperand(result, HeapObject::kMapOffset),
1511  Immediate(isolate()->factory()->cons_ascii_string_map()));
1512 }
1513 
1514 
1515 void MacroAssembler::AllocateTwoByteSlicedString(Register result,
1516  Register scratch1,
1517  Register scratch2,
1518  Label* gc_required) {
1519  // Allocate heap number in new space.
1520  AllocateInNewSpace(SlicedString::kSize,
1521  result,
1522  scratch1,
1523  scratch2,
1524  gc_required,
1525  TAG_OBJECT);
1526 
1527  // Set the map. The other fields are left uninitialized.
1528  mov(FieldOperand(result, HeapObject::kMapOffset),
1529  Immediate(isolate()->factory()->sliced_string_map()));
1530 }
1531 
1532 
1533 void MacroAssembler::AllocateAsciiSlicedString(Register result,
1534  Register scratch1,
1535  Register scratch2,
1536  Label* gc_required) {
1537  // Allocate heap number in new space.
1538  AllocateInNewSpace(SlicedString::kSize,
1539  result,
1540  scratch1,
1541  scratch2,
1542  gc_required,
1543  TAG_OBJECT);
1544 
1545  // Set the map. The other fields are left uninitialized.
1546  mov(FieldOperand(result, HeapObject::kMapOffset),
1547  Immediate(isolate()->factory()->sliced_ascii_string_map()));
1548 }
1549 
1550 
1551 // Copy memory, byte-by-byte, from source to destination. Not optimized for
1552 // long or aligned copies. The contents of scratch and length are destroyed.
1553 // Source and destination are incremented by length.
1554 // Many variants of movsb, loop unrolling, word moves, and indexed operands
1555 // have been tried here already, and this is fastest.
1556 // A simpler loop is faster on small copies, but 30% slower on large ones.
1557  // The cld() instruction must have been emitted, to set the direction flag,
1558 // before calling this function.
1559 void MacroAssembler::CopyBytes(Register source,
1560  Register destination,
1561  Register length,
1562  Register scratch) {
1563  Label loop, done, short_string, short_loop;
1564  // Experimentation shows that the short string loop is faster if length < 10.
1565  cmp(length, Immediate(10));
1566  j(less_equal, &short_string);
1567 
1568  ASSERT(source.is(esi));
1569  ASSERT(destination.is(edi));
1570  ASSERT(length.is(ecx));
1571 
1572  // Because source is 4-byte aligned in our uses of this function,
1573  // we keep source aligned for the rep_movs call by copying the odd bytes
1574  // at the end of the ranges.
1575  mov(scratch, Operand(source, length, times_1, -4));
1576  mov(Operand(destination, length, times_1, -4), scratch);
1577  mov(scratch, ecx);
1578  shr(ecx, 2);
1579  rep_movs();
1580  and_(scratch, Immediate(0x3));
1581  add(destination, scratch);
1582  jmp(&done);
1583 
1584  bind(&short_string);
1585  test(length, length);
1586  j(zero, &done);
1587 
1588  bind(&short_loop);
1589  mov_b(scratch, Operand(source, 0));
1590  mov_b(Operand(destination, 0), scratch);
1591  inc(source);
1592  inc(destination);
1593  dec(length);
1594  j(not_zero, &short_loop);
1595 
1596  bind(&done);
1597 }
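// Worked example for the long-string path with length == 10: the first two
// movs copy bytes 6..9 (the possibly unaligned tail, overlapping the dword
// range), rep_movs then copies 10 / 4 == 2 dwords covering bytes 0..7, and
// destination is finally advanced by 10 & 3 == 2 so it ends just past the
// copied region.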
1598 
1599 
1600 void MacroAssembler::InitializeFieldsWithFiller(Register start_offset,
1601  Register end_offset,
1602  Register filler) {
1603  Label loop, entry;
1604  jmp(&entry);
1605  bind(&loop);
1606  mov(Operand(start_offset, 0), filler);
1607  add(start_offset, Immediate(kPointerSize));
1608  bind(&entry);
1609  cmp(start_offset, end_offset);
1610  j(less, &loop);
1611 }
1612 
1613 
1614 void MacroAssembler::BooleanBitTest(Register object,
1615  int field_offset,
1616  int bit_index) {
1617  bit_index += kSmiTagSize + kSmiShiftSize;
1618  ASSERT(IsPowerOf2(kBitsPerByte));
1619  int byte_index = bit_index / kBitsPerByte;
1620  int byte_bit_index = bit_index & (kBitsPerByte - 1);
1621  test_b(FieldOperand(object, field_offset + byte_index),
1622  static_cast<byte>(1 << byte_bit_index));
1623 }
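// Example of the index math: the field holds a smi, and on ia32
// (kSmiTagSize == 1, kSmiShiftSize == 0) bit 0 of the untagged value sits
// at bit 1 of the word in memory. Asking for bit_index 8 therefore tests
// bit 9 of the field, i.e. bit 1 of byte 1.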
1624 
1625 
1626 
1627 void MacroAssembler::NegativeZeroTest(Register result,
1628  Register op,
1629  Label* then_label) {
1630  Label ok;
1631  test(result, result);
1632  j(not_zero, &ok);
1633  test(op, op);
1634  j(sign, then_label);
1635  bind(&ok);
1636 }
1637 
1638 
1639 void MacroAssembler::NegativeZeroTest(Register result,
1640  Register op1,
1641  Register op2,
1642  Register scratch,
1643  Label* then_label) {
1644  Label ok;
1645  test(result, result);
1646  j(not_zero, &ok);
1647  mov(scratch, op1);
1648  or_(scratch, op2);
1649  j(sign, then_label);
1650  bind(&ok);
1651 }
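// Rationale for both variants: a zero result from an integer multiply with
// a negative operand (for example -1 * 0 in JavaScript) really denotes -0,
// which a smi cannot represent, so those cases branch to then_label to be
// materialized as a heap number instead.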
1652 
1653 
1654 void MacroAssembler::TryGetFunctionPrototype(Register function,
1655  Register result,
1656  Register scratch,
1657  Label* miss,
1658  bool miss_on_bound_function) {
1659  // Check that the receiver isn't a smi.
1660  JumpIfSmi(function, miss);
1661 
1662  // Check that the function really is a function.
1663  CmpObjectType(function, JS_FUNCTION_TYPE, result);
1664  j(not_equal, miss);
1665 
1666  if (miss_on_bound_function) {
1667  // If a bound function, go to miss label.
1668  mov(scratch,
1669  FieldOperand(function, JSFunction::kSharedFunctionInfoOffset));
1670  BooleanBitTest(scratch, SharedFunctionInfo::kCompilerHintsOffset,
1671  SharedFunctionInfo::kBoundFunction);
1672  j(not_zero, miss);
1673  }
1674 
1675  // Make sure that the function has an instance prototype.
1676  Label non_instance;
1677  movzx_b(scratch, FieldOperand(result, Map::kBitFieldOffset));
1678  test(scratch, Immediate(1 << Map::kHasNonInstancePrototype));
1679  j(not_zero, &non_instance);
1680 
1681  // Get the prototype or initial map from the function.
1682  mov(result,
1683  FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset));
1684 
1685  // If the prototype or initial map is the hole, don't return it and
1686  // simply miss the cache instead. This will allow us to allocate a
1687  // prototype object on-demand in the runtime system.
1688  cmp(result, Immediate(isolate()->factory()->the_hole_value()));
1689  j(equal, miss);
1690 
1691  // If the function does not have an initial map, we're done.
1692  Label done;
1693  CmpObjectType(result, MAP_TYPE, scratch);
1694  j(not_equal, &done);
1695 
1696  // Get the prototype from the initial map.
1697  mov(result, FieldOperand(result, Map::kPrototypeOffset));
1698  jmp(&done);
1699 
1700  // Non-instance prototype: Fetch prototype from constructor field
1701  // in initial map.
1702  bind(&non_instance);
1703  mov(result, FieldOperand(result, Map::kConstructorOffset));
1704 
1705  // All done.
1706  bind(&done);
1707 }
1708 
1709 
1710 void MacroAssembler::CallStub(CodeStub* stub, unsigned ast_id) {
1711  ASSERT(AllowThisStubCall(stub)); // Calls are not allowed in some stubs.
1712  call(stub->GetCode(), RelocInfo::CODE_TARGET, ast_id);
1713 }
1714 
1715 
1716 void MacroAssembler::TailCallStub(CodeStub* stub) {
1717  ASSERT(allow_stub_calls_ || stub->CompilingCallsToThisStubIsGCSafe());
1718  jmp(stub->GetCode(), RelocInfo::CODE_TARGET);
1719 }
1720 
1721 
1722 void MacroAssembler::StubReturn(int argc) {
1723  ASSERT(argc >= 1 && generating_stub());
1724  ret((argc - 1) * kPointerSize);
1725 }
1726 
1727 
1728 bool MacroAssembler::AllowThisStubCall(CodeStub* stub) {
1729  if (!has_frame_ && stub->SometimesSetsUpAFrame()) return false;
1730  return allow_stub_calls_ || stub->CompilingCallsToThisStubIsGCSafe();
1731 }
1732 
1733 
1734 void MacroAssembler::IllegalOperation(int num_arguments) {
1735  if (num_arguments > 0) {
1736  add(esp, Immediate(num_arguments * kPointerSize));
1737  }
1738  mov(eax, Immediate(isolate()->factory()->undefined_value()));
1739 }
1740 
1741 
1742 void MacroAssembler::IndexFromHash(Register hash, Register index) {
1743  // The assert checks that the constant for the maximum number of digits
1744  // of an array index cached in the hash field does not conflict with the
1745  // number of bits reserved for it.
1746  ASSERT(TenToThe(String::kMaxCachedArrayIndexLength) <
1747  (1 << String::kArrayIndexValueBits));
1748  // We want the smi-tagged index in key. kArrayIndexValueMask has zeros in
1749  // the low kHashShift bits.
1750  and_(hash, String::kArrayIndexValueMask);
1751  STATIC_ASSERT(String::kHashShift >= kSmiTagSize && kSmiTag == 0);
1752  if (String::kHashShift > kSmiTagSize) {
1753  shr(hash, String::kHashShift - kSmiTagSize);
1754  }
1755  if (!index.is(hash)) {
1756  mov(index, hash);
1757  }
1758 }
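
// Worked example (a sketch, assuming String::kHashShift == 2 and the ia32
// kSmiTagSize == 1). For a hash field caching the array index 42, and_()
// clears the low hash-flag bits, leaving 42 << 2; the shr by
// kHashShift - kSmiTagSize == 1 then leaves 42 << 1, which is exactly the
// smi encoding of 42 -- no separate SmiTag step is needed.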
1759 
1760 
1761 void MacroAssembler::CallRuntime(Runtime::FunctionId id, int num_arguments) {
1762  CallRuntime(Runtime::FunctionForId(id), num_arguments);
1763 }
1764 
1765 
1766 void MacroAssembler::CallRuntimeSaveDoubles(Runtime::FunctionId id) {
1767  const Runtime::Function* function = Runtime::FunctionForId(id);
1768  Set(eax, Immediate(function->nargs));
1769  mov(ebx, Immediate(ExternalReference(function, isolate())));
1770  CEntryStub ces(1, kSaveFPRegs);
1771  CallStub(&ces);
1772 }
1773 
1774 
1775 void MacroAssembler::CallRuntime(const Runtime::Function* f,
1776  int num_arguments) {
1777  // If the expected number of arguments of the runtime function is
1778  // constant, we check that the actual number of arguments matches the
1779  // expectation.
1780  if (f->nargs >= 0 && f->nargs != num_arguments) {
1781  IllegalOperation(num_arguments);
1782  return;
1783  }
1784 
1785  // TODO(1236192): Most runtime routines don't need the number of
1786  // arguments passed in because it is constant. At some point we
1787  // should remove this need and make the runtime routine entry code
1788  // smarter.
1789  Set(eax, Immediate(num_arguments));
1790  mov(ebx, Immediate(ExternalReference(f, isolate())));
1791  CEntryStub ces(1);
1792  CallStub(&ces);
1793 }
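
// The CEntryStub register protocol, restated as a usage sketch: eax carries
// the argument count and ebx the address of the C++ runtime function; the
// arguments themselves are already on the stack. A caller with two stack
// arguments therefore just writes, e.g.:
//   CallRuntime(Runtime::kAbort, 2);  // as Abort() below does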
1794 
1795 
1796 void MacroAssembler::CallExternalReference(ExternalReference ref,
1797  int num_arguments) {
1798  mov(eax, Immediate(num_arguments));
1799  mov(ebx, Immediate(ref));
1800 
1801  CEntryStub stub(1);
1802  CallStub(&stub);
1803 }
1804 
1805 
1806 void MacroAssembler::TailCallExternalReference(const ExternalReference& ext,
1807  int num_arguments,
1808  int result_size) {
1809  // TODO(1236192): Most runtime routines don't need the number of
1810  // arguments passed in because it is constant. At some point we
1811  // should remove this need and make the runtime routine entry code
1812  // smarter.
1813  Set(eax, Immediate(num_arguments));
1814  JumpToExternalReference(ext);
1815 }
1816 
1817 
1818 void MacroAssembler::TailCallRuntime(Runtime::FunctionId fid,
1819  int num_arguments,
1820  int result_size) {
1821  TailCallExternalReference(ExternalReference(fid, isolate()),
1822  num_arguments,
1823  result_size);
1824 }
1825 
1826 
1827 // If true, a Handle<T> returned by value from a function with cdecl calling
1828 // convention is returned directly as the value of its location_ field in
1829 // register eax.
1830 // If false, it is returned as a pointer to a memory region preallocated by
1831 // the caller. A pointer to this region must be passed to the function as an
1832 // implicit first argument.
1833 #if defined(USING_BSD_ABI) || defined(__MINGW32__) || defined(__CYGWIN__)
1834 static const bool kReturnHandlesDirectly = true;
1835 #else
1836 static const bool kReturnHandlesDirectly = false;
1837 #endif
1838 
1839 
1840 Operand ApiParameterOperand(int index) {
1841  return Operand(
1842  esp, (index + (kReturnHandlesDirectly ? 0 : 1)) * kPointerSize);
1843 }
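
// Worked example (a sketch, with kPointerSize == 4): when handles are
// returned directly, ApiParameterOperand(i) is [esp + 4 * i]. Otherwise
// slot 0 holds the pointer to the preallocated return-value slot, every
// parameter shifts up by one, and ApiParameterOperand(0) is [esp + 4].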
1844 
1845 
1846 void MacroAssembler::PrepareCallApiFunction(int argc) {
1847  if (kReturnHandlesDirectly) {
1848  EnterApiExitFrame(argc);
1849  // When handles are returned directly we don't have to allocate extra
1850  // space for and pass an out parameter.
1851  if (emit_debug_code()) {
1852  mov(esi, Immediate(BitCast<int32_t>(kZapValue)));
1853  }
1854  } else {
1855  // We allocate two additional slots: return value and pointer to it.
1856  EnterApiExitFrame(argc + 2);
1857 
1858  // The argument slots are filled as follows:
1859  //
1860  // n + 1: output slot
1861  // n: arg n
1862  // ...
1863  // 1: arg1
1864  // 0: pointer to the output slot
1865 
1866  lea(esi, Operand(esp, (argc + 1) * kPointerSize));
1867  mov(Operand(esp, 0 * kPointerSize), esi);
1868  if (emit_debug_code()) {
1869  mov(Operand(esi, 0), Immediate(0));
1870  }
1871  }
1872 }
1873 
1874 
1875 void MacroAssembler::CallApiFunctionAndReturn(Address function_address,
1876  int stack_space) {
1877  ExternalReference next_address =
1878  ExternalReference::handle_scope_next_address();
1879  ExternalReference limit_address =
1880  ExternalReference::handle_scope_limit_address();
1881  ExternalReference level_address =
1882  ExternalReference::handle_scope_level_address();
1883 
1884  // Allocate HandleScope in callee-save registers.
1885  mov(ebx, Operand::StaticVariable(next_address));
1886  mov(edi, Operand::StaticVariable(limit_address));
1887  add(Operand::StaticVariable(level_address), Immediate(1));
1888 
1889  // Call the api function.
1890  call(function_address, RelocInfo::RUNTIME_ENTRY);
1891 
1892  if (!kReturnHandlesDirectly) {
1893  // PrepareCallApiFunction saved pointer to the output slot into
1894  // callee-save register esi.
1895  mov(eax, Operand(esi, 0));
1896  }
1897 
1898  Label empty_handle;
1899  Label prologue;
1900  Label promote_scheduled_exception;
1901  Label delete_allocated_handles;
1902  Label leave_exit_frame;
1903 
1904  // Check if the result handle holds 0.
1905  test(eax, eax);
1906  j(zero, &empty_handle);
1907  // It was non-zero. Dereference to get the result value.
1908  mov(eax, Operand(eax, 0));
1909  bind(&prologue);
1910  // No more valid handles (the result handle was the last one). Restore
1911  // previous handle scope.
1912  mov(Operand::StaticVariable(next_address), ebx);
1913  sub(Operand::StaticVariable(level_address), Immediate(1));
1914  Assert(above_equal, "Invalid HandleScope level");
1915  cmp(edi, Operand::StaticVariable(limit_address));
1916  j(not_equal, &delete_allocated_handles);
1917  bind(&leave_exit_frame);
1918 
1919  // Check if the function scheduled an exception.
1920  ExternalReference scheduled_exception_address =
1921  ExternalReference::scheduled_exception_address(isolate());
1922  cmp(Operand::StaticVariable(scheduled_exception_address),
1923  Immediate(isolate()->factory()->the_hole_value()));
1924  j(not_equal, &promote_scheduled_exception);
1925  LeaveApiExitFrame();
1926  ret(stack_space * kPointerSize);
1927  bind(&promote_scheduled_exception);
1928  TailCallRuntime(Runtime::kPromoteScheduledException, 0, 1);
1929 
1930  bind(&empty_handle);
1931  // It was zero; the result is undefined.
1932  mov(eax, isolate()->factory()->undefined_value());
1933  jmp(&prologue);
1934 
1935  // HandleScope limit has changed. Delete allocated extensions.
1936  ExternalReference delete_extensions =
1937  ExternalReference::delete_handle_scope_extensions(isolate());
1938  bind(&delete_allocated_handles);
1939  mov(Operand::StaticVariable(limit_address), edi);
1940  mov(edi, eax);
1941  mov(Operand(esp, 0), Immediate(ExternalReference::isolate_address()));
1942  mov(eax, Immediate(delete_extensions));
1943  call(eax);
1944  mov(eax, edi);
1945  jmp(&leave_exit_frame);
1946 }
1947 
1948 
1949 void MacroAssembler::JumpToExternalReference(const ExternalReference& ext) {
1950  // Set the entry point and jump to the C entry runtime stub.
1951  mov(ebx, Immediate(ext));
1952  CEntryStub ces(1);
1953  jmp(ces.GetCode(), RelocInfo::CODE_TARGET);
1954 }
1955 
1956 
1957 void MacroAssembler::SetCallKind(Register dst, CallKind call_kind) {
1958  // This macro takes the dst register to make the code more readable
1959  // at the call sites. However, the dst register has to be ecx to
1960  // follow the calling convention which requires the call type to be
1961  // in ecx.
1962  ASSERT(dst.is(ecx));
1963  if (call_kind == CALL_AS_FUNCTION) {
1964  // Set to some non-zero smi by updating the least significant
1965  // byte.
1966  mov_b(dst, 1 << kSmiTagSize);
1967  } else {
1968  // Set to smi zero by clearing the register.
1969  xor_(dst, dst);
1970  }
1971 }
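
// Why a one-byte store suffices (a sketch): any 32-bit value whose low bit
// is clear is a valid smi, and callees only distinguish smi zero from any
// non-zero smi. Writing the byte 1 << kSmiTagSize == 2 into the low byte
// clears the tag bit and guarantees the register is non-zero, so the upper
// three bytes of ecx can be left as they are.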
1972 
1973 
1974 void MacroAssembler::InvokePrologue(const ParameterCount& expected,
1975  const ParameterCount& actual,
1976  Handle<Code> code_constant,
1977  const Operand& code_operand,
1978  Label* done,
1979  bool* definitely_mismatches,
1980  InvokeFlag flag,
1981  Label::Distance done_near,
1982  const CallWrapper& call_wrapper,
1983  CallKind call_kind) {
1984  bool definitely_matches = false;
1985  *definitely_mismatches = false;
1986  Label invoke;
1987  if (expected.is_immediate()) {
1988  ASSERT(actual.is_immediate());
1989  if (expected.immediate() == actual.immediate()) {
1990  definitely_matches = true;
1991  } else {
1992  mov(eax, actual.immediate());
1993  const int sentinel = SharedFunctionInfo::kDontAdaptArgumentsSentinel;
1994  if (expected.immediate() == sentinel) {
1995  // Don't worry about adapting arguments for builtins that
1996  // don't want that done. Skip adaptation code by making it look
1997  // like we have a match between expected and actual number of
1998  // arguments.
1999  definitely_matches = true;
2000  } else {
2001  *definitely_mismatches = true;
2002  mov(ebx, expected.immediate());
2003  }
2004  }
2005  } else {
2006  if (actual.is_immediate()) {
2007  // Expected is in register, actual is immediate. This is the
2008  // case when we invoke function values without going through the
2009  // IC mechanism.
2010  cmp(expected.reg(), actual.immediate());
2011  j(equal, &invoke);
2012  ASSERT(expected.reg().is(ebx));
2013  mov(eax, actual.immediate());
2014  } else if (!expected.reg().is(actual.reg())) {
2015  // Both expected and actual are in (different) registers. This
2016  // is the case when we invoke functions using call and apply.
2017  cmp(expected.reg(), actual.reg());
2018  j(equal, &invoke);
2019  ASSERT(actual.reg().is(eax));
2020  ASSERT(expected.reg().is(ebx));
2021  }
2022  }
2023 
2024  if (!definitely_matches) {
2025  Handle<Code> adaptor =
2026  isolate()->builtins()->ArgumentsAdaptorTrampoline();
2027  if (!code_constant.is_null()) {
2028  mov(edx, Immediate(code_constant));
2029  add(edx, Immediate(Code::kHeaderSize - kHeapObjectTag));
2030  } else if (!code_operand.is_reg(edx)) {
2031  mov(edx, code_operand);
2032  }
2033 
2034  if (flag == CALL_FUNCTION) {
2035  call_wrapper.BeforeCall(CallSize(adaptor, RelocInfo::CODE_TARGET));
2036  SetCallKind(ecx, call_kind);
2037  call(adaptor, RelocInfo::CODE_TARGET);
2038  call_wrapper.AfterCall();
2039  if (!*definitely_mismatches) {
2040  jmp(done, done_near);
2041  }
2042  } else {
2043  SetCallKind(ecx, call_kind);
2044  jmp(adaptor, RelocInfo::CODE_TARGET);
2045  }
2046  bind(&invoke);
2047  }
2048 }
2049 
2050 
2051 void MacroAssembler::InvokeCode(const Operand& code,
2052  const ParameterCount& expected,
2053  const ParameterCount& actual,
2054  InvokeFlag flag,
2055  const CallWrapper& call_wrapper,
2056  CallKind call_kind) {
2057  // You can't call a function without a valid frame.
2058  ASSERT(flag == JUMP_FUNCTION || has_frame());
2059 
2060  Label done;
2061  bool definitely_mismatches = false;
2062  InvokePrologue(expected, actual, Handle<Code>::null(), code,
2063  &done, &definitely_mismatches, flag, Label::kNear,
2064  call_wrapper, call_kind);
2065  if (!definitely_mismatches) {
2066  if (flag == CALL_FUNCTION) {
2067  call_wrapper.BeforeCall(CallSize(code));
2068  SetCallKind(ecx, call_kind);
2069  call(code);
2070  call_wrapper.AfterCall();
2071  } else {
2072  ASSERT(flag == JUMP_FUNCTION);
2073  SetCallKind(ecx, call_kind);
2074  jmp(code);
2075  }
2076  bind(&done);
2077  }
2078 }
2079 
2080 
2081 void MacroAssembler::InvokeCode(Handle<Code> code,
2082  const ParameterCount& expected,
2083  const ParameterCount& actual,
2084  RelocInfo::Mode rmode,
2085  InvokeFlag flag,
2086  const CallWrapper& call_wrapper,
2087  CallKind call_kind) {
2088  // You can't call a function without a valid frame.
2089  ASSERT(flag == JUMP_FUNCTION || has_frame());
2090 
2091  Label done;
2092  Operand dummy(eax, 0);
2093  bool definitely_mismatches = false;
2094  InvokePrologue(expected, actual, code, dummy, &done, &definitely_mismatches,
2095  flag, Label::kNear, call_wrapper, call_kind);
2096  if (!definitely_mismatches) {
2097  if (flag == CALL_FUNCTION) {
2098  call_wrapper.BeforeCall(CallSize(code, rmode));
2099  SetCallKind(ecx, call_kind);
2100  call(code, rmode);
2101  call_wrapper.AfterCall();
2102  } else {
2103  ASSERT(flag == JUMP_FUNCTION);
2104  SetCallKind(ecx, call_kind);
2105  jmp(code, rmode);
2106  }
2107  bind(&done);
2108  }
2109 }
2110 
2111 
2112 void MacroAssembler::InvokeFunction(Register fun,
2113  const ParameterCount& actual,
2114  InvokeFlag flag,
2115  const CallWrapper& call_wrapper,
2116  CallKind call_kind) {
2117  // You can't call a function without a valid frame.
2118  ASSERT(flag == JUMP_FUNCTION || has_frame());
2119 
2120  ASSERT(fun.is(edi));
2121  mov(edx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
2122  mov(esi, FieldOperand(edi, JSFunction::kContextOffset));
2123  mov(ebx, FieldOperand(edx, SharedFunctionInfo::kFormalParameterCountOffset));
2124  SmiUntag(ebx);
2125 
2126  ParameterCount expected(ebx);
2127  InvokeCode(FieldOperand(edi, JSFunction::kCodeEntryOffset),
2128  expected, actual, flag, call_wrapper, call_kind);
2129 }
2130 
2131 
2132 void MacroAssembler::InvokeFunction(Handle<JSFunction> function,
2133  const ParameterCount& actual,
2134  InvokeFlag flag,
2135  const CallWrapper& call_wrapper,
2136  CallKind call_kind) {
2137  // You can't call a function without a valid frame.
2138  ASSERT(flag == JUMP_FUNCTION || has_frame());
2139 
2140  // Get the function and setup the context.
2141  LoadHeapObject(edi, function);
2142  mov(esi, FieldOperand(edi, JSFunction::kContextOffset));
2143 
2144  ParameterCount expected(function->shared()->formal_parameter_count());
2145  // We call indirectly through the code field in the function to
2146  // allow recompilation to take effect without changing any of the
2147  // call sites.
2148  InvokeCode(FieldOperand(edi, JSFunction::kCodeEntryOffset),
2149  expected, actual, flag, call_wrapper, call_kind);
2150 }
2151 
2152 
2153 void MacroAssembler::InvokeBuiltin(Builtins::JavaScript id,
2154  InvokeFlag flag,
2155  const CallWrapper& call_wrapper) {
2156  // You can't call a builtin without a valid frame.
2157  ASSERT(flag == JUMP_FUNCTION || has_frame());
2158 
2159  // Rely on the assertion to check that the number of provided
2160  // arguments matches the expected number of arguments. Fake a
2161  // parameter count to avoid emitting code to do the check.
2162  ParameterCount expected(0);
2163  GetBuiltinFunction(edi, id);
2164  InvokeCode(FieldOperand(edi, JSFunction::kCodeEntryOffset),
2165  expected, expected, flag, call_wrapper, CALL_AS_METHOD);
2166 }
2167 
2168 
2169 void MacroAssembler::GetBuiltinFunction(Register target,
2170  Builtins::JavaScript id) {
2171  // Load the JavaScript builtin function from the builtins object.
2172  mov(target, Operand(esi, Context::SlotOffset(Context::GLOBAL_INDEX)));
2173  mov(target, FieldOperand(target, GlobalObject::kBuiltinsOffset));
2174  mov(target, FieldOperand(target,
2175  JSBuiltinsObject::OffsetOfFunctionWithId(id)));
2176 }
2177 
2178 
2179 void MacroAssembler::GetBuiltinEntry(Register target, Builtins::JavaScript id) {
2180  ASSERT(!target.is(edi));
2181  // Load the JavaScript builtin function from the builtins object.
2182  GetBuiltinFunction(edi, id);
2183  // Load the code entry point from the function into the target register.
2184  mov(target, FieldOperand(edi, JSFunction::kCodeEntryOffset));
2185 }
2186 
2187 
2188 void MacroAssembler::LoadContext(Register dst, int context_chain_length) {
2189  if (context_chain_length > 0) {
2190  // Move up the chain of contexts to the context containing the slot.
2191  mov(dst, Operand(esi, Context::SlotOffset(Context::PREVIOUS_INDEX)));
2192  for (int i = 1; i < context_chain_length; i++) {
2193  mov(dst, Operand(dst, Context::SlotOffset(Context::PREVIOUS_INDEX)));
2194  }
2195  } else {
2196  // Slot is in the current function context. Move it into the
2197  // destination register in case we store into it (the write barrier
2198  // cannot be allowed to destroy the context in esi).
2199  mov(dst, esi);
2200  }
2201 
2202  // We should not have found a with context by walking the context chain
2203  // (i.e., the static scope chain and runtime context chain do not agree).
2204  // A variable occurring in such a scope should have slot type LOOKUP and
2205  // not CONTEXT.
2206  if (emit_debug_code()) {
2207  cmp(FieldOperand(dst, HeapObject::kMapOffset),
2208  isolate()->factory()->with_context_map());
2209  Check(not_equal, "Variable resolved to with context.");
2210  }
2211 }
2212 
2213 
2214 void MacroAssembler::LoadTransitionedArrayMapConditional(
2215  ElementsKind expected_kind,
2216  ElementsKind transitioned_kind,
2217  Register map_in_out,
2218  Register scratch,
2219  Label* no_map_match) {
2220  // Load the global or builtins object from the current context.
2221  mov(scratch, Operand(esi, Context::SlotOffset(Context::GLOBAL_INDEX)));
2222  mov(scratch, FieldOperand(scratch, GlobalObject::kGlobalContextOffset));
2223 
2224  // Check that the function's map is the same as the expected cached map.
2225  mov(scratch, Operand(scratch,
2226  Context::SlotOffset(Context::JS_ARRAY_MAPS_INDEX)));
2227 
2228  size_t offset = expected_kind * kPointerSize +
2229  FixedArrayBase::kHeaderSize;
2230  cmp(map_in_out, FieldOperand(scratch, offset));
2231  j(not_equal, no_map_match);
2232 
2233  // Use the transitioned cached map.
2234  offset = transitioned_kind * kPointerSize +
2235  FixedArrayBase::kHeaderSize;
2236  mov(map_in_out, FieldOperand(scratch, offset));
2237 }
2238 
2239 
2240 void MacroAssembler::LoadInitialArrayMap(
2241  Register function_in, Register scratch,
2242  Register map_out, bool can_have_holes) {
2243  ASSERT(!function_in.is(map_out));
2244  Label done;
2245  mov(map_out, FieldOperand(function_in,
2246  JSFunction::kPrototypeOrInitialMapOffset));
2247  if (!FLAG_smi_only_arrays) {
2248  ElementsKind kind = can_have_holes ? FAST_HOLEY_ELEMENTS : FAST_ELEMENTS;
2249  LoadTransitionedArrayMapConditional(FAST_SMI_ELEMENTS,
2250  kind,
2251  map_out,
2252  scratch,
2253  &done);
2254  } else if (can_have_holes) {
2255  LoadTransitionedArrayMapConditional(FAST_SMI_ELEMENTS,
2256  FAST_HOLEY_SMI_ELEMENTS,
2257  map_out,
2258  scratch,
2259  &done);
2260  }
2261  bind(&done);
2262 }
2263 
2264 
2265 void MacroAssembler::LoadGlobalFunction(int index, Register function) {
2266  // Load the global or builtins object from the current context.
2267  mov(function, Operand(esi, Context::SlotOffset(Context::GLOBAL_INDEX)));
2268  // Load the global context from the global or builtins object.
2269  mov(function, FieldOperand(function, GlobalObject::kGlobalContextOffset));
2270  // Load the function from the global context.
2271  mov(function, Operand(function, Context::SlotOffset(index)));
2272 }
2273 
2274 
2275 void MacroAssembler::LoadGlobalFunctionInitialMap(Register function,
2276  Register map) {
2277  // Load the initial map. The global functions all have initial maps.
2278  mov(map, FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset));
2279  if (emit_debug_code()) {
2280  Label ok, fail;
2281  CheckMap(map, isolate()->factory()->meta_map(), &fail, DO_SMI_CHECK);
2282  jmp(&ok);
2283  bind(&fail);
2284  Abort("Global functions must have initial map");
2285  bind(&ok);
2286  }
2287 }
2288 
2289 
2290 // Store the value in register src in the safepoint register stack
2291 // slot for register dst.
2292 void MacroAssembler::StoreToSafepointRegisterSlot(Register dst, Register src) {
2293  mov(SafepointRegisterSlot(dst), src);
2294 }
2295 
2296 
2297 void MacroAssembler::StoreToSafepointRegisterSlot(Register dst, Immediate src) {
2298  mov(SafepointRegisterSlot(dst), src);
2299 }
2300 
2301 
2302 void MacroAssembler::LoadFromSafepointRegisterSlot(Register dst, Register src) {
2303  mov(dst, SafepointRegisterSlot(src));
2304 }
2305 
2306 
2307 Operand MacroAssembler::SafepointRegisterSlot(Register reg) {
2308  return Operand(esp, SafepointRegisterStackIndex(reg.code()) * kPointerSize);
2309 }
2310 
2311 
2312 int MacroAssembler::SafepointRegisterStackIndex(int reg_code) {
2313  // The registers are pushed starting with the lowest encoding,
2314  // which means that lowest encodings are furthest away from
2315  // the stack pointer.
2316  ASSERT(reg_code >= 0 && reg_code < kNumSafepointRegisters);
2317  return kNumSafepointRegisters - reg_code - 1;
2318 }
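
// Worked example (a sketch, with kNumSafepointRegisters == 8, the pushad
// order on ia32): register code 0 (eax) is pushed first and ends up
// deepest, at stack index 8 - 0 - 1 == 7; code 7 is pushed last and sits
// at index 0, i.e. directly at [esp] in SafepointRegisterSlot above.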
2319 
2320 
2321 void MacroAssembler::LoadHeapObject(Register result,
2322  Handle<HeapObject> object) {
2323  if (isolate()->heap()->InNewSpace(*object)) {
2324  Handle<JSGlobalPropertyCell> cell =
2325  isolate()->factory()->NewJSGlobalPropertyCell(object);
2326  mov(result, Operand::Cell(cell));
2327  } else {
2328  mov(result, object);
2329  }
2330 }
2331 
2332 
2333 void MacroAssembler::PushHeapObject(Handle<HeapObject> object) {
2334  if (isolate()->heap()->InNewSpace(*object)) {
2335  Handle<JSGlobalPropertyCell> cell =
2336  isolate()->factory()->NewJSGlobalPropertyCell(object);
2337  push(Operand::Cell(cell));
2338  } else {
2339  Push(object);
2340  }
2341 }
2342 
2343 
2344 void MacroAssembler::Ret() {
2345  ret(0);
2346 }
2347 
2348 
2349 void MacroAssembler::Ret(int bytes_dropped, Register scratch) {
2350  if (is_uint16(bytes_dropped)) {
2351  ret(bytes_dropped);
2352  } else {
2353  pop(scratch);
2354  add(esp, Immediate(bytes_dropped));
2355  push(scratch);
2356  ret(0);
2357  }
2358 }
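
// The two paths exist because the ia32 "ret imm16" encoding can drop at
// most 0xffff bytes of arguments. For anything larger, the return address
// is popped into scratch, esp is adjusted by the full amount, and the
// return address is pushed back before a plain ret -- the same effect at
// the cost of one scratch register and a few extra instructions.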
2359 
2360 
2361 void MacroAssembler::Drop(int stack_elements) {
2362  if (stack_elements > 0) {
2363  add(esp, Immediate(stack_elements * kPointerSize));
2364  }
2365 }
2366 
2367 
2368 void MacroAssembler::Move(Register dst, Register src) {
2369  if (!dst.is(src)) {
2370  mov(dst, src);
2371  }
2372 }
2373 
2374 
2375 void MacroAssembler::SetCounter(StatsCounter* counter, int value) {
2376  if (FLAG_native_code_counters && counter->Enabled()) {
2377  mov(Operand::StaticVariable(ExternalReference(counter)), Immediate(value));
2378  }
2379 }
2380 
2381 
2382 void MacroAssembler::IncrementCounter(StatsCounter* counter, int value) {
2383  ASSERT(value > 0);
2384  if (FLAG_native_code_counters && counter->Enabled()) {
2385  Operand operand = Operand::StaticVariable(ExternalReference(counter));
2386  if (value == 1) {
2387  inc(operand);
2388  } else {
2389  add(operand, Immediate(value));
2390  }
2391  }
2392 }
2393 
2394 
2395 void MacroAssembler::DecrementCounter(StatsCounter* counter, int value) {
2396  ASSERT(value > 0);
2397  if (FLAG_native_code_counters && counter->Enabled()) {
2398  Operand operand = Operand::StaticVariable(ExternalReference(counter));
2399  if (value == 1) {
2400  dec(operand);
2401  } else {
2402  sub(operand, Immediate(value));
2403  }
2404  }
2405 }
2406 
2407 
2408 void MacroAssembler::IncrementCounter(Condition cc,
2409  StatsCounter* counter,
2410  int value) {
2411  ASSERT(value > 0);
2412  if (FLAG_native_code_counters && counter->Enabled()) {
2413  Label skip;
2414  j(NegateCondition(cc), &skip);
2415  pushfd();
2416  IncrementCounter(counter, value);
2417  popfd();
2418  bind(&skip);
2419  }
2420 }
2421 
2422 
2423 void MacroAssembler::DecrementCounter(Condition cc,
2424  StatsCounter* counter,
2425  int value) {
2426  ASSERT(value > 0);
2427  if (FLAG_native_code_counters && counter->Enabled()) {
2428  Label skip;
2429  j(NegateCondition(cc), &skip);
2430  pushfd();
2431  DecrementCounter(counter, value);
2432  popfd();
2433  bind(&skip);
2434  }
2435 }
2436 
2437 
2438 void MacroAssembler::Assert(Condition cc, const char* msg) {
2439  if (emit_debug_code()) Check(cc, msg);
2440 }
2441 
2442 
2443 void MacroAssembler::AssertFastElements(Register elements) {
2444  if (emit_debug_code()) {
2445  Factory* factory = isolate()->factory();
2446  Label ok;
2447  cmp(FieldOperand(elements, HeapObject::kMapOffset),
2448  Immediate(factory->fixed_array_map()));
2449  j(equal, &ok);
2450  cmp(FieldOperand(elements, HeapObject::kMapOffset),
2451  Immediate(factory->fixed_double_array_map()));
2452  j(equal, &ok);
2453  cmp(FieldOperand(elements, HeapObject::kMapOffset),
2454  Immediate(factory->fixed_cow_array_map()));
2455  j(equal, &ok);
2456  Abort("JSObject with fast elements map has slow elements");
2457  bind(&ok);
2458  }
2459 }
2460 
2461 
2462 void MacroAssembler::Check(Condition cc, const char* msg) {
2463  Label L;
2464  j(cc, &L);
2465  Abort(msg);
2466  // will not return here
2467  bind(&L);
2468 }
2469 
2470 
2471 void MacroAssembler::CheckStackAlignment() {
2472  int frame_alignment = OS::ActivationFrameAlignment();
2473  int frame_alignment_mask = frame_alignment - 1;
2474  if (frame_alignment > kPointerSize) {
2475  ASSERT(IsPowerOf2(frame_alignment));
2476  Label alignment_as_expected;
2477  test(esp, Immediate(frame_alignment_mask));
2478  j(zero, &alignment_as_expected);
2479  // Abort if stack is not aligned.
2480  int3();
2481  bind(&alignment_as_expected);
2482  }
2483 }
2484 
2485 
2486 void MacroAssembler::Abort(const char* msg) {
2487  // We want to pass the msg string like a smi to avoid GC
2488  // problems; however, msg is not guaranteed to be aligned
2489  // properly. Instead, we pass an aligned pointer that is
2490  // a proper v8 smi, and also pass the alignment difference
2491  // from the real pointer as a smi.
2492  intptr_t p1 = reinterpret_cast<intptr_t>(msg);
2493  intptr_t p0 = (p1 & ~kSmiTagMask) + kSmiTag;
2494  ASSERT(reinterpret_cast<Object*>(p0)->IsSmi());
2495 #ifdef DEBUG
2496  if (msg != NULL) {
2497  RecordComment("Abort message: ");
2498  RecordComment(msg);
2499  }
2500 #endif
2501 
2502  push(eax);
2503  push(Immediate(p0));
2504  push(Immediate(reinterpret_cast<intptr_t>(Smi::FromInt(p1 - p0))));
2505  // Disable stub call restrictions to always allow calls to abort.
2506  if (!has_frame_) {
2507  // We don't actually want to generate a pile of code for this, so just
2508  // claim there is a stack frame, without generating one.
2509  FrameScope scope(this, StackFrame::NONE);
2510  CallRuntime(Runtime::kAbort, 2);
2511  } else {
2512  CallRuntime(Runtime::kAbort, 2);
2513  }
2514  // will not return here
2515  int3();
2516 }
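
// Worked example of the smi-encoding trick above (a sketch, with
// kSmiTagMask == 1 and kSmiTag == 0): if msg lives at the odd address
// 0x1001, then p0 == 0x1000, which reinterprets as a properly tagged smi,
// and the difference p1 - p0 == 1 travels separately as Smi::FromInt(1).
// The runtime reassembles the real pointer from the two smis, and the GC
// never sees a misaligned "pointer" on the stack.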
2517 
2518 
2519 void MacroAssembler::LoadInstanceDescriptors(Register map,
2520  Register descriptors) {
2521  mov(descriptors,
2522  FieldOperand(map, Map::kInstanceDescriptorsOrBitField3Offset));
2523  Label not_smi;
2524  JumpIfNotSmi(descriptors, &not_smi);
2525  mov(descriptors, isolate()->factory()->empty_descriptor_array());
2526  bind(&not_smi);
2527 }
2528 
2529 
2530 void MacroAssembler::LoadPowerOf2(XMMRegister dst,
2531  Register scratch,
2532  int power) {
2533  ASSERT(is_uintn(power + HeapNumber::kExponentBias,
2534  HeapNumber::kExponentBits));
2535  mov(scratch, Immediate(power + HeapNumber::kExponentBias));
2536  movd(dst, scratch);
2537  psllq(dst, HeapNumber::kMantissaBits);
2538 }
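
// Worked example (a sketch, using the IEEE 754 layout described by the
// HeapNumber constants: exponent bias 1023, 52 mantissa bits): for
// power == 3, scratch receives 1023 + 3 == 1026 and psllq by 52 moves it
// into the exponent field, producing the bit pattern of the double 8.0
// with a zero mantissa -- no memory load or constant pool is needed.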
2539 
2540 
2541 void MacroAssembler::JumpIfInstanceTypeIsNotSequentialAscii(
2542  Register instance_type,
2543  Register scratch,
2544  Label* failure) {
2545  if (!scratch.is(instance_type)) {
2546  mov(scratch, instance_type);
2547  }
2548  and_(scratch,
2549  kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask);
2550  cmp(scratch, kStringTag | kSeqStringTag | kAsciiStringTag);
2551  j(not_equal, failure);
2552 }
2553 
2554 
2555 void MacroAssembler::JumpIfNotBothSequentialAsciiStrings(Register object1,
2556  Register object2,
2557  Register scratch1,
2558  Register scratch2,
2559  Label* failure) {
2560  // Check that both objects are not smis.
2561  STATIC_ASSERT(kSmiTag == 0);
2562  mov(scratch1, object1);
2563  and_(scratch1, object2);
2564  JumpIfSmi(scratch1, failure);
2565 
2566  // Load instance type for both strings.
2567  mov(scratch1, FieldOperand(object1, HeapObject::kMapOffset));
2568  mov(scratch2, FieldOperand(object2, HeapObject::kMapOffset));
2569  movzx_b(scratch1, FieldOperand(scratch1, Map::kInstanceTypeOffset));
2570  movzx_b(scratch2, FieldOperand(scratch2, Map::kInstanceTypeOffset));
2571 
2572  // Check that both are flat ASCII strings.
2573  const int kFlatAsciiStringMask =
2574  kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask;
2575  const int kFlatAsciiStringTag = ASCII_STRING_TYPE;
2576  // Interleave bits from both instance types and compare them in one check.
2577  ASSERT_EQ(0, kFlatAsciiStringMask & (kFlatAsciiStringMask << 3));
2578  and_(scratch1, kFlatAsciiStringMask);
2579  and_(scratch2, kFlatAsciiStringMask);
2580  lea(scratch1, Operand(scratch1, scratch2, times_8, 0));
2581  cmp(scratch1, kFlatAsciiStringTag | (kFlatAsciiStringTag << 3));
2582  j(not_equal, failure);
2583 }
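
// How the interleaving works (a sketch): the ASSERT_EQ above guarantees
// that kFlatAsciiStringMask and kFlatAsciiStringMask << 3 share no bits,
// so lea(scratch1, Operand(scratch1, scratch2, times_8, 0)) -- that is,
// scratch1 + (scratch2 << 3) -- packs both masked instance types into one
// register without carries, and the single cmp against
// kFlatAsciiStringTag | (kFlatAsciiStringTag << 3) checks both strings in
// one branch.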
2584 
2585 
2586 void MacroAssembler::PrepareCallCFunction(int num_arguments, Register scratch) {
2587  int frame_alignment = OS::ActivationFrameAlignment();
2588  if (frame_alignment != 0) {
2589  // Make stack end at alignment and make room for num_arguments words
2590  // and the original value of esp.
2591  mov(scratch, esp);
2592  sub(esp, Immediate((num_arguments + 1) * kPointerSize));
2593  ASSERT(IsPowerOf2(frame_alignment));
2594  and_(esp, -frame_alignment);
2595  mov(Operand(esp, num_arguments * kPointerSize), scratch);
2596  } else {
2597  sub(esp, Immediate(num_arguments * kPointerSize));
2598  }
2599 }
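
// Worked example (a sketch, assuming a 16-byte ActivationFrameAlignment
// and kPointerSize == 4): for num_arguments == 2 the code reserves three
// words, rounds esp down to a 16-byte boundary via and_(esp, -16), and
// saves the original esp in the word just above the argument slots, so
// CallCFunction can restore it with a single load after the call.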
2600 
2601 
2602 void MacroAssembler::CallCFunction(ExternalReference function,
2603  int num_arguments) {
2604  // Trashing eax is ok as it will be the return value.
2605  mov(eax, Immediate(function));
2606  CallCFunction(eax, num_arguments);
2607 }
2608 
2609 
2610 void MacroAssembler::CallCFunction(Register function,
2611  int num_arguments) {
2612  ASSERT(has_frame());
2613  // Check stack alignment.
2614  if (emit_debug_code()) {
2615  CheckStackAlignment();
2616  }
2617 
2618  call(function);
2619  if (OS::ActivationFrameAlignment() != 0) {
2620  mov(esp, Operand(esp, num_arguments * kPointerSize));
2621  } else {
2622  add(esp, Immediate(num_arguments * kPointerSize));
2623  }
2624 }
2625 
2626 
2627 bool AreAliased(Register r1, Register r2, Register r3, Register r4) {
2628  if (r1.is(r2)) return true;
2629  if (r1.is(r3)) return true;
2630  if (r1.is(r4)) return true;
2631  if (r2.is(r3)) return true;
2632  if (r2.is(r4)) return true;
2633  if (r3.is(r4)) return true;
2634  return false;
2635 }
2636 
2637 
2638 CodePatcher::CodePatcher(byte* address, int size)
2639  : address_(address),
2640  size_(size),
2641  masm_(NULL, address, size + Assembler::kGap) {
2642  // Create a new macro assembler pointing to the address of the code to patch.
2643  // The size is adjusted with kGap in order for the assembler to generate size
2644  // bytes of instructions without failing with buffer size constraints.
2645  ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
2646 }
2647 
2648 
2649 CodePatcher::~CodePatcher() {
2650  // Indicate that code has changed.
2651  CPU::FlushICache(address_, size_);
2652 
2653  // Check that the code was patched as expected.
2654  ASSERT(masm_.pc_ == address_ + size_);
2655  ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
2656 }
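
// Typical use (a sketch; masm() is the accessor declared with CodePatcher
// in the corresponding header): patch exactly `size` bytes in place, with
// the destructor flushing the instruction cache and asserting that the
// expected number of bytes was emitted.
//   {
//     CodePatcher patcher(address, 2);
//     patcher.masm()->int3();
//     patcher.masm()->int3();
//   }  // icache flushed here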
2657 
2658 
2659 void MacroAssembler::CheckPageFlag(
2660  Register object,
2661  Register scratch,
2662  int mask,
2663  Condition cc,
2664  Label* condition_met,
2665  Label::Distance condition_met_distance) {
2666  ASSERT(cc == zero || cc == not_zero);
2667  if (scratch.is(object)) {
2668  and_(scratch, Immediate(~Page::kPageAlignmentMask));
2669  } else {
2670  mov(scratch, Immediate(~Page::kPageAlignmentMask));
2671  and_(scratch, object);
2672  }
2673  if (mask < (1 << kBitsPerByte)) {
2674  test_b(Operand(scratch, MemoryChunk::kFlagsOffset),
2675  static_cast<uint8_t>(mask));
2676  } else {
2677  test(Operand(scratch, MemoryChunk::kFlagsOffset), Immediate(mask));
2678  }
2679  j(cc, condition_met, condition_met_distance);
2680 }
2681 
2682 
2683 void MacroAssembler::CheckPageFlagForMap(
2684  Handle<Map> map,
2685  int mask,
2686  Condition cc,
2687  Label* condition_met,
2688  Label::Distance condition_met_distance) {
2689  ASSERT(cc == zero || cc == not_zero);
2690  Page* page = Page::FromAddress(map->address());
2691  ExternalReference reference(ExternalReference::page_flags(page));
2692  // The inlined static address check of the page's flags relies
2693  // on maps never being compacted.
2694  ASSERT(!isolate()->heap()->mark_compact_collector()->
2695  IsOnEvacuationCandidate(*map));
2696  if (mask < (1 << kBitsPerByte)) {
2697  test_b(Operand::StaticVariable(reference), static_cast<uint8_t>(mask));
2698  } else {
2699  test(Operand::StaticVariable(reference), Immediate(mask));
2700  }
2701  j(cc, condition_met, condition_met_distance);
2702 }
2703 
2704 
2705 void MacroAssembler::JumpIfBlack(Register object,
2706  Register scratch0,
2707  Register scratch1,
2708  Label* on_black,
2709  Label::Distance on_black_near) {
2710  HasColor(object, scratch0, scratch1,
2711  on_black, on_black_near,
2712  1, 0); // kBlackBitPattern.
2713  ASSERT(strcmp(Marking::kBlackBitPattern, "10") == 0);
2714 }
2715 
2716 
2717 void MacroAssembler::HasColor(Register object,
2718  Register bitmap_scratch,
2719  Register mask_scratch,
2720  Label* has_color,
2721  Label::Distance has_color_distance,
2722  int first_bit,
2723  int second_bit) {
2724  ASSERT(!AreAliased(object, bitmap_scratch, mask_scratch, ecx));
2725 
2726  GetMarkBits(object, bitmap_scratch, mask_scratch);
2727 
2728  Label other_color, word_boundary;
2729  test(mask_scratch, Operand(bitmap_scratch, MemoryChunk::kHeaderSize));
2730  j(first_bit == 1 ? zero : not_zero, &other_color, Label::kNear);
2731  add(mask_scratch, mask_scratch); // Shift left 1 by adding.
2732  j(zero, &word_boundary, Label::kNear);
2733  test(mask_scratch, Operand(bitmap_scratch, MemoryChunk::kHeaderSize));
2734  j(second_bit == 1 ? not_zero : zero, has_color, has_color_distance);
2735  jmp(&other_color, Label::kNear);
2736 
2737  bind(&word_boundary);
2738  test_b(Operand(bitmap_scratch, MemoryChunk::kHeaderSize + kPointerSize), 1);
2739 
2740  j(second_bit == 1 ? not_zero : zero, has_color, has_color_distance);
2741  bind(&other_color);
2742 }
2743 
2744 
2745 void MacroAssembler::GetMarkBits(Register addr_reg,
2746  Register bitmap_reg,
2747  Register mask_reg) {
2748  ASSERT(!AreAliased(addr_reg, mask_reg, bitmap_reg, ecx));
2749  mov(bitmap_reg, Immediate(~Page::kPageAlignmentMask));
2750  and_(bitmap_reg, addr_reg);
2751  mov(ecx, addr_reg);
2752  int shift =
2753  Bitmap::kBitsPerCellLog2 + kPointerSizeLog2 - Bitmap::kBytesPerCellLog2;
2754  shr(ecx, shift);
2755  and_(ecx,
2756  (Page::kPageAlignmentMask >> shift) & ~(Bitmap::kBytesPerCell - 1));
2757 
2758  add(bitmap_reg, ecx);
2759  mov(ecx, addr_reg);
2760  shr(ecx, kPointerSizeLog2);
2761  and_(ecx, (1 << Bitmap::kBitsPerCellLog2) - 1);
2762  mov(mask_reg, Immediate(1));
2763  shl_cl(mask_reg);
2764 }
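
// Worked example (a sketch, with 4-byte pointers and 32-bit bitmap cells,
// i.e. kPointerSizeLog2 == 2, Bitmap::kBitsPerCellLog2 == 5,
// Bitmap::kBytesPerCellLog2 == 2, so shift == 5): each 4-byte cell covers
// 32 words == 128 bytes of the page, so addr >> 5 (masked to the page and
// rounded down to a cell) is the cell's byte offset, while
// (addr >> 2) & 31 selects the bit within it; mask_reg ends up holding
// 1 << ((addr >> 2) & 31).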
2765 
2766 
2767 void MacroAssembler::EnsureNotWhite(
2768  Register value,
2769  Register bitmap_scratch,
2770  Register mask_scratch,
2771  Label* value_is_white_and_not_data,
2772  Label::Distance distance) {
2773  ASSERT(!AreAliased(value, bitmap_scratch, mask_scratch, ecx));
2774  GetMarkBits(value, bitmap_scratch, mask_scratch);
2775 
2776  // If the value is black or grey we don't need to do anything.
2777  ASSERT(strcmp(Marking::kWhiteBitPattern, "00") == 0);
2778  ASSERT(strcmp(Marking::kBlackBitPattern, "10") == 0);
2779  ASSERT(strcmp(Marking::kGreyBitPattern, "11") == 0);
2780  ASSERT(strcmp(Marking::kImpossibleBitPattern, "01") == 0);
2781 
2782  Label done;
2783 
2784  // Since both black and grey have a 1 in the first position and white does
2785  // not have a 1 there, we only need to check one bit.
2786  test(mask_scratch, Operand(bitmap_scratch, MemoryChunk::kHeaderSize));
2787  j(not_zero, &done, Label::kNear);
2788 
2789  if (FLAG_debug_code) {
2790  // Check for impossible bit pattern.
2791  Label ok;
2792  push(mask_scratch);
2793  // Left shift (via add) may overflow, making the check conservative.
2794  add(mask_scratch, mask_scratch);
2795  test(mask_scratch, Operand(bitmap_scratch, MemoryChunk::kHeaderSize));
2796  j(zero, &ok, Label::kNear);
2797  int3();
2798  bind(&ok);
2799  pop(mask_scratch);
2800  }
2801 
2802  // Value is white. We check whether it is data that doesn't need scanning.
2803  // Currently only checks for HeapNumber and non-cons strings.
2804  Register map = ecx; // Holds map while checking type.
2805  Register length = ecx; // Holds length of object after checking type.
2806  Label not_heap_number;
2807  Label is_data_object;
2808 
2809  // Check for heap-number
2810  mov(map, FieldOperand(value, HeapObject::kMapOffset));
2811  cmp(map, FACTORY->heap_number_map());
2812  j(not_equal, &not_heap_number, Label::kNear);
2813  mov(length, Immediate(HeapNumber::kSize));
2814  jmp(&is_data_object, Label::kNear);
2815 
2816  bind(&not_heap_number);
2817  // Check for strings.
2818  ASSERT(kIsIndirectStringMask == 0x1 && kIsIndirectStringTag == 0x1);
2819  ASSERT(kNotStringTag == 0x80 && kIsNotStringMask == 0x80);
2820  // If it's a string and it's not a cons string then it's an object containing
2821  // no GC pointers.
2822  Register instance_type = ecx;
2823  movzx_b(instance_type, FieldOperand(map, Map::kInstanceTypeOffset));
2824  test_b(instance_type, kIsIndirectStringMask | kIsNotStringMask);
2825  j(not_zero, value_is_white_and_not_data);
2826  // It's a non-indirect (non-cons and non-slice) string.
2827  // If it's external, the length is just ExternalString::kSize.
2828  // Otherwise it's String::kHeaderSize + string->length() * (1 or 2).
2829  Label not_external;
2830  // External strings are the only ones with the kExternalStringTag bit
2831  // set.
2832  ASSERT_EQ(0, kSeqStringTag & kExternalStringTag);
2833  ASSERT_EQ(0, kConsStringTag & kExternalStringTag);
2834  test_b(instance_type, kExternalStringTag);
2835  j(zero, &not_external, Label::kNear);
2836  mov(length, Immediate(ExternalString::kSize));
2837  jmp(&is_data_object, Label::kNear);
2838 
2839  bind(&not_external);
2840  // Sequential string, either ASCII or UC16.
2841  ASSERT(kAsciiStringTag == 0x04);
2842  and_(length, Immediate(kStringEncodingMask));
2843  xor_(length, Immediate(kStringEncodingMask));
2844  add(length, Immediate(0x04));
2845  // Value now either 4 (if ASCII) or 8 (if UC16), i.e., char-size shifted
2846  // by 2. If we multiply the string length as smi by this, it still
2847  // won't overflow a 32-bit value.
2848  ASSERT_EQ(SeqAsciiString::kMaxSize, SeqTwoByteString::kMaxSize);
2849  ASSERT(SeqAsciiString::kMaxSize <=
2850  static_cast<int>(0xffffffffu >> (2 + kSmiTagSize)));
2851  imul(length, FieldOperand(value, String::kLengthOffset));
2852  shr(length, 2 + kSmiTagSize + kSmiShiftSize);
2853  add(length, Immediate(SeqString::kHeaderSize + kObjectAlignmentMask));
2854  and_(length, Immediate(~kObjectAlignmentMask));
2855 
2856  bind(&is_data_object);
2857  // Value is a data object, and it is white. Mark it black. Since we know
2858  // that the object is white we can make it black by flipping one bit.
2859  or_(Operand(bitmap_scratch, MemoryChunk::kHeaderSize), mask_scratch);
2860 
2861  and_(bitmap_scratch, Immediate(~Page::kPageAlignmentMask));
2862  add(Operand(bitmap_scratch, MemoryChunk::kLiveBytesOffset),
2863  length);
2864  if (FLAG_debug_code) {
2865  mov(length, Operand(bitmap_scratch, MemoryChunk::kLiveBytesOffset));
2866  cmp(length, Operand(bitmap_scratch, MemoryChunk::kSizeOffset));
2867  Check(less_equal, "Live Bytes Count overflow chunk size");
2868  }
2869 
2870  bind(&done);
2871 }
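
// Worked example of the sequential-string length computation above (a
// sketch, with kStringEncodingMask == kAsciiStringTag == 0x04 and
// kSmiTagSize == 1): for an ASCII string the and_/xor_ pair maps the
// instance type to 0 and the add produces 4; for a two-byte string it
// produces 8, i.e. char-size << 2 in both cases. Multiplying by the
// smi-tagged length (value << 1) and shifting right by 2 + 1 == 3 yields
// exactly chars * char_size, which is then rounded up to the object
// alignment and added to the header size.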
2872 
2873 
2874 void MacroAssembler::CheckEnumCache(Label* call_runtime) {
2875  Label next;
2876  mov(ecx, eax);
2877  bind(&next);
2878 
2879  // Check that there are no elements. Register ecx contains the
2880  // current JS object we've reached through the prototype chain.
2881  cmp(FieldOperand(ecx, JSObject::kElementsOffset),
2882  isolate()->factory()->empty_fixed_array());
2883  j(not_equal, call_runtime);
2884 
2885  // Check that instance descriptors are not empty so that we can
2886  // check for an enum cache. Leave the map in ebx for the subsequent
2887  // prototype load.
2888  mov(ebx, FieldOperand(ecx, HeapObject::kMapOffset));
2889  mov(edx, FieldOperand(ebx, Map::kInstanceDescriptorsOrBitField3Offset));
2890  JumpIfSmi(edx, call_runtime);
2891 
2892  // Check that there is an enum cache in the non-empty instance
2893  // descriptors (edx). This is the case if the next enumeration
2894  // index field does not contain a smi.
2895  mov(edx, FieldOperand(edx, DescriptorArray::kEnumerationIndexOffset));
2896  JumpIfSmi(edx, call_runtime);
2897 
2898  // For all objects but the receiver, check that the cache is empty.
2899  Label check_prototype;
2900  cmp(ecx, eax);
2901  j(equal, &check_prototype, Label::kNear);
2902  mov(edx, FieldOperand(edx, DescriptorArray::kEnumCacheBridgeCacheOffset));
2903  cmp(edx, isolate()->factory()->empty_fixed_array());
2904  j(not_equal, call_runtime);
2905 
2906  // Load the prototype from the map and loop if non-null.
2907  bind(&check_prototype);
2908  mov(ecx, FieldOperand(ebx, Map::kPrototypeOffset));
2909  cmp(ecx, isolate()->factory()->null_value());
2910  j(not_equal, &next);
2911 }
2912 
2913 } } // namespace v8::internal
2914 
2915 #endif // V8_TARGET_ARCH_IA32