v8 3.25.30 (node 0.11.13)
V8 is Google's open source JavaScript engine.

code-stubs-ia32.cc
// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "v8.h"

#if V8_TARGET_ARCH_IA32

#include "bootstrapper.h"
#include "code-stubs.h"
#include "isolate.h"
#include "jsregexp.h"
#include "regexp-macro-assembler.h"
#include "runtime.h"
#include "stub-cache.h"
#include "codegen.h"

namespace v8 {
namespace internal {


void FastNewClosureStub::InitializeInterfaceDescriptor(
    Isolate* isolate,
    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { ebx };
  descriptor->register_param_count_ = 1;
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ =
      Runtime::FunctionForId(Runtime::kHiddenNewClosureFromStubFailure)->entry;
}


void FastNewContextStub::InitializeInterfaceDescriptor(
    Isolate* isolate,
    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { edi };
  descriptor->register_param_count_ = 1;
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ = NULL;
}


void ToNumberStub::InitializeInterfaceDescriptor(
    Isolate* isolate,
    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { eax };
  descriptor->register_param_count_ = 1;
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ = NULL;
}


void NumberToStringStub::InitializeInterfaceDescriptor(
    Isolate* isolate,
    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { eax };
  descriptor->register_param_count_ = 1;
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ =
      Runtime::FunctionForId(Runtime::kHiddenNumberToString)->entry;
}


void FastCloneShallowArrayStub::InitializeInterfaceDescriptor(
    Isolate* isolate,
    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { eax, ebx, ecx };
  descriptor->register_param_count_ = 3;
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ =
      Runtime::FunctionForId(
          Runtime::kHiddenCreateArrayLiteralStubBailout)->entry;
}


void FastCloneShallowObjectStub::InitializeInterfaceDescriptor(
    Isolate* isolate,
    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { eax, ebx, ecx, edx };
  descriptor->register_param_count_ = 4;
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ =
      Runtime::FunctionForId(Runtime::kHiddenCreateObjectLiteral)->entry;
}


void CreateAllocationSiteStub::InitializeInterfaceDescriptor(
    Isolate* isolate,
    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { ebx, edx };
  descriptor->register_param_count_ = 2;
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ = NULL;
}


void KeyedLoadFastElementStub::InitializeInterfaceDescriptor(
    Isolate* isolate,
    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { edx, ecx };
  descriptor->register_param_count_ = 2;
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ =
      FUNCTION_ADDR(KeyedLoadIC_MissFromStubFailure);
}


void KeyedLoadDictionaryElementStub::InitializeInterfaceDescriptor(
    Isolate* isolate,
    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { edx, ecx };
  descriptor->register_param_count_ = 2;
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ =
      FUNCTION_ADDR(KeyedLoadIC_MissFromStubFailure);
}


void RegExpConstructResultStub::InitializeInterfaceDescriptor(
    Isolate* isolate,
    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { ecx, ebx, eax };
  descriptor->register_param_count_ = 3;
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ =
      Runtime::FunctionForId(Runtime::kHiddenRegExpConstructResult)->entry;
}


void LoadFieldStub::InitializeInterfaceDescriptor(
    Isolate* isolate,
    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { edx };
  descriptor->register_param_count_ = 1;
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ = NULL;
}


void KeyedLoadFieldStub::InitializeInterfaceDescriptor(
    Isolate* isolate,
    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { edx };
  descriptor->register_param_count_ = 1;
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ = NULL;
}


void StringLengthStub::InitializeInterfaceDescriptor(
    Isolate* isolate,
    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { edx, ecx };
  descriptor->register_param_count_ = 2;
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ = NULL;
}


void KeyedStringLengthStub::InitializeInterfaceDescriptor(
    Isolate* isolate,
    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { edx, ecx };
  descriptor->register_param_count_ = 2;
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ = NULL;
}


void KeyedStoreFastElementStub::InitializeInterfaceDescriptor(
    Isolate* isolate,
    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { edx, ecx, eax };
  descriptor->register_param_count_ = 3;
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ =
      FUNCTION_ADDR(KeyedStoreIC_MissFromStubFailure);
}


void TransitionElementsKindStub::InitializeInterfaceDescriptor(
    Isolate* isolate,
    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { eax, ebx };
  descriptor->register_param_count_ = 2;
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ =
      Runtime::FunctionForId(Runtime::kTransitionElementsKind)->entry;
}


static void InitializeArrayConstructorDescriptor(
    Isolate* isolate,
    CodeStubInterfaceDescriptor* descriptor,
    int constant_stack_parameter_count) {
  // Register state:
  //   eax -- number of arguments
  //   edi -- function
  //   ebx -- allocation site with elements kind
  static Register registers_variable_args[] = { edi, ebx, eax };
  static Register registers_no_args[] = { edi, ebx };

  if (constant_stack_parameter_count == 0) {
    descriptor->register_param_count_ = 2;
    descriptor->register_params_ = registers_no_args;
  } else {
    // The stack parameter count comprises the constructor pointer and the
    // actual arguments.
    descriptor->handler_arguments_mode_ = PASS_ARGUMENTS;
    descriptor->stack_parameter_count_ = eax;
    descriptor->register_param_count_ = 3;
    descriptor->register_params_ = registers_variable_args;
  }

  descriptor->hint_stack_parameter_count_ = constant_stack_parameter_count;
  descriptor->function_mode_ = JS_FUNCTION_STUB_MODE;
  descriptor->deoptimization_handler_ =
      Runtime::FunctionForId(Runtime::kHiddenArrayConstructor)->entry;
}
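
// [Editor's note] constant_stack_parameter_count selects among the entry
// points wrapped further below: 0 (no-argument constructor), 1 (single
// argument) and -1 (N arguments). In the non-zero cases the actual count
// travels in eax, which is why stack_parameter_count_ is set to that
// register above.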


static void InitializeInternalArrayConstructorDescriptor(
    Isolate* isolate,
    CodeStubInterfaceDescriptor* descriptor,
    int constant_stack_parameter_count) {
  // Register state:
  //   eax -- number of arguments
  //   edi -- constructor function
  static Register registers_variable_args[] = { edi, eax };
  static Register registers_no_args[] = { edi };

  if (constant_stack_parameter_count == 0) {
    descriptor->register_param_count_ = 1;
    descriptor->register_params_ = registers_no_args;
  } else {
    // The stack parameter count comprises the constructor pointer and the
    // actual arguments.
    descriptor->handler_arguments_mode_ = PASS_ARGUMENTS;
    descriptor->stack_parameter_count_ = eax;
    descriptor->register_param_count_ = 2;
    descriptor->register_params_ = registers_variable_args;
  }

  descriptor->hint_stack_parameter_count_ = constant_stack_parameter_count;
  descriptor->function_mode_ = JS_FUNCTION_STUB_MODE;
  descriptor->deoptimization_handler_ =
      Runtime::FunctionForId(Runtime::kHiddenInternalArrayConstructor)->entry;
}


void ArrayNoArgumentConstructorStub::InitializeInterfaceDescriptor(
    Isolate* isolate,
    CodeStubInterfaceDescriptor* descriptor) {
  InitializeArrayConstructorDescriptor(isolate, descriptor, 0);
}


void ArraySingleArgumentConstructorStub::InitializeInterfaceDescriptor(
    Isolate* isolate,
    CodeStubInterfaceDescriptor* descriptor) {
  InitializeArrayConstructorDescriptor(isolate, descriptor, 1);
}


void ArrayNArgumentsConstructorStub::InitializeInterfaceDescriptor(
    Isolate* isolate,
    CodeStubInterfaceDescriptor* descriptor) {
  InitializeArrayConstructorDescriptor(isolate, descriptor, -1);
}


void InternalArrayNoArgumentConstructorStub::InitializeInterfaceDescriptor(
    Isolate* isolate,
    CodeStubInterfaceDescriptor* descriptor) {
  InitializeInternalArrayConstructorDescriptor(isolate, descriptor, 0);
}


void InternalArraySingleArgumentConstructorStub::InitializeInterfaceDescriptor(
    Isolate* isolate,
    CodeStubInterfaceDescriptor* descriptor) {
  InitializeInternalArrayConstructorDescriptor(isolate, descriptor, 1);
}


void InternalArrayNArgumentsConstructorStub::InitializeInterfaceDescriptor(
    Isolate* isolate,
    CodeStubInterfaceDescriptor* descriptor) {
  InitializeInternalArrayConstructorDescriptor(isolate, descriptor, -1);
}


void CompareNilICStub::InitializeInterfaceDescriptor(
    Isolate* isolate,
    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { eax };
  descriptor->register_param_count_ = 1;
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ =
      FUNCTION_ADDR(CompareNilIC_Miss);
  descriptor->SetMissHandler(
      ExternalReference(IC_Utility(IC::kCompareNilIC_Miss), isolate));
}

void ToBooleanStub::InitializeInterfaceDescriptor(
    Isolate* isolate,
    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { eax };
  descriptor->register_param_count_ = 1;
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ =
      FUNCTION_ADDR(ToBooleanIC_Miss);
  descriptor->SetMissHandler(
      ExternalReference(IC_Utility(IC::kToBooleanIC_Miss), isolate));
}


void StoreGlobalStub::InitializeInterfaceDescriptor(
    Isolate* isolate,
    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { edx, ecx, eax };
  descriptor->register_param_count_ = 3;
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ =
      FUNCTION_ADDR(StoreIC_MissFromStubFailure);
}


void ElementsTransitionAndStoreStub::InitializeInterfaceDescriptor(
    Isolate* isolate,
    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { eax, ebx, ecx, edx };
  descriptor->register_param_count_ = 4;
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ =
      FUNCTION_ADDR(ElementsTransitionAndStoreIC_Miss);
}


void BinaryOpICStub::InitializeInterfaceDescriptor(
    Isolate* isolate,
    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { edx, eax };
  descriptor->register_param_count_ = 2;
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ = FUNCTION_ADDR(BinaryOpIC_Miss);
  descriptor->SetMissHandler(
      ExternalReference(IC_Utility(IC::kBinaryOpIC_Miss), isolate));
}


void BinaryOpWithAllocationSiteStub::InitializeInterfaceDescriptor(
    Isolate* isolate,
    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { ecx, edx, eax };
  descriptor->register_param_count_ = 3;
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ =
      FUNCTION_ADDR(BinaryOpIC_MissWithAllocationSite);
}


void StringAddStub::InitializeInterfaceDescriptor(
    Isolate* isolate,
    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { edx, eax };
  descriptor->register_param_count_ = 2;
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ =
      Runtime::FunctionForId(Runtime::kHiddenStringAdd)->entry;
}


void CallDescriptors::InitializeForIsolate(Isolate* isolate) {
  {
    CallInterfaceDescriptor* descriptor =
        isolate->call_descriptor(Isolate::ArgumentAdaptorCall);
    static Register registers[] = { edi,  // JSFunction
                                    esi,  // context
                                    eax,  // actual number of arguments
                                    ebx,  // expected number of arguments
    };
    static Representation representations[] = {
        Representation::Tagged(),     // JSFunction
        Representation::Tagged(),     // context
        Representation::Integer32(),  // actual number of arguments
        Representation::Integer32(),  // expected number of arguments
    };
    descriptor->register_param_count_ = 4;
    descriptor->register_params_ = registers;
    descriptor->param_representations_ = representations;
  }
  {
    CallInterfaceDescriptor* descriptor =
        isolate->call_descriptor(Isolate::KeyedCall);
    static Register registers[] = { esi,  // context
                                    ecx,  // key
    };
    static Representation representations[] = {
        Representation::Tagged(),  // context
        Representation::Tagged(),  // key
    };
    descriptor->register_param_count_ = 2;
    descriptor->register_params_ = registers;
    descriptor->param_representations_ = representations;
  }
  {
    CallInterfaceDescriptor* descriptor =
        isolate->call_descriptor(Isolate::NamedCall);
    static Register registers[] = { esi,  // context
                                    ecx,  // name
    };
    static Representation representations[] = {
        Representation::Tagged(),  // context
        Representation::Tagged(),  // name
    };
    descriptor->register_param_count_ = 2;
    descriptor->register_params_ = registers;
    descriptor->param_representations_ = representations;
  }
  {
    CallInterfaceDescriptor* descriptor =
        isolate->call_descriptor(Isolate::CallHandler);
    static Register registers[] = { esi,  // context
                                    edx,  // receiver
    };
    static Representation representations[] = {
        Representation::Tagged(),  // context
        Representation::Tagged(),  // receiver
    };
    descriptor->register_param_count_ = 2;
    descriptor->register_params_ = registers;
    descriptor->param_representations_ = representations;
  }
  {
    CallInterfaceDescriptor* descriptor =
        isolate->call_descriptor(Isolate::ApiFunctionCall);
    static Register registers[] = { eax,  // callee
                                    ebx,  // call_data
                                    ecx,  // holder
                                    edx,  // api_function_address
                                    esi,  // context
    };
    static Representation representations[] = {
        Representation::Tagged(),    // callee
        Representation::Tagged(),    // call_data
        Representation::Tagged(),    // holder
        Representation::External(),  // api_function_address
        Representation::Tagged(),    // context
    };
    descriptor->register_param_count_ = 5;
    descriptor->register_params_ = registers;
    descriptor->param_representations_ = representations;
  }
}


#define __ ACCESS_MASM(masm)


void HydrogenCodeStub::GenerateLightweightMiss(MacroAssembler* masm) {
  // Update the static counter each time a new code stub is generated.
  Isolate* isolate = masm->isolate();
  isolate->counters()->code_stubs()->Increment();

  CodeStubInterfaceDescriptor* descriptor = GetInterfaceDescriptor(isolate);
  int param_count = descriptor->register_param_count_;
  {
    // Call the runtime system in a fresh internal frame.
    FrameScope scope(masm, StackFrame::INTERNAL);
    ASSERT(descriptor->register_param_count_ == 0 ||
           eax.is(descriptor->register_params_[param_count - 1]));
    // Push arguments
    for (int i = 0; i < param_count; ++i) {
      __ push(descriptor->register_params_[i]);
    }
    ExternalReference miss = descriptor->miss_handler();
    __ CallExternalReference(miss, descriptor->register_param_count_);
  }

  __ ret(0);
}
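
// [Editor's note] The ASSERT above makes a convention explicit: when a
// hydrogen stub takes register parameters, the last one is expected in eax,
// which on ia32 is also the register in which the miss handler returns its
// result before the ret above hands it back to the caller.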


void StoreBufferOverflowStub::Generate(MacroAssembler* masm) {
  // We don't allow a GC during a store buffer overflow so there is no need to
  // store the registers in any particular way, but we do have to store and
  // restore them.
  __ pushad();
  if (save_doubles_ == kSaveFPRegs) {
    CpuFeatureScope scope(masm, SSE2);
    __ sub(esp, Immediate(kDoubleSize * XMMRegister::kNumRegisters));
    for (int i = 0; i < XMMRegister::kNumRegisters; i++) {
      XMMRegister reg = XMMRegister::from_code(i);
      __ movsd(Operand(esp, i * kDoubleSize), reg);
    }
  }
  const int argument_count = 1;

  AllowExternalCallThatCantCauseGC scope(masm);
  __ PrepareCallCFunction(argument_count, ecx);
  __ mov(Operand(esp, 0 * kPointerSize),
         Immediate(ExternalReference::isolate_address(masm->isolate())));
  __ CallCFunction(
      ExternalReference::store_buffer_overflow_function(masm->isolate()),
      argument_count);
  if (save_doubles_ == kSaveFPRegs) {
    CpuFeatureScope scope(masm, SSE2);
    for (int i = 0; i < XMMRegister::kNumRegisters; i++) {
      XMMRegister reg = XMMRegister::from_code(i);
      __ movsd(reg, Operand(esp, i * kDoubleSize));
    }
    __ add(esp, Immediate(kDoubleSize * XMMRegister::kNumRegisters));
  }
  __ popad();
  __ ret(0);
}
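
// [Editor's note] The C function reached through
// ExternalReference::store_buffer_overflow_function is the static helper
// StoreBuffer::StoreBufferOverflow(Isolate*), which is why exactly one
// argument slot (the isolate address) is prepared above.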


class FloatingPointHelper : public AllStatic {
 public:
  enum ArgLocation {
    ARGS_ON_STACK,
    ARGS_IN_REGISTERS
  };

  // Code pattern for loading a floating point value. Input value must
  // be either a smi or a heap number object (fp value). Requirements:
  // operand in register number. Returns operand as floating point number
  // on FPU stack.
  static void LoadFloatOperand(MacroAssembler* masm, Register number);

  // Test if operands are smi or number objects (fp). Requirements:
  // operand_1 in eax, operand_2 in edx; falls through on float
  // operands, jumps to the non_float label otherwise.
  static void CheckFloatOperands(MacroAssembler* masm,
                                 Label* non_float,
                                 Register scratch);

  // Test if operands are numbers (smi or HeapNumber objects), and load
  // them into xmm0 and xmm1 if they are. Jump to label not_numbers if
  // either operand is not a number. Operands are in edx and eax.
  // Leaves operands unchanged.
  static void LoadSSE2Operands(MacroAssembler* masm, Label* not_numbers);
};

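// [Editor's note] Typical use of the contract above (illustrative only): a
// binary-op stub that has its operands in edx and eax can write
//
//   Label not_numbers;
//   FloatingPointHelper::LoadSSE2Operands(masm, &not_numbers);
//   __ addsd(xmm0, xmm1);  // both operands are now unboxed in xmm0/xmm1
//
// and bind a slow path at not_numbers.
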

void DoubleToIStub::Generate(MacroAssembler* masm) {
  Register input_reg = this->source();
  Register final_result_reg = this->destination();
  ASSERT(is_truncating());

  Label check_negative, process_64_bits, done, done_no_stash;

  int double_offset = offset();

  // Account for return address and saved regs if input is esp.
  if (input_reg.is(esp)) double_offset += 3 * kPointerSize;

  MemOperand mantissa_operand(MemOperand(input_reg, double_offset));
  MemOperand exponent_operand(MemOperand(input_reg,
                                         double_offset + kDoubleSize / 2));

  Register scratch1;
  {
    Register scratch_candidates[3] = { ebx, edx, edi };
    for (int i = 0; i < 3; i++) {
      scratch1 = scratch_candidates[i];
      if (!final_result_reg.is(scratch1) && !input_reg.is(scratch1)) break;
    }
  }
  // Since we must use ecx for shifts below, use some other register (eax)
  // to calculate the result if ecx is the requested return register.
  Register result_reg = final_result_reg.is(ecx) ? eax : final_result_reg;
  // Save ecx if it isn't the return register and therefore volatile, or if it
  // is the return register, then save the temp register we use in its stead
  // for the result.
  Register save_reg = final_result_reg.is(ecx) ? eax : ecx;
  __ push(scratch1);
  __ push(save_reg);

  bool stash_exponent_copy = !input_reg.is(esp);
  __ mov(scratch1, mantissa_operand);
  if (CpuFeatures::IsSupported(SSE3)) {
    CpuFeatureScope scope(masm, SSE3);
    // Load x87 register with heap number.
    __ fld_d(mantissa_operand);
  }
  __ mov(ecx, exponent_operand);
  if (stash_exponent_copy) __ push(ecx);

  __ and_(ecx, HeapNumber::kExponentMask);
  __ shr(ecx, HeapNumber::kExponentShift);
  __ lea(result_reg, MemOperand(ecx, -HeapNumber::kExponentBias));
  __ cmp(result_reg, Immediate(HeapNumber::kMantissaBits));
  __ j(below, &process_64_bits);

  // Result is entirely in lower 32-bits of mantissa
  int delta = HeapNumber::kExponentBias + Double::kPhysicalSignificandSize;
  if (CpuFeatures::IsSupported(SSE3)) {
    __ fstp(0);
  }
  __ sub(ecx, Immediate(delta));
  __ xor_(result_reg, result_reg);
  __ cmp(ecx, Immediate(31));
  __ j(above, &done);
  __ shl_cl(scratch1);
  __ jmp(&check_negative);

  __ bind(&process_64_bits);
  if (CpuFeatures::IsSupported(SSE3)) {
    CpuFeatureScope scope(masm, SSE3);
    if (stash_exponent_copy) {
      // Already a copy of the exponent on the stack, overwrite it.
      STATIC_ASSERT(kDoubleSize == 2 * kPointerSize);
      __ sub(esp, Immediate(kDoubleSize / 2));
    } else {
      // Reserve space for 64 bit answer.
      __ sub(esp, Immediate(kDoubleSize));  // Nolint.
    }
    // Do conversion, which cannot fail because we checked the exponent.
    __ fisttp_d(Operand(esp, 0));
    __ mov(result_reg, Operand(esp, 0));  // Load low word of answer as result
    __ add(esp, Immediate(kDoubleSize));
    __ jmp(&done_no_stash);
  } else {
    // Result must be extracted from shifted 32-bit mantissa
    __ sub(ecx, Immediate(delta));
    __ neg(ecx);
    if (stash_exponent_copy) {
      __ mov(result_reg, MemOperand(esp, 0));
    } else {
      __ mov(result_reg, exponent_operand);
    }
    __ and_(result_reg,
            Immediate(static_cast<uint32_t>(Double::kSignificandMask >> 32)));
    __ add(result_reg,
           Immediate(static_cast<uint32_t>(Double::kHiddenBit >> 32)));
    __ shrd(result_reg, scratch1);
    __ shr_cl(result_reg);
    __ test(ecx, Immediate(32));
    if (CpuFeatures::IsSupported(CMOV)) {
      CpuFeatureScope use_cmov(masm, CMOV);
      __ cmov(not_equal, scratch1, result_reg);
    } else {
      Label skip_mov;
      __ j(equal, &skip_mov, Label::kNear);
      __ mov(scratch1, result_reg);
      __ bind(&skip_mov);
    }
  }

  // If the double was negative, negate the integer result.
  __ bind(&check_negative);
  __ mov(result_reg, scratch1);
  __ neg(result_reg);
  if (stash_exponent_copy) {
    __ cmp(MemOperand(esp, 0), Immediate(0));
  } else {
    __ cmp(exponent_operand, Immediate(0));
  }
  if (CpuFeatures::IsSupported(CMOV)) {
    CpuFeatureScope use_cmov(masm, CMOV);
    __ cmov(greater, result_reg, scratch1);
  } else {
    Label skip_mov;
    __ j(less_equal, &skip_mov, Label::kNear);
    __ mov(result_reg, scratch1);
    __ bind(&skip_mov);
  }

  // Restore registers
  __ bind(&done);
  if (stash_exponent_copy) {
    __ add(esp, Immediate(kDoubleSize / 2));
  }
  __ bind(&done_no_stash);
  if (!final_result_reg.is(result_reg)) {
    ASSERT(final_result_reg.is(ecx));
    __ mov(final_result_reg, result_reg);
  }
  __ pop(save_reg);
  __ pop(scratch1);
  __ ret(0);
}

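// [Editor's sketch] What the non-SSE3 path above computes, written as
// portable C (illustrative only; the helper name is this sketch's, not V8's,
// and it needs <stdint.h> and <string.h>):
//
//   int32_t DoubleToInt32Sketch(double input) {
//     uint64_t bits;
//     memcpy(&bits, &input, sizeof bits);              // raw IEEE-754 bits
//     int shift = (int)((bits >> 52) & 0x7FF) - 1075;  // exponent of bit 0
//     uint64_t significand = (bits & 0xFFFFFFFFFFFFFull) | (1ull << 52);
//     uint32_t magnitude;
//     if (shift >= 0)  // integer bits need a left shift (modulo 2^32)
//       magnitude = shift > 31 ? 0 : (uint32_t)significand << shift;
//     else             // fractional bits are shifted out to the right
//       magnitude = shift <= -53 ? 0 : (uint32_t)(significand >> -shift);
//     return (int64_t)bits < 0 ? -(int32_t)magnitude : (int32_t)magnitude;
//   }
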

void FloatingPointHelper::LoadFloatOperand(MacroAssembler* masm,
                                           Register number) {
  Label load_smi, done;

  __ JumpIfSmi(number, &load_smi, Label::kNear);
  __ fld_d(FieldOperand(number, HeapNumber::kValueOffset));
  __ jmp(&done, Label::kNear);

  __ bind(&load_smi);
  __ SmiUntag(number);
  __ push(number);
  __ fild_s(Operand(esp, 0));
  __ pop(number);

  __ bind(&done);
}

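// [Editor's note] The smi path above untags the integer and bounces it
// through the stack because fild_s can only load an integer operand from
// memory; the pop restores esp and leaves the untagged value in the
// register.
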

void FloatingPointHelper::LoadSSE2Operands(MacroAssembler* masm,
                                           Label* not_numbers) {
  Label load_smi_edx, load_eax, load_smi_eax, load_float_eax, done;
  // Load operand in edx into xmm0, or branch to not_numbers.
  __ JumpIfSmi(edx, &load_smi_edx, Label::kNear);
  Factory* factory = masm->isolate()->factory();
  __ cmp(FieldOperand(edx, HeapObject::kMapOffset), factory->heap_number_map());
  __ j(not_equal, not_numbers);  // Argument in edx is not a number.
  __ movsd(xmm0, FieldOperand(edx, HeapNumber::kValueOffset));
  __ bind(&load_eax);
  // Load operand in eax into xmm1, or branch to not_numbers.
  __ JumpIfSmi(eax, &load_smi_eax, Label::kNear);
  __ cmp(FieldOperand(eax, HeapObject::kMapOffset), factory->heap_number_map());
  __ j(equal, &load_float_eax, Label::kNear);
  __ jmp(not_numbers);  // Argument in eax is not a number.
  __ bind(&load_smi_edx);
  __ SmiUntag(edx);  // Untag smi before converting to float.
  __ Cvtsi2sd(xmm0, edx);
  __ SmiTag(edx);  // Retag smi for heap number overwriting test.
  __ jmp(&load_eax);
  __ bind(&load_smi_eax);
  __ SmiUntag(eax);  // Untag smi before converting to float.
  __ Cvtsi2sd(xmm1, eax);
  __ SmiTag(eax);  // Retag smi for heap number overwriting test.
  __ jmp(&done, Label::kNear);
  __ bind(&load_float_eax);
  __ movsd(xmm1, FieldOperand(eax, HeapNumber::kValueOffset));
  __ bind(&done);
}


void FloatingPointHelper::CheckFloatOperands(MacroAssembler* masm,
                                             Label* non_float,
                                             Register scratch) {
  Label test_other, done;
  // Test if both operands are floats or smi -> scratch=k_is_float;
  // Otherwise scratch = k_not_float.
  __ JumpIfSmi(edx, &test_other, Label::kNear);
  __ mov(scratch, FieldOperand(edx, HeapObject::kMapOffset));
  Factory* factory = masm->isolate()->factory();
  __ cmp(scratch, factory->heap_number_map());
  __ j(not_equal, non_float);  // Argument in edx is not a number -> NaN.

  __ bind(&test_other);
  __ JumpIfSmi(eax, &done, Label::kNear);
  __ mov(scratch, FieldOperand(eax, HeapObject::kMapOffset));
  __ cmp(scratch, factory->heap_number_map());
  __ j(not_equal, non_float);  // Argument in eax is not a number -> NaN.

  // Fall-through: Both operands are numbers.
  __ bind(&done);
}


void MathPowStub::Generate(MacroAssembler* masm) {
  CpuFeatureScope use_sse2(masm, SSE2);
  Factory* factory = masm->isolate()->factory();
  const Register exponent = eax;
  const Register base = edx;
  const Register scratch = ecx;
  const XMMRegister double_result = xmm3;
  const XMMRegister double_base = xmm2;
  const XMMRegister double_exponent = xmm1;
  const XMMRegister double_scratch = xmm4;

  Label call_runtime, done, exponent_not_smi, int_exponent;

  // Save 1 in double_result - we need this several times later on.
  __ mov(scratch, Immediate(1));
  __ Cvtsi2sd(double_result, scratch);

  if (exponent_type_ == ON_STACK) {
    Label base_is_smi, unpack_exponent;
    // The exponent and base are supplied as arguments on the stack.
    // This can only happen if the stub is called from non-optimized code.
    // Load input parameters from stack.
    __ mov(base, Operand(esp, 2 * kPointerSize));
    __ mov(exponent, Operand(esp, 1 * kPointerSize));

    __ JumpIfSmi(base, &base_is_smi, Label::kNear);
    __ cmp(FieldOperand(base, HeapObject::kMapOffset),
           factory->heap_number_map());
    __ j(not_equal, &call_runtime);

    __ movsd(double_base, FieldOperand(base, HeapNumber::kValueOffset));
    __ jmp(&unpack_exponent, Label::kNear);

    __ bind(&base_is_smi);
    __ SmiUntag(base);
    __ Cvtsi2sd(double_base, base);

    __ bind(&unpack_exponent);
    __ JumpIfNotSmi(exponent, &exponent_not_smi, Label::kNear);
    __ SmiUntag(exponent);
    __ jmp(&int_exponent);

    __ bind(&exponent_not_smi);
    __ cmp(FieldOperand(exponent, HeapObject::kMapOffset),
           factory->heap_number_map());
    __ j(not_equal, &call_runtime);
    __ movsd(double_exponent,
             FieldOperand(exponent, HeapNumber::kValueOffset));
  } else if (exponent_type_ == TAGGED) {
    __ JumpIfNotSmi(exponent, &exponent_not_smi, Label::kNear);
    __ SmiUntag(exponent);
    __ jmp(&int_exponent);

    __ bind(&exponent_not_smi);
    __ movsd(double_exponent,
             FieldOperand(exponent, HeapNumber::kValueOffset));
  }

  if (exponent_type_ != INTEGER) {
    Label fast_power, try_arithmetic_simplification;
    __ DoubleToI(exponent, double_exponent, double_scratch,
                 TREAT_MINUS_ZERO_AS_ZERO, &try_arithmetic_simplification);
    __ jmp(&int_exponent);

    __ bind(&try_arithmetic_simplification);
    // Skip to runtime if possibly NaN (indicated by the indefinite integer).
    __ cvttsd2si(exponent, Operand(double_exponent));
    __ cmp(exponent, Immediate(0x1));
    __ j(overflow, &call_runtime);

    if (exponent_type_ == ON_STACK) {
      // Detect square root case. Crankshaft detects constant +/-0.5 at
      // compile time and uses DoMathPowHalf instead. We then skip this check
      // for non-constant cases of +/-0.5 as these hardly occur.
      Label continue_sqrt, continue_rsqrt, not_plus_half;
      // Test for 0.5.
      // Load double_scratch with 0.5.
      __ mov(scratch, Immediate(0x3F000000u));
      __ movd(double_scratch, scratch);
      __ cvtss2sd(double_scratch, double_scratch);
      // Already ruled out NaNs for exponent.
      __ ucomisd(double_scratch, double_exponent);
      __ j(not_equal, &not_plus_half, Label::kNear);

      // Calculates square root of base. Check for the special case of
      // Math.pow(-Infinity, 0.5) == Infinity (ECMA spec, 15.8.2.13).
      // According to IEEE-754, single-precision -Infinity has the highest
      // 9 bits set and the lowest 23 bits cleared.
      __ mov(scratch, 0xFF800000u);
      __ movd(double_scratch, scratch);
      __ cvtss2sd(double_scratch, double_scratch);
      __ ucomisd(double_base, double_scratch);
      // Comparing -Infinity with NaN results in "unordered", which sets the
      // zero flag as if both were equal. However, it also sets the carry flag.
      __ j(not_equal, &continue_sqrt, Label::kNear);
      __ j(carry, &continue_sqrt, Label::kNear);

      // Set result to Infinity in the special case.
      __ xorps(double_result, double_result);
      __ subsd(double_result, double_scratch);
      __ jmp(&done);

      __ bind(&continue_sqrt);
      // sqrtsd returns -0 when input is -0. ECMA spec requires +0.
      __ xorps(double_scratch, double_scratch);
      __ addsd(double_scratch, double_base);  // Convert -0 to +0.
      __ sqrtsd(double_result, double_scratch);
      __ jmp(&done);

      // Test for -0.5.
      __ bind(&not_plus_half);
      // Load double_exponent with -0.5 by subtracting 1.
      __ subsd(double_scratch, double_result);
      // Already ruled out NaNs for exponent.
      __ ucomisd(double_scratch, double_exponent);
      __ j(not_equal, &fast_power, Label::kNear);

      // Calculates reciprocal of square root of base. Check for the special
      // case of Math.pow(-Infinity, -0.5) == 0 (ECMA spec, 15.8.2.13).
      // According to IEEE-754, single-precision -Infinity has the highest
      // 9 bits set and the lowest 23 bits cleared.
      __ mov(scratch, 0xFF800000u);
      __ movd(double_scratch, scratch);
      __ cvtss2sd(double_scratch, double_scratch);
      __ ucomisd(double_base, double_scratch);
      // Comparing -Infinity with NaN results in "unordered", which sets the
      // zero flag as if both were equal. However, it also sets the carry flag.
      __ j(not_equal, &continue_rsqrt, Label::kNear);
      __ j(carry, &continue_rsqrt, Label::kNear);

      // Set result to 0 in the special case.
      __ xorps(double_result, double_result);
      __ jmp(&done);

      __ bind(&continue_rsqrt);
      // sqrtsd returns -0 when input is -0. ECMA spec requires +0.
      __ xorps(double_exponent, double_exponent);
      __ addsd(double_exponent, double_base);  // Convert -0 to +0.
      __ sqrtsd(double_exponent, double_exponent);
      __ divsd(double_result, double_exponent);
      __ jmp(&done);
    }

    // Using FPU instructions to calculate power.
    Label fast_power_failed;
    __ bind(&fast_power);
    __ fnclex();  // Clear flags to catch exceptions later.
    // Transfer (B)ase and (E)xponent onto the FPU register stack.
    __ sub(esp, Immediate(kDoubleSize));
    __ movsd(Operand(esp, 0), double_exponent);
    __ fld_d(Operand(esp, 0));  // E
    __ movsd(Operand(esp, 0), double_base);
    __ fld_d(Operand(esp, 0));  // B, E

    // Exponent is in st(1) and base is in st(0)
    // B ^ E = (2^(E * log2(B)) - 1) + 1 = (2^X - 1) + 1 for X = E * log2(B)
    // FYL2X calculates st(1) * log2(st(0))
    __ fyl2x();    // X
    __ fld(0);     // X, X
    __ frndint();  // rnd(X), X
    __ fsub(1);    // rnd(X), X-rnd(X)
    __ fxch(1);    // X - rnd(X), rnd(X)
    // F2XM1 calculates 2^st(0) - 1 for -1 < st(0) < 1
    __ f2xm1();    // 2^(X-rnd(X)) - 1, rnd(X)
    __ fld1();     // 1, 2^(X-rnd(X)) - 1, rnd(X)
    __ faddp(1);   // 2^(X-rnd(X)), rnd(X)
    // FSCALE calculates st(0) * 2^st(1)
    __ fscale();   // 2^X, rnd(X)
    __ fstp(1);    // 2^X
    // Bail out to runtime in case of exceptions in the status word.
    __ fnstsw_ax();
    __ test_b(eax, 0x5F);  // We check for all but precision exception.
    __ j(not_zero, &fast_power_failed, Label::kNear);
    __ fstp_d(Operand(esp, 0));
    __ movsd(double_result, Operand(esp, 0));
    __ add(esp, Immediate(kDoubleSize));
    __ jmp(&done);

    __ bind(&fast_power_failed);
    __ fninit();
    __ add(esp, Immediate(kDoubleSize));
    __ jmp(&call_runtime);
  }
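
  // [Editor's sketch] The fast_power x87 sequence evaluates B^E as
  // 2^(E*log2(B)), splitting X = E*log2(B) into rnd(X) + (X - rnd(X))
  // because F2XM1 is only defined for |st(0)| < 1. As portable C
  // (illustrative only, not part of V8; uses C99 <math.h>):
  //
  //   double FastPowSketch(double b, double e) {
  //     double x = e * log2(b);    // fyl2x
  //     double r = nearbyint(x);   // frndint
  //     double f = exp2(x - r);    // f2xm1, fld1, faddp
  //     return ldexp(f, (int)r);   // fscale, i.e. f * 2^rnd(X)
  //   }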

  // Calculate power with integer exponent.
  __ bind(&int_exponent);
  const XMMRegister double_scratch2 = double_exponent;
  __ mov(scratch, exponent);  // Back up exponent.
  __ movsd(double_scratch, double_base);  // Back up base.
  __ movsd(double_scratch2, double_result);  // Load double_exponent with 1.

  // Get absolute value of exponent.
  Label no_neg, while_true, while_false;
  __ test(scratch, scratch);
  __ j(positive, &no_neg, Label::kNear);
  __ neg(scratch);
  __ bind(&no_neg);

  __ j(zero, &while_false, Label::kNear);
  __ shr(scratch, 1);
  // Above condition means CF==0 && ZF==0. This means that the
  // bit that has been shifted out is 0 and the result is not 0.
  __ j(above, &while_true, Label::kNear);
  __ movsd(double_result, double_scratch);
  __ j(zero, &while_false, Label::kNear);

  __ bind(&while_true);
  __ shr(scratch, 1);
  __ mulsd(double_scratch, double_scratch);
  __ j(above, &while_true, Label::kNear);
  __ mulsd(double_result, double_scratch);
  __ j(not_zero, &while_true);

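  // [Editor's sketch] The loop above is binary exponentiation
  // (square-and-multiply) on the absolute value of the exponent; in C terms
  // (illustrative only):
  //
  //   double IntPowSketch(double base, unsigned n) {
  //     double result = 1.0;
  //     for (double square = base; n != 0; n >>= 1) {
  //       if (n & 1) result *= square;
  //       square *= square;
  //     }
  //     return result;  // negative exponents take 1/result, as done below
  //   }
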
  __ bind(&while_false);
  // scratch has the original value of the exponent - if the exponent is
  // negative, return 1/result.
  __ test(exponent, exponent);
  __ j(positive, &done);
  __ divsd(double_scratch2, double_result);
  __ movsd(double_result, double_scratch2);
  // Test whether result is zero. Bail out to check for subnormal result.
  // Due to subnormals, x^-y == (1/x)^y does not hold in all cases.
  __ xorps(double_scratch2, double_scratch2);
  __ ucomisd(double_scratch2, double_result);  // Result cannot be NaN.
  // double_exponent aliased as double_scratch2 has already been overwritten
  // and may not have contained the exponent value in the first place when the
  // exponent is a smi. We reset it with exponent value before bailing out.
  __ j(not_equal, &done);
  __ Cvtsi2sd(double_exponent, exponent);

  // Returning or bailing out.
  Counters* counters = masm->isolate()->counters();
  if (exponent_type_ == ON_STACK) {
    // The arguments are still on the stack.
    __ bind(&call_runtime);
    __ TailCallRuntime(Runtime::kMath_pow_cfunction, 2, 1);

    // The stub is called from non-optimized code, which expects the result
    // as heap number in exponent.
    __ bind(&done);
    __ AllocateHeapNumber(eax, scratch, base, &call_runtime);
    __ movsd(FieldOperand(eax, HeapNumber::kValueOffset), double_result);
    __ IncrementCounter(counters->math_pow(), 1);
    __ ret(2 * kPointerSize);
  } else {
    __ bind(&call_runtime);
    {
      AllowExternalCallThatCantCauseGC scope(masm);
      __ PrepareCallCFunction(4, scratch);
      __ movsd(Operand(esp, 0 * kDoubleSize), double_base);
      __ movsd(Operand(esp, 1 * kDoubleSize), double_exponent);
      __ CallCFunction(
          ExternalReference::power_double_double_function(masm->isolate()), 4);
    }
    // Return value is in st(0) on ia32.
    // Store it into the (fixed) result register.
    __ sub(esp, Immediate(kDoubleSize));
    __ fstp_d(Operand(esp, 0));
    __ movsd(double_result, Operand(esp, 0));
    __ add(esp, Immediate(kDoubleSize));

    __ bind(&done);
    __ IncrementCounter(counters->math_pow(), 1);
    __ ret(0);
  }
}


void FunctionPrototypeStub::Generate(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- ecx    : name
  //  -- edx    : receiver
  //  -- esp[0] : return address
  // -----------------------------------
  Label miss;

  if (kind() == Code::KEYED_LOAD_IC) {
    __ cmp(ecx, Immediate(masm->isolate()->factory()->prototype_string()));
    __ j(not_equal, &miss);
  }

  StubCompiler::GenerateLoadFunctionPrototype(masm, edx, eax, ebx, &miss);
  __ bind(&miss);
  StubCompiler::TailCallBuiltin(
      masm, BaseLoadStoreStubCompiler::MissBuiltin(kind()));
}


void ArgumentsAccessStub::GenerateReadElement(MacroAssembler* masm) {
  // The key is in edx and the parameter count is in eax.

  // The displacement is used for skipping the frame pointer on the
  // stack. It is the offset of the last parameter (if any) relative
  // to the frame pointer.
  static const int kDisplacement = 1 * kPointerSize;

  // Check that the key is a smi.
  Label slow;
  __ JumpIfNotSmi(edx, &slow, Label::kNear);

  // Check if the calling frame is an arguments adaptor frame.
  Label adaptor;
  __ mov(ebx, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
  __ mov(ecx, Operand(ebx, StandardFrameConstants::kContextOffset));
  __ cmp(ecx, Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
  __ j(equal, &adaptor, Label::kNear);

  // Check index against formal parameters count limit passed in
  // through register eax. Use unsigned comparison to get negative
  // check for free.
  __ cmp(edx, eax);
  __ j(above_equal, &slow, Label::kNear);

  // Read the argument from the stack and return it.
  STATIC_ASSERT(kSmiTagSize == 1);
  STATIC_ASSERT(kSmiTag == 0);  // Shifting code depends on these.
  __ lea(ebx, Operand(ebp, eax, times_2, 0));
  __ neg(edx);
  __ mov(eax, Operand(ebx, edx, times_2, kDisplacement));
  __ ret(0);

  // Arguments adaptor case: Check index against actual arguments
  // limit found in the arguments adaptor frame. Use unsigned
  // comparison to get negative check for free.
  __ bind(&adaptor);
  __ mov(ecx, Operand(ebx, ArgumentsAdaptorFrameConstants::kLengthOffset));
  __ cmp(edx, ecx);
  __ j(above_equal, &slow, Label::kNear);

  // Read the argument from the stack and return it.
  STATIC_ASSERT(kSmiTagSize == 1);
  STATIC_ASSERT(kSmiTag == 0);  // Shifting code depends on these.
  __ lea(ebx, Operand(ebx, ecx, times_2, 0));
  __ neg(edx);
  __ mov(eax, Operand(ebx, edx, times_2, kDisplacement));
  __ ret(0);

  // Slow-case: Handle non-smi or out-of-bounds access to arguments
  // by calling the runtime system.
  __ bind(&slow);
  __ pop(ebx);  // Return address.
  __ push(edx);
  __ push(ebx);
  __ TailCallRuntime(Runtime::kGetArgumentsProperty, 1, 1);
}

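// [Editor's note] On the addressing arithmetic above: eax/ecx (argument
// count) and edx (key) hold smi-tagged values, i.e. value << 1, so a
// times_2 scale multiplies the untagged value by kPointerSize (4). The
// loaded slot is therefore
//
//   frame_pointer + 4 * count + kDisplacement - 4 * key
//
// i.e. the key-th argument, counting down from the last parameter.
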

void ArgumentsAccessStub::GenerateNewSloppySlow(MacroAssembler* masm) {
  // esp[0]  : return address
  // esp[4]  : number of parameters
  // esp[8]  : receiver displacement
  // esp[12] : function

  // Check if the calling frame is an arguments adaptor frame.
  Label runtime;
  __ mov(edx, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
  __ mov(ecx, Operand(edx, StandardFrameConstants::kContextOffset));
  __ cmp(ecx, Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
  __ j(not_equal, &runtime, Label::kNear);

  // Patch the arguments.length and the parameters pointer.
  __ mov(ecx, Operand(edx, ArgumentsAdaptorFrameConstants::kLengthOffset));
  __ mov(Operand(esp, 1 * kPointerSize), ecx);
  __ lea(edx, Operand(edx, ecx, times_2,
                      StandardFrameConstants::kCallerSPOffset));
  __ mov(Operand(esp, 2 * kPointerSize), edx);

  __ bind(&runtime);
  __ TailCallRuntime(Runtime::kHiddenNewArgumentsFast, 3, 1);
}


void ArgumentsAccessStub::GenerateNewSloppyFast(MacroAssembler* masm) {
  Isolate* isolate = masm->isolate();

  // esp[0]  : return address
  // esp[4]  : number of parameters (tagged)
  // esp[8]  : receiver displacement
  // esp[12] : function

  // ebx = parameter count (tagged)
  __ mov(ebx, Operand(esp, 1 * kPointerSize));

  // Check if the calling frame is an arguments adaptor frame.
  // TODO(rossberg): Factor out some of the bits that are shared with the other
  // Generate* functions.
  Label runtime;
  Label adaptor_frame, try_allocate;
  __ mov(edx, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
  __ mov(ecx, Operand(edx, StandardFrameConstants::kContextOffset));
  __ cmp(ecx, Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
  __ j(equal, &adaptor_frame, Label::kNear);

  // No adaptor, parameter count = argument count.
  __ mov(ecx, ebx);
  __ jmp(&try_allocate, Label::kNear);

  // We have an adaptor frame. Patch the parameters pointer.
  __ bind(&adaptor_frame);
  __ mov(ecx, Operand(edx, ArgumentsAdaptorFrameConstants::kLengthOffset));
  __ lea(edx, Operand(edx, ecx, times_2,
                      StandardFrameConstants::kCallerSPOffset));
  __ mov(Operand(esp, 2 * kPointerSize), edx);

  // ebx = parameter count (tagged)
  // ecx = argument count (tagged)
  // esp[4] = parameter count (tagged)
  // esp[8] = address of receiver argument
  // Compute the mapped parameter count = min(ebx, ecx) in ebx.
  __ cmp(ebx, ecx);
  __ j(less_equal, &try_allocate, Label::kNear);
  __ mov(ebx, ecx);

  __ bind(&try_allocate);

  // Save mapped parameter count.
  __ push(ebx);

  // Compute the sizes of backing store, parameter map, and arguments object.
  // 1. Parameter map, has 2 extra words containing context and backing store.
  const int kParameterMapHeaderSize =
      FixedArray::kHeaderSize + 2 * kPointerSize;
  Label no_parameter_map;
  __ test(ebx, ebx);
  __ j(zero, &no_parameter_map, Label::kNear);
  __ lea(ebx, Operand(ebx, times_2, kParameterMapHeaderSize));
  __ bind(&no_parameter_map);

  // 2. Backing store.
  __ lea(ebx, Operand(ebx, ecx, times_2, FixedArray::kHeaderSize));

  // 3. Arguments object.
  __ add(ebx, Immediate(Heap::kSloppyArgumentsObjectSize));

  // Do the allocation of all three objects in one go.
  __ Allocate(ebx, eax, edx, edi, &runtime, TAG_OBJECT);

  // eax = address of new object(s) (tagged)
  // ecx = argument count (tagged)
  // esp[0] = mapped parameter count (tagged)
  // esp[8] = parameter count (tagged)
  // esp[12] = address of receiver argument
  // Get the arguments boilerplate from the current native context into edi.
  Label has_mapped_parameters, copy;
  __ mov(edi, Operand(esi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
  __ mov(edi, FieldOperand(edi, GlobalObject::kNativeContextOffset));
  __ mov(ebx, Operand(esp, 0 * kPointerSize));
  __ test(ebx, ebx);
  __ j(not_zero, &has_mapped_parameters, Label::kNear);
  __ mov(edi, Operand(edi,
      Context::SlotOffset(Context::SLOPPY_ARGUMENTS_BOILERPLATE_INDEX)));
  __ jmp(&copy, Label::kNear);

  __ bind(&has_mapped_parameters);
  __ mov(edi, Operand(edi,
      Context::SlotOffset(Context::ALIASED_ARGUMENTS_BOILERPLATE_INDEX)));
  __ bind(&copy);

  // eax = address of new object (tagged)
  // ebx = mapped parameter count (tagged)
  // ecx = argument count (tagged)
  // edi = address of boilerplate object (tagged)
  // esp[0] = mapped parameter count (tagged)
  // esp[8] = parameter count (tagged)
  // esp[12] = address of receiver argument
  // Copy the JS object part.
  for (int i = 0; i < JSObject::kHeaderSize; i += kPointerSize) {
    __ mov(edx, FieldOperand(edi, i));
    __ mov(FieldOperand(eax, i), edx);
  }

  // Set up the callee in-object property.
  STATIC_ASSERT(Heap::kArgumentsCalleeIndex == 1);
  __ mov(edx, Operand(esp, 4 * kPointerSize));
  __ mov(FieldOperand(eax, JSObject::kHeaderSize +
                      Heap::kArgumentsCalleeIndex * kPointerSize),
         edx);

  // Use the length (smi tagged) and set that as an in-object property too.
  STATIC_ASSERT(Heap::kArgumentsLengthIndex == 0);
  __ mov(FieldOperand(eax, JSObject::kHeaderSize +
                      Heap::kArgumentsLengthIndex * kPointerSize),
         ecx);

  // Set up the elements pointer in the allocated arguments object.
  // If we allocated a parameter map, edi will point there, otherwise to the
  // backing store.
  __ lea(edi, Operand(eax, Heap::kSloppyArgumentsObjectSize));
  __ mov(FieldOperand(eax, JSObject::kElementsOffset), edi);

  // eax = address of new object (tagged)
  // ebx = mapped parameter count (tagged)
  // ecx = argument count (tagged)
  // edi = address of parameter map or backing store (tagged)
  // esp[0] = mapped parameter count (tagged)
  // esp[8] = parameter count (tagged)
  // esp[12] = address of receiver argument
  // Free a register.
  __ push(eax);

  // Initialize parameter map. If there are no mapped arguments, we're done.
  Label skip_parameter_map;
  __ test(ebx, ebx);
  __ j(zero, &skip_parameter_map);

  __ mov(FieldOperand(edi, FixedArray::kMapOffset),
         Immediate(isolate->factory()->sloppy_arguments_elements_map()));
  __ lea(eax, Operand(ebx, reinterpret_cast<intptr_t>(Smi::FromInt(2))));
  __ mov(FieldOperand(edi, FixedArray::kLengthOffset), eax);
  __ mov(FieldOperand(edi, FixedArray::kHeaderSize + 0 * kPointerSize), esi);
  __ lea(eax, Operand(edi, ebx, times_2, kParameterMapHeaderSize));
  __ mov(FieldOperand(edi, FixedArray::kHeaderSize + 1 * kPointerSize), eax);

  // Copy the parameter slots and the holes in the arguments.
  // We need to fill in mapped_parameter_count slots. They index the context,
  // where parameters are stored in reverse order, at
  //   MIN_CONTEXT_SLOTS .. MIN_CONTEXT_SLOTS+parameter_count-1
  // The mapped parameter thus need to get indices
  //   MIN_CONTEXT_SLOTS+parameter_count-1 ..
  //       MIN_CONTEXT_SLOTS+parameter_count-mapped_parameter_count
  // We loop from right to left.
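  // [Editor's example] With parameter_count == 3 and mapped_parameter_count
  // == 2, the loop below stores the smi indices MIN_CONTEXT_SLOTS+2 and
  // MIN_CONTEXT_SLOTS+1 into the parameter map and the hole into the two
  // corresponding backing-store slots.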
  Label parameters_loop, parameters_test;
  __ push(ecx);
  __ mov(eax, Operand(esp, 2 * kPointerSize));
  __ mov(ebx, Immediate(Smi::FromInt(Context::MIN_CONTEXT_SLOTS)));
  __ add(ebx, Operand(esp, 4 * kPointerSize));
  __ sub(ebx, eax);
  __ mov(ecx, isolate->factory()->the_hole_value());
  __ mov(edx, edi);
  __ lea(edi, Operand(edi, eax, times_2, kParameterMapHeaderSize));
  // eax = loop variable (tagged)
  // ebx = mapping index (tagged)
  // ecx = the hole value
  // edx = address of parameter map (tagged)
  // edi = address of backing store (tagged)
  // esp[0] = argument count (tagged)
  // esp[4] = address of new object (tagged)
  // esp[8] = mapped parameter count (tagged)
  // esp[16] = parameter count (tagged)
  // esp[20] = address of receiver argument
  __ jmp(&parameters_test, Label::kNear);

  __ bind(&parameters_loop);
  __ sub(eax, Immediate(Smi::FromInt(1)));
  __ mov(FieldOperand(edx, eax, times_2, kParameterMapHeaderSize), ebx);
  __ mov(FieldOperand(edi, eax, times_2, FixedArray::kHeaderSize), ecx);
  __ add(ebx, Immediate(Smi::FromInt(1)));
  __ bind(&parameters_test);
  __ test(eax, eax);
  __ j(not_zero, &parameters_loop, Label::kNear);
  __ pop(ecx);

  __ bind(&skip_parameter_map);

  // ecx = argument count (tagged)
  // edi = address of backing store (tagged)
  // esp[0] = address of new object (tagged)
  // esp[4] = mapped parameter count (tagged)
  // esp[12] = parameter count (tagged)
  // esp[16] = address of receiver argument
  // Copy arguments header and remaining slots (if there are any).
  __ mov(FieldOperand(edi, FixedArray::kMapOffset),
         Immediate(isolate->factory()->fixed_array_map()));
  __ mov(FieldOperand(edi, FixedArray::kLengthOffset), ecx);

  Label arguments_loop, arguments_test;
  __ mov(ebx, Operand(esp, 1 * kPointerSize));
  __ mov(edx, Operand(esp, 4 * kPointerSize));
  __ sub(edx, ebx);  // Is there a smarter way to do negative scaling?
  __ sub(edx, ebx);
  __ jmp(&arguments_test, Label::kNear);

  __ bind(&arguments_loop);
  __ sub(edx, Immediate(kPointerSize));
  __ mov(eax, Operand(edx, 0));
  __ mov(FieldOperand(edi, ebx, times_2, FixedArray::kHeaderSize), eax);
  __ add(ebx, Immediate(Smi::FromInt(1)));

  __ bind(&arguments_test);
  __ cmp(ebx, ecx);
  __ j(less, &arguments_loop, Label::kNear);

  // Restore.
  __ pop(eax);  // Address of arguments object.
  __ pop(ebx);  // Parameter count.

  // Return and remove the on-stack parameters.
  __ ret(3 * kPointerSize);

  // Do the runtime call to allocate the arguments object.
  __ bind(&runtime);
  __ pop(eax);  // Remove saved parameter count.
  __ mov(Operand(esp, 1 * kPointerSize), ecx);  // Patch argument count.
  __ TailCallRuntime(Runtime::kHiddenNewArgumentsFast, 3, 1);
}


void ArgumentsAccessStub::GenerateNewStrict(MacroAssembler* masm) {
  Isolate* isolate = masm->isolate();

  // esp[0]  : return address
  // esp[4]  : number of parameters
  // esp[8]  : receiver displacement
  // esp[12] : function

  // Check if the calling frame is an arguments adaptor frame.
  Label adaptor_frame, try_allocate, runtime;
  __ mov(edx, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
  __ mov(ecx, Operand(edx, StandardFrameConstants::kContextOffset));
  __ cmp(ecx, Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
  __ j(equal, &adaptor_frame, Label::kNear);

  // Get the length from the frame.
  __ mov(ecx, Operand(esp, 1 * kPointerSize));
  __ jmp(&try_allocate, Label::kNear);

  // Patch the arguments.length and the parameters pointer.
  __ bind(&adaptor_frame);
  __ mov(ecx, Operand(edx, ArgumentsAdaptorFrameConstants::kLengthOffset));
  __ mov(Operand(esp, 1 * kPointerSize), ecx);
  __ lea(edx, Operand(edx, ecx, times_2,
                      StandardFrameConstants::kCallerSPOffset));
  __ mov(Operand(esp, 2 * kPointerSize), edx);

  // Try the new space allocation. Start out with computing the size of
  // the arguments object and the elements array.
  Label add_arguments_object;
  __ bind(&try_allocate);
  __ test(ecx, ecx);
  __ j(zero, &add_arguments_object, Label::kNear);
  __ lea(ecx, Operand(ecx, times_2, FixedArray::kHeaderSize));
  __ bind(&add_arguments_object);
  __ add(ecx, Immediate(Heap::kStrictArgumentsObjectSize));

  // Do the allocation of both objects in one go.
  __ Allocate(ecx, eax, edx, ebx, &runtime, TAG_OBJECT);

  // Get the arguments boilerplate from the current native context.
  __ mov(edi, Operand(esi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
  __ mov(edi, FieldOperand(edi, GlobalObject::kNativeContextOffset));
  const int offset =
      Context::SlotOffset(Context::STRICT_ARGUMENTS_BOILERPLATE_INDEX);
  __ mov(edi, Operand(edi, offset));

  // Copy the JS object part.
  for (int i = 0; i < JSObject::kHeaderSize; i += kPointerSize) {
    __ mov(ebx, FieldOperand(edi, i));
    __ mov(FieldOperand(eax, i), ebx);
  }

  // Get the length (smi tagged) and set that as an in-object property too.
  STATIC_ASSERT(Heap::kArgumentsLengthIndex == 0);
  __ mov(ecx, Operand(esp, 1 * kPointerSize));
  __ mov(FieldOperand(eax, JSObject::kHeaderSize +
                      Heap::kArgumentsLengthIndex * kPointerSize),
         ecx);

  // If there are no actual arguments, we're done.
  Label done;
  __ test(ecx, ecx);
  __ j(zero, &done, Label::kNear);

  // Get the parameters pointer from the stack.
  __ mov(edx, Operand(esp, 2 * kPointerSize));

  // Set up the elements pointer in the allocated arguments object and
  // initialize the header in the elements fixed array.
  __ lea(edi, Operand(eax, Heap::kStrictArgumentsObjectSize));
  __ mov(FieldOperand(eax, JSObject::kElementsOffset), edi);
  __ mov(FieldOperand(edi, FixedArray::kMapOffset),
         Immediate(isolate->factory()->fixed_array_map()));
  __ mov(FieldOperand(edi, FixedArray::kLengthOffset), ecx);

  // Untag the length for the loop below.
  __ SmiUntag(ecx);

  // Copy the fixed array slots.
  Label loop;
  __ bind(&loop);
  __ mov(ebx, Operand(edx, -1 * kPointerSize));  // Skip receiver.
  __ mov(FieldOperand(edi, FixedArray::kHeaderSize), ebx);
  __ add(edi, Immediate(kPointerSize));
  __ sub(edx, Immediate(kPointerSize));
  __ dec(ecx);
  __ j(not_zero, &loop);

  // Return and remove the on-stack parameters.
  __ bind(&done);
  __ ret(3 * kPointerSize);

  // Do the runtime call to allocate the arguments object.
  __ bind(&runtime);
  __ TailCallRuntime(Runtime::kHiddenNewStrictArgumentsFast, 3, 1);
}


1478 void RegExpExecStub::Generate(MacroAssembler* masm) {
1479  // Just jump directly to runtime if native RegExp is not selected at compile
1480  // time or if regexp entry in generated code is turned off runtime switch or
1481  // at compilation.
1482 #ifdef V8_INTERPRETED_REGEXP
1483  __ TailCallRuntime(Runtime::kHiddenRegExpExec, 4, 1);
1484 #else // V8_INTERPRETED_REGEXP
1485 
1486  // Stack frame on entry.
1487  // esp[0]: return address
1488  // esp[4]: last_match_info (expected JSArray)
1489  // esp[8]: previous index
1490  // esp[12]: subject string
1491  // esp[16]: JSRegExp object
1492 
1493  static const int kLastMatchInfoOffset = 1 * kPointerSize;
1494  static const int kPreviousIndexOffset = 2 * kPointerSize;
1495  static const int kSubjectOffset = 3 * kPointerSize;
1496  static const int kJSRegExpOffset = 4 * kPointerSize;
1497 
1498  Label runtime;
1499  Factory* factory = masm->isolate()->factory();
1500 
1501  // Ensure that a RegExp stack is allocated.
1502  ExternalReference address_of_regexp_stack_memory_address =
1503  ExternalReference::address_of_regexp_stack_memory_address(
1504  masm->isolate());
1505  ExternalReference address_of_regexp_stack_memory_size =
1506  ExternalReference::address_of_regexp_stack_memory_size(masm->isolate());
1507  __ mov(ebx, Operand::StaticVariable(address_of_regexp_stack_memory_size));
1508  __ test(ebx, ebx);
1509  __ j(zero, &runtime);
1510 
1511  // Check that the first argument is a JSRegExp object.
1512  __ mov(eax, Operand(esp, kJSRegExpOffset));
1513  STATIC_ASSERT(kSmiTag == 0);
1514  __ JumpIfSmi(eax, &runtime);
1515  __ CmpObjectType(eax, JS_REGEXP_TYPE, ecx);
1516  __ j(not_equal, &runtime);
1517 
1518  // Check that the RegExp has been compiled (data contains a fixed array).
1520  if (FLAG_debug_code) {
1521  __ test(ecx, Immediate(kSmiTagMask));
1522  __ Check(not_zero, kUnexpectedTypeForRegExpDataFixedArrayExpected);
1523  __ CmpObjectType(ecx, FIXED_ARRAY_TYPE, ebx);
1524  __ Check(equal, kUnexpectedTypeForRegExpDataFixedArrayExpected);
1525  }
1526 
1527  // ecx: RegExp data (FixedArray)
1528  // Check the type of the RegExp. Only continue if type is JSRegExp::IRREGEXP.
1530  __ cmp(ebx, Immediate(Smi::FromInt(JSRegExp::IRREGEXP)));
1531  __ j(not_equal, &runtime);
1532 
1533  // ecx: RegExp data (FixedArray)
1534  // Check that the number of captures fit in the static offsets vector buffer.
1536  // Check (number_of_captures + 1) * 2 <= offsets vector size
1537  // Or number_of_captures * 2 <= offsets vector size - 2
1538  // Multiplying by 2 comes for free since edx is smi-tagged.
1539  STATIC_ASSERT(kSmiTag == 0);
1543  __ j(above, &runtime);
1544 
1545  // Reset offset for possibly sliced string.
1546  __ Move(edi, Immediate(0));
1547  __ mov(eax, Operand(esp, kSubjectOffset));
1548  __ JumpIfSmi(eax, &runtime);
1549  __ mov(edx, eax); // Make a copy of the original subject string.
1552 
1553  // eax: subject string
1554  // edx: subject string
1555  // ebx: subject string instance type
1556  // ecx: RegExp data (FixedArray)
1557  // Handle subject string according to its encoding and representation:
1558  // (1) Sequential two byte? If yes, go to (9).
1559  // (2) Sequential one byte? If yes, go to (6).
1560  // (3) Anything but sequential or cons? If yes, go to (7).
1561  // (4) Cons string. If the string is flat, replace subject with first string.
1562  // Otherwise bailout.
1563  // (5a) Is subject sequential two byte? If yes, go to (9).
1564  // (5b) Is subject external? If yes, go to (8).
1565  // (6) One byte sequential. Load regexp code for one byte.
1566  // (E) Carry on.
1568 
1569  // Deferred code at the end of the stub:
1570  // (7) Not a long external string? If yes, go to (10).
1571  // (8) External string. Make it, offset-wise, look like a sequential string.
1572  // (8a) Is the external string one byte? If yes, go to (6).
1573  // (9) Two byte sequential. Load regexp code for one byte. Go to (E).
1574  // (10) Short external string or not a string? If yes, bail out to runtime.
1575  // (11) Sliced string. Replace subject with parent. Go to (5a).
1576 
1577  Label seq_one_byte_string /* 6 */, seq_two_byte_string /* 9 */,
1578  external_string /* 8 */, check_underlying /* 5a */,
1579  not_seq_nor_cons /* 7 */, check_code /* E */,
1580  not_long_external /* 10 */;
1581 
1582  // (1) Sequential two byte? If yes, go to (9).
1583  __ and_(ebx, kIsNotStringMask |
1588  __ j(zero, &seq_two_byte_string); // Go to (9).
1589 
1590  // (2) Sequential one byte? If yes, go to (6).
1591  // Any other sequential string must be one byte.
1592  __ and_(ebx, Immediate(kIsNotStringMask |
1595  __ j(zero, &seq_one_byte_string, Label::kNear); // Go to (6).
1596 
1597  // (3) Anything but sequential or cons? If yes, go to (7).
1598  // We check whether the subject string is a cons, since sequential strings
1599  // have already been covered.
1600  STATIC_ASSERT(kConsStringTag < kExternalStringTag);
1601  STATIC_ASSERT(kSlicedStringTag > kExternalStringTag);
1602  STATIC_ASSERT(kIsNotStringMask > kExternalStringTag);
1603  STATIC_ASSERT(kShortExternalStringTag > kExternalStringTag);
1604  __ cmp(ebx, Immediate(kExternalStringTag));
1605  __ j(greater_equal, &not_seq_nor_cons); // Go to (7).
1606 
1607  // (4) Cons string. Check that it's flat.
1608  // Replace subject with first string and reload instance type.
1609  __ cmp(FieldOperand(eax, ConsString::kSecondOffset), factory->empty_string());
1610  __ j(not_equal, &runtime);
1611  __ mov(eax, FieldOperand(eax, ConsString::kFirstOffset));
1612  __ bind(&check_underlying);
1613  __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset));
1614  __ movzx_b(ebx, FieldOperand(ebx, Map::kInstanceTypeOffset));
1615 
1616  // (5a) Is subject sequential two byte? If yes, go to (9).
1617  __ test_b(ebx, kStringRepresentationMask | kStringEncodingMask);
1618  STATIC_ASSERT((kSeqStringTag | kTwoByteStringTag) == 0);
1619  __ j(zero, &seq_two_byte_string); // Go to (9).
1620  // (5b) Is subject external? If yes, go to (8).
1621  __ test_b(ebx, kStringRepresentationMask);
1622  // The underlying external string is never a short external string.
1623  STATIC_ASSERT(ExternalString::kMaxShortLength < ConsString::kMinLength);
1624  STATIC_ASSERT(ExternalString::kMaxShortLength < SlicedString::kMinLength);
1625  __ j(not_zero, &external_string); // Go to (8).
1626 
1627  // eax: sequential subject string (or look-alike, external string)
1628  // edx: original subject string
1629  // ecx: RegExp data (FixedArray)
1630  // (6) One byte sequential. Load regexp code for one byte.
1631  __ bind(&seq_one_byte_string);
1632  // Load previous index and check range before edx is overwritten. We have
1633  // to use edx instead of eax here because it might have been only made to
1634  // look like a sequential string when it actually is an external string.
1635  __ mov(ebx, Operand(esp, kPreviousIndexOffset));
1636  __ JumpIfNotSmi(ebx, &runtime);
1637  __ cmp(ebx, FieldOperand(edx, String::kLengthOffset));
1638  __ j(above_equal, &runtime);
1639  __ mov(edx, FieldOperand(ecx, JSRegExp::kDataAsciiCodeOffset));
1640  __ Move(ecx, Immediate(1)); // Type is one byte.
1641 
1642  // (E) Carry on. String handling is done.
1643  __ bind(&check_code);
1644  // edx: irregexp code
1645  // Check that the irregexp code has been generated for the actual string
1646  // encoding. If it has, the field contains a code object; otherwise it contains
1647  // a smi (code flushing support).
1648  __ JumpIfSmi(edx, &runtime);
1649 
1650  // eax: subject string
1651  // ebx: previous index (smi)
1652  // edx: code
1653  // ecx: encoding of subject string (1 if ASCII, 0 if two_byte);
1654  // All checks done. Now push arguments for native regexp code.
1655  Counters* counters = masm->isolate()->counters();
1656  __ IncrementCounter(counters->regexp_entry_native(), 1);
1657 
1658  // Isolates: note we add an additional parameter here (isolate pointer).
1659  static const int kRegExpExecuteArguments = 9;
1660  __ EnterApiExitFrame(kRegExpExecuteArguments);
1661 
1662  // Argument 9: Pass current isolate address.
1663  __ mov(Operand(esp, 8 * kPointerSize),
1664  Immediate(ExternalReference::isolate_address(masm->isolate())));
1665 
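 // The generated irregexp code called below behaves, roughly, like a C
 // function of the following shape (a sketch only; the authoritative
 // declaration lives in regexp-macro-assembler.h):
 //
 //   int Match(String* subject,          // Argument 1
 //             int previous_index,       // Argument 2
 //             const byte* input_start,  // Argument 3
 //             const byte* input_end,    // Argument 4
 //             int* offsets_vector,      // Argument 5
 //             int capture_registers,    // Argument 6 (0 here)
 //             Address stack_base,       // Argument 7
 //             int direct_call,          // Argument 8
 //             Isolate* isolate);        // Argument 9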
1666  // Argument 8: Indicate that this is a direct call from JavaScript.
1667  __ mov(Operand(esp, 7 * kPointerSize), Immediate(1));
1668 
1669  // Argument 7: Start (high end) of backtracking stack memory area.
1670  __ mov(esi, Operand::StaticVariable(address_of_regexp_stack_memory_address));
1671  __ add(esi, Operand::StaticVariable(address_of_regexp_stack_memory_size));
1672  __ mov(Operand(esp, 6 * kPointerSize), esi);
1673 
1674  // Argument 6: Set the number of capture registers to zero to force global
1675  // regexps to behave as non-global. This does not affect non-global regexps.
1676  __ mov(Operand(esp, 5 * kPointerSize), Immediate(0));
1677 
1678  // Argument 5: static offsets vector buffer.
1679  __ mov(Operand(esp, 4 * kPointerSize),
1680  Immediate(ExternalReference::address_of_static_offsets_vector(
1681  masm->isolate())));
1682 
1683  // Argument 2: Previous index.
1684  __ SmiUntag(ebx);
1685  __ mov(Operand(esp, 1 * kPointerSize), ebx);
1686 
1687  // Argument 1: Original subject string.
1688  // The original subject is in the previous stack frame. Therefore we have to
1689  // use ebp, which points exactly to one pointer size below the previous esp.
1690  // (Because creating a new stack frame pushes the previous ebp onto the stack
1691  // and thereby moves up esp by one kPointerSize.)
1692  __ mov(esi, Operand(ebp, kSubjectOffset + kPointerSize));
1693  __ mov(Operand(esp, 0 * kPointerSize), esi);
1694 
1695  // esi: original subject string
1696  // eax: underlying subject string
1697  // ebx: previous index
1698  // ecx: encoding of subject string (1 if ASCII, 0 if two_byte);
1699  // edx: code
1700  // Argument 4: End of string data
1701  // Argument 3: Start of string data
1702  // Prepare start and end index of the input.
1703  // Load the length from the original sliced string if that is the case.
1704  __ mov(esi, FieldOperand(esi, String::kLengthOffset));
1705  __ add(esi, edi); // Calculate input end wrt offset.
1706  __ SmiUntag(edi);
1707  __ add(ebx, edi); // Calculate input start wrt offset.
1708 
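 // For a sliced string the match must only see the slice, so both bounds are
 // shifted by the slice offset held in edi (zero for non-sliced strings):
 //
 //   end_index   = subject_length + slice_offset;         // smi arithmetic
 //   start_index = previous_index + Untag(slice_offset);  // untagged
 //
 // (Untag is a hypothetical helper standing in for SmiUntag.)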
1709  // ebx: start index of the input string
1710  // esi: end index of the input string
1711  Label setup_two_byte, setup_rest;
1712  __ test(ecx, ecx);
1713  __ j(zero, &setup_two_byte, Label::kNear);
1714  __ SmiUntag(esi);
1715  __ lea(ecx, FieldOperand(eax, esi, times_1, SeqOneByteString::kHeaderSize));
1716  __ mov(Operand(esp, 3 * kPointerSize), ecx); // Argument 4.
1717  __ lea(ecx, FieldOperand(eax, ebx, times_1, SeqOneByteString::kHeaderSize));
1718  __ mov(Operand(esp, 2 * kPointerSize), ecx); // Argument 3.
1719  __ jmp(&setup_rest, Label::kNear);
1720 
1721  __ bind(&setup_two_byte);
1722  STATIC_ASSERT(kSmiTag == 0);
1723  STATIC_ASSERT(kSmiTagSize == 1); // esi is a smi (already multiplied by 2).
1724  __ lea(ecx, FieldOperand(eax, esi, times_1, SeqTwoByteString::kHeaderSize));
1725  __ mov(Operand(esp, 3 * kPointerSize), ecx); // Argument 4.
1726  __ lea(ecx, FieldOperand(eax, ebx, times_2, SeqTwoByteString::kHeaderSize));
1727  __ mov(Operand(esp, 2 * kPointerSize), ecx); // Argument 3.
1728 
1729  __ bind(&setup_rest);
1730 
1731  // Locate the code entry and call it.
1732  __ add(edx, Immediate(Code::kHeaderSize - kHeapObjectTag));
1733  __ call(edx);
1734 
1735  // Drop arguments and come back to JS mode.
1736  __ LeaveApiExitFrame(true);
1737 
1738  // Check the result.
1739  Label success;
1740  __ cmp(eax, 1);
1741  // We expect exactly one result since we force the called regexp to behave
1742  // as non-global.
1743  __ j(equal, &success);
1744  Label failure;
1745  __ cmp(eax, NativeRegExpMacroAssembler::FAILURE);
1746  __ j(equal, &failure);
1747  __ cmp(eax, NativeRegExpMacroAssembler::EXCEPTION);
1748  // If the result is not an exception, it can only be a retry. Handle that in the runtime system.
1749  __ j(not_equal, &runtime);
1750  // Result must now be exception. If there is no pending exception already, a
1751  // stack overflow (on the backtrack stack) was detected in RegExp code, but
1752  // the exception has not been created yet. Handle that in the runtime system.
1753  // TODO(592): Rerunning the RegExp to get the stack overflow exception.
1754  ExternalReference pending_exception(Isolate::kPendingExceptionAddress,
1755  masm->isolate());
1756  __ mov(edx, Immediate(masm->isolate()->factory()->the_hole_value()));
1757  __ mov(eax, Operand::StaticVariable(pending_exception));
1758  __ cmp(edx, eax);
1759  __ j(equal, &runtime);
1760  // For exception, throw the exception again.
1761 
1762  // Clear the pending exception variable.
1763  __ mov(Operand::StaticVariable(pending_exception), edx);
1764 
1765  // Special handling of termination exceptions which are uncatchable
1766  // by JavaScript code.
1767  __ cmp(eax, factory->termination_exception());
1768  Label throw_termination_exception;
1769  __ j(equal, &throw_termination_exception, Label::kNear);
1770 
1771  // Handle normal exception by following handler chain.
1772  __ Throw(eax);
1773 
1774  __ bind(&throw_termination_exception);
1775  __ ThrowUncatchable(eax);
1776 
1777  __ bind(&failure);
1778  // For failure to match, return null.
1779  __ mov(eax, factory->null_value());
1780  __ ret(4 * kPointerSize);
1781 
1782  // Load RegExp data.
1783  __ bind(&success);
1784  __ mov(eax, Operand(esp, kJSRegExpOffset));
1785  __ mov(ecx, FieldOperand(eax, JSRegExp::kDataOffset));
1786  __ mov(edx, FieldOperand(ecx, JSRegExp::kIrregexpCaptureCountOffset));
1787  // Calculate number of capture registers (number_of_captures + 1) * 2.
1788  STATIC_ASSERT(kSmiTag == 0);
1789  STATIC_ASSERT(kSmiTagSize + kSmiShiftSize == 1);
1790  __ add(edx, Immediate(2)); // edx was a smi.
1791 
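 // edx held the capture count as a smi, i.e. already count * 2, so adding the
 // untagged constant 2 yields (count + 1) * 2 registers without untagging.
 // Worked example: 3 captures -> smi(3) == 6, and 6 + 2 == 8 == (3 + 1) * 2.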
1792  // edx: Number of capture registers
1793  // Load last_match_info which is still known to be a fast case JSArray.
1794  // Check that the fourth object is a JSArray object.
1795  __ mov(eax, Operand(esp, kLastMatchInfoOffset));
1796  __ JumpIfSmi(eax, &runtime);
1797  __ CmpObjectType(eax, JS_ARRAY_TYPE, ebx);
1798  __ j(not_equal, &runtime);
1799  // Check that the JSArray is in fast case.
1800  __ mov(ebx, FieldOperand(eax, JSArray::kElementsOffset));
1801  __ mov(eax, FieldOperand(ebx, HeapObject::kMapOffset));
1802  __ cmp(eax, factory->fixed_array_map());
1803  __ j(not_equal, &runtime);
1804  // Check that the last match info has space for the capture registers and the
1805  // additional information.
1806  __ mov(eax, FieldOperand(ebx, FixedArray::kLengthOffset));
1807  __ SmiUntag(eax);
1808  __ sub(eax, Immediate(RegExpImpl::kLastMatchOverhead));
1809  __ cmp(edx, eax);
1810  __ j(greater, &runtime);
1811 
1812  // ebx: last_match_info backing store (FixedArray)
1813  // edx: number of capture registers
1814  // Store the capture count.
1815  __ SmiTag(edx); // Number of capture registers to smi.
1816  __ mov(FieldOperand(ebx, RegExpImpl::kLastCaptureCountOffset), edx);
1817  __ SmiUntag(edx); // Number of capture registers back from smi.
1818  // Store last subject and last input.
1819  __ mov(eax, Operand(esp, kSubjectOffset));
1820  __ mov(ecx, eax);
1821  __ mov(FieldOperand(ebx, RegExpImpl::kLastSubjectOffset), eax);
1822  __ RecordWriteField(ebx,
1823  RegExpImpl::kLastSubjectOffset,
1824  eax,
1825  edi,
1826  kDontSaveFPRegs);
1827  __ mov(eax, ecx);
1828  __ mov(FieldOperand(ebx, RegExpImpl::kLastInputOffset), eax);
1829  __ RecordWriteField(ebx,
1830  RegExpImpl::kLastInputOffset,
1831  eax,
1832  edi,
1833  kDontSaveFPRegs);
1834 
1835  // Get the static offsets vector filled by the native regexp code.
1836  ExternalReference address_of_static_offsets_vector =
1837  ExternalReference::address_of_static_offsets_vector(masm->isolate());
1838  __ mov(ecx, Immediate(address_of_static_offsets_vector));
1839 
1840  // ebx: last_match_info backing store (FixedArray)
1841  // ecx: offsets vector
1842  // edx: number of capture registers
1843  Label next_capture, done;
1844  // Capture register counter starts from number of capture registers and
1845  // counts down until wrapping after zero.
1846  __ bind(&next_capture);
1847  __ sub(edx, Immediate(1));
1848  __ j(negative, &done, Label::kNear);
1849  // Read the value from the static offsets vector buffer.
1850  __ mov(edi, Operand(ecx, edx, times_int_size, 0));
1851  __ SmiTag(edi);
1852  // Store the smi value in the last match info.
1853  __ mov(FieldOperand(ebx,
1854  edx,
1855  times_pointer_size,
1856  RegExpImpl::kFirstCaptureOffset),
1857  edi);
1858  __ jmp(&next_capture);
1859  __ bind(&done);
1860 
1861  // Return last match info.
1862  __ mov(eax, Operand(esp, kLastMatchInfoOffset));
1863  __ ret(4 * kPointerSize);
1864 
1865  // Do the runtime call to execute the regexp.
1866  __ bind(&runtime);
1867  __ TailCallRuntime(Runtime::kHiddenRegExpExec, 4, 1);
1868 
1869  // Deferred code for string handling.
1870  // (7) Not a long external string? If yes, go to (10).
1871  __ bind(&not_seq_nor_cons);
1872  // Compare flags are still set from (3).
1873  __ j(greater, &not_long_external, Label::kNear); // Go to (10).
1874 
1875  // (8) External string. Short external strings have been ruled out.
1876  __ bind(&external_string);
1877  // Reload instance type.
1878  __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset));
1879  __ movzx_b(ebx, FieldOperand(ebx, Map::kInstanceTypeOffset));
1880  if (FLAG_debug_code) {
1881  // Assert that we do not have a cons or slice (indirect strings) here.
1882  // Sequential strings have already been ruled out.
1883  __ test_b(ebx, kIsIndirectStringMask);
1884  __ Assert(zero, kExternalStringExpectedButNotFound);
1885  }
1886  __ mov(eax, FieldOperand(eax, ExternalString::kResourceDataOffset));
1887  // Move the pointer so that offset-wise, it looks like a sequential string.
1888  STATIC_ASSERT(SeqTwoByteString::kHeaderSize == SeqOneByteString::kHeaderSize);
1889  __ sub(eax, Immediate(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
1890  STATIC_ASSERT(kTwoByteStringTag == 0);
1891  // (8a) Is the external string one byte? If yes, go to (6).
1892  __ test_b(ebx, kStringEncodingMask);
1893  __ j(not_zero, &seq_one_byte_string); // Go to (6).
1894 
1895  // eax: sequential subject string (or look-alike, external string)
1896  // edx: original subject string
1897  // ecx: RegExp data (FixedArray)
1898  // (9) Two byte sequential. Load regexp code for two byte. Go to (E).
1899  __ bind(&seq_two_byte_string);
1900  // Load previous index and check range before edx is overwritten. We have
1901  // to use edx instead of eax here because it might have been only made to
1902  // look like a sequential string when it actually is an external string.
1903  __ mov(ebx, Operand(esp, kPreviousIndexOffset));
1904  __ JumpIfNotSmi(ebx, &runtime);
1905  __ cmp(ebx, FieldOperand(edx, String::kLengthOffset));
1906  __ j(above_equal, &runtime);
1907  __ mov(edx, FieldOperand(ecx, JSRegExp::kDataUC16CodeOffset));
1908  __ Move(ecx, Immediate(0)); // Type is two byte.
1909  __ jmp(&check_code); // Go to (E).
1910 
1911  // (10) Not a string or a short external string? If yes, bail out to runtime.
1912  __ bind(&not_long_external);
1913  // Catch non-string subject or short external string.
1914  STATIC_ASSERT(kNotStringTag != 0 && kShortExternalStringTag != 0);
1915  __ test(ebx, Immediate(kIsNotStringMask | kShortExternalStringTag));
1916  __ j(not_zero, &runtime);
1917 
1918  // (11) Sliced string. Replace subject with parent. Go to (5a).
1919  // Load offset into edi and replace subject string with parent.
1920  __ mov(edi, FieldOperand(eax, SlicedString::kOffsetOffset));
1921  __ mov(eax, FieldOperand(eax, SlicedString::kParentOffset));
1922  __ jmp(&check_underlying); // Go to (5a).
1923 #endif // V8_INTERPRETED_REGEXP
1924 }
1925 
1926 
1927 static int NegativeComparisonResult(Condition cc) {
1928  ASSERT(cc != equal);
1929  ASSERT((cc == less) || (cc == less_equal)
1930  || (cc == greater) || (cc == greater_equal));
1931  return (cc == greater || cc == greater_equal) ? LESS : GREATER;
1932 }
1933 
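 // The returned constant is chosen so that the original condition fails on
 // it: for cc == less the stub materializes GREATER (1), hence, for example,
 // 'undefined < undefined' evaluates to false. Worked examples:
 //
 //   undefined <  undefined  -> GREATER (1)  -> "1 < 0"   -> false
 //   undefined >= undefined  -> LESS (-1)    -> "-1 >= 0" -> false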
1934 
1935 static void CheckInputType(MacroAssembler* masm,
1936  Register input,
1937  CompareIC::State expected,
1938  Label* fail) {
1939  Label ok;
1940  if (expected == CompareIC::SMI) {
1941  __ JumpIfNotSmi(input, fail);
1942  } else if (expected == CompareIC::NUMBER) {
1943  __ JumpIfSmi(input, &ok);
1944  __ cmp(FieldOperand(input, HeapObject::kMapOffset),
1945  Immediate(masm->isolate()->factory()->heap_number_map()));
1946  __ j(not_equal, fail);
1947  }
1948  // We could be strict about internalized/non-internalized here, but as long as
1949  // hydrogen doesn't care, the stub doesn't have to care either.
1950  __ bind(&ok);
1951 }
1952 
1953 
1954 static void BranchIfNotInternalizedString(MacroAssembler* masm,
1955  Label* label,
1956  Register object,
1957  Register scratch) {
1958  __ JumpIfSmi(object, label);
1959  __ mov(scratch, FieldOperand(object, HeapObject::kMapOffset));
1960  __ movzx_b(scratch, FieldOperand(scratch, Map::kInstanceTypeOffset));
1961  STATIC_ASSERT(kInternalizedTag == 0 && kStringTag == 0);
1962  __ test(scratch, Immediate(kIsNotStringMask | kIsNotInternalizedMask));
1963  __ j(not_zero, label);
1964 }
1965 
1966 
1967 void ICCompareStub::GenerateGeneric(MacroAssembler* masm) {
1968  Label check_unequal_objects;
1969  Condition cc = GetCondition();
1970 
1971  Label miss;
1972  CheckInputType(masm, edx, left_, &miss);
1973  CheckInputType(masm, eax, right_, &miss);
1974 
1975  // Compare two smis.
1976  Label non_smi, smi_done;
1977  __ mov(ecx, edx);
1978  __ or_(ecx, eax);
1979  __ JumpIfNotSmi(ecx, &non_smi, Label::kNear);
1980  __ sub(edx, eax); // Return on the result of the subtraction.
1981  __ j(no_overflow, &smi_done, Label::kNear);
1982  __ not_(edx); // Correct sign in case of overflow. edx is never 0 here.
1983  __ bind(&smi_done);
1984  __ mov(eax, edx);
1985  __ ret(0);
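 // A sketch of the smi fast path above in C++ (using the GCC/Clang overflow
 // builtin): because both inputs are smis, their 32-bit difference orders
 // them, except on overflow, where flipping all bits restores the correct
 // sign and can never produce 0:
 //
 //   int32_t diff;
 //   if (__builtin_sub_overflow(left, right, &diff)) diff = ~diff;
 //   // now diff < 0 / == 0 / > 0  <=>  left < / == / > right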
1986  __ bind(&non_smi);
1987 
1988  // NOTICE! This code is only reached after a smi-fast-case check, so
1989  // it is certain that at least one operand isn't a smi.
1990 
1991  // Identical objects can be compared fast, but there are some tricky cases
1992  // for NaN and undefined.
1993  Label generic_heap_number_comparison;
1994  {
1995  Label not_identical;
1996  __ cmp(eax, edx);
1997  __ j(not_equal, &not_identical);
1998 
1999  if (cc != equal) {
2000  // Check for undefined. undefined OP undefined is false even though
2001  // undefined == undefined.
2002  Label check_for_nan;
2003  __ cmp(edx, masm->isolate()->factory()->undefined_value());
2004  __ j(not_equal, &check_for_nan, Label::kNear);
2005  __ Move(eax, Immediate(Smi::FromInt(NegativeComparisonResult(cc))));
2006  __ ret(0);
2007  __ bind(&check_for_nan);
2008  }
2009 
2010  // Test for NaN. Compare heap numbers in a general way,
2011  // to handle NaNs correctly.
2012  __ cmp(FieldOperand(edx, HeapObject::kMapOffset),
2013  Immediate(masm->isolate()->factory()->heap_number_map()));
2014  __ j(equal, &generic_heap_number_comparison, Label::kNear);
2015  if (cc != equal) {
2016  // Call runtime on identical JSObjects. Otherwise return equal.
2017  __ CmpObjectType(eax, FIRST_SPEC_OBJECT_TYPE, ecx);
2018  __ j(above_equal, &not_identical);
2019  }
2020  __ Move(eax, Immediate(Smi::FromInt(EQUAL)));
2021  __ ret(0);
2022 
2023 
2024  __ bind(&not_identical);
2025  }
2026 
2027  // Strict equality can quickly decide whether objects are equal.
2028  // Non-strict object equality is slower, so it is handled later in the stub.
2029  if (cc == equal && strict()) {
2030  Label slow; // Fallthrough label.
2031  Label not_smis;
2032  // If we're doing a strict equality comparison, we don't have to do
2033  // type conversion, so we generate code to do fast comparison for objects
2034  // and oddballs. Non-smi numbers and strings still go through the usual
2035  // slow-case code.
2036  // If either is a Smi (we know that not both are), then they can only
2037  // be equal if the other is a HeapNumber. If so, use the slow case.
2038  STATIC_ASSERT(kSmiTag == 0);
2039  ASSERT_EQ(0, Smi::FromInt(0));
2040  __ mov(ecx, Immediate(kSmiTagMask));
2041  __ and_(ecx, eax);
2042  __ test(ecx, edx);
2043  __ j(not_zero, &not_smis, Label::kNear);
2044  // One operand is a smi.
2045 
2046  // Check whether the non-smi is a heap number.
2047  STATIC_ASSERT(kSmiTagMask == 1);
2048  // ecx still holds eax & kSmiTag, which is either zero or one.
2049  __ sub(ecx, Immediate(0x01));
2050  __ mov(ebx, edx);
2051  __ xor_(ebx, eax);
2052  __ and_(ebx, ecx); // ebx holds either 0 or eax ^ edx.
2053  __ xor_(ebx, eax);
2054  // if eax was smi, ebx is now edx, else eax.
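 // The xor sequence is a branchless select. With mask = (eax & kSmiTagMask) - 1
 // (all ones when eax is a smi, zero otherwise), it computes, in C++ terms:
 //
 //   ebx = eax ^ ((eax ^ edx) & mask);   // mask set ? edx : eax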
2055 
2056  // Check if the non-smi operand is a heap number.
2057  __ cmp(FieldOperand(ebx, HeapObject::kMapOffset),
2058  Immediate(masm->isolate()->factory()->heap_number_map()));
2059  // If heap number, handle it in the slow case.
2060  __ j(equal, &slow, Label::kNear);
2061  // Return non-equal (ebx is not zero)
2062  __ mov(eax, ebx);
2063  __ ret(0);
2064 
2065  __ bind(&not_smis);
2066  // If either operand is a JSObject or an oddball value, then they are not
2067  // equal since their pointers are different.
2068  // There is no test for undetectability in strict equality.
2069 
2070  // Get the type of the first operand.
2071  // If the first object is a JS object, we have done pointer comparison.
2072  Label first_non_object;
2074  __ CmpObjectType(eax, FIRST_SPEC_OBJECT_TYPE, ecx);
2075  __ j(below, &first_non_object, Label::kNear);
2076 
2077  // Return non-zero (eax is not zero)
2078  Label return_not_equal;
2079  STATIC_ASSERT(kHeapObjectTag != 0);
2080  __ bind(&return_not_equal);
2081  __ ret(0);
2082 
2083  __ bind(&first_non_object);
2084  // Check for oddballs: true, false, null, undefined.
2085  __ CmpInstanceType(ecx, ODDBALL_TYPE);
2086  __ j(equal, &return_not_equal);
2087 
2088  __ CmpObjectType(edx, FIRST_SPEC_OBJECT_TYPE, ecx);
2089  __ j(above_equal, &return_not_equal);
2090 
2091  // Check for oddballs: true, false, null, undefined.
2092  __ CmpInstanceType(ecx, ODDBALL_TYPE);
2093  __ j(equal, &return_not_equal);
2094 
2095  // Fall through to the general case.
2096  __ bind(&slow);
2097  }
2098 
2099  // Generate the number comparison code.
2100  Label non_number_comparison;
2101  Label unordered;
2102  __ bind(&generic_heap_number_comparison);
2103  if (CpuFeatures::IsSupported(SSE2)) {
2104  CpuFeatureScope use_sse2(masm, SSE2);
2105  CpuFeatureScope use_cmov(masm, CMOV);
2106 
2107  FloatingPointHelper::LoadSSE2Operands(masm, &non_number_comparison);
2108  __ ucomisd(xmm0, xmm1);
2109 
2110  // Don't base result on EFLAGS when a NaN is involved.
2111  __ j(parity_even, &unordered, Label::kNear);
2112  // Return a result of -1, 0, or 1, based on EFLAGS.
2113  __ mov(eax, 0); // equal
2114  __ mov(ecx, Immediate(Smi::FromInt(1)));
2115  __ cmov(above, eax, ecx);
2116  __ mov(ecx, Immediate(Smi::FromInt(-1)));
2117  __ cmov(below, eax, ecx);
2118  __ ret(0);
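 // The two cmovs above build the -1/0/1 result without branches; in C++
 // terms:
 //
 //   int result = 0;                   // assume EQUAL
 //   if (lhs > rhs) result = 1;        // cmov(above)
 //   else if (lhs < rhs) result = -1;  // cmov(below)
 //
 // (The values are materialized as smis before returning.)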
2119  } else {
2120  FloatingPointHelper::CheckFloatOperands(
2121  masm, &non_number_comparison, ebx);
2122  FloatingPointHelper::LoadFloatOperand(masm, eax);
2123  FloatingPointHelper::LoadFloatOperand(masm, edx);
2124  __ FCmp();
2125 
2126  // Don't base result on EFLAGS when a NaN is involved.
2127  __ j(parity_even, &unordered, Label::kNear);
2128 
2129  Label below_label, above_label;
2130  // Return a result of -1, 0, or 1, based on EFLAGS.
2131  __ j(below, &below_label, Label::kNear);
2132  __ j(above, &above_label, Label::kNear);
2133 
2134  __ Move(eax, Immediate(0));
2135  __ ret(0);
2136 
2137  __ bind(&below_label);
2138  __ mov(eax, Immediate(Smi::FromInt(-1)));
2139  __ ret(0);
2140 
2141  __ bind(&above_label);
2142  __ mov(eax, Immediate(Smi::FromInt(1)));
2143  __ ret(0);
2144  }
2145 
2146  // If one of the numbers was NaN, then the result is always false.
2147  // The cc is never not-equal.
2148  __ bind(&unordered);
2149  ASSERT(cc != not_equal);
2150  if (cc == less || cc == less_equal) {
2151  __ mov(eax, Immediate(Smi::FromInt(1)));
2152  } else {
2153  __ mov(eax, Immediate(Smi::FromInt(-1)));
2154  }
2155  __ ret(0);
2156 
2157  // The number comparison code did not provide a valid result.
2158  __ bind(&non_number_comparison);
2159 
2160  // Fast negative check for internalized-to-internalized equality.
2161  Label check_for_strings;
2162  if (cc == equal) {
2163  BranchIfNotInternalizedString(masm, &check_for_strings, eax, ecx);
2164  BranchIfNotInternalizedString(masm, &check_for_strings, edx, ecx);
2165 
2166  // We've already checked for object identity, so if both operands
2167  // are internalized they aren't equal. Register eax already holds a
2168  // non-zero value, which indicates not equal, so just return.
2169  __ ret(0);
2170  }
2171 
2172  __ bind(&check_for_strings);
2173 
2174  __ JumpIfNotBothSequentialAsciiStrings(edx, eax, ecx, ebx,
2175  &check_unequal_objects);
2176 
2177  // Inline comparison of ASCII strings.
2178  if (cc == equal) {
2179  StringCompareStub::GenerateFlatAsciiStringEquals(masm,
2180  edx,
2181  eax,
2182  ecx,
2183  ebx);
2184  } else {
2185  StringCompareStub::GenerateCompareFlatAsciiStrings(masm,
2186  edx,
2187  eax,
2188  ecx,
2189  ebx,
2190  edi);
2191  }
2192 #ifdef DEBUG
2193  __ Abort(kUnexpectedFallThroughFromStringComparison);
2194 #endif
2195 
2196  __ bind(&check_unequal_objects);
2197  if (cc == equal && !strict()) {
2198  // Non-strict equality. Objects are unequal if
2199  // they are both JSObjects and not undetectable,
2200  // and their pointers are different.
2201  Label not_both_objects;
2202  Label return_unequal;
2203  // At most one is a smi, so we can test for smi by adding the two.
2204  // A smi plus a heap object has the low bit set, a heap object plus
2205  // a heap object has the low bit clear.
2206  STATIC_ASSERT(kSmiTag == 0);
2207  STATIC_ASSERT(kSmiTagMask == 1);
2208  __ lea(ecx, Operand(eax, edx, times_1, 0));
2209  __ test(ecx, Immediate(kSmiTagMask));
2210  __ j(not_zero, &not_both_objects, Label::kNear);
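 // Worked example of the tag trick: heap object pointers end in 1, smis in
 // 0. 1 + 1 ends in 0, while 0 + 1 ends in 1, so a single test of the low
 // bit of eax + edx detects whether either operand is a smi.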
2211  __ CmpObjectType(eax, FIRST_SPEC_OBJECT_TYPE, ecx);
2212  __ j(below, &not_both_objects, Label::kNear);
2213  __ CmpObjectType(edx, FIRST_SPEC_OBJECT_TYPE, ebx);
2214  __ j(below, &not_both_objects, Label::kNear);
2215  // We do not bail out after this point. Both are JSObjects, and
2216  // they are equal if and only if both are undetectable.
2217  // The and of the undetectable flags is 1 if and only if they are equal.
2218  __ test_b(FieldOperand(ecx, Map::kBitFieldOffset),
2219  1 << Map::kIsUndetectable);
2220  __ j(zero, &return_unequal, Label::kNear);
2221  __ test_b(FieldOperand(ebx, Map::kBitFieldOffset),
2222  1 << Map::kIsUndetectable);
2223  __ j(zero, &return_unequal, Label::kNear);
2224  // The objects are both undetectable, so they both compare as the value
2225  // undefined, and are equal.
2226  __ Move(eax, Immediate(EQUAL));
2227  __ bind(&return_unequal);
2228  // Return non-equal by returning the non-zero object pointer in eax,
2229  // or return equal if we fell through to here.
2230  __ ret(0); // eax, edx were pushed before
2231  __ bind(&not_both_objects);
2232  }
2233 
2234  // Push arguments below the return address.
2235  __ pop(ecx);
2236  __ push(edx);
2237  __ push(eax);
2238 
2239  // Figure out which native to call and setup the arguments.
2240  Builtins::JavaScript builtin;
2241  if (cc == equal) {
2242  builtin = strict() ? Builtins::STRICT_EQUALS : Builtins::EQUALS;
2243  } else {
2244  builtin = Builtins::COMPARE;
2245  __ push(Immediate(Smi::FromInt(NegativeComparisonResult(cc))));
2246  }
2247 
2248  // Restore return address on the stack.
2249  __ push(ecx);
2250 
2251  // Call the native; it returns -1 (less), 0 (equal), or 1 (greater)
2252  // tagged as a small integer.
2253  __ InvokeBuiltin(builtin, JUMP_FUNCTION);
2254 
2255  __ bind(&miss);
2256  GenerateMiss(masm);
2257 }
2258 
2259 
2260 static void GenerateRecordCallTarget(MacroAssembler* masm) {
2261  // Cache the called function in a feedback vector slot. Cache states
2262  // are uninitialized, monomorphic (indicated by a JSFunction), and
2263  // megamorphic.
2264  // eax : number of arguments to the construct function
2265  // ebx : Feedback vector
2266  // edx : slot in feedback vector (Smi)
2267  // edi : the function to call
2268  Isolate* isolate = masm->isolate();
2269  Label initialize, done, miss, megamorphic, not_array_function;
2270 
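 // The feedback slot behaves as a small state machine; a sketch of the
 // transitions this function implements (descriptive names, not V8 types):
 //
 //   UNINITIALIZED  -> MONOMORPHIC(function)   on the first call
 //   UNINITIALIZED  -> AllocationSite          first callee is Array()
 //   MONOMORPHIC    -> MEGAMORPHIC             a different callee shows up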
2271  // Load the cache state into ecx.
2272  __ mov(ecx, FieldOperand(ebx, edx, times_half_pointer_size,
2273  FixedArray::kHeaderSize));
2274 
2275  // A monomorphic cache hit or an already megamorphic state: invoke the
2276  // function without changing the state.
2277  __ cmp(ecx, edi);
2278  __ j(equal, &done, Label::kFar);
2279  __ cmp(ecx, Immediate(TypeFeedbackInfo::MegamorphicSentinel(isolate)));
2280  __ j(equal, &done, Label::kFar);
2281 
2282  if (!FLAG_pretenuring_call_new) {
2283  // If we came here, we need to see if we are the array function.
2284  // If we didn't have a matching function, and we didn't find the megamorphic
2285  // sentinel, then we have in the slot either some other function or an
2286  // AllocationSite. Do a map check on the object in ecx.
2287  Handle<Map> allocation_site_map =
2288  masm->isolate()->factory()->allocation_site_map();
2289  __ cmp(FieldOperand(ecx, 0), Immediate(allocation_site_map));
2290  __ j(not_equal, &miss);
2291 
2292  // Make sure the function is the Array() function
2293  __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, ecx);
2294  __ cmp(edi, ecx);
2295  __ j(not_equal, &megamorphic);
2296  __ jmp(&done, Label::kFar);
2297  }
2298 
2299  __ bind(&miss);
2300 
2301  // A monomorphic miss (i.e., here the cache is not uninitialized) goes
2302  // megamorphic.
2303  __ cmp(ecx, Immediate(TypeFeedbackInfo::UninitializedSentinel(isolate)));
2304  __ j(equal, &initialize);
2305  // MegamorphicSentinel is an immortal immovable object (undefined) so no
2306  // write-barrier is needed.
2307  __ bind(&megamorphic);
2308  __ mov(FieldOperand(ebx, edx, times_half_pointer_size,
2309  FixedArray::kHeaderSize),
2310  Immediate(TypeFeedbackInfo::MegamorphicSentinel(isolate)));
2311  __ jmp(&done, Label::kFar);
2312 
2313  // An uninitialized cache is patched with the function, or with an
2314  // AllocationSite (indicating the ElementsKind) if the function is the Array constructor.
2315  __ bind(&initialize);
2316  if (!FLAG_pretenuring_call_new) {
2317  // Make sure the function is the Array() function
2318  __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, ecx);
2319  __ cmp(edi, ecx);
2320  __ j(not_equal, &not_array_function);
2321 
2322  // The target function is the Array constructor.
2323  // Create an AllocationSite if we don't already have it, and store it in
2324  // the slot.
2325  {
2326  FrameScope scope(masm, StackFrame::INTERNAL);
2327 
2328  // Arguments register must be smi-tagged to call out.
2329  __ SmiTag(eax);
2330  __ push(eax);
2331  __ push(edi);
2332  __ push(edx);
2333  __ push(ebx);
2334 
2335  CreateAllocationSiteStub create_stub;
2336  __ CallStub(&create_stub);
2337 
2338  __ pop(ebx);
2339  __ pop(edx);
2340  __ pop(edi);
2341  __ pop(eax);
2342  __ SmiUntag(eax);
2343  }
2344  __ jmp(&done);
2345 
2346  __ bind(&not_array_function);
2347  }
2348 
2349  __ mov(FieldOperand(ebx, edx, times_half_pointer_size,
2350  FixedArray::kHeaderSize),
2351  edi);
2352  // We won't need edx or ebx anymore, just save edi
2353  __ push(edi);
2354  __ push(ebx);
2355  __ push(edx);
2356  __ RecordWriteArray(ebx, edi, edx, kDontSaveFPRegs,
2357  EMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
2358  __ pop(edx);
2359  __ pop(ebx);
2360  __ pop(edi);
2361 
2362  __ bind(&done);
2363 }
2364 
2365 
2366 void CallFunctionStub::Generate(MacroAssembler* masm) {
2367  // ebx : feedback vector
2368  // edx : (only if ebx is not the megamorphic symbol) slot in feedback
2369  // vector (Smi)
2370  // edi : the function to call
2371  Isolate* isolate = masm->isolate();
2372  Label slow, non_function, wrap, cont;
2373 
2374  if (NeedsChecks()) {
2375  // Check that the function really is a JavaScript function.
2376  __ JumpIfSmi(edi, &non_function);
2377 
2378  // Goto slow case if we do not have a function.
2379  __ CmpObjectType(edi, JS_FUNCTION_TYPE, ecx);
2380  __ j(not_equal, &slow);
2381 
2382  if (RecordCallTarget()) {
2383  GenerateRecordCallTarget(masm);
2384  // Type information was updated. Because we may call Array, which
2385  // expects either undefined or an AllocationSite in ebx, we need
2386  // to set ebx to undefined.
2387  __ mov(ebx, Immediate(isolate->factory()->undefined_value()));
2388  }
2389  }
2390 
2391  // Fast-case: Just invoke the function.
2392  ParameterCount actual(argc_);
2393 
2394  if (CallAsMethod()) {
2395  if (NeedsChecks()) {
2396  // Do not transform the receiver for strict mode functions.
2397  __ mov(ecx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
2398  __ test_b(FieldOperand(ecx, SharedFunctionInfo::kStrictModeByteOffset),
2399  1 << SharedFunctionInfo::kStrictModeBitWithinByte);
2400  __ j(not_equal, &cont);
2401 
2402  // Do not transform the receiver for natives (shared already in ecx).
2403  __ test_b(FieldOperand(ecx, SharedFunctionInfo::kNativeByteOffset),
2404  1 << SharedFunctionInfo::kNativeBitWithinByte);
2405  __ j(not_equal, &cont);
2406  }
2407 
2408  // Load the receiver from the stack.
2409  __ mov(eax, Operand(esp, (argc_ + 1) * kPointerSize));
2410 
2411  if (NeedsChecks()) {
2412  __ JumpIfSmi(eax, &wrap);
2413 
2414  __ CmpObjectType(eax, FIRST_SPEC_OBJECT_TYPE, ecx);
2415  __ j(below, &wrap);
2416  } else {
2417  __ jmp(&wrap);
2418  }
2419 
2420  __ bind(&cont);
2421  }
2422 
2423  __ InvokeFunction(edi, actual, JUMP_FUNCTION, NullCallWrapper());
2424 
2425  if (NeedsChecks()) {
2426  // Slow-case: Non-function called.
2427  __ bind(&slow);
2428  if (RecordCallTarget()) {
2429  // If there is a call target cache, mark it megamorphic in the
2430  // non-function case. MegamorphicSentinel is an immortal immovable
2431  // object (megamorphic symbol) so no write barrier is needed.
2432  __ mov(FieldOperand(ebx, edx, times_half_pointer_size,
2433  FixedArray::kHeaderSize),
2434  Immediate(TypeFeedbackInfo::MegamorphicSentinel(isolate)));
2435  }
2436  // Check for function proxy.
2437  __ CmpInstanceType(ecx, JS_FUNCTION_PROXY_TYPE);
2438  __ j(not_equal, &non_function);
2439  __ pop(ecx);
2440  __ push(edi); // put proxy as additional argument under return address
2441  __ push(ecx);
2442  __ Move(eax, Immediate(argc_ + 1));
2443  __ Move(ebx, Immediate(0));
2444  __ GetBuiltinEntry(edx, Builtins::CALL_FUNCTION_PROXY);
2445  {
2446  Handle<Code> adaptor = isolate->builtins()->ArgumentsAdaptorTrampoline();
2447  __ jmp(adaptor, RelocInfo::CODE_TARGET);
2448  }
2449 
2450  // CALL_NON_FUNCTION expects the non-function callee as receiver (instead
2451  // of the original receiver from the call site).
2452  __ bind(&non_function);
2453  __ mov(Operand(esp, (argc_ + 1) * kPointerSize), edi);
2454  __ Move(eax, Immediate(argc_));
2455  __ Move(ebx, Immediate(0));
2456  __ GetBuiltinEntry(edx, Builtins::CALL_NON_FUNCTION);
2457  Handle<Code> adaptor = isolate->builtins()->ArgumentsAdaptorTrampoline();
2458  __ jmp(adaptor, RelocInfo::CODE_TARGET);
2459  }
2460 
2461  if (CallAsMethod()) {
2462  __ bind(&wrap);
2463  // Wrap the receiver and patch it back onto the stack.
2464  { FrameScope frame_scope(masm, StackFrame::INTERNAL);
2465  __ push(edi);
2466  __ push(eax);
2467  __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
2468  __ pop(edi);
2469  }
2470  __ mov(Operand(esp, (argc_ + 1) * kPointerSize), eax);
2471  __ jmp(&cont);
2472  }
2473 }
2474 
2475 
2476 void CallConstructStub::Generate(MacroAssembler* masm) {
2477  // eax : number of arguments
2478  // ebx : feedback vector
2479  // edx : (only if ebx is not the megamorphic symbol) slot in feedback
2480  // vector (Smi)
2481  // edi : constructor function
2482  Label slow, non_function_call;
2483 
2484  // Check that function is not a smi.
2485  __ JumpIfSmi(edi, &non_function_call);
2486  // Check that function is a JSFunction.
2487  __ CmpObjectType(edi, JS_FUNCTION_TYPE, ecx);
2488  __ j(not_equal, &slow);
2489 
2490  if (RecordCallTarget()) {
2491  GenerateRecordCallTarget(masm);
2492 
2493  if (FLAG_pretenuring_call_new) {
2494  // Put the AllocationSite from the feedback vector into ebx.
2495  // By adding kPointerSize we encode that we know the AllocationSite
2496  // entry is at the feedback vector slot given by edx + 1.
2497  __ mov(ebx, FieldOperand(ebx, edx, times_half_pointer_size,
2498  FixedArray::kHeaderSize + kPointerSize));
2499  } else {
2500  Label feedback_register_initialized;
2501  // Put the AllocationSite from the feedback vector into ebx, or undefined.
2502  __ mov(ebx, FieldOperand(ebx, edx, times_half_pointer_size,
2503  FixedArray::kHeaderSize));
2504  Handle<Map> allocation_site_map =
2505  masm->isolate()->factory()->allocation_site_map();
2506  __ cmp(FieldOperand(ebx, 0), Immediate(allocation_site_map));
2507  __ j(equal, &feedback_register_initialized);
2508  __ mov(ebx, masm->isolate()->factory()->undefined_value());
2509  __ bind(&feedback_register_initialized);
2510  }
2511 
2512  __ AssertUndefinedOrAllocationSite(ebx);
2513  }
2514 
2515  // Jump to the function-specific construct stub.
2516  Register jmp_reg = ecx;
2517  __ mov(jmp_reg, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
2518  __ mov(jmp_reg, FieldOperand(jmp_reg,
2519  SharedFunctionInfo::kConstructStubOffset));
2520  __ lea(jmp_reg, FieldOperand(jmp_reg, Code::kHeaderSize));
2521  __ jmp(jmp_reg);
2522 
2523  // edi: called object
2524  // eax: number of arguments
2525  // ecx: object map
2526  Label do_call;
2527  __ bind(&slow);
2528  __ CmpInstanceType(ecx, JS_FUNCTION_PROXY_TYPE);
2529  __ j(not_equal, &non_function_call);
2530  __ GetBuiltinEntry(edx, Builtins::CALL_FUNCTION_PROXY_AS_CONSTRUCTOR);
2531  __ jmp(&do_call);
2532 
2533  __ bind(&non_function_call);
2534  __ GetBuiltinEntry(edx, Builtins::CALL_NON_FUNCTION_AS_CONSTRUCTOR);
2535  __ bind(&do_call);
2536  // Set expected number of arguments to zero (not changing eax).
2537  __ Move(ebx, Immediate(0));
2538  Handle<Code> arguments_adaptor =
2539  masm->isolate()->builtins()->ArgumentsAdaptorTrampoline();
2540  __ jmp(arguments_adaptor, RelocInfo::CODE_TARGET);
2541 }
2542 
2543 
2544 bool CEntryStub::NeedsImmovableCode() {
2545  return false;
2546 }
2547 
2548 
2549 void CodeStub::GenerateStubsAheadOfTime(Isolate* isolate) {
2553  // It is important that the store buffer overflow stubs are generated first.
2556  if (Serializer::enabled()) {
2557  PlatformFeatureScope sse2(SSE2);
2558  BinaryOpICStub::GenerateAheadOfTime(isolate);
2559  BinaryOpICWithAllocationSiteStub::GenerateAheadOfTime(isolate);
2560  } else {
2561  BinaryOpICStub::GenerateAheadOfTime(isolate);
2562  BinaryOpICWithAllocationSiteStub::GenerateAheadOfTime(isolate);
2563  }
2564 }
2565 
2566 
2567 void CodeStub::GenerateFPStubs(Isolate* isolate) {
2568  if (CpuFeatures::IsSupported(SSE2)) {
2569  CEntryStub save_doubles(1, kSaveFPRegs);
2570  // Stubs might already be in the snapshot, detect that and don't regenerate,
2571  // which would lead to code stub initialization state being messed up.
2572  Code* save_doubles_code;
2573  if (!save_doubles.FindCodeInCache(&save_doubles_code, isolate)) {
2574  save_doubles_code = *(save_doubles.GetCode(isolate));
2575  }
2576  isolate->set_fp_stubs_generated(true);
2577  }
2578 }
2579 
2580 
2581 void CEntryStub::GenerateAheadOfTime(Isolate* isolate) {
2582  CEntryStub stub(1, kDontSaveFPRegs);
2583  stub.GetCode(isolate);
2584 }
2585 
2586 
2587 void CEntryStub::GenerateCore(MacroAssembler* masm,
2588  Label* throw_normal_exception,
2589  Label* throw_termination_exception,
2590  bool do_gc,
2591  bool always_allocate_scope) {
2592  // eax: result parameter for PerformGC, if any
2593  // ebx: pointer to C function (C callee-saved)
2594  // ebp: frame pointer (restored after C call)
2595  // esp: stack pointer (restored after C call)
2596  // edi: number of arguments including receiver (C callee-saved)
2597  // esi: pointer to the first argument (C callee-saved)
2598 
2599  // Result returned in eax, or eax+edx if result_size_ is 2.
2600 
2601  // Check stack alignment.
2602  if (FLAG_debug_code) {
2603  __ CheckStackAlignment();
2604  }
2605 
2606  if (do_gc) {
2607  // Pass failure code returned from last attempt as first argument to
2608  // PerformGC. No need to use PrepareCallCFunction/CallCFunction here as the
2609  // stack alignment is known to be correct. This function takes one argument
2610  // which is passed on the stack, and we know that the stack has been
2611  // prepared to pass at least one argument.
2612  __ mov(Operand(esp, 1 * kPointerSize),
2613  Immediate(ExternalReference::isolate_address(masm->isolate())));
2614  __ mov(Operand(esp, 0 * kPointerSize), eax); // Result.
2615  __ call(FUNCTION_ADDR(Runtime::PerformGC), RelocInfo::RUNTIME_ENTRY);
2616  }
2617 
2618  ExternalReference scope_depth =
2619  ExternalReference::heap_always_allocate_scope_depth(masm->isolate());
2620  if (always_allocate_scope) {
2621  __ inc(Operand::StaticVariable(scope_depth));
2622  }
2623 
2624  // Call C function.
2625  __ mov(Operand(esp, 0 * kPointerSize), edi); // argc.
2626  __ mov(Operand(esp, 1 * kPointerSize), esi); // argv.
2627  __ mov(Operand(esp, 2 * kPointerSize),
2628  Immediate(ExternalReference::isolate_address(masm->isolate())));
2629  __ call(ebx);
2630  // Result is in eax or edx:eax - do not destroy these registers!
2631 
2632  if (always_allocate_scope) {
2633  __ dec(Operand::StaticVariable(scope_depth));
2634  }
2635 
2636  // Runtime functions should not return 'the hole'. Allowing it to escape may
2637  // lead to crashes in the IC code later.
2638  if (FLAG_debug_code) {
2639  Label okay;
2640  __ cmp(eax, masm->isolate()->factory()->the_hole_value());
2641  __ j(not_equal, &okay, Label::kNear);
2642  __ int3();
2643  __ bind(&okay);
2644  }
2645 
2646  // Check for failure result.
2647  Label failure_returned;
2648  STATIC_ASSERT(((kFailureTag + 1) & kFailureTagMask) == 0);
2649  __ lea(ecx, Operand(eax, 1));
2650  // Lower 2 bits of ecx are 0 iff eax has failure tag.
2651  __ test(ecx, Immediate(kFailureTagMask));
2652  __ j(zero, &failure_returned);
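 // Failure objects are tagged so that adding 1 clears the low two bits; a
 // sketch of the test, assuming kFailureTag == 3 and kFailureTagMask == 3:
 //
 //   bool IsFailure(int32_t value) { return ((value + 1) & 3) == 0; }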
2653 
2654  ExternalReference pending_exception_address(
2655  Isolate::kPendingExceptionAddress, masm->isolate());
2656 
2657  // Check that there is no pending exception, otherwise we
2658  // should have returned some failure value.
2659  if (FLAG_debug_code) {
2660  __ push(edx);
2661  __ mov(edx, Immediate(masm->isolate()->factory()->the_hole_value()));
2662  Label okay;
2663  __ cmp(edx, Operand::StaticVariable(pending_exception_address));
2664  // Cannot use check here as it attempts to generate call into runtime.
2665  __ j(equal, &okay, Label::kNear);
2666  __ int3();
2667  __ bind(&okay);
2668  __ pop(edx);
2669  }
2670 
2671  // Exit the JavaScript to C++ exit frame.
2672  __ LeaveExitFrame(save_doubles_ == kSaveFPRegs);
2673  __ ret(0);
2674 
2675  // Handling of failure.
2676  __ bind(&failure_returned);
2677 
2678  Label retry;
2679  // If the returned exception is RETRY_AFTER_GC continue at retry label
2680  STATIC_ASSERT(Failure::RETRY_AFTER_GC == 0);
2681  __ test(eax, Immediate(((1 << kFailureTypeTagSize) - 1) << kFailureTagSize));
2682  __ j(zero, &retry, Label::kNear);
2683 
2684  // Retrieve the pending exception.
2685  __ mov(eax, Operand::StaticVariable(pending_exception_address));
2686 
2687  // Clear the pending exception.
2688  __ mov(edx, Immediate(masm->isolate()->factory()->the_hole_value()));
2689  __ mov(Operand::StaticVariable(pending_exception_address), edx);
2690 
2691  // Special handling of termination exceptions which are uncatchable
2692  // by JavaScript code.
2693  __ cmp(eax, masm->isolate()->factory()->termination_exception());
2694  __ j(equal, throw_termination_exception);
2695 
2696  // Handle normal exception.
2697  __ jmp(throw_normal_exception);
2698 
2699  // Retry.
2700  __ bind(&retry);
2701 }
2702 
2703 
2704 void CEntryStub::Generate(MacroAssembler* masm) {
2705  // eax: number of arguments including receiver
2706  // ebx: pointer to C function (C callee-saved)
2707  // ebp: frame pointer (restored after C call)
2708  // esp: stack pointer (restored after C call)
2709  // esi: current context (C callee-saved)
2710  // edi: JS function of the caller (C callee-saved)
2711 
2712  ProfileEntryHookStub::MaybeCallEntryHook(masm);
2713 
2714  // NOTE: Invocations of builtins may return failure objects instead
2715  // of a proper result. The builtin entry handles this by performing
2716  // a garbage collection and retrying the builtin (twice).
2717 
2718  // Enter the exit frame that transitions from JavaScript to C++.
2719  __ EnterExitFrame(save_doubles_ == kSaveFPRegs);
2720 
2721  // eax: result parameter for PerformGC, if any (setup below)
2722  // ebx: pointer to builtin function (C callee-saved)
2723  // ebp: frame pointer (restored after C call)
2724  // esp: stack pointer (restored after C call)
2725  // edi: number of arguments including receiver (C callee-saved)
2726  // esi: argv pointer (C callee-saved)
2727 
2728  Label throw_normal_exception;
2729  Label throw_termination_exception;
2730 
2731  // Call into the runtime system.
2732  GenerateCore(masm,
2733  &throw_normal_exception,
2734  &throw_termination_exception,
2735  false,
2736  false);
2737 
2738  // Do space-specific GC and retry runtime call.
2739  GenerateCore(masm,
2740  &throw_normal_exception,
2741  &throw_termination_exception,
2742  true,
2743  false);
2744 
2745  // Do full GC and retry runtime call one final time.
2746  Failure* failure = Failure::InternalError();
2747  __ mov(eax, Immediate(reinterpret_cast<int32_t>(failure)));
2748  GenerateCore(masm,
2749  &throw_normal_exception,
2750  &throw_termination_exception,
2751  true,
2752  true);
2753 
2754  { FrameScope scope(masm, StackFrame::MANUAL);
2755  __ PrepareCallCFunction(0, eax);
2756  __ CallCFunction(
2757  ExternalReference::out_of_memory_function(masm->isolate()), 0);
2758  }
2759 
2760  __ bind(&throw_termination_exception);
2761  __ ThrowUncatchable(eax);
2762 
2763  __ bind(&throw_normal_exception);
2764  __ Throw(eax);
2765 }
2766 
2767 
2768 void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) {
2769  Label invoke, handler_entry, exit;
2770  Label not_outermost_js, not_outermost_js_2;
2771 
2772  ProfileEntryHookStub::MaybeCallEntryHook(masm);
2773 
2774  // Set up frame.
2775  __ push(ebp);
2776  __ mov(ebp, esp);
2777 
2778  // Push marker in two places.
2779  int marker = is_construct ? StackFrame::ENTRY_CONSTRUCT : StackFrame::ENTRY;
2780  __ push(Immediate(Smi::FromInt(marker))); // context slot
2781  __ push(Immediate(Smi::FromInt(marker))); // function slot
2782  // Save callee-saved registers (C calling conventions).
2783  __ push(edi);
2784  __ push(esi);
2785  __ push(ebx);
2786 
2787  // Save copies of the top frame descriptor on the stack.
2788  ExternalReference c_entry_fp(Isolate::kCEntryFPAddress, masm->isolate());
2789  __ push(Operand::StaticVariable(c_entry_fp));
2790 
2791  // If this is the outermost JS call, set js_entry_sp value.
2792  ExternalReference js_entry_sp(Isolate::kJSEntrySPAddress,
2793  masm->isolate());
2794  __ cmp(Operand::StaticVariable(js_entry_sp), Immediate(0));
2795  __ j(not_equal, &not_outermost_js, Label::kNear);
2796  __ mov(Operand::StaticVariable(js_entry_sp), ebp);
2797  __ push(Immediate(Smi::FromInt(StackFrame::OUTERMOST_JSENTRY_FRAME)));
2798  __ jmp(&invoke, Label::kNear);
2799  __ bind(&not_outermost_js);
2800  __ push(Immediate(Smi::FromInt(StackFrame::INNER_JSENTRY_FRAME)));
2801 
2802  // Jump to a faked try block that does the invoke, with a faked catch
2803  // block that sets the pending exception.
2804  __ jmp(&invoke);
2805  __ bind(&handler_entry);
2806  handler_offset_ = handler_entry.pos();
2807  // Caught exception: Store result (exception) in the pending exception
2808  // field in the JSEnv and return a failure sentinel.
2809  ExternalReference pending_exception(Isolate::kPendingExceptionAddress,
2810  masm->isolate());
2811  __ mov(Operand::StaticVariable(pending_exception), eax);
2812  __ mov(eax, reinterpret_cast<int32_t>(Failure::Exception()));
2813  __ jmp(&exit);
2814 
2815  // Invoke: Link this frame into the handler chain. There's only one
2816  // handler block in this code object, so its index is 0.
2817  __ bind(&invoke);
2818  __ PushTryHandler(StackHandler::JS_ENTRY, 0);
2819 
2820  // Clear any pending exceptions.
2821  __ mov(edx, Immediate(masm->isolate()->factory()->the_hole_value()));
2822  __ mov(Operand::StaticVariable(pending_exception), edx);
2823 
2824  // Fake a receiver (NULL).
2825  __ push(Immediate(0)); // receiver
2826 
2827  // Invoke the function by calling through JS entry trampoline builtin and
2828  // pop the faked function when we return. Notice that we cannot store a
2829  // reference to the trampoline code directly in this stub, because the
2830  // builtin stubs may not have been generated yet.
2831  if (is_construct) {
2832  ExternalReference construct_entry(Builtins::kJSConstructEntryTrampoline,
2833  masm->isolate());
2834  __ mov(edx, Immediate(construct_entry));
2835  } else {
2836  ExternalReference entry(Builtins::kJSEntryTrampoline,
2837  masm->isolate());
2838  __ mov(edx, Immediate(entry));
2839  }
2840  __ mov(edx, Operand(edx, 0)); // deref address
2841  __ lea(edx, FieldOperand(edx, Code::kHeaderSize));
2842  __ call(edx);
2843 
2844  // Unlink this frame from the handler chain.
2845  __ PopTryHandler();
2846 
2847  __ bind(&exit);
2848  // Check if the current stack frame is marked as the outermost JS frame.
2849  __ pop(ebx);
2850  __ cmp(ebx, Immediate(Smi::FromInt(StackFrame::OUTERMOST_JSENTRY_FRAME)));
2851  __ j(not_equal, &not_outermost_js_2);
2852  __ mov(Operand::StaticVariable(js_entry_sp), Immediate(0));
2853  __ bind(&not_outermost_js_2);
2854 
2855  // Restore the top frame descriptor from the stack.
2856  __ pop(Operand::StaticVariable(ExternalReference(
2857  Isolate::kCEntryFPAddress,
2858  masm->isolate())));
2859 
2860  // Restore callee-saved registers (C calling conventions).
2861  __ pop(ebx);
2862  __ pop(esi);
2863  __ pop(edi);
2864  __ add(esp, Immediate(2 * kPointerSize)); // remove markers
2865 
2866  // Restore frame pointer and return.
2867  __ pop(ebp);
2868  __ ret(0);
2869 }
2870 
2871 
2872 // Generate stub code for instanceof.
2873  // This code can patch a call-site inlined cache of the instanceof check,
2874 // which looks like this.
2875 //
2876 // 81 ff XX XX XX XX cmp edi, <the hole, patched to a map>
2877 // 75 0a jne <some near label>
2878 // b8 XX XX XX XX mov eax, <the hole, patched to either true or false>
2879 //
2880 // If call site patching is requested the stack will have the delta from the
2881 // return address to the cmp instruction just below the return address. This
2882 // also means that call site patching can only take place with arguments in
2883 // registers. TOS looks like this when call site patching is requested
2884 //
2885 // esp[0] : return address
2886 // esp[4] : delta from return address to cmp instruction
2887 //
2888 void InstanceofStub::Generate(MacroAssembler* masm) {
2889  // Call site inlining and patching implies arguments in registers.
2890  ASSERT(HasArgsInRegisters() || !HasCallSiteInlineCheck());
2891 
2892  // Fixed register usage throughout the stub.
2893  Register object = eax; // Object (lhs).
2894  Register map = ebx; // Map of the object.
2895  Register function = edx; // Function (rhs).
2896  Register prototype = edi; // Prototype of the function.
2897  Register scratch = ecx;
2898 
2899  // Constants describing the call site code to patch.
2900  static const int kDeltaToCmpImmediate = 2;
2901  static const int kDeltaToMov = 8;
2902  static const int kDeltaToMovImmediate = 9;
2903  static const int8_t kCmpEdiOperandByte1 = BitCast<int8_t, uint8_t>(0x3b);
2904  static const int8_t kCmpEdiOperandByte2 = BitCast<int8_t, uint8_t>(0x3d);
2905  static const int8_t kMovEaxImmediateByte = BitCast<int8_t, uint8_t>(0xb8);
2906 
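 // A sketch of how the delta is used: subtracting it from the return address
 // locates the inlined cmp, from which both immediates can be patched
 // (illustrative pointer arithmetic only):
 //
 //   Address cmp_site = return_address - delta;
 //   Address map_cell = cmp_site + kDeltaToCmpImmediate;  // imm32 of the cmp
 //   Address ans_cell = cmp_site + kDeltaToMovImmediate;  // imm32 of the mov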
2907  ASSERT_EQ(object.code(), InstanceofStub::left().code());
2908  ASSERT_EQ(function.code(), InstanceofStub::right().code());
2909 
2910  // Get the object and function - they are always both needed.
2911  Label slow, not_js_object;
2912  if (!HasArgsInRegisters()) {
2913  __ mov(object, Operand(esp, 2 * kPointerSize));
2914  __ mov(function, Operand(esp, 1 * kPointerSize));
2915  }
2916 
2917  // Check that the left hand is a JS object.
2918  __ JumpIfSmi(object, &not_js_object);
2919  __ IsObjectJSObjectType(object, map, scratch, &not_js_object);
2920 
2921  // If there is a call site cache don't look in the global cache, but do the
2922  // real lookup and update the call site cache.
2923  if (!HasCallSiteInlineCheck()) {
2924  // Look up the function and the map in the instanceof cache.
2925  Label miss;
2926  __ CompareRoot(function, scratch, Heap::kInstanceofCacheFunctionRootIndex);
2927  __ j(not_equal, &miss, Label::kNear);
2928  __ CompareRoot(map, scratch, Heap::kInstanceofCacheMapRootIndex);
2929  __ j(not_equal, &miss, Label::kNear);
2930  __ LoadRoot(eax, Heap::kInstanceofCacheAnswerRootIndex);
2931  __ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize);
2932  __ bind(&miss);
2933  }
2934 
2935  // Get the prototype of the function.
2936  __ TryGetFunctionPrototype(function, prototype, scratch, &slow, true);
2937 
2938  // Check that the function prototype is a JS object.
2939  __ JumpIfSmi(prototype, &slow);
2940  __ IsObjectJSObjectType(prototype, scratch, scratch, &slow);
2941 
2942  // Update the global instanceof or call site inlined cache with the current
2943  // map and function. The cached answer will be set when it is known below.
2944  if (!HasCallSiteInlineCheck()) {
2945  __ StoreRoot(map, scratch, Heap::kInstanceofCacheMapRootIndex);
2946  __ StoreRoot(function, scratch, Heap::kInstanceofCacheFunctionRootIndex);
2947  } else {
2948  // The constants for the code patching are based on no push instructions
2949  // at the call site.
2950  ASSERT(HasArgsInRegisters());
2951  // Get return address and delta to inlined map check.
2952  __ mov(scratch, Operand(esp, 0 * kPointerSize));
2953  __ sub(scratch, Operand(esp, 1 * kPointerSize));
2954  if (FLAG_debug_code) {
2955  __ cmpb(Operand(scratch, 0), kCmpEdiOperandByte1);
2956  __ Assert(equal, kInstanceofStubUnexpectedCallSiteCacheCmp1);
2957  __ cmpb(Operand(scratch, 1), kCmpEdiOperandByte2);
2958  __ Assert(equal, kInstanceofStubUnexpectedCallSiteCacheCmp2);
2959  }
2960  __ mov(scratch, Operand(scratch, kDeltaToCmpImmediate));
2961  __ mov(Operand(scratch, 0), map);
2962  }
2963 
2964  // Loop through the prototype chain of the object looking for the function
2965  // prototype.
2966  __ mov(scratch, FieldOperand(map, Map::kPrototypeOffset));
2967  Label loop, is_instance, is_not_instance;
2968  __ bind(&loop);
2969  __ cmp(scratch, prototype);
2970  __ j(equal, &is_instance, Label::kNear);
2971  Factory* factory = masm->isolate()->factory();
2972  __ cmp(scratch, Immediate(factory->null_value()));
2973  __ j(equal, &is_not_instance, Label::kNear);
2974  __ mov(scratch, FieldOperand(scratch, HeapObject::kMapOffset));
2975  __ mov(scratch, FieldOperand(scratch, Map::kPrototypeOffset));
2976  __ jmp(&loop);
2977 
2978  __ bind(&is_instance);
2979  if (!HasCallSiteInlineCheck()) {
2980  __ mov(eax, Immediate(0));
2981  __ StoreRoot(eax, scratch, Heap::kInstanceofCacheAnswerRootIndex);
2982  } else {
2983  // Get return address and delta to inlined map check.
2984  __ mov(eax, factory->true_value());
2985  __ mov(scratch, Operand(esp, 0 * kPointerSize));
2986  __ sub(scratch, Operand(esp, 1 * kPointerSize));
2987  if (FLAG_debug_code) {
2988  __ cmpb(Operand(scratch, kDeltaToMov), kMovEaxImmediateByte);
2989  __ Assert(equal, kInstanceofStubUnexpectedCallSiteCacheMov);
2990  }
2991  __ mov(Operand(scratch, kDeltaToMovImmediate), eax);
2992  if (!ReturnTrueFalseObject()) {
2993  __ Move(eax, Immediate(0));
2994  }
2995  }
2996  __ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize);
2997 
2998  __ bind(&is_not_instance);
2999  if (!HasCallSiteInlineCheck()) {
3000  __ mov(eax, Immediate(Smi::FromInt(1)));
3001  __ StoreRoot(eax, scratch, Heap::kInstanceofCacheAnswerRootIndex);
3002  } else {
3003  // Get return address and delta to inlined map check.
3004  __ mov(eax, factory->false_value());
3005  __ mov(scratch, Operand(esp, 0 * kPointerSize));
3006  __ sub(scratch, Operand(esp, 1 * kPointerSize));
3007  if (FLAG_debug_code) {
3008  __ cmpb(Operand(scratch, kDeltaToMov), kMovEaxImmediateByte);
3009  __ Assert(equal, kInstanceofStubUnexpectedCallSiteCacheMov);
3010  }
3011  __ mov(Operand(scratch, kDeltaToMovImmediate), eax);
3012  if (!ReturnTrueFalseObject()) {
3013  __ Move(eax, Immediate(Smi::FromInt(1)));
3014  }
3015  }
3016  __ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize);
3017 
3018  Label object_not_null, object_not_null_or_smi;
3019  __ bind(&not_js_object);
3020  // Before the null, smi and string value checks, check that the rhs is a
3021  // function, as for a non-function rhs an exception needs to be thrown.
3022  __ JumpIfSmi(function, &slow, Label::kNear);
3023  __ CmpObjectType(function, JS_FUNCTION_TYPE, scratch);
3024  __ j(not_equal, &slow, Label::kNear);
3025 
3026  // Null is not an instance of anything.
3027  __ cmp(object, factory->null_value());
3028  __ j(not_equal, &object_not_null, Label::kNear);
3029  __ Move(eax, Immediate(Smi::FromInt(1)));
3030  __ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize);
3031 
3032  __ bind(&object_not_null);
3033  // Smi values are not instances of anything.
3034  __ JumpIfNotSmi(object, &object_not_null_or_smi, Label::kNear);
3035  __ Move(eax, Immediate(Smi::FromInt(1)));
3036  __ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize);
3037 
3038  __ bind(&object_not_null_or_smi);
3039  // String values are not instances of anything.
3040  Condition is_string = masm->IsObjectStringType(object, scratch, scratch);
3041  __ j(NegateCondition(is_string), &slow, Label::kNear);
3042  __ Move(eax, Immediate(Smi::FromInt(1)));
3043  __ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize);
3044 
3045  // Slow-case: Go through the JavaScript implementation.
3046  __ bind(&slow);
3047  if (!ReturnTrueFalseObject()) {
3048  // Tail call the builtin which returns 0 or 1.
3049  if (HasArgsInRegisters()) {
3050  // Push arguments below return address.
3051  __ pop(scratch);
3052  __ push(object);
3053  __ push(function);
3054  __ push(scratch);
3055  }
3056  __ InvokeBuiltin(Builtins::INSTANCE_OF, JUMP_FUNCTION);
3057  } else {
3058  // Call the builtin and convert 0/1 to true/false.
3059  {
3060  FrameScope scope(masm, StackFrame::INTERNAL);
3061  __ push(object);
3062  __ push(function);
3063  __ InvokeBuiltin(Builtins::INSTANCE_OF, CALL_FUNCTION);
3064  }
3065  Label true_value, done;
3066  __ test(eax, eax);
3067  __ j(zero, &true_value, Label::kNear);
3068  __ mov(eax, factory->false_value());
3069  __ jmp(&done, Label::kNear);
3070  __ bind(&true_value);
3071  __ mov(eax, factory->true_value());
3072  __ bind(&done);
3073  __ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize);
3074  }
3075 }
3076 
3077 
3078 Register InstanceofStub::left() { return eax; }
3079 
3080 
3081 Register InstanceofStub::right() { return edx; }
3082 
3083 
3084 // -------------------------------------------------------------------------
3085 // StringCharCodeAtGenerator
3086 
3087 void StringCharCodeAtGenerator::GenerateFast(MacroAssembler* masm) {
3088  // If the receiver is a smi trigger the non-string case.
3089  STATIC_ASSERT(kSmiTag == 0);
3090  __ JumpIfSmi(object_, receiver_not_string_);
3091 
3092  // Fetch the instance type of the receiver into result register.
3093  __ mov(result_, FieldOperand(object_, HeapObject::kMapOffset));
3094  __ movzx_b(result_, FieldOperand(result_, Map::kInstanceTypeOffset));
3095  // If the receiver is not a string trigger the non-string case.
3096  __ test(result_, Immediate(kIsNotStringMask));
3097  __ j(not_zero, receiver_not_string_);
3098 
3099  // If the index is non-smi trigger the non-smi case.
3100  STATIC_ASSERT(kSmiTag == 0);
3101  __ JumpIfNotSmi(index_, &index_not_smi_);
3102  __ bind(&got_smi_index_);
3103 
3104  // Check for index out of range.
3105  __ cmp(index_, FieldOperand(object_, String::kLengthOffset));
3106  __ j(above_equal, index_out_of_range_);
3107 
3108  __ SmiUntag(index_);
3109 
3110  Factory* factory = masm->isolate()->factory();
3111  StringCharLoadGenerator::Generate(
3112  masm, factory, object_, index_, result_, &call_runtime_);
3113 
3114  __ SmiTag(result_);
3115  __ bind(&exit_);
3116 }
3117 
3118 
3119  void StringCharCodeAtGenerator::GenerateSlow(
3120  MacroAssembler* masm,
3121  const RuntimeCallHelper& call_helper) {
3122  __ Abort(kUnexpectedFallthroughToCharCodeAtSlowCase);
3123 
3124  // Index is not a smi.
3125  __ bind(&index_not_smi_);
3126  // If index is a heap number, try converting it to an integer.
3127  __ CheckMap(index_,
3128  masm->isolate()->factory()->heap_number_map(),
3129  index_not_number_,
3130  DONT_DO_SMI_CHECK);
3131  call_helper.BeforeCall(masm);
3132  __ push(object_);
3133  __ push(index_); // Consumed by runtime conversion function.
3134  if (index_flags_ == STRING_INDEX_IS_NUMBER) {
3135  __ CallRuntime(Runtime::kNumberToIntegerMapMinusZero, 1);
3136  } else {
3137  ASSERT(index_flags_ == STRING_INDEX_IS_ARRAY_INDEX);
3138  // NumberToSmi discards numbers that are not exact integers.
3139  __ CallRuntime(Runtime::kHiddenNumberToSmi, 1);
3140  }
3141  if (!index_.is(eax)) {
3142  // Save the conversion result before the pop instructions below
3143  // have a chance to overwrite it.
3144  __ mov(index_, eax);
3145  }
3146  __ pop(object_);
3147  // Reload the instance type.
3148  __ mov(result_, FieldOperand(object_, HeapObject::kMapOffset));
3149  __ movzx_b(result_, FieldOperand(result_, Map::kInstanceTypeOffset));
3150  call_helper.AfterCall(masm);
3151  // If index is still not a smi, it must be out of range.
3152  STATIC_ASSERT(kSmiTag == 0);
3153  __ JumpIfNotSmi(index_, index_out_of_range_);
3154  // Otherwise, return to the fast path.
3155  __ jmp(&got_smi_index_);
3156 
3157  // Call runtime. We get here when the receiver is a string and the
3158  // index is a number, but the code for getting the actual character
3159  // is too complex (e.g., when the string needs to be flattened).
3160  __ bind(&call_runtime_);
3161  call_helper.BeforeCall(masm);
3162  __ push(object_);
3163  __ SmiTag(index_);
3164  __ push(index_);
3165  __ CallRuntime(Runtime::kHiddenStringCharCodeAt, 2);
3166  if (!result_.is(eax)) {
3167  __ mov(result_, eax);
3168  }
3169  call_helper.AfterCall(masm);
3170  __ jmp(&exit_);
3171 
3172  __ Abort(kUnexpectedFallthroughFromCharCodeAtSlowCase);
3173 }
3174 
3175 
3176 // -------------------------------------------------------------------------
3177 // StringCharFromCodeGenerator
3178 
3179 void StringCharFromCodeGenerator::GenerateFast(MacroAssembler* masm) {
3180  // Fast case of Heap::LookupSingleCharacterStringFromCode.
3181  STATIC_ASSERT(kSmiTag == 0);
3182  STATIC_ASSERT(kSmiShiftSize == 0);
3183  ASSERT(IsPowerOf2(String::kMaxOneByteCharCode + 1));
3184  __ test(code_,
3185  Immediate(kSmiTagMask |
3186  ((~String::kMaxOneByteCharCode) << kSmiTagSize)));
3187  __ j(not_zero, &slow_case_);
3188 
3189  Factory* factory = masm->isolate()->factory();
3190  __ Move(result_, Immediate(factory->single_character_string_cache()));
3191  STATIC_ASSERT(kSmiTag == 0);
3192  STATIC_ASSERT(kSmiTagSize == 1);
3193  STATIC_ASSERT(kSmiShiftSize == 0);
3194  // At this point code register contains smi tagged ASCII char code.
3195  __ mov(result_, FieldOperand(result_,
3196  code_, times_half_pointer_size,
3197  FixedArray::kHeaderSize));
3198  __ cmp(result_, factory->undefined_value());
3199  __ j(equal, &slow_case_);
3200  __ bind(&exit_);
3201 }
3202 
3203 
3204 void StringCharFromCodeGenerator::GenerateSlow(
3205  MacroAssembler* masm,
3206  const RuntimeCallHelper& call_helper) {
3207  __ Abort(kUnexpectedFallthroughToCharFromCodeSlowCase);
3208 
3209  __ bind(&slow_case_);
3210  call_helper.BeforeCall(masm);
3211  __ push(code_);
3212  __ CallRuntime(Runtime::kCharFromCode, 1);
3213  if (!result_.is(eax)) {
3214  __ mov(result_, eax);
3215  }
3216  call_helper.AfterCall(masm);
3217  __ jmp(&exit_);
3218 
3219  __ Abort(kUnexpectedFallthroughFromCharFromCodeSlowCase);
3220 }
3221 
3222 
3223 void StringHelper::GenerateCopyCharactersREP(MacroAssembler* masm,
3224  Register dest,
3225  Register src,
3226  Register count,
3227  Register scratch,
3228  bool ascii) {
3229  // Copy characters using rep movs of doublewords.
3230  // The destination is aligned on a 4 byte boundary because we are
3231  // copying to the beginning of a newly allocated string.
3232  ASSERT(dest.is(edi)); // rep movs destination
3233  ASSERT(src.is(esi)); // rep movs source
3234  ASSERT(count.is(ecx)); // rep movs count
3235  ASSERT(!scratch.is(dest));
3236  ASSERT(!scratch.is(src));
3237  ASSERT(!scratch.is(count));
3238 
3239  // Nothing to do for zero characters.
3240  Label done;
3241  __ test(count, count);
3242  __ j(zero, &done);
3243 
3244  // Make count the number of bytes to copy.
3245  if (!ascii) {
3246  __ shl(count, 1);
3247  }
3248 
3249  // Don't enter the rep movs if there are fewer than 4 bytes to copy.
3250  Label last_bytes;
3251  __ test(count, Immediate(~3));
3252  __ j(zero, &last_bytes, Label::kNear);
3253 
3254  // Copy from esi to edi using the rep movs instruction.
3255  __ mov(scratch, count);
3256  __ sar(count, 2); // Number of doublewords to copy.
3257  __ cld();
3258  __ rep_movs();
3259 
3260  // Find number of bytes left.
3261  __ mov(count, scratch);
3262  __ and_(count, 3);
3263 
3264  // Check if there are more bytes to copy.
3265  __ bind(&last_bytes);
3266  __ test(count, count);
3267  __ j(zero, &done);
3268 
3269  // Copy remaining characters.
3270  Label loop;
3271  __ bind(&loop);
3272  __ mov_b(scratch, Operand(src, 0));
3273  __ mov_b(Operand(dest, 0), scratch);
3274  __ add(src, Immediate(1));
3275  __ add(dest, Immediate(1));
3276  __ sub(count, Immediate(1));
3277  __ j(not_zero, &loop);
3278 
3279  __ bind(&done);
3280 }
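
// For reference, the copy strategy above corresponds to this portable C++
// sketch (an illustrative reconstruction, not part of the stub): widen the
// character count to a byte count, bulk-copy whole doublewords (the rep movs
// step), then finish the 0-3 trailing bytes one at a time.
static void CopyCharactersSketch(uint8_t* dest, const uint8_t* src,
                                 uint32_t count, bool ascii) {
  if (count == 0) return;                      // nothing to do
  uint32_t bytes = ascii ? count : count * 2;  // two-byte chars double it
  uint32_t doubleword_bytes = bytes & ~3u;     // the rep movs portion
  for (uint32_t i = 0; i < doubleword_bytes; i += 4) {
    dest[i] = src[i];                          // copied four at a time by
    dest[i + 1] = src[i + 1];                  // a single rep movs of
    dest[i + 2] = src[i + 2];                  // doublewords in the stub
    dest[i + 3] = src[i + 3];
  }
  for (uint32_t i = doubleword_bytes; i < bytes; i++) {
    dest[i] = src[i];                          // the last_bytes loop
  }
}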
3281 
3282 
3283 void StringHelper::GenerateHashInit(MacroAssembler* masm,
3284  Register hash,
3285  Register character,
3286  Register scratch) {
3287  // hash = (seed + character) + ((seed + character) << 10);
3288  if (Serializer::enabled()) {
3289  __ LoadRoot(scratch, Heap::kHashSeedRootIndex);
3290  __ SmiUntag(scratch);
3291  __ add(scratch, character);
3292  __ mov(hash, scratch);
3293  __ shl(scratch, 10);
3294  __ add(hash, scratch);
3295  } else {
3296  int32_t seed = masm->isolate()->heap()->HashSeed();
3297  __ lea(scratch, Operand(character, seed));
3298  __ shl(scratch, 10);
3299  __ lea(hash, Operand(scratch, character, times_1, seed));
3300  }
3301  // hash ^= hash >> 6;
3302  __ mov(scratch, hash);
3303  __ shr(scratch, 6);
3304  __ xor_(hash, scratch);
3305 }
3306 
3307 
3308 void StringHelper::GenerateHashAddCharacter(MacroAssembler* masm,
3309  Register hash,
3310  Register character,
3311  Register scratch) {
3312  // hash += character;
3313  __ add(hash, character);
3314  // hash += hash << 10;
3315  __ mov(scratch, hash);
3316  __ shl(scratch, 10);
3317  __ add(hash, scratch);
3318  // hash ^= hash >> 6;
3319  __ mov(scratch, hash);
3320  __ shr(scratch, 6);
3321  __ xor_(hash, scratch);
3322 }
3323 
3324 
3325 void StringHelper::GenerateHashGetHash(MacroAssembler* masm,
3326  Register hash,
3327  Register scratch) {
3328  // hash += hash << 3;
3329  __ mov(scratch, hash);
3330  __ shl(scratch, 3);
3331  __ add(hash, scratch);
3332  // hash ^= hash >> 11;
3333  __ mov(scratch, hash);
3334  __ shr(scratch, 11);
3335  __ xor_(hash, scratch);
3336  // hash += hash << 15;
3337  __ mov(scratch, hash);
3338  __ shl(scratch, 15);
3339  __ add(hash, scratch);
3340 
3341  __ and_(hash, String::kHashBitMask);
3342 
3343  // if (hash == 0) hash = 27;
3344  Label hash_not_zero;
3345  __ j(not_zero, &hash_not_zero, Label::kNear);
3346  __ mov(hash, Immediate(StringHasher::kZeroHash));
3347  __ bind(&hash_not_zero);
3348 }
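
// Taken together, GenerateHashInit, GenerateHashAddCharacter and
// GenerateHashGetHash compute a seeded Jenkins-style one-at-a-time hash.
// A minimal C++ sketch of the same arithmetic (illustrative only;
// kSketchHashBitMask and kSketchZeroHash stand in for String::kHashBitMask
// and StringHasher::kZeroHash, the latter being 27 per the comment above):
static uint32_t StringHashSketch(uint32_t seed, const uint8_t* chars,
                                 int length,  // must be >= 1
                                 uint32_t kSketchHashBitMask,
                                 uint32_t kSketchZeroHash) {
  uint32_t hash = seed + chars[0];    // GenerateHashInit
  hash += hash << 10;
  hash ^= hash >> 6;
  for (int i = 1; i < length; i++) {  // GenerateHashAddCharacter
    hash += chars[i];
    hash += hash << 10;
    hash ^= hash >> 6;
  }
  hash += hash << 3;                  // GenerateHashGetHash
  hash ^= hash >> 11;
  hash += hash << 15;
  hash &= kSketchHashBitMask;
  return hash == 0 ? kSketchZeroHash : hash;  // if (hash == 0) hash = 27;
}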
3349 
3350 
3351 void SubStringStub::Generate(MacroAssembler* masm) {
3352  Label runtime;
3353 
3354  // Stack frame on entry.
3355  // esp[0]: return address
3356  // esp[4]: to
3357  // esp[8]: from
3358  // esp[12]: string
3359 
3360  // Make sure first argument is a string.
3361  __ mov(eax, Operand(esp, 3 * kPointerSize));
3362  STATIC_ASSERT(kSmiTag == 0);
3363  __ JumpIfSmi(eax, &runtime);
3364  Condition is_string = masm->IsObjectStringType(eax, ebx, ebx);
3365  __ j(NegateCondition(is_string), &runtime);
3366 
3367  // eax: string
3368  // ebx: instance type
3369 
3370  // Calculate length of sub string using the smi values.
3371  __ mov(ecx, Operand(esp, 1 * kPointerSize)); // To index.
3372  __ JumpIfNotSmi(ecx, &runtime);
3373  __ mov(edx, Operand(esp, 2 * kPointerSize)); // From index.
3374  __ JumpIfNotSmi(edx, &runtime);
3375  __ sub(ecx, edx);
3376  __ cmp(ecx, FieldOperand(eax, String::kLengthOffset));
3377  Label not_original_string;
3378  // Shorter than original string's length: an actual substring.
3379  __ j(below, &not_original_string, Label::kNear);
3380  // Longer than original string's length or negative: unsafe arguments.
3381  __ j(above, &runtime);
3382  // Return original string.
3383  Counters* counters = masm->isolate()->counters();
3384  __ IncrementCounter(counters->sub_string_native(), 1);
3385  __ ret(3 * kPointerSize);
3386  __ bind(&not_original_string);
3387 
3388  Label single_char;
3389  __ cmp(ecx, Immediate(Smi::FromInt(1)));
3390  __ j(equal, &single_char);
3391 
3392  // eax: string
3393  // ebx: instance type
3394  // ecx: sub string length (smi)
3395  // edx: from index (smi)
3396  // Deal with different string types: update the index if necessary
3397  // and put the underlying string into edi.
3398  Label underlying_unpacked, sliced_string, seq_or_external_string;
3399  // If the string is not indirect, it can only be sequential or external.
3400  STATIC_ASSERT(kIsIndirectStringMask == (kSlicedStringTag & kConsStringTag));
3401  STATIC_ASSERT(kIsIndirectStringMask != 0);
3402  __ test(ebx, Immediate(kIsIndirectStringMask));
3403  __ j(zero, &seq_or_external_string, Label::kNear);
3404 
3405  Factory* factory = masm->isolate()->factory();
3406  __ test(ebx, Immediate(kSlicedNotConsMask));
3407  __ j(not_zero, &sliced_string, Label::kNear);
3408  // Cons string. Check whether it is flat, then fetch first part.
3409  // Flat cons strings have an empty second part.
3410  __ cmp(FieldOperand(eax, ConsString::kSecondOffset),
3411  factory->empty_string());
3412  __ j(not_equal, &runtime);
3413  __ mov(edi, FieldOperand(eax, ConsString::kFirstOffset));
3414  // Update instance type.
3415  __ mov(ebx, FieldOperand(edi, HeapObject::kMapOffset));
3416  __ movzx_b(ebx, FieldOperand(ebx, Map::kInstanceTypeOffset));
3417  __ jmp(&underlying_unpacked, Label::kNear);
3418 
3419  __ bind(&sliced_string);
3420  // Sliced string. Fetch parent and adjust start index by offset.
3421  __ add(edx, FieldOperand(eax, SlicedString::kOffsetOffset));
3422  __ mov(edi, FieldOperand(eax, SlicedString::kParentOffset));
3423  // Update instance type.
3424  __ mov(ebx, FieldOperand(edi, HeapObject::kMapOffset));
3425  __ movzx_b(ebx, FieldOperand(ebx, Map::kInstanceTypeOffset));
3426  __ jmp(&underlying_unpacked, Label::kNear);
3427 
3428  __ bind(&seq_or_external_string);
3429  // Sequential or external string. Just move string to the expected register.
3430  __ mov(edi, eax);
3431 
3432  __ bind(&underlying_unpacked);
3433 
3434  if (FLAG_string_slices) {
3435  Label copy_routine;
3436  // edi: underlying subject string
3437  // ebx: instance type of underlying subject string
3438  // edx: adjusted start index (smi)
3439  // ecx: length (smi)
3440  __ cmp(ecx, Immediate(Smi::FromInt(SlicedString::kMinLength)));
3441  // Short slice. Copy instead of slicing.
3442  __ j(less, &copy_routine);
3443  // Allocate new sliced string. At this point we do not reload the instance
3444  // type including the string encoding because we simply rely on the info
3445  // provided by the original string. It does not matter if the original
3446  // string's encoding is wrong because we always have to recheck encoding of
3447  // the newly created string's parent anyways due to externalized strings.
3448  Label two_byte_slice, set_slice_header;
3449  STATIC_ASSERT((kStringEncodingMask & kOneByteStringTag) != 0);
3450  STATIC_ASSERT((kStringEncodingMask & kTwoByteStringTag) == 0);
3451  __ test(ebx, Immediate(kStringEncodingMask));
3452  __ j(zero, &two_byte_slice, Label::kNear);
3453  __ AllocateAsciiSlicedString(eax, ebx, no_reg, &runtime);
3454  __ jmp(&set_slice_header, Label::kNear);
3455  __ bind(&two_byte_slice);
3456  __ AllocateTwoByteSlicedString(eax, ebx, no_reg, &runtime);
3457  __ bind(&set_slice_header);
3458  __ mov(FieldOperand(eax, SlicedString::kLengthOffset), ecx);
3459  __ mov(FieldOperand(eax, SlicedString::kHashFieldOffset),
3460  Immediate(String::kEmptyHashField));
3461  __ mov(FieldOperand(eax, SlicedString::kParentOffset), edi);
3462  __ mov(FieldOperand(eax, SlicedString::kOffsetOffset), edx);
3463  __ IncrementCounter(counters->sub_string_native(), 1);
3464  __ ret(3 * kPointerSize);
3465 
3466  __ bind(&copy_routine);
3467  }
3468 
3469  // edi: underlying subject string
3470  // ebx: instance type of underlying subject string
3471  // edx: adjusted start index (smi)
3472  // ecx: length (smi)
3473  // The subject string can only be external or sequential string of either
3474  // encoding at this point.
3475  Label two_byte_sequential, runtime_drop_two, sequential_string;
3476  STATIC_ASSERT(kExternalStringTag != 0);
3477  STATIC_ASSERT(kSeqStringTag == 0);
3478  __ test_b(ebx, kExternalStringTag);
3479  __ j(zero, &sequential_string);
3480 
3481  // Handle external string.
3482  // Rule out short external strings.
3483  STATIC_ASSERT(kShortExternalStringTag != 0);
3484  __ test_b(ebx, kShortExternalStringMask);
3485  __ j(not_zero, &runtime);
3486  __ mov(edi, FieldOperand(edi, ExternalString::kResourceDataOffset));
3487  // Move the pointer so that offset-wise, it looks like a sequential string.
3488  STATIC_ASSERT(SeqTwoByteString::kHeaderSize == SeqOneByteString::kHeaderSize);
3489  __ sub(edi, Immediate(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
3490 
3491  __ bind(&sequential_string);
3492  // Stash away (adjusted) index and (underlying) string.
3493  __ push(edx);
3494  __ push(edi);
3495  __ SmiUntag(ecx);
3496  STATIC_ASSERT((kOneByteStringTag & kStringEncodingMask) != 0);
3497  __ test_b(ebx, kStringEncodingMask);
3498  __ j(zero, &two_byte_sequential);
3499 
3500  // Sequential ASCII string. Allocate the result.
3501  __ AllocateAsciiString(eax, ecx, ebx, edx, edi, &runtime_drop_two);
3502 
3503  // eax: result string
3504  // ecx: result string length
3505  __ mov(edx, esi); // esi used by following code.
3506  // Locate first character of result.
3507  __ mov(edi, eax);
3508  __ add(edi, Immediate(SeqOneByteString::kHeaderSize - kHeapObjectTag));
3509  // Load string argument and locate character of sub string start.
3510  __ pop(esi);
3511  __ pop(ebx);
3512  __ SmiUntag(ebx);
3513  __ lea(esi, FieldOperand(esi, ebx, times_1, SeqOneByteString::kHeaderSize));
3514 
3515  // eax: result string
3516  // ecx: result length
3517  // edx: original value of esi
3518  // edi: first character of result
3519  // esi: character of sub string start
3520  StringHelper::GenerateCopyCharactersREP(masm, edi, esi, ecx, ebx, true);
3521  __ mov(esi, edx); // Restore esi.
3522  __ IncrementCounter(counters->sub_string_native(), 1);
3523  __ ret(3 * kPointerSize);
3524 
3525  __ bind(&two_byte_sequential);
3526  // Sequential two-byte string. Allocate the result.
3527  __ AllocateTwoByteString(eax, ecx, ebx, edx, edi, &runtime_drop_two);
3528 
3529  // eax: result string
3530  // ecx: result string length
3531  __ mov(edx, esi); // esi used by following code.
3532  // Locate first character of result.
3533  __ mov(edi, eax);
3534  __ add(edi,
3535  Immediate(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
3536  // Load string argument and locate character of sub string start.
3537  __ pop(esi);
3538  __ pop(ebx);
3539  // Since from is a smi it is already twice the index, which matches the
3540  // size of a two-byte character.
3541  STATIC_ASSERT(kSmiTag == 0);
3542  STATIC_ASSERT(kSmiTagSize == 1 && kSmiShiftSize == 0);
3543  __ lea(esi, FieldOperand(esi, ebx, times_1, SeqTwoByteString::kHeaderSize));
3544 
3545  // eax: result string
3546  // ecx: result length
3547  // edx: original value of esi
3548  // edi: first character of result
3549  // esi: character of sub string start
3550  StringHelper::GenerateCopyCharactersREP(masm, edi, esi, ecx, ebx, false);
3551  __ mov(esi, edx); // Restore esi.
3552  __ IncrementCounter(counters->sub_string_native(), 1);
3553  __ ret(3 * kPointerSize);
3554 
3555  // Drop pushed values on the stack before tail call.
3556  __ bind(&runtime_drop_two);
3557  __ Drop(2);
3558 
3559  // Just jump to runtime to create the sub string.
3560  __ bind(&runtime);
3561  __ TailCallRuntime(Runtime::kHiddenSubString, 3, 1);
3562 
3563  __ bind(&single_char);
3564  // eax: string
3565  // ebx: instance type
3566  // ecx: sub string length (smi)
3567  // edx: from index (smi)
3568  StringCharAtGenerator generator(
3569  eax, edx, ecx, eax, &runtime, &runtime, &runtime, STRING_INDEX_IS_NUMBER);
3570  generator.GenerateFast(masm);
3571  __ ret(3 * kPointerSize);
3572  generator.SkipSlow(masm, &runtime);
3573 }
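
// In outline, SubStringStub picks one of four exits. A self-contained C++
// sketch of that decision (illustrative only; string_slices and
// sliced_min_length mirror FLAG_string_slices and SlicedString::kMinLength):
enum SubStringPathSketch {
  kReturnOriginalSketch,  // sub string covers the whole string
  kSingleCharSketch,      // length 1: StringCharAtGenerator path
  kMakeSliceSketch,       // long enough: O(1) SlicedString over the parent
  kCopyCharsSketch        // otherwise: allocate and copy sequentially
};

static SubStringPathSketch ClassifySubString(int string_length, int from,
                                             int to, bool string_slices,
                                             int sliced_min_length) {
  int sub_length = to - from;
  if (sub_length == string_length) return kReturnOriginalSketch;
  if (sub_length == 1) return kSingleCharSketch;
  if (string_slices && sub_length >= sliced_min_length) {
    return kMakeSliceSketch;
  }
  return kCopyCharsSketch;
}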
3574 
3575 
3576 void StringCompareStub::GenerateFlatAsciiStringEquals(MacroAssembler* masm,
3577  Register left,
3578  Register right,
3579  Register scratch1,
3580  Register scratch2) {
3581  Register length = scratch1;
3582 
3583  // Compare lengths.
3584  Label strings_not_equal, check_zero_length;
3585  __ mov(length, FieldOperand(left, String::kLengthOffset));
3586  __ cmp(length, FieldOperand(right, String::kLengthOffset));
3587  __ j(equal, &check_zero_length, Label::kNear);
3588  __ bind(&strings_not_equal);
3589  __ Move(eax, Immediate(Smi::FromInt(NOT_EQUAL)));
3590  __ ret(0);
3591 
3592  // Check if the length is zero.
3593  Label compare_chars;
3594  __ bind(&check_zero_length);
3595  STATIC_ASSERT(kSmiTag == 0);
3596  __ test(length, length);
3597  __ j(not_zero, &compare_chars, Label::kNear);
3598  __ Move(eax, Immediate(Smi::FromInt(EQUAL)));
3599  __ ret(0);
3600 
3601  // Compare characters.
3602  __ bind(&compare_chars);
3603  GenerateAsciiCharsCompareLoop(masm, left, right, length, scratch2,
3604  &strings_not_equal, Label::kNear);
3605 
3606  // Characters are equal.
3607  __ Move(eax, Immediate(Smi::FromInt(EQUAL)));
3608  __ ret(0);
3609 }
3610 
3611 
3612 void StringCompareStub::GenerateCompareFlatAsciiStrings(MacroAssembler* masm,
3613  Register left,
3614  Register right,
3615  Register scratch1,
3616  Register scratch2,
3617  Register scratch3) {
3618  Counters* counters = masm->isolate()->counters();
3619  __ IncrementCounter(counters->string_compare_native(), 1);
3620 
3621  // Find minimum length.
3622  Label left_shorter;
3623  __ mov(scratch1, FieldOperand(left, String::kLengthOffset));
3624  __ mov(scratch3, scratch1);
3625  __ sub(scratch3, FieldOperand(right, String::kLengthOffset));
3626 
3627  Register length_delta = scratch3;
3628 
3629  __ j(less_equal, &left_shorter, Label::kNear);
3630  // Right string is shorter. Change scratch1 to be length of right string.
3631  __ sub(scratch1, length_delta);
3632  __ bind(&left_shorter);
3633 
3634  Register min_length = scratch1;
3635 
3636  // If either length is zero, just compare lengths.
3637  Label compare_lengths;
3638  __ test(min_length, min_length);
3639  __ j(zero, &compare_lengths, Label::kNear);
3640 
3641  // Compare characters.
3642  Label result_not_equal;
3643  GenerateAsciiCharsCompareLoop(masm, left, right, min_length, scratch2,
3644  &result_not_equal, Label::kNear);
3645 
3646  // Compare lengths - strings up to min-length are equal.
3647  __ bind(&compare_lengths);
3648  __ test(length_delta, length_delta);
3649  Label length_not_equal;
3650  __ j(not_zero, &length_not_equal, Label::kNear);
3651 
3652  // Result is EQUAL.
3653  STATIC_ASSERT(EQUAL == 0);
3654  STATIC_ASSERT(kSmiTag == 0);
3655  __ Move(eax, Immediate(Smi::FromInt(EQUAL)));
3656  __ ret(0);
3657 
3658  Label result_greater;
3659  Label result_less;
3660  __ bind(&length_not_equal);
3661  __ j(greater, &result_greater, Label::kNear);
3662  __ jmp(&result_less, Label::kNear);
3663  __ bind(&result_not_equal);
3664  __ j(above, &result_greater, Label::kNear);
3665  __ bind(&result_less);
3666 
3667  // Result is LESS.
3668  __ Move(eax, Immediate(Smi::FromInt(LESS)));
3669  __ ret(0);
3670 
3671  // Result is GREATER.
3672  __ bind(&result_greater);
3673  __ Move(eax, Immediate(Smi::FromInt(GREATER)));
3674  __ ret(0);
3675 }
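
// The comparison above in C++ form (illustrative sketch): compare characters
// up to the shorter length, then use the length difference as the tiebreaker.
static int CompareFlatAsciiSketch(const uint8_t* left, int left_length,
                                  const uint8_t* right, int right_length) {
  int min_length = left_length < right_length ? left_length : right_length;
  for (int i = 0; i < min_length; i++) {
    if (left[i] != right[i]) {
      return left[i] < right[i] ? -1 : 1;        // result_not_equal
    }
  }
  if (left_length == right_length) return 0;     // EQUAL
  return left_length < right_length ? -1 : 1;    // length_not_equal
}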
3676 
3677 
3678 void StringCompareStub::GenerateAsciiCharsCompareLoop(
3679  MacroAssembler* masm,
3680  Register left,
3681  Register right,
3682  Register length,
3683  Register scratch,
3684  Label* chars_not_equal,
3685  Label::Distance chars_not_equal_near) {
3686  // Change index to run from -length to -1 by adding length to string
3687  // start. This means that loop ends when index reaches zero, which
3688  // doesn't need an additional compare.
3689  __ SmiUntag(length);
3690  __ lea(left,
3691  FieldOperand(left, length, times_1, SeqOneByteString::kHeaderSize));
3692  __ lea(right,
3693  FieldOperand(right, length, times_1, SeqOneByteString::kHeaderSize));
3694  __ neg(length);
3695  Register index = length; // index = -length;
3696 
3697  // Compare loop.
3698  Label loop;
3699  __ bind(&loop);
3700  __ mov_b(scratch, Operand(left, index, times_1, 0));
3701  __ cmpb(scratch, Operand(right, index, times_1, 0));
3702  __ j(not_equal, chars_not_equal, chars_not_equal_near);
3703  __ inc(index);
3704  __ j(not_zero, &loop);
3705 }
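
// The loop above uses the negative-index trick: both string pointers are
// advanced one past the end and the index counts up from -length to 0, so
// the inc instruction's zero flag ends the loop without a separate compare.
// The same idea in C++ (illustrative only):
static bool CharsEqualSketch(const uint8_t* left, const uint8_t* right,
                             int length) {
  left += length;   // point one past the last character
  right += length;
  for (int index = -length; index != 0; index++) {
    if (left[index] != right[index]) return false;  // chars_not_equal
  }
  return true;
}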
3706 
3707 
3708 void StringCompareStub::Generate(MacroAssembler* masm) {
3709  Label runtime;
3710 
3711  // Stack frame on entry.
3712  // esp[0]: return address
3713  // esp[4]: right string
3714  // esp[8]: left string
3715 
3716  __ mov(edx, Operand(esp, 2 * kPointerSize)); // left
3717  __ mov(eax, Operand(esp, 1 * kPointerSize)); // right
3718 
3719  Label not_same;
3720  __ cmp(edx, eax);
3721  __ j(not_equal, &not_same, Label::kNear);
3722  STATIC_ASSERT(EQUAL == 0);
3723  STATIC_ASSERT(kSmiTag == 0);
3724  __ Move(eax, Immediate(Smi::FromInt(EQUAL)));
3725  __ IncrementCounter(masm->isolate()->counters()->string_compare_native(), 1);
3726  __ ret(2 * kPointerSize);
3727 
3728  __ bind(&not_same);
3729 
3730  // Check that both objects are sequential ASCII strings.
3731  __ JumpIfNotBothSequentialAsciiStrings(edx, eax, ecx, ebx, &runtime);
3732 
3733  // Compare flat ASCII strings.
3734  // Drop arguments from the stack.
3735  __ pop(ecx);
3736  __ add(esp, Immediate(2 * kPointerSize));
3737  __ push(ecx);
3738  GenerateCompareFlatAsciiStrings(masm, edx, eax, ecx, ebx, edi);
3739 
3740  // Call the runtime; it returns -1 (less), 0 (equal), or 1 (greater)
3741  // tagged as a small integer.
3742  __ bind(&runtime);
3743  __ TailCallRuntime(Runtime::kHiddenStringCompare, 2, 1);
3744 }
3745 
3746 
3747 void ArrayPushStub::Generate(MacroAssembler* masm) {
3748  int argc = arguments_count();
3749 
3750  if (argc == 0) {
3751  // Noop, return the length.
3752  __ mov(eax, FieldOperand(edx, JSArray::kLengthOffset));
3753  __ ret((argc + 1) * kPointerSize);
3754  return;
3755  }
3756 
3757  Isolate* isolate = masm->isolate();
3758 
3759  if (argc != 1) {
3760  __ TailCallExternalReference(
3761  ExternalReference(Builtins::c_ArrayPush, isolate), argc + 1, 1);
3762  return;
3763  }
3764 
3765  Label call_builtin, attempt_to_grow_elements, with_write_barrier;
3766 
3767  // Get the elements array of the object.
3768  __ mov(edi, FieldOperand(edx, JSArray::kElementsOffset));
3769 
3770  if (IsFastSmiOrObjectElementsKind(elements_kind())) {
3771  // Check that the elements are in fast mode and writable.
3772  __ cmp(FieldOperand(edi, HeapObject::kMapOffset),
3773  isolate->factory()->fixed_array_map());
3774  __ j(not_equal, &call_builtin);
3775  }
3776 
3777  // Get the array's length into eax and calculate new length.
3778  __ mov(eax, FieldOperand(edx, JSArray::kLengthOffset));
3779  STATIC_ASSERT(kSmiTagSize == 1);
3780  STATIC_ASSERT(kSmiTag == 0);
3781  __ add(eax, Immediate(Smi::FromInt(argc)));
3782 
3783  // Get the elements' length into ecx.
3784  __ mov(ecx, FieldOperand(edi, FixedArray::kLengthOffset));
3785 
3786  // Check if we could survive without allocation.
3787  __ cmp(eax, ecx);
3788 
3789  if (IsFastSmiOrObjectElementsKind(elements_kind())) {
3790  __ j(greater, &attempt_to_grow_elements);
3791 
3792  // Check if value is a smi.
3793  __ mov(ecx, Operand(esp, argc * kPointerSize));
3794  __ JumpIfNotSmi(ecx, &with_write_barrier);
3795 
3796  // Store the value.
3797  __ mov(FieldOperand(edi, eax, times_half_pointer_size,
3798  FixedArray::kHeaderSize - argc * kPointerSize),
3799  ecx);
3800  } else {
3801  __ j(greater, &call_builtin);
3802 
3803  __ mov(ecx, Operand(esp, argc * kPointerSize));
3804  __ StoreNumberToDoubleElements(
3805  ecx, edi, eax, ecx, xmm0, &call_builtin, true, argc * kDoubleSize);
3806  }
3807 
3808  // Save new length.
3809  __ mov(FieldOperand(edx, JSArray::kLengthOffset), eax);
3810  __ ret((argc + 1) * kPointerSize);
3811 
3812  if (IsFastDoubleElementsKind(elements_kind())) {
3813  __ bind(&call_builtin);
3814  __ TailCallExternalReference(
3815  ExternalReference(Builtins::c_ArrayPush, isolate), argc + 1, 1);
3816  return;
3817  }
3818 
3819  __ bind(&with_write_barrier);
3820 
3821  if (IsFastSmiElementsKind(elements_kind())) {
3822  if (FLAG_trace_elements_transitions) __ jmp(&call_builtin);
3823 
3825  isolate->factory()->heap_number_map());
3826  __ j(equal, &call_builtin);
3827 
3828  ElementsKind target_kind = IsHoleyElementsKind(elements_kind())
3829  ? FAST_HOLEY_ELEMENTS : FAST_ELEMENTS;
3830  __ mov(ebx, ContextOperand(esi, Context::GLOBAL_OBJECT_INDEX));
3831  __ mov(ebx, FieldOperand(ebx, GlobalObject::kNativeContextOffset));
3832  __ mov(ebx, ContextOperand(ebx, Context::JS_ARRAY_MAPS_INDEX));
3833  const int header_size = FixedArrayBase::kHeaderSize;
3834  // Verify that the object can be transitioned in place.
3835  const int origin_offset = header_size + elements_kind() * kPointerSize;
3836  __ mov(edi, FieldOperand(ebx, origin_offset));
3837  __ cmp(edi, FieldOperand(edx, HeapObject::kMapOffset));
3838  __ j(not_equal, &call_builtin);
3839 
3840  const int target_offset = header_size + target_kind * kPointerSize;
3841  __ mov(ebx, FieldOperand(ebx, target_offset));
3842  ElementsTransitionGenerator::GenerateMapChangeElementsTransition(
3843  masm, DONT_TRACK_ALLOCATION_SITE, NULL);
3844  // Restore edi used as a scratch register for the write barrier used while
3845  // setting the map.
3846  __ mov(edi, FieldOperand(edx, JSArray::kElementsOffset));
3847  }
3848 
3849  // Save new length.
3850  __ mov(FieldOperand(edx, JSArray::kLengthOffset), eax);
3851 
3852  // Store the value.
3853  __ lea(edx, FieldOperand(edi, eax, times_half_pointer_size,
3854  FixedArray::kHeaderSize - argc * kPointerSize));
3855  __ mov(Operand(edx, 0), ecx);
3856 
3857  __ RecordWrite(edi, edx, ecx, kDontSaveFPRegs, EMIT_REMEMBERED_SET,
3858  OMIT_SMI_CHECK);
3859 
3860  __ ret((argc + 1) * kPointerSize);
3861 
3862  __ bind(&attempt_to_grow_elements);
3863  if (!FLAG_inline_new) {
3864  __ bind(&call_builtin);
3865  __ TailCallExternalReference(
3866  ExternalReference(Builtins::c_ArrayPush, isolate), argc + 1, 1);
3867  return;
3868  }
3869 
3870  __ mov(ebx, Operand(esp, argc * kPointerSize));
3871  // Growing elements that are SMI-only requires special handling in case the
3872  // new element is non-Smi. For now, delegate to the builtin.
3873  if (IsFastSmiElementsKind(elements_kind())) {
3874  __ JumpIfNotSmi(ebx, &call_builtin);
3875  }
3876 
3877  // We could be lucky and the elements array could be at the top of new-space.
3878  // In this case we can just grow it in place by moving the allocation pointer
3879  // up.
3880  ExternalReference new_space_allocation_top =
3881  ExternalReference::new_space_allocation_top_address(isolate);
3882  ExternalReference new_space_allocation_limit =
3883  ExternalReference::new_space_allocation_limit_address(isolate);
3884 
3885  const int kAllocationDelta = 4;
3886  ASSERT(kAllocationDelta >= argc);
3887  // Load top.
3888  __ mov(ecx, Operand::StaticVariable(new_space_allocation_top));
3889 
3890  // Check if it's the end of elements.
3891  __ lea(edx, FieldOperand(edi, eax, times_half_pointer_size,
3892  FixedArray::kHeaderSize - argc * kPointerSize));
3893  __ cmp(edx, ecx);
3894  __ j(not_equal, &call_builtin);
3895  __ add(ecx, Immediate(kAllocationDelta * kPointerSize));
3896  __ cmp(ecx, Operand::StaticVariable(new_space_allocation_limit));
3897  __ j(above, &call_builtin);
3898 
3899  // We fit and could grow elements.
3900  __ mov(Operand::StaticVariable(new_space_allocation_top), ecx);
3901 
3902  // Push the argument...
3903  __ mov(Operand(edx, 0), ebx);
3904  // ... and fill the rest with holes.
3905  for (int i = 1; i < kAllocationDelta; i++) {
3906  __ mov(Operand(edx, i * kPointerSize),
3907  isolate->factory()->the_hole_value());
3908  }
3909 
3910  if (IsFastObjectElementsKind(elements_kind())) {
3911  // We know the elements array is in new space so we don't need the
3912  // remembered set, but we just pushed a value onto it so we may have to tell
3913  // the incremental marker to rescan the object that we just grew. We don't
3914  // need to worry about the holes because they are in old space and already
3915  // marked black.
3916  __ RecordWrite(edi, edx, ebx, kDontSaveFPRegs, OMIT_REMEMBERED_SET);
3917  }
3918 
3919  // Restore receiver to edx as finish sequence assumes it's here.
3920  __ mov(edx, Operand(esp, (argc + 1) * kPointerSize));
3921 
3922  // Increment element's and array's sizes.
3923  __ add(FieldOperand(edi, FixedArray::kLengthOffset),
3924  Immediate(Smi::FromInt(kAllocationDelta)));
3925 
3926  // NOTE: This only happens in new-space, where we don't care about the
3927  // black-byte-count on pages. Otherwise we should update that too if the
3928  // object is black.
3929 
3930  __ mov(FieldOperand(edx, JSArray::kLengthOffset), eax);
3931  __ ret((argc + 1) * kPointerSize);
3932 
3933  __ bind(&call_builtin);
3934  __ TailCallExternalReference(
3935  ExternalReference(Builtins::c_ArrayPush, isolate), argc + 1, 1);
3936 }
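
// The grow-in-place fast path above relies on new-space's bump-pointer
// allocator: if the elements array ends exactly at the current allocation
// top and the limit leaves room, the array is extended simply by bumping
// the top pointer. A C++ sketch of that check (illustrative only):
static bool TryGrowInPlaceSketch(uint8_t** allocation_top,
                                 uint8_t* allocation_limit,
                                 uint8_t* elements_end, int delta_bytes) {
  if (elements_end != *allocation_top) return false;  // not the last object
  if (*allocation_top + delta_bytes > allocation_limit) return false;
  *allocation_top += delta_bytes;                     // bump the top
  return true;  // caller stores the new element and fills the rest with holes
}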
3937 
3938 
3939 void BinaryOpICWithAllocationSiteStub::Generate(MacroAssembler* masm) {
3940  // ----------- S t a t e -------------
3941  // -- edx : left
3942  // -- eax : right
3943  // -- esp[0] : return address
3944  // -----------------------------------
3945  Isolate* isolate = masm->isolate();
3946 
3947  // Load ecx with the allocation site. We stick an undefined dummy value here
3948  // and replace it with the real allocation site later when we instantiate this
3949  // stub in BinaryOpICWithAllocationSiteStub::GetCodeCopyFromTemplate().
3950  __ mov(ecx, handle(isolate->heap()->undefined_value()));
3951 
3952  // Make sure that we actually patched the allocation site.
3953  if (FLAG_debug_code) {
3954  __ test(ecx, Immediate(kSmiTagMask));
3955  __ Assert(not_equal, kExpectedAllocationSite);
3956  __ cmp(FieldOperand(ecx, HeapObject::kMapOffset),
3957  isolate->factory()->allocation_site_map());
3958  __ Assert(equal, kExpectedAllocationSite);
3959  }
3960 
3961  // Tail call into the stub that handles binary operations with allocation
3962  // sites.
3963  BinaryOpWithAllocationSiteStub stub(state_);
3964  __ TailCallStub(&stub);
3965 }
3966 
3967 
3968 void ICCompareStub::GenerateSmis(MacroAssembler* masm) {
3969  ASSERT(state_ == CompareIC::SMI);
3970  Label miss;
3971  __ mov(ecx, edx);
3972  __ or_(ecx, eax);
3973  __ JumpIfNotSmi(ecx, &miss, Label::kNear);
3974 
3975  if (GetCondition() == equal) {
3976  // For equality we do not care about the sign of the result.
3977  __ sub(eax, edx);
3978  } else {
3979  Label done;
3980  __ sub(edx, eax);
3981  __ j(no_overflow, &done, Label::kNear);
3982  // Correct sign of result in case of overflow.
3983  __ not_(edx);
3984  __ bind(&done);
3985  __ mov(eax, edx);
3986  }
3987  __ ret(0);
3988 
3989  __ bind(&miss);
3990  GenerateMiss(masm);
3991 }
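
// For relational compares the stub subtracts the tagged values directly; if
// the subtraction overflows, the truncated result has the wrong sign, and a
// bitwise not (~x == -x - 1) restores a value with the correct sign. A C++
// sketch (illustrative only; uses the GCC/Clang __builtin_sub_overflow
// intrinsic):
static int32_t SmiCompareSketch(int32_t left, int32_t right) {
  int32_t diff;
  if (!__builtin_sub_overflow(left, right, &diff)) return diff;
  return ~diff;  // overflow: complement yields a correctly signed result
}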
3992 
3993 
3994 void ICCompareStub::GenerateNumbers(MacroAssembler* masm) {
3995  ASSERT(state_ == CompareIC::NUMBER);
3996 
3997  Label generic_stub;
3998  Label unordered, maybe_undefined1, maybe_undefined2;
3999  Label miss;
4000 
4001  if (left_ == CompareIC::SMI) {
4002  __ JumpIfNotSmi(edx, &miss);
4003  }
4004  if (right_ == CompareIC::SMI) {
4005  __ JumpIfNotSmi(eax, &miss);
4006  }
4007 
4008  // Inlining the double comparison and falling back to the general compare
4009  // stub if NaN is involved or SSE2 or CMOV is unsupported.
4010  if (CpuFeatures::IsSupported(SSE2) && CpuFeatures::IsSupported(CMOV)) {
4011  CpuFeatureScope scope1(masm, SSE2);
4012  CpuFeatureScope scope2(masm, CMOV);
4013 
4014  // Load left and right operand.
4015  Label done, left, left_smi, right_smi;
4016  __ JumpIfSmi(eax, &right_smi, Label::kNear);
4017  __ cmp(FieldOperand(eax, HeapObject::kMapOffset),
4018  masm->isolate()->factory()->heap_number_map());
4019  __ j(not_equal, &maybe_undefined1, Label::kNear);
4020  __ movsd(xmm1, FieldOperand(eax, HeapNumber::kValueOffset));
4021  __ jmp(&left, Label::kNear);
4022  __ bind(&right_smi);
4023  __ mov(ecx, eax); // Can't clobber eax because we can still jump away.
4024  __ SmiUntag(ecx);
4025  __ Cvtsi2sd(xmm1, ecx);
4026 
4027  __ bind(&left);
4028  __ JumpIfSmi(edx, &left_smi, Label::kNear);
4029  __ cmp(FieldOperand(edx, HeapObject::kMapOffset),
4030  masm->isolate()->factory()->heap_number_map());
4031  __ j(not_equal, &maybe_undefined2, Label::kNear);
4032  __ movsd(xmm0, FieldOperand(edx, HeapNumber::kValueOffset));
4033  __ jmp(&done);
4034  __ bind(&left_smi);
4035  __ mov(ecx, edx); // Can't clobber edx because we can still jump away.
4036  __ SmiUntag(ecx);
4037  __ Cvtsi2sd(xmm0, ecx);
4038 
4039  __ bind(&done);
4040  // Compare operands.
4041  __ ucomisd(xmm0, xmm1);
4042 
4043  // Don't base result on EFLAGS when a NaN is involved.
4044  __ j(parity_even, &unordered, Label::kNear);
4045 
4046  // Return a result of -1, 0, or 1, based on EFLAGS.
4047  // Performing mov, because xor would destroy the flag register.
4048  __ mov(eax, 0); // equal
4049  __ mov(ecx, Immediate(Smi::FromInt(1)));
4050  __ cmov(above, eax, ecx);
4051  __ mov(ecx, Immediate(Smi::FromInt(-1)));
4052  __ cmov(below, eax, ecx);
4053  __ ret(0);
4054  } else {
4055  __ mov(ecx, edx);
4056  __ and_(ecx, eax);
4057  __ JumpIfSmi(ecx, &generic_stub, Label::kNear);
4058 
4060  masm->isolate()->factory()->heap_number_map());
4061  __ j(not_equal, &maybe_undefined1, Label::kNear);
4062  __ cmp(FieldOperand(edx, HeapObject::kMapOffset),
4063  masm->isolate()->factory()->heap_number_map());
4064  __ j(not_equal, &maybe_undefined2, Label::kNear);
4065  }
4066 
4067  __ bind(&unordered);
4068  __ bind(&generic_stub);
4069  ICCompareStub stub(op_, CompareIC::GENERIC, CompareIC::GENERIC,
4070  CompareIC::GENERIC);
4071  __ jmp(stub.GetCode(masm->isolate()), RelocInfo::CODE_TARGET);
4072 
4073  __ bind(&maybe_undefined1);
4074  if (Token::IsOrderedRelationalCompareOp(op_)) {
4075  __ cmp(eax, Immediate(masm->isolate()->factory()->undefined_value()));
4076  __ j(not_equal, &miss);
4077  __ JumpIfSmi(edx, &unordered);
4078  __ CmpObjectType(edx, HEAP_NUMBER_TYPE, ecx);
4079  __ j(not_equal, &maybe_undefined2, Label::kNear);
4080  __ jmp(&unordered);
4081  }
4082 
4083  __ bind(&maybe_undefined2);
4084  if (Token::IsOrderedRelationalCompareOp(op_)) {
4085  __ cmp(edx, Immediate(masm->isolate()->factory()->undefined_value()));
4086  __ j(equal, &unordered);
4087  }
4088 
4089  __ bind(&miss);
4090  GenerateMiss(masm);
4091 }
4092 
4093 
4094 void ICCompareStub::GenerateInternalizedStrings(MacroAssembler* masm) {
4095  ASSERT(state_ == CompareIC::INTERNALIZED_STRING);
4096  ASSERT(GetCondition() == equal);
4097 
4098  // Registers containing left and right operands respectively.
4099  Register left = edx;
4100  Register right = eax;
4101  Register tmp1 = ecx;
4102  Register tmp2 = ebx;
4103 
4104  // Check that both operands are heap objects.
4105  Label miss;
4106  __ mov(tmp1, left);
4107  STATIC_ASSERT(kSmiTag == 0);
4108  __ and_(tmp1, right);
4109  __ JumpIfSmi(tmp1, &miss, Label::kNear);
4110 
4111  // Check that both operands are internalized strings.
4112  __ mov(tmp1, FieldOperand(left, HeapObject::kMapOffset));
4113  __ mov(tmp2, FieldOperand(right, HeapObject::kMapOffset));
4114  __ movzx_b(tmp1, FieldOperand(tmp1, Map::kInstanceTypeOffset));
4115  __ movzx_b(tmp2, FieldOperand(tmp2, Map::kInstanceTypeOffset));
4116  STATIC_ASSERT(kInternalizedTag == 0 && kStringTag == 0);
4117  __ or_(tmp1, tmp2);
4118  __ test(tmp1, Immediate(kIsNotStringMask | kIsNotInternalizedMask));
4119  __ j(not_zero, &miss, Label::kNear);
4120 
4121  // Internalized strings are compared by identity.
4122  Label done;
4123  __ cmp(left, right);
4124  // Make sure eax is non-zero. At this point input operands are
4125  // guaranteed to be non-zero.
4126  ASSERT(right.is(eax));
4127  __ j(not_equal, &done, Label::kNear);
4128  STATIC_ASSERT(EQUAL == 0);
4129  STATIC_ASSERT(kSmiTag == 0);
4130  __ Move(eax, Immediate(Smi::FromInt(EQUAL)));
4131  __ bind(&done);
4132  __ ret(0);
4133 
4134  __ bind(&miss);
4135  GenerateMiss(masm);
4136 }
4137 
4138 
4139 void ICCompareStub::GenerateUniqueNames(MacroAssembler* masm) {
4140  ASSERT(state_ == CompareIC::UNIQUE_NAME);
4141  ASSERT(GetCondition() == equal);
4142 
4143  // Registers containing left and right operands respectively.
4144  Register left = edx;
4145  Register right = eax;
4146  Register tmp1 = ecx;
4147  Register tmp2 = ebx;
4148 
4149  // Check that both operands are heap objects.
4150  Label miss;
4151  __ mov(tmp1, left);
4152  STATIC_ASSERT(kSmiTag == 0);
4153  __ and_(tmp1, right);
4154  __ JumpIfSmi(tmp1, &miss, Label::kNear);
4155 
4156  // Check that both operands are unique names. This leaves the instance
4157  // types loaded in tmp1 and tmp2.
4158  __ mov(tmp1, FieldOperand(left, HeapObject::kMapOffset));
4159  __ mov(tmp2, FieldOperand(right, HeapObject::kMapOffset));
4160  __ movzx_b(tmp1, FieldOperand(tmp1, Map::kInstanceTypeOffset));
4161  __ movzx_b(tmp2, FieldOperand(tmp2, Map::kInstanceTypeOffset));
4162 
4163  __ JumpIfNotUniqueName(tmp1, &miss, Label::kNear);
4164  __ JumpIfNotUniqueName(tmp2, &miss, Label::kNear);
4165 
4166  // Unique names are compared by identity.
4167  Label done;
4168  __ cmp(left, right);
4169  // Make sure eax is non-zero. At this point input operands are
4170  // guaranteed to be non-zero.
4171  ASSERT(right.is(eax));
4172  __ j(not_equal, &done, Label::kNear);
4173  STATIC_ASSERT(EQUAL == 0);
4174  STATIC_ASSERT(kSmiTag == 0);
4175  __ Move(eax, Immediate(Smi::FromInt(EQUAL)));
4176  __ bind(&done);
4177  __ ret(0);
4178 
4179  __ bind(&miss);
4180  GenerateMiss(masm);
4181 }
4182 
4183 
4184 void ICCompareStub::GenerateStrings(MacroAssembler* masm) {
4185  ASSERT(state_ == CompareIC::STRING);
4186  Label miss;
4187 
4188  bool equality = Token::IsEqualityOp(op_);
4189 
4190  // Registers containing left and right operands respectively.
4191  Register left = edx;
4192  Register right = eax;
4193  Register tmp1 = ecx;
4194  Register tmp2 = ebx;
4195  Register tmp3 = edi;
4196 
4197  // Check that both operands are heap objects.
4198  __ mov(tmp1, left);
4199  STATIC_ASSERT(kSmiTag == 0);
4200  __ and_(tmp1, right);
4201  __ JumpIfSmi(tmp1, &miss);
4202 
4203  // Check that both operands are strings. This leaves the instance
4204  // types loaded in tmp1 and tmp2.
4205  __ mov(tmp1, FieldOperand(left, HeapObject::kMapOffset));
4206  __ mov(tmp2, FieldOperand(right, HeapObject::kMapOffset));
4207  __ movzx_b(tmp1, FieldOperand(tmp1, Map::kInstanceTypeOffset));
4208  __ movzx_b(tmp2, FieldOperand(tmp2, Map::kInstanceTypeOffset));
4209  __ mov(tmp3, tmp1);
4210  STATIC_ASSERT(kNotStringTag != 0);
4211  __ or_(tmp3, tmp2);
4212  __ test(tmp3, Immediate(kIsNotStringMask));
4213  __ j(not_zero, &miss);
4214 
4215  // Fast check for identical strings.
4216  Label not_same;
4217  __ cmp(left, right);
4218  __ j(not_equal, &not_same, Label::kNear);
4219  STATIC_ASSERT(EQUAL == 0);
4220  STATIC_ASSERT(kSmiTag == 0);
4221  __ Move(eax, Immediate(Smi::FromInt(EQUAL)));
4222  __ ret(0);
4223 
4224  // Handle not identical strings.
4225  __ bind(&not_same);
4226 
4227  // Check that both strings are internalized. If they are, we're done
4228  // because we already know they are not identical. But in the case of
4229  // non-equality compare, we still need to determine the order. We
4230  // also know they are both strings.
4231  if (equality) {
4232  Label do_compare;
4233  STATIC_ASSERT(kInternalizedTag == 0);
4234  __ or_(tmp1, tmp2);
4235  __ test(tmp1, Immediate(kIsNotInternalizedMask));
4236  __ j(not_zero, &do_compare, Label::kNear);
4237  // Make sure eax is non-zero. At this point input operands are
4238  // guaranteed to be non-zero.
4239  ASSERT(right.is(eax));
4240  __ ret(0);
4241  __ bind(&do_compare);
4242  }
4243 
4244  // Check that both strings are sequential ASCII.
4245  Label runtime;
4246  __ JumpIfNotBothSequentialAsciiStrings(left, right, tmp1, tmp2, &runtime);
4247 
4248  // Compare flat ASCII strings. Returns when done.
4249  if (equality) {
4250  StringCompareStub::GenerateFlatAsciiStringEquals(
4251  masm, left, right, tmp1, tmp2);
4252  } else {
4253  StringCompareStub::GenerateCompareFlatAsciiStrings(
4254  masm, left, right, tmp1, tmp2, tmp3);
4255  }
4256 
4257  // Handle more complex cases in runtime.
4258  __ bind(&runtime);
4259  __ pop(tmp1); // Return address.
4260  __ push(left);
4261  __ push(right);
4262  __ push(tmp1);
4263  if (equality) {
4264  __ TailCallRuntime(Runtime::kStringEquals, 2, 1);
4265  } else {
4266  __ TailCallRuntime(Runtime::kHiddenStringCompare, 2, 1);
4267  }
4268 
4269  __ bind(&miss);
4270  GenerateMiss(masm);
4271 }
4272 
4273 
4274 void ICCompareStub::GenerateObjects(MacroAssembler* masm) {
4275  ASSERT(state_ == CompareIC::OBJECT);
4276  Label miss;
4277  __ mov(ecx, edx);
4278  __ and_(ecx, eax);
4279  __ JumpIfSmi(ecx, &miss, Label::kNear);
4280 
4281  __ CmpObjectType(eax, JS_OBJECT_TYPE, ecx);
4282  __ j(not_equal, &miss, Label::kNear);
4283  __ CmpObjectType(edx, JS_OBJECT_TYPE, ecx);
4284  __ j(not_equal, &miss, Label::kNear);
4285 
4286  ASSERT(GetCondition() == equal);
4287  __ sub(eax, edx);
4288  __ ret(0);
4289 
4290  __ bind(&miss);
4291  GenerateMiss(masm);
4292 }
4293 
4294 
4295 void ICCompareStub::GenerateKnownObjects(MacroAssembler* masm) {
4296  Label miss;
4297  __ mov(ecx, edx);
4298  __ and_(ecx, eax);
4299  __ JumpIfSmi(ecx, &miss, Label::kNear);
4300 
4301  __ mov(ecx, FieldOperand(eax, HeapObject::kMapOffset));
4302  __ mov(ebx, FieldOperand(edx, HeapObject::kMapOffset));
4303  __ cmp(ecx, known_map_);
4304  __ j(not_equal, &miss, Label::kNear);
4305  __ cmp(ebx, known_map_);
4306  __ j(not_equal, &miss, Label::kNear);
4307 
4308  __ sub(eax, edx);
4309  __ ret(0);
4310 
4311  __ bind(&miss);
4312  GenerateMiss(masm);
4313 }
4314 
4315 
4316 void ICCompareStub::GenerateMiss(MacroAssembler* masm) {
4317  {
4318  // Call the runtime system in a fresh internal frame.
4319  ExternalReference miss = ExternalReference(IC_Utility(IC::kCompareIC_Miss),
4320  masm->isolate());
4321  FrameScope scope(masm, StackFrame::INTERNAL);
4322  __ push(edx); // Preserve edx and eax.
4323  __ push(eax);
4324  __ push(edx); // And also use them as the arguments.
4325  __ push(eax);
4326  __ push(Immediate(Smi::FromInt(op_)));
4327  __ CallExternalReference(miss, 3);
4328  // Compute the entry point of the rewritten stub.
4329  __ lea(edi, FieldOperand(eax, Code::kHeaderSize));
4330  __ pop(eax);
4331  __ pop(edx);
4332  }
4333 
4334  // Do a tail call to the rewritten stub.
4335  __ jmp(edi);
4336 }
4337 
4338 
4339 // Helper function used to check that the dictionary doesn't contain
4340 // the property. This function may return false negatives, so miss_label
4341 // must always call a backup property check that is complete.
4342 // This function is safe to call if the receiver has fast properties.
4343 // Name must be a unique name and receiver must be a heap object.
4344 void NameDictionaryLookupStub::GenerateNegativeLookup(MacroAssembler* masm,
4345  Label* miss,
4346  Label* done,
4347  Register properties,
4348  Handle<Name> name,
4349  Register r0) {
4350  ASSERT(name->IsUniqueName());
4351 
4352  // If names of slots in range from 1 to kProbes - 1 for the hash value are
4353  // not equal to the name and kProbes-th slot is not used (its name is the
4354  // undefined value), it guarantees the hash table doesn't contain the
4355  // property. It's true even if some slots represent deleted properties
4356  // (their names are the hole value).
4357  for (int i = 0; i < kInlinedProbes; i++) {
4358  // Compute the masked index: (hash + i + i * i) & mask.
4359  Register index = r0;
4360  // Capacity is smi 2^n.
4361  __ mov(index, FieldOperand(properties, kCapacityOffset));
4362  __ dec(index);
4363  __ and_(index,
4364  Immediate(Smi::FromInt(name->Hash() +
4365  NameDictionary::GetProbeOffset(i))));
4366 
4367  // Scale the index by multiplying by the entry size.
4368  ASSERT(NameDictionary::kEntrySize == 3);
4369  __ lea(index, Operand(index, index, times_2, 0)); // index *= 3.
4370  Register entity_name = r0;
4371  // Having undefined at this place means the name is not contained.
4372  ASSERT_EQ(kSmiTagSize, 1);
4373  __ mov(entity_name, Operand(properties, index, times_half_pointer_size,
4374  kElementsStartOffset - kHeapObjectTag));
4375  __ cmp(entity_name, masm->isolate()->factory()->undefined_value());
4376  __ j(equal, done);
4377 
4378  // Stop if found the property.
4379  __ cmp(entity_name, Handle<Name>(name));
4380  __ j(equal, miss);
4381 
4382  Label good;
4383  // Check for the hole and skip.
4384  __ cmp(entity_name, masm->isolate()->factory()->the_hole_value());
4385  __ j(equal, &good, Label::kNear);
4386 
4387  // Check if the entry name is not a unique name.
4388  __ mov(entity_name, FieldOperand(entity_name, HeapObject::kMapOffset));
4389  __ JumpIfNotUniqueName(FieldOperand(entity_name, Map::kInstanceTypeOffset),
4390  miss);
4391  __ bind(&good);
4392  }
4393 
4394  NameDictionaryLookupStub stub(properties, r0, r0, NEGATIVE_LOOKUP);
4395  __ push(Immediate(Handle<Object>(name)));
4396  __ push(Immediate(name->Hash()));
4397  __ CallStub(&stub);
4398  __ test(r0, r0);
4399  __ j(not_zero, miss);
4400  __ jmp(done);
4401 }
4402 
4403 
4404 // Probe the name dictionary in the |elements| register. Jump to the
4405 // |done| label if a property with the given name is found leaving the
4406 // index into the dictionary in |r0|. Jump to the |miss| label
4407 // otherwise.
4408 void NameDictionaryLookupStub::GeneratePositiveLookup(MacroAssembler* masm,
4409  Label* miss,
4410  Label* done,
4411  Register elements,
4412  Register name,
4413  Register r0,
4414  Register r1) {
4415  ASSERT(!elements.is(r0));
4416  ASSERT(!elements.is(r1));
4417  ASSERT(!name.is(r0));
4418  ASSERT(!name.is(r1));
4419 
4420  __ AssertName(name);
4421 
4422  __ mov(r1, FieldOperand(elements, kCapacityOffset));
4423  __ shr(r1, kSmiTagSize); // convert smi to int
4424  __ dec(r1);
4425 
4426  // Generate an unrolled loop that performs a few probes before
4427  // giving up. Measurements done on Gmail indicate that 2 probes
4428  // cover ~93% of loads from dictionaries.
4429  for (int i = 0; i < kInlinedProbes; i++) {
4430  // Compute the masked index: (hash + i + i * i) & mask.
4431  __ mov(r0, FieldOperand(name, Name::kHashFieldOffset));
4432  __ shr(r0, Name::kHashShift);
4433  if (i > 0) {
4434  __ add(r0, Immediate(NameDictionary::GetProbeOffset(i)));
4435  }
4436  __ and_(r0, r1);
4437 
4438  // Scale the index by multiplying by the entry size.
4439  ASSERT(NameDictionary::kEntrySize == 3);
4440  __ lea(r0, Operand(r0, r0, times_2, 0)); // r0 = r0 * 3
4441 
4442  // Check if the key is identical to the name.
4443  __ cmp(name, Operand(elements,
4444  r0,
4445  times_4,
4446  kElementsStartOffset - kHeapObjectTag));
4447  __ j(equal, done);
4448  }
4449 
4450  NameDictionaryLookupStub stub(elements, r1, r0, POSITIVE_LOOKUP);
4451  __ push(name);
4452  __ mov(r0, FieldOperand(name, Name::kHashFieldOffset));
4453  __ shr(r0, Name::kHashShift);
4454  __ push(r0);
4455  __ CallStub(&stub);
4456 
4457  __ test(r1, r1);
4458  __ j(zero, miss);
4459  __ jmp(done);
4460 }
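
// Both lookup helpers probe the table with the quadratic sequence
// (hash + i + i*i) & mask, which visits every slot of a power-of-two table.
// A C++ sketch of a single probe (illustrative only; the factor of 3 matches
// the NameDictionary::kEntrySize == 3 asserts above):
static uint32_t ProbeIndexSketch(uint32_t hash, uint32_t capacity, uint32_t i) {
  uint32_t mask = capacity - 1;               // capacity is a power of two
  uint32_t slot = (hash + i + i * i) & mask;  // masked probe index
  return slot * 3;                            // scale by the entry size
}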
4461 
4462 
4463 void NameDictionaryLookupStub::Generate(MacroAssembler* masm) {
4464  // This stub overrides SometimesSetsUpAFrame() to return false. That means
4465  // we cannot call anything that could cause a GC from this stub.
4466  // Stack frame on entry:
4467  // esp[0 * kPointerSize]: return address.
4468  // esp[1 * kPointerSize]: key's hash.
4469  // esp[2 * kPointerSize]: key.
4470  // Registers:
4471  // dictionary_: NameDictionary to probe.
4472  // result_: used as scratch.
4473  // index_: will hold an index of entry if lookup is successful.
4474  // might alias with result_.
4475  // Returns:
4476  // result_ is zero if lookup failed, non zero otherwise.
4477 
4478  Label in_dictionary, maybe_in_dictionary, not_in_dictionary;
4479 
4480  Register scratch = result_;
4481 
4482  __ mov(scratch, FieldOperand(dictionary_, kCapacityOffset));
4483  __ dec(scratch);
4484  __ SmiUntag(scratch);
4485  __ push(scratch);
4486 
4487  // If names of slots in range from 1 to kProbes - 1 for the hash value are
4488  // not equal to the name and kProbes-th slot is not used (its name is the
4489  // undefined value), it guarantees the hash table doesn't contain the
4490  // property. It's true even if some slots represent deleted properties
4491  // (their names are the null value).
4492  for (int i = kInlinedProbes; i < kTotalProbes; i++) {
4493  // Compute the masked index: (hash + i + i * i) & mask.
4494  __ mov(scratch, Operand(esp, 2 * kPointerSize));
4495  if (i > 0) {
4496  __ add(scratch, Immediate(NameDictionary::GetProbeOffset(i)));
4497  }
4498  __ and_(scratch, Operand(esp, 0));
4499 
4500  // Scale the index by multiplying by the entry size.
4501  ASSERT(NameDictionary::kEntrySize == 3);
4502  __ lea(index_, Operand(scratch, scratch, times_2, 0)); // index *= 3.
4503 
4504  // Having undefined at this place means the name is not contained.
4505  ASSERT_EQ(kSmiTagSize, 1);
4506  __ mov(scratch, Operand(dictionary_,
4507  index_,
4508  times_pointer_size,
4509  kElementsStartOffset - kHeapObjectTag));
4510  __ cmp(scratch, masm->isolate()->factory()->undefined_value());
4511  __ j(equal, &not_in_dictionary);
4512 
4513  // Stop if found the property.
4514  __ cmp(scratch, Operand(esp, 3 * kPointerSize));
4515  __ j(equal, &in_dictionary);
4516 
4517  if (i != kTotalProbes - 1 && mode_ == NEGATIVE_LOOKUP) {
4518  // If we hit a key that is not a unique name during negative
4519  // lookup we have to bailout as this key might be equal to the
4520  // key we are looking for.
4521 
4522  // Check if the entry name is not a unique name.
4523  __ mov(scratch, FieldOperand(scratch, HeapObject::kMapOffset));
4524  __ JumpIfNotUniqueName(FieldOperand(scratch, Map::kInstanceTypeOffset),
4525  &maybe_in_dictionary);
4526  }
4527  }
4528 
4529  __ bind(&maybe_in_dictionary);
4530  // If we are doing negative lookup then probing failure should be
4531  // treated as a lookup success. For positive lookup probing failure
4532  // should be treated as lookup failure.
4533  if (mode_ == POSITIVE_LOOKUP) {
4534  __ mov(result_, Immediate(0));
4535  __ Drop(1);
4536  __ ret(2 * kPointerSize);
4537  }
4538 
4539  __ bind(&in_dictionary);
4540  __ mov(result_, Immediate(1));
4541  __ Drop(1);
4542  __ ret(2 * kPointerSize);
4543 
4544  __ bind(&not_in_dictionary);
4545  __ mov(result_, Immediate(0));
4546  __ Drop(1);
4547  __ ret(2 * kPointerSize);
4548 }
4549 
4550 
4551 void StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(
4552  Isolate* isolate) {
4553  StoreBufferOverflowStub stub(kDontSaveFPRegs);
4554  stub.GetCode(isolate);
4555  if (CpuFeatures::IsSafeForSnapshot(SSE2)) {
4556  StoreBufferOverflowStub stub2(kSaveFPRegs);
4557  stub2.GetCode(isolate);
4558  }
4559 }
4560 
4561 
4562 bool CodeStub::CanUseFPRegisters() {
4563  return CpuFeatures::IsSupported(SSE2);
4564 }
4565 
4566 
4567 // Takes the input in 3 registers: address_ value_ and object_. A pointer to
4568 // the value has just been written into the object, now this stub makes sure
4569 // we keep the GC informed. The word in the object where the value has been
4570 // written is in the address register.
4571 void RecordWriteStub::Generate(MacroAssembler* masm) {
4572  Label skip_to_incremental_noncompacting;
4573  Label skip_to_incremental_compacting;
4574 
4575  // The first two instructions are generated with labels so as to get the
4576  // offset fixed up correctly by the bind(Label*) call. We patch it back and
4577  // forth between a compare instructions (a nop in this position) and the
4578  // real branch when we start and stop incremental heap marking.
4579  __ jmp(&skip_to_incremental_noncompacting, Label::kNear);
4580  __ jmp(&skip_to_incremental_compacting, Label::kFar);
4581 
4582  if (remembered_set_action_ == EMIT_REMEMBERED_SET) {
4583  __ RememberedSetHelper(object_,
4584  address_,
4585  value_,
4586  save_fp_regs_mode_,
4587  MacroAssembler::kReturnAtEnd);
4588  } else {
4589  __ ret(0);
4590  }
4591 
4592  __ bind(&skip_to_incremental_noncompacting);
4593  GenerateIncremental(masm, INCREMENTAL);
4594 
4595  __ bind(&skip_to_incremental_compacting);
4596  GenerateIncremental(masm, INCREMENTAL_COMPACTION);
4597 
4598  // Initial mode of the stub is expected to be STORE_BUFFER_ONLY.
4599  // Will be checked in IncrementalMarking::ActivateGeneratedStub.
4600  masm->set_byte_at(0, kTwoByteNopInstruction);
4601  masm->set_byte_at(2, kFiveByteNopInstruction);
4602 }
4603 
4604 
4605 void RecordWriteStub::GenerateIncremental(MacroAssembler* masm, Mode mode) {
4606  regs_.Save(masm);
4607 
4608  if (remembered_set_action_ == EMIT_REMEMBERED_SET) {
4609  Label dont_need_remembered_set;
4610 
4611  __ mov(regs_.scratch0(), Operand(regs_.address(), 0));
4612  __ JumpIfNotInNewSpace(regs_.scratch0(), // Value.
4613  regs_.scratch0(),
4614  &dont_need_remembered_set);
4615 
4616  __ CheckPageFlag(regs_.object(),
4617  regs_.scratch0(),
4618  1 << MemoryChunk::SCAN_ON_SCAVENGE,
4619  not_zero,
4620  &dont_need_remembered_set);
4621 
4622  // First notify the incremental marker if necessary, then update the
4623  // remembered set.
4624  CheckNeedsToInformIncrementalMarker(
4625  masm,
4626  kUpdateRememberedSetOnNoNeedToInformIncrementalMarker,
4627  mode);
4628  InformIncrementalMarker(masm);
4629  regs_.Restore(masm);
4630  __ RememberedSetHelper(object_,
4631  address_,
4632  value_,
4633  save_fp_regs_mode_,
4634  MacroAssembler::kReturnAtEnd);
4635 
4636  __ bind(&dont_need_remembered_set);
4637  }
4638 
4639  CheckNeedsToInformIncrementalMarker(
4640  masm,
4641  kReturnOnNoNeedToInformIncrementalMarker,
4642  mode);
4643  InformIncrementalMarker(masm);
4644  regs_.Restore(masm);
4645  __ ret(0);
4646 }
4647 
4648 
4649 void RecordWriteStub::InformIncrementalMarker(MacroAssembler* masm) {
4650  regs_.SaveCallerSaveRegisters(masm, save_fp_regs_mode_);
4651  int argument_count = 3;
4652  __ PrepareCallCFunction(argument_count, regs_.scratch0());
4653  __ mov(Operand(esp, 0 * kPointerSize), regs_.object());
4654  __ mov(Operand(esp, 1 * kPointerSize), regs_.address()); // Slot.
4655  __ mov(Operand(esp, 2 * kPointerSize),
4656  Immediate(ExternalReference::isolate_address(masm->isolate())));
4657 
4658  AllowExternalCallThatCantCauseGC scope(masm);
4659  __ CallCFunction(
4660  ExternalReference::incremental_marking_record_write_function(
4661  masm->isolate()),
4662  argument_count);
4663 
4664  regs_.RestoreCallerSaveRegisters(masm, save_fp_regs_mode_);
4665 }
4666 
4667 
4668 void RecordWriteStub::CheckNeedsToInformIncrementalMarker(
4669  MacroAssembler* masm,
4670  OnNoNeedToInformIncrementalMarker on_no_need,
4671  Mode mode) {
4672  Label object_is_black, need_incremental, need_incremental_pop_object;
4673 
4674  __ mov(regs_.scratch0(), Immediate(~Page::kPageAlignmentMask));
4675  __ and_(regs_.scratch0(), regs_.object());
4676  __ mov(regs_.scratch1(),
4677  Operand(regs_.scratch0(),
4678  MemoryChunk::kWriteBarrierCounterOffset));
4679  __ sub(regs_.scratch1(), Immediate(1));
4680  __ mov(Operand(regs_.scratch0(),
4681  MemoryChunk::kWriteBarrierCounterOffset),
4682  regs_.scratch1());
4683  __ j(negative, &need_incremental);
4684 
4685  // Let's look at the color of the object: If it is not black we don't have
4686  // to inform the incremental marker.
4687  __ JumpIfBlack(regs_.object(),
4688  regs_.scratch0(),
4689  regs_.scratch1(),
4690  &object_is_black,
4691  Label::kNear);
4692 
4693  regs_.Restore(masm);
4694  if (on_no_need == kUpdateRememberedSetOnNoNeedToInformIncrementalMarker) {
4695  __ RememberedSetHelper(object_,
4696  address_,
4697  value_,
4698  save_fp_regs_mode_,
4699  MacroAssembler::kReturnAtEnd);
4700  } else {
4701  __ ret(0);
4702  }
4703 
4704  __ bind(&object_is_black);
4705 
4706  // Get the value from the slot.
4707  __ mov(regs_.scratch0(), Operand(regs_.address(), 0));
4708 
4709  if (mode == INCREMENTAL_COMPACTION) {
4710  Label ensure_not_white;
4711 
4712  __ CheckPageFlag(regs_.scratch0(), // Contains value.
4713  regs_.scratch1(), // Scratch.
4714  MemoryChunk::kEvacuationCandidateMask,
4715  zero,
4716  &ensure_not_white,
4717  Label::kNear);
4718 
4719  __ CheckPageFlag(regs_.object(),
4720  regs_.scratch1(), // Scratch.
4721  MemoryChunk::kSkipEvacuationSlotsRecordingMask,
4722  not_zero,
4723  &ensure_not_white,
4724  Label::kNear);
4725 
4726  __ jmp(&need_incremental);
4727 
4728  __ bind(&ensure_not_white);
4729  }
4730 
4731  // We need an extra register for this, so we push the object register
4732  // temporarily.
4733  __ push(regs_.object());
4734  __ EnsureNotWhite(regs_.scratch0(), // The value.
4735  regs_.scratch1(), // Scratch.
4736  regs_.object(), // Scratch.
4737  &need_incremental_pop_object,
4738  Label::kNear);
4739  __ pop(regs_.object());
4740 
4741  regs_.Restore(masm);
4742  if (on_no_need == kUpdateRememberedSetOnNoNeedToInformIncrementalMarker) {
4743  __ RememberedSetHelper(object_,
4744  address_,
4745  value_,
4746  save_fp_regs_mode_,
4747  MacroAssembler::kReturnAtEnd);
4748  } else {
4749  __ ret(0);
4750  }
4751 
4752  __ bind(&need_incremental_pop_object);
4753  __ pop(regs_.object());
4754 
4755  __ bind(&need_incremental);
4756 
4757  // Fall through when we need to inform the incremental marker.
4758 }
4759 
4760 
4761 void StoreArrayLiteralElementStub::Generate(MacroAssembler* masm) {
4762  // ----------- S t a t e -------------
4763  // -- eax : element value to store
4764  // -- ecx : element index as smi
4765  // -- esp[0] : return address
4766  // -- esp[4] : array literal index in function
4767  // -- esp[8] : array literal
4768  // clobbers ebx, edx, edi
4769  // -----------------------------------
4770 
4771  Label element_done;
4772  Label double_elements;
4773  Label smi_element;
4774  Label slow_elements;
4775  Label slow_elements_from_double;
4776  Label fast_elements;
4777 
4778  // Get array literal index, array literal and its map.
4779  __ mov(edx, Operand(esp, 1 * kPointerSize));
4780  __ mov(ebx, Operand(esp, 2 * kPointerSize));
4781  __ mov(edi, FieldOperand(ebx, JSObject::kMapOffset));
4782 
4783  __ CheckFastElements(edi, &double_elements);
4784 
4785  // Check for FAST_*_SMI_ELEMENTS or FAST_*_ELEMENTS elements
4786  __ JumpIfSmi(eax, &smi_element);
4787  __ CheckFastSmiElements(edi, &fast_elements, Label::kNear);
4788 
4789  // Storing into the array literal requires an elements transition. Call into
4790  // the runtime.
4791 
4792  __ bind(&slow_elements);
4793  __ pop(edi); // Pop return address and remember to put back later for tail
4794  // call.
4795  __ push(ebx);
4796  __ push(ecx);
4797  __ push(eax);
4798  __ mov(ebx, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
4799  __ push(FieldOperand(ebx, JSFunction::kLiteralsOffset));
4800  __ push(edx);
4801  __ push(edi); // Return return address so that tail call returns to right
4802  // place.
4803  __ TailCallRuntime(Runtime::kStoreArrayLiteralElement, 5, 1);
4804 
4805  __ bind(&slow_elements_from_double);
4806  __ pop(edx);
4807  __ jmp(&slow_elements);
4808 
4809  // Array literal has ElementsKind of FAST_*_ELEMENTS and value is an object.
4810  __ bind(&fast_elements);
4811  __ mov(ebx, FieldOperand(ebx, JSObject::kElementsOffset));
4812  __ lea(ecx, FieldOperand(ebx, ecx, times_half_pointer_size,
4813  FixedArrayBase::kHeaderSize));
4814  __ mov(Operand(ecx, 0), eax);
4815  // Update the write barrier for the array store.
4816  __ RecordWrite(ebx, ecx, eax,
4817  kDontSaveFPRegs,
4818  EMIT_REMEMBERED_SET,
4819  OMIT_SMI_CHECK);
4820  __ ret(0);
4821 
4822  // Array literal has ElementsKind of FAST_*_SMI_ELEMENTS or FAST_*_ELEMENTS,
4823  // and value is Smi.
4824  __ bind(&smi_element);
4825  __ mov(ebx, FieldOperand(ebx, JSObject::kElementsOffset));
4826  __ mov(FieldOperand(ebx, ecx, times_half_pointer_size,
4827  FixedArrayBase::kHeaderSize), eax);
4828  __ ret(0);
4829 
4830  // Array literal has ElementsKind of FAST_*_DOUBLE_ELEMENTS.
4831  __ bind(&double_elements);
4832 
4833  __ push(edx);
4834  __ mov(edx, FieldOperand(ebx, JSObject::kElementsOffset));
4835  __ StoreNumberToDoubleElements(eax,
4836  edx,
4837  ecx,
4838  edi,
4839  xmm0,
4840  &slow_elements_from_double,
4841  false);
4842  __ pop(edx);
4843  __ ret(0);
4844 }
4845 
4846 
4847 void StubFailureTrampolineStub::Generate(MacroAssembler* masm) {
4848  CEntryStub ces(1, fp_registers_ ? kSaveFPRegs : kDontSaveFPRegs);
4849  __ call(ces.GetCode(masm->isolate()), RelocInfo::CODE_TARGET);
4850  int parameter_count_offset =
4851  StubFailureTrampolineFrame::kCallerStackParameterCountFrameOffset;
4852  __ mov(ebx, MemOperand(ebp, parameter_count_offset));
4853  masm->LeaveFrame(StackFrame::STUB_FAILURE_TRAMPOLINE);
4854  __ pop(ecx);
4855  int additional_offset = function_mode_ == JS_FUNCTION_STUB_MODE
4856  ? kPointerSize
4857  : 0;
4858  __ lea(esp, MemOperand(esp, ebx, times_pointer_size, additional_offset));
4859  __ jmp(ecx); // Return to IC Miss stub, continuation still on stack.
4860 }
4861 
4862 
4863 void ProfileEntryHookStub::MaybeCallEntryHook(MacroAssembler* masm) {
4864  if (masm->isolate()->function_entry_hook() != NULL) {
4865  ProfileEntryHookStub stub;
4866  masm->CallStub(&stub);
4867  }
4868 }
4869 
4870 
4871 void ProfileEntryHookStub::Generate(MacroAssembler* masm) {
4872  // Save volatile registers.
4873  const int kNumSavedRegisters = 3;
4874  __ push(eax);
4875  __ push(ecx);
4876  __ push(edx);
4877 
4878  // Calculate and push the original stack pointer.
4879  __ lea(eax, Operand(esp, (kNumSavedRegisters + 1) * kPointerSize));
4880  __ push(eax);
4881 
4882  // Retrieve our return address and use it to calculate the calling
4883  // function's address.
4884  __ mov(eax, Operand(esp, (kNumSavedRegisters + 1) * kPointerSize));
4885  __ sub(eax, Immediate(Assembler::kCallInstructionLength));
4886  __ push(eax);
4887 
4888  // Call the entry hook.
4889  ASSERT(masm->isolate()->function_entry_hook() != NULL);
4890  __ call(FUNCTION_ADDR(masm->isolate()->function_entry_hook()),
4891  RelocInfo::RUNTIME_ENTRY);
4892  __ add(esp, Immediate(2 * kPointerSize));
4893 
4894  // Restore the saved volatile registers.
4895  __ pop(edx);
4896  __ pop(ecx);
4897  __ pop(eax);
4898 
4899  __ ret(0);
4900 }
4901 
4902 
4903 template<class T>
4904 static void CreateArrayDispatch(MacroAssembler* masm,
4905  AllocationSiteOverrideMode mode) {
4906  if (mode == DISABLE_ALLOCATION_SITES) {
4907  T stub(GetInitialFastElementsKind(),
4908  mode);
4909  __ TailCallStub(&stub);
4910  } else if (mode == DONT_OVERRIDE) {
4911  int last_index = GetSequenceIndexFromFastElementsKind(
4912  TERMINAL_FAST_ELEMENTS_KIND);
4913  for (int i = 0; i <= last_index; ++i) {
4914  Label next;
4915  ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
4916  __ cmp(edx, kind);
4917  __ j(not_equal, &next);
4918  T stub(kind);
4919  __ TailCallStub(&stub);
4920  __ bind(&next);
4921  }
4922 
4923  // If we reached this point there is a problem.
4924  __ Abort(kUnexpectedElementsKindInArrayConstructor);
4925  } else {
4926  UNREACHABLE();
4927  }
4928 }
4929 
4930 
4931 static void CreateArrayDispatchOneArgument(MacroAssembler* masm,
4932  AllocationSiteOverrideMode mode) {
4933  // ebx - allocation site (if mode != DISABLE_ALLOCATION_SITES)
4934  // edx - kind (if mode != DISABLE_ALLOCATION_SITES)
4935  // eax - number of arguments
4936  // edi - constructor?
4937  // esp[0] - return address
4938  // esp[4] - last argument
4939  Label normal_sequence;
4940  if (mode == DONT_OVERRIDE) {
4941  ASSERT(FAST_SMI_ELEMENTS == 0);
4942  ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
4943  ASSERT(FAST_ELEMENTS == 2);
4944  ASSERT(FAST_HOLEY_ELEMENTS == 3);
4945  ASSERT(FAST_DOUBLE_ELEMENTS == 4);
4946  ASSERT(FAST_HOLEY_DOUBLE_ELEMENTS == 5);
4947 
4948  // is the low bit set? If so, we are holey and that is good.
4949  __ test_b(edx, 1);
4950  __ j(not_zero, &normal_sequence);
4951  }
4952 
4953  // look at the first argument
4954  __ mov(ecx, Operand(esp, kPointerSize));
4955  __ test(ecx, ecx);
4956  __ j(zero, &normal_sequence);
4957 
4958  if (mode == DISABLE_ALLOCATION_SITES) {
4959  ElementsKind initial = GetInitialFastElementsKind();
4960  ElementsKind holey_initial = GetHoleyElementsKind(initial);
4961 
4962  ArraySingleArgumentConstructorStub stub_holey(holey_initial,
4963  DISABLE_ALLOCATION_SITES);
4964  __ TailCallStub(&stub_holey);
4965 
4966  __ bind(&normal_sequence);
4967  ArraySingleArgumentConstructorStub stub(initial,
4968  DISABLE_ALLOCATION_SITES);
4969  __ TailCallStub(&stub);
4970  } else if (mode == DONT_OVERRIDE) {
4971  // We are going to create a holey array, but our kind is non-holey.
4972  // Fix kind and retry.
4973  __ inc(edx);
4974 
4975  if (FLAG_debug_code) {
4976  Handle<Map> allocation_site_map =
4977  masm->isolate()->factory()->allocation_site_map();
4978  __ cmp(FieldOperand(ebx, 0), Immediate(allocation_site_map));
4979  __ Assert(equal, kExpectedAllocationSite);
4980  }
4981 
4982  // Save the resulting elements kind in type info. We can't just store r3
4983  // in the AllocationSite::transition_info field because elements kind is
4984  // restricted to a portion of the field...upper bits need to be left alone.
4985  STATIC_ASSERT(AllocationSite::ElementsKindBits::kShift == 0);
4986  __ add(FieldOperand(ebx, AllocationSite::kTransitionInfoOffset),
4987  Immediate(Smi::FromInt(kFastElementsKindPackedToHoley)));
4988 
4989  __ bind(&normal_sequence);
4990  int last_index = GetSequenceIndexFromFastElementsKind(
4991  TERMINAL_FAST_ELEMENTS_KIND);
4992  for (int i = 0; i <= last_index; ++i) {
4993  Label next;
4994  ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
4995  __ cmp(edx, kind);
4996  __ j(not_equal, &next);
4997  ArraySingleArgumentConstructorStub stub(kind);
4998  __ TailCallStub(&stub);
4999  __ bind(&next);
5000  }
5001 
5002  // If we reached this point there is a problem.
5003  __ Abort(kUnexpectedElementsKindInArrayConstructor);
5004  } else {
5005  UNREACHABLE();
5006  }
5007 }
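Two encodings do the work here: the low bit of a fast kind marks holeyness (hence test_b(edx, 1)), and each holey kind is its packed counterpart plus one, so "fix kind and retry" is a single inc edx. The same invariants, checked in C++:

#include <cassert>

// Holey fast kinds are packed kind + 1, so the low bit marks holeyness.
static inline bool IsHoleyKind(int kind) { return (kind & 1) != 0; }
static inline int ToHoleyKind(int packed_kind) { return packed_kind + 1; }

int main() {
  int kind = 2;                  // FAST_ELEMENTS (packed)
  assert(!IsHoleyKind(kind));
  kind = ToHoleyKind(kind);      // like "inc edx": FAST_HOLEY_ELEMENTS
  assert(IsHoleyKind(kind) && kind == 3);
  return 0;
}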
5008 
5009 
5010 template<class T>
5011 static void ArrayConstructorStubAheadOfTimeHelper(Isolate* isolate) {
5012  int to_index = GetSequenceIndexFromFastElementsKind(
5013  TERMINAL_FAST_ELEMENTS_KIND);
5014  for (int i = 0; i <= to_index; ++i) {
5015  ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
5016  T stub(kind);
5017  stub.GetCode(isolate);
5018  if (AllocationSite::GetMode(kind) != DONT_TRACK_ALLOCATION_SITE) {
5019  T stub1(kind, DISABLE_ALLOCATION_SITES);
5020  stub1.GetCode(isolate);
5021  }
5022  }
5023 }
5024 
5025 
5026 void ArrayConstructorStubBase::GenerateStubsAheadOfTime(Isolate* isolate) {
5027  ArrayConstructorStubAheadOfTimeHelper<ArrayNoArgumentConstructorStub>(
5028  isolate);
5029  ArrayConstructorStubAheadOfTimeHelper<ArraySingleArgumentConstructorStub>(
5030  isolate);
5031  ArrayConstructorStubAheadOfTimeHelper<ArrayNArgumentsConstructorStub>(
5032  isolate);
5033 }
5034 
5035 
5036 void InternalArrayConstructorStubBase::GenerateStubsAheadOfTime(
5037  Isolate* isolate) {
5038  ElementsKind kinds[2] = { FAST_ELEMENTS, FAST_HOLEY_ELEMENTS };
5039  for (int i = 0; i < 2; i++) {
5040  // For internal arrays we only need a few things
5041  InternalArrayNoArgumentConstructorStub stubh1(kinds[i]);
5042  stubh1.GetCode(isolate);
5043  InternalArraySingleArgumentConstructorStub stubh2(kinds[i]);
5044  stubh2.GetCode(isolate);
5045  InternalArrayNArgumentsConstructorStub stubh3(kinds[i]);
5046  stubh3.GetCode(isolate);
5047  }
5048 }
5049 
5050 
5051 void ArrayConstructorStub::GenerateDispatchToArrayStub(
5052  MacroAssembler* masm,
5053  AllocationSiteOverrideMode mode) {
5054  if (argument_count_ == ANY) {
5055  Label not_zero_case, not_one_case;
5056  __ test(eax, eax);
5057  __ j(not_zero, &not_zero_case);
5058  CreateArrayDispatch<ArrayNoArgumentConstructorStub>(masm, mode);
5059 
5060  __ bind(&not_zero_case);
5061  __ cmp(eax, 1);
5062  __ j(greater, &not_one_case);
5063  CreateArrayDispatchOneArgument(masm, mode);
5064 
5065  __ bind(&not_one_case);
5066  CreateArrayDispatch<ArrayNArgumentsConstructorStub>(masm, mode);
5067  } else if (argument_count_ == NONE) {
5068  CreateArrayDispatch<ArrayNoArgumentConstructorStub>(masm, mode);
5069  } else if (argument_count_ == ONE) {
5070  CreateArrayDispatchOneArgument(masm, mode);
5071  } else if (argument_count_ == MORE_THAN_ONE) {
5072  CreateArrayDispatch<ArrayNArgumentsConstructorStub>(masm, mode);
5073  } else {
5074  UNREACHABLE();
5075  }
5076 }
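When argument_count_ is ANY, the branches on eax above resolve the count at run time; in the other cases the choice was fixed when the stub was generated. The run-time branch structure, modeled in C++:

#include <cstdio>

// Models the test/j(not_zero) and cmp(eax, 1)/j(greater) chain above.
static void DispatchOnArgcModel(int argc) {
  if (argc == 0) {
    printf("no-argument constructor stub\n");
  } else if (argc == 1) {
    printf("single-argument constructor stub\n");
  } else {
    printf("n-argument constructor stub\n");
  }
}

int main() { DispatchOnArgcModel(1); return 0; }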
5077 
5078 
5079 void ArrayConstructorStub::Generate(MacroAssembler* masm) {
5080  // ----------- S t a t e -------------
5081  // -- eax : argc (only if argument_count_ == ANY)
5082  // -- ebx : AllocationSite or undefined
5083  // -- edi : constructor
5084  // -- esp[0] : return address
5085  // -- esp[4] : last argument
5086  // -----------------------------------
5087  if (FLAG_debug_code) {
5088  // The array construct code is only set for the global and natives
5089  // builtin Array functions which always have maps.
5090 
5091  // Initial map for the builtin Array function should be a map.
5092  __ mov(ecx, FieldOperand(edi, JSFunction::kPrototypeOrInitialMapOffset));
5093  // Will both indicate a NULL and a Smi.
5094  __ test(ecx, Immediate(kSmiTagMask));
5095  __ Assert(not_zero, kUnexpectedInitialMapForArrayFunction);
5096  __ CmpObjectType(ecx, MAP_TYPE, ecx);
5097  __ Assert(equal, kUnexpectedInitialMapForArrayFunction);
5098 
5099  // We should either have undefined in ebx or a valid AllocationSite
5100  __ AssertUndefinedOrAllocationSite(ebx);
5101  }
5102 
5103  Label no_info;
5104  // If the feedback vector is the undefined value, call an array constructor
5105  // that doesn't use AllocationSites.
5106  __ cmp(ebx, masm->isolate()->factory()->undefined_value());
5107  __ j(equal, &no_info);
5108 
5109  // Only look at the lower 16 bits of the transition info.
5110  __ mov(edx, FieldOperand(ebx, AllocationSite::kTransitionInfoOffset));
5111  __ SmiUntag(edx);
5112  STATIC_ASSERT(AllocationSite::ElementsKindBits::kShift == 0);
5113  __ and_(edx, Immediate(AllocationSite::ElementsKindBits::kMask));
5114  GenerateDispatchToArrayStub(masm, DONT_OVERRIDE);
5115 
5116  __ bind(&no_info);
5117  GenerateDispatchToArrayStub(masm, DISABLE_ALLOCATION_SITES);
5118 }
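The transition_info of an AllocationSite is a smi whose low bits hold the elements kind, which is what the untag-and-mask above extracts. A C++ sketch of the decode, assuming ia32's one-bit smi tag and an illustrative field width (V8's actual mask constant is not reproduced here):

#include <cstdio>

static inline int SmiUntag(int smi) { return smi >> 1; }  // ia32: tag bit 0
static const int kKindFieldMask = (1 << 15) - 1;          // assumed width

static int KindFromTransitionInfo(int transition_info_smi) {
  // Matches: SmiUntag(edx); and_(edx, ElementsKindBits::kMask);
  return SmiUntag(transition_info_smi) & kKindFieldMask;
}

int main() {
  printf("kind = %d\n", KindFromTransitionInfo(3 << 1));  // prints 3
  return 0;
}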
5119 
5120 
5121 void InternalArrayConstructorStub::GenerateCase(
5122  MacroAssembler* masm, ElementsKind kind) {
5123  Label not_zero_case, not_one_case;
5124  Label normal_sequence;
5125 
5126  __ test(eax, eax);
5127  __ j(not_zero, &not_zero_case);
5128  InternalArrayNoArgumentConstructorStub stub0(kind);
5129  __ TailCallStub(&stub0);
5130 
5131  __ bind(&not_zero_case);
5132  __ cmp(eax, 1);
5133  __ j(greater, &not_one_case);
5134 
5135  if (IsFastPackedElementsKind(kind)) {
5136  // We might need to create a holey array
5137  // look at the first argument
5138  __ mov(ecx, Operand(esp, kPointerSize));
5139  __ test(ecx, ecx);
5140  __ j(zero, &normal_sequence);
5141 
5142  InternalArraySingleArgumentConstructorStub
5143  stub1_holey(GetHoleyElementsKind(kind));
5144  __ TailCallStub(&stub1_holey);
5145  }
5146 
5147  __ bind(&normal_sequence);
5148  InternalArraySingleArgumentConstructorStub stub1(kind);
5149  __ TailCallStub(&stub1);
5150 
5151  __ bind(&not_one_case);
5152  InternalArrayNArgumentsConstructorStub stubN(kind);
5153  __ TailCallStub(&stubN);
5154 }
5155 
5156 
5157 void InternalArrayConstructorStub::Generate(MacroAssembler* masm) {
5158  // ----------- S t a t e -------------
5159  // -- eax : argc
5160  // -- edi : constructor
5161  // -- esp[0] : return address
5162  // -- esp[4] : last argument
5163  // -----------------------------------
5164 
5165  if (FLAG_debug_code) {
5166  // The array construct code is only set for the global and natives
5167  // builtin Array functions which always have maps.
5168 
5169  // Initial map for the builtin Array function should be a map.
5170  __ mov(ecx, FieldOperand(edi, JSFunction::kPrototypeOrInitialMapOffset));
5171  // Will both indicate a NULL and a Smi.
5172  __ test(ecx, Immediate(kSmiTagMask));
5173  __ Assert(not_zero, kUnexpectedInitialMapForArrayFunction);
5174  __ CmpObjectType(ecx, MAP_TYPE, ecx);
5175  __ Assert(equal, kUnexpectedInitialMapForArrayFunction);
5176  }
5177 
5178  // Figure out the right elements kind.
5179  __ mov(ecx, FieldOperand(edi, JSFunction::kPrototypeOrInitialMapOffset));
5180 
5181  // Load the map's "bit field 2" into |ecx|. We only need the first byte,
5182  // but the following masking takes care of that anyway.
5183  __ mov(ecx, FieldOperand(ecx, Map::kBitField2Offset));
5184  // Retrieve elements_kind from bit field 2.
5185  __ and_(ecx, Map::kElementsKindMask);
5186  __ shr(ecx, Map::kElementsKindShift);
5187 
5188  if (FLAG_debug_code) {
5189  Label done;
5190  __ cmp(ecx, Immediate(FAST_ELEMENTS));
5191  __ j(equal, &done);
5192  __ cmp(ecx, Immediate(FAST_HOLEY_ELEMENTS));
5193  __ Assert(equal,
5194  kInvalidElementsKindForInternalArrayOrInternalPackedArray);
5195  __ bind(&done);
5196  }
5197 
5198  Label fast_elements_case;
5199  __ cmp(ecx, Immediate(FAST_ELEMENTS));
5200  __ j(equal, &fast_elements_case);
5201  GenerateCase(masm, FAST_HOLEY_ELEMENTS);
5202 
5203  __ bind(&fast_elements_case);
5204  GenerateCase(masm, FAST_ELEMENTS);
5205 }
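Reading the elements kind off a map is a mask-and-shift on its "bit field 2". The sketch below mirrors the and_/shr pair; the shift and width are assumed values for illustration, not the constants from objects.h:

#include <cstdio>

// Assumed field layout, for illustration only.
static const int kElementsKindShiftModel = 3;
static const int kElementsKindMaskModel = 0x1F << kElementsKindShiftModel;

static int ElementsKindFromBitField2(int bit_field2) {
  // Matches: and_(ecx, kElementsKindMask); shr(ecx, kElementsKindShift);
  return (bit_field2 & kElementsKindMaskModel) >> kElementsKindShiftModel;
}

int main() {
  printf("%d\n", ElementsKindFromBitField2(3 << kElementsKindShiftModel));
  return 0;
}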
5206 
5207 
5208 void CallApiFunctionStub::Generate(MacroAssembler* masm) {
5209  // ----------- S t a t e -------------
5210  // -- eax : callee
5211  // -- ebx : call_data
5212  // -- ecx : holder
5213  // -- edx : api_function_address
5214  // -- esi : context
5215  // --
5216  // -- esp[0] : return address
5217  // -- esp[4] : last argument
5218  // -- ...
5219  // -- esp[argc * 4] : first argument
5220  // -- esp[(argc + 1) * 4] : receiver
5221  // -----------------------------------
5222 
5223  Register callee = eax;
5224  Register call_data = ebx;
5225  Register holder = ecx;
5226  Register api_function_address = edx;
5227  Register return_address = edi;
5228  Register context = esi;
5229 
5230  int argc = ArgumentBits::decode(bit_field_);
5231  bool is_store = IsStoreBits::decode(bit_field_);
5232  bool call_data_undefined = CallDataUndefinedBits::decode(bit_field_);
5233 
5234  typedef FunctionCallbackArguments FCA;
5235 
5236  STATIC_ASSERT(FCA::kContextSaveIndex == 6);
5237  STATIC_ASSERT(FCA::kCalleeIndex == 5);
5238  STATIC_ASSERT(FCA::kDataIndex == 4);
5239  STATIC_ASSERT(FCA::kReturnValueOffset == 3);
5240  STATIC_ASSERT(FCA::kReturnValueDefaultValueIndex == 2);
5241  STATIC_ASSERT(FCA::kIsolateIndex == 1);
5242  STATIC_ASSERT(FCA::kHolderIndex == 0);
5243  STATIC_ASSERT(FCA::kArgsLength == 7);
5244 
5245  Isolate* isolate = masm->isolate();
5246 
5247  __ pop(return_address);
5248 
5249  // context save
5250  __ push(context);
5251  // load context from callee
5252  __ mov(context, FieldOperand(callee, JSFunction::kContextOffset));
5253 
5254  // callee
5255  __ push(callee);
5256 
5257  // call data
5258  __ push(call_data);
5259 
5260  Register scratch = call_data;
5261  if (!call_data_undefined) {
5262  // return value
5263  __ push(Immediate(isolate->factory()->undefined_value()));
5264  // return value default
5265  __ push(Immediate(isolate->factory()->undefined_value()));
5266  } else {
5267  // return value
5268  __ push(scratch);
5269  // return value default
5270  __ push(scratch);
5271  }
5272  // isolate
5273  __ push(Immediate(reinterpret_cast<int>(isolate)));
5274  // holder
5275  __ push(holder);
5276 
5277  __ mov(scratch, esp);
5278 
5279  // return address
5280  __ push(return_address);
5281 
5282  // The API function gets a reference to the v8::Arguments. If the CPU
5283  // profiler is enabled, a wrapper function will be called instead and we
5284  // need to pass the address of the callback as an additional parameter,
5285  // so always allocate space for it.
5286  const int kApiArgc = 1 + 1;
5287 
5288  // Allocate the v8::Arguments structure in the arguments' space since
5289  // it's not controlled by GC.
5290  const int kApiStackSpace = 4;
5291 
5292  __ PrepareCallApiFunction(kApiArgc + kApiStackSpace);
5293 
5294  // FunctionCallbackInfo::implicit_args_.
5295  __ mov(ApiParameterOperand(2), scratch);
5296  __ add(scratch, Immediate((argc + FCA::kArgsLength - 1) * kPointerSize));
5297  // FunctionCallbackInfo::values_.
5298  __ mov(ApiParameterOperand(3), scratch);
5299  // FunctionCallbackInfo::length_.
5300  __ Move(ApiParameterOperand(4), Immediate(argc));
5301  // FunctionCallbackInfo::is_construct_call_.
5302  __ Move(ApiParameterOperand(5), Immediate(0));
5303 
5304  // v8::InvocationCallback's argument.
5305  __ lea(scratch, ApiParameterOperand(2));
5306  __ mov(ApiParameterOperand(0), scratch);
5307 
5308  Address thunk_address = FUNCTION_ADDR(&InvokeFunctionCallback);
5309 
5310  Operand context_restore_operand(ebp,
5311  (2 + FCA::kContextSaveIndex) * kPointerSize);
5312  // Stores return the first js argument
5313  int return_value_offset = 0;
5314  if (is_store) {
5315  return_value_offset = 2 + FCA::kArgsLength;
5316  } else {
5317  return_value_offset = 2 + FCA::kReturnValueOffset;
5318  }
5319  Operand return_value_operand(ebp, return_value_offset * kPointerSize);
5320  __ CallApiFunctionAndReturn(api_function_address,
5321  thunk_address,
5322  ApiParameterOperand(1),
5323  argc + FCA::kArgsLength + 1,
5324  return_value_operand,
5325  &context_restore_operand);
5326 }
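The seven pushes above build the implicit_args block of FunctionCallbackArguments, with the last-pushed slot ending up at the lowest address; the STATIC_ASSERTed indices describe exactly that layout. Modeled as a plain array in C++ (the slot strings are descriptive only):

#include <cstdio>

// Indices from the STATIC_ASSERTs in the stub above.
enum {
  kHolderIndex = 0, kIsolateIndex = 1, kReturnValueDefaultValueIndex = 2,
  kReturnValueOffset = 3, kDataIndex = 4, kCalleeIndex = 5,
  kContextSaveIndex = 6, kArgsLength = 7
};

int main() {
  const char* slots[kArgsLength];
  slots[kContextSaveIndex] = "context";              // pushed first
  slots[kCalleeIndex] = "callee";
  slots[kDataIndex] = "call data";
  slots[kReturnValueOffset] = "return value";
  slots[kReturnValueDefaultValueIndex] = "return value default";
  slots[kIsolateIndex] = "isolate";
  slots[kHolderIndex] = "holder";                    // pushed last, lowest
  for (int i = 0; i < kArgsLength; ++i)
    printf("implicit_args[%d] = %s\n", i, slots[i]);
  return 0;
}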
5327 
5328 
5329 void CallApiGetterStub::Generate(MacroAssembler* masm) {
5330  // ----------- S t a t e -------------
5331  // -- esp[0] : return address
5332  // -- esp[4] : name
5333  // -- esp[8 - kArgsLength*4] : PropertyCallbackArguments object
5334  // -- ...
5335  // -- edx : api_function_address
5336  // -----------------------------------
5337 
5338  // array for v8::Arguments::values_, handle for the name, and a pointer
5339  // to the values (treated as a smi by the GC).
5340  const int kStackSpace = PropertyCallbackArguments::kArgsLength + 2;
5341  // Allocate space for the optional callback address parameter in case
5342  // the CPU profiler is active.
5343  const int kApiArgc = 2 + 1;
5344 
5345  Register api_function_address = edx;
5346  Register scratch = ebx;
5347 
5348  // load address of name
5349  __ lea(scratch, Operand(esp, 1 * kPointerSize));
5350 
5351  __ PrepareCallApiFunction(kApiArgc);
5352  __ mov(ApiParameterOperand(0), scratch); // name.
5353  __ add(scratch, Immediate(kPointerSize));
5354  __ mov(ApiParameterOperand(1), scratch); // arguments pointer.
5355 
5355 
5356  Address thunk_address = FUNCTION_ADDR(&InvokeAccessorGetterCallback);
5357 
5358  __ CallApiFunctionAndReturn(api_function_address,
5359  thunk_address,
5360  ApiParameterOperand(2),
5361  kStackSpace,
5362  Operand(ebp, 7 * kPointerSize),
5363  NULL);
5364 }
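The thunk here ultimately lands in an embedder-supplied v8::AccessorGetterCallback. A minimal native getter with that signature, written against the public v8.h API of this era (the returned string is an example value):

#include <v8.h>

// Signature matches v8::AccessorGetterCallback.
static void NameGetter(v8::Local<v8::String> property,
                       const v8::PropertyCallbackInfo<v8::Value>& info) {
  // The return-value slot written here is one of the
  // PropertyCallbackArguments entries this stub lays out on the stack.
  info.GetReturnValue().Set(
      v8::String::NewFromUtf8(info.GetIsolate(), "example"));
}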
5365 
5366 
5367 #undef __
5368 
5369 } } // namespace v8::internal
5370 
5371 #endif // V8_TARGET_ARCH_IA32
Definition: double.h:48