v8 3.25.30 (node 0.11.13)
V8 is Google's open source JavaScript engine
code-stubs-x64.cc
1 // Copyright 2013 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are
4 // met:
5 //
6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided
11 // with the distribution.
12 // * Neither the name of Google Inc. nor the names of its
13 // contributors may be used to endorse or promote products derived
14 // from this software without specific prior written permission.
15 //
16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27 
28 #include "v8.h"
29 
30 #if V8_TARGET_ARCH_X64
31 
32 #include "bootstrapper.h"
33 #include "code-stubs.h"
34 #include "regexp-macro-assembler.h"
35 #include "stub-cache.h"
36 #include "runtime.h"
37 
38 namespace v8 {
39 namespace internal {
40 
41 
42 void FastNewClosureStub::InitializeInterfaceDescriptor(
43  Isolate* isolate,
44  CodeStubInterfaceDescriptor* descriptor) {
45  static Register registers[] = { rbx };
46  descriptor->register_param_count_ = 1;
47  descriptor->register_params_ = registers;
48  descriptor->deoptimization_handler_ =
49  Runtime::FunctionForId(Runtime::kHiddenNewClosureFromStubFailure)->entry;
50 }
51 
52 
53 void FastNewContextStub::InitializeInterfaceDescriptor(
54  Isolate* isolate,
55  CodeStubInterfaceDescriptor* descriptor) {
56  static Register registers[] = { rdi };
57  descriptor->register_param_count_ = 1;
58  descriptor->register_params_ = registers;
59  descriptor->deoptimization_handler_ = NULL;
60 }
61 
62 
63 void ToNumberStub::InitializeInterfaceDescriptor(
64  Isolate* isolate,
65  CodeStubInterfaceDescriptor* descriptor) {
66  static Register registers[] = { rax };
67  descriptor->register_param_count_ = 1;
68  descriptor->register_params_ = registers;
69  descriptor->deoptimization_handler_ = NULL;
70 }
71 
72 
73 void NumberToStringStub::InitializeInterfaceDescriptor(
74  Isolate* isolate,
75  CodeStubInterfaceDescriptor* descriptor) {
76  static Register registers[] = { rax };
77  descriptor->register_param_count_ = 1;
78  descriptor->register_params_ = registers;
79  descriptor->deoptimization_handler_ =
80  Runtime::FunctionForId(Runtime::kHiddenNumberToString)->entry;
81 }
82 
83 
84 void FastCloneShallowArrayStub::InitializeInterfaceDescriptor(
85  Isolate* isolate,
86  CodeStubInterfaceDescriptor* descriptor) {
87  static Register registers[] = { rax, rbx, rcx };
88  descriptor->register_param_count_ = 3;
89  descriptor->register_params_ = registers;
90  descriptor->deoptimization_handler_ =
91  Runtime::FunctionForId(
92  Runtime::kHiddenCreateArrayLiteralStubBailout)->entry;
93 }
94 
95 
96 void FastCloneShallowObjectStub::InitializeInterfaceDescriptor(
97  Isolate* isolate,
98  CodeStubInterfaceDescriptor* descriptor) {
99  static Register registers[] = { rax, rbx, rcx, rdx };
100  descriptor->register_param_count_ = 4;
101  descriptor->register_params_ = registers;
102  descriptor->deoptimization_handler_ =
103  Runtime::FunctionForId(Runtime::kHiddenCreateObjectLiteral)->entry;
104 }
105 
106 
107 void CreateAllocationSiteStub::InitializeInterfaceDescriptor(
108  Isolate* isolate,
109  CodeStubInterfaceDescriptor* descriptor) {
110  static Register registers[] = { rbx, rdx };
111  descriptor->register_param_count_ = 2;
112  descriptor->register_params_ = registers;
113  descriptor->deoptimization_handler_ = NULL;
114 }
115 
116 
117 void KeyedLoadFastElementStub::InitializeInterfaceDescriptor(
118  Isolate* isolate,
119  CodeStubInterfaceDescriptor* descriptor) {
120  static Register registers[] = { rdx, rax };
121  descriptor->register_param_count_ = 2;
122  descriptor->register_params_ = registers;
123  descriptor->deoptimization_handler_ =
124  FUNCTION_ADDR(KeyedLoadIC_MissFromStubFailure);
125 }
126 
127 
128 void KeyedLoadDictionaryElementStub::InitializeInterfaceDescriptor(
129  Isolate* isolate,
130  CodeStubInterfaceDescriptor* descriptor) {
131  static Register registers[] = { rdx, rax };
132  descriptor->register_param_count_ = 2;
133  descriptor->register_params_ = registers;
134  descriptor->deoptimization_handler_ =
135  FUNCTION_ADDR(KeyedLoadIC_MissFromStubFailure);
136 }
137 
138 
139 void RegExpConstructResultStub::InitializeInterfaceDescriptor(
140  Isolate* isolate,
141  CodeStubInterfaceDescriptor* descriptor) {
142  static Register registers[] = { rcx, rbx, rax };
143  descriptor->register_param_count_ = 3;
144  descriptor->register_params_ = registers;
145  descriptor->deoptimization_handler_ =
146  Runtime::FunctionForId(Runtime::kHiddenRegExpConstructResult)->entry;
147 }
148 
149 
150 void LoadFieldStub::InitializeInterfaceDescriptor(
151  Isolate* isolate,
152  CodeStubInterfaceDescriptor* descriptor) {
153  static Register registers[] = { rax };
154  descriptor->register_param_count_ = 1;
155  descriptor->register_params_ = registers;
156  descriptor->deoptimization_handler_ = NULL;
157 }
158 
159 
160 void KeyedLoadFieldStub::InitializeInterfaceDescriptor(
161  Isolate* isolate,
162  CodeStubInterfaceDescriptor* descriptor) {
163  static Register registers[] = { rdx };
164  descriptor->register_param_count_ = 1;
165  descriptor->register_params_ = registers;
166  descriptor->deoptimization_handler_ = NULL;
167 }
168 
169 
170 void StringLengthStub::InitializeInterfaceDescriptor(
171  Isolate* isolate,
172  CodeStubInterfaceDescriptor* descriptor) {
173  static Register registers[] = { rax, rcx };
174  descriptor->register_param_count_ = 2;
175  descriptor->register_params_ = registers;
176  descriptor->deoptimization_handler_ = NULL;
177 }
178 
179 
180 void KeyedStringLengthStub::InitializeInterfaceDescriptor(
181  Isolate* isolate,
182  CodeStubInterfaceDescriptor* descriptor) {
183  static Register registers[] = { rdx, rax };
184  descriptor->register_param_count_ = 2;
185  descriptor->register_params_ = registers;
186  descriptor->deoptimization_handler_ = NULL;
187 }
188 
189 
190 void KeyedStoreFastElementStub::InitializeInterfaceDescriptor(
191  Isolate* isolate,
192  CodeStubInterfaceDescriptor* descriptor) {
193  static Register registers[] = { rdx, rcx, rax };
194  descriptor->register_param_count_ = 3;
195  descriptor->register_params_ = registers;
196  descriptor->deoptimization_handler_ =
197  FUNCTION_ADDR(KeyedStoreIC_MissFromStubFailure);
198 }
199 
200 
201 void TransitionElementsKindStub::InitializeInterfaceDescriptor(
202  Isolate* isolate,
203  CodeStubInterfaceDescriptor* descriptor) {
204  static Register registers[] = { rax, rbx };
205  descriptor->register_param_count_ = 2;
206  descriptor->register_params_ = registers;
207  descriptor->deoptimization_handler_ =
208  Runtime::FunctionForId(Runtime::kTransitionElementsKind)->entry;
209 }
210 
211 
212 static void InitializeArrayConstructorDescriptor(
213  Isolate* isolate,
214  CodeStubInterfaceDescriptor* descriptor,
215  int constant_stack_parameter_count) {
216  // register state
217  // rax -- number of arguments
218  // rdi -- function
219  // rbx -- allocation site with elements kind
220  static Register registers_variable_args[] = { rdi, rbx, rax };
221  static Register registers_no_args[] = { rdi, rbx };
222 
223  if (constant_stack_parameter_count == 0) {
224  descriptor->register_param_count_ = 2;
225  descriptor->register_params_ = registers_no_args;
226  } else {
227  // stack param count needs to include the constructor pointer and the single argument
228  descriptor->handler_arguments_mode_ = PASS_ARGUMENTS;
229  descriptor->stack_parameter_count_ = rax;
230  descriptor->register_param_count_ = 3;
231  descriptor->register_params_ = registers_variable_args;
232  }
233 
234  descriptor->hint_stack_parameter_count_ = constant_stack_parameter_count;
235  descriptor->function_mode_ = JS_FUNCTION_STUB_MODE;
236  descriptor->deoptimization_handler_ =
237  Runtime::FunctionForId(Runtime::kHiddenArrayConstructor)->entry;
238 }
239 
240 
241 static void InitializeInternalArrayConstructorDescriptor(
242  Isolate* isolate,
243  CodeStubInterfaceDescriptor* descriptor,
244  int constant_stack_parameter_count) {
245  // register state
246  // rax -- number of arguments
247  // rdi -- constructor function
248  static Register registers_variable_args[] = { rdi, rax };
249  static Register registers_no_args[] = { rdi };
250 
251  if (constant_stack_parameter_count == 0) {
252  descriptor->register_param_count_ = 1;
253  descriptor->register_params_ = registers_no_args;
254  } else {
255  // stack param count needs (constructor pointer, and single argument)
256  descriptor->handler_arguments_mode_ = PASS_ARGUMENTS;
257  descriptor->stack_parameter_count_ = rax;
258  descriptor->register_param_count_ = 2;
259  descriptor->register_params_ = registers_variable_args;
260  }
261 
262  descriptor->hint_stack_parameter_count_ = constant_stack_parameter_count;
263  descriptor->function_mode_ = JS_FUNCTION_STUB_MODE;
264  descriptor->deoptimization_handler_ =
265  Runtime::FunctionForId(Runtime::kHiddenInternalArrayConstructor)->entry;
266 }
267 
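// Note on the two helpers above: the callers below pass
// constant_stack_parameter_count values of 0 (no-argument constructor),
// 1 (single-argument constructor) or -1 (argument count not known at
// compile time). For the non-zero cases the actual count travels on the
// stack and is tracked dynamically in rax via stack_parameter_count_.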
268 
269 void ArrayNoArgumentConstructorStub::InitializeInterfaceDescriptor(
270  Isolate* isolate,
271  CodeStubInterfaceDescriptor* descriptor) {
272  InitializeArrayConstructorDescriptor(isolate, descriptor, 0);
273 }
274 
275 
276 void ArraySingleArgumentConstructorStub::InitializeInterfaceDescriptor(
277  Isolate* isolate,
278  CodeStubInterfaceDescriptor* descriptor) {
279  InitializeArrayConstructorDescriptor(isolate, descriptor, 1);
280 }
281 
282 
283 void ArrayNArgumentsConstructorStub::InitializeInterfaceDescriptor(
284  Isolate* isolate,
285  CodeStubInterfaceDescriptor* descriptor) {
286  InitializeArrayConstructorDescriptor(isolate, descriptor, -1);
287 }
288 
289 
290 void InternalArrayNoArgumentConstructorStub::InitializeInterfaceDescriptor(
291  Isolate* isolate,
292  CodeStubInterfaceDescriptor* descriptor) {
293  InitializeInternalArrayConstructorDescriptor(isolate, descriptor, 0);
294 }
295 
296 
297 void InternalArraySingleArgumentConstructorStub::InitializeInterfaceDescriptor(
298  Isolate* isolate,
299  CodeStubInterfaceDescriptor* descriptor) {
300  InitializeInternalArrayConstructorDescriptor(isolate, descriptor, 1);
301 }
302 
303 
304 void InternalArrayNArgumentsConstructorStub::InitializeInterfaceDescriptor(
305  Isolate* isolate,
306  CodeStubInterfaceDescriptor* descriptor) {
307  InitializeInternalArrayConstructorDescriptor(isolate, descriptor, -1);
308 }
309 
310 
311 void CompareNilICStub::InitializeInterfaceDescriptor(
312  Isolate* isolate,
313  CodeStubInterfaceDescriptor* descriptor) {
314  static Register registers[] = { rax };
315  descriptor->register_param_count_ = 1;
316  descriptor->register_params_ = registers;
317  descriptor->deoptimization_handler_ =
318  FUNCTION_ADDR(CompareNilIC_Miss);
319  descriptor->SetMissHandler(
320  ExternalReference(IC_Utility(IC::kCompareNilIC_Miss), isolate));
321 }
322 
323 
324 void ToBooleanStub::InitializeInterfaceDescriptor(
325  Isolate* isolate,
326  CodeStubInterfaceDescriptor* descriptor) {
327  static Register registers[] = { rax };
328  descriptor->register_param_count_ = 1;
329  descriptor->register_params_ = registers;
330  descriptor->deoptimization_handler_ =
331  FUNCTION_ADDR(ToBooleanIC_Miss);
332  descriptor->SetMissHandler(
333  ExternalReference(IC_Utility(IC::kToBooleanIC_Miss), isolate));
334 }
335 
336 
337 void StoreGlobalStub::InitializeInterfaceDescriptor(
338  Isolate* isolate,
339  CodeStubInterfaceDescriptor* descriptor) {
340  static Register registers[] = { rdx, rcx, rax };
341  descriptor->register_param_count_ = 3;
342  descriptor->register_params_ = registers;
343  descriptor->deoptimization_handler_ =
344  FUNCTION_ADDR(StoreIC_MissFromStubFailure);
345 }
346 
347 
348 void ElementsTransitionAndStoreStub::InitializeInterfaceDescriptor(
349  Isolate* isolate,
350  CodeStubInterfaceDescriptor* descriptor) {
351  static Register registers[] = { rax, rbx, rcx, rdx };
352  descriptor->register_param_count_ = 4;
353  descriptor->register_params_ = registers;
354  descriptor->deoptimization_handler_ =
355  FUNCTION_ADDR(ElementsTransitionAndStoreIC_Miss);
356 }
357 
358 
359 void BinaryOpICStub::InitializeInterfaceDescriptor(
360  Isolate* isolate,
361  CodeStubInterfaceDescriptor* descriptor) {
362  static Register registers[] = { rdx, rax };
363  descriptor->register_param_count_ = 2;
364  descriptor->register_params_ = registers;
365  descriptor->deoptimization_handler_ = FUNCTION_ADDR(BinaryOpIC_Miss);
366  descriptor->SetMissHandler(
367  ExternalReference(IC_Utility(IC::kBinaryOpIC_Miss), isolate));
368 }
369 
370 
371 void BinaryOpWithAllocationSiteStub::InitializeInterfaceDescriptor(
372  Isolate* isolate,
373  CodeStubInterfaceDescriptor* descriptor) {
374  static Register registers[] = { rcx, rdx, rax };
375  descriptor->register_param_count_ = 3;
376  descriptor->register_params_ = registers;
377  descriptor->deoptimization_handler_ =
378  FUNCTION_ADDR(BinaryOpIC_MissWithAllocationSite);
379 }
380 
381 
382 void StringAddStub::InitializeInterfaceDescriptor(
383  Isolate* isolate,
384  CodeStubInterfaceDescriptor* descriptor) {
385  static Register registers[] = { rdx, rax };
386  descriptor->register_param_count_ = 2;
387  descriptor->register_params_ = registers;
388  descriptor->deoptimization_handler_ =
389  Runtime::FunctionForId(Runtime::kHiddenStringAdd)->entry;
390 }
391 
392 
393 void CallDescriptors::InitializeForIsolate(Isolate* isolate) {
394  {
395  CallInterfaceDescriptor* descriptor =
396  isolate->call_descriptor(Isolate::ArgumentAdaptorCall);
397  static Register registers[] = { rdi, // JSFunction
398  rsi, // context
399  rax, // actual number of arguments
400  rbx, // expected number of arguments
401  };
402  static Representation representations[] = {
403  Representation::Tagged(), // JSFunction
404  Representation::Tagged(), // context
405  Representation::Integer32(), // actual number of arguments
406  Representation::Integer32(), // expected number of arguments
407  };
408  descriptor->register_param_count_ = 4;
409  descriptor->register_params_ = registers;
410  descriptor->param_representations_ = representations;
411  }
412  {
413  CallInterfaceDescriptor* descriptor =
414  isolate->call_descriptor(Isolate::KeyedCall);
415  static Register registers[] = { rsi, // context
416  rcx, // key
417  };
418  static Representation representations[] = {
419  Representation::Tagged(), // context
420  Representation::Tagged(), // key
421  };
422  descriptor->register_param_count_ = 2;
423  descriptor->register_params_ = registers;
424  descriptor->param_representations_ = representations;
425  }
426  {
427  CallInterfaceDescriptor* descriptor =
428  isolate->call_descriptor(Isolate::NamedCall);
429  static Register registers[] = { rsi, // context
430  rcx, // name
431  };
432  static Representation representations[] = {
433  Representation::Tagged(), // context
434  Representation::Tagged(), // name
435  };
436  descriptor->register_param_count_ = 2;
437  descriptor->register_params_ = registers;
438  descriptor->param_representations_ = representations;
439  }
440  {
441  CallInterfaceDescriptor* descriptor =
442  isolate->call_descriptor(Isolate::CallHandler);
443  static Register registers[] = { rsi, // context
444  rdx, // receiver
445  };
446  static Representation representations[] = {
447  Representation::Tagged(), // context
448  Representation::Tagged(), // receiver
449  };
450  descriptor->register_param_count_ = 2;
451  descriptor->register_params_ = registers;
452  descriptor->param_representations_ = representations;
453  }
454  {
455  CallInterfaceDescriptor* descriptor =
456  isolate->call_descriptor(Isolate::ApiFunctionCall);
457  static Register registers[] = { rax, // callee
458  rbx, // call_data
459  rcx, // holder
460  rdx, // api_function_address
461  rsi, // context
462  };
463  static Representation representations[] = {
464  Representation::Tagged(), // callee
465  Representation::Tagged(), // call_data
466  Representation::Tagged(), // holder
467  Representation::External(), // api_function_address
468  Representation::Tagged(), // context
469  };
470  descriptor->register_param_count_ = 5;
471  descriptor->register_params_ = registers;
472  descriptor->param_representations_ = representations;
473  }
474 }
475 
476 
477 #define __ ACCESS_MASM(masm)
478 
479 
480 void HydrogenCodeStub::GenerateLightweightMiss(MacroAssembler* masm) {
481  // Update the static counter each time a new code stub is generated.
482  Isolate* isolate = masm->isolate();
483  isolate->counters()->code_stubs()->Increment();
484 
485  CodeStubInterfaceDescriptor* descriptor = GetInterfaceDescriptor(isolate);
486  int param_count = descriptor->register_param_count_;
487  {
488  // Call the runtime system in a fresh internal frame.
489  FrameScope scope(masm, StackFrame::INTERNAL);
490  ASSERT(descriptor->register_param_count_ == 0 ||
491  rax.is(descriptor->register_params_[param_count - 1]));
492  // Push arguments
493  for (int i = 0; i < param_count; ++i) {
494  __ Push(descriptor->register_params_[i]);
495  }
496  ExternalReference miss = descriptor->miss_handler();
497  __ CallExternalReference(miss, descriptor->register_param_count_);
498  }
499 
500  __ Ret();
501 }
502 
503 
504 void StoreBufferOverflowStub::Generate(MacroAssembler* masm) {
505  __ PushCallerSaved(save_doubles_);
506  const int argument_count = 1;
507  __ PrepareCallCFunction(argument_count);
508  __ LoadAddress(arg_reg_1,
509  ExternalReference::isolate_address(masm->isolate()));
510 
511  AllowExternalCallThatCantCauseGC scope(masm);
512  __ CallCFunction(
513  ExternalReference::store_buffer_overflow_function(masm->isolate()),
514  argument_count);
515  __ PopCallerSaved(save_doubles_);
516  __ ret(0);
517 }
518 
519 
520 class FloatingPointHelper : public AllStatic {
521  public:
522  enum ConvertUndefined {
523  CONVERT_UNDEFINED_TO_ZERO,
524  BAILOUT_ON_UNDEFINED
525  };
526  // Load the operands from rdx and rax into xmm0 and xmm1, as doubles.
527  // If the operands are not both numbers, jump to not_numbers.
528  // Leaves rdx and rax unchanged. SmiOperands assumes both are smis.
529  // NumberOperands assumes both are smis or heap numbers.
530  static void LoadSSE2UnknownOperands(MacroAssembler* masm,
531  Label* not_numbers);
532 };
533 
534 
535 void DoubleToIStub::Generate(MacroAssembler* masm) {
536  Register input_reg = this->source();
537  Register final_result_reg = this->destination();
538  ASSERT(is_truncating());
539 
540  Label check_negative, process_64_bits, done;
541 
542  int double_offset = offset();
543 
544  // Account for return address and saved regs if input is rsp.
545  if (input_reg.is(rsp)) double_offset += 3 * kRegisterSize;
546 
547  MemOperand mantissa_operand(MemOperand(input_reg, double_offset));
548  MemOperand exponent_operand(MemOperand(input_reg,
549  double_offset + kDoubleSize / 2));
550 
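// The IEEE-754 double is accessed as two 32-bit halves here: mantissa_operand
// is the low word (low 32 bits of the significand) and exponent_operand, at
// double_offset + kDoubleSize / 2, is the high word holding the sign bit, the
// 11 exponent bits and the top 20 significand bits.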
551  Register scratch1;
552  Register scratch_candidates[3] = { rbx, rdx, rdi };
553  for (int i = 0; i < 3; i++) {
554  scratch1 = scratch_candidates[i];
555  if (!final_result_reg.is(scratch1) && !input_reg.is(scratch1)) break;
556  }
557 
558  // Since we must use rcx for shifts below, use some other register (rax)
559  // to calculate the result if rcx is the requested return register.
560  Register result_reg = final_result_reg.is(rcx) ? rax : final_result_reg;
561  // Save rcx if it isn't the return register and therefore volatile, or if it
562  // is the return register, then save the temp register we use in its stead
563  // for the result.
564  Register save_reg = final_result_reg.is(rcx) ? rax : rcx;
565  __ pushq(scratch1);
566  __ pushq(save_reg);
567 
568  bool stash_exponent_copy = !input_reg.is(rsp);
569  __ movl(scratch1, mantissa_operand);
570  __ movsd(xmm0, mantissa_operand);
571  __ movl(rcx, exponent_operand);
572  if (stash_exponent_copy) __ pushq(rcx);
573 
574  __ andl(rcx, Immediate(HeapNumber::kExponentMask));
575  __ shrl(rcx, Immediate(HeapNumber::kExponentShift));
576  __ leal(result_reg, MemOperand(rcx, -HeapNumber::kExponentBias));
577  __ cmpl(result_reg, Immediate(HeapNumber::kMantissaBits));
578  __ j(below, &process_64_bits);
579 
580  // Result is entirely in lower 32-bits of mantissa
581  int delta = HeapNumber::kExponentBias + Double::kPhysicalSignificandSize;
582  __ subl(rcx, Immediate(delta));
583  __ xorl(result_reg, result_reg);
584  __ cmpl(rcx, Immediate(31));
585  __ j(above, &done);
586  __ shll_cl(scratch1);
587  __ jmp(&check_negative);
588 
589  __ bind(&process_64_bits);
590  __ cvttsd2siq(result_reg, xmm0);
591  __ jmp(&done, Label::kNear);
592 
593  // If the double was negative, negate the integer result.
594  __ bind(&check_negative);
595  __ movl(result_reg, scratch1);
596  __ negl(result_reg);
597  if (stash_exponent_copy) {
598  __ cmpl(MemOperand(rsp, 0), Immediate(0));
599  } else {
600  __ cmpl(exponent_operand, Immediate(0));
601  }
602  __ cmovl(greater, result_reg, scratch1);
603 
604  // Restore registers
605  __ bind(&done);
606  if (stash_exponent_copy) {
607  __ addp(rsp, Immediate(kDoubleSize));
608  }
609  if (!final_result_reg.is(result_reg)) {
610  ASSERT(final_result_reg.is(rcx));
611  __ movl(final_result_reg, result_reg);
612  }
613  __ popq(save_reg);
614  __ popq(scratch1);
615  __ ret(0);
616 }
617 
618 
619 void FloatingPointHelper::LoadSSE2UnknownOperands(MacroAssembler* masm,
620  Label* not_numbers) {
621  Label load_smi_rdx, load_nonsmi_rax, load_smi_rax, load_float_rax, done;
622  // Load operand in rdx into xmm0, or branch to not_numbers.
623  __ LoadRoot(rcx, Heap::kHeapNumberMapRootIndex);
624  __ JumpIfSmi(rdx, &load_smi_rdx);
625  __ cmpp(FieldOperand(rdx, HeapObject::kMapOffset), rcx);
626  __ j(not_equal, not_numbers); // Argument in rdx is not a number.
627  __ movsd(xmm0, FieldOperand(rdx, HeapNumber::kValueOffset));
628  // Load operand in rax into xmm1, or branch to not_numbers.
629  __ JumpIfSmi(rax, &load_smi_rax);
630 
631  __ bind(&load_nonsmi_rax);
632  __ cmpp(FieldOperand(rax, HeapObject::kMapOffset), rcx);
633  __ j(not_equal, not_numbers);
634  __ movsd(xmm1, FieldOperand(rax, HeapNumber::kValueOffset));
635  __ jmp(&done);
636 
637  __ bind(&load_smi_rdx);
638  __ SmiToInteger32(kScratchRegister, rdx);
639  __ Cvtlsi2sd(xmm0, kScratchRegister);
640  __ JumpIfNotSmi(rax, &load_nonsmi_rax);
641 
642  __ bind(&load_smi_rax);
643  __ SmiToInteger32(kScratchRegister, rax);
644  __ Cvtlsi2sd(xmm1, kScratchRegister);
645  __ bind(&done);
646 }
647 
648 
649 void MathPowStub::Generate(MacroAssembler* masm) {
650  const Register exponent = rdx;
651  const Register base = rax;
652  const Register scratch = rcx;
653  const XMMRegister double_result = xmm3;
654  const XMMRegister double_base = xmm2;
655  const XMMRegister double_exponent = xmm1;
656  const XMMRegister double_scratch = xmm4;
657 
658  Label call_runtime, done, exponent_not_smi, int_exponent;
659 
660  // Save 1 in double_result - we need this several times later on.
661  __ movp(scratch, Immediate(1));
662  __ Cvtlsi2sd(double_result, scratch);
663 
664  if (exponent_type_ == ON_STACK) {
665  Label base_is_smi, unpack_exponent;
666  // The exponent and base are supplied as arguments on the stack.
667  // This can only happen if the stub is called from non-optimized code.
668  // Load input parameters from stack.
669  StackArgumentsAccessor args(rsp, 2, ARGUMENTS_DONT_CONTAIN_RECEIVER);
670  __ movp(base, args.GetArgumentOperand(0));
671  __ movp(exponent, args.GetArgumentOperand(1));
672  __ JumpIfSmi(base, &base_is_smi, Label::kNear);
673  __ CompareRoot(FieldOperand(base, HeapObject::kMapOffset),
674  Heap::kHeapNumberMapRootIndex);
675  __ j(not_equal, &call_runtime);
676 
677  __ movsd(double_base, FieldOperand(base, HeapNumber::kValueOffset));
678  __ jmp(&unpack_exponent, Label::kNear);
679 
680  __ bind(&base_is_smi);
681  __ SmiToInteger32(base, base);
682  __ Cvtlsi2sd(double_base, base);
683  __ bind(&unpack_exponent);
684 
685  __ JumpIfNotSmi(exponent, &exponent_not_smi, Label::kNear);
686  __ SmiToInteger32(exponent, exponent);
687  __ jmp(&int_exponent);
688 
689  __ bind(&exponent_not_smi);
690  __ CompareRoot(FieldOperand(exponent, HeapObject::kMapOffset),
691  Heap::kHeapNumberMapRootIndex);
692  __ j(not_equal, &call_runtime);
693  __ movsd(double_exponent, FieldOperand(exponent, HeapNumber::kValueOffset));
694  } else if (exponent_type_ == TAGGED) {
695  __ JumpIfNotSmi(exponent, &exponent_not_smi, Label::kNear);
696  __ SmiToInteger32(exponent, exponent);
697  __ jmp(&int_exponent);
698 
699  __ bind(&exponent_not_smi);
700  __ movsd(double_exponent, FieldOperand(exponent, HeapNumber::kValueOffset));
701  }
702 
703  if (exponent_type_ != INTEGER) {
704  Label fast_power, try_arithmetic_simplification;
705  // Detect integer exponents stored as double.
706  __ DoubleToI(exponent, double_exponent, double_scratch,
707  TREAT_MINUS_ZERO_AS_ZERO, &try_arithmetic_simplification);
708  __ jmp(&int_exponent);
709 
710  __ bind(&try_arithmetic_simplification);
711  __ cvttsd2si(exponent, double_exponent);
712  // Skip to runtime if possibly NaN (indicated by the indefinite integer).
713  __ cmpl(exponent, Immediate(0x1));
714  __ j(overflow, &call_runtime);
715 
716  if (exponent_type_ == ON_STACK) {
717  // Detect square root case. Crankshaft detects constant +/-0.5 at
718  // compile time and uses DoMathPowHalf instead. We then skip this check
719  // for non-constant cases of +/-0.5 as these hardly occur.
720  Label continue_sqrt, continue_rsqrt, not_plus_half;
721  // Test for 0.5.
722  // Load double_scratch with 0.5.
723  __ movq(scratch, V8_UINT64_C(0x3FE0000000000000));
724  __ movq(double_scratch, scratch);
725  // Already ruled out NaNs for exponent.
726  __ ucomisd(double_scratch, double_exponent);
727  __ j(not_equal, &not_plus_half, Label::kNear);
728 
729  // Calculates square root of base. Check for the special case of
730  // Math.pow(-Infinity, 0.5) == Infinity (ECMA spec, 15.8.2.13).
731  // According to IEEE-754, double-precision -Infinity has the highest
732  // 12 bits set and the lowest 52 bits cleared.
733  __ movq(scratch, V8_UINT64_C(0xFFF0000000000000));
734  __ movq(double_scratch, scratch);
735  __ ucomisd(double_scratch, double_base);
736  // Comparing -Infinity with NaN results in "unordered", which sets the
737  // zero flag as if both were equal. However, it also sets the carry flag.
738  __ j(not_equal, &continue_sqrt, Label::kNear);
739  __ j(carry, &continue_sqrt, Label::kNear);
740 
741  // Set result to Infinity in the special case.
742  __ xorps(double_result, double_result);
743  __ subsd(double_result, double_scratch);
744  __ jmp(&done);
745 
746  __ bind(&continue_sqrt);
747  // sqrtsd returns -0 when input is -0. ECMA spec requires +0.
748  __ xorps(double_scratch, double_scratch);
749  __ addsd(double_scratch, double_base); // Convert -0 to 0.
750  __ sqrtsd(double_result, double_scratch);
751  __ jmp(&done);
752 
753  // Test for -0.5.
754  __ bind(&not_plus_half);
755  // Load double_scratch with -0.5 by subtracting 1.
756  __ subsd(double_scratch, double_result);
757  // Already ruled out NaNs for exponent.
758  __ ucomisd(double_scratch, double_exponent);
759  __ j(not_equal, &fast_power, Label::kNear);
760 
761  // Calculates reciprocal of square root of base. Check for the special
762  // case of Math.pow(-Infinity, -0.5) == 0 (ECMA spec, 15.8.2.13).
763  // According to IEEE-754, double-precision -Infinity has the highest
764  // 12 bits set and the lowest 52 bits cleared.
765  __ movq(scratch, V8_UINT64_C(0xFFF0000000000000));
766  __ movq(double_scratch, scratch);
767  __ ucomisd(double_scratch, double_base);
768  // Comparing -Infinity with NaN results in "unordered", which sets the
769  // zero flag as if both were equal. However, it also sets the carry flag.
770  __ j(not_equal, &continue_rsqrt, Label::kNear);
771  __ j(carry, &continue_rsqrt, Label::kNear);
772 
773  // Set result to 0 in the special case.
774  __ xorps(double_result, double_result);
775  __ jmp(&done);
776 
777  __ bind(&continue_rsqrt);
778  // sqrtsd returns -0 when input is -0. ECMA spec requires +0.
779  __ xorps(double_exponent, double_exponent);
780  __ addsd(double_exponent, double_base); // Convert -0 to +0.
781  __ sqrtsd(double_exponent, double_exponent);
782  __ divsd(double_result, double_exponent);
783  __ jmp(&done);
784  }
785 
786  // Using FPU instructions to calculate power.
787  Label fast_power_failed;
788  __ bind(&fast_power);
789  __ fnclex(); // Clear flags to catch exceptions later.
790  // Transfer (B)ase and (E)xponent onto the FPU register stack.
791  __ subp(rsp, Immediate(kDoubleSize));
792  __ movsd(Operand(rsp, 0), double_exponent);
793  __ fld_d(Operand(rsp, 0)); // E
794  __ movsd(Operand(rsp, 0), double_base);
795  __ fld_d(Operand(rsp, 0)); // B, E
796 
797  // Exponent is in st(1) and base is in st(0)
798  // B ^ E = (2^(E * log2(B)) - 1) + 1 = (2^X - 1) + 1 for X = E * log2(B)
799  // FYL2X calculates st(1) * log2(st(0))
800  __ fyl2x(); // X
801  __ fld(0); // X, X
802  __ frndint(); // rnd(X), X
803  __ fsub(1); // rnd(X), X-rnd(X)
804  __ fxch(1); // X - rnd(X), rnd(X)
805  // F2XM1 calculates 2^st(0) - 1 for -1 < st(0) < 1
806  __ f2xm1(); // 2^(X-rnd(X)) - 1, rnd(X)
807  __ fld1(); // 1, 2^(X-rnd(X)) - 1, rnd(X)
808  __ faddp(1); // 2^(X-rnd(X)), rnd(X)
809  // FSCALE calculates st(0) * 2^st(1)
810  __ fscale(); // 2^X, rnd(X)
811  __ fstp(1);
812  // Bail out to runtime in case of exceptions in the status word.
813  __ fnstsw_ax();
814  __ testb(rax, Immediate(0x5F)); // Check for all but precision exception.
815  __ j(not_zero, &fast_power_failed, Label::kNear);
816  __ fstp_d(Operand(rsp, 0));
817  __ movsd(double_result, Operand(rsp, 0));
818  __ addp(rsp, Immediate(kDoubleSize));
819  __ jmp(&done);
820 
821  __ bind(&fast_power_failed);
822  __ fninit();
823  __ addp(rsp, Immediate(kDoubleSize));
824  __ jmp(&call_runtime);
825  }
826 
827  // Calculate power with integer exponent.
828  __ bind(&int_exponent);
829  const XMMRegister double_scratch2 = double_exponent;
830  // Back up exponent as we need to check if exponent is negative later.
831  __ movp(scratch, exponent); // Back up exponent.
832  __ movsd(double_scratch, double_base); // Back up base.
833  __ movsd(double_scratch2, double_result); // Load double_exponent with 1.
834 
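// The loop below evaluates base^|exponent| by binary exponentiation: exponent
// bits are shifted out from the least significant end, the running square is
// kept in double_scratch, and it is multiplied into double_result whenever a
// shifted-out bit is set. A negative exponent is handled afterwards by taking
// the reciprocal of the result.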
835  // Get absolute value of exponent.
836  Label no_neg, while_true, while_false;
837  __ testl(scratch, scratch);
838  __ j(positive, &no_neg, Label::kNear);
839  __ negl(scratch);
840  __ bind(&no_neg);
841 
842  __ j(zero, &while_false, Label::kNear);
843  __ shrl(scratch, Immediate(1));
844  // Above condition means CF==0 && ZF==0. This means that the
845  // bit that has been shifted out is 0 and the result is not 0.
846  __ j(above, &while_true, Label::kNear);
847  __ movsd(double_result, double_scratch);
848  __ j(zero, &while_false, Label::kNear);
849 
850  __ bind(&while_true);
851  __ shrl(scratch, Immediate(1));
852  __ mulsd(double_scratch, double_scratch);
853  __ j(above, &while_true, Label::kNear);
854  __ mulsd(double_result, double_scratch);
855  __ j(not_zero, &while_true);
856 
857  __ bind(&while_false);
858  // If the exponent is negative, return 1/result.
859  __ testl(exponent, exponent);
860  __ j(greater, &done);
861  __ divsd(double_scratch2, double_result);
862  __ movsd(double_result, double_scratch2);
863  // Test whether result is zero. Bail out to check for subnormal result.
864  // Due to subnormals, x^-y == (1/x)^y does not hold in all cases.
865  __ xorps(double_scratch2, double_scratch2);
866  __ ucomisd(double_scratch2, double_result);
867  // double_exponent aliased as double_scratch2 has already been overwritten
868  // and may not have contained the exponent value in the first place when the
869  // input was a smi. We reset it with exponent value before bailing out.
870  __ j(not_equal, &done);
871  __ Cvtlsi2sd(double_exponent, exponent);
872 
873  // Returning or bailing out.
874  Counters* counters = masm->isolate()->counters();
875  if (exponent_type_ == ON_STACK) {
876  // The arguments are still on the stack.
877  __ bind(&call_runtime);
878  __ TailCallRuntime(Runtime::kMath_pow_cfunction, 2, 1);
879 
880  // The stub is called from non-optimized code, which expects the result
881  // as heap number in rax.
882  __ bind(&done);
883  __ AllocateHeapNumber(rax, rcx, &call_runtime);
884  __ movsd(FieldOperand(rax, HeapNumber::kValueOffset), double_result);
885  __ IncrementCounter(counters->math_pow(), 1);
886  __ ret(2 * kPointerSize);
887  } else {
888  __ bind(&call_runtime);
889  // Move base to the correct argument register. Exponent is already in xmm1.
890  __ movsd(xmm0, double_base);
891  ASSERT(double_exponent.is(xmm1));
892  {
893  AllowExternalCallThatCantCauseGC scope(masm);
894  __ PrepareCallCFunction(2);
895  __ CallCFunction(
896  ExternalReference::power_double_double_function(masm->isolate()), 2);
897  }
898  // Return value is in xmm0.
899  __ movsd(double_result, xmm0);
900 
901  __ bind(&done);
902  __ IncrementCounter(counters->math_pow(), 1);
903  __ ret(0);
904  }
905 }
906 
907 
908 void FunctionPrototypeStub::Generate(MacroAssembler* masm) {
909  Label miss;
910  Register receiver;
911  if (kind() == Code::KEYED_LOAD_IC) {
912  // ----------- S t a t e -------------
913  // -- rax : key
914  // -- rdx : receiver
915  // -- rsp[0] : return address
916  // -----------------------------------
917  __ Cmp(rax, masm->isolate()->factory()->prototype_string());
918  __ j(not_equal, &miss);
919  receiver = rdx;
920  } else {
921  ASSERT(kind() == Code::LOAD_IC);
922  // ----------- S t a t e -------------
923  // -- rax : receiver
924  // -- rcx : name
925  // -- rsp[0] : return address
926  // -----------------------------------
927  receiver = rax;
928  }
929 
930  StubCompiler::GenerateLoadFunctionPrototype(masm, receiver, r8, r9, &miss);
931  __ bind(&miss);
932  StubCompiler::TailCallBuiltin(
933  masm, BaseLoadStoreStubCompiler::MissBuiltin(kind()));
934 }
935 
936 
937 void ArgumentsAccessStub::GenerateReadElement(MacroAssembler* masm) {
938  // The key is in rdx and the parameter count is in rax.
939 
940  // Check that the key is a smi.
941  Label slow;
942  __ JumpIfNotSmi(rdx, &slow);
943 
944  // Check if the calling frame is an arguments adaptor frame. We look at the
945  // context offset, and if the frame is not a regular one, then we find a
946  // Smi instead of the context. We can't use SmiCompare here, because that
947  // only works for comparing two smis.
948  Label adaptor;
949  __ movp(rbx, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
950  __ movp(rcx, Operand(rbx, StandardFrameConstants::kContextOffset));
951  __ Cmp(rcx, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
952  __ j(equal, &adaptor);
953 
954  // Check index against formal parameters count limit passed in
955  // through register rax. Use unsigned comparison to get negative
956  // check for free.
957  __ cmpp(rdx, rax);
958  __ j(above_equal, &slow);
959 
960  // Read the argument from the stack and return it.
961  __ SmiSub(rax, rax, rdx);
962  __ SmiToInteger32(rax, rax);
963  StackArgumentsAccessor args(rbp, rax, ARGUMENTS_DONT_CONTAIN_RECEIVER);
964  __ movp(rax, args.GetArgumentOperand(0));
965  __ Ret();
966 
967  // Arguments adaptor case: Check index against actual arguments
968  // limit found in the arguments adaptor frame. Use unsigned
969  // comparison to get negative check for free.
970  __ bind(&adaptor);
971  __ movp(rcx, Operand(rbx, ArgumentsAdaptorFrameConstants::kLengthOffset));
972  __ cmpp(rdx, rcx);
973  __ j(above_equal, &slow);
974 
975  // Read the argument from the stack and return it.
976  __ SmiSub(rcx, rcx, rdx);
977  __ SmiToInteger32(rcx, rcx);
978  StackArgumentsAccessor adaptor_args(rbx, rcx,
979  ARGUMENTS_DONT_CONTAIN_RECEIVER);
980  __ movp(rax, adaptor_args.GetArgumentOperand(0));
981  __ Ret();
982 
983  // Slow-case: Handle non-smi or out-of-bounds access to arguments
984  // by calling the runtime system.
985  __ bind(&slow);
986  __ PopReturnAddressTo(rbx);
987  __ Push(rdx);
988  __ PushReturnAddressFrom(rbx);
989  __ TailCallRuntime(Runtime::kGetArgumentsProperty, 1, 1);
990 }
991 
992 
993 void ArgumentsAccessStub::GenerateNewSloppyFast(MacroAssembler* masm) {
994  // Stack layout:
995  // rsp[0] : return address
996  // rsp[8] : number of parameters (tagged)
997  // rsp[16] : receiver displacement
998  // rsp[24] : function
999  // Registers used over the whole function:
1000  // rbx: the mapped parameter count (untagged)
1001  // rax: the allocated object (tagged).
1002 
1003  Factory* factory = masm->isolate()->factory();
1004 
1005  StackArgumentsAccessor args(rsp, 3, ARGUMENTS_DONT_CONTAIN_RECEIVER);
1006  __ SmiToInteger64(rbx, args.GetArgumentOperand(2));
1007  // rbx = parameter count (untagged)
1008 
1009  // Check if the calling frame is an arguments adaptor frame.
1010  Label runtime;
1011  Label adaptor_frame, try_allocate;
1012  __ movp(rdx, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
1013  __ movp(rcx, Operand(rdx, StandardFrameConstants::kContextOffset));
1014  __ Cmp(rcx, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
1015  __ j(equal, &adaptor_frame);
1016 
1017  // No adaptor, parameter count = argument count.
1018  __ movp(rcx, rbx);
1019  __ jmp(&try_allocate, Label::kNear);
1020 
1021  // We have an adaptor frame. Patch the parameters pointer.
1022  __ bind(&adaptor_frame);
1023  __ SmiToInteger64(rcx,
1024  Operand(rdx,
1025  ArgumentsAdaptorFrameConstants::kLengthOffset));
1026  __ leap(rdx, Operand(rdx, rcx, times_pointer_size,
1027  StandardFrameConstants::kCallerSPOffset));
1028  __ movp(args.GetArgumentOperand(1), rdx);
1029 
1030  // rbx = parameter count (untagged)
1031  // rcx = argument count (untagged)
1032  // Compute the mapped parameter count = min(rbx, rcx) in rbx.
1033  __ cmpp(rbx, rcx);
1034  __ j(less_equal, &try_allocate, Label::kNear);
1035  __ movp(rbx, rcx);
1036 
1037  __ bind(&try_allocate);
1038 
1039  // Compute the sizes of backing store, parameter map, and arguments object.
1040  // 1. Parameter map, has 2 extra words containing context and backing store.
1041  const int kParameterMapHeaderSize =
1042  FixedArray::kHeaderSize + 2 * kPointerSize;
1043  Label no_parameter_map;
1044  __ xorp(r8, r8);
1045  __ testp(rbx, rbx);
1046  __ j(zero, &no_parameter_map, Label::kNear);
1047  __ leap(r8, Operand(rbx, times_pointer_size, kParameterMapHeaderSize));
1048  __ bind(&no_parameter_map);
1049 
1050  // 2. Backing store.
1051  __ leap(r8, Operand(r8, rcx, times_pointer_size, FixedArray::kHeaderSize));
1052 
1053  // 3. Arguments object.
1054  __ addp(r8, Immediate(Heap::kSloppyArgumentsObjectSize));
1055 
1056  // Do the allocation of all three objects in one go.
1057  __ Allocate(r8, rax, rdx, rdi, &runtime, TAG_OBJECT);
1058 
1059  // rax = address of new object(s) (tagged)
1060  // rcx = argument count (untagged)
1061  // Get the arguments boilerplate from the current native context into rdi.
1062  Label has_mapped_parameters, copy;
1063  __ movp(rdi, Operand(rsi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
1064  __ movp(rdi, FieldOperand(rdi, GlobalObject::kNativeContextOffset));
1065  __ testp(rbx, rbx);
1066  __ j(not_zero, &has_mapped_parameters, Label::kNear);
1067 
1068  const int kIndex = Context::SLOPPY_ARGUMENTS_BOILERPLATE_INDEX;
1069  __ movp(rdi, Operand(rdi, Context::SlotOffset(kIndex)));
1070  __ jmp(&copy, Label::kNear);
1071 
1072  const int kAliasedIndex = Context::ALIASED_ARGUMENTS_BOILERPLATE_INDEX;
1073  __ bind(&has_mapped_parameters);
1074  __ movp(rdi, Operand(rdi, Context::SlotOffset(kAliasedIndex)));
1075  __ bind(&copy);
1076 
1077  // rax = address of new object (tagged)
1078  // rbx = mapped parameter count (untagged)
1079  // rcx = argument count (untagged)
1080  // rdi = address of boilerplate object (tagged)
1081  // Copy the JS object part.
1082  for (int i = 0; i < JSObject::kHeaderSize; i += kPointerSize) {
1083  __ movp(rdx, FieldOperand(rdi, i));
1084  __ movp(FieldOperand(rax, i), rdx);
1085  }
1086 
1087  // Set up the callee in-object property.
1089  __ movp(rdx, args.GetArgumentOperand(0));
1090  __ movp(FieldOperand(rax, JSObject::kHeaderSize +
1091  Heap::kArgumentsCalleeIndex * kPointerSize),
1092  rdx);
1093 
1094  // Use the length (smi tagged) and set that as an in-object property too.
1095  // Note: rcx is tagged from here on.
1097  __ Integer32ToSmi(rcx, rcx);
1098  __ movp(FieldOperand(rax, JSObject::kHeaderSize +
1099  Heap::kArgumentsLengthIndex * kPointerSize),
1100  rcx);
1101 
1102  // Set up the elements pointer in the allocated arguments object.
1103  // If we allocated a parameter map, edi will point there, otherwise to the
1104  // backing store.
1105  __ leap(rdi, Operand(rax, Heap::kSloppyArgumentsObjectSize));
1106  __ movp(FieldOperand(rax, JSObject::kElementsOffset), rdi);
1107 
1108  // rax = address of new object (tagged)
1109  // rbx = mapped parameter count (untagged)
1110  // rcx = argument count (tagged)
1111  // rdi = address of parameter map or backing store (tagged)
1112 
1113  // Initialize parameter map. If there are no mapped arguments, we're done.
1114  Label skip_parameter_map;
1115  __ testp(rbx, rbx);
1116  __ j(zero, &skip_parameter_map);
1117 
1118  __ LoadRoot(kScratchRegister, Heap::kSloppyArgumentsElementsMapRootIndex);
1119  // rbx contains the untagged argument count. Add 2 and tag to write.
1120  __ movp(FieldOperand(rdi, FixedArray::kMapOffset), kScratchRegister);
1121  __ Integer64PlusConstantToSmi(r9, rbx, 2);
1122  __ movp(FieldOperand(rdi, FixedArray::kLengthOffset), r9);
1123  __ movp(FieldOperand(rdi, FixedArray::kHeaderSize + 0 * kPointerSize), rsi);
1124  __ leap(r9, Operand(rdi, rbx, times_pointer_size, kParameterMapHeaderSize));
1125  __ movp(FieldOperand(rdi, FixedArray::kHeaderSize + 1 * kPointerSize), r9);
1126 
1127  // Copy the parameter slots and the holes in the arguments.
1128  // We need to fill in mapped_parameter_count slots. They index the context,
1129  // where parameters are stored in reverse order, at
1130  // MIN_CONTEXT_SLOTS .. MIN_CONTEXT_SLOTS+parameter_count-1
1131  // The mapped parameters thus need to get indices
1132  // MIN_CONTEXT_SLOTS+parameter_count-1 ..
1133  // MIN_CONTEXT_SLOTS+parameter_count-mapped_parameter_count
1134  // We loop from right to left.
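// For example, with parameter_count == 4 and mapped_parameter_count == 2,
// the loop below writes context indices MIN_CONTEXT_SLOTS + 3 and
// MIN_CONTEXT_SLOTS + 2 into the parameter map.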
1135  Label parameters_loop, parameters_test;
1136 
1137  // Load tagged parameter count into r9.
1138  __ Integer32ToSmi(r9, rbx);
1139  __ Move(r8, Smi::FromInt(Context::MIN_CONTEXT_SLOTS));
1140  __ addp(r8, args.GetArgumentOperand(2));
1141  __ subp(r8, r9);
1142  __ Move(r11, factory->the_hole_value());
1143  __ movp(rdx, rdi);
1144  __ leap(rdi, Operand(rdi, rbx, times_pointer_size, kParameterMapHeaderSize));
1145  // r9 = loop variable (tagged)
1146  // r8 = mapping index (tagged)
1147  // r11 = the hole value
1148  // rdx = address of parameter map (tagged)
1149  // rdi = address of backing store (tagged)
1150  __ jmp(&parameters_test, Label::kNear);
1151 
1152  __ bind(&parameters_loop);
1153  __ SmiSubConstant(r9, r9, Smi::FromInt(1));
1154  __ SmiToInteger64(kScratchRegister, r9);
1155  __ movp(FieldOperand(rdx, kScratchRegister,
1156  times_pointer_size,
1157  kParameterMapHeaderSize),
1158  r8);
1159  __ movp(FieldOperand(rdi, kScratchRegister,
1160  times_pointer_size,
1161  FixedArray::kHeaderSize),
1162  r11);
1163  __ SmiAddConstant(r8, r8, Smi::FromInt(1));
1164  __ bind(&parameters_test);
1165  __ SmiTest(r9);
1166  __ j(not_zero, &parameters_loop, Label::kNear);
1167 
1168  __ bind(&skip_parameter_map);
1169 
1170  // rcx = argument count (tagged)
1171  // rdi = address of backing store (tagged)
1172  // Copy arguments header and remaining slots (if there are any).
1173  __ Move(FieldOperand(rdi, FixedArray::kMapOffset),
1174  factory->fixed_array_map());
1175  __ movp(FieldOperand(rdi, FixedArray::kLengthOffset), rcx);
1176 
1177  Label arguments_loop, arguments_test;
1178  __ movp(r8, rbx);
1179  __ movp(rdx, args.GetArgumentOperand(1));
1180  // Untag rcx for the loop below.
1181  __ SmiToInteger64(rcx, rcx);
1182  __ leap(kScratchRegister, Operand(r8, times_pointer_size, 0));
1183  __ subp(rdx, kScratchRegister);
1184  __ jmp(&arguments_test, Label::kNear);
1185 
1186  __ bind(&arguments_loop);
1187  __ subp(rdx, Immediate(kPointerSize));
1188  __ movp(r9, Operand(rdx, 0));
1189  __ movp(FieldOperand(rdi, r8,
1190  times_pointer_size,
1191  FixedArray::kHeaderSize),
1192  r9);
1193  __ addp(r8, Immediate(1));
1194 
1195  __ bind(&arguments_test);
1196  __ cmpp(r8, rcx);
1197  __ j(less, &arguments_loop, Label::kNear);
1198 
1199  // Return and remove the on-stack parameters.
1200  __ ret(3 * kPointerSize);
1201 
1202  // Do the runtime call to allocate the arguments object.
1203  // rcx = argument count (untagged)
1204  __ bind(&runtime);
1205  __ Integer32ToSmi(rcx, rcx);
1206  __ movp(args.GetArgumentOperand(2), rcx); // Patch argument count.
1207  __ TailCallRuntime(Runtime::kHiddenNewArgumentsFast, 3, 1);
1208 }
1209 
1210 
1211 void ArgumentsAccessStub::GenerateNewSloppySlow(MacroAssembler* masm) {
1212  // rsp[0] : return address
1213  // rsp[8] : number of parameters
1214  // rsp[16] : receiver displacement
1215  // rsp[24] : function
1216 
1217  // Check if the calling frame is an arguments adaptor frame.
1218  Label runtime;
1219  __ movp(rdx, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
1220  __ movp(rcx, Operand(rdx, StandardFrameConstants::kContextOffset));
1221  __ Cmp(rcx, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
1222  __ j(not_equal, &runtime);
1223 
1224  // Patch the arguments.length and the parameters pointer.
1225  StackArgumentsAccessor args(rsp, 3, ARGUMENTS_DONT_CONTAIN_RECEIVER);
1226  __ movp(rcx, Operand(rdx, ArgumentsAdaptorFrameConstants::kLengthOffset));
1227  __ movp(args.GetArgumentOperand(2), rcx);
1228  __ SmiToInteger64(rcx, rcx);
1229  __ leap(rdx, Operand(rdx, rcx, times_pointer_size,
1230  StandardFrameConstants::kCallerSPOffset));
1231  __ movp(args.GetArgumentOperand(1), rdx);
1232 
1233  __ bind(&runtime);
1234  __ TailCallRuntime(Runtime::kHiddenNewArgumentsFast, 3, 1);
1235 }
1236 
1237 
1238 void ArgumentsAccessStub::GenerateNewStrict(MacroAssembler* masm) {
1239  // rsp[0] : return address
1240  // rsp[8] : number of parameters
1241  // rsp[16] : receiver displacement
1242  // rsp[24] : function
1243 
1244  // Check if the calling frame is an arguments adaptor frame.
1245  Label adaptor_frame, try_allocate, runtime;
1246  __ movp(rdx, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
1247  __ movp(rcx, Operand(rdx, StandardFrameConstants::kContextOffset));
1248  __ Cmp(rcx, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
1249  __ j(equal, &adaptor_frame);
1250 
1251  // Get the length from the frame.
1252  StackArgumentsAccessor args(rsp, 3, ARGUMENTS_DONT_CONTAIN_RECEIVER);
1253  __ movp(rcx, args.GetArgumentOperand(2));
1254  __ SmiToInteger64(rcx, rcx);
1255  __ jmp(&try_allocate);
1256 
1257  // Patch the arguments.length and the parameters pointer.
1258  __ bind(&adaptor_frame);
1259  __ movp(rcx, Operand(rdx, ArgumentsAdaptorFrameConstants::kLengthOffset));
1260  __ movp(args.GetArgumentOperand(2), rcx);
1261  __ SmiToInteger64(rcx, rcx);
1262  __ leap(rdx, Operand(rdx, rcx, times_pointer_size,
1263  StandardFrameConstants::kCallerSPOffset));
1264  __ movp(args.GetArgumentOperand(1), rdx);
1265 
1266  // Try the new space allocation. Start out with computing the size of
1267  // the arguments object and the elements array.
1268  Label add_arguments_object;
1269  __ bind(&try_allocate);
1270  __ testp(rcx, rcx);
1271  __ j(zero, &add_arguments_object, Label::kNear);
1272  __ leap(rcx, Operand(rcx, times_pointer_size, FixedArray::kHeaderSize));
1273  __ bind(&add_arguments_object);
1274  __ addp(rcx, Immediate(Heap::kStrictArgumentsObjectSize));
1275 
1276  // Do the allocation of both objects in one go.
1277  __ Allocate(rcx, rax, rdx, rbx, &runtime, TAG_OBJECT);
1278 
1279  // Get the arguments boilerplate from the current native context.
1280  __ movp(rdi, Operand(rsi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
1281  __ movp(rdi, FieldOperand(rdi, GlobalObject::kNativeContextOffset));
1282  const int offset =
1283  Context::SlotOffset(Context::STRICT_ARGUMENTS_BOILERPLATE_INDEX);
1284  __ movp(rdi, Operand(rdi, offset));
1285 
1286  // Copy the JS object part.
1287  for (int i = 0; i < JSObject::kHeaderSize; i += kPointerSize) {
1288  __ movp(rbx, FieldOperand(rdi, i));
1289  __ movp(FieldOperand(rax, i), rbx);
1290  }
1291 
1292  // Get the length (smi tagged) and set that as an in-object property too.
1294  __ movp(rcx, args.GetArgumentOperand(2));
1295  __ movp(FieldOperand(rax, JSObject::kHeaderSize +
1296  Heap::kArgumentsLengthIndex * kPointerSize),
1297  rcx);
1298 
1299  // If there are no actual arguments, we're done.
1300  Label done;
1301  __ testp(rcx, rcx);
1302  __ j(zero, &done);
1303 
1304  // Get the parameters pointer from the stack.
1305  __ movp(rdx, args.GetArgumentOperand(1));
1306 
1307  // Set up the elements pointer in the allocated arguments object and
1308  // initialize the header in the elements fixed array.
1309  __ leap(rdi, Operand(rax, Heap::kStrictArgumentsObjectSize));
1310  __ movp(FieldOperand(rax, JSObject::kElementsOffset), rdi);
1311  __ LoadRoot(kScratchRegister, Heap::kFixedArrayMapRootIndex);
1312  __ movp(FieldOperand(rdi, FixedArray::kMapOffset), kScratchRegister);
1313 
1314 
1315  __ movp(FieldOperand(rdi, FixedArray::kLengthOffset), rcx);
1316  // Untag the length for the loop below.
1317  __ SmiToInteger64(rcx, rcx);
1318 
1319  // Copy the fixed array slots.
1320  Label loop;
1321  __ bind(&loop);
1322  __ movp(rbx, Operand(rdx, -1 * kPointerSize)); // Skip receiver.
1323  __ movp(Operand(rdi, 0), rbx);
1324  __ addp(rdi, Immediate(kPointerSize));
1325  __ subp(rdx, Immediate(kPointerSize));
1326  __ decp(rcx);
1327  __ j(not_zero, &loop);
1328 
1329  // Return and remove the on-stack parameters.
1330  __ bind(&done);
1331  __ ret(3 * kPointerSize);
1332 
1333  // Do the runtime call to allocate the arguments object.
1334  __ bind(&runtime);
1335  __ TailCallRuntime(Runtime::kHiddenNewStrictArgumentsFast, 3, 1);
1336 }
1337 
1338 
1339 void RegExpExecStub::Generate(MacroAssembler* masm) {
1340  // Just jump directly to runtime if native RegExp is not selected at compile
1341  // time, or if the regexp entry in generated code is turned off by a runtime
1342  // switch or at compilation.
1343 #ifdef V8_INTERPRETED_REGEXP
1344  __ TailCallRuntime(Runtime::kHiddenRegExpExec, 4, 1);
1345 #else // V8_INTERPRETED_REGEXP
1346 
1347  // Stack frame on entry.
1348  // rsp[0] : return address
1349  // rsp[8] : last_match_info (expected JSArray)
1350  // rsp[16] : previous index
1351  // rsp[24] : subject string
1352  // rsp[32] : JSRegExp object
1353 
1354  enum RegExpExecStubArgumentIndices {
1355  JS_REG_EXP_OBJECT_ARGUMENT_INDEX,
1356  SUBJECT_STRING_ARGUMENT_INDEX,
1357  PREVIOUS_INDEX_ARGUMENT_INDEX,
1358  LAST_MATCH_INFO_ARGUMENT_INDEX,
1359  REG_EXP_EXEC_ARGUMENT_COUNT
1360  };
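// These indices are used with the StackArgumentsAccessor below: index 0
// corresponds to the argument pushed first (the JSRegExp object at rsp[32]
// in the layout above) and index 3 to last_match_info at rsp[8].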
1361 
1362  StackArgumentsAccessor args(rsp, REG_EXP_EXEC_ARGUMENT_COUNT,
1363  ARGUMENTS_DONT_CONTAIN_RECEIVER);
1364  Label runtime;
1365  // Ensure that a RegExp stack is allocated.
1366  Isolate* isolate = masm->isolate();
1367  ExternalReference address_of_regexp_stack_memory_address =
1368  ExternalReference::address_of_regexp_stack_memory_address(isolate);
1369  ExternalReference address_of_regexp_stack_memory_size =
1370  ExternalReference::address_of_regexp_stack_memory_size(isolate);
1371  __ Load(kScratchRegister, address_of_regexp_stack_memory_size);
1372  __ testp(kScratchRegister, kScratchRegister);
1373  __ j(zero, &runtime);
1374 
1375  // Check that the first argument is a JSRegExp object.
1376  __ movp(rax, args.GetArgumentOperand(JS_REG_EXP_OBJECT_ARGUMENT_INDEX));
1377  __ JumpIfSmi(rax, &runtime);
1378  __ CmpObjectType(rax, JS_REGEXP_TYPE, kScratchRegister);
1379  __ j(not_equal, &runtime);
1380 
1381  // Check that the RegExp has been compiled (data contains a fixed array).
1382  __ movp(rax, FieldOperand(rax, JSRegExp::kDataOffset));
1383  if (FLAG_debug_code) {
1384  Condition is_smi = masm->CheckSmi(rax);
1385  __ Check(NegateCondition(is_smi),
1386  kUnexpectedTypeForRegExpDataFixedArrayExpected);
1387  __ CmpObjectType(rax, FIXED_ARRAY_TYPE, kScratchRegister);
1388  __ Check(equal, kUnexpectedTypeForRegExpDataFixedArrayExpected);
1389  }
1390 
1391  // rax: RegExp data (FixedArray)
1392  // Check the type of the RegExp. Only continue if type is JSRegExp::IRREGEXP.
1393  __ SmiToInteger32(rbx, FieldOperand(rax, JSRegExp::kDataTagOffset));
1394  __ cmpl(rbx, Immediate(JSRegExp::IRREGEXP));
1395  __ j(not_equal, &runtime);
1396 
1397  // rax: RegExp data (FixedArray)
1398  // Check that the number of captures fit in the static offsets vector buffer.
1399  __ SmiToInteger32(rdx,
1400  FieldOperand(rax, JSRegExp::kIrregexpCaptureCountOffset));
1401  // Check (number_of_captures + 1) * 2 <= offsets vector size
1402  // Or number_of_captures <= offsets vector size / 2 - 1
1404  __ cmpl(rdx, Immediate(Isolate::kJSRegexpStaticOffsetsVectorSize / 2 - 1));
1405  __ j(above, &runtime);
1406 
1407  // Reset offset for possibly sliced string.
1408  __ Set(r14, 0);
1409  __ movp(rdi, args.GetArgumentOperand(SUBJECT_STRING_ARGUMENT_INDEX));
1410  __ JumpIfSmi(rdi, &runtime);
1411  __ movp(r15, rdi); // Make a copy of the original subject string.
1412  __ movp(rbx, FieldOperand(rdi, HeapObject::kMapOffset));
1413  __ movzxbl(rbx, FieldOperand(rbx, Map::kInstanceTypeOffset));
1414  // rax: RegExp data (FixedArray)
1415  // rdi: subject string
1416  // r15: subject string
1417  // Handle subject string according to its encoding and representation:
1418  // (1) Sequential two byte? If yes, go to (9).
1419  // (2) Sequential one byte? If yes, go to (6).
1420  // (3) Anything but sequential or cons? If yes, go to (7).
1421  // (4) Cons string. If the string is flat, replace subject with first string.
1422  // Otherwise bailout.
1423  // (5a) Is subject sequential two byte? If yes, go to (9).
1424  // (5b) Is subject external? If yes, go to (8).
1425  // (6) One byte sequential. Load regexp code for one byte.
1426  // (E) Carry on.
1428 
1429  // Deferred code at the end of the stub:
1430  // (7) Not a long external string? If yes, go to (10).
1431  // (8) External string. Make it, offset-wise, look like a sequential string.
1432  // (8a) Is the external string one byte? If yes, go to (6).
1433 // (9) Two byte sequential. Load regexp code for two byte. Go to (E).
1434  // (10) Short external string or not a string? If yes, bail out to runtime.
1435  // (11) Sliced string. Replace subject with parent. Go to (5a).
1436 
1437  Label seq_one_byte_string /* 6 */, seq_two_byte_string /* 9 */,
1438  external_string /* 8 */, check_underlying /* 5a */,
1439  not_seq_nor_cons /* 7 */, check_code /* E */,
1440  not_long_external /* 10 */;
1441 
1442  // (1) Sequential two byte? If yes, go to (9).
1443  __ andb(rbx, Immediate(kIsNotStringMask |
1444  kStringRepresentationMask |
1445  kStringEncodingMask |
1446  kShortExternalStringMask));
1448  __ j(zero, &seq_two_byte_string); // Go to (9).
1449 
1450  // (2) Sequential one byte? If yes, go to (6).
1451  // Any other sequential string must be one byte.
1452  __ andb(rbx, Immediate(kIsNotStringMask |
1453  kStringRepresentationMask |
1454  kShortExternalStringMask));
1455  __ j(zero, &seq_one_byte_string, Label::kNear); // Go to (6).
1456 
1457  // (3) Anything but sequential or cons? If yes, go to (7).
1458  // We check whether the subject string is a cons, since sequential strings
1459  // have already been covered.
1464  __ cmpp(rbx, Immediate(kExternalStringTag));
1465  __ j(greater_equal, &not_seq_nor_cons); // Go to (7).
1466 
1467  // (4) Cons string. Check that it's flat.
1468  // Replace subject with first string and reload instance type.
1469  __ CompareRoot(FieldOperand(rdi, ConsString::kSecondOffset),
1470  Heap::kempty_stringRootIndex);
1471  __ j(not_equal, &runtime);
1472  __ movp(rdi, FieldOperand(rdi, ConsString::kFirstOffset));
1473  __ bind(&check_underlying);
1474  __ movp(rbx, FieldOperand(rdi, HeapObject::kMapOffset));
1475  __ movzxbl(rbx, FieldOperand(rbx, Map::kInstanceTypeOffset));
1476 
1477  // (5a) Is subject sequential two byte? If yes, go to (9).
1478  __ testb(rbx, Immediate(kStringRepresentationMask | kStringEncodingMask));
1480  __ j(zero, &seq_two_byte_string); // Go to (9).
1481  // (5b) Is subject external? If yes, go to (8).
1482  __ testb(rbx, Immediate(kStringRepresentationMask));
1483  // The underlying external string is never a short external string.
1486  __ j(not_zero, &external_string); // Go to (8)
1487 
1488  // (6) One byte sequential. Load regexp code for one byte.
1489  __ bind(&seq_one_byte_string);
1490  // rax: RegExp data (FixedArray)
1491  __ movp(r11, FieldOperand(rax, JSRegExp::kDataAsciiCodeOffset));
1492  __ Set(rcx, 1); // Type is one byte.
1493 
1494  // (E) Carry on. String handling is done.
1495  __ bind(&check_code);
1496  // r11: irregexp code
1497  // Check that the irregexp code has been generated for the actual string
1498  // encoding. If it has, the field contains a code object; otherwise it contains
1499  // a smi (code flushing support).
1500  __ JumpIfSmi(r11, &runtime);
1501 
1502  // rdi: sequential subject string (or look-alike, external string)
1503  // r15: original subject string
1504  // rcx: encoding of subject string (1 if ASCII, 0 if two_byte);
1505  // r11: code
1506  // Load used arguments before starting to push arguments for call to native
1507  // RegExp code to avoid handling changing stack height.
1508  // We have to use r15 instead of rdi to load the length because rdi might
1509  // have been only made to look like a sequential string when it actually
1510  // is an external string.
1511  __ movp(rbx, args.GetArgumentOperand(PREVIOUS_INDEX_ARGUMENT_INDEX));
1512  __ JumpIfNotSmi(rbx, &runtime);
1513  __ SmiCompare(rbx, FieldOperand(r15, String::kLengthOffset));
1514  __ j(above_equal, &runtime);
1515  __ SmiToInteger64(rbx, rbx);
1516 
1517  // rdi: subject string
1518  // rbx: previous index
1519  // rcx: encoding of subject string (1 if ASCII 0 if two_byte);
1520  // r11: code
1521  // All checks done. Now push arguments for native regexp code.
1522  Counters* counters = masm->isolate()->counters();
1523  __ IncrementCounter(counters->regexp_entry_native(), 1);
1524 
1525  // Isolates: note we add an additional parameter here (isolate pointer).
1526  static const int kRegExpExecuteArguments = 9;
1527  int argument_slots_on_stack =
1528  masm->ArgumentStackSlotsForCFunctionCall(kRegExpExecuteArguments);
1529  __ EnterApiExitFrame(argument_slots_on_stack);
1530 
1531  // Argument 9: Pass current isolate address.
1532  __ LoadAddress(kScratchRegister,
1533  ExternalReference::isolate_address(masm->isolate()));
1534  __ movq(Operand(rsp, (argument_slots_on_stack - 1) * kRegisterSize),
1535  kScratchRegister);
1536 
1537  // Argument 8: Indicate that this is a direct call from JavaScript.
1538  __ movq(Operand(rsp, (argument_slots_on_stack - 2) * kRegisterSize),
1539  Immediate(1));
1540 
1541  // Argument 7: Start (high end) of backtracking stack memory area.
1542  __ Move(kScratchRegister, address_of_regexp_stack_memory_address);
1543  __ movp(r9, Operand(kScratchRegister, 0));
1544  __ Move(kScratchRegister, address_of_regexp_stack_memory_size);
1545  __ addp(r9, Operand(kScratchRegister, 0));
1546  __ movq(Operand(rsp, (argument_slots_on_stack - 3) * kRegisterSize), r9);
1547 
1548  // Argument 6: Set the number of capture registers to zero to force global
1549  // regexps to behave as non-global. This does not affect non-global regexps.
1550  // Argument 6 is passed in r9 on Linux and on the stack on Windows.
1551 #ifdef _WIN64
1552  __ movq(Operand(rsp, (argument_slots_on_stack - 4) * kRegisterSize),
1553  Immediate(0));
1554 #else
1555  __ Set(r9, 0);
1556 #endif
1557 
1558  // Argument 5: static offsets vector buffer.
1559  __ LoadAddress(r8,
1560  ExternalReference::address_of_static_offsets_vector(isolate));
1561  // Argument 5 passed in r8 on Linux and on the stack on Windows.
1562 #ifdef _WIN64
1563  __ movq(Operand(rsp, (argument_slots_on_stack - 5) * kRegisterSize), r8);
1564 #endif
1565 
1566  // rdi: subject string
1567  // rbx: previous index
1568  // rcx: encoding of subject string (1 if ASCII 0 if two_byte);
1569  // r11: code
1570  // r14: slice offset
1571  // r15: original subject string
1572 
1573  // Argument 2: Previous index.
1574  __ movp(arg_reg_2, rbx);
1575 
1576  // Argument 4: End of string data
1577  // Argument 3: Start of string data
1578  Label setup_two_byte, setup_rest, got_length, length_not_from_slice;
1579  // Prepare start and end index of the input.
1580  // Load the length from the original sliced string if that is the case.
1581  __ addp(rbx, r14);
1582  __ SmiToInteger32(arg_reg_3, FieldOperand(r15, String::kLengthOffset));
1583  __ addp(r14, arg_reg_3); // Using arg3 as scratch.
1584 
1585  // rbx: start index of the input
1586  // r14: end index of the input
1587  // r15: original subject string
1588  __ testb(rcx, rcx); // Last use of rcx as encoding of subject string.
1589  __ j(zero, &setup_two_byte, Label::kNear);
1590  __ leap(arg_reg_4,
1591          FieldOperand(rdi, r14, times_1, SeqOneByteString::kHeaderSize));
1592  __ leap(arg_reg_3,
1593          FieldOperand(rdi, rbx, times_1, SeqOneByteString::kHeaderSize));
1594  __ jmp(&setup_rest, Label::kNear);
1595  __ bind(&setup_two_byte);
1596  __ leap(arg_reg_4,
1597          FieldOperand(rdi, r14, times_2, SeqTwoByteString::kHeaderSize));
1598  __ leap(arg_reg_3,
1599          FieldOperand(rdi, rbx, times_2, SeqTwoByteString::kHeaderSize));
1600  __ bind(&setup_rest);
1601 
1602  // Argument 1: Original subject string.
1603  // The original subject string was saved in r15 while the subject register
1604  // rdi was being adjusted above, so the unmodified string can simply be
1605  // passed from r15 instead of being reloaded from the caller's stack
1606  // frame.
1607  __ movp(arg_reg_1, r15);
1608 
1609  // Locate the code entry and call it.
1610  __ addp(r11, Immediate(Code::kHeaderSize - kHeapObjectTag));
1611  __ call(r11);
1612 
1613  __ LeaveApiExitFrame(true);
1614 
1615  // Check the result.
1616  Label success;
1617  Label exception;
1618  __ cmpl(rax, Immediate(1));
1619  // We expect exactly one result since we force the called regexp to behave
1620  // as non-global.
1621  __ j(equal, &success, Label::kNear);
1622  __ cmpl(rax, Immediate(NativeRegExpMacroAssembler::EXCEPTION));
1623  __ j(equal, &exception);
1624  __ cmpl(rax, Immediate(NativeRegExpMacroAssembler::FAILURE));
1625  // If none of the above, it can only be retry.
1626  // Handle that in the runtime system.
1627  __ j(not_equal, &runtime);
1628 
1629  // For failure return null.
1630  __ LoadRoot(rax, Heap::kNullValueRootIndex);
1631  __ ret(REG_EXP_EXEC_ARGUMENT_COUNT * kPointerSize);
1632 
1633  // Load RegExp data.
1634  __ bind(&success);
1635  __ movp(rax, args.GetArgumentOperand(JS_REG_EXP_OBJECT_ARGUMENT_INDEX));
1636  __ movp(rcx, FieldOperand(rax, JSRegExp::kDataOffset));
1637  __ SmiToInteger32(rax,
1638                    FieldOperand(rcx, JSRegExp::kIrregexpCaptureCountOffset));
1639  // Calculate number of capture registers (number_of_captures + 1) * 2.
1640  __ leal(rdx, Operand(rax, rax, times_1, 2));
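  // Example: a regexp with two capture groups has rax == 2 here, so rdx
  // becomes (2 + 1) * 2 = 6 offsets: one start/end pair for the whole match
  // plus one pair per capture group.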
1641 
1642  // rdx: Number of capture registers
1643  // Check that the fourth object is a JSArray object.
1644  __ movp(r15, args.GetArgumentOperand(LAST_MATCH_INFO_ARGUMENT_INDEX));
1645  __ JumpIfSmi(r15, &runtime);
1646  __ CmpObjectType(r15, JS_ARRAY_TYPE, kScratchRegister);
1647  __ j(not_equal, &runtime);
1648  // Check that the JSArray is in fast case.
1649  __ movp(rbx, FieldOperand(r15, JSArray::kElementsOffset));
1650  __ movp(rax, FieldOperand(rbx, HeapObject::kMapOffset));
1651  __ CompareRoot(rax, Heap::kFixedArrayMapRootIndex);
1652  __ j(not_equal, &runtime);
1653  // Check that the last match info has space for the capture registers and the
1654  // additional information. Ensure no overflow in add.
1656  __ SmiToInteger32(rax, FieldOperand(rbx, FixedArray::kLengthOffset));
1657  __ subl(rax, Immediate(RegExpImpl::kLastMatchOverhead));
1658  __ cmpl(rdx, rax);
1659  __ j(greater, &runtime);
1660 
1661  // rbx: last_match_info backing store (FixedArray)
1662  // rdx: number of capture registers
1663  // Store the capture count.
1664  __ Integer32ToSmi(kScratchRegister, rdx);
1665  __ movp(FieldOperand(rbx, RegExpImpl::kLastCaptureCountOffset),
1666          kScratchRegister);
1667  // Store last subject and last input.
1668  __ movp(rax, args.GetArgumentOperand(SUBJECT_STRING_ARGUMENT_INDEX));
1669  __ movp(FieldOperand(rbx, RegExpImpl::kLastSubjectOffset), rax);
1670  __ movp(rcx, rax);
1671  __ RecordWriteField(rbx,
1672                      RegExpImpl::kLastSubjectOffset,
1673                      rax,
1674                      rdi,
1675                      kDontSaveFPRegs);
1676  __ movp(rax, rcx);
1677  __ movp(FieldOperand(rbx, RegExpImpl::kLastInputOffset), rax);
1678  __ RecordWriteField(rbx,
1679                      RegExpImpl::kLastInputOffset,
1680                      rax,
1681                      rdi,
1682                      kDontSaveFPRegs);
1683 
1684  // Get the static offsets vector filled by the native regexp code.
1685  __ LoadAddress(rcx,
1686  ExternalReference::address_of_static_offsets_vector(isolate));
1687 
1688  // rbx: last_match_info backing store (FixedArray)
1689  // rcx: offsets vector
1690  // rdx: number of capture registers
1691  Label next_capture, done;
1692  // Capture register counter starts from number of capture registers and
1693  // counts down until wrapping after zero.
1694  __ bind(&next_capture);
1695  __ subp(rdx, Immediate(1));
1696  __ j(negative, &done, Label::kNear);
1697  // Read the value from the static offsets vector buffer and make it a smi.
1698  __ movl(rdi, Operand(rcx, rdx, times_int_size, 0));
1699  __ Integer32ToSmi(rdi, rdi);
1700  // Store the smi value in the last match info.
1701  __ movp(FieldOperand(rbx,
1702                       rdx,
1703                       times_pointer_size,
1704                       RegExpImpl::kFirstCaptureOffset),
1705          rdi);
1706  __ jmp(&next_capture);
1707  __ bind(&done);
1708 
1709  // Return last match info.
1710  __ movp(rax, r15);
1711  __ ret(REG_EXP_EXEC_ARGUMENT_COUNT * kPointerSize);
1712 
1713  __ bind(&exception);
1714  // Result must now be exception. If there is no pending exception already, a
1715  // stack overflow (on the backtrack stack) was detected in the RegExp code
1716  // but the exception has not been created yet. Handle that in the runtime system.
1717  // TODO(592): Rerun the RegExp to get the stack overflow exception.
1718  ExternalReference pending_exception_address(
1719  Isolate::kPendingExceptionAddress, isolate);
1720  Operand pending_exception_operand =
1721  masm->ExternalOperand(pending_exception_address, rbx);
1722  __ movp(rax, pending_exception_operand);
1723  __ LoadRoot(rdx, Heap::kTheHoleValueRootIndex);
1724  __ cmpp(rax, rdx);
1725  __ j(equal, &runtime);
1726  __ movp(pending_exception_operand, rdx);
1727 
1728  __ CompareRoot(rax, Heap::kTerminationExceptionRootIndex);
1729  Label termination_exception;
1730  __ j(equal, &termination_exception, Label::kNear);
1731  __ Throw(rax);
1732 
1733  __ bind(&termination_exception);
1734  __ ThrowUncatchable(rax);
1735 
1736  // Do the runtime call to execute the regexp.
1737  __ bind(&runtime);
1738  __ TailCallRuntime(Runtime::kHiddenRegExpExec, 4, 1);
1739 
1740  // Deferred code for string handling.
1741  // (7) Not a long external string? If yes, go to (10).
1742  __ bind(&not_seq_nor_cons);
1743  // Compare flags are still set from (3).
1744  __ j(greater, &not_long_external, Label::kNear); // Go to (10).
1745 
1746  // (8) External string. Short external strings have been ruled out.
1747  __ bind(&external_string);
1748  __ movp(rbx, FieldOperand(rdi, HeapObject::kMapOffset));
1749  __ movzxbl(rbx, FieldOperand(rbx, Map::kInstanceTypeOffset));
1750  if (FLAG_debug_code) {
1751  // Assert that we do not have a cons or slice (indirect strings) here.
1752  // Sequential strings have already been ruled out.
1753  __ testb(rbx, Immediate(kIsIndirectStringMask));
1754  __ Assert(zero, kExternalStringExpectedButNotFound);
1755  }
1757  // Move the pointer so that offset-wise, it looks like a sequential string.
1759  __ subp(rdi, Immediate(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
1761  // (8a) Is the external string one byte? If yes, go to (6).
1762  __ testb(rbx, Immediate(kStringEncodingMask));
1763  __ j(not_zero, &seq_one_byte_string); // Goto (6).
1764 
1765  // rdi: subject string (flat two-byte)
1766  // rax: RegExp data (FixedArray)
1767  // (9) Two byte sequential. Load the two-byte regexp code. Go to (E).
1768  __ bind(&seq_two_byte_string);
1769  __ movp(r11, FieldOperand(rax, JSRegExp::kDataUC16CodeOffset));
1770  __ Set(rcx, 0);  // Type is two byte.
1771  __ jmp(&check_code); // Go to (E).
1772 
1773  // (10) Not a string or a short external string? If yes, bail out to runtime.
1774  __ bind(&not_long_external);
1775  // Catch non-string subject or short external string.
1777  __ testb(rbx, Immediate(kIsNotStringMask | kShortExternalStringMask));
1778  __ j(not_zero, &runtime);
1779 
1780  // (11) Sliced string. Replace subject with parent. Go to (5a).
1781  // Load offset into r14 and replace subject string with parent.
1782  __ SmiToInteger32(r14, FieldOperand(rdi, SlicedString::kOffsetOffset));
1783  __ movp(rdi, FieldOperand(rdi, SlicedString::kParentOffset));
1784  __ jmp(&check_underlying);
1785 #endif // V8_INTERPRETED_REGEXP
1786 }
1787 
1788 
1789 static int NegativeComparisonResult(Condition cc) {
1790  ASSERT(cc != equal);
1791  ASSERT((cc == less) || (cc == less_equal)
1792  || (cc == greater) || (cc == greater_equal));
1793  return (cc == greater || cc == greater_equal) ? LESS : GREATER;
1794 }
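// The returned constant is whatever makes the comparison come out false once
// the caller tests the result against cc. For example, "undefined > 1"
// reaches the undefined check in GenerateGeneric below with cc == greater;
// NegativeComparisonResult(greater) is LESS, so the stub returns a negative
// value and the expression correctly evaluates to false.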
1795 
1796 
1797 static void CheckInputType(MacroAssembler* masm,
1798  Register input,
1799  CompareIC::State expected,
1800  Label* fail) {
1801  Label ok;
1802  if (expected == CompareIC::SMI) {
1803  __ JumpIfNotSmi(input, fail);
1804  } else if (expected == CompareIC::NUMBER) {
1805  __ JumpIfSmi(input, &ok);
1806  __ CompareMap(input, masm->isolate()->factory()->heap_number_map());
1807  __ j(not_equal, fail);
1808  }
1809  // We could be strict about internalized/non-internalized here, but as long as
1810  // hydrogen doesn't care, the stub doesn't have to care either.
1811  __ bind(&ok);
1812 }
1813 
1814 
1815 static void BranchIfNotInternalizedString(MacroAssembler* masm,
1816  Label* label,
1817  Register object,
1818  Register scratch) {
1819  __ JumpIfSmi(object, label);
1820  __ movp(scratch, FieldOperand(object, HeapObject::kMapOffset));
1821  __ movzxbp(scratch,
1822             FieldOperand(scratch, Map::kInstanceTypeOffset));
1824  __ testb(scratch, Immediate(kIsNotStringMask | kIsNotInternalizedMask));
1825  __ j(not_zero, label);
1826 }
1827 
1828 
1829 void ICCompareStub::GenerateGeneric(MacroAssembler* masm) {
1830  Label check_unequal_objects, done;
1831  Condition cc = GetCondition();
1832  Factory* factory = masm->isolate()->factory();
1833 
1834  Label miss;
1835  CheckInputType(masm, rdx, left_, &miss);
1836  CheckInputType(masm, rax, right_, &miss);
1837 
1838  // Compare two smis.
1839  Label non_smi, smi_done;
1840  __ JumpIfNotBothSmi(rax, rdx, &non_smi);
1841  __ subp(rdx, rax);
1842  __ j(no_overflow, &smi_done);
1843  __ notp(rdx); // Correct sign in case of overflow. rdx cannot be 0 here.
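  // The subtraction operates directly on the tagged smi values, so on
  // overflow the computed difference has the wrong sign; since it cannot be
  // zero in that case, notp flips every bit and thereby restores the correct
  // sign. Example: a left operand near Smi::kMaxValue minus a large negative
  // right operand wraps to a negative value, and notp turns it positive,
  // correctly reporting left > right.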
1844  __ bind(&smi_done);
1845  __ movp(rax, rdx);
1846  __ ret(0);
1847  __ bind(&non_smi);
1848 
1849  // The compare stub returns a positive, negative, or zero 64-bit integer
1850  // value in rax, corresponding to result of comparing the two inputs.
1851  // NOTICE! This code is only reached after a smi-fast-case check, so
1852  // it is certain that at least one operand isn't a smi.
1853 
1854  // Two identical objects are equal unless they are both NaN or undefined.
1855  {
1856  Label not_identical;
1857  __ cmpp(rax, rdx);
1858  __ j(not_equal, &not_identical, Label::kNear);
1859 
1860  if (cc != equal) {
1861  // Check for undefined. undefined OP undefined is false even though
1862  // undefined == undefined.
1863  Label check_for_nan;
1864  __ CompareRoot(rdx, Heap::kUndefinedValueRootIndex);
1865  __ j(not_equal, &check_for_nan, Label::kNear);
1866  __ Set(rax, NegativeComparisonResult(cc));
1867  __ ret(0);
1868  __ bind(&check_for_nan);
1869  }
1870 
1871  // Test for NaN. Sadly, we can't just compare to Factory::nan_value(),
1872  // so we do the second best thing - test it ourselves.
1873  Label heap_number;
1874  // If it's not a heap number, then return equal for (in)equality operator.
1875  __ Cmp(FieldOperand(rdx, HeapObject::kMapOffset),
1876         factory->heap_number_map());
1877  __ j(equal, &heap_number, Label::kNear);
1878  if (cc != equal) {
1879  // Call runtime on identical objects. Otherwise return equal.
1880  __ CmpObjectType(rax, FIRST_SPEC_OBJECT_TYPE, rcx);
1881  __ j(above_equal, &not_identical, Label::kNear);
1882  }
1883  __ Set(rax, EQUAL);
1884  __ ret(0);
1885 
1886  __ bind(&heap_number);
1887  // It is a heap number, so return equal if it's not NaN.
1888  // For NaN, return 1 for every condition except greater and
1889  // greater-equal. Return -1 for them, so the comparison yields
1890  // false for all conditions except not-equal.
1891  __ Set(rax, EQUAL);
1892  __ movsd(xmm0, FieldOperand(rdx, HeapNumber::kValueOffset));
1893  __ ucomisd(xmm0, xmm0);
1894  __ setcc(parity_even, rax);
1895  // rax is 0 for equal non-NaN heapnumbers, 1 for NaNs.
1896  if (cc == greater_equal || cc == greater) {
1897  __ negp(rax);
1898  }
1899  __ ret(0);
1900 
1901  __ bind(&not_identical);
1902  }
1903 
1904  if (cc == equal) { // Both strict and non-strict.
1905  Label slow; // Fallthrough label.
1906 
1907  // If we're doing a strict equality comparison, we don't have to do
1908  // type conversion, so we generate code to do fast comparison for objects
1909  // and oddballs. Non-smi numbers and strings still go through the usual
1910  // slow-case code.
1911  if (strict()) {
1912  // If either is a Smi (we know that not both are), then they can only
1913  // be equal if the other is a HeapNumber. If so, use the slow case.
1914  {
1915  Label not_smis;
1916  __ SelectNonSmi(rbx, rax, rdx, &not_smis);
1917 
1918  // Check if the non-smi operand is a heap number.
1919  __ Cmp(FieldOperand(rbx, HeapObject::kMapOffset),
1920         factory->heap_number_map());
1921  // If heap number, handle it in the slow case.
1922  __ j(equal, &slow);
1923  // Return non-equal. ebx (the lower half of rbx) is not zero.
1924  __ movp(rax, rbx);
1925  __ ret(0);
1926 
1927  __ bind(&not_smis);
1928  }
1929 
1930  // If either operand is a JSObject or an oddball value, then they are not
1931  // equal, since their pointers are different.
1932  // There is no test for undetectability in strict equality.
1933 
1934  // If the first object is a JS object, we have done pointer comparison.
1936  Label first_non_object;
1937  __ CmpObjectType(rax, FIRST_SPEC_OBJECT_TYPE, rcx);
1938  __ j(below, &first_non_object, Label::kNear);
1939  // Return non-zero (eax, the lower half of rax, is not zero).
1940  Label return_not_equal;
1942  __ bind(&return_not_equal);
1943  __ ret(0);
1944 
1945  __ bind(&first_non_object);
1946  // Check for oddballs: true, false, null, undefined.
1947  __ CmpInstanceType(rcx, ODDBALL_TYPE);
1948  __ j(equal, &return_not_equal);
1949 
1950  __ CmpObjectType(rdx, FIRST_SPEC_OBJECT_TYPE, rcx);
1951  __ j(above_equal, &return_not_equal);
1952 
1953  // Check for oddballs: true, false, null, undefined.
1954  __ CmpInstanceType(rcx, ODDBALL_TYPE);
1955  __ j(equal, &return_not_equal);
1956 
1957  // Fall through to the general case.
1958  }
1959  __ bind(&slow);
1960  }
1961 
1962  // Generate the number comparison code.
1963  Label non_number_comparison;
1964  Label unordered;
1965  FloatingPointHelper::LoadSSE2UnknownOperands(masm, &non_number_comparison);
1966  __ xorl(rax, rax);
1967  __ xorl(rcx, rcx);
1968  __ ucomisd(xmm0, xmm1);
1969 
1970  // Don't base result on EFLAGS when a NaN is involved.
1971  __ j(parity_even, &unordered, Label::kNear);
1972  // Return a result of -1, 0, or 1, based on EFLAGS.
1973  __ setcc(above, rax);
1974  __ setcc(below, rcx);
1975  __ subp(rax, rcx);
1976  __ ret(0);
1977 
1978  // If one of the numbers was NaN, then the result is always false.
1979  // The cc is never not-equal.
1980  __ bind(&unordered);
1981  ASSERT(cc != not_equal);
1982  if (cc == less || cc == less_equal) {
1983  __ Set(rax, 1);
1984  } else {
1985  __ Set(rax, -1);
1986  }
1987  __ ret(0);
1988 
1989  // The number comparison code did not provide a valid result.
1990  __ bind(&non_number_comparison);
1991 
1992  // Fast negative check for internalized-to-internalized equality.
1993  Label check_for_strings;
1994  if (cc == equal) {
1995  BranchIfNotInternalizedString(
1996  masm, &check_for_strings, rax, kScratchRegister);
1997  BranchIfNotInternalizedString(
1998  masm, &check_for_strings, rdx, kScratchRegister);
1999 
2000  // We've already checked for object identity, so if both operands are
2001  // internalized strings they aren't equal. Register eax (the lower half of
2002  // rax) already holds a non-zero value, which indicates not equal, so just return.
2003  __ ret(0);
2004  }
2005 
2006  __ bind(&check_for_strings);
2007 
2008  __ JumpIfNotBothSequentialAsciiStrings(
2009  rdx, rax, rcx, rbx, &check_unequal_objects);
2010 
2011  // Inline comparison of ASCII strings.
2012  if (cc == equal) {
2013    StringCompareStub::GenerateFlatAsciiStringEquals(masm,
2014                                                     rdx,
2015                                                     rax,
2016                                                     rcx,
2017                                                     rbx);
2018  } else {
2019    StringCompareStub::GenerateCompareFlatAsciiStrings(masm,
2020                                                       rdx,
2021                                                       rax,
2022                                                       rcx,
2023                                                       rbx,
2024                                                       rdi,
2025                                                       r8);
2026  }
2027 
2028 #ifdef DEBUG
2029  __ Abort(kUnexpectedFallThroughFromStringComparison);
2030 #endif
2031 
2032  __ bind(&check_unequal_objects);
2033  if (cc == equal && !strict()) {
2034  // Not strict equality. Objects are unequal if
2035  // they are both JSObjects and not undetectable,
2036  // and their pointers are different.
2037  Label not_both_objects, return_unequal;
2038  // At most one is a smi, so we can test for smi by adding the two.
2039  // A smi plus a heap object has the low bit set, a heap object plus
2040  // a heap object has the low bit clear.
2041  STATIC_ASSERT(kSmiTag == 0);
2042  STATIC_ASSERT(kSmiTagMask == 1);
2043  __ leap(rcx, Operand(rax, rdx, times_1, 0));
2044  __ testb(rcx, Immediate(kSmiTagMask));
2045  __ j(not_zero, &not_both_objects, Label::kNear);
2046  __ CmpObjectType(rax, FIRST_SPEC_OBJECT_TYPE, rbx);
2047  __ j(below, &not_both_objects, Label::kNear);
2048  __ CmpObjectType(rdx, FIRST_SPEC_OBJECT_TYPE, rcx);
2049  __ j(below, &not_both_objects, Label::kNear);
2050  __ testb(FieldOperand(rbx, Map::kBitFieldOffset),
2051           Immediate(1 << Map::kIsUndetectable));
2052  __ j(zero, &return_unequal, Label::kNear);
2053  __ testb(FieldOperand(rcx, Map::kBitFieldOffset),
2054           Immediate(1 << Map::kIsUndetectable));
2055  __ j(zero, &return_unequal, Label::kNear);
2056  // The objects are both undetectable, so they both compare as the value
2057  // undefined, and are equal.
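  // (Undetectable objects, such as the browser-provided document.all, are
  // specified to behave like undefined in comparisons, which is why two of
  // them compare equal here.)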
2058  __ Set(rax, EQUAL);
2059  __ bind(&return_unequal);
2060  // Return non-equal by returning the non-zero object pointer in rax,
2061  // or return equal if we fell through to here.
2062  __ ret(0);
2063  __ bind(&not_both_objects);
2064  }
2065 
2066  // Push arguments below the return address to prepare jump to builtin.
2067  __ PopReturnAddressTo(rcx);
2068  __ Push(rdx);
2069  __ Push(rax);
2070 
2071  // Figure out which native to call and setup the arguments.
2072  Builtins::JavaScript builtin;
2073  if (cc == equal) {
2074  builtin = strict() ? Builtins::STRICT_EQUALS : Builtins::EQUALS;
2075  } else {
2076  builtin = Builtins::COMPARE;
2077  __ Push(Smi::FromInt(NegativeComparisonResult(cc)));
2078  }
2079 
2080  __ PushReturnAddressFrom(rcx);
2081 
2082  // Call the native; it returns -1 (less), 0 (equal), or 1 (greater)
2083  // tagged as a small integer.
2084  __ InvokeBuiltin(builtin, JUMP_FUNCTION);
2085 
2086  __ bind(&miss);
2087  GenerateMiss(masm);
2088 }
2089 
2090 
2091 static void GenerateRecordCallTarget(MacroAssembler* masm) {
2092  // Cache the called function in a feedback vector slot. Cache states
2093  // are uninitialized, monomorphic (indicated by a JSFunction), and
2094  // megamorphic.
2095  // rax : number of arguments to the construct function
2096  // rbx : Feedback vector
2097  // rdx : slot in feedback vector (Smi)
2098  // rdi : the function to call
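  // The slot moves through three states: it starts as the uninitialized
  // sentinel, the first call records the callee (monomorphic), and a later
  // call with a different callee degrades it to the megamorphic sentinel. As a
  // special case, a monomorphic call to the Array function stores an
  // AllocationSite instead of the function itself so the ElementsKind of
  // arrays allocated through it can be tracked.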
2099  Isolate* isolate = masm->isolate();
2100  Label initialize, done, miss, megamorphic, not_array_function,
2101  done_no_smi_convert;
2102 
2103  // Load the cache state into rcx.
2104  __ SmiToInteger32(rdx, rdx);
2105  __ movp(rcx, FieldOperand(rbx, rdx, times_pointer_size,
2106                            FixedArray::kHeaderSize));
2107 
2108  // A monomorphic cache hit or an already megamorphic state: invoke the
2109  // function without changing the state.
2110  __ cmpp(rcx, rdi);
2111  __ j(equal, &done);
2113  __ j(equal, &done);
2114 
2115  if (!FLAG_pretenuring_call_new) {
2116  // If we came here, we need to see if we are the array function.
2117  // If we didn't have a matching function, and we didn't find the megamorph
2118  // sentinel, then we have in the slot either some other function or an
2119  // AllocationSite. Do a map check on the object in rcx.
2120  Handle<Map> allocation_site_map =
2121  masm->isolate()->factory()->allocation_site_map();
2122  __ Cmp(FieldOperand(rcx, 0), allocation_site_map);
2123  __ j(not_equal, &miss);
2124 
2125  // Make sure the function is the Array() function
2126  __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, rcx);
2127  __ cmpp(rdi, rcx);
2128  __ j(not_equal, &megamorphic);
2129  __ jmp(&done);
2130  }
2131 
2132  __ bind(&miss);
2133 
2134  // A monomorphic miss (i.e., here the cache is not uninitialized) goes
2135  // megamorphic.
2137  __ j(equal, &initialize);
2138  // MegamorphicSentinel is an immortal immovable object (undefined) so no
2139  // write-barrier is needed.
2140  __ bind(&megamorphic);
2143  __ jmp(&done);
2144 
2145  // An uninitialized cache is patched with the function or sentinel to
2146  // indicate the ElementsKind if function is the Array constructor.
2147  __ bind(&initialize);
2148 
2149  if (!FLAG_pretenuring_call_new) {
2150  // Make sure the function is the Array() function
2151  __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, rcx);
2152  __ cmpp(rdi, rcx);
2153  __ j(not_equal, &not_array_function);
2154 
2155  {
2156  FrameScope scope(masm, StackFrame::INTERNAL);
2157 
2158  // Arguments register must be smi-tagged to call out.
2159  __ Integer32ToSmi(rax, rax);
2160  __ Push(rax);
2161  __ Push(rdi);
2162  __ Integer32ToSmi(rdx, rdx);
2163  __ Push(rdx);
2164  __ Push(rbx);
2165 
2166  CreateAllocationSiteStub create_stub;
2167  __ CallStub(&create_stub);
2168 
2169  __ Pop(rbx);
2170  __ Pop(rdx);
2171  __ Pop(rdi);
2172  __ Pop(rax);
2173  __ SmiToInteger32(rax, rax);
2174  }
2175  __ jmp(&done_no_smi_convert);
2176 
2177  __ bind(&not_array_function);
2178  }
2179 
2180  __ movp(FieldOperand(rbx, rdx, times_pointer_size, FixedArray::kHeaderSize),
2181          rdi);
2182 
2183  // We won't need rdx or rbx anymore, just save rdi
2184  __ Push(rdi);
2185  __ Push(rbx);
2186  __ Push(rdx);
2187  __ RecordWriteArray(rbx, rdi, rdx, kDontSaveFPRegs,
2189  __ Pop(rdx);
2190  __ Pop(rbx);
2191  __ Pop(rdi);
2192 
2193  __ bind(&done);
2194  __ Integer32ToSmi(rdx, rdx);
2195 
2196  __ bind(&done_no_smi_convert);
2197 }
2198 
2199 
2200 void CallFunctionStub::Generate(MacroAssembler* masm) {
2201  // rbx : feedback vector
2202  // rdx : (only if rbx is not the megamorphic symbol) slot in feedback
2203  // vector (Smi)
2204  // rdi : the function to call
2205  Isolate* isolate = masm->isolate();
2206  Label slow, non_function, wrap, cont;
2207  StackArgumentsAccessor args(rsp, argc_);
2208 
2209  if (NeedsChecks()) {
2210  // Check that the function really is a JavaScript function.
2211  __ JumpIfSmi(rdi, &non_function);
2212 
2213  // Goto slow case if we do not have a function.
2214  __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx);
2215  __ j(not_equal, &slow);
2216 
2217  if (RecordCallTarget()) {
2218  GenerateRecordCallTarget(masm);
2219  // Type information was updated. Because we may call Array, which
2220  // expects either undefined or an AllocationSite in rbx, we need
2221  // to set rbx to undefined.
2222  __ LoadRoot(rbx, Heap::kUndefinedValueRootIndex);
2223  }
2224  }
2225 
2226  // Fast-case: Just invoke the function.
2227  ParameterCount actual(argc_);
2228 
2229  if (CallAsMethod()) {
2230  if (NeedsChecks()) {
2231  // Do not transform the receiver for strict mode functions.
2235  __ j(not_equal, &cont);
2236 
2237  // Do not transform the receiver for natives.
2238  // SharedFunctionInfo is already loaded into rcx.
2241  __ j(not_equal, &cont);
2242  }
2243 
2244 
2245  // Load the receiver from the stack.
2246  __ movp(rax, args.GetReceiverOperand());
2247 
2248  if (NeedsChecks()) {
2249  __ JumpIfSmi(rax, &wrap);
2250 
2251  __ CmpObjectType(rax, FIRST_SPEC_OBJECT_TYPE, rcx);
2252  __ j(below, &wrap);
2253  } else {
2254  __ jmp(&wrap);
2255  }
2256 
2257  __ bind(&cont);
2258  }
2259  __ InvokeFunction(rdi, actual, JUMP_FUNCTION, NullCallWrapper());
2260 
2261  if (NeedsChecks()) {
2262  // Slow-case: Non-function called.
2263  __ bind(&slow);
2264  if (RecordCallTarget()) {
2265  // If there is a call target cache, mark it megamorphic in the
2266  // non-function case. MegamorphicSentinel is an immortal immovable
2267  // object (megamorphic symbol) so no write barrier is needed.
2268  __ SmiToInteger32(rdx, rdx);
2272  __ Integer32ToSmi(rdx, rdx);
2273  }
2274  // Check for function proxy.
2275  __ CmpInstanceType(rcx, JS_FUNCTION_PROXY_TYPE);
2276  __ j(not_equal, &non_function);
2277  __ PopReturnAddressTo(rcx);
2278  __ Push(rdi); // put proxy as additional argument under return address
2279  __ PushReturnAddressFrom(rcx);
2280  __ Set(rax, argc_ + 1);
2281  __ Set(rbx, 0);
2282  __ GetBuiltinEntry(rdx, Builtins::CALL_FUNCTION_PROXY);
2283  {
2284  Handle<Code> adaptor =
2285  masm->isolate()->builtins()->ArgumentsAdaptorTrampoline();
2286  __ jmp(adaptor, RelocInfo::CODE_TARGET);
2287  }
2288 
2289  // CALL_NON_FUNCTION expects the non-function callee as receiver (instead
2290  // of the original receiver from the call site).
2291  __ bind(&non_function);
2292  __ movp(args.GetReceiverOperand(), rdi);
2293  __ Set(rax, argc_);
2294  __ Set(rbx, 0);
2295  __ GetBuiltinEntry(rdx, Builtins::CALL_NON_FUNCTION);
2296  Handle<Code> adaptor =
2297  isolate->builtins()->ArgumentsAdaptorTrampoline();
2298  __ Jump(adaptor, RelocInfo::CODE_TARGET);
2299  }
2300 
2301  if (CallAsMethod()) {
2302  __ bind(&wrap);
2303  // Wrap the receiver and patch it back onto the stack.
2304  { FrameScope frame_scope(masm, StackFrame::INTERNAL);
2305  __ Push(rdi);
2306  __ Push(rax);
2307  __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
2308  __ Pop(rdi);
2309  }
2310  __ movp(args.GetReceiverOperand(), rax);
2311  __ jmp(&cont);
2312  }
2313 }
2314 
2315 
2316 void CallConstructStub::Generate(MacroAssembler* masm) {
2317  // rax : number of arguments
2318  // rbx : feedback vector
2319  // rdx : (only if rbx is not the megamorphic symbol) slot in feedback
2320  // vector (Smi)
2321  // rdi : constructor function
2322  Label slow, non_function_call;
2323 
2324  // Check that function is not a smi.
2325  __ JumpIfSmi(rdi, &non_function_call);
2326  // Check that function is a JSFunction.
2327  __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx);
2328  __ j(not_equal, &slow);
2329 
2330  if (RecordCallTarget()) {
2331  GenerateRecordCallTarget(masm);
2332 
2333  __ SmiToInteger32(rdx, rdx);
2334  if (FLAG_pretenuring_call_new) {
2335  // Put the AllocationSite from the feedback vector into rbx.
2336  // By adding kPointerSize we encode that we know the AllocationSite
2337  // entry is at the feedback vector slot given by rdx + 1.
2338    __ movp(rbx, FieldOperand(rbx, rdx, times_pointer_size,
2339                              FixedArray::kHeaderSize + kPointerSize));
2340  } else {
2341  Label feedback_register_initialized;
2342  // Put the AllocationSite from the feedback vector into rbx, or undefined.
2343    __ movp(rbx, FieldOperand(rbx, rdx, times_pointer_size,
2344                              FixedArray::kHeaderSize));
2345    __ CompareRoot(FieldOperand(rbx, 0), Heap::kAllocationSiteMapRootIndex);
2346  __ j(equal, &feedback_register_initialized);
2347  __ LoadRoot(rbx, Heap::kUndefinedValueRootIndex);
2348  __ bind(&feedback_register_initialized);
2349  }
2350 
2351  __ AssertUndefinedOrAllocationSite(rbx);
2352  }
2353 
2354  // Jump to the function-specific construct stub.
2355  Register jmp_reg = rcx;
2356  __ movp(jmp_reg, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
2357  __ movp(jmp_reg, FieldOperand(jmp_reg,
2358                                SharedFunctionInfo::kConstructStubOffset));
2359  __ leap(jmp_reg, FieldOperand(jmp_reg, Code::kHeaderSize));
2360  __ jmp(jmp_reg);
2361 
2362  // rdi: called object
2363  // rax: number of arguments
2364  // rcx: object map
2365  Label do_call;
2366  __ bind(&slow);
2367  __ CmpInstanceType(rcx, JS_FUNCTION_PROXY_TYPE);
2368  __ j(not_equal, &non_function_call);
2369  __ GetBuiltinEntry(rdx, Builtins::CALL_FUNCTION_PROXY_AS_CONSTRUCTOR);
2370  __ jmp(&do_call);
2371 
2372  __ bind(&non_function_call);
2373  __ GetBuiltinEntry(rdx, Builtins::CALL_NON_FUNCTION_AS_CONSTRUCTOR);
2374  __ bind(&do_call);
2375  // Set expected number of arguments to zero (not changing rax).
2376  __ Set(rbx, 0);
2377  __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
2378  RelocInfo::CODE_TARGET);
2379 }
2380 
2381 
2382 bool CEntryStub::NeedsImmovableCode() {
2383  return false;
2384 }
2385 
2386 
2387 void CodeStub::GenerateStubsAheadOfTime(Isolate* isolate) {
2391  // It is important that the store buffer overflow stubs are generated first.
2395  BinaryOpICWithAllocationSiteStub::GenerateAheadOfTime(isolate);
2396 }
2397 
2398 
2399 void CodeStub::GenerateFPStubs(Isolate* isolate) {
2400 }
2401 
2402 
2403 void CEntryStub::GenerateAheadOfTime(Isolate* isolate) {
2404  CEntryStub stub(1, kDontSaveFPRegs);
2405  stub.GetCode(isolate);
2406  CEntryStub save_doubles(1, kSaveFPRegs);
2407  save_doubles.GetCode(isolate);
2408 }
2409 
2410 
2411 void CEntryStub::GenerateCore(MacroAssembler* masm,
2412  Label* throw_normal_exception,
2413  Label* throw_termination_exception,
2414  bool do_gc,
2415  bool always_allocate_scope) {
2416  // rax: result parameter for PerformGC, if any.
2417  // rbx: pointer to C function (C callee-saved).
2418  // rbp: frame pointer (restored after C call).
2419  // rsp: stack pointer (restored after C call).
2420  // r14: number of arguments including receiver (C callee-saved).
2421  // r15: pointer to the first argument (C callee-saved).
2422  // This pointer is reused in LeaveExitFrame(), so it is stored in a
2423  // callee-saved register.
2424 
2425  // Simple results returned in rax (both AMD64 and Win64 calling conventions).
2426  // Complex results must be written to address passed as first argument.
2427  // AMD64 calling convention: a struct of two pointers in rax+rdx
2428 
2429  // Check stack alignment.
2430  if (FLAG_debug_code) {
2431  __ CheckStackAlignment();
2432  }
2433 
2434  if (do_gc) {
2435  // Pass failure code returned from last attempt as first argument to
2436  // PerformGC. No need to use PrepareCallCFunction/CallCFunction here as the
2437  // stack is known to be aligned. This function takes one argument which is
2438  // passed in register.
2439  __ Move(arg_reg_2, ExternalReference::isolate_address(masm->isolate()));
2440  __ movp(arg_reg_1, rax);
2441  __ Move(kScratchRegister,
2442  ExternalReference::perform_gc_function(masm->isolate()));
2443  __ call(kScratchRegister);
2444  }
2445 
2446  ExternalReference scope_depth =
2447  ExternalReference::heap_always_allocate_scope_depth(masm->isolate());
2448  if (always_allocate_scope) {
2449  Operand scope_depth_operand = masm->ExternalOperand(scope_depth);
2450  __ incl(scope_depth_operand);
2451  }
2452 
2453  // Call C function.
2454 #ifdef _WIN64
2455  // Windows 64-bit ABI passes arguments in rcx, rdx, r8, r9.
2456  // Pass argv and argc as two parameters. The arguments object will
2457  // be created by stubs declared by DECLARE_RUNTIME_FUNCTION().
2458  if (result_size_ < 2) {
2459  // Pass a pointer to the Arguments object as the first argument.
2460  // Return result in single register (rax).
2461  __ movp(rcx, r14); // argc.
2462  __ movp(rdx, r15); // argv.
2463  __ Move(r8, ExternalReference::isolate_address(masm->isolate()));
2464  } else {
2465  ASSERT_EQ(2, result_size_);
2466  // Pass a pointer to the result location as the first argument.
2467  __ leap(rcx, StackSpaceOperand(2));
2468  // Pass a pointer to the Arguments object as the second argument.
2469  __ movp(rdx, r14); // argc.
2470  __ movp(r8, r15); // argv.
2471  __ Move(r9, ExternalReference::isolate_address(masm->isolate()));
2472  }
2473 
2474 #else // _WIN64
2475  // GCC passes arguments in rdi, rsi, rdx, rcx, r8, r9.
2476  __ movp(rdi, r14); // argc.
2477  __ movp(rsi, r15); // argv.
2478  __ Move(rdx, ExternalReference::isolate_address(masm->isolate()));
2479 #endif
2480  __ call(rbx);
2481  // Result is in rax - do not destroy this register!
2482 
2483  if (always_allocate_scope) {
2484  Operand scope_depth_operand = masm->ExternalOperand(scope_depth);
2485  __ decl(scope_depth_operand);
2486  }
2487 
2488  // Check for failure result.
2489  Label failure_returned;
2490  STATIC_ASSERT(((kFailureTag + 1) & kFailureTagMask) == 0);
2491 #ifdef _WIN64
2492  // If return value is on the stack, pop it to registers.
2493  if (result_size_ > 1) {
2494  ASSERT_EQ(2, result_size_);
2495  // Read result values stored on stack. Result is stored
2496  // above the four argument mirror slots and the two
2497  // Arguments object slots.
2498  __ movq(rax, Operand(rsp, 6 * kRegisterSize));
2499  __ movq(rdx, Operand(rsp, 7 * kRegisterSize));
2500  }
2501 #endif
2502  __ leap(rcx, Operand(rax, 1));
2503  // Lower 2 bits of rcx are 0 iff rax has failure tag.
2504  __ testl(rcx, Immediate(kFailureTagMask));
2505  __ j(zero, &failure_returned);
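  // Failure objects have all bits of kFailureTagMask set in their tag (the
  // STATIC_ASSERT above checks that kFailureTag + 1 clears the mask), so the
  // lea adds 1 and only a failure value ends up with zero low bits: ...11 + 1
  // becomes ...00 and takes the branch, while a heap object tagged ...01
  // becomes ...10 and falls through.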
2506 
2507  // Exit the JavaScript to C++ exit frame.
2508  __ LeaveExitFrame(save_doubles_);
2509  __ ret(0);
2510 
2511  // Handling of failure.
2512  __ bind(&failure_returned);
2513 
2514  Label retry;
2515  // If the returned exception is RETRY_AFTER_GC continue at retry label
2517  __ testl(rax, Immediate(((1 << kFailureTypeTagSize) - 1) << kFailureTagSize));
2518  __ j(zero, &retry, Label::kNear);
2519 
2520  // Retrieve the pending exception.
2521  ExternalReference pending_exception_address(
2522  Isolate::kPendingExceptionAddress, masm->isolate());
2523  Operand pending_exception_operand =
2524  masm->ExternalOperand(pending_exception_address);
2525  __ movp(rax, pending_exception_operand);
2526 
2527  // Clear the pending exception.
2528  pending_exception_operand =
2529  masm->ExternalOperand(pending_exception_address);
2530  __ LoadRoot(rdx, Heap::kTheHoleValueRootIndex);
2531  __ movp(pending_exception_operand, rdx);
2532 
2533  // Special handling of termination exceptions which are uncatchable
2534  // by JavaScript code.
2535  __ CompareRoot(rax, Heap::kTerminationExceptionRootIndex);
2536  __ j(equal, throw_termination_exception);
2537 
2538  // Handle normal exception.
2539  __ jmp(throw_normal_exception);
2540 
2541  // Retry.
2542  __ bind(&retry);
2543 }
2544 
2545 
2546 void CEntryStub::Generate(MacroAssembler* masm) {
2547  // rax: number of arguments including receiver
2548  // rbx: pointer to C function (C callee-saved)
2549  // rbp: frame pointer of calling JS frame (restored after C call)
2550  // rsp: stack pointer (restored after C call)
2551  // rsi: current context (restored)
2552 
2553  // NOTE: Invocations of builtins may return failure objects
2554  // instead of a proper result. The builtin entry handles
2555  // this by performing a garbage collection and retrying the
2556  // builtin once.
2557 
2559 
2560  // Enter the exit frame that transitions from JavaScript to C++.
2561 #ifdef _WIN64
2562  int arg_stack_space = (result_size_ < 2 ? 2 : 4);
2563 #else
2564  int arg_stack_space = 0;
2565 #endif
2566  __ EnterExitFrame(arg_stack_space, save_doubles_);
2567 
2568  // rax: Holds the context at this point, but should not be used.
2569  // On entry to code generated by GenerateCore, it must hold
2570  // a failure result if the collect_garbage argument to GenerateCore
2571  // is true. This failure result can be the result of code
2572  // generated by a previous call to GenerateCore. The value
2573  // of rax is then passed to Runtime::PerformGC.
2574  // rbx: pointer to builtin function (C callee-saved).
2575  // rbp: frame pointer of exit frame (restored after C call).
2576  // rsp: stack pointer (restored after C call).
2577  // r14: number of arguments including receiver (C callee-saved).
2578  // r15: argv pointer (C callee-saved).
2579 
2580  Label throw_normal_exception;
2581  Label throw_termination_exception;
2582 
2583  // Call into the runtime system.
2584  GenerateCore(masm,
2585  &throw_normal_exception,
2586  &throw_termination_exception,
2587  false,
2588  false);
2589 
2590  // Do space-specific GC and retry runtime call.
2591  GenerateCore(masm,
2592  &throw_normal_exception,
2593  &throw_termination_exception,
2594  true,
2595  false);
2596 
2597  // Do full GC and retry runtime call one final time.
2598  Failure* failure = Failure::InternalError();
2599  __ Move(rax, failure, Assembler::RelocInfoNone());
2600  GenerateCore(masm,
2601  &throw_normal_exception,
2602  &throw_termination_exception,
2603  true,
2604  true);
2605 
2606  { FrameScope scope(masm, StackFrame::MANUAL);
2607  __ PrepareCallCFunction(0);
2608  __ CallCFunction(
2609  ExternalReference::out_of_memory_function(masm->isolate()), 0);
2610  }
2611 
2612  __ bind(&throw_termination_exception);
2613  __ ThrowUncatchable(rax);
2614 
2615  __ bind(&throw_normal_exception);
2616  __ Throw(rax);
2617 }
2618 
2619 
2620 void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) {
2621  Label invoke, handler_entry, exit;
2622  Label not_outermost_js, not_outermost_js_2;
2623 
2625 
2626  { // NOLINT. Scope block confuses linter.
2627  MacroAssembler::NoRootArrayScope uninitialized_root_register(masm);
2628  // Set up frame.
2629  __ pushq(rbp);
2630  __ movp(rbp, rsp);
2631 
2632  // Push the stack frame type marker twice.
2633  int marker = is_construct ? StackFrame::ENTRY_CONSTRUCT : StackFrame::ENTRY;
2634  // Scratch register is neither callee-save, nor an argument register on any
2635  // platform. It's free to use at this point.
2636  // Cannot use smi-register for loading yet.
2637  __ Move(kScratchRegister, Smi::FromInt(marker), Assembler::RelocInfoNone());
2638  __ Push(kScratchRegister); // context slot
2639  __ Push(kScratchRegister); // function slot
2640  // Save callee-saved registers (X64/X32/Win64 calling conventions).
2641  __ pushq(r12);
2642  __ pushq(r13);
2643  __ pushq(r14);
2644  __ pushq(r15);
2645 #ifdef _WIN64
2646  __ pushq(rdi); // Only callee save in Win64 ABI, argument in AMD64 ABI.
2647  __ pushq(rsi); // Only callee save in Win64 ABI, argument in AMD64 ABI.
2648 #endif
2649  __ pushq(rbx);
2650 
2651 #ifdef _WIN64
2652  // On Win64 XMM6-XMM15 are callee-save
2653  __ subp(rsp, Immediate(EntryFrameConstants::kXMMRegistersBlockSize));
2654  __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 0), xmm6);
2655  __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 1), xmm7);
2656  __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 2), xmm8);
2657  __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 3), xmm9);
2658  __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 4), xmm10);
2659  __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 5), xmm11);
2660  __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 6), xmm12);
2661  __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 7), xmm13);
2662  __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 8), xmm14);
2663  __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 9), xmm15);
2664 #endif
2665 
2666  // Set up the roots and smi constant registers.
2667  // Needs to be done before any further smi loads.
2668  __ InitializeSmiConstantRegister();
2669  __ InitializeRootRegister();
2670  }
2671 
2672  Isolate* isolate = masm->isolate();
2673 
2674  // Save copies of the top frame descriptor on the stack.
2675  ExternalReference c_entry_fp(Isolate::kCEntryFPAddress, isolate);
2676  {
2677  Operand c_entry_fp_operand = masm->ExternalOperand(c_entry_fp);
2678  __ Push(c_entry_fp_operand);
2679  }
2680 
2681  // If this is the outermost JS call, set js_entry_sp value.
2682  ExternalReference js_entry_sp(Isolate::kJSEntrySPAddress, isolate);
2683  __ Load(rax, js_entry_sp);
2684  __ testp(rax, rax);
2685  __ j(not_zero, &not_outermost_js);
2686  __ Push(Smi::FromInt(StackFrame::OUTERMOST_JSENTRY_FRAME));
2687  __ movp(rax, rbp);
2688  __ Store(js_entry_sp, rax);
2689  Label cont;
2690  __ jmp(&cont);
2691  __ bind(&not_outermost_js);
2692  __ Push(Smi::FromInt(StackFrame::INNER_JSENTRY_FRAME));
2693  __ bind(&cont);
2694 
2695  // Jump to a faked try block that does the invoke, with a faked catch
2696  // block that sets the pending exception.
2697  __ jmp(&invoke);
2698  __ bind(&handler_entry);
2699  handler_offset_ = handler_entry.pos();
2700  // Caught exception: Store result (exception) in the pending exception
2701  // field in the JSEnv and return a failure sentinel.
2702  ExternalReference pending_exception(Isolate::kPendingExceptionAddress,
2703  isolate);
2704  __ Store(pending_exception, rax);
2706  __ jmp(&exit);
2707 
2708  // Invoke: Link this frame into the handler chain. There's only one
2709  // handler block in this code object, so its index is 0.
2710  __ bind(&invoke);
2711  __ PushTryHandler(StackHandler::JS_ENTRY, 0);
2712 
2713  // Clear any pending exceptions.
2714  __ LoadRoot(rax, Heap::kTheHoleValueRootIndex);
2715  __ Store(pending_exception, rax);
2716 
2717  // Fake a receiver (NULL).
2718  __ Push(Immediate(0)); // receiver
2719 
2720  // Invoke the function by calling through JS entry trampoline builtin and
2721  // pop the faked function when we return. We load the address from an
2722  // external reference instead of inlining the call target address directly
2723  // in the code, because the builtin stubs may not have been generated yet
2724  // at the time this code is generated.
2725  if (is_construct) {
2726  ExternalReference construct_entry(Builtins::kJSConstructEntryTrampoline,
2727  isolate);
2728  __ Load(rax, construct_entry);
2729  } else {
2730  ExternalReference entry(Builtins::kJSEntryTrampoline, isolate);
2731  __ Load(rax, entry);
2732  }
2734  __ call(kScratchRegister);
2735 
2736  // Unlink this frame from the handler chain.
2737  __ PopTryHandler();
2738 
2739  __ bind(&exit);
2740  // Check if the current stack frame is marked as the outermost JS frame.
2741  __ Pop(rbx);
2742  __ Cmp(rbx, Smi::FromInt(StackFrame::OUTERMOST_JSENTRY_FRAME));
2743  __ j(not_equal, &not_outermost_js_2);
2744  __ Move(kScratchRegister, js_entry_sp);
2745  __ movp(Operand(kScratchRegister, 0), Immediate(0));
2746  __ bind(&not_outermost_js_2);
2747 
2748  // Restore the top frame descriptor from the stack.
2749  { Operand c_entry_fp_operand = masm->ExternalOperand(c_entry_fp);
2750  __ Pop(c_entry_fp_operand);
2751  }
2752 
2753  // Restore callee-saved registers (X64 conventions).
2754 #ifdef _WIN64
2755  // On Win64 XMM6-XMM15 are callee-save
2756  __ movdqu(xmm6, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 0));
2757  __ movdqu(xmm7, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 1));
2758  __ movdqu(xmm8, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 2));
2759  __ movdqu(xmm9, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 3));
2760  __ movdqu(xmm10, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 4));
2761  __ movdqu(xmm11, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 5));
2762  __ movdqu(xmm12, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 6));
2763  __ movdqu(xmm13, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 7));
2764  __ movdqu(xmm14, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 8));
2765  __ movdqu(xmm15, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 9));
2766  __ addp(rsp, Immediate(EntryFrameConstants::kXMMRegistersBlockSize));
2767 #endif
2768 
2769  __ popq(rbx);
2770 #ifdef _WIN64
2771  // Callee-saved in the Win64 ABI, argument/volatile registers in the AMD64 ABI.
2772  __ popq(rsi);
2773  __ popq(rdi);
2774 #endif
2775  __ popq(r15);
2776  __ popq(r14);
2777  __ popq(r13);
2778  __ popq(r12);
2779  __ addp(rsp, Immediate(2 * kPointerSize)); // remove markers
2780 
2781  // Restore frame pointer and return.
2782  __ popq(rbp);
2783  __ ret(0);
2784 }
2785 
2786 
2787 void InstanceofStub::Generate(MacroAssembler* masm) {
2788  // Implements "value instanceof function" operator.
2789  // Expected input state with no inline cache:
2790  // rsp[0] : return address
2791  // rsp[8] : function pointer
2792  // rsp[16] : value
2793  // Expected input state with an inline one-element cache:
2794  // rsp[0] : return address
2795  // rsp[8] : offset from return address to location of inline cache
2796  // rsp[16] : function pointer
2797  // rsp[24] : value
2798  // Returns a bitwise zero to indicate that the value
2799  // is an instance of the function, and anything else to
2800  // indicate that the value is not an instance.
2801 
2802  static const int kOffsetToMapCheckValue = 2;
2803  static const int kOffsetToResultValue = 18;
2804  // The last 4 bytes of the instruction sequence
2805  // movq(rdi, FieldOperand(rax, HeapObject::kMapOffset))
2806  // Move(kScratchRegister, Factory::the_hole_value())
2807  // in front of the hole value address.
2808  static const unsigned int kWordBeforeMapCheckValue = 0xBA49FF78;
2809  // The last 4 bytes of the instruction sequence
2810  // __ j(not_equal, &cache_miss);
2811  // __ LoadRoot(ToRegister(instr->result()), Heap::kTheHoleValueRootIndex);
2812  // before the offset of the hole value in the root array.
2813  static const unsigned int kWordBeforeResultValue = 0x458B4906;
2814  // Only the inline check flag is supported on X64.
2815  ASSERT(flags_ == kNoFlags || HasCallSiteInlineCheck());
2816  int extra_argument_offset = HasCallSiteInlineCheck() ? 1 : 0;
2817 
2818  // Get the object - go slow case if it's a smi.
2819  Label slow;
2820  StackArgumentsAccessor args(rsp, 2 + extra_argument_offset,
2822  __ movp(rax, args.GetArgumentOperand(0));
2823  __ JumpIfSmi(rax, &slow);
2824 
2825  // Check that the left hand is a JS object. Leave its map in rax.
2826  __ CmpObjectType(rax, FIRST_SPEC_OBJECT_TYPE, rax);
2827  __ j(below, &slow);
2828  __ CmpInstanceType(rax, LAST_SPEC_OBJECT_TYPE);
2829  __ j(above, &slow);
2830 
2831  // Get the prototype of the function.
2832  __ movp(rdx, args.GetArgumentOperand(1));
2833  // rdx is function, rax is map.
2834 
2835  // If there is a call site cache don't look in the global cache, but do the
2836  // real lookup and update the call site cache.
2837  if (!HasCallSiteInlineCheck()) {
2838  // Look up the function and the map in the instanceof cache.
2839  Label miss;
2840  __ CompareRoot(rdx, Heap::kInstanceofCacheFunctionRootIndex);
2841  __ j(not_equal, &miss, Label::kNear);
2842  __ CompareRoot(rax, Heap::kInstanceofCacheMapRootIndex);
2843  __ j(not_equal, &miss, Label::kNear);
2844  __ LoadRoot(rax, Heap::kInstanceofCacheAnswerRootIndex);
2845  __ ret(2 * kPointerSize);
2846  __ bind(&miss);
2847  }
2848 
2849  __ TryGetFunctionPrototype(rdx, rbx, &slow, true);
2850 
2851  // Check that the function prototype is a JS object.
2852  __ JumpIfSmi(rbx, &slow);
2853  __ CmpObjectType(rbx, FIRST_SPEC_OBJECT_TYPE, kScratchRegister);
2854  __ j(below, &slow);
2855  __ CmpInstanceType(kScratchRegister, LAST_SPEC_OBJECT_TYPE);
2856  __ j(above, &slow);
2857 
2858  // Register mapping:
2859  // rax is object map.
2860  // rdx is function.
2861  // rbx is function prototype.
2862  if (!HasCallSiteInlineCheck()) {
2863  __ StoreRoot(rdx, Heap::kInstanceofCacheFunctionRootIndex);
2864  __ StoreRoot(rax, Heap::kInstanceofCacheMapRootIndex);
2865  } else {
2866  // Get return address and delta to inlined map check.
2868  __ subp(kScratchRegister, args.GetArgumentOperand(2));
2869  if (FLAG_debug_code) {
2870  __ movl(rdi, Immediate(kWordBeforeMapCheckValue));
2871  __ cmpl(Operand(kScratchRegister, kOffsetToMapCheckValue - 4), rdi);
2872  __ Assert(equal, kInstanceofStubUnexpectedCallSiteCacheCheck);
2873  }
2874  __ movp(kScratchRegister,
2875  Operand(kScratchRegister, kOffsetToMapCheckValue));
2876  __ movp(Operand(kScratchRegister, 0), rax);
2877  }
2878 
2879  __ movp(rcx, FieldOperand(rax, Map::kPrototypeOffset));
2880 
2881  // Loop through the prototype chain looking for the function prototype.
2882  Label loop, is_instance, is_not_instance;
2883  __ LoadRoot(kScratchRegister, Heap::kNullValueRootIndex);
2884  __ bind(&loop);
2885  __ cmpp(rcx, rbx);
2886  __ j(equal, &is_instance, Label::kNear);
2887  __ cmpp(rcx, kScratchRegister);
2888  // The code at is_not_instance assumes that kScratchRegister contains a
2889  // non-zero GCable value (the null object in this case).
2890  __ j(equal, &is_not_instance, Label::kNear);
2891  __ movp(rcx, FieldOperand(rcx, HeapObject::kMapOffset));
2892  __ movp(rcx, FieldOperand(rcx, Map::kPrototypeOffset));
2893  __ jmp(&loop);
2894 
2895  __ bind(&is_instance);
2896  if (!HasCallSiteInlineCheck()) {
2897  __ xorl(rax, rax);
2898  // Store bitwise zero in the cache. This is a Smi in GC terms.
2899  STATIC_ASSERT(kSmiTag == 0);
2900  __ StoreRoot(rax, Heap::kInstanceofCacheAnswerRootIndex);
2901  } else {
2902  // Store offset of true in the root array at the inline check site.
2903  int true_offset = 0x100 +
2904  (Heap::kTrueValueRootIndex << kPointerSizeLog2) - kRootRegisterBias;
2905  // Assert it is a 1-byte signed value.
2906  ASSERT(true_offset >= 0 && true_offset < 0x100);
2907  __ movl(rax, Immediate(true_offset));
2909  __ subp(kScratchRegister, args.GetArgumentOperand(2));
2910  __ movb(Operand(kScratchRegister, kOffsetToResultValue), rax);
2911  if (FLAG_debug_code) {
2912  __ movl(rax, Immediate(kWordBeforeResultValue));
2913  __ cmpl(Operand(kScratchRegister, kOffsetToResultValue - 4), rax);
2914  __ Assert(equal, kInstanceofStubUnexpectedCallSiteCacheMov);
2915  }
2916  __ Set(rax, 0);
2917  }
2918  __ ret((2 + extra_argument_offset) * kPointerSize);
2919 
2920  __ bind(&is_not_instance);
2921  if (!HasCallSiteInlineCheck()) {
2922  // We have to store a non-zero value in the cache.
2923  __ StoreRoot(kScratchRegister, Heap::kInstanceofCacheAnswerRootIndex);
2924  } else {
2925  // Store offset of false in the root array at the inline check site.
2926  int false_offset = 0x100 +
2927  (Heap::kFalseValueRootIndex << kPointerSizeLog2) - kRootRegisterBias;
2928  // Assert it is a 1-byte signed value.
2929  ASSERT(false_offset >= 0 && false_offset < 0x100);
2930  __ movl(rax, Immediate(false_offset));
2932  __ subp(kScratchRegister, args.GetArgumentOperand(2));
2933  __ movb(Operand(kScratchRegister, kOffsetToResultValue), rax);
2934  if (FLAG_debug_code) {
2935  __ movl(rax, Immediate(kWordBeforeResultValue));
2936  __ cmpl(Operand(kScratchRegister, kOffsetToResultValue - 4), rax);
2937  __ Assert(equal, kInstanceofStubUnexpectedCallSiteCacheMov);
2938  }
2939  }
2940  __ ret((2 + extra_argument_offset) * kPointerSize);
2941 
2942  // Slow-case: Go through the JavaScript implementation.
2943  __ bind(&slow);
2944  if (HasCallSiteInlineCheck()) {
2945  // Remove extra value from the stack.
2946  __ PopReturnAddressTo(rcx);
2947  __ Pop(rax);
2948  __ PushReturnAddressFrom(rcx);
2949  }
2950  __ InvokeBuiltin(Builtins::INSTANCE_OF, JUMP_FUNCTION);
2951 }
2952 
2953 
2954 // Passing arguments in registers is not supported.
2955 Register InstanceofStub::left() { return no_reg; }
2956 
2957 
2958 Register InstanceofStub::right() { return no_reg; }
2959 
2960 
2961 // -------------------------------------------------------------------------
2962 // StringCharCodeAtGenerator
2963 
2964 void StringCharCodeAtGenerator::GenerateFast(MacroAssembler* masm) {
2965  Label flat_string;
2966  Label ascii_string;
2967  Label got_char_code;
2968  Label sliced_string;
2969 
2970  // If the receiver is a smi trigger the non-string case.
2971  __ JumpIfSmi(object_, receiver_not_string_);
2972 
2973  // Fetch the instance type of the receiver into result register.
2974  __ movp(result_, FieldOperand(object_, HeapObject::kMapOffset));
2975  __ movzxbl(result_, FieldOperand(result_, Map::kInstanceTypeOffset));
2976  // If the receiver is not a string trigger the non-string case.
2977  __ testb(result_, Immediate(kIsNotStringMask));
2978  __ j(not_zero, receiver_not_string_);
2979 
2980  // If the index is non-smi trigger the non-smi case.
2981  __ JumpIfNotSmi(index_, &index_not_smi_);
2982  __ bind(&got_smi_index_);
2983 
2984  // Check for index out of range.
2985  __ SmiCompare(index_, FieldOperand(object_, String::kLengthOffset));
2986  __ j(above_equal, index_out_of_range_);
2987 
2988  __ SmiToInteger32(index_, index_);
2989 
2990  StringCharLoadGenerator::Generate(
2991      masm, object_, index_, result_, &call_runtime_);
2992 
2993  __ Integer32ToSmi(result_, result_);
2994  __ bind(&exit_);
2995 }
2996 
2997 
2998 void StringCharCodeAtGenerator::GenerateSlow(
2999     MacroAssembler* masm,
3000  const RuntimeCallHelper& call_helper) {
3001  __ Abort(kUnexpectedFallthroughToCharCodeAtSlowCase);
3002 
3003  Factory* factory = masm->isolate()->factory();
3004  // Index is not a smi.
3005  __ bind(&index_not_smi_);
3006  // If index is a heap number, try converting it to an integer.
3007  __ CheckMap(index_,
3008  factory->heap_number_map(),
3009  index_not_number_,
3011  call_helper.BeforeCall(masm);
3012  __ Push(object_);
3013  __ Push(index_); // Consumed by runtime conversion function.
3014  if (index_flags_ == STRING_INDEX_IS_NUMBER) {
3015  __ CallRuntime(Runtime::kNumberToIntegerMapMinusZero, 1);
3016  } else {
3017  ASSERT(index_flags_ == STRING_INDEX_IS_ARRAY_INDEX);
3018  // NumberToSmi discards numbers that are not exact integers.
3019  __ CallRuntime(Runtime::kHiddenNumberToSmi, 1);
3020  }
3021  if (!index_.is(rax)) {
3022  // Save the conversion result before the pop instructions below
3023  // have a chance to overwrite it.
3024  __ movp(index_, rax);
3025  }
3026  __ Pop(object_);
3027  // Reload the instance type.
3028  __ movp(result_, FieldOperand(object_, HeapObject::kMapOffset));
3029  __ movzxbl(result_, FieldOperand(result_, Map::kInstanceTypeOffset));
3030  call_helper.AfterCall(masm);
3031  // If index is still not a smi, it must be out of range.
3032  __ JumpIfNotSmi(index_, index_out_of_range_);
3033  // Otherwise, return to the fast path.
3034  __ jmp(&got_smi_index_);
3035 
3036  // Call runtime. We get here when the receiver is a string and the
3037  // index is a number, but the code of getting the actual character
3038  // is too complex (e.g., when the string needs to be flattened).
3039  __ bind(&call_runtime_);
3040  call_helper.BeforeCall(masm);
3041  __ Push(object_);
3042  __ Integer32ToSmi(index_, index_);
3043  __ Push(index_);
3044  __ CallRuntime(Runtime::kHiddenStringCharCodeAt, 2);
3045  if (!result_.is(rax)) {
3046  __ movp(result_, rax);
3047  }
3048  call_helper.AfterCall(masm);
3049  __ jmp(&exit_);
3050 
3051  __ Abort(kUnexpectedFallthroughFromCharCodeAtSlowCase);
3052 }
3053 
3054 
3055 // -------------------------------------------------------------------------
3056 // StringCharFromCodeGenerator
3057 
3058 void StringCharFromCodeGenerator::GenerateFast(MacroAssembler* masm) {
3059  // Fast case of Heap::LookupSingleCharacterStringFromCode.
3060  __ JumpIfNotSmi(code_, &slow_case_);
3061  __ SmiCompare(code_, Smi::FromInt(String::kMaxOneByteCharCode));
3062  __ j(above, &slow_case_);
3063 
3064  __ LoadRoot(result_, Heap::kSingleCharacterStringCacheRootIndex);
3065  SmiIndex index = masm->SmiToIndex(kScratchRegister, code_, kPointerSizeLog2);
3066  __ movp(result_, FieldOperand(result_, index.reg, index.scale,
3068  __ CompareRoot(result_, Heap::kUndefinedValueRootIndex);
3069  __ j(equal, &slow_case_);
3070  __ bind(&exit_);
3071 }
3072 
3073 
3074 void StringCharFromCodeGenerator::GenerateSlow(
3075     MacroAssembler* masm,
3076  const RuntimeCallHelper& call_helper) {
3077  __ Abort(kUnexpectedFallthroughToCharFromCodeSlowCase);
3078 
3079  __ bind(&slow_case_);
3080  call_helper.BeforeCall(masm);
3081  __ Push(code_);
3082  __ CallRuntime(Runtime::kCharFromCode, 1);
3083  if (!result_.is(rax)) {
3084  __ movp(result_, rax);
3085  }
3086  call_helper.AfterCall(masm);
3087  __ jmp(&exit_);
3088 
3089  __ Abort(kUnexpectedFallthroughFromCharFromCodeSlowCase);
3090 }
3091 
3092 
3093 void StringHelper::GenerateCopyCharactersREP(MacroAssembler* masm,
3094  Register dest,
3095  Register src,
3096  Register count,
3097  bool ascii) {
3098  // Copy characters using rep movs of pointer-sized words. Copy any
3099  // remaining bytes one at a time after the rep movs (there is no
3100  // destination-alignment step; rep movs handles unaligned copies).
3101  // Count is positive int32, dest and src are character pointers.
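  // Example: copying 13 two-byte characters makes count 26 bytes; the rep
  // movs then moves 26 >> kPointerSizeLog2 = 3 pointer-sized words (24 bytes
  // on x64) and the trailing byte loop copies the remaining 26 & 7 = 2 bytes.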
3102  ASSERT(dest.is(rdi)); // rep movs destination
3103  ASSERT(src.is(rsi)); // rep movs source
3104  ASSERT(count.is(rcx)); // rep movs count
3105 
3106  // Nothing to do for zero characters.
3107  Label done;
3108  __ testl(count, count);
3109  __ j(zero, &done, Label::kNear);
3110 
3111  // Make count the number of bytes to copy.
3112  if (!ascii) {
3113  STATIC_ASSERT(2 == sizeof(uc16));
3114  __ addl(count, count);
3115  }
3116 
3117  // Don't enter the rep movs if there are fewer than kPointerSize bytes to copy.
3118  Label last_bytes;
3119  __ testl(count, Immediate(~(kPointerSize - 1)));
3120  __ j(zero, &last_bytes, Label::kNear);
3121 
3122  // Copy from rsi to rdi using the rep movs instruction.
3123  __ movl(kScratchRegister, count);
3124  __ shr(count, Immediate(kPointerSizeLog2)); // Number of pointer-sized words to copy.
3125  __ repmovsp();
3126 
3127  // Find number of bytes left.
3128  __ movl(count, kScratchRegister);
3129  __ andp(count, Immediate(kPointerSize - 1));
3130 
3131  // Check if there are more bytes to copy.
3132  __ bind(&last_bytes);
3133  __ testl(count, count);
3134  __ j(zero, &done, Label::kNear);
3135 
3136  // Copy remaining characters.
3137  Label loop;
3138  __ bind(&loop);
3139  __ movb(kScratchRegister, Operand(src, 0));
3140  __ movb(Operand(dest, 0), kScratchRegister);
3141  __ incp(src);
3142  __ incp(dest);
3143  __ decl(count);
3144  __ j(not_zero, &loop);
3145 
3146  __ bind(&done);
3147 }
3148 
3149 
3150 void StringHelper::GenerateHashInit(MacroAssembler* masm,
3151  Register hash,
3152  Register character,
3153  Register scratch) {
3154  // hash = (seed + character) + ((seed + character) << 10);
3155  __ LoadRoot(scratch, Heap::kHashSeedRootIndex);
3156  __ SmiToInteger32(scratch, scratch);
3157  __ addl(scratch, character);
3158  __ movl(hash, scratch);
3159  __ shll(scratch, Immediate(10));
3160  __ addl(hash, scratch);
3161  // hash ^= hash >> 6;
3162  __ movl(scratch, hash);
3163  __ shrl(scratch, Immediate(6));
3164  __ xorl(hash, scratch);
3165 }
3166 
3167 
3168 void StringHelper::GenerateHashAddCharacter(MacroAssembler* masm,
3169  Register hash,
3170  Register character,
3171  Register scratch) {
3172  // hash += character;
3173  __ addl(hash, character);
3174  // hash += hash << 10;
3175  __ movl(scratch, hash);
3176  __ shll(scratch, Immediate(10));
3177  __ addl(hash, scratch);
3178  // hash ^= hash >> 6;
3179  __ movl(scratch, hash);
3180  __ shrl(scratch, Immediate(6));
3181  __ xorl(hash, scratch);
3182 }
3183 
3184 
3185 void StringHelper::GenerateHashGetHash(MacroAssembler* masm,
3186  Register hash,
3187  Register scratch) {
3188  // hash += hash << 3;
3189  __ leal(hash, Operand(hash, hash, times_8, 0));
3190  // hash ^= hash >> 11;
3191  __ movl(scratch, hash);
3192  __ shrl(scratch, Immediate(11));
3193  __ xorl(hash, scratch);
3194  // hash += hash << 15;
3195  __ movl(scratch, hash);
3196  __ shll(scratch, Immediate(15));
3197  __ addl(hash, scratch);
3198 
3199  __ andl(hash, Immediate(String::kHashBitMask));
3200 
3201  // if (hash == 0) hash = 27;
3202  Label hash_not_zero;
3203  __ j(not_zero, &hash_not_zero);
3204  __ Set(hash, StringHasher::kZeroHash);
3205  __ bind(&hash_not_zero);
3206 }
3207 
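
// A C++ sketch (illustrative only) of the running string hash that the three
// helpers above emit: a Jenkins-style one-at-a-time hash seeded from the heap.
// The mask and zero-hash constants below are assumptions standing in for
// String::kHashBitMask and StringHasher::kZeroHash; the sketch requires
// length >= 1.
#include <cstddef>
#include <cstdint>

static uint32_t StringHashSketch(const uint8_t* chars, size_t length,
                                 uint32_t seed) {
  const uint32_t kHashBitMask = (1u << 30) - 1;  // Assumed hash-field width.
  const uint32_t kZeroHash = 27;                 // Replacement for a zero hash.
  // GenerateHashInit: fold the seed and the first character.
  uint32_t hash = seed + chars[0];
  hash += hash << 10;
  hash ^= hash >> 6;
  // GenerateHashAddCharacter: one round per remaining character.
  for (size_t i = 1; i < length; i++) {
    hash += chars[i];
    hash += hash << 10;
    hash ^= hash >> 6;
  }
  // GenerateHashGetHash: final avalanche, masking, and zero-hash replacement.
  hash += hash << 3;
  hash ^= hash >> 11;
  hash += hash << 15;
  hash &= kHashBitMask;
  return hash == 0 ? kZeroHash : hash;
}
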
3208 
3209 void SubStringStub::Generate(MacroAssembler* masm) {
3210  Label runtime;
3211 
3212  // Stack frame on entry.
3213  // rsp[0] : return address
3214  // rsp[8] : to
3215  // rsp[16] : from
3216  // rsp[24] : string
3217 
3218  enum SubStringStubArgumentIndices {
3219  STRING_ARGUMENT_INDEX,
3220  FROM_ARGUMENT_INDEX,
3221  TO_ARGUMENT_INDEX,
3222  SUB_STRING_ARGUMENT_COUNT
3223  };
3224 
3225  StackArgumentsAccessor args(rsp, SUB_STRING_ARGUMENT_COUNT,
3226  ARGUMENTS_DONT_CONTAIN_RECEIVER);
3227 
3228  // Make sure first argument is a string.
3229  __ movp(rax, args.GetArgumentOperand(STRING_ARGUMENT_INDEX));
3230  STATIC_ASSERT(kSmiTag == 0);
3231  __ testl(rax, Immediate(kSmiTagMask));
3232  __ j(zero, &runtime);
3233  Condition is_string = masm->IsObjectStringType(rax, rbx, rbx);
3234  __ j(NegateCondition(is_string), &runtime);
3235 
3236  // rax: string
3237  // rbx: instance type
3238  // Calculate length of sub string using the smi values.
3239  __ movp(rcx, args.GetArgumentOperand(TO_ARGUMENT_INDEX));
3240  __ movp(rdx, args.GetArgumentOperand(FROM_ARGUMENT_INDEX));
3241  __ JumpUnlessBothNonNegativeSmi(rcx, rdx, &runtime);
3242 
3243  __ SmiSub(rcx, rcx, rdx); // Overflow doesn't happen.
3245  Label not_original_string;
3246  // Shorter than original string's length: an actual substring.
3247  __ j(below, &not_original_string, Label::kNear);
3248  // Longer than original string's length or negative: unsafe arguments.
3249  __ j(above, &runtime);
3250  // Return original string.
3251  Counters* counters = masm->isolate()->counters();
3252  __ IncrementCounter(counters->sub_string_native(), 1);
3253  __ ret(SUB_STRING_ARGUMENT_COUNT * kPointerSize);
3254  __ bind(&not_original_string);
3255 
3256  Label single_char;
3257  __ SmiCompare(rcx, Smi::FromInt(1));
3258  __ j(equal, &single_char);
3259 
3260  __ SmiToInteger32(rcx, rcx);
3261 
3262  // rax: string
3263  // rbx: instance type
3264  // rcx: sub string length
3265  // rdx: from index (smi)
3266  // Deal with different string types: update the index if necessary
3267  // and put the underlying string into rdi.
3268  Label underlying_unpacked, sliced_string, seq_or_external_string;
3269  // If the string is not indirect, it can only be sequential or external.
3272  __ testb(rbx, Immediate(kIsIndirectStringMask));
3273  __ j(zero, &seq_or_external_string, Label::kNear);
3274 
3275  __ testb(rbx, Immediate(kSlicedNotConsMask));
3276  __ j(not_zero, &sliced_string, Label::kNear);
3277  // Cons string. Check whether it is flat, then fetch first part.
3278  // Flat cons strings have an empty second part.
3280  Heap::kempty_stringRootIndex);
3281  __ j(not_equal, &runtime);
3283  // Update instance type.
3286  __ jmp(&underlying_unpacked, Label::kNear);
3287 
3288  __ bind(&sliced_string);
3289  // Sliced string. Fetch parent and correct start index by offset.
3292  // Update instance type.
3295  __ jmp(&underlying_unpacked, Label::kNear);
3296 
3297  __ bind(&seq_or_external_string);
3298  // Sequential or external string. Just move string to the correct register.
3299  __ movp(rdi, rax);
3300 
3301  __ bind(&underlying_unpacked);
3302 
3303  if (FLAG_string_slices) {
3304  Label copy_routine;
3305  // rdi: underlying subject string
3306  // rbx: instance type of underlying subject string
3307  // rdx: adjusted start index (smi)
3308  // rcx: length
3309  // If coming from the make_two_character_string path, the string
3310  // is too short to be sliced anyway.
3311  __ cmpp(rcx, Immediate(SlicedString::kMinLength));
3312  // Short slice. Copy instead of slicing.
3313  __ j(less, &copy_routine);
3314  // Allocate new sliced string. At this point we do not reload the instance
3315  // type including the string encoding because we simply rely on the info
3316  // provided by the original string. It does not matter if the original
3317  // string's encoding is wrong because we always have to recheck encoding of
3318  // the newly created string's parent anyway due to externalized strings.
3319  Label two_byte_slice, set_slice_header;
3322  __ testb(rbx, Immediate(kStringEncodingMask));
3323  __ j(zero, &two_byte_slice, Label::kNear);
3324  __ AllocateAsciiSlicedString(rax, rbx, r14, &runtime);
3325  __ jmp(&set_slice_header, Label::kNear);
3326  __ bind(&two_byte_slice);
3327  __ AllocateTwoByteSlicedString(rax, rbx, r14, &runtime);
3328  __ bind(&set_slice_header);
3329  __ Integer32ToSmi(rcx, rcx);
3332  Immediate(String::kEmptyHashField));
3335  __ IncrementCounter(counters->sub_string_native(), 1);
3336  __ ret(3 * kPointerSize);
3337 
3338  __ bind(&copy_routine);
3339  }
3340 
3341  // rdi: underlying subject string
3342  // rbx: instance type of underlying subject string
3343  // rdx: adjusted start index (smi)
3344  // rcx: length
3345  // The subject string can only be an external or sequential string of either
3346  // encoding at this point.
3347  Label two_byte_sequential, sequential_string;
3350  __ testb(rbx, Immediate(kExternalStringTag));
3351  __ j(zero, &sequential_string);
3352 
3353  // Handle external string.
3354  // Rule out short external strings.
3356  __ testb(rbx, Immediate(kShortExternalStringMask));
3357  __ j(not_zero, &runtime);
3359  // Move the pointer so that offset-wise, it looks like a sequential string.
3361  __ subp(rdi, Immediate(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
3362 
3363  __ bind(&sequential_string);
3365  __ testb(rbx, Immediate(kStringEncodingMask));
3366  __ j(zero, &two_byte_sequential);
3367 
3368  // Allocate the result.
3369  __ AllocateAsciiString(rax, rcx, r11, r14, r15, &runtime);
3370 
3371  // rax: result string
3372  // rcx: result string length
3373  __ movp(r14, rsi); // rsi is used by the following code.
3374  { // Locate character of sub string start.
3375  SmiIndex smi_as_index = masm->SmiToIndex(rdx, rdx, times_1);
3376  __ leap(rsi, Operand(rdi, smi_as_index.reg, smi_as_index.scale,
3378  }
3379  // Locate first character of result.
3381 
3382  // rax: result string
3383  // rcx: result length
3384  // rdi: first character of result
3385  // rsi: character of sub string start
3386  // r14: original value of rsi
3388  __ movp(rsi, r14); // Restore rsi.
3389  __ IncrementCounter(counters->sub_string_native(), 1);
3390  __ ret(SUB_STRING_ARGUMENT_COUNT * kPointerSize);
3391 
3392  __ bind(&two_byte_sequential);
3393  // Allocate the result.
3394  __ AllocateTwoByteString(rax, rcx, r11, r14, r15, &runtime);
3395 
3396  // rax: result string
3397  // rcx: result string length
3398  __ movp(r14, rsi); // rsi is used by the following code.
3399  { // Locate character of sub string start.
3400  SmiIndex smi_as_index = masm->SmiToIndex(rdx, rdx, times_2);
3401  __ leap(rsi, Operand(rdi, smi_as_index.reg, smi_as_index.scale,
3403  }
3404  // Locate first character of result.
3406 
3407  // rax: result string
3408  // rcx: result length
3409  // rdi: first character of result
3410  // rsi: character of sub string start
3411  // r14: original value of rsi
3413  __ movp(rsi, r14); // Restore rsi.
3414  __ IncrementCounter(counters->sub_string_native(), 1);
3415  __ ret(SUB_STRING_ARGUMENT_COUNT * kPointerSize);
3416 
3417  // Just jump to runtime to create the sub string.
3418  __ bind(&runtime);
3419  __ TailCallRuntime(Runtime::kHiddenSubString, 3, 1);
3420 
3421  __ bind(&single_char);
3422  // rax: string
3423  // rbx: instance type
3424  // rcx: sub string length (smi)
3425  // rdx: from index (smi)
3426  StringCharAtGenerator generator(
3427  rax, rdx, rcx, rax, &runtime, &runtime, &runtime, STRING_INDEX_IS_NUMBER);
3428  generator.GenerateFast(masm);
3429  __ ret(SUB_STRING_ARGUMENT_COUNT * kPointerSize);
3430  generator.SkipSlow(masm, &runtime);
3431 }
3432 
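
// A rough C++ sketch (illustrative, not the stub itself) of the fast-path
// decisions SubStringStub::Generate makes above, using std::string /
// std::string_view as stand-ins for V8's sequential and sliced strings.
// kMinSliceLength is an assumed stand-in for SlicedString::kMinLength, and the
// sketch assumes 0 <= from <= to <= str.size(); the real stub also unpacks
// cons/sliced strings and falls back to the runtime for the hard cases.
#include <string>
#include <string_view>
#include <variant>

static const size_t kMinSliceLength = 13;  // Assumed SlicedString::kMinLength.

static std::variant<std::string_view, std::string> SubStringSketch(
    const std::string& str, size_t from, size_t to) {
  size_t length = to - from;
  if (length == str.size()) {
    return std::string_view(str);            // Whole string: return it unchanged.
  }
  if (length == 1) {
    return std::string(1, str[from]);        // Single character: char-at path.
  }
  if (length >= kMinSliceLength) {
    // "Slice": a new header that shares the parent's characters (the view
    // aliases str's storage, like a SlicedString points at its parent).
    return std::string_view(str).substr(from, length);
  }
  return str.substr(from, length);           // Short result: copy the characters.
}
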
3433 
3434 void StringCompareStub::GenerateFlatAsciiStringEquals(MacroAssembler* masm,
3435  Register left,
3436  Register right,
3437  Register scratch1,
3438  Register scratch2) {
3439  Register length = scratch1;
3440 
3441  // Compare lengths.
3442  Label check_zero_length;
3443  __ movp(length, FieldOperand(left, String::kLengthOffset));
3444  __ SmiCompare(length, FieldOperand(right, String::kLengthOffset));
3445  __ j(equal, &check_zero_length, Label::kNear);
3446  __ Move(rax, Smi::FromInt(NOT_EQUAL));
3447  __ ret(0);
3448 
3449  // Check if the length is zero.
3450  Label compare_chars;
3451  __ bind(&check_zero_length);
3452  STATIC_ASSERT(kSmiTag == 0);
3453  __ SmiTest(length);
3454  __ j(not_zero, &compare_chars, Label::kNear);
3455  __ Move(rax, Smi::FromInt(EQUAL));
3456  __ ret(0);
3457 
3458  // Compare characters.
3459  __ bind(&compare_chars);
3460  Label strings_not_equal;
3461  GenerateAsciiCharsCompareLoop(masm, left, right, length, scratch2,
3462  &strings_not_equal, Label::kNear);
3463 
3464  // Characters are equal.
3465  __ Move(rax, Smi::FromInt(EQUAL));
3466  __ ret(0);
3467 
3468  // Characters are not equal.
3469  __ bind(&strings_not_equal);
3470  __ Move(rax, Smi::FromInt(NOT_EQUAL));
3471  __ ret(0);
3472 }
3473 
3474 
3475 void StringCompareStub::GenerateCompareFlatAsciiStrings(MacroAssembler* masm,
3476  Register left,
3477  Register right,
3478  Register scratch1,
3479  Register scratch2,
3480  Register scratch3,
3481  Register scratch4) {
3482  // Ensure that you can always subtract a string length from a non-negative
3483  // number (e.g. another length).
3484  STATIC_ASSERT(String::kMaxLength < 0x7fffffff);
3485 
3486  // Find minimum length and length difference.
3487  __ movp(scratch1, FieldOperand(left, String::kLengthOffset));
3488  __ movp(scratch4, scratch1);
3489  __ SmiSub(scratch4,
3490  scratch4,
3492  // Register scratch4 now holds left.length - right.length.
3493  const Register length_difference = scratch4;
3494  Label left_shorter;
3495  __ j(less, &left_shorter, Label::kNear);
3496  // The right string isn't longer than the left one.
3497  // Get the right string's length by subtracting the (non-negative) difference
3498  // from the left string's length.
3499  __ SmiSub(scratch1, scratch1, length_difference);
3500  __ bind(&left_shorter);
3501  // Register scratch1 now holds Min(left.length, right.length).
3502  const Register min_length = scratch1;
3503 
3504  Label compare_lengths;
3505  // If min-length is zero, go directly to comparing lengths.
3506  __ SmiTest(min_length);
3507  __ j(zero, &compare_lengths, Label::kNear);
3508 
3509  // Compare loop.
3510  Label result_not_equal;
3511  GenerateAsciiCharsCompareLoop(masm, left, right, min_length, scratch2,
3512  &result_not_equal,
3513  // In debug-code mode, SmiTest below might push
3514  // the target label outside the near range.
3515  Label::kFar);
3516 
3517  // Completed loop without finding different characters.
3518  // Compare lengths (precomputed).
3519  __ bind(&compare_lengths);
3520  __ SmiTest(length_difference);
3521  Label length_not_equal;
3522  __ j(not_zero, &length_not_equal, Label::kNear);
3523 
3524  // Result is EQUAL.
3525  __ Move(rax, Smi::FromInt(EQUAL));
3526  __ ret(0);
3527 
3528  Label result_greater;
3529  Label result_less;
3530  __ bind(&length_not_equal);
3531  __ j(greater, &result_greater, Label::kNear);
3532  __ jmp(&result_less, Label::kNear);
3533  __ bind(&result_not_equal);
3534  // Unequal comparison of left to right, either character or length.
3535  __ j(above, &result_greater, Label::kNear);
3536  __ bind(&result_less);
3537 
3538  // Result is LESS.
3539  __ Move(rax, Smi::FromInt(LESS));
3540  __ ret(0);
3541 
3542  // Result is GREATER.
3543  __ bind(&result_greater);
3544  __ Move(rax, Smi::FromInt(GREATER));
3545  __ ret(0);
3546 }
3547 
3548 
3549 void StringCompareStub::GenerateAsciiCharsCompareLoop(
3550  MacroAssembler* masm,
3551  Register left,
3552  Register right,
3553  Register length,
3554  Register scratch,
3555  Label* chars_not_equal,
3556  Label::Distance near_jump) {
3557  // Change index to run from -length to -1 by adding length to string
3558  // start. This means that the loop ends when the index reaches zero, which
3559  // doesn't need an additional compare.
3560  __ SmiToInteger32(length, length);
3561  __ leap(left,
3563  __ leap(right,
3565  __ negq(length);
3566  Register index = length; // index = -length;
3567 
3568  // Compare loop.
3569  Label loop;
3570  __ bind(&loop);
3571  __ movb(scratch, Operand(left, index, times_1, 0));
3572  __ cmpb(scratch, Operand(right, index, times_1, 0));
3573  __ j(not_equal, chars_not_equal, near_jump);
3574  __ incq(index);
3575  __ j(not_zero, &loop);
3576 }
3577 
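
// A small C++ sketch of the flat ASCII comparison implemented by the two
// helpers above: compare the first min(left, right) characters, then fall back
// to the length difference when no differing character is found. The -1/0/1
// return values mirror the LESS/EQUAL/GREATER smis the stub produces; the
// function itself is an illustrative stand-in, not V8 code.
#include <cstddef>
#include <cstdint>

static int CompareFlatAsciiSketch(const uint8_t* left, size_t left_len,
                                  const uint8_t* right, size_t right_len) {
  size_t min_length = left_len < right_len ? left_len : right_len;
  for (size_t i = 0; i < min_length; i++) {
    if (left[i] != right[i]) {
      return left[i] < right[i] ? -1 : 1;  // First difference decides the order.
    }
  }
  if (left_len == right_len) return 0;     // Same characters, same length: EQUAL.
  return left_len < right_len ? -1 : 1;    // Otherwise the shorter string is LESS.
}
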
3578 
3579 void StringCompareStub::Generate(MacroAssembler* masm) {
3580  Label runtime;
3581 
3582  // Stack frame on entry.
3583  // rsp[0] : return address
3584  // rsp[8] : right string
3585  // rsp[16] : left string
3586 
3587  StackArgumentsAccessor args(rsp, 2, ARGUMENTS_DONT_CONTAIN_RECEIVER);
3588  __ movp(rdx, args.GetArgumentOperand(0)); // left
3589  __ movp(rax, args.GetArgumentOperand(1)); // right
3590 
3591  // Check for identity.
3592  Label not_same;
3593  __ cmpp(rdx, rax);
3594  __ j(not_equal, &not_same, Label::kNear);
3595  __ Move(rax, Smi::FromInt(EQUAL));
3596  Counters* counters = masm->isolate()->counters();
3597  __ IncrementCounter(counters->string_compare_native(), 1);
3598  __ ret(2 * kPointerSize);
3599 
3600  __ bind(&not_same);
3601 
3602  // Check that both are sequential ASCII strings.
3603  __ JumpIfNotBothSequentialAsciiStrings(rdx, rax, rcx, rbx, &runtime);
3604 
3605  // Inline comparison of ASCII strings.
3606  __ IncrementCounter(counters->string_compare_native(), 1);
3607  // Drop arguments from the stack
3608  __ PopReturnAddressTo(rcx);
3609  __ addp(rsp, Immediate(2 * kPointerSize));
3610  __ PushReturnAddressFrom(rcx);
3612 
3613  // Call the runtime; it returns -1 (less), 0 (equal), or 1 (greater)
3614  // tagged as a small integer.
3615  __ bind(&runtime);
3616  __ TailCallRuntime(Runtime::kHiddenStringCompare, 2, 1);
3617 }
3618 
3619 
3620 void ArrayPushStub::Generate(MacroAssembler* masm) {
3621  int argc = arguments_count();
3622 
3623  StackArgumentsAccessor args(rsp, argc);
3624  if (argc == 0) {
3625  // Noop, return the length.
3627  __ ret((argc + 1) * kPointerSize);
3628  return;
3629  }
3630 
3631  Isolate* isolate = masm->isolate();
3632 
3633  if (argc != 1) {
3634  __ TailCallExternalReference(
3635  ExternalReference(Builtins::c_ArrayPush, isolate), argc + 1, 1);
3636  return;
3637  }
3638 
3639  Label call_builtin, attempt_to_grow_elements, with_write_barrier;
3640 
3641  // Get the elements array of the object.
3643 
3644  if (IsFastSmiOrObjectElementsKind(elements_kind())) {
3645  // Check that the elements are in fast mode and writable.
3647  isolate->factory()->fixed_array_map());
3648  __ j(not_equal, &call_builtin);
3649  }
3650 
3651  // Get the array's length into rax and calculate new length.
3652  __ SmiToInteger32(rax, FieldOperand(rdx, JSArray::kLengthOffset));
3654  __ addl(rax, Immediate(argc));
3655 
3656  // Get the elements' length into rcx.
3657  __ SmiToInteger32(rcx, FieldOperand(rdi, FixedArray::kLengthOffset));
3658 
3659  // Check if we could survive without allocation.
3660  __ cmpl(rax, rcx);
3661 
3662  if (IsFastSmiOrObjectElementsKind(elements_kind())) {
3663  __ j(greater, &attempt_to_grow_elements);
3664 
3665  // Check if value is a smi.
3666  __ movp(rcx, args.GetArgumentOperand(1));
3667  __ JumpIfNotSmi(rcx, &with_write_barrier);
3668 
3669  // Store the value.
3670  __ movp(FieldOperand(rdi,
3671  rax,
3673  FixedArray::kHeaderSize - argc * kPointerSize),
3674  rcx);
3675  } else {
3676  __ j(greater, &call_builtin);
3677 
3678  __ movp(rcx, args.GetArgumentOperand(1));
3679  __ StoreNumberToDoubleElements(
3680  rcx, rdi, rax, xmm0, &call_builtin, argc * kDoubleSize);
3681  }
3682 
3683  // Save new length.
3684  __ Integer32ToSmiField(FieldOperand(rdx, JSArray::kLengthOffset), rax);
3685 
3686  __ Integer32ToSmi(rax, rax); // Return new length as smi.
3687  __ ret((argc + 1) * kPointerSize);
3688 
3689  if (IsFastDoubleElementsKind(elements_kind())) {
3690  __ bind(&call_builtin);
3691  __ TailCallExternalReference(
3692  ExternalReference(Builtins::c_ArrayPush, isolate), argc + 1, 1);
3693  return;
3694  }
3695 
3696  __ bind(&with_write_barrier);
3697 
3698  if (IsFastSmiElementsKind(elements_kind())) {
3699  if (FLAG_trace_elements_transitions) __ jmp(&call_builtin);
3700 
3702  isolate->factory()->heap_number_map());
3703  __ j(equal, &call_builtin);
3704 
3705  ElementsKind target_kind = IsHoleyElementsKind(elements_kind())
3710  const int header_size = FixedArrayBase::kHeaderSize;
3711  // Verify that the object can be transitioned in place.
3712  const int origin_offset = header_size + elements_kind() * kPointerSize;
3713  __ movp(rdi, FieldOperand(rbx, origin_offset));
3715  __ j(not_equal, &call_builtin);
3716 
3717  const int target_offset = header_size + target_kind * kPointerSize;
3718  __ movp(rbx, FieldOperand(rbx, target_offset));
3722  }
3723 
3724  // Save new length.
3725  __ Integer32ToSmiField(FieldOperand(rdx, JSArray::kLengthOffset), rax);
3726 
3727  // Store the value.
3728  __ leap(rdx, FieldOperand(rdi,
3730  FixedArray::kHeaderSize - argc * kPointerSize));
3731  __ movp(Operand(rdx, 0), rcx);
3732 
3733  __ RecordWrite(rdi, rdx, rcx, kDontSaveFPRegs, EMIT_REMEMBERED_SET,
3734  OMIT_SMI_CHECK);
3735 
3736  __ Integer32ToSmi(rax, rax); // Return new length as smi.
3737  __ ret((argc + 1) * kPointerSize);
3738 
3739  __ bind(&attempt_to_grow_elements);
3740  if (!FLAG_inline_new) {
3741  __ bind(&call_builtin);
3742  __ TailCallExternalReference(
3743  ExternalReference(Builtins::c_ArrayPush, isolate), argc + 1, 1);
3744  return;
3745  }
3746 
3747  __ movp(rbx, args.GetArgumentOperand(1));
3748  // Growing elements that are SMI-only requires special handling in case the
3749  // new element is non-Smi. For now, delegate to the builtin.
3750  Label no_fast_elements_check;
3751  __ JumpIfSmi(rbx, &no_fast_elements_check);
3753  __ CheckFastObjectElements(rcx, &call_builtin, Label::kFar);
3754  __ bind(&no_fast_elements_check);
3755 
3756  ExternalReference new_space_allocation_top =
3757  ExternalReference::new_space_allocation_top_address(isolate);
3758  ExternalReference new_space_allocation_limit =
3759  ExternalReference::new_space_allocation_limit_address(isolate);
3760 
3761  const int kAllocationDelta = 4;
3762  ASSERT(kAllocationDelta >= argc);
3763  // Load top.
3764  __ Load(rcx, new_space_allocation_top);
3765 
3766  // Check if it's the end of elements.
3767  __ leap(rdx, FieldOperand(rdi,
3769  FixedArray::kHeaderSize - argc * kPointerSize));
3770  __ cmpp(rdx, rcx);
3771  __ j(not_equal, &call_builtin);
3772  __ addp(rcx, Immediate(kAllocationDelta * kPointerSize));
3773  Operand limit_operand = masm->ExternalOperand(new_space_allocation_limit);
3774  __ cmpp(rcx, limit_operand);
3775  __ j(above, &call_builtin);
3776 
3777  // We fit and could grow elements.
3778  __ Store(new_space_allocation_top, rcx);
3779 
3780  // Push the argument...
3781  __ movp(Operand(rdx, 0), rbx);
3782  // ... and fill the rest with holes.
3783  __ LoadRoot(kScratchRegister, Heap::kTheHoleValueRootIndex);
3784  for (int i = 1; i < kAllocationDelta; i++) {
3785  __ movp(Operand(rdx, i * kPointerSize), kScratchRegister);
3786  }
3787 
3788  if (IsFastObjectElementsKind(elements_kind())) {
3789  // We know the elements array is in new space so we don't need the
3790  // remembered set, but we just pushed a value onto it so we may have to tell
3791  // the incremental marker to rescan the object that we just grew. We don't
3792  // need to worry about the holes because they are in old space and already
3793  // marked black.
3794  __ RecordWrite(rdi, rdx, rbx, kDontSaveFPRegs, OMIT_REMEMBERED_SET);
3795  }
3796 
3797  // Restore receiver to rdx as finish sequence assumes it's here.
3798  __ movp(rdx, args.GetReceiverOperand());
3799 
3800  // Increment element's and array's sizes.
3801  __ SmiAddConstant(FieldOperand(rdi, FixedArray::kLengthOffset),
3802  Smi::FromInt(kAllocationDelta));
3803 
3804  // Make new length a smi before returning it.
3805  __ Integer32ToSmi(rax, rax);
3807 
3808  __ ret((argc + 1) * kPointerSize);
3809 
3810  __ bind(&call_builtin);
3811  __ TailCallExternalReference(
3812  ExternalReference(Builtins::c_ArrayPush, isolate), argc + 1, 1);
3813 }
3814 
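
// A C++ sketch of the in-place growth attempted by the code above: the push
// can extend the elements backing store only when the array's elements are the
// most recent new-space allocation, i.e. the end of the elements coincides
// with the allocation top and bumping the top stays below the limit. The
// function name and pointer parameters are hypothetical; only the check-and-
// bump structure mirrors the stub.
#include <cstdint>

static const int kAllocationDeltaSketch = 4;  // Extra slots grabbed per growth.

static bool TryGrowElementsInPlaceSketch(uintptr_t elements_end,
                                         uintptr_t* new_space_top,
                                         uintptr_t new_space_limit,
                                         int pointer_size) {
  if (elements_end != *new_space_top) return false;  // Not the last allocation.
  uintptr_t new_top = *new_space_top + kAllocationDeltaSketch * pointer_size;
  if (new_top > new_space_limit) return false;       // No room left in new space.
  *new_space_top = new_top;                          // Claim the extra slots.
  // The caller then stores the pushed value in the first new slot, fills the
  // rest with the-hole, and bumps the elements length by the delta.
  return true;
}
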
3815 
3816 void BinaryOpICWithAllocationSiteStub::Generate(MacroAssembler* masm) {
3817  // ----------- S t a t e -------------
3818  // -- rdx : left
3819  // -- rax : right
3820  // -- rsp[0] : return address
3821  // -----------------------------------
3822  Isolate* isolate = masm->isolate();
3823 
3824  // Load rcx with the allocation site. We stick an undefined dummy value here
3825  // and replace it with the real allocation site later when we instantiate this
3826  // stub in BinaryOpICWithAllocationSiteStub::GetCodeCopyFromTemplate().
3827  __ Move(rcx, handle(isolate->heap()->undefined_value()));
3828 
3829  // Make sure that we actually patched the allocation site.
3830  if (FLAG_debug_code) {
3831  __ testb(rcx, Immediate(kSmiTagMask));
3832  __ Assert(not_equal, kExpectedAllocationSite);
3834  isolate->factory()->allocation_site_map());
3835  __ Assert(equal, kExpectedAllocationSite);
3836  }
3837 
3838  // Tail call into the stub that handles binary operations with allocation
3839  // sites.
3840  BinaryOpWithAllocationSiteStub stub(state_);
3841  __ TailCallStub(&stub);
3842 }
3843 
3844 
3845 void ICCompareStub::GenerateSmis(MacroAssembler* masm) {
3846  ASSERT(state_ == CompareIC::SMI);
3847  Label miss;
3848  __ JumpIfNotBothSmi(rdx, rax, &miss, Label::kNear);
3849 
3850  if (GetCondition() == equal) {
3851  // For equality we do not care about the sign of the result.
3852  __ subp(rax, rdx);
3853  } else {
3854  Label done;
3855  __ subp(rdx, rax);
3856  __ j(no_overflow, &done, Label::kNear);
3857  // Correct sign of result in case of overflow.
3858  __ notp(rdx);
3859  __ bind(&done);
3860  __ movp(rax, rdx);
3861  }
3862  __ ret(0);
3863 
3864  __ bind(&miss);
3865  GenerateMiss(masm);
3866 }
3867 
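
// A C++ sketch of the smi comparison above: the result is left - right, with
// the bits flipped when the subtraction overflows so that the caller can still
// branch on the sign (which is what the notp above does). Operands are plain
// integers here for clarity; the stub works directly on smi-tagged values.
#include <cstdint>

static int64_t CompareSmisSketch(int64_t left, int64_t right) {
  uint64_t diff = static_cast<uint64_t>(left) - static_cast<uint64_t>(right);
  int64_t result = static_cast<int64_t>(diff);
  // Signed overflow occurred iff the operands have different signs and the
  // result's sign differs from left's sign; flip the bits to restore the sign.
  bool overflow = ((left ^ right) < 0) && ((left ^ result) < 0);
  if (overflow) result = ~result;
  return result;  // Negative: left < right, zero: equal, positive: left > right.
}
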
3868 
3869 void ICCompareStub::GenerateNumbers(MacroAssembler* masm) {
3870  ASSERT(state_ == CompareIC::NUMBER);
3871 
3872  Label generic_stub;
3873  Label unordered, maybe_undefined1, maybe_undefined2;
3874  Label miss;
3875 
3876  if (left_ == CompareIC::SMI) {
3877  __ JumpIfNotSmi(rdx, &miss);
3878  }
3879  if (right_ == CompareIC::SMI) {
3880  __ JumpIfNotSmi(rax, &miss);
3881  }
3882 
3883  // Load left and right operand.
3884  Label done, left, left_smi, right_smi;
3885  __ JumpIfSmi(rax, &right_smi, Label::kNear);
3886  __ CompareMap(rax, masm->isolate()->factory()->heap_number_map());
3887  __ j(not_equal, &maybe_undefined1, Label::kNear);
3889  __ jmp(&left, Label::kNear);
3890  __ bind(&right_smi);
3891  __ SmiToInteger32(rcx, rax); // Can't clobber rax yet.
3892  __ Cvtlsi2sd(xmm1, rcx);
3893 
3894  __ bind(&left);
3895  __ JumpIfSmi(rdx, &left_smi, Label::kNear);
3896  __ CompareMap(rdx, masm->isolate()->factory()->heap_number_map());
3897  __ j(not_equal, &maybe_undefined2, Label::kNear);
3899  __ jmp(&done);
3900  __ bind(&left_smi);
3901  __ SmiToInteger32(rcx, rdx); // Can't clobber rdx yet.
3902  __ Cvtlsi2sd(xmm0, rcx);
3903 
3904  __ bind(&done);
3905  // Compare operands
3906  __ ucomisd(xmm0, xmm1);
3907 
3908  // Don't base result on EFLAGS when a NaN is involved.
3909  __ j(parity_even, &unordered, Label::kNear);
3910 
3911  // Return a result of -1, 0, or 1, based on EFLAGS.
3912  // Use mov rather than xor, because xor would clobber the flags register.
3913  __ movl(rax, Immediate(0));
3914  __ movl(rcx, Immediate(0));
3915  __ setcc(above, rax); // Add one to zero if carry clear and not equal.
3916  __ sbbp(rax, rcx); // Subtract one if below (aka. carry set).
3917  __ ret(0);
3918 
3919  __ bind(&unordered);
3920  __ bind(&generic_stub);
3923  __ jmp(stub.GetCode(masm->isolate()), RelocInfo::CODE_TARGET);
3924 
3925  __ bind(&maybe_undefined1);
3927  __ Cmp(rax, masm->isolate()->factory()->undefined_value());
3928  __ j(not_equal, &miss);
3929  __ JumpIfSmi(rdx, &unordered);
3930  __ CmpObjectType(rdx, HEAP_NUMBER_TYPE, rcx);
3931  __ j(not_equal, &maybe_undefined2, Label::kNear);
3932  __ jmp(&unordered);
3933  }
3934 
3935  __ bind(&maybe_undefined2);
3937  __ Cmp(rdx, masm->isolate()->factory()->undefined_value());
3938  __ j(equal, &unordered);
3939  }
3940 
3941  __ bind(&miss);
3942  GenerateMiss(masm);
3943 }
3944 
3945 
3946 void ICCompareStub::GenerateInternalizedStrings(MacroAssembler* masm) {
3948  ASSERT(GetCondition() == equal);
3949 
3950  // Registers containing left and right operands respectively.
3951  Register left = rdx;
3952  Register right = rax;
3953  Register tmp1 = rcx;
3954  Register tmp2 = rbx;
3955 
3956  // Check that both operands are heap objects.
3957  Label miss;
3958  Condition cond = masm->CheckEitherSmi(left, right, tmp1);
3959  __ j(cond, &miss, Label::kNear);
3960 
3961  // Check that both operands are internalized strings.
3962  __ movp(tmp1, FieldOperand(left, HeapObject::kMapOffset));
3963  __ movp(tmp2, FieldOperand(right, HeapObject::kMapOffset));
3964  __ movzxbp(tmp1, FieldOperand(tmp1, Map::kInstanceTypeOffset));
3965  __ movzxbp(tmp2, FieldOperand(tmp2, Map::kInstanceTypeOffset));
3967  __ orp(tmp1, tmp2);
3968  __ testb(tmp1, Immediate(kIsNotStringMask | kIsNotInternalizedMask));
3969  __ j(not_zero, &miss, Label::kNear);
3970 
3971  // Internalized strings are compared by identity.
3972  Label done;
3973  __ cmpp(left, right);
3974  // Make sure rax is non-zero. At this point input operands are
3975  // guaranteed to be non-zero.
3976  ASSERT(right.is(rax));
3977  __ j(not_equal, &done, Label::kNear);
3978  STATIC_ASSERT(EQUAL == 0);
3979  STATIC_ASSERT(kSmiTag == 0);
3980  __ Move(rax, Smi::FromInt(EQUAL));
3981  __ bind(&done);
3982  __ ret(0);
3983 
3984  __ bind(&miss);
3985  GenerateMiss(masm);
3986 }
3987 
3988 
3989 void ICCompareStub::GenerateUniqueNames(MacroAssembler* masm) {
3990  ASSERT(state_ == CompareIC::UNIQUE_NAME);
3991  ASSERT(GetCondition() == equal);
3992 
3993  // Registers containing left and right operands respectively.
3994  Register left = rdx;
3995  Register right = rax;
3996  Register tmp1 = rcx;
3997  Register tmp2 = rbx;
3998 
3999  // Check that both operands are heap objects.
4000  Label miss;
4001  Condition cond = masm->CheckEitherSmi(left, right, tmp1);
4002  __ j(cond, &miss, Label::kNear);
4003 
4004  // Check that both operands are unique names. This leaves the instance
4005  // types loaded in tmp1 and tmp2.
4006  __ movp(tmp1, FieldOperand(left, HeapObject::kMapOffset));
4007  __ movp(tmp2, FieldOperand(right, HeapObject::kMapOffset));
4008  __ movzxbp(tmp1, FieldOperand(tmp1, Map::kInstanceTypeOffset));
4009  __ movzxbp(tmp2, FieldOperand(tmp2, Map::kInstanceTypeOffset));
4010 
4011  __ JumpIfNotUniqueName(tmp1, &miss, Label::kNear);
4012  __ JumpIfNotUniqueName(tmp2, &miss, Label::kNear);
4013 
4014  // Unique names are compared by identity.
4015  Label done;
4016  __ cmpp(left, right);
4017  // Make sure rax is non-zero. At this point input operands are
4018  // guaranteed to be non-zero.
4019  ASSERT(right.is(rax));
4020  __ j(not_equal, &done, Label::kNear);
4021  STATIC_ASSERT(EQUAL == 0);
4022  STATIC_ASSERT(kSmiTag == 0);
4023  __ Move(rax, Smi::FromInt(EQUAL));
4024  __ bind(&done);
4025  __ ret(0);
4026 
4027  __ bind(&miss);
4028  GenerateMiss(masm);
4029 }
4030 
4031 
4032 void ICCompareStub::GenerateStrings(MacroAssembler* masm) {
4033  ASSERT(state_ == CompareIC::STRING);
4034  Label miss;
4035 
4036  bool equality = Token::IsEqualityOp(op_);
4037 
4038  // Registers containing left and right operands respectively.
4039  Register left = rdx;
4040  Register right = rax;
4041  Register tmp1 = rcx;
4042  Register tmp2 = rbx;
4043  Register tmp3 = rdi;
4044 
4045  // Check that both operands are heap objects.
4046  Condition cond = masm->CheckEitherSmi(left, right, tmp1);
4047  __ j(cond, &miss);
4048 
4049  // Check that both operands are strings. This leaves the instance
4050  // types loaded in tmp1 and tmp2.
4051  __ movp(tmp1, FieldOperand(left, HeapObject::kMapOffset));
4052  __ movp(tmp2, FieldOperand(right, HeapObject::kMapOffset));
4053  __ movzxbp(tmp1, FieldOperand(tmp1, Map::kInstanceTypeOffset));
4054  __ movzxbp(tmp2, FieldOperand(tmp2, Map::kInstanceTypeOffset));
4055  __ movp(tmp3, tmp1);
4057  __ orp(tmp3, tmp2);
4058  __ testb(tmp3, Immediate(kIsNotStringMask));
4059  __ j(not_zero, &miss);
4060 
4061  // Fast check for identical strings.
4062  Label not_same;
4063  __ cmpp(left, right);
4064  __ j(not_equal, &not_same, Label::kNear);
4065  STATIC_ASSERT(EQUAL == 0);
4066  STATIC_ASSERT(kSmiTag == 0);
4067  __ Move(rax, Smi::FromInt(EQUAL));
4068  __ ret(0);
4069 
4070  // Handle not identical strings.
4071  __ bind(&not_same);
4072 
4073  // Check that both strings are internalized strings. If they are, we're done
4074  // because we already know they are not identical. We also know they are both
4075  // strings.
4076  if (equality) {
4077  Label do_compare;
4079  __ orp(tmp1, tmp2);
4080  __ testb(tmp1, Immediate(kIsNotInternalizedMask));
4081  __ j(not_zero, &do_compare, Label::kNear);
4082  // Make sure rax is non-zero. At this point input operands are
4083  // guaranteed to be non-zero.
4084  ASSERT(right.is(rax));
4085  __ ret(0);
4086  __ bind(&do_compare);
4087  }
4088 
4089  // Check that both strings are sequential ASCII.
4090  Label runtime;
4091  __ JumpIfNotBothSequentialAsciiStrings(left, right, tmp1, tmp2, &runtime);
4092 
4093  // Compare flat ASCII strings. Returns when done.
4094  if (equality) {
4096  masm, left, right, tmp1, tmp2);
4097  } else {
4099  masm, left, right, tmp1, tmp2, tmp3, kScratchRegister);
4100  }
4101 
4102  // Handle more complex cases in runtime.
4103  __ bind(&runtime);
4104  __ PopReturnAddressTo(tmp1);
4105  __ Push(left);
4106  __ Push(right);
4107  __ PushReturnAddressFrom(tmp1);
4108  if (equality) {
4109  __ TailCallRuntime(Runtime::kStringEquals, 2, 1);
4110  } else {
4111  __ TailCallRuntime(Runtime::kHiddenStringCompare, 2, 1);
4112  }
4113 
4114  __ bind(&miss);
4115  GenerateMiss(masm);
4116 }
4117 
4118 
4119 void ICCompareStub::GenerateObjects(MacroAssembler* masm) {
4120  ASSERT(state_ == CompareIC::OBJECT);
4121  Label miss;
4122  Condition either_smi = masm->CheckEitherSmi(rdx, rax);
4123  __ j(either_smi, &miss, Label::kNear);
4124 
4125  __ CmpObjectType(rax, JS_OBJECT_TYPE, rcx);
4126  __ j(not_equal, &miss, Label::kNear);
4127  __ CmpObjectType(rdx, JS_OBJECT_TYPE, rcx);
4128  __ j(not_equal, &miss, Label::kNear);
4129 
4130  ASSERT(GetCondition() == equal);
4131  __ subp(rax, rdx);
4132  __ ret(0);
4133 
4134  __ bind(&miss);
4135  GenerateMiss(masm);
4136 }
4137 
4138 
4139 void ICCompareStub::GenerateKnownObjects(MacroAssembler* masm) {
4140  Label miss;
4141  Condition either_smi = masm->CheckEitherSmi(rdx, rax);
4142  __ j(either_smi, &miss, Label::kNear);
4143 
4146  __ Cmp(rcx, known_map_);
4147  __ j(not_equal, &miss, Label::kNear);
4148  __ Cmp(rbx, known_map_);
4149  __ j(not_equal, &miss, Label::kNear);
4150 
4151  __ subp(rax, rdx);
4152  __ ret(0);
4153 
4154  __ bind(&miss);
4155  GenerateMiss(masm);
4156 }
4157 
4158 
4159 void ICCompareStub::GenerateMiss(MacroAssembler* masm) {
4160  {
4161  // Call the runtime system in a fresh internal frame.
4162  ExternalReference miss =
4163  ExternalReference(IC_Utility(IC::kCompareIC_Miss), masm->isolate());
4164 
4165  FrameScope scope(masm, StackFrame::INTERNAL);
4166  __ Push(rdx);
4167  __ Push(rax);
4168  __ Push(rdx);
4169  __ Push(rax);
4170  __ Push(Smi::FromInt(op_));
4171  __ CallExternalReference(miss, 3);
4172 
4173  // Compute the entry point of the rewritten stub.
4175  __ Pop(rax);
4176  __ Pop(rdx);
4177  }
4178 
4179  // Do a tail call to the rewritten stub.
4180  __ jmp(rdi);
4181 }
4182 
4183 
4184 void NameDictionaryLookupStub::GenerateNegativeLookup(MacroAssembler* masm,
4185  Label* miss,
4186  Label* done,
4187  Register properties,
4188  Handle<Name> name,
4189  Register r0) {
4190  ASSERT(name->IsUniqueName());
4191  // If names of slots in range from 1 to kProbes - 1 for the hash value are
4192  // not equal to the name and kProbes-th slot is not used (its name is the
4193  // undefined value), it guarantees the hash table doesn't contain the
4194  // property. It's true even if some slots represent deleted properties
4195  // (their names are the hole value).
4196  for (int i = 0; i < kInlinedProbes; i++) {
4197  // r0 points to properties hash.
4198  // Compute the masked index: (hash + i + i * i) & mask.
4199  Register index = r0;
4200  // Capacity is smi 2^n.
4201  __ SmiToInteger32(index, FieldOperand(properties, kCapacityOffset));
4202  __ decl(index);
4203  __ andp(index,
4204  Immediate(name->Hash() + NameDictionary::GetProbeOffset(i)));
4205 
4206  // Scale the index by multiplying by the entry size.
4208  __ leap(index, Operand(index, index, times_2, 0)); // index *= 3.
4209 
4210  Register entity_name = r0;
4211  // Having undefined at this place means the name is not contained.
4212  ASSERT_EQ(kSmiTagSize, 1);
4213  __ movp(entity_name, Operand(properties,
4214  index,
4216  kElementsStartOffset - kHeapObjectTag));
4217  __ Cmp(entity_name, masm->isolate()->factory()->undefined_value());
4218  __ j(equal, done);
4219 
4220  // Stop if found the property.
4221  __ Cmp(entity_name, Handle<Name>(name));
4222  __ j(equal, miss);
4223 
4224  Label good;
4225  // Check for the hole and skip.
4226  __ CompareRoot(entity_name, Heap::kTheHoleValueRootIndex);
4227  __ j(equal, &good, Label::kNear);
4228 
4229  // Check if the entry name is not a unique name.
4230  __ movp(entity_name, FieldOperand(entity_name, HeapObject::kMapOffset));
4231  __ JumpIfNotUniqueName(FieldOperand(entity_name, Map::kInstanceTypeOffset),
4232  miss);
4233  __ bind(&good);
4234  }
4235 
4236  NameDictionaryLookupStub stub(properties, r0, r0, NEGATIVE_LOOKUP);
4237  __ Push(Handle<Object>(name));
4238  __ Push(Immediate(name->Hash()));
4239  __ CallStub(&stub);
4240  __ testp(r0, r0);
4241  __ j(not_zero, miss);
4242  __ jmp(done);
4243 }
4244 
4245 
4246 // Probe the name dictionary in the |elements| register. Jump to the
4247 // |done| label if a property with the given name is found leaving the
4248 // index into the dictionary in |r1|. Jump to the |miss| label
4249 // otherwise.
4250 void NameDictionaryLookupStub::GeneratePositiveLookup(MacroAssembler* masm,
4251  Label* miss,
4252  Label* done,
4253  Register elements,
4254  Register name,
4255  Register r0,
4256  Register r1) {
4257  ASSERT(!elements.is(r0));
4258  ASSERT(!elements.is(r1));
4259  ASSERT(!name.is(r0));
4260  ASSERT(!name.is(r1));
4261 
4262  __ AssertName(name);
4263 
4264  __ SmiToInteger32(r0, FieldOperand(elements, kCapacityOffset));
4265  __ decl(r0);
4266 
4267  for (int i = 0; i < kInlinedProbes; i++) {
4268  // Compute the masked index: (hash + i + i * i) & mask.
4269  __ movl(r1, FieldOperand(name, Name::kHashFieldOffset));
4270  __ shrl(r1, Immediate(Name::kHashShift));
4271  if (i > 0) {
4272  __ addl(r1, Immediate(NameDictionary::GetProbeOffset(i)));
4273  }
4274  __ andp(r1, r0);
4275 
4276  // Scale the index by multiplying by the entry size.
4278  __ leap(r1, Operand(r1, r1, times_2, 0)); // r1 = r1 * 3
4279 
4280  // Check if the key is identical to the name.
4281  __ cmpp(name, Operand(elements, r1, times_pointer_size,
4282  kElementsStartOffset - kHeapObjectTag));
4283  __ j(equal, done);
4284  }
4285 
4286  NameDictionaryLookupStub stub(elements, r0, r1, POSITIVE_LOOKUP);
4287  __ Push(name);
4288  __ movl(r0, FieldOperand(name, Name::kHashFieldOffset));
4289  __ shrl(r0, Immediate(Name::kHashShift));
4290  __ Push(r0);
4291  __ CallStub(&stub);
4292 
4293  __ testp(r0, r0);
4294  __ j(zero, miss);
4295  __ jmp(done);
4296 }
4297 
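
// A C++ sketch of the probing scheme used by the two lookup helpers above:
// quadratic probing over a power-of-two capacity, with three array slots
// (key, value, details) per entry. The probe offset follows the
// "(hash + i + i*i) & mask" comments in the generated code; nullptr stands in
// for the undefined sentinel, and keys.size() is assumed to be
// capacity * kEntrySize. The real stub only probes a fixed number of times
// inline before calling the full lookup stub.
#include <cstdint>
#include <vector>

static const int kEntrySizeSketch = 3;  // key, value, details per entry.

static int ProbeOffsetSketch(int i) { return (i + i * i) >> 1; }  // Assumed sequence.

// Returns the entry index if |key| is found, or -1 if it is not in the table.
static int NameDictionaryProbeSketch(const std::vector<const void*>& keys,
                                     uint32_t capacity,  // Power of two.
                                     uint32_t hash, const void* key) {
  uint32_t mask = capacity - 1;
  for (uint32_t i = 0; i < capacity; i++) {
    uint32_t index = (hash + ProbeOffsetSketch(static_cast<int>(i))) & mask;
    const void* candidate = keys[index * kEntrySizeSketch];
    if (candidate == key) return static_cast<int>(index);  // Identity match.
    if (candidate == nullptr) return -1;  // Undefined sentinel: not present.
  }
  return -1;
}
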
4298 
4299 void NameDictionaryLookupStub::Generate(MacroAssembler* masm) {
4300  // This stub overrides SometimesSetsUpAFrame() to return false. That means
4301  // we cannot call anything that could cause a GC from this stub.
4302  // Stack frame on entry:
4303  // rsp[0 * kPointerSize] : return address.
4304  // rsp[1 * kPointerSize] : key's hash.
4305  // rsp[2 * kPointerSize] : key.
4306  // Registers:
4307  // dictionary_: NameDictionary to probe.
4308  // result_: used as scratch.
4309  // index_: will hold an index of entry if lookup is successful.
4310  // might alias with result_.
4311  // Returns:
4312  // result_ is zero if lookup failed, non zero otherwise.
4313 
4314  Label in_dictionary, maybe_in_dictionary, not_in_dictionary;
4315 
4316  Register scratch = result_;
4317 
4318  __ SmiToInteger32(scratch, FieldOperand(dictionary_, kCapacityOffset));
4319  __ decl(scratch);
4320  __ Push(scratch);
4321 
4322  // If names of slots in range from 1 to kProbes - 1 for the hash value are
4323  // not equal to the name and kProbes-th slot is not used (its name is the
4324  // undefined value), it guarantees the hash table doesn't contain the
4325  // property. It's true even if some slots represent deleted properties
4326  // (their names are the hole value).
4327  StackArgumentsAccessor args(rsp, 2, ARGUMENTS_DONT_CONTAIN_RECEIVER,
4328  kPointerSize);
4329  for (int i = kInlinedProbes; i < kTotalProbes; i++) {
4330  // Compute the masked index: (hash + i + i * i) & mask.
4331  __ movp(scratch, args.GetArgumentOperand(1));
4332  if (i > 0) {
4333  __ addl(scratch, Immediate(NameDictionary::GetProbeOffset(i)));
4334  }
4335  __ andp(scratch, Operand(rsp, 0));
4336 
4337  // Scale the index by multiplying by the entry size.
4339  __ leap(index_, Operand(scratch, scratch, times_2, 0)); // index *= 3.
4340 
4341  // Having undefined at this place means the name is not contained.
4342  __ movp(scratch, Operand(dictionary_,
4343  index_,
4345  kElementsStartOffset - kHeapObjectTag));
4346 
4347  __ Cmp(scratch, masm->isolate()->factory()->undefined_value());
4348  __ j(equal, &not_in_dictionary);
4349 
4350  // Stop if found the property.
4351  __ cmpp(scratch, args.GetArgumentOperand(0));
4352  __ j(equal, &in_dictionary);
4353 
4354  if (i != kTotalProbes - 1 && mode_ == NEGATIVE_LOOKUP) {
4355  // If we hit a key that is not a unique name during negative
4356  // lookup we have to bailout as this key might be equal to the
4357  // key we are looking for.
4358 
4359  // Check if the entry name is not a unique name.
4360  __ movp(scratch, FieldOperand(scratch, HeapObject::kMapOffset));
4361  __ JumpIfNotUniqueName(FieldOperand(scratch, Map::kInstanceTypeOffset),
4362  &maybe_in_dictionary);
4363  }
4364  }
4365 
4366  __ bind(&maybe_in_dictionary);
4367  // If we are doing negative lookup then probing failure should be
4368  // treated as a lookup success. For positive lookup probing failure
4369  // should be treated as lookup failure.
4370  if (mode_ == POSITIVE_LOOKUP) {
4371  __ movp(scratch, Immediate(0));
4372  __ Drop(1);
4373  __ ret(2 * kPointerSize);
4374  }
4375 
4376  __ bind(&in_dictionary);
4377  __ movp(scratch, Immediate(1));
4378  __ Drop(1);
4379  __ ret(2 * kPointerSize);
4380 
4381  __ bind(&not_in_dictionary);
4382  __ movp(scratch, Immediate(0));
4383  __ Drop(1);
4384  __ ret(2 * kPointerSize);
4385 }
4386 
4387 
4389  Isolate* isolate) {
4391  stub1.GetCode(isolate);
4393  stub2.GetCode(isolate);
4394 }
4395 
4396 
4397 bool CodeStub::CanUseFPRegisters() {
4398  return true; // Always have SSE2 on x64.
4399 }
4400 
4401 
4402 // Takes the input in 3 registers: address_, value_ and object_. A pointer to
4403 // the value has just been written into the object; now this stub makes sure
4404 // we keep the GC informed. The word in the object where the value has been
4405 // written is in the address register.
4406 void RecordWriteStub::Generate(MacroAssembler* masm) {
4407  Label skip_to_incremental_noncompacting;
4408  Label skip_to_incremental_compacting;
4409 
4410  // The first two instructions are generated with labels so as to get the
4411  // offset fixed up correctly by the bind(Label*) call. We patch it back and
4412  // forth between a compare instruction (a nop in this position) and the
4413  // real branch when we start and stop incremental heap marking.
4414  // See RecordWriteStub::Patch for details.
4415  __ jmp(&skip_to_incremental_noncompacting, Label::kNear);
4416  __ jmp(&skip_to_incremental_compacting, Label::kFar);
4417 
4418  if (remembered_set_action_ == EMIT_REMEMBERED_SET) {
4419  __ RememberedSetHelper(object_,
4420  address_,
4421  value_,
4422  save_fp_regs_mode_,
4424  } else {
4425  __ ret(0);
4426  }
4427 
4428  __ bind(&skip_to_incremental_noncompacting);
4429  GenerateIncremental(masm, INCREMENTAL);
4430 
4431  __ bind(&skip_to_incremental_compacting);
4432  GenerateIncremental(masm, INCREMENTAL_COMPACTION);
4433 
4434  // Initial mode of the stub is expected to be STORE_BUFFER_ONLY.
4435  // Will be checked in IncrementalMarking::ActivateGeneratedStub.
4436  masm->set_byte_at(0, kTwoByteNopInstruction);
4437  masm->set_byte_at(2, kFiveByteNopInstruction);
4438 }
4439 
4440 
4441 void RecordWriteStub::GenerateIncremental(MacroAssembler* masm, Mode mode) {
4442  regs_.Save(masm);
4443 
4444  if (remembered_set_action_ == EMIT_REMEMBERED_SET) {
4445  Label dont_need_remembered_set;
4446 
4447  __ movp(regs_.scratch0(), Operand(regs_.address(), 0));
4448  __ JumpIfNotInNewSpace(regs_.scratch0(),
4449  regs_.scratch0(),
4450  &dont_need_remembered_set);
4451 
4452  __ CheckPageFlag(regs_.object(),
4453  regs_.scratch0(),
4455  not_zero,
4456  &dont_need_remembered_set);
4457 
4458  // First notify the incremental marker if necessary, then update the
4459  // remembered set.
4460  CheckNeedsToInformIncrementalMarker(
4461  masm, kUpdateRememberedSetOnNoNeedToInformIncrementalMarker, mode);
4462  InformIncrementalMarker(masm);
4463  regs_.Restore(masm);
4464  __ RememberedSetHelper(object_,
4465  address_,
4466  value_,
4467  save_fp_regs_mode_,
4469 
4470  __ bind(&dont_need_remembered_set);
4471  }
4472 
4473  CheckNeedsToInformIncrementalMarker(
4474  masm, kReturnOnNoNeedToInformIncrementalMarker, mode);
4475  InformIncrementalMarker(masm);
4476  regs_.Restore(masm);
4477  __ ret(0);
4478 }
4479 
4480 
4481 void RecordWriteStub::InformIncrementalMarker(MacroAssembler* masm) {
4482  regs_.SaveCallerSaveRegisters(masm, save_fp_regs_mode_);
4483  Register address =
4484  arg_reg_1.is(regs_.address()) ? kScratchRegister : regs_.address();
4485  ASSERT(!address.is(regs_.object()));
4486  ASSERT(!address.is(arg_reg_1));
4487  __ Move(address, regs_.address());
4488  __ Move(arg_reg_1, regs_.object());
4489  // TODO(gc) Can we just set address arg2 in the beginning?
4490  __ Move(arg_reg_2, address);
4491  __ LoadAddress(arg_reg_3,
4492  ExternalReference::isolate_address(masm->isolate()));
4493  int argument_count = 3;
4494 
4495  AllowExternalCallThatCantCauseGC scope(masm);
4496  __ PrepareCallCFunction(argument_count);
4497  __ CallCFunction(
4498  ExternalReference::incremental_marking_record_write_function(
4499  masm->isolate()),
4500  argument_count);
4501  regs_.RestoreCallerSaveRegisters(masm, save_fp_regs_mode_);
4502 }
4503 
4504 
4505 void RecordWriteStub::CheckNeedsToInformIncrementalMarker(
4506  MacroAssembler* masm,
4507  OnNoNeedToInformIncrementalMarker on_no_need,
4508  Mode mode) {
4509  Label on_black;
4510  Label need_incremental;
4511  Label need_incremental_pop_object;
4512 
4513  __ movp(regs_.scratch0(), Immediate(~Page::kPageAlignmentMask));
4514  __ andp(regs_.scratch0(), regs_.object());
4515  __ movp(regs_.scratch1(),
4516  Operand(regs_.scratch0(),
4518  __ subp(regs_.scratch1(), Immediate(1));
4519  __ movp(Operand(regs_.scratch0(),
4521  regs_.scratch1());
4522  __ j(negative, &need_incremental);
4523 
4524  // Let's look at the color of the object: If it is not black we don't have
4525  // to inform the incremental marker.
4526  __ JumpIfBlack(regs_.object(),
4527  regs_.scratch0(),
4528  regs_.scratch1(),
4529  &on_black,
4530  Label::kNear);
4531 
4532  regs_.Restore(masm);
4533  if (on_no_need == kUpdateRememberedSetOnNoNeedToInformIncrementalMarker) {
4534  __ RememberedSetHelper(object_,
4535  address_,
4536  value_,
4537  save_fp_regs_mode_,
4539  } else {
4540  __ ret(0);
4541  }
4542 
4543  __ bind(&on_black);
4544 
4545  // Get the value from the slot.
4546  __ movp(regs_.scratch0(), Operand(regs_.address(), 0));
4547 
4548  if (mode == INCREMENTAL_COMPACTION) {
4549  Label ensure_not_white;
4550 
4551  __ CheckPageFlag(regs_.scratch0(), // Contains value.
4552  regs_.scratch1(), // Scratch.
4554  zero,
4555  &ensure_not_white,
4556  Label::kNear);
4557 
4558  __ CheckPageFlag(regs_.object(),
4559  regs_.scratch1(), // Scratch.
4561  zero,
4562  &need_incremental);
4563 
4564  __ bind(&ensure_not_white);
4565  }
4566 
4567  // We need an extra register for this, so we push the object register
4568  // temporarily.
4569  __ Push(regs_.object());
4570  __ EnsureNotWhite(regs_.scratch0(), // The value.
4571  regs_.scratch1(), // Scratch.
4572  regs_.object(), // Scratch.
4573  &need_incremental_pop_object,
4574  Label::kNear);
4575  __ Pop(regs_.object());
4576 
4577  regs_.Restore(masm);
4578  if (on_no_need == kUpdateRememberedSetOnNoNeedToInformIncrementalMarker) {
4579  __ RememberedSetHelper(object_,
4580  address_,
4581  value_,
4582  save_fp_regs_mode_,
4584  } else {
4585  __ ret(0);
4586  }
4587 
4588  __ bind(&need_incremental_pop_object);
4589  __ Pop(regs_.object());
4590 
4591  __ bind(&need_incremental);
4592 
4593  // Fall through when we need to inform the incremental marker.
4594 }
4595 
4596 
4597 void StoreArrayLiteralElementStub::Generate(MacroAssembler* masm) {
4598  // ----------- S t a t e -------------
4599  // -- rax : element value to store
4600  // -- rcx : element index as smi
4601  // -- rsp[0] : return address
4602  // -- rsp[8] : array literal index in function
4603  // -- rsp[16] : array literal
4604  // clobbers rbx, rdx, rdi
4605  // -----------------------------------
4606 
4607  Label element_done;
4608  Label double_elements;
4609  Label smi_element;
4610  Label slow_elements;
4611  Label fast_elements;
4612 
4613  // Get array literal index, array literal and its map.
4614  StackArgumentsAccessor args(rsp, 2, ARGUMENTS_DONT_CONTAIN_RECEIVER);
4615  __ movp(rdx, args.GetArgumentOperand(1));
4616  __ movp(rbx, args.GetArgumentOperand(0));
4618 
4619  __ CheckFastElements(rdi, &double_elements);
4620 
4621  // FAST_*_SMI_ELEMENTS or FAST_*_ELEMENTS
4622  __ JumpIfSmi(rax, &smi_element);
4623  __ CheckFastSmiElements(rdi, &fast_elements);
4624 
4625  // Storing into the array literal requires an elements transition. Call into
4626  // the runtime.
4627 
4628  __ bind(&slow_elements);
4629  __ PopReturnAddressTo(rdi);
4630  __ Push(rbx);
4631  __ Push(rcx);
4632  __ Push(rax);
4635  __ Push(rdx);
4636  __ PushReturnAddressFrom(rdi);
4637  __ TailCallRuntime(Runtime::kStoreArrayLiteralElement, 5, 1);
4638 
4639  // Array literal has ElementsKind of FAST_*_ELEMENTS and value is an object.
4640  __ bind(&fast_elements);
4641  __ SmiToInteger32(kScratchRegister, rcx);
4645  __ movp(Operand(rcx, 0), rax);
4646  // Update the write barrier for the array store.
4647  __ RecordWrite(rbx, rcx, rax,
4650  OMIT_SMI_CHECK);
4651  __ ret(0);
4652 
4653  // Array literal has ElementsKind of FAST_*_SMI_ELEMENTS or
4654  // FAST_*_ELEMENTS, and value is Smi.
4655  __ bind(&smi_element);
4656  __ SmiToInteger32(kScratchRegister, rcx);
4660  __ ret(0);
4661 
4662  // Array literal has ElementsKind of FAST_DOUBLE_ELEMENTS.
4663  __ bind(&double_elements);
4664 
4666  __ SmiToInteger32(r11, rcx);
4667  __ StoreNumberToDoubleElements(rax,
4668  r9,
4669  r11,
4670  xmm0,
4671  &slow_elements);
4672  __ ret(0);
4673 }
4674 
4675 
4676 void StubFailureTrampolineStub::Generate(MacroAssembler* masm) {
4677  CEntryStub ces(1, fp_registers_ ? kSaveFPRegs : kDontSaveFPRegs);
4678  __ Call(ces.GetCode(masm->isolate()), RelocInfo::CODE_TARGET);
4679  int parameter_count_offset =
4681  __ movp(rbx, MemOperand(rbp, parameter_count_offset));
4682  masm->LeaveFrame(StackFrame::STUB_FAILURE_TRAMPOLINE);
4683  __ PopReturnAddressTo(rcx);
4684  int additional_offset = function_mode_ == JS_FUNCTION_STUB_MODE
4685  ? kPointerSize
4686  : 0;
4687  __ leap(rsp, MemOperand(rsp, rbx, times_pointer_size, additional_offset));
4688  __ jmp(rcx); // Return to IC Miss stub, continuation still on stack.
4689 }
4690 
4691 
4692 void ProfileEntryHookStub::MaybeCallEntryHook(MacroAssembler* masm) {
4693  if (masm->isolate()->function_entry_hook() != NULL) {
4694  ProfileEntryHookStub stub;
4695  masm->CallStub(&stub);
4696  }
4697 }
4698 
4699 
4700 void ProfileEntryHookStub::Generate(MacroAssembler* masm) {
4701  // This stub can be called from essentially anywhere, so it needs to save
4702  // all volatile and callee-save registers.
4703  const size_t kNumSavedRegisters = 2;
4704  __ pushq(arg_reg_1);
4705  __ pushq(arg_reg_2);
4706 
4707  // Calculate the original stack pointer and store it in the second arg.
4708  __ leap(arg_reg_2,
4709  Operand(rsp, kNumSavedRegisters * kRegisterSize + kPCOnStackSize));
4710 
4711  // Calculate the function address and store it in the first arg.
4712  __ movp(arg_reg_1, Operand(rsp, kNumSavedRegisters * kRegisterSize));
4714 
4715  // Save the remainder of the volatile registers.
4716  masm->PushCallerSaved(kSaveFPRegs, arg_reg_1, arg_reg_2);
4717 
4718  // Call the entry hook function.
4719  __ Move(rax, FUNCTION_ADDR(masm->isolate()->function_entry_hook()),
4721 
4722  AllowExternalCallThatCantCauseGC scope(masm);
4723 
4724  const int kArgumentCount = 2;
4725  __ PrepareCallCFunction(kArgumentCount);
4726  __ CallCFunction(rax, kArgumentCount);
4727 
4728  // Restore volatile regs.
4729  masm->PopCallerSaved(kSaveFPRegs, arg_reg_1, arg_reg_2);
4730  __ popq(arg_reg_2);
4731  __ popq(arg_reg_1);
4732 
4733  __ Ret();
4734 }
4735 
4736 
4737 template<class T>
4738 static void CreateArrayDispatch(MacroAssembler* masm,
4740  if (mode == DISABLE_ALLOCATION_SITES) {
4741  T stub(GetInitialFastElementsKind(), mode);
4742  __ TailCallStub(&stub);
4743  } else if (mode == DONT_OVERRIDE) {
4744  int last_index = GetSequenceIndexFromFastElementsKind(
4746  for (int i = 0; i <= last_index; ++i) {
4747  Label next;
4749  __ cmpl(rdx, Immediate(kind));
4750  __ j(not_equal, &next);
4751  T stub(kind);
4752  __ TailCallStub(&stub);
4753  __ bind(&next);
4754  }
4755 
4756  // If we reached this point there is a problem.
4757  __ Abort(kUnexpectedElementsKindInArrayConstructor);
4758  } else {
4759  UNREACHABLE();
4760  }
4761 }
4762 
4763 
4764 static void CreateArrayDispatchOneArgument(MacroAssembler* masm,
4766  // rbx - allocation site (if mode != DISABLE_ALLOCATION_SITES)
4767  // rdx - kind (if mode != DISABLE_ALLOCATION_SITES)
4768  // rax - number of arguments
4769  // rdi - constructor?
4770  // rsp[0] - return address
4771  // rsp[8] - last argument
4772  Handle<Object> undefined_sentinel(
4773  masm->isolate()->heap()->undefined_value(),
4774  masm->isolate());
4775 
4776  Label normal_sequence;
4777  if (mode == DONT_OVERRIDE) {
4778  ASSERT(FAST_SMI_ELEMENTS == 0);
4780  ASSERT(FAST_ELEMENTS == 2);
4784 
4785  // Is the low bit set? If so, we are holey and that is good.
4786  __ testb(rdx, Immediate(1));
4787  __ j(not_zero, &normal_sequence);
4788  }
4789 
4790  // look at the first argument
4791  StackArgumentsAccessor args(rsp, 1, ARGUMENTS_DONT_CONTAIN_RECEIVER);
4792  __ movp(rcx, args.GetArgumentOperand(0));
4793  __ testp(rcx, rcx);
4794  __ j(zero, &normal_sequence);
4795 
4796  if (mode == DISABLE_ALLOCATION_SITES) {
4798  ElementsKind holey_initial = GetHoleyElementsKind(initial);
4799 
4800  ArraySingleArgumentConstructorStub stub_holey(holey_initial,
4802  __ TailCallStub(&stub_holey);
4803 
4804  __ bind(&normal_sequence);
4805  ArraySingleArgumentConstructorStub stub(initial,
4807  __ TailCallStub(&stub);
4808  } else if (mode == DONT_OVERRIDE) {
4809  // We are going to create a holey array, but our kind is non-holey.
4810  // Fix kind and retry (only if we have an allocation site in the slot).
4811  __ incl(rdx);
4812 
4813  if (FLAG_debug_code) {
4814  Handle<Map> allocation_site_map =
4815  masm->isolate()->factory()->allocation_site_map();
4816  __ Cmp(FieldOperand(rbx, 0), allocation_site_map);
4817  __ Assert(equal, kExpectedAllocationSite);
4818  }
4819 
4820  // Save the resulting elements kind in type info. We can't just store rdx
4821  // in the AllocationSite::transition_info field because the elements kind is
4822  // restricted to a portion of the field; the upper bits need to be left alone.
4826 
4827  __ bind(&normal_sequence);
4828  int last_index = GetSequenceIndexFromFastElementsKind(
4830  for (int i = 0; i <= last_index; ++i) {
4831  Label next;
4833  __ cmpl(rdx, Immediate(kind));
4834  __ j(not_equal, &next);
4835  ArraySingleArgumentConstructorStub stub(kind);
4836  __ TailCallStub(&stub);
4837  __ bind(&next);
4838  }
4839 
4840  // If we reached this point there is a problem.
4841  __ Abort(kUnexpectedElementsKindInArrayConstructor);
4842  } else {
4843  UNREACHABLE();
4844  }
4845 }
4846 
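
// A tiny C++ sketch of the elements-kind fixup above: packed and holey kinds
// are interleaved so that the holey variant of a packed kind is kind + 1 (the
// low bit), which is what the __ incl(rdx) above relies on. The enum values
// here assume the FAST_* layout asserted in the generated code and are
// illustrative only.
enum ElementsKindSketch {
  FAST_SMI_ELEMENTS_SK = 0,
  FAST_HOLEY_SMI_ELEMENTS_SK = 1,
  FAST_ELEMENTS_SK = 2,
  FAST_HOLEY_ELEMENTS_SK = 3
};

static ElementsKindSketch ToHoleyKindSketch(ElementsKindSketch kind) {
  // Is the low bit set? If so, the kind is already holey; otherwise step to
  // the next (holey) kind.
  if ((kind & 1) != 0) return kind;
  return static_cast<ElementsKindSketch>(kind + 1);
}
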
4847 
4848 template<class T>
4849 static void ArrayConstructorStubAheadOfTimeHelper(Isolate* isolate) {
4850  int to_index = GetSequenceIndexFromFastElementsKind(
4852  for (int i = 0; i <= to_index; ++i) {
4854  T stub(kind);
4855  stub.GetCode(isolate);
4857  T stub1(kind, DISABLE_ALLOCATION_SITES);
4858  stub1.GetCode(isolate);
4859  }
4860  }
4861 }
4862 
4863 
4865  ArrayConstructorStubAheadOfTimeHelper<ArrayNoArgumentConstructorStub>(
4866  isolate);
4867  ArrayConstructorStubAheadOfTimeHelper<ArraySingleArgumentConstructorStub>(
4868  isolate);
4869  ArrayConstructorStubAheadOfTimeHelper<ArrayNArgumentsConstructorStub>(
4870  isolate);
4871 }
4872 
4873 
4875  Isolate* isolate) {
4877  for (int i = 0; i < 2; i++) {
4878  // For internal arrays we only need a few things
4879  InternalArrayNoArgumentConstructorStub stubh1(kinds[i]);
4880  stubh1.GetCode(isolate);
4881  InternalArraySingleArgumentConstructorStub stubh2(kinds[i]);
4882  stubh2.GetCode(isolate);
4883  InternalArrayNArgumentsConstructorStub stubh3(kinds[i]);
4884  stubh3.GetCode(isolate);
4885  }
4886 }
4887 
4888 
4889 void ArrayConstructorStub::GenerateDispatchToArrayStub(
4890  MacroAssembler* masm,
4892  if (argument_count_ == ANY) {
4893  Label not_zero_case, not_one_case;
4894  __ testp(rax, rax);
4895  __ j(not_zero, &not_zero_case);
4896  CreateArrayDispatch<ArrayNoArgumentConstructorStub>(masm, mode);
4897 
4898  __ bind(&not_zero_case);
4899  __ cmpl(rax, Immediate(1));
4900  __ j(greater, &not_one_case);
4901  CreateArrayDispatchOneArgument(masm, mode);
4902 
4903  __ bind(&not_one_case);
4904  CreateArrayDispatch<ArrayNArgumentsConstructorStub>(masm, mode);
4905  } else if (argument_count_ == NONE) {
4906  CreateArrayDispatch<ArrayNoArgumentConstructorStub>(masm, mode);
4907  } else if (argument_count_ == ONE) {
4908  CreateArrayDispatchOneArgument(masm, mode);
4909  } else if (argument_count_ == MORE_THAN_ONE) {
4910  CreateArrayDispatch<ArrayNArgumentsConstructorStub>(masm, mode);
4911  } else {
4912  UNREACHABLE();
4913  }
4914 }
4915 
4916 
4917 void ArrayConstructorStub::Generate(MacroAssembler* masm) {
4918  // ----------- S t a t e -------------
4919  // -- rax : argc
4920  // -- rbx : AllocationSite or undefined
4921  // -- rdi : constructor
4922  // -- rsp[0] : return address
4923  // -- rsp[8] : last argument
4924  // -----------------------------------
4925  if (FLAG_debug_code) {
4926  // The array construct code is only set for the global and natives
4927  // builtin Array functions which always have maps.
4928 
4929  // Initial map for the builtin Array function should be a map.
4931  // Will both indicate a NULL and a Smi.
4932  STATIC_ASSERT(kSmiTag == 0);
4933  Condition not_smi = NegateCondition(masm->CheckSmi(rcx));
4934  __ Check(not_smi, kUnexpectedInitialMapForArrayFunction);
4935  __ CmpObjectType(rcx, MAP_TYPE, rcx);
4936  __ Check(equal, kUnexpectedInitialMapForArrayFunction);
4937 
4938  // We should either have undefined in rbx or a valid AllocationSite
4939  __ AssertUndefinedOrAllocationSite(rbx);
4940  }
4941 
4942  Label no_info;
4943  // If the feedback vector is the undefined value call an array constructor
4944  // that doesn't use AllocationSites.
4945  __ CompareRoot(rbx, Heap::kUndefinedValueRootIndex);
4946  __ j(equal, &no_info);
4947 
4948  // Only look at the lower 16 bits of the transition info.
4949  __ movp(rdx, FieldOperand(rbx, AllocationSite::kTransitionInfoOffset));
4950  __ SmiToInteger32(rdx, rdx);
4951  STATIC_ASSERT(AllocationSite::ElementsKindBits::kShift == 0);
4952  __ andp(rdx, Immediate(AllocationSite::ElementsKindBits::kMask));
4953  GenerateDispatchToArrayStub(masm, DONT_OVERRIDE);
4954 
4955  __ bind(&no_info);
4956  GenerateDispatchToArrayStub(masm, DISABLE_ALLOCATION_SITES);
4957 }
4958 
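// At run time Generate() above therefore behaves roughly as follows (sketch
// only; the elements kind is read from the AllocationSite's transition info
// and left in rdx for the dispatch):
//
//   if (allocation_site_or_undefined == undefined) {
//     DispatchToArrayStub(DISABLE_ALLOCATION_SITES);
//   } else {
//     ElementsKind kind = allocation_site->elements_kind();  // illustrative
//                                                            // accessor
//     DispatchToArrayStub(DONT_OVERRIDE);                    // picks stub by kind
//   }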
4959 
4960 void InternalArrayConstructorStub::GenerateCase(
4961  MacroAssembler* masm, ElementsKind kind) {
4962  Label not_zero_case, not_one_case;
4963  Label normal_sequence;
4964 
4965  __ testp(rax, rax);
4966  __ j(not_zero, &not_zero_case);
4967  InternalArrayNoArgumentConstructorStub stub0(kind);
4968  __ TailCallStub(&stub0);
4969 
4970  __ bind(&not_zero_case);
4971  __ cmpl(rax, Immediate(1));
4972  __ j(greater, &not_one_case);
4973 
4974  if (IsFastPackedElementsKind(kind)) {
4975  // We might need to create a holey array
4976  // look at the first argument
4977  StackArgumentsAccessor args(rsp, 1, ARGUMENTS_DONT_CONTAIN_RECEIVER);
4978  __ movp(rcx, args.GetArgumentOperand(0));
4979  __ testp(rcx, rcx);
4980  __ j(zero, &normal_sequence);
4981 
4982  InternalArraySingleArgumentConstructorStub
4983  stub1_holey(GetHoleyElementsKind(kind));
4984  __ TailCallStub(&stub1_holey);
4985  }
4986 
4987  __ bind(&normal_sequence);
4988  InternalArraySingleArgumentConstructorStub stub1(kind);
4989  __ TailCallStub(&stub1);
4990 
4991  __ bind(&not_one_case);
4992  InternalArrayNArgumentsConstructorStub stubN(kind);
4993  __ TailCallStub(&stubN);
4994 }
4995 
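// GenerateCase() above selects a stub by argument count; for exactly one
// argument and a packed kind it also upgrades to the holey kind when the
// requested length is non-zero, since new Array(n) with n > 0 starts out
// with holes. Rough sketch (not code from this file):
//
//   if (argc == 0) return NoArgumentStub(kind);
//   if (argc == 1) {
//     if (IsFastPackedElementsKind(kind) && first_argument != 0)
//       return SingleArgumentStub(GetHoleyElementsKind(kind));
//     return SingleArgumentStub(kind);
//   }
//   return NArgumentsStub(kind);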
4996 
4997 void InternalArrayConstructorStub::Generate(MacroAssembler* masm) {
4998  // ----------- S t a t e -------------
4999  // -- rax : argc
5000  // -- rdi : constructor
5001  // -- rsp[0] : return address
5002  // -- rsp[8] : last argument
5003  // -----------------------------------
5004 
5005  if (FLAG_debug_code) {
5006  // The array construct code is only set for the global and natives
5007  // builtin Array functions which always have maps.
5008 
5009  // Initial map for the builtin Array function should be a map.
5010  __ movp(rcx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset));
5011  // Will both indicate a NULL and a Smi.
5012  STATIC_ASSERT(kSmiTag == 0);
5013  Condition not_smi = NegateCondition(masm->CheckSmi(rcx));
5014  __ Check(not_smi, kUnexpectedInitialMapForArrayFunction);
5015  __ CmpObjectType(rcx, MAP_TYPE, rcx);
5016  __ Check(equal, kUnexpectedInitialMapForArrayFunction);
5017  }
5018 
5019  // Figure out the right elements kind
5020  __ movp(rcx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset));
5021 
5022  // Load the map's "bit field 2" into |result|. We only need the first byte,
5023  // but the following masking takes care of that anyway.
5024  __ movzxbp(rcx, FieldOperand(rcx, Map::kBitField2Offset));
5025  // Retrieve elements_kind from bit field 2.
5026  __ andp(rcx, Immediate(Map::kElementsKindMask));
5027  __ shr(rcx, Immediate(Map::kElementsKindShift));
5028 
5029  if (FLAG_debug_code) {
5030  Label done;
5031  __ cmpl(rcx, Immediate(FAST_ELEMENTS));
5032  __ j(equal, &done);
5033  __ cmpl(rcx, Immediate(FAST_HOLEY_ELEMENTS));
5034  __ Assert(equal,
5035  kInvalidElementsKindForInternalArrayOrInternalPackedArray);
5036  __ bind(&done);
5037  }
5038 
5039  Label fast_elements_case;
5040  __ cmpl(rcx, Immediate(FAST_ELEMENTS));
5041  __ j(equal, &fast_elements_case);
5042  GenerateCase(masm, FAST_HOLEY_ELEMENTS);
5043 
5044  __ bind(&fast_elements_case);
5045  GenerateCase(masm, FAST_ELEMENTS);
5046 }
5047 
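// The elements kind used above is recovered from the constructor's initial
// map: "bit field 2" stores the kind in a dedicated bit range, so a mask and
// shift extract it. Equivalent C++ (sketch; |bit_field2| models the byte
// loaded from the map):
//
//   int kind = (bit_field2 & Map::kElementsKindMask) >> Map::kElementsKindShift;
//   GenerateCase(masm, kind == FAST_ELEMENTS ? FAST_ELEMENTS
//                                            : FAST_HOLEY_ELEMENTS);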
5048 
5049 void CallApiFunctionStub::Generate(MacroAssembler* masm) {
5050  // ----------- S t a t e -------------
5051  // -- rax : callee
5052  // -- rbx : call_data
5053  // -- rcx : holder
5054  // -- rdx : api_function_address
5055  // -- rsi : context
5056  // --
5057  // -- rsp[0] : return address
5058  // -- rsp[8] : last argument
5059  // -- ...
5060  // -- rsp[argc * 8] : first argument
5061  // -- rsp[(argc + 1) * 8] : receiver
5062  // -----------------------------------
5063 
5064  Register callee = rax;
5065  Register call_data = rbx;
5066  Register holder = rcx;
5067  Register api_function_address = rdx;
5068  Register return_address = rdi;
5069  Register context = rsi;
5070 
5071  int argc = ArgumentBits::decode(bit_field_);
5072  bool is_store = IsStoreBits::decode(bit_field_);
5073  bool call_data_undefined = CallDataUndefinedBits::decode(bit_field_);
5074 
5075  typedef FunctionCallbackArguments FCA;
5076 
5077  STATIC_ASSERT(FCA::kContextSaveIndex == 6);
5078  STATIC_ASSERT(FCA::kCalleeIndex == 5);
5079  STATIC_ASSERT(FCA::kDataIndex == 4);
5080  STATIC_ASSERT(FCA::kReturnValueOffset == 3);
5081  STATIC_ASSERT(FCA::kReturnValueDefaultValueIndex == 2);
5082  STATIC_ASSERT(FCA::kIsolateIndex == 1);
5083  STATIC_ASSERT(FCA::kHolderIndex == 0);
5084  STATIC_ASSERT(FCA::kArgsLength == 7);
5085 
5086  __ PopReturnAddressTo(return_address);
5087 
5088  // context save
5089  __ Push(context);
5090  // load context from callee
5091  __ movp(context, FieldOperand(callee, JSFunction::kContextOffset));
5092 
5093  // callee
5094  __ Push(callee);
5095 
5096  // call data
5097  __ Push(call_data);
5098  Register scratch = call_data;
5099  if (!call_data_undefined) {
5100  __ LoadRoot(scratch, Heap::kUndefinedValueRootIndex);
5101  }
5102  // return value
5103  __ Push(scratch);
5104  // return value default
5105  __ Push(scratch);
5106  // isolate
5107  __ Move(scratch,
5108  ExternalReference::isolate_address(masm->isolate()));
5109  __ Push(scratch);
5110  // holder
5111  __ Push(holder);
5112 
5113  __ movp(scratch, rsp);
5114  // Push return address back on stack.
5115  __ PushReturnAddressFrom(return_address);
5116 
5117  // Allocate the v8::Arguments structure in the arguments' space since
5118  // it's not controlled by GC.
5119  const int kApiStackSpace = 4;
5120 
5121  __ PrepareCallApiFunction(kApiStackSpace);
5122 
5123  // FunctionCallbackInfo::implicit_args_.
5124  __ movp(StackSpaceOperand(0), scratch);
5125  __ addp(scratch, Immediate((argc + FCA::kArgsLength - 1) * kPointerSize));
5126  __ movp(StackSpaceOperand(1), scratch); // FunctionCallbackInfo::values_.
5127  __ Set(StackSpaceOperand(2), argc); // FunctionCallbackInfo::length_.
5128  // FunctionCallbackInfo::is_construct_call_.
5129  __ Set(StackSpaceOperand(3), 0);
5130 
5131 #if defined(__MINGW64__) || defined(_WIN64)
5132  Register arguments_arg = rcx;
5133  Register callback_arg = rdx;
5134 #else
5135  Register arguments_arg = rdi;
5136  Register callback_arg = rsi;
5137 #endif
5138 
5139  // It's okay if api_function_address == callback_arg
5140  // but not arguments_arg
5141  ASSERT(!api_function_address.is(arguments_arg));
5142 
5143  // v8::InvocationCallback's argument.
5144  __ leap(arguments_arg, StackSpaceOperand(0));
5145 
5146  Address thunk_address = FUNCTION_ADDR(&InvokeFunctionCallback);
5147 
5148  // Accessor for FunctionCallbackInfo and first js arg.
5149  StackArgumentsAccessor args_from_rbp(rbp, FCA::kArgsLength + 1,
5150  ARGUMENTS_DONT_CONTAIN_RECEIVER);
5151  Operand context_restore_operand = args_from_rbp.GetArgumentOperand(
5152  FCA::kArgsLength - FCA::kContextSaveIndex);
5153  // Stores return the first js argument
5154  Operand return_value_operand = args_from_rbp.GetArgumentOperand(
5155  is_store ? 0 : FCA::kArgsLength - FCA::kReturnValueOffset);
5156  __ CallApiFunctionAndReturn(
5157  api_function_address,
5158  thunk_address,
5159  callback_arg,
5160  argc + FCA::kArgsLength + 1,
5161  return_value_operand,
5162  &context_restore_operand);
5163 }
5164 
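// The pushes in CallApiFunctionStub::Generate() build the implicit part of
// the FunctionCallbackArguments block directly on the stack; the saved rsp
// (|scratch|) becomes FunctionCallbackInfo::implicit_args_. Layout sketch,
// derived from the STATIC_ASSERTs above (indices in pointer-sized slots):
//
//   implicit_args[0] : holder
//   implicit_args[1] : isolate
//   implicit_args[2] : return value default
//   implicit_args[3] : return value
//   implicit_args[4] : call data
//   implicit_args[5] : callee
//   implicit_args[6] : context save
//   // the receiver and the argc JS arguments sit above this block;
//   // values_ points at the first JS argument and length_ holds argc.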
5165 
5166 void CallApiGetterStub::Generate(MacroAssembler* masm) {
5167  // ----------- S t a t e -------------
5168  // -- rsp[0] : return address
5169  // -- rsp[8] : name
5170  // -- rsp[16 - kArgsLength*8] : PropertyCallbackArguments object
5171  // -- ...
5172  // -- r8 : api_function_address
5173  // -----------------------------------
5174 
5175 #if defined(__MINGW64__) || defined(_WIN64)
5176  Register getter_arg = r8;
5177  Register accessor_info_arg = rdx;
5178  Register name_arg = rcx;
5179 #else
5180  Register getter_arg = rdx;
5181  Register accessor_info_arg = rsi;
5182  Register name_arg = rdi;
5183 #endif
5184  Register api_function_address = r8;
5185  Register scratch = rax;
5186 
5187  // v8::Arguments::values_ and handler for name.
5188  const int kStackSpace = PropertyCallbackArguments::kArgsLength + 1;
5189 
5190  // Allocate v8::AccessorInfo in non-GCed stack space.
5191  const int kArgStackSpace = 1;
5192 
5193  __ leap(name_arg, Operand(rsp, kPCOnStackSize));
5194 
5195  __ PrepareCallApiFunction(kArgStackSpace);
5196  __ leap(scratch, Operand(name_arg, 1 * kPointerSize));
5197 
5198  // v8::PropertyAccessorInfo::args_.
5199  __ movp(StackSpaceOperand(0), scratch);
5200 
5201  // The context register (rsi) has been saved in PrepareCallApiFunction and
5202  // could be used to pass arguments.
5203  __ leap(accessor_info_arg, StackSpaceOperand(0));
5204 
5204 
5205  Address thunk_address = FUNCTION_ADDR(&InvokeAccessorGetterCallback);
5206 
5207  // It's okay if api_function_address == getter_arg
5208  // but not accessor_info_arg or name_arg
5209  ASSERT(!api_function_address.is(accessor_info_arg) &&
5210  !api_function_address.is(name_arg));
5211 
5212  // The name handler is counted as an argument.
5213  StackArgumentsAccessor args(rbp, PropertyCallbackArguments::kArgsLength);
5214  Operand return_value_operand = args.GetArgumentOperand(
5215  PropertyCallbackArguments::kArgsLength - 1 -
5216  PropertyCallbackArguments::kReturnValueOffset);
5217  __ CallApiFunctionAndReturn(api_function_address,
5218  thunk_address,
5219  getter_arg,
5220  kStackSpace,
5221  return_value_operand,
5222  NULL);
5223 }
5224 
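// For the getter case the PropertyCallbackArguments block is already on the
// stack, pushed by the caller; the stub only materializes a pointer to it
// plus the property name just above the return address, then calls out
// through the thunk. In outline (sketch, not code from this file):
//
//   name_arg          = rsp + kPCOnStackSize       // handle for the name
//   args_             = name_arg + kPointerSize    // start of the PCA block
//   accessor_info_arg = &args_                     // the one reserved slot
//   CallApiFunctionAndReturn(api_function_address, thunk_address,
//                            getter_arg, kStackSpace, ...)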
5225 
5226 #undef __
5227 
5228 } } // namespace v8::internal
5229 
5230 #endif // V8_TARGET_ARCH_X64