v8 3.14.5 (node 0.10.28)
V8 is Google's open source JavaScript engine.
stub-cache-x64.cc
// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "v8.h"

#if defined(V8_TARGET_ARCH_X64)

#include "ic-inl.h"
#include "codegen.h"
#include "stub-cache.h"

namespace v8 {
namespace internal {

#define __ ACCESS_MASM(masm)
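// Note: ACCESS_MASM(masm) expands (modulo debug-mode code-comment tracking)
// to 'masm->', so '__ movq(...)' below emits through the given MacroAssembler.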


static void ProbeTable(Isolate* isolate,
                       MacroAssembler* masm,
                       Code::Flags flags,
                       StubCache::Table table,
                       Register receiver,
                       Register name,
                       // The offset is scaled by 4, based on
                       // kHeapObjectTagSize, which is two bits
                       Register offset) {
  // We need to scale up the pointer by 2 because the offset is scaled by less
  // than the pointer size.
  ASSERT(kPointerSizeLog2 == kHeapObjectTagSize + 1);
  ScaleFactor scale_factor = times_2;

  ASSERT_EQ(24, sizeof(StubCache::Entry));
  // The offset register holds the entry offset times four (due to masking
  // and shifting optimizations).
  ExternalReference key_offset(isolate->stub_cache()->key_reference(table));
  ExternalReference value_offset(isolate->stub_cache()->value_reference(table));
  Label miss;

  // Multiply by 3 because there are 3 fields per entry (name, code, map).
  __ lea(offset, Operand(offset, offset, times_2, 0));

  __ LoadAddress(kScratchRegister, key_offset);

  // Check that the key in the entry matches the name.
  // Multiply entry offset by 16 to get the entry address. Since the
  // offset register already holds the entry offset times four, multiply
  // by a further four.
  __ cmpl(name, Operand(kScratchRegister, offset, scale_factor, 0));
  __ j(not_equal, &miss);

  // Get the map entry from the cache.
  // Use key_offset + kPointerSize * 2, rather than loading map_offset.
  __ movq(kScratchRegister,
          Operand(kScratchRegister, offset, scale_factor, kPointerSize * 2));
  __ cmpq(kScratchRegister, FieldOperand(receiver, HeapObject::kMapOffset));
  __ j(not_equal, &miss);

  // Get the code entry from the cache.
  __ LoadAddress(kScratchRegister, value_offset);
  __ movq(kScratchRegister,
          Operand(kScratchRegister, offset, scale_factor, 0));

  // Check that the flags match what we're looking for.
  __ movl(offset, FieldOperand(kScratchRegister, Code::kFlagsOffset));
  __ and_(offset, Immediate(~Code::kFlagsNotUsedInLookup));
  __ cmpl(offset, Immediate(flags));
  __ j(not_equal, &miss);

#ifdef DEBUG
  if (FLAG_test_secondary_stub_cache && table == StubCache::kPrimary) {
    __ jmp(&miss);
  } else if (FLAG_test_primary_stub_cache && table == StubCache::kSecondary) {
    __ jmp(&miss);
  }
#endif

  // Jump to the first instruction in the code stub.
  __ addq(kScratchRegister, Immediate(Code::kHeaderSize - kHeapObjectTag));
  __ jmp(kScratchRegister);

  __ bind(&miss);
}


// Helper function used to check that the dictionary doesn't contain
// the property. This function may return false negatives, so miss_label
// must always call a backup property check that is complete.
// This function is safe to call if the receiver has fast properties.
// Name must be a symbol and receiver must be a heap object.
static void GenerateDictionaryNegativeLookup(MacroAssembler* masm,
                                             Label* miss_label,
                                             Register receiver,
                                             Handle<String> name,
                                             Register r0,
                                             Register r1) {
  ASSERT(name->IsSymbol());
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->negative_lookups(), 1);
  __ IncrementCounter(counters->negative_lookups_miss(), 1);

  __ movq(r0, FieldOperand(receiver, HeapObject::kMapOffset));

  const int kInterceptorOrAccessCheckNeededMask =
      (1 << Map::kHasNamedInterceptor) | (1 << Map::kIsAccessCheckNeeded);

  // Bail out if the receiver has a named interceptor or requires access checks.
  __ testb(FieldOperand(r0, Map::kBitFieldOffset),
           Immediate(kInterceptorOrAccessCheckNeededMask));
  __ j(not_zero, miss_label);

  // Check that receiver is a JSObject.
  __ CmpInstanceType(r0, FIRST_SPEC_OBJECT_TYPE);
  __ j(below, miss_label);

  // Load properties array.
  Register properties = r0;
  __ movq(properties, FieldOperand(receiver, JSObject::kPropertiesOffset));

  // Check that the properties array is a dictionary.
  __ CompareRoot(FieldOperand(properties, HeapObject::kMapOffset),
                 Heap::kHashTableMapRootIndex);
  __ j(not_equal, miss_label);

  Label done;
  StringDictionaryLookupStub::GenerateNegativeLookup(masm,
                                                     miss_label,
                                                     &done,
                                                     properties,
                                                     name,
                                                     r1);
  __ bind(&done);
  __ DecrementCounter(counters->negative_lookups_miss(), 1);
}


void StubCache::GenerateProbe(MacroAssembler* masm,
                              Code::Flags flags,
                              Register receiver,
                              Register name,
                              Register scratch,
                              Register extra,
                              Register extra2,
                              Register extra3) {
  Isolate* isolate = masm->isolate();
  Label miss;
  USE(extra);   // The register extra is not used on the X64 platform.
  USE(extra2);  // The register extra2 is not used on the X64 platform.
  USE(extra3);  // The register extra3 is not used on the X64 platform.
  // Make sure that code is valid. The multiplying code relies on the
  // entry size being 24.
  ASSERT(sizeof(Entry) == 24);

  // Make sure the flags do not name a specific type.
  ASSERT(Code::ExtractTypeFromFlags(flags) == 0);

  // Make sure that there are no register conflicts.
  ASSERT(!scratch.is(receiver));
  ASSERT(!scratch.is(name));

  // Check that the scratch register is valid; extra2 and extra3 are unused.
  ASSERT(!scratch.is(no_reg));
  ASSERT(extra2.is(no_reg));
  ASSERT(extra3.is(no_reg));

  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->megamorphic_stub_cache_probes(), 1);

  // Check that the receiver isn't a smi.
  __ JumpIfSmi(receiver, &miss);

  // Get the map of the receiver and compute the hash.
  __ movl(scratch, FieldOperand(name, String::kHashFieldOffset));
  // Use only the low 32 bits of the map pointer.
  __ addl(scratch, FieldOperand(receiver, HeapObject::kMapOffset));
  __ xor_(scratch, Immediate(flags));
  // We mask out the last two bits because they are not part of the hash and
  // they are always 01 for maps. Also in the two 'and' instructions below.
  __ and_(scratch, Immediate((kPrimaryTableSize - 1) << kHeapObjectTagSize));

  // Probe the primary table.
  ProbeTable(isolate, masm, flags, kPrimary, receiver, name, scratch);

  // Primary miss: Compute hash for secondary probe.
  __ movl(scratch, FieldOperand(name, String::kHashFieldOffset));
  __ addl(scratch, FieldOperand(receiver, HeapObject::kMapOffset));
  __ xor_(scratch, Immediate(flags));
  __ and_(scratch, Immediate((kPrimaryTableSize - 1) << kHeapObjectTagSize));
  __ subl(scratch, name);
  __ addl(scratch, Immediate(flags));
  __ and_(scratch, Immediate((kSecondaryTableSize - 1) << kHeapObjectTagSize));

  // Probe the secondary table.
  ProbeTable(isolate, masm, flags, kSecondary, receiver, name, scratch);

  // Cache miss: Fall-through and let caller handle the miss by
  // entering the runtime system.
  __ bind(&miss);
  __ IncrementCounter(counters->megamorphic_stub_cache_misses(), 1);
}


void StubCompiler::GenerateLoadGlobalFunctionPrototype(MacroAssembler* masm,
                                                       int index,
                                                       Register prototype) {
  // Load the global or builtins object from the current context.
  __ movq(prototype,
          Operand(rsi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
  // Load the native context from the global or builtins object.
  __ movq(prototype,
          FieldOperand(prototype, GlobalObject::kNativeContextOffset));
  // Load the function from the native context.
  __ movq(prototype, Operand(prototype, Context::SlotOffset(index)));
  // Load the initial map. The global functions all have initial maps.
  __ movq(prototype,
          FieldOperand(prototype, JSFunction::kPrototypeOrInitialMapOffset));
  // Load the prototype from the initial map.
  __ movq(prototype, FieldOperand(prototype, Map::kPrototypeOffset));
}


void StubCompiler::GenerateDirectLoadGlobalFunctionPrototype(
    MacroAssembler* masm,
    int index,
    Register prototype,
    Label* miss) {
  Isolate* isolate = masm->isolate();
  // Check we're still in the same context.
  __ Move(prototype, isolate->global_object());
  __ cmpq(Operand(rsi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)),
          prototype);
  __ j(not_equal, miss);
  // Get the global function with the given index.
  Handle<JSFunction> function(
      JSFunction::cast(isolate->native_context()->get(index)));
  // Load its initial map. The global functions all have initial maps.
  __ Move(prototype, Handle<Map>(function->initial_map()));
  // Load the prototype from the initial map.
  __ movq(prototype, FieldOperand(prototype, Map::kPrototypeOffset));
}


void StubCompiler::GenerateLoadArrayLength(MacroAssembler* masm,
                                           Register receiver,
                                           Register scratch,
                                           Label* miss_label) {
  // Check that the receiver isn't a smi.
  __ JumpIfSmi(receiver, miss_label);

  // Check that the object is a JS array.
  __ CmpObjectType(receiver, JS_ARRAY_TYPE, scratch);
  __ j(not_equal, miss_label);

  // Load length directly from the JS array.
  __ movq(rax, FieldOperand(receiver, JSArray::kLengthOffset));
  __ ret(0);
}


// Generate code to check if an object is a string. If the object is
// a string, the map's instance type is left in the scratch register.
static void GenerateStringCheck(MacroAssembler* masm,
                                Register receiver,
                                Register scratch,
                                Label* smi,
                                Label* non_string_object) {
  // Check that the object isn't a smi.
  __ JumpIfSmi(receiver, smi);

  // Check that the object is a string.
  __ movq(scratch, FieldOperand(receiver, HeapObject::kMapOffset));
  __ movzxbq(scratch, FieldOperand(scratch, Map::kInstanceTypeOffset));
  STATIC_ASSERT(kNotStringTag != 0);
  __ testl(scratch, Immediate(kNotStringTag));
  __ j(not_zero, non_string_object);
}


void StubCompiler::GenerateLoadStringLength(MacroAssembler* masm,
                                            Register receiver,
                                            Register scratch1,
                                            Register scratch2,
                                            Label* miss,
                                            bool support_wrappers) {
  Label check_wrapper;

  // Check if the object is a string leaving the instance type in the
  // scratch register.
  GenerateStringCheck(masm, receiver, scratch1, miss,
                      support_wrappers ? &check_wrapper : miss);

  // Load length directly from the string.
  __ movq(rax, FieldOperand(receiver, String::kLengthOffset));
  __ ret(0);

  if (support_wrappers) {
    // Check if the object is a JSValue wrapper.
    __ bind(&check_wrapper);
    __ cmpl(scratch1, Immediate(JS_VALUE_TYPE));
    __ j(not_equal, miss);

    // Check if the wrapped value is a string and load the length
    // directly if it is.
    __ movq(scratch2, FieldOperand(receiver, JSValue::kValueOffset));
    GenerateStringCheck(masm, scratch2, scratch1, miss, miss);
    __ movq(rax, FieldOperand(scratch2, String::kLengthOffset));
    __ ret(0);
  }
}


void StubCompiler::GenerateLoadFunctionPrototype(MacroAssembler* masm,
                                                 Register receiver,
                                                 Register result,
                                                 Register scratch,
                                                 Label* miss_label) {
  __ TryGetFunctionPrototype(receiver, result, miss_label);
  if (!result.is(rax)) __ movq(rax, result);
  __ ret(0);
}


// Load a fast property out of a holder object (src). In-object properties
// are loaded directly otherwise the property is loaded from the properties
// fixed array.
void StubCompiler::GenerateFastPropertyLoad(MacroAssembler* masm,
                                            Register dst,
                                            Register src,
                                            Handle<JSObject> holder,
                                            int index) {
  // Adjust for the number of properties stored in the holder.
  index -= holder->map()->inobject_properties();
  if (index < 0) {
    // Get the property straight out of the holder.
    int offset = holder->map()->instance_size() + (index * kPointerSize);
    __ movq(dst, FieldOperand(src, offset));
  } else {
    // Calculate the offset into the properties array.
    int offset = index * kPointerSize + FixedArray::kHeaderSize;
    __ movq(dst, FieldOperand(src, JSObject::kPropertiesOffset));
    __ movq(dst, FieldOperand(dst, offset));
  }
}


static void PushInterceptorArguments(MacroAssembler* masm,
                                     Register receiver,
                                     Register holder,
                                     Register name,
                                     Handle<JSObject> holder_obj) {
  __ push(name);
  Handle<InterceptorInfo> interceptor(holder_obj->GetNamedInterceptor());
  ASSERT(!masm->isolate()->heap()->InNewSpace(*interceptor));
  __ Move(kScratchRegister, interceptor);
  __ push(kScratchRegister);
  __ push(receiver);
  __ push(holder);
  __ push(FieldOperand(kScratchRegister, InterceptorInfo::kDataOffset));
  __ PushAddress(ExternalReference::isolate_address());
}


static void CompileCallLoadPropertyWithInterceptor(
    MacroAssembler* masm,
    Register receiver,
    Register holder,
    Register name,
    Handle<JSObject> holder_obj) {
  PushInterceptorArguments(masm, receiver, holder, name, holder_obj);

  ExternalReference ref =
      ExternalReference(IC_Utility(IC::kLoadPropertyWithInterceptorOnly),
                        masm->isolate());
  __ Set(rax, 6);
  __ LoadAddress(rbx, ref);

  CEntryStub stub(1);
  __ CallStub(&stub);
}


// Number of pointers to be reserved on stack for fast API call.
static const int kFastApiCallArguments = 4;
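// (The four slots hold, from lowest address up: the object that passed the
// type check, the API function, the call data, and the isolate; see the
// state comments in GenerateFastApiCall below.)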


// Reserves space for the extra arguments to API function in the
// caller's frame.
//
// These arguments are set by CheckPrototypes and GenerateFastApiCall.
static void ReserveSpaceForFastApiCall(MacroAssembler* masm, Register scratch) {
  // ----------- S t a t e -------------
  //  -- rsp[0] : return address
  //  -- rsp[8] : last argument in the internal frame of the caller
  // -----------------------------------
  __ movq(scratch, Operand(rsp, 0));
  __ subq(rsp, Immediate(kFastApiCallArguments * kPointerSize));
  __ movq(Operand(rsp, 0), scratch);
  __ Move(scratch, Smi::FromInt(0));
  for (int i = 1; i <= kFastApiCallArguments; i++) {
    __ movq(Operand(rsp, i * kPointerSize), scratch);
  }
}


// Undoes the effects of ReserveSpaceForFastApiCall.
static void FreeSpaceForFastApiCall(MacroAssembler* masm, Register scratch) {
  // ----------- S t a t e -------------
  //  -- rsp[0]                             : return address.
  //  -- rsp[8]                             : last fast api call extra argument.
  //  -- ...
  //  -- rsp[kFastApiCallArguments * 8]     : first fast api call extra argument.
  //  -- rsp[kFastApiCallArguments * 8 + 8] : last argument in the internal
  //                                          frame.
  // -----------------------------------
  __ movq(scratch, Operand(rsp, 0));
  __ movq(Operand(rsp, kFastApiCallArguments * kPointerSize), scratch);
  __ addq(rsp, Immediate(kPointerSize * kFastApiCallArguments));
}


// Generates call to API function.
static void GenerateFastApiCall(MacroAssembler* masm,
                                const CallOptimization& optimization,
                                int argc) {
  // ----------- S t a t e -------------
  //  -- rsp[0]              : return address
  //  -- rsp[8]              : object passing the type check
  //                           (last fast api call extra argument,
  //                            set by CheckPrototypes)
  //  -- rsp[16]             : api function
  //                           (first fast api call extra argument)
  //  -- rsp[24]             : api call data
  //  -- rsp[32]             : isolate
  //  -- rsp[40]             : last argument
  //  -- ...
  //  -- rsp[(argc + 4) * 8] : first argument
  //  -- rsp[(argc + 5) * 8] : receiver
  // -----------------------------------
  // Get the function and setup the context.
  Handle<JSFunction> function = optimization.constant_function();
  __ LoadHeapObject(rdi, function);
  __ movq(rsi, FieldOperand(rdi, JSFunction::kContextOffset));

  // Pass the additional arguments.
  __ movq(Operand(rsp, 2 * kPointerSize), rdi);
  Handle<CallHandlerInfo> api_call_info = optimization.api_call_info();
  Handle<Object> call_data(api_call_info->data());
  if (masm->isolate()->heap()->InNewSpace(*call_data)) {
    __ Move(rcx, api_call_info);
    __ movq(rbx, FieldOperand(rcx, CallHandlerInfo::kDataOffset));
    __ movq(Operand(rsp, 3 * kPointerSize), rbx);
  } else {
    __ Move(Operand(rsp, 3 * kPointerSize), call_data);
  }
  __ movq(kScratchRegister, ExternalReference::isolate_address());
  __ movq(Operand(rsp, 4 * kPointerSize), kScratchRegister);

  // Prepare arguments.
  __ lea(rbx, Operand(rsp, 4 * kPointerSize));

#if defined(__MINGW64__)
  Register arguments_arg = rcx;
#elif defined(_WIN64)
  // Win64 uses first register--rcx--for returned value.
  Register arguments_arg = rdx;
#else
  Register arguments_arg = rdi;
#endif

  // Allocate the v8::Arguments structure in the arguments' space since
  // it's not controlled by GC.
  const int kApiStackSpace = 4;

  __ PrepareCallApiFunction(kApiStackSpace);

  __ movq(StackSpaceOperand(0), rbx);  // v8::Arguments::implicit_args_.
  __ addq(rbx, Immediate(argc * kPointerSize));
  __ movq(StackSpaceOperand(1), rbx);  // v8::Arguments::values_.
  __ Set(StackSpaceOperand(2), argc);  // v8::Arguments::length_.
  // v8::Arguments::is_construct_call_.
  __ Set(StackSpaceOperand(3), 0);

  // v8::InvocationCallback's argument.
  __ lea(arguments_arg, StackSpaceOperand(0));

  // Function address is a foreign pointer outside V8's heap.
  Address function_address = v8::ToCData<Address>(api_call_info->callback());
  __ CallApiFunctionAndReturn(function_address,
                              argc + kFastApiCallArguments + 1);
}


class CallInterceptorCompiler BASE_EMBEDDED {
 public:
  CallInterceptorCompiler(StubCompiler* stub_compiler,
                          const ParameterCount& arguments,
                          Register name,
                          Code::ExtraICState extra_ic_state)
      : stub_compiler_(stub_compiler),
        arguments_(arguments),
        name_(name),
        extra_ic_state_(extra_ic_state) {}

  void Compile(MacroAssembler* masm,
               Handle<JSObject> object,
               Handle<JSObject> holder,
               Handle<String> name,
               LookupResult* lookup,
               Register receiver,
               Register scratch1,
               Register scratch2,
               Register scratch3,
               Label* miss) {
    ASSERT(holder->HasNamedInterceptor());
    ASSERT(!holder->GetNamedInterceptor()->getter()->IsUndefined());

    // Check that the receiver isn't a smi.
    __ JumpIfSmi(receiver, miss);

    CallOptimization optimization(lookup);
    if (optimization.is_constant_call()) {
      CompileCacheable(masm, object, receiver, scratch1, scratch2, scratch3,
                       holder, lookup, name, optimization, miss);
    } else {
      CompileRegular(masm, object, receiver, scratch1, scratch2, scratch3,
                     name, holder, miss);
    }
  }

 private:
  void CompileCacheable(MacroAssembler* masm,
                        Handle<JSObject> object,
                        Register receiver,
                        Register scratch1,
                        Register scratch2,
                        Register scratch3,
                        Handle<JSObject> interceptor_holder,
                        LookupResult* lookup,
                        Handle<String> name,
                        const CallOptimization& optimization,
                        Label* miss_label) {
    ASSERT(optimization.is_constant_call());
    ASSERT(!lookup->holder()->IsGlobalObject());

    int depth1 = kInvalidProtoDepth;
    int depth2 = kInvalidProtoDepth;
    bool can_do_fast_api_call = false;
    if (optimization.is_simple_api_call() &&
        !lookup->holder()->IsGlobalObject()) {
      depth1 = optimization.GetPrototypeDepthOfExpectedType(
          object, interceptor_holder);
      if (depth1 == kInvalidProtoDepth) {
        depth2 = optimization.GetPrototypeDepthOfExpectedType(
            interceptor_holder, Handle<JSObject>(lookup->holder()));
      }
      can_do_fast_api_call =
          depth1 != kInvalidProtoDepth || depth2 != kInvalidProtoDepth;
    }

    Counters* counters = masm->isolate()->counters();
    __ IncrementCounter(counters->call_const_interceptor(), 1);

    if (can_do_fast_api_call) {
      __ IncrementCounter(counters->call_const_interceptor_fast_api(), 1);
      ReserveSpaceForFastApiCall(masm, scratch1);
    }

    // Check that the maps from receiver to interceptor's holder
    // haven't changed and thus we can invoke interceptor.
    Label miss_cleanup;
    Label* miss = can_do_fast_api_call ? &miss_cleanup : miss_label;
    Register holder =
        stub_compiler_->CheckPrototypes(object, receiver, interceptor_holder,
                                        scratch1, scratch2, scratch3,
                                        name, depth1, miss);

    // Invoke an interceptor and if it provides a value,
    // branch to |regular_invoke|.
    Label regular_invoke;
    LoadWithInterceptor(masm, receiver, holder, interceptor_holder,
                        &regular_invoke);

    // Interceptor returned nothing for this property. Try to use cached
    // constant function.

    // Check that the maps from interceptor's holder to constant function's
    // holder haven't changed and thus we can use cached constant function.
    if (*interceptor_holder != lookup->holder()) {
      stub_compiler_->CheckPrototypes(interceptor_holder, receiver,
                                      Handle<JSObject>(lookup->holder()),
                                      scratch1, scratch2, scratch3,
                                      name, depth2, miss);
    } else {
      // CheckPrototypes has a side effect of fetching a 'holder'
      // for API (object which is instanceof for the signature). It's
      // safe to omit it here, as if present, it should be fetched
      // by the previous CheckPrototypes.
      ASSERT(depth2 == kInvalidProtoDepth);
    }

    // Invoke function.
    if (can_do_fast_api_call) {
      GenerateFastApiCall(masm, optimization, arguments_.immediate());
    } else {
      CallKind call_kind = CallICBase::Contextual::decode(extra_ic_state_)
          ? CALL_AS_FUNCTION
          : CALL_AS_METHOD;
      __ InvokeFunction(optimization.constant_function(), arguments_,
                        JUMP_FUNCTION, NullCallWrapper(), call_kind);
    }

    // Deferred code for fast API call case---clean preallocated space.
    if (can_do_fast_api_call) {
      __ bind(&miss_cleanup);
      FreeSpaceForFastApiCall(masm, scratch1);
      __ jmp(miss_label);
    }

    // Invoke a regular function.
    __ bind(&regular_invoke);
    if (can_do_fast_api_call) {
      FreeSpaceForFastApiCall(masm, scratch1);
    }
  }

  void CompileRegular(MacroAssembler* masm,
                      Handle<JSObject> object,
                      Register receiver,
                      Register scratch1,
                      Register scratch2,
                      Register scratch3,
                      Handle<String> name,
                      Handle<JSObject> interceptor_holder,
                      Label* miss_label) {
    Register holder =
        stub_compiler_->CheckPrototypes(object, receiver, interceptor_holder,
                                        scratch1, scratch2, scratch3,
                                        name, miss_label);

    FrameScope scope(masm, StackFrame::INTERNAL);
    // Save the name_ register across the call.
    __ push(name_);

    PushInterceptorArguments(masm, receiver, holder, name_, interceptor_holder);

    __ CallExternalReference(
        ExternalReference(IC_Utility(IC::kLoadPropertyWithInterceptorForCall),
                          masm->isolate()),
        6);

    // Restore the name_ register.
    __ pop(name_);

    // Leave the internal frame.
  }

  void LoadWithInterceptor(MacroAssembler* masm,
                           Register receiver,
                           Register holder,
                           Handle<JSObject> holder_obj,
                           Label* interceptor_succeeded) {
    {
      FrameScope scope(masm, StackFrame::INTERNAL);
      __ push(holder);  // Save the holder.
      __ push(name_);  // Save the name.

      CompileCallLoadPropertyWithInterceptor(masm,
                                             receiver,
                                             holder,
                                             name_,
                                             holder_obj);

      __ pop(name_);  // Restore the name.
      __ pop(receiver);  // Restore the holder.
      // Leave the internal frame.
    }

    __ CompareRoot(rax, Heap::kNoInterceptorResultSentinelRootIndex);
    __ j(not_equal, interceptor_succeeded);
  }

  StubCompiler* stub_compiler_;
  const ParameterCount& arguments_;
  Register name_;
  Code::ExtraICState extra_ic_state_;
};


void StubCompiler::GenerateLoadMiss(MacroAssembler* masm, Code::Kind kind) {
  ASSERT(kind == Code::LOAD_IC || kind == Code::KEYED_LOAD_IC);
  Handle<Code> code = (kind == Code::LOAD_IC)
      ? masm->isolate()->builtins()->LoadIC_Miss()
      : masm->isolate()->builtins()->KeyedLoadIC_Miss();
  __ Jump(code, RelocInfo::CODE_TARGET);
}


void StubCompiler::GenerateKeyedLoadMissForceGeneric(MacroAssembler* masm) {
  Handle<Code> code =
      masm->isolate()->builtins()->KeyedLoadIC_MissForceGeneric();
  __ Jump(code, RelocInfo::CODE_TARGET);
}


// Both name_reg and receiver_reg are preserved on jumps to miss_label,
// but may be destroyed if store is successful.
void StubCompiler::GenerateStoreField(MacroAssembler* masm,
                                      Handle<JSObject> object,
                                      int index,
                                      Handle<Map> transition,
                                      Handle<String> name,
                                      Register receiver_reg,
                                      Register name_reg,
                                      Register scratch1,
                                      Register scratch2,
                                      Label* miss_label) {
  LookupResult lookup(masm->isolate());
  object->Lookup(*name, &lookup);
  if (lookup.IsFound() && (lookup.IsReadOnly() || !lookup.IsCacheable())) {
    // In sloppy mode, we could just return the value and be done. However, we
    // might be in strict mode, where we have to throw. Since we cannot tell,
    // go into slow case unconditionally.
    __ jmp(miss_label);
    return;
  }

  // Check that the map of the object hasn't changed.
  CompareMapMode mode = transition.is_null() ? ALLOW_ELEMENT_TRANSITION_MAPS
                                             : REQUIRE_EXACT_MAP;
  __ CheckMap(receiver_reg, Handle<Map>(object->map()),
              miss_label, DO_SMI_CHECK, mode);

  // Perform global security token check if needed.
  if (object->IsJSGlobalProxy()) {
    __ CheckAccessGlobalProxy(receiver_reg, scratch1, miss_label);
  }

  // Check that we are allowed to write this.
  if (!transition.is_null() && object->GetPrototype()->IsJSObject()) {
    JSObject* holder;
    if (lookup.IsFound()) {
      holder = lookup.holder();
    } else {
      // Find the top object.
      holder = *object;
      do {
        holder = JSObject::cast(holder->GetPrototype());
      } while (holder->GetPrototype()->IsJSObject());
    }
    // We need an extra register, push.
    __ push(name_reg);
    Label miss_pop, done_check;
    CheckPrototypes(object, receiver_reg, Handle<JSObject>(holder), name_reg,
                    scratch1, scratch2, name, &miss_pop);
    __ jmp(&done_check);
    __ bind(&miss_pop);
    __ pop(name_reg);
    __ jmp(miss_label);
    __ bind(&done_check);
    __ pop(name_reg);
  }

  // Stub never generated for non-global objects that require access
  // checks.
  ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());

  // Perform map transition for the receiver if necessary.
  if (!transition.is_null() && (object->map()->unused_property_fields() == 0)) {
    // The properties must be extended before we can store the value.
    // We jump to a runtime call that extends the properties array.
    __ pop(scratch1);  // Return address.
    __ push(receiver_reg);
    __ Push(transition);
    __ push(rax);
    __ push(scratch1);
    __ TailCallExternalReference(
        ExternalReference(IC_Utility(IC::kSharedStoreIC_ExtendStorage),
                          masm->isolate()),
        3,
        1);
    return;
  }

  if (!transition.is_null()) {
    // Update the map of the object.
    __ Move(scratch1, transition);
    __ movq(FieldOperand(receiver_reg, HeapObject::kMapOffset), scratch1);

    // Update the write barrier for the map field and pass the now unused
    // name_reg as scratch register.
    __ RecordWriteField(receiver_reg,
                        HeapObject::kMapOffset,
                        scratch1,
                        name_reg,
                        kDontSaveFPRegs,
                        OMIT_REMEMBERED_SET,
                        OMIT_SMI_CHECK);
  }

  // Adjust for the number of properties stored in the object. Even in the
  // face of a transition we can use the old map here because the size of the
  // object and the number of in-object properties is not going to change.
  index -= object->map()->inobject_properties();

  if (index < 0) {
    // Set the property straight into the object.
    int offset = object->map()->instance_size() + (index * kPointerSize);
    __ movq(FieldOperand(receiver_reg, offset), rax);

    // Update the write barrier for the array address.
    // Pass the value being stored in the now unused name_reg.
    __ movq(name_reg, rax);
    __ RecordWriteField(
        receiver_reg, offset, name_reg, scratch1, kDontSaveFPRegs);
  } else {
    // Write to the properties array.
    int offset = index * kPointerSize + FixedArray::kHeaderSize;
    // Get the properties array (optimistically).
    __ movq(scratch1, FieldOperand(receiver_reg, JSObject::kPropertiesOffset));
    __ movq(FieldOperand(scratch1, offset), rax);

    // Update the write barrier for the array address.
    // Pass the value being stored in the now unused name_reg.
    __ movq(name_reg, rax);
    __ RecordWriteField(
        scratch1, offset, name_reg, receiver_reg, kDontSaveFPRegs);
  }

  // Return the value (register rax).
  __ ret(0);
}


// Generate code to check that a global property cell is empty. Create
// the property cell at compilation time if no cell exists for the
// property.
static void GenerateCheckPropertyCell(MacroAssembler* masm,
                                      Handle<GlobalObject> global,
                                      Handle<String> name,
                                      Register scratch,
                                      Label* miss) {
  Handle<JSGlobalPropertyCell> cell =
      GlobalObject::EnsurePropertyCell(global, name);
  ASSERT(cell->value()->IsTheHole());
  __ Move(scratch, cell);
  __ Cmp(FieldOperand(scratch, JSGlobalPropertyCell::kValueOffset),
         masm->isolate()->factory()->the_hole_value());
  __ j(not_equal, miss);
}


// Calls GenerateCheckPropertyCell for each global object in the prototype
// chain from object to (but not including) holder.
static void GenerateCheckPropertyCells(MacroAssembler* masm,
                                       Handle<JSObject> object,
                                       Handle<JSObject> holder,
                                       Handle<String> name,
                                       Register scratch,
                                       Label* miss) {
  Handle<JSObject> current = object;
  while (!current.is_identical_to(holder)) {
    if (current->IsGlobalObject()) {
      GenerateCheckPropertyCell(masm,
                                Handle<GlobalObject>::cast(current),
                                name,
                                scratch,
                                miss);
    }
    current = Handle<JSObject>(JSObject::cast(current->GetPrototype()));
  }
}

#undef __
#define __ ACCESS_MASM((masm()))
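// From here on, '__' emits through the StubCompiler's own assembler, masm().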


Register StubCompiler::CheckPrototypes(Handle<JSObject> object,
                                       Register object_reg,
                                       Handle<JSObject> holder,
                                       Register holder_reg,
                                       Register scratch1,
                                       Register scratch2,
                                       Handle<String> name,
                                       int save_at_depth,
                                       Label* miss) {
  // Make sure there's no overlap between holder and object registers.
  ASSERT(!scratch1.is(object_reg) && !scratch1.is(holder_reg));
  ASSERT(!scratch2.is(object_reg) && !scratch2.is(holder_reg)
         && !scratch2.is(scratch1));

  // Keep track of the current object in register reg. On the first
  // iteration, reg is an alias for object_reg, on later iterations,
  // it is an alias for holder_reg.
  Register reg = object_reg;
  int depth = 0;

  if (save_at_depth == depth) {
    __ movq(Operand(rsp, kPointerSize), object_reg);
  }

  // Check the maps in the prototype chain.
  // Traverse the prototype chain from the object and do map checks.
  Handle<JSObject> current = object;
  while (!current.is_identical_to(holder)) {
    ++depth;

    // Only global objects and objects that do not require access
    // checks are allowed in stubs.
    ASSERT(current->IsJSGlobalProxy() || !current->IsAccessCheckNeeded());

    Handle<JSObject> prototype(JSObject::cast(current->GetPrototype()));
    if (!current->HasFastProperties() &&
        !current->IsJSGlobalObject() &&
        !current->IsJSGlobalProxy()) {
      if (!name->IsSymbol()) {
        name = factory()->LookupSymbol(name);
      }
      ASSERT(current->property_dictionary()->FindEntry(*name) ==
             StringDictionary::kNotFound);

      GenerateDictionaryNegativeLookup(masm(), miss, reg, name,
                                       scratch1, scratch2);

      __ movq(scratch1, FieldOperand(reg, HeapObject::kMapOffset));
      reg = holder_reg;  // From now on the object will be in holder_reg.
      __ movq(reg, FieldOperand(scratch1, Map::kPrototypeOffset));
    } else {
      bool in_new_space = heap()->InNewSpace(*prototype);
      Handle<Map> current_map(current->map());
      if (in_new_space) {
        // Save the map in scratch1 for later.
        __ movq(scratch1, FieldOperand(reg, HeapObject::kMapOffset));
      }
      __ CheckMap(reg, Handle<Map>(current_map),
                  miss, DONT_DO_SMI_CHECK, ALLOW_ELEMENT_TRANSITION_MAPS);

      // Check access rights to the global object. This has to happen after
      // the map check so that we know that the object is actually a global
      // object.
      if (current->IsJSGlobalProxy()) {
        __ CheckAccessGlobalProxy(reg, scratch2, miss);
      }
      reg = holder_reg;  // From now on the object will be in holder_reg.

      if (in_new_space) {
        // The prototype is in new space; we cannot store a reference to it
        // in the code. Load it from the map.
        __ movq(reg, FieldOperand(scratch1, Map::kPrototypeOffset));
      } else {
        // The prototype is in old space; load it directly.
        __ Move(reg, prototype);
      }
    }

    if (save_at_depth == depth) {
      __ movq(Operand(rsp, kPointerSize), reg);
    }

    // Go to the next object in the prototype chain.
    current = prototype;
  }
  ASSERT(current.is_identical_to(holder));

  // Log the check depth.
  LOG(isolate(), IntEvent("check-maps-depth", depth + 1));

  // Check the holder map.
  __ CheckMap(reg, Handle<Map>(holder->map()),
              miss, DONT_DO_SMI_CHECK, ALLOW_ELEMENT_TRANSITION_MAPS);

  // Perform security check for access to the global object.
  ASSERT(current->IsJSGlobalProxy() || !current->IsAccessCheckNeeded());
  if (current->IsJSGlobalProxy()) {
    __ CheckAccessGlobalProxy(reg, scratch1, miss);
  }

  // If we've skipped any global objects, it's not enough to verify that
  // their maps haven't changed. We also need to check that the property
  // cell for the property is still empty.
  GenerateCheckPropertyCells(masm(), object, holder, name, scratch1, miss);

  // Return the register containing the holder.
  return reg;
}


void StubCompiler::GenerateLoadField(Handle<JSObject> object,
                                     Handle<JSObject> holder,
                                     Register receiver,
                                     Register scratch1,
                                     Register scratch2,
                                     Register scratch3,
                                     int index,
                                     Handle<String> name,
                                     Label* miss) {
  // Check that the receiver isn't a smi.
  __ JumpIfSmi(receiver, miss);

  // Check the prototype chain.
  Register reg = CheckPrototypes(
      object, receiver, holder, scratch1, scratch2, scratch3, name, miss);

  // Get the value from the properties.
  GenerateFastPropertyLoad(masm(), rax, reg, holder, index);
  __ ret(0);
}


void StubCompiler::GenerateDictionaryLoadCallback(Register receiver,
                                                  Register name_reg,
                                                  Register scratch1,
                                                  Register scratch2,
                                                  Register scratch3,
                                                  Handle<AccessorInfo> callback,
                                                  Handle<String> name,
                                                  Label* miss) {
  ASSERT(!receiver.is(scratch1));
  ASSERT(!receiver.is(scratch2));
  ASSERT(!receiver.is(scratch3));

  // Load the properties dictionary.
  Register dictionary = scratch1;
  __ movq(dictionary, FieldOperand(receiver, JSObject::kPropertiesOffset));

  // Probe the dictionary.
  Label probe_done;
  StringDictionaryLookupStub::GeneratePositiveLookup(masm(),
                                                     miss,
                                                     &probe_done,
                                                     dictionary,
                                                     name_reg,
                                                     scratch2,
                                                     scratch3);
  __ bind(&probe_done);

  // If probing finds an entry in the dictionary, scratch3 contains the
  // index into the dictionary. Check that the value is the callback.
  Register index = scratch3;
  const int kElementsStartOffset =
      StringDictionary::kHeaderSize +
      StringDictionary::kElementsStartIndex * kPointerSize;
  const int kValueOffset = kElementsStartOffset + kPointerSize;
  __ movq(scratch2,
          Operand(dictionary, index, times_pointer_size,
                  kValueOffset - kHeapObjectTag));
  __ movq(scratch3, callback, RelocInfo::EMBEDDED_OBJECT);
  __ cmpq(scratch2, scratch3);
  __ j(not_equal, miss);
}


void StubCompiler::GenerateLoadCallback(Handle<JSObject> object,
                                        Handle<JSObject> holder,
                                        Register receiver,
                                        Register name_reg,
                                        Register scratch1,
                                        Register scratch2,
                                        Register scratch3,
                                        Register scratch4,
                                        Handle<AccessorInfo> callback,
                                        Handle<String> name,
                                        Label* miss) {
  // Check that the receiver isn't a smi.
  __ JumpIfSmi(receiver, miss);

  // Check that the maps haven't changed.
  Register reg = CheckPrototypes(object, receiver, holder, scratch1,
                                 scratch2, scratch3, name, miss);

  if (!holder->HasFastProperties() && !holder->IsJSGlobalObject()) {
    GenerateDictionaryLoadCallback(
        reg, name_reg, scratch2, scratch3, scratch4, callback, name, miss);
  }

  // Insert additional parameters into the stack frame above return address.
  ASSERT(!scratch2.is(reg));
  __ pop(scratch2);  // Get return address to place it below.

  __ push(receiver);  // receiver
  __ push(reg);  // holder
  if (heap()->InNewSpace(callback->data())) {
    __ Move(scratch1, callback);
    __ push(FieldOperand(scratch1, AccessorInfo::kDataOffset));  // data
  } else {
    __ Push(Handle<Object>(callback->data()));
  }
  __ PushAddress(ExternalReference::isolate_address());  // isolate
  __ push(name_reg);  // name
  // Save a pointer to where we pushed the arguments pointer.
  // This will be passed as the const AccessorInfo& to the C++ callback.

#if defined(__MINGW64__)
  Register accessor_info_arg = rdx;
  Register name_arg = rcx;
#elif defined(_WIN64)
  // Win64 uses first register--rcx--for returned value.
  Register accessor_info_arg = r8;
  Register name_arg = rdx;
#else
  Register accessor_info_arg = rsi;
  Register name_arg = rdi;
#endif

  ASSERT(!name_arg.is(scratch2));
  __ movq(name_arg, rsp);
  __ push(scratch2);  // Restore return address.

  // 4 elements array for v8::Arguments::values_ and handler for name.
  const int kStackSpace = 5;

  // Allocate v8::AccessorInfo in non-GCed stack space.
  const int kArgStackSpace = 1;

  __ PrepareCallApiFunction(kArgStackSpace);
  __ lea(rax, Operand(name_arg, 4 * kPointerSize));

  // v8::AccessorInfo::args_.
  __ movq(StackSpaceOperand(0), rax);

  // The context register (rsi) has been saved in PrepareCallApiFunction and
  // could be used to pass arguments.
  __ lea(accessor_info_arg, StackSpaceOperand(0));

  Address getter_address = v8::ToCData<Address>(callback->getter());
  __ CallApiFunctionAndReturn(getter_address, kStackSpace);
}


void StubCompiler::GenerateLoadConstant(Handle<JSObject> object,
                                        Handle<JSObject> holder,
                                        Register receiver,
                                        Register scratch1,
                                        Register scratch2,
                                        Register scratch3,
                                        Handle<JSFunction> value,
                                        Handle<String> name,
                                        Label* miss) {
  // Check that the receiver isn't a smi.
  __ JumpIfSmi(receiver, miss);

  // Check that the maps haven't changed.
  CheckPrototypes(
      object, receiver, holder, scratch1, scratch2, scratch3, name, miss);

  // Return the constant value.
  __ LoadHeapObject(rax, value);
  __ ret(0);
}


void StubCompiler::GenerateLoadInterceptor(Handle<JSObject> object,
                                           Handle<JSObject> interceptor_holder,
                                           LookupResult* lookup,
                                           Register receiver,
                                           Register name_reg,
                                           Register scratch1,
                                           Register scratch2,
                                           Register scratch3,
                                           Handle<String> name,
                                           Label* miss) {
  ASSERT(interceptor_holder->HasNamedInterceptor());
  ASSERT(!interceptor_holder->GetNamedInterceptor()->getter()->IsUndefined());

  // Check that the receiver isn't a smi.
  __ JumpIfSmi(receiver, miss);

  // So far the most popular follow ups for interceptor loads are FIELD
  // and CALLBACKS, so inline only them, other cases may be added
  // later.
  bool compile_followup_inline = false;
  if (lookup->IsFound() && lookup->IsCacheable()) {
    if (lookup->IsField()) {
      compile_followup_inline = true;
    } else if (lookup->type() == CALLBACKS &&
               lookup->GetCallbackObject()->IsAccessorInfo()) {
      AccessorInfo* callback = AccessorInfo::cast(lookup->GetCallbackObject());
      compile_followup_inline = callback->getter() != NULL &&
          callback->IsCompatibleReceiver(*object);
    }
  }

  if (compile_followup_inline) {
    // Compile the interceptor call, followed by inline code to load the
    // property from further up the prototype chain if the call fails.
    // Check that the maps haven't changed.
    Register holder_reg = CheckPrototypes(object, receiver, interceptor_holder,
                                          scratch1, scratch2, scratch3,
                                          name, miss);
    ASSERT(holder_reg.is(receiver) || holder_reg.is(scratch1));

    // Preserve the receiver register explicitly whenever it is different from
    // the holder and it is needed should the interceptor return without any
    // result. The CALLBACKS case needs the receiver to be passed into C++ code,
    // the FIELD case might cause a miss during the prototype check.
    bool must_perform_prototype_check = *interceptor_holder != lookup->holder();
    bool must_preserve_receiver_reg = !receiver.is(holder_reg) &&
        (lookup->type() == CALLBACKS || must_perform_prototype_check);

    // Save necessary data before invoking an interceptor.
    // Requires a frame to make GC aware of pushed pointers.
    {
      FrameScope frame_scope(masm(), StackFrame::INTERNAL);

      if (must_preserve_receiver_reg) {
        __ push(receiver);
      }
      __ push(holder_reg);
      __ push(name_reg);

      // Invoke an interceptor. Note: map checks from receiver to
      // interceptor's holder has been compiled before (see a caller
      // of this method.)
      CompileCallLoadPropertyWithInterceptor(masm(),
                                             receiver,
                                             holder_reg,
                                             name_reg,
                                             interceptor_holder);

      // Check if interceptor provided a value for property. If it's
      // the case, return immediately.
      Label interceptor_failed;
      __ CompareRoot(rax, Heap::kNoInterceptorResultSentinelRootIndex);
      __ j(equal, &interceptor_failed);
      frame_scope.GenerateLeaveFrame();
      __ ret(0);

      __ bind(&interceptor_failed);
      __ pop(name_reg);
      __ pop(holder_reg);
      if (must_preserve_receiver_reg) {
        __ pop(receiver);
      }

      // Leave the internal frame.
    }

    // Check that the maps from interceptor's holder to lookup's holder
    // haven't changed. And load lookup's holder into |holder| register.
    if (must_perform_prototype_check) {
      holder_reg = CheckPrototypes(interceptor_holder,
                                   holder_reg,
                                   Handle<JSObject>(lookup->holder()),
                                   scratch1,
                                   scratch2,
                                   scratch3,
                                   name,
                                   miss);
    }

    if (lookup->IsField()) {
      // We found FIELD property in prototype chain of interceptor's holder.
      // Retrieve a field from field's holder.
      GenerateFastPropertyLoad(masm(), rax, holder_reg,
                               Handle<JSObject>(lookup->holder()),
                               lookup->GetFieldIndex());
      __ ret(0);
    } else {
      // We found CALLBACKS property in prototype chain of interceptor's
      // holder.
      ASSERT(lookup->type() == CALLBACKS);
      Handle<AccessorInfo> callback(
          AccessorInfo::cast(lookup->GetCallbackObject()));
      ASSERT(callback->getter() != NULL);

      // Tail call to runtime.
      // Important invariant in CALLBACKS case: the code above must be
      // structured to never clobber |receiver| register.
      __ pop(scratch2);  // return address
      __ push(receiver);
      __ push(holder_reg);
      __ Move(holder_reg, callback);
      __ push(FieldOperand(holder_reg, AccessorInfo::kDataOffset));
      __ PushAddress(ExternalReference::isolate_address());
      __ push(holder_reg);
      __ push(name_reg);
      __ push(scratch2);  // restore return address

      ExternalReference ref =
          ExternalReference(IC_Utility(IC::kLoadCallbackProperty),
                            isolate());
      __ TailCallExternalReference(ref, 6, 1);
    }
  } else {  // !compile_followup_inline
    // Call the runtime system to load the interceptor.
    // Check that the maps haven't changed.
    Register holder_reg = CheckPrototypes(object, receiver, interceptor_holder,
                                          scratch1, scratch2, scratch3,
                                          name, miss);
    __ pop(scratch2);  // save old return address
    PushInterceptorArguments(masm(), receiver, holder_reg,
                             name_reg, interceptor_holder);
    __ push(scratch2);  // restore old return address

    ExternalReference ref = ExternalReference(
        IC_Utility(IC::kLoadPropertyWithInterceptorForLoad), isolate());
    __ TailCallExternalReference(ref, 6, 1);
  }
}


void CallStubCompiler::GenerateNameCheck(Handle<String> name, Label* miss) {
  if (kind_ == Code::KEYED_CALL_IC) {
    __ Cmp(rcx, name);
    __ j(not_equal, miss);
  }
}


void CallStubCompiler::GenerateGlobalReceiverCheck(Handle<JSObject> object,
                                                   Handle<JSObject> holder,
                                                   Handle<String> name,
                                                   Label* miss) {
  ASSERT(holder->IsGlobalObject());

  // Get the number of arguments.
  const int argc = arguments().immediate();

  // Get the receiver from the stack.
  __ movq(rdx, Operand(rsp, (argc + 1) * kPointerSize));

  // Check that the maps haven't changed.
  __ JumpIfSmi(rdx, miss);
  CheckPrototypes(object, rdx, holder, rbx, rax, rdi, name, miss);
}

1349 
1350 
1351 void CallStubCompiler::GenerateLoadFunctionFromCell(
1352  Handle<JSGlobalPropertyCell> cell,
1353  Handle<JSFunction> function,
1354  Label* miss) {
1355  // Get the value from the cell.
1356  __ Move(rdi, cell);
1358 
1359  // Check that the cell contains the same function.
1360  if (heap()->InNewSpace(*function)) {
1361  // We can't embed a pointer to a function in new space so we have
1362  // to verify that the shared function info is unchanged. This has
1363  // the nice side effect that multiple closures based on the same
1364  // function can all use this call IC. Before we load through the
1365  // function, we have to verify that it still is a function.
1366  __ JumpIfSmi(rdi, miss);
1367  __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rax);
1368  __ j(not_equal, miss);
1369 
1370  // Check the shared function info. Make sure it hasn't changed.
1371  __ Move(rax, Handle<SharedFunctionInfo>(function->shared()));
1373  } else {
1374  __ Cmp(rdi, function);
1375  }
1376  __ j(not_equal, miss);
1377 }


void CallStubCompiler::GenerateMissBranch() {
  Handle<Code> code =
      isolate()->stub_cache()->ComputeCallMiss(arguments().immediate(),
                                               kind_,
                                               extra_state_);
  __ Jump(code, RelocInfo::CODE_TARGET);
}


Handle<Code> CallStubCompiler::CompileCallField(Handle<JSObject> object,
                                                Handle<JSObject> holder,
                                                int index,
                                                Handle<String> name) {
  // ----------- S t a t e -------------
  // rcx                 : function name
  // rsp[0]              : return address
  // rsp[8]              : argument argc
  // rsp[16]             : argument argc - 1
  // ...
  // rsp[argc * 8]       : argument 1
  // rsp[(argc + 1) * 8] : argument 0 = receiver
  // -----------------------------------
  Label miss;

  GenerateNameCheck(name, &miss);

  // Get the receiver from the stack.
  const int argc = arguments().immediate();
  __ movq(rdx, Operand(rsp, (argc + 1) * kPointerSize));

  // Check that the receiver isn't a smi.
  __ JumpIfSmi(rdx, &miss);

  // Do the right check and compute the holder register.
  Register reg = CheckPrototypes(object, rdx, holder, rbx, rax, rdi,
                                 name, &miss);

  GenerateFastPropertyLoad(masm(), rdi, reg, holder, index);

  // Check that the function really is a function.
  __ JumpIfSmi(rdi, &miss);
  __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rbx);
  __ j(not_equal, &miss);

  // Patch the receiver on the stack with the global proxy if
  // necessary.
  if (object->IsGlobalObject()) {
    __ movq(rdx, FieldOperand(rdx, GlobalObject::kGlobalReceiverOffset));
    __ movq(Operand(rsp, (argc + 1) * kPointerSize), rdx);
  }

  // Invoke the function.
  CallKind call_kind = CallICBase::Contextual::decode(extra_state_)
      ? CALL_AS_FUNCTION
      : CALL_AS_METHOD;
  __ InvokeFunction(rdi, arguments(), JUMP_FUNCTION,
                    NullCallWrapper(), call_kind);

  // Handle call cache miss.
  __ bind(&miss);
  GenerateMissBranch();

  // Return the generated code.
  return GetCode(Code::FIELD, name);
}


Handle<Code> CallStubCompiler::CompileArrayPushCall(
    Handle<Object> object,
    Handle<JSObject> holder,
    Handle<JSGlobalPropertyCell> cell,
    Handle<JSFunction> function,
    Handle<String> name) {
  // ----------- S t a t e -------------
  //  -- rcx                 : name
  //  -- rsp[0]              : return address
  //  -- rsp[(argc - n) * 8] : arg[n] (zero-based)
  //  -- ...
  //  -- rsp[(argc + 1) * 8] : receiver
  // -----------------------------------

  // If object is not an array, bail out to regular call.
  if (!object->IsJSArray() || !cell.is_null()) return Handle<Code>::null();

  Label miss;
  GenerateNameCheck(name, &miss);

  // Get the receiver from the stack.
  const int argc = arguments().immediate();
  __ movq(rdx, Operand(rsp, (argc + 1) * kPointerSize));

  // Check that the receiver isn't a smi.
  __ JumpIfSmi(rdx, &miss);

  CheckPrototypes(Handle<JSObject>::cast(object), rdx, holder, rbx, rax, rdi,
                  name, &miss);

  if (argc == 0) {
    // Noop, return the length.
    __ movq(rax, FieldOperand(rdx, JSArray::kLengthOffset));
    __ ret((argc + 1) * kPointerSize);
  } else {
    Label call_builtin;

    if (argc == 1) {  // Otherwise fall through to call builtin.
      Label attempt_to_grow_elements, with_write_barrier;

      // Get the elements array of the object.
      __ movq(rdi, FieldOperand(rdx, JSArray::kElementsOffset));

      // Check that the elements are in fast mode and writable.
      __ Cmp(FieldOperand(rdi, HeapObject::kMapOffset),
             factory()->fixed_array_map());
      __ j(not_equal, &call_builtin);

      // Get the array's length into rax and calculate new length.
      __ SmiToInteger32(rax, FieldOperand(rdx, JSArray::kLengthOffset));
      __ addl(rax, Immediate(argc));

      // Get the elements' length into rcx.
      __ SmiToInteger32(rcx, FieldOperand(rdi, FixedArray::kLengthOffset));

      // Check if we could survive without allocation.
      __ cmpl(rax, rcx);
      __ j(greater, &attempt_to_grow_elements);

      // Check if value is a smi.
      __ movq(rcx, Operand(rsp, argc * kPointerSize));
      __ JumpIfNotSmi(rcx, &with_write_barrier);

      // Save new length.
      __ Integer32ToSmiField(FieldOperand(rdx, JSArray::kLengthOffset), rax);

      // Store the value.
      __ movq(FieldOperand(rdi,
                           rax,
                           times_pointer_size,
                           FixedArray::kHeaderSize - argc * kPointerSize),
              rcx);

      __ Integer32ToSmi(rax, rax);  // Return new length as smi.
      __ ret((argc + 1) * kPointerSize);

      __ bind(&with_write_barrier);

      __ movq(rbx, FieldOperand(rdx, HeapObject::kMapOffset));

      if (FLAG_smi_only_arrays && !FLAG_trace_elements_transitions) {
        Label fast_object, not_fast_object;
        __ CheckFastObjectElements(rbx, &not_fast_object, Label::kNear);
        __ jmp(&fast_object);
        // In case of fast smi-only, convert to fast object, otherwise bail out.
        __ bind(&not_fast_object);
        __ CheckFastSmiElements(rbx, &call_builtin);
        // rdx: receiver
        // rbx: map

        Label try_holey_map;
        __ LoadTransitionedArrayMapConditional(FAST_SMI_ELEMENTS,
                                               FAST_ELEMENTS,
                                               rbx,
                                               rdi,
                                               &try_holey_map);

        ElementsTransitionGenerator::
            GenerateMapChangeElementsTransition(masm());
        // Restore edi.
        __ movq(rdi, FieldOperand(rdx, JSArray::kElementsOffset));
        __ jmp(&fast_object);

        __ bind(&try_holey_map);
        __ LoadTransitionedArrayMapConditional(FAST_HOLEY_SMI_ELEMENTS,
                                               FAST_HOLEY_ELEMENTS,
                                               rbx,
                                               rdi,
                                               &call_builtin);
        ElementsTransitionGenerator::
            GenerateMapChangeElementsTransition(masm());
        __ movq(rdi, FieldOperand(rdx, JSArray::kElementsOffset));
        __ bind(&fast_object);
      } else {
        __ CheckFastObjectElements(rbx, &call_builtin);
      }

      // Save new length.
      __ Integer32ToSmiField(FieldOperand(rdx, JSArray::kLengthOffset), rax);

      // Store the value.
      __ lea(rdx, FieldOperand(rdi,
                               rax, times_pointer_size,
                               FixedArray::kHeaderSize - argc * kPointerSize));
      __ movq(Operand(rdx, 0), rcx);

      __ RecordWrite(rdi, rdx, rcx, kDontSaveFPRegs, EMIT_REMEMBERED_SET,
                     OMIT_SMI_CHECK);

      __ Integer32ToSmi(rax, rax);  // Return new length as smi.
      __ ret((argc + 1) * kPointerSize);

      __ bind(&attempt_to_grow_elements);
      if (!FLAG_inline_new) {
        __ jmp(&call_builtin);
      }

      __ movq(rbx, Operand(rsp, argc * kPointerSize));
      // Growing elements that are SMI-only requires special handling in case
      // the new element is non-Smi. For now, delegate to the builtin.
      Label no_fast_elements_check;
      __ JumpIfSmi(rbx, &no_fast_elements_check);
      __ movq(rcx, FieldOperand(rdx, HeapObject::kMapOffset));
      __ CheckFastObjectElements(rcx, &call_builtin, Label::kFar);
      __ bind(&no_fast_elements_check);

      ExternalReference new_space_allocation_top =
          ExternalReference::new_space_allocation_top_address(isolate());
      ExternalReference new_space_allocation_limit =
          ExternalReference::new_space_allocation_limit_address(isolate());

      const int kAllocationDelta = 4;
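      // (Grow the elements array by four slots at a time: slot 0 receives the
      // pushed value and the remaining three are filled with the hole below.)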
1600  // Load top.
1601  __ Load(rcx, new_space_allocation_top);
1602 
1603  // Check if it's the end of elements.
1604  __ lea(rdx, FieldOperand(rdi,
1606  FixedArray::kHeaderSize - argc * kPointerSize));
1607  __ cmpq(rdx, rcx);
1608  __ j(not_equal, &call_builtin);
1609  __ addq(rcx, Immediate(kAllocationDelta * kPointerSize));
1610  Operand limit_operand =
1611  masm()->ExternalOperand(new_space_allocation_limit);
1612  __ cmpq(rcx, limit_operand);
1613  __ j(above, &call_builtin);
1614 
1615  // We fit and could grow elements.
1616  __ Store(new_space_allocation_top, rcx);
1617 
1618  // Push the argument...
1619  __ movq(Operand(rdx, 0), rbx);
1620  // ... and fill the rest with holes.
1621  __ LoadRoot(kScratchRegister, Heap::kTheHoleValueRootIndex);
1622  for (int i = 1; i < kAllocationDelta; i++) {
1623  __ movq(Operand(rdx, i * kPointerSize), kScratchRegister);
1624  }
1625 
1626  // We know the elements array is in new space so we don't need the
1627  // remembered set, but we just pushed a value onto it so we may have to
1628  // tell the incremental marker to rescan the object that we just grew. We
1629  // don't need to worry about the holes because they are in old space and
1630  // already marked black.
1631  __ RecordWrite(rdi, rdx, rbx, kDontSaveFPRegs, OMIT_REMEMBERED_SET);
1632 
1633  // Restore receiver to rdx as finish sequence assumes it's here.
1634  __ movq(rdx, Operand(rsp, (argc + 1) * kPointerSize));
1635 
1636  // Increment element's and array's sizes.
1637  __ SmiAddConstant(FieldOperand(rdi, FixedArray::kLengthOffset),
1638  Smi::FromInt(kAllocationDelta));
1639 
1640  // Make new length a smi before returning it.
1641  __ Integer32ToSmi(rax, rax);
1642  __ movq(FieldOperand(rdx, JSArray::kLengthOffset), rax);
1643 
1644  __ ret((argc + 1) * kPointerSize);
1645  }
1646 
1647  __ bind(&call_builtin);
1648  __ TailCallExternalReference(ExternalReference(Builtins::c_ArrayPush,
1649  isolate()),
1650  argc + 1,
1651  1);
1652  }
1653 
1654  __ bind(&miss);
1655  GenerateMissBranch();
1656 
1657  // Return the generated code.
1658  return GetCode(function);
1659 }
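The grow-in-place path above works only because new-space allocation is a simple bump pointer: when the elements array is the most recently allocated object, its end coincides with the allocation top, so the stub can claim the next few words directly. A minimal C++ sketch of that check, under illustrative names rather than V8's actual allocator interface:

#include <stdint.h>
#include <stddef.h>

struct NewSpace {
  uintptr_t top;    // next free address in the young generation
  uintptr_t limit;  // end of the current allocation area
};

// Grow in place only if the array ends exactly at the allocation top and the
// bumped top still fits under the limit (cf. the cmpq/addq/cmpq sequence above).
bool TryGrowInPlace(NewSpace* space, uintptr_t end_of_elements,
                    size_t delta_bytes) {
  if (end_of_elements != space->top) return false;  // not the last object
  uintptr_t new_top = space->top + delta_bytes;
  if (new_top > space->limit) return false;         // no room left
  space->top = new_top;                             // claim the words
  return true;
}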
1660 
1661 
1662 Handle<Code> CallStubCompiler::CompileArrayPopCall(
1663  Handle<Object> object,
1664  Handle<JSObject> holder,
1665  Handle<JSGlobalPropertyCell> cell,
1666  Handle<JSFunction> function,
1667  Handle<String> name) {
1668  // ----------- S t a t e -------------
1669  // -- rcx : name
1670  // -- rsp[0] : return address
1671  // -- rsp[(argc - n) * 8] : arg[n] (zero-based)
1672  // -- ...
1673  // -- rsp[(argc + 1) * 8] : receiver
1674  // -----------------------------------
1675 
1676  // If object is not an array, bail out to regular call.
1677  if (!object->IsJSArray() || !cell.is_null()) return Handle<Code>::null();
1678 
1679  Label miss, return_undefined, call_builtin;
1680  GenerateNameCheck(name, &miss);
1681 
1682  // Get the receiver from the stack.
1683  const int argc = arguments().immediate();
1684  __ movq(rdx, Operand(rsp, (argc + 1) * kPointerSize));
1685 
1686  // Check that the receiver isn't a smi.
1687  __ JumpIfSmi(rdx, &miss);
1688 
1689  CheckPrototypes(Handle<JSObject>::cast(object), rdx, holder, rbx, rax, rdi,
1690  name, &miss);
1691 
1692  // Get the elements array of the object.
1693  __ movq(rbx, FieldOperand(rdx, JSArray::kElementsOffset));
1694 
1695  // Check that the elements are in fast mode and writable.
1696  __ CompareRoot(FieldOperand(rbx, HeapObject::kMapOffset),
1697  Heap::kFixedArrayMapRootIndex);
1698  __ j(not_equal, &call_builtin);
1699 
1700  // Get the array's length into rcx and calculate new length.
1701  __ SmiToInteger32(rcx, FieldOperand(rdx, JSArray::kLengthOffset));
1702  __ subl(rcx, Immediate(1));
1703  __ j(negative, &return_undefined);
1704 
1705  // Get the last element.
1706  __ LoadRoot(r9, Heap::kTheHoleValueRootIndex);
1707  __ movq(rax, FieldOperand(rbx,
1708  rcx, times_pointer_size,
1709  FixedArray::kHeaderSize));
1710  // Check if element is already the hole.
1711  __ cmpq(rax, r9);
1712  // If so, call slow-case to also check prototypes for value.
1713  __ j(equal, &call_builtin);
1714 
1715  // Set the array's length.
1716  __ Integer32ToSmiField(FieldOperand(rdx, JSArray::kLengthOffset), rcx);
1717 
1718  // Fill with the hole and return original value.
1719  __ movq(FieldOperand(rbx,
1720  rcx, times_pointer_size,
1721  FixedArray::kHeaderSize),
1722  r9);
1723  __ ret((argc + 1) * kPointerSize);
1724 
1725  __ bind(&return_undefined);
1726  __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
1727  __ ret((argc + 1) * kPointerSize);
1728 
1729  __ bind(&call_builtin);
1730  __ TailCallExternalReference(
1731  ExternalReference(Builtins::c_ArrayPop, isolate()),
1732  argc + 1,
1733  1);
1734 
1735  __ bind(&miss);
1736  GenerateMissBranch();
1737 
1738  // Return the generated code.
1739  return GetCode(function);
1740 }
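Both the push and pop stubs above convert between tagged smis and raw 32-bit integers (Integer32ToSmi, SmiToInteger32). On this x64 port the smi payload occupies the upper 32 bits of the word and the tag bits below are zero, so both conversions are plain shifts. A sketch, assuming the 32-bit smi value size asserted later in this file; the names mirror the MacroAssembler helpers but are illustrative:

#include <stdint.h>

const int kSmiShiftBits = 32;  // payload lives in the upper 32 bits

intptr_t Integer32ToSmi(int32_t value) {
  return static_cast<intptr_t>(value) << kSmiShiftBits;  // low tag bits stay 0
}

int32_t SmiToInteger32(intptr_t smi) {
  return static_cast<int32_t>(smi >> kSmiShiftBits);  // drop the tag
}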
1741 
1742 
1743 Handle<Code> CallStubCompiler::CompileStringCharCodeAtCall(
1744  Handle<Object> object,
1745  Handle<JSObject> holder,
1746  Handle<JSGlobalPropertyCell> cell,
1747  Handle<JSFunction> function,
1748  Handle<String> name) {
1749  // ----------- S t a t e -------------
1750  // -- rcx : function name
1751  // -- rsp[0] : return address
1752  // -- rsp[(argc - n) * 8] : arg[n] (zero-based)
1753  // -- ...
1754  // -- rsp[(argc + 1) * 8] : receiver
1755  // -----------------------------------
1756 
1757  // If object is not a string, bail out to regular call.
1758  if (!object->IsString() || !cell.is_null()) return Handle<Code>::null();
1759 
1760  const int argc = arguments().immediate();
1761 
1762  Label miss;
1763  Label name_miss;
1764  Label index_out_of_range;
1765  Label* index_out_of_range_label = &index_out_of_range;
1766  if (kind_ == Code::CALL_IC &&
1767  (CallICBase::StringStubState::decode(extra_state_) ==
1768  DEFAULT_STRING_STUB)) {
1769  index_out_of_range_label = &miss;
1770  }
1771  GenerateNameCheck(name, &name_miss);
1772 
1773  // Check that the maps starting from the prototype haven't changed.
1774  GenerateDirectLoadGlobalFunctionPrototype(masm(),
1775  Context::STRING_FUNCTION_INDEX,
1776  rax,
1777  &miss);
1778  ASSERT(!object.is_identical_to(holder));
1779  CheckPrototypes(Handle<JSObject>(JSObject::cast(object->GetPrototype())),
1780  rax, holder, rbx, rdx, rdi, name, &miss);
1781 
1782  Register receiver = rbx;
1783  Register index = rdi;
1784  Register result = rax;
1785  __ movq(receiver, Operand(rsp, (argc + 1) * kPointerSize));
1786  if (argc > 0) {
1787  __ movq(index, Operand(rsp, (argc - 0) * kPointerSize));
1788  } else {
1789  __ LoadRoot(index, Heap::kUndefinedValueRootIndex);
1790  }
1791 
1792  StringCharCodeAtGenerator generator(receiver,
1793  index,
1794  result,
1795  &miss, // When not a string.
1796  &miss, // When not a number.
1797  index_out_of_range_label,
1798  STRING_INDEX_IS_NUMBER);
1799  generator.GenerateFast(masm());
1800  __ ret((argc + 1) * kPointerSize);
1801 
1802  StubRuntimeCallHelper call_helper;
1803  generator.GenerateSlow(masm(), call_helper);
1804 
1805  if (index_out_of_range.is_linked()) {
1806  __ bind(&index_out_of_range);
1807  __ LoadRoot(rax, Heap::kNanValueRootIndex);
1808  __ ret((argc + 1) * kPointerSize);
1809  }
1810 
1811  __ bind(&miss);
1812  // Restore function name in rcx.
1813  __ Move(rcx, name);
1814  __ bind(&name_miss);
1815  GenerateMissBranch();
1816 
1817  // Return the generated code.
1818  return GetCode(function);
1819 }
1820 
1821 
1822 Handle<Code> CallStubCompiler::CompileStringCharAtCall(
1823  Handle<Object> object,
1824  Handle<JSObject> holder,
1825  Handle<JSGlobalPropertyCell> cell,
1826  Handle<JSFunction> function,
1827  Handle<String> name) {
1828  // ----------- S t a t e -------------
1829  // -- rcx : function name
1830  // -- rsp[0] : return address
1831  // -- rsp[(argc - n) * 8] : arg[n] (zero-based)
1832  // -- ...
1833  // -- rsp[(argc + 1) * 8] : receiver
1834  // -----------------------------------
1835 
1836  // If object is not a string, bail out to regular call.
1837  if (!object->IsString() || !cell.is_null()) return Handle<Code>::null();
1838 
1839  const int argc = arguments().immediate();
1840  Label miss;
1841  Label name_miss;
1842  Label index_out_of_range;
1843  Label* index_out_of_range_label = &index_out_of_range;
1844  if (kind_ == Code::CALL_IC &&
1845  (CallICBase::StringStubState::decode(extra_state_) ==
1846  DEFAULT_STRING_STUB)) {
1847  index_out_of_range_label = &miss;
1848  }
1849  GenerateNameCheck(name, &name_miss);
1850 
1851  // Check that the maps starting from the prototype haven't changed.
1852  GenerateDirectLoadGlobalFunctionPrototype(masm(),
1853  Context::STRING_FUNCTION_INDEX,
1854  rax,
1855  &miss);
1856  ASSERT(!object.is_identical_to(holder));
1857  CheckPrototypes(Handle<JSObject>(JSObject::cast(object->GetPrototype())),
1858  rax, holder, rbx, rdx, rdi, name, &miss);
1859 
1860  Register receiver = rax;
1861  Register index = rdi;
1862  Register scratch = rdx;
1863  Register result = rax;
1864  __ movq(receiver, Operand(rsp, (argc + 1) * kPointerSize));
1865  if (argc > 0) {
1866  __ movq(index, Operand(rsp, (argc - 0) * kPointerSize));
1867  } else {
1868  __ LoadRoot(index, Heap::kUndefinedValueRootIndex);
1869  }
1870 
1871  StringCharAtGenerator generator(receiver,
1872  index,
1873  scratch,
1874  result,
1875  &miss, // When not a string.
1876  &miss, // When not a number.
1877  index_out_of_range_label,
1878  STRING_INDEX_IS_NUMBER);
1879  generator.GenerateFast(masm());
1880  __ ret((argc + 1) * kPointerSize);
1881 
1882  StubRuntimeCallHelper call_helper;
1883  generator.GenerateSlow(masm(), call_helper);
1884 
1885  if (index_out_of_range.is_linked()) {
1886  __ bind(&index_out_of_range);
1887  __ LoadRoot(rax, Heap::kEmptyStringRootIndex);
1888  __ ret((argc + 1) * kPointerSize);
1889  }
1890  __ bind(&miss);
1891  // Restore function name in rcx.
1892  __ Move(rcx, name);
1893  __ bind(&name_miss);
1894  GenerateMissBranch();
1895 
1896  // Return the generated code.
1897  return GetCode(function);
1898 }
1899 
1900 
1901 Handle<Code> CallStubCompiler::CompileStringFromCharCodeCall(
1902  Handle<Object> object,
1903  Handle<JSObject> holder,
1904  Handle<JSGlobalPropertyCell> cell,
1905  Handle<JSFunction> function,
1906  Handle<String> name) {
1907  // ----------- S t a t e -------------
1908  // -- rcx : function name
1909  // -- rsp[0] : return address
1910  // -- rsp[(argc - n) * 8] : arg[n] (zero-based)
1911  // -- ...
1912  // -- rsp[(argc + 1) * 8] : receiver
1913  // -----------------------------------
1914 
1915  // If the object is not a JSObject or we got an unexpected number of
1916  // arguments, bail out to the regular call.
1917  const int argc = arguments().immediate();
1918  if (!object->IsJSObject() || argc != 1) return Handle<Code>::null();
1919 
1920  Label miss;
1921  GenerateNameCheck(name, &miss);
1922 
1923  if (cell.is_null()) {
1924  __ movq(rdx, Operand(rsp, 2 * kPointerSize));
1925  __ JumpIfSmi(rdx, &miss);
1926  CheckPrototypes(Handle<JSObject>::cast(object), rdx, holder, rbx, rax, rdi,
1927  name, &miss);
1928  } else {
1929  ASSERT(cell->value() == *function);
1930  GenerateGlobalReceiverCheck(Handle<JSObject>::cast(object), holder, name,
1931  &miss);
1932  GenerateLoadFunctionFromCell(cell, function, &miss);
1933  }
1934 
1935  // Load the char code argument.
1936  Register code = rbx;
1937  __ movq(code, Operand(rsp, 1 * kPointerSize));
1938 
1939  // Check the code is a smi.
1940  Label slow;
1941  __ JumpIfNotSmi(code, &slow);
1942 
1943  // Convert the smi code to uint16.
1944  __ SmiAndConstant(code, code, Smi::FromInt(0xffff));
1945 
1946  StringCharFromCodeGenerator generator(code, rax);
1947  generator.GenerateFast(masm());
1948  __ ret(2 * kPointerSize);
1949 
1950  StubRuntimeCallHelper call_helper;
1951  generator.GenerateSlow(masm(), call_helper);
1952 
1953  // Tail call the full function. We do not have to patch the receiver
1954  // because the function makes no use of it.
1955  __ bind(&slow);
1956  CallKind call_kind = CallICBase::Contextual::decode(extra_state_)
1957  ? CALL_AS_FUNCTION
1958  : CALL_AS_METHOD;
1959  __ InvokeFunction(function, arguments(), JUMP_FUNCTION,
1960  NullCallWrapper(), call_kind);
1961 
1962  __ bind(&miss);
1963  // rcx: function name.
1964  GenerateMissBranch();
1965 
1966  // Return the generated code.
1967  return cell.is_null() ? GetCode(function) : GetCode(Code::NORMAL, name);
1968 }
1969 
1970 
1971 Handle<Code> CallStubCompiler::CompileMathFloorCall(
1972  Handle<Object> object,
1973  Handle<JSObject> holder,
1974  Handle<JSGlobalPropertyCell> cell,
1975  Handle<JSFunction> function,
1976  Handle<String> name) {
1977  // TODO(872): implement this.
1978  return Handle<Code>::null();
1979 }
1980 
1981 
1982 Handle<Code> CallStubCompiler::CompileMathAbsCall(
1983  Handle<Object> object,
1984  Handle<JSObject> holder,
1985  Handle<JSGlobalPropertyCell> cell,
1986  Handle<JSFunction> function,
1987  Handle<String> name) {
1988  // ----------- S t a t e -------------
1989  // -- rcx : function name
1990  // -- rsp[0] : return address
1991  // -- rsp[(argc - n) * 8] : arg[n] (zero-based)
1992  // -- ...
1993  // -- rsp[(argc + 1) * 8] : receiver
1994  // -----------------------------------
1995 
1996  // If the object is not a JSObject or we got an unexpected number of
1997  // arguments, bail out to the regular call.
1998  const int argc = arguments().immediate();
1999  if (!object->IsJSObject() || argc != 1) return Handle<Code>::null();
2000 
2001  Label miss;
2002  GenerateNameCheck(name, &miss);
2003 
2004  if (cell.is_null()) {
2005  __ movq(rdx, Operand(rsp, 2 * kPointerSize));
2006  __ JumpIfSmi(rdx, &miss);
2007  CheckPrototypes(Handle<JSObject>::cast(object), rdx, holder, rbx, rax, rdi,
2008  name, &miss);
2009  } else {
2010  ASSERT(cell->value() == *function);
2011  GenerateGlobalReceiverCheck(Handle<JSObject>::cast(object), holder, name,
2012  &miss);
2013  GenerateLoadFunctionFromCell(cell, function, &miss);
2014  }
2015  // Load the (only) argument into rax.
2016  __ movq(rax, Operand(rsp, 1 * kPointerSize));
2017 
2018  // Check if the argument is a smi.
2019  Label not_smi;
2020  STATIC_ASSERT(kSmiTag == 0);
2021  __ JumpIfNotSmi(rax, &not_smi);
2022  __ SmiToInteger32(rax, rax);
2023 
2024  // Set rbx to 1...1 (== -1) if the argument is negative, or to 0...0
2025  // otherwise.
2026  __ movl(rbx, rax);
2027  __ sarl(rbx, Immediate(kBitsPerInt - 1));
2028 
2029  // Do bitwise not or do nothing depending on rbx.
2030  __ xorl(rax, rbx);
2031 
2032  // Add 1 or do nothing depending on rbx.
2033  __ subl(rax, rbx);
2034 
2035  // If the result is still negative, go to the slow case.
2036  // This only happens for the most negative smi.
2037  Label slow;
2038  __ j(negative, &slow);
2039 
2040  // Smi case done.
2041  __ Integer32ToSmi(rax, rax);
2042  __ ret(2 * kPointerSize);
2043 
2044  // Check if the argument is a heap number and load its value.
2045  __ bind(&not_smi);
2046  __ CheckMap(rax, factory()->heap_number_map(), &slow, DONT_DO_SMI_CHECK);
2047  __ movq(rbx, FieldOperand(rax, HeapNumber::kValueOffset));
2048 
2049  // Check the sign of the argument. If the argument is positive,
2050  // just return it.
2051  Label negative_sign;
2052  const int sign_mask_shift =
2053  (HeapNumber::kExponentOffset - HeapNumber::kValueOffset) * kBitsPerByte;
2054  __ movq(rdi, static_cast<int64_t>(HeapNumber::kSignMask) << sign_mask_shift,
2055  RelocInfo::NONE);
2056  __ testq(rbx, rdi);
2057  __ j(not_zero, &negative_sign);
2058  __ ret(2 * kPointerSize);
2059 
2060  // If the argument is negative, clear the sign, and return a new
2061  // number. We still have the sign mask in rdi.
2062  __ bind(&negative_sign);
2063  __ xor_(rbx, rdi);
2064  __ AllocateHeapNumber(rax, rdx, &slow);
2065  __ movq(FieldOperand(rax, HeapNumber::kValueOffset), rbx);
2066  __ ret(2 * kPointerSize);
2067 
2068  // Tail call the full function. We do not have to patch the receiver
2069  // because the function makes no use of it.
2070  __ bind(&slow);
2071  CallKind call_kind = CallICBase::Contextual::decode(extra_state_)
2072  ? CALL_AS_FUNCTION
2073  : CALL_AS_METHOD;
2074  __ InvokeFunction(function, arguments(), JUMP_FUNCTION,
2075  NullCallWrapper(), call_kind);
2076 
2077  __ bind(&miss);
2078  // rcx: function name.
2079  GenerateMissBranch();
2080 
2081  // Return the generated code.
2082  return cell.is_null() ? GetCode(function) : GetCode(Code::NORMAL, name);
2083 }
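The smi fast path above computes |x| without a branch: the arithmetic right shift smears the sign bit into a mask, the xor conditionally complements, and subtracting the mask conditionally adds one. The same trick in C++:

#include <stdint.h>

int32_t BranchlessAbs(int32_t x) {
  int32_t mask = x >> 31;    // 0 if x >= 0, -1 (all one bits) if x < 0
  return (x ^ mask) - mask;  // no-op for x >= 0, two's-complement negate otherwise
}

Like the stub, this still yields a negative result for the most negative value (INT32_MIN), which is why the generated code tests the sign afterwards and falls back to the slow case.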
2084 
2085 
2086 Handle<Code> CallStubCompiler::CompileFastApiCall(
2087  const CallOptimization& optimization,
2088  Handle<Object> object,
2089  Handle<JSObject> holder,
2090  Handle<JSGlobalPropertyCell> cell,
2091  Handle<JSFunction> function,
2092  Handle<String> name) {
2093  ASSERT(optimization.is_simple_api_call());
2094  // Bail out if object is a global object, as we don't want to
2095  // repatch it to the global receiver.
2096  if (object->IsGlobalObject()) return Handle<Code>::null();
2097  if (!cell.is_null()) return Handle<Code>::null();
2098  if (!object->IsJSObject()) return Handle<Code>::null();
2099  int depth = optimization.GetPrototypeDepthOfExpectedType(
2100  Handle<JSObject>::cast(object), holder);
2101  if (depth == kInvalidProtoDepth) return Handle<Code>::null();
2102 
2103  Label miss, miss_before_stack_reserved;
2104  GenerateNameCheck(name, &miss_before_stack_reserved);
2105 
2106  // Get the receiver from the stack.
2107  const int argc = arguments().immediate();
2108  __ movq(rdx, Operand(rsp, (argc + 1) * kPointerSize));
2109 
2110  // Check that the receiver isn't a smi.
2111  __ JumpIfSmi(rdx, &miss_before_stack_reserved);
2112 
2113  Counters* counters = isolate()->counters();
2114  __ IncrementCounter(counters->call_const(), 1);
2115  __ IncrementCounter(counters->call_const_fast_api(), 1);
2116 
2117  // Allocate space for v8::Arguments implicit values. Must be initialized
2118  // before calling any runtime function.
2119  __ subq(rsp, Immediate(kFastApiCallArguments * kPointerSize));
2120 
2121  // Check that the maps haven't changed and find a Holder as a side effect.
2122  CheckPrototypes(Handle<JSObject>::cast(object), rdx, holder, rbx, rax, rdi,
2123  name, depth, &miss);
2124 
2125  // Move the return address on top of the stack.
2126  __ movq(rax, Operand(rsp, 4 * kPointerSize));
2127  __ movq(Operand(rsp, 0 * kPointerSize), rax);
2128 
2129  GenerateFastApiCall(masm(), optimization, argc);
2130 
2131  __ bind(&miss);
2132  __ addq(rsp, Immediate(kFastApiCallArguments * kPointerSize));
2133 
2134  __ bind(&miss_before_stack_reserved);
2135  GenerateMissBranch();
2136 
2137  // Return the generated code.
2138  return GetCode(function);
2139 }
2140 
2141 
2142 Handle<Code> CallStubCompiler::CompileCallConstant(Handle<Object> object,
2143  Handle<JSObject> holder,
2144  Handle<JSFunction> function,
2145  Handle<String> name,
2146  CheckType check) {
2147  // ----------- S t a t e -------------
2148  // rcx : function name
2149  // rsp[0] : return address
2150  // rsp[8] : argument argc
2151  // rsp[16] : argument argc - 1
2152  // ...
2153  // rsp[argc * 8] : argument 1
2154  // rsp[(argc + 1) * 8] : argument 0 = receiver
2155  // -----------------------------------
2156 
2157  if (HasCustomCallGenerator(function)) {
2158  Handle<Code> code = CompileCustomCall(object, holder,
2159  Handle<JSGlobalPropertyCell>::null(),
2160  function, name);
2161  // A null handle means bail out to the regular compiler code below.
2162  if (!code.is_null()) return code;
2163  }
2164 
2165  Label miss;
2166  GenerateNameCheck(name, &miss);
2167 
2168  // Get the receiver from the stack.
2169  const int argc = arguments().immediate();
2170  __ movq(rdx, Operand(rsp, (argc + 1) * kPointerSize));
2171 
2172  // Check that the receiver isn't a smi.
2173  if (check != NUMBER_CHECK) {
2174  __ JumpIfSmi(rdx, &miss);
2175  }
2176 
2177  // Make sure that it's okay not to patch the on stack receiver
2178  // unless we're doing a receiver map check.
2179  ASSERT(!object->IsGlobalObject() || check == RECEIVER_MAP_CHECK);
2180 
2181  Counters* counters = isolate()->counters();
2182  switch (check) {
2183  case RECEIVER_MAP_CHECK:
2184  __ IncrementCounter(counters->call_const(), 1);
2185 
2186  // Check that the maps haven't changed.
2187  CheckPrototypes(Handle<JSObject>::cast(object), rdx, holder, rbx, rax,
2188  rdi, name, &miss);
2189 
2190  // Patch the receiver on the stack with the global proxy if
2191  // necessary.
2192  if (object->IsGlobalObject()) {
2193  __ movq(rdx, FieldOperand(rdx, GlobalObject::kGlobalReceiverOffset));
2194  __ movq(Operand(rsp, (argc + 1) * kPointerSize), rdx);
2195  }
2196  break;
2197 
2198  case STRING_CHECK:
2199  if (function->IsBuiltin() || !function->shared()->is_classic_mode()) {
2200  // Check that the object is a string or a symbol.
2201  __ CmpObjectType(rdx, FIRST_NONSTRING_TYPE, rax);
2202  __ j(above_equal, &miss);
2203  // Check that the maps starting from the prototype haven't changed.
2204  GenerateDirectLoadGlobalFunctionPrototype(
2205  masm(), Context::STRING_FUNCTION_INDEX, rax, &miss);
2206  CheckPrototypes(
2207  Handle<JSObject>(JSObject::cast(object->GetPrototype())),
2208  rax, holder, rbx, rdx, rdi, name, &miss);
2209  } else {
2210  // Calling non-strict non-builtins with a value as the receiver
2211  // requires boxing.
2212  __ jmp(&miss);
2213  }
2214  break;
2215 
2216  case NUMBER_CHECK:
2217  if (function->IsBuiltin() || !function->shared()->is_classic_mode()) {
2218  Label fast;
2219  // Check that the object is a smi or a heap number.
2220  __ JumpIfSmi(rdx, &fast);
2221  __ CmpObjectType(rdx, HEAP_NUMBER_TYPE, rax);
2222  __ j(not_equal, &miss);
2223  __ bind(&fast);
2224  // Check that the maps starting from the prototype haven't changed.
2225  GenerateDirectLoadGlobalFunctionPrototype(
2226  masm(), Context::NUMBER_FUNCTION_INDEX, rax, &miss);
2227  CheckPrototypes(
2228  Handle<JSObject>(JSObject::cast(object->GetPrototype())),
2229  rax, holder, rbx, rdx, rdi, name, &miss);
2230  } else {
2231  // Calling non-strict non-builtins with a value as the receiver
2232  // requires boxing.
2233  __ jmp(&miss);
2234  }
2235  break;
2236 
2237  case BOOLEAN_CHECK:
2238  if (function->IsBuiltin() || !function->shared()->is_classic_mode()) {
2239  Label fast;
2240  // Check that the object is a boolean.
2241  __ CompareRoot(rdx, Heap::kTrueValueRootIndex);
2242  __ j(equal, &fast);
2243  __ CompareRoot(rdx, Heap::kFalseValueRootIndex);
2244  __ j(not_equal, &miss);
2245  __ bind(&fast);
2246  // Check that the maps starting from the prototype haven't changed.
2247  GenerateDirectLoadGlobalFunctionPrototype(
2248  masm(), Context::BOOLEAN_FUNCTION_INDEX, rax, &miss);
2249  CheckPrototypes(
2250  Handle<JSObject>(JSObject::cast(object->GetPrototype())),
2251  rax, holder, rbx, rdx, rdi, name, &miss);
2252  } else {
2253  // Calling non-strict non-builtins with a value as the receiver
2254  // requires boxing.
2255  __ jmp(&miss);
2256  }
2257  break;
2258  }
2259 
2260  CallKind call_kind = CallICBase::Contextual::decode(extra_state_)
2261  ? CALL_AS_FUNCTION
2262  : CALL_AS_METHOD;
2263  __ InvokeFunction(function, arguments(), JUMP_FUNCTION,
2264  NullCallWrapper(), call_kind);
2265 
2266  // Handle call cache miss.
2267  __ bind(&miss);
2268  GenerateMissBranch();
2269 
2270  // Return the generated code.
2271  return GetCode(function);
2272 }
2273 
2274 
2275 Handle<Code> CallStubCompiler::CompileCallInterceptor(Handle<JSObject> object,
2276  Handle<JSObject> holder,
2277  Handle<String> name) {
2278  // ----------- S t a t e -------------
2279  // rcx : function name
2280  // rsp[0] : return address
2281  // rsp[8] : argument argc
2282  // rsp[16] : argument argc - 1
2283  // ...
2284  // rsp[argc * 8] : argument 1
2285  // rsp[(argc + 1) * 8] : argument 0 = receiver
2286  // -----------------------------------
2287  Label miss;
2288  GenerateNameCheck(name, &miss);
2289 
2290  // Get the number of arguments.
2291  const int argc = arguments().immediate();
2292 
2293  LookupResult lookup(isolate());
2294  LookupPostInterceptor(holder, name, &lookup);
2295 
2296  // Get the receiver from the stack.
2297  __ movq(rdx, Operand(rsp, (argc + 1) * kPointerSize));
2298 
2299  CallInterceptorCompiler compiler(this, arguments(), rcx, extra_state_);
2300  compiler.Compile(masm(), object, holder, name, &lookup, rdx, rbx, rdi, rax,
2301  &miss);
2302 
2303  // Restore receiver.
2304  __ movq(rdx, Operand(rsp, (argc + 1) * kPointerSize));
2305 
2306  // Check that the function really is a function.
2307  __ JumpIfSmi(rax, &miss);
2308  __ CmpObjectType(rax, JS_FUNCTION_TYPE, rbx);
2309  __ j(not_equal, &miss);
2310 
2311  // Patch the receiver on the stack with the global proxy if
2312  // necessary.
2313  if (object->IsGlobalObject()) {
2314  __ movq(rdx, FieldOperand(rdx, GlobalObject::kGlobalReceiverOffset));
2315  __ movq(Operand(rsp, (argc + 1) * kPointerSize), rdx);
2316  }
2317 
2318  // Invoke the function.
2319  __ movq(rdi, rax);
2320  CallKind call_kind = CallICBase::Contextual::decode(extra_state_)
2321  ? CALL_AS_FUNCTION
2322  : CALL_AS_METHOD;
2323  __ InvokeFunction(rdi, arguments(), JUMP_FUNCTION,
2324  NullCallWrapper(), call_kind);
2325 
2326  // Handle load cache miss.
2327  __ bind(&miss);
2328  GenerateMissBranch();
2329 
2330  // Return the generated code.
2331  return GetCode(Code::INTERCEPTOR, name);
2332 }
2333 
2334 
2335 Handle<Code> CallStubCompiler::CompileCallGlobal(
2336  Handle<JSObject> object,
2337  Handle<GlobalObject> holder,
2338  Handle<JSGlobalPropertyCell> cell,
2339  Handle<JSFunction> function,
2340  Handle<String> name) {
2341  // ----------- S t a t e -------------
2342  // rcx : function name
2343  // rsp[0] : return address
2344  // rsp[8] : argument argc
2345  // rsp[16] : argument argc - 1
2346  // ...
2347  // rsp[argc * 8] : argument 1
2348  // rsp[(argc + 1) * 8] : argument 0 = receiver
2349  // -----------------------------------
2350 
2351  if (HasCustomCallGenerator(function)) {
2352  Handle<Code> code = CompileCustomCall(object, holder, cell, function, name);
2353  // A null handle means bail out to the regular compiler code below.
2354  if (!code.is_null()) return code;
2355  }
2356 
2357  Label miss;
2358  GenerateNameCheck(name, &miss);
2359 
2360  // Get the number of arguments.
2361  const int argc = arguments().immediate();
2362  GenerateGlobalReceiverCheck(object, holder, name, &miss);
2363  GenerateLoadFunctionFromCell(cell, function, &miss);
2364 
2365  // Patch the receiver on the stack with the global proxy.
2366  if (object->IsGlobalObject()) {
2367  __ movq(rdx, FieldOperand(rdx, GlobalObject::kGlobalReceiverOffset));
2368  __ movq(Operand(rsp, (argc + 1) * kPointerSize), rdx);
2369  }
2370 
2371  // Set up the context (function already in rdi).
2372  __ movq(rsi, FieldOperand(rdi, JSFunction::kContextOffset));
2373 
2374  // Jump to the cached code (tail call).
2375  Counters* counters = isolate()->counters();
2376  __ IncrementCounter(counters->call_global_inline(), 1);
2377  ParameterCount expected(function->shared()->formal_parameter_count());
2378  CallKind call_kind = CallICBase::Contextual::decode(extra_state_)
2379  ? CALL_AS_FUNCTION
2380  : CALL_AS_METHOD;
2381  // We call indirectly through the code field in the function to
2382  // allow recompilation to take effect without changing any of the
2383  // call sites.
2384  __ movq(rdx, FieldOperand(rdi, JSFunction::kCodeEntryOffset));
2385  __ InvokeCode(rdx, expected, arguments(), JUMP_FUNCTION,
2386  NullCallWrapper(), call_kind);
2387 
2388  // Handle call cache miss.
2389  __ bind(&miss);
2390  __ IncrementCounter(counters->call_global_inline_miss(), 1);
2391  GenerateMissBranch();
2392 
2393  // Return the generated code.
2394  return GetCode(Code::NORMAL, name);
2395 }
2396 
2397 
2398 Handle<Code> StoreStubCompiler::CompileStoreField(Handle<JSObject> object,
2399  int index,
2400  Handle<Map> transition,
2401  Handle<String> name) {
2402  // ----------- S t a t e -------------
2403  // -- rax : value
2404  // -- rcx : name
2405  // -- rdx : receiver
2406  // -- rsp[0] : return address
2407  // -----------------------------------
2408  Label miss;
2409 
2410  // Generate store field code. Preserves receiver and name on jump to miss.
2411  GenerateStoreField(masm(),
2412  object,
2413  index,
2414  transition,
2415  name,
2416  rdx, rcx, rbx, rdi,
2417  &miss);
2418 
2419  // Handle store cache miss.
2420  __ bind(&miss);
2421  Handle<Code> ic = isolate()->builtins()->StoreIC_Miss();
2422  __ Jump(ic, RelocInfo::CODE_TARGET);
2423 
2424  // Return the generated code.
2425  return GetCode(transition.is_null()
2426  ? Code::FIELD
2427  : Code::MAP_TRANSITION, name);
2428 }
2429 
2430 
2431 Handle<Code> StoreStubCompiler::CompileStoreCallback(
2432  Handle<String> name,
2433  Handle<JSObject> receiver,
2434  Handle<JSObject> holder,
2435  Handle<AccessorInfo> callback) {
2436  // ----------- S t a t e -------------
2437  // -- rax : value
2438  // -- rcx : name
2439  // -- rdx : receiver
2440  // -- rsp[0] : return address
2441  // -----------------------------------
2442  Label miss;
2443  // Check that the maps haven't changed.
2444  __ JumpIfSmi(rdx, &miss);
2445  CheckPrototypes(receiver, rdx, holder, rbx, r8, rdi, name, &miss);
2446 
2447  // Stub never generated for non-global objects that require access checks.
2448  ASSERT(holder->IsJSGlobalProxy() || !holder->IsAccessCheckNeeded());
2449 
2450  __ pop(rbx); // remove the return address
2451  __ push(rdx); // receiver
2452  __ Push(callback); // callback info
2453  __ push(rcx); // name
2454  __ push(rax); // value
2455  __ push(rbx); // restore return address
2456 
2457  // Do tail-call to the runtime system.
2458  ExternalReference store_callback_property =
2459  ExternalReference(IC_Utility(IC::kStoreCallbackProperty), isolate());
2460  __ TailCallExternalReference(store_callback_property, 4, 1);
2461 
2462  // Handle store cache miss.
2463  __ bind(&miss);
2464  Handle<Code> ic = isolate()->builtins()->StoreIC_Miss();
2465  __ Jump(ic, RelocInfo::CODE_TARGET);
2466 
2467  // Return the generated code.
2468  return GetCode(Code::CALLBACKS, name);
2469 }
2470 
2471 
2472 #undef __
2473 #define __ ACCESS_MASM(masm)
2474 
2475 
2476 void StoreStubCompiler::GenerateStoreViaSetter(
2477  MacroAssembler* masm,
2478  Handle<JSFunction> setter) {
2479  // ----------- S t a t e -------------
2480  // -- rax : value
2481  // -- rcx : name
2482  // -- rdx : receiver
2483  // -- rsp[0] : return address
2484  // -----------------------------------
2485  {
2486  FrameScope scope(masm, StackFrame::INTERNAL);
2487 
2488  // Save value register, so we can restore it later.
2489  __ push(rax);
2490 
2491  if (!setter.is_null()) {
2492  // Call the JavaScript setter with receiver and value on the stack.
2493  __ push(rdx);
2494  __ push(rax);
2495  ParameterCount actual(1);
2496  __ InvokeFunction(setter, actual, CALL_FUNCTION, NullCallWrapper(),
2497  CALL_AS_METHOD);
2498  } else {
2499  // If we generate a global code snippet for deoptimization only, remember
2500  // the place to continue after deoptimization.
2501  masm->isolate()->heap()->SetSetterStubDeoptPCOffset(masm->pc_offset());
2502  }
2503 
2504  // We have to return the passed value, not the return value of the setter.
2505  __ pop(rax);
2506 
2507  // Restore context register.
2508  __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
2509  }
2510  __ ret(0);
2511 }
2512 
2513 
2514 #undef __
2515 #define __ ACCESS_MASM(masm())
2516 
2517 
2518 Handle<Code> StoreStubCompiler::CompileStoreViaSetter(
2519  Handle<String> name,
2520  Handle<JSObject> receiver,
2521  Handle<JSObject> holder,
2522  Handle<JSFunction> setter) {
2523  // ----------- S t a t e -------------
2524  // -- rax : value
2525  // -- rcx : name
2526  // -- rdx : receiver
2527  // -- rsp[0] : return address
2528  // -----------------------------------
2529  Label miss;
2530 
2531  // Check that the maps haven't changed.
2532  __ JumpIfSmi(rdx, &miss);
2533  CheckPrototypes(receiver, rdx, holder, rbx, r8, rdi, name, &miss);
2534 
2535  GenerateStoreViaSetter(masm(), setter);
2536 
2537  __ bind(&miss);
2538  Handle<Code> ic = isolate()->builtins()->StoreIC_Miss();
2539  __ Jump(ic, RelocInfo::CODE_TARGET);
2540 
2541  // Return the generated code.
2542  return GetCode(Code::CALLBACKS, name);
2543 }
2544 
2545 
2546 Handle<Code> StoreStubCompiler::CompileStoreInterceptor(
2547  Handle<JSObject> receiver,
2548  Handle<String> name) {
2549  // ----------- S t a t e -------------
2550  // -- rax : value
2551  // -- rcx : name
2552  // -- rdx : receiver
2553  // -- rsp[0] : return address
2554  // -----------------------------------
2555  Label miss;
2556 
2557  // Check that the map of the object hasn't changed.
2558  __ CheckMap(rdx, Handle<Map>(receiver->map()), &miss,
2559  DO_SMI_CHECK, ALLOW_ELEMENT_TRANSITION_MAPS);
2560 
2561  // Perform global security token check if needed.
2562  if (receiver->IsJSGlobalProxy()) {
2563  __ CheckAccessGlobalProxy(rdx, rbx, &miss);
2564  }
2565 
2566  // Stub never generated for non-global objects that require access
2567  // checks.
2568  ASSERT(receiver->IsJSGlobalProxy() || !receiver->IsAccessCheckNeeded());
2569 
2570  __ pop(rbx); // remove the return address
2571  __ push(rdx); // receiver
2572  __ push(rcx); // name
2573  __ push(rax); // value
2574  __ Push(Smi::FromInt(strict_mode_));
2575  __ push(rbx); // restore return address
2576 
2577  // Do tail-call to the runtime system.
2578  ExternalReference store_ic_property =
2579  ExternalReference(IC_Utility(IC::kStoreInterceptorProperty), isolate());
2580  __ TailCallExternalReference(store_ic_property, 4, 1);
2581 
2582  // Handle store cache miss.
2583  __ bind(&miss);
2584  Handle<Code> ic = isolate()->builtins()->StoreIC_Miss();
2585  __ Jump(ic, RelocInfo::CODE_TARGET);
2586 
2587  // Return the generated code.
2588  return GetCode(Code::INTERCEPTOR, name);
2589 }
2590 
2591 
2592 Handle<Code> StoreStubCompiler::CompileStoreGlobal(
2593  Handle<GlobalObject> object,
2594  Handle<JSGlobalPropertyCell> cell,
2595  Handle<String> name) {
2596  // ----------- S t a t e -------------
2597  // -- rax : value
2598  // -- rcx : name
2599  // -- rdx : receiver
2600  // -- rsp[0] : return address
2601  // -----------------------------------
2602  Label miss;
2603 
2604  // Check that the map of the global has not changed.
2605  __ Cmp(FieldOperand(rdx, HeapObject::kMapOffset),
2606  Handle<Map>(object->map()));
2607  __ j(not_equal, &miss);
2608 
2609  // Compute the cell operand to use.
2610  __ Move(rbx, cell);
2611  Operand cell_operand = FieldOperand(rbx, JSGlobalPropertyCell::kValueOffset);
2612 
2613  // Check that the value in the cell is not the hole. If it is, this
2614  // cell could have been deleted and reintroducing the global needs
2615  // to update the property details in the property dictionary of the
2616  // global object. We bail out to the runtime system to do that.
2617  __ CompareRoot(cell_operand, Heap::kTheHoleValueRootIndex);
2618  __ j(equal, &miss);
2619 
2620  // Store the value in the cell.
2621  __ movq(cell_operand, rax);
2622  // Cells are always rescanned, so no write barrier here.
2623 
2624  // Return the value (register rax).
2625  Counters* counters = isolate()->counters();
2626  __ IncrementCounter(counters->named_store_global_inline(), 1);
2627  __ ret(0);
2628 
2629  // Handle store cache miss.
2630  __ bind(&miss);
2631  __ IncrementCounter(counters->named_store_global_inline_miss(), 1);
2632  Handle<Code> ic = isolate()->builtins()->StoreIC_Miss();
2633  __ Jump(ic, RelocInfo::CODE_TARGET);
2634 
2635  // Return the generated code.
2636  return GetCode(Code::NORMAL, name);
2637 }
2638 
2639 
2640 Handle<Code> KeyedStoreStubCompiler::CompileStoreField(Handle<JSObject> object,
2641  int index,
2642  Handle<Map> transition,
2643  Handle<String> name) {
2644  // ----------- S t a t e -------------
2645  // -- rax : value
2646  // -- rcx : key
2647  // -- rdx : receiver
2648  // -- rsp[0] : return address
2649  // -----------------------------------
2650  Label miss;
2651 
2652  Counters* counters = isolate()->counters();
2653  __ IncrementCounter(counters->keyed_store_field(), 1);
2654 
2655  // Check that the name has not changed.
2656  __ Cmp(rcx, name);
2657  __ j(not_equal, &miss);
2658 
2659  // Generate store field code. Preserves receiver and name on jump to miss.
2660  GenerateStoreField(masm(),
2661  object,
2662  index,
2663  transition,
2664  name,
2665  rdx, rcx, rbx, rdi,
2666  &miss);
2667 
2668  // Handle store cache miss.
2669  __ bind(&miss);
2670  __ DecrementCounter(counters->keyed_store_field(), 1);
2671  Handle<Code> ic = isolate()->builtins()->KeyedStoreIC_Miss();
2672  __ Jump(ic, RelocInfo::CODE_TARGET);
2673 
2674  // Return the generated code.
2675  return GetCode(transition.is_null()
2676  ? Code::FIELD
2677  : Code::MAP_TRANSITION, name);
2678 }
2679 
2680 
2681 Handle<Code> KeyedStoreStubCompiler::CompileStoreElement(
2682  Handle<Map> receiver_map) {
2683  // ----------- S t a t e -------------
2684  // -- rax : value
2685  // -- rcx : key
2686  // -- rdx : receiver
2687  // -- rsp[0] : return address
2688  // -----------------------------------
2689 
2690  ElementsKind elements_kind = receiver_map->elements_kind();
2691  bool is_js_array = receiver_map->instance_type() == JS_ARRAY_TYPE;
2692  Handle<Code> stub =
2693  KeyedStoreElementStub(is_js_array, elements_kind, grow_mode_).GetCode();
2694 
2695  __ DispatchMap(rdx, receiver_map, stub, DO_SMI_CHECK);
2696 
2697  Handle<Code> ic = isolate()->builtins()->KeyedStoreIC_Miss();
2698  __ jmp(ic, RelocInfo::CODE_TARGET);
2699 
2700  // Return the generated code.
2701  return GetCode(Code::NORMAL, factory()->empty_string());
2702 }
2703 
2704 
2705 Handle<Code> KeyedStoreStubCompiler::CompileStorePolymorphic(
2706  MapHandleList* receiver_maps,
2707  CodeHandleList* handler_stubs,
2708  MapHandleList* transitioned_maps) {
2709  // ----------- S t a t e -------------
2710  // -- rax : value
2711  // -- rcx : key
2712  // -- rdx : receiver
2713  // -- rsp[0] : return address
2714  // -----------------------------------
2715  Label miss;
2716  __ JumpIfSmi(rdx, &miss, Label::kNear);
2717 
2718  __ movq(rdi, FieldOperand(rdx, HeapObject::kMapOffset));
2719  int receiver_count = receiver_maps->length();
2720  for (int i = 0; i < receiver_count; ++i) {
2721  // Check map and tail call if there's a match
2722  __ Cmp(rdi, receiver_maps->at(i));
2723  if (transitioned_maps->at(i).is_null()) {
2724  __ j(equal, handler_stubs->at(i), RelocInfo::CODE_TARGET);
2725  } else {
2726  Label next_map;
2727  __ j(not_equal, &next_map, Label::kNear);
2728  __ movq(rbx, transitioned_maps->at(i), RelocInfo::EMBEDDED_OBJECT);
2729  __ jmp(handler_stubs->at(i), RelocInfo::CODE_TARGET);
2730  __ bind(&next_map);
2731  }
2732  }
2733 
2734  __ bind(&miss);
2735  Handle<Code> ic = isolate()->builtins()->KeyedStoreIC_Miss();
2736  __ jmp(ic, RelocInfo::CODE_TARGET);
2737 
2738  // Return the generated code.
2739  return GetCode(Code::NORMAL, factory()->empty_string(), MEGAMORPHIC);
2740 }
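The polymorphic stub emitted above is a linear chain of map comparisons; a matching entry either tail-calls its handler directly or first installs the transitioned map. A simplified C++ sketch of the dispatch shape, with illustrative types that stand in for V8's:

#include <stddef.h>

struct Map;                 // compared by identity only
typedef void (*Handler)();  // stand-in for a tail-called handler stub

void DispatchPolymorphic(Map* receiver_map,
                         Map* const* maps,
                         Handler const* handlers,
                         size_t count,
                         Handler miss) {
  for (size_t i = 0; i < count; ++i) {
    if (receiver_map == maps[i]) {
      // The real stub may first store a transitioned map into the receiver.
      handlers[i]();
      return;
    }
  }
  miss();  // no match: fall through to KeyedStoreIC_Miss
}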
2741 
2742 
2743 Handle<Code> LoadStubCompiler::CompileLoadNonexistent(Handle<String> name,
2744  Handle<JSObject> object,
2745  Handle<JSObject> last) {
2746  // ----------- S t a t e -------------
2747  // -- rax : receiver
2748  // -- rcx : name
2749  // -- rsp[0] : return address
2750  // -----------------------------------
2751  Label miss;
2752 
2753  // Check that receiver is not a smi.
2754  __ JumpIfSmi(rax, &miss);
2755 
2756  // Check the maps of the full prototype chain. Also check that
2757  // global property cells up to (but not including) the last object
2758  // in the prototype chain are empty.
2759  CheckPrototypes(object, rax, last, rbx, rdx, rdi, name, &miss);
2760 
2761  // If the last object in the prototype chain is a global object,
2762  // check that the global property cell is empty.
2763  if (last->IsGlobalObject()) {
2764  GenerateCheckPropertyCell(
2765  masm(), Handle<GlobalObject>::cast(last), name, rdx, &miss);
2766  }
2767 
2768  // Return undefined if maps of the full prototype chain are still the
2769  // same and no global property with this name contains a value.
2770  __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
2771  __ ret(0);
2772 
2773  __ bind(&miss);
2774  GenerateLoadMiss(masm(), Code::LOAD_IC);
2775 
2776  // Return the generated code.
2777  return GetCode(Code::NONEXISTENT, factory()->empty_string());
2778 }
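A nonexistent-property stub caches a negative result, which stays valid only while every map on the receiver's prototype chain is the one recorded at compile time. A sketch of the invariant being guarded, with illustrative types:

struct Map;
struct Object {
  Map* map;
  Object* prototype;  // NULL at the end of the chain
};

// True if the chain still consists of exactly the maps recorded when the
// stub was compiled; any map transition invalidates the cached "undefined".
bool ChainUnchanged(const Object* receiver, Map* const* expected, int count) {
  const Object* current = receiver;
  for (int i = 0; i < count; ++i) {
    if (current == 0 || current->map != expected[i]) return false;
    current = current->prototype;
  }
  return true;
}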
2779 
2780 
2781 Handle<Code> LoadStubCompiler::CompileLoadField(Handle<JSObject> object,
2782  Handle<JSObject> holder,
2783  int index,
2784  Handle<String> name) {
2785  // ----------- S t a t e -------------
2786  // -- rax : receiver
2787  // -- rcx : name
2788  // -- rsp[0] : return address
2789  // -----------------------------------
2790  Label miss;
2791 
2792  GenerateLoadField(object, holder, rax, rbx, rdx, rdi, index, name, &miss);
2793  __ bind(&miss);
2794  GenerateLoadMiss(masm(), Code::LOAD_IC);
2795 
2796  // Return the generated code.
2797  return GetCode(Code::FIELD, name);
2798 }
2799 
2800 
2801 Handle<Code> LoadStubCompiler::CompileLoadCallback(
2802  Handle<String> name,
2803  Handle<JSObject> object,
2804  Handle<JSObject> holder,
2805  Handle<AccessorInfo> callback) {
2806  // ----------- S t a t e -------------
2807  // -- rax : receiver
2808  // -- rcx : name
2809  // -- rsp[0] : return address
2810  // -----------------------------------
2811  Label miss;
2812  GenerateLoadCallback(object, holder, rax, rcx, rdx, rbx, rdi, r8, callback,
2813  name, &miss);
2814  __ bind(&miss);
2815  GenerateLoadMiss(masm(), Code::LOAD_IC);
2816 
2817  // Return the generated code.
2818  return GetCode(Code::CALLBACKS, name);
2819 }
2820 
2821 
2822 #undef __
2823 #define __ ACCESS_MASM(masm)
2824 
2825 
2826 void LoadStubCompiler::GenerateLoadViaGetter(MacroAssembler* masm,
2827  Handle<JSFunction> getter) {
2828  // ----------- S t a t e -------------
2829  // -- rax : receiver
2830  // -- rcx : name
2831  // -- rsp[0] : return address
2832  // -----------------------------------
2833  {
2834  FrameScope scope(masm, StackFrame::INTERNAL);
2835 
2836  if (!getter.is_null()) {
2837  // Call the JavaScript getter with the receiver on the stack.
2838  __ push(rax);
2839  ParameterCount actual(0);
2840  __ InvokeFunction(getter, actual, CALL_FUNCTION, NullCallWrapper(),
2841  CALL_AS_METHOD);
2842  } else {
2843  // If we generate a global code snippet for deoptimization only, remember
2844  // the place to continue after deoptimization.
2845  masm->isolate()->heap()->SetGetterStubDeoptPCOffset(masm->pc_offset());
2846  }
2847 
2848  // Restore context register.
2849  __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
2850  }
2851  __ ret(0);
2852 }
2853 
2854 
2855 #undef __
2856 #define __ ACCESS_MASM(masm())
2857 
2858 
2859 Handle<Code> LoadStubCompiler::CompileLoadViaGetter(
2860  Handle<String> name,
2861  Handle<JSObject> receiver,
2862  Handle<JSObject> holder,
2863  Handle<JSFunction> getter) {
2864  // ----------- S t a t e -------------
2865  // -- rax : receiver
2866  // -- rcx : name
2867  // -- rsp[0] : return address
2868  // -----------------------------------
2869  Label miss;
2870 
2871  // Check that the maps haven't changed.
2872  __ JumpIfSmi(rax, &miss);
2873  CheckPrototypes(receiver, rax, holder, rbx, rdx, rdi, name, &miss);
2874 
2875  GenerateLoadViaGetter(masm(), getter);
2876 
2877  __ bind(&miss);
2878  GenerateLoadMiss(masm(), Code::LOAD_IC);
2879 
2880  // Return the generated code.
2881  return GetCode(Code::CALLBACKS, name);
2882 }
2883 
2884 
2885 Handle<Code> LoadStubCompiler::CompileLoadConstant(Handle<JSObject> object,
2886  Handle<JSObject> holder,
2887  Handle<JSFunction> value,
2888  Handle<String> name) {
2889  // ----------- S t a t e -------------
2890  // -- rax : receiver
2891  // -- rcx : name
2892  // -- rsp[0] : return address
2893  // -----------------------------------
2894  Label miss;
2895 
2896  GenerateLoadConstant(object, holder, rax, rbx, rdx, rdi, value, name, &miss);
2897  __ bind(&miss);
2898  GenerateLoadMiss(masm(), Code::LOAD_IC);
2899 
2900  // Return the generated code.
2901  return GetCode(Code::CONSTANT_FUNCTION, name);
2902 }
2903 
2904 
2905 Handle<Code> LoadStubCompiler::CompileLoadInterceptor(Handle<JSObject> receiver,
2906  Handle<JSObject> holder,
2907  Handle<String> name) {
2908  // ----------- S t a t e -------------
2909  // -- rax : receiver
2910  // -- rcx : name
2911  // -- rsp[0] : return address
2912  // -----------------------------------
2913  Label miss;
2914  LookupResult lookup(isolate());
2915  LookupPostInterceptor(holder, name, &lookup);
2916 
2917  // TODO(368): Compile in the whole chain: all the interceptors in
2918  // prototypes and ultimate answer.
2919  GenerateLoadInterceptor(receiver, holder, &lookup, rax, rcx, rdx, rbx, rdi,
2920  name, &miss);
2921  __ bind(&miss);
2922  GenerateLoadMiss(masm(), Code::LOAD_IC);
2923 
2924  // Return the generated code.
2925  return GetCode(Code::INTERCEPTOR, name);
2926 }
2927 
2928 
2929 Handle<Code> LoadStubCompiler::CompileLoadGlobal(
2930  Handle<JSObject> object,
2931  Handle<GlobalObject> holder,
2932  Handle<JSGlobalPropertyCell> cell,
2933  Handle<String> name,
2934  bool is_dont_delete) {
2935  // ----------- S t a t e -------------
2936  // -- rax : receiver
2937  // -- rcx : name
2938  // -- rsp[0] : return address
2939  // -----------------------------------
2940  Label miss;
2941 
2942  // Check that the maps haven't changed.
2943  __ JumpIfSmi(rax, &miss);
2944  CheckPrototypes(object, rax, holder, rbx, rdx, rdi, name, &miss);
2945 
2946  // Get the value from the cell.
2947  __ Move(rbx, cell);
2948  __ movq(rbx, FieldOperand(rbx, JSGlobalPropertyCell::kValueOffset));
2949 
2950  // Check for deleted property if property can actually be deleted.
2951  if (!is_dont_delete) {
2952  __ CompareRoot(rbx, Heap::kTheHoleValueRootIndex);
2953  __ j(equal, &miss);
2954  } else if (FLAG_debug_code) {
2955  __ CompareRoot(rbx, Heap::kTheHoleValueRootIndex);
2956  __ Check(not_equal, "DontDelete cells can't contain the hole");
2957  }
2958 
2959  Counters* counters = isolate()->counters();
2960  __ IncrementCounter(counters->named_load_global_stub(), 1);
2961  __ movq(rax, rbx);
2962  __ ret(0);
2963 
2964  __ bind(&miss);
2965  __ IncrementCounter(counters->named_load_global_stub_miss(), 1);
2966  GenerateLoadMiss(masm(), Code::LOAD_IC);
2967 
2968  // Return the generated code.
2969  return GetCode(Code::NORMAL, name);
2970 }
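Loading a global through its property cell keeps the stub valid across reassignment; only deletion, represented by the hole sentinel, forces a miss. An illustrative C++ sketch (the sentinel and types are stand-ins, not V8's API):

struct Object;
static Object* const kTheHole = reinterpret_cast<Object*>(0x2);  // stand-in sentinel

struct PropertyCell {
  Object* value;  // updated in place when the global is reassigned
};

// Returns the value, or NULL to signal that the IC must miss and let the
// runtime deal with a deleted (and possibly redefined) property.
Object* LoadGlobal(const PropertyCell* cell, bool is_dont_delete) {
  Object* value = cell->value;
  if (!is_dont_delete && value == kTheHole) return 0;
  return value;
}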
2971 
2972 
2973 Handle<Code> KeyedLoadStubCompiler::CompileLoadField(Handle<String> name,
2974  Handle<JSObject> receiver,
2975  Handle<JSObject> holder,
2976  int index) {
2977  // ----------- S t a t e -------------
2978  // -- rax : key
2979  // -- rdx : receiver
2980  // -- rsp[0] : return address
2981  // -----------------------------------
2982  Label miss;
2983 
2984  Counters* counters = isolate()->counters();
2985  __ IncrementCounter(counters->keyed_load_field(), 1);
2986 
2987  // Check that the name has not changed.
2988  __ Cmp(rax, name);
2989  __ j(not_equal, &miss);
2990 
2991  GenerateLoadField(receiver, holder, rdx, rbx, rcx, rdi, index, name, &miss);
2992 
2993  __ bind(&miss);
2994  __ DecrementCounter(counters->keyed_load_field(), 1);
2995  GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
2996 
2997  // Return the generated code.
2998  return GetCode(Code::FIELD, name);
2999 }
3000 
3001 
3002 Handle<Code> KeyedLoadStubCompiler::CompileLoadCallback(
3003  Handle<String> name,
3004  Handle<JSObject> receiver,
3005  Handle<JSObject> holder,
3006  Handle<AccessorInfo> callback) {
3007  // ----------- S t a t e -------------
3008  // -- rax : key
3009  // -- rdx : receiver
3010  // -- rsp[0] : return address
3011  // -----------------------------------
3012  Label miss;
3013  Counters* counters = isolate()->counters();
3014  __ IncrementCounter(counters->keyed_load_callback(), 1);
3015 
3016  // Check that the name has not changed.
3017  __ Cmp(rax, name);
3018  __ j(not_equal, &miss);
3019 
3020  GenerateLoadCallback(receiver, holder, rdx, rax, rbx, rcx, rdi, r8, callback,
3021  name, &miss);
3022  __ bind(&miss);
3023  __ DecrementCounter(counters->keyed_load_callback(), 1);
3024  GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
3025 
3026  // Return the generated code.
3027  return GetCode(Code::CALLBACKS, name);
3028 }
3029 
3030 
3031 Handle<Code> KeyedLoadStubCompiler::CompileLoadConstant(
3032  Handle<String> name,
3033  Handle<JSObject> receiver,
3034  Handle<JSObject> holder,
3035  Handle<JSFunction> value) {
3036  // ----------- S t a t e -------------
3037  // -- rax : key
3038  // -- rdx : receiver
3039  // -- rsp[0] : return address
3040  // -----------------------------------
3041  Label miss;
3042 
3043  Counters* counters = isolate()->counters();
3044  __ IncrementCounter(counters->keyed_load_constant_function(), 1);
3045 
3046  // Check that the name has not changed.
3047  __ Cmp(rax, name);
3048  __ j(not_equal, &miss);
3049 
3050  GenerateLoadConstant(receiver, holder, rdx, rbx, rcx, rdi,
3051  value, name, &miss);
3052  __ bind(&miss);
3053  __ DecrementCounter(counters->keyed_load_constant_function(), 1);
3054  GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
3055 
3056  // Return the generated code.
3057  return GetCode(Code::CONSTANT_FUNCTION, name);
3058 }
3059 
3060 
3061 Handle<Code> KeyedLoadStubCompiler::CompileLoadInterceptor(
3062  Handle<JSObject> receiver,
3063  Handle<JSObject> holder,
3064  Handle<String> name) {
3065  // ----------- S t a t e -------------
3066  // -- rax : key
3067  // -- rdx : receiver
3068  // -- rsp[0] : return address
3069  // -----------------------------------
3070  Label miss;
3071  Counters* counters = isolate()->counters();
3072  __ IncrementCounter(counters->keyed_load_interceptor(), 1);
3073 
3074  // Check that the name has not changed.
3075  __ Cmp(rax, name);
3076  __ j(not_equal, &miss);
3077 
3078  LookupResult lookup(isolate());
3079  LookupPostInterceptor(holder, name, &lookup);
3080  GenerateLoadInterceptor(receiver, holder, &lookup, rdx, rax, rcx, rbx, rdi,
3081  name, &miss);
3082  __ bind(&miss);
3083  __ DecrementCounter(counters->keyed_load_interceptor(), 1);
3084  GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
3085 
3086  // Return the generated code.
3087  return GetCode(Code::INTERCEPTOR, name);
3088 }
3089 
3090 
3091 Handle<Code> KeyedLoadStubCompiler::CompileLoadArrayLength(
3092  Handle<String> name) {
3093  // ----------- S t a t e -------------
3094  // -- rax : key
3095  // -- rdx : receiver
3096  // -- rsp[0] : return address
3097  // -----------------------------------
3098  Label miss;
3099 
3100  Counters* counters = isolate()->counters();
3101  __ IncrementCounter(counters->keyed_load_array_length(), 1);
3102 
3103  // Check that the name has not changed.
3104  __ Cmp(rax, name);
3105  __ j(not_equal, &miss);
3106 
3107  GenerateLoadArrayLength(masm(), rdx, rcx, &miss);
3108  __ bind(&miss);
3109  __ DecrementCounter(counters->keyed_load_array_length(), 1);
3110  GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
3111 
3112  // Return the generated code.
3113  return GetCode(Code::CALLBACKS, name);
3114 }
3115 
3116 
3117 Handle<Code> KeyedLoadStubCompiler::CompileLoadStringLength(
3118  Handle<String> name) {
3119  // ----------- S t a t e -------------
3120  // -- rax : key
3121  // -- rdx : receiver
3122  // -- rsp[0] : return address
3123  // -----------------------------------
3124  Label miss;
3125 
3126  Counters* counters = isolate()->counters();
3127  __ IncrementCounter(counters->keyed_load_string_length(), 1);
3128 
3129  // Check that the name has not changed.
3130  __ Cmp(rax, name);
3131  __ j(not_equal, &miss);
3132 
3133  GenerateLoadStringLength(masm(), rdx, rcx, rbx, &miss, true);
3134  __ bind(&miss);
3135  __ DecrementCounter(counters->keyed_load_string_length(), 1);
3136  GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
3137 
3138  // Return the generated code.
3139  return GetCode(Code::CALLBACKS, name);
3140 }
3141 
3142 
3143 Handle<Code> KeyedLoadStubCompiler::CompileLoadFunctionPrototype(
3144  Handle<String> name) {
3145  // ----------- S t a t e -------------
3146  // -- rax : key
3147  // -- rdx : receiver
3148  // -- rsp[0] : return address
3149  // -----------------------------------
3150  Label miss;
3151 
3152  Counters* counters = isolate()->counters();
3153  __ IncrementCounter(counters->keyed_load_function_prototype(), 1);
3154 
3155  // Check that the name has not changed.
3156  __ Cmp(rax, name);
3157  __ j(not_equal, &miss);
3158 
3159  GenerateLoadFunctionPrototype(masm(), rdx, rcx, rbx, &miss);
3160  __ bind(&miss);
3161  __ DecrementCounter(counters->keyed_load_function_prototype(), 1);
3162  GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
3163 
3164  // Return the generated code.
3165  return GetCode(Code::CALLBACKS, name);
3166 }
3167 
3168 
3169 Handle<Code> KeyedLoadStubCompiler::CompileLoadElement(
3170  Handle<Map> receiver_map) {
3171  // ----------- S t a t e -------------
3172  // -- rax : key
3173  // -- rdx : receiver
3174  // -- rsp[0] : return address
3175  // -----------------------------------
3176  ElementsKind elements_kind = receiver_map->elements_kind();
3177  Handle<Code> stub = KeyedLoadElementStub(elements_kind).GetCode();
3178 
3179  __ DispatchMap(rdx, receiver_map, stub, DO_SMI_CHECK);
3180 
3181  Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Miss();
3182  __ jmp(ic, RelocInfo::CODE_TARGET);
3183 
3184  // Return the generated code.
3185  return GetCode(Code::NORMAL, factory()->empty_string());
3186 }
3187 
3188 
3189 Handle<Code> KeyedLoadStubCompiler::CompileLoadPolymorphic(
3190  MapHandleList* receiver_maps,
3191  CodeHandleList* handler_ics) {
3192  // ----------- S t a t e -------------
3193  // -- rax : key
3194  // -- rdx : receiver
3195  // -- rsp[0] : return address
3196  // -----------------------------------
3197  Label miss;
3198  __ JumpIfSmi(rdx, &miss);
3199 
3200  Register map_reg = rbx;
3201  __ movq(map_reg, FieldOperand(rdx, HeapObject::kMapOffset));
3202  int receiver_count = receiver_maps->length();
3203  for (int current = 0; current < receiver_count; ++current) {
3204  // Check map and tail call if there's a match
3205  __ Cmp(map_reg, receiver_maps->at(current));
3206  __ j(equal, handler_ics->at(current), RelocInfo::CODE_TARGET);
3207  }
3208 
3209  __ bind(&miss);
3210  GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
3211 
3212  // Return the generated code.
3213  return GetCode(Code::NORMAL, factory()->empty_string(), MEGAMORPHIC);
3214 }
3215 
3216 
3217 // Specialized stub for constructing objects from functions which have only
3218 // simple assignments of the form this.x = ...; in their body.
3219 Handle<Code> ConstructStubCompiler::CompileConstructStub(
3220  Handle<JSFunction> function) {
3221  // ----------- S t a t e -------------
3222  // -- rax : argc
3223  // -- rdi : constructor
3224  // -- rsp[0] : return address
3225  // -- rsp[8] : last argument
3226  // -----------------------------------
3227  Label generic_stub_call;
3228 
3229  // Use r8 for holding undefined which is used in several places below.
3230  __ Move(r8, factory()->undefined_value());
3231 
3232 #ifdef ENABLE_DEBUGGER_SUPPORT
3233  // Check to see whether there are any break points in the function code. If
3234  // there are, jump to the generic constructor stub, which calls the actual
3235  // code for the function, thereby hitting the break points.
3236  __ movq(rbx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
3237  __ movq(rbx, FieldOperand(rbx, SharedFunctionInfo::kDebugInfoOffset));
3238  __ cmpq(rbx, r8);
3239  __ j(not_equal, &generic_stub_call);
3240 #endif
3241 
3242  // Load the initial map and verify that it is in fact a map.
3243  __ movq(rbx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset));
3244  // A NULL field and a Smi are both caught by the Smi check below.
3245  STATIC_ASSERT(kSmiTag == 0);
3246  __ JumpIfSmi(rbx, &generic_stub_call);
3247  __ CmpObjectType(rbx, MAP_TYPE, rcx);
3248  __ j(not_equal, &generic_stub_call);
3249 
3250 #ifdef DEBUG
3251  // Cannot construct functions this way.
3252  // rdi: constructor
3253  // rbx: initial map
3254  __ CmpInstanceType(rbx, JS_FUNCTION_TYPE);
3255  __ Assert(not_equal, "Function constructed by construct stub.");
3256 #endif
3257 
3258  // Now allocate the JSObject in new space.
3259  // rdi: constructor
3260  // rbx: initial map
3261  __ movzxbq(rcx, FieldOperand(rbx, Map::kInstanceSizeOffset));
3262  __ shl(rcx, Immediate(kPointerSizeLog2));
3263  __ AllocateInNewSpace(rcx, rdx, rcx, no_reg,
3264  &generic_stub_call, NO_ALLOCATION_FLAGS);
3265 
3266  // Allocated the JSObject, now initialize the fields and add the heap tag.
3267  // rbx: initial map
3268  // rdx: JSObject (untagged)
3269  __ movq(Operand(rdx, JSObject::kMapOffset), rbx);
3270  __ Move(rbx, factory()->empty_fixed_array());
3271  __ movq(Operand(rdx, JSObject::kPropertiesOffset), rbx);
3272  __ movq(Operand(rdx, JSObject::kElementsOffset), rbx);
3273 
3274  // rax: argc
3275  // rdx: JSObject (untagged)
3276  // Load the address of the first in-object property into r9.
3277  __ lea(r9, Operand(rdx, JSObject::kHeaderSize));
3278  // Calculate the location of the first argument. The stack contains only the
3279  // return address on top of the argc arguments.
3280  __ lea(rcx, Operand(rsp, rax, times_pointer_size, 0));
3281 
3282  // rax: argc
3283  // rcx: first argument
3284  // rdx: JSObject (untagged)
3285  // r8: undefined
3286  // r9: first in-object property of the JSObject
3287  // Fill the initialized properties with a constant value or a passed argument
3288  // depending on the this.x = ...; assignment in the function.
3289  Handle<SharedFunctionInfo> shared(function->shared());
3290  for (int i = 0; i < shared->this_property_assignments_count(); i++) {
3291  if (shared->IsThisPropertyAssignmentArgument(i)) {
3292  // Check if the argument assigned to the property is actually passed.
3293  // If argument is not passed the property is set to undefined,
3294  // otherwise find it on the stack.
3295  int arg_number = shared->GetThisPropertyAssignmentArgument(i);
3296  __ movq(rbx, r8);
3297  __ cmpq(rax, Immediate(arg_number));
3298  __ cmovq(above, rbx, Operand(rcx, arg_number * -kPointerSize));
3299  // Store value in the property.
3300  __ movq(Operand(r9, i * kPointerSize), rbx);
3301  } else {
3302  // Set the property to the constant value.
3303  Handle<Object> constant(shared->GetThisPropertyAssignmentConstant(i));
3304  __ Move(Operand(r9, i * kPointerSize), constant);
3305  }
3306  }
3307 
3308  // Fill the unused in-object property fields with undefined.
3309  ASSERT(function->has_initial_map());
3310  for (int i = shared->this_property_assignments_count();
3311  i < function->initial_map()->inobject_properties();
3312  i++) {
3313  __ movq(Operand(r9, i * kPointerSize), r8);
3314  }
3315 
3316  // rax: argc
3317  // rdx: JSObject (untagged)
3318  // Move argc to rbx and the JSObject to return to rax and tag it.
3319  __ movq(rbx, rax);
3320  __ movq(rax, rdx);
3321  __ or_(rax, Immediate(kHeapObjectTag));
3322 
3323  // rax: JSObject
3324  // rbx: argc
3325  // Remove caller arguments and receiver from the stack and return.
3326  __ pop(rcx);
3327  __ lea(rsp, Operand(rsp, rbx, times_pointer_size, 1 * kPointerSize));
3328  __ push(rcx);
3329  Counters* counters = isolate()->counters();
3330  __ IncrementCounter(counters->constructed_objects(), 1);
3331  __ IncrementCounter(counters->constructed_objects_stub(), 1);
3332  __ ret(0);
3333 
3334  // Jump to the generic stub in case the specialized code cannot handle the
3335  // construction.
3336  __ bind(&generic_stub_call);
3337  Handle<Code> code = isolate()->builtins()->JSConstructStubGeneric();
3338  __ Jump(code, RelocInfo::CODE_TARGET);
3339 
3340  // Return the generated code.
3341  return GetCode();
3342 }
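The assignment loop above fills each this.x = ... slot either from a passed argument, falling back to undefined when the caller supplied too few, or from a compile-time constant; the remaining in-object slots are then padded with undefined. A C++ sketch of that initialization, with illustrative types rather than V8's object layout:

struct Object;
static Object* const undefined_value = 0;  // stand-in for the undefined root

void InitializeInObjectProperties(Object** fields, int field_count,
                                  Object* const* args, int argc,
                                  const int* arg_for_field,  // -1: constant slot
                                  Object* const* constants) {
  for (int i = 0; i < field_count; ++i) {
    int arg = arg_for_field[i];
    if (arg >= 0) {
      // Argument-backed slot: undefined when too few arguments were passed.
      fields[i] = (arg < argc) ? args[arg] : undefined_value;
    } else {
      fields[i] = constants[i];  // constant-backed slot
    }
  }
}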
3343 
3344 
3345 #undef __
3346 #define __ ACCESS_MASM(masm)
3347 
3348 
3349 void KeyedLoadStubCompiler::GenerateLoadDictionaryElement(
3350  MacroAssembler* masm) {
3351  // ----------- S t a t e -------------
3352  // -- rax : key
3353  // -- rdx : receiver
3354  // -- rsp[0] : return address
3355  // -----------------------------------
3356  Label slow, miss_force_generic;
3357 
3358  // This stub is meant to be tail-jumped to; the receiver must already
3359  // have been verified by the caller not to be a smi.
3360 
3361  __ JumpIfNotSmi(rax, &miss_force_generic);
3362  __ SmiToInteger32(rbx, rax);
3363  __ movq(rcx, FieldOperand(rdx, JSObject::kElementsOffset));
3364 
3365  // Check whether the elements array is a number dictionary.
3366  // rdx: receiver
3367  // rax: key
3368  // rbx: key as untagged int32
3369  // rcx: elements
3370  __ LoadFromNumberDictionary(&slow, rcx, rax, rbx, r9, rdi, rax);
3371  __ ret(0);
3372 
3373  __ bind(&slow);
3374  // ----------- S t a t e -------------
3375  // -- rax : key
3376  // -- rdx : receiver
3377  // -- rsp[0] : return address
3378  // -----------------------------------
3379  Handle<Code> slow_ic =
3380  masm->isolate()->builtins()->KeyedLoadIC_Slow();
3381  __ jmp(slow_ic, RelocInfo::CODE_TARGET);
3382 
3383  __ bind(&miss_force_generic);
3384  // ----------- S t a t e -------------
3385  // -- rax : key
3386  // -- rdx : receiver
3387  // -- rsp[0] : return address
3388  // -----------------------------------
3389  Handle<Code> miss_ic =
3390  masm->isolate()->builtins()->KeyedLoadIC_MissForceGeneric();
3391  __ jmp(miss_ic, RelocInfo::CODE_TARGET);
3392 }
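// A minimal sketch of what LoadFromNumberDictionary does conceptually (the
// layout and probe sequence here are illustrative assumptions; V8's real
// SeededNumberDictionary differs in detail): probe an open-addressed table
// with the integer key, falling through to the slow path on a miss.
struct NumberDictEntry { bool used; unsigned key; void* value; };

static void* NumberDictLookup(const NumberDictEntry* table,
                              unsigned capacity,  // a power of two
                              unsigned key) {
  unsigned mask = capacity - 1;
  unsigned probe = key & mask;
  for (unsigned i = 0; i < capacity; i++) {
    if (!table[probe].used) return 0;                      // miss -> &slow
    if (table[probe].key == key) return table[probe].value;
    probe = (probe + i + 1) & mask;                        // reprobe
  }
  return 0;
}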
3393 
3394 
3395 static void GenerateSmiKeyCheck(MacroAssembler* masm,
3396  Register key,
3397  Register scratch,
3398  XMMRegister xmm_scratch0,
3399  XMMRegister xmm_scratch1,
3400  Label* fail) {
3401  // Check that key is a smi or a heap number containing a smi and branch
3402  // if the check fails.
3403  Label key_ok;
3404  __ JumpIfSmi(key, &key_ok);
3405  __ CheckMap(key,
3406  masm->isolate()->factory()->heap_number_map(),
3407  fail,
3408  DONT_DO_SMI_CHECK);
3409  __ movsd(xmm_scratch0, FieldOperand(key, HeapNumber::kValueOffset));
3410  __ cvttsd2si(scratch, xmm_scratch0);
3411  __ cvtlsi2sd(xmm_scratch1, scratch);
3412  __ ucomisd(xmm_scratch1, xmm_scratch0);
3413  __ j(not_equal, fail);
3414  __ j(parity_even, fail); // NaN.
3415  __ Integer32ToSmi(key, scratch);
3416  __ bind(&key_ok);
3417 }
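// A minimal C++ sketch of the check above: a heap-number key is accepted
// only if truncation to int32 (cvttsd2si) and conversion back (cvtlsi2sd)
// reproduce the original double exactly; the ucomisd/parity_even pair
// additionally rejects NaN. The explicit range guard below stands in for
// the stub's reliance on cvttsd2si's out-of-range result failing the
// round-trip compare.
#include <cmath>

static bool KeyToInt32(double key, int* out) {
  if (std::isnan(key)) return false;                  // parity_even -> fail
  if (key < -2147483648.0 || key > 2147483647.0) return false;
  int truncated = static_cast<int>(key);              // cvttsd2si
  if (static_cast<double>(truncated) != key) return false;  // ucomisd
  *out = truncated;
  return true;
}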
3418 
3419 
3420 void KeyedLoadStubCompiler::GenerateLoadExternalArray(
3421  MacroAssembler* masm,
3422  ElementsKind elements_kind) {
3423  // ----------- S t a t e -------------
3424  // -- rax : key
3425  // -- rdx : receiver
3426  // -- rsp[0] : return address
3427  // -----------------------------------
3428  Label slow, miss_force_generic;
3429 
3430  // This stub is meant to be tail-jumped to; the receiver must already
3431  // have been verified by the caller to not be a smi.
3432 
3433  // Check that the key is a smi or a heap number convertible to a smi.
3434  GenerateSmiKeyCheck(masm, rax, rcx, xmm0, xmm1, &miss_force_generic);
3435 
3436  // Check that the index is in range.
3437  __ movq(rbx, FieldOperand(rdx, JSObject::kElementsOffset));
3438  __ SmiToInteger32(rcx, rax);
3439  __ cmpq(rcx, FieldOperand(rbx, ExternalArray::kLengthOffset));
3440  // Unsigned comparison catches both negative and too-large values.
3441  __ j(above_equal, &miss_force_generic);
3442 
3443  // rax: index (as a smi)
3444  // rdx: receiver (JSObject)
3445  // rcx: untagged index
3446  // rbx: elements array
3447  __ movq(rbx, FieldOperand(rbx, ExternalArray::kExternalPointerOffset));
3448  // rbx: base pointer of external storage
3449  switch (elements_kind) {
3450  case EXTERNAL_BYTE_ELEMENTS:
3451  __ movsxbq(rcx, Operand(rbx, rcx, times_1, 0));
3452  break;
3453  case EXTERNAL_PIXEL_ELEMENTS:
3454  case EXTERNAL_UNSIGNED_BYTE_ELEMENTS:
3455  __ movzxbq(rcx, Operand(rbx, rcx, times_1, 0));
3456  break;
3457  case EXTERNAL_SHORT_ELEMENTS:
3458  __ movsxwq(rcx, Operand(rbx, rcx, times_2, 0));
3459  break;
3460  case EXTERNAL_UNSIGNED_SHORT_ELEMENTS:
3461  __ movzxwq(rcx, Operand(rbx, rcx, times_2, 0));
3462  break;
3463  case EXTERNAL_INT_ELEMENTS:
3464  __ movsxlq(rcx, Operand(rbx, rcx, times_4, 0));
3465  break;
3466  case EXTERNAL_UNSIGNED_INT_ELEMENTS:
3467  __ movl(rcx, Operand(rbx, rcx, times_4, 0));
3468  break;
3469  case EXTERNAL_FLOAT_ELEMENTS:
3470  __ cvtss2sd(xmm0, Operand(rbx, rcx, times_4, 0));
3471  break;
3472  case EXTERNAL_DOUBLE_ELEMENTS:
3473  __ movsd(xmm0, Operand(rbx, rcx, times_8, 0));
3474  break;
3475  default:
3476  UNREACHABLE();
3477  break;
3478  }
3479 
3480  // rax: index
3481  // rdx: receiver
3482  // For integer array types:
3483  // rcx: value
3484  // For floating-point array type:
3485  // xmm0: value as double.
3486 
3487  ASSERT(kSmiValueSize == 32);
3488  if (elements_kind == EXTERNAL_UNSIGNED_INT_ELEMENTS) {
3489  // For the UnsignedInt array type, we need to see whether
3490  // the value can be represented in a Smi. If not, we need to convert
3491  // it to a HeapNumber.
3492  Label box_int;
3493 
3494  __ JumpIfUIntNotValidSmiValue(rcx, &box_int, Label::kNear);
3495 
3496  __ Integer32ToSmi(rax, rcx);
3497  __ ret(0);
3498 
3499  __ bind(&box_int);
3500 
3501  // Allocate a HeapNumber for the int and perform int-to-double
3502  // conversion.
3503  // The value is zero-extended since we loaded the value from memory
3504  // with movl.
3505  __ cvtqsi2sd(xmm0, rcx);
3506 
3507  __ AllocateHeapNumber(rcx, rbx, &slow);
3508  // Set the value.
3509  __ movsd(FieldOperand(rcx, HeapNumber::kValueOffset), xmm0);
3510  __ movq(rax, rcx);
3511  __ ret(0);
3512  } else if (elements_kind == EXTERNAL_FLOAT_ELEMENTS ||
3513  elements_kind == EXTERNAL_DOUBLE_ELEMENTS) {
3514  // For the floating-point array type, we need to always allocate a
3515  // HeapNumber.
3516  __ AllocateHeapNumber(rcx, rbx, &slow);
3517  // Set the value.
3518  __ movsd(FieldOperand(rcx, HeapNumber::kValueOffset), xmm0);
3519  __ movq(rax, rcx);
3520  __ ret(0);
3521  } else {
3522  __ Integer32ToSmi(rax, rcx);
3523  __ ret(0);
3524  }
3525 
3526  // Slow case: Jump to runtime.
3527  __ bind(&slow);
3528  Counters* counters = masm->isolate()->counters();
3529  __ IncrementCounter(counters->keyed_load_external_array_slow(), 1);
3530 
3531  // ----------- S t a t e -------------
3532  // -- rax : key
3533  // -- rdx : receiver
3534  // -- rsp[0] : return address
3535  // -----------------------------------
3536 
3537  Handle<Code> ic = masm->isolate()->builtins()->KeyedLoadIC_Slow();
3538  __ jmp(ic, RelocInfo::CODE_TARGET);
3539 
3540  // Miss case: Jump to runtime.
3541  __ bind(&miss_force_generic);
3542 
3543  // ----------- S t a t e -------------
3544  // -- rax : key
3545  // -- rdx : receiver
3546  // -- rsp[0] : return address
3547  // -----------------------------------
3548  Handle<Code> miss_ic =
3549  masm->isolate()->builtins()->KeyedLoadIC_MissForceGeneric();
3550  __ jmp(miss_ic, RelocInfo::CODE_TARGET);
3551 }
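// A minimal sketch of the boxing decision above, assuming this x64
// configuration's 32-bit smi payload (ASSERT(kSmiValueSize == 32)): float
// and double loads always allocate a HeapNumber, and among the integer
// element kinds only an unsigned int32 element can fall outside the smi
// range and require one.
static inline bool NeedsHeapNumber(unsigned loaded_uint32) {
  return loaded_uint32 > 0x7FFFFFFFu;  // does not fit a signed 32-bit smi
}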
3552 
3553 
3554 void KeyedStoreStubCompiler::GenerateStoreExternalArray(
3555  MacroAssembler* masm,
3556  ElementsKind elements_kind) {
3557  // ----------- S t a t e -------------
3558  // -- rax : value
3559  // -- rcx : key
3560  // -- rdx : receiver
3561  // -- rsp[0] : return address
3562  // -----------------------------------
3563  Label slow, miss_force_generic;
3564 
3565  // This stub is meant to be tail-jumped to; the receiver must already
3566  // have been verified by the caller to not be a smi.
3567 
3568  // Check that the key is a smi or a heap number convertible to a smi.
3569  GenerateSmiKeyCheck(masm, rcx, rbx, xmm0, xmm1, &miss_force_generic);
3570 
3571  // Check that the index is in range.
3572  __ movq(rbx, FieldOperand(rdx, JSObject::kElementsOffset));
3573  __ SmiToInteger32(rdi, rcx); // Untag the index.
3574  __ cmpq(rdi, FieldOperand(rbx, ExternalArray::kLengthOffset));
3575  // Unsigned comparison catches both negative and too-large values.
3576  __ j(above_equal, &miss_force_generic);
3577 
3578  // Handle both smis and HeapNumbers in the fast path. Go to the
3579  // runtime for all other kinds of values.
3580  // rax: value
3581  // rcx: key (a smi)
3582  // rdx: receiver (a JSObject)
3583  // rbx: elements array
3584  // rdi: untagged key
3585  Label check_heap_number;
3586  if (elements_kind == EXTERNAL_PIXEL_ELEMENTS) {
3587  // Float to pixel conversion is only implemented in the runtime for now.
3588  __ JumpIfNotSmi(rax, &slow);
3589  } else {
3590  __ JumpIfNotSmi(rax, &check_heap_number, Label::kNear);
3591  }
3592  // No more branches to slow case on this path. Key and receiver not needed.
3593  __ SmiToInteger32(rdx, rax);
3594  __ movq(rbx, FieldOperand(rbx, ExternalArray::kExternalPointerOffset));
3595  // rbx: base pointer of external storage
3596  switch (elements_kind) {
3597  case EXTERNAL_PIXEL_ELEMENTS:
3598  { // Clamp the value to [0..255].
3599  Label done;
3600  __ testl(rdx, Immediate(0xFFFFFF00));
3601  __ j(zero, &done, Label::kNear);
3602  __ setcc(negative, rdx); // 1 if negative, 0 if positive.
3603  __ decb(rdx); // 0 if negative, 255 if positive.
3604  __ bind(&done);
3605  }
3606  __ movb(Operand(rbx, rdi, times_1, 0), rdx);
3607  break;
3608  case EXTERNAL_BYTE_ELEMENTS:
3609  case EXTERNAL_UNSIGNED_BYTE_ELEMENTS:
3610  __ movb(Operand(rbx, rdi, times_1, 0), rdx);
3611  break;
3612  case EXTERNAL_SHORT_ELEMENTS:
3613  case EXTERNAL_UNSIGNED_SHORT_ELEMENTS:
3614  __ movw(Operand(rbx, rdi, times_2, 0), rdx);
3615  break;
3616  case EXTERNAL_INT_ELEMENTS:
3617  case EXTERNAL_UNSIGNED_INT_ELEMENTS:
3618  __ movl(Operand(rbx, rdi, times_4, 0), rdx);
3619  break;
3620  case EXTERNAL_FLOAT_ELEMENTS:
3621  // Need to perform int-to-float conversion.
3622  __ cvtlsi2ss(xmm0, rdx);
3623  __ movss(Operand(rbx, rdi, times_4, 0), xmm0);
3624  break;
3625  case EXTERNAL_DOUBLE_ELEMENTS:
3626  // Need to perform int-to-double conversion.
3627  __ cvtlsi2sd(xmm0, rdx);
3628  __ movsd(Operand(rbx, rdi, times_8, 0), xmm0);
3629  break;
3630  case FAST_ELEMENTS:
3631  case FAST_SMI_ELEMENTS:
3632  case FAST_DOUBLE_ELEMENTS:
3633  case FAST_HOLEY_ELEMENTS:
3634  case FAST_HOLEY_SMI_ELEMENTS:
3635  case FAST_HOLEY_DOUBLE_ELEMENTS:
3636  case DICTIONARY_ELEMENTS:
3637  case NON_STRICT_ARGUMENTS_ELEMENTS:
3638  UNREACHABLE();
3639  break;
3640  }
3641  __ ret(0);
3642 
3643  // TODO(danno): handle heap number -> pixel array conversion
3644  if (elements_kind != EXTERNAL_PIXEL_ELEMENTS) {
3645  __ bind(&check_heap_number);
3646  // rax: value
3647  // rcx: key (a smi)
3648  // rdx: receiver (a JSObject)
3649  // rbx: elements array
3650  // rdi: untagged key
3651  __ CmpObjectType(rax, HEAP_NUMBER_TYPE, kScratchRegister);
3652  __ j(not_equal, &slow);
3653  // No more branches to slow case on this path.
3654 
3655  // The WebGL specification leaves the behavior of storing NaN and
3656  // +/-Infinity into integer arrays basically undefined. For more
3657  // reproducible behavior, convert these to zero.
3658  __ movsd(xmm0, FieldOperand(rax, HeapNumber::kValueOffset));
3659  __ movq(rbx, FieldOperand(rbx, ExternalArray::kExternalPointerOffset));
3660  // rdi: untagged index
3661  // rbx: base pointer of external storage
3662  // xmm0: value as double
3663  if (elements_kind == EXTERNAL_FLOAT_ELEMENTS) {
3664  __ cvtsd2ss(xmm0, xmm0);
3665  __ movss(Operand(rbx, rdi, times_4, 0), xmm0);
3666  __ ret(0);
3667  } else if (elements_kind == EXTERNAL_DOUBLE_ELEMENTS) {
3668  __ movsd(Operand(rbx, rdi, times_8, 0), xmm0);
3669  __ ret(0);
3670  } else {
3671  // Perform float-to-int conversion with truncation (round-to-zero)
3672  // behavior.
3673  // Fast path: use machine instruction to convert to int64. If that
3674  // fails (out-of-range), go into the runtime.
3675  __ cvttsd2siq(r8, xmm0);
3676  __ Set(kScratchRegister, V8_UINT64_C(0x8000000000000000));
3677  __ cmpq(r8, kScratchRegister);
3678  __ j(equal, &slow);
3679 
3680  // r8: value (converted to an untagged integer)
3681  // rdi: untagged index
3682  // rbx: base pointer of external storage
3683  switch (elements_kind) {
3684  case EXTERNAL_BYTE_ELEMENTS:
3685  case EXTERNAL_UNSIGNED_BYTE_ELEMENTS:
3686  __ movb(Operand(rbx, rdi, times_1, 0), r8);
3687  break;
3688  case EXTERNAL_SHORT_ELEMENTS:
3689  case EXTERNAL_UNSIGNED_SHORT_ELEMENTS:
3690  __ movw(Operand(rbx, rdi, times_2, 0), r8);
3691  break;
3692  case EXTERNAL_INT_ELEMENTS:
3693  case EXTERNAL_UNSIGNED_INT_ELEMENTS:
3694  __ movl(Operand(rbx, rdi, times_4, 0), r8);
3695  break;
3696  case EXTERNAL_PIXEL_ELEMENTS:
3697  case EXTERNAL_FLOAT_ELEMENTS:
3698  case EXTERNAL_DOUBLE_ELEMENTS:
3699  case FAST_ELEMENTS:
3700  case FAST_SMI_ELEMENTS:
3701  case FAST_DOUBLE_ELEMENTS:
3702  case FAST_HOLEY_ELEMENTS:
3703  case FAST_HOLEY_SMI_ELEMENTS:
3704  case FAST_HOLEY_DOUBLE_ELEMENTS:
3705  case DICTIONARY_ELEMENTS:
3706  case NON_STRICT_ARGUMENTS_ELEMENTS:
3707  UNREACHABLE();
3708  break;
3709  }
3710  __ ret(0);
3711  }
3712  }
3713 
3714  // Slow case: call runtime.
3715  __ bind(&slow);
3716 
3717  // ----------- S t a t e -------------
3718  // -- rax : value
3719  // -- rcx : key
3720  // -- rdx : receiver
3721  // -- rsp[0] : return address
3722  // -----------------------------------
3723 
3724  Handle<Code> ic = masm->isolate()->builtins()->KeyedStoreIC_Slow();
3725  __ jmp(ic, RelocInfo::CODE_TARGET);
3726 
3727  // Miss case: call runtime.
3728  __ bind(&miss_force_generic);
3729 
3730  // ----------- S t a t e -------------
3731  // -- rax : value
3732  // -- rcx : key
3733  // -- rdx : receiver
3734  // -- rsp[0] : return address
3735  // -----------------------------------
3736 
3737  Handle<Code> miss_ic =
3738  masm->isolate()->builtins()->KeyedStoreIC_MissForceGeneric();
3739  __ jmp(miss_ic, RelocInfo::CODE_TARGET);
3740 }
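// A minimal C++ sketch of the branch-free clamp used above for
// EXTERNAL_PIXEL_ELEMENTS (the testl / setcc(negative) / decb sequence):
static inline unsigned char ClampToUint8(int v) {
  if ((static_cast<unsigned>(v) & 0xFFFFFF00u) == 0) {
    return static_cast<unsigned char>(v);       // already in [0..255]
  }
  unsigned char is_negative = (v < 0) ? 1 : 0;  // setcc(negative, rdx)
  // decb wraps: 1 -> 0 for negative inputs, 0 -> 255 for inputs above 255.
  return static_cast<unsigned char>(is_negative - 1);
}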
3741 
3742 
3743 void KeyedLoadStubCompiler::GenerateLoadFastElement(MacroAssembler* masm) {
3744  // ----------- S t a t e -------------
3745  // -- rax : key
3746  // -- rdx : receiver
3747  // -- rsp[0] : return address
3748  // -----------------------------------
3749  Label miss_force_generic;
3750 
3751  // This stub is meant to be tail-jumped to; the receiver must already
3752  // have been verified by the caller to not be a smi.
3753 
3754  // Check that the key is a smi or a heap number convertible to a smi.
3755  GenerateSmiKeyCheck(masm, rax, rcx, xmm0, xmm1, &miss_force_generic);
3756 
3757  // Get the elements array.
3758  __ movq(rcx, FieldOperand(rdx, JSObject::kElementsOffset));
3759  __ AssertFastElements(rcx);
3760 
3761  // Check that the key is within bounds.
3762  __ SmiCompare(rax, FieldOperand(rcx, FixedArray::kLengthOffset));
3763  __ j(above_equal, &miss_force_generic);
3764 
3765  // Load the result and make sure it's not the hole.
3766  SmiIndex index = masm->SmiToIndex(rbx, rax, kPointerSizeLog2);
3767  __ movq(rbx, FieldOperand(rcx,
3768  index.reg,
3769  index.scale,
3770  FixedArray::kHeaderSize));
3771  __ CompareRoot(rbx, Heap::kTheHoleValueRootIndex);
3772  __ j(equal, &miss_force_generic);
3773  __ movq(rax, rbx);
3774  __ ret(0);
3775 
3776  __ bind(&miss_force_generic);
3777  Code* code = masm->isolate()->builtins()->builtin(
3778  Builtins::kKeyedLoadIC_MissForceGeneric);
3779  Handle<Code> ic(code);
3780  __ jmp(ic, RelocInfo::CODE_TARGET);
3781 }
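// A minimal sketch of the fast-element load above (illustrative): a slot
// holding the hole sentinel cannot be returned directly, because only the
// generic IC knows to continue the lookup on the prototype chain.
static void* LoadFastElementSketch(void* const* elements, int index,
                                   void* the_hole, bool* force_generic) {
  void* result = elements[index];
  if (result == the_hole) {
    *force_generic = true;  // -> KeyedLoadIC_MissForceGeneric
    return 0;
  }
  return result;
}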
3782 
3783 
3784 void KeyedLoadStubCompiler::GenerateLoadFastDoubleElement(
3785  MacroAssembler* masm) {
3786  // ----------- S t a t e -------------
3787  // -- rax : key
3788  // -- rdx : receiver
3789  // -- rsp[0] : return address
3790  // -----------------------------------
3791  Label miss_force_generic, slow_allocate_heapnumber;
3792 
3793  // This stub is meant to be tail-jumped to; the receiver must already
3794  // have been verified by the caller to not be a smi.
3795 
3796  // Check that the key is a smi or a heap number convertible to a smi.
3797  GenerateSmiKeyCheck(masm, rax, rcx, xmm0, xmm1, &miss_force_generic);
3798 
3799  // Get the elements array.
3800  __ movq(rcx, FieldOperand(rdx, JSObject::kElementsOffset));
3801  __ AssertFastElements(rcx);
3802 
3803  // Check that the key is within bounds.
3804  __ SmiCompare(rax, FieldOperand(rcx, FixedDoubleArray::kLengthOffset));
3805  __ j(above_equal, &miss_force_generic);
3806 
3807  // Check for the hole
3808  __ SmiToInteger32(kScratchRegister, rax);
3809  uint32_t offset = FixedDoubleArray::kHeaderSize + sizeof(kHoleNanLower32);
3810  __ cmpl(FieldOperand(rcx, kScratchRegister, times_8, offset),
3811  Immediate(kHoleNanUpper32));
3812  __ j(equal, &miss_force_generic);
3813 
3814  // Always allocate a heap number for the result.
3815  __ movsd(xmm0, FieldOperand(rcx, kScratchRegister, times_8,
3816  FixedDoubleArray::kHeaderSize));
3817  __ AllocateHeapNumber(rcx, rbx, &slow_allocate_heapnumber);
3818  // Set the value.
3819  __ movq(rax, rcx);
3820  __ movsd(FieldOperand(rax, HeapNumber::kValueOffset), xmm0);
3821  __ ret(0);
3822 
3823  __ bind(&slow_allocate_heapnumber);
3824  Handle<Code> slow_ic =
3825  masm->isolate()->builtins()->KeyedLoadIC_Slow();
3826  __ jmp(slow_ic, RelocInfo::CODE_TARGET);
3827 
3828  __ bind(&miss_force_generic);
3829  Handle<Code> miss_ic =
3830  masm->isolate()->builtins()->KeyedLoadIC_MissForceGeneric();
3831  __ jmp(miss_ic, RelocInfo::CODE_TARGET);
3832 }
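// A minimal sketch of the hole check above: a FixedDoubleArray hole is a
// NaN with a distinguished bit pattern, so comparing the high 32 bits of
// the stored double (the word at kHeaderSize + sizeof(kHoleNanLower32))
// against kHoleNanUpper32 suffices. The constant is passed in here rather
// than restated, to avoid guessing its exact value in this V8 version.
#include <cstring>

static bool IsHoleNan(double d, unsigned hole_nan_upper32) {
  unsigned long long bits;
  std::memcpy(&bits, &d, sizeof bits);
  return static_cast<unsigned>(bits >> 32) == hole_nan_upper32;
}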
3833 
3834 
3835 void KeyedStoreStubCompiler::GenerateStoreFastElement(
3836  MacroAssembler* masm,
3837  bool is_js_array,
3838  ElementsKind elements_kind,
3839  KeyedAccessGrowMode grow_mode) {
3840  // ----------- S t a t e -------------
3841  // -- rax : value
3842  // -- rcx : key
3843  // -- rdx : receiver
3844  // -- rsp[0] : return address
3845  // -----------------------------------
3846  Label miss_force_generic, transition_elements_kind, finish_store, grow;
3847  Label check_capacity, slow;
3848 
3849  // This stub is meant to be tail-jumped to; the receiver must already
3850  // have been verified by the caller to not be a smi.
3851 
3852  // Check that the key is a smi or a heap number convertible to a smi.
3853  GenerateSmiKeyCheck(masm, rcx, rbx, xmm0, xmm1, &miss_force_generic);
3854 
3855  if (IsFastSmiElementsKind(elements_kind)) {
3856  __ JumpIfNotSmi(rax, &transition_elements_kind);
3857  }
3858 
3859  // Get the elements array and make sure it is a fast element array, not 'cow'.
3860  __ movq(rdi, FieldOperand(rdx, JSObject::kElementsOffset));
3861  // Check that the key is within bounds.
3862  if (is_js_array) {
3863  __ SmiCompare(rcx, FieldOperand(rdx, JSArray::kLengthOffset));
3864  if (grow_mode == ALLOW_JSARRAY_GROWTH) {
3865  __ j(above_equal, &grow);
3866  } else {
3867  __ j(above_equal, &miss_force_generic);
3868  }
3869  } else {
3870  __ SmiCompare(rcx, FieldOperand(rdi, FixedArray::kLengthOffset));
3871  __ j(above_equal, &miss_force_generic);
3872  }
3873 
3874  __ CompareRoot(FieldOperand(rdi, HeapObject::kMapOffset),
3875  Heap::kFixedArrayMapRootIndex);
3876  __ j(not_equal, &miss_force_generic);
3877 
3878  __ bind(&finish_store);
3879  if (IsFastSmiElementsKind(elements_kind)) {
3880  __ SmiToInteger32(rcx, rcx);
3881  __ movq(FieldOperand(rdi, rcx, times_pointer_size, FixedArray::kHeaderSize),
3882  rax);
3883  } else {
3884  // Do the store and update the write barrier.
3885  ASSERT(IsFastObjectElementsKind(elements_kind));
3886  __ SmiToInteger32(rcx, rcx);
3887  __ lea(rcx,
3888  FieldOperand(rdi, rcx, times_pointer_size, FixedArray::kHeaderSize));
3889  __ movq(Operand(rcx, 0), rax);
3890  // Make sure to preserve the value in register rax.
3891  __ movq(rbx, rax);
3892  __ RecordWrite(rdi, rcx, rbx, kDontSaveFPRegs);
3893  }
3894 
3895  // Done.
3896  __ ret(0);
3897 
3898  // Handle store cache miss.
3899  __ bind(&miss_force_generic);
3900  Handle<Code> ic_force_generic =
3901  masm->isolate()->builtins()->KeyedStoreIC_MissForceGeneric();
3902  __ jmp(ic_force_generic, RelocInfo::CODE_TARGET);
3903 
3904  __ bind(&transition_elements_kind);
3905  Handle<Code> ic_miss = masm->isolate()->builtins()->KeyedStoreIC_Miss();
3906  __ jmp(ic_miss, RelocInfo::CODE_TARGET);
3907 
3908  if (is_js_array && grow_mode == ALLOW_JSARRAY_GROWTH) {
3909  // Grow the array by a single element if possible.
3910  __ bind(&grow);
3911 
3912  // Make sure the array is only growing by a single element; anything else
3913  // must be handled by the runtime. Flags are already set by previous
3914  // compare.
3915  __ j(not_equal, &miss_force_generic);
3916 
3917  // Check for the empty array, and preallocate a small backing store if
3918  // possible.
3919  __ movq(rdi, FieldOperand(rdx, JSObject::kElementsOffset));
3920  __ CompareRoot(rdi, Heap::kEmptyFixedArrayRootIndex);
3921  __ j(not_equal, &check_capacity);
3922 
3923  int size = FixedArray::SizeFor(JSArray::kPreallocatedArrayElements);
3924  __ AllocateInNewSpace(size, rdi, rbx, r8, &slow, TAG_OBJECT);
3925 
3926  // rax: value
3927  // rcx: key
3928  // rdx: receiver
3929  // rdi: elements
3930  // Make sure that the backing store can hold additional elements.
3931  __ Move(FieldOperand(rdi, JSObject::kMapOffset),
3932  masm->isolate()->factory()->fixed_array_map());
3933  __ Move(FieldOperand(rdi, FixedArray::kLengthOffset),
3934  Smi::FromInt(JSArray::kPreallocatedArrayElements));
3935  __ LoadRoot(rbx, Heap::kTheHoleValueRootIndex);
3936  for (int i = 1; i < JSArray::kPreallocatedArrayElements; ++i) {
3937  __ movq(FieldOperand(rdi, FixedArray::SizeFor(i)), rbx);
3938  }
3939 
3940  // Store the element at index zero.
3941  __ movq(FieldOperand(rdi, FixedArray::SizeFor(0)), rax);
3942 
3943  // Install the new backing store in the JSArray.
3944  __ movq(FieldOperand(rdx, JSObject::kElementsOffset), rdi);
3945  __ RecordWriteField(rdx, JSObject::kElementsOffset, rdi, rbx,
3946  kDontSaveFPRegs, EMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
3947 
3948  // Increment the length of the array.
3949  __ Move(FieldOperand(rdx, JSArray::kLengthOffset), Smi::FromInt(1));
3950  __ ret(0);
3951 
3952  __ bind(&check_capacity);
3953  // Check for COW elements; in general they are not handled by this stub.
3954  __ CompareRoot(FieldOperand(rdi, HeapObject::kMapOffset),
3955  Heap::kFixedCOWArrayMapRootIndex);
3956  __ j(equal, &miss_force_generic);
3957 
3958  // rax: value
3959  // rcx: key
3960  // rdx: receiver
3961  // rdi: elements
3962  // Make sure that the backing store can hold additional elements.
3963  __ SmiCompare(rcx, FieldOperand(rdi, FixedArray::kLengthOffset));
3964  __ j(above_equal, &slow);
3965 
3966  // Grow the array and finish the store.
3967  __ SmiAddConstant(FieldOperand(rdx, JSArray::kLengthOffset),
3968  Smi::FromInt(1));
3969  __ jmp(&finish_store);
3970 
3971  __ bind(&slow);
3972  Handle<Code> ic_slow = masm->isolate()->builtins()->KeyedStoreIC_Slow();
3973  __ jmp(ic_slow, RelocInfo::CODE_TARGET);
3974  }
3975 }
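// A minimal sketch of the bounds/grow classification above (illustrative):
// with ALLOW_JSARRAY_GROWTH, a store exactly one past the current length
// grows the array by a single element; any larger key, and any growth when
// disallowed, leaves the fast path.
enum StorePath { kFastStore, kGrowByOne, kOutOfLine };

static StorePath ClassifyStore(int key, int length, bool allow_growth) {
  if (key < length) return kFastStore;
  if (allow_growth && key == length) return kGrowByOne;
  return kOutOfLine;  // &miss_force_generic or the runtime
}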
3976 
3977 
3978 void KeyedStoreStubCompiler::GenerateStoreFastDoubleElement(
3979  MacroAssembler* masm,
3980  bool is_js_array,
3981  KeyedAccessGrowMode grow_mode) {
3982  // ----------- S t a t e -------------
3983  // -- rax : value
3984  // -- rcx : key
3985  // -- rdx : receiver
3986  // -- rsp[0] : return address
3987  // -----------------------------------
3988  Label miss_force_generic, transition_elements_kind, finish_store;
3989  Label grow, slow, check_capacity;
3990 
3991  // This stub is meant to be tail-jumped to; the receiver must already
3992  // have been verified by the caller to not be a smi.
3993 
3994  // Check that the key is a smi or a heap number convertible to a smi.
3995  GenerateSmiKeyCheck(masm, rcx, rbx, xmm0, xmm1, &miss_force_generic);
3996 
3997  // Get the elements array.
3998  __ movq(rdi, FieldOperand(rdx, JSObject::kElementsOffset));
3999  __ AssertFastElements(rdi);
4000 
4001  // Check that the key is within bounds.
4002  if (is_js_array) {
4003  __ SmiCompare(rcx, FieldOperand(rdx, JSArray::kLengthOffset));
4004  if (grow_mode == ALLOW_JSARRAY_GROWTH) {
4005  __ j(above_equal, &grow);
4006  } else {
4007  __ j(above_equal, &miss_force_generic);
4008  }
4009  } else {
4010  __ SmiCompare(rcx, FieldOperand(rdi, FixedDoubleArray::kLengthOffset));
4011  __ j(above_equal, &miss_force_generic);
4012  }
4013 
4014  // Handle smi values specially
4015  __ bind(&finish_store);
4016  __ SmiToInteger32(rcx, rcx);
4017  __ StoreNumberToDoubleElements(rax, rdi, rcx, xmm0,
4018  &transition_elements_kind);
4019  __ ret(0);
4020 
4021  // Handle store cache miss, replacing the ic with the generic stub.
4022  __ bind(&miss_force_generic);
4023  Handle<Code> ic_force_generic =
4024  masm->isolate()->builtins()->KeyedStoreIC_MissForceGeneric();
4025  __ jmp(ic_force_generic, RelocInfo::CODE_TARGET);
4026 
4027  __ bind(&transition_elements_kind);
4028  // Restore smi-tagging of rcx.
4029  __ Integer32ToSmi(rcx, rcx);
4030  Handle<Code> ic_miss = masm->isolate()->builtins()->KeyedStoreIC_Miss();
4031  __ jmp(ic_miss, RelocInfo::CODE_TARGET);
4032 
4033  if (is_js_array && grow_mode == ALLOW_JSARRAY_GROWTH) {
4034  // Grow the array by a single element if possible.
4035  __ bind(&grow);
4036 
4037  // Make sure the array is only growing by a single element; anything else
4038  // must be handled by the runtime. Flags are already set by previous
4039  // compare.
4040  __ j(not_equal, &miss_force_generic);
4041 
4042  // Transition on values that can't be stored in a FixedDoubleArray.
4043  Label value_is_smi;
4044  __ JumpIfSmi(rax, &value_is_smi);
4045  __ CompareRoot(FieldOperand(rax, HeapObject::kMapOffset),
4046  Heap::kHeapNumberMapRootIndex);
4047  __ j(not_equal, &transition_elements_kind);
4048  __ bind(&value_is_smi);
4049 
4050  // Check for the empty array, and preallocate a small backing store if
4051  // possible.
4052  __ movq(rdi, FieldOperand(rdx, JSObject::kElementsOffset));
4053  __ CompareRoot(rdi, Heap::kEmptyFixedArrayRootIndex);
4054  __ j(not_equal, &check_capacity);
4055 
4056  int size = FixedDoubleArray::SizeFor(JSArray::kPreallocatedArrayElements);
4057  __ AllocateInNewSpace(size, rdi, rbx, r8, &slow, TAG_OBJECT);
4058 
4059  // rax: value
4060  // rcx: key
4061  // rdx: receiver
4062  // rdi: elements
4063  // Initialize the new FixedDoubleArray. Leave elements uninitialized for
4064  // efficiency; they are guaranteed to be initialized before use.
4065  __ Move(FieldOperand(rdi, JSObject::kMapOffset),
4066  masm->isolate()->factory()->fixed_double_array_map());
4067  __ Move(FieldOperand(rdi, FixedDoubleArray::kLengthOffset),
4068  Smi::FromInt(JSArray::kPreallocatedArrayElements));
4069 
4070  // Install the new backing store in the JSArray.
4071  __ movq(FieldOperand(rdx, JSObject::kElementsOffset), rdi);
4072  __ RecordWriteField(rdx, JSObject::kElementsOffset, rdi, rbx,
4073  kDontSaveFPRegs, EMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
4074 
4075  // Increment the length of the array.
4076  __ Move(FieldOperand(rdx, JSArray::kLengthOffset), Smi::FromInt(1));
4077  __ movq(rdi, FieldOperand(rdx, JSObject::kElementsOffset));
4078  __ jmp(&finish_store);
4079 
4080  __ bind(&check_capacity);
4081  // rax: value
4082  // rcx: key
4083  // rdx: receiver
4084  // rdi: elements
4085  // Make sure that the backing store can hold additional elements.
4086  __ SmiCompare(rcx, FieldOperand(rdi, FixedDoubleArray::kLengthOffset));
4087  __ j(above_equal, &slow);
4088 
4089  // Grow the array and finish the store.
4090  __ SmiAddConstant(FieldOperand(rdx, JSArray::kLengthOffset),
4091  Smi::FromInt(1));
4092  __ jmp(&finish_store);
4093 
4094  __ bind(&slow);
4095  Handle<Code> ic_slow = masm->isolate()->builtins()->KeyedStoreIC_Slow();
4096  __ jmp(ic_slow, RelocInfo::CODE_TARGET);
4097  }
4098 }
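// A minimal sketch of StoreNumberToDoubleElements as used above
// (illustrative; the real helper also handles tagging details): a smi is
// widened to double, a HeapNumber's payload is stored directly, and any
// other value forces an elements-kind transition.
static bool StoreNumberToDoubles(double* elements, int index,
                                 bool is_smi, int smi_value,
                                 bool is_heap_number, double heap_value) {
  if (is_smi) {
    elements[index] = static_cast<double>(smi_value);  // cvtlsi2sd
    return true;
  }
  if (is_heap_number) {
    elements[index] = heap_value;
    return true;
  }
  return false;  // caller jumps to &transition_elements_kind
}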
4099 
4100 
4101 #undef __
4102 
4103 } } // namespace v8::internal
4104 
4105 #endif // V8_TARGET_ARCH_X64