// stub-cache-x64.cc (V8 3.25.30, as bundled with node 0.11.13)
// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "v8.h"

#if V8_TARGET_ARCH_X64

#include "arguments.h"
#include "ic-inl.h"
#include "codegen.h"
#include "stub-cache.h"

namespace v8 {
namespace internal {

#define __ ACCESS_MASM(masm)


static void ProbeTable(Isolate* isolate,
                       MacroAssembler* masm,
                       Code::Flags flags,
                       StubCache::Table table,
                       Register receiver,
                       Register name,
                       // The offset is scaled by 4, based on
                       // kHeapObjectTagSize, which is two bits.
                       Register offset) {
  // We need to scale up the pointer by 2 when the offset is scaled by less
  // than the pointer size.
  ASSERT(kPointerSize == kInt64Size
      ? kPointerSizeLog2 == kHeapObjectTagSize + 1
      : kPointerSizeLog2 == kHeapObjectTagSize);
  ScaleFactor scale_factor = kPointerSize == kInt64Size ? times_2 : times_1;

  ASSERT_EQ(3 * kPointerSize, sizeof(StubCache::Entry));
  // The offset register holds the entry offset times four (due to masking
  // and shifting optimizations).
  ExternalReference key_offset(isolate->stub_cache()->key_reference(table));
  ExternalReference value_offset(isolate->stub_cache()->value_reference(table));
  Label miss;

  // Multiply by 3 because there are 3 fields per entry (name, code, map).
  __ leap(offset, Operand(offset, offset, times_2, 0));

  __ LoadAddress(kScratchRegister, key_offset);

  // Check that the key in the entry matches the name.
  // Multiply entry offset by 16 to get the entry address. Since the
  // offset register already holds the entry offset times four, multiply
  // by a further four.
  __ cmpl(name, Operand(kScratchRegister, offset, scale_factor, 0));
  __ j(not_equal, &miss);

  // Get the map entry from the cache.
  // Use key_offset + kPointerSize * 2, rather than loading map_offset.
  __ movp(kScratchRegister,
          Operand(kScratchRegister, offset, scale_factor, kPointerSize * 2));
  __ cmpp(kScratchRegister, FieldOperand(receiver, HeapObject::kMapOffset));
  __ j(not_equal, &miss);

  // Get the code entry from the cache.
  __ LoadAddress(kScratchRegister, value_offset);
  __ movp(kScratchRegister,
          Operand(kScratchRegister, offset, scale_factor, 0));

  // Check that the flags match what we're looking for.
  __ movl(offset, FieldOperand(kScratchRegister, Code::kFlagsOffset));
  __ andp(offset, Immediate(~Code::kFlagsNotUsedInLookup));
  __ cmpl(offset, Immediate(flags));
  __ j(not_equal, &miss);

#ifdef DEBUG
  if (FLAG_test_secondary_stub_cache && table == StubCache::kPrimary) {
    __ jmp(&miss);
  } else if (FLAG_test_primary_stub_cache && table == StubCache::kSecondary) {
    __ jmp(&miss);
  }
#endif

  // Jump to the first instruction in the code stub.
  __ addp(kScratchRegister, Immediate(Code::kHeaderSize - kHeapObjectTag));
  __ jmp(kScratchRegister);

  __ bind(&miss);
}
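

// [Editor's note] Illustration, not part of the original file: the entry
// addressing in ProbeTable above, restated as plain C++. A minimal sketch,
// assuming the layout asserted above (Entry is three pointers {key, value,
// map}) and kHeapObjectTagSize == 2, so the incoming 'offset' register holds
// the entry index times 4:
//
//   struct Entry { void* key; void* value; void* map; };  // 24 bytes on x64
//
//   Entry* EntryAddress(char* table_base, unsigned index_times_4) {
//     unsigned times_12 = index_times_4 * 3;  // leap(offset, offset, times_2)
//     return reinterpret_cast<Entry*>(
//         table_base + times_12 * 2);         // scale_factor == times_2
//   }
//
// Net effect: base + index * 24 == base + index * sizeof(Entry), with no
// general multiply. The key and value tables share this index; the map is
// read at key_offset + 2 * kPointerSize instead of via a third base address.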


void StubCompiler::GenerateDictionaryNegativeLookup(MacroAssembler* masm,
                                                    Label* miss_label,
                                                    Register receiver,
                                                    Handle<Name> name,
                                                    Register scratch0,
                                                    Register scratch1) {
  ASSERT(name->IsUniqueName());
  ASSERT(!receiver.is(scratch0));
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->negative_lookups(), 1);
  __ IncrementCounter(counters->negative_lookups_miss(), 1);

  __ movp(scratch0, FieldOperand(receiver, HeapObject::kMapOffset));

  const int kInterceptorOrAccessCheckNeededMask =
      (1 << Map::kHasNamedInterceptor) | (1 << Map::kIsAccessCheckNeeded);

  // Bail out if the receiver has a named interceptor or requires access checks.
  __ testb(FieldOperand(scratch0, Map::kBitFieldOffset),
           Immediate(kInterceptorOrAccessCheckNeededMask));
  __ j(not_zero, miss_label);

  // Check that the receiver is a JSObject.
  __ CmpInstanceType(scratch0, FIRST_SPEC_OBJECT_TYPE);
  __ j(below, miss_label);

  // Load the properties array.
  Register properties = scratch0;
  __ movp(properties, FieldOperand(receiver, JSObject::kPropertiesOffset));

  // Check that the properties array is a dictionary.
  __ CompareRoot(FieldOperand(properties, HeapObject::kMapOffset),
                 Heap::kHashTableMapRootIndex);
  __ j(not_equal, miss_label);

  Label done;
  NameDictionaryLookupStub::GenerateNegativeLookup(masm,
                                                   miss_label,
                                                   &done,
                                                   properties,
                                                   name,
                                                   scratch1);
  __ bind(&done);
  __ DecrementCounter(counters->negative_lookups_miss(), 1);
}


void StubCache::GenerateProbe(MacroAssembler* masm,
                              Code::Flags flags,
                              Register receiver,
                              Register name,
                              Register scratch,
                              Register extra,
                              Register extra2,
                              Register extra3) {
  Isolate* isolate = masm->isolate();
  Label miss;
  USE(extra);   // The register extra is not used on the X64 platform.
  USE(extra2);  // The register extra2 is not used on the X64 platform.
  USE(extra3);  // The register extra3 is not used on the X64 platform.
  // Make sure that the code is valid. The multiplying code relies on the
  // entry size being 3 * kPointerSize.
  ASSERT(sizeof(Entry) == 3 * kPointerSize);

  // Make sure the flags do not name a specific type.
  ASSERT(Code::ExtractTypeFromFlags(flags) == 0);

  // Make sure that there are no register conflicts.
  ASSERT(!scratch.is(receiver));
  ASSERT(!scratch.is(name));

  // Check that the scratch register is valid and that extra2 and extra3
  // are unused.
  ASSERT(!scratch.is(no_reg));
  ASSERT(extra2.is(no_reg));
  ASSERT(extra3.is(no_reg));

  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->megamorphic_stub_cache_probes(), 1);

  // Check that the receiver isn't a smi.
  __ JumpIfSmi(receiver, &miss);

  // Get the map of the receiver and compute the hash.
  __ movl(scratch, FieldOperand(name, Name::kHashFieldOffset));
  // Use only the low 32 bits of the map pointer.
  __ addl(scratch, FieldOperand(receiver, HeapObject::kMapOffset));
  __ xorp(scratch, Immediate(flags));
  // We mask out the last two bits because they are not part of the hash and
  // they are always 01 for maps. Also in the two 'and' instructions below.
  __ andp(scratch, Immediate((kPrimaryTableSize - 1) << kHeapObjectTagSize));

  // Probe the primary table.
  ProbeTable(isolate, masm, flags, kPrimary, receiver, name, scratch);

  // Primary miss: Compute hash for secondary probe.
  __ movl(scratch, FieldOperand(name, Name::kHashFieldOffset));
  __ addl(scratch, FieldOperand(receiver, HeapObject::kMapOffset));
  __ xorp(scratch, Immediate(flags));
  __ andp(scratch, Immediate((kPrimaryTableSize - 1) << kHeapObjectTagSize));
  __ subl(scratch, name);
  __ addl(scratch, Immediate(flags));
  __ andp(scratch, Immediate((kSecondaryTableSize - 1) << kHeapObjectTagSize));

  // Probe the secondary table.
  ProbeTable(isolate, masm, flags, kSecondary, receiver, name, scratch);

  // Cache miss: Fall through and let the caller handle the miss by
  // entering the runtime system.
  __ bind(&miss);
  __ IncrementCounter(counters->megamorphic_stub_cache_misses(), 1);
}
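

// [Editor's note] Illustration, not part of the original file: the probe
// offsets computed above, as plain C++. A sketch, assuming power-of-two
// table sizes and kHeapObjectTagSize == 2; only the low 32 bits of the map
// and name pointers participate, matching the movl/addl/subl instructions:
//
//   unsigned PrimaryOffset(unsigned name_hash_field, unsigned map_low32,
//                          unsigned flags) {
//     return ((name_hash_field + map_low32) ^ flags) &
//            ((kPrimaryTableSize - 1) << kHeapObjectTagSize);
//   }
//
//   unsigned SecondaryOffset(unsigned primary, unsigned name_low32,
//                            unsigned flags) {
//     return (primary - name_low32 + flags) &
//            ((kSecondaryTableSize - 1) << kHeapObjectTagSize);
//   }
//
// The generated code probes the primary table first; on any key, map, or
// flags mismatch it re-hashes into the secondary table, and only then falls
// through to the runtime miss handler.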


void StubCompiler::GenerateLoadGlobalFunctionPrototype(MacroAssembler* masm,
                                                       int index,
                                                       Register prototype) {
  // Load the global or builtins object from the current context.
  __ movp(prototype,
          Operand(rsi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
  // Load the native context from the global or builtins object.
  __ movp(prototype,
          FieldOperand(prototype, GlobalObject::kNativeContextOffset));
  // Load the function from the native context.
  __ movp(prototype, Operand(prototype, Context::SlotOffset(index)));
  // Load the initial map. The global functions all have initial maps.
  __ movp(prototype,
          FieldOperand(prototype, JSFunction::kPrototypeOrInitialMapOffset));
  // Load the prototype from the initial map.
  __ movp(prototype, FieldOperand(prototype, Map::kPrototypeOffset));
}


void StubCompiler::GenerateDirectLoadGlobalFunctionPrototype(
    MacroAssembler* masm,
    int index,
    Register prototype,
    Label* miss) {
  Isolate* isolate = masm->isolate();
  // Get the global function with the given index.
  Handle<JSFunction> function(
      JSFunction::cast(isolate->native_context()->get(index)));

  // Check we're still in the same context.
  Register scratch = prototype;
  const int offset = Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX);
  __ movp(scratch, Operand(rsi, offset));
  __ movp(scratch, FieldOperand(scratch, GlobalObject::kNativeContextOffset));
  __ Cmp(Operand(scratch, Context::SlotOffset(index)), function);
  __ j(not_equal, miss);

  // Load its initial map. The global functions all have initial maps.
  __ Move(prototype, Handle<Map>(function->initial_map()));
  // Load the prototype from the initial map.
  __ movp(prototype, FieldOperand(prototype, Map::kPrototypeOffset));
}


void StubCompiler::GenerateLoadArrayLength(MacroAssembler* masm,
                                           Register receiver,
                                           Register scratch,
                                           Label* miss_label) {
  // Check that the receiver isn't a smi.
  __ JumpIfSmi(receiver, miss_label);

  // Check that the object is a JS array.
  __ CmpObjectType(receiver, JS_ARRAY_TYPE, scratch);
  __ j(not_equal, miss_label);

  // Load the length directly from the JS array.
  __ movp(rax, FieldOperand(receiver, JSArray::kLengthOffset));
  __ ret(0);
}


void StubCompiler::GenerateLoadFunctionPrototype(MacroAssembler* masm,
                                                 Register receiver,
                                                 Register result,
                                                 Register scratch,
                                                 Label* miss_label) {
  __ TryGetFunctionPrototype(receiver, result, miss_label);
  if (!result.is(rax)) __ movp(rax, result);
  __ ret(0);
}


void StubCompiler::GenerateFastPropertyLoad(MacroAssembler* masm,
                                            Register dst,
                                            Register src,
                                            bool inobject,
                                            int index,
                                            Representation representation) {
  ASSERT(!representation.IsDouble());
  int offset = index * kPointerSize;
  if (!inobject) {
    // Calculate the offset into the properties array.
    offset = offset + FixedArray::kHeaderSize;
    __ movp(dst, FieldOperand(src, JSObject::kPropertiesOffset));
    src = dst;
  }
  __ movp(dst, FieldOperand(src, offset));
}
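

// [Editor's note] Illustration, not part of the original file: the load that
// GenerateFastPropertyLoad emits, as pseudocode. FieldAt() below is a
// hypothetical stand-in for FieldOperand(), i.e. a dereference at
// base + offset - kHeapObjectTag:
//
//   Object* FastPropertyLoad(JSObject* obj, bool inobject, int index) {
//     if (inobject) {
//       return FieldAt(obj, index * kPointerSize);   // slot inside object
//     }
//     FixedArray* props = FieldAt(obj, JSObject::kPropertiesOffset);
//     return FieldAt(props, index * kPointerSize + FixedArray::kHeaderSize);
//   }
//
// The out-of-object case costs one extra dependent load, which is why the
// properties array is fetched into dst first and then overwritten with the
// value.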


static void PushInterceptorArguments(MacroAssembler* masm,
                                     Register receiver,
                                     Register holder,
                                     Register name,
                                     Handle<JSObject> holder_obj) {
  STATIC_ASSERT(StubCache::kInterceptorArgsNameIndex == 0);
  STATIC_ASSERT(StubCache::kInterceptorArgsInfoIndex == 1);
  STATIC_ASSERT(StubCache::kInterceptorArgsThisIndex == 2);
  STATIC_ASSERT(StubCache::kInterceptorArgsHolderIndex == 3);
  STATIC_ASSERT(StubCache::kInterceptorArgsLength == 4);
  __ Push(name);
  Handle<InterceptorInfo> interceptor(holder_obj->GetNamedInterceptor());
  ASSERT(!masm->isolate()->heap()->InNewSpace(*interceptor));
  __ Move(kScratchRegister, interceptor);
  __ Push(kScratchRegister);
  __ Push(receiver);
  __ Push(holder);
}


static void CompileCallLoadPropertyWithInterceptor(
    MacroAssembler* masm,
    Register receiver,
    Register holder,
    Register name,
    Handle<JSObject> holder_obj,
    IC::UtilityId id) {
  PushInterceptorArguments(masm, receiver, holder, name, holder_obj);
  __ CallExternalReference(
      ExternalReference(IC_Utility(id), masm->isolate()),
      StubCache::kInterceptorArgsLength);
}


// Generate a call to an api function.
void StubCompiler::GenerateFastApiCall(MacroAssembler* masm,
                                       const CallOptimization& optimization,
                                       Handle<Map> receiver_map,
                                       Register receiver,
                                       Register scratch_in,
                                       bool is_store,
                                       int argc,
                                       Register* values) {
  ASSERT(optimization.is_simple_api_call());

  __ PopReturnAddressTo(scratch_in);
  // receiver
  __ Push(receiver);
  // Write the arguments to the stack frame.
  for (int i = 0; i < argc; i++) {
    Register arg = values[argc - 1 - i];
    ASSERT(!receiver.is(arg));
    ASSERT(!scratch_in.is(arg));
    __ Push(arg);
  }
  __ PushReturnAddressFrom(scratch_in);
  // Stack now matches JSFunction abi.

  // Abi for CallApiFunctionStub.
  Register callee = rax;
  Register call_data = rbx;
  Register holder = rcx;
  Register api_function_address = rdx;
  Register scratch = rdi;  // scratch_in is no longer valid.

  // Put the holder in place.
  CallOptimization::HolderLookup holder_lookup;
  Handle<JSObject> api_holder = optimization.LookupHolderOfExpectedType(
      receiver_map,
      &holder_lookup);
  switch (holder_lookup) {
    case CallOptimization::kHolderIsReceiver:
      __ Move(holder, receiver);
      break;
    case CallOptimization::kHolderFound:
      __ Move(holder, api_holder);
      break;
    case CallOptimization::kHolderNotFound:
      UNREACHABLE();
      break;
  }

  Isolate* isolate = masm->isolate();
  Handle<JSFunction> function = optimization.constant_function();
  Handle<CallHandlerInfo> api_call_info = optimization.api_call_info();
  Handle<Object> call_data_obj(api_call_info->data(), isolate);

  // Put the callee in place.
  __ Move(callee, function);

  bool call_data_undefined = false;
  // Put call_data in place.
  if (isolate->heap()->InNewSpace(*call_data_obj)) {
    __ Move(scratch, api_call_info);
    __ movp(call_data, FieldOperand(scratch, CallHandlerInfo::kDataOffset));
  } else if (call_data_obj->IsUndefined()) {
    call_data_undefined = true;
    __ LoadRoot(call_data, Heap::kUndefinedValueRootIndex);
  } else {
    __ Move(call_data, call_data_obj);
  }

  // Put api_function_address in place.
  Address function_address = v8::ToCData<Address>(api_call_info->callback());
  __ Move(
      api_function_address, function_address, RelocInfo::EXTERNAL_REFERENCE);

  // Jump to the stub.
  CallApiFunctionStub stub(is_store, call_data_undefined, argc);
  __ TailCallStub(&stub);
}
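

// [Editor's note] Sketch, not part of the original file: the stack that
// GenerateFastApiCall builds before tail-calling CallApiFunctionStub, with
// x64's 8-byte slots. The receiver is pushed first and the return address is
// re-pushed last, so the frame looks like a normal JSFunction call:
//
//   rsp[0]               : return address
//   rsp[8] ...           : the argc entries of values[], in the loop's
//                          push order (values[argc - 1] is pushed first)
//   rsp[(argc + 1) * 8]  : receiver
//
// Everything else the stub needs travels in fixed registers: callee in rax,
// call_data in rbx, holder in rcx, and api_function_address in rdx.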


void StoreStubCompiler::GenerateRestoreName(MacroAssembler* masm,
                                            Label* label,
                                            Handle<Name> name) {
  if (!label->is_unused()) {
    __ bind(label);
    __ Move(this->name(), name);
  }
}


void StubCompiler::GenerateCheckPropertyCell(MacroAssembler* masm,
                                             Handle<JSGlobalObject> global,
                                             Handle<Name> name,
                                             Register scratch,
                                             Label* miss) {
  Handle<PropertyCell> cell =
      JSGlobalObject::EnsurePropertyCell(global, name);
  ASSERT(cell->value()->IsTheHole());
  __ Move(scratch, cell);
  __ Cmp(FieldOperand(scratch, Cell::kValueOffset),
         masm->isolate()->factory()->the_hole_value());
  __ j(not_equal, miss);
}


void StoreStubCompiler::GenerateNegativeHolderLookup(
    MacroAssembler* masm,
    Handle<JSObject> holder,
    Register holder_reg,
    Handle<Name> name,
    Label* miss) {
  if (holder->IsJSGlobalObject()) {
    GenerateCheckPropertyCell(
        masm, Handle<JSGlobalObject>::cast(holder), name, scratch1(), miss);
  } else if (!holder->HasFastProperties() && !holder->IsJSGlobalProxy()) {
    GenerateDictionaryNegativeLookup(
        masm, miss, holder_reg, name, scratch1(), scratch2());
  }
}


// receiver_reg is preserved on jumps to miss_label, but may be destroyed if
// the store is successful.
void StoreStubCompiler::GenerateStoreTransition(MacroAssembler* masm,
                                                Handle<JSObject> object,
                                                LookupResult* lookup,
                                                Handle<Map> transition,
                                                Handle<Name> name,
                                                Register receiver_reg,
                                                Register storage_reg,
                                                Register value_reg,
                                                Register scratch1,
                                                Register scratch2,
                                                Register unused,
                                                Label* miss_label,
                                                Label* slow) {
  int descriptor = transition->LastAdded();
  DescriptorArray* descriptors = transition->instance_descriptors();
  PropertyDetails details = descriptors->GetDetails(descriptor);
  Representation representation = details.representation();
  ASSERT(!representation.IsNone());

  if (details.type() == CONSTANT) {
    Handle<Object> constant(descriptors->GetValue(descriptor), masm->isolate());
    __ Cmp(value_reg, constant);
    __ j(not_equal, miss_label);
  } else if (representation.IsSmi()) {
    __ JumpIfNotSmi(value_reg, miss_label);
  } else if (representation.IsHeapObject()) {
    __ JumpIfSmi(value_reg, miss_label);
  } else if (representation.IsDouble()) {
    Label do_store, heap_number;
    __ AllocateHeapNumber(storage_reg, scratch1, slow);

    __ JumpIfNotSmi(value_reg, &heap_number);
    __ SmiToInteger32(scratch1, value_reg);
    __ Cvtlsi2sd(xmm0, scratch1);
    __ jmp(&do_store);

    __ bind(&heap_number);
    __ CheckMap(value_reg, masm->isolate()->factory()->heap_number_map(),
                miss_label, DONT_DO_SMI_CHECK);
    __ movsd(xmm0, FieldOperand(value_reg, HeapNumber::kValueOffset));

    __ bind(&do_store);
    __ movsd(FieldOperand(storage_reg, HeapNumber::kValueOffset), xmm0);
  }

  // Stubs are never generated for non-global objects that require access
  // checks.
  ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());

  // Perform a map transition for the receiver if necessary.
  if (details.type() == FIELD &&
      object->map()->unused_property_fields() == 0) {
    // The properties must be extended before we can store the value.
    // We jump to a runtime call that extends the properties array.
    __ PopReturnAddressTo(scratch1);
    __ Push(receiver_reg);
    __ Push(transition);
    __ Push(value_reg);
    __ PushReturnAddressFrom(scratch1);
    __ TailCallExternalReference(
        ExternalReference(IC_Utility(IC::kSharedStoreIC_ExtendStorage),
                          masm->isolate()),
        3,
        1);
    return;
  }

  // Update the map of the object.
  __ Move(scratch1, transition);
  __ movp(FieldOperand(receiver_reg, HeapObject::kMapOffset), scratch1);

  // Update the write barrier for the map field.
  __ RecordWriteField(receiver_reg,
                      HeapObject::kMapOffset,
                      scratch1,
                      scratch2,
                      kDontSaveFPRegs,
                      OMIT_REMEMBERED_SET,
                      OMIT_SMI_CHECK);

  if (details.type() == CONSTANT) {
    ASSERT(value_reg.is(rax));
    __ ret(0);
    return;
  }

  int index = transition->instance_descriptors()->GetFieldIndex(
      transition->LastAdded());

  // Adjust for the number of properties stored in the object. Even in the
  // face of a transition we can use the old map here because the size of the
  // object and the number of in-object properties is not going to change.
  index -= object->map()->inobject_properties();

  // TODO(verwaest): Share this code as a code stub.
  SmiCheck smi_check = representation.IsTagged()
      ? INLINE_SMI_CHECK : OMIT_SMI_CHECK;
  if (index < 0) {
    // Set the property straight into the object.
    int offset = object->map()->instance_size() + (index * kPointerSize);
    if (representation.IsDouble()) {
      __ movp(FieldOperand(receiver_reg, offset), storage_reg);
    } else {
      __ movp(FieldOperand(receiver_reg, offset), value_reg);
    }

    if (!representation.IsSmi()) {
      // Update the write barrier for the array address.
      if (!representation.IsDouble()) {
        __ movp(storage_reg, value_reg);
      }
      __ RecordWriteField(
          receiver_reg, offset, storage_reg, scratch1, kDontSaveFPRegs,
          EMIT_REMEMBERED_SET, smi_check);
    }
  } else {
    // Write to the properties array.
    int offset = index * kPointerSize + FixedArray::kHeaderSize;
    // Get the properties array (optimistically).
    __ movp(scratch1, FieldOperand(receiver_reg, JSObject::kPropertiesOffset));
    if (representation.IsDouble()) {
      __ movp(FieldOperand(scratch1, offset), storage_reg);
    } else {
      __ movp(FieldOperand(scratch1, offset), value_reg);
    }

    if (!representation.IsSmi()) {
      // Update the write barrier for the array address.
      if (!representation.IsDouble()) {
        __ movp(storage_reg, value_reg);
      }
      __ RecordWriteField(
          scratch1, offset, storage_reg, receiver_reg, kDontSaveFPRegs,
          EMIT_REMEMBERED_SET, smi_check);
    }
  }

  // Return the value (register rax).
  ASSERT(value_reg.is(rax));
  __ ret(0);
}
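

// [Editor's note] Worked example, not part of the original file: the index
// adjustment used above and in GenerateStoreField below. Descriptor field
// indices count in-object fields first, so after subtracting
// inobject_properties() a negative index selects a slot inside the object
// and a non-negative one selects the properties backing store. With
// kPointerSize == 8, instance_size() == 64 and inobject_properties() == 4:
//
//   field 2: index = 2 - 4 = -2
//            offset = 64 + (-2 * 8) = 48               // in-object slot
//   field 5: index = 5 - 4 = 1
//            offset = 1 * 8 + FixedArray::kHeaderSize  // properties array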


// Both name_reg and receiver_reg are preserved on jumps to miss_label,
// but may be destroyed if the store is successful.
void StoreStubCompiler::GenerateStoreField(MacroAssembler* masm,
                                           Handle<JSObject> object,
                                           LookupResult* lookup,
                                           Register receiver_reg,
                                           Register name_reg,
                                           Register value_reg,
                                           Register scratch1,
                                           Register scratch2,
                                           Label* miss_label) {
  // Stubs are never generated for non-global objects that require access
  // checks.
  ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());

  int index = lookup->GetFieldIndex().field_index();

  // Adjust for the number of properties stored in the object. Even in the
  // face of a transition we can use the old map here because the size of the
  // object and the number of in-object properties is not going to change.
  index -= object->map()->inobject_properties();

  Representation representation = lookup->representation();
  ASSERT(!representation.IsNone());
  if (representation.IsSmi()) {
    __ JumpIfNotSmi(value_reg, miss_label);
  } else if (representation.IsHeapObject()) {
    __ JumpIfSmi(value_reg, miss_label);
  } else if (representation.IsDouble()) {
    // Load the double storage.
    if (index < 0) {
      int offset = object->map()->instance_size() + (index * kPointerSize);
      __ movp(scratch1, FieldOperand(receiver_reg, offset));
    } else {
      __ movp(scratch1,
              FieldOperand(receiver_reg, JSObject::kPropertiesOffset));
      int offset = index * kPointerSize + FixedArray::kHeaderSize;
      __ movp(scratch1, FieldOperand(scratch1, offset));
    }

    // Store the value into the storage.
    Label do_store, heap_number;
    __ JumpIfNotSmi(value_reg, &heap_number);
    __ SmiToInteger32(scratch2, value_reg);
    __ Cvtlsi2sd(xmm0, scratch2);
    __ jmp(&do_store);

    __ bind(&heap_number);
    __ CheckMap(value_reg, masm->isolate()->factory()->heap_number_map(),
                miss_label, DONT_DO_SMI_CHECK);
    __ movsd(xmm0, FieldOperand(value_reg, HeapNumber::kValueOffset));
    __ bind(&do_store);
    __ movsd(FieldOperand(scratch1, HeapNumber::kValueOffset), xmm0);
    // Return the value (register rax).
    ASSERT(value_reg.is(rax));
    __ ret(0);
    return;
  }

  // TODO(verwaest): Share this code as a code stub.
  SmiCheck smi_check = representation.IsTagged()
      ? INLINE_SMI_CHECK : OMIT_SMI_CHECK;
  if (index < 0) {
    // Set the property straight into the object.
    int offset = object->map()->instance_size() + (index * kPointerSize);
    __ movp(FieldOperand(receiver_reg, offset), value_reg);

    if (!representation.IsSmi()) {
      // Update the write barrier for the array address.
      // Pass the value being stored in the now unused name_reg.
      __ movp(name_reg, value_reg);
      __ RecordWriteField(
          receiver_reg, offset, name_reg, scratch1, kDontSaveFPRegs,
          EMIT_REMEMBERED_SET, smi_check);
    }
  } else {
    // Write to the properties array.
    int offset = index * kPointerSize + FixedArray::kHeaderSize;
    // Get the properties array (optimistically).
    __ movp(scratch1, FieldOperand(receiver_reg, JSObject::kPropertiesOffset));
    __ movp(FieldOperand(scratch1, offset), value_reg);

    if (!representation.IsSmi()) {
      // Update the write barrier for the array address.
      // Pass the value being stored in the now unused name_reg.
      __ movp(name_reg, value_reg);
      __ RecordWriteField(
          scratch1, offset, name_reg, receiver_reg, kDontSaveFPRegs,
          EMIT_REMEMBERED_SET, smi_check);
    }
  }

  // Return the value (register rax).
  ASSERT(value_reg.is(rax));
  __ ret(0);
}


void StubCompiler::GenerateTailCall(MacroAssembler* masm, Handle<Code> code) {
  __ jmp(code, RelocInfo::CODE_TARGET);
}


#undef __
#define __ ACCESS_MASM((masm()))


Register StubCompiler::CheckPrototypes(Handle<HeapType> type,
                                       Register object_reg,
                                       Handle<JSObject> holder,
                                       Register holder_reg,
                                       Register scratch1,
                                       Register scratch2,
                                       Handle<Name> name,
                                       Label* miss,
                                       PrototypeCheckType check) {
  Handle<Map> receiver_map(IC::TypeToMap(*type, isolate()));

  // Make sure there's no overlap between the holder and object registers.
  ASSERT(!scratch1.is(object_reg) && !scratch1.is(holder_reg));
  ASSERT(!scratch2.is(object_reg) && !scratch2.is(holder_reg)
         && !scratch2.is(scratch1));

  // Keep track of the current object in register reg. On the first
  // iteration, reg is an alias for object_reg; on later iterations,
  // it is an alias for holder_reg.
  Register reg = object_reg;
  int depth = 0;

  Handle<JSObject> current = Handle<JSObject>::null();
  if (type->IsConstant()) current = Handle<JSObject>::cast(type->AsConstant());
  Handle<JSObject> prototype = Handle<JSObject>::null();
  Handle<Map> current_map = receiver_map;
  Handle<Map> holder_map(holder->map());
  // Traverse the prototype chain and check the maps in the prototype chain for
  // fast and global objects or do negative lookup for normal objects.
  while (!current_map.is_identical_to(holder_map)) {
    ++depth;

    // Only global objects and objects that do not require access
    // checks are allowed in stubs.
    ASSERT(current_map->IsJSGlobalProxyMap() ||
           !current_map->is_access_check_needed());

    prototype = handle(JSObject::cast(current_map->prototype()));
    if (current_map->is_dictionary_map() &&
        !current_map->IsJSGlobalObjectMap() &&
        !current_map->IsJSGlobalProxyMap()) {
      if (!name->IsUniqueName()) {
        ASSERT(name->IsString());
        name = factory()->InternalizeString(Handle<String>::cast(name));
      }
      ASSERT(current.is_null() ||
             current->property_dictionary()->FindEntry(*name) ==
             NameDictionary::kNotFound);

      GenerateDictionaryNegativeLookup(masm(), miss, reg, name,
                                       scratch1, scratch2);

      __ movp(scratch1, FieldOperand(reg, HeapObject::kMapOffset));
      reg = holder_reg;  // From now on the object will be in holder_reg.
      __ movp(reg, FieldOperand(scratch1, Map::kPrototypeOffset));
    } else {
      bool in_new_space = heap()->InNewSpace(*prototype);
      if (in_new_space) {
        // Save the map in scratch1 for later.
        __ movp(scratch1, FieldOperand(reg, HeapObject::kMapOffset));
      }
      if (depth != 1 || check == CHECK_ALL_MAPS) {
        __ CheckMap(reg, current_map, miss, DONT_DO_SMI_CHECK);
      }

      // Check access rights to the global object. This has to happen after
      // the map check so that we know that the object is actually a global
      // object.
      if (current_map->IsJSGlobalProxyMap()) {
        __ CheckAccessGlobalProxy(reg, scratch2, miss);
      } else if (current_map->IsJSGlobalObjectMap()) {
        GenerateCheckPropertyCell(
            masm(), Handle<JSGlobalObject>::cast(current), name,
            scratch2, miss);
      }
      reg = holder_reg;  // From now on the object will be in holder_reg.

      if (in_new_space) {
        // The prototype is in new space; we cannot store a reference to it
        // in the code. Load it from the map.
        __ movp(reg, FieldOperand(scratch1, Map::kPrototypeOffset));
      } else {
        // The prototype is in old space; load it directly.
        __ Move(reg, prototype);
      }
    }

    // Go to the next object in the prototype chain.
    current = prototype;
    current_map = handle(current->map());
  }

  // Log the check depth.
  LOG(isolate(), IntEvent("check-maps-depth", depth + 1));

  if (depth != 0 || check == CHECK_ALL_MAPS) {
    // Check the holder map.
    __ CheckMap(reg, current_map, miss, DONT_DO_SMI_CHECK);
  }

  // Perform a security check for access to the global object.
  ASSERT(current_map->IsJSGlobalProxyMap() ||
         !current_map->is_access_check_needed());
  if (current_map->IsJSGlobalProxyMap()) {
    __ CheckAccessGlobalProxy(reg, scratch1, miss);
  }

  // Return the register containing the holder.
  return reg;
}
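

// [Editor's note] Illustration, not part of the original file: the check
// that CheckPrototypes compiles, as pseudocode for the *emitted* logic (the
// C++ above is the compile-time loop that unrolls it):
//
//   // reg starts as the receiver; each hop moves it one prototype up.
//   for each map from the receiver's map up to (but excluding) holder's map:
//     if the object is dictionary-mode and not global:
//       negative-lookup 'name' in its dictionary, else goto miss
//     else:
//       compare reg's map against the expected map, else goto miss
//       (skipped at depth 1 unless CHECK_ALL_MAPS: the IC dispatch has
//        already checked the receiver map)
//       global proxy  -> CheckAccessGlobalProxy, else goto miss
//       global object -> check the name's PropertyCell still holds the hole
//     reg = that map's prototype (loaded from the map if in new space,
//           otherwise embedded as a constant)
//   finally, check the holder's own map (with the same depth-0 exemption).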


void LoadStubCompiler::HandlerFrontendFooter(Handle<Name> name, Label* miss) {
  if (!miss->is_unused()) {
    Label success;
    __ jmp(&success);
    __ bind(miss);
    TailCallBuiltin(masm(), MissBuiltin(kind()));
    __ bind(&success);
  }
}


void StoreStubCompiler::HandlerFrontendFooter(Handle<Name> name, Label* miss) {
  if (!miss->is_unused()) {
    Label success;
    __ jmp(&success);
    GenerateRestoreName(masm(), miss, name);
    TailCallBuiltin(masm(), MissBuiltin(kind()));
    __ bind(&success);
  }
}


Register LoadStubCompiler::CallbackHandlerFrontend(
    Handle<HeapType> type,
    Register object_reg,
    Handle<JSObject> holder,
    Handle<Name> name,
    Handle<Object> callback) {
  Label miss;

  Register reg = HandlerFrontendHeader(type, object_reg, holder, name, &miss);

  if (!holder->HasFastProperties() && !holder->IsJSGlobalObject()) {
    ASSERT(!reg.is(scratch2()));
    ASSERT(!reg.is(scratch3()));
    ASSERT(!reg.is(scratch4()));

    // Load the properties dictionary.
    Register dictionary = scratch4();
    __ movp(dictionary, FieldOperand(reg, JSObject::kPropertiesOffset));

    // Probe the dictionary.
    Label probe_done;
    NameDictionaryLookupStub::GeneratePositiveLookup(masm(),
                                                     &miss,
                                                     &probe_done,
                                                     dictionary,
                                                     this->name(),
                                                     scratch2(),
                                                     scratch3());
    __ bind(&probe_done);

    // If probing finds an entry in the dictionary, scratch3 contains the
    // index into the dictionary. Check that the value is the callback.
    Register index = scratch3();
    const int kElementsStartOffset =
        NameDictionary::kHeaderSize +
        NameDictionary::kElementsStartIndex * kPointerSize;
    const int kValueOffset = kElementsStartOffset + kPointerSize;
    __ movp(scratch2(),
            Operand(dictionary, index, times_pointer_size,
                    kValueOffset - kHeapObjectTag));
    __ Move(scratch3(), callback, RelocInfo::EMBEDDED_OBJECT);
    __ cmpp(scratch2(), scratch3());
    __ j(not_equal, &miss);
  }

  HandlerFrontendFooter(name, &miss);
  return reg;
}
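

// [Editor's note] Illustration, not part of the original file: the operand
// used above to read the probed dictionary slot, unpacked. A sketch,
// assuming the NameDictionary layout implied by the constants (entries of
// {key, value, details} words starting at kElementsStartIndex, with 'index'
// already a word index produced by the lookup stub):
//
//   Object* DictionaryValueAt(NameDictionary* dict, int index) {
//     int value_offset = NameDictionary::kHeaderSize +
//                        NameDictionary::kElementsStartIndex * kPointerSize +
//                        kPointerSize;  // skip the entry's key word
//     return FieldAt(dict, index * kPointerSize + value_offset);
//   }
//
// where FieldAt() again stands for a tagged-pointer load at
// base + offset - kHeapObjectTag, i.e. exactly the Operand(...) above.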


void LoadStubCompiler::GenerateLoadField(Register reg,
                                         Handle<JSObject> holder,
                                         PropertyIndex field,
                                         Representation representation) {
  if (!reg.is(receiver())) __ movp(receiver(), reg);
  if (kind() == Code::LOAD_IC) {
    LoadFieldStub stub(field.is_inobject(holder),
                       field.translate(holder),
                       representation);
    GenerateTailCall(masm(), stub.GetCode(isolate()));
  } else {
    KeyedLoadFieldStub stub(field.is_inobject(holder),
                            field.translate(holder),
                            representation);
    GenerateTailCall(masm(), stub.GetCode(isolate()));
  }
}


void LoadStubCompiler::GenerateLoadCallback(
    Register reg,
    Handle<ExecutableAccessorInfo> callback) {
  // Insert additional parameters into the stack frame above the return
  // address.
  ASSERT(!scratch4().is(reg));
  __ PopReturnAddressTo(scratch4());

  STATIC_ASSERT(PropertyCallbackArguments::kHolderIndex == 0);
  STATIC_ASSERT(PropertyCallbackArguments::kIsolateIndex == 1);
  STATIC_ASSERT(PropertyCallbackArguments::kReturnValueDefaultValueIndex == 2);
  STATIC_ASSERT(PropertyCallbackArguments::kReturnValueOffset == 3);
  STATIC_ASSERT(PropertyCallbackArguments::kDataIndex == 4);
  STATIC_ASSERT(PropertyCallbackArguments::kThisIndex == 5);
  STATIC_ASSERT(PropertyCallbackArguments::kArgsLength == 6);
  __ Push(receiver());  // receiver
  if (heap()->InNewSpace(callback->data())) {
    ASSERT(!scratch2().is(reg));
    __ Move(scratch2(), callback);
    __ Push(FieldOperand(scratch2(),
                         ExecutableAccessorInfo::kDataOffset));  // data
  } else {
    __ Push(Handle<Object>(callback->data(), isolate()));
  }
  ASSERT(!kScratchRegister.is(reg));
  __ LoadRoot(kScratchRegister, Heap::kUndefinedValueRootIndex);
  __ Push(kScratchRegister);  // return value
  __ Push(kScratchRegister);  // return value default
  __ PushAddress(ExternalReference::isolate_address(isolate()));
  __ Push(reg);     // holder
  __ Push(name());  // name
  // Save a pointer to where we pushed the arguments pointer. This will be
  // passed as the const PropertyAccessorInfo& to the C++ callback.

  __ PushReturnAddressFrom(scratch4());

  // Abi for CallApiGetter.
  Register api_function_address = r8;
  Address getter_address = v8::ToCData<Address>(callback->getter());
  __ Move(api_function_address, getter_address, RelocInfo::EXTERNAL_REFERENCE);

  CallApiGetterStub stub;
  __ TailCallStub(&stub);
}


void LoadStubCompiler::GenerateLoadConstant(Handle<Object> value) {
  // Return the constant value.
  __ Move(rax, value);
  __ ret(0);
}


void LoadStubCompiler::GenerateLoadInterceptor(
    Register holder_reg,
    Handle<Object> object,
    Handle<JSObject> interceptor_holder,
    LookupResult* lookup,
    Handle<Name> name) {
  ASSERT(interceptor_holder->HasNamedInterceptor());
  ASSERT(!interceptor_holder->GetNamedInterceptor()->getter()->IsUndefined());

  // So far the most popular follow-ups for interceptor loads are FIELD and
  // CALLBACKS, so we inline only them; other cases may be added later.
  bool compile_followup_inline = false;
  if (lookup->IsFound() && lookup->IsCacheable()) {
    if (lookup->IsField()) {
      compile_followup_inline = true;
    } else if (lookup->type() == CALLBACKS &&
               lookup->GetCallbackObject()->IsExecutableAccessorInfo()) {
      ExecutableAccessorInfo* callback =
          ExecutableAccessorInfo::cast(lookup->GetCallbackObject());
      compile_followup_inline = callback->getter() != NULL &&
          callback->IsCompatibleReceiver(*object);
    }
  }

  if (compile_followup_inline) {
    // Compile the interceptor call, followed by inline code to load the
    // property from further up the prototype chain if the call fails.
    // Check that the maps haven't changed.
    ASSERT(holder_reg.is(receiver()) || holder_reg.is(scratch1()));

    // Preserve the receiver register explicitly whenever it is different from
    // the holder and it is needed should the interceptor return without any
    // result. The CALLBACKS case needs the receiver to be passed into C++
    // code; the FIELD case might cause a miss during the prototype check.
    bool must_perform_prototype_check = *interceptor_holder != lookup->holder();
    bool must_preserve_receiver_reg = !receiver().is(holder_reg) &&
        (lookup->type() == CALLBACKS || must_perform_prototype_check);

    // Save the necessary data before invoking an interceptor.
    // Requires a frame to make the GC aware of pushed pointers.
    {
      FrameScope frame_scope(masm(), StackFrame::INTERNAL);

      if (must_preserve_receiver_reg) {
        __ Push(receiver());
      }
      __ Push(holder_reg);
      __ Push(this->name());

      // Invoke an interceptor. Note: the map checks from the receiver to the
      // interceptor's holder have been compiled before (see a caller of this
      // method).
      CompileCallLoadPropertyWithInterceptor(
          masm(), receiver(), holder_reg, this->name(), interceptor_holder,
          IC::kLoadPropertyWithInterceptorOnly);

      // Check if the interceptor provided a value for the property. If so,
      // return immediately.
      Label interceptor_failed;
      __ CompareRoot(rax, Heap::kNoInterceptorResultSentinelRootIndex);
      __ j(equal, &interceptor_failed);
      frame_scope.GenerateLeaveFrame();
      __ ret(0);

      __ bind(&interceptor_failed);
      __ Pop(this->name());
      __ Pop(holder_reg);
      if (must_preserve_receiver_reg) {
        __ Pop(receiver());
      }

      // Leave the internal frame.
    }

    GenerateLoadPostInterceptor(holder_reg, interceptor_holder, name, lookup);
  } else {  // !compile_followup_inline
    // Call the runtime system to load the interceptor.
    // Check that the maps haven't changed.
    __ PopReturnAddressTo(scratch2());
    PushInterceptorArguments(masm(), receiver(), holder_reg,
                             this->name(), interceptor_holder);
    __ PushReturnAddressFrom(scratch2());

    ExternalReference ref = ExternalReference(
        IC_Utility(IC::kLoadPropertyWithInterceptorForLoad), isolate());
    __ TailCallExternalReference(ref, StubCache::kInterceptorArgsLength, 1);
  }
}


void StubCompiler::GenerateBooleanCheck(Register object, Label* miss) {
  Label success;
  // Check that the object is a boolean.
  __ Cmp(object, factory()->true_value());
  __ j(equal, &success);
  __ Cmp(object, factory()->false_value());
  __ j(not_equal, miss);
  __ bind(&success);
}


Handle<Code> StoreStubCompiler::CompileStoreCallback(
    Handle<JSObject> object,
    Handle<JSObject> holder,
    Handle<Name> name,
    Handle<ExecutableAccessorInfo> callback) {
  Register holder_reg = HandlerFrontend(
      IC::CurrentTypeOf(object, isolate()), receiver(), holder, name);

  __ PopReturnAddressTo(scratch1());
  __ Push(receiver());
  __ Push(holder_reg);
  __ Push(callback);  // callback info
  __ Push(name);
  __ Push(value());
  __ PushReturnAddressFrom(scratch1());

  // Do a tail-call to the runtime system.
  ExternalReference store_callback_property =
      ExternalReference(IC_Utility(IC::kStoreCallbackProperty), isolate());
  __ TailCallExternalReference(store_callback_property, 5, 1);

  // Return the generated code.
  return GetCode(kind(), Code::FAST, name);
}


#undef __
#define __ ACCESS_MASM(masm)


void StoreStubCompiler::GenerateStoreViaSetter(
    MacroAssembler* masm,
    Handle<HeapType> type,
    Register receiver,
    Handle<JSFunction> setter) {
  // ----------- S t a t e -------------
  //  -- rsp[0] : return address
  // -----------------------------------
  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Save the value register, so we can restore it later.
    __ Push(value());

    if (!setter.is_null()) {
      // Call the JavaScript setter with receiver and value on the stack.
      if (IC::TypeToMap(*type, masm->isolate())->IsJSGlobalObjectMap()) {
        // Swap in the global receiver.
        __ movp(receiver,
                FieldOperand(receiver, JSGlobalObject::kGlobalReceiverOffset));
      }
      __ Push(receiver);
      __ Push(value());
      ParameterCount actual(1);
      ParameterCount expected(setter);
      __ InvokeFunction(setter, expected, actual,
                        CALL_FUNCTION, NullCallWrapper());
    } else {
      // If we generate a global code snippet for deoptimization only, remember
      // the place to continue after deoptimization.
      masm->isolate()->heap()->SetSetterStubDeoptPCOffset(masm->pc_offset());
    }

    // We have to return the passed value, not the return value of the setter.
    __ Pop(rax);

    // Restore the context register.
    __ movp(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
  }
  __ ret(0);
}


#undef __
#define __ ACCESS_MASM(masm())


Handle<Code> StoreStubCompiler::CompileStoreInterceptor(
    Handle<JSObject> object,
    Handle<Name> name) {
  __ PopReturnAddressTo(scratch1());
  __ Push(receiver());
  __ Push(this->name());
  __ Push(value());
  __ PushReturnAddressFrom(scratch1());

  // Do a tail-call to the runtime system.
  ExternalReference store_ic_property =
      ExternalReference(IC_Utility(IC::kStoreInterceptorProperty), isolate());
  __ TailCallExternalReference(store_ic_property, 3, 1);

  // Return the generated code.
  return GetCode(kind(), Code::FAST, name);
}


void StoreStubCompiler::GenerateStoreArrayLength() {
  // Prepare a tail call to StoreIC_ArrayLength.
  __ PopReturnAddressTo(scratch1());
  __ Push(receiver());
  __ Push(value());
  __ PushReturnAddressFrom(scratch1());

  ExternalReference ref =
      ExternalReference(IC_Utility(IC::kStoreIC_ArrayLength),
                        masm()->isolate());
  __ TailCallExternalReference(ref, 2, 1);
}


Handle<Code> KeyedStoreStubCompiler::CompileStorePolymorphic(
    MapHandleList* receiver_maps,
    CodeHandleList* handler_stubs,
    MapHandleList* transitioned_maps) {
  Label miss;
  __ JumpIfSmi(receiver(), &miss, Label::kNear);

  __ movp(scratch1(), FieldOperand(receiver(), HeapObject::kMapOffset));
  int receiver_count = receiver_maps->length();
  for (int i = 0; i < receiver_count; ++i) {
    // Check the map and tail call if there's a match.
    __ Cmp(scratch1(), receiver_maps->at(i));
    if (transitioned_maps->at(i).is_null()) {
      __ j(equal, handler_stubs->at(i), RelocInfo::CODE_TARGET);
    } else {
      Label next_map;
      __ j(not_equal, &next_map, Label::kNear);
      __ Move(transition_map(),
              transitioned_maps->at(i),
              RelocInfo::EMBEDDED_OBJECT);
      __ jmp(handler_stubs->at(i), RelocInfo::CODE_TARGET);
      __ bind(&next_map);
    }
  }

  __ bind(&miss);

  TailCallBuiltin(masm(), MissBuiltin(kind()));

  // Return the generated code.
  return GetICCode(
      kind(), Code::NORMAL, factory()->empty_string(), POLYMORPHIC);
}


Handle<Code> LoadStubCompiler::CompileLoadNonexistent(Handle<HeapType> type,
                                                      Handle<JSObject> last,
                                                      Handle<Name> name) {
  NonexistentHandlerFrontend(type, last, name);

  // Return undefined if the maps of the full prototype chain are still the
  // same and no global property with this name contains a value.
  __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
  __ ret(0);

  // Return the generated code.
  return GetCode(kind(), Code::FAST, name);
}


Register* LoadStubCompiler::registers() {
  // receiver, name, scratch1, scratch2, scratch3, scratch4.
  static Register registers[] = { rax, rcx, rdx, rbx, rdi, r8 };
  return registers;
}


Register* KeyedLoadStubCompiler::registers() {
  // receiver, name, scratch1, scratch2, scratch3, scratch4.
  static Register registers[] = { rdx, rax, rbx, rcx, rdi, r8 };
  return registers;
}


Register StoreStubCompiler::value() {
  return rax;
}


Register* StoreStubCompiler::registers() {
  // receiver, name, scratch1, scratch2, scratch3.
  static Register registers[] = { rdx, rcx, rbx, rdi, r8 };
  return registers;
}


Register* KeyedStoreStubCompiler::registers() {
  // receiver, name, scratch1, scratch2, scratch3.
  static Register registers[] = { rdx, rcx, rbx, rdi, r8 };
  return registers;
}


#undef __
#define __ ACCESS_MASM(masm)


void LoadStubCompiler::GenerateLoadViaGetter(MacroAssembler* masm,
                                             Handle<HeapType> type,
                                             Register receiver,
                                             Handle<JSFunction> getter) {
  // ----------- S t a t e -------------
  //  -- rax    : receiver
  //  -- rcx    : name
  //  -- rsp[0] : return address
  // -----------------------------------
  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    if (!getter.is_null()) {
      // Call the JavaScript getter with the receiver on the stack.
      if (IC::TypeToMap(*type, masm->isolate())->IsJSGlobalObjectMap()) {
        // Swap in the global receiver.
        __ movp(receiver,
                FieldOperand(receiver, JSGlobalObject::kGlobalReceiverOffset));
      }
      __ Push(receiver);
      ParameterCount actual(0);
      ParameterCount expected(getter);
      __ InvokeFunction(getter, expected, actual,
                        CALL_FUNCTION, NullCallWrapper());
    } else {
      // If we generate a global code snippet for deoptimization only, remember
      // the place to continue after deoptimization.
      masm->isolate()->heap()->SetGetterStubDeoptPCOffset(masm->pc_offset());
    }

    // Restore the context register.
    __ movp(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
  }
  __ ret(0);
}


#undef __
#define __ ACCESS_MASM(masm())


Handle<Code> LoadStubCompiler::CompileLoadGlobal(
    Handle<HeapType> type,
    Handle<GlobalObject> global,
    Handle<PropertyCell> cell,
    Handle<Name> name,
    bool is_dont_delete) {
  Label miss;
  // TODO(verwaest): Directly store to rax. Currently we cannot do this, since
  // rax is used as receiver(), which we would otherwise clobber before a
  // potential miss.
  HandlerFrontendHeader(type, receiver(), global, name, &miss);

  // Get the value from the cell.
  __ Move(rbx, cell);
  __ movp(rbx, FieldOperand(rbx, PropertyCell::kValueOffset));

  // Check for a deleted property if the property can actually be deleted.
  if (!is_dont_delete) {
    __ CompareRoot(rbx, Heap::kTheHoleValueRootIndex);
    __ j(equal, &miss);
  } else if (FLAG_debug_code) {
    __ CompareRoot(rbx, Heap::kTheHoleValueRootIndex);
    __ Check(not_equal, kDontDeleteCellsCannotContainTheHole);
  }

  Counters* counters = isolate()->counters();
  __ IncrementCounter(counters->named_load_global_stub(), 1);
  __ movp(rax, rbx);
  __ ret(0);

  HandlerFrontendFooter(name, &miss);

  // Return the generated code.
  return GetCode(kind(), Code::NORMAL, name);
}


Handle<Code> BaseLoadStoreStubCompiler::CompilePolymorphicIC(
    TypeHandleList* types,
    CodeHandleList* handlers,
    Handle<Name> name,
    Code::StubType type,
    IcCheckType check) {
  Label miss;

  if (check == PROPERTY &&
      (kind() == Code::KEYED_LOAD_IC || kind() == Code::KEYED_STORE_IC)) {
    __ Cmp(this->name(), name);
    __ j(not_equal, &miss);
  }

  Label number_case;
  Label* smi_target = IncludesNumberType(types) ? &number_case : &miss;
  __ JumpIfSmi(receiver(), smi_target);

  Register map_reg = scratch1();
  __ movp(map_reg, FieldOperand(receiver(), HeapObject::kMapOffset));
  int receiver_count = types->length();
  int number_of_handled_maps = 0;
  for (int current = 0; current < receiver_count; ++current) {
    Handle<HeapType> type = types->at(current);
    Handle<Map> map = IC::TypeToMap(*type, isolate());
    if (!map->is_deprecated()) {
      number_of_handled_maps++;
      // Check the map and tail call if there's a match.
      __ Cmp(map_reg, map);
      if (type->Is(HeapType::Number())) {
        ASSERT(!number_case.is_unused());
        __ bind(&number_case);
      }
      __ j(equal, handlers->at(current), RelocInfo::CODE_TARGET);
    }
  }
  ASSERT(number_of_handled_maps > 0);

  __ bind(&miss);
  TailCallBuiltin(masm(), MissBuiltin(kind()));

  // Return the generated code.
  InlineCacheState state =
      number_of_handled_maps > 1 ? POLYMORPHIC : MONOMORPHIC;
  return GetICCode(kind(), type, name, state);
}
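

// [Editor's note] Illustration, not part of the original file: the dispatch
// CompilePolymorphicIC emits, as pseudocode for the generated stub:
//
//   if keyed IC with check == PROPERTY and key != name: goto miss
//   if receiver is a Smi: goto number_case (if a Number map is present)
//                         else goto miss
//   map = receiver->map()
//   for each non-deprecated (map_i, handler_i):
//     if map == map_i: tail-call handler_i
//   miss: tail-call the IC kind's MissBuiltin
//
// As read from the code above, a Smi receiver lands on number_case with the
// zero flag still set from the Smi test, so the following j(equal) sends it
// to the Number map's handler without loading a map.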


#undef __
#define __ ACCESS_MASM(masm)


void KeyedLoadStubCompiler::GenerateLoadDictionaryElement(
    MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax    : key
  //  -- rdx    : receiver
  //  -- rsp[0] : return address
  // -----------------------------------
  Label slow, miss;

  // This stub is meant to be tail-jumped to; the receiver must already
  // have been verified by the caller to not be a smi.

  __ JumpIfNotSmi(rax, &miss);
  __ SmiToInteger32(rbx, rax);
  __ movp(rcx, FieldOperand(rdx, JSObject::kElementsOffset));

  // Check whether the elements array is a number dictionary.
  // rdx: receiver
  // rax: key
  // rbx: key as untagged int32
  // rcx: elements
  __ LoadFromNumberDictionary(&slow, rcx, rax, rbx, r9, rdi, rax);
  __ ret(0);

  __ bind(&slow);
  // ----------- S t a t e -------------
  //  -- rax    : key
  //  -- rdx    : receiver
  //  -- rsp[0] : return address
  // -----------------------------------
  TailCallBuiltin(masm, Builtins::kKeyedLoadIC_Slow);

  __ bind(&miss);
  // ----------- S t a t e -------------
  //  -- rax    : key
  //  -- rdx    : receiver
  //  -- rsp[0] : return address
  // -----------------------------------
  TailCallBuiltin(masm, Builtins::kKeyedLoadIC_Miss);
}


#undef __

} }  // namespace v8::internal

#endif  // V8_TARGET_ARCH_X64
byte * Address
Definition: globals.h:186
virtual void HandlerFrontendFooter(Handle< Name > name, Label *miss)
const Register rdx
enable upcoming ES6 features enable harmony block scoping enable harmony enable harmony proxies enable harmony generators enable harmony numeric enable harmony string enable harmony math functions harmony_scoping harmony_symbols harmony_collections harmony_iteration harmony_strings harmony_scoping harmony_maths tracks arrays with only smi values Optimize object Array DOM strings and string pretenure call new trace pretenuring decisions of HAllocate instructions track fields with only smi values track fields with heap values track_fields track_fields Enables optimizations which favor memory size over execution speed use string slices optimization filter maximum number of GVN fix point iterations use function inlining use allocation folding eliminate write barriers targeting allocations in optimized code maximum source size in bytes considered for a single inlining maximum cumulative number of AST nodes considered for inlining crankshaft harvests type feedback from stub cache trace check elimination phase hydrogen tracing filter NULL
Definition: flags.cc:269
void GenerateRestoreName(MacroAssembler *masm, Label *label, Handle< Name > name)
static const int kHashFieldOffset
Definition: objects.h:8629
static const int kBitFieldOffset
Definition: objects.h:6461
static const int kPrototypeOrInitialMapOffset
Definition: objects.h:7519
static const int kValueOffset
Definition: objects.h:9547
static int SlotOffset(int index)
Definition: contexts.h:498
enable upcoming ES6 features enable harmony block scoping enable harmony enable harmony proxies enable harmony generators enable harmony numeric enable harmony string enable harmony math functions harmony_scoping harmony_symbols harmony_collections harmony_iteration harmony_strings harmony_scoping harmony_maths tracks arrays with only smi values Optimize object Array DOM strings and string pretenure call new trace pretenuring decisions of HAllocate instructions track fields with only smi values track fields with heap values track_fields track_fields Enables optimizations which favor memory size over execution speed use string slices optimization filter maximum number of GVN fix point iterations use function inlining use allocation folding eliminate write barriers targeting allocations in optimized code maximum source size in bytes considered for a single inlining maximum cumulative number of AST nodes considered for inlining crankshaft harvests type feedback from stub cache trace check elimination phase hydrogen tracing filter trace hydrogen to given file name trace inlining decisions trace store elimination trace all use positions trace global value numbering trace hydrogen escape analysis trace the tracking of allocation sites trace map generalization environment for every instruction deoptimize every n garbage collections put a break point before deoptimizing deoptimize uncommon cases use on stack replacement trace array bounds check elimination perform array index dehoisting use load elimination use store elimination use constant folding eliminate unreachable code number of stress runs when picking a function to watch for shared function not JSFunction itself flushes the cache of optimized code for closures on every GC functions with arguments object maximum number of escape analysis fix point iterations allow uint32 values on optimize frames if they are used only in safe operations track concurrent recompilation artificial compilation delay in ms concurrent on stack replacement do not emit check maps for constant values that have a leaf map
Definition: flags.cc:350
static ExecutableAccessorInfo * cast(Object *obj)
void GenerateProbe(MacroAssembler *masm, Code::Flags flags, Register receiver, Register name, Register scratch, Register extra, Register extra2=no_reg, Register extra3=no_reg)
static const int kFlagsOffset
Definition: objects.h:5592
const Register rbp
#define LOG(isolate, Call)
Definition: log.h:86
Handle< Code > CompileStoreInterceptor(Handle< JSObject > object, Handle< Name > name)
static Handle< String > cast(Handle< S > that)
Definition: handles.h:75
static const int kGlobalReceiverOffset
Definition: objects.h:7613
const Register rsi
void GenerateLoadField(Register reg, Handle< JSObject > holder, PropertyIndex field, Representation representation)
static StubType ExtractTypeFromFlags(Flags flags)
Definition: objects-inl.h:4646
static const int kInterceptorArgsLength
Definition: stub-cache.h:207
static const int kInterceptorArgsNameIndex
Definition: stub-cache.h:203
static const int kHasNamedInterceptor
Definition: objects.h:6470
static const int kIsAccessCheckNeeded
Definition: objects.h:6474
uint32_t Flags
Definition: objects.h:5184
List< Handle< Map > > MapHandleList
Definition: list.h:218
#define ASSERT(condition)
Definition: checks.h:329
static const int kContextOffset
Definition: frames.h:185
const int kPointerSizeLog2
Definition: globals.h:281
Handle< Code > CompileStoreCallback(Handle< JSObject > object, Handle< JSObject > holder, Handle< Name > name, Handle< ExecutableAccessorInfo > callback)
virtual Register HandlerFrontendHeader(Handle< HeapType > type, Register object_reg, Handle< JSObject > holder, Handle< Name > name, Label *miss)
Definition: stub-cache.cc:790
virtual void HandlerFrontendFooter(Handle< Name > name, Label *miss)
#define UNREACHABLE()
Definition: checks.h:52
void GenerateLoadPostInterceptor(Register reg, Handle< JSObject > interceptor_holder, Handle< Name > name, LookupResult *lookup)
Definition: stub-cache.cc:983
STATIC_ASSERT(sizeof(CPURegister)==sizeof(Register))
static const int kValueOffset
Definition: objects.h:1971
Print usage including flags
Definition: flags.cc:665
static void GenerateStoreViaSetter(MacroAssembler *masm, Handle< HeapType > type, Register receiver, Handle< JSFunction > setter)
static Handle< HeapType > CurrentTypeOf(Handle< Object > object, Isolate *isolate)
Definition: ic.cc:676
const Register r9
const int kPointerSize
Definition: globals.h:268
void check(i::Vector< const uint8_t > string)
Handle< Code > CompilePolymorphicIC(TypeHandleList *types, CodeHandleList *handlers, Handle< Name > name, Code::StubType type, IcCheckType check)
Operand FieldOperand(Register object, int offset)
const int kHeapObjectTag
Definition: v8.h:5473
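The FieldOperand and kHeapObjectTag entries above work together: V8 stores heap pointers with a tag in their low bits, so every field access must subtract the tag before adding the field's offset. A minimal sketch of that arithmetic, using stand-in constants rather than the real v8.h declarations:

// Sketch of V8-style pointer tagging (stand-in constants, not the real
// v8.h declarations). Heap pointers carry kHeapObjectTag (1) in their
// low kHeapObjectTagSize (2) bits; field offsets compensate by
// subtracting the tag, which is what FieldOperand(object, offset)
// produces on x64.
#include <cstdint>

const int kHeapObjectTag = 1;       // tag value marking a heap object
const int kHeapObjectTagSize = 2;   // number of low bits reserved
const intptr_t kHeapObjectTagMask = (1 << kHeapObjectTagSize) - 1;

bool HasHeapObjectTag(intptr_t value) {
  return (value & kHeapObjectTagMask) == kHeapObjectTag;
}

intptr_t FieldAddress(intptr_t tagged_object, int offset) {
  // Same idea as FieldOperand: address the field at offset - kHeapObjectTag.
  return tagged_object + offset - kHeapObjectTag;
}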
void GenerateLoadConstant(Handle< Object > value)
const Register rbx
DEFINE_bool(code_comments, ...): emit comments in code disassembly
#define __
static const int kInterceptorArgsThisIndex
Definition: stub-cache.h:205
static Handle< PropertyCell > EnsurePropertyCell(Handle< JSGlobalObject > global, Handle< Name > name)
Definition: objects.cc:14752
List< Handle< HeapType > > TypeHandleList
Definition: list.h:219
static const int kPropertiesOffset
Definition: objects.h:2755
static const int kReturnValueDefaultValueIndex
Definition: arguments.h:179
const Register rax
void GenerateNegativeHolderLookup(MacroAssembler *masm, Handle< JSObject > holder, Register holder_reg, Handle< Name > name, Label *miss)
const Register rdi
static const int kElementsOffset
Definition: objects.h:2756
Handle< Code > CompileLoadNonexistent(Handle< HeapType > type, Handle< JSObject > last, Handle< Name > name)
Handle< Code > CompileLoadGlobal(Handle< HeapType > type, Handle< GlobalObject > holder, Handle< PropertyCell > cell, Handle< Name > name, bool is_dont_delete)
static void GenerateLoadDictionaryElement(MacroAssembler *masm)
void GenerateLoadInterceptor(Register holder_reg, Handle< Object > object, Handle< JSObject > holder, LookupResult *lookup, Handle< Name > name)
static const int kLengthOffset
Definition: objects.h:10076
static void GenerateNegativeLookup(MacroAssembler *masm, Label *miss, Label *done, Register receiver, Register properties, Handle< Name > name, Register scratch0)
static const int kInterceptorArgsInfoIndex
Definition: stub-cache.h:204
static const int kHeaderSize
Definition: objects.h:3016
void GenerateLoadCallback(Register reg, Handle< ExecutableAccessorInfo > callback)
static Builtins::Name MissBuiltin(Code::Kind kind)
Definition: stub-cache.h:466
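MissBuiltin selects the generic miss handler a compiled stub jumps to when its fast-path checks fail. A simplified sketch of that kind-to-builtin dispatch, with stand-in enums in place of V8's real Code::Kind and Builtins::Name:

// Simplified stand-in enums, not V8's real Code::Kind / Builtins::Name.
enum class ICKind { kLoadIC, kStoreIC, kKeyedLoadIC, kKeyedStoreIC };
enum class MissHandler {
  kLoadIC_Miss, kStoreIC_Miss, kKeyedLoadIC_Miss, kKeyedStoreIC_Miss
};

// Map each IC kind to the builtin that handles its cache misses.
MissHandler MissBuiltinFor(ICKind kind) {
  switch (kind) {
    case ICKind::kLoadIC:       return MissHandler::kLoadIC_Miss;
    case ICKind::kStoreIC:      return MissHandler::kStoreIC_Miss;
    case ICKind::kKeyedLoadIC:  return MissHandler::kKeyedLoadIC_Miss;
    case ICKind::kKeyedStoreIC: return MissHandler::kKeyedStoreIC_Miss;
  }
  return MissHandler::kLoadIC_Miss;  // unreachable for valid kinds
}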
static const int kMapOffset
Definition: objects.h:1890
bool is(Register reg) const
List< Handle< Code > > CodeHandleList
Definition: list.h:220
Register CallbackHandlerFrontend(Handle< HeapType > type, Register object_reg, Handle< JSObject > holder, Handle< Name > name, Handle< Object > callback)
Handle< T > handle(T *t, Isolate *isolate)
Definition: handles.h:103
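The handle() helper wraps a raw heap pointer in a Handle<T> bound to an isolate, so the garbage collector can update the pointer when objects move. A simplified stand-in follows; V8's real Handle registers its slot with the isolate's current HandleScope rather than holding the pointer directly:

// Simplified stand-in for Handle<T> and handle(), not the real
// handles.h types. The real constructor allocates a slot in the
// isolate's current HandleScope so the GC can rewrite it.
class Isolate;  // opaque in this sketch

template <class T>
class Handle {
 public:
  Handle(T* object, Isolate* isolate) : location_(object) {
    static_cast<void>(isolate);  // real code registers with a HandleScope
  }
  T* operator->() const { return location_; }
  T* operator*() const { return location_; }

 private:
  T* location_;
};

template <class T>
inline Handle<T> handle(T* t, Isolate* isolate) {
  return Handle<T>(t, isolate);
}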
static const int kDataOffset
Definition: objects.h:10433
const Register kScratchRegister
void GenerateStoreField(MacroAssembler *masm, Handle< JSObject > object, LookupResult *lookup, Register receiver_reg, Register name_reg, Register value_reg, Register scratch1, Register scratch2, Label *miss_label)
friend class Isolate
Definition: stub-cache.h:280
const int kInt64Size
Definition: globals.h:265
static const int kHeaderSize
Definition: objects.h:5604
const Register r8
static Handle< T > null()
Definition: handles.h:80
const Register rcx
#define ASSERT_EQ(v1, v2)
Definition: checks.h:330
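A sketch of the shape of an ASSERT_EQ-style macro; the real checks.h version reports the source location and routes through V8's fatal-error machinery instead of fprintf/abort:

// Sketch only: evaluates both operands once each and aborts with the
// stringified expressions when they differ.
#include <cstdio>
#include <cstdlib>

#define ASSERT_EQ(v1, v2)                                         \
  do {                                                            \
    if ((v1) != (v2)) {                                           \
      std::fprintf(stderr, "Check failed: %s == %s\n", #v1, #v2); \
      std::abort();                                               \
    }                                                             \
  } while (false)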
void USE(T)
Definition: globals.h:341
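USE() exists to silence unused-variable and unused-result warnings: passing a value to a no-op template consumes it as far as the compiler is concerned. The globals.h definition is plausibly a one-liner along these lines:

// A no-op template: taking the argument by value "uses" it, which
// suppresses unused-variable/unused-result warnings at the call site.
template <typename T>
inline void USE(T) {}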
void NonexistentHandlerFrontend(Handle< HeapType > type, Handle< JSObject > last, Handle< Name > name)
Definition: stub-cache.cc:864
const int kHeapObjectTagSize
Definition: v8.h:5474
static const int kPrototypeOffset
Definition: objects.h:6427
static const int kFlagsNotUsedInLookup
Definition: objects.h:5684
const Register no_reg
Handle< Code > GetCode(Code::Kind kind, Code::StubType type, Handle< Name > name)
Definition: stub-cache.cc:1281
void GenerateStoreTransition(MacroAssembler *masm, Handle< JSObject > object, LookupResult *lookup, Handle< Map > transition, Handle< Name > name, Register receiver_reg, Register name_reg, Register value_reg, Register scratch1, Register scratch2, Register scratch3, Label *miss_label, Label *slow)
static const int kNativeContextOffset
Definition: objects.h:7611
Register HandlerFrontend(Handle< HeapType > type, Register object_reg, Handle< JSObject > holder, Handle< Name > name)
Definition: stub-cache.cc:850
Handle< Code > CompileStorePolymorphic(MapHandleList *receiver_maps, CodeHandleList *handler_stubs, MapHandleList *transitioned_maps)
static void GeneratePositiveLookup(MacroAssembler *masm, Label *miss, Label *done, Register elements, Register name, Register r0, Register r1)
static void GenerateLoadViaGetter(MacroAssembler *masm, Handle< HeapType > type, Register receiver, Handle< JSFunction > getter)
static const int kInterceptorArgsHolderIndex
Definition: stub-cache.h:206
name
Definition: flags.cc:505
Handle< Code > GetICCode(Code::Kind kind, Code::StubType type, Handle< Name > name, InlineCacheState state=MONOMORPHIC)
Definition: stub-cache.cc:1269
static JSObject * cast(Object *obj)
static Handle< Map > TypeToMap(HeapType *type, Isolate *isolate)
Definition: ic.cc:683
const XMMRegister xmm0
bool IncludesNumberType(TypeHandleList *types)
Definition: stub-cache.cc:842
static JSFunction * cast(Object *obj)