V8 3.14.5 (Node.js 0.10.28)
V8 is Google's open source JavaScript engine
 All Data Structures Namespaces Files Functions Variables Typedefs Enumerations Enumerator Friends Macros Pages
ic-x64.cc
Go to the documentation of this file.
1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are
4 // met:
5 //
6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided
11 // with the distribution.
12 // * Neither the name of Google Inc. nor the names of its
13 // contributors may be used to endorse or promote products derived
14 // from this software without specific prior written permission.
15 //
16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27 
28 #include "v8.h"
29 
30 #if defined(V8_TARGET_ARCH_X64)
31 
32 #include "codegen.h"
33 #include "ic-inl.h"
34 #include "runtime.h"
35 #include "stub-cache.h"
36 
37 namespace v8 {
38 namespace internal {
39 
40 // ----------------------------------------------------------------------------
41 // Static IC stub generators.
42 //
43 
44 #define __ ACCESS_MASM(masm)
45 
46 
47 static void GenerateGlobalInstanceTypeCheck(MacroAssembler* masm,
48  Register type,
49  Label* global_object) {
50  // Register usage:
51  // type: holds the receiver instance type on entry.
52  __ cmpb(type, Immediate(JS_GLOBAL_OBJECT_TYPE));
53  __ j(equal, global_object);
54  __ cmpb(type, Immediate(JS_BUILTINS_OBJECT_TYPE));
55  __ j(equal, global_object);
56  __ cmpb(type, Immediate(JS_GLOBAL_PROXY_TYPE));
57  __ j(equal, global_object);
58 }
59 
60 
61 // Generated code falls through if the receiver is a regular non-global
62 // JS object with slow properties and no interceptors.
// Emits a receiver check that falls through only for a regular, non-global
// JS object with slow (dictionary) properties and no interceptors; jumps to
// |miss| otherwise.  On fall-through r0 holds the property dictionary.
// NOTE(review): this doxygen listing drops several original source lines
// (embedded numbering jumps 77->79, 82->84, 87->89, 89->91) — presumably the
// instance-type load into r0, a STATIC_ASSERT, and the testb of the map bit
// field whose Immediate operand below is truncated.  Restore from upstream
// ic-x64.cc before compiling.
63 static void GenerateStringDictionaryReceiverCheck(MacroAssembler* masm,
64  Register receiver,
65  Register r0,
66  Register r1,
67  Label* miss) {
68  // Register usage:
69  // receiver: holds the receiver on entry and is unchanged.
70  // r0: used to hold receiver instance type.
71  // Holds the property dictionary on fall through.
72  // r1: used to hold receivers map.
73 
74  __ JumpIfSmi(receiver, miss);
75 
76  // Check that the receiver is a valid JS object.
77  __ movq(r1, FieldOperand(receiver, HeapObject::kMapOffset));
79  __ cmpb(r0, Immediate(FIRST_SPEC_OBJECT_TYPE));
80  __ j(below, miss);
81 
82  // If this assert fails, we have to check upper bound too.
84 
85  GenerateGlobalInstanceTypeCheck(masm, r0, miss);
86 
87  // Check for non-global object that requires access check.
89  Immediate((1 << Map::kIsAccessCheckNeeded) |
91  __ j(not_zero, miss);
92 
93  __ movq(r0, FieldOperand(receiver, JSObject::kPropertiesOffset));
94  __ CompareRoot(FieldOperand(r0, HeapObject::kMapOffset),
95  Heap::kHashTableMapRootIndex);
96  __ j(not_equal, miss);
97 }
98 
99 
100 
101 // Helper function used to load a property from a dictionary backing storage.
102 // This function may return false negatives, so miss_label
103 // must always call a backup property load that is complete.
104 // This function is safe to call if name is not a symbol, and will jump to
105 // the miss_label in that case.
106 // The generated code assumes that the receiver has slow properties,
107 // is not a global object and does not have interceptors.
// Emits a probe of a string-dictionary backing store and, on a hit with a
// NORMAL property, loads the value into |result|; jumps to |miss_label| on
// failure or non-normal property details.
// NOTE(review): the listing skips original lines 130 and 143-144 — likely
// the StringDictionaryLookupStub::GeneratePositiveLookup call whose argument
// list dangles below, and the initializer of kElementsStartOffset.  Restore
// from upstream ic-x64.cc.
108 static void GenerateDictionaryLoad(MacroAssembler* masm,
109  Label* miss_label,
110  Register elements,
111  Register name,
112  Register r0,
113  Register r1,
114  Register result) {
115  // Register use:
116  //
117  // elements - holds the property dictionary on entry and is unchanged.
118  //
119  // name - holds the name of the property on entry and is unchanged.
120  //
121  // r0 - used to hold the capacity of the property dictionary.
122  //
123  // r1 - used to hold the index into the property dictionary.
124  //
125  // result - holds the result on exit if the load succeeded.
126 
127  Label done;
128 
129  // Probe the dictionary.
131  miss_label,
132  &done,
133  elements,
134  name,
135  r0,
136  r1);
137 
138  // If probing finds an entry in the dictionary, r1 contains the
139  // index into the dictionary. Check that the value is a normal
140  // property.
141  __ bind(&done);
142  const int kElementsStartOffset =
145  const int kDetailsOffset = kElementsStartOffset + 2 * kPointerSize;
146  __ Test(Operand(elements, r1, times_pointer_size,
147  kDetailsOffset - kHeapObjectTag),
148  Smi::FromInt(PropertyDetails::TypeField::kMask));
149  __ j(not_zero, miss_label);
150 
151  // Get the value at the masked, scaled index.
152  const int kValueOffset = kElementsStartOffset + kPointerSize;
153  __ movq(result,
154  Operand(elements, r1, times_pointer_size,
155  kValueOffset - kHeapObjectTag));
156 }
157 
158 
159 // Helper function used to store a property to a dictionary backing
160 // storage. This function may fail to store a property even though it
161 // is in the dictionary, so code at miss_label must always call a
162 // backup property store that is complete. This function is safe to
163 // call if name is not a symbol, and will jump to the miss_label in
164 // that case. The generated code assumes that the receiver has slow
165 // properties, is not a global object and does not have interceptors.
// Emits a probe of a string-dictionary backing store and, on a hit with a
// writable NORMAL property, stores |value| and updates the write barrier;
// jumps to |miss_label| otherwise.
// NOTE(review): the listing skips original lines 187, 200-201, 208 and 217 —
// likely the positive-lookup stub call, the kElementsStartOffset initializer
// and the times_pointer_size scale operands in the two Operand expressions
// below.  Restore from upstream ic-x64.cc.
166 static void GenerateDictionaryStore(MacroAssembler* masm,
167  Label* miss_label,
168  Register elements,
169  Register name,
170  Register value,
171  Register scratch0,
172  Register scratch1) {
173  // Register use:
174  //
175  // elements - holds the property dictionary on entry and is clobbered.
176  //
177  // name - holds the name of the property on entry and is unchanged.
178  //
179  // value - holds the value to store and is unchanged.
180  //
181  // scratch0 - used during the positive dictionary lookup and is clobbered.
182  //
183  // scratch1 - used for index into the property dictionary and is clobbered.
184  Label done;
185 
186  // Probe the dictionary.
188  miss_label,
189  &done,
190  elements,
191  name,
192  scratch0,
193  scratch1);
194 
195  // If probing finds an entry in the dictionary, scratch0 contains the
196  // index into the dictionary. Check that the value is a normal
197  // property that is not read only.
198  __ bind(&done);
199  const int kElementsStartOffset =
202  const int kDetailsOffset = kElementsStartOffset + 2 * kPointerSize;
203  const int kTypeAndReadOnlyMask =
204  (PropertyDetails::TypeField::kMask |
205  PropertyDetails::AttributesField::encode(READ_ONLY)) << kSmiTagSize;
206  __ Test(Operand(elements,
207  scratch1,
209  kDetailsOffset - kHeapObjectTag),
210  Smi::FromInt(kTypeAndReadOnlyMask));
211  __ j(not_zero, miss_label);
212 
213  // Store the value at the masked, scaled index.
214  const int kValueOffset = kElementsStartOffset + kPointerSize;
215  __ lea(scratch1, Operand(elements,
216  scratch1,
218  kValueOffset - kHeapObjectTag));
219  __ movq(Operand(scratch1, 0), value);
220 
221  // Update write barrier. Make sure not to clobber the value.
222  __ movq(scratch0, value);
223  __ RecordWrite(elements, scratch1, scratch0, kDontSaveFPRegs);
224 }
225 
226 
// Emits the LoadIC stub for "array.length": delegates the fast path to
// StubCompiler::GenerateLoadArrayLength and falls back to the generic
// LOAD_IC miss handler.  Emission order is the generated-code order.
227 void LoadIC::GenerateArrayLength(MacroAssembler* masm) {
228  // ----------- S t a t e -------------
229  // -- rax : receiver
230  // -- rcx : name
231  // -- rsp[0] : return address
232  // -----------------------------------
233  Label miss;
234 
235  StubCompiler::GenerateLoadArrayLength(masm, rax, rdx, &miss);
236  __ bind(&miss);
237  StubCompiler::GenerateLoadMiss(masm, Code::LOAD_IC);
238 }
239 
240 
// Emits the LoadIC stub for "string.length".  |support_wrappers| is passed
// through to the stub compiler (presumably enabling JSValue-wrapped string
// receivers — confirm against GenerateLoadStringLength's contract).
241 void LoadIC::GenerateStringLength(MacroAssembler* masm, bool support_wrappers) {
242  // ----------- S t a t e -------------
243  // -- rax : receiver
244  // -- rcx : name
245  // -- rsp[0] : return address
246  // -----------------------------------
247  Label miss;
248 
249  StubCompiler::GenerateLoadStringLength(masm, rax, rdx, rbx, &miss,
250  support_wrappers);
251  __ bind(&miss);
252  StubCompiler::GenerateLoadMiss(masm, Code::LOAD_IC);
253 }
254 
255 
// Emits the LoadIC stub for "function.prototype": fast path via the stub
// compiler, generic LOAD_IC miss handler otherwise.
256 void LoadIC::GenerateFunctionPrototype(MacroAssembler* masm) {
257  // ----------- S t a t e -------------
258  // -- rax : receiver
259  // -- rcx : name
260  // -- rsp[0] : return address
261  // -----------------------------------
262  Label miss;
263 
264  StubCompiler::GenerateLoadFunctionPrototype(masm, rax, rdx, rbx, &miss);
265  __ bind(&miss);
266  StubCompiler::GenerateLoadMiss(masm, Code::LOAD_IC);
267 }
268 
269 
270 // Checks the receiver for special cases (value type, slow case bits).
271 // Falls through for regular JS object.
// Emits receiver checks for keyed loads: jumps to |slow| for smis, JSValue
// wrappers, access-checked objects, or objects with the given interceptor
// bit set; falls through for a regular JS object with |map| holding its map.
// NOTE(review): the listing skips original lines 289 and 294 — likely a
// STATIC_ASSERT and the testb of the map bit field whose Immediate operand
// below is truncated.  Restore from upstream ic-x64.cc.
272 static void GenerateKeyedLoadReceiverCheck(MacroAssembler* masm,
273  Register receiver,
274  Register map,
275  int interceptor_bit,
276  Label* slow) {
277  // Register use:
278  // receiver - holds the receiver and is unchanged.
279  // Scratch registers:
280  // map - used to hold the map of the receiver.
281 
282  // Check that the object isn't a smi.
283  __ JumpIfSmi(receiver, slow);
284 
285  // Check that the object is some kind of JS object EXCEPT JS Value type.
286  // In the case that the object is a value-wrapper object,
287  // we enter the runtime system to make sure that indexing
288  // into string objects work as intended.
290  __ CmpObjectType(receiver, JS_OBJECT_TYPE, map);
291  __ j(below, slow);
292 
293  // Check bit field.
295  Immediate((1 << Map::kIsAccessCheckNeeded) |
296  (1 << interceptor_bit)));
297  __ j(not_zero, slow);
298 }
299 
300 
301 // Loads an indexed element from a fast case array.
302 // If not_fast_array is NULL, doesn't perform the elements map check.
// Emits a fast-case indexed load from a FixedArray backing store, with
// bounds check and the-hole detection.  If |not_fast_array| is NULL the
// elements-map check is replaced by an assert.
// NOTE(review): the listing skips original line 348 — likely the
// "FixedArray::kHeaderSize - kHeapObjectTag));" displacement that closes the
// FieldOperand below.  Restore from upstream ic-x64.cc.
303 static void GenerateFastArrayLoad(MacroAssembler* masm,
304  Register receiver,
305  Register key,
306  Register elements,
307  Register scratch,
308  Register result,
309  Label* not_fast_array,
310  Label* out_of_range) {
311  // Register use:
312  //
313  // receiver - holds the receiver on entry.
314  // Unchanged unless 'result' is the same register.
315  //
316  // key - holds the smi key on entry.
317  // Unchanged unless 'result' is the same register.
318  //
319  // elements - holds the elements of the receiver on exit.
320  //
321  // result - holds the result on exit if the load succeeded.
322  // Allowed to be the the same as 'receiver' or 'key'.
323  // Unchanged on bailout so 'receiver' and 'key' can be safely
324  // used by further computation.
325  //
326  // Scratch registers:
327  //
328  // scratch - used to hold elements of the receiver and the loaded value.
329 
330  __ movq(elements, FieldOperand(receiver, JSObject::kElementsOffset));
331  if (not_fast_array != NULL) {
332  // Check that the object is in fast mode and writable.
333  __ CompareRoot(FieldOperand(elements, HeapObject::kMapOffset),
334  Heap::kFixedArrayMapRootIndex);
335  __ j(not_equal, not_fast_array);
336  } else {
337  __ AssertFastElements(elements);
338  }
339  // Check that the key (index) is within bounds.
340  __ SmiCompare(key, FieldOperand(elements, FixedArray::kLengthOffset));
341  // Unsigned comparison rejects negative indices.
342  __ j(above_equal, out_of_range);
343  // Fast case: Do the load.
344  SmiIndex index = masm->SmiToIndex(scratch, key, kPointerSizeLog2);
345  __ movq(scratch, FieldOperand(elements,
346  index.reg,
347  index.scale,
349  __ CompareRoot(scratch, Heap::kTheHoleValueRootIndex);
350  // In case the loaded value is the_hole we have to consult GetProperty
351  // to ensure the prototype chain is searched.
352  __ j(equal, out_of_range);
353  if (!result.is(scratch)) {
354  __ movq(result, scratch);
355  }
356 }
357 
358 
359 // Checks whether a key is an array index string or a symbol string.
360 // Falls through if the key is a symbol.
// Emits a classification of a non-smi key: jumps to |index_string| for an
// array-index string (cached numeric value left in |hash|), to |not_symbol|
// for non-strings and non-symbol strings, and falls through for symbols.
// NOTE(review): the listing skips original lines 380-381 — likely the testb
// of the instance-type field that the dangling Immediate(kIsSymbolMask)
// below belongs to.  Restore from upstream ic-x64.cc.
361 static void GenerateKeyStringCheck(MacroAssembler* masm,
362  Register key,
363  Register map,
364  Register hash,
365  Label* index_string,
366  Label* not_symbol) {
367  // Register use:
368  // key - holds the key and is unchanged. Assumed to be non-smi.
369  // Scratch registers:
370  // map - used to hold the map of the key.
371  // hash - used to hold the hash of the key.
372  __ CmpObjectType(key, FIRST_NONSTRING_TYPE, map);
373  __ j(above_equal, not_symbol);
374  // Is the string an array index, with cached numeric value?
375  __ movl(hash, FieldOperand(key, String::kHashFieldOffset));
376  __ testl(hash, Immediate(String::kContainsCachedArrayIndexMask));
377  __ j(zero, index_string); // The value in hash is used at jump target.
378 
379  // Is the string a symbol?
382  Immediate(kIsSymbolMask));
383  __ j(zero, not_symbol);
384 }
385 
386 
387 
// Emits the generic keyed-load stub: smi keys take the fast FixedArray path
// or the number-dictionary path; string keys go through the keyed lookup
// cache, then the receiver's property dictionary; array-index strings are
// converted to smis and re-dispatched; everything else falls to runtime.
// NOTE(review): the listing drops many original lines (numbering jumps at
// 423, 430, 441, 451-452, 458, 461, 464, 506, 516, 518, 524-526, 537-538),
// so several instructions below are truncated — e.g. loads of the elements
// array/map, the cache-capacity mask computation feeding 'mask', and the
// in-object/property-array load sequences.  Restore from upstream ic-x64.cc
// before compiling.
388 void KeyedLoadIC::GenerateGeneric(MacroAssembler* masm) {
389  // ----------- S t a t e -------------
390  // -- rax : key
391  // -- rdx : receiver
392  // -- rsp[0] : return address
393  // -----------------------------------
394  Label slow, check_string, index_smi, index_string, property_array_property;
395  Label probe_dictionary, check_number_dictionary;
396 
397  // Check that the key is a smi.
398  __ JumpIfNotSmi(rax, &check_string);
399  __ bind(&index_smi);
400  // Now the key is known to be a smi. This place is also jumped to from below
401  // where a numeric string is converted to a smi.
402 
403  GenerateKeyedLoadReceiverCheck(
404  masm, rdx, rcx, Map::kHasIndexedInterceptor, &slow);
405 
406  // Check the receiver's map to see if it has fast elements.
407  __ CheckFastElements(rcx, &check_number_dictionary);
408 
409  GenerateFastArrayLoad(masm,
410  rdx,
411  rax,
412  rcx,
413  rbx,
414  rax,
415  NULL,
416  &slow);
417  Counters* counters = masm->isolate()->counters();
418  __ IncrementCounter(counters->keyed_load_generic_smi(), 1);
419  __ ret(0);
420 
421  __ bind(&check_number_dictionary);
422  __ SmiToInteger32(rbx, rax);
424 
425  // Check whether the elements is a number dictionary.
426  // rdx: receiver
427  // rax: key
428  // rbx: key as untagged int32
429  // rcx: elements
431  Heap::kHashTableMapRootIndex);
432  __ j(not_equal, &slow);
433  __ LoadFromNumberDictionary(&slow, rcx, rax, rbx, r9, rdi, rax);
434  __ ret(0);
435 
436  __ bind(&slow);
437  // Slow case: Jump to runtime.
438  // rdx: receiver
439  // rax: key
440  __ IncrementCounter(counters->keyed_load_generic_slow(), 1);
442 
443  __ bind(&check_string);
444  GenerateKeyStringCheck(masm, rax, rcx, rbx, &index_string, &slow);
445 
446  GenerateKeyedLoadReceiverCheck(
447  masm, rdx, rcx, Map::kHasNamedInterceptor, &slow);
448 
449  // If the receiver is a fast-case object, check the keyed lookup
450  // cache. Otherwise probe the dictionary leaving result in rcx.
453  Heap::kHashTableMapRootIndex);
454  __ j(equal, &probe_dictionary);
455 
456  // Load the map of the receiver, compute the keyed lookup cache hash
457  // based on 32 bits of the map pointer and the string hash.
459  __ movl(rcx, rbx);
460  __ shr(rcx, Immediate(KeyedLookupCache::kMapHashShift));
462  __ shr(rdi, Immediate(String::kHashShift));
463  __ xor_(rcx, rdi);
465  __ and_(rcx, Immediate(mask));
466 
467  // Load the key (consisting of map and symbol) from the cache and
468  // check for match.
469  Label load_in_object_property;
470  static const int kEntriesPerBucket = KeyedLookupCache::kEntriesPerBucket;
471  Label hit_on_nth_entry[kEntriesPerBucket];
472  ExternalReference cache_keys
473  = ExternalReference::keyed_lookup_cache_keys(masm->isolate());
474 
475  for (int i = 0; i < kEntriesPerBucket - 1; i++) {
476  Label try_next_entry;
477  __ movq(rdi, rcx);
478  __ shl(rdi, Immediate(kPointerSizeLog2 + 1));
479  __ LoadAddress(kScratchRegister, cache_keys);
480  int off = kPointerSize * i * 2;
481  __ cmpq(rbx, Operand(kScratchRegister, rdi, times_1, off));
482  __ j(not_equal, &try_next_entry);
483  __ cmpq(rax, Operand(kScratchRegister, rdi, times_1, off + kPointerSize));
484  __ j(equal, &hit_on_nth_entry[i]);
485  __ bind(&try_next_entry);
486  }
487 
488  int off = kPointerSize * (kEntriesPerBucket - 1) * 2;
489  __ cmpq(rbx, Operand(kScratchRegister, rdi, times_1, off));
490  __ j(not_equal, &slow);
491  __ cmpq(rax, Operand(kScratchRegister, rdi, times_1, off + kPointerSize));
492  __ j(not_equal, &slow);
493 
494  // Get field offset, which is a 32-bit integer.
495  ExternalReference cache_field_offsets
496  = ExternalReference::keyed_lookup_cache_field_offsets(masm->isolate());
497 
498  // Hit on nth entry.
499  for (int i = kEntriesPerBucket - 1; i >= 0; i--) {
500  __ bind(&hit_on_nth_entry[i]);
501  if (i != 0) {
502  __ addl(rcx, Immediate(i));
503  }
504  __ LoadAddress(kScratchRegister, cache_field_offsets);
505  __ movl(rdi, Operand(kScratchRegister, rcx, times_4, 0));
507  __ subq(rdi, rcx);
508  __ j(above_equal, &property_array_property);
509  if (i != 0) {
510  __ jmp(&load_in_object_property);
511  }
512  }
513 
514  // Load in-object property.
515  __ bind(&load_in_object_property);
517  __ addq(rcx, rdi);
519  __ IncrementCounter(counters->keyed_load_generic_lookup_cache(), 1);
520  __ ret(0);
521 
522  // Load property array property.
523  __ bind(&property_array_property);
527  __ IncrementCounter(counters->keyed_load_generic_lookup_cache(), 1);
528  __ ret(0);
529 
530  // Do a quick inline probe of the receiver's dictionary, if it
531  // exists.
532  __ bind(&probe_dictionary);
533  // rdx: receiver
534  // rax: key
535  // rbx: elements
536 
539  GenerateGlobalInstanceTypeCheck(masm, rcx, &slow);
540 
541  GenerateDictionaryLoad(masm, &slow, rbx, rax, rcx, rdi, rax);
542  __ IncrementCounter(counters->keyed_load_generic_symbol(), 1);
543  __ ret(0);
544 
545  __ bind(&index_string);
546  __ IndexFromHash(rbx, rax);
547  __ jmp(&index_smi);
548 }
549 
550 
// Emits the keyed-load stub specialized for string receivers: delegates
// char-at to StringCharAtGenerator's fast path, with a slow-call helper,
// and misses for non-string / non-number / out-of-range cases.
// NOTE(review): the listing skips original line 571 — likely the closing
// "STRING_INDEX_IS_ARRAY_INDEX);" argument of the generator constructor.
// Restore from upstream ic-x64.cc.
551 void KeyedLoadIC::GenerateString(MacroAssembler* masm) {
552  // ----------- S t a t e -------------
553  // -- rax : key
554  // -- rdx : receiver
555  // -- rsp[0] : return address
556  // -----------------------------------
557  Label miss;
558 
559  Register receiver = rdx;
560  Register index = rax;
561  Register scratch = rcx;
562  Register result = rax;
563 
564  StringCharAtGenerator char_at_generator(receiver,
565  index,
566  scratch,
567  result,
568  &miss, // When not a string.
569  &miss, // When not a number.
570  &miss, // When index out of range.
572  char_at_generator.GenerateFast(masm);
573  __ ret(0);
574 
575  StubRuntimeCallHelper call_helper;
576  char_at_generator.GenerateSlow(masm, call_helper);
577 
578  __ bind(&miss);
579  GenerateMiss(masm, false);
580 }
581 
582 
// Emits the keyed-load stub for receivers with an indexed interceptor and no
// access checks: rearranges the stack and tail-calls the interceptor runtime
// entry; anything else goes to the miss handler.
// NOTE(review): the listing skips original lines 595, 599 and 603 — likely a
// STATIC_ASSERT, the load of the receiver's map, and the movb that loads the
// map's bit field into rcx before the andb below.  Restore from upstream
// ic-x64.cc.
583 void KeyedLoadIC::GenerateIndexedInterceptor(MacroAssembler* masm) {
584  // ----------- S t a t e -------------
585  // -- rax : key
586  // -- rdx : receiver
587  // -- rsp[0] : return address
588  // -----------------------------------
589  Label slow;
590 
591  // Check that the receiver isn't a smi.
592  __ JumpIfSmi(rdx, &slow);
593 
594  // Check that the key is an array index, that is Uint32.
596  __ JumpUnlessNonNegativeSmi(rax, &slow);
597 
598  // Get the map of the receiver.
600 
601  // Check that it has indexed interceptor and access checks
602  // are not enabled for this object.
604  __ andb(rcx, Immediate(kSlowCaseBitFieldMask));
605  __ cmpb(rcx, Immediate(1 << Map::kHasIndexedInterceptor));
606  __ j(not_zero, &slow);
607 
608  // Everything is fine, call runtime.
609  __ pop(rcx);
610  __ push(rdx); // receiver
611  __ push(rax); // key
612  __ push(rcx); // return address
613 
614  // Perform tail call to the entry.
615  __ TailCallExternalReference(
616  ExternalReference(IC_Utility(kKeyedLoadPropertyWithInterceptor),
617  masm->isolate()),
618  2,
619  1);
620 
621  __ bind(&slow);
622  GenerateMiss(masm, false);
623 }
624 
625 
// Emits the shared fast-path bodies for the generic keyed store: smi stores,
// object stores with write barrier, double stores, and the elements-kind
// transitions (FAST_SMI -> FAST_DOUBLE / FAST, FAST_DOUBLE -> FAST).
// Bound at the |fast_object| / |fast_double| labels supplied by the caller;
// |check_map| / |increment_length| select the grow vs. in-bounds variants.
// NOTE(review): the listing drops many original lines (numbering jumps at
// 644, 657, 672, 676, 698, 701, 708, 712-713, 719, 723-724, 731, 733,
// 737-738) — e.g. the elements-map load into rdi, the movq stores whose
// dangling "rax);" arguments remain, the RecordWriteArray argument list, and
// the trailing arguments of the three LoadTransitionedArrayMapConditional
// calls plus the elements-loading that follows each transition.  Restore
// from upstream ic-x64.cc before compiling.
626 static void KeyedStoreGenerateGenericHelper(
627  MacroAssembler* masm,
628  Label* fast_object,
629  Label* fast_double,
630  Label* slow,
631  KeyedStoreCheckMap check_map,
632  KeyedStoreIncrementLength increment_length) {
633  Label transition_smi_elements;
634  Label finish_object_store, non_double_value, transition_double_elements;
635  Label fast_double_without_map_check;
636  // Fast case: Do the store, could be either Object or double.
637  __ bind(fast_object);
638  // rax: value
639  // rbx: receiver's elements array (a FixedArray)
640  // rcx: index
641  // rdx: receiver (a JSArray)
642  // r9: map of receiver
643  if (check_map == kCheckMap) {
645  __ CompareRoot(rdi, Heap::kFixedArrayMapRootIndex);
646  __ j(not_equal, fast_double);
647  }
648  // Smi stores don't require further checks.
649  Label non_smi_value;
650  __ JumpIfNotSmi(rax, &non_smi_value);
651  if (increment_length == kIncrementLength) {
652  // Add 1 to receiver->length.
653  __ leal(rdi, Operand(rcx, 1));
654  __ Integer32ToSmiField(FieldOperand(rdx, JSArray::kLengthOffset), rdi);
655  }
656  // It's irrelevant whether array is smi-only or not when writing a smi.
658  rax);
659  __ ret(0);
660 
661  __ bind(&non_smi_value);
662  // Writing a non-smi, check whether array allows non-smi elements.
663  // r9: receiver's map
664  __ CheckFastObjectElements(r9, &transition_smi_elements);
665 
666  __ bind(&finish_object_store);
667  if (increment_length == kIncrementLength) {
668  // Add 1 to receiver->length.
669  __ leal(rdi, Operand(rcx, 1));
670  __ Integer32ToSmiField(FieldOperand(rdx, JSArray::kLengthOffset), rdi);
671  }
673  rax);
674  __ movq(rdx, rax); // Preserve the value which is returned.
675  __ RecordWriteArray(
677  __ ret(0);
678 
679  __ bind(fast_double);
680  if (check_map == kCheckMap) {
681  // Check for fast double array case. If this fails, call through to the
682  // runtime.
683  // rdi: elements array's map
684  __ CompareRoot(rdi, Heap::kFixedDoubleArrayMapRootIndex);
685  __ j(not_equal, slow);
686  }
687  __ bind(&fast_double_without_map_check);
688  __ StoreNumberToDoubleElements(rax, rbx, rcx, xmm0,
689  &transition_double_elements);
690  if (increment_length == kIncrementLength) {
691  // Add 1 to receiver->length.
692  __ leal(rdi, Operand(rcx, 1));
693  __ Integer32ToSmiField(FieldOperand(rdx, JSArray::kLengthOffset), rdi);
694  }
695  __ ret(0);
696 
697  __ bind(&transition_smi_elements);
699 
700  // Transition the array appropriately depending on the value type.
702  __ CompareRoot(r9, Heap::kHeapNumberMapRootIndex);
703  __ j(not_equal, &non_double_value);
704 
705  // Value is a double. Transition FAST_SMI_ELEMENTS ->
706  // FAST_DOUBLE_ELEMENTS and complete the store.
707  __ LoadTransitionedArrayMapConditional(FAST_SMI_ELEMENTS,
709  rbx,
710  rdi,
711  slow);
714  __ jmp(&fast_double_without_map_check);
715 
716  __ bind(&non_double_value);
717  // Value is not a double, FAST_SMI_ELEMENTS -> FAST_ELEMENTS
718  __ LoadTransitionedArrayMapConditional(FAST_SMI_ELEMENTS,
720  rbx,
721  rdi,
722  slow);
725  __ jmp(&finish_object_store);
726 
727  __ bind(&transition_double_elements);
728  // Elements are FAST_DOUBLE_ELEMENTS, but value is an Object that's not a
729  // HeapNumber. Make sure that the receiver is a Array with FAST_ELEMENTS and
730  // transition array from FAST_DOUBLE_ELEMENTS to FAST_ELEMENTS
732  __ LoadTransitionedArrayMapConditional(FAST_DOUBLE_ELEMENTS,
734  rbx,
735  rdi,
736  slow);
739  __ jmp(&finish_object_store);
740 }
741 
742 
// Emits the generic keyed-store stub: checks receiver and smi key, splits
// JSArray receivers (with grow-by-one "extra capacity" handling) from plain
// JSObjects, and dispatches to KeyedStoreGenerateGenericHelper for the
// in-bounds and grow fast paths; everything else calls the runtime setter.
// NOTE(review): the listing drops several original lines (numbering jumps
// at 758, 761, 778, 806, 824, 832, 834) — e.g. the map load into r9, the
// testb of the bit field, the elements loads into rbx, the elements-map load
// into rdi, and the trailing arguments of the two helper invocations at the
// bottom.  Restore from upstream ic-x64.cc before compiling.
743 void KeyedStoreIC::GenerateGeneric(MacroAssembler* masm,
744  StrictModeFlag strict_mode) {
745  // ----------- S t a t e -------------
746  // -- rax : value
747  // -- rcx : key
748  // -- rdx : receiver
749  // -- rsp[0] : return address
750  // -----------------------------------
751  Label slow, slow_with_tagged_index, fast_object, fast_object_grow;
752  Label fast_double, fast_double_grow;
753  Label array, extra, check_if_double_array;
754 
755  // Check that the object isn't a smi.
756  __ JumpIfSmi(rdx, &slow_with_tagged_index);
757  // Get the map from the receiver.
759  // Check that the receiver does not require access checks. We need
760  // to do this because this generic stub does not perform map checks.
762  Immediate(1 << Map::kIsAccessCheckNeeded));
763  __ j(not_zero, &slow_with_tagged_index);
764  // Check that the key is a smi.
765  __ JumpIfNotSmi(rcx, &slow_with_tagged_index);
766  __ SmiToInteger32(rcx, rcx);
767 
768  __ CmpInstanceType(r9, JS_ARRAY_TYPE);
769  __ j(equal, &array);
770  // Check that the object is some kind of JSObject.
771  __ CmpInstanceType(r9, FIRST_JS_OBJECT_TYPE);
772  __ j(below, &slow);
773 
774  // Object case: Check key against length in the elements array.
775  // rax: value
776  // rdx: JSObject
777  // rcx: index
779  // Check array bounds.
780  __ SmiCompareInteger32(FieldOperand(rbx, FixedArray::kLengthOffset), rcx);
781  // rax: value
782  // rbx: FixedArray
783  // rcx: index
784  __ j(above, &fast_object);
785 
786  // Slow case: call runtime.
787  __ bind(&slow);
788  __ Integer32ToSmi(rcx, rcx);
789  __ bind(&slow_with_tagged_index);
790  GenerateRuntimeSetProperty(masm, strict_mode);
791  // Never returns to here.
792 
793  // Extra capacity case: Check if there is extra capacity to
794  // perform the store and update the length. Used for adding one
795  // element to the array by writing to array[array.length].
796  __ bind(&extra);
797  // rax: value
798  // rdx: receiver (a JSArray)
799  // rbx: receiver's elements array (a FixedArray)
800  // rcx: index
801  // flags: smicompare (rdx.length(), rbx)
802  __ j(not_equal, &slow); // do not leave holes in the array
803  __ SmiCompareInteger32(FieldOperand(rbx, FixedArray::kLengthOffset), rcx);
804  __ j(below_equal, &slow);
805  // Increment index to get new length.
807  __ CompareRoot(rdi, Heap::kFixedArrayMapRootIndex);
808  __ j(not_equal, &check_if_double_array);
809  __ jmp(&fast_object_grow);
810 
811  __ bind(&check_if_double_array);
812  // rdi: elements array's map
813  __ CompareRoot(rdi, Heap::kFixedDoubleArrayMapRootIndex);
814  __ j(not_equal, &slow);
815  __ jmp(&fast_double_grow);
816 
817  // Array case: Get the length and the elements array from the JS
818  // array. Check that the array is in fast mode (and writable); if it
819  // is the length is always a smi.
820  __ bind(&array);
821  // rax: value
822  // rdx: receiver (a JSArray)
823  // rcx: index
825 
826  // Check the key against the length in the array, compute the
827  // address to store into and fall through to fast case.
828  __ SmiCompareInteger32(FieldOperand(rdx, JSArray::kLengthOffset), rcx);
829  __ j(below_equal, &extra);
830 
831  KeyedStoreGenerateGenericHelper(masm, &fast_object, &fast_double,
833  KeyedStoreGenerateGenericHelper(masm, &fast_object_grow, &fast_double_grow,
835 }
836 
837 
838 // The generated code does not accept smi keys.
839 // The generated code falls through if both probes miss.
// Emits a monomorphic stub-cache probe for a call IC: probes with the
// receiver first, then — for number, string and boolean value receivers —
// loads the corresponding JSValue prototype and probes again.  Falls
// through on a complete miss.
// NOTE(review): the listing skips original lines 851, 870, 878 and 889 —
// likely the Code::Flags computation that the dangling argument list below
// belongs to, and the argument lists of the three
// GenerateLoadGlobalFunctionPrototype calls.  Restore from upstream
// ic-x64.cc.
840 void CallICBase::GenerateMonomorphicCacheProbe(MacroAssembler* masm,
841  int argc,
842  Code::Kind kind,
843  Code::ExtraICState extra_state) {
844  // ----------- S t a t e -------------
845  // rcx : function name
846  // rdx : receiver
847  // -----------------------------------
848  Label number, non_number, non_string, boolean, probe, miss;
849 
850  // Probe the stub cache.
852  MONOMORPHIC,
853  extra_state,
854  Code::NORMAL,
855  argc);
856  Isolate::Current()->stub_cache()->GenerateProbe(masm, flags, rdx, rcx, rbx,
857  rax);
858 
859  // If the stub cache probing failed, the receiver might be a value.
860  // For value objects, we use the map of the prototype objects for
861  // the corresponding JSValue for the cache and that is what we need
862  // to probe.
863  //
864  // Check for number.
865  __ JumpIfSmi(rdx, &number);
866  __ CmpObjectType(rdx, HEAP_NUMBER_TYPE, rbx);
867  __ j(not_equal, &non_number);
868  __ bind(&number);
869  StubCompiler::GenerateLoadGlobalFunctionPrototype(
871  __ jmp(&probe);
872 
873  // Check for string.
874  __ bind(&non_number);
875  __ CmpInstanceType(rbx, FIRST_NONSTRING_TYPE);
876  __ j(above_equal, &non_string);
877  StubCompiler::GenerateLoadGlobalFunctionPrototype(
879  __ jmp(&probe);
880 
881  // Check for boolean.
882  __ bind(&non_string);
883  __ CompareRoot(rdx, Heap::kTrueValueRootIndex);
884  __ j(equal, &boolean);
885  __ CompareRoot(rdx, Heap::kFalseValueRootIndex);
886  __ j(not_equal, &miss);
887  __ bind(&boolean);
888  StubCompiler::GenerateLoadGlobalFunctionPrototype(
890 
891  // Probe the stub cache for the value object.
892  __ bind(&probe);
893  Isolate::Current()->stub_cache()->GenerateProbe(masm, flags, rdx, rcx, rbx,
894  no_reg);
895 
896  __ bind(&miss);
897 }
898 
899 
// Emits a tail call to the JS function in rdi with |argc| arguments; jumps
// to |miss| if rdi is a smi or not a JSFunction.  rdx is clobbered by the
// type check.
900 static void GenerateFunctionTailCall(MacroAssembler* masm,
901  int argc,
902  Label* miss) {
903  // ----------- S t a t e -------------
904  // rcx : function name
905  // rdi : function
906  // rsp[0] : return address
907  // rsp[8] : argument argc
908  // rsp[16] : argument argc - 1
909  // ...
910  // rsp[argc * 8] : argument 1
911  // rsp[(argc + 1) * 8] : argument 0 = receiver
912  // -----------------------------------
913  __ JumpIfSmi(rdi, miss);
914  // Check that the value is a JavaScript function.
915  __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rdx);
916  __ j(not_equal, miss);
917 
918  // Invoke the function.
919  ParameterCount actual(argc);
920  __ InvokeFunction(rdi, actual, JUMP_FUNCTION,
921  NullCallWrapper(), CALL_AS_METHOD);
922 }
923 
924 
925 // The generated code falls through if the call should be handled by runtime.
// Emits the normal-mode call IC: looks the callee up in the receiver's
// property dictionary and tail-calls it; falls through at the bound miss
// label so the caller can append the runtime fallback.
926 void CallICBase::GenerateNormal(MacroAssembler* masm, int argc) {
927  // ----------- S t a t e -------------
928  // rcx : function name
929  // rsp[0] : return address
930  // rsp[8] : argument argc
931  // rsp[16] : argument argc - 1
932  // ...
933  // rsp[argc * 8] : argument 1
934  // rsp[(argc + 1) * 8] : argument 0 = receiver
935  // -----------------------------------
936  Label miss;
937 
938  // Get the receiver of the function from the stack.
939  __ movq(rdx, Operand(rsp, (argc + 1) * kPointerSize));
940 
941  GenerateStringDictionaryReceiverCheck(masm, rdx, rax, rbx, &miss);
942 
943  // rax: elements
944  // Search the dictionary placing the result in rdi.
945  GenerateDictionaryLoad(masm, &miss, rax, rcx, rbx, rdi, rdi);
946 
947  GenerateFunctionTailCall(masm, argc, &miss);
948 
949  __ bind(&miss);
950 }
951 
952 
// Emits the call-IC miss handler: calls the IC utility |id| inside an
// internal frame to resolve the callee into rdi, patches a global receiver
// on the stack (regular CallIC only), then tail-invokes the function.
// NOTE(review): the listing skips original lines 1008 and 1015 — likely the
// load of the global receiver/proxy into rdx before the stack patch, and
// the "? CALL_AS_FUNCTION" arm of the dangling conditional below.  Restore
// from upstream ic-x64.cc.
953 void CallICBase::GenerateMiss(MacroAssembler* masm,
954  int argc,
955  IC::UtilityId id,
956  Code::ExtraICState extra_state) {
957  // ----------- S t a t e -------------
958  // rcx : function name
959  // rsp[0] : return address
960  // rsp[8] : argument argc
961  // rsp[16] : argument argc - 1
962  // ...
963  // rsp[argc * 8] : argument 1
964  // rsp[(argc + 1) * 8] : argument 0 = receiver
965  // -----------------------------------
966 
967  Counters* counters = masm->isolate()->counters();
968  if (id == IC::kCallIC_Miss) {
969  __ IncrementCounter(counters->call_miss(), 1);
970  } else {
971  __ IncrementCounter(counters->keyed_call_miss(), 1);
972  }
973 
974  // Get the receiver of the function from the stack; 1 ~ return address.
975  __ movq(rdx, Operand(rsp, (argc + 1) * kPointerSize));
976 
977  // Enter an internal frame.
978  {
979  FrameScope scope(masm, StackFrame::INTERNAL);
980 
981  // Push the receiver and the name of the function.
982  __ push(rdx);
983  __ push(rcx);
984 
985  // Call the entry.
986  CEntryStub stub(1);
987  __ Set(rax, 2);
988  __ LoadAddress(rbx, ExternalReference(IC_Utility(id), masm->isolate()));
989  __ CallStub(&stub);
990 
991  // Move result to rdi and exit the internal frame.
992  __ movq(rdi, rax);
993  }
994 
995  // Check if the receiver is a global object of some sort.
996  // This can happen only for regular CallIC but not KeyedCallIC.
997  if (id == IC::kCallIC_Miss) {
998  Label invoke, global;
999  __ movq(rdx, Operand(rsp, (argc + 1) * kPointerSize)); // receiver
1000  __ JumpIfSmi(rdx, &invoke);
1001  __ CmpObjectType(rdx, JS_GLOBAL_OBJECT_TYPE, rcx);
1002  __ j(equal, &global);
1003  __ CmpInstanceType(rcx, JS_BUILTINS_OBJECT_TYPE);
1004  __ j(not_equal, &invoke);
1005 
1006  // Patch the receiver on the stack.
1007  __ bind(&global);
1009  __ movq(Operand(rsp, (argc + 1) * kPointerSize), rdx);
1010  __ bind(&invoke);
1011  }
1012 
1013  // Invoke the function.
1014  CallKind call_kind = CallICBase::Contextual::decode(extra_state)
1016  : CALL_AS_METHOD;
1017  ParameterCount actual(argc);
1018  __ InvokeFunction(rdi,
1019  actual,
1020  JUMP_FUNCTION,
1021  NullCallWrapper(),
1022  call_kind);
1023 }
1024 
1025 
// Megamorphic CallIC entry: probe the stub cache for a monomorphic call
// stub keyed on the receiver's map and the name in rcx; on a cache miss,
// fall through to the generic miss handler.
void CallIC::GenerateMegamorphic(MacroAssembler* masm,
                                 int argc,
                                 Code::ExtraICState extra_ic_state) {
  // ----------- S t a t e -------------
  // rcx                 : function name
  // rsp[0]              : return address
  // rsp[8]              : argument argc
  // rsp[16]             : argument argc - 1
  // ...
  // rsp[argc * 8]       : argument 1
  // rsp[(argc + 1) * 8] : argument 0 = receiver
  // -----------------------------------

  // Get the receiver of the function from the stack; 1 ~ return address.
  __ movq(rdx, Operand(rsp, (argc + 1) * kPointerSize));
  GenerateMonomorphicCacheProbe(masm, argc, Code::CALL_IC, extra_ic_state);
  // Only reached when the probe misses.
  GenerateMiss(masm, argc, extra_ic_state);
}
1044 
1045 
1046 void KeyedCallIC::GenerateMegamorphic(MacroAssembler* masm, int argc) {
1047  // ----------- S t a t e -------------
1048  // rcx : function name
1049  // rsp[0] : return address
1050  // rsp[8] : argument argc
1051  // rsp[16] : argument argc - 1
1052  // ...
1053  // rsp[argc * 8] : argument 1
1054  // rsp[(argc + 1) * 8] : argument 0 = receiver
1055  // -----------------------------------
1056 
1057  // Get the receiver of the function from the stack; 1 ~ return address.
1058  __ movq(rdx, Operand(rsp, (argc + 1) * kPointerSize));
1059 
1060  Label do_call, slow_call, slow_load;
1061  Label check_number_dictionary, check_string, lookup_monomorphic_cache;
1062  Label index_smi, index_string;
1063 
1064  // Check that the key is a smi.
1065  __ JumpIfNotSmi(rcx, &check_string);
1066 
1067  __ bind(&index_smi);
1068  // Now the key is known to be a smi. This place is also jumped to from below
1069  // where a numeric string is converted to a smi.
1070 
1071  GenerateKeyedLoadReceiverCheck(
1072  masm, rdx, rax, Map::kHasIndexedInterceptor, &slow_call);
1073 
1074  GenerateFastArrayLoad(
1075  masm, rdx, rcx, rax, rbx, rdi, &check_number_dictionary, &slow_load);
1076  Counters* counters = masm->isolate()->counters();
1077  __ IncrementCounter(counters->keyed_call_generic_smi_fast(), 1);
1078 
1079  __ bind(&do_call);
1080  // receiver in rdx is not used after this point.
1081  // rcx: key
1082  // rdi: function
1083  GenerateFunctionTailCall(masm, argc, &slow_call);
1084 
1085  __ bind(&check_number_dictionary);
1086  // rax: elements
1087  // rcx: smi key
1088  // Check whether the elements is a number dictionary.
1089  __ CompareRoot(FieldOperand(rax, HeapObject::kMapOffset),
1090  Heap::kHashTableMapRootIndex);
1091  __ j(not_equal, &slow_load);
1092  __ SmiToInteger32(rbx, rcx);
1093  // ebx: untagged index
1094  __ LoadFromNumberDictionary(&slow_load, rax, rcx, rbx, r9, rdi, rdi);
1095  __ IncrementCounter(counters->keyed_call_generic_smi_dict(), 1);
1096  __ jmp(&do_call);
1097 
1098  __ bind(&slow_load);
1099  // This branch is taken when calling KeyedCallIC_Miss is neither required
1100  // nor beneficial.
1101  __ IncrementCounter(counters->keyed_call_generic_slow_load(), 1);
1102  {
1103  FrameScope scope(masm, StackFrame::INTERNAL);
1104  __ push(rcx); // save the key
1105  __ push(rdx); // pass the receiver
1106  __ push(rcx); // pass the key
1107  __ CallRuntime(Runtime::kKeyedGetProperty, 2);
1108  __ pop(rcx); // restore the key
1109  }
1110  __ movq(rdi, rax);
1111  __ jmp(&do_call);
1112 
1113  __ bind(&check_string);
1114  GenerateKeyStringCheck(masm, rcx, rax, rbx, &index_string, &slow_call);
1115 
1116  // The key is known to be a symbol.
1117  // If the receiver is a regular JS object with slow properties then do
1118  // a quick inline probe of the receiver's dictionary.
1119  // Otherwise do the monomorphic cache probe.
1120  GenerateKeyedLoadReceiverCheck(
1121  masm, rdx, rax, Map::kHasNamedInterceptor, &lookup_monomorphic_cache);
1122 
1124  __ CompareRoot(FieldOperand(rbx, HeapObject::kMapOffset),
1125  Heap::kHashTableMapRootIndex);
1126  __ j(not_equal, &lookup_monomorphic_cache);
1127 
1128  GenerateDictionaryLoad(masm, &slow_load, rbx, rcx, rax, rdi, rdi);
1129  __ IncrementCounter(counters->keyed_call_generic_lookup_dict(), 1);
1130  __ jmp(&do_call);
1131 
1132  __ bind(&lookup_monomorphic_cache);
1133  __ IncrementCounter(counters->keyed_call_generic_lookup_cache(), 1);
1135  argc,
1136  Code::KEYED_CALL_IC,
1138  // Fall through on miss.
1139 
1140  __ bind(&slow_call);
1141  // This branch is taken if:
1142  // - the receiver requires boxing or access check,
1143  // - the key is neither smi nor symbol,
1144  // - the value loaded is not a function,
1145  // - there is hope that the runtime will create a monomorphic call stub
1146  // that will get fetched next time.
1147  __ IncrementCounter(counters->keyed_call_generic_slow(), 1);
1148  GenerateMiss(masm, argc);
1149 
1150  __ bind(&index_string);
1151  __ IndexFromHash(rbx, rcx);
1152  // Now jump to the place where smi keys are handled.
1153  __ jmp(&index_smi);
1154 }
1155 
1156 
// Normal (dictionary-mode receiver) keyed call: only valid when the key is
// a string, in which case it behaves like a plain CallIC lookup; any other
// key type goes to the miss handler.
void KeyedCallIC::GenerateNormal(MacroAssembler* masm, int argc) {
  // ----------- S t a t e -------------
  // rcx                 : function name
  // rsp[0]              : return address
  // rsp[8]              : argument argc
  // rsp[16]             : argument argc - 1
  // ...
  // rsp[argc * 8]       : argument 1
  // rsp[(argc + 1) * 8] : argument 0 = receiver
  // -----------------------------------

  // Check if the name is a string.
  Label miss;
  __ JumpIfSmi(rcx, &miss);
  Condition cond = masm->IsObjectStringType(rcx, rax, rax);
  __ j(NegateCondition(cond), &miss);
  // String key: delegate to the non-keyed normal-mode call IC.
  CallICBase::GenerateNormal(masm, argc);
  __ bind(&miss);
  GenerateMiss(masm, argc);
}
1177 
1178 
1179 static Operand GenerateMappedArgumentsLookup(MacroAssembler* masm,
1180  Register object,
1181  Register key,
1182  Register scratch1,
1183  Register scratch2,
1184  Register scratch3,
1185  Label* unmapped_case,
1186  Label* slow_case) {
1187  Heap* heap = masm->isolate()->heap();
1188 
1189  // Check that the receiver is a JSObject. Because of the elements
1190  // map check later, we do not need to check for interceptors or
1191  // whether it requires access checks.
1192  __ JumpIfSmi(object, slow_case);
1193  // Check that the object is some kind of JSObject.
1194  __ CmpObjectType(object, FIRST_JS_RECEIVER_TYPE, scratch1);
1195  __ j(below, slow_case);
1196 
1197  // Check that the key is a positive smi.
1198  Condition check = masm->CheckNonNegativeSmi(key);
1199  __ j(NegateCondition(check), slow_case);
1200 
1201  // Load the elements into scratch1 and check its map. If not, jump
1202  // to the unmapped lookup with the parameter map in scratch1.
1203  Handle<Map> arguments_map(heap->non_strict_arguments_elements_map());
1204  __ movq(scratch1, FieldOperand(object, JSObject::kElementsOffset));
1205  __ CheckMap(scratch1, arguments_map, slow_case, DONT_DO_SMI_CHECK);
1206 
1207  // Check if element is in the range of mapped arguments.
1208  __ movq(scratch2, FieldOperand(scratch1, FixedArray::kLengthOffset));
1209  __ SmiSubConstant(scratch2, scratch2, Smi::FromInt(2));
1210  __ cmpq(key, scratch2);
1211  __ j(greater_equal, unmapped_case);
1212 
1213  // Load element index and check whether it is the hole.
1214  const int kHeaderSize = FixedArray::kHeaderSize + 2 * kPointerSize;
1215  __ SmiToInteger64(scratch3, key);
1216  __ movq(scratch2, FieldOperand(scratch1,
1217  scratch3,
1219  kHeaderSize));
1220  __ CompareRoot(scratch2, Heap::kTheHoleValueRootIndex);
1221  __ j(equal, unmapped_case);
1222 
1223  // Load value from context and return it. We can reuse scratch1 because
1224  // we do not jump to the unmapped lookup (which requires the parameter
1225  // map in scratch1).
1226  __ movq(scratch1, FieldOperand(scratch1, FixedArray::kHeaderSize));
1227  __ SmiToInteger64(scratch3, scratch2);
1228  return FieldOperand(scratch1,
1229  scratch3,
1232 }
1233 
1234 
1235 static Operand GenerateUnmappedArgumentsLookup(MacroAssembler* masm,
1236  Register key,
1237  Register parameter_map,
1238  Register scratch,
1239  Label* slow_case) {
1240  // Element is in arguments backing store, which is referenced by the
1241  // second element of the parameter_map. The parameter_map register
1242  // must be loaded with the parameter map of the arguments object and is
1243  // overwritten.
1244  const int kBackingStoreOffset = FixedArray::kHeaderSize + kPointerSize;
1245  Register backing_store = parameter_map;
1246  __ movq(backing_store, FieldOperand(parameter_map, kBackingStoreOffset));
1247  Handle<Map> fixed_array_map(masm->isolate()->heap()->fixed_array_map());
1248  __ CheckMap(backing_store, fixed_array_map, slow_case, DONT_DO_SMI_CHECK);
1249  __ movq(scratch, FieldOperand(backing_store, FixedArray::kLengthOffset));
1250  __ cmpq(key, scratch);
1251  __ j(greater_equal, slow_case);
1252  __ SmiToInteger64(scratch, key);
1253  return FieldOperand(backing_store,
1254  scratch,
1257 }
1258 
1259 
// Keyed load specialized for non-strict arguments objects: try the mapped
// (context-aliased) slot first, then the unmapped backing store; otherwise
// fall back to the generic miss handler.
void KeyedLoadIC::GenerateNonStrictArguments(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  // -- rax : key
  // -- rdx : receiver
  // -- rsp[0] : return address
  // -----------------------------------
  Label slow, notin;
  Operand mapped_location =
      GenerateMappedArgumentsLookup(
          masm, rdx, rax, rbx, rcx, rdi, &notin, &slow);
  __ movq(rax, mapped_location);
  __ Ret();
  __ bind(&notin);
  // The unmapped lookup expects that the parameter map is in rbx.
  Operand unmapped_location =
      GenerateUnmappedArgumentsLookup(masm, rax, rbx, rcx, &slow);
  // A hole in the backing store means the element was deleted.
  __ CompareRoot(unmapped_location, Heap::kTheHoleValueRootIndex);
  __ j(equal, &slow);
  __ movq(rax, unmapped_location);
  __ Ret();
  __ bind(&slow);
  // false: this is a regular miss, not a force-generic one.
  GenerateMiss(masm, false);
}
1283 
1284 
1285 void KeyedStoreIC::GenerateNonStrictArguments(MacroAssembler* masm) {
1286  // ----------- S t a t e -------------
1287  // -- rax : value
1288  // -- rcx : key
1289  // -- rdx : receiver
1290  // -- rsp[0] : return address
1291  // -----------------------------------
1292  Label slow, notin;
1293  Operand mapped_location = GenerateMappedArgumentsLookup(
1294  masm, rdx, rcx, rbx, rdi, r8, &notin, &slow);
1295  __ movq(mapped_location, rax);
1296  __ lea(r9, mapped_location);
1297  __ movq(r8, rax);
1298  __ RecordWrite(rbx,
1299  r9,
1300  r8,
1304  __ Ret();
1305  __ bind(&notin);
1306  // The unmapped lookup expects that the parameter map is in rbx.
1307  Operand unmapped_location =
1308  GenerateUnmappedArgumentsLookup(masm, rcx, rbx, rdi, &slow);
1309  __ movq(unmapped_location, rax);
1310  __ lea(r9, unmapped_location);
1311  __ movq(r8, rax);
1312  __ RecordWrite(rbx,
1313  r9,
1314  r8,
1318  __ Ret();
1319  __ bind(&slow);
1320  GenerateMiss(masm, false);
1321 }
1322 
1323 
// Keyed call specialized for non-strict arguments objects: load the callee
// from the mapped slot or the unmapped backing store and tail-call it;
// otherwise fall back to the miss handler.
void KeyedCallIC::GenerateNonStrictArguments(MacroAssembler* masm,
                                             int argc) {
  // ----------- S t a t e -------------
  // rcx                 : function name
  // rsp[0]              : return address
  // rsp[8]              : argument argc
  // rsp[16]             : argument argc - 1
  // ...
  // rsp[argc * 8]       : argument 1
  // rsp[(argc + 1) * 8] : argument 0 = receiver
  // -----------------------------------
  Label slow, notin;
  __ movq(rdx, Operand(rsp, (argc + 1) * kPointerSize));
  Operand mapped_location = GenerateMappedArgumentsLookup(
      masm, rdx, rcx, rbx, rax, r8, &notin, &slow);
  __ movq(rdi, mapped_location);
  GenerateFunctionTailCall(masm, argc, &slow);
  __ bind(&notin);
  // The unmapped lookup expects that the parameter map is in rbx.
  Operand unmapped_location =
      GenerateUnmappedArgumentsLookup(masm, rcx, rbx, rax, &slow);
  // A hole in the backing store means the element was deleted.
  __ CompareRoot(unmapped_location, Heap::kTheHoleValueRootIndex);
  __ j(equal, &slow);
  __ movq(rdi, unmapped_location);
  GenerateFunctionTailCall(masm, argc, &slow);
  __ bind(&slow);
  GenerateMiss(masm, argc);
}
1352 
1353 
// Megamorphic named load: probe the stub cache for a monomorphic load stub
// keyed on the receiver's map and the name; on a cache miss, jump to the
// runtime via the generic load-miss stub.
void LoadIC::GenerateMegamorphic(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  // -- rax : receiver
  // -- rcx : name
  // -- rsp[0] : return address
  // -----------------------------------

  // Probe the stub cache.
  Code::Flags flags = Code::ComputeFlags(Code::LOAD_IC, MONOMORPHIC);
  Isolate::Current()->stub_cache()->GenerateProbe(masm, flags, rax, rcx, rbx,
                                                  rdx);

  // Cache miss: Jump to runtime.
  StubCompiler::GenerateLoadMiss(masm, Code::LOAD_IC);
}
1369 
1370 
// Normal-mode named load: look the name up in the receiver's string
// dictionary (slow properties); miss goes to the runtime.
void LoadIC::GenerateNormal(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  // -- rax : receiver
  // -- rcx : name
  // -- rsp[0] : return address
  // -----------------------------------
  Label miss;

  // Bail out unless the receiver is a JSObject with dictionary properties
  // (leaves the dictionary in rdx).
  GenerateStringDictionaryReceiverCheck(masm, rax, rdx, rbx, &miss);

  // rdx: elements
  // Search the dictionary placing the result in rax.
  GenerateDictionaryLoad(masm, &miss, rdx, rcx, rbx, rdi, rax);
  __ ret(0);

  // Cache miss: Jump to runtime.
  __ bind(&miss);
  GenerateMiss(masm);
}
1390 
1391 
// LoadIC miss: bump the miss counter, rearrange the stack into the runtime
// calling convention (receiver, name, return address) and tail-call the
// LoadIC_Miss IC utility.
void LoadIC::GenerateMiss(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  // -- rax : receiver
  // -- rcx : name
  // -- rsp[0] : return address
  // -----------------------------------

  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->load_miss(), 1);

  __ pop(rbx);   // return address
  __ push(rax);  // receiver
  __ push(rcx);  // name
  __ push(rbx);  // return address

  // Perform tail call to the entry.
  ExternalReference ref =
      ExternalReference(IC_Utility(kLoadIC_Miss), masm->isolate());
  __ TailCallExternalReference(ref, 2, 1);
}
1412 
1413 
// KeyedLoadIC miss: tail-call either the force-generic or the regular
// keyed-load miss utility with (receiver, key) as arguments.
void KeyedLoadIC::GenerateMiss(MacroAssembler* masm, bool force_generic) {
  // ----------- S t a t e -------------
  // -- rax : key
  // -- rdx : receiver
  // -- rsp[0] : return address
  // -----------------------------------

  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->keyed_load_miss(), 1);

  __ pop(rbx);   // return address
  __ push(rdx);  // receiver
  __ push(rax);  // name
  __ push(rbx);  // return address

  // Perform tail call to the entry. force_generic selects the utility that
  // rewrites the IC straight to the generic stub.
  ExternalReference ref = force_generic
      ? ExternalReference(IC_Utility(kKeyedLoadIC_MissForceGeneric),
                          masm->isolate())
      : ExternalReference(IC_Utility(kKeyedLoadIC_Miss), masm->isolate());
  __ TailCallExternalReference(ref, 2, 1);
}
1436 
1437 
// Keyed load via the runtime: rearrange the stack and tail-call
// Runtime::kKeyedGetProperty with (receiver, key).
void KeyedLoadIC::GenerateRuntimeGetProperty(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  // -- rax : key
  // -- rdx : receiver
  // -- rsp[0] : return address
  // -----------------------------------

  __ pop(rbx);   // return address
  __ push(rdx);  // receiver
  __ push(rax);  // name
  __ push(rbx);  // return address

  // Perform tail call to the entry.
  __ TailCallRuntime(Runtime::kKeyedGetProperty, 2, 1);
}
1453 
1454 
// Megamorphic named store: probe the stub cache for a monomorphic store
// stub (flags include the strict-mode bit); on a cache miss, fall through
// to the generic miss handler.
void StoreIC::GenerateMegamorphic(MacroAssembler* masm,
                                  StrictModeFlag strict_mode) {
  // ----------- S t a t e -------------
  // -- rax : value
  // -- rcx : name
  // -- rdx : receiver
  // -- rsp[0] : return address
  // -----------------------------------

  // Get the receiver from the stack and probe the stub cache.
  Code::Flags flags =
      Code::ComputeFlags(Code::STORE_IC, MONOMORPHIC, strict_mode);
  Isolate::Current()->stub_cache()->GenerateProbe(masm, flags, rdx, rcx, rbx,
                                                  no_reg);

  // Cache miss: Jump to runtime.
  GenerateMiss(masm);
}
1473 
1474 
// StoreIC miss: rearrange the stack into (receiver, name, value) and
// tail-call the StoreIC_Miss IC utility.
void StoreIC::GenerateMiss(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  // -- rax : value
  // -- rcx : name
  // -- rdx : receiver
  // -- rsp[0] : return address
  // -----------------------------------

  __ pop(rbx);   // return address
  __ push(rdx);  // receiver
  __ push(rcx);  // name
  __ push(rax);  // value
  __ push(rbx);  // return address

  // Perform tail call to the entry.
  ExternalReference ref =
      ExternalReference(IC_Utility(kStoreIC_Miss), masm->isolate());
  __ TailCallExternalReference(ref, 3, 1);
}
1494 
1495 
// Specialized store for assignments to a JSArray's "length" property:
// after verifying the receiver and value are in the supported shape, the
// actual length update is delegated to the StoreIC_ArrayLength utility.
void StoreIC::GenerateArrayLength(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  // -- rax : value
  // -- rcx : name
  // -- rdx : receiver
  // -- rsp[0] : return address
  // -----------------------------------
  //
  // This accepts as a receiver anything JSArray::SetElementsLength accepts
  // (currently anything except for external arrays which means anything with
  // elements of FixedArray type). Value must be a number, but only smis are
  // accepted as the most common case.

  Label miss;

  Register receiver = rdx;
  Register value = rax;
  Register scratch = rbx;

  // Check that the receiver isn't a smi.
  __ JumpIfSmi(receiver, &miss);

  // Check that the object is a JS array.
  __ CmpObjectType(receiver, JS_ARRAY_TYPE, scratch);
  __ j(not_equal, &miss);

  // Check that elements are FixedArray.
  // We rely on StoreIC_ArrayLength below to deal with all types of
  // fast elements (including COW).
  __ movq(scratch, FieldOperand(receiver, JSArray::kElementsOffset));
  __ CmpObjectType(scratch, FIXED_ARRAY_TYPE, scratch);
  __ j(not_equal, &miss);

  // Check that the array has fast properties, otherwise the length
  // property might have been redefined.
  __ movq(scratch, FieldOperand(receiver, JSArray::kPropertiesOffset));
  __ CompareRoot(FieldOperand(scratch, FixedArray::kMapOffset),
                 Heap::kHashTableMapRootIndex);
  __ j(equal, &miss);

  // Check that value is a smi.
  __ JumpIfNotSmi(value, &miss);

  // Prepare tail call to StoreIC_ArrayLength.
  __ pop(scratch);
  __ push(receiver);
  __ push(value);
  __ push(scratch);  // return address

  ExternalReference ref =
      ExternalReference(IC_Utility(kStoreIC_ArrayLength), masm->isolate());
  __ TailCallExternalReference(ref, 2, 1);

  __ bind(&miss);

  GenerateMiss(masm);
}
1553 
1554 
// Normal-mode named store: write into the receiver's string dictionary
// (slow properties); miss goes to the runtime.
void StoreIC::GenerateNormal(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  // -- rax : value
  // -- rcx : name
  // -- rdx : receiver
  // -- rsp[0] : return address
  // -----------------------------------

  Label miss;

  // Bail out unless the receiver is a JSObject with dictionary properties
  // (leaves the dictionary in rbx).
  GenerateStringDictionaryReceiverCheck(masm, rdx, rbx, rdi, &miss);

  GenerateDictionaryStore(masm, &miss, rbx, rcx, rax, r8, r9);
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->store_normal_hit(), 1);
  __ ret(0);

  __ bind(&miss);
  __ IncrementCounter(counters->store_normal_miss(), 1);
  GenerateMiss(masm);
}
1576 
1577 
// Store through a global proxy receiver: tail-call Runtime::kSetProperty
// with (receiver, name, value, attributes=NONE, strict_mode).
void StoreIC::GenerateGlobalProxy(MacroAssembler* masm,
                                  StrictModeFlag strict_mode) {
  // ----------- S t a t e -------------
  // -- rax : value
  // -- rcx : name
  // -- rdx : receiver
  // -- rsp[0] : return address
  // -----------------------------------
  __ pop(rbx);   // return address
  __ push(rdx);  // receiver
  __ push(rcx);  // name
  __ push(rax);  // value
  __ Push(Smi::FromInt(NONE));  // PropertyAttributes
  __ Push(Smi::FromInt(strict_mode));
  __ push(rbx);  // return address

  // Do tail-call to runtime routine.
  __ TailCallRuntime(Runtime::kSetProperty, 5, 1);
}
1597 
1598 
// Keyed store via the runtime: tail-call Runtime::kSetProperty with
// (receiver, key, value, attributes=NONE, strict_mode).
void KeyedStoreIC::GenerateRuntimeSetProperty(MacroAssembler* masm,
                                              StrictModeFlag strict_mode) {
  // ----------- S t a t e -------------
  // -- rax : value
  // -- rcx : key
  // -- rdx : receiver
  // -- rsp[0] : return address
  // -----------------------------------

  __ pop(rbx);   // return address
  __ push(rdx);  // receiver
  __ push(rcx);  // key
  __ push(rax);  // value
  __ Push(Smi::FromInt(NONE));         // PropertyAttributes
  __ Push(Smi::FromInt(strict_mode));  // Strict mode.
  __ push(rbx);  // return address

  // Do tail-call to runtime routine.
  __ TailCallRuntime(Runtime::kSetProperty, 5, 1);
}
1619 
1620 
// Keyed store slow path: tail-call the KeyedStoreIC_Slow IC utility with
// (receiver, key, value).
void KeyedStoreIC::GenerateSlow(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  // -- rax : value
  // -- rcx : key
  // -- rdx : receiver
  // -- rsp[0] : return address
  // -----------------------------------

  __ pop(rbx);   // return address
  __ push(rdx);  // receiver
  __ push(rcx);  // key
  __ push(rax);  // value
  __ push(rbx);  // return address

  // Do tail-call to runtime routine.
  ExternalReference ref(IC_Utility(kKeyedStoreIC_Slow), masm->isolate());
  __ TailCallExternalReference(ref, 3, 1);
}
1639 
1640 
// KeyedStoreIC miss: tail-call either the force-generic or the regular
// keyed-store miss utility with (receiver, key, value).
void KeyedStoreIC::GenerateMiss(MacroAssembler* masm, bool force_generic) {
  // ----------- S t a t e -------------
  // -- rax : value
  // -- rcx : key
  // -- rdx : receiver
  // -- rsp[0] : return address
  // -----------------------------------

  __ pop(rbx);   // return address
  __ push(rdx);  // receiver
  __ push(rcx);  // key
  __ push(rax);  // value
  __ push(rbx);  // return address

  // Do tail-call to runtime routine. force_generic selects the utility
  // that rewrites the IC straight to the generic stub.
  ExternalReference ref = force_generic
      ? ExternalReference(IC_Utility(kKeyedStoreIC_MissForceGeneric),
                          masm->isolate())
      : ExternalReference(IC_Utility(kKeyedStoreIC_Miss), masm->isolate());
  __ TailCallExternalReference(ref, 3, 1);
}
1662 
1663 
1664 void KeyedStoreIC::GenerateTransitionElementsSmiToDouble(MacroAssembler* masm) {
1665  // ----------- S t a t e -------------
1666  // -- rbx : target map
1667  // -- rdx : receiver
1668  // -- rsp[0] : return address
1669  // -----------------------------------
1670  // Must return the modified receiver in eax.
1671  if (!FLAG_trace_elements_transitions) {
1672  Label fail;
1674  __ movq(rax, rdx);
1675  __ Ret();
1676  __ bind(&fail);
1677  }
1678 
1679  __ pop(rbx);
1680  __ push(rdx);
1681  __ push(rbx); // return address
1682  __ TailCallRuntime(Runtime::kTransitionElementsSmiToDouble, 1, 1);
1683 }
1684 
1685 
1687  MacroAssembler* masm) {
1688  // ----------- S t a t e -------------
1689  // -- rbx : target map
1690  // -- rdx : receiver
1691  // -- rsp[0] : return address
1692  // -----------------------------------
1693  // Must return the modified receiver in eax.
1694  if (!FLAG_trace_elements_transitions) {
1695  Label fail;
1697  __ movq(rax, rdx);
1698  __ Ret();
1699  __ bind(&fail);
1700  }
1701 
1702  __ pop(rbx);
1703  __ push(rdx);
1704  __ push(rbx); // return address
1705  __ TailCallRuntime(Runtime::kTransitionElementsDoubleToObject, 1, 1);
1706 }
1707 
1708 
1709 #undef __
1710 
1711 
1713  switch (op) {
1714  case Token::EQ_STRICT:
1715  case Token::EQ:
1716  return equal;
1717  case Token::LT:
1718  return less;
1719  case Token::GT:
1720  return greater;
1721  case Token::LTE:
1722  return less_equal;
1723  case Token::GTE:
1724  return greater_equal;
1725  default:
1726  UNREACHABLE();
1727  return no_condition;
1728  }
1729 }
1730 
1731 
1732 static bool HasInlinedSmiCode(Address address) {
1733  // The address of the instruction following the call.
1734  Address test_instruction_address =
1736 
1737  // If the instruction following the call is not a test al, nothing
1738  // was inlined.
1739  return *test_instruction_address == Assembler::kTestAlByte;
1740 }
1741 
1742 
1743 void CompareIC::UpdateCaches(Handle<Object> x, Handle<Object> y) {
1744  HandleScope scope;
1745  Handle<Code> rewritten;
1746  State previous_state = GetState();
1747 
1748  State state = TargetState(previous_state, HasInlinedSmiCode(address()), x, y);
1749  if (state == GENERIC) {
1750  CompareStub stub(GetCondition(), strict(), NO_COMPARE_FLAGS);
1751  rewritten = stub.GetCode();
1752  } else {
1753  ICCompareStub stub(op_, state);
1754  if (state == KNOWN_OBJECTS) {
1755  stub.set_known_map(Handle<Map>(Handle<JSObject>::cast(x)->map()));
1756  }
1757  rewritten = stub.GetCode();
1758  }
1759  set_target(*rewritten);
1760 
1761 #ifdef DEBUG
1762  if (FLAG_trace_ic) {
1763  PrintF("[CompareIC (%s->%s)#%s]\n",
1764  GetStateName(previous_state),
1765  GetStateName(state),
1766  Token::Name(op_));
1767  }
1768 #endif
1769 
1770  // Activate inlined smi code.
1771  if (previous_state == UNINITIALIZED) {
1773  }
1774 }
1775 
1776 void PatchInlinedSmiCode(Address address, InlinedSmiCheck check) {
1777  // The address of the instruction following the call.
1778  Address test_instruction_address =
1780 
1781  // If the instruction following the call is not a test al, nothing
1782  // was inlined.
1783  if (*test_instruction_address != Assembler::kTestAlByte) {
1784  ASSERT(*test_instruction_address == Assembler::kNopByte);
1785  return;
1786  }
1787 
1788  Address delta_address = test_instruction_address + 1;
1789  // The delta to the start of the map check instruction and the
1790  // condition code uses at the patched jump.
1791  int8_t delta = *reinterpret_cast<int8_t*>(delta_address);
1792  if (FLAG_trace_ic) {
1793  PrintF("[ patching ic at %p, test=%p, delta=%d\n",
1794  address, test_instruction_address, delta);
1795  }
1796 
1797  // Patch with a short conditional jump. Enabling means switching from a short
1798  // jump-if-carry/not-carry to jump-if-zero/not-zero, whereas disabling is the
1799  // reverse operation of that.
1800  Address jmp_address = test_instruction_address - delta;
1801  ASSERT((check == ENABLE_INLINED_SMI_CHECK)
1802  ? (*jmp_address == Assembler::kJncShortOpcode ||
1803  *jmp_address == Assembler::kJcShortOpcode)
1804  : (*jmp_address == Assembler::kJnzShortOpcode ||
1805  *jmp_address == Assembler::kJzShortOpcode));
1807  ? (*jmp_address == Assembler::kJncShortOpcode ? not_zero : zero)
1808  : (*jmp_address == Assembler::kJnzShortOpcode ? not_carry : carry);
1809  *jmp_address = static_cast<byte>(Assembler::kJccShortPrefix | cc);
1810 }
1811 
1812 
1813 } } // namespace v8::internal
1814 
1815 #endif // V8_TARGET_ARCH_X64
byte * Address
Definition: globals.h:157
const Register rdx
static void GenerateTransitionElementsDoubleToObject(MacroAssembler *masm)
static const int kBitFieldOffset
Definition: objects.h:5160
static void GenerateRuntimeGetProperty(MacroAssembler *masm)
static const byte kJccShortPrefix
static void GenerateNormal(MacroAssembler *masm, int argc)
static const int kMapHashShift
Definition: heap.h:2350
void PrintF(const char *format,...)
Definition: v8utils.cc:40
static void GenerateGeneric(MacroAssembler *masm, StrictModeFlag strict_mode)
static const char * Name(Value tok)
Definition: token.h:196
static Smi * FromInt(int value)
Definition: objects-inl.h:981
static void GenerateMiss(MacroAssembler *masm, int argc, IC::UtilityId id, Code::ExtraICState extra_state)
static const int kGlobalReceiverOffset
Definition: objects.h:6288
static void GenerateGlobalProxy(MacroAssembler *masm, StrictModeFlag strict_mode)
const int kSmiValueSize
Definition: v8.h:4061
static void GenerateMegamorphic(MacroAssembler *masm, StrictModeFlag strict_mode)
static void GenerateStringLength(MacroAssembler *masm, bool support_wrappers)
KeyedStoreCheckMap
Definition: ic.h:634
static const int kHasNamedInterceptor
Definition: objects.h:5169
static const int kIsAccessCheckNeeded
Definition: objects.h:5173
Address address() const
Definition: ic-inl.h:41
static void GenerateMegamorphic(MacroAssembler *masm)
#define ASSERT(condition)
Definition: checks.h:270
const int kPointerSizeLog2
Definition: globals.h:232
static const int kInstanceSizeOffset
Definition: objects.h:5147
static const ExtraICState kNoExtraICState
Definition: objects.h:4236
static const byte kTestAlByte
void PatchInlinedSmiCode(Address address, InlinedSmiCheck check)
static void GenerateArrayLength(MacroAssembler *masm)
static const int kHashFieldOffset
Definition: objects.h:7319
uint8_t byte
Definition: globals.h:156
static const int kHasIndexedInterceptor
Definition: objects.h:5170
static const byte kJcShortOpcode
void UpdateCaches(Handle< Object > x, Handle< Object > y)
#define UNREACHABLE()
Definition: checks.h:50
static Condition ComputeCondition(Token::Value op)
STATIC_ASSERT((FixedDoubleArray::kHeaderSize &kDoubleAlignmentMask)==0)
const uint32_t kIsSymbolMask
Definition: objects.h:462
const Register r9
const int kPointerSize
Definition: globals.h:220
static void GenerateGeneric(MacroAssembler *masm)
static const byte kNopByte
Operand FieldOperand(Register object, int offset)
static const byte kJzShortOpcode
const int kHeapObjectTag
Definition: v8.h:4009
static void GenerateMiss(MacroAssembler *masm)
const Register rbx
const Register rsp
#define __
static bool decode(uint32_t value)
Definition: utils.h:273
static const int kPropertiesOffset
Definition: objects.h:2171
static Flags ComputeFlags(Kind kind, InlineCacheState ic_state=UNINITIALIZED, ExtraICState extra_ic_state=kNoExtraICState, StubType type=NORMAL, int argc=-1, InlineCacheHolderFlag holder=OWN_MAP)
Definition: objects-inl.h:3491
const Register rax
static const int kInObjectPropertiesOffset
Definition: objects.h:5149
static void GenerateSmiToDouble(MacroAssembler *masm, Label *fail)
static void GenerateMapChangeElementsTransition(MacroAssembler *masm)
const Register rdi
const Register r0
static const int kElementsOffset
Definition: objects.h:2172
static const int kContainsCachedArrayIndexMask
Definition: objects.h:7374
static const int kCallTargetAddressOffset
STATIC_ASSERT(kGrowICDelta==STORE_AND_GROW_TRANSITION_SMI_TO_OBJECT-STORE_TRANSITION_SMI_TO_OBJECT)
static void GenerateMegamorphic(MacroAssembler *masm, int argc)
static const int kLengthOffset
Definition: objects.h:8332
static const int kHeaderSize
Definition: objects.h:2296
static void GenerateSlow(MacroAssembler *masm)
static void GenerateMiss(MacroAssembler *masm, int argc, Code::ExtraICState extra_state)
Definition: ic.h:278
static const int kMapOffset
Definition: objects.h:1261
static void GenerateRuntimeSetProperty(MacroAssembler *masm, StrictModeFlag strict_mode)
const Register r1
static void GenerateDoubleToObject(MacroAssembler *masm, Label *fail)
static const byte kJncShortOpcode
static const int kLengthOffset
Definition: objects.h:2295
static const int kSlowCaseBitFieldMask
Definition: ic.h:508
KeyedStoreIncrementLength
Definition: ic.h:640
static void GenerateNormal(MacroAssembler *masm, int argc)
static void GeneratePositiveLookup(MacroAssembler *masm, Label *miss, Label *done, Register elements, Register name, Register r0, Register r1)
InlinedSmiCheck
Definition: ic.h:853
const Register kScratchRegister
static void GenerateMonomorphicCacheProbe(MacroAssembler *masm, int argc, Code::Kind kind, Code::ExtraICState extra_state)
static void GenerateString(MacroAssembler *masm)
const int kSmiTagSize
Definition: v8.h:4015
const Register r8
const Register rcx
Condition NegateCondition(Condition cond)
static void GenerateNonStrictArguments(MacroAssembler *masm)
static void GenerateMegamorphic(MacroAssembler *masm, int argc, Code::ExtraICState extra_ic_state)
static void GenerateNormal(MacroAssembler *masm)
static void GenerateFunctionPrototype(MacroAssembler *masm)
static void GenerateIndexedInterceptor(MacroAssembler *masm)
activate correct semantics for inheriting readonliness enable harmony semantics for typeof enable harmony enable harmony proxies enable all harmony harmony_scoping harmony_proxies harmony_scoping tracks arrays with only smi values automatically unbox arrays of doubles use crankshaft use hydrogen range analysis use hydrogen global value numbering use function inlining maximum number of AST nodes considered for a single inlining loop invariant code motion print statistics for hydrogen trace generated IR for specified phases trace register allocator trace range analysis trace representation types environment for every instruction put a break point before deoptimizing polymorphic inlining perform array bounds checks elimination use dead code elimination trace on stack replacement optimize closures cache optimized code for closures functions with arguments object loop weight for representation inference allow uint32 values on optimize frames if they are used only in safe operations track parallel recompilation enable all profiler experiments number of stack frames inspected by the profiler call recompile stub directly when self optimizing trigger profiler ticks based on counting instead of timing weight back edges by jump distance for interrupt triggering percentage of ICs that must have type info to allow optimization watch_ic_patching retry_self_opt interrupt_at_exit extra verbose compilation tracing generate extra emit comments in code disassembly enable use of SSE3 instructions if available enable use of CMOV instruction if available enable use of SAHF instruction if enable use of VFP3 instructions if available this implies enabling ARMv7 and VFP2 enable use of VFP2 instructions if available enable use of SDIV and UDIV instructions if enable loading bit constant by means of movw movt instruction enable unaligned accesses for enable use of MIPS FPU instructions if NULL
Definition: flags.cc:301
void set_target(Code *code)
Definition: ic.h:149
static const byte kJnzShortOpcode
const Register no_reg
const uint32_t kSymbolTag
Definition: objects.h:464
static const int kCapacityMask
Definition: heap.h:2349
static void GenerateNonStrictArguments(MacroAssembler *masm)
static void GenerateMiss(MacroAssembler *masm, bool force_generic)
static void GenerateMiss(MacroAssembler *masm)
static void GenerateTransitionElementsSmiToDouble(MacroAssembler *masm)
static const int kHashShift
Definition: objects.h:7341
static void GenerateNonStrictArguments(MacroAssembler *masm, int argc)
static void GenerateArrayLength(MacroAssembler *masm)
static const char * GetStateName(State state)
Definition: ic.cc:2586
activate correct semantics for inheriting readonliness enable harmony semantics for typeof enable harmony enable harmony proxies enable all harmony harmony_scoping harmony_proxies harmony_scoping tracks arrays with only smi values automatically unbox arrays of doubles use crankshaft use hydrogen range analysis use hydrogen global value numbering use function inlining maximum number of AST nodes considered for a single inlining loop invariant code motion print statistics for hydrogen trace generated IR for specified phases trace register allocator trace range analysis trace representation types environment for every instruction put a break point before deoptimizing polymorphic inlining perform array bounds checks elimination use dead code elimination trace on stack replacement optimize closures cache optimized code for closures functions with arguments object loop weight for representation inference allow uint32 values on optimize frames if they are used only in safe operations track parallel recompilation enable all profiler experiments number of stack frames inspected by the profiler call recompile stub directly when self optimizing trigger profiler ticks based on counting instead of timing weight back edges by jump distance for interrupt triggering percentage of ICs that must have type info to allow optimization watch_ic_patching retry_self_opt interrupt_at_exit extra verbose compilation tracing generate extra emit comments in code disassembly enable use of SSE3 instructions if available enable use of CMOV instruction if available enable use of SAHF instruction if enable use of VFP3 instructions if available this implies enabling ARMv7 and VFP2 enable use of VFP2 instructions if available enable use of SDIV and UDIV instructions if enable loading bit constant by means of movw movt instruction enable unaligned accesses for enable use of MIPS FPU instructions if expose natives in global object expose gc extension number of stack frames to capture disable builtin 
natives files print a stack trace if an assertion failure occurs use random jit cookie to mask large constants trace lazy optimization use adaptive optimizations prepare for turning on always opt minimum length for automatic enable preparsing maximum number of optimization attempts before giving up cache prototype transitions automatically set the debug break flag when debugger commands are in the queue always cause a debug break before aborting maximum length of function source code printed in a stack trace max size of the new max size of the old max size of executable always perform global GCs print one trace line following each garbage collection do not print trace line after scavenger collection print more details following each garbage collection print amount of external allocated memory after each time it is adjusted flush code that we expect not to use again before full gc do incremental marking steps track object counts and memory usage use caching Perform compaction on every full GC Never perform compaction on full GC testing only Compact code space on full incremental collections Default seed for initializing random allows verbose printing trace parsing and preparsing Check icache flushes in ARM and MIPS simulator Stack alingment in bytes in print stack trace when throwing exceptions randomize hashes to avoid predictable hash Fixed seed to use to hash property activate a timer that switches between V8 threads testing_bool_flag float flag Seed used for threading test randomness A filename with extra code to be included in the Print usage including flags
Definition: flags.cc:495
void check(i::Vector< const char > string)
static const int kHashMask
Definition: heap.h:2351
static void GenerateMiss(MacroAssembler *masm, int argc)
Definition: ic.h:311
static const int kInstanceTypeOffset
Definition: objects.h:5158
static const int kEntriesPerBucket
Definition: heap.h:2352
static void GenerateNormal(MacroAssembler *masm)
const XMMRegister xmm0
static void GenerateMiss(MacroAssembler *masm, bool force_generic)