v8 3.11.10 (node 0.8.26)
V8 is Google's open source JavaScript engine.
ic-x64.cc
// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "v8.h"

#if defined(V8_TARGET_ARCH_X64)

#include "codegen.h"
#include "ic-inl.h"
#include "runtime.h"
#include "stub-cache.h"

namespace v8 {
namespace internal {

// ----------------------------------------------------------------------------
// Static IC stub generators.
//

#define __ ACCESS_MASM(masm)
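
// Each "__ op(...)" statement below expands via ACCESS_MASM to
// masm->op(...), i.e. it assembles one x64 instruction into the IC stub
// being generated.
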

static void GenerateGlobalInstanceTypeCheck(MacroAssembler* masm,
                                            Register type,
                                            Label* global_object) {
  // Register usage:
  //   type: holds the receiver instance type on entry.
  __ cmpb(type, Immediate(JS_GLOBAL_OBJECT_TYPE));
  __ j(equal, global_object);
  __ cmpb(type, Immediate(JS_BUILTINS_OBJECT_TYPE));
  __ j(equal, global_object);
  __ cmpb(type, Immediate(JS_GLOBAL_PROXY_TYPE));
  __ j(equal, global_object);
}

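// Global objects are filtered out of the dictionary fast paths below
// because their named properties are stored indirectly (behind property
// cells), so a raw dictionary probe would not yield the property value
// itself; such receivers take the miss path instead.
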
// Generated code falls through if the receiver is a regular non-global
// JS object with slow properties and no interceptors.
static void GenerateStringDictionaryReceiverCheck(MacroAssembler* masm,
                                                  Register receiver,
                                                  Register r0,
                                                  Register r1,
                                                  Label* miss) {
  // Register usage:
  //   receiver: holds the receiver on entry and is unchanged.
  //   r0: used to hold receiver instance type.
  //       Holds the property dictionary on fall through.
  //   r1: used to hold receiver's map.

  __ JumpIfSmi(receiver, miss);

  // Check that the receiver is a valid JS object.
  __ movq(r1, FieldOperand(receiver, HeapObject::kMapOffset));
  __ movb(r0, FieldOperand(r1, Map::kInstanceTypeOffset));
  __ cmpb(r0, Immediate(FIRST_SPEC_OBJECT_TYPE));
  __ j(below, miss);

  // If this assert fails, we have to check upper bound too.
  STATIC_ASSERT(LAST_TYPE == LAST_SPEC_OBJECT_TYPE);

  GenerateGlobalInstanceTypeCheck(masm, r0, miss);

  // Check for non-global object that requires access check.
  __ testb(FieldOperand(r1, Map::kBitFieldOffset),
           Immediate((1 << Map::kIsAccessCheckNeeded) |
                     (1 << Map::kHasNamedInterceptor)));
  __ j(not_zero, miss);

  __ movq(r0, FieldOperand(receiver, JSObject::kPropertiesOffset));
  __ CompareRoot(FieldOperand(r0, HeapObject::kMapOffset),
                 Heap::kHashTableMapRootIndex);
  __ j(not_equal, miss);
}

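// A StringDictionary stores its entries as (key, value, details) triples in
// a FixedArray after a fixed-size header, roughly:
//   key     : elements[kElementsStartIndex + 3 * i]
//   value   : elements[kElementsStartIndex + 3 * i + 1]
//   details : elements[kElementsStartIndex + 3 * i + 2]   (a smi bit field)
// This layout is where the kValueOffset (+1 word) and kDetailsOffset
// (+2 words) constants in the helpers below come from.
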
// Helper function used to load a property from a dictionary backing
// storage. This function may return false negatives, so miss_label
// must always call a backup property load that is complete.
// This function is safe to call if name is not a symbol, and will jump to
// the miss_label in that case.
// The generated code assumes that the receiver has slow properties,
// is not a global object and does not have interceptors.
static void GenerateDictionaryLoad(MacroAssembler* masm,
                                   Label* miss_label,
                                   Register elements,
                                   Register name,
                                   Register r0,
                                   Register r1,
                                   Register result) {
  // Register use:
  //
  // elements - holds the property dictionary on entry and is unchanged.
  //
  // name - holds the name of the property on entry and is unchanged.
  //
  // r0 - used to hold the capacity of the property dictionary.
  //
  // r1 - used to hold the index into the property dictionary.
  //
  // result - holds the result on exit if the load succeeded.

  Label done;

  // Probe the dictionary.
  StringDictionaryLookupStub::GeneratePositiveLookup(masm,
                                                     miss_label,
                                                     &done,
                                                     elements,
                                                     name,
                                                     r0,
                                                     r1);

  // If probing finds an entry in the dictionary, r1 contains the
  // index into the dictionary. Check that the value is a normal
  // property.
  __ bind(&done);
  const int kElementsStartOffset =
      StringDictionary::kHeaderSize +
      StringDictionary::kElementsStartIndex * kPointerSize;
  const int kDetailsOffset = kElementsStartOffset + 2 * kPointerSize;
  __ Test(Operand(elements, r1, times_pointer_size,
                  kDetailsOffset - kHeapObjectTag),
          Smi::FromInt(PropertyDetails::TypeField::kMask));
  __ j(not_zero, miss_label);

  // Get the value at the masked, scaled index.
  const int kValueOffset = kElementsStartOffset + kPointerSize;
  __ movq(result,
          Operand(elements, r1, times_pointer_size,
                  kValueOffset - kHeapObjectTag));
}

// Helper function used to store a property to a dictionary backing
// storage. This function may fail to store a property even though it
// is in the dictionary, so code at miss_label must always call a
// backup property store that is complete. This function is safe to
// call if name is not a symbol, and will jump to the miss_label in
// that case. The generated code assumes that the receiver has slow
// properties, is not a global object and does not have interceptors.
static void GenerateDictionaryStore(MacroAssembler* masm,
                                    Label* miss_label,
                                    Register elements,
                                    Register name,
                                    Register value,
                                    Register scratch0,
                                    Register scratch1) {
  // Register use:
  //
  // elements - holds the property dictionary on entry and is clobbered.
  //
  // name - holds the name of the property on entry and is unchanged.
  //
  // value - holds the value to store and is unchanged.
  //
  // scratch0 - used to hold the capacity of the property dictionary and is
  //            clobbered.
  //
  // scratch1 - used to hold the index into the property dictionary and is
  //            clobbered.
  Label done;

  // Probe the dictionary.
  StringDictionaryLookupStub::GeneratePositiveLookup(masm,
                                                     miss_label,
                                                     &done,
                                                     elements,
                                                     name,
                                                     scratch0,
                                                     scratch1);

  // If probing finds an entry in the dictionary, scratch1 contains the
  // index into the dictionary. Check that the value is a normal
  // property that is not read only.
  __ bind(&done);
  const int kElementsStartOffset =
      StringDictionary::kHeaderSize +
      StringDictionary::kElementsStartIndex * kPointerSize;
  const int kDetailsOffset = kElementsStartOffset + 2 * kPointerSize;
  const int kTypeAndReadOnlyMask =
      (PropertyDetails::TypeField::kMask |
       PropertyDetails::AttributesField::encode(READ_ONLY)) << kSmiTagSize;
  __ Test(Operand(elements,
                  scratch1,
                  times_pointer_size,
                  kDetailsOffset - kHeapObjectTag),
          Smi::FromInt(kTypeAndReadOnlyMask));
  __ j(not_zero, miss_label);

  // Store the value at the masked, scaled index.
  const int kValueOffset = kElementsStartOffset + kPointerSize;
  __ lea(scratch1, Operand(elements,
                           scratch1,
                           times_pointer_size,
                           kValueOffset - kHeapObjectTag));
  __ movq(Operand(scratch1, 0), value);
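
  // The heap store above needs a write barrier so that the GC's store
  // buffer and incremental marker see the updated slot (scratch1 holds the
  // slot address; the value is copied to scratch0 to keep it intact).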
  // Update write barrier. Make sure not to clobber the value.
  __ movq(scratch0, value);
  __ RecordWrite(elements, scratch1, scratch0, kDontSaveFPRegs);
}

void LoadIC::GenerateArrayLength(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax    : receiver
  //  -- rcx    : name
  //  -- rsp[0] : return address
  // -----------------------------------
  Label miss;

  StubCompiler::GenerateLoadArrayLength(masm, rax, rdx, &miss);
  __ bind(&miss);
  StubCompiler::GenerateLoadMiss(masm, Code::LOAD_IC);
}


void LoadIC::GenerateStringLength(MacroAssembler* masm, bool support_wrappers) {
  // ----------- S t a t e -------------
  //  -- rax    : receiver
  //  -- rcx    : name
  //  -- rsp[0] : return address
  // -----------------------------------
  Label miss;

  StubCompiler::GenerateLoadStringLength(masm, rax, rdx, rbx, &miss,
                                         support_wrappers);
  __ bind(&miss);
  StubCompiler::GenerateLoadMiss(masm, Code::LOAD_IC);
}


void LoadIC::GenerateFunctionPrototype(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax    : receiver
  //  -- rcx    : name
  //  -- rsp[0] : return address
  // -----------------------------------
  Label miss;

  StubCompiler::GenerateLoadFunctionPrototype(masm, rax, rdx, rbx, &miss);
  __ bind(&miss);
  StubCompiler::GenerateLoadMiss(masm, Code::LOAD_IC);
}

// Checks the receiver for special cases (value type, slow case bits).
// Falls through for regular JS object.
static void GenerateKeyedLoadReceiverCheck(MacroAssembler* masm,
                                           Register receiver,
                                           Register map,
                                           int interceptor_bit,
                                           Label* slow) {
  // Register use:
  //   receiver - holds the receiver and is unchanged.
  // Scratch registers:
  //   map - used to hold the map of the receiver.

  // Check that the object isn't a smi.
  __ JumpIfSmi(receiver, slow);

  // Check that the object is some kind of JS object EXCEPT JS Value type.
  // In the case that the object is a value-wrapper object,
  // we enter the runtime system to make sure that indexing
  // into string objects works as intended.
  ASSERT(JS_OBJECT_TYPE > JS_VALUE_TYPE);
  __ CmpObjectType(receiver, JS_OBJECT_TYPE, map);
  __ j(below, slow);

  // Check bit field.
  __ testb(FieldOperand(map, Map::kBitFieldOffset),
           Immediate((1 << Map::kIsAccessCheckNeeded) |
                     (1 << interceptor_bit)));
  __ j(not_zero, slow);
}

// Loads an indexed element from a fast case array.
// If not_fast_array is NULL, doesn't perform the elements map check.
static void GenerateFastArrayLoad(MacroAssembler* masm,
                                  Register receiver,
                                  Register key,
                                  Register elements,
                                  Register scratch,
                                  Register result,
                                  Label* not_fast_array,
                                  Label* out_of_range) {
  // Register use:
  //
  // receiver - holds the receiver on entry.
  //            Unchanged unless 'result' is the same register.
  //
  // key - holds the smi key on entry.
  //       Unchanged unless 'result' is the same register.
  //
  // elements - holds the elements of the receiver on exit.
  //
  // result - holds the result on exit if the load succeeded.
  //          Allowed to be the same as 'receiver' or 'key'.
  //          Unchanged on bailout so 'receiver' and 'key' can be safely
  //          used by further computation.
  //
  // Scratch registers:
  //
  // scratch - used to hold elements of the receiver and the loaded value.

  __ movq(elements, FieldOperand(receiver, JSObject::kElementsOffset));
  if (not_fast_array != NULL) {
    // Check that the object is in fast mode and writable.
    __ CompareRoot(FieldOperand(elements, HeapObject::kMapOffset),
                   Heap::kFixedArrayMapRootIndex);
    __ j(not_equal, not_fast_array);
  } else {
    __ AssertFastElements(elements);
  }
  // Check that the key (index) is within bounds.
  __ SmiCompare(key, FieldOperand(elements, FixedArray::kLengthOffset));
  // Unsigned comparison rejects negative indices.
  __ j(above_equal, out_of_range);
  // Fast case: Do the load.
  SmiIndex index = masm->SmiToIndex(scratch, key, kPointerSizeLog2);
  __ movq(scratch, FieldOperand(elements,
                                index.reg,
                                index.scale,
                                FixedArray::kHeaderSize));
  __ CompareRoot(scratch, Heap::kTheHoleValueRootIndex);
  // In case the loaded value is the_hole we have to consult GetProperty
  // to ensure the prototype chain is searched.
  __ j(equal, out_of_range);
  if (!result.is(scratch)) {
    __ movq(result, scratch);
  }
}
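
// On x64 a smi keeps its 32-bit integer payload in the upper half of the
// 64-bit word (the low bits hold the zero tag), so SmiToIndex above
// extracts the integer and returns it pre-scaled as a register + scale
// factor ready for the FieldOperand addressing mode.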

// Checks whether a key is an array index string or a symbol string.
// Falls through if the key is a symbol.
static void GenerateKeyStringCheck(MacroAssembler* masm,
                                   Register key,
                                   Register map,
                                   Register hash,
                                   Label* index_string,
                                   Label* not_symbol) {
  // Register use:
  //   key - holds the key and is unchanged. Assumed to be non-smi.
  // Scratch registers:
  //   map - used to hold the map of the key.
  //   hash - used to hold the hash of the key.
  __ CmpObjectType(key, FIRST_NONSTRING_TYPE, map);
  __ j(above_equal, not_symbol);
  // Is the string an array index, with cached numeric value?
  __ movl(hash, FieldOperand(key, String::kHashFieldOffset));
  __ testl(hash, Immediate(String::kContainsCachedArrayIndexMask));
  __ j(zero, index_string);  // The value in hash is used at jump target.

  // Is the string a symbol?
  STATIC_ASSERT(kSymbolTag != 0);
  __ testb(FieldOperand(map, Map::kInstanceTypeOffset),
           Immediate(kIsSymbolMask));
  __ j(zero, not_symbol);
}
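
// Strings that are small array indices (e.g. the key "7") cache their
// numeric value inside the hash field; the check above lets callers
// recover that integer via IndexFromHash and rejoin the smi-key fast path
// instead of doing a string-keyed lookup.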


void KeyedLoadIC::GenerateGeneric(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax    : key
  //  -- rdx    : receiver
  //  -- rsp[0] : return address
  // -----------------------------------
  Label slow, check_string, index_smi, index_string, property_array_property;
  Label probe_dictionary, check_number_dictionary;

  // Check that the key is a smi.
  __ JumpIfNotSmi(rax, &check_string);
  __ bind(&index_smi);
  // Now the key is known to be a smi. This place is also jumped to from below
  // where a numeric string is converted to a smi.

  GenerateKeyedLoadReceiverCheck(
      masm, rdx, rcx, Map::kHasIndexedInterceptor, &slow);

  // Check the receiver's map to see if it has fast elements.
  __ CheckFastElements(rcx, &check_number_dictionary);

  GenerateFastArrayLoad(masm,
                        rdx,
                        rax,
                        rcx,
                        rbx,
                        rax,
                        NULL,
                        &slow);
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->keyed_load_generic_smi(), 1);
  __ ret(0);

  __ bind(&check_number_dictionary);
  __ SmiToInteger32(rbx, rax);
  __ movq(rcx, FieldOperand(rdx, JSObject::kElementsOffset));

  // Check whether the elements object is a number dictionary.
  // rdx: receiver
  // rax: key
  // rbx: key as untagged int32
  // rcx: elements
  __ CompareRoot(FieldOperand(rcx, HeapObject::kMapOffset),
                 Heap::kHashTableMapRootIndex);
  __ j(not_equal, &slow);
  __ LoadFromNumberDictionary(&slow, rcx, rax, rbx, r9, rdi, rax);
  __ ret(0);

  __ bind(&slow);
  // Slow case: Jump to runtime.
  // rdx: receiver
  // rax: key
  __ IncrementCounter(counters->keyed_load_generic_slow(), 1);
  GenerateRuntimeGetProperty(masm);

  __ bind(&check_string);
  GenerateKeyStringCheck(masm, rax, rcx, rbx, &index_string, &slow);

  GenerateKeyedLoadReceiverCheck(
      masm, rdx, rcx, Map::kHasNamedInterceptor, &slow);

  // If the receiver is a fast-case object, check the keyed lookup
  // cache. Otherwise probe the dictionary leaving result in rcx.
  __ movq(rbx, FieldOperand(rdx, JSObject::kPropertiesOffset));
  __ CompareRoot(FieldOperand(rbx, HeapObject::kMapOffset),
                 Heap::kHashTableMapRootIndex);
  __ j(equal, &probe_dictionary);

  // Load the map of the receiver, compute the keyed lookup cache hash
  // based on 32 bits of the map pointer and the string hash.
  __ movq(rbx, FieldOperand(rdx, HeapObject::kMapOffset));
  __ movl(rcx, rbx);
  __ shr(rcx, Immediate(KeyedLookupCache::kMapHashShift));
  __ movl(rdi, FieldOperand(rax, String::kHashFieldOffset));
  __ shr(rdi, Immediate(String::kHashShift));
  __ xor_(rcx, rdi);
  int mask = (KeyedLookupCache::kCapacityMask & KeyedLookupCache::kHashMask);
  __ and_(rcx, Immediate(mask));
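
  // The keyed lookup cache maps (receiver map, symbol) pairs to property
  // field offsets, organized as buckets of kEntriesPerBucket entries.
  // rcx now holds the bucket index derived from the map bits XORed with
  // the string hash; the probes below compare each entry's map and key.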

  // Load the key (consisting of map and symbol) from the cache and
  // check for match.
  Label load_in_object_property;
  static const int kEntriesPerBucket = KeyedLookupCache::kEntriesPerBucket;
  Label hit_on_nth_entry[kEntriesPerBucket];
  ExternalReference cache_keys
      = ExternalReference::keyed_lookup_cache_keys(masm->isolate());

  for (int i = 0; i < kEntriesPerBucket - 1; i++) {
    Label try_next_entry;
    __ movq(rdi, rcx);
    __ shl(rdi, Immediate(kPointerSizeLog2 + 1));
    __ LoadAddress(kScratchRegister, cache_keys);
    int off = kPointerSize * i * 2;
    __ cmpq(rbx, Operand(kScratchRegister, rdi, times_1, off));
    __ j(not_equal, &try_next_entry);
    __ cmpq(rax, Operand(kScratchRegister, rdi, times_1, off + kPointerSize));
    __ j(equal, &hit_on_nth_entry[i]);
    __ bind(&try_next_entry);
  }

  int off = kPointerSize * (kEntriesPerBucket - 1) * 2;
  __ cmpq(rbx, Operand(kScratchRegister, rdi, times_1, off));
  __ j(not_equal, &slow);
  __ cmpq(rax, Operand(kScratchRegister, rdi, times_1, off + kPointerSize));
  __ j(not_equal, &slow);

  // Get field offset, which is a 32-bit integer.
  ExternalReference cache_field_offsets
      = ExternalReference::keyed_lookup_cache_field_offsets(masm->isolate());

  // Hit on nth entry.
  for (int i = kEntriesPerBucket - 1; i >= 0; i--) {
    __ bind(&hit_on_nth_entry[i]);
    if (i != 0) {
      __ addl(rcx, Immediate(i));
    }
    __ LoadAddress(kScratchRegister, cache_field_offsets);
    __ movl(rdi, Operand(kScratchRegister, rcx, times_4, 0));
    __ movzxbq(rcx, FieldOperand(rbx, Map::kInObjectPropertiesOffset));
    __ subq(rdi, rcx);
    __ j(above_equal, &property_array_property);
    if (i != 0) {
      __ jmp(&load_in_object_property);
    }
  }

  // Load in-object property.
  __ bind(&load_in_object_property);
  __ movzxbq(rcx, FieldOperand(rbx, Map::kInstanceSizeOffset));
  __ addq(rcx, rdi);
  __ movq(rax, FieldOperand(rdx, rcx, times_pointer_size, 0));
  __ IncrementCounter(counters->keyed_load_generic_lookup_cache(), 1);
  __ ret(0);

  // Load property array property.
  __ bind(&property_array_property);
  __ movq(rax, FieldOperand(rdx, JSObject::kPropertiesOffset));
  __ movq(rax, FieldOperand(rax, rdi, times_pointer_size,
                            FixedArray::kHeaderSize));
  __ IncrementCounter(counters->keyed_load_generic_lookup_cache(), 1);
  __ ret(0);

  // Do a quick inline probe of the receiver's dictionary, if it
  // exists.
  __ bind(&probe_dictionary);
  // rdx: receiver
  // rax: key
  // rbx: elements

  __ movq(rcx, FieldOperand(rdx, HeapObject::kMapOffset));
  __ movb(rcx, FieldOperand(rcx, Map::kInstanceTypeOffset));
  GenerateGlobalInstanceTypeCheck(masm, rcx, &slow);

  GenerateDictionaryLoad(masm, &slow, rbx, rax, rcx, rdi, rax);
  __ IncrementCounter(counters->keyed_load_generic_symbol(), 1);
  __ ret(0);

  __ bind(&index_string);
  __ IndexFromHash(rbx, rax);
  __ jmp(&index_smi);
}
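
// A call site ends up in the generic stub above once its keyed loads stop
// being monomorphic, e.g. (illustrative)
//   function get(o, k) { return o[k]; }
// invoked with arrays, strings and dictionary-mode objects: smi keys take
// the fast-elements path, string keys go through the lookup cache or the
// property dictionary, and everything else falls back to the runtime.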


void KeyedLoadIC::GenerateString(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax    : key
  //  -- rdx    : receiver
  //  -- rsp[0] : return address
  // -----------------------------------
  Label miss;

  Register receiver = rdx;
  Register index = rax;
  Register scratch = rcx;
  Register result = rax;

  StringCharAtGenerator char_at_generator(receiver,
                                          index,
                                          scratch,
                                          result,
                                          &miss,  // When not a string.
                                          &miss,  // When not a number.
                                          &miss,  // When index out of range.
                                          STRING_INDEX_IS_ARRAY_INDEX);
  char_at_generator.GenerateFast(masm);
  __ ret(0);

  StubRuntimeCallHelper call_helper;
  char_at_generator.GenerateSlow(masm, call_helper);

  __ bind(&miss);
  GenerateMiss(masm, false);
}

void KeyedLoadIC::GenerateIndexedInterceptor(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax    : key
  //  -- rdx    : receiver
  //  -- rsp[0] : return address
  // -----------------------------------
  Label slow;

  // Check that the receiver isn't a smi.
  __ JumpIfSmi(rdx, &slow);

  // Check that the key is an array index, that is Uint32.
  STATIC_ASSERT(kSmiValueSize <= 32);
  __ JumpUnlessNonNegativeSmi(rax, &slow);

  // Get the map of the receiver.
  __ movq(rcx, FieldOperand(rdx, HeapObject::kMapOffset));

  // Check that it has indexed interceptor and access checks
  // are not enabled for this object.
  __ movb(rcx, FieldOperand(rcx, Map::kBitFieldOffset));
  __ andb(rcx, Immediate(kSlowCaseBitFieldMask));
  __ cmpb(rcx, Immediate(1 << Map::kHasIndexedInterceptor));
  __ j(not_zero, &slow);

  // Everything is fine, call runtime.
  __ pop(rcx);
  __ push(rdx);  // receiver
  __ push(rax);  // key
  __ push(rcx);  // return address

  // Perform tail call to the entry.
  __ TailCallExternalReference(
      ExternalReference(IC_Utility(kKeyedLoadPropertyWithInterceptor),
                        masm->isolate()),
      2,
      1);

  __ bind(&slow);
  GenerateMiss(masm, false);
}

void KeyedStoreIC::GenerateGeneric(MacroAssembler* masm,
                                   StrictModeFlag strict_mode) {
  // ----------- S t a t e -------------
  //  -- rax    : value
  //  -- rcx    : key
  //  -- rdx    : receiver
  //  -- rsp[0] : return address
  // -----------------------------------
  Label slow, slow_with_tagged_index, fast, array, extra, check_extra_double;
  Label fast_object_with_map_check, fast_object_without_map_check;
  Label fast_double_with_map_check, fast_double_without_map_check;
  Label transition_smi_elements, finish_object_store, non_double_value;
  Label transition_double_elements;
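
  // Note: two slow-path labels are needed because the smi key in rcx is
  // untagged early on. Code at &slow re-tags the index and then falls
  // into &slow_with_tagged_index, which expects the key as a smi.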

  // Check that the object isn't a smi.
  __ JumpIfSmi(rdx, &slow_with_tagged_index);
  // Get the map from the receiver.
  __ movq(r9, FieldOperand(rdx, HeapObject::kMapOffset));
  // Check that the receiver does not require access checks. We need
  // to do this because this generic stub does not perform map checks.
  __ testb(FieldOperand(r9, Map::kBitFieldOffset),
           Immediate(1 << Map::kIsAccessCheckNeeded));
  __ j(not_zero, &slow_with_tagged_index);
  // Check that the key is a smi.
  __ JumpIfNotSmi(rcx, &slow_with_tagged_index);
  __ SmiToInteger32(rcx, rcx);

  __ CmpInstanceType(r9, JS_ARRAY_TYPE);
  __ j(equal, &array);
  // Check that the object is some kind of JSObject.
  __ CmpInstanceType(r9, FIRST_JS_OBJECT_TYPE);
  __ j(below, &slow);

  // Object case: Check key against length in the elements array.
  // rax: value
  // rdx: JSObject
  // rcx: index
  __ movq(rbx, FieldOperand(rdx, JSObject::kElementsOffset));
  // Check array bounds.
  __ SmiCompareInteger32(FieldOperand(rbx, FixedArray::kLengthOffset), rcx);
  // rax: value
  // rbx: FixedArray
  // rcx: index
  __ j(above, &fast_object_with_map_check);

  // Slow case: call runtime.
  __ bind(&slow);
  __ Integer32ToSmi(rcx, rcx);
  __ bind(&slow_with_tagged_index);
  GenerateRuntimeSetProperty(masm, strict_mode);
  // Never returns to here.

  // Extra capacity case: Check if there is extra capacity to
  // perform the store and update the length. Used for adding one
  // element to the array by writing to array[array.length].
  __ bind(&extra);
  // rax: value
  // rdx: receiver (a JSArray)
  // rbx: receiver's elements array (a FixedArray)
  // rcx: index
  // flags: smicompare (rdx.length(), rbx)
  __ j(not_equal, &slow);  // do not leave holes in the array
  __ SmiCompareInteger32(FieldOperand(rbx, FixedArray::kLengthOffset), rcx);
  __ j(below_equal, &slow);
  // Increment index to get new length.
  __ movq(rdi, FieldOperand(rbx, HeapObject::kMapOffset));
  __ CompareRoot(rdi, Heap::kFixedArrayMapRootIndex);
  __ j(not_equal, &check_extra_double);
  __ leal(rdi, Operand(rcx, 1));
  __ Integer32ToSmiField(FieldOperand(rdx, JSArray::kLengthOffset), rdi);
  __ jmp(&fast_object_without_map_check);

  __ bind(&check_extra_double);
  // rdi: elements array's map
  __ CompareRoot(rdi, Heap::kFixedDoubleArrayMapRootIndex);
  __ j(not_equal, &slow);
  __ leal(rdi, Operand(rcx, 1));
  __ Integer32ToSmiField(FieldOperand(rdx, JSArray::kLengthOffset), rdi);
  __ jmp(&fast_double_without_map_check);

  // Array case: Get the length and the elements array from the JS
  // array. Check that the array is in fast mode (and writable); if it
  // is the length is always a smi.
  __ bind(&array);
  // rax: value
  // rdx: receiver (a JSArray)
  // rcx: index
  __ movq(rbx, FieldOperand(rdx, JSObject::kElementsOffset));

  // Check the key against the length in the array, compute the
  // address to store into and fall through to fast case.
  __ SmiCompareInteger32(FieldOperand(rdx, JSArray::kLengthOffset), rcx);
  __ j(below_equal, &extra);

  // Fast case: Do the store.
  __ bind(&fast_object_with_map_check);
  // rax: value
  // rbx: receiver's elements array (a FixedArray)
  // rcx: index
  // rdx: receiver (a JSArray)
  __ movq(rdi, FieldOperand(rbx, HeapObject::kMapOffset));
  __ CompareRoot(rdi, Heap::kFixedArrayMapRootIndex);
  __ j(not_equal, &fast_double_with_map_check);
  __ bind(&fast_object_without_map_check);
  // Smi stores don't require further checks.
  Label non_smi_value;
  __ JumpIfNotSmi(rax, &non_smi_value);
  // It's irrelevant whether array is smi-only or not when writing a smi.
  __ movq(FieldOperand(rbx, rcx, times_pointer_size, FixedArray::kHeaderSize),
          rax);
  __ ret(0);

  __ bind(&non_smi_value);
  // Writing a non-smi, check whether array allows non-smi elements.
  // r9: receiver's map
  __ CheckFastObjectElements(r9, &transition_smi_elements);
  __ bind(&finish_object_store);
  __ movq(FieldOperand(rbx, rcx, times_pointer_size, FixedArray::kHeaderSize),
          rax);
  __ movq(rdx, rax);  // Preserve the value which is returned.
  __ RecordWriteArray(
      rbx, rdx, rcx, kDontSaveFPRegs, EMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
  __ ret(0);

  __ bind(&fast_double_with_map_check);
  // Check for fast double array case. If this fails, call through to the
  // runtime.
  // rdi: elements array's map
  __ CompareRoot(rdi, Heap::kFixedDoubleArrayMapRootIndex);
  __ j(not_equal, &slow);
  __ bind(&fast_double_without_map_check);
  // If the value is a number, store it as a double in the FastDoubleElements
  // array.
  __ StoreNumberToDoubleElements(rax, rbx, rcx, xmm0,
                                 &transition_double_elements);
  __ ret(0);
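
  // Elements-kind transitions triggered by this store only move "forward":
  //   FAST_SMI_ELEMENTS    -> FAST_DOUBLE_ELEMENTS  (storing a heap number)
  //   FAST_SMI_ELEMENTS    -> FAST_ELEMENTS         (storing another object)
  //   FAST_DOUBLE_ELEMENTS -> FAST_ELEMENTS         (storing a non-number)
  // For example (illustrative): `var a = [1, 2]; a[0] = 1.5;` takes the
  // first transition, and a later `a[0] = "x";` takes the third.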

  __ bind(&transition_smi_elements);
  __ movq(rbx, FieldOperand(rdx, HeapObject::kMapOffset));

  // Transition the array appropriately depending on the value type.
  __ movq(r9, FieldOperand(rax, HeapObject::kMapOffset));
  __ CompareRoot(r9, Heap::kHeapNumberMapRootIndex);
  __ j(not_equal, &non_double_value);

  // Value is a double. Transition FAST_SMI_ELEMENTS ->
  // FAST_DOUBLE_ELEMENTS and complete the store.
  __ LoadTransitionedArrayMapConditional(FAST_SMI_ELEMENTS,
                                         FAST_DOUBLE_ELEMENTS,
                                         rbx,
                                         rdi,
                                         &slow);
  ElementsTransitionGenerator::GenerateSmiToDouble(masm, &slow);
  __ movq(rbx, FieldOperand(rdx, JSObject::kElementsOffset));
  __ jmp(&fast_double_without_map_check);

  __ bind(&non_double_value);
  // Value is not a double, FAST_SMI_ELEMENTS -> FAST_ELEMENTS.
  __ LoadTransitionedArrayMapConditional(FAST_SMI_ELEMENTS,
                                         FAST_ELEMENTS,
                                         rbx,
                                         rdi,
                                         &slow);
  ElementsTransitionGenerator::GenerateMapChangeElementsTransition(masm);
  __ movq(rbx, FieldOperand(rdx, JSObject::kElementsOffset));
  __ jmp(&finish_object_store);

  __ bind(&transition_double_elements);
  // Elements are FAST_DOUBLE_ELEMENTS, but value is an Object that's not a
  // HeapNumber. Make sure that the receiver is an Array with FAST_ELEMENTS
  // and transition the array from FAST_DOUBLE_ELEMENTS to FAST_ELEMENTS.
  __ movq(rbx, FieldOperand(rdx, HeapObject::kMapOffset));
  __ LoadTransitionedArrayMapConditional(FAST_DOUBLE_ELEMENTS,
                                         FAST_ELEMENTS,
                                         rbx,
                                         rdi,
                                         &slow);
  ElementsTransitionGenerator::GenerateDoubleToObject(masm, &slow);
  __ movq(rbx, FieldOperand(rdx, JSObject::kElementsOffset));
  __ jmp(&finish_object_store);
}

// The generated code does not accept smi keys.
// The generated code falls through if both probes miss.
void CallICBase::GenerateMonomorphicCacheProbe(MacroAssembler* masm,
                                               int argc,
                                               Code::Kind kind,
                                               Code::ExtraICState extra_state) {
  // ----------- S t a t e -------------
  // rcx : function name
  // rdx : receiver
  // -----------------------------------
  Label number, non_number, non_string, boolean, probe, miss;

  // Probe the stub cache.
  Code::Flags flags = Code::ComputeFlags(kind,
                                         MONOMORPHIC,
                                         extra_state,
                                         NORMAL,
                                         argc);
  Isolate::Current()->stub_cache()->GenerateProbe(masm, flags, rdx, rcx, rbx,
                                                  rax);

  // If the stub cache probing failed, the receiver might be a value.
  // For value objects, we use the map of the prototype objects for
  // the corresponding JSValue for the cache and that is what we need
  // to probe.
  //
  // Check for number.
  __ JumpIfSmi(rdx, &number);
  __ CmpObjectType(rdx, HEAP_NUMBER_TYPE, rbx);
  __ j(not_equal, &non_number);
  __ bind(&number);
  StubCompiler::GenerateLoadGlobalFunctionPrototype(
      masm, Context::NUMBER_FUNCTION_INDEX, rdx);
  __ jmp(&probe);

  // Check for string.
  __ bind(&non_number);
  __ CmpInstanceType(rbx, FIRST_NONSTRING_TYPE);
  __ j(above_equal, &non_string);
  StubCompiler::GenerateLoadGlobalFunctionPrototype(
      masm, Context::STRING_FUNCTION_INDEX, rdx);
  __ jmp(&probe);

  // Check for boolean.
  __ bind(&non_string);
  __ CompareRoot(rdx, Heap::kTrueValueRootIndex);
  __ j(equal, &boolean);
  __ CompareRoot(rdx, Heap::kFalseValueRootIndex);
  __ j(not_equal, &miss);
  __ bind(&boolean);
  StubCompiler::GenerateLoadGlobalFunctionPrototype(
      masm, Context::BOOLEAN_FUNCTION_INDEX, rdx);

  // Probe the stub cache for the value object.
  __ bind(&probe);
  Isolate::Current()->stub_cache()->GenerateProbe(masm, flags, rdx, rcx, rbx,
                                                  no_reg);

  __ bind(&miss);
}


static void GenerateFunctionTailCall(MacroAssembler* masm,
                                     int argc,
                                     Label* miss) {
  // ----------- S t a t e -------------
  // rcx                 : function name
  // rdi                 : function
  // rsp[0]              : return address
  // rsp[8]              : argument argc
  // rsp[16]             : argument argc - 1
  // ...
  // rsp[argc * 8]       : argument 1
  // rsp[(argc + 1) * 8] : argument 0 = receiver
  // -----------------------------------
  __ JumpIfSmi(rdi, miss);
  // Check that the value is a JavaScript function.
  __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rdx);
  __ j(not_equal, miss);

  // Invoke the function.
  ParameterCount actual(argc);
  __ InvokeFunction(rdi, actual, JUMP_FUNCTION,
                    NullCallWrapper(), CALL_AS_METHOD);
}


// The generated code falls through if the call should be handled by runtime.
void CallICBase::GenerateNormal(MacroAssembler* masm, int argc) {
  // ----------- S t a t e -------------
  // rcx                 : function name
  // rsp[0]              : return address
  // rsp[8]              : argument argc
  // rsp[16]             : argument argc - 1
  // ...
  // rsp[argc * 8]       : argument 1
  // rsp[(argc + 1) * 8] : argument 0 = receiver
  // -----------------------------------
  Label miss;

  // Get the receiver of the function from the stack.
  __ movq(rdx, Operand(rsp, (argc + 1) * kPointerSize));

  GenerateStringDictionaryReceiverCheck(masm, rdx, rax, rbx, &miss);

  // rax: elements
  // Search the dictionary placing the result in rdi.
  GenerateDictionaryLoad(masm, &miss, rax, rcx, rbx, rdi, rdi);

  GenerateFunctionTailCall(masm, argc, &miss);

  __ bind(&miss);
}

void CallICBase::GenerateMiss(MacroAssembler* masm,
                              int argc,
                              IC::UtilityId id,
                              Code::ExtraICState extra_state) {
  // ----------- S t a t e -------------
  // rcx                 : function name
  // rsp[0]              : return address
  // rsp[8]              : argument argc
  // rsp[16]             : argument argc - 1
  // ...
  // rsp[argc * 8]       : argument 1
  // rsp[(argc + 1) * 8] : argument 0 = receiver
  // -----------------------------------

  Counters* counters = masm->isolate()->counters();
  if (id == IC::kCallIC_Miss) {
    __ IncrementCounter(counters->call_miss(), 1);
  } else {
    __ IncrementCounter(counters->keyed_call_miss(), 1);
  }

  // Get the receiver of the function from the stack; 1 ~ return address.
  __ movq(rdx, Operand(rsp, (argc + 1) * kPointerSize));

  // Enter an internal frame.
  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Push the receiver and the name of the function.
    __ push(rdx);
    __ push(rcx);

    // Call the entry.
    CEntryStub stub(1);
    __ Set(rax, 2);
    __ LoadAddress(rbx, ExternalReference(IC_Utility(id), masm->isolate()));
    __ CallStub(&stub);

    // Move result to rdi and exit the internal frame.
    __ movq(rdi, rax);
  }

  // Check if the receiver is a global object of some sort.
  // This can happen only for regular CallIC but not KeyedCallIC.
  if (id == IC::kCallIC_Miss) {
    Label invoke, global;
    __ movq(rdx, Operand(rsp, (argc + 1) * kPointerSize));  // receiver
    __ JumpIfSmi(rdx, &invoke);
    __ CmpObjectType(rdx, JS_GLOBAL_OBJECT_TYPE, rcx);
    __ j(equal, &global);
    __ CmpInstanceType(rcx, JS_BUILTINS_OBJECT_TYPE);
    __ j(not_equal, &invoke);

    // Patch the receiver on the stack.
    __ bind(&global);
    __ movq(rdx, FieldOperand(rdx, GlobalObject::kGlobalReceiverOffset));
    __ movq(Operand(rsp, (argc + 1) * kPointerSize), rdx);
    __ bind(&invoke);
  }

  // Invoke the function.
  CallKind call_kind = CallICBase::Contextual::decode(extra_state)
      ? CALL_AS_FUNCTION
      : CALL_AS_METHOD;
  ParameterCount actual(argc);
  __ InvokeFunction(rdi,
                    actual,
                    JUMP_FUNCTION,
                    NullCallWrapper(),
                    call_kind);
}

void CallIC::GenerateMegamorphic(MacroAssembler* masm,
                                 int argc,
                                 Code::ExtraICState extra_ic_state) {
  // ----------- S t a t e -------------
  // rcx                 : function name
  // rsp[0]              : return address
  // rsp[8]              : argument argc
  // rsp[16]             : argument argc - 1
  // ...
  // rsp[argc * 8]       : argument 1
  // rsp[(argc + 1) * 8] : argument 0 = receiver
  // -----------------------------------

  // Get the receiver of the function from the stack; 1 ~ return address.
  __ movq(rdx, Operand(rsp, (argc + 1) * kPointerSize));
  GenerateMonomorphicCacheProbe(masm, argc, Code::CALL_IC, extra_ic_state);
  GenerateMiss(masm, argc, extra_ic_state);
}

void KeyedCallIC::GenerateMegamorphic(MacroAssembler* masm, int argc) {
  // ----------- S t a t e -------------
  // rcx                 : function name
  // rsp[0]              : return address
  // rsp[8]              : argument argc
  // rsp[16]             : argument argc - 1
  // ...
  // rsp[argc * 8]       : argument 1
  // rsp[(argc + 1) * 8] : argument 0 = receiver
  // -----------------------------------

  // Get the receiver of the function from the stack; 1 ~ return address.
  __ movq(rdx, Operand(rsp, (argc + 1) * kPointerSize));

  Label do_call, slow_call, slow_load;
  Label check_number_dictionary, check_string, lookup_monomorphic_cache;
  Label index_smi, index_string;

  // Check that the key is a smi.
  __ JumpIfNotSmi(rcx, &check_string);

  __ bind(&index_smi);
  // Now the key is known to be a smi. This place is also jumped to from below
  // where a numeric string is converted to a smi.

  GenerateKeyedLoadReceiverCheck(
      masm, rdx, rax, Map::kHasIndexedInterceptor, &slow_call);

  GenerateFastArrayLoad(
      masm, rdx, rcx, rax, rbx, rdi, &check_number_dictionary, &slow_load);
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->keyed_call_generic_smi_fast(), 1);

  __ bind(&do_call);
  // receiver in rdx is not used after this point.
  // rcx: key
  // rdi: function
  GenerateFunctionTailCall(masm, argc, &slow_call);

  __ bind(&check_number_dictionary);
  // rax: elements
  // rcx: smi key
  // Check whether the elements object is a number dictionary.
  __ CompareRoot(FieldOperand(rax, HeapObject::kMapOffset),
                 Heap::kHashTableMapRootIndex);
  __ j(not_equal, &slow_load);
  __ SmiToInteger32(rbx, rcx);
  // rbx: untagged index
  __ LoadFromNumberDictionary(&slow_load, rax, rcx, rbx, r9, rdi, rdi);
  __ IncrementCounter(counters->keyed_call_generic_smi_dict(), 1);
  __ jmp(&do_call);

  __ bind(&slow_load);
  // This branch is taken when calling KeyedCallIC_Miss is neither required
  // nor beneficial.
  __ IncrementCounter(counters->keyed_call_generic_slow_load(), 1);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ push(rcx);  // save the key
    __ push(rdx);  // pass the receiver
    __ push(rcx);  // pass the key
    __ CallRuntime(Runtime::kKeyedGetProperty, 2);
    __ pop(rcx);   // restore the key
  }
  __ movq(rdi, rax);
  __ jmp(&do_call);

  __ bind(&check_string);
  GenerateKeyStringCheck(masm, rcx, rax, rbx, &index_string, &slow_call);

  // The key is known to be a symbol.
  // If the receiver is a regular JS object with slow properties then do
  // a quick inline probe of the receiver's dictionary.
  // Otherwise do the monomorphic cache probe.
  GenerateKeyedLoadReceiverCheck(
      masm, rdx, rax, Map::kHasNamedInterceptor, &lookup_monomorphic_cache);

  __ movq(rbx, FieldOperand(rdx, JSObject::kPropertiesOffset));
  __ CompareRoot(FieldOperand(rbx, HeapObject::kMapOffset),
                 Heap::kHashTableMapRootIndex);
  __ j(not_equal, &lookup_monomorphic_cache);

  GenerateDictionaryLoad(masm, &slow_load, rbx, rcx, rax, rdi, rdi);
  __ IncrementCounter(counters->keyed_call_generic_lookup_dict(), 1);
  __ jmp(&do_call);

  __ bind(&lookup_monomorphic_cache);
  __ IncrementCounter(counters->keyed_call_generic_lookup_cache(), 1);
  GenerateMonomorphicCacheProbe(masm,
                                argc,
                                Code::KEYED_CALL_IC,
                                Code::kNoExtraICState);
  // Fall through on miss.

  __ bind(&slow_call);
  // This branch is taken if:
  // - the receiver requires boxing or access check,
  // - the key is neither smi nor symbol,
  // - the value loaded is not a function,
  // - there is hope that the runtime will create a monomorphic call stub
  //   that will get fetched next time.
  __ IncrementCounter(counters->keyed_call_generic_slow(), 1);
  GenerateMiss(masm, argc);

  __ bind(&index_string);
  __ IndexFromHash(rbx, rcx);
  // Now jump to the place where smi keys are handled.
  __ jmp(&index_smi);
}

void KeyedCallIC::GenerateNormal(MacroAssembler* masm, int argc) {
  // ----------- S t a t e -------------
  // rcx                 : function name
  // rsp[0]              : return address
  // rsp[8]              : argument argc
  // rsp[16]             : argument argc - 1
  // ...
  // rsp[argc * 8]       : argument 1
  // rsp[(argc + 1) * 8] : argument 0 = receiver
  // -----------------------------------

  // Check if the name is a string.
  Label miss;
  __ JumpIfSmi(rcx, &miss);
  Condition cond = masm->IsObjectStringType(rcx, rax, rax);
  __ j(NegateCondition(cond), &miss);
  CallICBase::GenerateNormal(masm, argc);
  __ bind(&miss);
  GenerateMiss(masm, argc);
}

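// Non-strict arguments objects use a "parameter map" as their elements: a
// FixedArray whose slot 0 holds the function context and slot 1 the
// unmapped backing store, followed by one slot per mapped parameter that
// holds either the context index of the aliased variable or the hole.
// For example (illustrative), in
//   function f(a) { arguments[0] = 3; return a; }   // returns 3
// reads and writes of arguments[0] go through the context slot for 'a'.
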
static Operand GenerateMappedArgumentsLookup(MacroAssembler* masm,
                                             Register object,
                                             Register key,
                                             Register scratch1,
                                             Register scratch2,
                                             Register scratch3,
                                             Label* unmapped_case,
                                             Label* slow_case) {
  Heap* heap = masm->isolate()->heap();

  // Check that the receiver is a JSObject. Because of the elements
  // map check later, we do not need to check for interceptors or
  // whether it requires access checks.
  __ JumpIfSmi(object, slow_case);
  // Check that the object is some kind of JSObject.
  __ CmpObjectType(object, FIRST_JS_RECEIVER_TYPE, scratch1);
  __ j(below, slow_case);

  // Check that the key is a positive smi.
  Condition check = masm->CheckNonNegativeSmi(key);
  __ j(NegateCondition(check), slow_case);

  // Load the elements into scratch1 and check its map. If not, jump
  // to the unmapped lookup with the parameter map in scratch1.
  Handle<Map> arguments_map(heap->non_strict_arguments_elements_map());
  __ movq(scratch1, FieldOperand(object, JSObject::kElementsOffset));
  __ CheckMap(scratch1, arguments_map, slow_case, DONT_DO_SMI_CHECK);

  // Check if element is in the range of mapped arguments.
  __ movq(scratch2, FieldOperand(scratch1, FixedArray::kLengthOffset));
  __ SmiSubConstant(scratch2, scratch2, Smi::FromInt(2));
  __ cmpq(key, scratch2);
  __ j(greater_equal, unmapped_case);

  // Load element index and check whether it is the hole.
  const int kHeaderSize = FixedArray::kHeaderSize + 2 * kPointerSize;
  __ SmiToInteger64(scratch3, key);
  __ movq(scratch2, FieldOperand(scratch1,
                                 scratch3,
                                 times_pointer_size,
                                 kHeaderSize));
  __ CompareRoot(scratch2, Heap::kTheHoleValueRootIndex);
  __ j(equal, unmapped_case);

  // Load value from context and return it. We can reuse scratch1 because
  // we do not jump to the unmapped lookup (which requires the parameter
  // map in scratch1).
  __ movq(scratch1, FieldOperand(scratch1, FixedArray::kHeaderSize));
  __ SmiToInteger64(scratch3, scratch2);
  return FieldOperand(scratch1,
                      scratch3,
                      times_pointer_size,
                      Context::kHeaderSize);
}

static Operand GenerateUnmappedArgumentsLookup(MacroAssembler* masm,
                                               Register key,
                                               Register parameter_map,
                                               Register scratch,
                                               Label* slow_case) {
  // Element is in arguments backing store, which is referenced by the
  // second element of the parameter_map. The parameter_map register
  // must be loaded with the parameter map of the arguments object and is
  // overwritten.
  const int kBackingStoreOffset = FixedArray::kHeaderSize + kPointerSize;
  Register backing_store = parameter_map;
  __ movq(backing_store, FieldOperand(parameter_map, kBackingStoreOffset));
  Handle<Map> fixed_array_map(masm->isolate()->heap()->fixed_array_map());
  __ CheckMap(backing_store, fixed_array_map, slow_case, DONT_DO_SMI_CHECK);
  __ movq(scratch, FieldOperand(backing_store, FixedArray::kLengthOffset));
  __ cmpq(key, scratch);
  __ j(greater_equal, slow_case);
  __ SmiToInteger64(scratch, key);
  return FieldOperand(backing_store,
                      scratch,
                      times_pointer_size,
                      FixedArray::kHeaderSize);
}

void KeyedLoadIC::GenerateNonStrictArguments(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax    : key
  //  -- rdx    : receiver
  //  -- rsp[0] : return address
  // -----------------------------------
  Label slow, notin;
  Operand mapped_location =
      GenerateMappedArgumentsLookup(
          masm, rdx, rax, rbx, rcx, rdi, &notin, &slow);
  __ movq(rax, mapped_location);
  __ Ret();
  __ bind(&notin);
  // The unmapped lookup expects that the parameter map is in rbx.
  Operand unmapped_location =
      GenerateUnmappedArgumentsLookup(masm, rax, rbx, rcx, &slow);
  __ CompareRoot(unmapped_location, Heap::kTheHoleValueRootIndex);
  __ j(equal, &slow);
  __ movq(rax, unmapped_location);
  __ Ret();
  __ bind(&slow);
  GenerateMiss(masm, false);
}

void KeyedStoreIC::GenerateNonStrictArguments(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax    : value
  //  -- rcx    : key
  //  -- rdx    : receiver
  //  -- rsp[0] : return address
  // -----------------------------------
  Label slow, notin;
  Operand mapped_location = GenerateMappedArgumentsLookup(
      masm, rdx, rcx, rbx, rdi, r8, &notin, &slow);
  __ movq(mapped_location, rax);
  __ lea(r9, mapped_location);
  __ movq(r8, rax);
  __ RecordWrite(rbx,
                 r9,
                 r8,
                 kDontSaveFPRegs,
                 EMIT_REMEMBERED_SET,
                 INLINE_SMI_CHECK);
  __ Ret();
  __ bind(&notin);
  // The unmapped lookup expects that the parameter map is in rbx.
  Operand unmapped_location =
      GenerateUnmappedArgumentsLookup(masm, rcx, rbx, rdi, &slow);
  __ movq(unmapped_location, rax);
  __ lea(r9, unmapped_location);
  __ movq(r8, rax);
  __ RecordWrite(rbx,
                 r9,
                 r8,
                 kDontSaveFPRegs,
                 EMIT_REMEMBERED_SET,
                 INLINE_SMI_CHECK);
  __ Ret();
  __ bind(&slow);
  GenerateMiss(masm, false);
}

void KeyedCallIC::GenerateNonStrictArguments(MacroAssembler* masm,
                                             int argc) {
  // ----------- S t a t e -------------
  // rcx                 : function name
  // rsp[0]              : return address
  // rsp[8]              : argument argc
  // rsp[16]             : argument argc - 1
  // ...
  // rsp[argc * 8]       : argument 1
  // rsp[(argc + 1) * 8] : argument 0 = receiver
  // -----------------------------------
  Label slow, notin;
  __ movq(rdx, Operand(rsp, (argc + 1) * kPointerSize));
  Operand mapped_location = GenerateMappedArgumentsLookup(
      masm, rdx, rcx, rbx, rax, r8, &notin, &slow);
  __ movq(rdi, mapped_location);
  GenerateFunctionTailCall(masm, argc, &slow);
  __ bind(&notin);
  // The unmapped lookup expects that the parameter map is in rbx.
  Operand unmapped_location =
      GenerateUnmappedArgumentsLookup(masm, rcx, rbx, rax, &slow);
  __ CompareRoot(unmapped_location, Heap::kTheHoleValueRootIndex);
  __ j(equal, &slow);
  __ movq(rdi, unmapped_location);
  GenerateFunctionTailCall(masm, argc, &slow);
  __ bind(&slow);
  GenerateMiss(masm, argc);
}

void LoadIC::GenerateMegamorphic(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax    : receiver
  //  -- rcx    : name
  //  -- rsp[0] : return address
  // -----------------------------------

  // Probe the stub cache.
  Code::Flags flags = Code::ComputeFlags(Code::LOAD_IC, MONOMORPHIC);
  Isolate::Current()->stub_cache()->GenerateProbe(masm, flags, rax, rcx, rbx,
                                                  rdx);

  // Cache miss: Jump to runtime.
  StubCompiler::GenerateLoadMiss(masm, Code::LOAD_IC);
}

void LoadIC::GenerateNormal(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax    : receiver
  //  -- rcx    : name
  //  -- rsp[0] : return address
  // -----------------------------------
  Label miss;

  GenerateStringDictionaryReceiverCheck(masm, rax, rdx, rbx, &miss);

  // rdx: elements
  // Search the dictionary placing the result in rax.
  GenerateDictionaryLoad(masm, &miss, rdx, rcx, rbx, rdi, rax);
  __ ret(0);

  // Cache miss: Jump to runtime.
  __ bind(&miss);
  GenerateMiss(masm);
}

void LoadIC::GenerateMiss(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax    : receiver
  //  -- rcx    : name
  //  -- rsp[0] : return address
  // -----------------------------------

  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->load_miss(), 1);

  __ pop(rbx);
  __ push(rax);  // receiver
  __ push(rcx);  // name
  __ push(rbx);  // return address

  // Perform tail call to the entry.
  ExternalReference ref =
      ExternalReference(IC_Utility(kLoadIC_Miss), masm->isolate());
  __ TailCallExternalReference(ref, 2, 1);
}


void KeyedLoadIC::GenerateMiss(MacroAssembler* masm, bool force_generic) {
  // ----------- S t a t e -------------
  //  -- rax    : key
  //  -- rdx    : receiver
  //  -- rsp[0] : return address
  // -----------------------------------

  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->keyed_load_miss(), 1);

  __ pop(rbx);
  __ push(rdx);  // receiver
  __ push(rax);  // name
  __ push(rbx);  // return address

  // Perform tail call to the entry.
  ExternalReference ref = force_generic
      ? ExternalReference(IC_Utility(kKeyedLoadIC_MissForceGeneric),
                          masm->isolate())
      : ExternalReference(IC_Utility(kKeyedLoadIC_Miss), masm->isolate());
  __ TailCallExternalReference(ref, 2, 1);
}


void KeyedLoadIC::GenerateRuntimeGetProperty(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax    : key
  //  -- rdx    : receiver
  //  -- rsp[0] : return address
  // -----------------------------------

  __ pop(rbx);
  __ push(rdx);  // receiver
  __ push(rax);  // name
  __ push(rbx);  // return address

  // Perform tail call to the entry.
  __ TailCallRuntime(Runtime::kKeyedGetProperty, 2, 1);
}

void StoreIC::GenerateMegamorphic(MacroAssembler* masm,
                                  StrictModeFlag strict_mode) {
  // ----------- S t a t e -------------
  //  -- rax    : value
  //  -- rcx    : name
  //  -- rdx    : receiver
  //  -- rsp[0] : return address
  // -----------------------------------

  // Probe the stub cache.
  Code::Flags flags =
      Code::ComputeFlags(Code::STORE_IC, MONOMORPHIC, strict_mode);
  Isolate::Current()->stub_cache()->GenerateProbe(masm, flags, rdx, rcx, rbx,
                                                  no_reg);

  // Cache miss: Jump to runtime.
  GenerateMiss(masm);
}

void StoreIC::GenerateMiss(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax    : value
  //  -- rcx    : name
  //  -- rdx    : receiver
  //  -- rsp[0] : return address
  // -----------------------------------

  __ pop(rbx);
  __ push(rdx);  // receiver
  __ push(rcx);  // name
  __ push(rax);  // value
  __ push(rbx);  // return address

  // Perform tail call to the entry.
  ExternalReference ref =
      ExternalReference(IC_Utility(kStoreIC_Miss), masm->isolate());
  __ TailCallExternalReference(ref, 3, 1);
}


void StoreIC::GenerateArrayLength(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax    : value
  //  -- rcx    : name
  //  -- rdx    : receiver
  //  -- rsp[0] : return address
  // -----------------------------------
  //
  // This accepts as a receiver anything JSArray::SetElementsLength accepts
  // (currently anything except for external arrays which means anything with
  // elements of FixedArray type). Value must be a number, but only smis are
  // accepted as the most common case.

  Label miss;

  Register receiver = rdx;
  Register value = rax;
  Register scratch = rbx;

  // Check that the receiver isn't a smi.
  __ JumpIfSmi(receiver, &miss);

  // Check that the object is a JS array.
  __ CmpObjectType(receiver, JS_ARRAY_TYPE, scratch);
  __ j(not_equal, &miss);

  // Check that elements are FixedArray.
  // We rely on StoreIC_ArrayLength below to deal with all types of
  // fast elements (including COW).
  __ movq(scratch, FieldOperand(receiver, JSArray::kElementsOffset));
  __ CmpObjectType(scratch, FIXED_ARRAY_TYPE, scratch);
  __ j(not_equal, &miss);

  // Check that the array has fast properties, otherwise the length
  // property might have been redefined.
  __ movq(scratch, FieldOperand(receiver, JSArray::kPropertiesOffset));
  __ CompareRoot(FieldOperand(scratch, FixedArray::kMapOffset),
                 Heap::kHashTableMapRootIndex);
  __ j(equal, &miss);

  // Check that value is a smi.
  __ JumpIfNotSmi(value, &miss);

  // Prepare tail call to StoreIC_ArrayLength.
  __ pop(scratch);
  __ push(receiver);
  __ push(value);
  __ push(scratch);  // return address

  ExternalReference ref =
      ExternalReference(IC_Utility(kStoreIC_ArrayLength), masm->isolate());
  __ TailCallExternalReference(ref, 2, 1);

  __ bind(&miss);

  GenerateMiss(masm);
}

void StoreIC::GenerateNormal(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax    : value
  //  -- rcx    : name
  //  -- rdx    : receiver
  //  -- rsp[0] : return address
  // -----------------------------------

  Label miss;

  GenerateStringDictionaryReceiverCheck(masm, rdx, rbx, rdi, &miss);

  GenerateDictionaryStore(masm, &miss, rbx, rcx, rax, r8, r9);
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->store_normal_hit(), 1);
  __ ret(0);

  __ bind(&miss);
  __ IncrementCounter(counters->store_normal_miss(), 1);
  GenerateMiss(masm);
}

void StoreIC::GenerateGlobalProxy(MacroAssembler* masm,
                                  StrictModeFlag strict_mode) {
  // ----------- S t a t e -------------
  //  -- rax    : value
  //  -- rcx    : name
  //  -- rdx    : receiver
  //  -- rsp[0] : return address
  // -----------------------------------
  __ pop(rbx);
  __ push(rdx);
  __ push(rcx);
  __ push(rax);
  __ Push(Smi::FromInt(NONE));  // PropertyAttributes
  __ Push(Smi::FromInt(strict_mode));
  __ push(rbx);  // return address

  // Do tail-call to runtime routine.
  __ TailCallRuntime(Runtime::kSetProperty, 5, 1);
}


void KeyedStoreIC::GenerateRuntimeSetProperty(MacroAssembler* masm,
                                              StrictModeFlag strict_mode) {
  // ----------- S t a t e -------------
  //  -- rax    : value
  //  -- rcx    : key
  //  -- rdx    : receiver
  //  -- rsp[0] : return address
  // -----------------------------------

  __ pop(rbx);
  __ push(rdx);  // receiver
  __ push(rcx);  // key
  __ push(rax);  // value
  __ Push(Smi::FromInt(NONE));         // PropertyAttributes
  __ Push(Smi::FromInt(strict_mode));  // Strict mode.
  __ push(rbx);  // return address

  // Do tail-call to runtime routine.
  __ TailCallRuntime(Runtime::kSetProperty, 5, 1);
}

void KeyedStoreIC::GenerateSlow(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax    : value
  //  -- rcx    : key
  //  -- rdx    : receiver
  //  -- rsp[0] : return address
  // -----------------------------------

  __ pop(rbx);
  __ push(rdx);  // receiver
  __ push(rcx);  // key
  __ push(rax);  // value
  __ push(rbx);  // return address

  // Do tail-call to runtime routine.
  ExternalReference ref(IC_Utility(kKeyedStoreIC_Slow), masm->isolate());
  __ TailCallExternalReference(ref, 3, 1);
}


void KeyedStoreIC::GenerateMiss(MacroAssembler* masm, bool force_generic) {
  // ----------- S t a t e -------------
  //  -- rax    : value
  //  -- rcx    : key
  //  -- rdx    : receiver
  //  -- rsp[0] : return address
  // -----------------------------------

  __ pop(rbx);
  __ push(rdx);  // receiver
  __ push(rcx);  // key
  __ push(rax);  // value
  __ push(rbx);  // return address

  // Do tail-call to runtime routine.
  ExternalReference ref = force_generic
      ? ExternalReference(IC_Utility(kKeyedStoreIC_MissForceGeneric),
                          masm->isolate())
      : ExternalReference(IC_Utility(kKeyedStoreIC_Miss), masm->isolate());
  __ TailCallExternalReference(ref, 3, 1);
}

void KeyedStoreIC::GenerateTransitionElementsSmiToDouble(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rbx    : target map
  //  -- rdx    : receiver
  //  -- rsp[0] : return address
  // -----------------------------------
  // Must return the modified receiver in rax.
  if (!FLAG_trace_elements_transitions) {
    Label fail;
    ElementsTransitionGenerator::GenerateSmiToDouble(masm, &fail);
    __ movq(rax, rdx);
    __ Ret();
    __ bind(&fail);
  }

  __ pop(rbx);
  __ push(rdx);
  __ push(rbx);  // return address
  __ TailCallRuntime(Runtime::kTransitionElementsSmiToDouble, 1, 1);
}


void KeyedStoreIC::GenerateTransitionElementsDoubleToObject(
    MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rbx    : target map
  //  -- rdx    : receiver
  //  -- rsp[0] : return address
  // -----------------------------------
  // Must return the modified receiver in rax.
  if (!FLAG_trace_elements_transitions) {
    Label fail;
    ElementsTransitionGenerator::GenerateDoubleToObject(masm, &fail);
    __ movq(rax, rdx);
    __ Ret();
    __ bind(&fail);
  }

  __ pop(rbx);
  __ push(rdx);
  __ push(rbx);  // return address
  __ TailCallRuntime(Runtime::kTransitionElementsDoubleToObject, 1, 1);
}


#undef __

Condition CompareIC::ComputeCondition(Token::Value op) {
  switch (op) {
    case Token::EQ_STRICT:
    case Token::EQ:
      return equal;
    case Token::LT:
      return less;
    case Token::GT:
      return greater;
    case Token::LTE:
      return less_equal;
    case Token::GTE:
      return greater_equal;
    default:
      UNREACHABLE();
      return no_condition;
  }
}

static bool HasInlinedSmiCode(Address address) {
  // The address of the instruction following the call.
  Address test_instruction_address =
      address + Assembler::kCallTargetAddressOffset;

  // If the instruction following the call is not a test al, nothing
  // was inlined.
  return *test_instruction_address == Assembler::kTestAlByte;
}

1715 void CompareIC::UpdateCaches(Handle<Object> x, Handle<Object> y) {
1716  HandleScope scope;
1717  Handle<Code> rewritten;
1718  State previous_state = GetState();
1719 
1720  State state = TargetState(previous_state, HasInlinedSmiCode(address()), x, y);
1721  if (state == GENERIC) {
1722  CompareStub stub(GetCondition(), strict(), NO_COMPARE_FLAGS);
1723  rewritten = stub.GetCode();
1724  } else {
1725  ICCompareStub stub(op_, state);
1726  if (state == KNOWN_OBJECTS) {
1727  stub.set_known_map(Handle<Map>(Handle<JSObject>::cast(x)->map()));
1728  }
1729  rewritten = stub.GetCode();
1730  }
1731  set_target(*rewritten);
1732 
1733 #ifdef DEBUG
1734  if (FLAG_trace_ic) {
1735  PrintF("[CompareIC (%s->%s)#%s]\n",
1736  GetStateName(previous_state),
1737  GetStateName(state),
1738  Token::Name(op_));
1739  }
1740 #endif
1741 
1742  // Activate inlined smi code.
1743  if (previous_state == UNINITIALIZED) {
1744  PatchInlinedSmiCode(address(), ENABLE_INLINED_SMI_CHECK);
1745  }
1746 }
1747 
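UpdateCaches is the self-patching half of the compare IC: it asks TargetState where to go from the current state given the actual operands, compiles or fetches the stub for that state, and installs it via set_target, degrading to the fully generic CompareStub once the feedback stops fitting a specialized case. A toy model of that monotone progression (the states and rules here are heavily simplified stand-ins, not the real TargetState logic):

#include <cstdio>

enum State { UNINITIALIZED, SMIS, HEAP_NUMBERS, GENERIC };

State TargetState(State previous, bool both_smis, bool both_numbers) {
  if (previous == UNINITIALIZED && both_smis) return SMIS;
  if (previous <= SMIS && both_numbers) return HEAP_NUMBERS;
  return GENERIC;  // anything unexpected degrades to the generic stub
}

int main() {
  State s = UNINITIALIZED;
  s = TargetState(s, true,  true);   // e.g. 1 < 2    -> SMIS
  s = TargetState(s, false, true);   // e.g. 1.5 < 2  -> HEAP_NUMBERS
  s = TargetState(s, false, false);  // e.g. "a" < {} -> GENERIC
  std::printf("final state: %d\n", s);  // prints 3 (GENERIC)
  return 0;
}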
1748 void PatchInlinedSmiCode(Address address, InlinedSmiCheck check) {
1749  // The address of the instruction following the call.
1750  Address test_instruction_address =
1751  address + Assembler::kCallTargetAddressOffset;
1752 
1753  // If the instruction following the call is not a test al, nothing
1754  // was inlined.
1755  if (*test_instruction_address != Assembler::kTestAlByte) {
1756  ASSERT(*test_instruction_address == Assembler::kNopByte);
1757  return;
1758  }
1759 
1760  Address delta_address = test_instruction_address + 1;
1761  // The delta from the test instruction back to the short conditional
1762  // jump whose condition code is patched below.
1763  int8_t delta = *reinterpret_cast<int8_t*>(delta_address);
1764  if (FLAG_trace_ic) {
1765  PrintF("[ patching ic at %p, test=%p, delta=%d\n",
1766  address, test_instruction_address, delta);
1767  }
1768 
1769  // Patch with a short conditional jump. Enabling means switching from a short
1770  // jump-if-carry/not-carry to jump-if-zero/not-zero, whereas disabling is the
1771  // reverse operation of that.
1772  Address jmp_address = test_instruction_address - delta;
1773  ASSERT((check == ENABLE_INLINED_SMI_CHECK)
1774  ? (*jmp_address == Assembler::kJncShortOpcode ||
1775  *jmp_address == Assembler::kJcShortOpcode)
1776  : (*jmp_address == Assembler::kJnzShortOpcode ||
1777  *jmp_address == Assembler::kJzShortOpcode));
1778  Condition cc = (check == ENABLE_INLINED_SMI_CHECK)
1779  ? (*jmp_address == Assembler::kJncShortOpcode ? not_zero : zero)
1780  : (*jmp_address == Assembler::kJnzShortOpcode ? not_carry : carry);
1781  *jmp_address = static_cast<byte>(Assembler::kJccShortPrefix | cc);
1782 }
1783 
1784 
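In byte terms, the patch above exploits the short-Jcc encoding, 0x70|cc followed by an 8-bit displacement: toggling the inlined smi check between its carry-based (disabled) and zero-based (enabled) forms rewrites a single opcode byte. A standalone sketch using the same opcode values (4 and 5 are the x64 condition-code numbers for zero and not_zero):

#include <cassert>
#include <cstdint>

const uint8_t kJccShortPrefix = 0x70;
const uint8_t kJcShortOpcode  = 0x72;  // jc   (jump if carry)
const uint8_t kJncShortOpcode = 0x73;  // jnc
const uint8_t kJzShortOpcode  = 0x74;  // jz   (jump if zero)
const uint8_t kJnzShortOpcode = 0x75;  // jnz

int main() {
  uint8_t jmp = kJncShortOpcode;  // emitted disabled: short jnc
  // Enable the inlined smi check: jnc -> jnz (same polarity, new flag).
  uint8_t cc = (jmp == kJncShortOpcode) ? 0x05 /* not_zero */
                                        : 0x04 /* zero */;
  jmp = static_cast<uint8_t>(kJccShortPrefix | cc);
  assert(jmp == kJnzShortOpcode);
  return 0;
}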
1785 } } // namespace v8::internal
1786 
1787 #endif // V8_TARGET_ARCH_X64