v8 3.14.5 (node 0.10.28)
V8 is Google's open source JavaScript engine
ic-ia32.cc
1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are
4 // met:
5 //
6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided
11 // with the distribution.
12 // * Neither the name of Google Inc. nor the names of its
13 // contributors may be used to endorse or promote products derived
14 // from this software without specific prior written permission.
15 //
16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27 
28 #include "v8.h"
29 
30 #if defined(V8_TARGET_ARCH_IA32)
31 
32 #include "codegen.h"
33 #include "ic-inl.h"
34 #include "runtime.h"
35 #include "stub-cache.h"
36 
37 namespace v8 {
38 namespace internal {
39 
40 // ----------------------------------------------------------------------------
41 // Static IC stub generators.
42 //
43 
44 #define __ ACCESS_MASM(masm)
45 
46 
47 static void GenerateGlobalInstanceTypeCheck(MacroAssembler* masm,
48  Register type,
49  Label* global_object) {
50  // Register usage:
51  // type: holds the receiver instance type on entry.
52  __ cmp(type, JS_GLOBAL_OBJECT_TYPE);
53  __ j(equal, global_object);
54  __ cmp(type, JS_BUILTINS_OBJECT_TYPE);
55  __ j(equal, global_object);
56  __ cmp(type, JS_GLOBAL_PROXY_TYPE);
57  __ j(equal, global_object);
58 }
59 
60 
61 // Generated code falls through if the receiver is a regular non-global
62 // JS object with slow properties and no interceptors.
63 static void GenerateStringDictionaryReceiverCheck(MacroAssembler* masm,
64  Register receiver,
65  Register r0,
66  Register r1,
67  Label* miss) {
68  // Register usage:
69  // receiver: holds the receiver on entry and is unchanged.
70  // r0: used to hold receiver instance type.
71  // Holds the property dictionary on fall through.
72  // r1: used to hold the receiver's map.
73 
74  // Check that the receiver isn't a smi.
75  __ JumpIfSmi(receiver, miss);
76 
77  // Check that the receiver is a valid JS object.
78  __ mov(r1, FieldOperand(receiver, HeapObject::kMapOffset));
79  __ movzx_b(r0, FieldOperand(r1, Map::kInstanceTypeOffset));
80  __ cmp(r0, FIRST_SPEC_OBJECT_TYPE);
81  __ j(below, miss);
82 
83  // If this assert fails, we have to check upper bound too.
84  STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE);
85 
86  GenerateGlobalInstanceTypeCheck(masm, r0, miss);
87 
88  // Check for non-global object that requires access check.
89  __ test_b(FieldOperand(r1, Map::kBitFieldOffset),
90  (1 << Map::kIsAccessCheckNeeded) |
91  (1 << Map::kHasNamedInterceptor));
92  __ j(not_zero, miss);
93 
94  __ mov(r0, FieldOperand(receiver, JSObject::kPropertiesOffset));
95  __ CheckMap(r0, FACTORY->hash_table_map(), miss, DONT_DO_SMI_CHECK);
96 }
97 
98 
99 // Helper function used to load a property from a dictionary backing
100 // storage. This function may fail to load a property even though it is
101 // in the dictionary, so code at miss_label must always call a backup
102 // property load that is complete. This function is safe to call if
103 // name is not a symbol, and will jump to the miss_label in that
104 // case. The generated code assumes that the receiver has slow
105 // properties, is not a global object and does not have interceptors.
106 static void GenerateDictionaryLoad(MacroAssembler* masm,
107  Label* miss_label,
108  Register elements,
109  Register name,
110  Register r0,
111  Register r1,
112  Register result) {
113  // Register use:
114  //
115  // elements - holds the property dictionary on entry and is unchanged.
116  //
117  // name - holds the name of the property on entry and is unchanged.
118  //
119  // Scratch registers:
120  //
121  // r0 - used for the index into the property dictionary
122  //
123  // r1 - used to hold the capacity of the property dictionary.
124  //
125  // result - holds the result on exit.
126 
127  Label done;
128 
129  // Probe the dictionary.
130  StringDictionaryLookupStub::GeneratePositiveLookup(masm,
131  miss_label,
132  &done,
133  elements,
134  name,
135  r0,
136  r1);
137 
138  // If probing finds an entry in the dictionary, r0 contains the
139  // index into the dictionary. Check that the value is a normal
140  // property.
141  __ bind(&done);
142  const int kElementsStartOffset =
143  StringDictionary::kHeaderSize +
144  StringDictionary::kElementsStartIndex * kPointerSize;
145  const int kDetailsOffset = kElementsStartOffset + 2 * kPointerSize;
146  __ test(Operand(elements, r0, times_4, kDetailsOffset - kHeapObjectTag),
147  Immediate(PropertyDetails::TypeField::kMask << kSmiTagSize));
148  __ j(not_zero, miss_label);
149 
150  // Get the value at the masked, scaled index.
151  const int kValueOffset = kElementsStartOffset + kPointerSize;
152  __ mov(result, Operand(elements, r0, times_4, kValueOffset - kHeapObjectTag));
153 }
154 
155 
156 // Helper function used to store a property to a dictionary backing
157 // storage. This function may fail to store a property even though it
158 // is in the dictionary, so code at miss_label must always call a
159 // backup property store that is complete. This function is safe to
160 // call if name is not a symbol, and will jump to the miss_label in
161 // that case. The generated code assumes that the receiver has slow
162 // properties, is not a global object and does not have interceptors.
163 static void GenerateDictionaryStore(MacroAssembler* masm,
164  Label* miss_label,
165  Register elements,
166  Register name,
167  Register value,
168  Register r0,
169  Register r1) {
170  // Register use:
171  //
172  // elements - holds the property dictionary on entry and is clobbered.
173  //
174  // name - holds the name of the property on entry and is unchanged.
175  //
176  // value - holds the value to store and is unchanged.
177  //
178  // r0 - used for index into the property dictionary and is clobbered.
179  //
180  // r1 - used to hold the capacity of the property dictionary and is clobbered.
181  Label done;
182 
183 
184  // Probe the dictionary.
185  StringDictionaryLookupStub::GeneratePositiveLookup(masm,
186  miss_label,
187  &done,
188  elements,
189  name,
190  r0,
191  r1);
192 
193  // If probing finds an entry in the dictionary, r0 contains the
194  // index into the dictionary. Check that the value is a normal
195  // property that is not read only.
196  __ bind(&done);
197  const int kElementsStartOffset =
198  StringDictionary::kHeaderSize +
199  StringDictionary::kElementsStartIndex * kPointerSize;
200  const int kDetailsOffset = kElementsStartOffset + 2 * kPointerSize;
201  const int kTypeAndReadOnlyMask =
202  (PropertyDetails::TypeField::kMask |
203  PropertyDetails::AttributesField::encode(READ_ONLY)) << kSmiTagSize;
204  __ test(Operand(elements, r0, times_4, kDetailsOffset - kHeapObjectTag),
205  Immediate(kTypeAndReadOnlyMask));
206  __ j(not_zero, miss_label);
207 
208  // Store the value at the masked, scaled index.
209  const int kValueOffset = kElementsStartOffset + kPointerSize;
210  __ lea(r0, Operand(elements, r0, times_4, kValueOffset - kHeapObjectTag));
211  __ mov(Operand(r0, 0), value);
212 
213  // Update write barrier. Make sure not to clobber the value.
214  __ mov(r1, value);
215  __ RecordWrite(elements, r0, r1, kDontSaveFPRegs);
216 }
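// Note on the offset arithmetic used by GenerateDictionaryLoad and
// GenerateDictionaryStore above (a sketch, inferred from the constants
// involved): a StringDictionary keeps its entries as (key, value, details)
// triples in a FixedArray, starting at kElementsStartOffset past the
// dictionary header. The index left by the probe, scaled by times_4,
// selects the matching entry; relative to that entry the value and details
// words sit at kValueOffset and kDetailsOffset. The details word is a smi,
// which is why the masks tested against it are shifted left by kSmiTagSize.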
217 
218 
219 void LoadIC::GenerateArrayLength(MacroAssembler* masm) {
220  // ----------- S t a t e -------------
221  // -- ecx : name
222  // -- edx : receiver
223  // -- esp[0] : return address
224  // -----------------------------------
225  Label miss;
226 
227  StubCompiler::GenerateLoadArrayLength(masm, edx, eax, &miss);
228  __ bind(&miss);
229  StubCompiler::GenerateLoadMiss(masm, Code::LOAD_IC);
230 }
231 
232 
233 void LoadIC::GenerateStringLength(MacroAssembler* masm,
234  bool support_wrappers) {
235  // ----------- S t a t e -------------
236  // -- ecx : name
237  // -- edx : receiver
238  // -- esp[0] : return address
239  // -----------------------------------
240  Label miss;
241 
242  StubCompiler::GenerateLoadStringLength(masm, edx, eax, ebx, &miss,
243  support_wrappers);
244  __ bind(&miss);
245  StubCompiler::GenerateLoadMiss(masm, Code::LOAD_IC);
246 }
247 
248 
249 void LoadIC::GenerateFunctionPrototype(MacroAssembler* masm) {
250  // ----------- S t a t e -------------
251  // -- ecx : name
252  // -- edx : receiver
253  // -- esp[0] : return address
254  // -----------------------------------
255  Label miss;
256 
257  StubCompiler::GenerateLoadFunctionPrototype(masm, edx, eax, ebx, &miss);
258  __ bind(&miss);
259  StubCompiler::GenerateLoadMiss(masm, Code::LOAD_IC);
260 }
261 
262 
263 // Checks the receiver for special cases (value type, slow case bits).
264 // Falls through for regular JS object.
265 static void GenerateKeyedLoadReceiverCheck(MacroAssembler* masm,
266  Register receiver,
267  Register map,
268  int interceptor_bit,
269  Label* slow) {
270  // Register use:
271  // receiver - holds the receiver and is unchanged.
272  // Scratch registers:
273  // map - used to hold the map of the receiver.
274 
275  // Check that the object isn't a smi.
276  __ JumpIfSmi(receiver, slow);
277 
278  // Get the map of the receiver.
279  __ mov(map, FieldOperand(receiver, HeapObject::kMapOffset));
280 
281  // Check bit field.
282  __ test_b(FieldOperand(map, Map::kBitFieldOffset),
283  (1 << Map::kIsAccessCheckNeeded) | (1 << interceptor_bit));
284  __ j(not_zero, slow);
285  // Check that the object is some kind of JS object EXCEPT JS Value type.
286  // In the case that the object is a value-wrapper object,
287  // we enter the runtime system to make sure that indexing
288  // into string objects works as intended.
289  STATIC_ASSERT(JS_OBJECT_TYPE > JS_VALUE_TYPE);
290 
291  __ CmpInstanceType(map, JS_OBJECT_TYPE);
292  __ j(below, slow);
293 }
294 
295 
296 // Loads an indexed element from a fast case array.
297 // If not_fast_array is NULL, doesn't perform the elements map check.
298 static void GenerateFastArrayLoad(MacroAssembler* masm,
299  Register receiver,
300  Register key,
301  Register scratch,
302  Register result,
303  Label* not_fast_array,
304  Label* out_of_range) {
305  // Register use:
306  // receiver - holds the receiver and is unchanged.
307  // key - holds the key and is unchanged (must be a smi).
308  // Scratch registers:
309  // scratch - used to hold elements of the receiver and the loaded value.
310  // result - holds the result on exit if the load succeeds and
311  // we fall through.
312 
313  __ mov(scratch, FieldOperand(receiver, JSObject::kElementsOffset));
314  if (not_fast_array != NULL) {
315  // Check that the object is in fast mode and writable.
316  __ CheckMap(scratch,
317  FACTORY->fixed_array_map(),
318  not_fast_array,
319  DONT_DO_SMI_CHECK);
320  } else {
321  __ AssertFastElements(scratch);
322  }
323  // Check that the key (index) is within bounds.
324  __ cmp(key, FieldOperand(scratch, FixedArray::kLengthOffset));
325  __ j(above_equal, out_of_range);
326  // Fast case: Do the load.
327  STATIC_ASSERT((kPointerSize == 4) && (kSmiTagSize == 1) && (kSmiTag == 0));
328  __ mov(scratch, FieldOperand(scratch, key, times_2, FixedArray::kHeaderSize));
329  __ cmp(scratch, Immediate(FACTORY->the_hole_value()));
330  // In case the loaded value is the_hole we have to consult GetProperty
331  // to ensure the prototype chain is searched.
332  __ j(equal, out_of_range);
333  if (!result.is(scratch)) {
334  __ mov(result, scratch);
335  }
336 }
337 
338 
339 // Checks whether a key is an array index string or a symbol string.
340 // Falls through if the key is a symbol.
341 static void GenerateKeyStringCheck(MacroAssembler* masm,
342  Register key,
343  Register map,
344  Register hash,
345  Label* index_string,
346  Label* not_symbol) {
347  // Register use:
348  // key - holds the key and is unchanged. Assumed to be non-smi.
349  // Scratch registers:
350  // map - used to hold the map of the key.
351  // hash - used to hold the hash of the key.
352  __ CmpObjectType(key, FIRST_NONSTRING_TYPE, map);
353  __ j(above_equal, not_symbol);
354 
355  // Is the string an array index, with cached numeric value?
356  __ mov(hash, FieldOperand(key, String::kHashFieldOffset));
357  __ test(hash, Immediate(String::kContainsCachedArrayIndexMask));
358  __ j(zero, index_string);
359 
360  // Is the string a symbol?
361  STATIC_ASSERT(kSymbolTag != 0);
362  __ test_b(FieldOperand(map, Map::kInstanceTypeOffset), kIsSymbolMask);
363  __ j(zero, not_symbol);
364 }
365 
366 
367 static Operand GenerateMappedArgumentsLookup(MacroAssembler* masm,
368  Register object,
369  Register key,
370  Register scratch1,
371  Register scratch2,
372  Label* unmapped_case,
373  Label* slow_case) {
374  Heap* heap = masm->isolate()->heap();
375  Factory* factory = masm->isolate()->factory();
376 
377  // Check that the receiver is a JSObject. Because of the elements
378  // map check later, we do not need to check for interceptors or
379  // whether it requires access checks.
380  __ JumpIfSmi(object, slow_case);
381  // Check that the object is some kind of JSObject.
382  __ CmpObjectType(object, FIRST_JS_RECEIVER_TYPE, scratch1);
383  __ j(below, slow_case);
384 
385  // Check that the key is a positive smi.
386  __ test(key, Immediate(0x80000001));
387  __ j(not_zero, slow_case);
388 
389  // Load the elements into scratch1 and check its map.
390  Handle<Map> arguments_map(heap->non_strict_arguments_elements_map());
391  __ mov(scratch1, FieldOperand(object, JSObject::kElementsOffset));
392  __ CheckMap(scratch1, arguments_map, slow_case, DONT_DO_SMI_CHECK);
393 
394  // Check if element is in the range of mapped arguments. If not, jump
395  // to the unmapped lookup with the parameter map in scratch1.
396  __ mov(scratch2, FieldOperand(scratch1, FixedArray::kLengthOffset));
397  __ sub(scratch2, Immediate(Smi::FromInt(2)));
398  __ cmp(key, scratch2);
399  __ j(above_equal, unmapped_case);
400 
401  // Load element index and check whether it is the hole.
402  const int kHeaderSize = FixedArray::kHeaderSize + 2 * kPointerSize;
403  __ mov(scratch2, FieldOperand(scratch1,
404  key,
405  times_half_pointer_size,
406  kHeaderSize));
407  __ cmp(scratch2, factory->the_hole_value());
408  __ j(equal, unmapped_case);
409 
410  // Load value from context and return it. We can reuse scratch1 because
411  // we do not jump to the unmapped lookup (which requires the parameter
412  // map in scratch1).
413  const int kContextOffset = FixedArray::kHeaderSize;
414  __ mov(scratch1, FieldOperand(scratch1, kContextOffset));
415  return FieldOperand(scratch1,
416  scratch2,
417  times_half_pointer_size,
418  Context::kHeaderSize);
419 }
420 
421 
422 static Operand GenerateUnmappedArgumentsLookup(MacroAssembler* masm,
423  Register key,
424  Register parameter_map,
425  Register scratch,
426  Label* slow_case) {
427  // Element is in arguments backing store, which is referenced by the
428  // second element of the parameter_map.
429  const int kBackingStoreOffset = FixedArray::kHeaderSize + kPointerSize;
430  Register backing_store = parameter_map;
431  __ mov(backing_store, FieldOperand(parameter_map, kBackingStoreOffset));
432  Handle<Map> fixed_array_map(masm->isolate()->heap()->fixed_array_map());
433  __ CheckMap(backing_store, fixed_array_map, slow_case, DONT_DO_SMI_CHECK);
434  __ mov(scratch, FieldOperand(backing_store, FixedArray::kLengthOffset));
435  __ cmp(key, scratch);
436  __ j(greater_equal, slow_case);
437  return FieldOperand(backing_store,
438  key,
439  times_half_pointer_size,
440  FixedArray::kHeaderSize);
441 }
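// The two lookup helpers above assume the elements layout used for
// non-strict arguments objects: elements[0] holds the context, elements[1]
// holds the unmapped arguments backing store (a plain FixedArray), and
// elements[2..] hold, for each mapped parameter, either the context slot
// index for that parameter or the_hole when the parameter is unmapped.
// That is why the mapped lookup subtracts 2 from the length, reads entries
// starting at FixedArray::kHeaderSize + 2 * kPointerSize, and treats the
// loaded smi as an index into the context, while the unmapped lookup simply
// indexes the backing store with the key.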
442 
443 
444 void KeyedLoadIC::GenerateGeneric(MacroAssembler* masm) {
445  // ----------- S t a t e -------------
446  // -- ecx : key
447  // -- edx : receiver
448  // -- esp[0] : return address
449  // -----------------------------------
450  Label slow, check_string, index_smi, index_string, property_array_property;
451  Label probe_dictionary, check_number_dictionary;
452 
453  // Check that the key is a smi.
454  __ JumpIfNotSmi(ecx, &check_string);
455  __ bind(&index_smi);
456  // Now the key is known to be a smi. This place is also jumped to from
457  // where a numeric string is converted to a smi.
458 
459  GenerateKeyedLoadReceiverCheck(
460  masm, edx, eax, Map::kHasIndexedInterceptor, &slow);
461 
462  // Check the receiver's map to see if it has fast elements.
463  __ CheckFastElements(eax, &check_number_dictionary);
464 
465  GenerateFastArrayLoad(masm, edx, ecx, eax, eax, NULL, &slow);
466  Isolate* isolate = masm->isolate();
467  Counters* counters = isolate->counters();
468  __ IncrementCounter(counters->keyed_load_generic_smi(), 1);
469  __ ret(0);
470 
471  __ bind(&check_number_dictionary);
472  __ mov(ebx, ecx);
473  __ SmiUntag(ebx);
474  __ mov(eax, FieldOperand(edx, JSObject::kElementsOffset));
475 
476  // Check whether the elements is a number dictionary.
477  // edx: receiver
478  // ebx: untagged index
479  // ecx: key
480  // eax: elements
481  __ CheckMap(eax,
482  isolate->factory()->hash_table_map(),
483  &slow,
484  DONT_DO_SMI_CHECK);
485  Label slow_pop_receiver;
486  // Push receiver on the stack to free up a register for the dictionary
487  // probing.
488  __ push(edx);
489  __ LoadFromNumberDictionary(&slow_pop_receiver, eax, ecx, ebx, edx, edi, eax);
490  // Pop receiver before returning.
491  __ pop(edx);
492  __ ret(0);
493 
494  __ bind(&slow_pop_receiver);
495  // Pop the receiver from the stack and jump to runtime.
496  __ pop(edx);
497 
498  __ bind(&slow);
499  // Slow case: jump to runtime.
500  // edx: receiver
501  // ecx: key
502  __ IncrementCounter(counters->keyed_load_generic_slow(), 1);
503  GenerateRuntimeGetProperty(masm);
504 
505  __ bind(&check_string);
506  GenerateKeyStringCheck(masm, ecx, eax, ebx, &index_string, &slow);
507 
508  GenerateKeyedLoadReceiverCheck(
509  masm, edx, eax, Map::kHasNamedInterceptor, &slow);
510 
511  // If the receiver is a fast-case object, check the keyed lookup
512  // cache. Otherwise probe the dictionary.
513  __ mov(ebx, FieldOperand(edx, JSObject::kPropertiesOffset));
514  __ cmp(FieldOperand(ebx, HeapObject::kMapOffset),
515  Immediate(isolate->factory()->hash_table_map()));
516  __ j(equal, &probe_dictionary);
517 
518  // The receiver's map is still in eax, compute the keyed lookup cache hash
519  // based on 32 bits of the map pointer and the string hash.
520  if (FLAG_debug_code) {
521  __ cmp(eax, FieldOperand(edx, HeapObject::kMapOffset));
522  __ Check(equal, "Map is no longer in eax.");
523  }
524  __ mov(ebx, eax); // Keep the map around for later.
525  __ shr(eax, KeyedLookupCache::kMapHashShift);
526  __ mov(edi, FieldOperand(ecx, String::kHashFieldOffset));
527  __ shr(edi, String::kHashShift);
528  __ xor_(eax, edi);
529  __ and_(eax, KeyedLookupCache::kCapacityMask & KeyedLookupCache::kHashMask);
530 
531  // Load the key (consisting of map and symbol) from the cache and
532  // check for match.
533  Label load_in_object_property;
534  static const int kEntriesPerBucket = KeyedLookupCache::kEntriesPerBucket;
535  Label hit_on_nth_entry[kEntriesPerBucket];
536  ExternalReference cache_keys =
537  ExternalReference::keyed_lookup_cache_keys(masm->isolate());
538 
539  for (int i = 0; i < kEntriesPerBucket - 1; i++) {
540  Label try_next_entry;
541  __ mov(edi, eax);
542  __ shl(edi, kPointerSizeLog2 + 1);
543  if (i != 0) {
544  __ add(edi, Immediate(kPointerSize * i * 2));
545  }
546  __ cmp(ebx, Operand::StaticArray(edi, times_1, cache_keys));
547  __ j(not_equal, &try_next_entry);
548  __ add(edi, Immediate(kPointerSize));
549  __ cmp(ecx, Operand::StaticArray(edi, times_1, cache_keys));
550  __ j(equal, &hit_on_nth_entry[i]);
551  __ bind(&try_next_entry);
552  }
553 
554  __ lea(edi, Operand(eax, 1));
555  __ shl(edi, kPointerSizeLog2 + 1);
556  __ add(edi, Immediate(kPointerSize * (kEntriesPerBucket - 1) * 2));
557  __ cmp(ebx, Operand::StaticArray(edi, times_1, cache_keys));
558  __ j(not_equal, &slow);
559  __ add(edi, Immediate(kPointerSize));
560  __ cmp(ecx, Operand::StaticArray(edi, times_1, cache_keys));
561  __ j(not_equal, &slow);
562 
563  // Get field offset.
564  // edx : receiver
565  // ebx : receiver's map
566  // ecx : key
567  // eax : lookup cache index
568  ExternalReference cache_field_offsets =
569  ExternalReference::keyed_lookup_cache_field_offsets(masm->isolate());
570 
571  // Hit on nth entry.
572  for (int i = kEntriesPerBucket - 1; i >= 0; i--) {
573  __ bind(&hit_on_nth_entry[i]);
574  if (i != 0) {
575  __ add(eax, Immediate(i));
576  }
577  __ mov(edi,
578  Operand::StaticArray(eax, times_pointer_size, cache_field_offsets));
579  __ movzx_b(eax, FieldOperand(ebx, Map::kInObjectPropertiesOffset));
580  __ sub(edi, eax);
581  __ j(above_equal, &property_array_property);
582  if (i != 0) {
583  __ jmp(&load_in_object_property);
584  }
585  }
586 
587  // Load in-object property.
588  __ bind(&load_in_object_property);
589  __ movzx_b(eax, FieldOperand(ebx, Map::kInstanceSizeOffset));
590  __ add(eax, edi);
591  __ mov(eax, FieldOperand(edx, eax, times_pointer_size, 0));
592  __ IncrementCounter(counters->keyed_load_generic_lookup_cache(), 1);
593  __ ret(0);
594 
595  // Load property array property.
596  __ bind(&property_array_property);
597  __ mov(eax, FieldOperand(edx, JSObject::kPropertiesOffset));
598  __ mov(eax, FieldOperand(eax, edi, times_pointer_size,
599  FixedArray::kHeaderSize));
600  __ IncrementCounter(counters->keyed_load_generic_lookup_cache(), 1);
601  __ ret(0);
602 
603  // Do a quick inline probe of the receiver's dictionary, if it
604  // exists.
605  __ bind(&probe_dictionary);
606 
607  __ mov(eax, FieldOperand(edx, HeapObject::kMapOffset));
608  __ movzx_b(eax, FieldOperand(eax, Map::kInstanceTypeOffset));
609  GenerateGlobalInstanceTypeCheck(masm, eax, &slow);
610 
611  GenerateDictionaryLoad(masm, &slow, ebx, ecx, eax, edi, eax);
612  __ IncrementCounter(counters->keyed_load_generic_symbol(), 1);
613  __ ret(0);
614 
615  __ bind(&index_string);
616  __ IndexFromHash(ebx, ecx);
617  // Now jump to the place where smi keys are handled.
618  __ jmp(&index_smi);
619 }
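// The keyed lookup cache probed in GenerateGeneric above is a small
// off-heap hash table: cache_keys holds (map, symbol) pairs and
// cache_field_offsets is a parallel array of property offsets. The hash
// mixes bits of the receiver's map pointer with the string's hash field,
// and each bucket holds kEntriesPerBucket entries that are probed in turn.
// A cached offset below the number of in-object properties is loaded from
// inside the object; otherwise, after subtracting the in-object count, it
// indexes the properties backing store.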
620 
621 
622 void KeyedLoadIC::GenerateString(MacroAssembler* masm) {
623  // ----------- S t a t e -------------
624  // -- ecx : key (index)
625  // -- edx : receiver
626  // -- esp[0] : return address
627  // -----------------------------------
628  Label miss;
629 
630  Register receiver = edx;
631  Register index = ecx;
632  Register scratch = ebx;
633  Register result = eax;
634 
635  StringCharAtGenerator char_at_generator(receiver,
636  index,
637  scratch,
638  result,
639  &miss, // When not a string.
640  &miss, // When not a number.
641  &miss, // When index out of range.
642  STRING_INDEX_IS_ARRAY_INDEX);
643  char_at_generator.GenerateFast(masm);
644  __ ret(0);
645 
646  StubRuntimeCallHelper call_helper;
647  char_at_generator.GenerateSlow(masm, call_helper);
648 
649  __ bind(&miss);
650  GenerateMiss(masm, false);
651 }
652 
653 
654 void KeyedLoadIC::GenerateIndexedInterceptor(MacroAssembler* masm) {
655  // ----------- S t a t e -------------
656  // -- ecx : key
657  // -- edx : receiver
658  // -- esp[0] : return address
659  // -----------------------------------
660  Label slow;
661 
662  // Check that the receiver isn't a smi.
663  __ JumpIfSmi(edx, &slow);
664 
665  // Check that the key is an array index, that is Uint32.
666  __ test(ecx, Immediate(kSmiTagMask | kSmiSignMask));
667  __ j(not_zero, &slow);
668 
669  // Get the map of the receiver.
670  __ mov(eax, FieldOperand(edx, HeapObject::kMapOffset));
671 
672  // Check that it has indexed interceptor and access checks
673  // are not enabled for this object.
674  __ movzx_b(eax, FieldOperand(eax, Map::kBitFieldOffset));
675  __ and_(eax, Immediate(kSlowCaseBitFieldMask));
676  __ cmp(eax, Immediate(1 << Map::kHasIndexedInterceptor));
677  __ j(not_zero, &slow);
678 
679  // Everything is fine, call runtime.
680  __ pop(eax);
681  __ push(edx); // receiver
682  __ push(ecx); // key
683  __ push(eax); // return address
684 
685  // Perform tail call to the entry.
686  ExternalReference ref =
687  ExternalReference(IC_Utility(kKeyedLoadPropertyWithInterceptor),
688  masm->isolate());
689  __ TailCallExternalReference(ref, 2, 1);
690 
691  __ bind(&slow);
692  GenerateMiss(masm, false);
693 }
694 
695 
696 void KeyedLoadIC::GenerateNonStrictArguments(MacroAssembler* masm) {
697  // ----------- S t a t e -------------
698  // -- ecx : key
699  // -- edx : receiver
700  // -- esp[0] : return address
701  // -----------------------------------
702  Label slow, notin;
703  Factory* factory = masm->isolate()->factory();
704  Operand mapped_location =
705  GenerateMappedArgumentsLookup(masm, edx, ecx, ebx, eax, &notin, &slow);
706  __ mov(eax, mapped_location);
707  __ Ret();
708  __ bind(&notin);
709  // The unmapped lookup expects that the parameter map is in ebx.
710  Operand unmapped_location =
711  GenerateUnmappedArgumentsLookup(masm, ecx, ebx, eax, &slow);
712  __ cmp(unmapped_location, factory->the_hole_value());
713  __ j(equal, &slow);
714  __ mov(eax, unmapped_location);
715  __ Ret();
716  __ bind(&slow);
717  GenerateMiss(masm, false);
718 }
719 
720 
721 void KeyedStoreIC::GenerateNonStrictArguments(MacroAssembler* masm) {
722  // ----------- S t a t e -------------
723  // -- eax : value
724  // -- ecx : key
725  // -- edx : receiver
726  // -- esp[0] : return address
727  // -----------------------------------
728  Label slow, notin;
729  Operand mapped_location =
730  GenerateMappedArgumentsLookup(masm, edx, ecx, ebx, edi, &notin, &slow);
731  __ mov(mapped_location, eax);
732  __ lea(ecx, mapped_location);
733  __ mov(edx, eax);
734  __ RecordWrite(ebx, ecx, edx, kDontSaveFPRegs);
735  __ Ret();
736  __ bind(&notin);
737  // The unmapped lookup expects that the parameter map is in ebx.
738  Operand unmapped_location =
739  GenerateUnmappedArgumentsLookup(masm, ecx, ebx, edi, &slow);
740  __ mov(unmapped_location, eax);
741  __ lea(edi, unmapped_location);
742  __ mov(edx, eax);
743  __ RecordWrite(ebx, edi, edx, kDontSaveFPRegs);
744  __ Ret();
745  __ bind(&slow);
746  GenerateMiss(masm, false);
747 }
748 
749 
750 static void KeyedStoreGenerateGenericHelper(
751  MacroAssembler* masm,
752  Label* fast_object,
753  Label* fast_double,
754  Label* slow,
755  KeyedStoreCheckMap check_map,
756  KeyedStoreIncrementLength increment_length) {
757  Label transition_smi_elements;
758  Label finish_object_store, non_double_value, transition_double_elements;
759  Label fast_double_without_map_check;
760  // eax: value
761  // ecx: key (a smi)
762  // edx: receiver
763  // ebx: FixedArray receiver->elements
764  // edi: receiver map
765  // Fast case: Do the store, could be either Object or double.
766  __ bind(fast_object);
767  if (check_map == kCheckMap) {
768  __ mov(edi, FieldOperand(ebx, HeapObject::kMapOffset));
769  __ cmp(edi, masm->isolate()->factory()->fixed_array_map());
770  __ j(not_equal, fast_double);
771  }
772  // Smi stores don't require further checks.
773  Label non_smi_value;
774  __ JumpIfNotSmi(eax, &non_smi_value);
775  if (increment_length == kIncrementLength) {
776  // Add 1 to receiver->length.
777  __ add(FieldOperand(edx, JSArray::kLengthOffset),
778  Immediate(Smi::FromInt(1)));
779  }
780  // It's irrelevant whether array is smi-only or not when writing a smi.
781  __ mov(CodeGenerator::FixedArrayElementOperand(ebx, ecx), eax);
782  __ ret(0);
783 
784  __ bind(&non_smi_value);
785  // Escape to elements kind transition case.
786  __ mov(edi, FieldOperand(edx, HeapObject::kMapOffset));
787  __ CheckFastObjectElements(edi, &transition_smi_elements);
788 
789  // Fast elements array, store the value to the elements backing store.
790  __ bind(&finish_object_store);
791  if (increment_length == kIncrementLength) {
792  // Add 1 to receiver->length.
793  __ add(FieldOperand(edx, JSArray::kLengthOffset),
794  Immediate(Smi::FromInt(1)));
795  }
796  __ mov(CodeGenerator::FixedArrayElementOperand(ebx, ecx), eax);
797  // Update write barrier for the elements array address.
798  __ mov(edx, eax); // Preserve the value which is returned.
799  __ RecordWriteArray(
800  ebx, edx, ecx, kDontSaveFPRegs, EMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
801  __ ret(0);
802 
803  __ bind(fast_double);
804  if (check_map == kCheckMap) {
805  // Check for fast double array case. If this fails, call through to the
806  // runtime.
807  __ cmp(edi, masm->isolate()->factory()->fixed_double_array_map());
808  __ j(not_equal, slow);
809  // If the value is a number, store it as a double in the FastDoubleElements
810  // array.
811  }
812  __ bind(&fast_double_without_map_check);
813  __ StoreNumberToDoubleElements(eax, ebx, ecx, edi, xmm0,
814  &transition_double_elements, false);
815  if (increment_length == kIncrementLength) {
816  // Add 1 to receiver->length.
817  __ add(FieldOperand(edx, JSArray::kLengthOffset),
818  Immediate(Smi::FromInt(1)));
819  }
820  __ ret(0);
821 
822  __ bind(&transition_smi_elements);
823  __ mov(ebx, FieldOperand(edx, HeapObject::kMapOffset));
824 
825  // Transition the array appropriately depending on the value type.
826  __ CheckMap(eax,
827  masm->isolate()->factory()->heap_number_map(),
828  &non_double_value,
829  DONT_DO_SMI_CHECK);
830 
831  // Value is a double. Transition FAST_SMI_ELEMENTS -> FAST_DOUBLE_ELEMENTS
832  // and complete the store.
833  __ LoadTransitionedArrayMapConditional(FAST_SMI_ELEMENTS,
834  FAST_DOUBLE_ELEMENTS,
835  ebx,
836  edi,
837  slow);
838  ElementsTransitionGenerator::GenerateSmiToDouble(masm, slow);
839  __ mov(ebx, FieldOperand(edx, JSObject::kElementsOffset));
840  __ jmp(&fast_double_without_map_check);
841 
842  __ bind(&non_double_value);
843  // Value is not a double, FAST_SMI_ELEMENTS -> FAST_ELEMENTS
844  __ LoadTransitionedArrayMapConditional(FAST_SMI_ELEMENTS,
845  FAST_ELEMENTS,
846  ebx,
847  edi,
848  slow);
849  ElementsTransitionGenerator::GenerateMapChangeElementsTransition(masm);
850  __ mov(ebx, FieldOperand(edx, JSObject::kElementsOffset));
851  __ jmp(&finish_object_store);
852 
853  __ bind(&transition_double_elements);
854  // Elements are FAST_DOUBLE_ELEMENTS, but value is an Object that's not a
855  // HeapNumber. Make sure that the receiver is a Array with FAST_ELEMENTS and
856  // transition array from FAST_DOUBLE_ELEMENTS to FAST_ELEMENTS
857  __ mov(ebx, FieldOperand(edx, HeapObject::kMapOffset));
858  __ LoadTransitionedArrayMapConditional(FAST_DOUBLE_ELEMENTS,
859  FAST_ELEMENTS,
860  ebx,
861  edi,
862  slow);
863  ElementsTransitionGenerator::GenerateDoubleToObject(masm, slow);
864  __ mov(ebx, FieldOperand(edx, JSObject::kElementsOffset));
865  __ jmp(&finish_object_store);
866 }
867 
868 
869 void KeyedStoreIC::GenerateGeneric(MacroAssembler* masm,
870  StrictModeFlag strict_mode) {
871  // ----------- S t a t e -------------
872  // -- eax : value
873  // -- ecx : key
874  // -- edx : receiver
875  // -- esp[0] : return address
876  // -----------------------------------
877  Label slow, fast_object, fast_object_grow;
878  Label fast_double, fast_double_grow;
879  Label array, extra, check_if_double_array;
880 
881  // Check that the object isn't a smi.
882  __ JumpIfSmi(edx, &slow);
883  // Get the map from the receiver.
884  __ mov(edi, FieldOperand(edx, HeapObject::kMapOffset));
885  // Check that the receiver does not require access checks. We need
886  // to do this because this generic stub does not perform map checks.
887  __ test_b(FieldOperand(edi, Map::kBitFieldOffset),
888  1 << Map::kIsAccessCheckNeeded);
889  __ j(not_zero, &slow);
890  // Check that the key is a smi.
891  __ JumpIfNotSmi(ecx, &slow);
892  __ CmpInstanceType(edi, JS_ARRAY_TYPE);
893  __ j(equal, &array);
894  // Check that the object is some kind of JSObject.
895  __ CmpInstanceType(edi, FIRST_JS_OBJECT_TYPE);
896  __ j(below, &slow);
897 
898  // Object case: Check key against length in the elements array.
899  // eax: value
900  // edx: JSObject
901  // ecx: key (a smi)
902  // edi: receiver map
903  __ mov(ebx, FieldOperand(edx, JSObject::kElementsOffset));
904  // Check array bounds. Both the key and the length of FixedArray are smis.
905  __ cmp(ecx, FieldOperand(ebx, FixedArray::kLengthOffset));
906  __ j(below, &fast_object);
907 
908  // Slow case: call runtime.
909  __ bind(&slow);
910  GenerateRuntimeSetProperty(masm, strict_mode);
911 
912  // Extra capacity case: Check if there is extra capacity to
913  // perform the store and update the length. Used for adding one
914  // element to the array by writing to array[array.length].
915  __ bind(&extra);
916  // eax: value
917  // edx: receiver, a JSArray
918  // ecx: key, a smi.
919  // ebx: receiver->elements, a FixedArray
920  // edi: receiver map
921  // flags: compare (ecx, edx.length())
922  // do not leave holes in the array:
923  __ j(not_equal, &slow);
924  __ cmp(ecx, FieldOperand(ebx, FixedArray::kLengthOffset));
925  __ j(above_equal, &slow);
926  __ mov(edi, FieldOperand(ebx, HeapObject::kMapOffset));
927  __ cmp(edi, masm->isolate()->factory()->fixed_array_map());
928  __ j(not_equal, &check_if_double_array);
929  __ jmp(&fast_object_grow);
930 
931  __ bind(&check_if_double_array);
932  __ cmp(edi, masm->isolate()->factory()->fixed_double_array_map());
933  __ j(not_equal, &slow);
934  __ jmp(&fast_double_grow);
935 
936  // Array case: Get the length and the elements array from the JS
937  // array. Check that the array is in fast mode (and writable); if it
938  // is the length is always a smi.
939  __ bind(&array);
940  // eax: value
941  // edx: receiver, a JSArray
942  // ecx: key, a smi.
943  // edi: receiver map
944  __ mov(ebx, FieldOperand(edx, JSObject::kElementsOffset));
945 
946  // Check the key against the length in the array and fall through to the
947  // common store code.
948  __ cmp(ecx, FieldOperand(edx, JSArray::kLengthOffset)); // Compare smis.
949  __ j(above_equal, &extra);
950 
951  KeyedStoreGenerateGenericHelper(masm, &fast_object, &fast_double,
952  &slow, kCheckMap, kDontIncrementLength);
953  KeyedStoreGenerateGenericHelper(masm, &fast_object_grow, &fast_double_grow,
954  &slow, kDontCheckMap, kIncrementLength);
955 }
956 
957 
958 // The generated code does not accept smi keys.
959 // The generated code falls through if both probes miss.
960 void CallICBase::GenerateMonomorphicCacheProbe(MacroAssembler* masm,
961  int argc,
962  Code::Kind kind,
963  Code::ExtraICState extra_state) {
964  // ----------- S t a t e -------------
965  // -- ecx : name
966  // -- edx : receiver
967  // -----------------------------------
968  Label number, non_number, non_string, boolean, probe, miss;
969 
970  // Probe the stub cache.
971  Code::Flags flags = Code::ComputeFlags(kind,
972  MONOMORPHIC,
973  extra_state,
974  Code::NORMAL,
975  argc);
976  Isolate* isolate = masm->isolate();
977  isolate->stub_cache()->GenerateProbe(masm, flags, edx, ecx, ebx, eax);
978 
979  // If the stub cache probing failed, the receiver might be a value.
980  // For value objects, we use the map of the prototype objects for
981  // the corresponding JSValue for the cache and that is what we need
982  // to probe.
983  //
984  // Check for number.
985  __ JumpIfSmi(edx, &number);
986  __ CmpObjectType(edx, HEAP_NUMBER_TYPE, ebx);
987  __ j(not_equal, &non_number);
988  __ bind(&number);
989  StubCompiler::GenerateLoadGlobalFunctionPrototype(
990  masm, Context::NUMBER_FUNCTION_INDEX, edx);
991  __ jmp(&probe);
992 
993  // Check for string.
994  __ bind(&non_number);
995  __ CmpInstanceType(ebx, FIRST_NONSTRING_TYPE);
996  __ j(above_equal, &non_string);
997  StubCompiler::GenerateLoadGlobalFunctionPrototype(
998  masm, Context::STRING_FUNCTION_INDEX, edx);
999  __ jmp(&probe);
1000 
1001  // Check for boolean.
1002  __ bind(&non_string);
1003  __ cmp(edx, isolate->factory()->true_value());
1004  __ j(equal, &boolean);
1005  __ cmp(edx, isolate->factory()->false_value());
1006  __ j(not_equal, &miss);
1007  __ bind(&boolean);
1008  StubCompiler::GenerateLoadGlobalFunctionPrototype(
1009  masm, Context::BOOLEAN_FUNCTION_INDEX, edx);
1010 
1011  // Probe the stub cache for the value object.
1012  __ bind(&probe);
1013  isolate->stub_cache()->GenerateProbe(masm, flags, edx, ecx, ebx, no_reg);
1014  __ bind(&miss);
1015 }
1016 
1017 
1018 static void GenerateFunctionTailCall(MacroAssembler* masm,
1019  int argc,
1020  Label* miss) {
1021  // ----------- S t a t e -------------
1022  // -- ecx : name
1023  // -- edi : function
1024  // -- esp[0] : return address
1025  // -- esp[(argc - n) * 4] : arg[n] (zero-based)
1026  // -- ...
1027  // -- esp[(argc + 1) * 4] : receiver
1028  // -----------------------------------
1029 
1030  // Check that the result is not a smi.
1031  __ JumpIfSmi(edi, miss);
1032 
1033  // Check that the value is a JavaScript function, fetching its map into eax.
1034  __ CmpObjectType(edi, JS_FUNCTION_TYPE, eax);
1035  __ j(not_equal, miss);
1036 
1037  // Invoke the function.
1038  ParameterCount actual(argc);
1039  __ InvokeFunction(edi, actual, JUMP_FUNCTION,
1040  NullCallWrapper(), CALL_AS_METHOD);
1041 }
1042 
1043 
1044 // The generated code falls through if the call should be handled by runtime.
1045 void CallICBase::GenerateNormal(MacroAssembler* masm, int argc) {
1046  // ----------- S t a t e -------------
1047  // -- ecx : name
1048  // -- esp[0] : return address
1049  // -- esp[(argc - n) * 4] : arg[n] (zero-based)
1050  // -- ...
1051  // -- esp[(argc + 1) * 4] : receiver
1052  // -----------------------------------
1053  Label miss;
1054 
1055  // Get the receiver of the function from the stack; 1 ~ return address.
1056  __ mov(edx, Operand(esp, (argc + 1) * kPointerSize));
1057 
1058  GenerateStringDictionaryReceiverCheck(masm, edx, eax, ebx, &miss);
1059 
1060  // eax: elements
1061  // Search the dictionary placing the result in edi.
1062  GenerateDictionaryLoad(masm, &miss, eax, ecx, edi, ebx, edi);
1063  GenerateFunctionTailCall(masm, argc, &miss);
1064 
1065  __ bind(&miss);
1066 }
1067 
1068 
1069 void CallICBase::GenerateMiss(MacroAssembler* masm,
1070  int argc,
1071  IC::UtilityId id,
1072  Code::ExtraICState extra_state) {
1073  // ----------- S t a t e -------------
1074  // -- ecx : name
1075  // -- esp[0] : return address
1076  // -- esp[(argc - n) * 4] : arg[n] (zero-based)
1077  // -- ...
1078  // -- esp[(argc + 1) * 4] : receiver
1079  // -----------------------------------
1080 
1081  Counters* counters = masm->isolate()->counters();
1082  if (id == IC::kCallIC_Miss) {
1083  __ IncrementCounter(counters->call_miss(), 1);
1084  } else {
1085  __ IncrementCounter(counters->keyed_call_miss(), 1);
1086  }
1087 
1088  // Get the receiver of the function from the stack; 1 ~ return address.
1089  __ mov(edx, Operand(esp, (argc + 1) * kPointerSize));
1090 
1091  {
1092  FrameScope scope(masm, StackFrame::INTERNAL);
1093 
1094  // Push the receiver and the name of the function.
1095  __ push(edx);
1096  __ push(ecx);
1097 
1098  // Call the entry.
1099  CEntryStub stub(1);
1100  __ mov(eax, Immediate(2));
1101  __ mov(ebx, Immediate(ExternalReference(IC_Utility(id), masm->isolate())));
1102  __ CallStub(&stub);
1103 
1104  // Move result to edi and exit the internal frame.
1105  __ mov(edi, eax);
1106  }
1107 
1108  // Check if the receiver is a global object of some sort.
1109  // This can happen only for regular CallIC but not KeyedCallIC.
1110  if (id == IC::kCallIC_Miss) {
1111  Label invoke, global;
1112  __ mov(edx, Operand(esp, (argc + 1) * kPointerSize)); // receiver
1113  __ JumpIfSmi(edx, &invoke, Label::kNear);
1114  __ mov(ebx, FieldOperand(edx, HeapObject::kMapOffset));
1115  __ movzx_b(ebx, FieldOperand(ebx, Map::kInstanceTypeOffset));
1116  __ cmp(ebx, JS_GLOBAL_OBJECT_TYPE);
1117  __ j(equal, &global, Label::kNear);
1118  __ cmp(ebx, JS_BUILTINS_OBJECT_TYPE);
1119  __ j(not_equal, &invoke, Label::kNear);
1120 
1121  // Patch the receiver on the stack.
1122  __ bind(&global);
1123  __ mov(edx, FieldOperand(edx, GlobalObject::kGlobalReceiverOffset));
1124  __ mov(Operand(esp, (argc + 1) * kPointerSize), edx);
1125  __ bind(&invoke);
1126  }
1127 
1128  // Invoke the function.
1129  CallKind call_kind = CallICBase::Contextual::decode(extra_state)
1130  ? CALL_AS_FUNCTION
1131  : CALL_AS_METHOD;
1132  ParameterCount actual(argc);
1133  __ InvokeFunction(edi,
1134  actual,
1135  JUMP_FUNCTION,
1136  NullCallWrapper(),
1137  call_kind);
1138 }
1139 
1140 
1141 void CallIC::GenerateMegamorphic(MacroAssembler* masm,
1142  int argc,
1143  Code::ExtraICState extra_state) {
1144  // ----------- S t a t e -------------
1145  // -- ecx : name
1146  // -- esp[0] : return address
1147  // -- esp[(argc - n) * 4] : arg[n] (zero-based)
1148  // -- ...
1149  // -- esp[(argc + 1) * 4] : receiver
1150  // -----------------------------------
1151 
1152  // Get the receiver of the function from the stack; 1 ~ return address.
1153  __ mov(edx, Operand(esp, (argc + 1) * kPointerSize));
1154  CallICBase::GenerateMonomorphicCacheProbe(masm, argc, Code::CALL_IC,
1155  extra_state);
1156 
1157  GenerateMiss(masm, argc, extra_state);
1158 }
1159 
1160 
1161 void KeyedCallIC::GenerateMegamorphic(MacroAssembler* masm, int argc) {
1162  // ----------- S t a t e -------------
1163  // -- ecx : name
1164  // -- esp[0] : return address
1165  // -- esp[(argc - n) * 4] : arg[n] (zero-based)
1166  // -- ...
1167  // -- esp[(argc + 1) * 4] : receiver
1168  // -----------------------------------
1169 
1170  // Get the receiver of the function from the stack; 1 ~ return address.
1171  __ mov(edx, Operand(esp, (argc + 1) * kPointerSize));
1172 
1173  Label do_call, slow_call, slow_load, slow_reload_receiver;
1174  Label check_number_dictionary, check_string, lookup_monomorphic_cache;
1175  Label index_smi, index_string;
1176 
1177  // Check that the key is a smi.
1178  __ JumpIfNotSmi(ecx, &check_string);
1179 
1180  __ bind(&index_smi);
1181  // Now the key is known to be a smi. This place is also jumped to from
1182  // where a numeric string is converted to a smi.
1183 
1184  GenerateKeyedLoadReceiverCheck(
1185  masm, edx, eax, Map::kHasIndexedInterceptor, &slow_call);
1186 
1187  GenerateFastArrayLoad(
1188  masm, edx, ecx, eax, edi, &check_number_dictionary, &slow_load);
1189  Isolate* isolate = masm->isolate();
1190  Counters* counters = isolate->counters();
1191  __ IncrementCounter(counters->keyed_call_generic_smi_fast(), 1);
1192 
1193  __ bind(&do_call);
1194  // receiver in edx is not used after this point.
1195  // ecx: key
1196  // edi: function
1197  GenerateFunctionTailCall(masm, argc, &slow_call);
1198 
1199  __ bind(&check_number_dictionary);
1200  // eax: elements
1201  // ecx: smi key
1202  // Check whether the elements is a number dictionary.
1203  __ CheckMap(eax,
1204  isolate->factory()->hash_table_map(),
1205  &slow_load,
1206  DONT_DO_SMI_CHECK);
1207  __ mov(ebx, ecx);
1208  __ SmiUntag(ebx);
1209  // ebx: untagged index
1210  // Receiver in edx will be clobbered, need to reload it on miss.
1211  __ LoadFromNumberDictionary(
1212  &slow_reload_receiver, eax, ecx, ebx, edx, edi, edi);
1213  __ IncrementCounter(counters->keyed_call_generic_smi_dict(), 1);
1214  __ jmp(&do_call);
1215 
1216  __ bind(&slow_reload_receiver);
1217  __ mov(edx, Operand(esp, (argc + 1) * kPointerSize));
1218 
1219  __ bind(&slow_load);
1220  // This branch is taken when calling KeyedCallIC_Miss is neither required
1221  // nor beneficial.
1222  __ IncrementCounter(counters->keyed_call_generic_slow_load(), 1);
1223 
1224  {
1225  FrameScope scope(masm, StackFrame::INTERNAL);
1226  __ push(ecx); // save the key
1227  __ push(edx); // pass the receiver
1228  __ push(ecx); // pass the key
1229  __ CallRuntime(Runtime::kKeyedGetProperty, 2);
1230  __ pop(ecx); // restore the key
1231  // Leave the internal frame.
1232  }
1233 
1234  __ mov(edi, eax);
1235  __ jmp(&do_call);
1236 
1237  __ bind(&check_string);
1238  GenerateKeyStringCheck(masm, ecx, eax, ebx, &index_string, &slow_call);
1239 
1240  // The key is known to be a symbol.
1241  // If the receiver is a regular JS object with slow properties then do
1242  // a quick inline probe of the receiver's dictionary.
1243  // Otherwise do the monomorphic cache probe.
1244  GenerateKeyedLoadReceiverCheck(
1245  masm, edx, eax, Map::kHasNamedInterceptor, &lookup_monomorphic_cache);
1246 
1247  __ mov(ebx, FieldOperand(edx, JSObject::kPropertiesOffset));
1248  __ CheckMap(ebx,
1249  isolate->factory()->hash_table_map(),
1250  &lookup_monomorphic_cache,
1251  DONT_DO_SMI_CHECK);
1252 
1253  GenerateDictionaryLoad(masm, &slow_load, ebx, ecx, eax, edi, edi);
1254  __ IncrementCounter(counters->keyed_call_generic_lookup_dict(), 1);
1255  __ jmp(&do_call);
1256 
1257  __ bind(&lookup_monomorphic_cache);
1258  __ IncrementCounter(counters->keyed_call_generic_lookup_cache(), 1);
1259  CallICBase::GenerateMonomorphicCacheProbe(masm, argc, Code::KEYED_CALL_IC,
1260  Code::kNoExtraICState);
1261  // Fall through on miss.
1262 
1263  __ bind(&slow_call);
1264  // This branch is taken if:
1265  // - the receiver requires boxing or access check,
1266  // - the key is neither smi nor symbol,
1267  // - the value loaded is not a function,
1268  // - there is hope that the runtime will create a monomorphic call stub
1269  // that will get fetched next time.
1270  __ IncrementCounter(counters->keyed_call_generic_slow(), 1);
1271  GenerateMiss(masm, argc);
1272 
1273  __ bind(&index_string);
1274  __ IndexFromHash(ebx, ecx);
1275  // Now jump to the place where smi keys are handled.
1276  __ jmp(&index_smi);
1277 }
1278 
1279 
1280 void KeyedCallIC::GenerateNonStrictArguments(MacroAssembler* masm,
1281  int argc) {
1282  // ----------- S t a t e -------------
1283  // -- ecx : name
1284  // -- esp[0] : return address
1285  // -- esp[(argc - n) * 4] : arg[n] (zero-based)
1286  // -- ...
1287  // -- esp[(argc + 1) * 4] : receiver
1288  // -----------------------------------
1289  Label slow, notin;
1290  Factory* factory = masm->isolate()->factory();
1291  __ mov(edx, Operand(esp, (argc + 1) * kPointerSize));
1292  Operand mapped_location =
1293  GenerateMappedArgumentsLookup(masm, edx, ecx, ebx, eax, &notin, &slow);
1294  __ mov(edi, mapped_location);
1295  GenerateFunctionTailCall(masm, argc, &slow);
1296  __ bind(&notin);
1297  // The unmapped lookup expects that the parameter map is in ebx.
1298  Operand unmapped_location =
1299  GenerateUnmappedArgumentsLookup(masm, ecx, ebx, eax, &slow);
1300  __ cmp(unmapped_location, factory->the_hole_value());
1301  __ j(equal, &slow);
1302  __ mov(edi, unmapped_location);
1303  GenerateFunctionTailCall(masm, argc, &slow);
1304  __ bind(&slow);
1305  GenerateMiss(masm, argc);
1306 }
1307 
1308 
1309 void KeyedCallIC::GenerateNormal(MacroAssembler* masm, int argc) {
1310  // ----------- S t a t e -------------
1311  // -- ecx : name
1312  // -- esp[0] : return address
1313  // -- esp[(argc - n) * 4] : arg[n] (zero-based)
1314  // -- ...
1315  // -- esp[(argc + 1) * 4] : receiver
1316  // -----------------------------------
1317 
1318  // Check if the name is a string.
1319  Label miss;
1320  __ JumpIfSmi(ecx, &miss);
1321  Condition cond = masm->IsObjectStringType(ecx, eax, eax);
1322  __ j(NegateCondition(cond), &miss);
1323  CallICBase::GenerateNormal(masm, argc);
1324  __ bind(&miss);
1325  GenerateMiss(masm, argc);
1326 }
1327 
1328 
1329 void LoadIC::GenerateMegamorphic(MacroAssembler* masm) {
1330  // ----------- S t a t e -------------
1331  // -- ecx : name
1332  // -- edx : receiver
1333  // -- esp[0] : return address
1334  // -----------------------------------
1335 
1336  // Probe the stub cache.
1337  Code::Flags flags = Code::ComputeFlags(Code::LOAD_IC, MONOMORPHIC);
1338  Isolate::Current()->stub_cache()->GenerateProbe(masm, flags, edx, ecx, ebx,
1339  eax);
1340 
1341  // Cache miss: Jump to runtime.
1342  GenerateMiss(masm);
1343 }
1344 
1345 
1346 void LoadIC::GenerateNormal(MacroAssembler* masm) {
1347  // ----------- S t a t e -------------
1348  // -- ecx : name
1349  // -- edx : receiver
1350  // -- esp[0] : return address
1351  // -----------------------------------
1352  Label miss;
1353 
1354  GenerateStringDictionaryReceiverCheck(masm, edx, eax, ebx, &miss);
1355 
1356  // eax: elements
1357  // Search the dictionary placing the result in eax.
1358  GenerateDictionaryLoad(masm, &miss, eax, ecx, edi, ebx, eax);
1359  __ ret(0);
1360 
1361  // Cache miss: Jump to runtime.
1362  __ bind(&miss);
1363  GenerateMiss(masm);
1364 }
1365 
1366 
1367 void LoadIC::GenerateMiss(MacroAssembler* masm) {
1368  // ----------- S t a t e -------------
1369  // -- ecx : name
1370  // -- edx : receiver
1371  // -- esp[0] : return address
1372  // -----------------------------------
1373 
1374  __ IncrementCounter(masm->isolate()->counters()->load_miss(), 1);
1375 
1376  __ pop(ebx);
1377  __ push(edx); // receiver
1378  __ push(ecx); // name
1379  __ push(ebx); // return address
1380 
1381  // Perform tail call to the entry.
1382  ExternalReference ref =
1383  ExternalReference(IC_Utility(kLoadIC_Miss), masm->isolate());
1384  __ TailCallExternalReference(ref, 2, 1);
1385 }
1386 
1387 
1388 void KeyedLoadIC::GenerateMiss(MacroAssembler* masm, bool force_generic) {
1389  // ----------- S t a t e -------------
1390  // -- ecx : key
1391  // -- edx : receiver
1392  // -- esp[0] : return address
1393  // -----------------------------------
1394 
1395  __ IncrementCounter(masm->isolate()->counters()->keyed_load_miss(), 1);
1396 
1397  __ pop(ebx);
1398  __ push(edx); // receiver
1399  __ push(ecx); // name
1400  __ push(ebx); // return address
1401 
1402  // Perform tail call to the entry.
1403  ExternalReference ref = force_generic
1404  ? ExternalReference(IC_Utility(kKeyedLoadIC_MissForceGeneric),
1405  masm->isolate())
1406  : ExternalReference(IC_Utility(kKeyedLoadIC_Miss), masm->isolate());
1407  __ TailCallExternalReference(ref, 2, 1);
1408 }
1409 
1410 
1411 void KeyedLoadIC::GenerateRuntimeGetProperty(MacroAssembler* masm) {
1412  // ----------- S t a t e -------------
1413  // -- ecx : key
1414  // -- edx : receiver
1415  // -- esp[0] : return address
1416  // -----------------------------------
1417 
1418  __ pop(ebx);
1419  __ push(edx); // receiver
1420  __ push(ecx); // name
1421  __ push(ebx); // return address
1422 
1423  // Perform tail call to the entry.
1424  __ TailCallRuntime(Runtime::kKeyedGetProperty, 2, 1);
1425 }
1426 
1427 
1428 void StoreIC::GenerateMegamorphic(MacroAssembler* masm,
1429  StrictModeFlag strict_mode) {
1430  // ----------- S t a t e -------------
1431  // -- eax : value
1432  // -- ecx : name
1433  // -- edx : receiver
1434  // -- esp[0] : return address
1435  // -----------------------------------
1436 
1437  Code::Flags flags =
1438  Code::ComputeFlags(Code::STORE_IC, MONOMORPHIC, strict_mode);
1439  Isolate::Current()->stub_cache()->GenerateProbe(masm, flags, edx, ecx, ebx,
1440  no_reg);
1441 
1442  // Cache miss: Jump to runtime.
1443  GenerateMiss(masm);
1444 }
1445 
1446 
1447 void StoreIC::GenerateMiss(MacroAssembler* masm) {
1448  // ----------- S t a t e -------------
1449  // -- eax : value
1450  // -- ecx : name
1451  // -- edx : receiver
1452  // -- esp[0] : return address
1453  // -----------------------------------
1454 
1455  __ pop(ebx);
1456  __ push(edx);
1457  __ push(ecx);
1458  __ push(eax);
1459  __ push(ebx);
1460 
1461  // Perform tail call to the entry.
1462  ExternalReference ref =
1463  ExternalReference(IC_Utility(kStoreIC_Miss), masm->isolate());
1464  __ TailCallExternalReference(ref, 3, 1);
1465 }
1466 
1467 
1468 void StoreIC::GenerateArrayLength(MacroAssembler* masm) {
1469  // ----------- S t a t e -------------
1470  // -- eax : value
1471  // -- ecx : name
1472  // -- edx : receiver
1473  // -- esp[0] : return address
1474  // -----------------------------------
1475  //
1476  // This accepts as a receiver anything JSArray::SetElementsLength accepts
1477  // (currently anything except for external arrays which means anything with
1478  // elements of FixedArray type). Value must be a number, but only smis are
1479  // accepted as the most common case.
1480 
1481  Label miss;
1482 
1483  Register receiver = edx;
1484  Register value = eax;
1485  Register scratch = ebx;
1486 
1487  // Check that the receiver isn't a smi.
1488  __ JumpIfSmi(receiver, &miss);
1489 
1490  // Check that the object is a JS array.
1491  __ CmpObjectType(receiver, JS_ARRAY_TYPE, scratch);
1492  __ j(not_equal, &miss);
1493 
1494  // Check that elements are FixedArray.
1495  // We rely on StoreIC_ArrayLength below to deal with all types of
1496  // fast elements (including COW).
1497  __ mov(scratch, FieldOperand(receiver, JSArray::kElementsOffset));
1498  __ CmpObjectType(scratch, FIXED_ARRAY_TYPE, scratch);
1499  __ j(not_equal, &miss);
1500 
1501  // Check that the array has fast properties, otherwise the length
1502  // property might have been redefined.
1503  __ mov(scratch, FieldOperand(receiver, JSArray::kPropertiesOffset));
1504  __ CompareRoot(FieldOperand(scratch, FixedArray::kMapOffset),
1505  Heap::kHashTableMapRootIndex);
1506  __ j(equal, &miss);
1507 
1508  // Check that value is a smi.
1509  __ JumpIfNotSmi(value, &miss);
1510 
1511  // Prepare tail call to StoreIC_ArrayLength.
1512  __ pop(scratch);
1513  __ push(receiver);
1514  __ push(value);
1515  __ push(scratch); // return address
1516 
1517  ExternalReference ref =
1518  ExternalReference(IC_Utility(kStoreIC_ArrayLength), masm->isolate());
1519  __ TailCallExternalReference(ref, 2, 1);
1520 
1521  __ bind(&miss);
1522 
1523  GenerateMiss(masm);
1524 }
1525 
1526 
1527 void StoreIC::GenerateNormal(MacroAssembler* masm) {
1528  // ----------- S t a t e -------------
1529  // -- eax : value
1530  // -- ecx : name
1531  // -- edx : receiver
1532  // -- esp[0] : return address
1533  // -----------------------------------
1534 
1535  Label miss, restore_miss;
1536 
1537  GenerateStringDictionaryReceiverCheck(masm, edx, ebx, edi, &miss);
1538 
1539  // A lot of registers are needed for storing to slow case
1540  // objects. Push and restore receiver but rely on
1541  // GenerateDictionaryStore preserving the value and name.
1542  __ push(edx);
1543  GenerateDictionaryStore(masm, &restore_miss, ebx, ecx, eax, edx, edi);
1544  __ Drop(1);
1545  Counters* counters = masm->isolate()->counters();
1546  __ IncrementCounter(counters->store_normal_hit(), 1);
1547  __ ret(0);
1548 
1549  __ bind(&restore_miss);
1550  __ pop(edx);
1551 
1552  __ bind(&miss);
1553  __ IncrementCounter(counters->store_normal_miss(), 1);
1554  GenerateMiss(masm);
1555 }
1556 
1557 
1558 void StoreIC::GenerateGlobalProxy(MacroAssembler* masm,
1559  StrictModeFlag strict_mode) {
1560  // ----------- S t a t e -------------
1561  // -- eax : value
1562  // -- ecx : name
1563  // -- edx : receiver
1564  // -- esp[0] : return address
1565  // -----------------------------------
1566  __ pop(ebx);
1567  __ push(edx);
1568  __ push(ecx);
1569  __ push(eax);
1570  __ push(Immediate(Smi::FromInt(NONE))); // PropertyAttributes
1571  __ push(Immediate(Smi::FromInt(strict_mode)));
1572  __ push(ebx); // return address
1573 
1574  // Do tail-call to runtime routine.
1575  __ TailCallRuntime(Runtime::kSetProperty, 5, 1);
1576 }
1577 
1578 
1579 void KeyedStoreIC::GenerateRuntimeSetProperty(MacroAssembler* masm,
1580  StrictModeFlag strict_mode) {
1581  // ----------- S t a t e -------------
1582  // -- eax : value
1583  // -- ecx : key
1584  // -- edx : receiver
1585  // -- esp[0] : return address
1586  // -----------------------------------
1587 
1588  __ pop(ebx);
1589  __ push(edx);
1590  __ push(ecx);
1591  __ push(eax);
1592  __ push(Immediate(Smi::FromInt(NONE))); // PropertyAttributes
1593  __ push(Immediate(Smi::FromInt(strict_mode))); // Strict mode.
1594  __ push(ebx); // return address
1595 
1596  // Do tail-call to runtime routine.
1597  __ TailCallRuntime(Runtime::kSetProperty, 5, 1);
1598 }
1599 
1600 
1601 void KeyedStoreIC::GenerateMiss(MacroAssembler* masm, bool force_generic) {
1602  // ----------- S t a t e -------------
1603  // -- eax : value
1604  // -- ecx : key
1605  // -- edx : receiver
1606  // -- esp[0] : return address
1607  // -----------------------------------
1608 
1609  __ pop(ebx);
1610  __ push(edx);
1611  __ push(ecx);
1612  __ push(eax);
1613  __ push(ebx);
1614 
1615  // Do tail-call to runtime routine.
1616  ExternalReference ref = force_generic
1617  ? ExternalReference(IC_Utility(kKeyedStoreIC_MissForceGeneric),
1618  masm->isolate())
1619  : ExternalReference(IC_Utility(kKeyedStoreIC_Miss), masm->isolate());
1620  __ TailCallExternalReference(ref, 3, 1);
1621 }
1622 
1623 
1624 void KeyedStoreIC::GenerateSlow(MacroAssembler* masm) {
1625  // ----------- S t a t e -------------
1626  // -- eax : value
1627  // -- ecx : key
1628  // -- edx : receiver
1629  // -- esp[0] : return address
1630  // -----------------------------------
1631 
1632  __ pop(ebx);
1633  __ push(edx);
1634  __ push(ecx);
1635  __ push(eax);
1636  __ push(ebx); // return address
1637 
1638  // Do tail-call to runtime routine.
1639  ExternalReference ref(IC_Utility(kKeyedStoreIC_Slow), masm->isolate());
1640  __ TailCallExternalReference(ref, 3, 1);
1641 }
1642 
1643 
1644 void KeyedStoreIC::GenerateTransitionElementsSmiToDouble(MacroAssembler* masm) {
1645  // ----------- S t a t e -------------
1646  // -- ebx : target map
1647  // -- edx : receiver
1648  // -- esp[0] : return address
1649  // -----------------------------------
1650  // Must return the modified receiver in eax.
1651  if (!FLAG_trace_elements_transitions) {
1652  Label fail;
1653  ElementsTransitionGenerator::GenerateSmiToDouble(masm, &fail);
1654  __ mov(eax, edx);
1655  __ Ret();
1656  __ bind(&fail);
1657  }
1658 
1659  __ pop(ebx);
1660  __ push(edx);
1661  __ push(ebx); // return address
1662  // Leaving the code managed by the register allocator and return to the
1663  // convention of using esi as context register.
1664  __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
1665  __ TailCallRuntime(Runtime::kTransitionElementsSmiToDouble, 1, 1);
1666 }
1667 
1668 
1669 void KeyedStoreIC::GenerateTransitionElementsDoubleToObject(
1670  MacroAssembler* masm) {
1671  // ----------- S t a t e -------------
1672  // -- ebx : target map
1673  // -- edx : receiver
1674  // -- esp[0] : return address
1675  // -----------------------------------
1676  // Must return the modified receiver in eax.
1677  if (!FLAG_trace_elements_transitions) {
1678  Label fail;
1679  ElementsTransitionGenerator::GenerateDoubleToObject(masm, &fail);
1680  __ mov(eax, edx);
1681  __ Ret();
1682  __ bind(&fail);
1683  }
1684 
1685  __ pop(ebx);
1686  __ push(edx);
1687  __ push(ebx); // return address
1688  // Leaving the code managed by the register allocator and return to the
1689  // convention of using esi as context register.
1690  __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
1691  __ TailCallRuntime(Runtime::kTransitionElementsDoubleToObject, 1, 1);
1692 }
1693 
1694 
1695 #undef __
1696 
1697 
1698 Condition CompareIC::ComputeCondition(Token::Value op) {
1699  switch (op) {
1700  case Token::EQ_STRICT:
1701  case Token::EQ:
1702  return equal;
1703  case Token::LT:
1704  return less;
1705  case Token::GT:
1706  return greater;
1707  case Token::LTE:
1708  return less_equal;
1709  case Token::GTE:
1710  return greater_equal;
1711  default:
1712  UNREACHABLE();
1713  return no_condition;
1714  }
1715 }
1716 
1717 
1718 static bool HasInlinedSmiCode(Address address) {
1719  // The address of the instruction following the call.
1720  Address test_instruction_address =
1721  address + Assembler::kCallTargetAddressOffset;
1722 
1723  // If the instruction following the call is not a test al, nothing
1724  // was inlined.
1725  return *test_instruction_address == Assembler::kTestAlByte;
1726 }
1727 
1728 
1729 void CompareIC::UpdateCaches(Handle<Object> x, Handle<Object> y) {
1730  HandleScope scope;
1731  Handle<Code> rewritten;
1732  State previous_state = GetState();
1733 
1734  State state = TargetState(previous_state, HasInlinedSmiCode(address()), x, y);
1735  if (state == GENERIC) {
1736  CompareStub stub(GetCondition(), strict(), NO_COMPARE_FLAGS);
1737  rewritten = stub.GetCode();
1738  } else {
1739  ICCompareStub stub(op_, state);
1740  if (state == KNOWN_OBJECTS) {
1741  stub.set_known_map(Handle<Map>(Handle<JSObject>::cast(x)->map()));
1742  }
1743  rewritten = stub.GetCode();
1744  }
1745  set_target(*rewritten);
1746 
1747 #ifdef DEBUG
1748  if (FLAG_trace_ic) {
1749  PrintF("[CompareIC (%s->%s)#%s]\n",
1750  GetStateName(previous_state),
1751  GetStateName(state),
1752  Token::Name(op_));
1753  }
1754 #endif
1755 
1756  // Activate inlined smi code.
1757  if (previous_state == UNINITIALIZED) {
1758    PatchInlinedSmiCode(address(), ENABLE_INLINED_SMI_CHECK);
1759  }
1760 }
1761 
1762 
1763 void PatchInlinedSmiCode(Address address, InlinedSmiCheck check) {
1764  // The address of the instruction following the call.
1765  Address test_instruction_address =
1766      address + Assembler::kCallTargetAddressOffset;
1767 
1768  // If the instruction following the call is not a test al, nothing
1769  // was inlined.
1770  if (*test_instruction_address != Assembler::kTestAlByte) {
1771  ASSERT(*test_instruction_address == Assembler::kNopByte);
1772  return;
1773  }
1774 
1775  Address delta_address = test_instruction_address + 1;
1776  // The delta to the start of the map check instruction and the
1777  // condition code used at the patched jump.
1778  int8_t delta = *reinterpret_cast<int8_t*>(delta_address);
1779  if (FLAG_trace_ic) {
1780  PrintF("[ patching ic at %p, test=%p, delta=%d\n",
1781  address, test_instruction_address, delta);
1782  }
1783 
1784  // Patch with a short conditional jump. Enabling means switching from a short
1785  // jump-if-carry/not-carry to jump-if-zero/not-zero, whereas disabling is the
1786  // reverse operation of that.
1787  Address jmp_address = test_instruction_address - delta;
1788  ASSERT((check == ENABLE_INLINED_SMI_CHECK)
1789  ? (*jmp_address == Assembler::kJncShortOpcode ||
1790  *jmp_address == Assembler::kJcShortOpcode)
1791  : (*jmp_address == Assembler::kJnzShortOpcode ||
1792  *jmp_address == Assembler::kJzShortOpcode));
1793  Condition cc = (check == ENABLE_INLINED_SMI_CHECK)
1794      ? (*jmp_address == Assembler::kJncShortOpcode ? not_zero : zero)
1795  : (*jmp_address == Assembler::kJnzShortOpcode ? not_carry : carry);
1796  *jmp_address = static_cast<byte>(Assembler::kJccShortPrefix | cc);
1797 }
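
PatchInlinedSmiCode flips the short conditional jump that guards the inlined smi check: the byte after the IC call is either a nop (nothing was inlined) or a test al, imm8 whose immediate gives the distance back to that jump, and enabling or disabling the check switches the jump between its carry-based and zero-based forms, as the comments at lines 1784-1786 describe. Below is a minimal standalone sketch of the byte-level patch (not V8 code): the opcode constants are the standard ia32 encodings (short jcc is 0x70 | cc with a rel8 operand, test al, imm8 is 0xA8, nop is 0x90), hard-coded here rather than taken from Assembler, and the buffer and Patch function exist only for the example.

    #include <cassert>
    #include <cstdint>

    // Standard ia32 opcode bytes, hard-coded to keep the sketch self-contained.
    const uint8_t kJccShortPrefix = 0x70;
    const uint8_t kJcShortOpcode  = 0x72;  // jc  rel8  (cc = carry)
    const uint8_t kJncShortOpcode = 0x73;  // jnc rel8  (cc = not carry)
    const uint8_t kJzShortOpcode  = 0x74;  // jz  rel8  (cc = zero)
    const uint8_t kJnzShortOpcode = 0x75;  // jnz rel8  (cc = not zero)
    const uint8_t kTestAlByte     = 0xA8;  // test al, imm8
    const uint8_t kNopByte        = 0x90;

    enum InlinedSmiCheck { ENABLE_INLINED_SMI_CHECK, DISABLE_INLINED_SMI_CHECK };

    // Same control flow as PatchInlinedSmiCode above, operating on a plain buffer;
    // 'test_address' points at the byte following the IC call.
    void Patch(uint8_t* test_address, InlinedSmiCheck check) {
      if (*test_address != kTestAlByte) {
        assert(*test_address == kNopByte);  // nothing was inlined at this site
        return;
      }
      int8_t delta = static_cast<int8_t>(test_address[1]);  // imm8 of the test
      uint8_t* jmp_address = test_address - delta;           // the short jcc to flip

      uint8_t cc = (check == ENABLE_INLINED_SMI_CHECK)
          ? (*jmp_address == kJncShortOpcode ? kJnzShortOpcode - kJccShortPrefix
                                             : kJzShortOpcode - kJccShortPrefix)
          : (*jmp_address == kJnzShortOpcode ? kJncShortOpcode - kJccShortPrefix
                                             : kJcShortOpcode - kJccShortPrefix);
      *jmp_address = static_cast<uint8_t>(kJccShortPrefix | cc);
    }

    int main() {
      // Toy "code": [jnc rel8][padding][test al, delta], with the test 6 bytes
      // after the jump it refers back to.
      uint8_t code[] = {kJncShortOpcode, 0x10, 0x90, 0x90, 0x90, 0x90, kTestAlByte, 6};
      Patch(&code[6], ENABLE_INLINED_SMI_CHECK);
      assert(code[0] == kJnzShortOpcode);   // jnc became jnz
      Patch(&code[6], DISABLE_INLINED_SMI_CHECK);
      assert(code[0] == kJncShortOpcode);   // and back again
      return 0;
    }
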
1798 
1799 
1800 } } // namespace v8::internal
1801 
1802 #endif // V8_TARGET_ARCH_IA32