v8 3.11.10 (node 0.8.26)
V8 is Google's open source JavaScript engine
 All Data Structures Namespaces Files Functions Variables Typedefs Enumerations Enumerator Friends Macros Pages
ic-ia32.cc
Go to the documentation of this file.
1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are
4 // met:
5 //
6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided
11 // with the distribution.
12 // * Neither the name of Google Inc. nor the names of its
13 // contributors may be used to endorse or promote products derived
14 // from this software without specific prior written permission.
15 //
16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27 
28 #include "v8.h"
29 
30 #if defined(V8_TARGET_ARCH_IA32)
31 
32 #include "codegen.h"
33 #include "ic-inl.h"
34 #include "runtime.h"
35 #include "stub-cache.h"
36 
37 namespace v8 {
38 namespace internal {
39 
40 // ----------------------------------------------------------------------------
41 // Static IC stub generators.
42 //
43 
44 #define __ ACCESS_MASM(masm)
45 
46 
// Emits a branch to |global_object| when |type| equals any of the three
// global-object instance types; falls through for all other types.
// (Emitted instruction order is significant; left byte-identical.)
47 static void GenerateGlobalInstanceTypeCheck(MacroAssembler* masm,
48  Register type,
49  Label* global_object) {
50  // Register usage:
51  // type: holds the receiver instance type on entry.
// Three independent equality checks; any match means "is a global object".
52  __ cmp(type, JS_GLOBAL_OBJECT_TYPE);
53  __ j(equal, global_object);
54  __ cmp(type, JS_BUILTINS_OBJECT_TYPE);
55  __ j(equal, global_object);
56  __ cmp(type, JS_GLOBAL_PROXY_TYPE);
57  __ j(equal, global_object);
58 }
59 
60 
61 // Generated code falls through if the receiver is a regular non-global
62 // JS object with slow properties and no interceptors.
63 static void GenerateStringDictionaryReceiverCheck(MacroAssembler* masm,
64  Register receiver,
65  Register r0,
66  Register r1,
67  Label* miss) {
68  // Register usage:
69  // receiver: holds the receiver on entry and is unchanged.
70  // r0: used to hold receiver instance type.
71  // Holds the property dictionary on fall through.
72  // r1: used to hold receivers map.
73 
74  // Check that the receiver isn't a smi.
75  __ JumpIfSmi(receiver, miss);
76 
77  // Check that the receiver is a valid JS object.
78  __ mov(r1, FieldOperand(receiver, HeapObject::kMapOffset));
79  __ movzx_b(r0, FieldOperand(r1, Map::kInstanceTypeOffset));
80  __ cmp(r0, FIRST_SPEC_OBJECT_TYPE);
81  __ j(below, miss);
82 
83  // If this assert fails, we have to check upper bound too.
85 
86  GenerateGlobalInstanceTypeCheck(masm, r0, miss);
87 
88  // Check for non-global object that requires access check.
92  __ j(not_zero, miss);
93 
94  __ mov(r0, FieldOperand(receiver, JSObject::kPropertiesOffset));
95  __ CheckMap(r0, FACTORY->hash_table_map(), miss, DONT_DO_SMI_CHECK);
96 }
97 
98 
99 // Helper function used to load a property from a dictionary backing
100 // storage. This function may fail to load a property even though it is
101 // in the dictionary, so code at miss_label must always call a backup
102 // property load that is complete. This function is safe to call if
103 // name is not a symbol, and will jump to the miss_label in that
104 // case. The generated code assumes that the receiver has slow
105 // properties, is not a global object and does not have interceptors.
106 static void GenerateDictionaryLoad(MacroAssembler* masm,
107  Label* miss_label,
108  Register elements,
109  Register name,
110  Register r0,
111  Register r1,
112  Register result) {
113  // Register use:
114  //
115  // elements - holds the property dictionary on entry and is unchanged.
116  //
117  // name - holds the name of the property on entry and is unchanged.
118  //
119  // Scratch registers:
120  //
121  // r0 - used for the index into the property dictionary
122  //
123  // r1 - used to hold the capacity of the property dictionary.
124  //
125  // result - holds the result on exit.
126 
127  Label done;
128 
129  // Probe the dictionary.
131  miss_label,
132  &done,
133  elements,
134  name,
135  r0,
136  r1);
137 
138  // If probing finds an entry in the dictionary, r0 contains the
139  // index into the dictionary. Check that the value is a normal
140  // property.
141  __ bind(&done);
142  const int kElementsStartOffset =
145  const int kDetailsOffset = kElementsStartOffset + 2 * kPointerSize;
146  __ test(Operand(elements, r0, times_4, kDetailsOffset - kHeapObjectTag),
147  Immediate(PropertyDetails::TypeField::kMask << kSmiTagSize));
148  __ j(not_zero, miss_label);
149 
150  // Get the value at the masked, scaled index.
151  const int kValueOffset = kElementsStartOffset + kPointerSize;
152  __ mov(result, Operand(elements, r0, times_4, kValueOffset - kHeapObjectTag));
153 }
154 
155 
156 // Helper function used to store a property to a dictionary backing
157 // storage. This function may fail to store a property eventhough it
158 // is in the dictionary, so code at miss_label must always call a
159 // backup property store that is complete. This function is safe to
160 // call if name is not a symbol, and will jump to the miss_label in
161 // that case. The generated code assumes that the receiver has slow
162 // properties, is not a global object and does not have interceptors.
163 static void GenerateDictionaryStore(MacroAssembler* masm,
164  Label* miss_label,
165  Register elements,
166  Register name,
167  Register value,
168  Register r0,
169  Register r1) {
170  // Register use:
171  //
172  // elements - holds the property dictionary on entry and is clobbered.
173  //
174  // name - holds the name of the property on entry and is unchanged.
175  //
176  // value - holds the value to store and is unchanged.
177  //
178  // r0 - used for index into the property dictionary and is clobbered.
179  //
180  // r1 - used to hold the capacity of the property dictionary and is clobbered.
181  Label done;
182 
183 
184  // Probe the dictionary.
186  miss_label,
187  &done,
188  elements,
189  name,
190  r0,
191  r1);
192 
193  // If probing finds an entry in the dictionary, r0 contains the
194  // index into the dictionary. Check that the value is a normal
195  // property that is not read only.
196  __ bind(&done);
197  const int kElementsStartOffset =
200  const int kDetailsOffset = kElementsStartOffset + 2 * kPointerSize;
201  const int kTypeAndReadOnlyMask =
202  (PropertyDetails::TypeField::kMask |
203  PropertyDetails::AttributesField::encode(READ_ONLY)) << kSmiTagSize;
204  __ test(Operand(elements, r0, times_4, kDetailsOffset - kHeapObjectTag),
205  Immediate(kTypeAndReadOnlyMask));
206  __ j(not_zero, miss_label);
207 
208  // Store the value at the masked, scaled index.
209  const int kValueOffset = kElementsStartOffset + kPointerSize;
210  __ lea(r0, Operand(elements, r0, times_4, kValueOffset - kHeapObjectTag));
211  __ mov(Operand(r0, 0), value);
212 
213  // Update write barrier. Make sure not to clobber the value.
214  __ mov(r1, value);
215  __ RecordWrite(elements, r0, r1, kDontSaveFPRegs);
216 }
217 
218 
219 void LoadIC::GenerateArrayLength(MacroAssembler* masm) {
220  // ----------- S t a t e -------------
221  // -- ecx : name
222  // -- edx : receiver
223  // -- esp[0] : return address
224  // -----------------------------------
225  Label miss;
226 
227  StubCompiler::GenerateLoadArrayLength(masm, edx, eax, &miss);
228  __ bind(&miss);
229  StubCompiler::GenerateLoadMiss(masm, Code::LOAD_IC);
230 }
231 
232 
233 void LoadIC::GenerateStringLength(MacroAssembler* masm,
234  bool support_wrappers) {
235  // ----------- S t a t e -------------
236  // -- ecx : name
237  // -- edx : receiver
238  // -- esp[0] : return address
239  // -----------------------------------
240  Label miss;
241 
242  StubCompiler::GenerateLoadStringLength(masm, edx, eax, ebx, &miss,
243  support_wrappers);
244  __ bind(&miss);
245  StubCompiler::GenerateLoadMiss(masm, Code::LOAD_IC);
246 }
247 
248 
249 void LoadIC::GenerateFunctionPrototype(MacroAssembler* masm) {
250  // ----------- S t a t e -------------
251  // -- ecx : name
252  // -- edx : receiver
253  // -- esp[0] : return address
254  // -----------------------------------
255  Label miss;
256 
257  StubCompiler::GenerateLoadFunctionPrototype(masm, edx, eax, ebx, &miss);
258  __ bind(&miss);
259  StubCompiler::GenerateLoadMiss(masm, Code::LOAD_IC);
260 }
261 
262 
263 // Checks the receiver for special cases (value type, slow case bits).
264 // Falls through for regular JS object.
265 static void GenerateKeyedLoadReceiverCheck(MacroAssembler* masm,
266  Register receiver,
267  Register map,
268  int interceptor_bit,
269  Label* slow) {
270  // Register use:
271  // receiver - holds the receiver and is unchanged.
272  // Scratch registers:
273  // map - used to hold the map of the receiver.
274 
275  // Check that the object isn't a smi.
276  __ JumpIfSmi(receiver, slow);
277 
278  // Get the map of the receiver.
279  __ mov(map, FieldOperand(receiver, HeapObject::kMapOffset));
280 
281  // Check bit field.
282  __ test_b(FieldOperand(map, Map::kBitFieldOffset),
283  (1 << Map::kIsAccessCheckNeeded) | (1 << interceptor_bit));
284  __ j(not_zero, slow);
285  // Check that the object is some kind of JS object EXCEPT JS Value type.
286  // In the case that the object is a value-wrapper object,
287  // we enter the runtime system to make sure that indexing
288  // into string objects works as intended.
290 
291  __ CmpInstanceType(map, JS_OBJECT_TYPE);
292  __ j(below, slow);
293 }
294 
295 
296 // Loads an indexed element from a fast case array.
297 // If not_fast_array is NULL, doesn't perform the elements map check.
298 static void GenerateFastArrayLoad(MacroAssembler* masm,
299  Register receiver,
300  Register key,
301  Register scratch,
302  Register result,
303  Label* not_fast_array,
304  Label* out_of_range) {
305  // Register use:
306  // receiver - holds the receiver and is unchanged.
307  // key - holds the key and is unchanged (must be a smi).
308  // Scratch registers:
309  // scratch - used to hold elements of the receiver and the loaded value.
310  // result - holds the result on exit if the load succeeds and
311  // we fall through.
312 
313  __ mov(scratch, FieldOperand(receiver, JSObject::kElementsOffset));
314  if (not_fast_array != NULL) {
315  // Check that the object is in fast mode and writable.
316  __ CheckMap(scratch,
317  FACTORY->fixed_array_map(),
318  not_fast_array,
320  } else {
321  __ AssertFastElements(scratch);
322  }
323  // Check that the key (index) is within bounds.
324  __ cmp(key, FieldOperand(scratch, FixedArray::kLengthOffset));
325  __ j(above_equal, out_of_range);
326  // Fast case: Do the load.
327  STATIC_ASSERT((kPointerSize == 4) && (kSmiTagSize == 1) && (kSmiTag == 0));
328  __ mov(scratch, FieldOperand(scratch, key, times_2, FixedArray::kHeaderSize));
329  __ cmp(scratch, Immediate(FACTORY->the_hole_value()));
330  // In case the loaded value is the_hole we have to consult GetProperty
331  // to ensure the prototype chain is searched.
332  __ j(equal, out_of_range);
333  if (!result.is(scratch)) {
334  __ mov(result, scratch);
335  }
336 }
337 
338 
339 // Checks whether a key is an array index string or a symbol string.
340 // Falls through if the key is a symbol.
341 static void GenerateKeyStringCheck(MacroAssembler* masm,
342  Register key,
343  Register map,
344  Register hash,
345  Label* index_string,
346  Label* not_symbol) {
347  // Register use:
348  // key - holds the key and is unchanged. Assumed to be non-smi.
349  // Scratch registers:
350  // map - used to hold the map of the key.
351  // hash - used to hold the hash of the key.
352  __ CmpObjectType(key, FIRST_NONSTRING_TYPE, map);
353  __ j(above_equal, not_symbol);
354 
355  // Is the string an array index, with cached numeric value?
356  __ mov(hash, FieldOperand(key, String::kHashFieldOffset));
357  __ test(hash, Immediate(String::kContainsCachedArrayIndexMask));
358  __ j(zero, index_string);
359 
360  // Is the string a symbol?
363  __ j(zero, not_symbol);
364 }
365 
366 
367 static Operand GenerateMappedArgumentsLookup(MacroAssembler* masm,
368  Register object,
369  Register key,
370  Register scratch1,
371  Register scratch2,
372  Label* unmapped_case,
373  Label* slow_case) {
374  Heap* heap = masm->isolate()->heap();
375  Factory* factory = masm->isolate()->factory();
376 
377  // Check that the receiver is a JSObject. Because of the elements
378  // map check later, we do not need to check for interceptors or
379  // whether it requires access checks.
380  __ JumpIfSmi(object, slow_case);
381  // Check that the object is some kind of JSObject.
382  __ CmpObjectType(object, FIRST_JS_RECEIVER_TYPE, scratch1);
383  __ j(below, slow_case);
384 
385  // Check that the key is a positive smi.
386  __ test(key, Immediate(0x80000001));
387  __ j(not_zero, slow_case);
388 
389  // Load the elements into scratch1 and check its map.
390  Handle<Map> arguments_map(heap->non_strict_arguments_elements_map());
391  __ mov(scratch1, FieldOperand(object, JSObject::kElementsOffset));
392  __ CheckMap(scratch1, arguments_map, slow_case, DONT_DO_SMI_CHECK);
393 
394  // Check if element is in the range of mapped arguments. If not, jump
395  // to the unmapped lookup with the parameter map in scratch1.
396  __ mov(scratch2, FieldOperand(scratch1, FixedArray::kLengthOffset));
397  __ sub(scratch2, Immediate(Smi::FromInt(2)));
398  __ cmp(key, scratch2);
399  __ j(above_equal, unmapped_case);
400 
401  // Load element index and check whether it is the hole.
402  const int kHeaderSize = FixedArray::kHeaderSize + 2 * kPointerSize;
403  __ mov(scratch2, FieldOperand(scratch1,
404  key,
406  kHeaderSize));
407  __ cmp(scratch2, factory->the_hole_value());
408  __ j(equal, unmapped_case);
409 
410  // Load value from context and return it. We can reuse scratch1 because
411  // we do not jump to the unmapped lookup (which requires the parameter
412  // map in scratch1).
413  const int kContextOffset = FixedArray::kHeaderSize;
414  __ mov(scratch1, FieldOperand(scratch1, kContextOffset));
415  return FieldOperand(scratch1,
416  scratch2,
419 }
420 
421 
422 static Operand GenerateUnmappedArgumentsLookup(MacroAssembler* masm,
423  Register key,
424  Register parameter_map,
425  Register scratch,
426  Label* slow_case) {
427  // Element is in arguments backing store, which is referenced by the
428  // second element of the parameter_map.
429  const int kBackingStoreOffset = FixedArray::kHeaderSize + kPointerSize;
430  Register backing_store = parameter_map;
431  __ mov(backing_store, FieldOperand(parameter_map, kBackingStoreOffset));
432  Handle<Map> fixed_array_map(masm->isolate()->heap()->fixed_array_map());
433  __ CheckMap(backing_store, fixed_array_map, slow_case, DONT_DO_SMI_CHECK);
434  __ mov(scratch, FieldOperand(backing_store, FixedArray::kLengthOffset));
435  __ cmp(key, scratch);
436  __ j(greater_equal, slow_case);
437  return FieldOperand(backing_store,
438  key,
441 }
442 
443 
// Generic keyed-load stub: handles smi keys via fast elements / number
// dictionaries, string keys via the keyed lookup cache / property
// dictionary, and numeric-string keys by converting them to smis.
// NOTE(review): this listing is a Doxygen dump with the original source
// line numbers embedded at the start of each line. The extraction DROPPED
// original lines 474, 484, 503, 513-514, 521, 525-526, 529, 579, 589, 591,
// 597-599 and 607-608, so this text is incomplete — consult upstream
// v8 3.11 src/ia32/ic-ia32.cc before relying on it. Gaps are marked below.
444 void KeyedLoadIC::GenerateGeneric(MacroAssembler* masm) {
445  // ----------- S t a t e -------------
446  // -- ecx : key
447  // -- edx : receiver
448  // -- esp[0] : return address
449  // -----------------------------------
450  Label slow, check_string, index_smi, index_string, property_array_property;
451  Label probe_dictionary, check_number_dictionary;
452 
453  // Check that the key is a smi.
454  __ JumpIfNotSmi(ecx, &check_string);
455  __ bind(&index_smi);
456  // Now the key is known to be a smi. This place is also jumped to from
457  // where a numeric string is converted to a smi.
458 
459  GenerateKeyedLoadReceiverCheck(
460  masm, edx, eax, Map::kHasIndexedInterceptor, &slow);
461 
462  // Check the receiver's map to see if it has fast elements.
463  __ CheckFastElements(eax, &check_number_dictionary);
464 
465  GenerateFastArrayLoad(masm, edx, ecx, eax, eax, NULL, &slow);
466  Isolate* isolate = masm->isolate();
467  Counters* counters = isolate->counters();
468  __ IncrementCounter(counters->keyed_load_generic_smi(), 1);
469  __ ret(0);
470 
471  __ bind(&check_number_dictionary);
472  __ mov(ebx, ecx);
473  __ SmiUntag(ebx);
// NOTE(review): original line 474 missing here (presumably the load of the
// elements array into eax) — verify upstream.
475 
476  // Check whether the elements is a number dictionary.
477  // edx: receiver
478  // ebx: untagged index
479  // ecx: key
480  // eax: elements
481  __ CheckMap(eax,
482  isolate->factory()->hash_table_map(),
483  &slow,
// NOTE(review): original line 484 missing here (presumably the final
// DONT_DO_SMI_CHECK argument) — verify upstream.
485  Label slow_pop_receiver;
486  // Push receiver on the stack to free up a register for the dictionary
487  // probing.
488  __ push(edx);
489  __ LoadFromNumberDictionary(&slow_pop_receiver, eax, ecx, ebx, edx, edi, eax);
490  // Pop receiver before returning.
491  __ pop(edx);
492  __ ret(0);
493 
494  __ bind(&slow_pop_receiver);
495  // Pop the receiver from the stack and jump to runtime.
496  __ pop(edx);
497 
498  __ bind(&slow);
499  // Slow case: jump to runtime.
500  // edx: receiver
501  // ecx: key
502  __ IncrementCounter(counters->keyed_load_generic_slow(), 1);
// NOTE(review): original line 503 missing here (presumably the call to the
// runtime get-property fallback) — verify upstream.
504 
505  __ bind(&check_string);
506  GenerateKeyStringCheck(masm, ecx, eax, ebx, &index_string, &slow);
507 
508  GenerateKeyedLoadReceiverCheck(
509  masm, edx, eax, Map::kHasNamedInterceptor, &slow);
510 
511  // If the receiver is a fast-case object, check the keyed lookup
512  // cache. Otherwise probe the dictionary.
// NOTE(review): original lines 513-514 missing here (the load/compare of
// the properties map against the hash table map) — verify upstream.
515  Immediate(isolate->factory()->hash_table_map()));
516  __ j(equal, &probe_dictionary);
517 
518  // The receiver's map is still in eax, compute the keyed lookup cache hash
519  // based on 32 bits of the map pointer and the string hash.
520  if (FLAG_debug_code) {
// NOTE(review): original line 521 missing here (the debug-mode compare that
// the map is still in eax) — verify upstream.
522  __ Check(equal, "Map is no longer in eax.");
523  }
524  __ mov(ebx, eax); // Keep the map around for later.
// NOTE(review): original lines 525-526 missing here (hash computation from
// the map and the key's hash field) — verify upstream.
527  __ shr(edi, String::kHashShift);
528  __ xor_(eax, edi);
// NOTE(review): original line 529 missing here (masking the hash with the
// cache capacity mask) — verify upstream.
530 
531  // Load the key (consisting of map and symbol) from the cache and
532  // check for match.
533  Label load_in_object_property;
534  static const int kEntriesPerBucket = KeyedLookupCache::kEntriesPerBucket;
535  Label hit_on_nth_entry[kEntriesPerBucket];
536  ExternalReference cache_keys =
537  ExternalReference::keyed_lookup_cache_keys(masm->isolate());
538 
539  for (int i = 0; i < kEntriesPerBucket - 1; i++) {
540  Label try_next_entry;
541  __ mov(edi, eax);
542  __ shl(edi, kPointerSizeLog2 + 1);
543  if (i != 0) {
544  __ add(edi, Immediate(kPointerSize * i * 2));
545  }
546  __ cmp(ebx, Operand::StaticArray(edi, times_1, cache_keys));
547  __ j(not_equal, &try_next_entry);
548  __ add(edi, Immediate(kPointerSize));
549  __ cmp(ecx, Operand::StaticArray(edi, times_1, cache_keys));
550  __ j(equal, &hit_on_nth_entry[i]);
551  __ bind(&try_next_entry);
552  }
553 
554  __ lea(edi, Operand(eax, 1));
555  __ shl(edi, kPointerSizeLog2 + 1);
556  __ add(edi, Immediate(kPointerSize * (kEntriesPerBucket - 1) * 2));
557  __ cmp(ebx, Operand::StaticArray(edi, times_1, cache_keys));
558  __ j(not_equal, &slow);
559  __ add(edi, Immediate(kPointerSize));
560  __ cmp(ecx, Operand::StaticArray(edi, times_1, cache_keys));
561  __ j(not_equal, &slow);
562 
563  // Get field offset.
564  // edx : receiver
565  // ebx : receiver's map
566  // ecx : key
567  // eax : lookup cache index
568  ExternalReference cache_field_offsets =
569  ExternalReference::keyed_lookup_cache_field_offsets(masm->isolate());
570 
571  // Hit on nth entry.
572  for (int i = kEntriesPerBucket - 1; i >= 0; i--) {
573  __ bind(&hit_on_nth_entry[i]);
574  if (i != 0) {
575  __ add(eax, Immediate(i));
576  }
577  __ mov(edi,
578  Operand::StaticArray(eax, times_pointer_size, cache_field_offsets));
// NOTE(review): original line 579 missing here (presumably the load of the
// map's in-object property count) — verify upstream.
580  __ sub(edi, eax);
581  __ j(above_equal, &property_array_property);
582  if (i != 0) {
583  __ jmp(&load_in_object_property);
584  }
585  }
586 
587  // Load in-object property.
588  __ bind(&load_in_object_property);
// NOTE(review): original line 589 missing here — verify upstream.
590  __ add(eax, edi);
// NOTE(review): original line 591 missing here (the actual in-object field
// load) — verify upstream.
592  __ IncrementCounter(counters->keyed_load_generic_lookup_cache(), 1);
593  __ ret(0);
594 
595  // Load property array property.
596  __ bind(&property_array_property);
// NOTE(review): original lines 597-599 missing here (loads through the
// properties FixedArray) — verify upstream.
600  __ IncrementCounter(counters->keyed_load_generic_lookup_cache(), 1);
601  __ ret(0);
602 
603  // Do a quick inline probe of the receiver's dictionary, if it
604  // exists.
605  __ bind(&probe_dictionary);
606 
// NOTE(review): original lines 607-608 missing here (loads of the
// properties array and the receiver's instance type) — verify upstream.
609  GenerateGlobalInstanceTypeCheck(masm, eax, &slow);
610 
611  GenerateDictionaryLoad(masm, &slow, ebx, ecx, eax, edi, eax);
612  __ IncrementCounter(counters->keyed_load_generic_symbol(), 1);
613  __ ret(0);
614 
615  __ bind(&index_string);
616  __ IndexFromHash(ebx, ecx);
617  // Now jump to the place where smi keys are handled.
618  __ jmp(&index_smi);
619 }
620 
621 
622 void KeyedLoadIC::GenerateString(MacroAssembler* masm) {
623  // ----------- S t a t e -------------
624  // -- ecx : key (index)
625  // -- edx : receiver
626  // -- esp[0] : return address
627  // -----------------------------------
628  Label miss;
629 
630  Register receiver = edx;
631  Register index = ecx;
632  Register scratch = ebx;
633  Register result = eax;
634 
635  StringCharAtGenerator char_at_generator(receiver,
636  index,
637  scratch,
638  result,
639  &miss, // When not a string.
640  &miss, // When not a number.
641  &miss, // When index out of range.
643  char_at_generator.GenerateFast(masm);
644  __ ret(0);
645 
646  StubRuntimeCallHelper call_helper;
647  char_at_generator.GenerateSlow(masm, call_helper);
648 
649  __ bind(&miss);
650  GenerateMiss(masm, false);
651 }
652 
653 
654 void KeyedLoadIC::GenerateIndexedInterceptor(MacroAssembler* masm) {
655  // ----------- S t a t e -------------
656  // -- ecx : key
657  // -- edx : receiver
658  // -- esp[0] : return address
659  // -----------------------------------
660  Label slow;
661 
662  // Check that the receiver isn't a smi.
663  __ JumpIfSmi(edx, &slow);
664 
665  // Check that the key is an array index, that is Uint32.
666  __ test(ecx, Immediate(kSmiTagMask | kSmiSignMask));
667  __ j(not_zero, &slow);
668 
669  // Get the map of the receiver.
671 
672  // Check that it has indexed interceptor and access checks
673  // are not enabled for this object.
675  __ and_(eax, Immediate(kSlowCaseBitFieldMask));
676  __ cmp(eax, Immediate(1 << Map::kHasIndexedInterceptor));
677  __ j(not_zero, &slow);
678 
679  // Everything is fine, call runtime.
680  __ pop(eax);
681  __ push(edx); // receiver
682  __ push(ecx); // key
683  __ push(eax); // return address
684 
685  // Perform tail call to the entry.
686  ExternalReference ref =
687  ExternalReference(IC_Utility(kKeyedLoadPropertyWithInterceptor),
688  masm->isolate());
689  __ TailCallExternalReference(ref, 2, 1);
690 
691  __ bind(&slow);
692  GenerateMiss(masm, false);
693 }
694 
695 
696 void KeyedLoadIC::GenerateNonStrictArguments(MacroAssembler* masm) {
697  // ----------- S t a t e -------------
698  // -- ecx : key
699  // -- edx : receiver
700  // -- esp[0] : return address
701  // -----------------------------------
702  Label slow, notin;
703  Factory* factory = masm->isolate()->factory();
704  Operand mapped_location =
705  GenerateMappedArgumentsLookup(masm, edx, ecx, ebx, eax, &notin, &slow);
706  __ mov(eax, mapped_location);
707  __ Ret();
708  __ bind(&notin);
709  // The unmapped lookup expects that the parameter map is in ebx.
710  Operand unmapped_location =
711  GenerateUnmappedArgumentsLookup(masm, ecx, ebx, eax, &slow);
712  __ cmp(unmapped_location, factory->the_hole_value());
713  __ j(equal, &slow);
714  __ mov(eax, unmapped_location);
715  __ Ret();
716  __ bind(&slow);
717  GenerateMiss(masm, false);
718 }
719 
720 
721 void KeyedStoreIC::GenerateNonStrictArguments(MacroAssembler* masm) {
722  // ----------- S t a t e -------------
723  // -- eax : value
724  // -- ecx : key
725  // -- edx : receiver
726  // -- esp[0] : return address
727  // -----------------------------------
728  Label slow, notin;
729  Operand mapped_location =
730  GenerateMappedArgumentsLookup(masm, edx, ecx, ebx, edi, &notin, &slow);
731  __ mov(mapped_location, eax);
732  __ lea(ecx, mapped_location);
733  __ mov(edx, eax);
734  __ RecordWrite(ebx, ecx, edx, kDontSaveFPRegs);
735  __ Ret();
736  __ bind(&notin);
737  // The unmapped lookup expects that the parameter map is in ebx.
738  Operand unmapped_location =
739  GenerateUnmappedArgumentsLookup(masm, ecx, ebx, edi, &slow);
740  __ mov(unmapped_location, eax);
741  __ lea(edi, unmapped_location);
742  __ mov(edx, eax);
743  __ RecordWrite(ebx, edi, edx, kDontSaveFPRegs);
744  __ Ret();
745  __ bind(&slow);
746  GenerateMiss(masm, false);
747 }
748 
749 
// Generic keyed-store stub: handles fast object/double element stores,
// array-length growth by one, and elements-kind transitions
// (smi -> double/object, double -> object); everything else goes to the
// runtime via GenerateRuntimeSetProperty.
// NOTE(review): this listing is a Doxygen dump with the original source
// line numbers embedded at the start of each line. The extraction DROPPED
// original lines 766, 769-770, 785, 787, 806, 808, 812, 820, 832, 846,
// 854, 859, 864, 868, 884, 890, 895, 899-900, 906, 910-911, 918, 920 and
// 924-925, so this text is incomplete — consult upstream v8 3.11
// src/ia32/ic-ia32.cc before relying on it. Gaps are marked below.
750 void KeyedStoreIC::GenerateGeneric(MacroAssembler* masm,
751  StrictModeFlag strict_mode) {
752  // ----------- S t a t e -------------
753  // -- eax : value
754  // -- ecx : key
755  // -- edx : receiver
756  // -- esp[0] : return address
757  // -----------------------------------
758  Label slow, fast_object_with_map_check, fast_object_without_map_check;
759  Label fast_double_with_map_check, fast_double_without_map_check;
760  Label check_if_double_array, array, extra, transition_smi_elements;
761  Label finish_object_store, non_double_value, transition_double_elements;
762 
763  // Check that the object isn't a smi.
764  __ JumpIfSmi(edx, &slow);
765  // Get the map from the receiver.
// NOTE(review): original line 766 missing here (presumably the map load
// into edi) — verify upstream.
767  // Check that the receiver does not require access checks. We need
768  // to do this because this generic stub does not perform map checks.
// NOTE(review): original lines 769-770 missing here (the bit-field test) —
// verify upstream.
771  __ j(not_zero, &slow);
772  // Check that the key is a smi.
773  __ JumpIfNotSmi(ecx, &slow);
774  __ CmpInstanceType(edi, JS_ARRAY_TYPE);
775  __ j(equal, &array);
776  // Check that the object is some kind of JSObject.
777  __ CmpInstanceType(edi, FIRST_JS_OBJECT_TYPE);
778  __ j(below, &slow);
779 
780  // Object case: Check key against length in the elements array.
781  // eax: value
782  // edx: JSObject
783  // ecx: key (a smi)
784  // edi: receiver map
// NOTE(review): original line 785 missing here (presumably the elements
// load into ebx) — verify upstream.
786  // Check array bounds. Both the key and the length of FixedArray are smis.
// NOTE(review): original line 787 missing here (the key/length compare) —
// verify upstream.
788  __ j(below, &fast_object_with_map_check);
789 
790  // Slow case: call runtime.
791  __ bind(&slow);
792  GenerateRuntimeSetProperty(masm, strict_mode);
793 
794  // Extra capacity case: Check if there is extra capacity to
795  // perform the store and update the length. Used for adding one
796  // element to the array by writing to array[array.length].
797  __ bind(&extra);
798  // eax: value
799  // edx: receiver, a JSArray
800  // ecx: key, a smi.
801  // ebx: receiver->elements, a FixedArray
802  // edi: receiver map
803  // flags: compare (ecx, edx.length())
804  // do not leave holes in the array:
805  __ j(not_equal, &slow);
// NOTE(review): original line 806 missing here (the capacity compare) —
// verify upstream.
807  __ j(above_equal, &slow);
// NOTE(review): original line 808 missing here (presumably the elements map
// load into edi) — verify upstream.
809  __ cmp(edi, masm->isolate()->factory()->fixed_array_map());
810  __ j(not_equal, &check_if_double_array);
811  // Add 1 to receiver->length, and go to common element store code for Objects.
// NOTE(review): original line 812 missing here (first half of the add to
// the length field) — verify upstream.
813  Immediate(Smi::FromInt(1)));
814  __ jmp(&fast_object_without_map_check);
815 
816  __ bind(&check_if_double_array);
817  __ cmp(edi, masm->isolate()->factory()->fixed_double_array_map());
818  __ j(not_equal, &slow);
819  // Add 1 to receiver->length, and go to common element store code for doubles.
// NOTE(review): original line 820 missing here (first half of the add to
// the length field) — verify upstream.
821  Immediate(Smi::FromInt(1)));
822  __ jmp(&fast_double_without_map_check);
823 
824  // Array case: Get the length and the elements array from the JS
825  // array. Check that the array is in fast mode (and writable); if it
826  // is the length is always a smi.
827  __ bind(&array);
828  // eax: value
829  // edx: receiver, a JSArray
830  // ecx: key, a smi.
831  // edi: receiver map
// NOTE(review): original line 832 missing here (presumably the elements
// load into ebx) — verify upstream.
833 
834  // Check the key against the length in the array and fall through to the
835  // common store code.
836  __ cmp(ecx, FieldOperand(edx, JSArray::kLengthOffset)); // Compare smis.
837  __ j(above_equal, &extra);
838 
839  // Fast case: Do the store, could either Object or double.
840  __ bind(&fast_object_with_map_check);
841  // eax: value
842  // ecx: key (a smi)
843  // edx: receiver
844  // ebx: FixedArray receiver->elements
845  // edi: receiver map
// NOTE(review): original line 846 missing here (presumably the elements map
// load into edi) — verify upstream.
847  __ cmp(edi, masm->isolate()->factory()->fixed_array_map());
848  __ j(not_equal, &fast_double_with_map_check);
849  __ bind(&fast_object_without_map_check);
850  // Smi stores don't require further checks.
851  Label non_smi_value;
852  __ JumpIfNotSmi(eax, &non_smi_value);
853  // It's irrelevant whether array is smi-only or not when writing a smi.
// NOTE(review): original line 854 missing here (the element store) —
// verify upstream.
855  __ ret(0);
856 
857  __ bind(&non_smi_value);
858  // Escape to elements kind transition case.
// NOTE(review): original line 859 missing here (presumably reloading the
// elements map into edi) — verify upstream.
860  __ CheckFastObjectElements(edi, &transition_smi_elements);
861 
862  // Fast elements array, store the value to the elements backing store.
863  __ bind(&finish_object_store);
// NOTE(review): original line 864 missing here (the element store) —
// verify upstream.
865  // Update write barrier for the elements array address.
866  __ mov(edx, eax); // Preserve the value which is returned.
867  __ RecordWriteArray(
// NOTE(review): original line 868 missing here (the remaining
// RecordWriteArray arguments) — verify upstream.
869  __ ret(0);
870 
871  __ bind(&fast_double_with_map_check);
872  // Check for fast double array case. If this fails, call through to the
873  // runtime.
874  __ cmp(edi, masm->isolate()->factory()->fixed_double_array_map());
875  __ j(not_equal, &slow);
876  __ bind(&fast_double_without_map_check);
877  // If the value is a number, store it as a double in the FastDoubleElements
878  // array.
879  __ StoreNumberToDoubleElements(eax, ebx, ecx, edx, xmm0,
880  &transition_double_elements, false);
881  __ ret(0);
882 
883  __ bind(&transition_smi_elements);
// NOTE(review): original line 884 missing here (presumably the receiver map
// load) — verify upstream.
885 
886  // Transition the array appropriately depending on the value type.
887  __ CheckMap(eax,
888  masm->isolate()->factory()->heap_number_map(),
889  &non_double_value,
// NOTE(review): original line 890 missing here (presumably the final
// DONT_DO_SMI_CHECK argument) — verify upstream.
891 
892  // Value is a double. Transition FAST_SMI_ELEMENTS -> FAST_DOUBLE_ELEMENTS
893  // and complete the store.
894  __ LoadTransitionedArrayMapConditional(FAST_SMI_ELEMENTS,
// NOTE(review): original line 895 missing here (the target elements-kind
// argument) — verify upstream.
896  ebx,
897  edi,
898  &slow);
// NOTE(review): original lines 899-900 missing here (the transition-map
// store / elements reload) — verify upstream.
901  __ jmp(&fast_double_without_map_check);
902 
903  __ bind(&non_double_value);
904  // Value is not a double, FAST_SMI_ELEMENTS -> FAST_ELEMENTS
905  __ LoadTransitionedArrayMapConditional(FAST_SMI_ELEMENTS,
// NOTE(review): original line 906 missing here (the target elements-kind
// argument) — verify upstream.
907  ebx,
908  edi,
909  &slow);
// NOTE(review): original lines 910-911 missing here (the transition-map
// store / elements reload) — verify upstream.
912  __ jmp(&finish_object_store);
913 
914  __ bind(&transition_double_elements);
915  // Elements are FAST_DOUBLE_ELEMENTS, but value is an Object that's not a
916  // HeapNumber. Make sure that the receiver is a Array with FAST_ELEMENTS and
917  // transition array from FAST_DOUBLE_ELEMENTS to FAST_ELEMENTS
// NOTE(review): original line 918 missing here (presumably the receiver map
// load) — verify upstream.
919  __ LoadTransitionedArrayMapConditional(FAST_DOUBLE_ELEMENTS,
// NOTE(review): original line 920 missing here (the target elements-kind
// argument) — verify upstream.
921  ebx,
922  edi,
923  &slow);
// NOTE(review): original lines 924-925 missing here (the transition-map
// store / elements reload) — verify upstream.
926  __ jmp(&finish_object_store);
927 }
928 
929 
930 // The generated code does not accept smi keys.
931 // The generated code falls through if both probes miss.
// Probes the megamorphic stub cache for a monomorphic call stub keyed on
// (receiver map, name). If the first probe misses and the receiver is a
// value (number, string, boolean), probes again using the map of the
// corresponding JSValue prototype, since that is what the cache is keyed
// on for value receivers.
// NOTE(review): this doxygen extraction dropped original lines 943, 962,
// 970 and 981 (the head of the Code::ComputeFlags(...) expression and the
// argument lines of the three GenerateLoadGlobalFunctionPrototype calls);
// confirm against the upstream ic-ia32.cc before relying on this listing.
932 void CallICBase::GenerateMonomorphicCacheProbe(MacroAssembler* masm,
933  int argc,
934  Code::Kind kind,
935  Code::ExtraICState extra_state) {
936  // ----------- S t a t e -------------
937  // -- ecx : name
938  // -- edx : receiver
939  // -----------------------------------
940  Label number, non_number, non_string, boolean, probe, miss;
941 
942  // Probe the stub cache.
944  MONOMORPHIC,
945  extra_state,
946  NORMAL,
947  argc);
948  Isolate* isolate = masm->isolate();
949  isolate->stub_cache()->GenerateProbe(masm, flags, edx, ecx, ebx, eax);
950 
951  // If the stub cache probing failed, the receiver might be a value.
952  // For value objects, we use the map of the prototype objects for
953  // the corresponding JSValue for the cache and that is what we need
954  // to probe.
955  //
956  // Check for number.
957  __ JumpIfSmi(edx, &number);
958  __ CmpObjectType(edx, HEAP_NUMBER_TYPE, ebx);
959  __ j(not_equal, &non_number);
960  __ bind(&number);
961  StubCompiler::GenerateLoadGlobalFunctionPrototype(
963  __ jmp(&probe);
964 
965  // Check for string.
// CmpObjectType above left the receiver's instance type in ebx, so a
// bare CmpInstanceType suffices here.
966  __ bind(&non_number);
967  __ CmpInstanceType(ebx, FIRST_NONSTRING_TYPE);
968  __ j(above_equal, &non_string);
969  StubCompiler::GenerateLoadGlobalFunctionPrototype(
971  __ jmp(&probe);
972 
973  // Check for boolean.
974  __ bind(&non_string);
975  __ cmp(edx, isolate->factory()->true_value());
976  __ j(equal, &boolean);
977  __ cmp(edx, isolate->factory()->false_value());
978  __ j(not_equal, &miss);
979  __ bind(&boolean);
980  StubCompiler::GenerateLoadGlobalFunctionPrototype(
982 
983  // Probe the stub cache for the value object.
984  __ bind(&probe);
985  isolate->stub_cache()->GenerateProbe(masm, flags, edx, ecx, ebx, no_reg);
// Both probes missed: fall through to the caller's miss handling.
986  __ bind(&miss);
987 }
988 
989 
// Tail-calls the value in edi after verifying that it is actually a
// JSFunction; jumps to |miss| otherwise. Shared epilogue for the Call IC
// stubs in this file (does not return to the IC code).
990 static void GenerateFunctionTailCall(MacroAssembler* masm,
991  int argc,
992  Label* miss) {
993  // ----------- S t a t e -------------
994  // -- ecx : name
995  // -- edi : function
996  // -- esp[0] : return address
997  // -- esp[(argc - n) * 4] : arg[n] (zero-based)
998  // -- ...
999  // -- esp[(argc + 1) * 4] : receiver
1000  // -----------------------------------
1001 
1002  // Check that the result is not a smi.
1003  __ JumpIfSmi(edi, miss);
1004 
1005  // Check that the value is a JavaScript function, fetching its map into eax.
1006  __ CmpObjectType(edi, JS_FUNCTION_TYPE, eax);
1007  __ j(not_equal, miss);
1008 
1009  // Invoke the function.
1010  ParameterCount actual(argc);
1011  __ InvokeFunction(edi, actual, JUMP_FUNCTION,
1012  NullCallWrapper(), CALL_AS_METHOD);
1013 }
1014 
1015 
1016 // The generated code falls through if the call should be handled by runtime.
// Handles calls on receivers with slow (dictionary) properties: verifies
// the receiver shape, looks the callee up in the property dictionary, and
// tail-calls it. Any failure falls through to the caller (runtime path).
1017 void CallICBase::GenerateNormal(MacroAssembler* masm, int argc) {
1018  // ----------- S t a t e -------------
1019  // -- ecx : name
1020  // -- esp[0] : return address
1021  // -- esp[(argc - n) * 4] : arg[n] (zero-based)
1022  // -- ...
1023  // -- esp[(argc + 1) * 4] : receiver
1024  // -----------------------------------
1025  Label miss;
1026 
1027  // Get the receiver of the function from the stack; 1 ~ return address.
1028  __ mov(edx, Operand(esp, (argc + 1) * kPointerSize));
1029 
1030  GenerateStringDictionaryReceiverCheck(masm, edx, eax, ebx, &miss);
1031 
1032  // eax: elements
1033  // Search the dictionary placing the result in edi.
1034  GenerateDictionaryLoad(masm, &miss, eax, ecx, edi, ebx, edi);
1035  GenerateFunctionTailCall(masm, argc, &miss);
1036 
// Fall through on miss: the runtime takes over.
1037  __ bind(&miss);
1038 }
1039 
1040 
// Miss handler shared by CallIC and KeyedCallIC: calls the IC_Utility
// entry identified by |id| to resolve the callee, optionally rewrites a
// global-object receiver on the stack, then tail-calls the result.
// NOTE(review): the doxygen extraction dropped original lines 1086-1088,
// 1090, 1095 and 1102 (the receiver type checks in the global-object
// block, the global-receiver reload, and the CALL_AS_FUNCTION arm of the
// call_kind expression); confirm against upstream ic-ia32.cc.
1041 void CallICBase::GenerateMiss(MacroAssembler* masm,
1042  int argc,
1043  IC::UtilityId id,
1044  Code::ExtraICState extra_state) {
1045  // ----------- S t a t e -------------
1046  // -- ecx : name
1047  // -- esp[0] : return address
1048  // -- esp[(argc - n) * 4] : arg[n] (zero-based)
1049  // -- ...
1050  // -- esp[(argc + 1) * 4] : receiver
1051  // -----------------------------------
1052 
1053  Counters* counters = masm->isolate()->counters();
1054  if (id == IC::kCallIC_Miss) {
1055  __ IncrementCounter(counters->call_miss(), 1);
1056  } else {
1057  __ IncrementCounter(counters->keyed_call_miss(), 1);
1058  }
1059 
1060  // Get the receiver of the function from the stack; 1 ~ return address.
1061  __ mov(edx, Operand(esp, (argc + 1) * kPointerSize));
1062 
1063  {
1064  FrameScope scope(masm, StackFrame::INTERNAL);
1065 
1066  // Push the receiver and the name of the function.
1067  __ push(edx);
1068  __ push(ecx);
1069 
1070  // Call the entry.
// eax = argument count (2), ebx = C entry point for the IC utility.
1071  CEntryStub stub(1);
1072  __ mov(eax, Immediate(2));
1073  __ mov(ebx, Immediate(ExternalReference(IC_Utility(id), masm->isolate())));
1074  __ CallStub(&stub);
1075 
1076  // Move result to edi and exit the internal frame.
1077  __ mov(edi, eax);
1078  }
1079 
1080  // Check if the receiver is a global object of some sort.
1081  // This can happen only for regular CallIC but not KeyedCallIC.
1082  if (id == IC::kCallIC_Miss) {
1083  Label invoke, global;
1084  __ mov(edx, Operand(esp, (argc + 1) * kPointerSize)); // receiver
1085  __ JumpIfSmi(edx, &invoke, Label::kNear);
1089  __ j(equal, &global, Label::kNear);
1091  __ j(not_equal, &invoke, Label::kNear);
1092 
1093  // Patch the receiver on the stack.
1094  __ bind(&global);
1096  __ mov(Operand(esp, (argc + 1) * kPointerSize), edx);
1097  __ bind(&invoke);
1098  }
1099 
1100  // Invoke the function.
1101  CallKind call_kind = CallICBase::Contextual::decode(extra_state)
1103  : CALL_AS_METHOD;
1104  ParameterCount actual(argc);
1105  __ InvokeFunction(edi,
1106  actual,
1107  JUMP_FUNCTION,
1108  NullCallWrapper(),
1109  call_kind);
1110 }
1111 
1112 
// Megamorphic CallIC entry: probes the monomorphic stub cache first and
// falls back to the generic miss handler if both probes fail.
// NOTE(review): the doxygen extraction dropped original line 1126 here
// (the head of the GenerateMonomorphicCacheProbe(...) call whose trailing
// "extra_state);" argument survives below); confirm against upstream.
1113 void CallIC::GenerateMegamorphic(MacroAssembler* masm,
1114  int argc,
1115  Code::ExtraICState extra_state) {
1116  // ----------- S t a t e -------------
1117  // -- ecx : name
1118  // -- esp[0] : return address
1119  // -- esp[(argc - n) * 4] : arg[n] (zero-based)
1120  // -- ...
1121  // -- esp[(argc + 1) * 4] : receiver
1122  // -----------------------------------
1123 
1124  // Get the receiver of the function from the stack; 1 ~ return address.
1125  __ mov(edx, Operand(esp, (argc + 1) * kPointerSize));
1127  extra_state);
1128 
1129  GenerateMiss(masm, argc, extra_state);
1130 }
1131 
1132 
// Megamorphic KeyedCallIC: dispatches on the key kind. Smi keys take the
// fast-elements path (with number-dictionary and runtime fallbacks);
// symbol keys try an inline dictionary probe or the monomorphic stub
// cache; everything else goes through the generic miss handler.
// NOTE(review): the doxygen extraction dropped original lines 1178, 1219,
// 1223 and 1231-1232 (the CheckMap mode arguments, the properties load
// into ebx, and the GenerateMonomorphicCacheProbe call at the
// lookup_monomorphic_cache label); confirm against upstream ic-ia32.cc.
1133 void KeyedCallIC::GenerateMegamorphic(MacroAssembler* masm, int argc) {
1134  // ----------- S t a t e -------------
1135  // -- ecx : name
1136  // -- esp[0] : return address
1137  // -- esp[(argc - n) * 4] : arg[n] (zero-based)
1138  // -- ...
1139  // -- esp[(argc + 1) * 4] : receiver
1140  // -----------------------------------
1141 
1142  // Get the receiver of the function from the stack; 1 ~ return address.
1143  __ mov(edx, Operand(esp, (argc + 1) * kPointerSize));
1144 
1145  Label do_call, slow_call, slow_load, slow_reload_receiver;
1146  Label check_number_dictionary, check_string, lookup_monomorphic_cache;
1147  Label index_smi, index_string;
1148 
1149  // Check that the key is a smi.
1150  __ JumpIfNotSmi(ecx, &check_string);
1151 
1152  __ bind(&index_smi);
1153  // Now the key is known to be a smi. This place is also jumped to from
1154  // where a numeric string is converted to a smi.
1155 
1156  GenerateKeyedLoadReceiverCheck(
1157  masm, edx, eax, Map::kHasIndexedInterceptor, &slow_call);
1158 
1159  GenerateFastArrayLoad(
1160  masm, edx, ecx, eax, edi, &check_number_dictionary, &slow_load);
1161  Isolate* isolate = masm->isolate();
1162  Counters* counters = isolate->counters();
1163  __ IncrementCounter(counters->keyed_call_generic_smi_fast(), 1);
1164 
1165  __ bind(&do_call);
1166  // receiver in edx is not used after this point.
1167  // ecx: key
1168  // edi: function
1169  GenerateFunctionTailCall(masm, argc, &slow_call);
1170 
1171  __ bind(&check_number_dictionary);
1172  // eax: elements
1173  // ecx: smi key
1174  // Check whether the elements is a number dictionary.
1175  __ CheckMap(eax,
1176  isolate->factory()->hash_table_map(),
1177  &slow_load,
1179  __ mov(ebx, ecx);
1180  __ SmiUntag(ebx);
1181  // ebx: untagged index
1182  // Receiver in edx will be clobbered, need to reload it on miss.
1183  __ LoadFromNumberDictionary(
1184  &slow_reload_receiver, eax, ecx, ebx, edx, edi, edi);
1185  __ IncrementCounter(counters->keyed_call_generic_smi_dict(), 1);
1186  __ jmp(&do_call);
1187 
1188  __ bind(&slow_reload_receiver);
1189  __ mov(edx, Operand(esp, (argc + 1) * kPointerSize));
1190 
1191  __ bind(&slow_load);
1192  // This branch is taken when calling KeyedCallIC_Miss is neither required
1193  // nor beneficial.
1194  __ IncrementCounter(counters->keyed_call_generic_slow_load(), 1);
1195 
1196  {
1197  FrameScope scope(masm, StackFrame::INTERNAL);
1198  __ push(ecx); // save the key
1199  __ push(edx); // pass the receiver
1200  __ push(ecx); // pass the key
1201  __ CallRuntime(Runtime::kKeyedGetProperty, 2);
1202  __ pop(ecx); // restore the key
1203  // Leave the internal frame.
1204  }
1205 
// Runtime result (the callee) arrives in eax; move it to edi for do_call.
1206  __ mov(edi, eax);
1207  __ jmp(&do_call);
1208 
1209  __ bind(&check_string);
1210  GenerateKeyStringCheck(masm, ecx, eax, ebx, &index_string, &slow_call);
1211 
1212  // The key is known to be a symbol.
1213  // If the receiver is a regular JS object with slow properties then do
1214  // a quick inline probe of the receiver's dictionary.
1215  // Otherwise do the monomorphic cache probe.
1216  GenerateKeyedLoadReceiverCheck(
1217  masm, edx, eax, Map::kHasNamedInterceptor, &lookup_monomorphic_cache);
1218 
1220  __ CheckMap(ebx,
1221  isolate->factory()->hash_table_map(),
1222  &lookup_monomorphic_cache,
1224 
1225  GenerateDictionaryLoad(masm, &slow_load, ebx, ecx, eax, edi, edi);
1226  __ IncrementCounter(counters->keyed_call_generic_lookup_dict(), 1);
1227  __ jmp(&do_call);
1228 
1229  __ bind(&lookup_monomorphic_cache);
1230  __ IncrementCounter(counters->keyed_call_generic_lookup_cache(), 1);
1233  // Fall through on miss.
1234 
1235  __ bind(&slow_call);
1236  // This branch is taken if:
1237  // - the receiver requires boxing or access check,
1238  // - the key is neither smi nor symbol,
1239  // - the value loaded is not a function,
1240  // - there is hope that the runtime will create a monomorphic call stub
1241  // that will get fetched next time.
1242  __ IncrementCounter(counters->keyed_call_generic_slow(), 1);
1243  GenerateMiss(masm, argc);
1244 
// A string key that is really an array index: recover the index from the
// string's hash field and re-enter the smi-key path.
1245  __ bind(&index_string);
1246  __ IndexFromHash(ebx, ecx);
1247  // Now jump to the place where smi keys are handled.
1248  __ jmp(&index_smi);
1249 }
1250 
1251 
// Keyed call on a non-strict arguments object: tries the mapped
// (aliased) parameter slot first, then the unmapped backing store, and
// tail-calls whichever function is found; anything else is a miss.
1252 void KeyedCallIC::GenerateNonStrictArguments(MacroAssembler* masm,
1253  int argc) {
1254  // ----------- S t a t e -------------
1255  // -- ecx : name
1256  // -- esp[0] : return address
1257  // -- esp[(argc - n) * 4] : arg[n] (zero-based)
1258  // -- ...
1259  // -- esp[(argc + 1) * 4] : receiver
1260  // -----------------------------------
1261  Label slow, notin;
1262  Factory* factory = masm->isolate()->factory();
1263  __ mov(edx, Operand(esp, (argc + 1) * kPointerSize));
1264  Operand mapped_location =
1265  GenerateMappedArgumentsLookup(masm, edx, ecx, ebx, eax, &notin, &slow);
1266  __ mov(edi, mapped_location);
1267  GenerateFunctionTailCall(masm, argc, &slow);
1268  __ bind(&notin);
1269  // The unmapped lookup expects that the parameter map is in ebx.
1270  Operand unmapped_location =
1271  GenerateUnmappedArgumentsLookup(masm, ecx, ebx, eax, &slow);
// The hole marks a deleted/absent element; treat it as a miss.
1272  __ cmp(unmapped_location, factory->the_hole_value());
1273  __ j(equal, &slow);
1274  __ mov(edi, unmapped_location);
1275  GenerateFunctionTailCall(masm, argc, &slow);
1276  __ bind(&slow);
1277  GenerateMiss(masm, argc);
1278 }
1279 
1280 
// KeyedCallIC on a dictionary-mode receiver: only string names can be
// property keys here, so non-string keys go straight to the miss handler;
// otherwise the plain CallIC dictionary path is reused.
1281 void KeyedCallIC::GenerateNormal(MacroAssembler* masm, int argc) {
1282  // ----------- S t a t e -------------
1283  // -- ecx : name
1284  // -- esp[0] : return address
1285  // -- esp[(argc - n) * 4] : arg[n] (zero-based)
1286  // -- ...
1287  // -- esp[(argc + 1) * 4] : receiver
1288  // -----------------------------------
1289 
1290  // Check if the name is a string.
1291  Label miss;
1292  __ JumpIfSmi(ecx, &miss);
1293  Condition cond = masm->IsObjectStringType(ecx, eax, eax);
1294  __ j(NegateCondition(cond), &miss);
1295  CallICBase::GenerateNormal(masm, argc);
1296  __ bind(&miss);
1297  GenerateMiss(masm, argc);
1298 }
1299 
1300 
// Megamorphic LoadIC entry: probes the stub cache keyed on
// (receiver map, name) and falls back to the miss handler.
// NOTE(review): the doxygen extraction dropped original line 1309 here
// (presumably the Code::Flags computation feeding |flags| below);
// confirm against upstream ic-ia32.cc.
1301 void LoadIC::GenerateMegamorphic(MacroAssembler* masm) {
1302  // ----------- S t a t e -------------
1303  // -- ecx : name
1304  // -- edx : receiver
1305  // -- esp[0] : return address
1306  // -----------------------------------
1307 
1308  // Probe the stub cache.
1310  Isolate::Current()->stub_cache()->GenerateProbe(masm, flags, edx, ecx, ebx,
1311  eax);
1312 
1313  // Cache miss: Jump to runtime.
1314  GenerateMiss(masm);
1315 }
1316 
1317 
// LoadIC for dictionary-mode receivers: loads the property straight out
// of the receiver's string dictionary and returns it in eax.
1318 void LoadIC::GenerateNormal(MacroAssembler* masm) {
1319  // ----------- S t a t e -------------
1320  // -- ecx : name
1321  // -- edx : receiver
1322  // -- esp[0] : return address
1323  // -----------------------------------
1324  Label miss;
1325 
1326  GenerateStringDictionaryReceiverCheck(masm, edx, eax, ebx, &miss);
1327 
1328  // eax: elements
1329  // Search the dictionary placing the result in eax.
1330  GenerateDictionaryLoad(masm, &miss, eax, ecx, edi, ebx, eax);
1331  __ ret(0);
1332 
1333  // Cache miss: Jump to runtime.
1334  __ bind(&miss);
1335  GenerateMiss(masm);
1336 }
1337 
1338 
// LoadIC miss: rearranges the stack to (receiver, name, return address)
// and tail-calls the LoadIC_Miss runtime entry with 2 arguments.
1339 void LoadIC::GenerateMiss(MacroAssembler* masm) {
1340  // ----------- S t a t e -------------
1341  // -- ecx : name
1342  // -- edx : receiver
1343  // -- esp[0] : return address
1344  // -----------------------------------
1345 
1346  __ IncrementCounter(masm->isolate()->counters()->load_miss(), 1);
1347 
// Pop the return address so the arguments can be pushed beneath it.
1348  __ pop(ebx);
1349  __ push(edx); // receiver
1350  __ push(ecx); // name
1351  __ push(ebx); // return address
1352 
1353  // Perform tail call to the entry.
1354  ExternalReference ref =
1355  ExternalReference(IC_Utility(kLoadIC_Miss), masm->isolate());
1356  __ TailCallExternalReference(ref, 2, 1);
1357 }
1358 
1359 
// KeyedLoadIC miss: tail-calls either the force-generic or the regular
// keyed-load miss entry with (receiver, key) as arguments.
1360 void KeyedLoadIC::GenerateMiss(MacroAssembler* masm, bool force_generic) {
1361  // ----------- S t a t e -------------
1362  // -- ecx : key
1363  // -- edx : receiver
1364  // -- esp[0] : return address
1365  // -----------------------------------
1366 
1367  __ IncrementCounter(masm->isolate()->counters()->keyed_load_miss(), 1);
1368 
// Pop the return address so the arguments can be pushed beneath it.
1369  __ pop(ebx);
1370  __ push(edx); // receiver
1371  __ push(ecx); // name
1372  __ push(ebx); // return address
1373 
1374  // Perform tail call to the entry.
// force_generic makes the runtime rewrite this IC to the generic stub
// instead of trying to select a specialized one.
1375  ExternalReference ref = force_generic
1376  ? ExternalReference(IC_Utility(kKeyedLoadIC_MissForceGeneric),
1377  masm->isolate())
1378  : ExternalReference(IC_Utility(kKeyedLoadIC_Miss), masm->isolate());
1379  __ TailCallExternalReference(ref, 2, 1);
1380 }
1381 
1382 
// Slow keyed load: forwards (receiver, key) to Runtime::kKeyedGetProperty
// via a tail call, bypassing the IC machinery entirely.
1383 void KeyedLoadIC::GenerateRuntimeGetProperty(MacroAssembler* masm) {
1384  // ----------- S t a t e -------------
1385  // -- ecx : key
1386  // -- edx : receiver
1387  // -- esp[0] : return address
1388  // -----------------------------------
1389 
// Pop the return address so the arguments can be pushed beneath it.
1390  __ pop(ebx);
1391  __ push(edx); // receiver
1392  __ push(ecx); // name
1393  __ push(ebx); // return address
1394 
1395  // Perform tail call to the entry.
1396  __ TailCallRuntime(Runtime::kKeyedGetProperty, 2, 1);
1397 }
1398 
1399 
// Megamorphic StoreIC entry: probes the stub cache for a store stub
// matching (receiver map, name) and falls back to the miss handler.
// NOTE(review): the doxygen extraction dropped original line 1410 here
// (the continuation of the Code::Flags expression started on line 1409,
// presumably encoding STORE_IC and |strict_mode|); confirm upstream.
1400 void StoreIC::GenerateMegamorphic(MacroAssembler* masm,
1401  StrictModeFlag strict_mode) {
1402  // ----------- S t a t e -------------
1403  // -- eax : value
1404  // -- ecx : name
1405  // -- edx : receiver
1406  // -- esp[0] : return address
1407  // -----------------------------------
1408 
1409  Code::Flags flags =
1411  Isolate::Current()->stub_cache()->GenerateProbe(masm, flags, edx, ecx, ebx,
1412  no_reg);
1413 
1414  // Cache miss: Jump to runtime.
1415  GenerateMiss(masm);
1416 }
1417 
1418 
// StoreIC miss: rearranges the stack to (receiver, name, value, return
// address) and tail-calls the StoreIC_Miss runtime entry with 3 arguments.
1419 void StoreIC::GenerateMiss(MacroAssembler* masm) {
1420  // ----------- S t a t e -------------
1421  // -- eax : value
1422  // -- ecx : name
1423  // -- edx : receiver
1424  // -- esp[0] : return address
1425  // -----------------------------------
1426 
// Pop the return address so the arguments can be pushed beneath it.
1427  __ pop(ebx);
1428  __ push(edx); // receiver
1429  __ push(ecx); // name
1430  __ push(eax); // value
1431  __ push(ebx); // return address
1432 
1433  // Perform tail call to the entry.
1434  ExternalReference ref =
1435  ExternalReference(IC_Utility(kStoreIC_Miss), masm->isolate());
1436  __ TailCallExternalReference(ref, 3, 1);
1437 }
1438 
1439 
// Specialized StoreIC for assignments to a JSArray's "length" property:
// validates the receiver and value inline and defers the actual length
// change to the StoreIC_ArrayLength runtime entry.
1440 void StoreIC::GenerateArrayLength(MacroAssembler* masm) {
1441  // ----------- S t a t e -------------
1442  // -- eax : value
1443  // -- ecx : name
1444  // -- edx : receiver
1445  // -- esp[0] : return address
1446  // -----------------------------------
1447  //
1448  // This accepts as a receiver anything JSArray::SetElementsLength accepts
1449  // (currently anything except for external arrays which means anything with
1450  // elements of FixedArray type). Value must be a number, but only smis are
1451  // accepted as the most common case.
1452 
1453  Label miss;
1454 
1455  Register receiver = edx;
1456  Register value = eax;
1457  Register scratch = ebx;
1458 
1459  // Check that the receiver isn't a smi.
1460  __ JumpIfSmi(receiver, &miss);
1461 
1462  // Check that the object is a JS array.
1463  __ CmpObjectType(receiver, JS_ARRAY_TYPE, scratch);
1464  __ j(not_equal, &miss);
1465 
1466  // Check that elements are FixedArray.
1467  // We rely on StoreIC_ArrayLength below to deal with all types of
1468  // fast elements (including COW).
1469  __ mov(scratch, FieldOperand(receiver, JSArray::kElementsOffset));
1470  __ CmpObjectType(scratch, FIXED_ARRAY_TYPE, scratch);
1471  __ j(not_equal, &miss);
1472 
1473  // Check that the array has fast properties, otherwise the length
1474  // property might have been redefined.
1475  __ mov(scratch, FieldOperand(receiver, JSArray::kPropertiesOffset));
1476  __ CompareRoot(FieldOperand(scratch, FixedArray::kMapOffset),
1477  Heap::kHashTableMapRootIndex);
1478  __ j(equal, &miss);
1479 
1480  // Check that value is a smi.
1481  __ JumpIfNotSmi(value, &miss);
1482 
1483  // Prepare tail call to StoreIC_ArrayLength.
1484  __ pop(scratch);
1485  __ push(receiver);
1486  __ push(value);
1487  __ push(scratch); // return address
1488 
1489  ExternalReference ref =
1490  ExternalReference(IC_Utility(kStoreIC_ArrayLength), masm->isolate());
1491  __ TailCallExternalReference(ref, 2, 1);
1492 
1493  __ bind(&miss);
1494 
1495  GenerateMiss(masm);
1496 }
1497 
1498 
// StoreIC for dictionary-mode receivers: stores directly into the
// receiver's string dictionary, falling back to the miss handler on
// any failure.
1499 void StoreIC::GenerateNormal(MacroAssembler* masm) {
1500  // ----------- S t a t e -------------
1501  // -- eax : value
1502  // -- ecx : name
1503  // -- edx : receiver
1504  // -- esp[0] : return address
1505  // -----------------------------------
1506 
1507  Label miss, restore_miss;
1508 
1509  GenerateStringDictionaryReceiverCheck(masm, edx, ebx, edi, &miss);
1510 
1511  // A lot of registers are needed for storing to slow case
1512  // objects. Push and restore receiver but rely on
1513  // GenerateDictionaryStore preserving the value and name.
1514  __ push(edx);
1515  GenerateDictionaryStore(masm, &restore_miss, ebx, ecx, eax, edx, edi);
// Success: drop the saved receiver and return.
1516  __ Drop(1);
1517  Counters* counters = masm->isolate()->counters();
1518  __ IncrementCounter(counters->store_normal_hit(), 1);
1519  __ ret(0);
1520 
// The store failed after the receiver was pushed: restore edx before
// taking the common miss path.
1521  __ bind(&restore_miss);
1522  __ pop(edx);
1523 
1524  __ bind(&miss);
1525  __ IncrementCounter(counters->store_normal_miss(), 1);
1526  GenerateMiss(masm);
1527 }
1528 
1529 
// StoreIC for stores through the global proxy: forwards
// (receiver, name, value, NONE attributes, strict mode) to
// Runtime::kSetProperty via a tail call.
1530 void StoreIC::GenerateGlobalProxy(MacroAssembler* masm,
1531  StrictModeFlag strict_mode) {
1532  // ----------- S t a t e -------------
1533  // -- eax : value
1534  // -- ecx : name
1535  // -- edx : receiver
1536  // -- esp[0] : return address
1537  // -----------------------------------
// Pop the return address so the arguments can be pushed beneath it.
1538  __ pop(ebx);
1539  __ push(edx); // receiver
1540  __ push(ecx); // name
1541  __ push(eax); // value
1542  __ push(Immediate(Smi::FromInt(NONE))); // PropertyAttributes
1543  __ push(Immediate(Smi::FromInt(strict_mode)));
1544  __ push(ebx); // return address
1545 
1546  // Do tail-call to runtime routine.
1547  __ TailCallRuntime(Runtime::kSetProperty, 5, 1);
1548 }
1549 
1550 
// Slow keyed store: forwards (receiver, key, value, NONE attributes,
// strict mode) to Runtime::kSetProperty via a tail call. Mirrors
// StoreIC::GenerateGlobalProxy above but is used for keyed stores.
1551 void KeyedStoreIC::GenerateRuntimeSetProperty(MacroAssembler* masm,
1552  StrictModeFlag strict_mode) {
1553  // ----------- S t a t e -------------
1554  // -- eax : value
1555  // -- ecx : key
1556  // -- edx : receiver
1557  // -- esp[0] : return address
1558  // -----------------------------------
1559 
// Pop the return address so the arguments can be pushed beneath it.
1560  __ pop(ebx);
1561  __ push(edx); // receiver
1562  __ push(ecx); // key
1563  __ push(eax); // value
1564  __ push(Immediate(Smi::FromInt(NONE))); // PropertyAttributes
1565  __ push(Immediate(Smi::FromInt(strict_mode))); // Strict mode.
1566  __ push(ebx); // return address
1567 
1568  // Do tail-call to runtime routine.
1569  __ TailCallRuntime(Runtime::kSetProperty, 5, 1);
1570 }
1571 
1572 
// KeyedStoreIC miss: tail-calls either the force-generic or the regular
// keyed-store miss entry with (receiver, key, value) as arguments.
1573 void KeyedStoreIC::GenerateMiss(MacroAssembler* masm, bool force_generic) {
1574  // ----------- S t a t e -------------
1575  // -- eax : value
1576  // -- ecx : key
1577  // -- edx : receiver
1578  // -- esp[0] : return address
1579  // -----------------------------------
1580 
// Pop the return address so the arguments can be pushed beneath it.
1581  __ pop(ebx);
1582  __ push(edx); // receiver
1583  __ push(ecx); // key
1584  __ push(eax); // value
1585  __ push(ebx); // return address
1586 
1587  // Do tail-call to runtime routine.
// force_generic makes the runtime rewrite this IC to the generic stub
// instead of trying to select a specialized one.
1588  ExternalReference ref = force_generic
1589  ? ExternalReference(IC_Utility(kKeyedStoreIC_MissForceGeneric),
1590  masm->isolate())
1591  : ExternalReference(IC_Utility(kKeyedStoreIC_Miss), masm->isolate());
1592  __ TailCallExternalReference(ref, 3, 1);
1593 }
1594 
1595 
// KeyedStoreIC slow path: tail-calls the KeyedStoreIC_Slow runtime entry
// with (receiver, key, value) as arguments.
1596 void KeyedStoreIC::GenerateSlow(MacroAssembler* masm) {
1597  // ----------- S t a t e -------------
1598  // -- eax : value
1599  // -- ecx : key
1600  // -- edx : receiver
1601  // -- esp[0] : return address
1602  // -----------------------------------
1603 
// Pop the return address so the arguments can be pushed beneath it.
1604  __ pop(ebx);
1605  __ push(edx); // receiver
1606  __ push(ecx); // key
1607  __ push(eax); // value
1608  __ push(ebx); // return address
1609 
1610  // Do tail-call to runtime routine.
1611  ExternalReference ref(IC_Utility(kKeyedStoreIC_Slow), masm->isolate());
1612  __ TailCallExternalReference(ref, 3, 1);
1613 }
1614 
1615 
// Transitions the receiver's elements kind FAST_SMI -> FAST_DOUBLE,
// returning the modified receiver in eax; falls back to the runtime when
// transition tracing is on or the inline transition fails.
// NOTE(review): the doxygen extraction dropped original lines 1625 and
// 1636 here (the inline transition-generator call guarded by |fail|, and
// the instruction restoring esi before the runtime tail call referenced
// by the comment below); confirm against upstream ic-ia32.cc.
1616 void KeyedStoreIC::GenerateTransitionElementsSmiToDouble(MacroAssembler* masm) {
1617  // ----------- S t a t e -------------
1618  // -- ebx : target map
1619  // -- edx : receiver
1620  // -- esp[0] : return address
1621  // -----------------------------------
1622  // Must return the modified receiver in eax.
1623  if (!FLAG_trace_elements_transitions) {
1624  Label fail;
1626  __ mov(eax, edx);
1627  __ Ret();
1628  __ bind(&fail);
1629  }
1630 
1631  __ pop(ebx);
1632  __ push(edx);
1633  __ push(ebx); // return address
1634  // Leaving the code managed by the register allocator and return to the
1635  // convention of using esi as context register.
1637  __ TailCallRuntime(Runtime::kTransitionElementsSmiToDouble, 1, 1);
1638 }
1639 
1640 
// Transitions the receiver's elements kind FAST_DOUBLE -> FAST (object),
// returning the modified receiver in eax; falls back to the runtime when
// transition tracing is on or the inline transition fails.
// NOTE(review): the doxygen extraction dropped original lines 1641, 1651
// and 1662 here -- including the function header line itself (per the
// doxygen symbol index this is
// KeyedStoreIC::GenerateTransitionElementsDoubleToObject), the inline
// transition-generator call guarded by |fail|, and the instruction
// restoring esi referenced by the comment below; confirm upstream.
1642  MacroAssembler* masm) {
1643  // ----------- S t a t e -------------
1644  // -- ebx : target map
1645  // -- edx : receiver
1646  // -- esp[0] : return address
1647  // -----------------------------------
1648  // Must return the modified receiver in eax.
1649  if (!FLAG_trace_elements_transitions) {
1650  Label fail;
1652  __ mov(eax, edx);
1653  __ Ret();
1654  __ bind(&fail);
1655  }
1656 
1657  __ pop(ebx);
1658  __ push(edx);
1659  __ push(ebx); // return address
1660  // Leaving the code managed by the register allocator and return to the
1661  // convention of using esi as context register.
1663  __ TailCallRuntime(Runtime::kTransitionElementsDoubleToObject, 1, 1);
1664 }
1665 
1666 
1667 #undef __
1668 
1669 
// Maps a JavaScript comparison token to the ia32 condition code used by
// the compare stubs; unexpected tokens hit UNREACHABLE().
// NOTE(review): the doxygen extraction dropped original line 1670 -- the
// function header line (presumably CompareIC::ComputeCondition taking the
// Token::Value |op| switched on below); confirm against upstream.
1671  switch (op) {
1672  case Token::EQ_STRICT:
1673  case Token::EQ:
1674  return equal;
1675  case Token::LT:
1676  return less;
1677  case Token::GT:
1678  return greater;
1679  case Token::LTE:
1680  return less_equal;
1681  case Token::GTE:
1682  return greater_equal;
1683  default:
1684  UNREACHABLE();
1685  return no_condition;
1686  }
1687 }
1688 
1689 
// Returns true if the call site at |address| was compiled with an inlined
// smi check, detected by a "test al" marker byte after the call.
// NOTE(review): the doxygen extraction dropped original line 1693 (the
// expression initializing test_instruction_address from |address|);
// confirm against upstream ic-ia32.cc.
1690 static bool HasInlinedSmiCode(Address address) {
1691  // The address of the instruction following the call.
1692  Address test_instruction_address =
1694 
1695  // If the instruction following the call is not a test al, nothing
1696  // was inlined.
1697  return *test_instruction_address == Assembler::kTestAlByte;
1698 }
1699 
1700 
// Rewrites this CompareIC's target stub after a miss: picks the next
// state from the operand types, installs the matching stub (generic
// CompareStub or a specialized ICCompareStub), and on the first miss
// activates the inlined smi check at the call site.
// NOTE(review): the doxygen extraction dropped original line 1730 inside
// the final if -- presumably the PatchInlinedSmiCode call implied by the
// "Activate inlined smi code" comment; confirm against upstream.
1701 void CompareIC::UpdateCaches(Handle<Object> x, Handle<Object> y) {
1702  HandleScope scope;
1703  Handle<Code> rewritten;
1704  State previous_state = GetState();
1705 
1706  State state = TargetState(previous_state, HasInlinedSmiCode(address()), x, y);
1707  if (state == GENERIC) {
1708  CompareStub stub(GetCondition(), strict(), NO_COMPARE_FLAGS);
1709  rewritten = stub.GetCode();
1710  } else {
1711  ICCompareStub stub(op_, state);
// KNOWN_OBJECTS pins the stub to the concrete map of the left operand.
1712  if (state == KNOWN_OBJECTS) {
1713  stub.set_known_map(Handle<Map>(Handle<JSObject>::cast(x)->map()));
1714  }
1715  rewritten = stub.GetCode();
1716  }
1717  set_target(*rewritten);
1718 
1719 #ifdef DEBUG
1720  if (FLAG_trace_ic) {
1721  PrintF("[CompareIC (%s->%s)#%s]\n",
1722  GetStateName(previous_state),
1723  GetStateName(state),
1724  Token::Name(op_));
1725  }
1726 #endif
1727 
1728  // Activate inlined smi code.
1729  if (previous_state == UNINITIALIZED) {
1731  }
1732 }
1733 
1734 
// Enables or disables the inlined smi check at a patchable IC call site
// by rewriting the short conditional jump near the "test al" marker:
// enabling turns jc/jnc into jz/jnz, disabling does the reverse.
// NOTE(review): the doxygen extraction dropped original lines 1735, 1738
// and 1765 -- the function header (per the surrounding code this is
// PatchInlinedSmiCode(Address address, InlinedSmiCheck check)), the
// initializer of test_instruction_address, and the head of the |cc|
// conditional expression whose two arms survive below; confirm upstream.
1736  // The address of the instruction following the call.
1737  Address test_instruction_address =
1739 
1740  // If the instruction following the call is not a test al, nothing
1741  // was inlined.
1742  if (*test_instruction_address != Assembler::kTestAlByte) {
1743  ASSERT(*test_instruction_address == Assembler::kNopByte);
1744  return;
1745  }
1746 
// The byte after the marker encodes the (signed) distance back to the
// jump instruction that must be rewritten.
1747  Address delta_address = test_instruction_address + 1;
1748  // The delta to the start of the map check instruction and the
1749  // condition code uses at the patched jump.
1750  int8_t delta = *reinterpret_cast<int8_t*>(delta_address);
1751  if (FLAG_trace_ic) {
1752  PrintF("[ patching ic at %p, test=%p, delta=%d\n",
1753  address, test_instruction_address, delta);
1754  }
1755 
1756  // Patch with a short conditional jump. Enabling means switching from a short
1757  // jump-if-carry/not-carry to jump-if-zero/not-zero, whereas disabling is the
1758  // reverse operation of that.
1759  Address jmp_address = test_instruction_address - delta;
1760  ASSERT((check == ENABLE_INLINED_SMI_CHECK)
1761  ? (*jmp_address == Assembler::kJncShortOpcode ||
1762  *jmp_address == Assembler::kJcShortOpcode)
1763  : (*jmp_address == Assembler::kJnzShortOpcode ||
1764  *jmp_address == Assembler::kJzShortOpcode));
1766  ? (*jmp_address == Assembler::kJncShortOpcode ? not_zero : zero)
1767  : (*jmp_address == Assembler::kJnzShortOpcode ? not_carry : carry);
// Short Jcc encoding: 0x70 prefix | condition code nibble.
1768  *jmp_address = static_cast<byte>(Assembler::kJccShortPrefix | cc);
1769 }
1770 
1771 
1772 } } // namespace v8::internal
1773 
1774 #endif // V8_TARGET_ARCH_IA32
byte * Address
Definition: globals.h:172
static void GenerateTransitionElementsDoubleToObject(MacroAssembler *masm)
static const int kBitFieldOffset
Definition: objects.h:4994
static void GenerateRuntimeGetProperty(MacroAssembler *masm)
const intptr_t kSmiTagMask
Definition: v8.h:3855
const intptr_t kSmiSignMask
Definition: v8globals.h:41
static const byte kJccShortPrefix
static void GenerateNormal(MacroAssembler *masm, int argc)
static const int kMapHashShift
Definition: heap.h:2235
void PrintF(const char *format,...)
Definition: v8utils.cc:40
static void GenerateGeneric(MacroAssembler *masm, StrictModeFlag strict_mode)
static const char * Name(Value tok)
Definition: token.h:196
static Smi * FromInt(int value)
Definition: objects-inl.h:973
static void GenerateMiss(MacroAssembler *masm, int argc, IC::UtilityId id, Code::ExtraICState extra_state)
value format" "after each garbage collection") DEFINE_bool(print_cumulative_gc_stat, false, "print cumulative GC statistics in name=value format on exit") DEFINE_bool(trace_gc_verbose, false, "print more details following each garbage collection") DEFINE_bool(trace_fragmentation, false, "report fragmentation for old pointer and data pages") DEFINE_bool(collect_maps, true, "garbage collect maps from which no objects can be reached") DEFINE_bool(flush_code, true, "flush code that we expect not to use again before full gc") DEFINE_bool(incremental_marking, true, "use incremental marking") DEFINE_bool(incremental_marking_steps, true, "do incremental marking steps") DEFINE_bool(trace_incremental_marking, false, "trace progress of the incremental marking") DEFINE_bool(use_idle_notification, true, "Use idle notification to reduce memory footprint.") DEFINE_bool(send_idle_notification, false, "Send idle notifcation between stress runs.") DEFINE_bool(use_ic, true, "use inline caching") DEFINE_bool(native_code_counters, false, "generate extra code for manipulating stats counters") DEFINE_bool(always_compact, false, "Perform compaction on every full GC") DEFINE_bool(lazy_sweeping, true, "Use lazy sweeping for old pointer and data spaces") DEFINE_bool(never_compact, false, "Never perform compaction on full GC-testing only") DEFINE_bool(compact_code_space, true, "Compact code space on full non-incremental collections") DEFINE_bool(cleanup_code_caches_at_gc, true, "Flush inline caches prior to mark compact collection and" "flush code caches in maps during mark compact cycle.") DEFINE_int(random_seed, 0, "Default seed for initializing random generator" "(0, the default, means to use system random).") DEFINE_bool(use_verbose_printer, true, "allows verbose printing") DEFINE_bool(allow_natives_syntax, false, "allow natives syntax") DEFINE_bool(trace_sim, false, "Trace simulator execution") DEFINE_bool(check_icache, false, "Check icache flushes in ARM and MIPS simulator") 
DEFINE_int(stop_sim_at, 0, "Simulator stop after x number of instructions") DEFINE_int(sim_stack_alignment, 8, "Stack alingment in bytes in simulator(4 or 8, 8 is default)") DEFINE_bool(trace_exception, false, "print stack trace when throwing exceptions") DEFINE_bool(preallocate_message_memory, false, "preallocate some memory to build stack traces.") DEFINE_bool(randomize_hashes, true, "randomize hashes to avoid predictable hash collisions" "(with snapshots this option cannot override the baked-in seed)") DEFINE_int(hash_seed, 0, "Fixed seed to use to hash property keys(0 means random)" "(with snapshots this option cannot override the baked-in seed)") DEFINE_bool(preemption, false, "activate a 100ms timer that switches between V8 threads") DEFINE_bool(regexp_optimization, true, "generate optimized regexp code") DEFINE_bool(testing_bool_flag, true, "testing_bool_flag") DEFINE_int(testing_int_flag, 13, "testing_int_flag") DEFINE_float(testing_float_flag, 2.5, "float-flag") DEFINE_string(testing_string_flag, "Hello, world!", "string-flag") DEFINE_int(testing_prng_seed, 42, "Seed used for threading test randomness") DEFINE_string(testing_serialization_file, "/tmp/serdes", "file in which to serialize heap") DEFINE_bool(help, false, "Print usage message, including flags, on console") DEFINE_bool(dump_counters, false, "Dump counters on exit") DEFINE_string(map_counters, "", "Map counters to a file") DEFINE_args(js_arguments, JSARGUMENTS_INIT, "Pass all remaining arguments to the script.Alias for\"--\".") DEFINE_bool(debug_compile_events, true,"Enable debugger compile events") DEFINE_bool(debug_script_collected_events, true,"Enable debugger script collected events") DEFINE_bool(gdbjit, false,"enable GDBJIT interface (disables compacting GC)") DEFINE_bool(gdbjit_full, false,"enable GDBJIT interface for all code objects") DEFINE_bool(gdbjit_dump, false,"dump elf objects with debug info to disk") DEFINE_string(gdbjit_dump_filter,"","dump only objects containing this 
substring") DEFINE_bool(force_marking_deque_overflows, false,"force overflows of marking deque by reducing it's size ""to 64 words") DEFINE_bool(stress_compaction, false,"stress the GC compactor to flush out bugs (implies ""--force_marking_deque_overflows)")#define FLAG DEFINE_bool(enable_slow_asserts, false,"enable asserts that are slow to execute") DEFINE_bool(trace_codegen, false,"print name of functions for which code is generated") DEFINE_bool(print_source, false,"pretty print source code") DEFINE_bool(print_builtin_source, false,"pretty print source code for builtins") DEFINE_bool(print_ast, false,"print source AST") DEFINE_bool(print_builtin_ast, false,"print source AST for builtins") DEFINE_string(stop_at,"","function name where to insert a breakpoint") DEFINE_bool(print_builtin_scopes, false,"print scopes for builtins") DEFINE_bool(print_scopes, false,"print scopes") DEFINE_bool(trace_contexts, false,"trace contexts operations") DEFINE_bool(gc_greedy, false,"perform GC prior to some allocations") DEFINE_bool(gc_verbose, false,"print stuff during garbage collection") DEFINE_bool(heap_stats, false,"report heap statistics before and after GC") DEFINE_bool(code_stats, false,"report code statistics after GC") DEFINE_bool(verify_heap, false,"verify heap pointers before and after GC") DEFINE_bool(print_handles, false,"report handles after GC") DEFINE_bool(print_global_handles, false,"report global handles after GC") DEFINE_bool(trace_ic, false,"trace inline cache state transitions") DEFINE_bool(print_interfaces, false,"print interfaces") DEFINE_bool(print_interface_details, false,"print interface inference details") DEFINE_int(print_interface_depth, 5,"depth for printing interfaces") DEFINE_bool(trace_normalization, false,"prints when objects are turned into dictionaries.") DEFINE_bool(trace_lazy, false,"trace lazy compilation") DEFINE_bool(collect_heap_spill_statistics, false,"report heap spill statistics along with heap_stats ""(requires heap_stats)") 
DEFINE_bool(trace_isolates, false,"trace isolate state changes") DEFINE_bool(log_state_changes, false,"Log state changes.") DEFINE_bool(regexp_possessive_quantifier, false,"enable possessive quantifier syntax for testing") DEFINE_bool(trace_regexp_bytecodes, false,"trace regexp bytecode execution") DEFINE_bool(trace_regexp_assembler, false,"trace regexp macro assembler calls.")#define FLAG DEFINE_bool(log, false,"Minimal logging (no API, code, GC, suspect, or handles samples).") DEFINE_bool(log_all, false,"Log all events to the log file.") DEFINE_bool(log_runtime, false,"Activate runtime system %Log call.") DEFINE_bool(log_api, false,"Log API events to the log file.") DEFINE_bool(log_code, false,"Log code events to the log file without profiling.") DEFINE_bool(log_gc, false,"Log heap samples on garbage collection for the hp2ps tool.") DEFINE_bool(log_handles, false,"Log global handle events.") DEFINE_bool(log_snapshot_positions, false,"log positions of (de)serialized objects in the snapshot.") DEFINE_bool(log_suspect, false,"Log suspect operations.") DEFINE_bool(prof, false,"Log statistical profiling information (implies --log-code).") DEFINE_bool(prof_auto, true,"Used with --prof, starts profiling automatically") DEFINE_bool(prof_lazy, false,"Used with --prof, only does sampling and logging"" when profiler is active (implies --noprof_auto).") DEFINE_bool(prof_browser_mode, true,"Used with --prof, turns on browser-compatible mode for profiling.") DEFINE_bool(log_regexp, false,"Log regular expression execution.") DEFINE_bool(sliding_state_window, false,"Update sliding state window counters.") DEFINE_string(logfile,"v8.log","Specify the name of the log file.") DEFINE_bool(ll_prof, false,"Enable low-level linux profiler.")#define FLAG DEFINE_bool(trace_elements_transitions, false,"trace elements transitions") DEFINE_bool(print_code_stubs, false,"print code stubs") DEFINE_bool(test_secondary_stub_cache, false,"test secondary stub cache by disabling the primary one") 
DEFINE_bool(test_primary_stub_cache, false,"test primary stub cache by disabling the secondary one") DEFINE_bool(print_code, false,"print generated code") DEFINE_bool(print_opt_code, false,"print optimized code") DEFINE_bool(print_unopt_code, false,"print unoptimized code before ""printing optimized code based on it") DEFINE_bool(print_code_verbose, false,"print more information for code") DEFINE_bool(print_builtin_code, false,"print generated code for builtins")#43"/Users/thlorenz/dev/dx/v8-perf/build/v8/src/flags.cc"2#define FLAG_MODE_DEFINE_DEFAULTS#1"/Users/thlorenz/dev/dx/v8-perf/build/v8/src/flag-definitions.h"1#define FLAG_FULL(ftype, ctype, nam, def, cmt)#define FLAG_READONLY(ftype, ctype, nam, def, cmt)#define DEFINE_implication(whenflag, thenflag)#define DEFINE_bool(nam, def, cmt)#define DEFINE_int(nam, def, cmt)#define DEFINE_float(nam, def, cmt)#define DEFINE_string(nam, def, cmt)#define DEFINE_args(nam, def, cmt)#define FLAG DEFINE_bool(use_strict, false,"enforce strict mode") DEFINE_bool(es5_readonly, false,"activate correct semantics for inheriting readonliness") DEFINE_bool(es52_globals, false,"activate new semantics for global var declarations") DEFINE_bool(harmony_typeof, false,"enable harmony semantics for typeof") DEFINE_bool(harmony_scoping, false,"enable harmony block scoping") DEFINE_bool(harmony_modules, false,"enable harmony modules (implies block scoping)") DEFINE_bool(harmony_proxies, false,"enable harmony proxies") DEFINE_bool(harmony_collections, false,"enable harmony collections (sets, maps, and weak maps)") DEFINE_bool(harmony, false,"enable all harmony features (except typeof)") DEFINE_implication(harmony, harmony_scoping) DEFINE_implication(harmony, harmony_modules) DEFINE_implication(harmony, harmony_proxies) DEFINE_implication(harmony, harmony_collections) DEFINE_implication(harmony_modules, harmony_scoping) DEFINE_bool(packed_arrays, false,"optimizes arrays that have no holes") DEFINE_bool(smi_only_arrays, true,"tracks arrays 
with only smi values") DEFINE_bool(clever_optimizations, true,"Optimize object size, Array shift, DOM strings and string +") DEFINE_bool(unbox_double_arrays, true,"automatically unbox arrays of doubles") DEFINE_bool(string_slices, true,"use string slices") DEFINE_bool(crankshaft, true,"use crankshaft") DEFINE_string(hydrogen_filter,"","optimization filter") DEFINE_bool(use_range, true,"use hydrogen range analysis") DEFINE_bool(eliminate_dead_phis, true,"eliminate dead phis") DEFINE_bool(use_gvn, true,"use hydrogen global value numbering") DEFINE_bool(use_canonicalizing, true,"use hydrogen instruction canonicalizing") DEFINE_bool(use_inlining, true,"use function inlining") DEFINE_int(max_inlined_source_size, 600,"maximum source size in bytes considered for a single inlining") DEFINE_int(max_inlined_nodes, 196,"maximum number of AST nodes considered for a single inlining") DEFINE_int(max_inlined_nodes_cumulative, 196,"maximum cumulative number of AST nodes considered for inlining") DEFINE_bool(loop_invariant_code_motion, true,"loop invariant code motion") DEFINE_bool(collect_megamorphic_maps_from_stub_cache, true,"crankshaft harvests type feedback from stub cache") DEFINE_bool(hydrogen_stats, false,"print statistics for hydrogen") DEFINE_bool(trace_hydrogen, false,"trace generated hydrogen to file") DEFINE_string(trace_phase,"Z","trace generated IR for specified phases") DEFINE_bool(trace_inlining, false,"trace inlining decisions") DEFINE_bool(trace_alloc, false,"trace register allocator") DEFINE_bool(trace_all_uses, false,"trace all use positions") DEFINE_bool(trace_range, false,"trace range analysis") DEFINE_bool(trace_gvn, false,"trace global value numbering") DEFINE_bool(trace_representation, false,"trace representation types") DEFINE_bool(stress_pointer_maps, false,"pointer map for every instruction") DEFINE_bool(stress_environments, false,"environment for every instruction") DEFINE_int(deopt_every_n_times, 0,"deoptimize every n times a deopt point is passed") 
DEFINE_bool(trap_on_deopt, false,"put a break point before deoptimizing") DEFINE_bool(deoptimize_uncommon_cases, true,"deoptimize uncommon cases") DEFINE_bool(polymorphic_inlining, true,"polymorphic inlining") DEFINE_bool(use_osr, true,"use on-stack replacement") DEFINE_bool(array_bounds_checks_elimination, false,"perform array bounds checks elimination") DEFINE_bool(array_index_dehoisting, false,"perform array index dehoisting") DEFINE_bool(trace_osr, false,"trace on-stack replacement") DEFINE_int(stress_runs, 0,"number of stress runs") DEFINE_bool(optimize_closures, true,"optimize closures") DEFINE_bool(inline_construct, true,"inline constructor calls") DEFINE_bool(inline_arguments, true,"inline functions with arguments object") DEFINE_int(loop_weight, 1,"loop weight for representation inference") DEFINE_bool(optimize_for_in, true,"optimize functions containing for-in loops") DEFINE_bool(experimental_profiler, true,"enable all profiler experiments") DEFINE_bool(watch_ic_patching, false,"profiler considers IC stability") DEFINE_int(frame_count, 1,"number of stack frames inspected by the profiler") DEFINE_bool(self_optimization, false,"primitive functions trigger their own optimization") DEFINE_bool(direct_self_opt, false,"call recompile stub directly when self-optimizing") DEFINE_bool(retry_self_opt, false,"re-try self-optimization if it failed") DEFINE_bool(count_based_interrupts, false,"trigger profiler ticks based on counting instead of timing") DEFINE_bool(interrupt_at_exit, false,"insert an interrupt check at function exit") DEFINE_bool(weighted_back_edges, false,"weight back edges by jump distance for interrupt triggering") DEFINE_int(interrupt_budget, 5900,"execution budget before interrupt is triggered") DEFINE_int(type_info_threshold, 15,"percentage of ICs that must have type info to allow optimization") DEFINE_int(self_opt_count, 130,"call count before self-optimization") DEFINE_implication(experimental_profiler, watch_ic_patching) 
DEFINE_implication(experimental_profiler, self_optimization) DEFINE_implication(experimental_profiler, retry_self_opt) DEFINE_implication(experimental_profiler, count_based_interrupts) DEFINE_implication(experimental_profiler, interrupt_at_exit) DEFINE_implication(experimental_profiler, weighted_back_edges) DEFINE_bool(trace_opt_verbose, false,"extra verbose compilation tracing") DEFINE_implication(trace_opt_verbose, trace_opt) DEFINE_bool(debug_code, false,"generate extra code (assertions) for debugging") DEFINE_bool(code_comments, false,"emit comments in code disassembly") DEFINE_bool(enable_sse2, true,"enable use of SSE2 instructions if available") DEFINE_bool(enable_sse3, true,"enable use of SSE3 instructions if available") DEFINE_bool(enable_sse4_1, true,"enable use of SSE4.1 instructions if available") DEFINE_bool(enable_cmov, true,"enable use of CMOV instruction if available") DEFINE_bool(enable_rdtsc, true,"enable use of RDTSC instruction if available") DEFINE_bool(enable_sahf, true,"enable use of SAHF instruction if available (X64 only)") DEFINE_bool(enable_vfp3, true,"enable use of VFP3 instructions if available - this implies ""enabling ARMv7 instructions (ARM only)") DEFINE_bool(enable_armv7, true,"enable use of ARMv7 instructions if available (ARM only)") DEFINE_bool(enable_fpu, true,"enable use of MIPS FPU instructions if available (MIPS only)") DEFINE_string(expose_natives_as, NULL,"expose natives in global object") DEFINE_string(expose_debug_as, NULL,"expose debug in global object") DEFINE_bool(expose_gc, false,"expose gc extension") DEFINE_bool(expose_externalize_string, false,"expose externalize string extension") DEFINE_int(stack_trace_limit, 10,"number of stack frames to capture") DEFINE_bool(builtins_in_stack_traces, false,"show built-in functions in stack traces") DEFINE_bool(disable_native_files, false,"disable builtin natives files") DEFINE_bool(inline_new, true,"use fast inline allocation") DEFINE_bool(stack_trace_on_abort, true,"print a 
stack trace if an assertion failure occurs") DEFINE_bool(trace, false,"trace function calls") DEFINE_bool(mask_constants_with_cookie, true,"use random jit cookie to mask large constants") DEFINE_bool(lazy, true,"use lazy compilation") DEFINE_bool(trace_opt, false,"trace lazy optimization") DEFINE_bool(trace_opt_stats, false,"trace lazy optimization statistics") DEFINE_bool(opt, true,"use adaptive optimizations") DEFINE_bool(always_opt, false,"always try to optimize functions") DEFINE_bool(prepare_always_opt, false,"prepare for turning on always opt") DEFINE_bool(trace_deopt, false,"trace deoptimization") DEFINE_int(min_preparse_length, 1024,"minimum length for automatic enable preparsing") DEFINE_bool(always_full_compiler, false,"try to use the dedicated run-once backend for all code") DEFINE_bool(trace_bailout, false,"print reasons for falling back to using the classic V8 backend") DEFINE_bool(compilation_cache, true,"enable compilation cache") DEFINE_bool(cache_prototype_transitions, true,"cache prototype transitions") DEFINE_bool(trace_debug_json, false,"trace debugging JSON request/response") DEFINE_bool(debugger_auto_break, true,"automatically set the debug break flag when debugger commands are ""in the queue") DEFINE_bool(enable_liveedit, true,"enable liveedit experimental feature") DEFINE_bool(break_on_abort, true,"always cause a debug break before aborting") DEFINE_int(stack_size, kPointerSize *123,"default size of stack region v8 is allowed to use (in kBytes)") DEFINE_int(max_stack_trace_source_length, 300,"maximum length of function source code printed in a stack trace.") DEFINE_bool(always_inline_smi_code, false,"always inline smi code in non-opt code") DEFINE_int(max_new_space_size, 0,"max size of the new generation (in kBytes)") DEFINE_int(max_old_space_size, 0,"max size of the old generation (in Mbytes)") DEFINE_int(max_executable_size, 0,"max size of executable memory (in Mbytes)") DEFINE_bool(gc_global, false,"always perform global GCs") 
DEFINE_int(gc_interval,-1,"garbage collect after <n> allocations") DEFINE_bool(trace_gc, false,"print one trace line following each garbage collection") DEFINE_bool(trace_gc_nvp, false,"print one detailed trace line in name=value format ""after each garbage collection") DEFINE_bool(print_cumulative_gc_stat, false,"print cumulative GC statistics in name=value format on exit") DEFINE_bool(trace_gc_verbose, false,"print more details following each garbage collection") DEFINE_bool(trace_fragmentation, false,"report fragmentation for old pointer and data pages") DEFINE_bool(collect_maps, true,"garbage collect maps from which no objects can be reached") DEFINE_bool(flush_code, true,"flush code that we expect not to use again before full gc") DEFINE_bool(incremental_marking, true,"use incremental marking") DEFINE_bool(incremental_marking_steps, true,"do incremental marking steps") DEFINE_bool(trace_incremental_marking, false,"trace progress of the incremental marking") DEFINE_bool(use_idle_notification, true,"Use idle notification to reduce memory footprint.") DEFINE_bool(send_idle_notification, false,"Send idle notifcation between stress runs.") DEFINE_bool(use_ic, true,"use inline caching") DEFINE_bool(native_code_counters, false,"generate extra code for manipulating stats counters") DEFINE_bool(always_compact, false,"Perform compaction on every full GC") DEFINE_bool(lazy_sweeping, true,"Use lazy sweeping for old pointer and data spaces") DEFINE_bool(never_compact, false,"Never perform compaction on full GC - testing only") DEFINE_bool(compact_code_space, true,"Compact code space on full non-incremental collections") DEFINE_bool(cleanup_code_caches_at_gc, true,"Flush inline caches prior to mark compact collection and ""flush code caches in maps during mark compact cycle.") DEFINE_int(random_seed, 0,"Default seed for initializing random generator ""(0, the default, means to use system random).") DEFINE_bool(use_verbose_printer, true,"allows verbose printing") 
DEFINE_bool(allow_natives_syntax, false,"allow natives syntax") DEFINE_bool(trace_sim, false,"Trace simulator execution") DEFINE_bool(check_icache, false,"Check icache flushes in ARM and MIPS simulator") DEFINE_int(stop_sim_at, 0,"Simulator stop after x number of instructions") DEFINE_int(sim_stack_alignment, 8,"Stack alingment in bytes in simulator (4 or 8, 8 is default)") DEFINE_bool(trace_exception, false,"print stack trace when throwing exceptions") DEFINE_bool(preallocate_message_memory, false,"preallocate some memory to build stack traces.") DEFINE_bool(randomize_hashes, true,"randomize hashes to avoid predictable hash collisions ""(with snapshots this option cannot override the baked-in seed)") DEFINE_int(hash_seed, 0,"Fixed seed to use to hash property keys (0 means random)""(with snapshots this option cannot override the baked-in seed)") DEFINE_bool(preemption, false,"activate a 100ms timer that switches between V8 threads") DEFINE_bool(regexp_optimization, true,"generate optimized regexp code") DEFINE_bool(testing_bool_flag, true,"testing_bool_flag") DEFINE_int(testing_int_flag, 13,"testing_int_flag") DEFINE_float(testing_float_flag, 2.5,"float-flag") DEFINE_string(testing_string_flag,"Hello, world!","string-flag") DEFINE_int(testing_prng_seed, 42,"Seed used for threading test randomness") DEFINE_string(testing_serialization_file,"/tmp/serdes","file in which to serialize heap") DEFINE_bool(help, false,"Print usage message, including flags, on console") DEFINE_bool(dump_counters, false,"Dump counters on exit") DEFINE_string(map_counters,"","Map counters to a file") DEFINE_args(js_arguments, JSARGUMENTS_INIT,"Pass all remaining arguments to the script. 
Alias for \"--\".") DEFINE_bool(debug_compile_events, true,"Enable debugger compile events") DEFINE_bool(debug_script_collected_events, true,"Enable debugger script collected events") DEFINE_bool(gdbjit, false,"enable GDBJIT interface (disables compacting GC)") DEFINE_bool(gdbjit_full, false,"enable GDBJIT interface for all code objects") DEFINE_bool(gdbjit_dump, false,"dump elf objects with debug info to disk") DEFINE_string(gdbjit_dump_filter,"","dump only objects containing this substring") DEFINE_bool(force_marking_deque_overflows, false,"force overflows of marking deque by reducing it's size ""to 64 words") DEFINE_bool(stress_compaction, false,"stress the GC compactor to flush out bugs (implies ""--force_marking_deque_overflows)")#define FLAG DEFINE_bool(enable_slow_asserts, false,"enable asserts that are slow to execute") DEFINE_bool(trace_codegen, false,"print name of functions for which code is generated") DEFINE_bool(print_source, false,"pretty print source code") DEFINE_bool(print_builtin_source, false,"pretty print source code for builtins") DEFINE_bool(print_ast, false,"print source AST") DEFINE_bool(print_builtin_ast, false,"print source AST for builtins") DEFINE_string(stop_at,"","function name where to insert a breakpoint") DEFINE_bool(print_builtin_scopes, false,"print scopes for builtins") DEFINE_bool(print_scopes, false,"print scopes") DEFINE_bool(trace_contexts, false,"trace contexts operations") DEFINE_bool(gc_greedy, false,"perform GC prior to some allocations") DEFINE_bool(gc_verbose, false,"print stuff during garbage collection") DEFINE_bool(heap_stats, false,"report heap statistics before and after GC") DEFINE_bool(code_stats, false,"report code statistics after GC") DEFINE_bool(verify_heap, false,"verify heap pointers before and after GC") DEFINE_bool(print_handles, false,"report handles after GC") DEFINE_bool(print_global_handles, false,"report global handles after GC") DEFINE_bool(trace_ic, false,"trace inline cache state transitions") 
DEFINE_bool(print_interfaces, false,"print interfaces") DEFINE_bool(print_interface_details, false,"print interface inference details") DEFINE_int(print_interface_depth, 5,"depth for printing interfaces") DEFINE_bool(trace_normalization, false,"prints when objects are turned into dictionaries.") DEFINE_bool(trace_lazy, false,"trace lazy compilation") DEFINE_bool(collect_heap_spill_statistics, false,"report heap spill statistics along with heap_stats ""(requires heap_stats)") DEFINE_bool(trace_isolates, false,"trace isolate state changes") DEFINE_bool(log_state_changes, false,"Log state changes.") DEFINE_bool(regexp_possessive_quantifier, false,"enable possessive quantifier syntax for testing") DEFINE_bool(trace_regexp_bytecodes, false,"trace regexp bytecode execution") DEFINE_bool(trace_regexp_assembler, false,"trace regexp macro assembler calls.")#define FLAG DEFINE_bool(log, false,"Minimal logging (no API, code, GC, suspect, or handles samples).") DEFINE_bool(log_all, false,"Log all events to the log file.") DEFINE_bool(log_runtime, false,"Activate runtime system %Log call.") DEFINE_bool(log_api, false,"Log API events to the log file.") DEFINE_bool(log_code, false,"Log code events to the log file without profiling.") DEFINE_bool(log_gc, false,"Log heap samples on garbage collection for the hp2ps tool.") DEFINE_bool(log_handles, false,"Log global handle events.") DEFINE_bool(log_snapshot_positions, false,"log positions of (de)serialized objects in the snapshot.") DEFINE_bool(log_suspect, false,"Log suspect operations.") DEFINE_bool(prof, false,"Log statistical profiling information (implies --log-code).") DEFINE_bool(prof_auto, true,"Used with --prof, starts profiling automatically") DEFINE_bool(prof_lazy, false,"Used with --prof, only does sampling and logging"" when profiler is active (implies --noprof_auto).") DEFINE_bool(prof_browser_mode, true,"Used with --prof, turns on browser-compatible mode for profiling.") DEFINE_bool(log_regexp, false,"Log regular 
expression execution.") DEFINE_bool(sliding_state_window, false,"Update sliding state window counters.") DEFINE_string(logfile,"v8.log","Specify the name of the log file.") DEFINE_bool(ll_prof, false,"Enable low-level linux profiler.")#define FLAG DEFINE_bool(trace_elements_transitions, false,"trace elements transitions") DEFINE_bool(print_code_stubs, false,"print code stubs") DEFINE_bool(test_secondary_stub_cache, false,"test secondary stub cache by disabling the primary one") DEFINE_bool(test_primary_stub_cache, false,"test primary stub cache by disabling the secondary one") DEFINE_bool(print_code, false,"print generated code") DEFINE_bool(print_opt_code, false,"print optimized code") DEFINE_bool(print_unopt_code, false,"print unoptimized code before ""printing optimized code based on it") DEFINE_bool(print_code_verbose, false,"print more information for code") DEFINE_bool(print_builtin_code, false,"print generated code for builtins")#47"/Users/thlorenz/dev/dx/v8-perf/build/v8/src/flags.cc"2 namespace{struct Flag{enum FlagType{TYPE_BOOL, TYPE_INT, TYPE_FLOAT, TYPE_STRING, TYPE_ARGS} name
Definition: flags.cc:1349
const Register esp
static const int kGlobalReceiverOffset
Definition: objects.h:6085
static void GenerateGlobalProxy(MacroAssembler *masm, StrictModeFlag strict_mode)
static void GenerateMegamorphic(MacroAssembler *masm, StrictModeFlag strict_mode)
static void GenerateStringLength(MacroAssembler *masm, bool support_wrappers)
Flag flags[]
Definition: flags.cc:1467
static const int kHasNamedInterceptor
Definition: objects.h:5003
static const int kIsAccessCheckNeeded
Definition: objects.h:5007
Address address() const
Definition: ic-inl.h:41
static void GenerateMegamorphic(MacroAssembler *masm)
#define ASSERT(condition)
Definition: checks.h:270
const int kPointerSizeLog2
Definition: globals.h:246
static const int kInstanceSizeOffset
Definition: objects.h:4981
Isolate * isolate() const
Definition: ic.h:140
static const ExtraICState kNoExtraICState
Definition: objects.h:4199
static const byte kTestAlByte
void PatchInlinedSmiCode(Address address, InlinedSmiCheck check)
static void GenerateArrayLength(MacroAssembler *masm)
const Register edi
static const int kHashFieldOffset
Definition: objects.h:7099
uint8_t byte
Definition: globals.h:171
static const int kHasIndexedInterceptor
Definition: objects.h:5004
static const byte kJcShortOpcode
void UpdateCaches(Handle< Object > x, Handle< Object > y)
const Register ebp
#define UNREACHABLE()
Definition: checks.h:50
static Condition ComputeCondition(Token::Value op)
STATIC_ASSERT((FixedDoubleArray::kHeaderSize &kDoubleAlignmentMask)==0)
const uint32_t kIsSymbolMask
Definition: objects.h:443
const Register eax
const int kPointerSize
Definition: globals.h:234
static void GenerateGeneric(MacroAssembler *masm)
static const byte kNopByte
Operand FieldOperand(Register object, int offset)
const Register ecx
static const byte kJzShortOpcode
const int kHeapObjectTag
Definition: v8.h:3848
static void GenerateMiss(MacroAssembler *masm)
#define __
static bool decode(uint32_t value)
Definition: utils.h:272
static const int kPropertiesOffset
Definition: objects.h:2113
static const int kInObjectPropertiesOffset
Definition: objects.h:4983
static void GenerateSmiToDouble(MacroAssembler *masm, Label *fail)
static void GenerateMapChangeElementsTransition(MacroAssembler *masm)
const Register r0
static const int kElementsOffset
Definition: objects.h:2114
static const int kContainsCachedArrayIndexMask
Definition: objects.h:7154
static const int kCallTargetAddressOffset
static void GenerateMegamorphic(MacroAssembler *masm, int argc)
static const int kLengthOffset
Definition: objects.h:8111
static const int kHeaderSize
Definition: objects.h:2233
static void GenerateSlow(MacroAssembler *masm)
static void GenerateMiss(MacroAssembler *masm, int argc, Code::ExtraICState extra_state)
Definition: ic.h:278
static const int kMapOffset
Definition: objects.h:1219
static void GenerateRuntimeSetProperty(MacroAssembler *masm, StrictModeFlag strict_mode)
const Register r1
static void GenerateDoubleToObject(MacroAssembler *masm, Label *fail)
static const byte kJncShortOpcode
static const int kLengthOffset
Definition: objects.h:2232
static const int kSlowCaseBitFieldMask
Definition: ic.h:508
const Register ebx
static void GenerateNormal(MacroAssembler *masm, int argc)
static void GeneratePositiveLookup(MacroAssembler *masm, Label *miss, Label *done, Register elements, Register name, Register r0, Register r1)
InlinedSmiCheck
Definition: ic.h:841
static void GenerateMonomorphicCacheProbe(MacroAssembler *masm, int argc, Code::Kind kind, Code::ExtraICState extra_state)
static void GenerateString(MacroAssembler *masm)
static Flags ComputeFlags(Kind kind, InlineCacheState ic_state=UNINITIALIZED, ExtraICState extra_ic_state=kNoExtraICState, PropertyType type=NORMAL, int argc=-1, InlineCacheHolderFlag holder=OWN_MAP)
Definition: objects-inl.h:3312
const int kSmiTagSize
Definition: v8.h:3854
Condition NegateCondition(Condition cond)
activate correct semantics for inheriting readonliness enable harmony semantics for typeof enable harmony enable harmony proxies enable all harmony harmony_scoping harmony_proxies harmony_scoping tracks arrays with only smi values automatically unbox arrays of doubles use crankshaft use hydrogen range analysis use hydrogen global value numbering use function inlining maximum number of AST nodes considered for a single inlining loop invariant code motion print statistics for hydrogen trace generated IR for specified phases trace register allocator trace range analysis trace representation types environment for every instruction put a break point before deoptimizing polymorphic inlining perform array bounds checks elimination trace on stack replacement optimize closures functions with arguments object optimize functions containing for in loops profiler considers IC stability primitive functions trigger their own optimization re try self optimization if it failed insert an interrupt check at function exit execution budget before interrupt is triggered call count before self optimization self_optimization count_based_interrupts weighted_back_edges trace_opt emit comments in code disassembly enable use of SSE3 instructions if available enable use of CMOV instruction if available enable use of SAHF instruction if enable use of VFP3 instructions if available this implies enabling ARMv7 enable use of ARMv7 instructions if enable use of MIPS FPU instructions if NULL
Definition: flags.cc:274
const Register esi
static void GenerateNonStrictArguments(MacroAssembler *masm)
static void GenerateMegamorphic(MacroAssembler *masm, int argc, Code::ExtraICState extra_ic_state)
Counters * counters()
Definition: isolate.h:804
const int kSmiTag
Definition: v8.h:3853
static Operand FixedArrayElementOperand(Register array, Register index_as_smi, int additional_offset=0)
Definition: codegen-ia32.h:63
static void GenerateNormal(MacroAssembler *masm)
static void GenerateFunctionPrototype(MacroAssembler *masm)
static void GenerateIndexedInterceptor(MacroAssembler *masm)
#define FACTORY
Definition: isolate.h:1409
void set_target(Code *code)
Definition: ic.h:149
static const byte kJnzShortOpcode
const Register no_reg
const uint32_t kSymbolTag
Definition: objects.h:445
static const int kCapacityMask
Definition: heap.h:2234
static void GenerateNonStrictArguments(MacroAssembler *masm)
static void GenerateMiss(MacroAssembler *masm, bool force_generic)
static void GenerateMiss(MacroAssembler *masm)
static void GenerateTransitionElementsSmiToDouble(MacroAssembler *masm)
static const int kHashShift
Definition: objects.h:7121
static void GenerateNonStrictArguments(MacroAssembler *masm, int argc)
const Register edx
static void GenerateArrayLength(MacroAssembler *masm)
static const char * GetStateName(State state)
Definition: ic.cc:2564
void check(i::Vector< const char > string)
static const int kHashMask
Definition: heap.h:2236
static void GenerateMiss(MacroAssembler *masm, int argc)
Definition: ic.h:311
FlagType type() const
Definition: flags.cc:1358
static const int kInstanceTypeOffset
Definition: objects.h:4992
static const int kEntriesPerBucket
Definition: heap.h:2237
static void GenerateNormal(MacroAssembler *masm)
const XMMRegister xmm0
static void GenerateMiss(MacroAssembler *masm, bool force_generic)