V8 3.11.10 (node 0.8.26)
V8 is Google's open source JavaScript engine.
ic-mips.cc
1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are
4 // met:
5 //
6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided
11 // with the distribution.
12 // * Neither the name of Google Inc. nor the names of its
13 // contributors may be used to endorse or promote products derived
14 // from this software without specific prior written permission.
15 //
16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27 
28 
29 
30 #include "v8.h"
31 
32 #if defined(V8_TARGET_ARCH_MIPS)
33 
34 #include "codegen.h"
35 #include "code-stubs.h"
36 #include "ic-inl.h"
37 #include "runtime.h"
38 #include "stub-cache.h"
39 
40 namespace v8 {
41 namespace internal {
42 
43 
44 // ----------------------------------------------------------------------------
45 // Static IC stub generators.
46 //
47 
48 #define __ ACCESS_MASM(masm)
49 
50 
51 static void GenerateGlobalInstanceTypeCheck(MacroAssembler* masm,
52  Register type,
53  Label* global_object) {
54  // Register usage:
55  // type: holds the receiver instance type on entry.
56  __ Branch(global_object, eq, type, Operand(JS_GLOBAL_OBJECT_TYPE));
57  __ Branch(global_object, eq, type, Operand(JS_BUILTINS_OBJECT_TYPE));
58  __ Branch(global_object, eq, type, Operand(JS_GLOBAL_PROXY_TYPE));
59 }
60 
61 
62 // Generated code falls through if the receiver is a regular non-global
63 // JS object with slow properties and no interceptors.
64 static void GenerateStringDictionaryReceiverCheck(MacroAssembler* masm,
65  Register receiver,
66  Register elements,
67  Register scratch0,
68  Register scratch1,
69  Label* miss) {
70  // Register usage:
71  // receiver: holds the receiver on entry and is unchanged.
72  // elements: holds the property dictionary on fall through.
73  // Scratch registers:
74  // scratch0: used to hold the receiver map.
75  // scratch1: used to hold the receiver instance type, receiver bit mask
76  // and elements map.
77 
78  // Check that the receiver isn't a smi.
79  __ JumpIfSmi(receiver, miss);
80 
81  // Check that the receiver is a valid JS object.
82  __ GetObjectType(receiver, scratch0, scratch1);
83  __ Branch(miss, lt, scratch1, Operand(FIRST_SPEC_OBJECT_TYPE));
84 
85  // If this assert fails, we have to check upper bound too.
86  STATIC_ASSERT(LAST_TYPE == LAST_SPEC_OBJECT_TYPE);
87 
88  GenerateGlobalInstanceTypeCheck(masm, scratch1, miss);
89 
90  // Check that the global object does not require access checks.
91  __ lbu(scratch1, FieldMemOperand(scratch0, Map::kBitFieldOffset));
92  __ And(scratch1, scratch1, Operand((1 << Map::kIsAccessCheckNeeded) |
93  (1 << Map::kHasNamedInterceptor)));
94  __ Branch(miss, ne, scratch1, Operand(zero_reg));
95 
96  __ lw(elements, FieldMemOperand(receiver, JSObject::kPropertiesOffset));
97  __ lw(scratch1, FieldMemOperand(elements, HeapObject::kMapOffset));
98  __ LoadRoot(scratch0, Heap::kHashTableMapRootIndex);
99  __ Branch(miss, ne, scratch1, Operand(scratch0));
100 }
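// Illustrative sketch (not part of the original file): the bit-field test
// above reduces to the following plain C++ over the byte loaded from
// Map::kBitFieldOffset. Any set bit sends the receiver to the miss label:
//
//   const int kMask = (1 << Map::kIsAccessCheckNeeded) |
//                     (1 << Map::kHasNamedInterceptor);
//   bool take_miss = (bit_field & kMask) != 0;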
101 
102 
103 // Helper function used from LoadIC/CallIC GenerateNormal.
104 //
105 // elements: Property dictionary. It is not clobbered if a jump to the miss
106 // label is done.
107 // name: Property name. It is not clobbered if a jump to the miss label is
108 // done.
109 // result: Register for the result. It is only updated if a jump to the miss
110 // label is not done. Can be the same as elements or name clobbering
111 // one of these in the case of not jumping to the miss label.
112 // The two scratch registers need to be different from elements, name and
113 // result.
114 // The generated code assumes that the receiver has slow properties,
115 // is not a global object and does not have interceptors.
116 // The address returned from GenerateStringDictionaryProbes() in scratch2
117 // is used.
118 static void GenerateDictionaryLoad(MacroAssembler* masm,
119  Label* miss,
120  Register elements,
121  Register name,
122  Register result,
123  Register scratch1,
124  Register scratch2) {
125  // Main use of the scratch registers.
126  // scratch1: Used as temporary and to hold the capacity of the property
127  // dictionary.
128  // scratch2: Used as temporary.
129  Label done;
130 
131  // Probe the dictionary.
132  StringDictionaryLookupStub::GeneratePositiveLookup(masm,
133  miss,
134  &done,
135  elements,
136  name,
137  scratch1,
138  scratch2);
139 
140  // If probing finds an entry check that the value is a normal
141  // property.
142  __ bind(&done); // scratch2 == elements + 4 * index.
143  const int kElementsStartOffset = StringDictionary::kHeaderSize +
144  StringDictionary::kElementsStartIndex * kPointerSize;
145  const int kDetailsOffset = kElementsStartOffset + 2 * kPointerSize;
146  __ lw(scratch1, FieldMemOperand(scratch2, kDetailsOffset));
147  __ And(at,
148  scratch1,
149  Operand(PropertyDetails::TypeField::kMask << kSmiTagSize));
150  __ Branch(miss, ne, at, Operand(zero_reg));
151 
152  // Get the value at the masked, scaled index and return.
153  __ lw(result,
154  FieldMemOperand(scratch2, kElementsStartOffset + 1 * kPointerSize));
155 }
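// Worked offsets (illustrative, not from this file): StringDictionary
// entries are (key, value, details) triples laid out after the header, so
// relative to the entry address left in scratch2 by the probe:
//
//   key     at kElementsStartOffset + 0 * kPointerSize
//   value   at kElementsStartOffset + 1 * kPointerSize  (the load above)
//   details at kElementsStartOffset + 2 * kPointerSize  (kDetailsOffset)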
156 
157 
158 // Helper function used from StoreIC::GenerateNormal.
159 //
160 // elements: Property dictionary. It is not clobbered if a jump to the miss
161 // label is done.
162 // name: Property name. It is not clobbered if a jump to the miss label is
163 // done.
164 // value: The value to store.
165 // The two scratch registers need to be different from elements, name and
166 // result.
167 // The generated code assumes that the receiver has slow properties,
168 // is not a global object and does not have interceptors.
169 // The address returned from GenerateStringDictionaryProbes() in scratch2
170 // is used.
171 static void GenerateDictionaryStore(MacroAssembler* masm,
172  Label* miss,
173  Register elements,
174  Register name,
175  Register value,
176  Register scratch1,
177  Register scratch2) {
178  // Main use of the scratch registers.
179  // scratch1: Used as temporary and to hold the capacity of the property
180  // dictionary.
181  // scratch2: Used as temporary.
182  Label done;
183 
184  // Probe the dictionary.
185  StringDictionaryLookupStub::GeneratePositiveLookup(masm,
186  miss,
187  &done,
188  elements,
189  name,
190  scratch1,
191  scratch2);
192 
193  // If probing finds an entry in the dictionary check that the value
194  // is a normal property that is not read only.
195  __ bind(&done); // scratch2 == elements + 4 * index.
196  const int kElementsStartOffset = StringDictionary::kHeaderSize +
197  StringDictionary::kElementsStartIndex * kPointerSize;
198  const int kDetailsOffset = kElementsStartOffset + 2 * kPointerSize;
199  const int kTypeAndReadOnlyMask =
200  (PropertyDetails::TypeField::kMask |
201  PropertyDetails::AttributesField::encode(READ_ONLY)) << kSmiTagSize;
202  __ lw(scratch1, FieldMemOperand(scratch2, kDetailsOffset));
203  __ And(at, scratch1, Operand(kTypeAndReadOnlyMask));
204  __ Branch(miss, ne, at, Operand(zero_reg));
205 
206  // Store the value at the masked, scaled index and return.
207  const int kValueOffset = kElementsStartOffset + kPointerSize;
208  __ Addu(scratch2, scratch2, Operand(kValueOffset - kHeapObjectTag));
209  __ sw(value, MemOperand(scratch2));
210 
211  // Update the write barrier. Make sure not to clobber the value.
212  __ mov(scratch1, value);
213  __ RecordWrite(
214  elements, scratch2, scratch1, kRAHasNotBeenSaved, kDontSaveFPRegs);
215 }
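// Illustrative expansion (not part of the original file) of the details
// check above: the details word is a smi, so both the property type field
// and the READ_ONLY attribute bit sit one bit to the left of their untagged
// positions, hence the << kSmiTagSize in the mask. The store proceeds only
// if the masked bits are all zero:
//
//   bool writable_normal_property =
//       (details & kTypeAndReadOnlyMask) == 0;  // else -> miss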
216 
217 
218 void LoadIC::GenerateArrayLength(MacroAssembler* masm) {
219  // ----------- S t a t e -------------
220  // -- a2 : name
221  // -- ra : return address
222  // -- a0 : receiver
223  // -- sp[0] : receiver
224  // -----------------------------------
225  Label miss;
226 
227  StubCompiler::GenerateLoadArrayLength(masm, a0, a3, &miss);
228  __ bind(&miss);
229  StubCompiler::GenerateLoadMiss(masm, Code::LOAD_IC);
230 }
231 
232 
233 void LoadIC::GenerateStringLength(MacroAssembler* masm, bool support_wrappers) {
234  // ----------- S t a t e -------------
235  // -- a2 : name
236  // -- ra : return address
237  // -- a0 : receiver
238  // -- sp[0] : receiver
239  // -----------------------------------
240  Label miss;
241 
242  StubCompiler::GenerateLoadStringLength(masm, a0, a1, a3, &miss,
243  support_wrappers);
244  // Cache miss: Jump to runtime.
245  __ bind(&miss);
246  StubCompiler::GenerateLoadMiss(masm, Code::LOAD_IC);
247 }
248 
249 
250 void LoadIC::GenerateFunctionPrototype(MacroAssembler* masm) {
251  // ----------- S t a t e -------------
252  // -- a2 : name
253  // -- ra : return address
254  // -- a0 : receiver
255  // -- sp[0] : receiver
256  // -----------------------------------
257  Label miss;
258 
259  StubCompiler::GenerateLoadFunctionPrototype(masm, a0, a1, a3, &miss);
260  __ bind(&miss);
261  StubCompiler::GenerateLoadMiss(masm, Code::LOAD_IC);
262 }
263 
264 
265 // Checks the receiver for special cases (value type, slow case bits).
266 // Falls through for regular JS object.
267 static void GenerateKeyedLoadReceiverCheck(MacroAssembler* masm,
268  Register receiver,
269  Register map,
270  Register scratch,
271  int interceptor_bit,
272  Label* slow) {
273  // Check that the object isn't a smi.
274  __ JumpIfSmi(receiver, slow);
275  // Get the map of the receiver.
276  __ lw(map, FieldMemOperand(receiver, HeapObject::kMapOffset));
277  // Check bit field.
278  __ lbu(scratch, FieldMemOperand(map, Map::kBitFieldOffset));
279  __ And(at, scratch, Operand(KeyedLoadIC::kSlowCaseBitFieldMask));
280  __ Branch(slow, ne, at, Operand(zero_reg));
281  // Check that the object is some kind of JS object EXCEPT JS Value type.
282  // In the case that the object is a value-wrapper object,
283  // we enter the runtime system to make sure that indexing into string
284  // objects work as intended.
285  ASSERT(JS_OBJECT_TYPE > JS_VALUE_TYPE);
286  __ lbu(scratch, FieldMemOperand(map, Map::kInstanceTypeOffset));
287  __ Branch(slow, lt, scratch, Operand(JS_OBJECT_TYPE));
288 }
289 
290 
291 // Loads an indexed element from a fast case array.
292 // If not_fast_array is NULL, doesn't perform the elements map check.
293 static void GenerateFastArrayLoad(MacroAssembler* masm,
294  Register receiver,
295  Register key,
296  Register elements,
297  Register scratch1,
298  Register scratch2,
299  Register result,
300  Label* not_fast_array,
301  Label* out_of_range) {
302  // Register use:
303  //
304  // receiver - holds the receiver on entry.
305  // Unchanged unless 'result' is the same register.
306  //
307  // key - holds the smi key on entry.
308  // Unchanged unless 'result' is the same register.
309  //
310  // elements - holds the elements of the receiver on exit.
311  //
312  // result - holds the result on exit if the load succeeded.
313 // Allowed to be the same as 'receiver' or 'key'.
314  // Unchanged on bailout so 'receiver' and 'key' can be safely
315  // used by further computation.
316  //
317  // Scratch registers:
318  //
319  // scratch1 - used to hold elements map and elements length.
320  // Holds the elements map if not_fast_array branch is taken.
321  //
322  // scratch2 - used to hold the loaded value.
323 
324  __ lw(elements, FieldMemOperand(receiver, JSObject::kElementsOffset));
325  if (not_fast_array != NULL) {
326  // Check that the object is in fast mode (not dictionary).
327  __ lw(scratch1, FieldMemOperand(elements, HeapObject::kMapOffset));
328  __ LoadRoot(at, Heap::kFixedArrayMapRootIndex);
329  __ Branch(not_fast_array, ne, scratch1, Operand(at));
330  } else {
331  __ AssertFastElements(elements);
332  }
333 
334  // Check that the key (index) is within bounds.
335  __ lw(scratch1, FieldMemOperand(elements, FixedArray::kLengthOffset));
336  __ Branch(out_of_range, hs, key, Operand(scratch1));
337 
338  // Fast case: Do the load.
339  __ Addu(scratch1, elements,
340  Operand(FixedArray::kHeaderSize - kHeapObjectTag));
341  // The key is a smi.
342  STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize < kPointerSizeLog2);
343  __ sll(at, key, kPointerSizeLog2 - kSmiTagSize);
344  __ addu(at, at, scratch1);
345  __ lw(scratch2, MemOperand(at));
346 
347  __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
348  // In case the loaded value is the_hole we have to consult GetProperty
349  // to ensure the prototype chain is searched.
350  __ Branch(out_of_range, eq, scratch2, Operand(at));
351  __ mov(result, scratch2);
352 }
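// Worked example (illustrative): on 32-bit MIPS kSmiTagSize == 1 and
// kPointerSizeLog2 == 2, so the scaled offset computed by
// 'sll at, key, kPointerSizeLog2 - kSmiTagSize' folds the smi tag into the
// scaling instead of untagging first:
//
//   ((index << 1) << (2 - 1)) == index << 2 == index * kPointerSize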
353 
354 
355 // Checks whether a key is an array index string or a symbol string.
356 // Falls through if a key is a symbol.
357 static void GenerateKeyStringCheck(MacroAssembler* masm,
358  Register key,
359  Register map,
360  Register hash,
361  Label* index_string,
362  Label* not_symbol) {
363  // The key is not a smi.
364  // Is it a string?
365  __ GetObjectType(key, map, hash);
366  __ Branch(not_symbol, ge, hash, Operand(FIRST_NONSTRING_TYPE));
367 
368  // Is the string an array index, with cached numeric value?
369  __ lw(hash, FieldMemOperand(key, String::kHashFieldOffset));
370  __ And(at, hash, Operand(String::kContainsCachedArrayIndexMask));
371  __ Branch(index_string, eq, at, Operand(zero_reg));
372 
373  // Is the string a symbol?
374  // map: key map
375  __ lbu(hash, FieldMemOperand(map, Map::kInstanceTypeOffset));
376  STATIC_ASSERT(kSymbolTag != 0);
377  __ And(at, hash, Operand(kIsSymbolMask));
378  __ Branch(not_symbol, eq, at, Operand(zero_reg));
379 }
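// Note (illustrative): a string's hash field can cache a small array index.
// When (hash & String::kContainsCachedArrayIndexMask) == 0 the index is
// recovered by IndexFromHash() without any string-to-number conversion;
// that is the fast path taken through 'index_string' by the callers below.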
380 
381 
382 // Defined in ic.cc.
383 Object* CallIC_Miss(Arguments args);
384 
385 // The generated code does not accept smi keys.
386 // The generated code falls through if both probes miss.
387 void CallICBase::GenerateMonomorphicCacheProbe(MacroAssembler* masm,
388  int argc,
389  Code::Kind kind,
390  Code::ExtraICState extra_state) {
391  // ----------- S t a t e -------------
392  // -- a1 : receiver
393  // -- a2 : name
394  // -----------------------------------
395  Label number, non_number, non_string, boolean, probe, miss;
396 
397  // Probe the stub cache.
398  Code::Flags flags = Code::ComputeFlags(kind,
399  MONOMORPHIC,
400  extra_state,
401  NORMAL,
402  argc);
403  Isolate::Current()->stub_cache()->GenerateProbe(
404  masm, flags, a1, a2, a3, t0, t1, t2);
405 
406  // If the stub cache probing failed, the receiver might be a value.
407  // For value objects, we use the map of the prototype objects for
408  // the corresponding JSValue for the cache and that is what we need
409  // to probe.
410  //
411  // Check for number.
412  __ JumpIfSmi(a1, &number, t1);
413  __ GetObjectType(a1, a3, a3);
414  __ Branch(&non_number, ne, a3, Operand(HEAP_NUMBER_TYPE));
415  __ bind(&number);
416  StubCompiler::GenerateLoadGlobalFunctionPrototype(
417  masm, Context::NUMBER_FUNCTION_INDEX, a1);
418  __ Branch(&probe);
419 
420  // Check for string.
421  __ bind(&non_number);
422  __ Branch(&non_string, Ugreater_equal, a3, Operand(FIRST_NONSTRING_TYPE));
423  StubCompiler::GenerateLoadGlobalFunctionPrototype(
424  masm, Context::STRING_FUNCTION_INDEX, a1);
425  __ Branch(&probe);
426 
427  // Check for boolean.
428  __ bind(&non_string);
429  __ LoadRoot(t0, Heap::kTrueValueRootIndex);
430  __ Branch(&boolean, eq, a1, Operand(t0));
431  __ LoadRoot(t1, Heap::kFalseValueRootIndex);
432  __ Branch(&miss, ne, a1, Operand(t1));
433  __ bind(&boolean);
434  StubCompiler::GenerateLoadGlobalFunctionPrototype(
435  masm, Context::BOOLEAN_FUNCTION_INDEX, a1);
436 
437  // Probe the stub cache for the value object.
438  __ bind(&probe);
439  Isolate::Current()->stub_cache()->GenerateProbe(
440  masm, flags, a1, a2, a3, t0, t1, t2);
441 
442  __ bind(&miss);
443 }
444 
445 
446 static void GenerateFunctionTailCall(MacroAssembler* masm,
447  int argc,
448  Label* miss,
449  Register scratch) {
450  // a1: function
451 
452  // Check that the value isn't a smi.
453  __ JumpIfSmi(a1, miss);
454 
455  // Check that the value is a JSFunction.
456  __ GetObjectType(a1, scratch, scratch);
457  __ Branch(miss, ne, scratch, Operand(JS_FUNCTION_TYPE));
458 
459  // Invoke the function.
460  ParameterCount actual(argc);
461  __ InvokeFunction(a1, actual, JUMP_FUNCTION,
462  NullCallWrapper(), CALL_AS_METHOD);
463 }
464 
465 
466 void CallICBase::GenerateNormal(MacroAssembler* masm, int argc) {
467  // ----------- S t a t e -------------
468  // -- a2 : name
469  // -- ra : return address
470  // -----------------------------------
471  Label miss;
472 
473  // Get the receiver of the function from the stack into a1.
474  __ lw(a1, MemOperand(sp, argc * kPointerSize));
475 
476  GenerateStringDictionaryReceiverCheck(masm, a1, a0, a3, t0, &miss);
477 
478  // a0: elements
479  // Search the dictionary - put result in register a1.
480  GenerateDictionaryLoad(masm, &miss, a0, a2, a1, a3, t0);
481 
482  GenerateFunctionTailCall(masm, argc, &miss, t0);
483 
484  // Cache miss: Jump to runtime.
485  __ bind(&miss);
486 }
487 
488 
489 void CallICBase::GenerateMiss(MacroAssembler* masm,
490  int argc,
491  IC::UtilityId id,
492  Code::ExtraICState extra_state) {
493  // ----------- S t a t e -------------
494  // -- a2 : name
495  // -- ra : return address
496  // -----------------------------------
497  Isolate* isolate = masm->isolate();
498 
499  if (id == IC::kCallIC_Miss) {
500  __ IncrementCounter(isolate->counters()->call_miss(), 1, a3, t0);
501  } else {
502  __ IncrementCounter(isolate->counters()->keyed_call_miss(), 1, a3, t0);
503  }
504 
505  // Get the receiver of the function from the stack.
506  __ lw(a3, MemOperand(sp, argc*kPointerSize));
507 
508  {
509  FrameScope scope(masm, StackFrame::INTERNAL);
510 
511  // Push the receiver and the name of the function.
512  __ Push(a3, a2);
513 
514  // Call the entry.
515  __ PrepareCEntryArgs(2);
516  __ PrepareCEntryFunction(ExternalReference(IC_Utility(id), isolate));
517 
518  CEntryStub stub(1);
519  __ CallStub(&stub);
520 
521  // Move result to a1 and leave the internal frame.
522  __ mov(a1, v0);
523  }
524 
525  // Check if the receiver is a global object of some sort.
526  // This can happen only for regular CallIC but not KeyedCallIC.
527  if (id == IC::kCallIC_Miss) {
528  Label invoke, global;
529  __ lw(a2, MemOperand(sp, argc * kPointerSize));
530  __ JumpIfSmi(a2, &invoke);
531  __ GetObjectType(a2, a3, a3);
532  __ Branch(&global, eq, a3, Operand(JS_GLOBAL_OBJECT_TYPE));
533  __ Branch(&invoke, ne, a3, Operand(JS_BUILTINS_OBJECT_TYPE));
534 
535  // Patch the receiver on the stack.
536  __ bind(&global);
537  __ lw(a2, FieldMemOperand(a2, GlobalObject::kGlobalReceiverOffset));
538  __ sw(a2, MemOperand(sp, argc * kPointerSize));
539  __ bind(&invoke);
540  }
541  // Invoke the function.
542  CallKind call_kind = CallICBase::Contextual::decode(extra_state)
543  ? CALL_AS_FUNCTION
544  : CALL_AS_METHOD;
545  ParameterCount actual(argc);
546  __ InvokeFunction(a1,
547  actual,
548  JUMP_FUNCTION,
549  NullCallWrapper(),
550  call_kind);
551 }
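// Note (illustrative): Contextual::decode(extra_state) distinguishes a
// contextual call like 'foo()', invoked CALL_AS_FUNCTION with the patched
// global receiver, from a property call like 'o.foo()', which stays
// CALL_AS_METHOD; that is also why only CallIC, never KeyedCallIC, patches
// the receiver on the stack above.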
552 
553 
554 void CallIC::GenerateMegamorphic(MacroAssembler* masm,
555  int argc,
556  Code::ExtraICState extra_ic_state) {
557  // ----------- S t a t e -------------
558  // -- a2 : name
559  // -- ra : return address
560  // -----------------------------------
561 
562  // Get the receiver of the function from the stack into a1.
563  __ lw(a1, MemOperand(sp, argc * kPointerSize));
564  GenerateMonomorphicCacheProbe(masm, argc, Code::CALL_IC, extra_ic_state);
565  GenerateMiss(masm, argc, extra_ic_state);
566 }
567 
568 
569 void KeyedCallIC::GenerateMegamorphic(MacroAssembler* masm, int argc) {
570  // ----------- S t a t e -------------
571  // -- a2 : name
572  // -- ra : return address
573  // -----------------------------------
574 
575  // Get the receiver of the function from the stack into a1.
576  __ lw(a1, MemOperand(sp, argc * kPointerSize));
577 
578  Label do_call, slow_call, slow_load, slow_reload_receiver;
579  Label check_number_dictionary, check_string, lookup_monomorphic_cache;
580  Label index_smi, index_string;
581 
582  // Check that the key is a smi.
583  __ JumpIfNotSmi(a2, &check_string);
584  __ bind(&index_smi);
585  // Now the key is known to be a smi. This place is also jumped to from below
586  // where a numeric string is converted to a smi.
587 
588  GenerateKeyedLoadReceiverCheck(
589  masm, a1, a0, a3, Map::kHasIndexedInterceptor, &slow_call);
590 
591  GenerateFastArrayLoad(
592  masm, a1, a2, t0, a3, a0, a1, &check_number_dictionary, &slow_load);
593  Counters* counters = masm->isolate()->counters();
594  __ IncrementCounter(counters->keyed_call_generic_smi_fast(), 1, a0, a3);
595 
596  __ bind(&do_call);
597  // receiver in a1 is not used after this point.
598  // a2: key
599  // a1: function
600 
601  GenerateFunctionTailCall(masm, argc, &slow_call, a0);
602 
603  __ bind(&check_number_dictionary);
604  // a2: key
605  // a3: elements map
606  // t0: elements pointer
607  // Check whether the elements is a number dictionary.
608  __ LoadRoot(at, Heap::kHashTableMapRootIndex);
609  __ Branch(&slow_load, ne, a3, Operand(at));
610  __ sra(a0, a2, kSmiTagSize);
611  // a0: untagged index
612  __ LoadFromNumberDictionary(&slow_load, t0, a2, a1, a0, a3, t1);
613  __ IncrementCounter(counters->keyed_call_generic_smi_dict(), 1, a0, a3);
614  __ jmp(&do_call);
615 
616  __ bind(&slow_load);
617  // This branch is taken when calling KeyedCallIC_Miss is neither required
618  // nor beneficial.
619  __ IncrementCounter(counters->keyed_call_generic_slow_load(), 1, a0, a3);
620  {
621  FrameScope scope(masm, StackFrame::INTERNAL);
622  __ push(a2); // Save the key.
623  __ Push(a1, a2); // Pass the receiver and the key.
624  __ CallRuntime(Runtime::kKeyedGetProperty, 2);
625  __ pop(a2); // Restore the key.
626  }
627  __ mov(a1, v0);
628  __ jmp(&do_call);
629 
630  __ bind(&check_string);
631  GenerateKeyStringCheck(masm, a2, a0, a3, &index_string, &slow_call);
632 
633  // The key is known to be a symbol.
634  // If the receiver is a regular JS object with slow properties then do
635  // a quick inline probe of the receiver's dictionary.
636  // Otherwise do the monomorphic cache probe.
637  GenerateKeyedLoadReceiverCheck(
638  masm, a1, a0, a3, Map::kHasNamedInterceptor, &lookup_monomorphic_cache);
639 
640  __ lw(a0, FieldMemOperand(a1, JSObject::kPropertiesOffset));
641  __ lw(a3, FieldMemOperand(a0, HeapObject::kMapOffset));
642  __ LoadRoot(at, Heap::kHashTableMapRootIndex);
643  __ Branch(&lookup_monomorphic_cache, ne, a3, Operand(at));
644 
645  GenerateDictionaryLoad(masm, &slow_load, a0, a2, a1, a3, t0);
646  __ IncrementCounter(counters->keyed_call_generic_lookup_dict(), 1, a0, a3);
647  __ jmp(&do_call);
648 
649  __ bind(&lookup_monomorphic_cache);
650  __ IncrementCounter(counters->keyed_call_generic_lookup_cache(), 1, a0, a3);
651  GenerateMonomorphicCacheProbe(masm,
652  argc,
653  Code::KEYED_CALL_IC,
654  Code::kNoExtraICState);
655  // Fall through on miss.
656 
657  __ bind(&slow_call);
658  // This branch is taken if:
659  // - the receiver requires boxing or access check,
660  // - the key is neither smi nor symbol,
661  // - the value loaded is not a function,
662  // - there is hope that the runtime will create a monomorphic call stub,
663  // that will get fetched next time.
664  __ IncrementCounter(counters->keyed_call_generic_slow(), 1, a0, a3);
665  GenerateMiss(masm, argc);
666 
667  __ bind(&index_string);
668  __ IndexFromHash(a3, a2);
669  // Now jump to the place where smi keys are handled.
670  __ jmp(&index_smi);
671 }
672 
673 
674 void KeyedCallIC::GenerateNormal(MacroAssembler* masm, int argc) {
675  // ----------- S t a t e -------------
676  // -- a2 : name
677  // -- ra : return address
678  // -----------------------------------
679 
680  // Check if the name is a string.
681  Label miss;
682  __ JumpIfSmi(a2, &miss);
683  __ IsObjectJSStringType(a2, a0, &miss);
684 
685  CallICBase::GenerateNormal(masm, argc);
686  __ bind(&miss);
687  GenerateMiss(masm, argc);
688 }
689 
690 
691 // Defined in ic.cc.
692 Object* LoadIC_Miss(Arguments args);
693 
694 void LoadIC::GenerateMegamorphic(MacroAssembler* masm) {
695  // ----------- S t a t e -------------
696  // -- a2 : name
697  // -- ra : return address
698  // -- a0 : receiver
699  // -- sp[0] : receiver
700  // -----------------------------------
701 
702  // Probe the stub cache.
703  Code::Flags flags = Code::ComputeFlags(Code::LOAD_IC, MONOMORPHIC);
704  Isolate::Current()->stub_cache()->GenerateProbe(
705  masm, flags, a0, a2, a3, t0, t1, t2);
706 
707  // Cache miss: Jump to runtime.
708  GenerateMiss(masm);
709 }
710 
711 
712 void LoadIC::GenerateNormal(MacroAssembler* masm) {
713  // ----------- S t a t e -------------
714  // -- a2 : name
715  // -- ra : return address
716  // -- a0 : receiver
717  // -- sp[0] : receiver
718  // -----------------------------------
719  Label miss;
720 
721  GenerateStringDictionaryReceiverCheck(masm, a0, a1, a3, t0, &miss);
722 
723  // a1: elements
724  GenerateDictionaryLoad(masm, &miss, a1, a2, v0, a3, t0);
725  __ Ret();
726 
727  // Cache miss: Jump to runtime.
728  __ bind(&miss);
729  GenerateMiss(masm);
730 }
731 
732 
733 void LoadIC::GenerateMiss(MacroAssembler* masm) {
734  // ----------- S t a t e -------------
735  // -- a2 : name
736  // -- ra : return address
737  // -- a0 : receiver
738  // -- sp[0] : receiver
739  // -----------------------------------
740  Isolate* isolate = masm->isolate();
741 
742  __ IncrementCounter(isolate->counters()->keyed_load_miss(), 1, a3, t0);
743 
744  __ mov(a3, a0);
745  __ Push(a3, a2);
746 
747  // Perform tail call to the entry.
748  ExternalReference ref = ExternalReference(IC_Utility(kLoadIC_Miss), isolate);
749  __ TailCallExternalReference(ref, 2, 1);
750 }
751 
752 
753 static MemOperand GenerateMappedArgumentsLookup(MacroAssembler* masm,
754  Register object,
755  Register key,
756  Register scratch1,
757  Register scratch2,
758  Register scratch3,
759  Label* unmapped_case,
760  Label* slow_case) {
761  // Check that the receiver is a JSObject. Because of the map check
762  // later, we do not need to check for interceptors or whether it
763  // requires access checks.
764  __ JumpIfSmi(object, slow_case);
765  // Check that the object is some kind of JSObject.
766  __ GetObjectType(object, scratch1, scratch2);
767  __ Branch(slow_case, lt, scratch2, Operand(FIRST_JS_RECEIVER_TYPE));
768 
769  // Check that the key is a positive smi.
770  __ And(scratch1, key, Operand(0x80000001));
771  __ Branch(slow_case, ne, scratch1, Operand(zero_reg));
772 
773  // Load the elements into scratch1 and check its map.
774  __ lw(scratch1, FieldMemOperand(object, JSObject::kElementsOffset));
775  __ CheckMap(scratch1,
776  scratch2,
777  Heap::kNonStrictArgumentsElementsMapRootIndex,
778  slow_case,
779  DONT_DO_SMI_CHECK);
780  // Check if element is in the range of mapped arguments. If not, jump
781  // to the unmapped lookup with the parameter map in scratch1.
782  __ lw(scratch2, FieldMemOperand(scratch1, FixedArray::kLengthOffset));
783  __ Subu(scratch2, scratch2, Operand(Smi::FromInt(2)));
784  __ Branch(unmapped_case, Ugreater_equal, key, Operand(scratch2));
785 
786  // Load element index and check whether it is the hole.
787  const int kOffset =
788  FixedArray::kHeaderSize + 2 * kPointerSize - kHeapObjectTag;
789 
790  __ li(scratch3, Operand(kPointerSize >> 1));
791  __ Mul(scratch3, key, scratch3);
792  __ Addu(scratch3, scratch3, Operand(kOffset));
793 
794  __ Addu(scratch2, scratch1, scratch3);
795  __ lw(scratch2, MemOperand(scratch2));
796  __ LoadRoot(scratch3, Heap::kTheHoleValueRootIndex);
797  __ Branch(unmapped_case, eq, scratch2, Operand(scratch3));
798 
799  // Load value from context and return it. We can reuse scratch1 because
800  // we do not jump to the unmapped lookup (which requires the parameter
801  // map in scratch1).
802  __ lw(scratch1, FieldMemOperand(scratch1, FixedArray::kHeaderSize));
803  __ li(scratch3, Operand(kPointerSize >> 1));
804  __ Mul(scratch3, scratch2, scratch3);
805  __ Addu(scratch3, scratch3, Operand(Context::kHeaderSize - kHeapObjectTag));
806  __ Addu(scratch2, scratch1, scratch3);
807  return MemOperand(scratch2);
808 }
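// Illustrative layout (not from this file) of the parameter map walked
// above: elements[0] holds the context, elements[1] the arguments backing
// store, and elements[2 + i] holds either the_hole or a context slot index
// for mapped parameter i. That is why the length is adjusted by
// Smi::FromInt(2) and the per-key offset starts at
// FixedArray::kHeaderSize + 2 * kPointerSize.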
809 
810 
811 static MemOperand GenerateUnmappedArgumentsLookup(MacroAssembler* masm,
812  Register key,
813  Register parameter_map,
814  Register scratch,
815  Label* slow_case) {
816  // Element is in arguments backing store, which is referenced by the
817  // second element of the parameter_map. The parameter_map register
818  // must be loaded with the parameter map of the arguments object and is
819  // overwritten.
820  const int kBackingStoreOffset = FixedArray::kHeaderSize + kPointerSize;
821  Register backing_store = parameter_map;
822  __ lw(backing_store, FieldMemOperand(parameter_map, kBackingStoreOffset));
823  __ CheckMap(backing_store,
824  scratch,
825  Heap::kFixedArrayMapRootIndex,
826  slow_case,
827  DONT_DO_SMI_CHECK);
828  __ lw(scratch, FieldMemOperand(backing_store, FixedArray::kLengthOffset));
829  __ Branch(slow_case, Ugreater_equal, key, Operand(scratch));
830  __ li(scratch, Operand(kPointerSize >> 1));
831  __ Mul(scratch, key, scratch);
832  __ Addu(scratch,
833  scratch,
834  Operand(FixedArray::kHeaderSize - kHeapObjectTag));
835  __ Addu(scratch, backing_store, scratch);
836  return MemOperand(scratch);
837 }
838 
839 
840 void KeyedLoadIC::GenerateNonStrictArguments(MacroAssembler* masm) {
841  // ---------- S t a t e --------------
842  // -- ra : return address
843  // -- a0 : key
844  // -- a1 : receiver
845  // -----------------------------------
846  Label slow, notin;
847  MemOperand mapped_location =
848  GenerateMappedArgumentsLookup(masm, a1, a0, a2, a3, t0, &notin, &slow);
849  __ Ret(USE_DELAY_SLOT);
850  __ lw(v0, mapped_location);
851  __ bind(&notin);
852  // The unmapped lookup expects that the parameter map is in a2.
853  MemOperand unmapped_location =
854  GenerateUnmappedArgumentsLookup(masm, a0, a2, a3, &slow);
855  __ lw(a2, unmapped_location);
856  __ LoadRoot(a3, Heap::kTheHoleValueRootIndex);
857  __ Branch(&slow, eq, a2, Operand(a3));
858  __ Ret(USE_DELAY_SLOT);
859  __ mov(v0, a2);
860  __ bind(&slow);
861  GenerateMiss(masm, false);
862 }
863 
864 
865 void KeyedStoreIC::GenerateNonStrictArguments(MacroAssembler* masm) {
866  // ---------- S t a t e --------------
867  // -- a0 : value
868  // -- a1 : key
869  // -- a2 : receiver
870  // -- ra : return address
871  // -----------------------------------
872  Label slow, notin;
873  // Store address is returned in register (of MemOperand) mapped_location.
874  MemOperand mapped_location =
875  GenerateMappedArgumentsLookup(masm, a2, a1, a3, t0, t1, &notin, &slow);
876  __ sw(a0, mapped_location);
877  __ mov(t5, a0);
878  ASSERT_EQ(mapped_location.offset(), 0);
879  __ RecordWrite(a3, mapped_location.rm(), t5,
880  kRAHasNotBeenSaved, kDontSaveFPRegs);
881  __ Ret(USE_DELAY_SLOT);
882  __ mov(v0, a0); // (In delay slot) return the value stored in v0.
883  __ bind(&notin);
884  // The unmapped lookup expects that the parameter map is in a3.
885  // Store address is returned in register (of MemOperand) unmapped_location.
886  MemOperand unmapped_location =
887  GenerateUnmappedArgumentsLookup(masm, a1, a3, t0, &slow);
888  __ sw(a0, unmapped_location);
889  __ mov(t5, a0);
890  ASSERT_EQ(unmapped_location.offset(), 0);
891  __ RecordWrite(a3, unmapped_location.rm(), t5,
892  kRAHasNotBeenSaved, kDontSaveFPRegs);
893  __ Ret(USE_DELAY_SLOT);
894  __ mov(v0, a0); // (In delay slot) return the value stored in v0.
895  __ bind(&slow);
896  GenerateMiss(masm, false);
897 }
898 
899 
900 void KeyedCallIC::GenerateNonStrictArguments(MacroAssembler* masm,
901  int argc) {
902  // ----------- S t a t e -------------
903  // -- a2 : name
904  // -- ra : return address
905  // -----------------------------------
906  Label slow, notin;
907  // Load receiver.
908  __ lw(a1, MemOperand(sp, argc * kPointerSize));
909  MemOperand mapped_location =
910  GenerateMappedArgumentsLookup(masm, a1, a2, a3, t0, t1, &notin, &slow);
911  __ lw(a1, mapped_location);
912  GenerateFunctionTailCall(masm, argc, &slow, a3);
913  __ bind(&notin);
914  // The unmapped lookup expects that the parameter map is in a3.
915  MemOperand unmapped_location =
916  GenerateUnmappedArgumentsLookup(masm, a2, a3, t0, &slow);
917  __ lw(a1, unmapped_location);
918  __ LoadRoot(a3, Heap::kTheHoleValueRootIndex);
919  __ Branch(&slow, eq, a1, Operand(a3));
920  GenerateFunctionTailCall(masm, argc, &slow, a3);
921  __ bind(&slow);
922  GenerateMiss(masm, argc);
923 }
924 
925 
926 Object* KeyedLoadIC_Miss(Arguments args);
927 
928 
929 void KeyedLoadIC::GenerateMiss(MacroAssembler* masm, bool force_generic) {
930  // ---------- S t a t e --------------
931  // -- ra : return address
932  // -- a0 : key
933  // -- a1 : receiver
934  // -----------------------------------
935  Isolate* isolate = masm->isolate();
936 
937  __ IncrementCounter(isolate->counters()->keyed_load_miss(), 1, a3, t0);
938 
939  __ Push(a1, a0);
940 
941  // Perform tail call to the entry.
942  ExternalReference ref = force_generic
943  ? ExternalReference(IC_Utility(kKeyedLoadIC_MissForceGeneric), isolate)
944  : ExternalReference(IC_Utility(kKeyedLoadIC_Miss), isolate);
945 
946  __ TailCallExternalReference(ref, 2, 1);
947 }
948 
949 
950 void KeyedLoadIC::GenerateRuntimeGetProperty(MacroAssembler* masm) {
951  // ---------- S t a t e --------------
952  // -- ra : return address
953  // -- a0 : key
954  // -- a1 : receiver
955  // -----------------------------------
956 
957  __ Push(a1, a0);
958 
959  __ TailCallRuntime(Runtime::kKeyedGetProperty, 2, 1);
960 }
961 
962 
963 void KeyedLoadIC::GenerateGeneric(MacroAssembler* masm) {
964  // ---------- S t a t e --------------
965  // -- ra : return address
966  // -- a0 : key
967  // -- a1 : receiver
968  // -----------------------------------
969  Label slow, check_string, index_smi, index_string, property_array_property;
970  Label probe_dictionary, check_number_dictionary;
971 
972  Register key = a0;
973  Register receiver = a1;
974 
975  Isolate* isolate = masm->isolate();
976 
977  // Check that the key is a smi.
978  __ JumpIfNotSmi(key, &check_string);
979  __ bind(&index_smi);
980  // Now the key is known to be a smi. This place is also jumped to from below
981  // where a numeric string is converted to a smi.
982 
983  GenerateKeyedLoadReceiverCheck(
984  masm, receiver, a2, a3, Map::kHasIndexedInterceptor, &slow);
985 
986  // Check the receiver's map to see if it has fast elements.
987  __ CheckFastElements(a2, a3, &check_number_dictionary);
988 
989  GenerateFastArrayLoad(
990  masm, receiver, key, t0, a3, a2, v0, NULL, &slow);
991 
992  __ IncrementCounter(isolate->counters()->keyed_load_generic_smi(), 1, a2, a3);
993  __ Ret();
994 
995  __ bind(&check_number_dictionary);
996  __ lw(t0, FieldMemOperand(receiver, JSObject::kElementsOffset));
997  __ lw(a3, FieldMemOperand(t0, JSObject::kMapOffset));
998 
999  // Check whether the elements is a number dictionary.
1000  // a0: key
1001  // a3: elements map
1002  // t0: elements
1003  __ LoadRoot(at, Heap::kHashTableMapRootIndex);
1004  __ Branch(&slow, ne, a3, Operand(at));
1005  __ sra(a2, a0, kSmiTagSize);
1006  __ LoadFromNumberDictionary(&slow, t0, a0, v0, a2, a3, t1);
1007  __ Ret();
1008 
1009  // Slow case, key and receiver still in a0 and a1.
1010  __ bind(&slow);
1011  __ IncrementCounter(isolate->counters()->keyed_load_generic_slow(),
1012  1,
1013  a2,
1014  a3);
1015  GenerateRuntimeGetProperty(masm);
1016 
1017  __ bind(&check_string);
1018  GenerateKeyStringCheck(masm, key, a2, a3, &index_string, &slow);
1019 
1020  GenerateKeyedLoadReceiverCheck(
1021  masm, receiver, a2, a3, Map::kHasNamedInterceptor, &slow);
1022 
1023 
1024  // If the receiver is a fast-case object, check the keyed lookup
1025  // cache. Otherwise probe the dictionary.
1026  __ lw(a3, FieldMemOperand(a1, JSObject::kPropertiesOffset));
1027  __ lw(t0, FieldMemOperand(a3, HeapObject::kMapOffset));
1028  __ LoadRoot(at, Heap::kHashTableMapRootIndex);
1029  __ Branch(&probe_dictionary, eq, t0, Operand(at));
1030 
1031  // Load the map of the receiver, compute the keyed lookup cache hash
1032  // based on 32 bits of the map pointer and the string hash.
1033  __ lw(a2, FieldMemOperand(a1, HeapObject::kMapOffset));
1034  __ sra(a3, a2, KeyedLookupCache::kMapHashShift);
1035  __ lw(t0, FieldMemOperand(a0, String::kHashFieldOffset));
1036  __ sra(at, t0, String::kHashShift);
1037  __ xor_(a3, a3, at);
1038  int mask = KeyedLookupCache::kCapacityMask & KeyedLookupCache::kHashMask;
1039  __ And(a3, a3, Operand(mask));
1040 
1041  // Load the key (consisting of map and symbol) from the cache and
1042  // check for match.
1043  Label load_in_object_property;
1044  static const int kEntriesPerBucket = KeyedLookupCache::kEntriesPerBucket;
1045  Label hit_on_nth_entry[kEntriesPerBucket];
1046  ExternalReference cache_keys =
1047  ExternalReference::keyed_lookup_cache_keys(isolate);
1048  __ li(t0, Operand(cache_keys));
1049  __ sll(at, a3, kPointerSizeLog2 + 1);
1050  __ addu(t0, t0, at);
1051 
1052  for (int i = 0; i < kEntriesPerBucket - 1; i++) {
1053  Label try_next_entry;
1054  __ lw(t1, MemOperand(t0, kPointerSize * i * 2));
1055  __ Branch(&try_next_entry, ne, a2, Operand(t1));
1056  __ lw(t1, MemOperand(t0, kPointerSize * (i * 2 + 1)));
1057  __ Branch(&hit_on_nth_entry[i], eq, a0, Operand(t1));
1058  __ bind(&try_next_entry);
1059  }
1060 
1061  __ lw(t1, MemOperand(t0, kPointerSize * (kEntriesPerBucket - 1) * 2));
1062  __ Branch(&slow, ne, a2, Operand(t1));
1063  __ lw(t1, MemOperand(t0, kPointerSize * ((kEntriesPerBucket - 1) * 2 + 1)));
1064  __ Branch(&slow, ne, a0, Operand(t1));
1065 
1066  // Get field offset.
1067  // a0 : key
1068  // a1 : receiver
1069  // a2 : receiver's map
1070  // a3 : lookup cache index
1071  ExternalReference cache_field_offsets =
1072  ExternalReference::keyed_lookup_cache_field_offsets(isolate);
1073 
1074  // Hit on nth entry.
1075  for (int i = kEntriesPerBucket - 1; i >= 0; i--) {
1076  __ bind(&hit_on_nth_entry[i]);
1077  __ li(t0, Operand(cache_field_offsets));
1078  __ sll(at, a3, kPointerSizeLog2);
1079  __ addu(at, t0, at);
1080  __ lw(t1, MemOperand(at, kPointerSize * i));
1081  __ lbu(t2, FieldMemOperand(a2, Map::kInObjectPropertiesOffset));
1082  __ Subu(t1, t1, t2);
1083  __ Branch(&property_array_property, ge, t1, Operand(zero_reg));
1084  if (i != 0) {
1085  __ Branch(&load_in_object_property);
1086  }
1087  }
1088 
1089  // Load in-object property.
1090  __ bind(&load_in_object_property);
1091  __ lbu(t2, FieldMemOperand(a2, Map::kInstanceSizeOffset));
1092  __ addu(t2, t2, t1); // Index from start of object.
1093  __ Subu(a1, a1, Operand(kHeapObjectTag)); // Remove the heap tag.
1094  __ sll(at, t2, kPointerSizeLog2);
1095  __ addu(at, a1, at);
1096  __ lw(v0, MemOperand(at));
1097  __ IncrementCounter(isolate->counters()->keyed_load_generic_lookup_cache(),
1098  1,
1099  a2,
1100  a3);
1101  __ Ret();
1102 
1103  // Load property array property.
1104  __ bind(&property_array_property);
1105  __ lw(a1, FieldMemOperand(a1, JSObject::kPropertiesOffset));
1106  __ Addu(a1, a1, FixedArray::kHeaderSize - kHeapObjectTag);
1107  __ sll(t0, t1, kPointerSizeLog2);
1108  __ Addu(t0, t0, a1);
1109  __ lw(v0, MemOperand(t0));
1110  __ IncrementCounter(isolate->counters()->keyed_load_generic_lookup_cache(),
1111  1,
1112  a2,
1113  a3);
1114  __ Ret();
1115 
1116 
1117  // Do a quick inline probe of the receiver's dictionary, if it
1118  // exists.
1119  __ bind(&probe_dictionary);
1120  // a1: receiver
1121  // a0: key
1122  // a3: elements
1123  __ lw(a2, FieldMemOperand(a1, HeapObject::kMapOffset));
1124  __ lbu(a2, FieldMemOperand(a2, Map::kInstanceTypeOffset));
1125  GenerateGlobalInstanceTypeCheck(masm, a2, &slow);
1126  // Load the property to v0.
1127  GenerateDictionaryLoad(masm, &slow, a3, a0, v0, a2, t0);
1128  __ IncrementCounter(isolate->counters()->keyed_load_generic_symbol(),
1129  1,
1130  a2,
1131  a3);
1132  __ Ret();
1133 
1134  __ bind(&index_string);
1135  __ IndexFromHash(a3, key);
1136  // Now jump to the place where smi keys are handled.
1137  __ Branch(&index_smi);
1138 }
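// Sketch (illustrative) of the lookup-cache hash computed above, with 'map'
// the receiver map address and 'field' the string hash field:
//
//   index = ((map >> KeyedLookupCache::kMapHashShift) ^
//            (field >> String::kHashShift)) &
//           (KeyedLookupCache::kCapacityMask & KeyedLookupCache::kHashMask);
//
// Each bucket holds kEntriesPerBucket (map, symbol) pairs that are compared
// in turn before falling back to the dictionary probe.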
1139 
1140 
1141 void KeyedLoadIC::GenerateString(MacroAssembler* masm) {
1142  // ---------- S t a t e --------------
1143  // -- ra : return address
1144  // -- a0 : key (index)
1145  // -- a1 : receiver
1146  // -----------------------------------
1147  Label miss;
1148 
1149  Register receiver = a1;
1150  Register index = a0;
1151  Register scratch = a3;
1152  Register result = v0;
1153 
1154  StringCharAtGenerator char_at_generator(receiver,
1155  index,
1156  scratch,
1157  result,
1158  &miss, // When not a string.
1159  &miss, // When not a number.
1160  &miss, // When index out of range.
1161  STRING_INDEX_IS_ARRAY_INDEX);
1162  char_at_generator.GenerateFast(masm);
1163  __ Ret();
1164 
1165  StubRuntimeCallHelper call_helper;
1166  char_at_generator.GenerateSlow(masm, call_helper);
1167 
1168  __ bind(&miss);
1169  GenerateMiss(masm, false);
1170 }
1171 
1172 
1173 void KeyedStoreIC::GenerateRuntimeSetProperty(MacroAssembler* masm,
1174  StrictModeFlag strict_mode) {
1175  // ---------- S t a t e --------------
1176  // -- a0 : value
1177  // -- a1 : key
1178  // -- a2 : receiver
1179  // -- ra : return address
1180  // -----------------------------------
1181 
1182  // Push receiver, key and value for runtime call.
1183  __ Push(a2, a1, a0);
1184  __ li(a1, Operand(Smi::FromInt(NONE))); // PropertyAttributes.
1185  __ li(a0, Operand(Smi::FromInt(strict_mode))); // Strict mode.
1186  __ Push(a1, a0);
1187 
1188  __ TailCallRuntime(Runtime::kSetProperty, 5, 1);
1189 }
1190 
1191 
1192 void KeyedStoreIC::GenerateGeneric(MacroAssembler* masm,
1193  StrictModeFlag strict_mode) {
1194  // ---------- S t a t e --------------
1195  // -- a0 : value
1196  // -- a1 : key
1197  // -- a2 : receiver
1198  // -- ra : return address
1199  // -----------------------------------
1200  Label slow, array, extra, check_if_double_array;
1201  Label fast_object_with_map_check, fast_object_without_map_check;
1202  Label fast_double_with_map_check, fast_double_without_map_check;
1203  Label transition_smi_elements, finish_object_store, non_double_value;
1204  Label transition_double_elements;
1205 
1206  // Register usage.
1207  Register value = a0;
1208  Register key = a1;
1209  Register receiver = a2;
1210  Register receiver_map = a3;
1211  Register elements_map = t2;
1212  Register elements = t3; // Elements array of the receiver.
1213  // t0 and t1 are used as general scratch registers.
1214 
1215  // Check that the key is a smi.
1216  __ JumpIfNotSmi(key, &slow);
1217  // Check that the object isn't a smi.
1218  __ JumpIfSmi(receiver, &slow);
1219  // Get the map of the object.
1220  __ lw(receiver_map, FieldMemOperand(receiver, HeapObject::kMapOffset));
1221  // Check that the receiver does not require access checks. We need
1222  // to do this because this generic stub does not perform map checks.
1223  __ lbu(t0, FieldMemOperand(receiver_map, Map::kBitFieldOffset));
1224  __ And(t0, t0, Operand(1 << Map::kIsAccessCheckNeeded));
1225  __ Branch(&slow, ne, t0, Operand(zero_reg));
1226  // Check if the object is a JS array or not.
1227  __ lbu(t0, FieldMemOperand(receiver_map, Map::kInstanceTypeOffset));
1228  __ Branch(&array, eq, t0, Operand(JS_ARRAY_TYPE));
1229  // Check that the object is some kind of JSObject.
1230  __ Branch(&slow, lt, t0, Operand(FIRST_JS_OBJECT_TYPE));
1231 
1232  // Object case: Check key against length in the elements array.
1233  __ lw(elements, FieldMemOperand(receiver, JSObject::kElementsOffset));
1234  // Check array bounds. Both the key and the length of FixedArray are smis.
1235  __ lw(t0, FieldMemOperand(elements, FixedArray::kLengthOffset));
1236  __ Branch(&fast_object_with_map_check, lo, key, Operand(t0));
1237 
1238  // Slow case, handle jump to runtime.
1239  __ bind(&slow);
1240  // Entry registers are intact.
1241  // a0: value.
1242  // a1: key.
1243  // a2: receiver.
1244  GenerateRuntimeSetProperty(masm, strict_mode);
1245 
1246  // Extra capacity case: Check if there is extra capacity to
1247  // perform the store and update the length. Used for adding one
1248  // element to the array by writing to array[array.length].
1249  __ bind(&extra);
1250  // Condition code from comparing key and array length is still available.
1251  // Only support writing to array[array.length].
1252  __ Branch(&slow, ne, key, Operand(t0));
1253  // Check for room in the elements backing store.
1254  // Both the key and the length of FixedArray are smis.
1255  __ lw(t0, FieldMemOperand(elements, FixedArray::kLengthOffset));
1256  __ Branch(&slow, hs, key, Operand(t0));
1257  __ lw(elements_map, FieldMemOperand(elements, HeapObject::kMapOffset));
1258  __ Branch(
1259  &check_if_double_array, ne, elements_map, Heap::kFixedArrayMapRootIndex);
1260 
1261  // Calculate key + 1 as smi.
1262  STATIC_ASSERT(kSmiTag == 0);
1263  __ Addu(t0, key, Operand(Smi::FromInt(1)));
1264  __ sw(t0, FieldMemOperand(receiver, JSArray::kLengthOffset));
1265  __ Branch(&fast_object_without_map_check);
1266 
1267  __ bind(&check_if_double_array);
1268  __ Branch(&slow, ne, elements_map, Heap::kFixedDoubleArrayMapRootIndex);
1269  // Add 1 to key, and go to common element store code for doubles.
1270  STATIC_ASSERT(kSmiTag == 0);
1271  __ Addu(t0, key, Operand(Smi::FromInt(1)));
1272  __ sw(t0, FieldMemOperand(receiver, JSArray::kLengthOffset));
1273  __ jmp(&fast_double_without_map_check);
1274 
1275  // Array case: Get the length and the elements array from the JS
1276  // array. Check that the array is in fast mode (and writable); if it
1277  // is the length is always a smi.
1278  __ bind(&array);
1279  __ lw(elements, FieldMemOperand(receiver, JSObject::kElementsOffset));
1280 
1281  // Check the key against the length in the array.
1282  __ lw(t0, FieldMemOperand(receiver, JSArray::kLengthOffset));
1283  __ Branch(&extra, hs, key, Operand(t0));
1284  // Fall through to fast case.
1285 
1286  __ bind(&fast_object_with_map_check);
1287  Register scratch_value = t0;
1288  Register address = t1;
1289  __ lw(elements_map, FieldMemOperand(elements, HeapObject::kMapOffset));
1290  __ Branch(&fast_double_with_map_check,
1291  ne,
1292  elements_map,
1293  Heap::kFixedArrayMapRootIndex);
1294  __ bind(&fast_object_without_map_check);
1295  // Smi stores don't require further checks.
1296  Label non_smi_value;
1297  __ JumpIfNotSmi(value, &non_smi_value);
1298  // It's irrelevant whether array is smi-only or not when writing a smi.
1299  __ Addu(address, elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
1300  __ sll(scratch_value, key, kPointerSizeLog2 - kSmiTagSize);
1301  __ Addu(address, address, scratch_value);
1302  __ sw(value, MemOperand(address));
1303  __ Ret(USE_DELAY_SLOT);
1304  __ mov(v0, value);
1305 
1306  __ bind(&non_smi_value);
1307  // Escape to elements kind transition case.
1308  __ CheckFastObjectElements(receiver_map, scratch_value,
1309  &transition_smi_elements);
1310  // Fast elements array, store the value to the elements backing store.
1311  __ bind(&finish_object_store);
1312  __ Addu(address, elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
1313  __ sll(scratch_value, key, kPointerSizeLog2 - kSmiTagSize);
1314  __ Addu(address, address, scratch_value);
1315  __ sw(value, MemOperand(address));
1316  // Update write barrier for the elements array address.
1317  __ mov(v0, value); // Preserve the value which is returned.
1318  __ RecordWrite(elements,
1319  address,
1320  value,
1321  kRAHasNotBeenSaved,
1322  kDontSaveFPRegs,
1323  EMIT_REMEMBERED_SET,
1324  OMIT_SMI_CHECK);
1325  __ Ret();
1326 
1327  __ bind(&fast_double_with_map_check);
1328  // Check for fast double array case. If this fails, call through to the
1329  // runtime.
1330  __ Branch(&slow, ne, elements_map, Heap::kFixedDoubleArrayMapRootIndex);
1331  __ bind(&fast_double_without_map_check);
1332  __ StoreNumberToDoubleElements(value,
1333  key,
1334  receiver,
1335  elements,
1336  a3,
1337  t0,
1338  t1,
1339  t2,
1340  &transition_double_elements);
1341  __ Ret(USE_DELAY_SLOT);
1342  __ mov(v0, value);
1343 
1344  __ bind(&transition_smi_elements);
1345  // Transition the array appropriately depending on the value type.
1346  __ lw(t0, FieldMemOperand(value, HeapObject::kMapOffset));
1347  __ LoadRoot(at, Heap::kHeapNumberMapRootIndex);
1348  __ Branch(&non_double_value, ne, t0, Operand(at));
1349 
1350 
1351  // Value is a double. Transition FAST_SMI_ELEMENTS -> FAST_DOUBLE_ELEMENTS
1352  // and complete the store.
1353  __ LoadTransitionedArrayMapConditional(FAST_SMI_ELEMENTS,
1354  FAST_DOUBLE_ELEMENTS,
1355  receiver_map,
1356  t0,
1357  &slow);
1358  ASSERT(receiver_map.is(a3)); // Transition code expects map in a3
1359  ElementsTransitionGenerator::GenerateSmiToDouble(masm, &slow);
1360  __ lw(elements, FieldMemOperand(receiver, JSObject::kElementsOffset));
1361  __ jmp(&fast_double_without_map_check);
1362 
1363  __ bind(&non_double_value);
1364  // Value is not a double, FAST_SMI_ELEMENTS -> FAST_ELEMENTS
1365  __ LoadTransitionedArrayMapConditional(FAST_SMI_ELEMENTS,
1366  FAST_ELEMENTS,
1367  receiver_map,
1368  t0,
1369  &slow);
1370  ASSERT(receiver_map.is(a3)); // Transition code expects map in a3
1371  ElementsTransitionGenerator::GenerateMapChangeElementsTransition(masm);
1372  __ lw(elements, FieldMemOperand(receiver, JSObject::kElementsOffset));
1373  __ jmp(&finish_object_store);
1374 
1375  __ bind(&transition_double_elements);
1376  // Elements are double, but value is an Object that's not a HeapNumber. Make
1377  // sure that the receiver is an Array with Object elements and transition
1378  // the array from double elements to Object elements.
1379  __ LoadTransitionedArrayMapConditional(FAST_DOUBLE_ELEMENTS,
1380  FAST_ELEMENTS,
1381  receiver_map,
1382  t0,
1383  &slow);
1384  ASSERT(receiver_map.is(a3)); // Transition code expects map in a3
1385  ElementsTransitionGenerator::GenerateDoubleToObject(masm, &slow);
1386  __ lw(elements, FieldMemOperand(receiver, JSObject::kElementsOffset));
1387  __ jmp(&finish_object_store);
1388 }
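// Transition summary (illustrative): the three tails above implement
// FAST_SMI_ELEMENTS -> FAST_DOUBLE_ELEMENTS (storing a heap number),
// FAST_SMI_ELEMENTS -> FAST_ELEMENTS (storing any other non-smi), and
// FAST_DOUBLE_ELEMENTS -> FAST_ELEMENTS (storing a non-number into a double
// array). Each reloads 'elements' afterwards because the transition may
// reallocate the backing store.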
1389 
1390 
1391 void KeyedLoadIC::GenerateIndexedInterceptor(MacroAssembler* masm) {
1392  // ---------- S t a t e --------------
1393  // -- ra : return address
1394  // -- a0 : key
1395  // -- a1 : receiver
1396  // -----------------------------------
1397  Label slow;
1398 
1399  // Check that the receiver isn't a smi.
1400  __ JumpIfSmi(a1, &slow);
1401 
1402  // Check that the key is an array index, that is Uint32.
1403  __ And(t0, a0, Operand(kSmiTagMask | kSmiSignMask));
1404  __ Branch(&slow, ne, t0, Operand(zero_reg));
1405 
1406  // Get the map of the receiver.
1407  __ lw(a2, FieldMemOperand(a1, HeapObject::kMapOffset));
1408 
1409  // Check that it has indexed interceptor and access checks
1410  // are not enabled for this object.
1411  __ lbu(a3, FieldMemOperand(a2, Map::kBitFieldOffset));
1412  __ And(a3, a3, Operand(kSlowCaseBitFieldMask));
1413  __ Branch(&slow, ne, a3, Operand(1 << Map::kHasIndexedInterceptor));
1414  // Everything is fine, call runtime.
1415  __ Push(a1, a0); // Receiver, key.
1416 
1417  // Perform tail call to the entry.
1418  __ TailCallExternalReference(ExternalReference(
1419  IC_Utility(kKeyedLoadPropertyWithInterceptor), masm->isolate()), 2, 1);
1420 
1421  __ bind(&slow);
1422  GenerateMiss(masm, false);
1423 }
1424 
1425 
1426 void KeyedStoreIC::GenerateMiss(MacroAssembler* masm, bool force_generic) {
1427  // ---------- S t a t e --------------
1428  // -- a0 : value
1429  // -- a1 : key
1430  // -- a2 : receiver
1431  // -- ra : return address
1432  // -----------------------------------
1433 
1434  // Push receiver, key and value for runtime call.
1435  __ Push(a2, a1, a0);
1436 
1437  ExternalReference ref = force_generic
1438  ? ExternalReference(IC_Utility(kKeyedStoreIC_MissForceGeneric),
1439  masm->isolate())
1440  : ExternalReference(IC_Utility(kKeyedStoreIC_Miss), masm->isolate());
1441  __ TailCallExternalReference(ref, 3, 1);
1442 }
1443 
1444 
1445 void KeyedStoreIC::GenerateSlow(MacroAssembler* masm) {
1446  // ---------- S t a t e --------------
1447  // -- a0 : value
1448  // -- a1 : key
1449  // -- a2 : receiver
1450  // -- ra : return address
1451  // -----------------------------------
1452 
1453  // Push receiver, key and value for runtime call.
1454  // We can't use MultiPush as the order of the registers is important.
1455  __ Push(a2, a1, a0);
1456 
1457  // The slow case calls into the runtime to complete the store without causing
1458  // an IC miss that would otherwise cause a transition to the generic stub.
1459  ExternalReference ref =
1460  ExternalReference(IC_Utility(kKeyedStoreIC_Slow), masm->isolate());
1461 
1462  __ TailCallExternalReference(ref, 3, 1);
1463 }
1464 
1465 
1466 void KeyedStoreIC::GenerateTransitionElementsSmiToDouble(MacroAssembler* masm) {
1467  // ---------- S t a t e --------------
1468  // -- a2 : receiver
1469  // -- a3 : target map
1470  // -- ra : return address
1471  // -----------------------------------
1472  // Must return the modified receiver in v0.
1473  if (!FLAG_trace_elements_transitions) {
1474  Label fail;
1475  ElementsTransitionGenerator::GenerateSmiToDouble(masm, &fail);
1476  __ Ret(USE_DELAY_SLOT);
1477  __ mov(v0, a2);
1478  __ bind(&fail);
1479  }
1480 
1481  __ push(a2);
1482  __ TailCallRuntime(Runtime::kTransitionElementsSmiToDouble, 1, 1);
1483 }
1484 
1485 
1486 void KeyedStoreIC::GenerateTransitionElementsDoubleToObject(
1487  MacroAssembler* masm) {
1488  // ---------- S t a t e --------------
1489  // -- a2 : receiver
1490  // -- a3 : target map
1491  // -- ra : return address
1492  // -----------------------------------
1493  // Must return the modified receiver in v0.
1494  if (!FLAG_trace_elements_transitions) {
1495  Label fail;
1496  ElementsTransitionGenerator::GenerateDoubleToObject(masm, &fail);
1497  __ Ret(USE_DELAY_SLOT);
1498  __ mov(v0, a2);
1499  __ bind(&fail);
1500  }
1501 
1502  __ push(a2);
1503  __ TailCallRuntime(Runtime::kTransitionElementsDoubleToObject, 1, 1);
1504 }
1505 
1506 
1507 void StoreIC::GenerateMegamorphic(MacroAssembler* masm,
1508  StrictModeFlag strict_mode) {
1509  // ----------- S t a t e -------------
1510  // -- a0 : value
1511  // -- a1 : receiver
1512  // -- a2 : name
1513  // -- ra : return address
1514  // -----------------------------------
1515 
1516  // Get the receiver from the stack and probe the stub cache.
1517  Code::Flags flags =
1518  Code::ComputeFlags(Code::STORE_IC, MONOMORPHIC, strict_mode);
1519  Isolate::Current()->stub_cache()->GenerateProbe(
1520  masm, flags, a1, a2, a3, t0, t1, t2);
1521 
1522  // Cache miss: Jump to runtime.
1523  GenerateMiss(masm);
1524 }
1525 
1526 
1527 void StoreIC::GenerateMiss(MacroAssembler* masm) {
1528  // ----------- S t a t e -------------
1529  // -- a0 : value
1530  // -- a1 : receiver
1531  // -- a2 : name
1532  // -- ra : return address
1533  // -----------------------------------
1534 
1535  __ Push(a1, a2, a0);
1536  // Perform tail call to the entry.
1537  ExternalReference ref = ExternalReference(IC_Utility(kStoreIC_Miss),
1538  masm->isolate());
1539  __ TailCallExternalReference(ref, 3, 1);
1540 }
1541 
1542 
1543 void StoreIC::GenerateArrayLength(MacroAssembler* masm) {
1544  // ----------- S t a t e -------------
1545  // -- a0 : value
1546  // -- a1 : receiver
1547  // -- a2 : name
1548  // -- ra : return address
1549  // -----------------------------------
1550  //
1551  // This accepts as a receiver anything JSArray::SetElementsLength accepts
1552  // (currently anything except for external arrays which means anything with
1553  // elements of FixedArray type). Value must be a number, but only smis are
1554  // accepted as the most common case.
1555 
1556  Label miss;
1557 
1558  Register receiver = a1;
1559  Register value = a0;
1560  Register scratch = a3;
1561 
1562  // Check that the receiver isn't a smi.
1563  __ JumpIfSmi(receiver, &miss);
1564 
1565  // Check that the object is a JS array.
1566  __ GetObjectType(receiver, scratch, scratch);
1567  __ Branch(&miss, ne, scratch, Operand(JS_ARRAY_TYPE));
1568 
1569  // Check that elements are FixedArray.
1570  // We rely on StoreIC_ArrayLength below to deal with all types of
1571  // fast elements (including COW).
1572  __ lw(scratch, FieldMemOperand(receiver, JSArray::kElementsOffset));
1573  __ GetObjectType(scratch, scratch, scratch);
1574  __ Branch(&miss, ne, scratch, Operand(FIXED_ARRAY_TYPE));
1575 
1576  // Check that the array has fast properties, otherwise the length
1577  // property might have been redefined.
1578  __ lw(scratch, FieldMemOperand(receiver, JSArray::kPropertiesOffset));
1579  __ lw(scratch, FieldMemOperand(scratch, FixedArray::kMapOffset));
1580  __ LoadRoot(at, Heap::kHashTableMapRootIndex);
1581  __ Branch(&miss, eq, scratch, Operand(at));
1582 
1583  // Check that value is a smi.
1584  __ JumpIfNotSmi(value, &miss);
1585 
1586  // Prepare tail call to StoreIC_ArrayLength.
1587  __ Push(receiver, value);
1588 
1589  ExternalReference ref = ExternalReference(IC_Utility(kStoreIC_ArrayLength),
1590  masm->isolate());
1591  __ TailCallExternalReference(ref, 2, 1);
1592 
1593  __ bind(&miss);
1594 
1595  GenerateMiss(masm);
1596 }
1597 
1598 
1599 void StoreIC::GenerateNormal(MacroAssembler* masm) {
1600  // ----------- S t a t e -------------
1601  // -- a0 : value
1602  // -- a1 : receiver
1603  // -- a2 : name
1604  // -- ra : return address
1605  // -----------------------------------
1606  Label miss;
1607 
1608  GenerateStringDictionaryReceiverCheck(masm, a1, a3, t0, t1, &miss);
1609 
1610  GenerateDictionaryStore(masm, &miss, a3, a2, a0, t0, t1);
1611  Counters* counters = masm->isolate()->counters();
1612  __ IncrementCounter(counters->store_normal_hit(), 1, t0, t1);
1613  __ Ret();
1614 
1615  __ bind(&miss);
1616  __ IncrementCounter(counters->store_normal_miss(), 1, t0, t1);
1617  GenerateMiss(masm);
1618 }
1619 
1620 
1621 void StoreIC::GenerateGlobalProxy(MacroAssembler* masm,
1622  StrictModeFlag strict_mode) {
1623  // ----------- S t a t e -------------
1624  // -- a0 : value
1625  // -- a1 : receiver
1626  // -- a2 : name
1627  // -- ra : return address
1628  // -----------------------------------
1629 
1630  __ Push(a1, a2, a0);
1631 
1632  __ li(a1, Operand(Smi::FromInt(NONE))); // PropertyAttributes.
1633  __ li(a0, Operand(Smi::FromInt(strict_mode)));
1634  __ Push(a1, a0);
1635 
1636  // Do tail-call to runtime routine.
1637  __ TailCallRuntime(Runtime::kSetProperty, 5, 1);
1638 }
1639 
1640 
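GenerateGlobalProxy defers entirely to the runtime: it pushes the three IC operands and then two smi-encoded extras, so Runtime::kSetProperty receives five arguments. A sketch of that argument layout; the enum encodings below are illustrative, not V8's actual values:

    #include <cstdio>

    enum PropertyAttributes { NONE = 0 };
    enum StrictModeFlag { kNonStrictMode = 0, kStrictMode = 1 };
    struct Object { const char* debug_name; };

    // Models Runtime::kSetProperty: five arguments in push order.
    void Runtime_SetProperty_Model(Object* receiver, Object* name, Object* value,
                                   PropertyAttributes attrs, StrictModeFlag strict) {
      std::printf("SetProperty %s.%s = %s (attrs=%d, strict=%d)\n",
                  receiver->debug_name, name->debug_name, value->debug_name,
                  static_cast<int>(attrs), static_cast<int>(strict));
    }

    int main() {
      Object receiver{"globalProxy"}, name{"x"}, value{"1"};
      // Push(a1, a2, a0), then Smi::FromInt(NONE) and Smi::FromInt(strict_mode):
      Runtime_SetProperty_Model(&receiver, &name, &value, NONE, kStrictMode);
    }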
1641 #undef __
1642 
1643 
1644 Condition CompareIC::ComputeCondition(Token::Value op) {
1645  switch (op) {
1646  case Token::EQ_STRICT:
1647  case Token::EQ:
1648  return eq;
1649  case Token::LT:
1650  return lt;
1651  case Token::GT:
1652  return gt;
1653  case Token::LTE:
1654  return le;
1655  case Token::GTE:
1656  return ge;
1657  default:
1658  UNREACHABLE();
1659  return kNoCondition;
1660  }
1661 }
1662 
1663 
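Note that Token::EQ and Token::EQ_STRICT both map to eq: strictness is enforced by the stub's earlier type checks, so only the final branch condition depends on the operator. A host-side illustration of how the returned condition drives a two-smi compare (the Condition enum here is a stand-in, not V8's):

    #include <cassert>

    enum Condition { eq, lt, gt, le, ge };

    bool CompareSmis(int lhs, int rhs, Condition cond) {
      switch (cond) {
        case eq: return lhs == rhs;
        case lt: return lhs <  rhs;
        case gt: return lhs >  rhs;
        case le: return lhs <= rhs;
        case ge: return lhs >= rhs;
      }
      return false;
    }

    int main() {
      // Token::LTE maps to le, so "a <= b" becomes CompareSmis(a, b, le).
      assert(CompareSmis(2, 3, le));
      assert(!CompareSmis(4, 3, le));
    }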
1664 void CompareIC::UpdateCaches(Handle<Object> x, Handle<Object> y) {
1665  HandleScope scope;
1666  Handle<Code> rewritten;
1667  State previous_state = GetState();
1668  State state = TargetState(previous_state, false, x, y);
1669  if (state == GENERIC) {
1670  CompareStub stub(GetCondition(), strict(), NO_COMPARE_FLAGS, a1, a0);
1671  rewritten = stub.GetCode();
1672  } else {
1673  ICCompareStub stub(op_, state);
1674  if (state == KNOWN_OBJECTS) {
1675  stub.set_known_map(Handle<Map>(Handle<JSObject>::cast(x)->map()));
1676  }
1677  rewritten = stub.GetCode();
1678  }
1679  set_target(*rewritten);
1680 
1681 #ifdef DEBUG
1682  if (FLAG_trace_ic) {
1683  PrintF("[CompareIC (%s->%s)#%s]\n",
1684  GetStateName(previous_state),
1685  GetStateName(state),
1686  Token::Name(op_));
1687  }
1688 #endif
1689 
1690  // Activate inlined smi code.
1691  if (previous_state == UNINITIALIZED) {
1692  PatchInlinedSmiCode(address(), ENABLE_INLINED_SMI_CHECK);
1693  }
1694 }
1695 
1696 
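Before the patching helper itself, it helps to see what UpdateCaches is doing: on every miss it derives a new state from the previous state and the operand types, compiles a stub specialized for that state, and installs it; only the first transition out of UNINITIALIZED also enables the inlined smi check. A deliberately simplified model of that walk (the real TargetState in ic.cc distinguishes more cases):

    #include <cstdio>

    enum State { UNINITIALIZED, SMIS, HEAP_NUMBERS, KNOWN_OBJECTS, OBJECTS, GENERIC };

    // Simplified transition rule: specialize once, then give up to GENERIC.
    State TargetState(State previous, bool lhs_is_smi, bool rhs_is_smi) {
      if (previous == UNINITIALIZED && lhs_is_smi && rhs_is_smi) return SMIS;
      if (previous == UNINITIALIZED) return OBJECTS;
      return GENERIC;  // any further miss falls back to the generic stub
    }

    int main() {
      State s = UNINITIALIZED;
      s = TargetState(s, true, true);   // first compare sees two smis
      std::printf("state=%d\n", s);     // SMIS; inlined smi code gets enabled
      s = TargetState(s, false, true);  // later, a heap number shows up
      std::printf("state=%d\n", s);     // GENERIC in this simplified model
    }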
1697 void PatchInlinedSmiCode(Address address, InlinedSmiCheck check) {
1698  Address andi_instruction_address =
1699  address + Assembler::kCallTargetAddressOffset;
1700 
1701  // If the instruction following the call is not an andi at, rx, #yyy, nothing
1702  // was inlined.
1703  Instr instr = Assembler::instr_at(andi_instruction_address);
1704  if (!(Assembler::IsAndImmediate(instr) &&
1705  Assembler::GetRt(instr) == (uint32_t)zero_reg.code())) {
1706  return;
1707  }
1708 
1709  // The delta to the start of the map check instruction, and the
1710  // condition code used at the patched jump.
1711  int delta = Assembler::GetImmediate16(instr);
1712  delta += Assembler::GetRs(instr) * kImm16Mask;
1713  // If the delta is 0, the instruction is andi at, zero_reg, #0, which also
1714  // signals that nothing was inlined.
1715  if (delta == 0) {
1716  return;
1717  }
1718 
1719 #ifdef DEBUG
1720  if (FLAG_trace_ic) {
1721  PrintF("[ patching ic at %p, andi=%p, delta=%d\n",
1722  address, andi_instruction_address, delta);
1723  }
1724 #endif
1725 
1726  Address patch_address =
1727  andi_instruction_address - delta * Instruction::kInstrSize;
1728  Instr instr_at_patch = Assembler::instr_at(patch_address);
1729  Instr branch_instr =
1730  Assembler::instr_at(patch_address + Instruction::kInstrSize);
1731  // This is patching a conditional "jump if not smi/jump if smi" site.
1732  // Enabling the check means changing from
1733  // andi at, rx, 0
1734  // Branch <target>, eq, at, Operand(zero_reg)
1735  // to:
1736  // andi at, rx, #kSmiTagMask
1737  // Branch <target>, ne, at, Operand(zero_reg)
1738  // and vice-versa to disable it again.
1739  CodePatcher patcher(patch_address, 2);
1740  Register reg = Register::from_code(Assembler::GetRs(instr_at_patch));
1741  if (check == ENABLE_INLINED_SMI_CHECK) {
1742  ASSERT(Assembler::IsAndImmediate(instr_at_patch));
1743  ASSERT_EQ(0, Assembler::GetImmediate16(instr_at_patch));
1744  patcher.masm()->andi(at, reg, kSmiTagMask);
1745  } else {
1746  ASSERT(check == DISABLE_INLINED_SMI_CHECK);
1747  ASSERT(Assembler::IsAndImmediate(instr_at_patch));
1748  patcher.masm()->andi(at, reg, 0);
1749  }
1750  ASSERT(Assembler::IsBranch(branch_instr));
1751  if (Assembler::IsBeq(branch_instr)) {
1752  patcher.ChangeBranchCondition(ne);
1753  } else {
1754  ASSERT(Assembler::IsBne(branch_instr));
1755  patcher.ChangeBranchCondition(eq);
1756  }
1757 }
1758 
1759 
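The delta decoded by PatchInlinedSmiCode is spread across two fields of the marker andi instruction: the 16-bit immediate plus the rs field scaled by kImm16Mask, so distances that do not fit in 16 bits still round-trip. A small host-side check of that encoding (constants mirror the MIPS values above):

    #include <cassert>
    #include <cstdint>

    const int kImm16Mask = 0xffff;  // as in the MIPS backend
    const int kInstrSize = 4;       // Instruction::kInstrSize

    // Mirrors: delta = GetImmediate16(instr) + GetRs(instr) * kImm16Mask.
    int DecodeDelta(uint32_t imm16, uint32_t rs) {
      return static_cast<int>(imm16 + rs * kImm16Mask);
    }

    int main() {
      int delta = 70000;                 // too large for the immediate alone
      uint32_t rs = delta / kImm16Mask;  // spills into the rs field
      uint32_t imm16 = delta % kImm16Mask;
      assert(DecodeDelta(imm16, rs) == delta);
      // The patch site is then andi_address - delta * kInstrSize.
      (void)kInstrSize;
    }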
1760 } } // namespace v8::internal
1761 
1762 #endif // V8_TARGET_ARCH_MIPS