v8 3.11.10 (node 0.8.26)
V8 is Google's open source JavaScript engine
ic-arm.cc
1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are
4 // met:
5 //
6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided
11 // with the distribution.
12 // * Neither the name of Google Inc. nor the names of its
13 // contributors may be used to endorse or promote products derived
14 // from this software without specific prior written permission.
15 //
16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27 
28 #include "v8.h"
29 
30 #if defined(V8_TARGET_ARCH_ARM)
31 
32 #include "assembler-arm.h"
33 #include "code-stubs.h"
34 #include "codegen.h"
35 #include "disasm.h"
36 #include "ic-inl.h"
37 #include "runtime.h"
38 #include "stub-cache.h"
39 
40 namespace v8 {
41 namespace internal {
42 
43 
44 // ----------------------------------------------------------------------------
45 // Static IC stub generators.
46 //
47 
48 #define __ ACCESS_MASM(masm)
49 
50 
51 static void GenerateGlobalInstanceTypeCheck(MacroAssembler* masm,
52  Register type,
53  Label* global_object) {
54  // Register usage:
55  // type: holds the receiver instance type on entry.
56  __ cmp(type, Operand(JS_GLOBAL_OBJECT_TYPE));
57  __ b(eq, global_object);
58  __ cmp(type, Operand(JS_BUILTINS_OBJECT_TYPE));
59  __ b(eq, global_object);
60  __ cmp(type, Operand(JS_GLOBAL_PROXY_TYPE));
61  __ b(eq, global_object);
62 }
63 
64 
65 // Generated code falls through if the receiver is a regular non-global
66 // JS object with slow properties and no interceptors.
67 static void GenerateStringDictionaryReceiverCheck(MacroAssembler* masm,
68  Register receiver,
69  Register elements,
70  Register t0,
71  Register t1,
72  Label* miss) {
73  // Register usage:
74  // receiver: holds the receiver on entry and is unchanged.
75  // elements: holds the property dictionary on fall through.
76  // Scratch registers:
77 // t0: used to hold the receiver map.
78 // t1: used to hold the receiver instance type, receiver bit mask and
79  // elements map.
80 
81  // Check that the receiver isn't a smi.
82  __ JumpIfSmi(receiver, miss);
83 
84  // Check that the receiver is a valid JS object.
85  __ CompareObjectType(receiver, t0, t1, FIRST_SPEC_OBJECT_TYPE);
86  __ b(lt, miss);
87 
88  // If this assert fails, we have to check upper bound too.
89  STATIC_ASSERT(LAST_TYPE == LAST_SPEC_OBJECT_TYPE);
90 
91  GenerateGlobalInstanceTypeCheck(masm, t1, miss);
92 
93  // Check that the global object does not require access checks.
94  __ ldrb(t1, FieldMemOperand(t0, Map::kBitFieldOffset));
95  __ tst(t1, Operand((1 << Map::kIsAccessCheckNeeded) |
96  (1 << Map::kHasNamedInterceptor)));
97  __ b(ne, miss);
98 
99  __ ldr(elements, FieldMemOperand(receiver, JSObject::kPropertiesOffset));
100  __ ldr(t1, FieldMemOperand(elements, HeapObject::kMapOffset));
101  __ LoadRoot(ip, Heap::kHashTableMapRootIndex);
102  __ cmp(t1, ip);
103  __ b(ne, miss);
104 }
105 
106 
107 // Helper function used from LoadIC/CallIC GenerateNormal.
108 //
109 // elements: Property dictionary. It is not clobbered if a jump to the miss
110 // label is done.
111 // name: Property name. It is not clobbered if a jump to the miss label is
112 // done.
113 // result: Register for the result. It is only updated if a jump to the miss
114 // label is not done. Can be the same as elements or name clobbering
115 // one of these in the case of not jumping to the miss label.
116 // The two scratch registers need to be different from elements, name and
117 // result.
118 // The generated code assumes that the receiver has slow properties,
119 // is not a global object and does not have interceptors.
120 static void GenerateDictionaryLoad(MacroAssembler* masm,
121  Label* miss,
122  Register elements,
123  Register name,
124  Register result,
125  Register scratch1,
126  Register scratch2) {
127  // Main use of the scratch registers.
128  // scratch1: Used as temporary and to hold the capacity of the property
129  // dictionary.
130  // scratch2: Used as temporary.
131  Label done;
132 
133  // Probe the dictionary.
134  StringDictionaryLookupStub::GeneratePositiveLookup(masm,
135  miss,
136  &done,
137  elements,
138  name,
139  scratch1,
140  scratch2);
141 
142  // If probing finds an entry check that the value is a normal
143  // property.
144  __ bind(&done); // scratch2 == elements + 4 * index
145  const int kElementsStartOffset = StringDictionary::kHeaderSize +
146  StringDictionary::kElementsStartIndex * kPointerSize;
147  const int kDetailsOffset = kElementsStartOffset + 2 * kPointerSize;
148  __ ldr(scratch1, FieldMemOperand(scratch2, kDetailsOffset));
149  __ tst(scratch1, Operand(PropertyDetails::TypeField::kMask << kSmiTagSize));
150  __ b(ne, miss);
151 
152  // Get the value at the masked, scaled index and return.
153  __ ldr(result,
154  FieldMemOperand(scratch2, kElementsStartOffset + 1 * kPointerSize));
155 }
156 
157 
158 // Helper function used from StoreIC::GenerateNormal.
159 //
160 // elements: Property dictionary. It is not clobbered if a jump to the miss
161 // label is done.
162 // name: Property name. It is not clobbered if a jump to the miss label is
163 // done.
164 // value: The value to store.
165 // The two scratch registers need to be different from elements, name and
166 // result.
167 // The generated code assumes that the receiver has slow properties,
168 // is not a global object and does not have interceptors.
169 static void GenerateDictionaryStore(MacroAssembler* masm,
170  Label* miss,
171  Register elements,
172  Register name,
173  Register value,
174  Register scratch1,
175  Register scratch2) {
176  // Main use of the scratch registers.
177  // scratch1: Used as temporary and to hold the capacity of the property
178  // dictionary.
179  // scratch2: Used as temporary.
180  Label done;
181 
182  // Probe the dictionary.
183  StringDictionaryLookupStub::GeneratePositiveLookup(masm,
184  miss,
185  &done,
186  elements,
187  name,
188  scratch1,
189  scratch2);
190 
191  // If probing finds an entry in the dictionary check that the value
192  // is a normal property that is not read only.
193  __ bind(&done); // scratch2 == elements + 4 * index
194  const int kElementsStartOffset = StringDictionary::kHeaderSize +
195  StringDictionary::kElementsStartIndex * kPointerSize;
196  const int kDetailsOffset = kElementsStartOffset + 2 * kPointerSize;
197  const int kTypeAndReadOnlyMask =
198  (PropertyDetails::TypeField::kMask |
199  PropertyDetails::AttributesField::encode(READ_ONLY)) << kSmiTagSize;
200  __ ldr(scratch1, FieldMemOperand(scratch2, kDetailsOffset));
201  __ tst(scratch1, Operand(kTypeAndReadOnlyMask));
202  __ b(ne, miss);
203 
204  // Store the value at the masked, scaled index and return.
205  const int kValueOffset = kElementsStartOffset + kPointerSize;
206  __ add(scratch2, scratch2, Operand(kValueOffset - kHeapObjectTag));
207  __ str(value, MemOperand(scratch2));
208 
209  // Update the write barrier. Make sure not to clobber the value.
210  __ mov(scratch1, value);
211  __ RecordWrite(
212  elements, scratch2, scratch1, kLRHasNotBeenSaved, kDontSaveFPRegs);
213 }
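Both dictionary helpers above use the same entry layout: the probe leaves scratch2 pointing at the found entry (scratch2 == elements + 4 * index), and the entry's value and PropertyDetails words sit one and two pointers past the entry's start. A minimal standalone sketch of that offset arithmetic, assuming a 32-bit build; the concrete kHeaderSize and kElementsStartIndex values below are illustrative stand-ins for the StringDictionary constants in objects.h:

    #include <cstdio>

    int main() {
      const int kPointerSize = 4;         // assumed: 32-bit build
      const int kHeaderSize = 12;         // assumed stand-in for StringDictionary::kHeaderSize
      const int kElementsStartIndex = 3;  // assumed stand-in for StringDictionary::kElementsStartIndex
      const int kElementsStartOffset = kHeaderSize + kElementsStartIndex * kPointerSize;
      const int kValueOffset = kElementsStartOffset + 1 * kPointerSize;    // entry value slot
      const int kDetailsOffset = kElementsStartOffset + 2 * kPointerSize;  // entry details slot
      std::printf("value at +%d, details at +%d\n", kValueOffset, kDetailsOffset);
      return 0;
    }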
214 
215 
216 void LoadIC::GenerateArrayLength(MacroAssembler* masm) {
217  // ----------- S t a t e -------------
218  // -- r2 : name
219  // -- lr : return address
220  // -- r0 : receiver
221  // -- sp[0] : receiver
222  // -----------------------------------
223  Label miss;
224 
225  StubCompiler::GenerateLoadArrayLength(masm, r0, r3, &miss);
226  __ bind(&miss);
227  StubCompiler::GenerateLoadMiss(masm, Code::LOAD_IC);
228 }
229 
230 
231 void LoadIC::GenerateStringLength(MacroAssembler* masm, bool support_wrappers) {
232  // ----------- S t a t e -------------
233  // -- r2 : name
234  // -- lr : return address
235  // -- r0 : receiver
236  // -- sp[0] : receiver
237  // -----------------------------------
238  Label miss;
239 
240  StubCompiler::GenerateLoadStringLength(masm, r0, r1, r3, &miss,
241  support_wrappers);
242  // Cache miss: Jump to runtime.
243  __ bind(&miss);
244  StubCompiler::GenerateLoadMiss(masm, Code::LOAD_IC);
245 }
246 
247 
248 void LoadIC::GenerateFunctionPrototype(MacroAssembler* masm) {
249  // ----------- S t a t e -------------
250  // -- r2 : name
251  // -- lr : return address
252  // -- r0 : receiver
253  // -- sp[0] : receiver
254  // -----------------------------------
255  Label miss;
256 
257  StubCompiler::GenerateLoadFunctionPrototype(masm, r0, r1, r3, &miss);
258  __ bind(&miss);
259  StubCompiler::GenerateLoadMiss(masm, Code::LOAD_IC);
260 }
261 
262 
263 // Checks the receiver for special cases (value type, slow case bits).
264 // Falls through for regular JS object.
265 static void GenerateKeyedLoadReceiverCheck(MacroAssembler* masm,
266  Register receiver,
267  Register map,
268  Register scratch,
269  int interceptor_bit,
270  Label* slow) {
271  // Check that the object isn't a smi.
272  __ JumpIfSmi(receiver, slow);
273  // Get the map of the receiver.
274  __ ldr(map, FieldMemOperand(receiver, HeapObject::kMapOffset));
275  // Check bit field.
276  __ ldrb(scratch, FieldMemOperand(map, Map::kBitFieldOffset));
277  __ tst(scratch,
278  Operand((1 << Map::kIsAccessCheckNeeded) | (1 << interceptor_bit)));
279  __ b(ne, slow);
280  // Check that the object is some kind of JS object EXCEPT JS Value type.
281  // In the case that the object is a value-wrapper object,
282  // we enter the runtime system to make sure that indexing into string
283 // objects works as intended.
284  STATIC_ASSERT(JS_OBJECT_TYPE > JS_VALUE_TYPE);
285  __ ldrb(scratch, FieldMemOperand(map, Map::kInstanceTypeOffset));
286  __ cmp(scratch, Operand(JS_OBJECT_TYPE));
287  __ b(lt, slow);
288 }
289 
290 
291 // Loads an indexed element from a fast case array.
292 // If not_fast_array is NULL, doesn't perform the elements map check.
293 static void GenerateFastArrayLoad(MacroAssembler* masm,
294  Register receiver,
295  Register key,
296  Register elements,
297  Register scratch1,
298  Register scratch2,
299  Register result,
300  Label* not_fast_array,
301  Label* out_of_range) {
302  // Register use:
303  //
304  // receiver - holds the receiver on entry.
305  // Unchanged unless 'result' is the same register.
306  //
307  // key - holds the smi key on entry.
308  // Unchanged unless 'result' is the same register.
309  //
310  // elements - holds the elements of the receiver on exit.
311  //
312  // result - holds the result on exit if the load succeeded.
313 // Allowed to be the same as 'receiver' or 'key'.
314  // Unchanged on bailout so 'receiver' and 'key' can be safely
315  // used by further computation.
316  //
317  // Scratch registers:
318  //
319  // scratch1 - used to hold elements map and elements length.
320  // Holds the elements map if not_fast_array branch is taken.
321  //
322  // scratch2 - used to hold the loaded value.
323 
324  __ ldr(elements, FieldMemOperand(receiver, JSObject::kElementsOffset));
325  if (not_fast_array != NULL) {
326  // Check that the object is in fast mode and writable.
327  __ ldr(scratch1, FieldMemOperand(elements, HeapObject::kMapOffset));
328  __ LoadRoot(ip, Heap::kFixedArrayMapRootIndex);
329  __ cmp(scratch1, ip);
330  __ b(ne, not_fast_array);
331  } else {
332  __ AssertFastElements(elements);
333  }
334  // Check that the key (index) is within bounds.
335  __ ldr(scratch1, FieldMemOperand(elements, FixedArray::kLengthOffset));
336  __ cmp(key, Operand(scratch1));
337  __ b(hs, out_of_range);
338  // Fast case: Do the load.
339  __ add(scratch1, elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
340  // The key is a smi.
341  STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize < kPointerSizeLog2);
342  __ ldr(scratch2,
343  MemOperand(scratch1, key, LSL, kPointerSizeLog2 - kSmiTagSize));
344  __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
345  __ cmp(scratch2, ip);
346  // In case the loaded value is the_hole we have to consult GetProperty
347  // to ensure the prototype chain is searched.
348  __ b(eq, out_of_range);
349  __ mov(result, scratch2);
350 }
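The load above folds smi untagging into the addressing mode: a smi is its value shifted left by kSmiTagSize, so shifting the tagged key left by kPointerSizeLog2 - kSmiTagSize produces value * kPointerSize directly. A quick standalone check of that identity, assuming the 32-bit smi layout this ARM port uses (kSmiTagSize == 1, 4-byte pointers):

    #include <cassert>

    int main() {
      const int kSmiTagSize = 1;       // assumed: 32-bit V8, smi = value << 1
      const int kPointerSizeLog2 = 2;  // assumed: 4-byte pointers
      for (int value = 0; value < 1000; ++value) {
        int smi = value << kSmiTagSize;                        // tagged key
        int offset = smi << (kPointerSizeLog2 - kSmiTagSize);  // as in the ldr above
        assert(offset == value * 4);                           // byte offset of element
      }
      return 0;
    }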
351 
352 
353 // Checks whether a key is an array index string or a symbol string.
354 // Falls through if a key is a symbol.
355 static void GenerateKeyStringCheck(MacroAssembler* masm,
356  Register key,
357  Register map,
358  Register hash,
359  Label* index_string,
360  Label* not_symbol) {
361  // The key is not a smi.
362  // Is it a string?
363  __ CompareObjectType(key, map, hash, FIRST_NONSTRING_TYPE);
364  __ b(ge, not_symbol);
365 
366  // Is the string an array index, with cached numeric value?
367  __ ldr(hash, FieldMemOperand(key, String::kHashFieldOffset));
368  __ tst(hash, Operand(String::kContainsCachedArrayIndexMask));
369  __ b(eq, index_string);
370 
371  // Is the string a symbol?
372  // map: key map
373  __ ldrb(hash, FieldMemOperand(map, Map::kInstanceTypeOffset));
374  STATIC_ASSERT(kSymbolTag != 0);
375  __ tst(hash, Operand(kIsSymbolMask));
376  __ b(eq, not_symbol);
377 }
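The array-index branch above relies on the packing of the string hash field: when a string's numeric value has been parsed and cached as an array index, every bit covered by String::kContainsCachedArrayIndexMask is zero, so a single tst classifies the key and IndexFromHash (used at the call sites below) extracts the index. A sketch under assumed bit positions; the real masks and shifts live in objects.h:

    #include <cstdint>
    #include <cstdio>

    int main() {
      const uint32_t kContainsCachedArrayIndexMask = 0x3;  // assumed flag bits
      const int kIndexShift = 2;                           // assumed index position
      uint32_t hash_field = 7u << kIndexShift;             // index 7 cached above the flags
      if ((hash_field & kContainsCachedArrayIndexMask) == 0) {
        std::printf("cached array index: %u\n", hash_field >> kIndexShift);
      }
      return 0;
    }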
378 
379 
380 // Defined in ic.cc.
381 Object* CallIC_Miss(Arguments args);
382 
383 // The generated code does not accept smi keys.
384 // The generated code falls through if both probes miss.
385 void CallICBase::GenerateMonomorphicCacheProbe(MacroAssembler* masm,
386  int argc,
387  Code::Kind kind,
388  Code::ExtraICState extra_state) {
389  // ----------- S t a t e -------------
390  // -- r1 : receiver
391  // -- r2 : name
392  // -----------------------------------
393  Label number, non_number, non_string, boolean, probe, miss;
394 
395  // Probe the stub cache.
396  Code::Flags flags = Code::ComputeFlags(kind,
397  MONOMORPHIC,
398  extra_state,
399  NORMAL,
400  argc);
401  Isolate::Current()->stub_cache()->GenerateProbe(
402  masm, flags, r1, r2, r3, r4, r5, r6);
403 
404  // If the stub cache probing failed, the receiver might be a value.
405  // For value objects, we use the map of the prototype objects for
406  // the corresponding JSValue for the cache and that is what we need
407  // to probe.
408  //
409  // Check for number.
410  __ JumpIfSmi(r1, &number);
411  __ CompareObjectType(r1, r3, r3, HEAP_NUMBER_TYPE);
412  __ b(ne, &non_number);
413  __ bind(&number);
414  StubCompiler::GenerateLoadGlobalFunctionPrototype(
415  masm, Context::NUMBER_FUNCTION_INDEX, r1);
416  __ b(&probe);
417 
418  // Check for string.
419  __ bind(&non_number);
420  __ cmp(r3, Operand(FIRST_NONSTRING_TYPE));
421  __ b(hs, &non_string);
422  StubCompiler::GenerateLoadGlobalFunctionPrototype(
423  masm, Context::STRING_FUNCTION_INDEX, r1);
424  __ b(&probe);
425 
426  // Check for boolean.
427  __ bind(&non_string);
428  __ LoadRoot(ip, Heap::kTrueValueRootIndex);
429  __ cmp(r1, ip);
430  __ b(eq, &boolean);
431  __ LoadRoot(ip, Heap::kFalseValueRootIndex);
432  __ cmp(r1, ip);
433  __ b(ne, &miss);
434  __ bind(&boolean);
435  StubCompiler::GenerateLoadGlobalFunctionPrototype(
436  masm, Context::BOOLEAN_FUNCTION_INDEX, r1);
437 
438  // Probe the stub cache for the value object.
439  __ bind(&probe);
440  Isolate::Current()->stub_cache()->GenerateProbe(
441  masm, flags, r1, r2, r3, r4, r5, r6);
442 
443  __ bind(&miss);
444 }
445 
446 
447 static void GenerateFunctionTailCall(MacroAssembler* masm,
448  int argc,
449  Label* miss,
450  Register scratch) {
451  // r1: function
452 
453  // Check that the value isn't a smi.
454  __ JumpIfSmi(r1, miss);
455 
456  // Check that the value is a JSFunction.
457  __ CompareObjectType(r1, scratch, scratch, JS_FUNCTION_TYPE);
458  __ b(ne, miss);
459 
460  // Invoke the function.
461  ParameterCount actual(argc);
462  __ InvokeFunction(r1, actual, JUMP_FUNCTION,
463  NullCallWrapper(), CALL_AS_METHOD);
464 }
465 
466 
467 void CallICBase::GenerateNormal(MacroAssembler* masm, int argc) {
468  // ----------- S t a t e -------------
469  // -- r2 : name
470  // -- lr : return address
471  // -----------------------------------
472  Label miss;
473 
474  // Get the receiver of the function from the stack into r1.
475  __ ldr(r1, MemOperand(sp, argc * kPointerSize));
476 
477  GenerateStringDictionaryReceiverCheck(masm, r1, r0, r3, r4, &miss);
478 
479  // r0: elements
480  // Search the dictionary - put result in register r1.
481  GenerateDictionaryLoad(masm, &miss, r0, r2, r1, r3, r4);
482 
483  GenerateFunctionTailCall(masm, argc, &miss, r4);
484 
485  __ bind(&miss);
486 }
487 
488 
489 void CallICBase::GenerateMiss(MacroAssembler* masm,
490  int argc,
491  IC::UtilityId id,
492  Code::ExtraICState extra_state) {
493  // ----------- S t a t e -------------
494  // -- r2 : name
495  // -- lr : return address
496  // -----------------------------------
497  Isolate* isolate = masm->isolate();
498 
499  if (id == IC::kCallIC_Miss) {
500  __ IncrementCounter(isolate->counters()->call_miss(), 1, r3, r4);
501  } else {
502  __ IncrementCounter(isolate->counters()->keyed_call_miss(), 1, r3, r4);
503  }
504 
505  // Get the receiver of the function from the stack.
506  __ ldr(r3, MemOperand(sp, argc * kPointerSize));
507 
508  {
509  FrameScope scope(masm, StackFrame::INTERNAL);
510 
511  // Push the receiver and the name of the function.
512  __ Push(r3, r2);
513 
514  // Call the entry.
515  __ mov(r0, Operand(2));
516  __ mov(r1, Operand(ExternalReference(IC_Utility(id), isolate)));
517 
518  CEntryStub stub(1);
519  __ CallStub(&stub);
520 
521  // Move result to r1 and leave the internal frame.
522  __ mov(r1, Operand(r0));
523  }
524 
525  // Check if the receiver is a global object of some sort.
526  // This can happen only for regular CallIC but not KeyedCallIC.
527  if (id == IC::kCallIC_Miss) {
528  Label invoke, global;
529  __ ldr(r2, MemOperand(sp, argc * kPointerSize)); // receiver
530  __ JumpIfSmi(r2, &invoke);
531  __ CompareObjectType(r2, r3, r3, JS_GLOBAL_OBJECT_TYPE);
532  __ b(eq, &global);
533  __ cmp(r3, Operand(JS_BUILTINS_OBJECT_TYPE));
534  __ b(ne, &invoke);
535 
536  // Patch the receiver on the stack.
537  __ bind(&global);
538  __ ldr(r2, FieldMemOperand(r2, GlobalObject::kGlobalReceiverOffset));
539  __ str(r2, MemOperand(sp, argc * kPointerSize));
540  __ bind(&invoke);
541  }
542 
543  // Invoke the function.
544  CallKind call_kind = CallICBase::Contextual::decode(extra_state)
545  ? CALL_AS_FUNCTION
546  : CALL_AS_METHOD;
547  ParameterCount actual(argc);
548  __ InvokeFunction(r1,
549  actual,
550  JUMP_FUNCTION,
551  NullCallWrapper(),
552  call_kind);
553 }
554 
555 
556 void CallIC::GenerateMegamorphic(MacroAssembler* masm,
557  int argc,
558  Code::ExtraICState extra_ic_state) {
559  // ----------- S t a t e -------------
560  // -- r2 : name
561  // -- lr : return address
562  // -----------------------------------
563 
564  // Get the receiver of the function from the stack into r1.
565  __ ldr(r1, MemOperand(sp, argc * kPointerSize));
566  GenerateMonomorphicCacheProbe(masm, argc, Code::CALL_IC, extra_ic_state);
567  GenerateMiss(masm, argc, extra_ic_state);
568 }
569 
570 
571 void KeyedCallIC::GenerateMegamorphic(MacroAssembler* masm, int argc) {
572  // ----------- S t a t e -------------
573  // -- r2 : name
574  // -- lr : return address
575  // -----------------------------------
576 
577  // Get the receiver of the function from the stack into r1.
578  __ ldr(r1, MemOperand(sp, argc * kPointerSize));
579 
580  Label do_call, slow_call, slow_load, slow_reload_receiver;
581  Label check_number_dictionary, check_string, lookup_monomorphic_cache;
582  Label index_smi, index_string;
583 
584  // Check that the key is a smi.
585  __ JumpIfNotSmi(r2, &check_string);
586  __ bind(&index_smi);
587  // Now the key is known to be a smi. This place is also jumped to from below
588  // where a numeric string is converted to a smi.
589 
590  GenerateKeyedLoadReceiverCheck(
591  masm, r1, r0, r3, Map::kHasIndexedInterceptor, &slow_call);
592 
593  GenerateFastArrayLoad(
594  masm, r1, r2, r4, r3, r0, r1, &check_number_dictionary, &slow_load);
595  Counters* counters = masm->isolate()->counters();
596  __ IncrementCounter(counters->keyed_call_generic_smi_fast(), 1, r0, r3);
597 
598  __ bind(&do_call);
599  // receiver in r1 is not used after this point.
600  // r2: key
601  // r1: function
602  GenerateFunctionTailCall(masm, argc, &slow_call, r0);
603 
604  __ bind(&check_number_dictionary);
605  // r2: key
606  // r3: elements map
607  // r4: elements
608  // Check whether the elements array is a number dictionary.
609  __ LoadRoot(ip, Heap::kHashTableMapRootIndex);
610  __ cmp(r3, ip);
611  __ b(ne, &slow_load);
612  __ mov(r0, Operand(r2, ASR, kSmiTagSize));
613  // r0: untagged index
614  __ LoadFromNumberDictionary(&slow_load, r4, r2, r1, r0, r3, r5);
615  __ IncrementCounter(counters->keyed_call_generic_smi_dict(), 1, r0, r3);
616  __ jmp(&do_call);
617 
618  __ bind(&slow_load);
619  // This branch is taken when calling KeyedCallIC_Miss is neither required
620  // nor beneficial.
621  __ IncrementCounter(counters->keyed_call_generic_slow_load(), 1, r0, r3);
622  {
623  FrameScope scope(masm, StackFrame::INTERNAL);
624  __ push(r2); // save the key
625  __ Push(r1, r2); // pass the receiver and the key
626  __ CallRuntime(Runtime::kKeyedGetProperty, 2);
627  __ pop(r2); // restore the key
628  }
629  __ mov(r1, r0);
630  __ jmp(&do_call);
631 
632  __ bind(&check_string);
633  GenerateKeyStringCheck(masm, r2, r0, r3, &index_string, &slow_call);
634 
635  // The key is known to be a symbol.
636  // If the receiver is a regular JS object with slow properties then do
637  // a quick inline probe of the receiver's dictionary.
638  // Otherwise do the monomorphic cache probe.
639  GenerateKeyedLoadReceiverCheck(
640  masm, r1, r0, r3, Map::kHasNamedInterceptor, &lookup_monomorphic_cache);
641 
642  __ ldr(r0, FieldMemOperand(r1, JSObject::kPropertiesOffset));
643  __ ldr(r3, FieldMemOperand(r0, HeapObject::kMapOffset));
644  __ LoadRoot(ip, Heap::kHashTableMapRootIndex);
645  __ cmp(r3, ip);
646  __ b(ne, &lookup_monomorphic_cache);
647 
648  GenerateDictionaryLoad(masm, &slow_load, r0, r2, r1, r3, r4);
649  __ IncrementCounter(counters->keyed_call_generic_lookup_dict(), 1, r0, r3);
650  __ jmp(&do_call);
651 
652  __ bind(&lookup_monomorphic_cache);
653  __ IncrementCounter(counters->keyed_call_generic_lookup_cache(), 1, r0, r3);
654  GenerateMonomorphicCacheProbe(masm,
655  argc,
656  Code::KEYED_CALL_IC,
657  Code::kNoExtraICState);
658  // Fall through on miss.
659 
660  __ bind(&slow_call);
661  // This branch is taken if:
662  // - the receiver requires boxing or access check,
663  // - the key is neither smi nor symbol,
664  // - the value loaded is not a function,
665  // - there is hope that the runtime will create a monomorphic call stub
666  // that will get fetched next time.
667  __ IncrementCounter(counters->keyed_call_generic_slow(), 1, r0, r3);
668  GenerateMiss(masm, argc);
669 
670  __ bind(&index_string);
671  __ IndexFromHash(r3, r2);
672  // Now jump to the place where smi keys are handled.
673  __ jmp(&index_smi);
674 }
675 
676 
677 void KeyedCallIC::GenerateNormal(MacroAssembler* masm, int argc) {
678  // ----------- S t a t e -------------
679  // -- r2 : name
680  // -- lr : return address
681  // -----------------------------------
682 
683  // Check if the name is a string.
684  Label miss;
685  __ JumpIfSmi(r2, &miss);
686  __ IsObjectJSStringType(r2, r0, &miss);
687 
688  CallICBase::GenerateNormal(masm, argc);
689  __ bind(&miss);
690  GenerateMiss(masm, argc);
691 }
692 
693 
694 // Defined in ic.cc.
695 Object* LoadIC_Miss(Arguments args);
696 
697 void LoadIC::GenerateMegamorphic(MacroAssembler* masm) {
698  // ----------- S t a t e -------------
699  // -- r2 : name
700  // -- lr : return address
701  // -- r0 : receiver
702  // -- sp[0] : receiver
703  // -----------------------------------
704 
705  // Probe the stub cache.
706  Code::Flags flags =
707  Code::ComputeFlags(Code::LOAD_IC, MONOMORPHIC);
708  Isolate::Current()->stub_cache()->GenerateProbe(
709  masm, flags, r0, r2, r3, r4, r5, r6);
710 
711  // Cache miss: Jump to runtime.
712  GenerateMiss(masm);
713 }
714 
715 
716 void LoadIC::GenerateNormal(MacroAssembler* masm) {
717  // ----------- S t a t e -------------
718  // -- r2 : name
719  // -- lr : return address
720  // -- r0 : receiver
721  // -- sp[0] : receiver
722  // -----------------------------------
723  Label miss;
724 
725  GenerateStringDictionaryReceiverCheck(masm, r0, r1, r3, r4, &miss);
726 
727  // r1: elements
728  GenerateDictionaryLoad(masm, &miss, r1, r2, r0, r3, r4);
729  __ Ret();
730 
731  // Cache miss: Jump to runtime.
732  __ bind(&miss);
733  GenerateMiss(masm);
734 }
735 
736 
737 void LoadIC::GenerateMiss(MacroAssembler* masm) {
738  // ----------- S t a t e -------------
739  // -- r2 : name
740  // -- lr : return address
741  // -- r0 : receiver
742  // -- sp[0] : receiver
743  // -----------------------------------
744  Isolate* isolate = masm->isolate();
745 
746  __ IncrementCounter(isolate->counters()->load_miss(), 1, r3, r4);
747 
748  __ mov(r3, r0);
749  __ Push(r3, r2);
750 
751  // Perform tail call to the entry.
752  ExternalReference ref =
753  ExternalReference(IC_Utility(kLoadIC_Miss), isolate);
754  __ TailCallExternalReference(ref, 2, 1);
755 }
756 
757 
758 static MemOperand GenerateMappedArgumentsLookup(MacroAssembler* masm,
759  Register object,
760  Register key,
761  Register scratch1,
762  Register scratch2,
763  Register scratch3,
764  Label* unmapped_case,
765  Label* slow_case) {
766  Heap* heap = masm->isolate()->heap();
767 
768  // Check that the receiver is a JSObject. Because of the map check
769  // later, we do not need to check for interceptors or whether it
770  // requires access checks.
771  __ JumpIfSmi(object, slow_case);
772  // Check that the object is some kind of JSObject.
773  __ CompareObjectType(object, scratch1, scratch2, FIRST_JS_RECEIVER_TYPE);
774  __ b(lt, slow_case);
775 
776  // Check that the key is a positive smi.
777  __ tst(key, Operand(0x80000001));
778  __ b(ne, slow_case);
779 
780  // Load the elements into scratch1 and check its map.
781  Handle<Map> arguments_map(heap->non_strict_arguments_elements_map());
782  __ ldr(scratch1, FieldMemOperand(object, JSObject::kElementsOffset));
783  __ CheckMap(scratch1, scratch2, arguments_map, slow_case, DONT_DO_SMI_CHECK);
784 
785  // Check if element is in the range of mapped arguments. If not, jump
786  // to the unmapped lookup with the parameter map in scratch1.
787  __ ldr(scratch2, FieldMemOperand(scratch1, FixedArray::kLengthOffset));
788  __ sub(scratch2, scratch2, Operand(Smi::FromInt(2)));
789  __ cmp(key, Operand(scratch2));
790  __ b(cs, unmapped_case);
791 
792  // Load element index and check whether it is the hole.
793  const int kOffset =
794  FixedArray::kHeaderSize + 2 * kPointerSize - kHeapObjectTag;
795 
796  __ mov(scratch3, Operand(kPointerSize >> 1));
797  __ mul(scratch3, key, scratch3);
798  __ add(scratch3, scratch3, Operand(kOffset));
799 
800  __ ldr(scratch2, MemOperand(scratch1, scratch3));
801  __ LoadRoot(scratch3, Heap::kTheHoleValueRootIndex);
802  __ cmp(scratch2, scratch3);
803  __ b(eq, unmapped_case);
804 
805  // Load value from context and return it. We can reuse scratch1 because
806  // we do not jump to the unmapped lookup (which requires the parameter
807  // map in scratch1).
808  __ ldr(scratch1, FieldMemOperand(scratch1, FixedArray::kHeaderSize));
809  __ mov(scratch3, Operand(kPointerSize >> 1));
810  __ mul(scratch3, scratch2, scratch3);
811  __ add(scratch3, scratch3, Operand(Context::kHeaderSize - kHeapObjectTag));
812  return MemOperand(scratch1, scratch3);
813 }
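Two details of the lookup above are worth spelling out: the single tst against 0x80000001 checks both the smi tag (bit 0) and the sign (bit 31) at once, and the mul by kPointerSize >> 1 scales the still-tagged key to a byte offset, since the smi tag already contributes a factor of two. A standalone sketch of both, with an assumed 32-bit layout (8 stands in for FixedArray::kHeaderSize):

    #include <cassert>
    #include <cstdint>

    int main() {
      int32_t key_value = 3;             // untagged index
      int32_t key_smi = key_value << 1;  // smi-tagged key, as the stub receives it
      // Positive smi: bit 0 (tag) and bit 31 (sign) must both be clear.
      assert((key_smi & 0x80000001) == 0);

      const int kPointerSize = 4;           // assumed: 32-bit build
      const int kHeapObjectTag = 1;
      const int kFixedArrayHeaderSize = 8;  // assumed stand-in for FixedArray::kHeaderSize
      // Slots 0 and 1 of the parameter map hold the context and the backing
      // store, so mapped entries start two pointers into the array payload.
      const int kOffset = kFixedArrayHeaderSize + 2 * kPointerSize - kHeapObjectTag;
      int byte_offset = key_smi * (kPointerSize >> 1) + kOffset;  // the mul/add above
      assert(byte_offset == kOffset + key_value * kPointerSize);
      return 0;
    }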
814 
815 
816 static MemOperand GenerateUnmappedArgumentsLookup(MacroAssembler* masm,
817  Register key,
818  Register parameter_map,
819  Register scratch,
820  Label* slow_case) {
821  // Element is in arguments backing store, which is referenced by the
822  // second element of the parameter_map. The parameter_map register
823  // must be loaded with the parameter map of the arguments object and is
824  // overwritten.
825  const int kBackingStoreOffset = FixedArray::kHeaderSize + kPointerSize;
826  Register backing_store = parameter_map;
827  __ ldr(backing_store, FieldMemOperand(parameter_map, kBackingStoreOffset));
828  Handle<Map> fixed_array_map(masm->isolate()->heap()->fixed_array_map());
829  __ CheckMap(backing_store, scratch, fixed_array_map, slow_case,
830  DONT_DO_SMI_CHECK);
831  __ ldr(scratch, FieldMemOperand(backing_store, FixedArray::kLengthOffset));
832  __ cmp(key, Operand(scratch));
833  __ b(cs, slow_case);
834  __ mov(scratch, Operand(kPointerSize >> 1));
835  __ mul(scratch, key, scratch);
836  __ add(scratch,
837  scratch,
838  Operand(FixedArray::kHeaderSize - kHeapObjectTag));
839  return MemOperand(backing_store, scratch);
840 }
841 
842 
843 void KeyedLoadIC::GenerateNonStrictArguments(MacroAssembler* masm) {
844  // ---------- S t a t e --------------
845  // -- lr : return address
846  // -- r0 : key
847  // -- r1 : receiver
848  // -----------------------------------
849  Label slow, notin;
850  MemOperand mapped_location =
851  GenerateMappedArgumentsLookup(masm, r1, r0, r2, r3, r4, &notin, &slow);
852  __ ldr(r0, mapped_location);
853  __ Ret();
854  __ bind(&notin);
855  // The unmapped lookup expects that the parameter map is in r2.
856  MemOperand unmapped_location =
857  GenerateUnmappedArgumentsLookup(masm, r0, r2, r3, &slow);
858  __ ldr(r2, unmapped_location);
859  __ LoadRoot(r3, Heap::kTheHoleValueRootIndex);
860  __ cmp(r2, r3);
861  __ b(eq, &slow);
862  __ mov(r0, r2);
863  __ Ret();
864  __ bind(&slow);
865  GenerateMiss(masm, false);
866 }
867 
868 
869 void KeyedStoreIC::GenerateNonStrictArguments(MacroAssembler* masm) {
870  // ---------- S t a t e --------------
871  // -- r0 : value
872  // -- r1 : key
873  // -- r2 : receiver
874  // -- lr : return address
875  // -----------------------------------
876  Label slow, notin;
877  MemOperand mapped_location =
878  GenerateMappedArgumentsLookup(masm, r2, r1, r3, r4, r5, &notin, &slow);
879  __ str(r0, mapped_location);
880  __ add(r6, r3, r5);
881  __ mov(r9, r0);
882  __ RecordWrite(r3, r6, r9, kLRHasNotBeenSaved, kDontSaveFPRegs);
883  __ Ret();
884  __ bind(&notin);
885  // The unmapped lookup expects that the parameter map is in r3.
886  MemOperand unmapped_location =
887  GenerateUnmappedArgumentsLookup(masm, r1, r3, r4, &slow);
888  __ str(r0, unmapped_location);
889  __ add(r6, r3, r4);
890  __ mov(r9, r0);
891  __ RecordWrite(r3, r6, r9, kLRHasNotBeenSaved, kDontSaveFPRegs);
892  __ Ret();
893  __ bind(&slow);
894  GenerateMiss(masm, false);
895 }
896 
897 
898 void KeyedCallIC::GenerateNonStrictArguments(MacroAssembler* masm,
899  int argc) {
900  // ----------- S t a t e -------------
901  // -- r2 : name
902  // -- lr : return address
903  // -----------------------------------
904  Label slow, notin;
905  // Load receiver.
906  __ ldr(r1, MemOperand(sp, argc * kPointerSize));
907  MemOperand mapped_location =
908  GenerateMappedArgumentsLookup(masm, r1, r2, r3, r4, r5, &notin, &slow);
909  __ ldr(r1, mapped_location);
910  GenerateFunctionTailCall(masm, argc, &slow, r3);
911  __ bind(&notin);
912  // The unmapped lookup expects that the parameter map is in r3.
913  MemOperand unmapped_location =
914  GenerateUnmappedArgumentsLookup(masm, r2, r3, r4, &slow);
915  __ ldr(r1, unmapped_location);
916  __ LoadRoot(r3, Heap::kTheHoleValueRootIndex);
917  __ cmp(r1, r3);
918  __ b(eq, &slow);
919  GenerateFunctionTailCall(masm, argc, &slow, r3);
920  __ bind(&slow);
921  GenerateMiss(masm, argc);
922 }
923 
924 
925 Object* KeyedLoadIC_Miss(Arguments args);
926 
927 
928 void KeyedLoadIC::GenerateMiss(MacroAssembler* masm, bool force_generic) {
929  // ---------- S t a t e --------------
930  // -- lr : return address
931  // -- r0 : key
932  // -- r1 : receiver
933  // -----------------------------------
934  Isolate* isolate = masm->isolate();
935 
936  __ IncrementCounter(isolate->counters()->keyed_load_miss(), 1, r3, r4);
937 
938  __ Push(r1, r0);
939 
940  // Perform tail call to the entry.
941  ExternalReference ref = force_generic
942  ? ExternalReference(IC_Utility(kKeyedLoadIC_MissForceGeneric), isolate)
943  : ExternalReference(IC_Utility(kKeyedLoadIC_Miss), isolate);
944 
945  __ TailCallExternalReference(ref, 2, 1);
946 }
947 
948 
949 void KeyedLoadIC::GenerateRuntimeGetProperty(MacroAssembler* masm) {
950  // ---------- S t a t e --------------
951  // -- lr : return address
952  // -- r0 : key
953  // -- r1 : receiver
954  // -----------------------------------
955 
956  __ Push(r1, r0);
957 
958  __ TailCallRuntime(Runtime::kKeyedGetProperty, 2, 1);
959 }
960 
961 
962 void KeyedLoadIC::GenerateGeneric(MacroAssembler* masm) {
963  // ---------- S t a t e --------------
964  // -- lr : return address
965  // -- r0 : key
966  // -- r1 : receiver
967  // -----------------------------------
968  Label slow, check_string, index_smi, index_string, property_array_property;
969  Label probe_dictionary, check_number_dictionary;
970 
971  Register key = r0;
972  Register receiver = r1;
973 
974  Isolate* isolate = masm->isolate();
975 
976  // Check that the key is a smi.
977  __ JumpIfNotSmi(key, &check_string);
978  __ bind(&index_smi);
979  // Now the key is known to be a smi. This place is also jumped to from below
980  // where a numeric string is converted to a smi.
981 
982  GenerateKeyedLoadReceiverCheck(
983  masm, receiver, r2, r3, Map::kHasIndexedInterceptor, &slow);
984 
985  // Check the receiver's map to see if it has fast elements.
986  __ CheckFastElements(r2, r3, &check_number_dictionary);
987 
988  GenerateFastArrayLoad(
989  masm, receiver, key, r4, r3, r2, r0, NULL, &slow);
990  __ IncrementCounter(isolate->counters()->keyed_load_generic_smi(), 1, r2, r3);
991  __ Ret();
992 
993  __ bind(&check_number_dictionary);
994  __ ldr(r4, FieldMemOperand(receiver, JSObject::kElementsOffset));
995  __ ldr(r3, FieldMemOperand(r4, HeapObject::kMapOffset));
996 
997  // Check whether the elements array is a number dictionary.
998  // r0: key
999  // r3: elements map
1000  // r4: elements
1001  __ LoadRoot(ip, Heap::kHashTableMapRootIndex);
1002  __ cmp(r3, ip);
1003  __ b(ne, &slow);
1004  __ mov(r2, Operand(r0, ASR, kSmiTagSize));
1005  __ LoadFromNumberDictionary(&slow, r4, r0, r0, r2, r3, r5);
1006  __ Ret();
1007 
1008  // Slow case, key and receiver still in r0 and r1.
1009  __ bind(&slow);
1010  __ IncrementCounter(isolate->counters()->keyed_load_generic_slow(),
1011  1, r2, r3);
1012  GenerateRuntimeGetProperty(masm);
1013 
1014  __ bind(&check_string);
1015  GenerateKeyStringCheck(masm, key, r2, r3, &index_string, &slow);
1016 
1017  GenerateKeyedLoadReceiverCheck(
1018  masm, receiver, r2, r3, Map::kHasNamedInterceptor, &slow);
1019 
1020  // If the receiver is a fast-case object, check the keyed lookup
1021  // cache. Otherwise probe the dictionary.
1022  __ ldr(r3, FieldMemOperand(r1, JSObject::kPropertiesOffset));
1023  __ ldr(r4, FieldMemOperand(r3, HeapObject::kMapOffset));
1024  __ LoadRoot(ip, Heap::kHashTableMapRootIndex);
1025  __ cmp(r4, ip);
1026  __ b(eq, &probe_dictionary);
1027 
1028  // Load the map of the receiver, compute the keyed lookup cache hash
1029  // based on 32 bits of the map pointer and the string hash.
1030  __ ldr(r2, FieldMemOperand(r1, HeapObject::kMapOffset));
1031  __ mov(r3, Operand(r2, ASR, KeyedLookupCache::kMapHashShift));
1032  __ ldr(r4, FieldMemOperand(r0, String::kHashFieldOffset));
1033  __ eor(r3, r3, Operand(r4, ASR, String::kHashShift));
1034  int mask = KeyedLookupCache::kCapacityMask & KeyedLookupCache::kHashMask;
1035  __ And(r3, r3, Operand(mask));
1036 
1037  // Load the key (consisting of map and symbol) from the cache and
1038  // check for match.
1039  Label load_in_object_property;
1040  static const int kEntriesPerBucket = KeyedLookupCache::kEntriesPerBucket;
1041  Label hit_on_nth_entry[kEntriesPerBucket];
1042  ExternalReference cache_keys =
1043  ExternalReference::keyed_lookup_cache_keys(isolate);
1044 
1045  __ mov(r4, Operand(cache_keys));
1046  __ add(r4, r4, Operand(r3, LSL, kPointerSizeLog2 + 1));
1047 
1048  for (int i = 0; i < kEntriesPerBucket - 1; i++) {
1049  Label try_next_entry;
1050  // Load map and move r4 to next entry.
1051  __ ldr(r5, MemOperand(r4, kPointerSize * 2, PostIndex));
1052  __ cmp(r2, r5);
1053  __ b(ne, &try_next_entry);
1054  __ ldr(r5, MemOperand(r4, -kPointerSize)); // Load symbol
1055  __ cmp(r0, r5);
1056  __ b(eq, &hit_on_nth_entry[i]);
1057  __ bind(&try_next_entry);
1058  }
1059 
1060  // Last entry: Load map and move r4 to symbol.
1061  __ ldr(r5, MemOperand(r4, kPointerSize, PostIndex));
1062  __ cmp(r2, r5);
1063  __ b(ne, &slow);
1064  __ ldr(r5, MemOperand(r4));
1065  __ cmp(r0, r5);
1066  __ b(ne, &slow);
1067 
1068  // Get field offset.
1069  // r0 : key
1070  // r1 : receiver
1071  // r2 : receiver's map
1072  // r3 : lookup cache index
1073  ExternalReference cache_field_offsets =
1074  ExternalReference::keyed_lookup_cache_field_offsets(isolate);
1075 
1076  // Hit on nth entry.
1077  for (int i = kEntriesPerBucket - 1; i >= 0; i--) {
1078  __ bind(&hit_on_nth_entry[i]);
1079  __ mov(r4, Operand(cache_field_offsets));
1080  if (i != 0) {
1081  __ add(r3, r3, Operand(i));
1082  }
1083  __ ldr(r5, MemOperand(r4, r3, LSL, kPointerSizeLog2));
1084  __ ldrb(r6, FieldMemOperand(r2, Map::kInObjectPropertiesOffset));
1085  __ sub(r5, r5, r6, SetCC);
1086  __ b(ge, &property_array_property);
1087  if (i != 0) {
1088  __ jmp(&load_in_object_property);
1089  }
1090  }
1091 
1092  // Load in-object property.
1093  __ bind(&load_in_object_property);
1094  __ ldrb(r6, FieldMemOperand(r2, Map::kInstanceSizeOffset));
1095  __ add(r6, r6, r5); // Index from start of object.
1096  __ sub(r1, r1, Operand(kHeapObjectTag)); // Remove the heap tag.
1097  __ ldr(r0, MemOperand(r1, r6, LSL, kPointerSizeLog2));
1098  __ IncrementCounter(isolate->counters()->keyed_load_generic_lookup_cache(),
1099  1, r2, r3);
1100  __ Ret();
1101 
1102  // Load property array property.
1103  __ bind(&property_array_property);
1104  __ ldr(r1, FieldMemOperand(r1, JSObject::kPropertiesOffset));
1105  __ add(r1, r1, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
1106  __ ldr(r0, MemOperand(r1, r5, LSL, kPointerSizeLog2));
1107  __ IncrementCounter(isolate->counters()->keyed_load_generic_lookup_cache(),
1108  1, r2, r3);
1109  __ Ret();
1110 
1111  // Do a quick inline probe of the receiver's dictionary, if it
1112  // exists.
1113  __ bind(&probe_dictionary);
1114  // r1: receiver
1115  // r0: key
1116  // r3: elements
1117  __ ldr(r2, FieldMemOperand(r1, HeapObject::kMapOffset));
1118  __ ldrb(r2, FieldMemOperand(r2, Map::kInstanceTypeOffset));
1119  GenerateGlobalInstanceTypeCheck(masm, r2, &slow);
1120  // Load the property to r0.
1121  GenerateDictionaryLoad(masm, &slow, r3, r0, r0, r2, r4);
1122  __ IncrementCounter(isolate->counters()->keyed_load_generic_symbol(),
1123  1, r2, r3);
1124  __ Ret();
1125 
1126  __ bind(&index_string);
1127  __ IndexFromHash(r3, key);
1128  // Now jump to the place where smi keys are handled.
1129  __ jmp(&index_smi);
1130 }
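The lookup cache probe in GenerateGeneric hashes the receiver's map pointer against the key's hash field and masks the result down to a bucket index; each bucket holds kEntriesPerBucket (map, symbol) pairs that are compared in turn. A hedged sketch of the hash, with the shift and mask values assumed rather than taken from heap.h:

    #include <cstdint>
    #include <cstdio>

    // Sketch of the KeyedLookupCache bucket hash, under assumed constants.
    int CacheIndex(uintptr_t map_ptr, uint32_t hash_field) {
      const int kMapHashShift = 5;  // assumed stand-in for KeyedLookupCache::kMapHashShift
      const int kHashShift = 2;     // assumed stand-in for String::kHashShift
      const int kMask = 255;        // assumed capacity-derived mask
      return static_cast<int>((map_ptr >> kMapHashShift) ^
                              (hash_field >> kHashShift)) & kMask;
    }

    int main() {
      std::printf("bucket = %d\n", CacheIndex(0x2a034560u, 0x00fe42c0u));
      return 0;
    }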
1131 
1132 
1133 void KeyedLoadIC::GenerateString(MacroAssembler* masm) {
1134  // ---------- S t a t e --------------
1135  // -- lr : return address
1136  // -- r0 : key (index)
1137  // -- r1 : receiver
1138  // -----------------------------------
1139  Label miss;
1140 
1141  Register receiver = r1;
1142  Register index = r0;
1143  Register scratch = r3;
1144  Register result = r0;
1145 
1146  StringCharAtGenerator char_at_generator(receiver,
1147  index,
1148  scratch,
1149  result,
1150  &miss, // When not a string.
1151  &miss, // When not a number.
1152  &miss, // When index out of range.
1153  STRING_INDEX_IS_ARRAY_INDEX);
1154  char_at_generator.GenerateFast(masm);
1155  __ Ret();
1156 
1157  StubRuntimeCallHelper call_helper;
1158  char_at_generator.GenerateSlow(masm, call_helper);
1159 
1160  __ bind(&miss);
1161  GenerateMiss(masm, false);
1162 }
1163 
1164 
1165 void KeyedLoadIC::GenerateIndexedInterceptor(MacroAssembler* masm) {
1166  // ---------- S t a t e --------------
1167  // -- lr : return address
1168  // -- r0 : key
1169  // -- r1 : receiver
1170  // -----------------------------------
1171  Label slow;
1172 
1173  // Check that the receiver isn't a smi.
1174  __ JumpIfSmi(r1, &slow);
1175 
1176  // Check that the key is an array index, that is Uint32.
1177  __ tst(r0, Operand(kSmiTagMask | kSmiSignMask));
1178  __ b(ne, &slow);
1179 
1180  // Get the map of the receiver.
1181  __ ldr(r2, FieldMemOperand(r1, HeapObject::kMapOffset));
1182 
1183  // Check that it has indexed interceptor and access checks
1184  // are not enabled for this object.
1185  __ ldrb(r3, FieldMemOperand(r2, Map::kBitFieldOffset));
1186  __ and_(r3, r3, Operand(kSlowCaseBitFieldMask));
1187  __ cmp(r3, Operand(1 << Map::kHasIndexedInterceptor));
1188  __ b(ne, &slow);
1189 
1190  // Everything is fine, call runtime.
1191  __ Push(r1, r0); // Receiver, key.
1192 
1193  // Perform tail call to the entry.
1194  __ TailCallExternalReference(
1195  ExternalReference(IC_Utility(kKeyedLoadPropertyWithInterceptor),
1196  masm->isolate()),
1197  2,
1198  1);
1199 
1200  __ bind(&slow);
1201  GenerateMiss(masm, false);
1202 }
1203 
1204 
1205 void KeyedStoreIC::GenerateMiss(MacroAssembler* masm, bool force_generic) {
1206  // ---------- S t a t e --------------
1207  // -- r0 : value
1208  // -- r1 : key
1209  // -- r2 : receiver
1210  // -- lr : return address
1211  // -----------------------------------
1212 
1213  // Push receiver, key and value for runtime call.
1214  __ Push(r2, r1, r0);
1215 
1216  ExternalReference ref = force_generic
1217  ? ExternalReference(IC_Utility(kKeyedStoreIC_MissForceGeneric),
1218  masm->isolate())
1219  : ExternalReference(IC_Utility(kKeyedStoreIC_Miss), masm->isolate());
1220  __ TailCallExternalReference(ref, 3, 1);
1221 }
1222 
1223 
1224 void KeyedStoreIC::GenerateSlow(MacroAssembler* masm) {
1225  // ---------- S t a t e --------------
1226  // -- r0 : value
1227  // -- r1 : key
1228  // -- r2 : receiver
1229  // -- lr : return address
1230  // -----------------------------------
1231 
1232  // Push receiver, key and value for runtime call.
1233  __ Push(r2, r1, r0);
1234 
1235  // The slow case calls into the runtime to complete the store without causing
1236  // an IC miss that would otherwise cause a transition to the generic stub.
1237  ExternalReference ref =
1238  ExternalReference(IC_Utility(kKeyedStoreIC_Slow), masm->isolate());
1239  __ TailCallExternalReference(ref, 3, 1);
1240 }
1241 
1242 
1243 void KeyedStoreIC::GenerateTransitionElementsSmiToDouble(MacroAssembler* masm) {
1244  // ---------- S t a t e --------------
1245  // -- r2 : receiver
1246  // -- r3 : target map
1247  // -- lr : return address
1248  // -----------------------------------
1249  // Must return the modified receiver in r0.
1250  if (!FLAG_trace_elements_transitions) {
1251  Label fail;
1252  ElementsTransitionGenerator::GenerateSmiToDouble(masm, &fail);
1253  __ mov(r0, r2);
1254  __ Ret();
1255  __ bind(&fail);
1256  }
1257 
1258  __ push(r2);
1259  __ TailCallRuntime(Runtime::kTransitionElementsSmiToDouble, 1, 1);
1260 }
1261 
1262 
1263 void KeyedStoreIC::GenerateTransitionElementsDoubleToObject(
1264  MacroAssembler* masm) {
1265  // ---------- S t a t e --------------
1266  // -- r2 : receiver
1267  // -- r3 : target map
1268  // -- lr : return address
1269  // -----------------------------------
1270  // Must return the modified receiver in r0.
1271  if (!FLAG_trace_elements_transitions) {
1272  Label fail;
1273  ElementsTransitionGenerator::GenerateDoubleToObject(masm, &fail);
1274  __ mov(r0, r2);
1275  __ Ret();
1276  __ bind(&fail);
1277  }
1278 
1279  __ push(r2);
1280  __ TailCallRuntime(Runtime::kTransitionElementsDoubleToObject, 1, 1);
1281 }
1282 
1283 
1284 void KeyedStoreIC::GenerateRuntimeSetProperty(MacroAssembler* masm,
1285  StrictModeFlag strict_mode) {
1286  // ---------- S t a t e --------------
1287  // -- r0 : value
1288  // -- r1 : key
1289  // -- r2 : receiver
1290  // -- lr : return address
1291  // -----------------------------------
1292 
1293  // Push receiver, key and value for runtime call.
1294  __ Push(r2, r1, r0);
1295 
1296  __ mov(r1, Operand(Smi::FromInt(NONE))); // PropertyAttributes
1297  __ mov(r0, Operand(Smi::FromInt(strict_mode))); // Strict mode.
1298  __ Push(r1, r0);
1299 
1300  __ TailCallRuntime(Runtime::kSetProperty, 5, 1);
1301 }
1302 
1303 
1304 void KeyedStoreIC::GenerateGeneric(MacroAssembler* masm,
1305  StrictModeFlag strict_mode) {
1306  // ---------- S t a t e --------------
1307  // -- r0 : value
1308  // -- r1 : key
1309  // -- r2 : receiver
1310  // -- lr : return address
1311  // -----------------------------------
1312  Label slow, array, extra, check_if_double_array;
1313  Label fast_object_with_map_check, fast_object_without_map_check;
1314  Label fast_double_with_map_check, fast_double_without_map_check;
1315  Label transition_smi_elements, finish_object_store, non_double_value;
1316  Label transition_double_elements;
1317 
1318  // Register usage.
1319  Register value = r0;
1320  Register key = r1;
1321  Register receiver = r2;
1322  Register receiver_map = r3;
1323  Register elements_map = r6;
1324  Register elements = r7; // Elements array of the receiver.
1325  // r4 and r5 are used as general scratch registers.
1326 
1327  // Check that the key is a smi.
1328  __ JumpIfNotSmi(key, &slow);
1329  // Check that the object isn't a smi.
1330  __ JumpIfSmi(receiver, &slow);
1331  // Get the map of the object.
1332  __ ldr(receiver_map, FieldMemOperand(receiver, HeapObject::kMapOffset));
1333  // Check that the receiver does not require access checks. We need
1334  // to do this because this generic stub does not perform map checks.
1335  __ ldrb(ip, FieldMemOperand(receiver_map, Map::kBitFieldOffset));
1336  __ tst(ip, Operand(1 << Map::kIsAccessCheckNeeded));
1337  __ b(ne, &slow);
1338  // Check if the object is a JS array or not.
1339  __ ldrb(r4, FieldMemOperand(receiver_map, Map::kInstanceTypeOffset));
1340  __ cmp(r4, Operand(JS_ARRAY_TYPE));
1341  __ b(eq, &array);
1342  // Check that the object is some kind of JSObject.
1343  __ cmp(r4, Operand(FIRST_JS_OBJECT_TYPE));
1344  __ b(lt, &slow);
1345 
1346  // Object case: Check key against length in the elements array.
1347  __ ldr(elements, FieldMemOperand(receiver, JSObject::kElementsOffset));
1348  // Check array bounds. Both the key and the length of FixedArray are smis.
1349  __ ldr(ip, FieldMemOperand(elements, FixedArray::kLengthOffset));
1350  __ cmp(key, Operand(ip));
1351  __ b(lo, &fast_object_with_map_check);
1352 
1353  // Slow case, handle jump to runtime.
1354  __ bind(&slow);
1355  // Entry registers are intact.
1356  // r0: value.
1357  // r1: key.
1358  // r2: receiver.
1359  GenerateRuntimeSetProperty(masm, strict_mode);
1360 
1361  // Extra capacity case: Check if there is extra capacity to
1362  // perform the store and update the length. Used for adding one
1363  // element to the array by writing to array[array.length].
1364  __ bind(&extra);
1365  // Condition code from comparing key and array length is still available.
1366  __ b(ne, &slow); // Only support writing to array[array.length].
1367  // Check for room in the elements backing store.
1368  // Both the key and the length of FixedArray are smis.
1369  __ ldr(ip, FieldMemOperand(elements, FixedArray::kLengthOffset));
1370  __ cmp(key, Operand(ip));
1371  __ b(hs, &slow);
1372  __ ldr(elements_map, FieldMemOperand(elements, HeapObject::kMapOffset));
1373  __ cmp(elements_map,
1374  Operand(masm->isolate()->factory()->fixed_array_map()));
1375  __ b(ne, &check_if_double_array);
1376  // Calculate key + 1 as smi.
1377  STATIC_ASSERT(kSmiTag == 0);
1378  __ add(r4, key, Operand(Smi::FromInt(1)));
1379  __ str(r4, FieldMemOperand(receiver, JSArray::kLengthOffset));
1380  __ b(&fast_object_without_map_check);
1381 
1382  __ bind(&check_if_double_array);
1383  __ cmp(elements_map,
1384  Operand(masm->isolate()->factory()->fixed_double_array_map()));
1385  __ b(ne, &slow);
1386  // Add 1 to key, and go to common element store code for doubles.
1387  STATIC_ASSERT(kSmiTag == 0);
1388  __ add(r4, key, Operand(Smi::FromInt(1)));
1389  __ str(r4, FieldMemOperand(receiver, JSArray::kLengthOffset));
1390  __ jmp(&fast_double_without_map_check);
1391 
1392  // Array case: Get the length and the elements array from the JS
1393  // array. Check that the array is in fast mode (and writable); if it
1394  // is the length is always a smi.
1395  __ bind(&array);
1396  __ ldr(elements, FieldMemOperand(receiver, JSObject::kElementsOffset));
1397 
1398  // Check the key against the length in the array.
1399  __ ldr(ip, FieldMemOperand(receiver, JSArray::kLengthOffset));
1400  __ cmp(key, Operand(ip));
1401  __ b(hs, &extra);
1402  // Fall through to fast case.
1403 
1404  __ bind(&fast_object_with_map_check);
1405  Register scratch_value = r4;
1406  Register address = r5;
1407  __ ldr(elements_map, FieldMemOperand(elements, HeapObject::kMapOffset));
1408  __ cmp(elements_map,
1409  Operand(masm->isolate()->factory()->fixed_array_map()));
1410  __ b(ne, &fast_double_with_map_check);
1411  __ bind(&fast_object_without_map_check);
1412  // Smi stores don't require further checks.
1413  Label non_smi_value;
1414  __ JumpIfNotSmi(value, &non_smi_value);
1415  // It's irrelevant whether array is smi-only or not when writing a smi.
1416  __ add(address, elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
1417  __ add(address, address, Operand(key, LSL, kPointerSizeLog2 - kSmiTagSize));
1418  __ str(value, MemOperand(address));
1419  __ Ret();
1420 
1421  __ bind(&non_smi_value);
1422  // Escape to elements kind transition case.
1423  __ CheckFastObjectElements(receiver_map, scratch_value,
1424  &transition_smi_elements);
1425  // Fast elements array, store the value to the elements backing store.
1426  __ bind(&finish_object_store);
1427  __ add(address, elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
1428  __ add(address, address, Operand(key, LSL, kPointerSizeLog2 - kSmiTagSize));
1429  __ str(value, MemOperand(address));
1430  // Update write barrier for the elements array address.
1431  __ mov(scratch_value, value); // Preserve the value which is returned.
1432  __ RecordWrite(elements,
1433  address,
1434  scratch_value,
1435  kLRHasNotBeenSaved,
1436  kDontSaveFPRegs,
1437  EMIT_REMEMBERED_SET,
1438  OMIT_SMI_CHECK);
1439  __ Ret();
1440 
1441  __ bind(&fast_double_with_map_check);
1442  // Check for fast double array case. If this fails, call through to the
1443  // runtime.
1444  __ cmp(elements_map,
1445  Operand(masm->isolate()->factory()->fixed_double_array_map()));
1446  __ b(ne, &slow);
1447  __ bind(&fast_double_without_map_check);
1448  __ StoreNumberToDoubleElements(value,
1449  key,
1450  receiver,
1451  elements,
1452  r3,
1453  r4,
1454  r5,
1455  r6,
1456  &transition_double_elements);
1457  __ Ret();
1458 
1459  __ bind(&transition_smi_elements);
1460  // Transition the array appropriately depending on the value type.
1461  __ ldr(r4, FieldMemOperand(value, HeapObject::kMapOffset));
1462  __ CompareRoot(r4, Heap::kHeapNumberMapRootIndex);
1463  __ b(ne, &non_double_value);
1464 
1465  // Value is a double. Transition FAST_SMI_ELEMENTS ->
1466  // FAST_DOUBLE_ELEMENTS and complete the store.
1467  __ LoadTransitionedArrayMapConditional(FAST_SMI_ELEMENTS,
1468  FAST_DOUBLE_ELEMENTS,
1469  receiver_map,
1470  r4,
1471  &slow);
1472  ASSERT(receiver_map.is(r3)); // Transition code expects map in r3
1473  ElementsTransitionGenerator::GenerateSmiToDouble(masm, &slow);
1474  __ ldr(elements, FieldMemOperand(receiver, JSObject::kElementsOffset));
1475  __ jmp(&fast_double_without_map_check);
1476 
1477  __ bind(&non_double_value);
1478  // Value is not a double, FAST_SMI_ELEMENTS -> FAST_ELEMENTS
1479  __ LoadTransitionedArrayMapConditional(FAST_SMI_ELEMENTS,
1480  FAST_ELEMENTS,
1481  receiver_map,
1482  r4,
1483  &slow);
1484  ASSERT(receiver_map.is(r3)); // Transition code expects map in r3
1485  ElementsTransitionGenerator::GenerateMapChangeElementsTransition(masm);
1486  __ ldr(elements, FieldMemOperand(receiver, JSObject::kElementsOffset));
1487  __ jmp(&finish_object_store);
1488 
1489  __ bind(&transition_double_elements);
1490  // Elements are FAST_DOUBLE_ELEMENTS, but value is an Object that's not a
1491  // HeapNumber. Make sure that the receiver is an Array with FAST_ELEMENTS and
1492  // transition the array from FAST_DOUBLE_ELEMENTS to FAST_ELEMENTS.
1493  __ LoadTransitionedArrayMapConditional(FAST_DOUBLE_ELEMENTS,
1494  FAST_ELEMENTS,
1495  receiver_map,
1496  r4,
1497  &slow);
1498  ASSERT(receiver_map.is(r3)); // Transition code expects map in r3
1499  ElementsTransitionGenerator::GenerateDoubleToObject(masm, &slow);
1500  __ ldr(elements, FieldMemOperand(receiver, JSObject::kElementsOffset));
1501  __ jmp(&finish_object_store);
1502 }
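GenerateGeneric's transition handling implements a one-way lattice: FAST_SMI_ELEMENTS arrays become FAST_DOUBLE_ELEMENTS when a heap number is stored and FAST_ELEMENTS when any other object is stored, and FAST_DOUBLE_ELEMENTS becomes FAST_ELEMENTS for non-number objects. A compact sketch of that decision logic; the enum names mirror the ones used above, while the ValueKind classification is an assumed stand-in for the map checks:

    #include <cstdio>

    enum ElementsKind { FAST_SMI_ELEMENTS, FAST_DOUBLE_ELEMENTS, FAST_ELEMENTS };
    enum ValueKind { SMI, HEAP_NUMBER, OTHER_OBJECT };  // assumed classification

    // Mirrors the transition choices in GenerateGeneric above.
    ElementsKind TargetKind(ElementsKind current, ValueKind value) {
      if (current == FAST_SMI_ELEMENTS && value == HEAP_NUMBER) return FAST_DOUBLE_ELEMENTS;
      if (current == FAST_SMI_ELEMENTS && value == OTHER_OBJECT) return FAST_ELEMENTS;
      if (current == FAST_DOUBLE_ELEMENTS && value == OTHER_OBJECT) return FAST_ELEMENTS;
      return current;  // smis store into any fast kind without a transition
    }

    int main() {
      std::printf("%d\n", TargetKind(FAST_SMI_ELEMENTS, HEAP_NUMBER));  // prints 1
      return 0;
    }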
1503 
1504 
1505 void StoreIC::GenerateMegamorphic(MacroAssembler* masm,
1506  StrictModeFlag strict_mode) {
1507  // ----------- S t a t e -------------
1508  // -- r0 : value
1509  // -- r1 : receiver
1510  // -- r2 : name
1511  // -- lr : return address
1512  // -----------------------------------
1513 
1514  // Get the receiver from the stack and probe the stub cache.
1515  Code::Flags flags =
1516  Code::ComputeFlags(Code::STORE_IC, MONOMORPHIC, strict_mode);
1517 
1518  Isolate::Current()->stub_cache()->GenerateProbe(
1519  masm, flags, r1, r2, r3, r4, r5, r6);
1520 
1521  // Cache miss: Jump to runtime.
1522  GenerateMiss(masm);
1523 }
1524 
1525 
1526 void StoreIC::GenerateMiss(MacroAssembler* masm) {
1527  // ----------- S t a t e -------------
1528  // -- r0 : value
1529  // -- r1 : receiver
1530  // -- r2 : name
1531  // -- lr : return address
1532  // -----------------------------------
1533 
1534  __ Push(r1, r2, r0);
1535 
1536  // Perform tail call to the entry.
1537  ExternalReference ref =
1538  ExternalReference(IC_Utility(kStoreIC_Miss), masm->isolate());
1539  __ TailCallExternalReference(ref, 3, 1);
1540 }
1541 
1542 
1543 void StoreIC::GenerateArrayLength(MacroAssembler* masm) {
1544  // ----------- S t a t e -------------
1545  // -- r0 : value
1546  // -- r1 : receiver
1547  // -- r2 : name
1548  // -- lr : return address
1549  // -----------------------------------
1550  //
1551  // This accepts as a receiver anything JSArray::SetElementsLength accepts
1552  // (currently anything except for external arrays which means anything with
1553  // elements of FixedArray type). Value must be a number, but only smis are
1554  // accepted as the most common case.
1555 
1556  Label miss;
1557 
1558  Register receiver = r1;
1559  Register value = r0;
1560  Register scratch = r3;
1561 
1562  // Check that the receiver isn't a smi.
1563  __ JumpIfSmi(receiver, &miss);
1564 
1565  // Check that the object is a JS array.
1566  __ CompareObjectType(receiver, scratch, scratch, JS_ARRAY_TYPE);
1567  __ b(ne, &miss);
1568 
1569  // Check that elements are FixedArray.
1570  // We rely on StoreIC_ArrayLength below to deal with all types of
1571  // fast elements (including COW).
1572  __ ldr(scratch, FieldMemOperand(receiver, JSArray::kElementsOffset));
1573  __ CompareObjectType(scratch, scratch, scratch, FIXED_ARRAY_TYPE);
1574  __ b(ne, &miss);
1575 
1576  // Check that the array has fast properties, otherwise the length
1577  // property might have been redefined.
1578  __ ldr(scratch, FieldMemOperand(receiver, JSArray::kPropertiesOffset));
1579  __ ldr(scratch, FieldMemOperand(scratch, FixedArray::kMapOffset));
1580  __ CompareRoot(scratch, Heap::kHashTableMapRootIndex);
1581  __ b(eq, &miss);
1582 
1583  // Check that value is a smi.
1584  __ JumpIfNotSmi(value, &miss);
1585 
1586  // Prepare tail call to StoreIC_ArrayLength.
1587  __ Push(receiver, value);
1588 
1589  ExternalReference ref =
1590  ExternalReference(IC_Utility(kStoreIC_ArrayLength), masm->isolate());
1591  __ TailCallExternalReference(ref, 2, 1);
1592 
1593  __ bind(&miss);
1594 
1595  GenerateMiss(masm);
1596 }
1597 
1598 
1599 void StoreIC::GenerateNormal(MacroAssembler* masm) {
1600  // ----------- S t a t e -------------
1601  // -- r0 : value
1602  // -- r1 : receiver
1603  // -- r2 : name
1604  // -- lr : return address
1605  // -----------------------------------
1606  Label miss;
1607 
1608  GenerateStringDictionaryReceiverCheck(masm, r1, r3, r4, r5, &miss);
1609 
1610  GenerateDictionaryStore(masm, &miss, r3, r2, r0, r4, r5);
1611  Counters* counters = masm->isolate()->counters();
1612  __ IncrementCounter(counters->store_normal_hit(),
1613  1, r4, r5);
1614  __ Ret();
1615 
1616  __ bind(&miss);
1617  __ IncrementCounter(counters->store_normal_miss(), 1, r4, r5);
1618  GenerateMiss(masm);
1619 }
1620 
1621 
1622 void StoreIC::GenerateGlobalProxy(MacroAssembler* masm,
1623  StrictModeFlag strict_mode) {
1624  // ----------- S t a t e -------------
1625  // -- r0 : value
1626  // -- r1 : receiver
1627  // -- r2 : name
1628  // -- lr : return address
1629  // -----------------------------------
1630 
1631  __ Push(r1, r2, r0);
1632 
1633  __ mov(r1, Operand(Smi::FromInt(NONE))); // PropertyAttributes
1634  __ mov(r0, Operand(Smi::FromInt(strict_mode)));
1635  __ Push(r1, r0);
1636 
1637  // Do tail-call to runtime routine.
1638  __ TailCallRuntime(Runtime::kSetProperty, 5, 1);
1639 }
1640 
1641 
1642 #undef __
1643 
1644 
1645 Condition CompareIC::ComputeCondition(Token::Value op) {
1646  switch (op) {
1647  case Token::EQ_STRICT:
1648  case Token::EQ:
1649  return eq;
1650  case Token::LT:
1651  return lt;
1652  case Token::GT:
1653  return gt;
1654  case Token::LTE:
1655  return le;
1656  case Token::GTE:
1657  return ge;
1658  default:
1659  UNREACHABLE();
1660  return kNoCondition;
1661  }
1662 }
1663 
1664 
1665 void CompareIC::UpdateCaches(Handle<Object> x, Handle<Object> y) {
1666  HandleScope scope;
1667  Handle<Code> rewritten;
1668  State previous_state = GetState();
1669  State state = TargetState(previous_state, false, x, y);
1670  if (state == GENERIC) {
1671  CompareStub stub(GetCondition(), strict(), NO_COMPARE_FLAGS, r1, r0);
1672  rewritten = stub.GetCode();
1673  } else {
1674  ICCompareStub stub(op_, state);
1675  if (state == KNOWN_OBJECTS) {
1676  stub.set_known_map(Handle<Map>(Handle<JSObject>::cast(x)->map()));
1677  }
1678  rewritten = stub.GetCode();
1679  }
1680  set_target(*rewritten);
1681 
1682 #ifdef DEBUG
1683  if (FLAG_trace_ic) {
1684  PrintF("[CompareIC (%s->%s)#%s]\n",
1685  GetStateName(previous_state),
1686  GetStateName(state),
1687  Token::Name(op_));
1688  }
1689 #endif
1690 
1691  // Activate inlined smi code.
1692  if (previous_state == UNINITIALIZED) {
1693  PatchInlinedSmiCode(address(), ENABLE_INLINED_SMI_CHECK);
1694  }
1695 }
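// [Editor's note] UpdateCaches walks the CompareIC through a one-way state
// progression toward GENERIC; TargetState (in ic.cc) chooses the next state
// from the operand types, and KNOWN_OBJECTS additionally pins the receiver
// map on the stub. A hedged sketch of the idea with a reduced state set
// (the real enum in ic.h has more states):

enum class CompareStateSketch { UNINITIALIZED, SMIS, KNOWN_OBJECTS, GENERIC };

static CompareStateSketch NextState(CompareStateSketch prev,
                                    bool both_smis,
                                    bool both_same_map_objects) {
  if (prev == CompareStateSketch::UNINITIALIZED) {
    if (both_smis) return CompareStateSketch::SMIS;
    if (both_same_map_objects) return CompareStateSketch::KNOWN_OBJECTS;
  }
  return CompareStateSketch::GENERIC;  // simplified: never moves backwards
}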
1696 
1697 
1698 void PatchInlinedSmiCode(Address address, InlinedSmiCheck check) {
1699  Address cmp_instruction_address =
1700  address + Assembler::kCallTargetAddressOffset;
1701 
1702  // If the instruction following the call is not a cmp rx, #yyy, nothing
1703  // was inlined.
1704  Instr instr = Assembler::instr_at(cmp_instruction_address);
1705  if (!Assembler::IsCmpImmediate(instr)) {
1706  return;
1707  }
1708 
1709  // The delta to the start of the map check instruction and the
1710  // condition code used at the patched jump.
1711  int delta = Assembler::GetCmpImmediateRawImmediate(instr);
1712  delta +=
1713  Assembler::GetCmpImmediateRegister(instr).code() * kOff12Mask;
1714  // If the delta is 0 the instruction is cmp r0, #0 which also signals that
1715  // nothing was inlined.
1716  if (delta == 0) {
1717  return;
1718  }
1719 
1720 #ifdef DEBUG
1721  if (FLAG_trace_ic) {
1722  PrintF("[ patching ic at %p, cmp=%p, delta=%d\n",
1723  address, cmp_instruction_address, delta);
1724  }
1725 #endif
1726 
1727  Address patch_address =
1728  cmp_instruction_address - delta * Instruction::kInstrSize;
1729  Instr instr_at_patch = Assembler::instr_at(patch_address);
1730  Instr branch_instr =
1731  Assembler::instr_at(patch_address + Instruction::kInstrSize);
1732  // This is patching a conditional "jump if not smi/jump if smi" site.
1733  // Enabling by changing from
1734  // cmp rx, rx
1735  // b eq/ne, <target>
1736  // to
1737  // tst rx, #kSmiTagMask
1738  // b ne/eq, <target>
1739  // and vice-versa to be disabled again.
1740  CodePatcher patcher(patch_address, 2);
1741  Register reg = Assembler::GetRn(instr_at_patch);
1742  if (check == ENABLE_INLINED_SMI_CHECK) {
1743  ASSERT(Assembler::IsCmpRegister(instr_at_patch));
1744  ASSERT_EQ(Assembler::GetRn(instr_at_patch).code(),
1745  Assembler::GetRm(instr_at_patch).code());
1746  patcher.masm()->tst(reg, Operand(kSmiTagMask));
1747  } else {
1748  ASSERT(check == DISABLE_INLINED_SMI_CHECK);
1749  ASSERT(Assembler::IsTstImmediate(instr_at_patch));
1750  patcher.masm()->cmp(reg, reg);
1751  }
1752  ASSERT(Assembler::IsBranch(branch_instr));
1753  if (Assembler::GetCondition(branch_instr) == eq) {
1754  patcher.EmitCondition(ne);
1755  } else {
1756  ASSERT(Assembler::GetCondition(branch_instr) == ne);
1757  patcher.EmitCondition(eq);
1758  }
1759 }
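// [Editor's note] A standalone sketch of the two-instruction toggle this
// function performs at the inlined smi-check site. The cmp's immediate (and
// register) fields encode the delta back to the map-check site; the patch
// itself swaps the test instruction and inverts the branch condition. Types
// here are simplified stand-ins, not V8's Assembler/CodePatcher API.

enum class OpSketch { kCmpRegReg, kTstImm };  // cmp rx, rx  vs  tst rx, #kSmiTagMask
enum class CondSketch { kEq, kNe };

struct PatchSiteSketch {
  OpSketch op;
  CondSketch branch;
};

// ENABLE: cmp rx,rx / b eq|ne  ->  tst rx,#kSmiTagMask / b ne|eq;
// DISABLE applies the same swap in reverse.
static void ToggleInlinedSmiCheck(PatchSiteSketch* site) {
  site->op = (site->op == OpSketch::kCmpRegReg) ? OpSketch::kTstImm
                                                : OpSketch::kCmpRegReg;
  site->branch = (site->branch == CondSketch::kEq) ? CondSketch::kNe
                                                   : CondSketch::kEq;
}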
1760 
1761 
1762 } } // namespace v8::internal
1763 
1764 #endif // V8_TARGET_ARCH_ARM