v8  3.14.5(node0.10.28)
V8 is Google's open source JavaScript engine
 All Data Structures Namespaces Files Functions Variables Typedefs Enumerations Enumerator Friends Macros Pages
ic-arm.cc
Go to the documentation of this file.
1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are
4 // met:
5 //
6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided
11 // with the distribution.
12 // * Neither the name of Google Inc. nor the names of its
13 // contributors may be used to endorse or promote products derived
14 // from this software without specific prior written permission.
15 //
16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27 
28 #include "v8.h"
29 
30 #if defined(V8_TARGET_ARCH_ARM)
31 
32 #include "assembler-arm.h"
33 #include "code-stubs.h"
34 #include "codegen.h"
35 #include "disasm.h"
36 #include "ic-inl.h"
37 #include "runtime.h"
38 #include "stub-cache.h"
39 
40 namespace v8 {
41 namespace internal {
42 
43 
44 // ----------------------------------------------------------------------------
45 // Static IC stub generators.
46 //
47 
48 #define __ ACCESS_MASM(masm)
49 
50 
51 static void GenerateGlobalInstanceTypeCheck(MacroAssembler* masm,
52  Register type,
53  Label* global_object) {
54  // Register usage:
55  // type: holds the receiver instance type on entry.
56  __ cmp(type, Operand(JS_GLOBAL_OBJECT_TYPE));
57  __ b(eq, global_object);
58  __ cmp(type, Operand(JS_BUILTINS_OBJECT_TYPE));
59  __ b(eq, global_object);
60  __ cmp(type, Operand(JS_GLOBAL_PROXY_TYPE));
61  __ b(eq, global_object);
62 }
63 
64 
65 // Generated code falls through if the receiver is a regular non-global
66 // JS object with slow properties and no interceptors.
67 static void GenerateStringDictionaryReceiverCheck(MacroAssembler* masm,
68  Register receiver,
69  Register elements,
70  Register t0,
71  Register t1,
72  Label* miss) {
73  // Register usage:
74  // receiver: holds the receiver on entry and is unchanged.
75  // elements: holds the property dictionary on fall through.
76  // Scratch registers:
77  // t0: used to holds the receiver map.
78  // t1: used to holds the receiver instance type, receiver bit mask and
79  // elements map.
80 
81  // Check that the receiver isn't a smi.
82  __ JumpIfSmi(receiver, miss);
83 
84  // Check that the receiver is a valid JS object.
85  __ CompareObjectType(receiver, t0, t1, FIRST_SPEC_OBJECT_TYPE);
86  __ b(lt, miss);
87 
88  // If this assert fails, we have to check upper bound too.
90 
91  GenerateGlobalInstanceTypeCheck(masm, t1, miss);
92 
93  // Check that the global object does not require access checks.
95  __ tst(t1, Operand((1 << Map::kIsAccessCheckNeeded) |
97  __ b(ne, miss);
98 
99  __ ldr(elements, FieldMemOperand(receiver, JSObject::kPropertiesOffset));
100  __ ldr(t1, FieldMemOperand(elements, HeapObject::kMapOffset));
101  __ LoadRoot(ip, Heap::kHashTableMapRootIndex);
102  __ cmp(t1, ip);
103  __ b(ne, miss);
104 }
105 
106 
107 // Helper function used from LoadIC/CallIC GenerateNormal.
108 //
109 // elements: Property dictionary. It is not clobbered if a jump to the miss
110 // label is done.
111 // name: Property name. It is not clobbered if a jump to the miss label is
112 // done
113 // result: Register for the result. It is only updated if a jump to the miss
114 // label is not done. Can be the same as elements or name clobbering
115 // one of these in the case of not jumping to the miss label.
116 // The two scratch registers need to be different from elements, name and
117 // result.
118 // The generated code assumes that the receiver has slow properties,
119 // is not a global object and does not have interceptors.
120 static void GenerateDictionaryLoad(MacroAssembler* masm,
121  Label* miss,
122  Register elements,
123  Register name,
124  Register result,
125  Register scratch1,
126  Register scratch2) {
127  // Main use of the scratch registers.
128  // scratch1: Used as temporary and to hold the capacity of the property
129  // dictionary.
130  // scratch2: Used as temporary.
131  Label done;
132 
133  // Probe the dictionary.
135  miss,
136  &done,
137  elements,
138  name,
139  scratch1,
140  scratch2);
141 
142  // If probing finds an entry check that the value is a normal
143  // property.
144  __ bind(&done); // scratch2 == elements + 4 * index
145  const int kElementsStartOffset = StringDictionary::kHeaderSize +
147  const int kDetailsOffset = kElementsStartOffset + 2 * kPointerSize;
148  __ ldr(scratch1, FieldMemOperand(scratch2, kDetailsOffset));
149  __ tst(scratch1, Operand(PropertyDetails::TypeField::kMask << kSmiTagSize));
150  __ b(ne, miss);
151 
152  // Get the value at the masked, scaled index and return.
153  __ ldr(result,
154  FieldMemOperand(scratch2, kElementsStartOffset + 1 * kPointerSize));
155 }
156 
157 
158 // Helper function used from StoreIC::GenerateNormal.
159 //
160 // elements: Property dictionary. It is not clobbered if a jump to the miss
161 // label is done.
162 // name: Property name. It is not clobbered if a jump to the miss label is
163 // done
164 // value: The value to store.
165 // The two scratch registers need to be different from elements, name and
166 // result.
167 // The generated code assumes that the receiver has slow properties,
168 // is not a global object and does not have interceptors.
169 static void GenerateDictionaryStore(MacroAssembler* masm,
170  Label* miss,
171  Register elements,
172  Register name,
173  Register value,
174  Register scratch1,
175  Register scratch2) {
176  // Main use of the scratch registers.
177  // scratch1: Used as temporary and to hold the capacity of the property
178  // dictionary.
179  // scratch2: Used as temporary.
180  Label done;
181 
182  // Probe the dictionary.
184  miss,
185  &done,
186  elements,
187  name,
188  scratch1,
189  scratch2);
190 
191  // If probing finds an entry in the dictionary check that the value
192  // is a normal property that is not read only.
193  __ bind(&done); // scratch2 == elements + 4 * index
194  const int kElementsStartOffset = StringDictionary::kHeaderSize +
196  const int kDetailsOffset = kElementsStartOffset + 2 * kPointerSize;
197  const int kTypeAndReadOnlyMask =
198  (PropertyDetails::TypeField::kMask |
199  PropertyDetails::AttributesField::encode(READ_ONLY)) << kSmiTagSize;
200  __ ldr(scratch1, FieldMemOperand(scratch2, kDetailsOffset));
201  __ tst(scratch1, Operand(kTypeAndReadOnlyMask));
202  __ b(ne, miss);
203 
204  // Store the value at the masked, scaled index and return.
205  const int kValueOffset = kElementsStartOffset + kPointerSize;
206  __ add(scratch2, scratch2, Operand(kValueOffset - kHeapObjectTag));
207  __ str(value, MemOperand(scratch2));
208 
209  // Update the write barrier. Make sure not to clobber the value.
210  __ mov(scratch1, value);
211  __ RecordWrite(
212  elements, scratch2, scratch1, kLRHasNotBeenSaved, kDontSaveFPRegs);
213 }
214 
215 
216 void LoadIC::GenerateArrayLength(MacroAssembler* masm) {
217  // ----------- S t a t e -------------
218  // -- r2 : name
219  // -- lr : return address
220  // -- r0 : receiver
221  // -- sp[0] : receiver
222  // -----------------------------------
223  Label miss;
224 
225  StubCompiler::GenerateLoadArrayLength(masm, r0, r3, &miss);
226  __ bind(&miss);
227  StubCompiler::GenerateLoadMiss(masm, Code::LOAD_IC);
228 }
229 
230 
231 void LoadIC::GenerateStringLength(MacroAssembler* masm, bool support_wrappers) {
232  // ----------- S t a t e -------------
233  // -- r2 : name
234  // -- lr : return address
235  // -- r0 : receiver
236  // -- sp[0] : receiver
237  // -----------------------------------
238  Label miss;
239 
240  StubCompiler::GenerateLoadStringLength(masm, r0, r1, r3, &miss,
241  support_wrappers);
242  // Cache miss: Jump to runtime.
243  __ bind(&miss);
244  StubCompiler::GenerateLoadMiss(masm, Code::LOAD_IC);
245 }
246 
247 
248 void LoadIC::GenerateFunctionPrototype(MacroAssembler* masm) {
249  // ----------- S t a t e -------------
250  // -- r2 : name
251  // -- lr : return address
252  // -- r0 : receiver
253  // -- sp[0] : receiver
254  // -----------------------------------
255  Label miss;
256 
257  StubCompiler::GenerateLoadFunctionPrototype(masm, r0, r1, r3, &miss);
258  __ bind(&miss);
259  StubCompiler::GenerateLoadMiss(masm, Code::LOAD_IC);
260 }
261 
262 
263 // Checks the receiver for special cases (value type, slow case bits).
264 // Falls through for regular JS object.
265 static void GenerateKeyedLoadReceiverCheck(MacroAssembler* masm,
266  Register receiver,
267  Register map,
268  Register scratch,
269  int interceptor_bit,
270  Label* slow) {
271  // Check that the object isn't a smi.
272  __ JumpIfSmi(receiver, slow);
273  // Get the map of the receiver.
274  __ ldr(map, FieldMemOperand(receiver, HeapObject::kMapOffset));
275  // Check bit field.
276  __ ldrb(scratch, FieldMemOperand(map, Map::kBitFieldOffset));
277  __ tst(scratch,
278  Operand((1 << Map::kIsAccessCheckNeeded) | (1 << interceptor_bit)));
279  __ b(ne, slow);
280  // Check that the object is some kind of JS object EXCEPT JS Value type.
281  // In the case that the object is a value-wrapper object,
282  // we enter the runtime system to make sure that indexing into string
283  // objects work as intended.
285  __ ldrb(scratch, FieldMemOperand(map, Map::kInstanceTypeOffset));
286  __ cmp(scratch, Operand(JS_OBJECT_TYPE));
287  __ b(lt, slow);
288 }
289 
290 
291 // Loads an indexed element from a fast case array.
292 // If not_fast_array is NULL, doesn't perform the elements map check.
293 static void GenerateFastArrayLoad(MacroAssembler* masm,
294  Register receiver,
295  Register key,
296  Register elements,
297  Register scratch1,
298  Register scratch2,
299  Register result,
300  Label* not_fast_array,
301  Label* out_of_range) {
302  // Register use:
303  //
304  // receiver - holds the receiver on entry.
305  // Unchanged unless 'result' is the same register.
306  //
307  // key - holds the smi key on entry.
308  // Unchanged unless 'result' is the same register.
309  //
310  // elements - holds the elements of the receiver on exit.
311  //
312  // result - holds the result on exit if the load succeeded.
313  // Allowed to be the the same as 'receiver' or 'key'.
314  // Unchanged on bailout so 'receiver' and 'key' can be safely
315  // used by further computation.
316  //
317  // Scratch registers:
318  //
319  // scratch1 - used to hold elements map and elements length.
320  // Holds the elements map if not_fast_array branch is taken.
321  //
322  // scratch2 - used to hold the loaded value.
323 
324  __ ldr(elements, FieldMemOperand(receiver, JSObject::kElementsOffset));
325  if (not_fast_array != NULL) {
326  // Check that the object is in fast mode and writable.
327  __ ldr(scratch1, FieldMemOperand(elements, HeapObject::kMapOffset));
328  __ LoadRoot(ip, Heap::kFixedArrayMapRootIndex);
329  __ cmp(scratch1, ip);
330  __ b(ne, not_fast_array);
331  } else {
332  __ AssertFastElements(elements);
333  }
334  // Check that the key (index) is within bounds.
335  __ ldr(scratch1, FieldMemOperand(elements, FixedArray::kLengthOffset));
336  __ cmp(key, Operand(scratch1));
337  __ b(hs, out_of_range);
338  // Fast case: Do the load.
339  __ add(scratch1, elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
340  // The key is a smi.
342  __ ldr(scratch2,
343  MemOperand(scratch1, key, LSL, kPointerSizeLog2 - kSmiTagSize));
344  __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
345  __ cmp(scratch2, ip);
346  // In case the loaded value is the_hole we have to consult GetProperty
347  // to ensure the prototype chain is searched.
348  __ b(eq, out_of_range);
349  __ mov(result, scratch2);
350 }
351 
352 
353 // Checks whether a key is an array index string or a symbol string.
354 // Falls through if a key is a symbol.
355 static void GenerateKeyStringCheck(MacroAssembler* masm,
356  Register key,
357  Register map,
358  Register hash,
359  Label* index_string,
360  Label* not_symbol) {
361  // The key is not a smi.
362  // Is it a string?
363  __ CompareObjectType(key, map, hash, FIRST_NONSTRING_TYPE);
364  __ b(ge, not_symbol);
365 
366  // Is the string an array index, with cached numeric value?
368  __ tst(hash, Operand(String::kContainsCachedArrayIndexMask));
369  __ b(eq, index_string);
370 
371  // Is the string a symbol?
372  // map: key map
373  __ ldrb(hash, FieldMemOperand(map, Map::kInstanceTypeOffset));
375  __ tst(hash, Operand(kIsSymbolMask));
376  __ b(eq, not_symbol);
377 }
378 
379 
380 // Defined in ic.cc.
381 Object* CallIC_Miss(Arguments args);
382 
383 // The generated code does not accept smi keys.
384 // The generated code falls through if both probes miss.
385 void CallICBase::GenerateMonomorphicCacheProbe(MacroAssembler* masm,
386  int argc,
387  Code::Kind kind,
388  Code::ExtraICState extra_state) {
389  // ----------- S t a t e -------------
390  // -- r1 : receiver
391  // -- r2 : name
392  // -----------------------------------
393  Label number, non_number, non_string, boolean, probe, miss;
394 
395  // Probe the stub cache.
397  MONOMORPHIC,
398  extra_state,
399  Code::NORMAL,
400  argc);
401  Isolate::Current()->stub_cache()->GenerateProbe(
402  masm, flags, r1, r2, r3, r4, r5, r6);
403 
404  // If the stub cache probing failed, the receiver might be a value.
405  // For value objects, we use the map of the prototype objects for
406  // the corresponding JSValue for the cache and that is what we need
407  // to probe.
408  //
409  // Check for number.
410  __ JumpIfSmi(r1, &number);
411  __ CompareObjectType(r1, r3, r3, HEAP_NUMBER_TYPE);
412  __ b(ne, &non_number);
413  __ bind(&number);
414  StubCompiler::GenerateLoadGlobalFunctionPrototype(
416  __ b(&probe);
417 
418  // Check for string.
419  __ bind(&non_number);
420  __ cmp(r3, Operand(FIRST_NONSTRING_TYPE));
421  __ b(hs, &non_string);
422  StubCompiler::GenerateLoadGlobalFunctionPrototype(
424  __ b(&probe);
425 
426  // Check for boolean.
427  __ bind(&non_string);
428  __ LoadRoot(ip, Heap::kTrueValueRootIndex);
429  __ cmp(r1, ip);
430  __ b(eq, &boolean);
431  __ LoadRoot(ip, Heap::kFalseValueRootIndex);
432  __ cmp(r1, ip);
433  __ b(ne, &miss);
434  __ bind(&boolean);
435  StubCompiler::GenerateLoadGlobalFunctionPrototype(
437 
438  // Probe the stub cache for the value object.
439  __ bind(&probe);
440  Isolate::Current()->stub_cache()->GenerateProbe(
441  masm, flags, r1, r2, r3, r4, r5, r6);
442 
443  __ bind(&miss);
444 }
445 
446 
447 static void GenerateFunctionTailCall(MacroAssembler* masm,
448  int argc,
449  Label* miss,
450  Register scratch) {
451  // r1: function
452 
453  // Check that the value isn't a smi.
454  __ JumpIfSmi(r1, miss);
455 
456  // Check that the value is a JSFunction.
457  __ CompareObjectType(r1, scratch, scratch, JS_FUNCTION_TYPE);
458  __ b(ne, miss);
459 
460  // Invoke the function.
461  ParameterCount actual(argc);
462  __ InvokeFunction(r1, actual, JUMP_FUNCTION,
463  NullCallWrapper(), CALL_AS_METHOD);
464 }
465 
466 
467 void CallICBase::GenerateNormal(MacroAssembler* masm, int argc) {
468  // ----------- S t a t e -------------
469  // -- r2 : name
470  // -- lr : return address
471  // -----------------------------------
472  Label miss;
473 
474  // Get the receiver of the function from the stack into r1.
475  __ ldr(r1, MemOperand(sp, argc * kPointerSize));
476 
477  GenerateStringDictionaryReceiverCheck(masm, r1, r0, r3, r4, &miss);
478 
479  // r0: elements
480  // Search the dictionary - put result in register r1.
481  GenerateDictionaryLoad(masm, &miss, r0, r2, r1, r3, r4);
482 
483  GenerateFunctionTailCall(masm, argc, &miss, r4);
484 
485  __ bind(&miss);
486 }
487 
488 
489 void CallICBase::GenerateMiss(MacroAssembler* masm,
490  int argc,
491  IC::UtilityId id,
492  Code::ExtraICState extra_state) {
493  // ----------- S t a t e -------------
494  // -- r2 : name
495  // -- lr : return address
496  // -----------------------------------
497  Isolate* isolate = masm->isolate();
498 
499  if (id == IC::kCallIC_Miss) {
500  __ IncrementCounter(isolate->counters()->call_miss(), 1, r3, r4);
501  } else {
502  __ IncrementCounter(isolate->counters()->keyed_call_miss(), 1, r3, r4);
503  }
504 
505  // Get the receiver of the function from the stack.
506  __ ldr(r3, MemOperand(sp, argc * kPointerSize));
507 
508  {
509  FrameScope scope(masm, StackFrame::INTERNAL);
510 
511  // Push the receiver and the name of the function.
512  __ Push(r3, r2);
513 
514  // Call the entry.
515  __ mov(r0, Operand(2));
516  __ mov(r1, Operand(ExternalReference(IC_Utility(id), isolate)));
517 
518  CEntryStub stub(1);
519  __ CallStub(&stub);
520 
521  // Move result to r1 and leave the internal frame.
522  __ mov(r1, Operand(r0));
523  }
524 
525  // Check if the receiver is a global object of some sort.
526  // This can happen only for regular CallIC but not KeyedCallIC.
527  if (id == IC::kCallIC_Miss) {
528  Label invoke, global;
529  __ ldr(r2, MemOperand(sp, argc * kPointerSize)); // receiver
530  __ JumpIfSmi(r2, &invoke);
531  __ CompareObjectType(r2, r3, r3, JS_GLOBAL_OBJECT_TYPE);
532  __ b(eq, &global);
533  __ cmp(r3, Operand(JS_BUILTINS_OBJECT_TYPE));
534  __ b(ne, &invoke);
535 
536  // Patch the receiver on the stack.
537  __ bind(&global);
539  __ str(r2, MemOperand(sp, argc * kPointerSize));
540  __ bind(&invoke);
541  }
542 
543  // Invoke the function.
544  CallKind call_kind = CallICBase::Contextual::decode(extra_state)
546  : CALL_AS_METHOD;
547  ParameterCount actual(argc);
548  __ InvokeFunction(r1,
549  actual,
551  NullCallWrapper(),
552  call_kind);
553 }
554 
555 
556 void CallIC::GenerateMegamorphic(MacroAssembler* masm,
557  int argc,
558  Code::ExtraICState extra_ic_state) {
559  // ----------- S t a t e -------------
560  // -- r2 : name
561  // -- lr : return address
562  // -----------------------------------
563 
564  // Get the receiver of the function from the stack into r1.
565  __ ldr(r1, MemOperand(sp, argc * kPointerSize));
566  GenerateMonomorphicCacheProbe(masm, argc, Code::CALL_IC, extra_ic_state);
567  GenerateMiss(masm, argc, extra_ic_state);
568 }
569 
570 
571 void KeyedCallIC::GenerateMegamorphic(MacroAssembler* masm, int argc) {
572  // ----------- S t a t e -------------
573  // -- r2 : name
574  // -- lr : return address
575  // -----------------------------------
576 
577  // Get the receiver of the function from the stack into r1.
578  __ ldr(r1, MemOperand(sp, argc * kPointerSize));
579 
580  Label do_call, slow_call, slow_load, slow_reload_receiver;
581  Label check_number_dictionary, check_string, lookup_monomorphic_cache;
582  Label index_smi, index_string;
583 
584  // Check that the key is a smi.
585  __ JumpIfNotSmi(r2, &check_string);
586  __ bind(&index_smi);
587  // Now the key is known to be a smi. This place is also jumped to from below
588  // where a numeric string is converted to a smi.
589 
590  GenerateKeyedLoadReceiverCheck(
591  masm, r1, r0, r3, Map::kHasIndexedInterceptor, &slow_call);
592 
593  GenerateFastArrayLoad(
594  masm, r1, r2, r4, r3, r0, r1, &check_number_dictionary, &slow_load);
595  Counters* counters = masm->isolate()->counters();
596  __ IncrementCounter(counters->keyed_call_generic_smi_fast(), 1, r0, r3);
597 
598  __ bind(&do_call);
599  // receiver in r1 is not used after this point.
600  // r2: key
601  // r1: function
602  GenerateFunctionTailCall(masm, argc, &slow_call, r0);
603 
604  __ bind(&check_number_dictionary);
605  // r2: key
606  // r3: elements map
607  // r4: elements
608  // Check whether the elements is a number dictionary.
609  __ LoadRoot(ip, Heap::kHashTableMapRootIndex);
610  __ cmp(r3, ip);
611  __ b(ne, &slow_load);
612  __ mov(r0, Operand(r2, ASR, kSmiTagSize));
613  // r0: untagged index
614  __ LoadFromNumberDictionary(&slow_load, r4, r2, r1, r0, r3, r5);
615  __ IncrementCounter(counters->keyed_call_generic_smi_dict(), 1, r0, r3);
616  __ jmp(&do_call);
617 
618  __ bind(&slow_load);
619  // This branch is taken when calling KeyedCallIC_Miss is neither required
620  // nor beneficial.
621  __ IncrementCounter(counters->keyed_call_generic_slow_load(), 1, r0, r3);
622  {
623  FrameScope scope(masm, StackFrame::INTERNAL);
624  __ push(r2); // save the key
625  __ Push(r1, r2); // pass the receiver and the key
626  __ CallRuntime(Runtime::kKeyedGetProperty, 2);
627  __ pop(r2); // restore the key
628  }
629  __ mov(r1, r0);
630  __ jmp(&do_call);
631 
632  __ bind(&check_string);
633  GenerateKeyStringCheck(masm, r2, r0, r3, &index_string, &slow_call);
634 
635  // The key is known to be a symbol.
636  // If the receiver is a regular JS object with slow properties then do
637  // a quick inline probe of the receiver's dictionary.
638  // Otherwise do the monomorphic cache probe.
639  GenerateKeyedLoadReceiverCheck(
640  masm, r1, r0, r3, Map::kHasNamedInterceptor, &lookup_monomorphic_cache);
641 
644  __ LoadRoot(ip, Heap::kHashTableMapRootIndex);
645  __ cmp(r3, ip);
646  __ b(ne, &lookup_monomorphic_cache);
647 
648  GenerateDictionaryLoad(masm, &slow_load, r0, r2, r1, r3, r4);
649  __ IncrementCounter(counters->keyed_call_generic_lookup_dict(), 1, r0, r3);
650  __ jmp(&do_call);
651 
652  __ bind(&lookup_monomorphic_cache);
653  __ IncrementCounter(counters->keyed_call_generic_lookup_cache(), 1, r0, r3);
655  argc,
656  Code::KEYED_CALL_IC,
658  // Fall through on miss.
659 
660  __ bind(&slow_call);
661  // This branch is taken if:
662  // - the receiver requires boxing or access check,
663  // - the key is neither smi nor symbol,
664  // - the value loaded is not a function,
665  // - there is hope that the runtime will create a monomorphic call stub
666  // that will get fetched next time.
667  __ IncrementCounter(counters->keyed_call_generic_slow(), 1, r0, r3);
668  GenerateMiss(masm, argc);
669 
670  __ bind(&index_string);
671  __ IndexFromHash(r3, r2);
672  // Now jump to the place where smi keys are handled.
673  __ jmp(&index_smi);
674 }
675 
676 
677 void KeyedCallIC::GenerateNormal(MacroAssembler* masm, int argc) {
678  // ----------- S t a t e -------------
679  // -- r2 : name
680  // -- lr : return address
681  // -----------------------------------
682 
683  // Check if the name is a string.
684  Label miss;
685  __ JumpIfSmi(r2, &miss);
686  __ IsObjectJSStringType(r2, r0, &miss);
687 
688  CallICBase::GenerateNormal(masm, argc);
689  __ bind(&miss);
690  GenerateMiss(masm, argc);
691 }
692 
693 
694 // Defined in ic.cc.
695 Object* LoadIC_Miss(Arguments args);
696 
697 void LoadIC::GenerateMegamorphic(MacroAssembler* masm) {
698  // ----------- S t a t e -------------
699  // -- r2 : name
700  // -- lr : return address
701  // -- r0 : receiver
702  // -- sp[0] : receiver
703  // -----------------------------------
704 
705  // Probe the stub cache.
706  Code::Flags flags =
707  Code::ComputeFlags(Code::LOAD_IC, MONOMORPHIC);
708  Isolate::Current()->stub_cache()->GenerateProbe(
709  masm, flags, r0, r2, r3, r4, r5, r6);
710 
711  // Cache miss: Jump to runtime.
712  GenerateMiss(masm);
713 }
714 
715 
716 void LoadIC::GenerateNormal(MacroAssembler* masm) {
717  // ----------- S t a t e -------------
718  // -- r2 : name
719  // -- lr : return address
720  // -- r0 : receiver
721  // -- sp[0] : receiver
722  // -----------------------------------
723  Label miss;
724 
725  GenerateStringDictionaryReceiverCheck(masm, r0, r1, r3, r4, &miss);
726 
727  // r1: elements
728  GenerateDictionaryLoad(masm, &miss, r1, r2, r0, r3, r4);
729  __ Ret();
730 
731  // Cache miss: Jump to runtime.
732  __ bind(&miss);
733  GenerateMiss(masm);
734 }
735 
736 
737 void LoadIC::GenerateMiss(MacroAssembler* masm) {
738  // ----------- S t a t e -------------
739  // -- r2 : name
740  // -- lr : return address
741  // -- r0 : receiver
742  // -- sp[0] : receiver
743  // -----------------------------------
744  Isolate* isolate = masm->isolate();
745 
746  __ IncrementCounter(isolate->counters()->load_miss(), 1, r3, r4);
747 
748  __ mov(r3, r0);
749  __ Push(r3, r2);
750 
751  // Perform tail call to the entry.
752  ExternalReference ref =
753  ExternalReference(IC_Utility(kLoadIC_Miss), isolate);
754  __ TailCallExternalReference(ref, 2, 1);
755 }
756 
757 
758 static MemOperand GenerateMappedArgumentsLookup(MacroAssembler* masm,
759  Register object,
760  Register key,
761  Register scratch1,
762  Register scratch2,
763  Register scratch3,
764  Label* unmapped_case,
765  Label* slow_case) {
766  Heap* heap = masm->isolate()->heap();
767 
768  // Check that the receiver is a JSObject. Because of the map check
769  // later, we do not need to check for interceptors or whether it
770  // requires access checks.
771  __ JumpIfSmi(object, slow_case);
772  // Check that the object is some kind of JSObject.
773  __ CompareObjectType(object, scratch1, scratch2, FIRST_JS_RECEIVER_TYPE);
774  __ b(lt, slow_case);
775 
776  // Check that the key is a positive smi.
777  __ tst(key, Operand(0x80000001));
778  __ b(ne, slow_case);
779 
780  // Load the elements into scratch1 and check its map.
781  Handle<Map> arguments_map(heap->non_strict_arguments_elements_map());
782  __ ldr(scratch1, FieldMemOperand(object, JSObject::kElementsOffset));
783  __ CheckMap(scratch1, scratch2, arguments_map, slow_case, DONT_DO_SMI_CHECK);
784 
785  // Check if element is in the range of mapped arguments. If not, jump
786  // to the unmapped lookup with the parameter map in scratch1.
787  __ ldr(scratch2, FieldMemOperand(scratch1, FixedArray::kLengthOffset));
788  __ sub(scratch2, scratch2, Operand(Smi::FromInt(2)));
789  __ cmp(key, Operand(scratch2));
790  __ b(cs, unmapped_case);
791 
792  // Load element index and check whether it is the hole.
793  const int kOffset =
794  FixedArray::kHeaderSize + 2 * kPointerSize - kHeapObjectTag;
795 
796  __ mov(scratch3, Operand(kPointerSize >> 1));
797  __ mul(scratch3, key, scratch3);
798  __ add(scratch3, scratch3, Operand(kOffset));
799 
800  __ ldr(scratch2, MemOperand(scratch1, scratch3));
801  __ LoadRoot(scratch3, Heap::kTheHoleValueRootIndex);
802  __ cmp(scratch2, scratch3);
803  __ b(eq, unmapped_case);
804 
805  // Load value from context and return it. We can reuse scratch1 because
806  // we do not jump to the unmapped lookup (which requires the parameter
807  // map in scratch1).
808  __ ldr(scratch1, FieldMemOperand(scratch1, FixedArray::kHeaderSize));
809  __ mov(scratch3, Operand(kPointerSize >> 1));
810  __ mul(scratch3, scratch2, scratch3);
811  __ add(scratch3, scratch3, Operand(Context::kHeaderSize - kHeapObjectTag));
812  return MemOperand(scratch1, scratch3);
813 }
814 
815 
816 static MemOperand GenerateUnmappedArgumentsLookup(MacroAssembler* masm,
817  Register key,
818  Register parameter_map,
819  Register scratch,
820  Label* slow_case) {
821  // Element is in arguments backing store, which is referenced by the
822  // second element of the parameter_map. The parameter_map register
823  // must be loaded with the parameter map of the arguments object and is
824  // overwritten.
825  const int kBackingStoreOffset = FixedArray::kHeaderSize + kPointerSize;
826  Register backing_store = parameter_map;
827  __ ldr(backing_store, FieldMemOperand(parameter_map, kBackingStoreOffset));
828  Handle<Map> fixed_array_map(masm->isolate()->heap()->fixed_array_map());
829  __ CheckMap(backing_store, scratch, fixed_array_map, slow_case,
831  __ ldr(scratch, FieldMemOperand(backing_store, FixedArray::kLengthOffset));
832  __ cmp(key, Operand(scratch));
833  __ b(cs, slow_case);
834  __ mov(scratch, Operand(kPointerSize >> 1));
835  __ mul(scratch, key, scratch);
836  __ add(scratch,
837  scratch,
838  Operand(FixedArray::kHeaderSize - kHeapObjectTag));
839  return MemOperand(backing_store, scratch);
840 }
841 
842 
843 void KeyedLoadIC::GenerateNonStrictArguments(MacroAssembler* masm) {
844  // ---------- S t a t e --------------
845  // -- lr : return address
846  // -- r0 : key
847  // -- r1 : receiver
848  // -----------------------------------
849  Label slow, notin;
850  MemOperand mapped_location =
851  GenerateMappedArgumentsLookup(masm, r1, r0, r2, r3, r4, &notin, &slow);
852  __ ldr(r0, mapped_location);
853  __ Ret();
854  __ bind(&notin);
855  // The unmapped lookup expects that the parameter map is in r2.
856  MemOperand unmapped_location =
857  GenerateUnmappedArgumentsLookup(masm, r0, r2, r3, &slow);
858  __ ldr(r2, unmapped_location);
859  __ LoadRoot(r3, Heap::kTheHoleValueRootIndex);
860  __ cmp(r2, r3);
861  __ b(eq, &slow);
862  __ mov(r0, r2);
863  __ Ret();
864  __ bind(&slow);
865  GenerateMiss(masm, false);
866 }
867 
868 
869 void KeyedStoreIC::GenerateNonStrictArguments(MacroAssembler* masm) {
870  // ---------- S t a t e --------------
871  // -- r0 : value
872  // -- r1 : key
873  // -- r2 : receiver
874  // -- lr : return address
875  // -----------------------------------
876  Label slow, notin;
877  MemOperand mapped_location =
878  GenerateMappedArgumentsLookup(masm, r2, r1, r3, r4, r5, &notin, &slow);
879  __ str(r0, mapped_location);
880  __ add(r6, r3, r5);
881  __ mov(r9, r0);
882  __ RecordWrite(r3, r6, r9, kLRHasNotBeenSaved, kDontSaveFPRegs);
883  __ Ret();
884  __ bind(&notin);
885  // The unmapped lookup expects that the parameter map is in r3.
886  MemOperand unmapped_location =
887  GenerateUnmappedArgumentsLookup(masm, r1, r3, r4, &slow);
888  __ str(r0, unmapped_location);
889  __ add(r6, r3, r4);
890  __ mov(r9, r0);
891  __ RecordWrite(r3, r6, r9, kLRHasNotBeenSaved, kDontSaveFPRegs);
892  __ Ret();
893  __ bind(&slow);
894  GenerateMiss(masm, false);
895 }
896 
897 
898 void KeyedCallIC::GenerateNonStrictArguments(MacroAssembler* masm,
899  int argc) {
900  // ----------- S t a t e -------------
901  // -- r2 : name
902  // -- lr : return address
903  // -----------------------------------
904  Label slow, notin;
905  // Load receiver.
906  __ ldr(r1, MemOperand(sp, argc * kPointerSize));
907  MemOperand mapped_location =
908  GenerateMappedArgumentsLookup(masm, r1, r2, r3, r4, r5, &notin, &slow);
909  __ ldr(r1, mapped_location);
910  GenerateFunctionTailCall(masm, argc, &slow, r3);
911  __ bind(&notin);
912  // The unmapped lookup expects that the parameter map is in r3.
913  MemOperand unmapped_location =
914  GenerateUnmappedArgumentsLookup(masm, r2, r3, r4, &slow);
915  __ ldr(r1, unmapped_location);
916  __ LoadRoot(r3, Heap::kTheHoleValueRootIndex);
917  __ cmp(r1, r3);
918  __ b(eq, &slow);
919  GenerateFunctionTailCall(masm, argc, &slow, r3);
920  __ bind(&slow);
921  GenerateMiss(masm, argc);
922 }
923 
924 
925 Object* KeyedLoadIC_Miss(Arguments args);
926 
927 
// Generates the keyed-load miss handler: bumps the miss counter, pushes
// receiver and key, and tail-calls into the runtime miss routine.
// force_generic selects the variant that transitions straight to the
// generic stub instead of attempting further specialization.
void KeyedLoadIC::GenerateMiss(MacroAssembler* masm, bool force_generic) {
  // ---------- S t a t e --------------
  //  -- lr     : return address
  //  -- r0     : key
  //  -- r1     : receiver
  // -----------------------------------
  Isolate* isolate = masm->isolate();

  __ IncrementCounter(isolate->counters()->keyed_load_miss(), 1, r3, r4);

  // Runtime expects (receiver, key) on the stack.
  __ Push(r1, r0);

  // Perform tail call to the entry.
  ExternalReference ref = force_generic
      ? ExternalReference(IC_Utility(kKeyedLoadIC_MissForceGeneric), isolate)
      : ExternalReference(IC_Utility(kKeyedLoadIC_Miss), isolate);

  __ TailCallExternalReference(ref, 2, 1);
}
947 
948 
// Generates a stub that forwards a keyed load directly to the
// Runtime::kKeyedGetProperty function (2 args, 1 result).
void KeyedLoadIC::GenerateRuntimeGetProperty(MacroAssembler* masm) {
  // ---------- S t a t e --------------
  //  -- lr     : return address
  //  -- r0     : key
  //  -- r1     : receiver
  // -----------------------------------

  // Runtime expects (receiver, key) on the stack.
  __ Push(r1, r0);

  __ TailCallRuntime(Runtime::kKeyedGetProperty, 2, 1);
}
960 
961 
// Generates the generic (megamorphic) keyed-load stub. Dispatch order:
//   1. smi key  -> fast elements, else number dictionary, else slow;
//   2. string key -> keyed-lookup cache probe (in-object or property-array
//      load), else property dictionary, else slow;
//   3. numeric string key -> converted to smi via IndexFromHash and retried.
// NOTE(review): the inner (Doxygen) line numbers jump in several places in
// this extraction, so some original instructions appear to be missing; the
// gaps are marked below and should be restored from the original source.
void KeyedLoadIC::GenerateGeneric(MacroAssembler* masm) {
  // ---------- S t a t e --------------
  //  -- lr     : return address
  //  -- r0     : key
  //  -- r1     : receiver
  // -----------------------------------
  Label slow, check_string, index_smi, index_string, property_array_property;
  Label probe_dictionary, check_number_dictionary;

  Register key = r0;
  Register receiver = r1;

  Isolate* isolate = masm->isolate();

  // Check that the key is a smi.
  __ JumpIfNotSmi(key, &check_string);
  __ bind(&index_smi);
  // Now the key is known to be a smi. This place is also jumped to from below
  // where a numeric string is converted to a smi.

  GenerateKeyedLoadReceiverCheck(
      masm, receiver, r2, r3, Map::kHasIndexedInterceptor, &slow);

  // Check the receiver's map to see if it has fast elements.
  __ CheckFastElements(r2, r3, &check_number_dictionary);

  GenerateFastArrayLoad(
      masm, receiver, key, r4, r3, r2, r0, NULL, &slow);
  __ IncrementCounter(isolate->counters()->keyed_load_generic_smi(), 1, r2, r3);
  __ Ret();

  __ bind(&check_number_dictionary);
  // NOTE(review): original lines 994-995 are missing from this extraction —
  // presumably the loads of the elements array into r4 and its map into r3;
  // confirm against the original file.

  // Check whether the elements is a number dictionary.
  // r0: key
  // r3: elements map
  // r4: elements
  __ LoadRoot(ip, Heap::kHashTableMapRootIndex);
  __ cmp(r3, ip);
  __ b(ne, &slow);
  // Untag the smi key before probing the number dictionary.
  __ mov(r2, Operand(r0, ASR, kSmiTagSize));
  __ LoadFromNumberDictionary(&slow, r4, r0, r0, r2, r3, r5);
  __ Ret();

  // Slow case, key and receiver still in r0 and r1.
  __ bind(&slow);
  __ IncrementCounter(isolate->counters()->keyed_load_generic_slow(),
                      1, r2, r3);
  // NOTE(review): original line 1012 is missing — presumably the
  // GenerateRuntimeGetProperty(masm) tail call; confirm against the original.

  __ bind(&check_string);
  GenerateKeyStringCheck(masm, key, r2, r3, &index_string, &slow);

  GenerateKeyedLoadReceiverCheck(
      masm, receiver, r2, r3, Map::kHasNamedInterceptor, &slow);

  // If the receiver is a fast-case object, check the keyed lookup
  // cache. Otherwise probe the dictionary.
  // NOTE(review): original lines 1022-1023 are missing — presumably loading
  // the properties array and its map into r3/r4 before this comparison.
  __ LoadRoot(ip, Heap::kHashTableMapRootIndex);
  __ cmp(r4, ip);
  __ b(eq, &probe_dictionary);

  // Load the map of the receiver, compute the keyed lookup cache hash
  // based on 32 bits of the map pointer and the string hash.
  // NOTE(review): original lines 1030, 1032 and 1034 are missing — presumably
  // the loads of the receiver map / string hash field and the computation of
  // the cache index 'mask'; confirm against the original.
  __ mov(r3, Operand(r2, ASR, KeyedLookupCache::kMapHashShift));
  __ eor(r3, r3, Operand(r4, ASR, String::kHashShift));
  __ And(r3, r3, Operand(mask));

  // Load the key (consisting of map and symbol) from the cache and
  // check for match.
  Label load_in_object_property;
  static const int kEntriesPerBucket = KeyedLookupCache::kEntriesPerBucket;
  Label hit_on_nth_entry[kEntriesPerBucket];
  ExternalReference cache_keys =
      ExternalReference::keyed_lookup_cache_keys(isolate);

  __ mov(r4, Operand(cache_keys));
  // Each cache entry is a (map, symbol) pair, hence the shift by one extra.
  __ add(r4, r4, Operand(r3, LSL, kPointerSizeLog2 + 1));

  for (int i = 0; i < kEntriesPerBucket - 1; i++) {
    Label try_next_entry;
    // Load map and move r4 to next entry.
    __ ldr(r5, MemOperand(r4, kPointerSize * 2, PostIndex));
    __ cmp(r2, r5);
    __ b(ne, &try_next_entry);
    __ ldr(r5, MemOperand(r4, -kPointerSize));  // Load symbol
    __ cmp(r0, r5);
    __ b(eq, &hit_on_nth_entry[i]);
    __ bind(&try_next_entry);
  }

  // Last entry: Load map and move r4 to symbol.
  __ ldr(r5, MemOperand(r4, kPointerSize, PostIndex));
  __ cmp(r2, r5);
  __ b(ne, &slow);
  __ ldr(r5, MemOperand(r4));
  __ cmp(r0, r5);
  __ b(ne, &slow);

  // Get field offset.
  // r0     : key
  // r1     : receiver
  // r2     : receiver's map
  // r3     : lookup cache index
  ExternalReference cache_field_offsets =
      ExternalReference::keyed_lookup_cache_field_offsets(isolate);

  // Hit on nth entry.
  for (int i = kEntriesPerBucket - 1; i >= 0; i--) {
    __ bind(&hit_on_nth_entry[i]);
    __ mov(r4, Operand(cache_field_offsets));
    if (i != 0) {
      __ add(r3, r3, Operand(i));
    }
    __ ldr(r5, MemOperand(r4, r3, LSL, kPointerSizeLog2));
    // NOTE(review): original line 1084 is missing — presumably the load of
    // the in-object properties count into r6; confirm against the original.
    __ sub(r5, r5, r6, SetCC);
    __ b(ge, &property_array_property);
    if (i != 0) {
      __ jmp(&load_in_object_property);
    }
  }

  // Load in-object property.
  __ bind(&load_in_object_property);
  // NOTE(review): original line 1094 is missing — presumably the load of the
  // instance size into r6; confirm against the original.
  __ add(r6, r6, r5);  // Index from start of object.
  __ sub(r1, r1, Operand(kHeapObjectTag));  // Remove the heap tag.
  __ ldr(r0, MemOperand(r1, r6, LSL, kPointerSizeLog2));
  __ IncrementCounter(isolate->counters()->keyed_load_generic_lookup_cache(),
                      1, r2, r3);
  __ Ret();

  // Load property array property.
  __ bind(&property_array_property);
  // NOTE(review): original line 1104 is missing — presumably the load of the
  // properties array into r1; confirm against the original.
  __ add(r1, r1, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
  __ ldr(r0, MemOperand(r1, r5, LSL, kPointerSizeLog2));
  __ IncrementCounter(isolate->counters()->keyed_load_generic_lookup_cache(),
                      1, r2, r3);
  __ Ret();

  // Do a quick inline probe of the receiver's dictionary, if it
  // exists.
  __ bind(&probe_dictionary);
  // r1: receiver
  // r0: key
  // r3: elements
  // NOTE(review): original lines 1117-1118 are missing — presumably loading
  // the receiver's map and instance type into r2 for the check below.
  GenerateGlobalInstanceTypeCheck(masm, r2, &slow);
  // Load the property to r0.
  GenerateDictionaryLoad(masm, &slow, r3, r0, r0, r2, r4);
  __ IncrementCounter(isolate->counters()->keyed_load_generic_symbol(),
                      1, r2, r3);
  __ Ret();

  __ bind(&index_string);
  __ IndexFromHash(r3, key);
  // Now jump to the place where smi keys are handled.
  __ jmp(&index_smi);
}
1131 
1132 
1133 void KeyedLoadIC::GenerateString(MacroAssembler* masm) {
1134  // ---------- S t a t e --------------
1135  // -- lr : return address
1136  // -- r0 : key (index)
1137  // -- r1 : receiver
1138  // -----------------------------------
1139  Label miss;
1140 
1141  Register receiver = r1;
1142  Register index = r0;
1143  Register scratch = r3;
1144  Register result = r0;
1145 
1146  StringCharAtGenerator char_at_generator(receiver,
1147  index,
1148  scratch,
1149  result,
1150  &miss, // When not a string.
1151  &miss, // When not a number.
1152  &miss, // When index out of range.
1154  char_at_generator.GenerateFast(masm);
1155  __ Ret();
1156 
1157  StubRuntimeCallHelper call_helper;
1158  char_at_generator.GenerateSlow(masm, call_helper);
1159 
1160  __ bind(&miss);
1161  GenerateMiss(masm, false);
1162 }
1163 
1164 
1165 void KeyedLoadIC::GenerateIndexedInterceptor(MacroAssembler* masm) {
1166  // ---------- S t a t e --------------
1167  // -- lr : return address
1168  // -- r0 : key
1169  // -- r1 : receiver
1170  // -----------------------------------
1171  Label slow;
1172 
1173  // Check that the receiver isn't a smi.
1174  __ JumpIfSmi(r1, &slow);
1175 
1176  // Check that the key is an array index, that is Uint32.
1177  __ tst(r0, Operand(kSmiTagMask | kSmiSignMask));
1178  __ b(ne, &slow);
1179 
1180  // Get the map of the receiver.
1182 
1183  // Check that it has indexed interceptor and access checks
1184  // are not enabled for this object.
1186  __ and_(r3, r3, Operand(kSlowCaseBitFieldMask));
1187  __ cmp(r3, Operand(1 << Map::kHasIndexedInterceptor));
1188  __ b(ne, &slow);
1189 
1190  // Everything is fine, call runtime.
1191  __ Push(r1, r0); // Receiver, key.
1192 
1193  // Perform tail call to the entry.
1194  __ TailCallExternalReference(
1195  ExternalReference(IC_Utility(kKeyedLoadPropertyWithInterceptor),
1196  masm->isolate()),
1197  2,
1198  1);
1199 
1200  __ bind(&slow);
1201  GenerateMiss(masm, false);
1202 }
1203 
1204 
// Generates the keyed-store miss handler: pushes (receiver, key, value) and
// tail-calls the runtime miss routine. force_generic selects the variant
// that transitions straight to the generic stub.
void KeyedStoreIC::GenerateMiss(MacroAssembler* masm, bool force_generic) {
  // ---------- S t a t e --------------
  //  -- r0     : value
  //  -- r1     : key
  //  -- r2     : receiver
  //  -- lr     : return address
  // -----------------------------------

  // Push receiver, key and value for runtime call.
  __ Push(r2, r1, r0);

  ExternalReference ref = force_generic
      ? ExternalReference(IC_Utility(kKeyedStoreIC_MissForceGeneric),
                          masm->isolate())
      : ExternalReference(IC_Utility(kKeyedStoreIC_Miss), masm->isolate());
  __ TailCallExternalReference(ref, 3, 1);
}
1222 
1223 
// Generates the keyed-store slow stub: completes the store in the runtime
// without recording a miss (so the IC does not degrade to generic).
void KeyedStoreIC::GenerateSlow(MacroAssembler* masm) {
  // ---------- S t a t e --------------
  //  -- r0     : value
  //  -- r1     : key
  //  -- r2     : receiver
  //  -- lr     : return address
  // -----------------------------------

  // Push receiver, key and value for runtime call.
  __ Push(r2, r1, r0);

  // The slow case calls into the runtime to complete the store without causing
  // an IC miss that would otherwise cause a transition to the generic stub.
  ExternalReference ref =
      ExternalReference(IC_Utility(kKeyedStoreIC_Slow), masm->isolate());
  __ TailCallExternalReference(ref, 3, 1);
}
1241 
1242 
1243 void KeyedStoreIC::GenerateTransitionElementsSmiToDouble(MacroAssembler* masm) {
1244  // ---------- S t a t e --------------
1245  // -- r2 : receiver
1246  // -- r3 : target map
1247  // -- lr : return address
1248  // -----------------------------------
1249  // Must return the modified receiver in r0.
1250  if (!FLAG_trace_elements_transitions) {
1251  Label fail;
1253  __ mov(r0, r2);
1254  __ Ret();
1255  __ bind(&fail);
1256  }
1257 
1258  __ push(r2);
1259  __ TailCallRuntime(Runtime::kTransitionElementsSmiToDouble, 1, 1);
1260 }
1261 
1262 
1264  MacroAssembler* masm) {
1265  // ---------- S t a t e --------------
1266  // -- r2 : receiver
1267  // -- r3 : target map
1268  // -- lr : return address
1269  // -----------------------------------
1270  // Must return the modified receiver in r0.
1271  if (!FLAG_trace_elements_transitions) {
1272  Label fail;
1274  __ mov(r0, r2);
1275  __ Ret();
1276  __ bind(&fail);
1277  }
1278 
1279  __ push(r2);
1280  __ TailCallRuntime(Runtime::kTransitionElementsDoubleToObject, 1, 1);
1281 }
1282 
1283 
// Generates a stub that forwards a keyed store to Runtime::kSetProperty,
// passing NONE attributes and the given strict-mode flag (5 args total).
void KeyedStoreIC::GenerateRuntimeSetProperty(MacroAssembler* masm,
                                              StrictModeFlag strict_mode) {
  // ---------- S t a t e --------------
  //  -- r0     : value
  //  -- r1     : key
  //  -- r2     : receiver
  //  -- lr     : return address
  // -----------------------------------

  // Push receiver, key and value for runtime call.
  __ Push(r2, r1, r0);

  __ mov(r1, Operand(Smi::FromInt(NONE)));          // PropertyAttributes
  __ mov(r0, Operand(Smi::FromInt(strict_mode)));   // Strict mode.
  __ Push(r1, r0);

  __ TailCallRuntime(Runtime::kSetProperty, 5, 1);
}
1302 
1303 
// Emits the shared body of the generic keyed store: handles smi stores,
// object stores (with write barrier), double stores, and elements-kind
// transitions (smi->double, smi->object, double->object). check_map selects
// whether the elements map must be (re)checked; increment_length selects the
// array-grow variant that bumps JSArray length by one.
// NOTE(review): the inner (Doxygen) line numbers jump in several places in
// this extraction, so some original lines are missing; the gaps are marked
// below and should be restored from the original source.
static void KeyedStoreGenerateGenericHelper(
    MacroAssembler* masm,
    Label* fast_object,
    Label* fast_double,
    Label* slow,
    KeyedStoreCheckMap check_map,
    KeyedStoreIncrementLength increment_length,
    Register value,
    Register key,
    Register receiver,
    Register receiver_map,
    Register elements_map,
    Register elements) {
  Label transition_smi_elements;
  Label finish_object_store, non_double_value, transition_double_elements;
  Label fast_double_without_map_check;

  // Fast case: Do the store, could be either Object or double.
  __ bind(fast_object);
  Register scratch_value = r4;
  Register address = r5;
  if (check_map == kCheckMap) {
    __ ldr(elements_map, FieldMemOperand(elements, HeapObject::kMapOffset));
    __ cmp(elements_map,
           Operand(masm->isolate()->factory()->fixed_array_map()));
    __ b(ne, fast_double);
  }
  // Smi stores don't require further checks.
  Label non_smi_value;
  __ JumpIfNotSmi(value, &non_smi_value);

  if (increment_length == kIncrementLength) {
    // Add 1 to receiver->length.
    __ add(scratch_value, key, Operand(Smi::FromInt(1)));
    __ str(scratch_value, FieldMemOperand(receiver, JSArray::kLengthOffset));
  }
  // It's irrelevant whether array is smi-only or not when writing a smi.
  __ add(address, elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
  __ add(address, address, Operand(key, LSL, kPointerSizeLog2 - kSmiTagSize));
  __ str(value, MemOperand(address));
  __ Ret();

  __ bind(&non_smi_value);
  // Escape to elements kind transition case.
  __ CheckFastObjectElements(receiver_map, scratch_value,
                             &transition_smi_elements);

  // Fast elements array, store the value to the elements backing store.
  __ bind(&finish_object_store);
  if (increment_length == kIncrementLength) {
    // Add 1 to receiver->length.
    __ add(scratch_value, key, Operand(Smi::FromInt(1)));
    __ str(scratch_value, FieldMemOperand(receiver, JSArray::kLengthOffset));
  }
  __ add(address, elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
  __ add(address, address, Operand(key, LSL, kPointerSizeLog2 - kSmiTagSize));
  __ str(value, MemOperand(address));
  // Update write barrier for the elements array address.
  __ mov(scratch_value, value);  // Preserve the value which is returned.
  // NOTE(review): original lines 1366-1368 are missing from this extraction —
  // presumably the lr-saved / FP-regs / remembered-set arguments of
  // RecordWrite; confirm against the original.
  __ RecordWrite(elements,
                 address,
                 scratch_value,
                 OMIT_SMI_CHECK);
  __ Ret();

  __ bind(fast_double);
  if (check_map == kCheckMap) {
    // Check for fast double array case. If this fails, call through to the
    // runtime.
    __ CompareRoot(elements_map, Heap::kFixedDoubleArrayMapRootIndex);
    __ b(ne, slow);
  }
  __ bind(&fast_double_without_map_check);
  __ StoreNumberToDoubleElements(value,
                                 key,
                                 receiver,
                                 elements,  // Overwritten.
                                 r3,        // Scratch regs...
                                 r4,
                                 r5,
                                 r6,
                                 &transition_double_elements);
  if (increment_length == kIncrementLength) {
    // Add 1 to receiver->length.
    __ add(scratch_value, key, Operand(Smi::FromInt(1)));
    __ str(scratch_value, FieldMemOperand(receiver, JSArray::kLengthOffset));
  }
  __ Ret();

  __ bind(&transition_smi_elements);
  // Transition the array appropriately depending on the value type.
  // NOTE(review): original line 1398 is missing — presumably the load of the
  // value's map into r4 before this root compare; confirm against original.
  __ CompareRoot(r4, Heap::kHeapNumberMapRootIndex);
  __ b(ne, &non_double_value);

  // Value is a double. Transition FAST_SMI_ELEMENTS ->
  // FAST_DOUBLE_ELEMENTS and complete the store.
  // NOTE(review): original line 1405 is missing — presumably the
  // FAST_DOUBLE_ELEMENTS target-kind argument; confirm against original.
  __ LoadTransitionedArrayMapConditional(FAST_SMI_ELEMENTS,
                                         receiver_map,
                                         r4,
                                         slow);
  ASSERT(receiver_map.is(r3));  // Transition code expects map in r3
  // NOTE(review): original line 1410 is missing — presumably the
  // ElementsTransitionGenerator::GenerateSmiToDouble(masm, slow) call.
  __ ldr(elements, FieldMemOperand(receiver, JSObject::kElementsOffset));
  __ jmp(&fast_double_without_map_check);

  __ bind(&non_double_value);
  // Value is not a double, FAST_SMI_ELEMENTS -> FAST_ELEMENTS
  __ LoadTransitionedArrayMapConditional(FAST_SMI_ELEMENTS,
                                         FAST_ELEMENTS,
                                         receiver_map,
                                         r4,
                                         slow);
  ASSERT(receiver_map.is(r3));  // Transition code expects map in r3
  // NOTE(review): original line 1422 is missing — presumably the
  // ElementsTransitionGenerator::GenerateMapChangeElementsTransition call.
  __ ldr(elements, FieldMemOperand(receiver, JSObject::kElementsOffset));
  __ jmp(&finish_object_store);

  __ bind(&transition_double_elements);
  // Elements are FAST_DOUBLE_ELEMENTS, but value is an Object that's not a
  // HeapNumber. Make sure that the receiver is a Array with FAST_ELEMENTS and
  // transition array from FAST_DOUBLE_ELEMENTS to FAST_ELEMENTS
  __ LoadTransitionedArrayMapConditional(FAST_DOUBLE_ELEMENTS,
                                         FAST_ELEMENTS,
                                         receiver_map,
                                         r4,
                                         slow);
  ASSERT(receiver_map.is(r3));  // Transition code expects map in r3
  // NOTE(review): original line 1436 is missing — presumably the
  // ElementsTransitionGenerator::GenerateDoubleToObject(masm, slow) call.
  __ ldr(elements, FieldMemOperand(receiver, JSObject::kElementsOffset));
  __ jmp(&finish_object_store);
}
1440 
1441 
// Generates the generic (megamorphic) keyed-store stub: validates receiver
// and smi key, bounds-checks against the elements (or JSArray length),
// grows arrays by one element when storing at array.length, and delegates
// the actual store to KeyedStoreGenerateGenericHelper.
// NOTE(review): the inner (Doxygen) line numbers jump in a few places in
// this extraction, so some original lines are missing; the gaps are marked
// below and should be restored from the original source.
void KeyedStoreIC::GenerateGeneric(MacroAssembler* masm,
                                   StrictModeFlag strict_mode) {
  // ---------- S t a t e --------------
  //  -- r0     : value
  //  -- r1     : key
  //  -- r2     : receiver
  //  -- lr     : return address
  // -----------------------------------
  Label slow, fast_object, fast_object_grow;
  Label fast_double, fast_double_grow;
  Label array, extra, check_if_double_array;

  // Register usage.
  Register value = r0;
  Register key = r1;
  Register receiver = r2;
  Register receiver_map = r3;
  Register elements_map = r6;
  Register elements = r7;  // Elements array of the receiver.
  // r4 and r5 are used as general scratch registers.

  // Check that the key is a smi.
  __ JumpIfNotSmi(key, &slow);
  // Check that the object isn't a smi.
  __ JumpIfSmi(receiver, &slow);
  // Get the map of the object.
  __ ldr(receiver_map, FieldMemOperand(receiver, HeapObject::kMapOffset));
  // Check that the receiver does not require access checks. We need
  // to do this because this generic stub does not perform map checks.
  __ ldrb(ip, FieldMemOperand(receiver_map, Map::kBitFieldOffset));
  __ tst(ip, Operand(1 << Map::kIsAccessCheckNeeded));
  __ b(ne, &slow);
  // Check if the object is a JS array or not.
  __ ldrb(r4, FieldMemOperand(receiver_map, Map::kInstanceTypeOffset));
  __ cmp(r4, Operand(JS_ARRAY_TYPE));
  __ b(eq, &array);
  // Check that the object is some kind of JSObject.
  __ cmp(r4, Operand(FIRST_JS_OBJECT_TYPE));
  __ b(lt, &slow);

  // Object case: Check key against length in the elements array.
  __ ldr(elements, FieldMemOperand(receiver, JSObject::kElementsOffset));
  // Check array bounds. Both the key and the length of FixedArray are smis.
  // NOTE(review): original line 1485 is missing — presumably the load of the
  // FixedArray length into ip; confirm against the original.
  __ cmp(key, Operand(ip));
  __ b(lo, &fast_object);

  // Slow case, handle jump to runtime.
  __ bind(&slow);
  // Entry registers are intact.
  // r0: value.
  // r1: key.
  // r2: receiver.
  GenerateRuntimeSetProperty(masm, strict_mode);

  // Extra capacity case: Check if there is extra capacity to
  // perform the store and update the length. Used for adding one
  // element to the array by writing to array[array.length].
  __ bind(&extra);
  // Condition code from comparing key and array length is still available.
  __ b(ne, &slow);  // Only support writing to writing to array[array.length].
  // Check for room in the elements backing store.
  // Both the key and the length of FixedArray are smis.
  // NOTE(review): original line 1505 is missing — presumably the load of the
  // FixedArray length into ip; confirm against the original.
  __ cmp(key, Operand(ip));
  __ b(hs, &slow);
  __ ldr(elements_map, FieldMemOperand(elements, HeapObject::kMapOffset));
  __ cmp(elements_map,
         Operand(masm->isolate()->factory()->fixed_array_map()));
  __ b(ne, &check_if_double_array);
  __ jmp(&fast_object_grow);

  __ bind(&check_if_double_array);
  __ cmp(elements_map,
         Operand(masm->isolate()->factory()->fixed_double_array_map()));
  __ b(ne, &slow);
  __ jmp(&fast_double_grow);

  // Array case: Get the length and the elements array from the JS
  // array. Check that the array is in fast mode (and writable); if it
  // is the length is always a smi.
  __ bind(&array);
  __ ldr(elements, FieldMemOperand(receiver, JSObject::kElementsOffset));

  // Check the key against the length in the array.
  __ ldr(ip, FieldMemOperand(receiver, JSArray::kLengthOffset));
  __ cmp(key, Operand(ip));
  __ b(hs, &extra);

  // NOTE(review): in both helper calls below an argument line is missing
  // (original lines 1532 and 1536) — presumably &slow plus the
  // check-map / increment-length mode flags; confirm against the original.
  KeyedStoreGenerateGenericHelper(masm, &fast_object, &fast_double,
                                  value, key, receiver, receiver_map,
                                  elements_map, elements);
  KeyedStoreGenerateGenericHelper(masm, &fast_object_grow, &fast_double_grow,
                                  value, key, receiver, receiver_map,
                                  elements_map, elements);
}
1540 
1541 
// Generates the megamorphic named-store stub: probes the stub cache for a
// monomorphic store handler matching (receiver map, name), jumping to the
// miss handler when no cached stub is found.
void StoreIC::GenerateMegamorphic(MacroAssembler* masm,
                                  StrictModeFlag strict_mode) {
  // ----------- S t a t e -------------
  //  -- r0    : value
  //  -- r1    : receiver
  //  -- r2    : name
  //  -- lr    : return address
  // -----------------------------------

  // Get the receiver from the stack and probe the stub cache.
  Code::Flags flags =
      Code::ComputeFlags(Code::STORE_IC, MONOMORPHIC, strict_mode);

  Isolate::Current()->stub_cache()->GenerateProbe(
      masm, flags, r1, r2, r3, r4, r5, r6);

  // Cache miss: Jump to runtime.
  GenerateMiss(masm);
}
1561 
1562 
// Generates the named-store miss handler: pushes (receiver, name, value)
// and tail-calls the kStoreIC_Miss runtime entry.
void StoreIC::GenerateMiss(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r0    : value
  //  -- r1    : receiver
  //  -- r2    : name
  //  -- lr    : return address
  // -----------------------------------

  __ Push(r1, r2, r0);

  // Perform tail call to the entry.
  ExternalReference ref =
      ExternalReference(IC_Utility(kStoreIC_Miss), masm->isolate());
  __ TailCallExternalReference(ref, 3, 1);
}
1578 
1579 
// Generates the specialized stub for stores to a JSArray's "length"
// property: after guarding that the receiver is a fast-elements JS array
// with ordinary (non-dictionary) properties and the value is a smi, it
// tail-calls the kStoreIC_ArrayLength runtime entry; anything else misses.
void StoreIC::GenerateArrayLength(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r0    : value
  //  -- r1    : receiver
  //  -- r2    : name
  //  -- lr    : return address
  // -----------------------------------
  //
  // This accepts as a receiver anything JSArray::SetElementsLength accepts
  // (currently anything except for external arrays which means anything with
  // elements of FixedArray type).  Value must be a number, but only smis are
  // accepted as the most common case.

  Label miss;

  Register receiver = r1;
  Register value = r0;
  Register scratch = r3;

  // Check that the receiver isn't a smi.
  __ JumpIfSmi(receiver, &miss);

  // Check that the object is a JS array.
  __ CompareObjectType(receiver, scratch, scratch, JS_ARRAY_TYPE);
  __ b(ne, &miss);

  // Check that elements are FixedArray.
  // We rely on StoreIC_ArrayLength below to deal with all types of
  // fast elements (including COW).
  __ ldr(scratch, FieldMemOperand(receiver, JSArray::kElementsOffset));
  __ CompareObjectType(scratch, scratch, scratch, FIXED_ARRAY_TYPE);
  __ b(ne, &miss);

  // Check that the array has fast properties, otherwise the length
  // property might have been redefined.
  __ ldr(scratch, FieldMemOperand(receiver, JSArray::kPropertiesOffset));
  __ ldr(scratch, FieldMemOperand(scratch, FixedArray::kMapOffset));
  __ CompareRoot(scratch, Heap::kHashTableMapRootIndex);
  __ b(eq, &miss);

  // Check that value is a smi.
  __ JumpIfNotSmi(value, &miss);

  // Prepare tail call to StoreIC_ArrayLength.
  __ Push(receiver, value);

  ExternalReference ref =
      ExternalReference(IC_Utility(kStoreIC_ArrayLength), masm->isolate());
  __ TailCallExternalReference(ref, 2, 1);

  __ bind(&miss);

  GenerateMiss(masm);
}
1634 
1635 
// Generates the named-store stub for receivers with dictionary (normal)
// properties: stores directly into the property dictionary, counting hits
// and misses.
void StoreIC::GenerateNormal(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r0    : value
  //  -- r1    : receiver
  //  -- r2    : name
  //  -- lr    : return address
  // -----------------------------------
  Label miss;

  GenerateStringDictionaryReceiverCheck(masm, r1, r3, r4, r5, &miss);

  GenerateDictionaryStore(masm, &miss, r3, r2, r0, r4, r5);
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->store_normal_hit(),
                      1, r4, r5);
  __ Ret();

  __ bind(&miss);
  __ IncrementCounter(counters->store_normal_miss(), 1, r4, r5);
  GenerateMiss(masm);
}
1657 
1658 
// Generates the named-store stub for global proxy receivers: forwards the
// store to Runtime::kSetProperty with NONE attributes and the given
// strict-mode flag (5 args total).
void StoreIC::GenerateGlobalProxy(MacroAssembler* masm,
                                  StrictModeFlag strict_mode) {
  // ----------- S t a t e -------------
  //  -- r0    : value
  //  -- r1    : receiver
  //  -- r2    : name
  //  -- lr    : return address
  // -----------------------------------

  __ Push(r1, r2, r0);

  __ mov(r1, Operand(Smi::FromInt(NONE)));  // PropertyAttributes
  __ mov(r0, Operand(Smi::FromInt(strict_mode)));
  __ Push(r1, r0);

  // Do tail-call to runtime routine.
  __ TailCallRuntime(Runtime::kSetProperty, 5, 1);
}
1677 
1678 
1679 #undef __
1680 
1681 
1683  switch (op) {
1684  case Token::EQ_STRICT:
1685  case Token::EQ:
1686  return eq;
1687  case Token::LT:
1688  return lt;
1689  case Token::GT:
1690  return gt;
1691  case Token::LTE:
1692  return le;
1693  case Token::GTE:
1694  return ge;
1695  default:
1696  UNREACHABLE();
1697  return kNoCondition;
1698  }
1699 }
1700 
1701 
1702 void CompareIC::UpdateCaches(Handle<Object> x, Handle<Object> y) {
1703  HandleScope scope;
1704  Handle<Code> rewritten;
1705  State previous_state = GetState();
1706  State state = TargetState(previous_state, false, x, y);
1707  if (state == GENERIC) {
1708  CompareStub stub(GetCondition(), strict(), NO_COMPARE_FLAGS, r1, r0);
1709  rewritten = stub.GetCode();
1710  } else {
1711  ICCompareStub stub(op_, state);
1712  if (state == KNOWN_OBJECTS) {
1713  stub.set_known_map(Handle<Map>(Handle<JSObject>::cast(x)->map()));
1714  }
1715  rewritten = stub.GetCode();
1716  }
1717  set_target(*rewritten);
1718 
1719 #ifdef DEBUG
1720  if (FLAG_trace_ic) {
1721  PrintF("[CompareIC (%s->%s)#%s]\n",
1722  GetStateName(previous_state),
1723  GetStateName(state),
1724  Token::Name(op_));
1725  }
1726 #endif
1727 
1728  // Activate inlined smi code.
1729  if (previous_state == UNINITIALIZED) {
1731  }
1732 }
1733 
1734 
1736  Address cmp_instruction_address =
1737  Assembler::return_address_from_call_start(address);
1738 
1739  // If the instruction following the call is not a cmp rx, #yyy, nothing
1740  // was inlined.
1741  Instr instr = Assembler::instr_at(cmp_instruction_address);
1742  if (!Assembler::IsCmpImmediate(instr)) {
1743  return;
1744  }
1745 
1746  // The delta to the start of the map check instruction and the
1747  // condition code uses at the patched jump.
1748  int delta = Assembler::GetCmpImmediateRawImmediate(instr);
1749  delta +=
1751  // If the delta is 0 the instruction is cmp r0, #0 which also signals that
1752  // nothing was inlined.
1753  if (delta == 0) {
1754  return;
1755  }
1756 
1757 #ifdef DEBUG
1758  if (FLAG_trace_ic) {
1759  PrintF("[ patching ic at %p, cmp=%p, delta=%d\n",
1760  address, cmp_instruction_address, delta);
1761  }
1762 #endif
1763 
1764  Address patch_address =
1765  cmp_instruction_address - delta * Instruction::kInstrSize;
1766  Instr instr_at_patch = Assembler::instr_at(patch_address);
1767  Instr branch_instr =
1768  Assembler::instr_at(patch_address + Instruction::kInstrSize);
1769  // This is patching a conditional "jump if not smi/jump if smi" site.
1770  // Enabling by changing from
1771  // cmp rx, rx
1772  // b eq/ne, <target>
1773  // to
1774  // tst rx, #kSmiTagMask
1775  // b ne/eq, <target>
1776  // and vice-versa to be disabled again.
1777  CodePatcher patcher(patch_address, 2);
1778  Register reg = Assembler::GetRn(instr_at_patch);
1779  if (check == ENABLE_INLINED_SMI_CHECK) {
1780  ASSERT(Assembler::IsCmpRegister(instr_at_patch));
1781  ASSERT_EQ(Assembler::GetRn(instr_at_patch).code(),
1782  Assembler::GetRm(instr_at_patch).code());
1783  patcher.masm()->tst(reg, Operand(kSmiTagMask));
1784  } else {
1786  ASSERT(Assembler::IsTstImmediate(instr_at_patch));
1787  patcher.masm()->cmp(reg, reg);
1788  }
1789  ASSERT(Assembler::IsBranch(branch_instr));
1790  if (Assembler::GetCondition(branch_instr) == eq) {
1791  patcher.EmitCondition(ne);
1792  } else {
1793  ASSERT(Assembler::GetCondition(branch_instr) == ne);
1794  patcher.EmitCondition(eq);
1795  }
1796 }
1797 
1798 
1799 } } // namespace v8::internal
1800 
1801 #endif // V8_TARGET_ARCH_ARM
byte * Address
Definition: globals.h:157
static bool IsBranch(Instr instr)
static void GenerateTransitionElementsDoubleToObject(MacroAssembler *masm)
static const int kBitFieldOffset
Definition: objects.h:5160
static void GenerateRuntimeGetProperty(MacroAssembler *masm)
const intptr_t kSmiTagMask
Definition: v8.h:4016
const intptr_t kSmiSignMask
Definition: v8globals.h:41
static void GenerateNormal(MacroAssembler *masm, int argc)
static bool IsCmpRegister(Instr instr)
const Register r3
static const int kMapHashShift
Definition: heap.h:2350
void PrintF(const char *format,...)
Definition: v8utils.cc:40
static void GenerateGeneric(MacroAssembler *masm, StrictModeFlag strict_mode)
static const char * Name(Value tok)
Definition: token.h:196
static Smi * FromInt(int value)
Definition: objects-inl.h:981
static void GenerateMiss(MacroAssembler *masm, int argc, IC::UtilityId id, Code::ExtraICState extra_state)
static const int kGlobalReceiverOffset
Definition: objects.h:6288
const Register r6
static void GenerateGlobalProxy(MacroAssembler *masm, StrictModeFlag strict_mode)
static void GenerateMegamorphic(MacroAssembler *masm, StrictModeFlag strict_mode)
static int GetCmpImmediateRawImmediate(Instr instr)
static void GenerateStringLength(MacroAssembler *masm, bool support_wrappers)
KeyedStoreCheckMap
Definition: ic.h:634
static const int kHasNamedInterceptor
Definition: objects.h:5169
static const int kIsAccessCheckNeeded
Definition: objects.h:5173
static Register GetRm(Instr instr)
Address address() const
Definition: ic-inl.h:41
static void GenerateMegamorphic(MacroAssembler *masm)
#define ASSERT(condition)
Definition: checks.h:270
static bool IsCmpImmediate(Instr instr)
const int kPointerSizeLog2
Definition: globals.h:232
static const int kInstanceSizeOffset
Definition: objects.h:5147
Isolate * isolate() const
Definition: ic.h:140
static const ExtraICState kNoExtraICState
Definition: objects.h:4236
const Register r2
void PatchInlinedSmiCode(Address address, InlinedSmiCheck check)
static void GenerateArrayLength(MacroAssembler *masm)
static const int kHashFieldOffset
Definition: objects.h:7319
static Condition GetCondition(Instr instr)
static const int kHasIndexedInterceptor
Definition: objects.h:5170
void UpdateCaches(Handle< Object > x, Handle< Object > y)
const Register sp
#define UNREACHABLE()
Definition: checks.h:50
static Condition ComputeCondition(Token::Value op)
STATIC_ASSERT((FixedDoubleArray::kHeaderSize &kDoubleAlignmentMask)==0)
const uint32_t kIsSymbolMask
Definition: objects.h:462
const Register ip
const Register r9
const int kPointerSize
Definition: globals.h:220
static void GenerateGeneric(MacroAssembler *masm)
const int kHeapObjectTag
Definition: v8.h:4009
static void GenerateMiss(MacroAssembler *masm)
#define __
static bool decode(uint32_t value)
Definition: utils.h:273
static const int kPropertiesOffset
Definition: objects.h:2171
static Flags ComputeFlags(Kind kind, InlineCacheState ic_state=UNINITIALIZED, ExtraICState extra_ic_state=kNoExtraICState, StubType type=NORMAL, int argc=-1, InlineCacheHolderFlag holder=OWN_MAP)
Definition: objects-inl.h:3491
static const int kInObjectPropertiesOffset
Definition: objects.h:5149
static void GenerateSmiToDouble(MacroAssembler *masm, Label *fail)
static void GenerateMapChangeElementsTransition(MacroAssembler *masm)
const Register r0
static const int kElementsOffset
Definition: objects.h:2172
static const int kContainsCachedArrayIndexMask
Definition: objects.h:7374
static Register GetRn(Instr instr)
static void GenerateMegamorphic(MacroAssembler *masm, int argc)
static bool IsTstImmediate(Instr instr)
static const int kLengthOffset
Definition: objects.h:8332
static const int kHeaderSize
Definition: objects.h:2296
static void GenerateSlow(MacroAssembler *masm)
static void GenerateMiss(MacroAssembler *masm, int argc, Code::ExtraICState extra_state)
Definition: ic.h:278
static const int kMapOffset
Definition: objects.h:1261
static void GenerateRuntimeSetProperty(MacroAssembler *masm, StrictModeFlag strict_mode)
const Register r1
static void GenerateDoubleToObject(MacroAssembler *masm, Label *fail)
static const int kLengthOffset
Definition: objects.h:2295
static const int kSlowCaseBitFieldMask
Definition: ic.h:508
KeyedStoreIncrementLength
Definition: ic.h:640
static void GenerateNormal(MacroAssembler *masm, int argc)
static void GeneratePositiveLookup(MacroAssembler *masm, Label *miss, Label *done, Register elements, Register name, Register r0, Register r1)
InlinedSmiCheck
Definition: ic.h:853
MemOperand FieldMemOperand(Register object, int offset)
static void GenerateMonomorphicCacheProbe(MacroAssembler *masm, int argc, Code::Kind kind, Code::ExtraICState extra_state)
static void GenerateString(MacroAssembler *masm)
static Register GetCmpImmediateRegister(Instr instr)
const int kSmiTagSize
Definition: v8.h:4015
#define ASSERT_EQ(v1, v2)
Definition: checks.h:271
static void GenerateNonStrictArguments(MacroAssembler *masm)
static void GenerateMegamorphic(MacroAssembler *masm, int argc, Code::ExtraICState extra_ic_state)
const int kSmiTag
Definition: v8.h:4014
static void GenerateNormal(MacroAssembler *masm)
static void GenerateFunctionPrototype(MacroAssembler *masm)
static void GenerateIndexedInterceptor(MacroAssembler *masm)
activate correct semantics for inheriting readonliness enable harmony semantics for typeof enable harmony enable harmony proxies enable all harmony harmony_scoping harmony_proxies harmony_scoping tracks arrays with only smi values automatically unbox arrays of doubles use crankshaft use hydrogen range analysis use hydrogen global value numbering use function inlining maximum number of AST nodes considered for a single inlining loop invariant code motion print statistics for hydrogen trace generated IR for specified phases trace register allocator trace range analysis trace representation types environment for every instruction put a break point before deoptimizing polymorphic inlining perform array bounds checks elimination use dead code elimination trace on stack replacement optimize closures cache optimized code for closures functions with arguments object loop weight for representation inference allow uint32 values on optimize frames if they are used only in safe operations track parallel recompilation enable all profiler experiments number of stack frames inspected by the profiler call recompile stub directly when self optimizing trigger profiler ticks based on counting instead of timing weight back edges by jump distance for interrupt triggering percentage of ICs that must have type info to allow optimization watch_ic_patching retry_self_opt interrupt_at_exit extra verbose compilation tracing generate extra emit comments in code disassembly enable use of SSE3 instructions if available enable use of CMOV instruction if available enable use of SAHF instruction if enable use of VFP3 instructions if available this implies enabling ARMv7 and VFP2 enable use of VFP2 instructions if available enable use of SDIV and UDIV instructions if enable loading bit constant by means of movw movt instruction enable unaligned accesses for enable use of MIPS FPU instructions if NULL
Definition: flags.cc:301
void set_target(Code *code)
Definition: ic.h:149
activate correct semantics for inheriting readonliness enable harmony semantics for typeof enable harmony enable harmony proxies enable all harmony harmony_scoping harmony_proxies harmony_scoping tracks arrays with only smi values automatically unbox arrays of doubles use crankshaft use hydrogen range analysis use hydrogen global value numbering use function inlining maximum number of AST nodes considered for a single inlining loop invariant code motion print statistics for hydrogen trace generated IR for specified phases trace register allocator trace range analysis trace representation types environment for every instruction put a break point before deoptimizing polymorphic inlining perform array bounds checks elimination use dead code elimination trace on stack replacement optimize closures cache optimized code for closures functions with arguments object loop weight for representation inference allow uint32 values on optimize frames if they are used only in safe operations track parallel recompilation enable all profiler experiments number of stack frames inspected by the profiler call recompile stub directly when self optimizing trigger profiler ticks based on counting instead of timing weight back edges by jump distance for interrupt triggering percentage of ICs that must have type info to allow optimization watch_ic_patching retry_self_opt interrupt_at_exit extra verbose compilation tracing generate extra code(assertions) for debugging") DEFINE_bool(code_comments
const uint32_t kSymbolTag
Definition: objects.h:464
static const int kCapacityMask
Definition: heap.h:2349
static void GenerateNonStrictArguments(MacroAssembler *masm)
static void GenerateMiss(MacroAssembler *masm, bool force_generic)
static void GenerateMiss(MacroAssembler *masm)
static void GenerateTransitionElementsSmiToDouble(MacroAssembler *masm)
static const int kHashShift
Definition: objects.h:7341
static void GenerateNonStrictArguments(MacroAssembler *masm, int argc)
static void GenerateArrayLength(MacroAssembler *masm)
static const char * GetStateName(State state)
Definition: ic.cc:2586
activate correct semantics for inheriting readonliness enable harmony semantics for typeof enable harmony enable harmony proxies enable all harmony harmony_scoping harmony_proxies harmony_scoping tracks arrays with only smi values automatically unbox arrays of doubles use crankshaft use hydrogen range analysis use hydrogen global value numbering use function inlining maximum number of AST nodes considered for a single inlining loop invariant code motion print statistics for hydrogen trace generated IR for specified phases trace register allocator trace range analysis trace representation types environment for every instruction put a break point before deoptimizing polymorphic inlining perform array bounds checks elimination use dead code elimination trace on stack replacement optimize closures cache optimized code for closures functions with arguments object loop weight for representation inference allow uint32 values on optimize frames if they are used only in safe operations track parallel recompilation enable all profiler experiments number of stack frames inspected by the profiler call recompile stub directly when self optimizing trigger profiler ticks based on counting instead of timing weight back edges by jump distance for interrupt triggering percentage of ICs that must have type info to allow optimization watch_ic_patching retry_self_opt interrupt_at_exit extra verbose compilation tracing generate extra emit comments in code disassembly enable use of SSE3 instructions if available enable use of CMOV instruction if available enable use of SAHF instruction if enable use of VFP3 instructions if available this implies enabling ARMv7 and VFP2 enable use of VFP2 instructions if available enable use of SDIV and UDIV instructions if enable loading bit constant by means of movw movt instruction enable unaligned accesses for enable use of MIPS FPU instructions if expose natives in global object expose gc extension number of stack frames to capture disable builtin 
natives files print a stack trace if an assertion failure occurs use random jit cookie to mask large constants trace lazy optimization use adaptive optimizations prepare for turning on always opt minimum length for automatic enable preparsing maximum number of optimization attempts before giving up cache prototype transitions automatically set the debug break flag when debugger commands are in the queue always cause a debug break before aborting maximum length of function source code printed in a stack trace max size of the new max size of the old max size of executable always perform global GCs print one trace line following each garbage collection do not print trace line after scavenger collection print more details following each garbage collection print amount of external allocated memory after each time it is adjusted flush code that we expect not to use again before full gc do incremental marking steps track object counts and memory usage use caching Perform compaction on every full GC Never perform compaction on full GC testing only Compact code space on full incremental collections Default seed for initializing random allows verbose printing trace parsing and preparsing Check icache flushes in ARM and MIPS simulator Stack alingment in bytes in print stack trace when throwing exceptions randomize hashes to avoid predictable hash Fixed seed to use to hash property activate a timer that switches between V8 threads testing_bool_flag float flag Seed used for threading test randomness A filename with extra code to be included in the Print usage including flags
Definition: flags.cc:495
void check(i::Vector< const char > string)
static const int kHashMask
Definition: heap.h:2351
static void GenerateMiss(MacroAssembler *masm, int argc)
Definition: ic.h:311
const Register r5
static const int kInstanceTypeOffset
Definition: objects.h:5158
static const int kEntriesPerBucket
Definition: heap.h:2352
static void GenerateNormal(MacroAssembler *masm)
const Register r4
static void GenerateMiss(MacroAssembler *masm, bool force_generic)
const Register r7