ic-mips.cc
1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are
4 // met:
5 //
6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided
11 // with the distribution.
12 // * Neither the name of Google Inc. nor the names of its
13 // contributors may be used to endorse or promote products derived
14 // from this software without specific prior written permission.
15 //
16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27 
28 
29 
30 #include "v8.h"
31 
32 #if defined(V8_TARGET_ARCH_MIPS)
33 
34 #include "codegen.h"
35 #include "code-stubs.h"
36 #include "ic-inl.h"
37 #include "runtime.h"
38 #include "stub-cache.h"
39 
40 namespace v8 {
41 namespace internal {
42 
43 
44 // ----------------------------------------------------------------------------
45 // Static IC stub generators.
46 //
47 
48 #define __ ACCESS_MASM(masm)
49 
50 
51 static void GenerateGlobalInstanceTypeCheck(MacroAssembler* masm,
52  Register type,
53  Label* global_object) {
54  // Register usage:
55  // type: holds the receiver instance type on entry.
56  __ Branch(global_object, eq, type, Operand(JS_GLOBAL_OBJECT_TYPE));
57  __ Branch(global_object, eq, type, Operand(JS_BUILTINS_OBJECT_TYPE));
58  __ Branch(global_object, eq, type, Operand(JS_GLOBAL_PROXY_TYPE));
59 }
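// Note: global receivers are singled out above because their property
// dictionaries store JSGlobalPropertyCell objects rather than the property
// values themselves, so the generic dictionary fast paths below would read
// or overwrite a cell instead of a value; such receivers must take the
// miss path.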
60 
61 
62 // Generated code falls through if the receiver is a regular non-global
63 // JS object with slow properties and no interceptors.
64 static void GenerateStringDictionaryReceiverCheck(MacroAssembler* masm,
65  Register receiver,
66  Register elements,
67  Register scratch0,
68  Register scratch1,
69  Label* miss) {
70  // Register usage:
71  // receiver: holds the receiver on entry and is unchanged.
72  // elements: holds the property dictionary on fall through.
73  // Scratch registers:
74  // scratch0: used to hold the receiver map.
75  // scratch1: used to hold the receiver instance type, receiver bit mask
76  // and elements map.
77 
78  // Check that the receiver isn't a smi.
79  __ JumpIfSmi(receiver, miss);
80 
81  // Check that the receiver is a valid JS object.
82  __ GetObjectType(receiver, scratch0, scratch1);
83  __ Branch(miss, lt, scratch1, Operand(FIRST_SPEC_OBJECT_TYPE));
84 
85  // If this assert fails, we have to check upper bound too.
86  STATIC_ASSERT(LAST_TYPE == LAST_SPEC_OBJECT_TYPE);
87 
88  GenerateGlobalInstanceTypeCheck(masm, scratch1, miss);
89 
90  // Check that the global object does not require access checks.
91  __ lbu(scratch1, FieldMemOperand(scratch0, Map::kBitFieldOffset));
92  __ And(scratch1, scratch1, Operand((1 << Map::kIsAccessCheckNeeded) |
93  (1 << Map::kHasNamedInterceptor)));
94  __ Branch(miss, ne, scratch1, Operand(zero_reg));
95 
96  __ lw(elements, FieldMemOperand(receiver, JSObject::kPropertiesOffset));
97  __ lw(scratch1, FieldMemOperand(elements, HeapObject::kMapOffset));
98  __ LoadRoot(scratch0, Heap::kHashTableMapRootIndex);
99  __ Branch(miss, ne, scratch1, Operand(scratch0));
100 }
101 
102 
103 // Helper function used from LoadIC/CallIC GenerateNormal.
104 //
105 // elements: Property dictionary. It is not clobbered if a jump to the miss
106 // label is done.
107 // name: Property name. It is not clobbered if a jump to the miss label is
108 // done.
109 // result: Register for the result. It is only updated if a jump to the miss
110 // label is not done. It can be the same as 'elements' or 'name', in which
111 // case that register is clobbered when the miss label is not taken.
112 // The two scratch registers need to be different from elements, name and
113 // result.
114 // The generated code assumes that the receiver has slow properties,
115 // is not a global object and does not have interceptors.
116 // The address returned from GenerateStringDictionaryProbes() in scratch2
117 // is used.
118 static void GenerateDictionaryLoad(MacroAssembler* masm,
119  Label* miss,
120  Register elements,
121  Register name,
122  Register result,
123  Register scratch1,
124  Register scratch2) {
125  // Main use of the scratch registers.
126  // scratch1: Used as temporary and to hold the capacity of the property
127  // dictionary.
128  // scratch2: Used as temporary.
129  Label done;
130 
131  // Probe the dictionary.
132  StringDictionaryLookupStub::GeneratePositiveLookup(masm,
133  miss,
134  &done,
135  elements,
136  name,
137  scratch1,
138  scratch2);
139 
140  // If probing finds an entry check that the value is a normal
141  // property.
142  __ bind(&done); // scratch2 == elements + 4 * index.
143  const int kElementsStartOffset = StringDictionary::kHeaderSize +
144  StringDictionary::kElementsStartIndex * kPointerSize;
145  const int kDetailsOffset = kElementsStartOffset + 2 * kPointerSize;
146  __ lw(scratch1, FieldMemOperand(scratch2, kDetailsOffset));
147  __ And(at,
148  scratch1,
149  Operand(PropertyDetails::TypeField::kMask << kSmiTagSize));
150  __ Branch(miss, ne, at, Operand(zero_reg));
151 
152  // Get the value at the masked, scaled index and return.
153  __ lw(result,
154  FieldMemOperand(scratch2, kElementsStartOffset + 1 * kPointerSize));
155 }
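// A StringDictionary entry spans three pointers -- key, value, details --
// starting at kElementsStartOffset, which is why the value is loaded at
// kElementsStartOffset + 1 * kPointerSize and the details word sits at
// kElementsStartOffset + 2 * kPointerSize. The details word is a smi, so
// the type mask above is shifted left by kSmiTagSize before testing.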
156 
157 
158 // Helper function used from StoreIC::GenerateNormal.
159 //
160 // elements: Property dictionary. It is not clobbered if a jump to the miss
161 // label is done.
162 // name: Property name. It is not clobbered if a jump to the miss label is
163 // done.
164 // value: The value to store.
165 // The two scratch registers need to be different from elements, name and
166 // value.
167 // The generated code assumes that the receiver has slow properties,
168 // is not a global object and does not have interceptors.
169 // The address returned from GenerateStringDictionaryProbes() in scratch2
170 // is used.
171 static void GenerateDictionaryStore(MacroAssembler* masm,
172  Label* miss,
173  Register elements,
174  Register name,
175  Register value,
176  Register scratch1,
177  Register scratch2) {
178  // Main use of the scratch registers.
179  // scratch1: Used as temporary and to hold the capacity of the property
180  // dictionary.
181  // scratch2: Used as temporary.
182  Label done;
183 
184  // Probe the dictionary.
185  StringDictionaryLookupStub::GeneratePositiveLookup(masm,
186  miss,
187  &done,
188  elements,
189  name,
190  scratch1,
191  scratch2);
192 
193  // If probing finds an entry in the dictionary check that the value
194  // is a normal property that is not read only.
195  __ bind(&done); // scratch2 == elements + 4 * index.
196  const int kElementsStartOffset = StringDictionary::kHeaderSize +
197  StringDictionary::kElementsStartIndex * kPointerSize;
198  const int kDetailsOffset = kElementsStartOffset + 2 * kPointerSize;
199  const int kTypeAndReadOnlyMask =
200  (PropertyDetails::TypeField::kMask |
201  PropertyDetails::AttributesField::encode(READ_ONLY)) << kSmiTagSize;
202  __ lw(scratch1, FieldMemOperand(scratch2, kDetailsOffset));
203  __ And(at, scratch1, Operand(kTypeAndReadOnlyMask));
204  __ Branch(miss, ne, at, Operand(zero_reg));
205 
206  // Store the value at the masked, scaled index and return.
207  const int kValueOffset = kElementsStartOffset + kPointerSize;
208  __ Addu(scratch2, scratch2, Operand(kValueOffset - kHeapObjectTag));
209  __ sw(value, MemOperand(scratch2));
210 
211  // Update the write barrier. Make sure not to clobber the value.
212  __ mov(scratch1, value);
213  __ RecordWrite(
214  elements, scratch2, scratch1, kRAHasNotBeenSaved, kDontSaveFPRegs);
215 }
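// The RecordWrite above keeps the incremental marker consistent after the
// raw sw: 'elements' is the object written into and 'scratch2' holds the
// slot address. The value is copied into scratch1 first because RecordWrite
// clobbers the register holding the value, and callers still expect it in
// 'value'.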
216 
217 
218 void LoadIC::GenerateArrayLength(MacroAssembler* masm) {
219  // ----------- S t a t e -------------
220  // -- a2 : name
221  // -- ra : return address
222  // -- a0 : receiver
223  // -- sp[0] : receiver
224  // -----------------------------------
225  Label miss;
226 
227  StubCompiler::GenerateLoadArrayLength(masm, a0, a3, &miss);
228  __ bind(&miss);
229  StubCompiler::GenerateLoadMiss(masm, Code::LOAD_IC);
230 }
231 
232 
233 void LoadIC::GenerateStringLength(MacroAssembler* masm, bool support_wrappers) {
234  // ----------- S t a t e -------------
235  // -- a2 : name
236  // -- ra : return address
237  // -- a0 : receiver
238  // -- sp[0] : receiver
239  // -----------------------------------
240  Label miss;
241 
242  StubCompiler::GenerateLoadStringLength(masm, a0, a1, a3, &miss,
243  support_wrappers);
244  // Cache miss: Jump to runtime.
245  __ bind(&miss);
246  StubCompiler::GenerateLoadMiss(masm, Code::LOAD_IC);
247 }
248 
249 
250 void LoadIC::GenerateFunctionPrototype(MacroAssembler* masm) {
251  // ----------- S t a t e -------------
252  // -- a2 : name
253  // -- ra : return address
254  // -- a0 : receiver
255  // -- sp[0] : receiver
256  // -----------------------------------
257  Label miss;
258 
259  StubCompiler::GenerateLoadFunctionPrototype(masm, a0, a1, a3, &miss);
260  __ bind(&miss);
261  StubCompiler::GenerateLoadMiss(masm, Code::LOAD_IC);
262 }
263 
264 
265 // Checks the receiver for special cases (value type, slow case bits).
266 // Falls through for regular JS object.
267 static void GenerateKeyedLoadReceiverCheck(MacroAssembler* masm,
268  Register receiver,
269  Register map,
270  Register scratch,
271  int interceptor_bit,
272  Label* slow) {
273  // Check that the object isn't a smi.
274  __ JumpIfSmi(receiver, slow);
275  // Get the map of the receiver.
276  __ lw(map, FieldMemOperand(receiver, HeapObject::kMapOffset));
277  // Check bit field.
278  __ lbu(scratch, FieldMemOperand(map, Map::kBitFieldOffset));
279  __ And(at, scratch, Operand(KeyedLoadIC::kSlowCaseBitFieldMask));
280  __ Branch(slow, ne, at, Operand(zero_reg));
281  // Check that the object is some kind of JS object EXCEPT JS Value type.
282  // In the case that the object is a value-wrapper object,
283  // we enter the runtime system to make sure that indexing into string
284  // objects works as intended.
285  ASSERT(JS_OBJECT_TYPE > JS_VALUE_TYPE);
286  __ lbu(scratch, FieldMemOperand(map, Map::kInstanceTypeOffset));
287  __ Branch(slow, lt, scratch, Operand(JS_OBJECT_TYPE));
288 }
289 
290 
291 // Loads an indexed element from a fast case array.
292 // If not_fast_array is NULL, doesn't perform the elements map check.
293 static void GenerateFastArrayLoad(MacroAssembler* masm,
294  Register receiver,
295  Register key,
296  Register elements,
297  Register scratch1,
298  Register scratch2,
299  Register result,
300  Label* not_fast_array,
301  Label* out_of_range) {
302  // Register use:
303  //
304  // receiver - holds the receiver on entry.
305  // Unchanged unless 'result' is the same register.
306  //
307  // key - holds the smi key on entry.
308  // Unchanged unless 'result' is the same register.
309  //
310  // elements - holds the elements of the receiver on exit.
311  //
312  // result - holds the result on exit if the load succeeded.
313  // Allowed to be the same as 'receiver' or 'key'.
314  // Unchanged on bailout so 'receiver' and 'key' can be safely
315  // used by further computation.
316  //
317  // Scratch registers:
318  //
319  // scratch1 - used to hold elements map and elements length.
320  // Holds the elements map if not_fast_array branch is taken.
321  //
322  // scratch2 - used to hold the loaded value.
323 
324  __ lw(elements, FieldMemOperand(receiver, JSObject::kElementsOffset));
325  if (not_fast_array != NULL) {
326  // Check that the object is in fast mode (not dictionary).
327  __ lw(scratch1, FieldMemOperand(elements, HeapObject::kMapOffset));
328  __ LoadRoot(at, Heap::kFixedArrayMapRootIndex);
329  __ Branch(not_fast_array, ne, scratch1, Operand(at));
330  } else {
331  __ AssertFastElements(elements);
332  }
333 
334  // Check that the key (index) is within bounds.
335  __ lw(scratch1, FieldMemOperand(elements, FixedArray::kLengthOffset));
336  __ Branch(out_of_range, hs, key, Operand(scratch1));
337 
338  // Fast case: Do the load.
339  __ Addu(scratch1, elements,
340  Operand(FixedArray::kHeaderSize - kHeapObjectTag));
341  // The key is a smi.
342  STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize < kPointerSizeLog2);
343  __ sll(at, key, kPointerSizeLog2 - kSmiTagSize);
344  __ addu(at, at, scratch1);
345  __ lw(scratch2, MemOperand(at));
346 
347  __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
348  // In case the loaded value is the_hole we have to consult GetProperty
349  // to ensure the prototype chain is searched.
350  __ Branch(out_of_range, eq, scratch2, Operand(at));
351  __ mov(result, scratch2);
352 }
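// The shift by kPointerSizeLog2 - kSmiTagSize above is the usual smi trick:
// a smi already carries its integer shifted left by kSmiTagSize, so a single
// further shift turns the tagged key directly into a byte offset within the
// FixedArray, with no separate untagging step.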
353 
354 
355 // Checks whether a key is an array index string or a symbol string.
356 // Falls through if a key is a symbol.
357 static void GenerateKeyStringCheck(MacroAssembler* masm,
358  Register key,
359  Register map,
360  Register hash,
361  Label* index_string,
362  Label* not_symbol) {
363  // The key is not a smi.
364  // Is it a string?
365  __ GetObjectType(key, map, hash);
366  __ Branch(not_symbol, ge, hash, Operand(FIRST_NONSTRING_TYPE));
367 
368  // Is the string an array index, with cached numeric value?
369  __ lw(hash, FieldMemOperand(key, String::kHashFieldOffset));
370  __ And(at, hash, Operand(String::kContainsCachedArrayIndexMask));
371  __ Branch(index_string, eq, at, Operand(zero_reg));
372 
373  // Is the string a symbol?
374  // map: key map
375  __ lbu(hash, FieldMemOperand(map, Map::kInstanceTypeOffset));
376  STATIC_ASSERT(kSymbolTag != 0);
377  __ And(at, hash, Operand(kIsSymbolMask));
378  __ Branch(not_symbol, eq, at, Operand(zero_reg));
379 }
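// Strings that look like array indices (e.g. "3") cache the parsed index in
// their hash field; the kContainsCachedArrayIndexMask test above detects
// that case so callers can recover a smi key via IndexFromHash instead of
// taking the slow path.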
380 
381 
382 // Defined in ic.cc.
383 Object* CallIC_Miss(Arguments args);
384 
385 // The generated code does not accept smi keys.
386 // The generated code falls through if both probes miss.
387 void CallICBase::GenerateMonomorphicCacheProbe(MacroAssembler* masm,
388  int argc,
389  Code::Kind kind,
390  Code::ExtraICState extra_state) {
391  // ----------- S t a t e -------------
392  // -- a1 : receiver
393  // -- a2 : name
394  // -----------------------------------
395  Label number, non_number, non_string, boolean, probe, miss;
396 
397  // Probe the stub cache.
398  Code::Flags flags = Code::ComputeFlags(kind,
399  MONOMORPHIC,
400  extra_state,
401  Code::NORMAL,
402  argc);
403  Isolate::Current()->stub_cache()->GenerateProbe(
404  masm, flags, a1, a2, a3, t0, t1, t2);
405 
406  // If the stub cache probing failed, the receiver might be a value.
407  // For value objects, we use the map of the prototype objects for
408  // the corresponding JSValue for the cache and that is what we need
409  // to probe.
410  //
411  // Check for number.
412  __ JumpIfSmi(a1, &number, t1);
413  __ GetObjectType(a1, a3, a3);
414  __ Branch(&non_number, ne, a3, Operand(HEAP_NUMBER_TYPE));
415  __ bind(&number);
416  StubCompiler::GenerateLoadGlobalFunctionPrototype(
417  masm, Context::NUMBER_FUNCTION_INDEX, a1);
418  __ Branch(&probe);
419 
420  // Check for string.
421  __ bind(&non_number);
422  __ Branch(&non_string, Ugreater_equal, a3, Operand(FIRST_NONSTRING_TYPE));
423  StubCompiler::GenerateLoadGlobalFunctionPrototype(
424  masm, Context::STRING_FUNCTION_INDEX, a1);
425  __ Branch(&probe);
426 
427  // Check for boolean.
428  __ bind(&non_string);
429  __ LoadRoot(t0, Heap::kTrueValueRootIndex);
430  __ Branch(&boolean, eq, a1, Operand(t0));
431  __ LoadRoot(t1, Heap::kFalseValueRootIndex);
432  __ Branch(&miss, ne, a1, Operand(t1));
433  __ bind(&boolean);
434  StubCompiler::GenerateLoadGlobalFunctionPrototype(
435  masm, Context::BOOLEAN_FUNCTION_INDEX, a1);
436 
437  // Probe the stub cache for the value object.
438  __ bind(&probe);
439  Isolate::Current()->stub_cache()->GenerateProbe(
440  masm, flags, a1, a2, a3, t0, t1, t2);
441 
442  __ bind(&miss);
443 }
444 
445 
446 static void GenerateFunctionTailCall(MacroAssembler* masm,
447  int argc,
448  Label* miss,
449  Register scratch) {
450  // a1: function
451 
452  // Check that the value isn't a smi.
453  __ JumpIfSmi(a1, miss);
454 
455  // Check that the value is a JSFunction.
456  __ GetObjectType(a1, scratch, scratch);
457  __ Branch(miss, ne, scratch, Operand(JS_FUNCTION_TYPE));
458 
459  // Invoke the function.
460  ParameterCount actual(argc);
461  __ InvokeFunction(a1, actual, JUMP_FUNCTION,
462  NullCallWrapper(), CALL_AS_METHOD);
463 }
464 
465 
466 void CallICBase::GenerateNormal(MacroAssembler* masm, int argc) {
467  // ----------- S t a t e -------------
468  // -- a2 : name
469  // -- ra : return address
470  // -----------------------------------
471  Label miss;
472 
473  // Get the receiver of the function from the stack into a1.
474  __ lw(a1, MemOperand(sp, argc * kPointerSize));
475 
476  GenerateStringDictionaryReceiverCheck(masm, a1, a0, a3, t0, &miss);
477 
478  // a0: elements
479  // Search the dictionary - put result in register a1.
480  GenerateDictionaryLoad(masm, &miss, a0, a2, a1, a3, t0);
481 
482  GenerateFunctionTailCall(masm, argc, &miss, t0);
483 
484  // Cache miss: Jump to runtime.
485  __ bind(&miss);
486 }
487 
488 
489 void CallICBase::GenerateMiss(MacroAssembler* masm,
490  int argc,
491  IC::UtilityId id,
492  Code::ExtraICState extra_state) {
493  // ----------- S t a t e -------------
494  // -- a2 : name
495  // -- ra : return address
496  // -----------------------------------
497  Isolate* isolate = masm->isolate();
498 
499  if (id == IC::kCallIC_Miss) {
500  __ IncrementCounter(isolate->counters()->call_miss(), 1, a3, t0);
501  } else {
502  __ IncrementCounter(isolate->counters()->keyed_call_miss(), 1, a3, t0);
503  }
504 
505  // Get the receiver of the function from the stack.
506  __ lw(a3, MemOperand(sp, argc * kPointerSize));
507 
508  {
509  FrameScope scope(masm, StackFrame::INTERNAL);
510 
511  // Push the receiver and the name of the function.
512  __ Push(a3, a2);
513 
514  // Call the entry.
515  __ PrepareCEntryArgs(2);
516  __ PrepareCEntryFunction(ExternalReference(IC_Utility(id), isolate));
517 
518  CEntryStub stub(1);
519  __ CallStub(&stub);
520 
521  // Move result to a1 and leave the internal frame.
522  __ mov(a1, v0);
523  }
524 
525  // Check if the receiver is a global object of some sort.
526  // This can happen only for regular CallIC but not KeyedCallIC.
527  if (id == IC::kCallIC_Miss) {
528  Label invoke, global;
529  __ lw(a2, MemOperand(sp, argc * kPointerSize));
530  __ JumpIfSmi(a2, &invoke);
531  __ GetObjectType(a2, a3, a3);
532  __ Branch(&global, eq, a3, Operand(JS_GLOBAL_OBJECT_TYPE));
533  __ Branch(&invoke, ne, a3, Operand(JS_BUILTINS_OBJECT_TYPE));
534 
535  // Patch the receiver on the stack.
536  __ bind(&global);
537  __ lw(a2, FieldMemOperand(a2, GlobalObject::kGlobalReceiverOffset));
538  __ sw(a2, MemOperand(sp, argc * kPointerSize));
539  __ bind(&invoke);
540  }
541  // Invoke the function.
542  CallKind call_kind = CallICBase::Contextual::decode(extra_state)
543  ? CALL_AS_FUNCTION
544  : CALL_AS_METHOD;
545  ParameterCount actual(argc);
546  __ InvokeFunction(a1,
547  actual,
548  JUMP_FUNCTION,
549  NullCallWrapper(),
550  call_kind);
551 }
552 
553 
554 void CallIC::GenerateMegamorphic(MacroAssembler* masm,
555  int argc,
556  Code::ExtraICState extra_ic_state) {
557  // ----------- S t a t e -------------
558  // -- a2 : name
559  // -- ra : return address
560  // -----------------------------------
561 
562  // Get the receiver of the function from the stack into a1.
563  __ lw(a1, MemOperand(sp, argc * kPointerSize));
564  GenerateMonomorphicCacheProbe(masm, argc, Code::CALL_IC, extra_ic_state);
565  GenerateMiss(masm, argc, extra_ic_state);
566 }
567 
568 
569 void KeyedCallIC::GenerateMegamorphic(MacroAssembler* masm, int argc) {
570  // ----------- S t a t e -------------
571  // -- a2 : name
572  // -- ra : return address
573  // -----------------------------------
574 
575  // Get the receiver of the function from the stack into a1.
576  __ lw(a1, MemOperand(sp, argc * kPointerSize));
577 
578  Label do_call, slow_call, slow_load, slow_reload_receiver;
579  Label check_number_dictionary, check_string, lookup_monomorphic_cache;
580  Label index_smi, index_string;
581 
582  // Check that the key is a smi.
583  __ JumpIfNotSmi(a2, &check_string);
584  __ bind(&index_smi);
585  // Now the key is known to be a smi. This place is also jumped to from below
586  // where a numeric string is converted to a smi.
587 
588  GenerateKeyedLoadReceiverCheck(
589  masm, a1, a0, a3, Map::kHasIndexedInterceptor, &slow_call);
590 
591  GenerateFastArrayLoad(
592  masm, a1, a2, t0, a3, a0, a1, &check_number_dictionary, &slow_load);
593  Counters* counters = masm->isolate()->counters();
594  __ IncrementCounter(counters->keyed_call_generic_smi_fast(), 1, a0, a3);
595 
596  __ bind(&do_call);
597  // receiver in a1 is not used after this point.
598  // a2: key
599  // a1: function
600 
601  GenerateFunctionTailCall(masm, argc, &slow_call, a0);
602 
603  __ bind(&check_number_dictionary);
604  // a2: key
605  // a3: elements map
606  // t0: elements pointer
607  // Check whether the elements is a number dictionary.
608  __ LoadRoot(at, Heap::kHashTableMapRootIndex);
609  __ Branch(&slow_load, ne, a3, Operand(at));
610  __ sra(a0, a2, kSmiTagSize);
611  // a0: untagged index
612  __ LoadFromNumberDictionary(&slow_load, t0, a2, a1, a0, a3, t1);
613  __ IncrementCounter(counters->keyed_call_generic_smi_dict(), 1, a0, a3);
614  __ jmp(&do_call);
615 
616  __ bind(&slow_load);
617  // This branch is taken when calling KeyedCallIC_Miss is neither required
618  // nor beneficial.
619  __ IncrementCounter(counters->keyed_call_generic_slow_load(), 1, a0, a3);
620  {
621  FrameScope scope(masm, StackFrame::INTERNAL);
622  __ push(a2); // Save the key.
623  __ Push(a1, a2); // Pass the receiver and the key.
624  __ CallRuntime(Runtime::kKeyedGetProperty, 2);
625  __ pop(a2); // Restore the key.
626  }
627  __ mov(a1, v0);
628  __ jmp(&do_call);
629 
630  __ bind(&check_string);
631  GenerateKeyStringCheck(masm, a2, a0, a3, &index_string, &slow_call);
632 
633  // The key is known to be a symbol.
634  // If the receiver is a regular JS object with slow properties then do
635  // a quick inline probe of the receiver's dictionary.
636  // Otherwise do the monomorphic cache probe.
637  GenerateKeyedLoadReceiverCheck(
638  masm, a1, a0, a3, Map::kHasNamedInterceptor, &lookup_monomorphic_cache);
639 
640  __ lw(a0, FieldMemOperand(a1, JSObject::kPropertiesOffset));
641  __ lw(a3, FieldMemOperand(a0, HeapObject::kMapOffset));
642  __ LoadRoot(at, Heap::kHashTableMapRootIndex);
643  __ Branch(&lookup_monomorphic_cache, ne, a3, Operand(at));
644 
645  GenerateDictionaryLoad(masm, &slow_load, a0, a2, a1, a3, t0);
646  __ IncrementCounter(counters->keyed_call_generic_lookup_dict(), 1, a0, a3);
647  __ jmp(&do_call);
648 
649  __ bind(&lookup_monomorphic_cache);
650  __ IncrementCounter(counters->keyed_call_generic_lookup_cache(), 1, a0, a3);
651  GenerateMonomorphicCacheProbe(masm,
652  argc,
653  Code::KEYED_CALL_IC,
654  Code::kNoExtraICState);
655  // Fall through on miss.
656 
657  __ bind(&slow_call);
658  // This branch is taken if:
659  // - the receiver requires boxing or access check,
660  // - the key is neither smi nor symbol,
661  // - the value loaded is not a function,
662  // - there is hope that the runtime will create a monomorphic call stub,
663  // that will get fetched next time.
664  __ IncrementCounter(counters->keyed_call_generic_slow(), 1, a0, a3);
665  GenerateMiss(masm, argc);
666 
667  __ bind(&index_string);
668  __ IndexFromHash(a3, a2);
669  // Now jump to the place where smi keys are handled.
670  __ jmp(&index_smi);
671 }
672 
673 
674 void KeyedCallIC::GenerateNormal(MacroAssembler* masm, int argc) {
675  // ----------- S t a t e -------------
676  // -- a2 : name
677  // -- ra : return address
678  // -----------------------------------
679 
680  // Check if the name is a string.
681  Label miss;
682  __ JumpIfSmi(a2, &miss);
683  __ IsObjectJSStringType(a2, a0, &miss);
684 
685  CallICBase::GenerateNormal(masm, argc);
686  __ bind(&miss);
687  GenerateMiss(masm, argc);
688 }
689 
690 
691 // Defined in ic.cc.
692 Object* LoadIC_Miss(Arguments args);
693 
694 void LoadIC::GenerateMegamorphic(MacroAssembler* masm) {
695  // ----------- S t a t e -------------
696  // -- a2 : name
697  // -- ra : return address
698  // -- a0 : receiver
699  // -- sp[0] : receiver
700  // -----------------------------------
701 
702  // Probe the stub cache.
703  Code::Flags flags = Code::ComputeFlags(Code::LOAD_IC, MONOMORPHIC);
704  Isolate::Current()->stub_cache()->GenerateProbe(
705  masm, flags, a0, a2, a3, t0, t1, t2);
706 
707  // Cache miss: Jump to runtime.
708  GenerateMiss(masm);
709 }
710 
711 
712 void LoadIC::GenerateNormal(MacroAssembler* masm) {
713  // ----------- S t a t e -------------
714  // -- a2 : name
715  // -- ra : return address
716  // -- a0 : receiver
717  // -- sp[0] : receiver
718  // -----------------------------------
719  Label miss;
720 
721  GenerateStringDictionaryReceiverCheck(masm, a0, a1, a3, t0, &miss);
722 
723  // a1: elements
724  GenerateDictionaryLoad(masm, &miss, a1, a2, v0, a3, t0);
725  __ Ret();
726 
727  // Cache miss: Jump to runtime.
728  __ bind(&miss);
729  GenerateMiss(masm);
730 }
731 
732 
733 void LoadIC::GenerateMiss(MacroAssembler* masm) {
734  // ----------- S t a t e -------------
735  // -- a2 : name
736  // -- ra : return address
737  // -- a0 : receiver
738  // -- sp[0] : receiver
739  // -----------------------------------
740  Isolate* isolate = masm->isolate();
741 
742  __ IncrementCounter(isolate->counters()->keyed_load_miss(), 1, a3, t0);
743 
744  __ mov(a3, a0);
745  __ Push(a3, a2);
746 
747  // Perform tail call to the entry.
748  ExternalReference ref = ExternalReference(IC_Utility(kLoadIC_Miss), isolate);
749  __ TailCallExternalReference(ref, 2, 1);
750 }
751 
752 
753 static MemOperand GenerateMappedArgumentsLookup(MacroAssembler* masm,
754  Register object,
755  Register key,
756  Register scratch1,
757  Register scratch2,
758  Register scratch3,
759  Label* unmapped_case,
760  Label* slow_case) {
761  // Check that the receiver is a JSObject. Because of the map check
762  // later, we do not need to check for interceptors or whether it
763  // requires access checks.
764  __ JumpIfSmi(object, slow_case);
765  // Check that the object is some kind of JSObject.
766  __ GetObjectType(object, scratch1, scratch2);
767  __ Branch(slow_case, lt, scratch2, Operand(FIRST_JS_RECEIVER_TYPE));
768 
769  // Check that the key is a positive smi.
770  __ And(scratch1, key, Operand(0x80000001));
771  __ Branch(slow_case, ne, scratch1, Operand(zero_reg));
772 
773  // Load the elements into scratch1 and check its map.
774  __ lw(scratch1, FieldMemOperand(object, JSObject::kElementsOffset));
775  __ CheckMap(scratch1,
776  scratch2,
777  Heap::kNonStrictArgumentsElementsMapRootIndex,
778  slow_case,
779  DONT_DO_SMI_CHECK);
780  // Check if element is in the range of mapped arguments. If not, jump
781  // to the unmapped lookup with the parameter map in scratch1.
782  __ lw(scratch2, FieldMemOperand(scratch1, FixedArray::kLengthOffset));
783  __ Subu(scratch2, scratch2, Operand(Smi::FromInt(2)));
784  __ Branch(unmapped_case, Ugreater_equal, key, Operand(scratch2));
785 
786  // Load element index and check whether it is the hole.
787  const int kOffset =
788  FixedArray::kHeaderSize + 2 * kPointerSize - kHeapObjectTag;
789 
790  __ li(scratch3, Operand(kPointerSize >> 1));
791  __ Mul(scratch3, key, scratch3);
792  __ Addu(scratch3, scratch3, Operand(kOffset));
793 
794  __ Addu(scratch2, scratch1, scratch3);
795  __ lw(scratch2, MemOperand(scratch2));
796  __ LoadRoot(scratch3, Heap::kTheHoleValueRootIndex);
797  __ Branch(unmapped_case, eq, scratch2, Operand(scratch3));
798 
799  // Load value from context and return it. We can reuse scratch1 because
800  // we do not jump to the unmapped lookup (which requires the parameter
801  // map in scratch1).
802  __ lw(scratch1, FieldMemOperand(scratch1, FixedArray::kHeaderSize));
803  __ li(scratch3, Operand(kPointerSize >> 1));
804  __ Mul(scratch3, scratch2, scratch3);
805  __ Addu(scratch3, scratch3, Operand(Context::kHeaderSize - kHeapObjectTag));
806  __ Addu(scratch2, scratch1, scratch3);
807  return MemOperand(scratch2);
808 }
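// Layout relied on above: the elements of a non-strict arguments object form
// a "parameter map" FixedArray of [context, backing store, slot_0, slot_1,
// ...], where each slot is either the hole (the argument lives in the plain
// backing store) or a smi index into the context (the argument aliases a
// context slot). Hence the length is adjusted by 2, the hole check, and the
// final load relative to Context::kHeaderSize.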
809 
810 
811 static MemOperand GenerateUnmappedArgumentsLookup(MacroAssembler* masm,
812  Register key,
813  Register parameter_map,
814  Register scratch,
815  Label* slow_case) {
816  // Element is in arguments backing store, which is referenced by the
817  // second element of the parameter_map. The parameter_map register
818  // must be loaded with the parameter map of the arguments object and is
819  // overwritten.
820  const int kBackingStoreOffset = FixedArray::kHeaderSize + kPointerSize;
821  Register backing_store = parameter_map;
822  __ lw(backing_store, FieldMemOperand(parameter_map, kBackingStoreOffset));
823  __ CheckMap(backing_store,
824  scratch,
825  Heap::kFixedArrayMapRootIndex,
826  slow_case,
827  DONT_DO_SMI_CHECK);
828  __ lw(scratch, FieldMemOperand(backing_store, FixedArray::kLengthOffset));
829  __ Branch(slow_case, Ugreater_equal, key, Operand(scratch));
830  __ li(scratch, Operand(kPointerSize >> 1));
831  __ Mul(scratch, key, scratch);
832  __ Addu(scratch,
833  scratch,
834  Operand(FixedArray::kHeaderSize - kHeapObjectTag));
835  __ Addu(scratch, backing_store, scratch);
836  return MemOperand(scratch);
837 }
838 
839 
840 void KeyedLoadIC::GenerateNonStrictArguments(MacroAssembler* masm) {
841  // ---------- S t a t e --------------
842  // -- ra : return address
843  // -- a0 : key
844  // -- a1 : receiver
845  // -----------------------------------
846  Label slow, notin;
847  MemOperand mapped_location =
848  GenerateMappedArgumentsLookup(masm, a1, a0, a2, a3, t0, &notin, &slow);
849  __ Ret(USE_DELAY_SLOT);
850  __ lw(v0, mapped_location);
851  __ bind(&notin);
852  // The unmapped lookup expects that the parameter map is in a2.
853  MemOperand unmapped_location =
854  GenerateUnmappedArgumentsLookup(masm, a0, a2, a3, &slow);
855  __ lw(a2, unmapped_location);
856  __ LoadRoot(a3, Heap::kTheHoleValueRootIndex);
857  __ Branch(&slow, eq, a2, Operand(a3));
858  __ Ret(USE_DELAY_SLOT);
859  __ mov(v0, a2);
860  __ bind(&slow);
861  GenerateMiss(masm, false);
862 }
863 
864 
865 void KeyedStoreIC::GenerateNonStrictArguments(MacroAssembler* masm) {
866  // ---------- S t a t e --------------
867  // -- a0 : value
868  // -- a1 : key
869  // -- a2 : receiver
870  // -- ra : return address
871  // -----------------------------------
872  Label slow, notin;
873  // Store address is returned in register (of MemOperand) mapped_location.
874  MemOperand mapped_location =
875  GenerateMappedArgumentsLookup(masm, a2, a1, a3, t0, t1, &notin, &slow);
876  __ sw(a0, mapped_location);
877  __ mov(t5, a0);
878  ASSERT_EQ(mapped_location.offset(), 0);
879  __ RecordWrite(a3, mapped_location.rm(), t5,
880  kRAHasNotBeenSaved, kDontSaveFPRegs);
881  __ Ret(USE_DELAY_SLOT);
882  __ mov(v0, a0); // (In delay slot) return the value stored in v0.
883  __ bind(&notin);
884  // The unmapped lookup expects that the parameter map is in a3.
885  // Store address is returned in register (of MemOperand) unmapped_location.
886  MemOperand unmapped_location =
887  GenerateUnmappedArgumentsLookup(masm, a1, a3, t0, &slow);
888  __ sw(a0, unmapped_location);
889  __ mov(t5, a0);
890  ASSERT_EQ(unmapped_location.offset(), 0);
891  __ RecordWrite(a3, unmapped_location.rm(), t5,
892  kRAHasNotBeenSaved, kDontSaveFPRegs);
893  __ Ret(USE_DELAY_SLOT);
894  __ mov(v0, a0); // (In delay slot) return the value stored in v0.
895  __ bind(&slow);
896  GenerateMiss(masm, false);
897 }
898 
899 
900 void KeyedCallIC::GenerateNonStrictArguments(MacroAssembler* masm,
901  int argc) {
902  // ----------- S t a t e -------------
903  // -- a2 : name
904  // -- ra : return address
905  // -----------------------------------
906  Label slow, notin;
907  // Load receiver.
908  __ lw(a1, MemOperand(sp, argc * kPointerSize));
909  MemOperand mapped_location =
910  GenerateMappedArgumentsLookup(masm, a1, a2, a3, t0, t1, &notin, &slow);
911  __ lw(a1, mapped_location);
912  GenerateFunctionTailCall(masm, argc, &slow, a3);
913  __ bind(&notin);
914  // The unmapped lookup expects that the parameter map is in a3.
915  MemOperand unmapped_location =
916  GenerateUnmappedArgumentsLookup(masm, a2, a3, t0, &slow);
917  __ lw(a1, unmapped_location);
918  __ LoadRoot(a3, Heap::kTheHoleValueRootIndex);
919  __ Branch(&slow, eq, a1, Operand(a3));
920  GenerateFunctionTailCall(masm, argc, &slow, a3);
921  __ bind(&slow);
922  GenerateMiss(masm, argc);
923 }
924 
925 
926 Object* KeyedLoadIC_Miss(Arguments args);
927 
928 
929 void KeyedLoadIC::GenerateMiss(MacroAssembler* masm, bool force_generic) {
930  // ---------- S t a t e --------------
931  // -- ra : return address
932  // -- a0 : key
933  // -- a1 : receiver
934  // -----------------------------------
935  Isolate* isolate = masm->isolate();
936 
937  __ IncrementCounter(isolate->counters()->keyed_load_miss(), 1, a3, t0);
938 
939  __ Push(a1, a0);
940 
941  // Perform tail call to the entry.
942  ExternalReference ref = force_generic
943  ? ExternalReference(IC_Utility(kKeyedLoadIC_MissForceGeneric), isolate)
944  : ExternalReference(IC_Utility(kKeyedLoadIC_Miss), isolate);
945 
946  __ TailCallExternalReference(ref, 2, 1);
947 }
948 
949 
950 void KeyedLoadIC::GenerateRuntimeGetProperty(MacroAssembler* masm) {
951  // ---------- S t a t e --------------
952  // -- ra : return address
953  // -- a0 : key
954  // -- a1 : receiver
955  // -----------------------------------
956 
957  __ Push(a1, a0);
958 
959  __ TailCallRuntime(Runtime::kKeyedGetProperty, 2, 1);
960 }
961 
962 
963 void KeyedLoadIC::GenerateGeneric(MacroAssembler* masm) {
964  // ---------- S t a t e --------------
965  // -- ra : return address
966  // -- a0 : key
967  // -- a1 : receiver
968  // -----------------------------------
969  Label slow, check_string, index_smi, index_string, property_array_property;
970  Label probe_dictionary, check_number_dictionary;
971 
972  Register key = a0;
973  Register receiver = a1;
974 
975  Isolate* isolate = masm->isolate();
976 
977  // Check that the key is a smi.
978  __ JumpIfNotSmi(key, &check_string);
979  __ bind(&index_smi);
980  // Now the key is known to be a smi. This place is also jumped to from below
981  // where a numeric string is converted to a smi.
982 
983  GenerateKeyedLoadReceiverCheck(
984  masm, receiver, a2, a3, Map::kHasIndexedInterceptor, &slow);
985 
986  // Check the receiver's map to see if it has fast elements.
987  __ CheckFastElements(a2, a3, &check_number_dictionary);
988 
989  GenerateFastArrayLoad(
990  masm, receiver, key, t0, a3, a2, v0, NULL, &slow);
991 
992  __ IncrementCounter(isolate->counters()->keyed_load_generic_smi(), 1, a2, a3);
993  __ Ret();
994 
995  __ bind(&check_number_dictionary);
996  __ lw(t0, FieldMemOperand(receiver, JSObject::kElementsOffset));
997  __ lw(a3, FieldMemOperand(t0, HeapObject::kMapOffset));
998 
999  // Check whether the elements is a number dictionary.
1000  // a0: key
1001  // a3: elements map
1002  // t0: elements
1003  __ LoadRoot(at, Heap::kHashTableMapRootIndex);
1004  __ Branch(&slow, ne, a3, Operand(at));
1005  __ sra(a2, a0, kSmiTagSize);
1006  __ LoadFromNumberDictionary(&slow, t0, a0, v0, a2, a3, t1);
1007  __ Ret();
1008 
1009  // Slow case, key and receiver still in a0 and a1.
1010  __ bind(&slow);
1011  __ IncrementCounter(isolate->counters()->keyed_load_generic_slow(),
1012  1,
1013  a2,
1014  a3);
1015  GenerateRuntimeGetProperty(masm);
1016 
1017  __ bind(&check_string);
1018  GenerateKeyStringCheck(masm, key, a2, a3, &index_string, &slow);
1019 
1020  GenerateKeyedLoadReceiverCheck(
1021  masm, receiver, a2, a3, Map::kHasNamedInterceptor, &slow);
1022 
1023 
1024  // If the receiver is a fast-case object, check the keyed lookup
1025  // cache. Otherwise probe the dictionary.
1026  __ lw(a3, FieldMemOperand(a1, JSObject::kPropertiesOffset));
1027  __ lw(t0, FieldMemOperand(a3, HeapObject::kMapOffset));
1028  __ LoadRoot(at, Heap::kHashTableMapRootIndex);
1029  __ Branch(&probe_dictionary, eq, t0, Operand(at));
1030 
1031  // Load the map of the receiver, compute the keyed lookup cache hash
1032  // based on 32 bits of the map pointer and the string hash.
1033  __ lw(a2, FieldMemOperand(a1, HeapObject::kMapOffset));
1034  __ sra(a3, a2, KeyedLookupCache::kMapHashShift);
1035  __ lw(t0, FieldMemOperand(a0, String::kHashFieldOffset));
1036  __ sra(at, t0, String::kHashShift);
1037  __ xor_(a3, a3, at);
1038  int mask = KeyedLookupCache::kCapacityMask & KeyedLookupCache::kHashMask;
1039  __ And(a3, a3, Operand(mask));
1040 
1041  // Load the key (consisting of map and symbol) from the cache and
1042  // check for match.
1043  Label load_in_object_property;
1044  static const int kEntriesPerBucket = KeyedLookupCache::kEntriesPerBucket;
1045  Label hit_on_nth_entry[kEntriesPerBucket];
1046  ExternalReference cache_keys =
1047  ExternalReference::keyed_lookup_cache_keys(isolate);
1048  __ li(t0, Operand(cache_keys));
1049  __ sll(at, a3, kPointerSizeLog2 + 1);
1050  __ addu(t0, t0, at);
1051 
1052  for (int i = 0; i < kEntriesPerBucket - 1; i++) {
1053  Label try_next_entry;
1054  __ lw(t1, MemOperand(t0, kPointerSize * i * 2));
1055  __ Branch(&try_next_entry, ne, a2, Operand(t1));
1056  __ lw(t1, MemOperand(t0, kPointerSize * (i * 2 + 1)));
1057  __ Branch(&hit_on_nth_entry[i], eq, a0, Operand(t1));
1058  __ bind(&try_next_entry);
1059  }
1060 
1061  __ lw(t1, MemOperand(t0, kPointerSize * (kEntriesPerBucket - 1) * 2));
1062  __ Branch(&slow, ne, a2, Operand(t1));
1063  __ lw(t1, MemOperand(t0, kPointerSize * ((kEntriesPerBucket - 1) * 2 + 1)));
1064  __ Branch(&slow, ne, a0, Operand(t1));
1065 
1066  // Get field offset.
1067  // a0 : key
1068  // a1 : receiver
1069  // a2 : receiver's map
1070  // a3 : lookup cache index
1071  ExternalReference cache_field_offsets =
1072  ExternalReference::keyed_lookup_cache_field_offsets(isolate);
1073 
1074  // Hit on nth entry.
1075  for (int i = kEntriesPerBucket - 1; i >= 0; i--) {
1076  __ bind(&hit_on_nth_entry[i]);
1077  __ li(t0, Operand(cache_field_offsets));
1078  __ sll(at, a3, kPointerSizeLog2);
1079  __ addu(at, t0, at);
1080  __ lw(t1, MemOperand(at, kPointerSize * i));
1081  __ lbu(t2, FieldMemOperand(a2, Map::kInObjectPropertiesOffset));
1082  __ Subu(t1, t1, t2);
1083  __ Branch(&property_array_property, ge, t1, Operand(zero_reg));
1084  if (i != 0) {
1085  __ Branch(&load_in_object_property);
1086  }
1087  }
1088 
1089  // Load in-object property.
1090  __ bind(&load_in_object_property);
1091  __ lbu(t2, FieldMemOperand(a2, Map::kInstanceSizeOffset));
1092  __ addu(t2, t2, t1); // Index from start of object.
1093  __ Subu(a1, a1, Operand(kHeapObjectTag)); // Remove the heap tag.
1094  __ sll(at, t2, kPointerSizeLog2);
1095  __ addu(at, a1, at);
1096  __ lw(v0, MemOperand(at));
1097  __ IncrementCounter(isolate->counters()->keyed_load_generic_lookup_cache(),
1098  1,
1099  a2,
1100  a3);
1101  __ Ret();
1102 
1103  // Load property array property.
1104  __ bind(&property_array_property);
1105  __ lw(a1, FieldMemOperand(a1, JSObject::kPropertiesOffset));
1106  __ Addu(a1, a1, FixedArray::kHeaderSize - kHeapObjectTag);
1107  __ sll(t0, t1, kPointerSizeLog2);
1108  __ Addu(t0, t0, a1);
1109  __ lw(v0, MemOperand(t0));
1110  __ IncrementCounter(isolate->counters()->keyed_load_generic_lookup_cache(),
1111  1,
1112  a2,
1113  a3);
1114  __ Ret();
1115 
1116 
1117  // Do a quick inline probe of the receiver's dictionary, if it
1118  // exists.
1119  __ bind(&probe_dictionary);
1120  // a1: receiver
1121  // a0: key
1122  // a3: elements
1123  __ lw(a2, FieldMemOperand(a1, HeapObject::kMapOffset));
1124  __ lbu(a2, FieldMemOperand(a2, Map::kInstanceTypeOffset));
1125  GenerateGlobalInstanceTypeCheck(masm, a2, &slow);
1126  // Load the property to v0.
1127  GenerateDictionaryLoad(masm, &slow, a3, a0, v0, a2, t0);
1128  __ IncrementCounter(isolate->counters()->keyed_load_generic_symbol(),
1129  1,
1130  a2,
1131  a3);
1132  __ Ret();
1133 
1134  __ bind(&index_string);
1135  __ IndexFromHash(a3, key);
1136  // Now jump to the place where smi keys are handled.
1137  __ Branch(&index_smi);
1138 }
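// The KeyedLookupCache probed above memoizes (receiver map, symbol) ->
// field index pairs. Cached indices count in-object fields first, so after
// subtracting the map's in-object property count a negative result selects
// an in-object slot while a non-negative result indexes the out-of-line
// properties array -- hence the load_in_object_property /
// property_array_property split.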
1139 
1140 
1141 void KeyedLoadIC::GenerateString(MacroAssembler* masm) {
1142  // ---------- S t a t e --------------
1143  // -- ra : return address
1144  // -- a0 : key (index)
1145  // -- a1 : receiver
1146  // -----------------------------------
1147  Label miss;
1148 
1149  Register receiver = a1;
1150  Register index = a0;
1151  Register scratch = a3;
1152  Register result = v0;
1153 
1154  StringCharAtGenerator char_at_generator(receiver,
1155  index,
1156  scratch,
1157  result,
1158  &miss, // When not a string.
1159  &miss, // When not a number.
1160  &miss, // When index out of range.
1161  STRING_INDEX_IS_ARRAY_INDEX);
1162  char_at_generator.GenerateFast(masm);
1163  __ Ret();
1164 
1165  StubRuntimeCallHelper call_helper;
1166  char_at_generator.GenerateSlow(masm, call_helper);
1167 
1168  __ bind(&miss);
1169  GenerateMiss(masm, false);
1170 }
1171 
1172 
1173 void KeyedStoreIC::GenerateRuntimeSetProperty(MacroAssembler* masm,
1174  StrictModeFlag strict_mode) {
1175  // ---------- S t a t e --------------
1176  // -- a0 : value
1177  // -- a1 : key
1178  // -- a2 : receiver
1179  // -- ra : return address
1180  // -----------------------------------
1181 
1182  // Push receiver, key and value for runtime call.
1183  __ Push(a2, a1, a0);
1184  __ li(a1, Operand(Smi::FromInt(NONE))); // PropertyAttributes.
1185  __ li(a0, Operand(Smi::FromInt(strict_mode))); // Strict mode.
1186  __ Push(a1, a0);
1187 
1188  __ TailCallRuntime(Runtime::kSetProperty, 5, 1);
1189 }
1190 
1191 
1192 static void KeyedStoreGenerateGenericHelper(
1193  MacroAssembler* masm,
1194  Label* fast_object,
1195  Label* fast_double,
1196  Label* slow,
1197  KeyedStoreCheckMap check_map,
1198  KeyedStoreIncrementLength increment_length,
1199  Register value,
1200  Register key,
1201  Register receiver,
1202  Register receiver_map,
1203  Register elements_map,
1204  Register elements) {
1205  Label transition_smi_elements;
1206  Label finish_object_store, non_double_value, transition_double_elements;
1207  Label fast_double_without_map_check;
1208 
1209  // Fast case: Do the store, could be either Object or double.
1210  __ bind(fast_object);
1211  Register scratch_value = t0;
1212  Register address = t1;
1213  if (check_map == kCheckMap) {
1214  __ lw(elements_map, FieldMemOperand(elements, HeapObject::kMapOffset));
1215  __ Branch(fast_double, ne, elements_map,
1216  Operand(masm->isolate()->factory()->fixed_array_map()));
1217  }
1218  // Smi stores don't require further checks.
1219  Label non_smi_value;
1220  __ JumpIfNotSmi(value, &non_smi_value);
1221 
1222  if (increment_length == kIncrementLength) {
1223  // Add 1 to receiver->length.
1224  __ Addu(scratch_value, key, Operand(Smi::FromInt(1)));
1225  __ sw(scratch_value, FieldMemOperand(receiver, JSArray::kLengthOffset));
1226  }
1227  // It's irrelevant whether array is smi-only or not when writing a smi.
1228  __ Addu(address, elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
1229  __ sll(scratch_value, key, kPointerSizeLog2 - kSmiTagSize);
1230  __ Addu(address, address, scratch_value);
1231  __ sw(value, MemOperand(address));
1232  __ Ret();
1233 
1234  __ bind(&non_smi_value);
1235  // Escape to elements kind transition case.
1236  __ CheckFastObjectElements(receiver_map, scratch_value,
1237  &transition_smi_elements);
1238 
1239  // Fast elements array, store the value to the elements backing store.
1240  __ bind(&finish_object_store);
1241  if (increment_length == kIncrementLength) {
1242  // Add 1 to receiver->length.
1243  __ Addu(scratch_value, key, Operand(Smi::FromInt(1)));
1244  __ sw(scratch_value, FieldMemOperand(receiver, JSArray::kLengthOffset));
1245  }
1246  __ Addu(address, elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
1247  __ sll(scratch_value, key, kPointerSizeLog2 - kSmiTagSize);
1248  __ Addu(address, address, scratch_value);
1249  __ sw(value, MemOperand(address));
1250  // Update write barrier for the elements array address.
1251  __ mov(scratch_value, value); // Preserve the value which is returned.
1252  __ RecordWrite(elements,
1253  address,
1254  scratch_value,
1255  kRAHasNotBeenSaved,
1256  kDontSaveFPRegs,
1257  EMIT_REMEMBERED_SET,
1258  OMIT_SMI_CHECK);
1259  __ Ret();
1260 
1261  __ bind(fast_double);
1262  if (check_map == kCheckMap) {
1263  // Check for fast double array case. If this fails, call through to the
1264  // runtime.
1265  __ LoadRoot(at, Heap::kFixedDoubleArrayMapRootIndex);
1266  __ Branch(slow, ne, elements_map, Operand(at));
1267  }
1268  __ bind(&fast_double_without_map_check);
1269  __ StoreNumberToDoubleElements(value,
1270  key,
1271  receiver,
1272  elements, // Overwritten.
1273  a3, // Scratch regs...
1274  t0,
1275  t1,
1276  t2,
1277  &transition_double_elements);
1278  if (increment_length == kIncrementLength) {
1279  // Add 1 to receiver->length.
1280  __ Addu(scratch_value, key, Operand(Smi::FromInt(1)));
1281  __ sw(scratch_value, FieldMemOperand(receiver, JSArray::kLengthOffset));
1282  }
1283  __ Ret();
1284 
1285  __ bind(&transition_smi_elements);
1286  // Transition the array appropriately depending on the value type.
1287  __ lw(t0, FieldMemOperand(value, HeapObject::kMapOffset));
1288  __ LoadRoot(at, Heap::kHeapNumberMapRootIndex);
1289  __ Branch(&non_double_value, ne, t0, Operand(at));
1290 
1291  // Value is a double. Transition FAST_SMI_ELEMENTS ->
1292  // FAST_DOUBLE_ELEMENTS and complete the store.
1293  __ LoadTransitionedArrayMapConditional(FAST_SMI_ELEMENTS,
1294  FAST_DOUBLE_ELEMENTS,
1295  receiver_map,
1296  t0,
1297  slow);
1298  ASSERT(receiver_map.is(a3)); // Transition code expects map in a3
1299  ElementsTransitionGenerator::GenerateSmiToDouble(masm, slow);
1300  __ lw(elements, FieldMemOperand(receiver, JSObject::kElementsOffset));
1301  __ jmp(&fast_double_without_map_check);
1302 
1303  __ bind(&non_double_value);
1304  // Value is not a double, FAST_SMI_ELEMENTS -> FAST_ELEMENTS
1305  __ LoadTransitionedArrayMapConditional(FAST_SMI_ELEMENTS,
1306  FAST_ELEMENTS,
1307  receiver_map,
1308  t0,
1309  slow);
1310  ASSERT(receiver_map.is(a3)); // Transition code expects map in a3
1311  ElementsTransitionGenerator::GenerateMapChangeElementsTransition(masm);
1312  __ lw(elements, FieldMemOperand(receiver, JSObject::kElementsOffset));
1313  __ jmp(&finish_object_store);
1314 
1315  __ bind(&transition_double_elements);
1316  // Elements are FAST_DOUBLE_ELEMENTS, but value is an Object that's not a
1317  // HeapNumber. Make sure that the receiver is an Array with FAST_ELEMENTS and
1318  // transition array from FAST_DOUBLE_ELEMENTS to FAST_ELEMENTS
1319  __ LoadTransitionedArrayMapConditional(FAST_DOUBLE_ELEMENTS,
1320  FAST_ELEMENTS,
1321  receiver_map,
1322  t0,
1323  slow);
1324  ASSERT(receiver_map.is(a3)); // Transition code expects map in a3
1325  ElementsTransitionGenerator::GenerateDoubleToObject(masm, slow);
1326  __ lw(elements, FieldMemOperand(receiver, JSObject::kElementsOffset));
1327  __ jmp(&finish_object_store);
1328 }
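// The elements-kind transitions performed here only ever widen:
// FAST_SMI_ELEMENTS becomes FAST_DOUBLE_ELEMENTS when a heap number is
// stored and FAST_ELEMENTS for any other heap object, and
// FAST_DOUBLE_ELEMENTS becomes FAST_ELEMENTS. Each transition re-loads the
// elements pointer afterwards, since the backing store may have been
// reallocated.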
1329 
1330 
1331 void KeyedStoreIC::GenerateGeneric(MacroAssembler* masm,
1332  StrictModeFlag strict_mode) {
1333  // ---------- S t a t e --------------
1334  // -- a0 : value
1335  // -- a1 : key
1336  // -- a2 : receiver
1337  // -- ra : return address
1338  // -----------------------------------
1339  Label slow, fast_object, fast_object_grow;
1340  Label fast_double, fast_double_grow;
1341  Label array, extra, check_if_double_array;
1342 
1343  // Register usage.
1344  Register value = a0;
1345  Register key = a1;
1346  Register receiver = a2;
1347  Register receiver_map = a3;
1348  Register elements_map = t2;
1349  Register elements = t3; // Elements array of the receiver.
1350  // t0 and t1 are used as general scratch registers.
1351 
1352  // Check that the key is a smi.
1353  __ JumpIfNotSmi(key, &slow);
1354  // Check that the object isn't a smi.
1355  __ JumpIfSmi(receiver, &slow);
1356  // Get the map of the object.
1357  __ lw(receiver_map, FieldMemOperand(receiver, HeapObject::kMapOffset));
1358  // Check that the receiver does not require access checks. We need
1359  // to do this because this generic stub does not perform map checks.
1360  __ lbu(t0, FieldMemOperand(receiver_map, Map::kBitFieldOffset));
1361  __ And(t0, t0, Operand(1 << Map::kIsAccessCheckNeeded));
1362  __ Branch(&slow, ne, t0, Operand(zero_reg));
1363  // Check if the object is a JS array or not.
1364  __ lbu(t0, FieldMemOperand(receiver_map, Map::kInstanceTypeOffset));
1365  __ Branch(&array, eq, t0, Operand(JS_ARRAY_TYPE));
1366  // Check that the object is some kind of JSObject.
1367  __ Branch(&slow, lt, t0, Operand(FIRST_JS_OBJECT_TYPE));
1368 
1369  // Object case: Check key against length in the elements array.
1370  __ lw(elements, FieldMemOperand(receiver, JSObject::kElementsOffset));
1371  // Check array bounds. Both the key and the length of FixedArray are smis.
1372  __ lw(t0, FieldMemOperand(elements, FixedArray::kLengthOffset));
1373  __ Branch(&fast_object, lo, key, Operand(t0));
1374 
1375  // Slow case, handle jump to runtime.
1376  __ bind(&slow);
1377  // Entry registers are intact.
1378  // a0: value.
1379  // a1: key.
1380  // a2: receiver.
1381  GenerateRuntimeSetProperty(masm, strict_mode);
1382 
1383  // Extra capacity case: Check if there is extra capacity to
1384  // perform the store and update the length. Used for adding one
1385  // element to the array by writing to array[array.length].
1386  __ bind(&extra);
1387  // Condition code from comparing key and array length is still available.
1388  // Only support writing to array[array.length].
1389  __ Branch(&slow, ne, key, Operand(t0));
1390  // Check for room in the elements backing store.
1391  // Both the key and the length of FixedArray are smis.
1392  __ lw(t0, FieldMemOperand(elements, FixedArray::kLengthOffset));
1393  __ Branch(&slow, hs, key, Operand(t0));
1394  __ lw(elements_map, FieldMemOperand(elements, HeapObject::kMapOffset));
1395  __ Branch(
1396  &check_if_double_array, ne, elements_map, Heap::kFixedArrayMapRootIndex);
1397 
1398  __ jmp(&fast_object_grow);
1399 
1400  __ bind(&check_if_double_array);
1401  __ Branch(&slow, ne, elements_map, Heap::kFixedDoubleArrayMapRootIndex);
1402  __ jmp(&fast_double_grow);
1403 
1404  // Array case: Get the length and the elements array from the JS
1405  // array. Check that the array is in fast mode (and writable); if it
1406  // is the length is always a smi.
1407  __ bind(&array);
1408  __ lw(elements, FieldMemOperand(receiver, JSObject::kElementsOffset));
1409 
1410  // Check the key against the length in the array.
1411  __ lw(t0, FieldMemOperand(receiver, JSArray::kLengthOffset));
1412  __ Branch(&extra, hs, key, Operand(t0));
1413 
1414  KeyedStoreGenerateGenericHelper(masm, &fast_object, &fast_double,
1415  &slow, kCheckMap, kDontIncrementLength,
1416  value, key, receiver, receiver_map,
1417  elements_map, elements);
1418  KeyedStoreGenerateGenericHelper(masm, &fast_object_grow, &fast_double_grow,
1419  &slow, kDontCheckMap, kIncrementLength,
1420  value, key, receiver, receiver_map,
1421  elements_map, elements);
1422 }
1423 
1424 
1425 void KeyedLoadIC::GenerateIndexedInterceptor(MacroAssembler* masm) {
1426  // ---------- S t a t e --------------
1427  // -- ra : return address
1428  // -- a0 : key
1429  // -- a1 : receiver
1430  // -----------------------------------
1431  Label slow;
1432 
1433  // Check that the receiver isn't a smi.
1434  __ JumpIfSmi(a1, &slow);
1435 
1436  // Check that the key is an array index, that is Uint32.
1437  __ And(t0, a0, Operand(kSmiTagMask | kSmiSignMask));
1438  __ Branch(&slow, ne, t0, Operand(zero_reg));
1439 
1440  // Get the map of the receiver.
1441  __ lw(a2, FieldMemOperand(a1, HeapObject::kMapOffset));
1442 
1443  // Check that it has indexed interceptor and access checks
1444  // are not enabled for this object.
1445  __ lbu(a3, FieldMemOperand(a2, Map::kBitFieldOffset));
1446  __ And(a3, a3, Operand(kSlowCaseBitFieldMask));
1447  __ Branch(&slow, ne, a3, Operand(1 << Map::kHasIndexedInterceptor));
1448  // Everything is fine, call runtime.
1449  __ Push(a1, a0); // Receiver, key.
1450 
1451  // Perform tail call to the entry.
1452  __ TailCallExternalReference(ExternalReference(
1453  IC_Utility(kKeyedLoadPropertyWithInterceptor), masm->isolate()), 2, 1);
1454 
1455  __ bind(&slow);
1456  GenerateMiss(masm, false);
1457 }
1458 
1459 
1460 void KeyedStoreIC::GenerateMiss(MacroAssembler* masm, bool force_generic) {
1461  // ---------- S t a t e --------------
1462  // -- a0 : value
1463  // -- a1 : key
1464  // -- a2 : receiver
1465  // -- ra : return address
1466  // -----------------------------------
1467 
1468  // Push receiver, key and value for runtime call.
1469  __ Push(a2, a1, a0);
1470 
1471  ExternalReference ref = force_generic
1472  ? ExternalReference(IC_Utility(kKeyedStoreIC_MissForceGeneric),
1473  masm->isolate())
1474  : ExternalReference(IC_Utility(kKeyedStoreIC_Miss), masm->isolate());
1475  __ TailCallExternalReference(ref, 3, 1);
1476 }
1477 
1478 
1479 void KeyedStoreIC::GenerateSlow(MacroAssembler* masm) {
1480  // ---------- S t a t e --------------
1481  // -- a0 : value
1482  // -- a1 : key
1483  // -- a2 : receiver
1484  // -- ra : return address
1485  // -----------------------------------
1486 
1487  // Push receiver, key and value for runtime call.
1488  // We can't use MultiPush as the order of the registers is important.
1489  __ Push(a2, a1, a0);
1490 
1491  // The slow case calls into the runtime to complete the store without causing
1492  // an IC miss that would otherwise cause a transition to the generic stub.
1493  ExternalReference ref =
1494  ExternalReference(IC_Utility(kKeyedStoreIC_Slow), masm->isolate());
1495 
1496  __ TailCallExternalReference(ref, 3, 1);
1497 }
1498 
1499 
1500 void KeyedStoreIC::GenerateTransitionElementsSmiToDouble(MacroAssembler* masm) {
1501  // ---------- S t a t e --------------
1502  // -- a2 : receiver
1503  // -- a3 : target map
1504  // -- ra : return address
1505  // -----------------------------------
1506  // Must return the modified receiver in v0.
1507  if (!FLAG_trace_elements_transitions) {
1508  Label fail;
1509  ElementsTransitionGenerator::GenerateSmiToDouble(masm, &fail);
1510  __ Ret(USE_DELAY_SLOT);
1511  __ mov(v0, a2);
1512  __ bind(&fail);
1513  }
1514 
1515  __ push(a2);
1516  __ TailCallRuntime(Runtime::kTransitionElementsSmiToDouble, 1, 1);
1517 }
1518 
1519 
1520 void KeyedStoreIC::GenerateTransitionElementsDoubleToObject(
1521  MacroAssembler* masm) {
1522  // ---------- S t a t e --------------
1523  // -- a2 : receiver
1524  // -- a3 : target map
1525  // -- ra : return address
1526  // -----------------------------------
1527  // Must return the modified receiver in v0.
1528  if (!FLAG_trace_elements_transitions) {
1529  Label fail;
1530  ElementsTransitionGenerator::GenerateDoubleToObject(masm, &fail);
1531  __ Ret(USE_DELAY_SLOT);
1532  __ mov(v0, a2);
1533  __ bind(&fail);
1534  }
1535 
1536  __ push(a2);
1537  __ TailCallRuntime(Runtime::kTransitionElementsDoubleToObject, 1, 1);
1538 }
1539 
1540 
1541 void StoreIC::GenerateMegamorphic(MacroAssembler* masm,
1542  StrictModeFlag strict_mode) {
1543  // ----------- S t a t e -------------
1544  // -- a0 : value
1545  // -- a1 : receiver
1546  // -- a2 : name
1547  // -- ra : return address
1548  // -----------------------------------
1549 
1550  // Get the receiver from the stack and probe the stub cache.
1551  Code::Flags flags =
1552  Code::ComputeFlags(Code::STORE_IC, MONOMORPHIC, strict_mode);
1553  Isolate::Current()->stub_cache()->GenerateProbe(
1554  masm, flags, a1, a2, a3, t0, t1, t2);
1555 
1556  // Cache miss: Jump to runtime.
1557  GenerateMiss(masm);
1558 }
1559 
1560 
1561 void StoreIC::GenerateMiss(MacroAssembler* masm) {
1562  // ----------- S t a t e -------------
1563  // -- a0 : value
1564  // -- a1 : receiver
1565  // -- a2 : name
1566  // -- ra : return address
1567  // -----------------------------------
1568 
1569  __ Push(a1, a2, a0);
1570  // Perform tail call to the entry.
1571  ExternalReference ref = ExternalReference(IC_Utility(kStoreIC_Miss),
1572  masm->isolate());
1573  __ TailCallExternalReference(ref, 3, 1);
1574 }
1575 
1576 
1577 void StoreIC::GenerateArrayLength(MacroAssembler* masm) {
1578  // ----------- S t a t e -------------
1579  // -- a0 : value
1580  // -- a1 : receiver
1581  // -- a2 : name
1582  // -- ra : return address
1583  // -----------------------------------
1584  //
1585  // This accepts as a receiver anything JSArray::SetElementsLength accepts
1586  // (currently anything except for external arrays which means anything with
1587  // elements of FixedArray type). Value must be a number, but only smis are
1588  // accepted as the most common case.
1589 
1590  Label miss;
1591 
1592  Register receiver = a1;
1593  Register value = a0;
1594  Register scratch = a3;
1595 
1596  // Check that the receiver isn't a smi.
1597  __ JumpIfSmi(receiver, &miss);
1598 
1599  // Check that the object is a JS array.
1600  __ GetObjectType(receiver, scratch, scratch);
1601  __ Branch(&miss, ne, scratch, Operand(JS_ARRAY_TYPE));
1602 
1603  // Check that elements are FixedArray.
1604  // We rely on StoreIC_ArrayLength below to deal with all types of
1605  // fast elements (including COW).
1606  __ lw(scratch, FieldMemOperand(receiver, JSArray::kElementsOffset));
1607  __ GetObjectType(scratch, scratch, scratch);
1608  __ Branch(&miss, ne, scratch, Operand(FIXED_ARRAY_TYPE));
1609 
1610  // Check that the array has fast properties, otherwise the length
1611  // property might have been redefined.
1612  __ lw(scratch, FieldMemOperand(receiver, JSArray::kPropertiesOffset));
1613  __ lw(scratch, FieldMemOperand(scratch, FixedArray::kMapOffset));
1614  __ LoadRoot(at, Heap::kHashTableMapRootIndex);
1615  __ Branch(&miss, eq, scratch, Operand(at));
1616 
1617  // Check that value is a smi.
1618  __ JumpIfNotSmi(value, &miss);
1619 
1620  // Prepare tail call to StoreIC_ArrayLength.
1621  __ Push(receiver, value);
1622 
1623  ExternalReference ref = ExternalReference(IC_Utility(kStoreIC_ArrayLength),
1624  masm->isolate());
1625  __ TailCallExternalReference(ref, 2, 1);
1626 
1627  __ bind(&miss);
1628 
1629  GenerateMiss(masm);
1630 }
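The function above fast-paths a 'length' store only when an entire chain of guards holds; any failure branches to &miss. Condensed into one predicate, with hypothetical helpers standing in for the inline checks (none of these are real V8 functions):

  struct Object;  // opaque stand-in for a tagged value

  bool IsSmi(const Object&);
  bool IsJSArray(const Object&);
  bool HasFixedArrayElements(const Object&);
  bool HasDictionaryProperties(const Object&);

  // Mirrors the guard chain above, in order.
  bool CanStoreArrayLengthFast(const Object& receiver, const Object& value) {
    if (IsSmi(receiver)) return false;                 // need a heap object
    if (!IsJSArray(receiver)) return false;            // must be a JS array
    if (!HasFixedArrayElements(receiver)) return false;   // no external arrays
    if (HasDictionaryProperties(receiver)) return false;  // 'length' redefinable
    return IsSmi(value);                               // only smi lengths inline
  }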
1631 
1632 
1633 void StoreIC::GenerateNormal(MacroAssembler* masm) {
1634  // ----------- S t a t e -------------
1635  // -- a0 : value
1636  // -- a1 : receiver
1637  // -- a2 : name
1638  // -- ra : return address
1639  // -----------------------------------
1640  Label miss;
1641 
1642  GenerateStringDictionaryReceiverCheck(masm, a1, a3, t0, t1, &miss);
1643 
1644  GenerateDictionaryStore(masm, &miss, a3, a2, a0, t0, t1);
1645  Counters* counters = masm->isolate()->counters();
1646  __ IncrementCounter(counters->store_normal_hit(), 1, t0, t1);
1647  __ Ret();
1648 
1649  __ bind(&miss);
1650  __ IncrementCounter(counters->store_normal_miss(), 1, t0, t1);
1651  GenerateMiss(masm);
1652 }
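When the receiver check falls through, GenerateDictionaryStore writes the value straight into the receiver's property dictionary. A rough standalone model of that probe-and-overwrite loop; the table layout and probe sequence here are illustrative, not V8's StringDictionary:

  #include <cstdint>
  #include <cstring>

  struct DictEntry { const char* key; void* value; };

  // A false return corresponds to jumping to the miss label above.
  bool DictionaryStore(DictEntry* table, uint32_t capacity_mask,
                       uint32_t hash, const char* key, void* new_value) {
    uint32_t index = hash & capacity_mask;
    for (uint32_t i = 0; i <= capacity_mask; ++i) {
      DictEntry& e = table[index];
      if (e.key == nullptr) return false;       // name absent: go to runtime
      if (std::strcmp(e.key, key) == 0) {       // found the property
        e.value = new_value;                    // overwrite the value slot
        return true;
      }
      index = (index + i + 1) & capacity_mask;  // next probe position
    }
    return false;                               // table exhausted
  }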
1653 
1654 
1655 void StoreIC::GenerateGlobalProxy(MacroAssembler* masm,
1656  StrictModeFlag strict_mode) {
1657  // ----------- S t a t e -------------
1658  // -- a0 : value
1659  // -- a1 : receiver
1660  // -- a2 : name
1661  // -- ra : return address
1662  // -----------------------------------
1663 
1664  __ Push(a1, a2, a0);
1665 
1666  __ li(a1, Operand(Smi::FromInt(NONE))); // PropertyAttributes.
1667  __ li(a0, Operand(Smi::FromInt(strict_mode)));
1668  __ Push(a1, a0);
1669 
1670  // Do tail-call to runtime routine.
1671  __ TailCallRuntime(Runtime::kSetProperty, 5, 1);
1672 }
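Five values sit on the stack when the tail call happens, which is why TailCallRuntime is told 5 arguments: the receiver, name and value from the IC, plus the two extra smis pushed here. Laid out as a record (field names are illustrative, not V8's):

  // Argument order as pushed above; Runtime::kSetProperty consumes them
  // in this order.
  struct SetPropertyArgs {
    void* receiver;   // from a1
    void* name;       // from a2
    void* value;      // from a0
    int attributes;   // Smi::FromInt(NONE): no special attribute bits
    int strict_mode;  // whether a failed store throws
  };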
1673 
1674 
1675 #undef __
1676 
1677 
1678 Condition CompareIC::ComputeCondition(Token::Value op) {
1679  switch (op) {
1680  case Token::EQ_STRICT:
1681  case Token::EQ:
1682  return eq;
1683  case Token::LT:
1684  return lt;
1685  case Token::GT:
1686  return gt;
1687  case Token::LTE:
1688  return le;
1689  case Token::GTE:
1690  return ge;
1691  default:
1692  UNREACHABLE();
1693  return kNoCondition;
1694  }
1695 }
1696 
1697 
1698 void CompareIC::UpdateCaches(Handle<Object> x, Handle<Object> y) {
1699  HandleScope scope;
1700  Handle<Code> rewritten;
1701  State previous_state = GetState();
1702  State state = TargetState(previous_state, false, x, y);
1703  if (state == GENERIC) {
1704  CompareStub stub(GetCondition(), strict(), NO_COMPARE_FLAGS, a1, a0);
1705  rewritten = stub.GetCode();
1706  } else {
1707  ICCompareStub stub(op_, state);
1708  if (state == KNOWN_OBJECTS) {
1709  stub.set_known_map(Handle<Map>(Handle<JSObject>::cast(x)->map()));
1710  }
1711  rewritten = stub.GetCode();
1712  }
1713  set_target(*rewritten);
1714 
1715 #ifdef DEBUG
1716  if (FLAG_trace_ic) {
1717  PrintF("[CompareIC (%s->%s)#%s]\n",
1718  GetStateName(previous_state),
1719  GetStateName(state),
1720  Token::Name(op_));
1721  }
1722 #endif
1723 
1724  // Activate inlined smi code.
1725  if (previous_state == UNINITIALIZED) {
1726  PatchInlinedSmiCode(address(), ENABLE_INLINED_SMI_CHECK);
1727  }
1728 }
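UpdateCaches is the slow-path half of the compare IC: on each miss it computes the next state from the previous one and the operand types, installs a stub specialized for that state, and, on the first transition out of UNINITIALIZED, re-enables the inlined smi check at the call site. A condensed sketch of the progression, using only a subset of the real states and a simplified transition rule:

  enum State { UNINITIALIZED, SMIS, HEAP_NUMBERS, KNOWN_OBJECTS, GENERIC };

  State NextState(State previous, bool both_smis, bool same_known_map) {
    if (previous == UNINITIALIZED) {
      if (both_smis) return SMIS;                // specialize for smi pairs
      if (same_known_map) return KNOWN_OBJECTS;  // specialize for one map
    }
    return GENERIC;  // mixed history: fall back to the generic stub for good
  }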
1729 
1730 
1731 void PatchInlinedSmiCode(Address address, InlinedSmiCheck check) {
1732  Address andi_instruction_address =
1733  address + Assembler::kCallTargetAddressOffset;
1734 
1735  // If the instruction following the call is not an andi at, rx, #yyy,
1736  // nothing was inlined.
1737  Instr instr = Assembler::instr_at(andi_instruction_address);
1738  if (!(Assembler::IsAndImmediate(instr) &&
1739  Assembler::GetRt(instr) == (uint32_t)zero_reg.code())) {
1740  return;
1741  }
1742 
1743  // The delta to the start of the map check instruction, and the
1744  // condition code used at the patched jump.
1745  int delta = Assembler::GetImmediate16(instr);
1746  delta += Assembler::GetRs(instr) * kImm16Mask;
1747  // If the delta is 0 the instruction is andi at, zero_reg, #0, which
1748  // also signals that nothing was inlined.
1749  if (delta == 0) {
1750  return;
1751  }
1752 
1753 #ifdef DEBUG
1754  if (FLAG_trace_ic) {
1755  PrintF("[ patching ic at %p, andi=%p, delta=%d\n",
1756  address, andi_instruction_address, delta);
1757  }
1758 #endif
1759 
1760  Address patch_address =
1761  andi_instruction_address - delta * Instruction::kInstrSize;
1762  Instr instr_at_patch = Assembler::instr_at(patch_address);
1763  Instr branch_instr =
1764  Assembler::instr_at(patch_address + Instruction::kInstrSize);
1765  // This is patching a conditional "jump if not smi/jump if smi" site.
1766  // It is enabled by changing from
1767  // andi at, rx, 0
1768  // Branch <target>, eq, at, Operand(zero_reg)
1769  // to:
1770  // andi at, rx, #kSmiTagMask
1771  // Branch <target>, ne, at, Operand(zero_reg)
1772  // and patched back the other way to disable it again.
1773  CodePatcher patcher(patch_address, 2);
1774  Register reg = Register::from_code(Assembler::GetRs(instr_at_patch));
1775  if (check == ENABLE_INLINED_SMI_CHECK) {
1776  ASSERT(Assembler::IsAndImmediate(instr_at_patch));
1777  ASSERT_EQ(0, Assembler::GetImmediate16(instr_at_patch));
1778  patcher.masm()->andi(at, reg, kSmiTagMask);
1779  } else {
1780  ASSERT(check == DISABLE_INLINED_SMI_CHECK);
1781  ASSERT(Assembler::IsAndImmediate(instr_at_patch));
1782  patcher.masm()->andi(at, reg, 0);
1783  }
1784  ASSERT(Assembler::IsBranch(branch_instr));
1785  if (Assembler::IsBeq(branch_instr)) {
1786  patcher.ChangeBranchCondition(ne);
1787  } else {
1788  ASSERT(Assembler::IsBne(branch_instr));
1789  patcher.ChangeBranchCondition(eq);
1790  }
1791 }
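The key to the patching scheme is that the distance back to the map-check site is baked into the andi itself: the 16-bit immediate carries the delta and the rs field extends the range in units of kImm16Mask, with a decoded delta of 0 reserved to mean "nothing inlined". A worked decode (values illustrative):

  #include <cstdint>

  // delta = imm16 + rs * kImm16Mask; e.g. imm16 = 12 with rs = 1 decodes
  // to 12 + 0xFFFF = 65547 instructions back to the map check.
  int DecodeDelta(uint32_t imm16, uint32_t rs) {
    const uint32_t kImm16Mask = 0xFFFF;  // as in the MIPS backend
    return static_cast<int>(imm16 + rs * kImm16Mask);
  }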
1792 
1793 
1794 } } // namespace v8::internal
1795 
1796 #endif // V8_TARGET_ARCH_MIPS