v8 3.11.10 (node 0.8.26)
V8 is Google's open source JavaScript engine.
stub-cache-mips.cc
1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are
4 // met:
5 //
6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided
11 // with the distribution.
12 // * Neither the name of Google Inc. nor the names of its
13 // contributors may be used to endorse or promote products derived
14 // from this software without specific prior written permission.
15 //
16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27 
28 #include "v8.h"
29 
30 #if defined(V8_TARGET_ARCH_MIPS)
31 
32 #include "ic-inl.h"
33 #include "codegen.h"
34 #include "stub-cache.h"
35 
36 namespace v8 {
37 namespace internal {
38 
39 #define __ ACCESS_MASM(masm)
40 
41 
42 static void ProbeTable(Isolate* isolate,
43  MacroAssembler* masm,
44  Code::Flags flags,
45  StubCache::Table table,
46  Register receiver,
47  Register name,
48  // Number of the cache entry, not scaled.
49  Register offset,
50  Register scratch,
51  Register scratch2,
52  Register offset_scratch) {
53  ExternalReference key_offset(isolate->stub_cache()->key_reference(table));
54  ExternalReference value_offset(isolate->stub_cache()->value_reference(table));
55  ExternalReference map_offset(isolate->stub_cache()->map_reference(table));
56 
57  uint32_t key_off_addr = reinterpret_cast<uint32_t>(key_offset.address());
58  uint32_t value_off_addr = reinterpret_cast<uint32_t>(value_offset.address());
59  uint32_t map_off_addr = reinterpret_cast<uint32_t>(map_offset.address());
60 
61  // Check the relative positions of the address fields.
62  ASSERT(value_off_addr > key_off_addr);
63  ASSERT((value_off_addr - key_off_addr) % 4 == 0);
64  ASSERT((value_off_addr - key_off_addr) < (256 * 4));
65  ASSERT(map_off_addr > key_off_addr);
66  ASSERT((map_off_addr - key_off_addr) % 4 == 0);
67  ASSERT((map_off_addr - key_off_addr) < (256 * 4));
68 
69  Label miss;
70  Register base_addr = scratch;
71  scratch = no_reg;
72 
73  // Multiply by 3 because there are 3 fields per entry (name, code, map).
74  __ sll(offset_scratch, offset, 1);
75  __ Addu(offset_scratch, offset_scratch, offset);
76 
77  // Calculate the base address of the entry.
78  __ li(base_addr, Operand(key_offset));
79  __ sll(at, offset_scratch, kPointerSizeLog2);
80  __ Addu(base_addr, base_addr, at);
81 
82  // Check that the key in the entry matches the name.
83  __ lw(at, MemOperand(base_addr, 0));
84  __ Branch(&miss, ne, name, Operand(at));
85 
86  // Check the map matches.
87  __ lw(at, MemOperand(base_addr, map_off_addr - key_off_addr));
88  __ lw(scratch2, FieldMemOperand(receiver, HeapObject::kMapOffset));
89  __ Branch(&miss, ne, at, Operand(scratch2));
90 
91  // Get the code entry from the cache.
92  Register code = scratch2;
93  scratch2 = no_reg;
94  __ lw(code, MemOperand(base_addr, value_off_addr - key_off_addr));
95 
96  // Check that the flags match what we're looking for.
97  Register flags_reg = base_addr;
98  base_addr = no_reg;
99  __ lw(flags_reg, FieldMemOperand(code, Code::kFlagsOffset));
100  __ And(flags_reg, flags_reg, Operand(~Code::kFlagsNotUsedInLookup));
101  __ Branch(&miss, ne, flags_reg, Operand(flags));
102 
103 #ifdef DEBUG
104  if (FLAG_test_secondary_stub_cache && table == StubCache::kPrimary) {
105  __ jmp(&miss);
106  } else if (FLAG_test_primary_stub_cache && table == StubCache::kSecondary) {
107  __ jmp(&miss);
108  }
109 #endif
110 
111  // Jump to the first instruction in the code stub.
112  __ Addu(at, code, Operand(Code::kHeaderSize - kHeapObjectTag));
113  __ Jump(at);
114 
115  // Miss: fall through.
116  __ bind(&miss);
117 }
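// A sketch of the addressing computed above, assuming 32-bit pointers
// (kPointerSize == 4) and the three parallel per-entry fields the StubCache
// keeps (key/name, value/code, map):
//
//   entry_addr = key_offset + (offset * 3) * kPointerSize
//   name  at entry_addr + 0
//   map   at entry_addr + (map_off_addr - key_off_addr)
//   code  at entry_addr + (value_off_addr - key_off_addr)
//
// The ASSERTs above guarantee both field deltas are word-aligned and small
// enough to be reachable from base_addr with a simple immediate offset.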
118 
119 
120 // Helper function used to check that the dictionary doesn't contain
121 // the property. This function may return false negatives, so miss_label
122 // must always call a backup property check that is complete.
123 // This function is safe to call if the receiver has fast properties.
124 // Name must be a symbol and receiver must be a heap object.
125 static void GenerateDictionaryNegativeLookup(MacroAssembler* masm,
126  Label* miss_label,
127  Register receiver,
128  Handle<String> name,
129  Register scratch0,
130  Register scratch1) {
131  ASSERT(name->IsSymbol());
132  Counters* counters = masm->isolate()->counters();
133  __ IncrementCounter(counters->negative_lookups(), 1, scratch0, scratch1);
134  __ IncrementCounter(counters->negative_lookups_miss(), 1, scratch0, scratch1);
135 
136  Label done;
137 
138  const int kInterceptorOrAccessCheckNeededMask =
139  (1 << Map::kHasNamedInterceptor) | (1 << Map::kIsAccessCheckNeeded);
140 
141  // Bail out if the receiver has a named interceptor or requires access checks.
142  Register map = scratch1;
143  __ lw(map, FieldMemOperand(receiver, HeapObject::kMapOffset));
144  __ lbu(scratch0, FieldMemOperand(map, Map::kBitFieldOffset));
145  __ And(scratch0, scratch0, Operand(kInterceptorOrAccessCheckNeededMask));
146  __ Branch(miss_label, ne, scratch0, Operand(zero_reg));
147 
148  // Check that receiver is a JSObject.
149  __ lbu(scratch0, FieldMemOperand(map, Map::kInstanceTypeOffset));
150  __ Branch(miss_label, lt, scratch0, Operand(FIRST_SPEC_OBJECT_TYPE));
151 
152  // Load properties array.
153  Register properties = scratch0;
154  __ lw(properties, FieldMemOperand(receiver, JSObject::kPropertiesOffset));
155  // Check that the properties array is a dictionary.
156  __ lw(map, FieldMemOperand(properties, HeapObject::kMapOffset));
157  Register tmp = properties;
158  __ LoadRoot(tmp, Heap::kHashTableMapRootIndex);
159  __ Branch(miss_label, ne, map, Operand(tmp));
160 
161  // Restore the temporarily used register.
162  __ lw(properties, FieldMemOperand(receiver, JSObject::kPropertiesOffset));
163 
164 
165  StringDictionaryLookupStub::GenerateNegativeLookup(masm,
166  miss_label,
167  &done,
168  receiver,
169  properties,
170  name,
171  scratch1);
172  __ bind(&done);
173  __ DecrementCounter(counters->negative_lookups_miss(), 1, scratch0, scratch1);
174 }
175 
176 
177 void StubCache::GenerateProbe(MacroAssembler* masm,
178  Code::Flags flags,
179  Register receiver,
180  Register name,
181  Register scratch,
182  Register extra,
183  Register extra2,
184  Register extra3) {
185  Isolate* isolate = masm->isolate();
186  Label miss;
187 
188  // Make sure that code is valid. The multiplying code relies on the
189  // entry size being 12.
190  ASSERT(sizeof(Entry) == 12);
191 
192  // Make sure the flags do not name a specific type.
193  ASSERT(Code::ExtractTypeFromFlags(flags) == 0);
194 
195  // Make sure that there are no register conflicts.
196  ASSERT(!scratch.is(receiver));
197  ASSERT(!scratch.is(name));
198  ASSERT(!extra.is(receiver));
199  ASSERT(!extra.is(name));
200  ASSERT(!extra.is(scratch));
201  ASSERT(!extra2.is(receiver));
202  ASSERT(!extra2.is(name));
203  ASSERT(!extra2.is(scratch));
204  ASSERT(!extra2.is(extra));
205 
206  // Check register validity.
207  ASSERT(!scratch.is(no_reg));
208  ASSERT(!extra.is(no_reg));
209  ASSERT(!extra2.is(no_reg));
210  ASSERT(!extra3.is(no_reg));
211 
212  Counters* counters = masm->isolate()->counters();
213  __ IncrementCounter(counters->megamorphic_stub_cache_probes(), 1,
214  extra2, extra3);
215 
216  // Check that the receiver isn't a smi.
217  __ JumpIfSmi(receiver, &miss);
218 
219  // Get the map of the receiver and compute the hash.
220  __ lw(scratch, FieldMemOperand(name, String::kHashFieldOffset));
221  __ lw(at, FieldMemOperand(receiver, HeapObject::kMapOffset));
222  __ Addu(scratch, scratch, at);
223  uint32_t mask = kPrimaryTableSize - 1;
224  // We shift out the last two bits because they are not part of the hash and
225  // they are always 01 for maps.
226  __ srl(scratch, scratch, kHeapObjectTagSize);
227  __ Xor(scratch, scratch, Operand((flags >> kHeapObjectTagSize) & mask));
228  __ And(scratch, scratch, Operand(mask));
229 
230  // Probe the primary table.
231  ProbeTable(isolate,
232  masm,
233  flags,
234  kPrimary,
235  receiver,
236  name,
237  scratch,
238  extra,
239  extra2,
240  extra3);
241 
242  // Primary miss: Compute hash for secondary probe.
243  __ srl(at, name, kHeapObjectTagSize);
244  __ Subu(scratch, scratch, at);
245  uint32_t mask2 = kSecondaryTableSize - 1;
246  __ Addu(scratch, scratch, Operand((flags >> kHeapObjectTagSize) & mask2));
247  __ And(scratch, scratch, Operand(mask2));
248 
249  // Probe the secondary table.
250  ProbeTable(isolate,
251  masm,
252  flags,
253  kSecondary,
254  receiver,
255  name,
256  scratch,
257  extra,
258  extra2,
259  extra3);
260 
261  // Cache miss: Fall-through and let caller handle the miss by
262  // entering the runtime system.
263  __ bind(&miss);
264  __ IncrementCounter(counters->megamorphic_stub_cache_misses(), 1,
265  extra2, extra3);
266 }
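// In C-like pseudocode, the two hash computations above are (a sketch
// derived from the assembly, with the smi-tag shifts made explicit):
//
//   primary   = (((name->hash_field + receiver->map) >> kHeapObjectTagSize)
//                ^ (flags >> kHeapObjectTagSize)) & (kPrimaryTableSize - 1);
//   secondary = ((primary - (name >> kHeapObjectTagSize))
//                + (flags >> kHeapObjectTagSize)) & (kSecondaryTableSize - 1);
//
// Both values are unscaled entry indices, which ProbeTable then scales by
// the 3-word entry size.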
267 
268 
269 void StubCompiler::GenerateLoadGlobalFunctionPrototype(MacroAssembler* masm,
270  int index,
271  Register prototype) {
272  // Load the global or builtins object from the current context.
273  __ lw(prototype, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_INDEX)));
274  // Load the global context from the global or builtins object.
275  __ lw(prototype,
276  FieldMemOperand(prototype, GlobalObject::kGlobalContextOffset));
277  // Load the function from the global context.
278  __ lw(prototype, MemOperand(prototype, Context::SlotOffset(index)));
279  // Load the initial map. The global functions all have initial maps.
280  __ lw(prototype,
281  FieldMemOperand(prototype, JSFunction::kPrototypeOrInitialMapOffset));
282  // Load the prototype from the initial map.
283  __ lw(prototype, FieldMemOperand(prototype, Map::kPrototypeOffset));
284 }
285 
286 
287 void StubCompiler::GenerateDirectLoadGlobalFunctionPrototype(
288  MacroAssembler* masm,
289  int index,
290  Register prototype,
291  Label* miss) {
292  Isolate* isolate = masm->isolate();
293  // Check we're still in the same context.
294  __ lw(prototype, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_INDEX)));
295  ASSERT(!prototype.is(at));
296  __ li(at, isolate->global());
297  __ Branch(miss, ne, prototype, Operand(at));
298  // Get the global function with the given index.
299  Handle<JSFunction> function(
300  JSFunction::cast(isolate->global_context()->get(index)));
301  // Load its initial map. The global functions all have initial maps.
302  __ li(prototype, Handle<Map>(function->initial_map()));
303  // Load the prototype from the initial map.
304  __ lw(prototype, FieldMemOperand(prototype, Map::kPrototypeOffset));
305 }
306 
307 
308 // Load a fast property out of a holder object (src). In-object properties
309 // are loaded directly; otherwise the property is loaded from the properties
310 // fixed array.
311 void StubCompiler::GenerateFastPropertyLoad(MacroAssembler* masm,
312  Register dst,
313  Register src,
314  Handle<JSObject> holder,
315  int index) {
316  // Adjust for the number of properties stored in the holder.
317  index -= holder->map()->inobject_properties();
318  if (index < 0) {
319  // Get the property straight out of the holder.
320  int offset = holder->map()->instance_size() + (index * kPointerSize);
321  __ lw(dst, FieldMemOperand(src, offset));
322  } else {
323  // Calculate the offset into the properties array.
324  int offset = index * kPointerSize + FixedArray::kHeaderSize;
325  __ lw(dst, FieldMemOperand(src, JSObject::kPropertiesOffset));
326  __ lw(dst, FieldMemOperand(dst, offset));
327  }
328 }
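// Worked example with hypothetical numbers: for a holder map with
// inobject_properties() == 2 and instance_size() == 16, index 1 becomes
// 1 - 2 == -1, so the property is read from the object itself at offset
// 16 + (-1) * kPointerSize == 12; index 3 becomes 3 - 2 == 1, so it is
// read from the properties FixedArray at
// FixedArray::kHeaderSize + 1 * kPointerSize.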
329 
330 
331 void StubCompiler::GenerateLoadArrayLength(MacroAssembler* masm,
332  Register receiver,
333  Register scratch,
334  Label* miss_label) {
335  // Check that the receiver isn't a smi.
336  __ JumpIfSmi(receiver, miss_label);
337 
338  // Check that the object is a JS array.
339  __ GetObjectType(receiver, scratch, scratch);
340  __ Branch(miss_label, ne, scratch, Operand(JS_ARRAY_TYPE));
341 
342  // Load length directly from the JS array.
343  __ lw(v0, FieldMemOperand(receiver, JSArray::kLengthOffset));
344  __ Ret();
345 }
346 
347 
348 // Generate code to check if an object is a string. If the object is a
349 // heap object, its map's instance type is left in the scratch1 register.
350 // If this is not needed, scratch1 and scratch2 may be the same register.
351 static void GenerateStringCheck(MacroAssembler* masm,
352  Register receiver,
353  Register scratch1,
354  Register scratch2,
355  Label* smi,
356  Label* non_string_object) {
357  // Check that the receiver isn't a smi.
358  __ JumpIfSmi(receiver, smi, t0);
359 
360  // Check that the object is a string.
361  __ lw(scratch1, FieldMemOperand(receiver, HeapObject::kMapOffset));
362  __ lbu(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset));
363  __ And(scratch2, scratch1, Operand(kIsNotStringMask));
364  // The cast is to resolve the overload for the argument of 0x0.
365  __ Branch(non_string_object,
366  ne,
367  scratch2,
368  Operand(static_cast<int32_t>(kStringTag)));
369 }
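// This single masked compare works because every string instance type has
// a zero bit pattern in the bits selected by kIsNotStringMask (kStringTag
// is 0x0), so one branch accepts all string representations at once.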
370 
371 
372 // Generate code to load the length from a string object and return the length.
373 // If the receiver object is not a string or a wrapped string object,
374 // execution continues at the miss label. The register containing the
375 // receiver is potentially clobbered.
376 void StubCompiler::GenerateLoadStringLength(MacroAssembler* masm,
377  Register receiver,
378  Register scratch1,
379  Register scratch2,
380  Label* miss,
381  bool support_wrappers) {
382  Label check_wrapper;
383 
384  // Check if the object is a string leaving the instance type in the
385  // scratch1 register.
386  GenerateStringCheck(masm, receiver, scratch1, scratch2, miss,
387  support_wrappers ? &check_wrapper : miss);
388 
389  // Load length directly from the string.
390  __ lw(v0, FieldMemOperand(receiver, String::kLengthOffset));
391  __ Ret();
392 
393  if (support_wrappers) {
394  // Check if the object is a JSValue wrapper.
395  __ bind(&check_wrapper);
396  __ Branch(miss, ne, scratch1, Operand(JS_VALUE_TYPE));
397 
398  // Unwrap the value and check if the wrapped value is a string.
399  __ lw(scratch1, FieldMemOperand(receiver, JSValue::kValueOffset));
400  GenerateStringCheck(masm, scratch1, scratch2, scratch2, miss, miss);
401  __ lw(v0, FieldMemOperand(scratch1, String::kLengthOffset));
402  __ Ret();
403  }
404 }
405 
406 
407 void StubCompiler::GenerateLoadFunctionPrototype(MacroAssembler* masm,
408  Register receiver,
409  Register scratch1,
410  Register scratch2,
411  Label* miss_label) {
412  __ TryGetFunctionPrototype(receiver, scratch1, scratch2, miss_label);
413  __ mov(v0, scratch1);
414  __ Ret();
415 }
416 
417 
418 // Generate StoreField code; the value is passed in the a0 register.
419 // After executing generated code, the receiver_reg and name_reg
420 // may be clobbered.
421 void StubCompiler::GenerateStoreField(MacroAssembler* masm,
422  Handle<JSObject> object,
423  int index,
424  Handle<Map> transition,
425  Handle<String> name,
426  Register receiver_reg,
427  Register name_reg,
428  Register scratch1,
429  Register scratch2,
430  Label* miss_label) {
431  // a0 : value.
432  Label exit;
433 
434  LookupResult lookup(masm->isolate());
435  object->Lookup(*name, &lookup);
436  if (lookup.IsFound() && (lookup.IsReadOnly() || !lookup.IsCacheable())) {
437  // In sloppy mode, we could just return the value and be done. However, we
438  // might be in strict mode, where we have to throw. Since we cannot tell,
439  // go into slow case unconditionally.
440  __ jmp(miss_label);
441  return;
442  }
443 
444  // Check that the map of the object hasn't changed.
445  CompareMapMode mode = transition.is_null() ? ALLOW_ELEMENT_TRANSITION_MAPS
446  : REQUIRE_EXACT_MAP;
447  __ CheckMap(receiver_reg, scratch1, Handle<Map>(object->map()), miss_label,
448  DO_SMI_CHECK, mode);
449 
450  // Perform global security token check if needed.
451  if (object->IsJSGlobalProxy()) {
452  __ CheckAccessGlobalProxy(receiver_reg, scratch1, miss_label);
453  }
454 
455  // Check that we are allowed to write this.
456  if (!transition.is_null() && object->GetPrototype()->IsJSObject()) {
457  JSObject* holder;
458  if (lookup.IsFound()) {
459  holder = lookup.holder();
460  } else {
461  // Find the top object.
462  holder = *object;
463  do {
464  holder = JSObject::cast(holder->GetPrototype());
465  } while (holder->GetPrototype()->IsJSObject());
466  }
467  // We need an extra register; push name_reg so it can be reused.
468  __ push(name_reg);
469  Label miss_pop, done_check;
470  CheckPrototypes(object, receiver_reg, Handle<JSObject>(holder), name_reg,
471  scratch1, scratch2, name, &miss_pop);
472  __ jmp(&done_check);
473  __ bind(&miss_pop);
474  __ pop(name_reg);
475  __ jmp(miss_label);
476  __ bind(&done_check);
477  __ pop(name_reg);
478  }
479 
480  // Stub never generated for non-global objects that require access
481  // checks.
482  ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());
483 
484  // Perform map transition for the receiver if necessary.
485  if (!transition.is_null() && (object->map()->unused_property_fields() == 0)) {
486  // The properties must be extended before we can store the value.
487  // We jump to a runtime call that extends the properties array.
488  __ push(receiver_reg);
489  __ li(a2, Operand(transition));
490  __ Push(a2, a0);
491  __ TailCallExternalReference(
492  ExternalReference(IC_Utility(IC::kSharedStoreIC_ExtendStorage),
493  masm->isolate()),
494  3, 1);
495  return;
496  }
497 
498  if (!transition.is_null()) {
499  // Update the map of the object.
500  __ li(scratch1, Operand(transition));
501  __ sw(scratch1, FieldMemOperand(receiver_reg, HeapObject::kMapOffset));
502 
503  // Update the write barrier for the map field and pass the now unused
504  // name_reg as scratch register.
505  __ RecordWriteField(receiver_reg,
506  HeapObject::kMapOffset,
507  scratch1,
508  name_reg,
509  kRAHasNotBeenSaved,
510  kDontSaveFPRegs,
511  OMIT_REMEMBERED_SET,
512  OMIT_SMI_CHECK);
513  }
514 
515  // Adjust for the number of properties stored in the object. Even in the
516  // face of a transition we can use the old map here because the size of the
517  // object and the number of in-object properties is not going to change.
518  index -= object->map()->inobject_properties();
519 
520  if (index < 0) {
521  // Set the property straight into the object.
522  int offset = object->map()->instance_size() + (index * kPointerSize);
523  __ sw(a0, FieldMemOperand(receiver_reg, offset));
524 
525  // Skip updating write barrier if storing a smi.
526  __ JumpIfSmi(a0, &exit, scratch1);
527 
528  // Update the write barrier for the array address.
529  // Pass the now unused name_reg as a scratch register.
530  __ mov(name_reg, a0);
531  __ RecordWriteField(receiver_reg,
532  offset,
533  name_reg,
534  scratch1,
535  kRAHasNotBeenSaved,
536  kDontSaveFPRegs);
537  } else {
538  // Write to the properties array.
539  int offset = index * kPointerSize + FixedArray::kHeaderSize;
540  // Get the properties array.
541  __ lw(scratch1,
542  FieldMemOperand(receiver_reg, JSObject::kPropertiesOffset));
543  __ sw(a0, FieldMemOperand(scratch1, offset));
544 
545  // Skip updating write barrier if storing a smi.
546  __ JumpIfSmi(a0, &exit);
547 
548  // Update the write barrier for the array address.
549  // Ok to clobber receiver_reg and name_reg, since we return.
550  __ mov(name_reg, a0);
551  __ RecordWriteField(scratch1,
552  offset,
553  name_reg,
554  receiver_reg,
555  kRAHasNotBeenSaved,
556  kDontSaveFPRegs);
557  }
558 
559  // Return the value (register v0).
560  __ bind(&exit);
561  __ mov(v0, a0);
562  __ Ret();
563 }
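// The store path above, in brief: (1) miss if the lookup found a read-only
// or non-cacheable property; (2) map check plus an optional global-proxy
// access check; (3) if a map transition needs a fresh property slot,
// tail-call the SharedStoreIC_ExtendStorage runtime entry with
// (receiver, transition map, value); (4) otherwise write the new map and
// the value, emitting write barriers except when the value is a smi.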
564 
565 
566 void StubCompiler::GenerateLoadMiss(MacroAssembler* masm, Code::Kind kind) {
567  ASSERT(kind == Code::LOAD_IC || kind == Code::KEYED_LOAD_IC);
568  Handle<Code> code = (kind == Code::LOAD_IC)
569  ? masm->isolate()->builtins()->LoadIC_Miss()
570  : masm->isolate()->builtins()->KeyedLoadIC_Miss();
571  __ Jump(code, RelocInfo::CODE_TARGET);
572 }
573 
574 
575 static void GenerateCallFunction(MacroAssembler* masm,
576  Handle<Object> object,
577  const ParameterCount& arguments,
578  Label* miss,
579  Code::ExtraICState extra_ic_state) {
580  // ----------- S t a t e -------------
581  // -- a0: receiver
582  // -- a1: function to call
583  // -----------------------------------
584  // Check that the function really is a function.
585  __ JumpIfSmi(a1, miss);
586  __ GetObjectType(a1, a3, a3);
587  __ Branch(miss, ne, a3, Operand(JS_FUNCTION_TYPE));
588 
589  // Patch the receiver on the stack with the global proxy if
590  // necessary.
591  if (object->IsGlobalObject()) {
592  __ lw(a3, FieldMemOperand(a0, GlobalObject::kGlobalReceiverOffset));
593  __ sw(a3, MemOperand(sp, arguments.immediate() * kPointerSize));
594  }
595 
596  // Invoke the function.
597  CallKind call_kind = CallICBase::Contextual::decode(extra_ic_state)
598  ? CALL_AS_FUNCTION
599  : CALL_AS_METHOD;
600  __ InvokeFunction(a1, arguments, JUMP_FUNCTION, NullCallWrapper(), call_kind);
601 }
602 
603 
604 static void PushInterceptorArguments(MacroAssembler* masm,
605  Register receiver,
606  Register holder,
607  Register name,
608  Handle<JSObject> holder_obj) {
609  __ push(name);
610  Handle<InterceptorInfo> interceptor(holder_obj->GetNamedInterceptor());
611  ASSERT(!masm->isolate()->heap()->InNewSpace(*interceptor));
612  Register scratch = name;
613  __ li(scratch, Operand(interceptor));
614  __ Push(scratch, receiver, holder);
615  __ lw(scratch, FieldMemOperand(scratch, InterceptorInfo::kDataOffset));
616  __ push(scratch);
617  __ li(scratch, Operand(ExternalReference::isolate_address()));
618  __ push(scratch);
619 }
620 
621 
622 static void CompileCallLoadPropertyWithInterceptor(
623  MacroAssembler* masm,
624  Register receiver,
625  Register holder,
626  Register name,
627  Handle<JSObject> holder_obj) {
628  PushInterceptorArguments(masm, receiver, holder, name, holder_obj);
629 
630  ExternalReference ref =
631  ExternalReference(IC_Utility(IC::kLoadPropertyWithInterceptorOnly),
632  masm->isolate());
633  __ PrepareCEntryArgs(6);
634  __ PrepareCEntryFunction(ref);
635 
636  CEntryStub stub(1);
637  __ CallStub(&stub);
638 }
639 
640 
641 static const int kFastApiCallArguments = 4;
642 
643 
644 // Reserves space for the extra arguments to the API function in the
645 // caller's frame.
646 //
647 // These arguments are set by CheckPrototypes and GenerateFastApiDirectCall.
648 static void ReserveSpaceForFastApiCall(MacroAssembler* masm,
649  Register scratch) {
650  ASSERT(Smi::FromInt(0) == 0);
651  for (int i = 0; i < kFastApiCallArguments; i++) {
652  __ push(zero_reg);
653  }
654 }
655 
656 
657 // Undoes the effects of ReserveSpaceForFastApiCall.
658 static void FreeSpaceForFastApiCall(MacroAssembler* masm) {
659  __ Drop(kFastApiCallArguments);
660 }
661 
662 
663 static void GenerateFastApiDirectCall(MacroAssembler* masm,
664  const CallOptimization& optimization,
665  int argc) {
666  // ----------- S t a t e -------------
667  // -- sp[0] : holder (set by CheckPrototypes)
668  // -- sp[4] : callee JS function
669  // -- sp[8] : call data
670  // -- sp[12] : isolate
671  // -- sp[16] : last JS argument
672  // -- ...
673  // -- sp[(argc + 3) * 4] : first JS argument
674  // -- sp[(argc + 4) * 4] : receiver
675  // -----------------------------------
676  // Get the function and setup the context.
677  Handle<JSFunction> function = optimization.constant_function();
678  __ LoadHeapObject(t1, function);
679  __ lw(cp, FieldMemOperand(t1, JSFunction::kContextOffset));
680 
681  // Pass the additional arguments.
682  Handle<CallHandlerInfo> api_call_info = optimization.api_call_info();
683  Handle<Object> call_data(api_call_info->data());
684  if (masm->isolate()->heap()->InNewSpace(*call_data)) {
685  __ li(a0, api_call_info);
686  __ lw(t2, FieldMemOperand(a0, CallHandlerInfo::kDataOffset));
687  } else {
688  __ li(t2, call_data);
689  }
690 
691  __ li(t3, Operand(ExternalReference::isolate_address()));
692  // Store JS function, call data and isolate.
693  __ sw(t1, MemOperand(sp, 1 * kPointerSize));
694  __ sw(t2, MemOperand(sp, 2 * kPointerSize));
695  __ sw(t3, MemOperand(sp, 3 * kPointerSize));
696 
697  // Prepare arguments.
698  __ Addu(a2, sp, Operand(3 * kPointerSize));
699 
700  // Allocate the v8::Arguments structure in the arguments' space since
701  // it's not controlled by GC.
702  const int kApiStackSpace = 4;
703 
704  FrameScope frame_scope(masm, StackFrame::MANUAL);
705  __ EnterExitFrame(false, kApiStackSpace);
706 
707  // NOTE: the O32 abi requires a0 to hold a special pointer when returning a
708  // struct from the function (which is currently the case). This means we pass
709  // the first argument in a1 instead of a0. TryCallApiFunctionAndReturn
710  // will handle setting up a0.
711 
712  // a1 = v8::Arguments&
713  // Arguments is built at sp + 1 (sp is a reserved spot for ra).
714  __ Addu(a1, sp, kPointerSize);
715 
716  // v8::Arguments::implicit_args_
717  __ sw(a2, MemOperand(a1, 0 * kPointerSize));
718  // v8::Arguments::values_
719  __ Addu(t0, a2, Operand(argc * kPointerSize));
720  __ sw(t0, MemOperand(a1, 1 * kPointerSize));
721  // v8::Arguments::length_ = argc
722  __ li(t0, Operand(argc));
723  __ sw(t0, MemOperand(a1, 2 * kPointerSize));
724  // v8::Arguments::is_construct_call = 0
725  __ sw(zero_reg, MemOperand(a1, 3 * kPointerSize));
726 
727  const int kStackUnwindSpace = argc + kFastApiCallArguments + 1;
728  Address function_address = v8::ToCData<Address>(api_call_info->callback());
729  ApiFunction fun(function_address);
730  ExternalReference ref =
731  ExternalReference(&fun,
732  ExternalReference::DIRECT_API_CALL,
733  masm->isolate());
734  AllowExternalCallThatCantCauseGC scope(masm);
735  __ CallApiFunctionAndReturn(ref, kStackUnwindSpace);
736 }
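// The four words written through a1 above mirror the v8::Arguments layout
// the API callback expects (a sketch of the correspondence):
//   a1[0] = implicit_args_      (points at the holder/callee/data/isolate block)
//   a1[1] = values_             (address of the first JS argument)
//   a1[2] = length_             (argc)
//   a1[3] = is_construct_call_  (0; fast API calls never run as constructors)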
737 
738 class CallInterceptorCompiler BASE_EMBEDDED {
739  public:
740  CallInterceptorCompiler(StubCompiler* stub_compiler,
741  const ParameterCount& arguments,
742  Register name,
743  Code::ExtraICState extra_ic_state)
744  : stub_compiler_(stub_compiler),
745  arguments_(arguments),
746  name_(name),
747  extra_ic_state_(extra_ic_state) {}
748 
749  void Compile(MacroAssembler* masm,
750  Handle<JSObject> object,
751  Handle<JSObject> holder,
752  Handle<String> name,
753  LookupResult* lookup,
754  Register receiver,
755  Register scratch1,
756  Register scratch2,
757  Register scratch3,
758  Label* miss) {
759  ASSERT(holder->HasNamedInterceptor());
760  ASSERT(!holder->GetNamedInterceptor()->getter()->IsUndefined());
761 
762  // Check that the receiver isn't a smi.
763  __ JumpIfSmi(receiver, miss);
764  CallOptimization optimization(lookup);
765  if (optimization.is_constant_call()) {
766  CompileCacheable(masm, object, receiver, scratch1, scratch2, scratch3,
767  holder, lookup, name, optimization, miss);
768  } else {
769  CompileRegular(masm, object, receiver, scratch1, scratch2, scratch3,
770  name, holder, miss);
771  }
772  }
773 
774  private:
775  void CompileCacheable(MacroAssembler* masm,
776  Handle<JSObject> object,
777  Register receiver,
778  Register scratch1,
779  Register scratch2,
780  Register scratch3,
781  Handle<JSObject> interceptor_holder,
782  LookupResult* lookup,
783  Handle<String> name,
784  const CallOptimization& optimization,
785  Label* miss_label) {
786  ASSERT(optimization.is_constant_call());
787  ASSERT(!lookup->holder()->IsGlobalObject());
788  Counters* counters = masm->isolate()->counters();
789  int depth1 = kInvalidProtoDepth;
790  int depth2 = kInvalidProtoDepth;
791  bool can_do_fast_api_call = false;
792  if (optimization.is_simple_api_call() &&
793  !lookup->holder()->IsGlobalObject()) {
794  depth1 = optimization.GetPrototypeDepthOfExpectedType(
795  object, interceptor_holder);
796  if (depth1 == kInvalidProtoDepth) {
797  depth2 = optimization.GetPrototypeDepthOfExpectedType(
798  interceptor_holder, Handle<JSObject>(lookup->holder()));
799  }
800  can_do_fast_api_call =
801  depth1 != kInvalidProtoDepth || depth2 != kInvalidProtoDepth;
802  }
803 
804  __ IncrementCounter(counters->call_const_interceptor(), 1,
805  scratch1, scratch2);
806 
807  if (can_do_fast_api_call) {
808  __ IncrementCounter(counters->call_const_interceptor_fast_api(), 1,
809  scratch1, scratch2);
810  ReserveSpaceForFastApiCall(masm, scratch1);
811  }
812 
813  // Check that the maps from receiver to interceptor's holder
814  // haven't changed and thus we can invoke interceptor.
815  Label miss_cleanup;
816  Label* miss = can_do_fast_api_call ? &miss_cleanup : miss_label;
817  Register holder =
818  stub_compiler_->CheckPrototypes(object, receiver, interceptor_holder,
819  scratch1, scratch2, scratch3,
820  name, depth1, miss);
821 
822  // Invoke an interceptor and if it provides a value,
823  // branch to |regular_invoke|.
824  Label regular_invoke;
825  LoadWithInterceptor(masm, receiver, holder, interceptor_holder, scratch2,
826  &regular_invoke);
827 
828  // Interceptor returned nothing for this property. Try to use cached
829  // constant function.
830 
831  // Check that the maps from interceptor's holder to constant function's
832  // holder haven't changed and thus we can use cached constant function.
833  if (*interceptor_holder != lookup->holder()) {
834  stub_compiler_->CheckPrototypes(interceptor_holder, receiver,
835  Handle<JSObject>(lookup->holder()),
836  scratch1, scratch2, scratch3,
837  name, depth2, miss);
838  } else {
839  // CheckPrototypes has a side effect of fetching a 'holder'
840  // for API (object which is instanceof for the signature). It's
841  // safe to omit it here, as if present, it should be fetched
842  // by the previous CheckPrototypes.
843  ASSERT(depth2 == kInvalidProtoDepth);
844  }
845 
846  // Invoke function.
847  if (can_do_fast_api_call) {
848  GenerateFastApiDirectCall(masm, optimization, arguments_.immediate());
849  } else {
850  CallKind call_kind = CallICBase::Contextual::decode(extra_ic_state_)
851  ? CALL_AS_FUNCTION
852  : CALL_AS_METHOD;
853  __ InvokeFunction(optimization.constant_function(), arguments_,
854  JUMP_FUNCTION, NullCallWrapper(), call_kind);
855  }
856 
857  // Deferred code for the fast API call case: clean up preallocated space.
858  if (can_do_fast_api_call) {
859  __ bind(&miss_cleanup);
860  FreeSpaceForFastApiCall(masm);
861  __ Branch(miss_label);
862  }
863 
864  // Invoke a regular function.
865  __ bind(&regular_invoke);
866  if (can_do_fast_api_call) {
867  FreeSpaceForFastApiCall(masm);
868  }
869  }
870 
871  void CompileRegular(MacroAssembler* masm,
872  Handle<JSObject> object,
873  Register receiver,
874  Register scratch1,
875  Register scratch2,
876  Register scratch3,
877  Handle<String> name,
878  Handle<JSObject> interceptor_holder,
879  Label* miss_label) {
880  Register holder =
881  stub_compiler_->CheckPrototypes(object, receiver, interceptor_holder,
882  scratch1, scratch2, scratch3,
883  name, miss_label);
884 
885  // Call a runtime function to load the interceptor property.
886  FrameScope scope(masm, StackFrame::INTERNAL);
887  // Save the name_ register across the call.
888  __ push(name_);
889 
890  PushInterceptorArguments(masm, receiver, holder, name_, interceptor_holder);
891 
892  __ CallExternalReference(
893  ExternalReference(
894  IC_Utility(IC::kLoadPropertyWithInterceptorForCall),
895  masm->isolate()),
896  6);
897  // Restore the name_ register.
898  __ pop(name_);
899  // Leave the internal frame.
900  }
901 
902  void LoadWithInterceptor(MacroAssembler* masm,
903  Register receiver,
904  Register holder,
905  Handle<JSObject> holder_obj,
906  Register scratch,
907  Label* interceptor_succeeded) {
908  {
909  FrameScope scope(masm, StackFrame::INTERNAL);
910 
911  __ Push(holder, name_);
912  CompileCallLoadPropertyWithInterceptor(masm,
913  receiver,
914  holder,
915  name_,
916  holder_obj);
917  __ pop(name_); // Restore the name.
918  __ pop(receiver); // Restore the holder.
919  }
920  // If interceptor returns no-result sentinel, call the constant function.
921  __ LoadRoot(scratch, Heap::kNoInterceptorResultSentinelRootIndex);
922  __ Branch(interceptor_succeeded, ne, v0, Operand(scratch));
923  }
924 
925  StubCompiler* stub_compiler_;
926  const ParameterCount& arguments_;
927  Register name_;
928  Code::ExtraICState extra_ic_state_;
929 };
930 
931 
932 
933 // Generate code to check that a global property cell is empty. Create
934 // the property cell at compilation time if no cell exists for the
935 // property.
936 static void GenerateCheckPropertyCell(MacroAssembler* masm,
937  Handle<GlobalObject> global,
938  Handle<String> name,
939  Register scratch,
940  Label* miss) {
941  Handle<JSGlobalPropertyCell> cell =
942  GlobalObject::EnsurePropertyCell(global, name);
943  ASSERT(cell->value()->IsTheHole());
944  __ li(scratch, Operand(cell));
945  __ lw(scratch,
946  FieldMemOperand(scratch, JSGlobalPropertyCell::kValueOffset));
947  __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
948  __ Branch(miss, ne, scratch, Operand(at));
949 }
950 
951 
952 // Calls GenerateCheckPropertyCell for each global object in the prototype chain
953 // from object to (but not including) holder.
954 static void GenerateCheckPropertyCells(MacroAssembler* masm,
955  Handle<JSObject> object,
956  Handle<JSObject> holder,
957  Handle<String> name,
958  Register scratch,
959  Label* miss) {
960  Handle<JSObject> current = object;
961  while (!current.is_identical_to(holder)) {
962  if (current->IsGlobalObject()) {
963  GenerateCheckPropertyCell(masm,
964  Handle<GlobalObject>::cast(current),
965  name,
966  scratch,
967  miss);
968  }
969  current = Handle<JSObject>(JSObject::cast(current->GetPrototype()));
970  }
971 }
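// Example: when a stub compiled for receiver -> global -> holder skips the
// global object, EnsurePropertyCell creates (or finds) the cell for |name|
// on that global, and the check generated above keeps the stub valid only
// while the cell still holds the hole, i.e. while no shadowing property
// has been defined between receiver and holder.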
972 
973 
974 // Convert and store int passed in register ival to IEEE 754 single precision
975 // floating point value at memory location (dst + 4 * wordoffset).
976 // If an FPU is available, use it for the conversion.
977 static void StoreIntAsFloat(MacroAssembler* masm,
978  Register dst,
979  Register wordoffset,
980  Register ival,
981  Register fval,
982  Register scratch1,
983  Register scratch2) {
984  if (CpuFeatures::IsSupported(FPU)) {
985  CpuFeatures::Scope scope(FPU);
986  __ mtc1(ival, f0);
987  __ cvt_s_w(f0, f0);
988  __ sll(scratch1, wordoffset, 2);
989  __ addu(scratch1, dst, scratch1);
990  __ swc1(f0, MemOperand(scratch1, 0));
991  } else {
992  // FPU is not available, do manual conversions.
993 
994  Label not_special, done;
995  // Move sign bit from source to destination. This works because the sign
996  // bit in the exponent word of the double has the same position and polarity
997  // as the 2's complement sign bit in a Smi.
998  ASSERT(kBinary32SignMask == 0x80000000u);
999 
1000  __ And(fval, ival, Operand(kBinary32SignMask));
1001  // Negate value if it is negative.
1002  __ subu(scratch1, zero_reg, ival);
1003  __ Movn(ival, scratch1, fval);
1004 
1005  // We have -1, 0 or 1, which we treat specially. Register ival contains
1006  // absolute value: it is either equal to 1 (special case of -1 and 1),
1007  // greater than 1 (not a special case) or less than 1 (special case of 0).
1008  __ Branch(&not_special, gt, ival, Operand(1));
1009 
1010  // For 1 or -1 we need to or in the 0 exponent (biased).
1011  static const uint32_t exponent_word_for_1 =
1012  kBinary32ExponentBias << kBinary32ExponentShift;
1013 
1014  __ Xor(scratch1, ival, Operand(1));
1015  __ li(scratch2, exponent_word_for_1);
1016  __ or_(scratch2, fval, scratch2);
1017  __ Movz(fval, scratch2, scratch1); // Only if ival is equal to 1.
1018  __ Branch(&done);
1019 
1020  __ bind(&not_special);
1021  // Count leading zeros.
1022  // Gets the wrong answer for 0, but we already checked for that case above.
1023  Register zeros = scratch2;
1024  __ Clz(zeros, ival);
1025 
1026  // Compute exponent and or it into the exponent register.
1027  __ li(scratch1, (kBitsPerInt - 1) + kBinary32ExponentBias);
1028  __ subu(scratch1, scratch1, zeros);
1029 
1030  __ sll(scratch1, scratch1, kBinary32ExponentShift);
1031  __ or_(fval, fval, scratch1);
1032 
1033  // Shift up the source chopping the top bit off.
1034  __ Addu(zeros, zeros, Operand(1));
1035  // This wouldn't work for 1 and -1 as the shift would be 32 which means 0.
1036  __ sllv(ival, ival, zeros);
1037  // Or in the top of the mantissa (top 23 bits).
1038  __ srl(scratch1, ival, kBitsPerInt - kBinary32MantissaBits);
1039  __ or_(fval, fval, scratch1);
1040 
1041  __ bind(&done);
1042 
1043  __ sll(scratch1, wordoffset, 2);
1044  __ addu(scratch1, dst, scratch1);
1045  __ sw(fval, MemOperand(scratch1, 0));
1046  }
1047 }
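// Worked example for the slow (no-FPU) path: ival == 1 takes the special
// case, so fval = sign(0) | (kBinary32ExponentBias << kBinary32ExponentShift)
// = 127 << 23 = 0x3F800000, the IEEE 754 single-precision encoding of 1.0f.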
1048 
1049 
1050 // Convert unsigned integer with specified number of leading zeroes in binary
1051 // representation to IEEE 754 double.
1052 // Integer to convert is passed in register hiword.
1053 // Resulting double is returned in registers hiword:loword.
1054 // This function does not work correctly for 0.
1055 static void GenerateUInt2Double(MacroAssembler* masm,
1056  Register hiword,
1057  Register loword,
1058  Register scratch,
1059  int leading_zeroes) {
1060  const int meaningful_bits = kBitsPerInt - leading_zeroes - 1;
1061  const int biased_exponent = HeapNumber::kExponentBias + meaningful_bits;
1062 
1063  const int mantissa_shift_for_hi_word =
1064  meaningful_bits - HeapNumber::kMantissaBitsInTopWord;
1065 
1066  const int mantissa_shift_for_lo_word =
1067  kBitsPerInt - mantissa_shift_for_hi_word;
1068 
1069  __ li(scratch, biased_exponent << HeapNumber::kExponentShift);
1070  if (mantissa_shift_for_hi_word > 0) {
1071  __ sll(loword, hiword, mantissa_shift_for_lo_word);
1072  __ srl(hiword, hiword, mantissa_shift_for_hi_word);
1073  __ or_(hiword, scratch, hiword);
1074  } else {
1075  __ mov(loword, zero_reg);
1076  __ sll(hiword, hiword, mantissa_shift_for_hi_word);
1077  __ or_(hiword, scratch, hiword);
1078  }
1079 
1080  // If the least significant bit of the biased exponent was not 1, it was
1081  // corrupted by the most significant bit of the mantissa, so fix that up.
1082  if (!(biased_exponent & 1)) {
1083  __ li(scratch, 1 << HeapNumber::kExponentShift);
1084  __ nor(scratch, scratch, scratch);
1085  __ and_(hiword, hiword, scratch);
1086  }
1087 }
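// Worked example (leading_zeroes == 0): hiword == 0x80000000, i.e. 2^31.
// meaningful_bits == 31 and biased_exponent == 1023 + 31 == 1054; the
// shifts leave hiword == 0x41F00000, and because 1054 is even the fix-up
// above clears the stray bit at kExponentShift, giving
// 0x41E00000:0x00000000, the IEEE 754 double encoding of 2147483648.0.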
1088 
1089 
1090 #undef __
1091 #define __ ACCESS_MASM(masm())
1092 
1093 
1094 Register StubCompiler::CheckPrototypes(Handle<JSObject> object,
1095  Register object_reg,
1096  Handle<JSObject> holder,
1097  Register holder_reg,
1098  Register scratch1,
1099  Register scratch2,
1100  Handle<String> name,
1101  int save_at_depth,
1102  Label* miss) {
1103  // Make sure there's no overlap between holder and object registers.
1104  ASSERT(!scratch1.is(object_reg) && !scratch1.is(holder_reg));
1105  ASSERT(!scratch2.is(object_reg) && !scratch2.is(holder_reg)
1106  && !scratch2.is(scratch1));
1107 
1108  // Keep track of the current object in register reg.
1109  Register reg = object_reg;
1110  int depth = 0;
1111 
1112  if (save_at_depth == depth) {
1113  __ sw(reg, MemOperand(sp));
1114  }
1115 
1116  // Check the maps in the prototype chain.
1117  // Traverse the prototype chain from the object and do map checks.
1118  Handle<JSObject> current = object;
1119  while (!current.is_identical_to(holder)) {
1120  ++depth;
1121 
1122  // Only global objects and objects that do not require access
1123  // checks are allowed in stubs.
1124  ASSERT(current->IsJSGlobalProxy() || !current->IsAccessCheckNeeded());
1125 
1126  Handle<JSObject> prototype(JSObject::cast(current->GetPrototype()));
1127  if (!current->HasFastProperties() &&
1128  !current->IsJSGlobalObject() &&
1129  !current->IsJSGlobalProxy()) {
1130  if (!name->IsSymbol()) {
1131  name = factory()->LookupSymbol(name);
1132  }
1133  ASSERT(current->property_dictionary()->FindEntry(*name) ==
1134  StringDictionary::kNotFound);
1135 
1136  GenerateDictionaryNegativeLookup(masm(), miss, reg, name,
1137  scratch1, scratch2);
1138 
1139  __ lw(scratch1, FieldMemOperand(reg, HeapObject::kMapOffset));
1140  reg = holder_reg; // From now on the object will be in holder_reg.
1141  __ lw(reg, FieldMemOperand(scratch1, Map::kPrototypeOffset));
1142  } else {
1143  Handle<Map> current_map(current->map());
1144  __ CheckMap(reg, scratch1, current_map, miss, DONT_DO_SMI_CHECK,
1145  ALLOW_ELEMENT_TRANSITION_MAPS);
1146  // Check access rights to the global object. This has to happen after
1147  // the map check so that we know that the object is actually a global
1148  // object.
1149  if (current->IsJSGlobalProxy()) {
1150  __ CheckAccessGlobalProxy(reg, scratch2, miss);
1151  }
1152  reg = holder_reg; // From now on the object will be in holder_reg.
1153 
1154  if (heap()->InNewSpace(*prototype)) {
1155  // The prototype is in new space; we cannot store a reference to it
1156  // in the code. Load it from the map.
1157  __ lw(reg, FieldMemOperand(scratch1, Map::kPrototypeOffset));
1158  } else {
1159  // The prototype is in old space; load it directly.
1160  __ li(reg, Operand(prototype));
1161  }
1162  }
1163 
1164  if (save_at_depth == depth) {
1165  __ sw(reg, MemOperand(sp));
1166  }
1167 
1168  // Go to the next object in the prototype chain.
1169  current = prototype;
1170  }
1171 
1172  // Log the check depth.
1173  LOG(masm()->isolate(), IntEvent("check-maps-depth", depth + 1));
1174 
1175  // Check the holder map.
1176  __ CheckMap(reg, scratch1, Handle<Map>(current->map()), miss,
1177  DONT_DO_SMI_CHECK, ALLOW_ELEMENT_TRANSITION_MAPS);
1178 
1179  // Perform security check for access to the global object.
1180  ASSERT(holder->IsJSGlobalProxy() || !holder->IsAccessCheckNeeded());
1181  if (holder->IsJSGlobalProxy()) {
1182  __ CheckAccessGlobalProxy(reg, scratch1, miss);
1183  }
1184 
1185  // If we've skipped any global objects, it's not enough to verify that
1186  // their maps haven't changed. We also need to check that the property
1187  // cell for the property is still empty.
1188  GenerateCheckPropertyCells(masm(), object, holder, name, scratch1, miss);
1189 
1190  // Return the register containing the holder.
1191  return reg;
1192 }
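// CheckPrototypes in brief: walk the chain object -> ... -> holder,
// map-checking every link (with a negative dictionary lookup instead for
// slow-mode, non-global objects), optionally spill the object found at
// save_at_depth into sp[0] for fast API calls, then verify the holder's
// map, its access rights if it is a global proxy, and the property cells
// of any global objects that were skipped along the way.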
1193 
1194 
1195 void StubCompiler::GenerateLoadField(Handle<JSObject> object,
1196  Handle<JSObject> holder,
1197  Register receiver,
1198  Register scratch1,
1199  Register scratch2,
1200  Register scratch3,
1201  int index,
1202  Handle<String> name,
1203  Label* miss) {
1204  // Check that the receiver isn't a smi.
1205  __ JumpIfSmi(receiver, miss);
1206 
1207  // Check that the maps haven't changed.
1208  Register reg = CheckPrototypes(
1209  object, receiver, holder, scratch1, scratch2, scratch3, name, miss);
1210  GenerateFastPropertyLoad(masm(), v0, reg, holder, index);
1211  __ Ret();
1212 }
1213 
1214 
1215 void StubCompiler::GenerateLoadConstant(Handle<JSObject> object,
1216  Handle<JSObject> holder,
1217  Register receiver,
1218  Register scratch1,
1219  Register scratch2,
1220  Register scratch3,
1221  Handle<JSFunction> value,
1222  Handle<String> name,
1223  Label* miss) {
1224  // Check that the receiver isn't a smi.
1225  __ JumpIfSmi(receiver, miss, scratch1);
1226 
1227  // Check that the maps haven't changed.
1228  CheckPrototypes(object, receiver, holder,
1229  scratch1, scratch2, scratch3, name, miss);
1230 
1231  // Return the constant value.
1232  __ LoadHeapObject(v0, value);
1233  __ Ret();
1234 }
1235 
1236 
1237 void StubCompiler::GenerateLoadCallback(Handle<JSObject> object,
1238  Handle<JSObject> holder,
1239  Register receiver,
1240  Register name_reg,
1241  Register scratch1,
1242  Register scratch2,
1243  Register scratch3,
1244  Handle<AccessorInfo> callback,
1245  Handle<String> name,
1246  Label* miss) {
1247  // Check that the receiver isn't a smi.
1248  __ JumpIfSmi(receiver, miss, scratch1);
1249 
1250  // Check that the maps haven't changed.
1251  Register reg = CheckPrototypes(object, receiver, holder, scratch1,
1252  scratch2, scratch3, name, miss);
1253 
1254  // Build the AccessorInfo::args_ list on the stack, and push the property
1255  // name below the exit frame, so the GC is aware of the stored pointers.
1256  __ push(receiver);
1257  __ mov(scratch2, sp); // scratch2 = AccessorInfo::args_
1258  if (heap()->InNewSpace(callback->data())) {
1259  __ li(scratch3, callback);
1260  __ lw(scratch3, FieldMemOperand(scratch3, AccessorInfo::kDataOffset));
1261  } else {
1262  __ li(scratch3, Handle<Object>(callback->data()));
1263  }
1264  __ Subu(sp, sp, 4 * kPointerSize);
1265  __ sw(reg, MemOperand(sp, 3 * kPointerSize));
1266  __ sw(scratch3, MemOperand(sp, 2 * kPointerSize));
1267  __ li(scratch3, Operand(ExternalReference::isolate_address()));
1268  __ sw(scratch3, MemOperand(sp, 1 * kPointerSize));
1269  __ sw(name_reg, MemOperand(sp, 0 * kPointerSize));
1270 
1271  __ mov(a2, scratch2); // Saved in case scratch2 == a1.
1272  __ mov(a1, sp); // a1 (first argument - see note below) = Handle<String>
1273 
1274  // NOTE: the O32 abi requires a0 to hold a special pointer when returning a
1275  // struct from the function (which is currently the case). This means we pass
1276  // the arguments in a1-a2 instead of a0-a1. TryCallApiFunctionAndReturn
1277  // will handle setting up a0.
1278 
1279  const int kApiStackSpace = 1;
1280  FrameScope frame_scope(masm(), StackFrame::MANUAL);
1281  __ EnterExitFrame(false, kApiStackSpace);
1282 
1283  // Create AccessorInfo instance on the stack above the exit frame with
1284  // scratch2 (internal::Object** args_) as the data.
1285  __ sw(a2, MemOperand(sp, kPointerSize));
1286  // a2 (second argument - see note above) = AccessorInfo&
1287  __ Addu(a2, sp, kPointerSize);
1288 
1289  const int kStackUnwindSpace = 5;
1290  Address getter_address = v8::ToCData<Address>(callback->getter());
1291  ApiFunction fun(getter_address);
1292  ExternalReference ref =
1293  ExternalReference(&fun,
1294  ExternalReference::DIRECT_GETTER_CALL,
1295  masm()->isolate());
1296  __ CallApiFunctionAndReturn(ref, kStackUnwindSpace);
1297 }
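// Stack layout built above, from higher to lower addresses (a reading
// aid): receiver, holder (reg), callback data, isolate, property name
// <- sp. scratch2 (copied to a2) points at the receiver slot and serves
// as AccessorInfo::args_, while a1 points at the name slot and is passed
// as the Handle<String> first argument.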
1298 
1299 
1300 void StubCompiler::GenerateLoadInterceptor(Handle<JSObject> object,
1301  Handle<JSObject> interceptor_holder,
1302  LookupResult* lookup,
1303  Register receiver,
1304  Register name_reg,
1305  Register scratch1,
1306  Register scratch2,
1307  Register scratch3,
1308  Handle<String> name,
1309  Label* miss) {
1310  ASSERT(interceptor_holder->HasNamedInterceptor());
1311  ASSERT(!interceptor_holder->GetNamedInterceptor()->getter()->IsUndefined());
1312 
1313  // Check that the receiver isn't a smi.
1314  __ JumpIfSmi(receiver, miss);
1315 
1316  // So far the most popular follow-ups for interceptor loads are FIELD
1317  // and CALLBACKS, so inline only them; other cases may be added
1318  // later.
1319  bool compile_followup_inline = false;
1320  if (lookup->IsFound() && lookup->IsCacheable()) {
1321  if (lookup->type() == FIELD) {
1322  compile_followup_inline = true;
1323  } else if (lookup->type() == CALLBACKS &&
1324  lookup->GetCallbackObject()->IsAccessorInfo()) {
1325  AccessorInfo* callback = AccessorInfo::cast(lookup->GetCallbackObject());
1326  compile_followup_inline = callback->getter() != NULL &&
1327  callback->IsCompatibleReceiver(*object);
1328  }
1329  }
1330 
1331  if (compile_followup_inline) {
1332  // Compile the interceptor call, followed by inline code to load the
1333  // property from further up the prototype chain if the call fails.
1334  // Check that the maps haven't changed.
1335  Register holder_reg = CheckPrototypes(object, receiver, interceptor_holder,
1336  scratch1, scratch2, scratch3,
1337  name, miss);
1338  ASSERT(holder_reg.is(receiver) || holder_reg.is(scratch1));
1339 
1340  // Preserve the receiver register explicitly whenever it is different from
1341  // the holder and it is needed should the interceptor return without any
1342  // result. The CALLBACKS case needs the receiver to be passed into C++ code;
1343  // the FIELD case might cause a miss during the prototype check.
1344  bool must_perform_prototype_check = *interceptor_holder != lookup->holder();
1345  bool must_preserve_receiver_reg = !receiver.is(holder_reg) &&
1346  (lookup->type() == CALLBACKS || must_perform_prototype_check);
1347 
1348  // Save necessary data before invoking an interceptor.
1349  // Requires a frame to make GC aware of pushed pointers.
1350  {
1351  FrameScope frame_scope(masm(), StackFrame::INTERNAL);
1352  if (must_preserve_receiver_reg) {
1353  __ Push(receiver, holder_reg, name_reg);
1354  } else {
1355  __ Push(holder_reg, name_reg);
1356  }
1357  // Invoke an interceptor. Note: map checks from receiver to
1358  // interceptor's holder have been compiled before (see a caller
1359  // of this method).
1360  CompileCallLoadPropertyWithInterceptor(masm(),
1361  receiver,
1362  holder_reg,
1363  name_reg,
1364  interceptor_holder);
1365  // Check if interceptor provided a value for property. If it's
1366  // the case, return immediately.
1367  Label interceptor_failed;
1368  __ LoadRoot(scratch1, Heap::kNoInterceptorResultSentinelRootIndex);
1369  __ Branch(&interceptor_failed, eq, v0, Operand(scratch1));
1370  frame_scope.GenerateLeaveFrame();
1371  __ Ret();
1372 
1373  __ bind(&interceptor_failed);
1374  __ pop(name_reg);
1375  __ pop(holder_reg);
1376  if (must_preserve_receiver_reg) {
1377  __ pop(receiver);
1378  }
1379  // Leave the internal frame.
1380  }
1381  // Check that the maps from interceptor's holder to lookup's holder
1382  // haven't changed. And load lookup's holder into |holder| register.
1383  if (must_perform_prototype_check) {
1384  holder_reg = CheckPrototypes(interceptor_holder,
1385  holder_reg,
1386  Handle<JSObject>(lookup->holder()),
1387  scratch1,
1388  scratch2,
1389  scratch3,
1390  name,
1391  miss);
1392  }
1393 
1394  if (lookup->type() == FIELD) {
1395  // We found FIELD property in prototype chain of interceptor's holder.
1396  // Retrieve a field from field's holder.
1397  GenerateFastPropertyLoad(masm(), v0, holder_reg,
1398  Handle<JSObject>(lookup->holder()),
1399  lookup->GetFieldIndex());
1400  __ Ret();
1401  } else {
1402  // We found CALLBACKS property in prototype chain of interceptor's
1403  // holder.
1404  ASSERT(lookup->type() == CALLBACKS);
1405  Handle<AccessorInfo> callback(
1406  AccessorInfo::cast(lookup->GetCallbackObject()));
1407  ASSERT(callback->getter() != NULL);
1408 
1409  // Tail call to runtime.
1410  // Important invariant in CALLBACKS case: the code above must be
1411  // structured to never clobber |receiver| register.
1412  __ li(scratch2, callback);
1413 
1414  __ Push(receiver, holder_reg);
1415  __ lw(scratch3,
1416  FieldMemOperand(scratch2, AccessorInfo::kDataOffset));
1417  __ li(scratch1, Operand(ExternalReference::isolate_address()));
1418  __ Push(scratch3, scratch1, scratch2, name_reg);
1419 
1420  ExternalReference ref =
1421  ExternalReference(IC_Utility(IC::kLoadCallbackProperty),
1422  masm()->isolate());
1423  __ TailCallExternalReference(ref, 6, 1);
1424  }
1425  } else { // !compile_followup_inline
1426  // Call the runtime system to load the interceptor.
1427  // Check that the maps haven't changed.
1428  Register holder_reg = CheckPrototypes(object, receiver, interceptor_holder,
1429  scratch1, scratch2, scratch3,
1430  name, miss);
1431  PushInterceptorArguments(masm(), receiver, holder_reg,
1432  name_reg, interceptor_holder);
1433 
1434  ExternalReference ref = ExternalReference(
1435  IC_Utility(IC::kLoadPropertyWithInterceptorForLoad), masm()->isolate());
1436  __ TailCallExternalReference(ref, 6, 1);
1437  }
1438 }
1439 
1440 
1441 void CallStubCompiler::GenerateNameCheck(Handle<String> name, Label* miss) {
1442  if (kind_ == Code::KEYED_CALL_IC) {
1443  __ Branch(miss, ne, a2, Operand(name));
1444  }
1445 }
1446 
1447 
1448 void CallStubCompiler::GenerateGlobalReceiverCheck(Handle<JSObject> object,
1449  Handle<JSObject> holder,
1450  Handle<String> name,
1451  Label* miss) {
1452  ASSERT(holder->IsGlobalObject());
1453 
1454  // Get the number of arguments.
1455  const int argc = arguments().immediate();
1456 
1457  // Get the receiver from the stack.
1458  __ lw(a0, MemOperand(sp, argc * kPointerSize));
1459 
1460  // Check that the maps haven't changed.
1461  __ JumpIfSmi(a0, miss);
1462  CheckPrototypes(object, a0, holder, a3, a1, t0, name, miss);
1463 }
1464 
1465 
1466 void CallStubCompiler::GenerateLoadFunctionFromCell(
1467  Handle<JSGlobalPropertyCell> cell,
1468  Handle<JSFunction> function,
1469  Label* miss) {
1470  // Get the value from the cell.
1471  __ li(a3, Operand(cell));
1472  __ lw(a1, FieldMemOperand(a3, JSGlobalPropertyCell::kValueOffset));
1473 
1474  // Check that the cell contains the same function.
1475  if (heap()->InNewSpace(*function)) {
1476  // We can't embed a pointer to a function in new space so we have
1477  // to verify that the shared function info is unchanged. This has
1478  // the nice side effect that multiple closures based on the same
1479  // function can all use this call IC. Before we load through the
1480  // function, we have to verify that it still is a function.
1481  __ JumpIfSmi(a1, miss);
1482  __ GetObjectType(a1, a3, a3);
1483  __ Branch(miss, ne, a3, Operand(JS_FUNCTION_TYPE));
1484 
1485  // Check the shared function info. Make sure it hasn't changed.
1486  __ li(a3, Handle<SharedFunctionInfo>(function->shared()));
1487  __ lw(t0, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
1488  __ Branch(miss, ne, t0, Operand(a3));
1489  } else {
1490  __ Branch(miss, ne, a1, Operand(function));
1491  }
1492 }
1493 
1494 
1495 void CallStubCompiler::GenerateMissBranch() {
1496  Handle<Code> code =
1497  isolate()->stub_cache()->ComputeCallMiss(arguments().immediate(),
1498  kind_,
1499  extra_state_);
1500  __ Jump(code, RelocInfo::CODE_TARGET);
1501 }
1502 
1503 
1504 Handle<Code> CallStubCompiler::CompileCallField(Handle<JSObject> object,
1505  Handle<JSObject> holder,
1506  int index,
1507  Handle<String> name) {
1508  // ----------- S t a t e -------------
1509  // -- a2 : name
1510  // -- ra : return address
1511  // -----------------------------------
1512  Label miss;
1513 
1514  GenerateNameCheck(name, &miss);
1515 
1516  const int argc = arguments().immediate();
1517 
1518  // Get the receiver of the function from the stack into a0.
1519  __ lw(a0, MemOperand(sp, argc * kPointerSize));
1520  // Check that the receiver isn't a smi.
1521  __ JumpIfSmi(a0, &miss, t0);
1522 
1523  // Do the right check and compute the holder register.
1524  Register reg = CheckPrototypes(object, a0, holder, a1, a3, t0, name, &miss);
1525  GenerateFastPropertyLoad(masm(), a1, reg, holder, index);
1526 
1527  GenerateCallFunction(masm(), object, arguments(), &miss, extra_state_);
1528 
1529  // Handle call cache miss.
1530  __ bind(&miss);
1531  GenerateMissBranch();
1532 
1533  // Return the generated code.
1534  return GetCode(FIELD, name);
1535 }
1536 
1537 
1538 Handle<Code> CallStubCompiler::CompileArrayPushCall(
1539  Handle<Object> object,
1540  Handle<JSObject> holder,
1541  Handle<JSGlobalPropertyCell> cell,
1542  Handle<JSFunction> function,
1543  Handle<String> name) {
1544  // ----------- S t a t e -------------
1545  // -- a2 : name
1546  // -- ra : return address
1547  // -- sp[(argc - n - 1) * 4] : arg[n] (zero-based)
1548  // -- ...
1549  // -- sp[argc * 4] : receiver
1550  // -----------------------------------
1551 
1552  // If object is not an array, bail out to regular call.
1553  if (!object->IsJSArray() || !cell.is_null()) return Handle<Code>::null();
1554 
1555  Label miss;
1556 
1557  GenerateNameCheck(name, &miss);
1558 
1559  Register receiver = a1;
1560 
1561  // Get the receiver from the stack.
1562  const int argc = arguments().immediate();
1563  __ lw(receiver, MemOperand(sp, argc * kPointerSize));
1564 
1565  // Check that the receiver isn't a smi.
1566  __ JumpIfSmi(receiver, &miss);
1567 
1568  // Check that the maps haven't changed.
1569  CheckPrototypes(Handle<JSObject>::cast(object), receiver, holder, a3, v0, t0,
1570  name, &miss);
1571 
1572  if (argc == 0) {
1573  // Nothing to do, just return the length.
1574  __ lw(v0, FieldMemOperand(receiver, JSArray::kLengthOffset));
1575  __ Drop(argc + 1);
1576  __ Ret();
1577  } else {
1578  Label call_builtin;
1579  if (argc == 1) { // Otherwise fall through to call the builtin.
1580  Label attempt_to_grow_elements;
1581 
1582  Register elements = t2;
1583  Register end_elements = t1;
1584  // Get the elements array of the object.
1585  __ lw(elements, FieldMemOperand(receiver, JSArray::kElementsOffset));
1586 
1587  // Check that the elements are in fast mode and writable.
1588  __ CheckMap(elements,
1589  v0,
1590  Heap::kFixedArrayMapRootIndex,
1591  &call_builtin,
1592  DONT_DO_SMI_CHECK);
1593 
1594  // Get the array's length into v0 and calculate new length.
1595  __ lw(v0, FieldMemOperand(receiver, JSArray::kLengthOffset));
1596  STATIC_ASSERT(kSmiTagSize == 1);
1597  STATIC_ASSERT(kSmiTag == 0);
1598  __ Addu(v0, v0, Operand(Smi::FromInt(argc)));
1599 
1600  // Get the elements' length.
1601  __ lw(t0, FieldMemOperand(elements, FixedArray::kLengthOffset));
1602 
1603  // Check if we could survive without allocation.
1604  __ Branch(&attempt_to_grow_elements, gt, v0, Operand(t0));
1605 
1606  // Check if value is a smi.
1607  Label with_write_barrier;
1608  __ lw(t0, MemOperand(sp, (argc - 1) * kPointerSize));
1609  __ JumpIfNotSmi(t0, &with_write_barrier);
1610 
1611  // Save new length.
1612  __ sw(v0, FieldMemOperand(receiver, JSArray::kLengthOffset));
1613 
1614  // Store the value.
1615  // We may need a register containing the address end_elements below,
1616  // so write back the value in end_elements.
1617  __ sll(end_elements, v0, kPointerSizeLog2 - kSmiTagSize);
1618  __ Addu(end_elements, elements, end_elements);
1619  const int kEndElementsOffset =
1620  FixedArray::kHeaderSize - kHeapObjectTag - argc * kPointerSize;
1621  __ Addu(end_elements, end_elements, kEndElementsOffset);
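 // Note: v0 holds the new length as a smi (value << 1 on this 32-bit port),
 // so the shift above scales it to a byte offset; adding the header offset
 // and subtracting the tag and the argc slots leaves end_elements addressing
 // the first free slot, element[old_length].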
1622  __ sw(t0, MemOperand(end_elements));
1623 
1624  // Check for a smi.
1625  __ Drop(argc + 1);
1626  __ Ret();
1627 
1628  __ bind(&with_write_barrier);
1629 
1630  __ lw(a3, FieldMemOperand(receiver, HeapObject::kMapOffset));
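 // Note: a store that needs a write barrier may also need an elements-kind
 // transition: writing a heap object into FAST_SMI_ELEMENTS storage would
 // break the map's smi-only invariant, hence the map checks below.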
1631 
1632  if (FLAG_smi_only_arrays && !FLAG_trace_elements_transitions) {
1633  Label fast_object, not_fast_object;
1634  __ CheckFastObjectElements(a3, t3, &not_fast_object);
1635  __ jmp(&fast_object);
1636  // In case of fast smi-only, convert to fast object, otherwise bail out.
1637  __ bind(&not_fast_object);
1638  __ CheckFastSmiElements(a3, t3, &call_builtin);
 1639  // a1: receiver
 1640  // a3: map
1641  Label try_holey_map;
1642  __ LoadTransitionedArrayMapConditional(FAST_SMI_ELEMENTS,
1643  FAST_ELEMENTS,
1644  a3,
1645  t3,
1646  &try_holey_map);
1647  __ mov(a2, receiver);
 1648  ElementsTransitionGenerator::
 1649  GenerateMapChangeElementsTransition(masm());
 1650  __ jmp(&fast_object);
1651 
1652  __ bind(&try_holey_map);
1653  __ LoadTransitionedArrayMapConditional(FAST_HOLEY_SMI_ELEMENTS,
 1654  FAST_HOLEY_ELEMENTS,
 1655  a3,
1656  t3,
1657  &call_builtin);
1658  __ mov(a2, receiver);
 1659  ElementsTransitionGenerator::
 1660  GenerateMapChangeElementsTransition(masm());
 1661  __ bind(&fast_object);
1662  } else {
1663  __ CheckFastObjectElements(a3, a3, &call_builtin);
1664  }
1665 
1666  // Save new length.
1667  __ sw(v0, FieldMemOperand(receiver, JSArray::kLengthOffset));
1668 
1669  // Store the value.
1670  // We may need a register containing the address end_elements below,
1671  // so write back the value in end_elements.
1672  __ sll(end_elements, v0, kPointerSizeLog2 - kSmiTagSize);
1673  __ Addu(end_elements, elements, end_elements);
1674  __ Addu(end_elements, end_elements, kEndElementsOffset);
1675  __ sw(t0, MemOperand(end_elements));
1676 
1677  __ RecordWrite(elements,
1678  end_elements,
1679  t0,
 1680  kRAHasNotBeenSaved,
 1681  kDontSaveFPRegs,
 1682  EMIT_REMEMBERED_SET,
 1683  OMIT_SMI_CHECK);
1684  __ Drop(argc + 1);
1685  __ Ret();
1686 
1687  __ bind(&attempt_to_grow_elements);
1688  // v0: array's length + 1.
1689  // t0: elements' length.
1690 
1691  if (!FLAG_inline_new) {
1692  __ Branch(&call_builtin);
1693  }
1694 
1695  __ lw(a2, MemOperand(sp, (argc - 1) * kPointerSize));
1696  // Growing elements that are SMI-only requires special handling in case
1697  // the new element is non-Smi. For now, delegate to the builtin.
1698  Label no_fast_elements_check;
1699  __ JumpIfSmi(a2, &no_fast_elements_check);
1700  __ lw(t3, FieldMemOperand(receiver, HeapObject::kMapOffset));
1701  __ CheckFastObjectElements(t3, t3, &call_builtin);
1702  __ bind(&no_fast_elements_check);
1703 
1704  ExternalReference new_space_allocation_top =
1705  ExternalReference::new_space_allocation_top_address(
1706  masm()->isolate());
1707  ExternalReference new_space_allocation_limit =
1708  ExternalReference::new_space_allocation_limit_address(
1709  masm()->isolate());
1710 
1711  const int kAllocationDelta = 4;
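 // Note: growing by four slots rather than one presumably amortizes the
 // allocation-top bump over several consecutive pushes; the unused slots are
 // filled with the hole below so the backing store stays well-formed.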
1712  // Load top and check if it is the end of elements.
1713  __ sll(end_elements, v0, kPointerSizeLog2 - kSmiTagSize);
1714  __ Addu(end_elements, elements, end_elements);
1715  __ Addu(end_elements, end_elements, Operand(kEndElementsOffset));
1716  __ li(t3, Operand(new_space_allocation_top));
1717  __ lw(a3, MemOperand(t3));
1718  __ Branch(&call_builtin, ne, end_elements, Operand(a3));
1719 
1720  __ li(t5, Operand(new_space_allocation_limit));
1721  __ lw(t5, MemOperand(t5));
1722  __ Addu(a3, a3, Operand(kAllocationDelta * kPointerSize));
1723  __ Branch(&call_builtin, hi, a3, Operand(t5));
1724 
1725  // We fit and could grow elements.
1726  // Update new_space_allocation_top.
1727  __ sw(a3, MemOperand(t3));
1728  // Push the argument.
1729  __ sw(a2, MemOperand(end_elements));
1730  // Fill the rest with holes.
1731  __ LoadRoot(a3, Heap::kTheHoleValueRootIndex);
1732  for (int i = 1; i < kAllocationDelta; i++) {
1733  __ sw(a3, MemOperand(end_elements, i * kPointerSize));
1734  }
1735 
1736  // Update elements' and array's sizes.
1737  __ sw(v0, FieldMemOperand(receiver, JSArray::kLengthOffset));
1738  __ Addu(t0, t0, Operand(Smi::FromInt(kAllocationDelta)));
1739  __ sw(t0, FieldMemOperand(elements, FixedArray::kLengthOffset));
1740 
1741  // Elements are in new space, so write barrier is not required.
1742  __ Drop(argc + 1);
1743  __ Ret();
1744  }
1745  __ bind(&call_builtin);
1746  __ TailCallExternalReference(ExternalReference(Builtins::c_ArrayPush,
1747  masm()->isolate()),
1748  argc + 1,
1749  1);
1750  }
1751 
1752  // Handle call cache miss.
1753  __ bind(&miss);
1754  GenerateMissBranch();
1755 
1756  // Return the generated code.
1757  return GetCode(function);
1758 }
1759 
1760 
1761 Handle<Code> CallStubCompiler::CompileArrayPopCall(
1762  Handle<Object> object,
1763  Handle<JSObject> holder,
1764  Handle<JSGlobalPropertyCell> cell,
1765  Handle<JSFunction> function,
1766  Handle<String> name) {
1767  // ----------- S t a t e -------------
1768  // -- a2 : name
1769  // -- ra : return address
1770  // -- sp[(argc - n - 1) * 4] : arg[n] (zero-based)
1771  // -- ...
1772  // -- sp[argc * 4] : receiver
1773  // -----------------------------------
1774 
1775  // If object is not an array, bail out to regular call.
1776  if (!object->IsJSArray() || !cell.is_null()) return Handle<Code>::null();
1777 
1778  Label miss, return_undefined, call_builtin;
1779  Register receiver = a1;
1780  Register elements = a3;
1781  GenerateNameCheck(name, &miss);
1782 
1783  // Get the receiver from the stack.
1784  const int argc = arguments().immediate();
1785  __ lw(receiver, MemOperand(sp, argc * kPointerSize));
1786  // Check that the receiver isn't a smi.
1787  __ JumpIfSmi(receiver, &miss);
1788 
1789  // Check that the maps haven't changed.
1790  CheckPrototypes(Handle<JSObject>::cast(object), receiver, holder, elements,
1791  t0, v0, name, &miss);
1792 
1793  // Get the elements array of the object.
1794  __ lw(elements, FieldMemOperand(receiver, JSArray::kElementsOffset));
1795 
1796  // Check that the elements are in fast mode and writable.
1797  __ CheckMap(elements,
1798  v0,
1799  Heap::kFixedArrayMapRootIndex,
1800  &call_builtin,
 1801  DONT_DO_SMI_CHECK);
 1802 
1803  // Get the array's length into t0 and calculate new length.
1804  __ lw(t0, FieldMemOperand(receiver, JSArray::kLengthOffset));
1805  __ Subu(t0, t0, Operand(Smi::FromInt(1)));
1806  __ Branch(&return_undefined, lt, t0, Operand(zero_reg));
1807 
1808  // Get the last element.
1809  __ LoadRoot(t2, Heap::kTheHoleValueRootIndex);
1810  STATIC_ASSERT(kSmiTagSize == 1);
1811  STATIC_ASSERT(kSmiTag == 0);
1812  // We can't address the last element in one operation. Compute the more
1813  // expensive shift first, and use an offset later on.
1814  __ sll(t1, t0, kPointerSizeLog2 - kSmiTagSize);
1815  __ Addu(elements, elements, t1);
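 // Note: t0 is the decremented length as a smi, so the shift above scales it
 // to a byte offset, and the header-relative load below addresses
 // element[new_length], the element being popped.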
1816  __ lw(v0, FieldMemOperand(elements, FixedArray::kHeaderSize));
1817  __ Branch(&call_builtin, eq, v0, Operand(t2));
1818 
1819  // Set the array's length.
1820  __ sw(t0, FieldMemOperand(receiver, JSArray::kLengthOffset));
1821 
1822  // Fill with the hole.
1823  __ sw(t2, FieldMemOperand(elements, FixedArray::kHeaderSize));
1824  __ Drop(argc + 1);
1825  __ Ret();
1826 
1827  __ bind(&return_undefined);
1828  __ LoadRoot(v0, Heap::kUndefinedValueRootIndex);
1829  __ Drop(argc + 1);
1830  __ Ret();
1831 
1832  __ bind(&call_builtin);
1833  __ TailCallExternalReference(ExternalReference(Builtins::c_ArrayPop,
1834  masm()->isolate()),
1835  argc + 1,
1836  1);
1837 
1838  // Handle call cache miss.
1839  __ bind(&miss);
1840  GenerateMissBranch();
1841 
1842  // Return the generated code.
1843  return GetCode(function);
1844 }
1845 
1846 
1847 Handle<Code> CallStubCompiler::CompileStringCharCodeAtCall(
1848  Handle<Object> object,
1849  Handle<JSObject> holder,
1850  Handle<JSGlobalPropertyCell> cell,
1851  Handle<JSFunction> function,
1852  Handle<String> name) {
1853  // ----------- S t a t e -------------
1854  // -- a2 : function name
1855  // -- ra : return address
1856  // -- sp[(argc - n - 1) * 4] : arg[n] (zero-based)
1857  // -- ...
1858  // -- sp[argc * 4] : receiver
1859  // -----------------------------------
1860 
1861  // If object is not a string, bail out to regular call.
1862  if (!object->IsString() || !cell.is_null()) return Handle<Code>::null();
1863 
1864  const int argc = arguments().immediate();
1865  Label miss;
1866  Label name_miss;
1867  Label index_out_of_range;
1868 
1869  Label* index_out_of_range_label = &index_out_of_range;
1870 
1871  if (kind_ == Code::CALL_IC &&
1872  (CallICBase::StringStubState::decode(extra_state_) ==
 1873  DEFAULT_STRING_STUB)) {
 1874  index_out_of_range_label = &miss;
1875  }
1876 
1877  GenerateNameCheck(name, &name_miss);
1878 
1879  // Check that the maps starting from the prototype haven't changed.
1880  GenerateDirectLoadGlobalFunctionPrototype(masm(),
 1881  Context::STRING_FUNCTION_INDEX,
 1882  v0,
1883  &miss);
1884  ASSERT(!object.is_identical_to(holder));
1885  CheckPrototypes(Handle<JSObject>(JSObject::cast(object->GetPrototype())),
1886  v0, holder, a1, a3, t0, name, &miss);
1887 
1888  Register receiver = a1;
1889  Register index = t1;
1890  Register result = v0;
1891  __ lw(receiver, MemOperand(sp, argc * kPointerSize));
1892  if (argc > 0) {
1893  __ lw(index, MemOperand(sp, (argc - 1) * kPointerSize));
1894  } else {
1895  __ LoadRoot(index, Heap::kUndefinedValueRootIndex);
1896  }
1897 
1898  StringCharCodeAtGenerator generator(receiver,
1899  index,
1900  result,
1901  &miss, // When not a string.
1902  &miss, // When not a number.
1903  index_out_of_range_label,
 1904  STRING_INDEX_IS_NUMBER);
 1905  generator.GenerateFast(masm());
1906  __ Drop(argc + 1);
1907  __ Ret();
1908 
1909  StubRuntimeCallHelper call_helper;
1910  generator.GenerateSlow(masm(), call_helper);
1911 
1912  if (index_out_of_range.is_linked()) {
1913  __ bind(&index_out_of_range);
1914  __ LoadRoot(v0, Heap::kNanValueRootIndex);
1915  __ Drop(argc + 1);
1916  __ Ret();
1917  }
1918 
1919  __ bind(&miss);
1920  // Restore function name in a2.
1921  __ li(a2, name);
1922  __ bind(&name_miss);
1923  GenerateMissBranch();
1924 
1925  // Return the generated code.
1926  return GetCode(function);
1927 }
1928 
1929 
1930 Handle<Code> CallStubCompiler::CompileStringCharAtCall(
1931  Handle<Object> object,
1932  Handle<JSObject> holder,
1933  Handle<JSGlobalPropertyCell> cell,
1934  Handle<JSFunction> function,
1935  Handle<String> name) {
1936  // ----------- S t a t e -------------
1937  // -- a2 : function name
1938  // -- ra : return address
1939  // -- sp[(argc - n - 1) * 4] : arg[n] (zero-based)
1940  // -- ...
1941  // -- sp[argc * 4] : receiver
1942  // -----------------------------------
1943 
1944  // If object is not a string, bail out to regular call.
1945  if (!object->IsString() || !cell.is_null()) return Handle<Code>::null();
1946 
1947  const int argc = arguments().immediate();
1948  Label miss;
1949  Label name_miss;
1950  Label index_out_of_range;
1951  Label* index_out_of_range_label = &index_out_of_range;
1952  if (kind_ == Code::CALL_IC &&
1953  (CallICBase::StringStubState::decode(extra_state_) ==
 1954  DEFAULT_STRING_STUB)) {
 1955  index_out_of_range_label = &miss;
1956  }
1957  GenerateNameCheck(name, &name_miss);
1958 
1959  // Check that the maps starting from the prototype haven't changed.
1960  GenerateDirectLoadGlobalFunctionPrototype(masm(),
 1961  Context::STRING_FUNCTION_INDEX,
 1962  v0,
1963  &miss);
1964  ASSERT(!object.is_identical_to(holder));
1965  CheckPrototypes(Handle<JSObject>(JSObject::cast(object->GetPrototype())),
1966  v0, holder, a1, a3, t0, name, &miss);
1967 
1968  Register receiver = v0;
1969  Register index = t1;
1970  Register scratch = a3;
1971  Register result = v0;
1972  __ lw(receiver, MemOperand(sp, argc * kPointerSize));
1973  if (argc > 0) {
1974  __ lw(index, MemOperand(sp, (argc - 1) * kPointerSize));
1975  } else {
1976  __ LoadRoot(index, Heap::kUndefinedValueRootIndex);
1977  }
1978 
1979  StringCharAtGenerator generator(receiver,
1980  index,
1981  scratch,
1982  result,
1983  &miss, // When not a string.
1984  &miss, // When not a number.
1985  index_out_of_range_label,
 1986  STRING_INDEX_IS_NUMBER);
 1987  generator.GenerateFast(masm());
1988  __ Drop(argc + 1);
1989  __ Ret();
1990 
1991  StubRuntimeCallHelper call_helper;
1992  generator.GenerateSlow(masm(), call_helper);
1993 
1994  if (index_out_of_range.is_linked()) {
1995  __ bind(&index_out_of_range);
1996  __ LoadRoot(v0, Heap::kEmptyStringRootIndex);
1997  __ Drop(argc + 1);
1998  __ Ret();
1999  }
2000 
2001  __ bind(&miss);
2002  // Restore function name in a2.
2003  __ li(a2, name);
2004  __ bind(&name_miss);
2005  GenerateMissBranch();
2006 
2007  // Return the generated code.
2008  return GetCode(function);
2009 }
2010 
2011 
2012 Handle<Code> CallStubCompiler::CompileStringFromCharCodeCall(
2013  Handle<Object> object,
2014  Handle<JSObject> holder,
2015  Handle<JSGlobalPropertyCell> cell,
2016  Handle<JSFunction> function,
2017  Handle<String> name) {
2018  // ----------- S t a t e -------------
2019  // -- a2 : function name
2020  // -- ra : return address
2021  // -- sp[(argc - n - 1) * 4] : arg[n] (zero-based)
2022  // -- ...
2023  // -- sp[argc * 4] : receiver
2024  // -----------------------------------
2025 
2026  const int argc = arguments().immediate();
2027 
2028  // If the object is not a JSObject or we got an unexpected number of
2029  // arguments, bail out to the regular call.
2030  if (!object->IsJSObject() || argc != 1) return Handle<Code>::null();
2031 
2032  Label miss;
2033  GenerateNameCheck(name, &miss);
2034 
2035  if (cell.is_null()) {
2036  __ lw(a1, MemOperand(sp, 1 * kPointerSize));
2037 
2038  STATIC_ASSERT(kSmiTag == 0);
2039  __ JumpIfSmi(a1, &miss);
2040 
2041  CheckPrototypes(Handle<JSObject>::cast(object), a1, holder, v0, a3, t0,
2042  name, &miss);
2043  } else {
2044  ASSERT(cell->value() == *function);
2045  GenerateGlobalReceiverCheck(Handle<JSObject>::cast(object), holder, name,
2046  &miss);
2047  GenerateLoadFunctionFromCell(cell, function, &miss);
2048  }
2049 
2050  // Load the char code argument.
2051  Register code = a1;
2052  __ lw(code, MemOperand(sp, 0 * kPointerSize));
2053 
2054  // Check the code is a smi.
2055  Label slow;
2056  STATIC_ASSERT(kSmiTag == 0);
2057  __ JumpIfNotSmi(code, &slow);
2058 
2059  // Convert the smi code to uint16.
2060  __ And(code, code, Operand(Smi::FromInt(0xffff)));
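 // Note: Smi::FromInt(0xffff) is 0xffff << 1, so the mask keeps the low 16
 // payload bits and the zero tag bit; this performs the ToUint16 truncation
 // String.fromCharCode requires without ever untagging the smi.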
2061 
2062  StringCharFromCodeGenerator generator(code, v0);
2063  generator.GenerateFast(masm());
2064  __ Drop(argc + 1);
2065  __ Ret();
2066 
2067  StubRuntimeCallHelper call_helper;
2068  generator.GenerateSlow(masm(), call_helper);
2069 
2070  // Tail call the full function. We do not have to patch the receiver
2071  // because the function makes no use of it.
2072  __ bind(&slow);
2073  __ InvokeFunction(
2074  function, arguments(), JUMP_FUNCTION, NullCallWrapper(), CALL_AS_METHOD);
2075 
2076  __ bind(&miss);
2077  // a2: function name.
2078  GenerateMissBranch();
2079 
2080  // Return the generated code.
2081  return cell.is_null() ? GetCode(function) : GetCode(NORMAL, name);
2082 }
2083 
2084 
2085 Handle<Code> CallStubCompiler::CompileMathFloorCall(
2086  Handle<Object> object,
2087  Handle<JSObject> holder,
2088  Handle<JSGlobalPropertyCell> cell,
2089  Handle<JSFunction> function,
2090  Handle<String> name) {
2091  // ----------- S t a t e -------------
2092  // -- a2 : function name
2093  // -- ra : return address
2094  // -- sp[(argc - n - 1) * 4] : arg[n] (zero-based)
2095  // -- ...
2096  // -- sp[argc * 4] : receiver
2097  // -----------------------------------
2098 
2099  if (!CpuFeatures::IsSupported(FPU)) {
2100  return Handle<Code>::null();
2101  }
2102 
2103  CpuFeatures::Scope scope_fpu(FPU);
2104  const int argc = arguments().immediate();
2105  // If the object is not a JSObject or we got an unexpected number of
2106  // arguments, bail out to the regular call.
2107  if (!object->IsJSObject() || argc != 1) return Handle<Code>::null();
2108 
2109  Label miss, slow;
2110  GenerateNameCheck(name, &miss);
2111 
2112  if (cell.is_null()) {
2113  __ lw(a1, MemOperand(sp, 1 * kPointerSize));
2114  STATIC_ASSERT(kSmiTag == 0);
2115  __ JumpIfSmi(a1, &miss);
2116  CheckPrototypes(Handle<JSObject>::cast(object), a1, holder, a0, a3, t0,
2117  name, &miss);
2118  } else {
2119  ASSERT(cell->value() == *function);
2120  GenerateGlobalReceiverCheck(Handle<JSObject>::cast(object), holder, name,
2121  &miss);
2122  GenerateLoadFunctionFromCell(cell, function, &miss);
2123  }
2124 
2125  // Load the (only) argument into v0.
2126  __ lw(v0, MemOperand(sp, 0 * kPointerSize));
2127 
2128  // If the argument is a smi, just return.
2129  STATIC_ASSERT(kSmiTag == 0);
2130  __ And(t0, v0, Operand(kSmiTagMask));
2131  __ Drop(argc + 1, eq, t0, Operand(zero_reg));
2132  __ Ret(eq, t0, Operand(zero_reg));
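 // Note: floor of an integer is the identity, so the conditional Drop and
 // Ret above return the argument unchanged whenever its tag bit is zero,
 // i.e. whenever it is already a smi.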
2133 
2134  __ CheckMap(v0, a1, Heap::kHeapNumberMapRootIndex, &slow, DONT_DO_SMI_CHECK);
2135 
2136  Label wont_fit_smi, no_fpu_error, restore_fcsr_and_return;
2137 
2138  // If fpu is enabled, we use the floor instruction.
2139 
2140  // Load the HeapNumber value.
 2141  __ ldc1(f0, FieldMemOperand(v0, HeapNumber::kValueOffset));
 2142 
2143  // Backup FCSR.
2144  __ cfc1(a3, FCSR);
2145  // Clearing FCSR clears the exception mask with no side-effects.
2146  __ ctc1(zero_reg, FCSR);
2147  // Convert the argument to an integer.
2148  __ floor_w_d(f0, f0);
2149 
2150  // Start checking for special cases.
2151  // Get the argument exponent and clear the sign bit.
2152  __ lw(t1, FieldMemOperand(v0, HeapNumber::kValueOffset + kPointerSize));
2153  __ And(t2, t1, Operand(~HeapNumber::kSignMask));
2154  __ srl(t2, t2, HeapNumber::kMantissaBitsInTopWord);
2155 
2156  // Retrieve FCSR and check for fpu errors.
2157  __ cfc1(t5, FCSR);
2158  __ And(t5, t5, Operand(kFCSRExceptionFlagMask));
2159  __ Branch(&no_fpu_error, eq, t5, Operand(zero_reg));
2160 
2161  // Check for NaN, Infinity, and -Infinity.
2162  // They are invariant through a Math.Floor call, so just
2163  // return the original argument.
2164  __ Subu(t3, t2, Operand(HeapNumber::kExponentMask
2165  >> HeapNumber::kMantissaBitsInTopWord));
2166  __ Branch(&restore_fcsr_and_return, eq, t3, Operand(zero_reg));
2167  // We had an overflow or underflow in the conversion. Check if we
2168  // have a big exponent.
2169  // If greater or equal, the argument is already round and in v0.
2170  __ Branch(&restore_fcsr_and_return, ge, t3,
2171  Operand(HeapNumber::kMantissaBits));
2172  __ Branch(&wont_fit_smi);
2173 
2174  __ bind(&no_fpu_error);
2175  // Move the result back to v0.
2176  __ mfc1(v0, f0);
2177  // Check if the result fits into a smi.
2178  __ Addu(a1, v0, Operand(0x40000000));
2179  __ Branch(&wont_fit_smi, lt, a1, Operand(zero_reg));
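 // Note: adding 0x40000000 sets the sign bit exactly when the 32-bit result
 // lies outside [-2^30, 2^30 - 1], the range that survives the one-bit smi
 // tag shift below.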
2180  // Tag the result.
2181  STATIC_ASSERT(kSmiTag == 0);
2182  __ sll(v0, v0, kSmiTagSize);
2183 
2184  // Check for -0.
2185  __ Branch(&restore_fcsr_and_return, ne, v0, Operand(zero_reg));
2186  // t1 already holds the HeapNumber exponent.
2187  __ And(t0, t1, Operand(HeapNumber::kSignMask));
2188  // If our HeapNumber is negative it was -0, so load its address and return.
2189  // Else v0 is loaded with 0, so we can also just return.
2190  __ Branch(&restore_fcsr_and_return, eq, t0, Operand(zero_reg));
2191  __ lw(v0, MemOperand(sp, 0 * kPointerSize));
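 // Note: a zero result with the sign bit set can only come from -0, which is
 // not representable as a smi, so the original heap number is reloaded from
 // the stack and returned as-is.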
2192 
2193  __ bind(&restore_fcsr_and_return);
2194  // Restore FCSR and return.
2195  __ ctc1(a3, FCSR);
2196 
2197  __ Drop(argc + 1);
2198  __ Ret();
2199 
2200  __ bind(&wont_fit_smi);
2201  // Restore FCSR and fall to slow case.
2202  __ ctc1(a3, FCSR);
2203 
2204  __ bind(&slow);
2205  // Tail call the full function. We do not have to patch the receiver
2206  // because the function makes no use of it.
2207  __ InvokeFunction(
2208  function, arguments(), JUMP_FUNCTION, NullCallWrapper(), CALL_AS_METHOD);
2209 
2210  __ bind(&miss);
2211  // a2: function name.
2212  GenerateMissBranch();
2213 
2214  // Return the generated code.
2215  return cell.is_null() ? GetCode(function) : GetCode(NORMAL, name);
2216 }
2217 
2218 
2219 Handle<Code> CallStubCompiler::CompileMathAbsCall(
2220  Handle<Object> object,
2221  Handle<JSObject> holder,
2222  Handle<JSGlobalPropertyCell> cell,
2223  Handle<JSFunction> function,
2224  Handle<String> name) {
2225  // ----------- S t a t e -------------
2226  // -- a2 : function name
2227  // -- ra : return address
2228  // -- sp[(argc - n - 1) * 4] : arg[n] (zero-based)
2229  // -- ...
2230  // -- sp[argc * 4] : receiver
2231  // -----------------------------------
2232 
2233  const int argc = arguments().immediate();
2234  // If the object is not a JSObject or we got an unexpected number of
2235  // arguments, bail out to the regular call.
2236  if (!object->IsJSObject() || argc != 1) return Handle<Code>::null();
2237 
2238  Label miss;
2239 
2240  GenerateNameCheck(name, &miss);
2241  if (cell.is_null()) {
2242  __ lw(a1, MemOperand(sp, 1 * kPointerSize));
2243  STATIC_ASSERT(kSmiTag == 0);
2244  __ JumpIfSmi(a1, &miss);
2245  CheckPrototypes(Handle<JSObject>::cast(object), a1, holder, v0, a3, t0,
2246  name, &miss);
2247  } else {
2248  ASSERT(cell->value() == *function);
2249  GenerateGlobalReceiverCheck(Handle<JSObject>::cast(object), holder, name,
2250  &miss);
2251  GenerateLoadFunctionFromCell(cell, function, &miss);
2252  }
2253 
2254  // Load the (only) argument into v0.
2255  __ lw(v0, MemOperand(sp, 0 * kPointerSize));
2256 
2257  // Check if the argument is a smi.
2258  Label not_smi;
2259  STATIC_ASSERT(kSmiTag == 0);
2260  __ JumpIfNotSmi(v0, &not_smi);
2261 
2262  // Do bitwise not or do nothing depending on the sign of the
2263  // argument.
2264  __ sra(t0, v0, kBitsPerInt - 1);
2265  __ Xor(a1, v0, t0);
2266 
2267  // Add 1 or do nothing depending on the sign of the argument.
2268  __ Subu(v0, a1, t0);
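 // Note: this is the classic branchless abs: t0 = x >> 31 is 0 for x >= 0
 // and -1 for x < 0, so (x ^ t0) - t0 yields x or its two's-complement
 // negation. It operates directly on the tagged smi, which is the true
 // value shifted left by one bit.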
2269 
2270  // If the result is still negative, go to the slow case.
2271  // This only happens for the most negative smi.
2272  Label slow;
2273  __ Branch(&slow, lt, v0, Operand(zero_reg));
2274 
2275  // Smi case done.
2276  __ Drop(argc + 1);
2277  __ Ret();
2278 
2279  // Check if the argument is a heap number and load its exponent and
2280  // sign.
2281  __ bind(&not_smi);
2282  __ CheckMap(v0, a1, Heap::kHeapNumberMapRootIndex, &slow, DONT_DO_SMI_CHECK);
 2283  __ lw(a1, FieldMemOperand(v0, HeapNumber::kExponentOffset));
 2284 
2285  // Check the sign of the argument. If the argument is positive,
2286  // just return it.
2287  Label negative_sign;
2288  __ And(t0, a1, Operand(HeapNumber::kSignMask));
2289  __ Branch(&negative_sign, ne, t0, Operand(zero_reg));
2290  __ Drop(argc + 1);
2291  __ Ret();
2292 
2293  // If the argument is negative, clear the sign, and return a new
2294  // number.
2295  __ bind(&negative_sign);
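 // Note: for a heap number, abs only requires clearing the IEEE 754 sign bit
 // in the exponent word; a fresh heap number is allocated below because heap
 // numbers are treated as immutable once handed out.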
2296  __ Xor(a1, a1, Operand(HeapNumber::kSignMask));
 2297  __ lw(a3, FieldMemOperand(v0, HeapNumber::kMantissaOffset));
 2298  __ LoadRoot(t2, Heap::kHeapNumberMapRootIndex);
2299  __ AllocateHeapNumber(v0, t0, t1, t2, &slow);
 2300  __ sw(a1, FieldMemOperand(v0, HeapNumber::kExponentOffset));
 2301  __ sw(a3, FieldMemOperand(v0, HeapNumber::kMantissaOffset));
 2302  __ Drop(argc + 1);
2303  __ Ret();
2304 
2305  // Tail call the full function. We do not have to patch the receiver
2306  // because the function makes no use of it.
2307  __ bind(&slow);
2308  __ InvokeFunction(
2309  function, arguments(), JUMP_FUNCTION, NullCallWrapper(), CALL_AS_METHOD);
2310 
2311  __ bind(&miss);
2312  // a2: function name.
2313  GenerateMissBranch();
2314 
2315  // Return the generated code.
2316  return cell.is_null() ? GetCode(function) : GetCode(NORMAL, name);
2317 }
2318 
2319 
2320 Handle<Code> CallStubCompiler::CompileFastApiCall(
2321  const CallOptimization& optimization,
2322  Handle<Object> object,
2323  Handle<JSObject> holder,
2324  Handle<JSGlobalPropertyCell> cell,
2325  Handle<JSFunction> function,
2326  Handle<String> name) {
2327 
2328  Counters* counters = isolate()->counters();
2329 
2330  ASSERT(optimization.is_simple_api_call());
2331  // Bail out if object is a global object as we don't want to
2332  // repatch it to global receiver.
2333  if (object->IsGlobalObject()) return Handle<Code>::null();
2334  if (!cell.is_null()) return Handle<Code>::null();
2335  if (!object->IsJSObject()) return Handle<Code>::null();
2336  int depth = optimization.GetPrototypeDepthOfExpectedType(
2337  Handle<JSObject>::cast(object), holder);
2338  if (depth == kInvalidProtoDepth) return Handle<Code>::null();
2339 
2340  Label miss, miss_before_stack_reserved;
2341 
2342  GenerateNameCheck(name, &miss_before_stack_reserved);
2343 
2344  // Get the receiver from the stack.
2345  const int argc = arguments().immediate();
2346  __ lw(a1, MemOperand(sp, argc * kPointerSize));
2347 
2348  // Check that the receiver isn't a smi.
2349  __ JumpIfSmi(a1, &miss_before_stack_reserved);
2350 
2351  __ IncrementCounter(counters->call_const(), 1, a0, a3);
2352  __ IncrementCounter(counters->call_const_fast_api(), 1, a0, a3);
2353 
2354  ReserveSpaceForFastApiCall(masm(), a0);
2355 
2356  // Check that the maps haven't changed and find a Holder as a side effect.
2357  CheckPrototypes(Handle<JSObject>::cast(object), a1, holder, a0, a3, t0, name,
2358  depth, &miss);
2359 
2360  GenerateFastApiDirectCall(masm(), optimization, argc);
2361 
2362  __ bind(&miss);
2363  FreeSpaceForFastApiCall(masm());
2364 
2365  __ bind(&miss_before_stack_reserved);
2366  GenerateMissBranch();
2367 
2368  // Return the generated code.
2369  return GetCode(function);
2370 }
2371 
2372 
2373 Handle<Code> CallStubCompiler::CompileCallConstant(Handle<Object> object,
2374  Handle<JSObject> holder,
2375  Handle<JSFunction> function,
2376  Handle<String> name,
2377  CheckType check) {
2378  // ----------- S t a t e -------------
2379  // -- a2 : name
2380  // -- ra : return address
2381  // -----------------------------------
2382  if (HasCustomCallGenerator(function)) {
2383  Handle<Code> code = CompileCustomCall(object, holder,
2384  Handle<JSGlobalPropertyCell>::null(),
2385  function, name);
2386  // A null handle means bail out to the regular compiler code below.
2387  if (!code.is_null()) return code;
2388  }
2389 
2390  Label miss;
2391 
2392  GenerateNameCheck(name, &miss);
2393 
2394  // Get the receiver from the stack.
2395  const int argc = arguments().immediate();
2396  __ lw(a1, MemOperand(sp, argc * kPointerSize));
2397 
2398  // Check that the receiver isn't a smi.
2399  if (check != NUMBER_CHECK) {
2400  __ JumpIfSmi(a1, &miss);
2401  }
2402 
2403  // Make sure that it's okay not to patch the on stack receiver
2404  // unless we're doing a receiver map check.
2405  ASSERT(!object->IsGlobalObject() || check == RECEIVER_MAP_CHECK);
2406  switch (check) {
2407  case RECEIVER_MAP_CHECK:
2408  __ IncrementCounter(masm()->isolate()->counters()->call_const(),
2409  1, a0, a3);
2410 
2411  // Check that the maps haven't changed.
2412  CheckPrototypes(Handle<JSObject>::cast(object), a1, holder, a0, a3, t0,
2413  name, &miss);
2414 
2415  // Patch the receiver on the stack with the global proxy if
2416  // necessary.
2417  if (object->IsGlobalObject()) {
 2418  __ lw(a3, FieldMemOperand(a1, GlobalObject::kGlobalReceiverOffset));
 2419  __ sw(a3, MemOperand(sp, argc * kPointerSize));
2420  }
2421  break;
2422 
2423  case STRING_CHECK:
2424  if (function->IsBuiltin() || !function->shared()->is_classic_mode()) {
2425  // Check that the object is a two-byte string or a symbol.
2426  __ GetObjectType(a1, a3, a3);
2427  __ Branch(&miss, Ugreater_equal, a3, Operand(FIRST_NONSTRING_TYPE));
2428  // Check that the maps starting from the prototype haven't changed.
2429  GenerateDirectLoadGlobalFunctionPrototype(
2430  masm(), Context::STRING_FUNCTION_INDEX, a0, &miss);
2431  CheckPrototypes(
2432  Handle<JSObject>(JSObject::cast(object->GetPrototype())),
2433  a0, holder, a3, a1, t0, name, &miss);
2434  } else {
2435  // Calling non-strict non-builtins with a value as the receiver
2436  // requires boxing.
2437  __ jmp(&miss);
2438  }
2439  break;
2440 
2441  case NUMBER_CHECK:
2442  if (function->IsBuiltin() || !function->shared()->is_classic_mode()) {
2443  Label fast;
2444  // Check that the object is a smi or a heap number.
2445  __ JumpIfSmi(a1, &fast);
2446  __ GetObjectType(a1, a0, a0);
2447  __ Branch(&miss, ne, a0, Operand(HEAP_NUMBER_TYPE));
2448  __ bind(&fast);
2449  // Check that the maps starting from the prototype haven't changed.
2450  GenerateDirectLoadGlobalFunctionPrototype(
2451  masm(), Context::NUMBER_FUNCTION_INDEX, a0, &miss);
2452  CheckPrototypes(
2453  Handle<JSObject>(JSObject::cast(object->GetPrototype())),
2454  a0, holder, a3, a1, t0, name, &miss);
2455  } else {
2456  // Calling non-strict non-builtins with a value as the receiver
2457  // requires boxing.
2458  __ jmp(&miss);
2459  }
2460  break;
2461 
2462  case BOOLEAN_CHECK:
2463  if (function->IsBuiltin() || !function->shared()->is_classic_mode()) {
2464  Label fast;
2465  // Check that the object is a boolean.
2466  __ LoadRoot(t0, Heap::kTrueValueRootIndex);
2467  __ Branch(&fast, eq, a1, Operand(t0));
2468  __ LoadRoot(t0, Heap::kFalseValueRootIndex);
2469  __ Branch(&miss, ne, a1, Operand(t0));
2470  __ bind(&fast);
2471  // Check that the maps starting from the prototype haven't changed.
2472  GenerateDirectLoadGlobalFunctionPrototype(
2473  masm(), Context::BOOLEAN_FUNCTION_INDEX, a0, &miss);
2474  CheckPrototypes(
2475  Handle<JSObject>(JSObject::cast(object->GetPrototype())),
2476  a0, holder, a3, a1, t0, name, &miss);
2477  } else {
2478  // Calling non-strict non-builtins with a value as the receiver
2479  // requires boxing.
2480  __ jmp(&miss);
2481  }
2482  break;
2483  }
2484 
2485  CallKind call_kind = CallICBase::Contextual::decode(extra_state_)
 2486  ? CALL_AS_FUNCTION
 2487  : CALL_AS_METHOD;
2488  __ InvokeFunction(
2489  function, arguments(), JUMP_FUNCTION, NullCallWrapper(), call_kind);
2490 
2491  // Handle call cache miss.
2492  __ bind(&miss);
2493 
2494  GenerateMissBranch();
2495 
2496  // Return the generated code.
2497  return GetCode(function);
2498 }
2499 
2500 
2501 Handle<Code> CallStubCompiler::CompileCallInterceptor(Handle<JSObject> object,
2502  Handle<JSObject> holder,
2503  Handle<String> name) {
2504  // ----------- S t a t e -------------
2505  // -- a2 : name
2506  // -- ra : return address
2507  // -----------------------------------
2508 
2509  Label miss;
2510 
2511  GenerateNameCheck(name, &miss);
2512 
2513  // Get the number of arguments.
2514  const int argc = arguments().immediate();
2515  LookupResult lookup(isolate());
2516  LookupPostInterceptor(holder, name, &lookup);
2517 
2518  // Get the receiver from the stack.
2519  __ lw(a1, MemOperand(sp, argc * kPointerSize));
2520 
2521  CallInterceptorCompiler compiler(this, arguments(), a2, extra_state_);
2522  compiler.Compile(masm(), object, holder, name, &lookup, a1, a3, t0, a0,
2523  &miss);
2524 
2525  // Move returned value, the function to call, to a1.
2526  __ mov(a1, v0);
2527  // Restore receiver.
2528  __ lw(a0, MemOperand(sp, argc * kPointerSize));
2529 
2530  GenerateCallFunction(masm(), object, arguments(), &miss, extra_state_);
2531 
2532  // Handle call cache miss.
2533  __ bind(&miss);
2534  GenerateMissBranch();
2535 
2536  // Return the generated code.
2537  return GetCode(INTERCEPTOR, name);
2538 }
2539 
2540 
 2541 Handle<Code> CallStubCompiler::CompileCallGlobal(
 2542  Handle<JSObject> object,
2543  Handle<GlobalObject> holder,
2544  Handle<JSGlobalPropertyCell> cell,
2545  Handle<JSFunction> function,
2546  Handle<String> name) {
2547  // ----------- S t a t e -------------
2548  // -- a2 : name
2549  // -- ra : return address
2550  // -----------------------------------
2551 
2552  if (HasCustomCallGenerator(function)) {
2553  Handle<Code> code = CompileCustomCall(object, holder, cell, function, name);
2554  // A null handle means bail out to the regular compiler code below.
2555  if (!code.is_null()) return code;
2556  }
2557 
2558  Label miss;
2559  GenerateNameCheck(name, &miss);
2560 
2561  // Get the number of arguments.
2562  const int argc = arguments().immediate();
2563  GenerateGlobalReceiverCheck(object, holder, name, &miss);
2564  GenerateLoadFunctionFromCell(cell, function, &miss);
2565 
2566  // Patch the receiver on the stack with the global proxy if
2567  // necessary.
2568  if (object->IsGlobalObject()) {
 2569  __ lw(a3, FieldMemOperand(a1, GlobalObject::kGlobalReceiverOffset));
 2570  __ sw(a3, MemOperand(sp, argc * kPointerSize));
2571  }
2572 
 2573  // Set up the context (function already in a1).
 2574  __ lw(cp, FieldMemOperand(a1, JSFunction::kContextOffset));
 2575 
2576  // Jump to the cached code (tail call).
2577  Counters* counters = masm()->isolate()->counters();
2578  __ IncrementCounter(counters->call_global_inline(), 1, a3, t0);
2579  ParameterCount expected(function->shared()->formal_parameter_count());
2580  CallKind call_kind = CallICBase::Contextual::decode(extra_state_)
 2581  ? CALL_AS_FUNCTION
 2582  : CALL_AS_METHOD;
2583  // We call indirectly through the code field in the function to
2584  // allow recompilation to take effect without changing any of the
2585  // call sites.
 2586  __ lw(a3, FieldMemOperand(a1, JSFunction::kCodeEntryOffset));
 2587  __ InvokeCode(a3, expected, arguments(), JUMP_FUNCTION,
2588  NullCallWrapper(), call_kind);
2589 
2590  // Handle call cache miss.
2591  __ bind(&miss);
2592  __ IncrementCounter(counters->call_global_inline_miss(), 1, a1, a3);
2593  GenerateMissBranch();
2594 
2595  // Return the generated code.
2596  return GetCode(NORMAL, name);
2597 }
2598 
2599 
2600 Handle<Code> StoreStubCompiler::CompileStoreField(Handle<JSObject> object,
2601  int index,
2602  Handle<Map> transition,
2603  Handle<String> name) {
2604  // ----------- S t a t e -------------
2605  // -- a0 : value
2606  // -- a1 : receiver
2607  // -- a2 : name
2608  // -- ra : return address
2609  // -----------------------------------
2610  Label miss;
2611 
2612  // Name register might be clobbered.
2613  GenerateStoreField(masm(),
2614  object,
2615  index,
2616  transition,
2617  name,
2618  a1, a2, a3, t0,
2619  &miss);
2620  __ bind(&miss);
2621  __ li(a2, Operand(Handle<String>(name))); // Restore name.
 2622  Handle<Code> ic = masm()->isolate()->builtins()->StoreIC_Miss();
2623  __ Jump(ic, RelocInfo::CODE_TARGET);
2624 
2625  // Return the generated code.
2626  return GetCode(transition.is_null() ? FIELD : MAP_TRANSITION, name);
2627 }
2628 
2629 
 2630 Handle<Code> StoreStubCompiler::CompileStoreCallback(
 2631  Handle<JSObject> object,
2632  Handle<AccessorInfo> callback,
2633  Handle<String> name) {
2634  // ----------- S t a t e -------------
2635  // -- a0 : value
2636  // -- a1 : receiver
2637  // -- a2 : name
2638  // -- ra : return address
2639  // -----------------------------------
2640  Label miss;
2641 
2642  // Check that the map of the object hasn't changed.
2643  __ CheckMap(a1, a3, Handle<Map>(object->map()), &miss,
 2644  DO_SMI_CHECK, ALLOW_ELEMENT_TRANSITION_MAPS);
 2645 
2646  // Perform global security token check if needed.
2647  if (object->IsJSGlobalProxy()) {
2648  __ CheckAccessGlobalProxy(a1, a3, &miss);
2649  }
2650 
2651  // Stub never generated for non-global objects that require access
2652  // checks.
2653  ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());
2654 
2655  __ push(a1); // Receiver.
2656  __ li(a3, Operand(callback)); // Callback info.
2657  __ Push(a3, a2, a0);
2658 
2659  // Do tail-call to the runtime system.
2660  ExternalReference store_callback_property =
2661  ExternalReference(IC_Utility(IC::kStoreCallbackProperty),
2662  masm()->isolate());
2663  __ TailCallExternalReference(store_callback_property, 4, 1);
2664 
2665  // Handle store cache miss.
2666  __ bind(&miss);
2667  Handle<Code> ic = masm()->isolate()->builtins()->StoreIC_Miss();
2668  __ Jump(ic, RelocInfo::CODE_TARGET);
2669 
2670  // Return the generated code.
2671  return GetCode(CALLBACKS, name);
2672 }
2673 
2674 
 2675 Handle<Code> StoreStubCompiler::CompileStoreViaSetter(
 2676  Handle<JSObject> receiver,
2677  Handle<JSFunction> setter,
2678  Handle<String> name) {
2679  // ----------- S t a t e -------------
2680  // -- a0 : value
2681  // -- a1 : receiver
2682  // -- a2 : name
2683  // -- ra : return address
2684  // -----------------------------------
2685  Label miss;
2686 
2687  // Check that the map of the object hasn't changed.
2688  __ CheckMap(a1, a3, Handle<Map>(receiver->map()), &miss, DO_SMI_CHECK,
 2689  ALLOW_ELEMENT_TRANSITION_MAPS);
 2690 
2691  {
2692  FrameScope scope(masm(), StackFrame::INTERNAL);
2693 
2694  // Save value register, so we can restore it later.
2695  __ push(a0);
2696 
 2697  // Call the JavaScript setter with the receiver and the value on the stack.
2698  __ push(a1);
2699  __ push(a0);
2700  ParameterCount actual(1);
2701  __ InvokeFunction(setter, actual, CALL_FUNCTION, NullCallWrapper(),
2702  CALL_AS_METHOD);
2703 
2704  // We have to return the passed value, not the return value of the setter.
2705  __ pop(v0);
2706 
2707  // Restore context register.
 2708  __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
 2709  }
2710  __ Ret();
2711 
2712  __ bind(&miss);
2713  Handle<Code> ic = masm()->isolate()->builtins()->StoreIC_Miss();
2714  __ Jump(ic, RelocInfo::CODE_TARGET);
2715 
2716  // Return the generated code.
2717  return GetCode(CALLBACKS, name);
2718 }
2719 
2720 
 2721 Handle<Code> StoreStubCompiler::CompileStoreInterceptor(
 2722  Handle<JSObject> receiver,
2723  Handle<String> name) {
2724  // ----------- S t a t e -------------
2725  // -- a0 : value
2726  // -- a1 : receiver
2727  // -- a2 : name
2728  // -- ra : return address
2729  // -----------------------------------
2730  Label miss;
2731 
2732  // Check that the map of the object hasn't changed.
2733  __ CheckMap(a1, a3, Handle<Map>(receiver->map()), &miss,
 2734  DO_SMI_CHECK, ALLOW_ELEMENT_TRANSITION_MAPS);
 2735 
2736  // Perform global security token check if needed.
2737  if (receiver->IsJSGlobalProxy()) {
2738  __ CheckAccessGlobalProxy(a1, a3, &miss);
2739  }
2740 
2741  // Stub is never generated for non-global objects that require access
2742  // checks.
2743  ASSERT(receiver->IsJSGlobalProxy() || !receiver->IsAccessCheckNeeded());
2744 
2745  __ Push(a1, a2, a0); // Receiver, name, value.
2746 
2747  __ li(a0, Operand(Smi::FromInt(strict_mode_)));
2748  __ push(a0); // Strict mode.
2749 
2750  // Do tail-call to the runtime system.
2751  ExternalReference store_ic_property =
2752  ExternalReference(IC_Utility(IC::kStoreInterceptorProperty),
2753  masm()->isolate());
2754  __ TailCallExternalReference(store_ic_property, 4, 1);
2755 
2756  // Handle store cache miss.
2757  __ bind(&miss);
 2758  Handle<Code> ic = masm()->isolate()->builtins()->StoreIC_Miss();
2759  __ Jump(ic, RelocInfo::CODE_TARGET);
2760 
2761  // Return the generated code.
2762  return GetCode(INTERCEPTOR, name);
2763 }
2764 
2765 
 2766 Handle<Code> StoreStubCompiler::CompileStoreGlobal(
 2767  Handle<GlobalObject> object,
2768  Handle<JSGlobalPropertyCell> cell,
2769  Handle<String> name) {
2770  // ----------- S t a t e -------------
2771  // -- a0 : value
2772  // -- a1 : receiver
2773  // -- a2 : name
2774  // -- ra : return address
2775  // -----------------------------------
2776  Label miss;
2777 
2778  // Check that the map of the global has not changed.
 2779  __ lw(a3, FieldMemOperand(a1, HeapObject::kMapOffset));
 2780  __ Branch(&miss, ne, a3, Operand(Handle<Map>(object->map())));
2781 
2782  // Check that the value in the cell is not the hole. If it is, this
2783  // cell could have been deleted and reintroducing the global needs
2784  // to update the property details in the property dictionary of the
2785  // global object. We bail out to the runtime system to do that.
2786  __ li(t0, Operand(cell));
2787  __ LoadRoot(t1, Heap::kTheHoleValueRootIndex);
 2788  __ lw(t2, FieldMemOperand(t0, JSGlobalPropertyCell::kValueOffset));
 2789  __ Branch(&miss, eq, t1, Operand(t2));
2790 
2791  // Store the value in the cell.
 2792  __ sw(a0, FieldMemOperand(t0, JSGlobalPropertyCell::kValueOffset));
 2793  __ mov(v0, a0); // Stored value must be returned in v0.
2794  // Cells are always rescanned, so no write barrier here.
2795 
2796  Counters* counters = masm()->isolate()->counters();
2797  __ IncrementCounter(counters->named_store_global_inline(), 1, a1, a3);
2798  __ Ret();
2799 
2800  // Handle store cache miss.
2801  __ bind(&miss);
2802  __ IncrementCounter(counters->named_store_global_inline_miss(), 1, a1, a3);
2803  Handle<Code> ic = masm()->isolate()->builtins()->StoreIC_Miss();
2804  __ Jump(ic, RelocInfo::CODE_TARGET);
2805 
2806  // Return the generated code.
2807  return GetCode(NORMAL, name);
2808 }
2809 
2810 
2811 Handle<Code> LoadStubCompiler::CompileLoadNonexistent(Handle<String> name,
2812  Handle<JSObject> object,
2813  Handle<JSObject> last) {
2814  // ----------- S t a t e -------------
2815  // -- a0 : receiver
2816  // -- ra : return address
2817  // -----------------------------------
2818  Label miss;
2819 
2820  // Check that the receiver is not a smi.
2821  __ JumpIfSmi(a0, &miss);
2822 
2823  // Check the maps of the full prototype chain.
2824  CheckPrototypes(object, a0, last, a3, a1, t0, name, &miss);
2825 
2826  // If the last object in the prototype chain is a global object,
2827  // check that the global property cell is empty.
2828  if (last->IsGlobalObject()) {
2829  GenerateCheckPropertyCell(
2830  masm(), Handle<GlobalObject>::cast(last), name, a1, &miss);
2831  }
2832 
 2833  // Return undefined if the maps of the full prototype chain are still the same.
2834  __ LoadRoot(v0, Heap::kUndefinedValueRootIndex);
2835  __ Ret();
2836 
2837  __ bind(&miss);
2838  GenerateLoadMiss(masm(), Code::LOAD_IC);
2839 
2840  // Return the generated code.
2841  return GetCode(NONEXISTENT, factory()->empty_string());
2842 }
2843 
2844 
2845 Handle<Code> LoadStubCompiler::CompileLoadField(Handle<JSObject> object,
2846  Handle<JSObject> holder,
2847  int index,
2848  Handle<String> name) {
2849  // ----------- S t a t e -------------
2850  // -- a0 : receiver
2851  // -- a2 : name
2852  // -- ra : return address
2853  // -----------------------------------
2854  Label miss;
2855 
2856  __ mov(v0, a0);
2857 
2858  GenerateLoadField(object, holder, v0, a3, a1, t0, index, name, &miss);
2859  __ bind(&miss);
2860  GenerateLoadMiss(masm(), Code::LOAD_IC);
2861 
2862  // Return the generated code.
2863  return GetCode(FIELD, name);
2864 }
2865 
2866 
 2867 Handle<Code> LoadStubCompiler::CompileLoadCallback(
 2868  Handle<String> name,
2869  Handle<JSObject> object,
2870  Handle<JSObject> holder,
2871  Handle<AccessorInfo> callback) {
2872  // ----------- S t a t e -------------
2873  // -- a0 : receiver
2874  // -- a2 : name
2875  // -- ra : return address
2876  // -----------------------------------
2877  Label miss;
2878  GenerateLoadCallback(object, holder, a0, a2, a3, a1, t0, callback, name,
2879  &miss);
2880  __ bind(&miss);
2881  GenerateLoadMiss(masm(), Code::LOAD_IC);
2882 
2883  // Return the generated code.
2884  return GetCode(CALLBACKS, name);
2885 }
2886 
2887 
 2888 Handle<Code> LoadStubCompiler::CompileLoadViaGetter(
 2889  Handle<String> name,
2890  Handle<JSObject> receiver,
2891  Handle<JSObject> holder,
2892  Handle<JSFunction> getter) {
2893  // ----------- S t a t e -------------
2894  // -- a0 : receiver
2895  // -- a2 : name
2896  // -- ra : return address
2897  // -----------------------------------
2898  Label miss;
2899 
2900  // Check that the maps haven't changed.
2901  __ JumpIfSmi(a0, &miss);
2902  CheckPrototypes(receiver, a0, holder, a3, t0, a1, name, &miss);
2903 
2904  {
2905  FrameScope scope(masm(), StackFrame::INTERNAL);
2906 
2907  // Call the JavaScript getter with the receiver on the stack.
2908  __ push(a0);
2909  ParameterCount actual(0);
2910  __ InvokeFunction(getter, actual, CALL_FUNCTION, NullCallWrapper(),
2911  CALL_AS_METHOD);
2912 
2913  // Restore context register.
 2914  __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
 2915  }
2916  __ Ret();
2917 
2918  __ bind(&miss);
2919  GenerateLoadMiss(masm(), Code::LOAD_IC);
2920 
2921  // Return the generated code.
2922  return GetCode(CALLBACKS, name);
2923 }
2924 
2925 
2926 Handle<Code> LoadStubCompiler::CompileLoadConstant(Handle<JSObject> object,
2927  Handle<JSObject> holder,
2928  Handle<JSFunction> value,
2929  Handle<String> name) {
2930  // ----------- S t a t e -------------
2931  // -- a0 : receiver
2932  // -- a2 : name
2933  // -- ra : return address
2934  // -----------------------------------
2935  Label miss;
2936 
2937  GenerateLoadConstant(object, holder, a0, a3, a1, t0, value, name, &miss);
2938  __ bind(&miss);
2939  GenerateLoadMiss(masm(), Code::LOAD_IC);
2940 
2941  // Return the generated code.
2942  return GetCode(CONSTANT_FUNCTION, name);
2943 }
2944 
2945 
2946 Handle<Code> LoadStubCompiler::CompileLoadInterceptor(Handle<JSObject> object,
2947  Handle<JSObject> holder,
2948  Handle<String> name) {
2949  // ----------- S t a t e -------------
2950  // -- a0 : receiver
2951  // -- a2 : name
2952  // -- ra : return address
2953  // -- [sp] : receiver
2954  // -----------------------------------
2955  Label miss;
2956 
2957  LookupResult lookup(isolate());
2958  LookupPostInterceptor(holder, name, &lookup);
2959  GenerateLoadInterceptor(object, holder, &lookup, a0, a2, a3, a1, t0, name,
2960  &miss);
2961  __ bind(&miss);
2962  GenerateLoadMiss(masm(), Code::LOAD_IC);
2963 
2964  // Return the generated code.
2965  return GetCode(INTERCEPTOR, name);
2966 }
2967 
2968 
 2969 Handle<Code> LoadStubCompiler::CompileLoadGlobal(
 2970  Handle<JSObject> object,
2971  Handle<GlobalObject> holder,
2972  Handle<JSGlobalPropertyCell> cell,
2973  Handle<String> name,
2974  bool is_dont_delete) {
2975  // ----------- S t a t e -------------
2976  // -- a0 : receiver
2977  // -- a2 : name
2978  // -- ra : return address
2979  // -----------------------------------
2980  Label miss;
2981 
2982  // Check that the map of the global has not changed.
2983  __ JumpIfSmi(a0, &miss);
2984  CheckPrototypes(object, a0, holder, a3, t0, a1, name, &miss);
2985 
2986  // Get the value from the cell.
2987  __ li(a3, Operand(cell));
 2988  __ lw(t0, FieldMemOperand(a3, JSGlobalPropertyCell::kValueOffset));
 2989 
2990  // Check for deleted property if property can actually be deleted.
2991  if (!is_dont_delete) {
2992  __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
2993  __ Branch(&miss, eq, t0, Operand(at));
2994  }
2995 
2996  __ mov(v0, t0);
2997  Counters* counters = masm()->isolate()->counters();
2998  __ IncrementCounter(counters->named_load_global_stub(), 1, a1, a3);
2999  __ Ret();
3000 
3001  __ bind(&miss);
3002  __ IncrementCounter(counters->named_load_global_stub_miss(), 1, a1, a3);
3003  GenerateLoadMiss(masm(), Code::LOAD_IC);
3004 
3005  // Return the generated code.
3006  return GetCode(NORMAL, name);
3007 }
3008 
3009 
3010 Handle<Code> KeyedLoadStubCompiler::CompileLoadField(Handle<String> name,
3011  Handle<JSObject> receiver,
3012  Handle<JSObject> holder,
3013  int index) {
3014  // ----------- S t a t e -------------
3015  // -- ra : return address
3016  // -- a0 : key
3017  // -- a1 : receiver
3018  // -----------------------------------
3019  Label miss;
3020 
3021  // Check the key is the cached one.
3022  __ Branch(&miss, ne, a0, Operand(name));
3023 
3024  GenerateLoadField(receiver, holder, a1, a2, a3, t0, index, name, &miss);
3025  __ bind(&miss);
3026  GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
3027 
3028  return GetCode(FIELD, name);
3029 }
3030 
3031 
 3032 Handle<Code> KeyedLoadStubCompiler::CompileLoadCallback(
 3033  Handle<String> name,
3034  Handle<JSObject> receiver,
3035  Handle<JSObject> holder,
3036  Handle<AccessorInfo> callback) {
3037  // ----------- S t a t e -------------
3038  // -- ra : return address
3039  // -- a0 : key
3040  // -- a1 : receiver
3041  // -----------------------------------
3042  Label miss;
3043 
3044  // Check the key is the cached one.
3045  __ Branch(&miss, ne, a0, Operand(name));
3046 
3047  GenerateLoadCallback(receiver, holder, a1, a0, a2, a3, t0, callback, name,
3048  &miss);
3049  __ bind(&miss);
3050  GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
3051 
3052  return GetCode(CALLBACKS, name);
3053 }
3054 
3055 
 3056 Handle<Code> KeyedLoadStubCompiler::CompileLoadConstant(
 3057  Handle<String> name,
3058  Handle<JSObject> receiver,
3059  Handle<JSObject> holder,
3060  Handle<JSFunction> value) {
3061  // ----------- S t a t e -------------
3062  // -- ra : return address
3063  // -- a0 : key
3064  // -- a1 : receiver
3065  // -----------------------------------
3066  Label miss;
3067 
3068  // Check the key is the cached one.
3069  __ Branch(&miss, ne, a0, Operand(name));
3070 
3071  GenerateLoadConstant(receiver, holder, a1, a2, a3, t0, value, name, &miss);
3072  __ bind(&miss);
3073  GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
3074 
3075  // Return the generated code.
3076  return GetCode(CONSTANT_FUNCTION, name);
3077 }
3078 
3079 
 3080 Handle<Code> KeyedLoadStubCompiler::CompileLoadInterceptor(
 3081  Handle<JSObject> receiver,
3082  Handle<JSObject> holder,
3083  Handle<String> name) {
3084  // ----------- S t a t e -------------
3085  // -- ra : return address
3086  // -- a0 : key
3087  // -- a1 : receiver
3088  // -----------------------------------
3089  Label miss;
3090 
3091  // Check the key is the cached one.
3092  __ Branch(&miss, ne, a0, Operand(name));
3093 
3094  LookupResult lookup(isolate());
3095  LookupPostInterceptor(holder, name, &lookup);
3096  GenerateLoadInterceptor(receiver, holder, &lookup, a1, a0, a2, a3, t0, name,
3097  &miss);
3098  __ bind(&miss);
3099  GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
3100 
3101  return GetCode(INTERCEPTOR, name);
3102 }
3103 
3104 
 3105 Handle<Code> KeyedLoadStubCompiler::CompileLoadArrayLength(
 3106  Handle<String> name) {
3107  // ----------- S t a t e -------------
3108  // -- ra : return address
3109  // -- a0 : key
3110  // -- a1 : receiver
3111  // -----------------------------------
3112  Label miss;
3113 
3114  // Check the key is the cached one.
3115  __ Branch(&miss, ne, a0, Operand(name));
3116 
3117  GenerateLoadArrayLength(masm(), a1, a2, &miss);
3118  __ bind(&miss);
3119  GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
3120 
3121  return GetCode(CALLBACKS, name);
3122 }
3123 
3124 
 3125 Handle<Code> KeyedLoadStubCompiler::CompileLoadStringLength(
 3126  Handle<String> name) {
3127  // ----------- S t a t e -------------
3128  // -- ra : return address
3129  // -- a0 : key
3130  // -- a1 : receiver
3131  // -----------------------------------
3132  Label miss;
3133 
3134  Counters* counters = masm()->isolate()->counters();
3135  __ IncrementCounter(counters->keyed_load_string_length(), 1, a2, a3);
3136 
3137  // Check the key is the cached one.
3138  __ Branch(&miss, ne, a0, Operand(name));
3139 
3140  GenerateLoadStringLength(masm(), a1, a2, a3, &miss, true);
3141  __ bind(&miss);
3142  __ DecrementCounter(counters->keyed_load_string_length(), 1, a2, a3);
3143 
3144  GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
3145 
3146  return GetCode(CALLBACKS, name);
3147 }
3148 
3149 
 3150 Handle<Code> KeyedLoadStubCompiler::CompileLoadFunctionPrototype(
 3151  Handle<String> name) {
3152  // ----------- S t a t e -------------
3153  // -- ra : return address
3154  // -- a0 : key
3155  // -- a1 : receiver
3156  // -----------------------------------
3157  Label miss;
3158 
3159  Counters* counters = masm()->isolate()->counters();
3160  __ IncrementCounter(counters->keyed_load_function_prototype(), 1, a2, a3);
3161 
3162  // Check the name hasn't changed.
3163  __ Branch(&miss, ne, a0, Operand(name));
3164 
3165  GenerateLoadFunctionPrototype(masm(), a1, a2, a3, &miss);
3166  __ bind(&miss);
3167  __ DecrementCounter(counters->keyed_load_function_prototype(), 1, a2, a3);
3168  GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
3169 
3170  return GetCode(CALLBACKS, name);
3171 }
3172 
3173 
 3174 Handle<Code> KeyedLoadStubCompiler::CompileLoadElement(
 3175  Handle<Map> receiver_map) {
3176  // ----------- S t a t e -------------
3177  // -- ra : return address
3178  // -- a0 : key
3179  // -- a1 : receiver
3180  // -----------------------------------
3181  ElementsKind elements_kind = receiver_map->elements_kind();
3182  Handle<Code> stub = KeyedLoadElementStub(elements_kind).GetCode();
3183 
3184  __ DispatchMap(a1, a2, receiver_map, stub, DO_SMI_CHECK);
3185 
3186  Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Miss();
3187  __ Jump(ic, RelocInfo::CODE_TARGET);
3188 
3189  // Return the generated code.
3190  return GetCode(NORMAL, factory()->empty_string());
3191 }
3192 
3193 
 3194 Handle<Code> KeyedLoadStubCompiler::CompileLoadPolymorphic(
 3195  MapHandleList* receiver_maps,
3196  CodeHandleList* handler_ics) {
3197  // ----------- S t a t e -------------
3198  // -- ra : return address
3199  // -- a0 : key
3200  // -- a1 : receiver
3201  // -----------------------------------
3202  Label miss;
3203  __ JumpIfSmi(a1, &miss);
3204 
3205  int receiver_count = receiver_maps->length();
 3206  __ lw(a2, FieldMemOperand(a1, HeapObject::kMapOffset));
 3207  for (int current = 0; current < receiver_count; ++current) {
3208  __ Jump(handler_ics->at(current), RelocInfo::CODE_TARGET,
3209  eq, a2, Operand(receiver_maps->at(current)));
3210  }
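 // Note: polymorphic dispatch here is a linear chain of compare-and-jump
 // pairs; each Jump above is taken only when the receiver map in a2 matches
 // the corresponding cached map.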
3211 
3212  __ bind(&miss);
3213  Handle<Code> miss_ic = isolate()->builtins()->KeyedLoadIC_Miss();
3214  __ Jump(miss_ic, RelocInfo::CODE_TARGET);
3215 
3216  // Return the generated code.
3217  return GetCode(NORMAL, factory()->empty_string(), MEGAMORPHIC);
3218 }
3219 
3220 
3221 Handle<Code> KeyedStoreStubCompiler::CompileStoreField(Handle<JSObject> object,
3222  int index,
3223  Handle<Map> transition,
3224  Handle<String> name) {
3225  // ----------- S t a t e -------------
3226  // -- a0 : value
3227  // -- a1 : key
3228  // -- a2 : receiver
3229  // -- ra : return address
3230  // -----------------------------------
3231 
3232  Label miss;
3233 
3234  Counters* counters = masm()->isolate()->counters();
3235  __ IncrementCounter(counters->keyed_store_field(), 1, a3, t0);
3236 
3237  // Check that the name has not changed.
3238  __ Branch(&miss, ne, a1, Operand(name));
3239 
3240  // a3 is used as scratch register. a1 and a2 keep their values if a jump to
3241  // the miss label is generated.
3242  GenerateStoreField(masm(),
3243  object,
3244  index,
3245  transition,
3246  name,
3247  a2, a1, a3, t0,
3248  &miss);
3249  __ bind(&miss);
3250 
3251  __ DecrementCounter(counters->keyed_store_field(), 1, a3, t0);
3252  Handle<Code> ic = masm()->isolate()->builtins()->KeyedStoreIC_Miss();
3253  __ Jump(ic, RelocInfo::CODE_TARGET);
3254 
3255  // Return the generated code.
3256  return GetCode(transition.is_null() ? FIELD : MAP_TRANSITION, name);
3257 }
3258 
3259 
 3260 Handle<Code> KeyedStoreStubCompiler::CompileStoreElement(
 3261  Handle<Map> receiver_map) {
3262  // ----------- S t a t e -------------
3263  // -- a0 : value
3264  // -- a1 : key
3265  // -- a2 : receiver
3266  // -- ra : return address
3267  // -- a3 : scratch
3268  // -----------------------------------
3269  ElementsKind elements_kind = receiver_map->elements_kind();
3270  bool is_js_array = receiver_map->instance_type() == JS_ARRAY_TYPE;
3271  Handle<Code> stub =
3272  KeyedStoreElementStub(is_js_array, elements_kind, grow_mode_).GetCode();
3273 
3274  __ DispatchMap(a2, a3, receiver_map, stub, DO_SMI_CHECK);
3275 
3276  Handle<Code> ic = isolate()->builtins()->KeyedStoreIC_Miss();
3277  __ Jump(ic, RelocInfo::CODE_TARGET);
3278 
3279  // Return the generated code.
3280  return GetCode(NORMAL, factory()->empty_string());
3281 }
3282 
3283 
 3284 Handle<Code> KeyedStoreStubCompiler::CompileStorePolymorphic(
 3285  MapHandleList* receiver_maps,
3286  CodeHandleList* handler_stubs,
3287  MapHandleList* transitioned_maps) {
3288  // ----------- S t a t e -------------
3289  // -- a0 : value
3290  // -- a1 : key
3291  // -- a2 : receiver
3292  // -- ra : return address
3293  // -- a3 : scratch
3294  // -----------------------------------
3295  Label miss;
3296  __ JumpIfSmi(a2, &miss);
3297 
3298  int receiver_count = receiver_maps->length();
 3299  __ lw(a3, FieldMemOperand(a2, HeapObject::kMapOffset));
 3300  for (int i = 0; i < receiver_count; ++i) {
3301  if (transitioned_maps->at(i).is_null()) {
3302  __ Jump(handler_stubs->at(i), RelocInfo::CODE_TARGET, eq,
3303  a3, Operand(receiver_maps->at(i)));
3304  } else {
3305  Label next_map;
3306  __ Branch(&next_map, ne, a3, Operand(receiver_maps->at(i)));
3307  __ li(a3, Operand(transitioned_maps->at(i)));
3308  __ Jump(handler_stubs->at(i), RelocInfo::CODE_TARGET);
3309  __ bind(&next_map);
3310  }
3311  }
3312 
3313  __ bind(&miss);
3314  Handle<Code> miss_ic = isolate()->builtins()->KeyedStoreIC_Miss();
3315  __ Jump(miss_ic, RelocInfo::CODE_TARGET);
3316 
3317  // Return the generated code.
3318  return GetCode(NORMAL, factory()->empty_string(), MEGAMORPHIC);
3319 }
3320 
3321 
 3322 Handle<Code> ConstructStubCompiler::CompileConstructStub(
 3323  Handle<JSFunction> function) {
3324  // a0 : argc
3325  // a1 : constructor
3326  // ra : return address
3327  // [sp] : last argument
3328  Label generic_stub_call;
3329 
3330  // Use t7 for holding undefined which is used in several places below.
3331  __ LoadRoot(t7, Heap::kUndefinedValueRootIndex);
3332 
3333 #ifdef ENABLE_DEBUGGER_SUPPORT
3334  // Check to see whether there are any break points in the function code. If
 3335  // there are, jump to the generic constructor stub which calls the actual
3336  // code for the function thereby hitting the break points.
 3337  __ lw(t5, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
 3338  __ lw(a2, FieldMemOperand(t5, SharedFunctionInfo::kDebugInfoOffset));
 3339  __ Branch(&generic_stub_call, ne, a2, Operand(t7));
3340 #endif
3341 
3342  // Load the initial map and verify that it is in fact a map.
3343  // a1: constructor function
3344  // t7: undefined
 3345  __ lw(a2, FieldMemOperand(a1, JSFunction::kPrototypeOrInitialMapOffset));
 3346  __ JumpIfSmi(a2, &generic_stub_call);
3347  __ GetObjectType(a2, a3, t0);
3348  __ Branch(&generic_stub_call, ne, t0, Operand(MAP_TYPE));
3349 
3350 #ifdef DEBUG
3351  // Cannot construct functions this way.
3352  // a0: argc
3353  // a1: constructor function
3354  // a2: initial map
3355  // t7: undefined
 3356  __ lbu(a3, FieldMemOperand(a2, Map::kInstanceTypeOffset));
 3357  __ Check(ne, "Function constructed by construct stub.",
3358  a3, Operand(JS_FUNCTION_TYPE));
3359 #endif
3360 
3361  // Now allocate the JSObject in new space.
3362  // a0: argc
3363  // a1: constructor function
3364  // a2: initial map
3365  // t7: undefined
 3366  __ lbu(a3, FieldMemOperand(a2, Map::kInstanceSizeOffset));
 3367  __ AllocateInNewSpace(a3, t4, t5, t6, &generic_stub_call, SIZE_IN_WORDS);
3368 
3369  // Allocated the JSObject, now initialize the fields. Map is set to initial
3370  // map and properties and elements are set to empty fixed array.
3371  // a0: argc
3372  // a1: constructor function
3373  // a2: initial map
3374  // a3: object size (in words)
3375  // t4: JSObject (not tagged)
3376  // t7: undefined
3377  __ LoadRoot(t6, Heap::kEmptyFixedArrayRootIndex);
3378  __ mov(t5, t4);
3379  __ sw(a2, MemOperand(t5, JSObject::kMapOffset));
3380  __ sw(t6, MemOperand(t5, JSObject::kPropertiesOffset));
3381  __ sw(t6, MemOperand(t5, JSObject::kElementsOffset));
3382  __ Addu(t5, t5, Operand(3 * kPointerSize));
3383  ASSERT_EQ(0 * kPointerSize, JSObject::kMapOffset);
3384  ASSERT_EQ(1 * kPointerSize, JSObject::kPropertiesOffset);
3385  ASSERT_EQ(2 * kPointerSize, JSObject::kElementsOffset);
3386 
3387 
3388  // Calculate the location of the first argument. The stack contains only the
3389  // argc arguments.
3390  __ sll(a1, a0, kPointerSizeLog2);
3391  __ Addu(a1, a1, sp);
3392 
3393  // Fill all the in-object properties with undefined.
3394  // a0: argc
3395  // a1: first argument
3396  // a3: object size (in words)
3397  // t4: JSObject (not tagged)
3398  // t5: First in-object property of JSObject (not tagged)
3399  // t7: undefined
3400  // Fill the initialized properties with a constant value or a passed argument
3401  // depending on the this.x = ...; assignment in the function.
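 // For example (illustrative JavaScript, not from this file): given
 //
 //   function Point(x) { this.x = x; this.y = 0; }
 //
 // the loop below emits, per assignment, either a store of the matching
 // argument (or undefined when argc is too small) or a store of the
 // constant, into consecutive in-object property slots.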
3402  Handle<SharedFunctionInfo> shared(function->shared());
3403  for (int i = 0; i < shared->this_property_assignments_count(); i++) {
3404  if (shared->IsThisPropertyAssignmentArgument(i)) {
3405  Label not_passed, next;
3406  // Check if the argument assigned to the property is actually passed.
3407  int arg_number = shared->GetThisPropertyAssignmentArgument(i);
3408  __ Branch(&not_passed, less_equal, a0, Operand(arg_number));
3409  // Argument passed - find it on the stack.
3410  __ lw(a2, MemOperand(a1, (arg_number + 1) * -kPointerSize));
3411  __ sw(a2, MemOperand(t5));
3412  __ Addu(t5, t5, kPointerSize);
3413  __ jmp(&next);
3414  __ bind(&not_passed);
3415  // Set the property to undefined.
3416  __ sw(t7, MemOperand(t5));
3417  __ Addu(t5, t5, Operand(kPointerSize));
3418  __ bind(&next);
3419  } else {
3420  // Set the property to the constant value.
3421  Handle<Object> constant(shared->GetThisPropertyAssignmentConstant(i));
3422  __ li(a2, Operand(constant));
3423  __ sw(a2, MemOperand(t5));
3424  __ Addu(t5, t5, kPointerSize);
3425  }
3426  }
3427 
3428  // Fill the unused in-object property fields with undefined.
3429  ASSERT(function->has_initial_map());
3430  for (int i = shared->this_property_assignments_count();
3431  i < function->initial_map()->inobject_properties();
3432  i++) {
3433  __ sw(t7, MemOperand(t5));
3434  __ Addu(t5, t5, kPointerSize);
3435  }
3436 
3437  // a0: argc
3438  // t4: JSObject (not tagged)
3439  // Move argc to a1 and the JSObject to return to v0 and tag it.
3440  __ mov(a1, a0);
3441  __ mov(v0, t4);
3442  __ Or(v0, v0, Operand(kHeapObjectTag));
3443 
3444  // v0: JSObject
3445  // a1: argc
3446  // Remove caller arguments and receiver from the stack and return.
3447  __ sll(t0, a1, kPointerSizeLog2);
3448  __ Addu(sp, sp, t0);
3449  __ Addu(sp, sp, Operand(kPointerSize));
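 // Illustrative arithmetic: the two Addu's above drop the argc arguments
 // plus the receiver, i.e. sp += (argc + 1) * kPointerSize; with argc == 2
 // on MIPS32 that is sp += 12 bytes.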
3450  Counters* counters = masm()->isolate()->counters();
3451  __ IncrementCounter(counters->constructed_objects(), 1, a1, a2);
3452  __ IncrementCounter(counters->constructed_objects_stub(), 1, a1, a2);
3453  __ Ret();
3454 
3455  // Jump to the generic stub in case the specialized code cannot handle the
3456  // construction.
3457  __ bind(&generic_stub_call);
3458  Handle<Code> generic_construct_stub =
3459  masm()->isolate()->builtins()->JSConstructStubGeneric();
3460  __ Jump(generic_construct_stub, RelocInfo::CODE_TARGET);
3461 
3462  // Return the generated code.
3463  return GetCode();
3464 }
3465 
3466 
3467 #undef __
3468 #define __ ACCESS_MASM(masm)
3469 
3470 
3471 void KeyedLoadStubCompiler::GenerateLoadDictionaryElement(
3472  MacroAssembler* masm) {
3473  // ---------- S t a t e --------------
3474  // -- ra : return address
3475  // -- a0 : key
3476  // -- a1 : receiver
3477  // -----------------------------------
3478  Label slow, miss_force_generic;
3479 
3480  Register key = a0;
3481  Register receiver = a1;
3482 
3483  __ JumpIfNotSmi(key, &miss_force_generic);
3484  __ lw(t0, FieldMemOperand(receiver, JSObject::kElementsOffset));
3485  __ sra(a2, a0, kSmiTagSize);
3486  __ LoadFromNumberDictionary(&slow, t0, a0, v0, a2, a3, t1);
3487  __ Ret();
3488 
3489  // Slow case, key and receiver still in a0 and a1.
3490  __ bind(&slow);
3491  __ IncrementCounter(
3492  masm->isolate()->counters()->keyed_load_external_array_slow(),
3493  1, a2, a3);
3494  // Entry registers are intact.
3495  // ---------- S t a t e --------------
3496  // -- ra : return address
3497  // -- a0 : key
3498  // -- a1 : receiver
3499  // -----------------------------------
3500  Handle<Code> slow_ic =
3501  masm->isolate()->builtins()->KeyedLoadIC_Slow();
3502  __ Jump(slow_ic, RelocInfo::CODE_TARGET);
3503 
3504  // Miss case, call the runtime.
3505  __ bind(&miss_force_generic);
3506 
3507  // ---------- S t a t e --------------
3508  // -- ra : return address
3509  // -- a0 : key
3510  // -- a1 : receiver
3511  // -----------------------------------
3512 
3513  Handle<Code> miss_ic =
3514  masm->isolate()->builtins()->KeyedLoadIC_MissForceGeneric();
3515  __ Jump(miss_ic, RelocInfo::CODE_TARGET);
3516 }
3517 
3518 
3519 static bool IsElementTypeSigned(ElementsKind elements_kind) {
3520  switch (elements_kind) {
3521  case EXTERNAL_BYTE_ELEMENTS:
3522  case EXTERNAL_SHORT_ELEMENTS:
3523  case EXTERNAL_INT_ELEMENTS:
3524  return true;
3525 
3526  case EXTERNAL_UNSIGNED_BYTE_ELEMENTS:
3527  case EXTERNAL_UNSIGNED_SHORT_ELEMENTS:
3528  case EXTERNAL_UNSIGNED_INT_ELEMENTS:
3529  case EXTERNAL_PIXEL_ELEMENTS:
3530  return false;
3531 
3532  case EXTERNAL_FLOAT_ELEMENTS:
3533  case EXTERNAL_DOUBLE_ELEMENTS:
3534  case FAST_SMI_ELEMENTS:
3535  case FAST_ELEMENTS:
3536  case FAST_DOUBLE_ELEMENTS:
3537  case FAST_HOLEY_SMI_ELEMENTS:
3538  case FAST_HOLEY_ELEMENTS:
3539  case FAST_HOLEY_DOUBLE_ELEMENTS:
3540  case DICTIONARY_ELEMENTS:
3541  case NON_STRICT_ARGUMENTS_ELEMENTS:
3542  UNREACHABLE();
3543  return false;
3544  }
3545  return false;
3546 }
3547 
3548 
3549 static void GenerateSmiKeyCheck(MacroAssembler* masm,
3550  Register key,
3551  Register scratch0,
3552  Register scratch1,
3553  FPURegister double_scratch0,
3554  Label* fail) {
3555  if (CpuFeatures::IsSupported(FPU)) {
3556  CpuFeatures::Scope scope(FPU);
3557  Label key_ok;
3558  // Check for smi or a smi inside a heap number. We convert the heap
3559  // number and check if the conversion is exact and fits into the smi
3560  // range.
3561  __ JumpIfSmi(key, &key_ok);
3562  __ CheckMap(key,
3563  scratch0,
3564  Heap::kHeapNumberMapRootIndex,
3565  fail,
3566  DONT_DO_SMI_CHECK);
3567  __ ldc1(double_scratch0, FieldMemOperand(key, HeapNumber::kValueOffset));
3568  __ EmitFPUTruncate(kRoundToZero,
3569  double_scratch0,
3570  double_scratch0,
3571  scratch0,
3572  scratch1,
3573  kCheckForInexactConversion);
3574 
3575  __ Branch(fail, ne, scratch1, Operand(zero_reg));
3576 
3577  __ mfc1(scratch0, double_scratch0);
3578  __ SmiTagCheckOverflow(key, scratch0, scratch1);
3579  __ BranchOnOverflow(fail, scratch1);
3580  __ bind(&key_ok);
3581  } else {
3582  // Check that the key is a smi.
3583  __ JumpIfNotSmi(key, fail);
3584  }
3585 }
3586 
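 // A rough C sketch of the FPU path above (illustrative only): a heap-number
 // key is accepted iff truncation toward zero is exact and the result fits
 // in the 31-bit smi payload.
 //
 //   bool KeyConvertsToSmi(double d) {
 //     int32_t i = (int32_t)d;                    // kRoundToZero
 //     if ((double)i != d) return false;          // inexact -> fail
 //     return i >= -(1 << 30) && i < (1 << 30);   // smi range
 //   }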
3587 
3588 void KeyedLoadStubCompiler::GenerateLoadExternalArray(
3589  MacroAssembler* masm,
3590  ElementsKind elements_kind) {
3591  // ---------- S t a t e --------------
3592  // -- ra : return address
3593  // -- a0 : key
3594  // -- a1 : receiver
3595  // -----------------------------------
3596  Label miss_force_generic, slow, failed_allocation;
3597 
3598  Register key = a0;
3599  Register receiver = a1;
3600 
3601  // This stub is meant to be tail-jumped to, the receiver must already
3602  // have been verified by the caller to not be a smi.
3603 
3604  // Check that the key is a smi or a heap number convertible to a smi.
3605  GenerateSmiKeyCheck(masm, key, t0, t1, f2, &miss_force_generic);
3606 
3607  __ lw(a3, FieldMemOperand(receiver, JSObject::kElementsOffset));
3608  // a3: elements array
3609 
3610  // Check that the index is in range.
3611  __ lw(t1, FieldMemOperand(a3, ExternalArray::kLengthOffset));
3612  __ sra(t2, key, kSmiTagSize);
3613  // Unsigned comparison catches both negative and too-large values.
3614  __ Branch(&miss_force_generic, Ugreater_equal, key, Operand(t1));
3615 
3616  __ lw(a3, FieldMemOperand(a3, ExternalArray::kExternalPointerOffset));
3617  // a3: base pointer of external storage
3618 
3619  // We are not untagging the smi key; instead we work with it
3620  // as if it were premultiplied by 2.
3621  STATIC_ASSERT((kSmiTag == 0) && (kSmiTagSize == 1));
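 // Indexing sketch (illustrative), with key_smi == index << 1:
 //   byte-sized elements:  offset = key_smi >> 1;  // index * 1
 //   16-bit elements:      offset = key_smi;       // index * 2
 //   32-bit elements:      offset = key_smi << 1;  // index * 4
 // which is exactly the srl/addu/sll sequences in the switch below.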
3622 
3623  Register value = a2;
3624  switch (elements_kind) {
3625  case EXTERNAL_BYTE_ELEMENTS:
3626  __ srl(t2, key, 1);
3627  __ addu(t3, a3, t2);
3628  __ lb(value, MemOperand(t3, 0));
3629  break;
3630  case EXTERNAL_PIXEL_ELEMENTS:
3631  case EXTERNAL_UNSIGNED_BYTE_ELEMENTS:
3632  __ srl(t2, key, 1);
3633  __ addu(t3, a3, t2);
3634  __ lbu(value, MemOperand(t3, 0));
3635  break;
3636  case EXTERNAL_SHORT_ELEMENTS:
3637  __ addu(t3, a3, key);
3638  __ lh(value, MemOperand(t3, 0));
3639  break;
3640  case EXTERNAL_UNSIGNED_SHORT_ELEMENTS:
3641  __ addu(t3, a3, key);
3642  __ lhu(value, MemOperand(t3, 0));
3643  break;
3644  case EXTERNAL_INT_ELEMENTS:
3645  case EXTERNAL_UNSIGNED_INT_ELEMENTS:
3646  __ sll(t2, key, 1);
3647  __ addu(t3, a3, t2);
3648  __ lw(value, MemOperand(t3, 0));
3649  break;
3650  case EXTERNAL_FLOAT_ELEMENTS:
3651  __ sll(t3, t2, 2);
3652  __ addu(t3, a3, t3);
3653  if (CpuFeatures::IsSupported(FPU)) {
3654  CpuFeatures::Scope scope(FPU);
3655  __ lwc1(f0, MemOperand(t3, 0));
3656  } else {
3657  __ lw(value, MemOperand(t3, 0));
3658  }
3659  break;
3660  case EXTERNAL_DOUBLE_ELEMENTS:
3661  __ sll(t2, key, 2);
3662  __ addu(t3, a3, t2);
3663  if (CpuFeatures::IsSupported(FPU)) {
3664  CpuFeatures::Scope scope(FPU);
3665  __ ldc1(f0, MemOperand(t3, 0));
3666  } else {
3667  // t3: pointer to the beginning of the double we want to load.
3668  __ lw(a2, MemOperand(t3, 0));
3669  __ lw(a3, MemOperand(t3, Register::kSizeInBytes));
3670  }
3671  break;
3672  case FAST_ELEMENTS:
3673  case FAST_SMI_ELEMENTS:
3674  case FAST_DOUBLE_ELEMENTS:
3675  case FAST_HOLEY_ELEMENTS:
3676  case FAST_HOLEY_SMI_ELEMENTS:
3677  case FAST_HOLEY_DOUBLE_ELEMENTS:
3678  case DICTIONARY_ELEMENTS:
3679  case NON_STRICT_ARGUMENTS_ELEMENTS:
3680  UNREACHABLE();
3681  break;
3682  }
3683 
3684  // For integer array types:
3685  // a2: value
3686  // For float array type:
3687  // f0: value (if FPU is supported)
3688  // a2: value (if FPU is not supported)
3689  // For double array type:
3690  // f0: value (if FPU is supported)
3691  // a2/a3: value (if FPU is not supported)
3692 
3693  if (elements_kind == EXTERNAL_INT_ELEMENTS) {
3694  // For the Int and UnsignedInt array types, we need to see whether
3695  // the value can be represented in a Smi. If not, we need to convert
3696  // it to a HeapNumber.
3697  Label box_int;
3698  __ Subu(t3, value, Operand(0xC0000000)); // Non-smi value gives neg result.
3699  __ Branch(&box_int, lt, t3, Operand(zero_reg));
3700  // Tag integer as smi and return it.
3701  __ sll(v0, value, kSmiTagSize);
3702  __ Ret();
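 // Why the 0xC0000000 subtraction works (illustrative): in 32-bit
 // wraparound arithmetic, value - 0xC0000000 == value + 2^30, which maps
 // the smi range [-2^30, 2^30) onto the non-negative signed integers; any
 // value outside that range yields a negative result.
 //
 //   bool FitsSmi(int32_t v) {
 //     return (int32_t)((uint32_t)v + 0x40000000u) >= 0;
 //   }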
3703 
3704  __ bind(&box_int);
3705  // Allocate a HeapNumber for the result and perform int-to-double
3706  // conversion.
3707  // The arm version uses a temporary here to save r0, but we don't need to
3708  // (a0 is not modified).
3709  __ LoadRoot(t1, Heap::kHeapNumberMapRootIndex);
3710  __ AllocateHeapNumber(v0, a3, t0, t1, &slow);
3711 
3713  CpuFeatures::Scope scope(FPU);
3714  __ mtc1(value, f0);
3715  __ cvt_d_w(f0, f0);
3716  __ sdc1(f0, FieldMemOperand(v0, HeapNumber::kValueOffset));
3717  __ Ret();
3718  } else {
3719  Register dst1 = t2;
3720  Register dst2 = t3;
3721  FloatingPointHelper::Destination dest =
3722  FloatingPointHelper::kCoreRegisters;
3723  FloatingPointHelper::ConvertIntToDouble(masm,
3724  value,
3725  dest,
3726  f0,
3727  dst1,
3728  dst2,
3729  t1,
3730  f2);
3731  __ sw(dst1, FieldMemOperand(v0, HeapNumber::kMantissaOffset));
3732  __ sw(dst2, FieldMemOperand(v0, HeapNumber::kExponentOffset));
3733  __ Ret();
3734  }
3735  } else if (elements_kind == EXTERNAL_UNSIGNED_INT_ELEMENTS) {
3736  // The test is different for unsigned int values. Since we need
3737  // the value to be in the range of a positive smi, we can't
3738  // handle either of the top two bits being set in the value.
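 // Sketch (illustrative): an unsigned 32-bit value fits in a positive smi
 // only if it is below 2^30, i.e. neither of the top two bits is set.
 //
 //   bool FitsSmi(uint32_t v) { return (v & 0xC0000000u) == 0; }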
3739  if (CpuFeatures::IsSupported(FPU)) {
3740  CpuFeatures::Scope scope(FPU);
3741  Label pl_box_int;
3742  __ And(t2, value, Operand(0xC0000000));
3743  __ Branch(&pl_box_int, ne, t2, Operand(zero_reg));
3744 
3745  // It can fit in a Smi.
3746  // Tag integer as smi and return it.
3747  __ sll(v0, value, kSmiTagSize);
3748  __ Ret();
3749 
3750  __ bind(&pl_box_int);
3751  // Allocate a HeapNumber for the result and perform int-to-double
3752  // conversion. Don't use a0 and a1 as AllocateHeapNumber clobbers all
3753  // registers - also when jumping due to exhausted young space.
3754  __ LoadRoot(t6, Heap::kHeapNumberMapRootIndex);
3755  __ AllocateHeapNumber(v0, t2, t3, t6, &slow);
3756 
3757  // This is replaced by a macro:
3758  // __ mtc1(value, f0); // LS 32-bits.
3759  // __ mtc1(zero_reg, f1); // MS 32-bits are all zero.
3760  // __ cvt_d_l(f0, f0); // Use 64 bit conv to get correct unsigned 32-bit.
3761 
3762  __ Cvt_d_uw(f0, value, f22);
3763 
3764  __ sdc1(f0, FieldMemOperand(v0, HeapNumber::kValueOffset));
3765 
3766  __ Ret();
3767  } else {
3768  // Check whether unsigned integer fits into smi.
3769  Label box_int_0, box_int_1, done;
3770  __ And(t2, value, Operand(0x80000000));
3771  __ Branch(&box_int_0, ne, t2, Operand(zero_reg));
3772  __ And(t2, value, Operand(0x40000000));
3773  __ Branch(&box_int_1, ne, t2, Operand(zero_reg));
3774 
3775  // Tag integer as smi and return it.
3776  __ sll(v0, value, kSmiTagSize);
3777  __ Ret();
3778 
3779  Register hiword = value; // a2.
3780  Register loword = a3;
3781 
3782  __ bind(&box_int_0);
3783  // Integer does not have leading zeros.
3784  GenerateUInt2Double(masm, hiword, loword, t0, 0);
3785  __ Branch(&done);
3786 
3787  __ bind(&box_int_1);
3788  // Integer has one leading zero.
3789  GenerateUInt2Double(masm, hiword, loword, t0, 1);
3790 
3791 
3792  __ bind(&done);
3793  // Integer was converted to double in registers hiword:loword.
3794  // Wrap it into a HeapNumber. Don't use a0 and a1 as AllocateHeapNumber
3795  // clobbers all registers - also when jumping due to exhausted young
3796  // space.
3797  __ LoadRoot(t6, Heap::kHeapNumberMapRootIndex);
3798  __ AllocateHeapNumber(t2, t3, t5, t6, &slow);
3799 
3800  __ sw(hiword, FieldMemOperand(t2, HeapNumber::kExponentOffset));
3801  __ sw(loword, FieldMemOperand(t2, HeapNumber::kMantissaOffset));
3802 
3803  __ mov(v0, t2);
3804  __ Ret();
3805  }
3806  } else if (elements_kind == EXTERNAL_FLOAT_ELEMENTS) {
3807  // For the floating-point array type, we need to always allocate a
3808  // HeapNumber.
3809  if (CpuFeatures::IsSupported(FPU)) {
3810  CpuFeatures::Scope scope(FPU);
3811  // Allocate a HeapNumber for the result. Don't use a0 and a1 as
3812  // AllocateHeapNumber clobbers all registers - also when jumping due to
3813  // exhausted young space.
3814  __ LoadRoot(t6, Heap::kHeapNumberMapRootIndex);
3815  __ AllocateHeapNumber(v0, t3, t5, t6, &slow);
3816  // The float (single) value is already in fpu reg f0 (if we use float).
3817  __ cvt_d_s(f0, f0);
3818  __ sdc1(f0, FieldMemOperand(v0, HeapNumber::kValueOffset));
3819  __ Ret();
3820  } else {
3821  // Allocate a HeapNumber for the result. Don't use a0 and a1 as
3822  // AllocateHeapNumber clobbers all registers - also when jumping due to
3823  // exhausted young space.
3824  __ LoadRoot(t6, Heap::kHeapNumberMapRootIndex);
3825  __ AllocateHeapNumber(v0, t3, t5, t6, &slow);
3826  // FPU is not available, do manual single to double conversion.
3827 
3828  // a2: floating point value (binary32).
3829  // v0: heap number for result
3830 
3831  // Extract mantissa to t4.
3832  __ And(t4, value, Operand(kBinary32MantissaMask));
3833 
3834  // Extract exponent to t5.
3835  __ srl(t5, value, kBinary32MantissaBits);
3836  __ And(t5, t5, Operand(kBinary32ExponentMask >> kBinary32MantissaBits));
3837 
3838  Label exponent_rebiased;
3839  __ Branch(&exponent_rebiased, eq, t5, Operand(zero_reg));
3840 
3841  __ li(t0, 0x7ff);
3842  __ Xor(t1, t5, Operand(0xFF));
3843  __ Movz(t5, t0, t1); // Set t5 to 0x7ff only if t5 is equal to 0xff.
3844  __ Branch(&exponent_rebiased, eq, t1, Operand(zero_reg));
3845 
3846  // Rebias exponent.
3847  __ Addu(t5,
3848  t5,
3849  Operand(-kBinary32ExponentBias + HeapNumber::kExponentBias));
3850 
3851  __ bind(&exponent_rebiased);
3852  __ And(a2, value, Operand(kBinary32SignMask));
3853  value = no_reg;
3854  __ sll(t0, t5, HeapNumber::kMantissaBitsInTopWord);
3855  __ or_(a2, a2, t0);
3856 
3857  // Shift mantissa.
3858  static const int kMantissaShiftForHiWord =
3859  kBinary32MantissaBits - HeapNumber::kMantissaBitsInTopWord;
3860 
3861  static const int kMantissaShiftForLoWord =
3862  kBitsPerInt - kMantissaShiftForHiWord;
3863 
3864  __ srl(t0, t4, kMantissaShiftForHiWord);
3865  __ or_(a2, a2, t0);
3866  __ sll(a0, t4, kMantissaShiftForLoWord);
3867 
3868  __ sw(a0, FieldMemOperand(v0, HeapNumber::kMantissaOffset));
3869  __ sw(a2, FieldMemOperand(v0, HeapNumber::kExponentOffset));
3870  __ Ret();
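 // A compact C sketch of the widening done above (illustrative only;
 // assumes IEEE-754 binary32/binary64 and 32-bit words). Subnormal inputs
 // keep a zero exponent, mirroring the stub:
 //
 //   uint64_t Float32BitsToFloat64Bits(uint32_t f) {
 //     uint64_t sign = (uint64_t)(f >> 31) << 63;
 //     uint32_t exp  = (f >> 23) & 0xFF;
 //     uint64_t man  = (uint64_t)(f & 0x007FFFFF);
 //     uint64_t e64  = (exp == 0)    ? 0                  // zero/subnormal
 //                   : (exp == 0xFF) ? 0x7FF              // Inf/NaN
 //                                   : exp - 127 + 1023;  // rebias
 //     return sign | (e64 << 52) | (man << 29);
 //   }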
3871  }
3872 
3873  } else if (elements_kind == EXTERNAL_DOUBLE_ELEMENTS) {
3874  if (CpuFeatures::IsSupported(FPU)) {
3875  CpuFeatures::Scope scope(FPU);
3876  // Allocate a HeapNumber for the result. Don't use a0 and a1 as
3877  // AllocateHeapNumber clobbers all registers - also when jumping due to
3878  // exhausted young space.
3879  __ LoadRoot(t6, Heap::kHeapNumberMapRootIndex);
3880  __ AllocateHeapNumber(v0, t3, t5, t6, &slow);
3881  // The double value is already in f0.
3882  __ sdc1(f0, FieldMemOperand(v0, HeapNumber::kValueOffset));
3883  __ Ret();
3884  } else {
3885  // Allocate a HeapNumber for the result. Don't use a0 and a1 as
3886  // AllocateHeapNumber clobbers all registers - also when jumping due to
3887  // exhausted young space.
3888  __ LoadRoot(t6, Heap::kHeapNumberMapRootIndex);
3889  __ AllocateHeapNumber(v0, t3, t5, t6, &slow);
3890 
3891  __ sw(a2, FieldMemOperand(v0, HeapNumber::kMantissaOffset));
3892  __ sw(a3, FieldMemOperand(v0, HeapNumber::kExponentOffset));
3893  __ Ret();
3894  }
3895 
3896  } else {
3897  // Tag integer as smi and return it.
3898  __ sll(v0, value, kSmiTagSize);
3899  __ Ret();
3900  }
3901 
3902  // Slow case, key and receiver still in a0 and a1.
3903  __ bind(&slow);
3904  __ IncrementCounter(
3905  masm->isolate()->counters()->keyed_load_external_array_slow(),
3906  1, a2, a3);
3907 
3908  // ---------- S t a t e --------------
3909  // -- ra : return address
3910  // -- a0 : key
3911  // -- a1 : receiver
3912  // -----------------------------------
3913 
3914  __ Push(a1, a0);
3915 
3916  __ TailCallRuntime(Runtime::kKeyedGetProperty, 2, 1);
3917 
3918  __ bind(&miss_force_generic);
3919  Handle<Code> stub =
3920  masm->isolate()->builtins()->KeyedLoadIC_MissForceGeneric();
3921  __ Jump(stub, RelocInfo::CODE_TARGET);
3922 }
3923 
3924 
3925 void KeyedStoreStubCompiler::GenerateStoreExternalArray(
3926  MacroAssembler* masm,
3927  ElementsKind elements_kind) {
3928  // ---------- S t a t e --------------
3929  // -- a0 : value
3930  // -- a1 : key
3931  // -- a2 : receiver
3932  // -- ra : return address
3933  // -----------------------------------
3934 
3935  Label slow, check_heap_number, miss_force_generic;
3936 
3937  // Register usage.
3938  Register value = a0;
3939  Register key = a1;
3940  Register receiver = a2;
3941  // a3 mostly holds the elements array or the destination external array.
3942 
3943  // This stub is meant to be tail-jumped to, the receiver must already
3944  // have been verified by the caller to not be a smi.
3945 
3946  // Check that the key is a smi or a heap number convertible to a smi.
3947  GenerateSmiKeyCheck(masm, key, t0, t1, f2, &miss_force_generic);
3948 
3949  __ lw(a3, FieldMemOperand(receiver, JSObject::kElementsOffset));
3950 
3951  // Check that the index is in range.
3952  __ lw(t1, FieldMemOperand(a3, ExternalArray::kLengthOffset));
3953  // Unsigned comparison catches both negative and too-large values.
3954  __ Branch(&miss_force_generic, Ugreater_equal, key, Operand(t1));
3955 
3956  // Handle both smis and HeapNumbers in the fast path. Go to the
3957  // runtime for all other kinds of values.
3958  // a3: external array.
3959 
3960  if (elements_kind == EXTERNAL_PIXEL_ELEMENTS) {
3961  // Double to pixel conversion is only implemented in the runtime for now.
3962  __ JumpIfNotSmi(value, &slow);
3963  } else {
3964  __ JumpIfNotSmi(value, &check_heap_number);
3965  }
3966  __ SmiUntag(t1, value);
3967  __ lw(a3, FieldMemOperand(a3, ExternalArray::kExternalPointerOffset));
3968 
3969  // a3: base pointer of external storage.
3970  // t1: value (integer).
3971 
3972  switch (elements_kind) {
3973  case EXTERNAL_PIXEL_ELEMENTS: {
3974  // Clamp the value to [0..255].
3975  // v0 is used as a scratch register here.
3976  Label done;
3977  __ li(v0, Operand(255));
3978  // Normal branch: nop in delay slot.
3979  __ Branch(&done, gt, t1, Operand(v0));
3980  // Use delay slot in this branch.
3981  __ Branch(USE_DELAY_SLOT, &done, lt, t1, Operand(zero_reg));
3982  __ mov(v0, zero_reg); // In delay slot.
3983  __ mov(v0, t1); // Value is in range 0..255.
3984  __ bind(&done);
3985  __ mov(t1, v0);
3986 
3987  __ srl(t8, key, 1);
3988  __ addu(t8, a3, t8);
3989  __ sb(t1, MemOperand(t8, 0));
3990  }
3991  break;
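 // Clamp sketch (illustrative): pixel stores saturate to [0, 255], matching
 // the two-branch sequence above.
 //
 //   uint8_t ClampToUint8(int32_t v) {
 //     return v < 0 ? 0 : (v > 255 ? 255 : (uint8_t)v);
 //   }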
3992  case EXTERNAL_BYTE_ELEMENTS:
3993  case EXTERNAL_UNSIGNED_BYTE_ELEMENTS:
3994  __ srl(t8, key, 1);
3995  __ addu(t8, a3, t8);
3996  __ sb(t1, MemOperand(t8, 0));
3997  break;
3998  case EXTERNAL_SHORT_ELEMENTS:
3999  case EXTERNAL_UNSIGNED_SHORT_ELEMENTS:
4000  __ addu(t8, a3, key);
4001  __ sh(t1, MemOperand(t8, 0));
4002  break;
4003  case EXTERNAL_INT_ELEMENTS:
4004  case EXTERNAL_UNSIGNED_INT_ELEMENTS:
4005  __ sll(t8, key, 1);
4006  __ addu(t8, a3, t8);
4007  __ sw(t1, MemOperand(t8, 0));
4008  break;
4009  case EXTERNAL_FLOAT_ELEMENTS:
4010  // Perform int-to-float conversion and store to memory.
4011  __ SmiUntag(t0, key);
4012  StoreIntAsFloat(masm, a3, t0, t1, t2, t3, t4);
4013  break;
4014  case EXTERNAL_DOUBLE_ELEMENTS:
4015  __ sll(t8, key, 2);
4016  __ addu(a3, a3, t8);
4017  // a3: effective address of the double element
4018  FloatingPointHelper::Destination destination;
4019  if (CpuFeatures::IsSupported(FPU)) {
4020  destination = FloatingPointHelper::kFPURegisters;
4021  } else {
4022  destination = FloatingPointHelper::kCoreRegisters;
4023  }
4024  FloatingPointHelper::ConvertIntToDouble(
4025  masm, t1, destination,
4026  f0, t2, t3, // These are: double_dst, dst1, dst2.
4027  t0, f2); // These are: scratch2, single_scratch.
4028  if (destination == FloatingPointHelper::kFPURegisters) {
4029  CpuFeatures::Scope scope(FPU);
4030  __ sdc1(f0, MemOperand(a3, 0));
4031  } else {
4032  __ sw(t2, MemOperand(a3, 0));
4033  __ sw(t3, MemOperand(a3, Register::kSizeInBytes));
4034  }
4035  break;
4036  case FAST_ELEMENTS:
4037  case FAST_SMI_ELEMENTS:
4038  case FAST_DOUBLE_ELEMENTS:
4039  case FAST_HOLEY_ELEMENTS:
4040  case FAST_HOLEY_SMI_ELEMENTS:
4041  case FAST_HOLEY_DOUBLE_ELEMENTS:
4042  case DICTIONARY_ELEMENTS:
4043  case NON_STRICT_ARGUMENTS_ELEMENTS:
4044  UNREACHABLE();
4045  break;
4046  }
4047 
4048  // Entry registers are intact, a0 holds the value which is the return value.
4049  __ mov(v0, a0);
4050  __ Ret();
4051 
4052  if (elements_kind != EXTERNAL_PIXEL_ELEMENTS) {
4053  // a3: external array.
4054  __ bind(&check_heap_number);
4055  __ GetObjectType(value, t1, t2);
4056  __ Branch(&slow, ne, t2, Operand(HEAP_NUMBER_TYPE));
4057 
4057 
4058  __ lw(a3, FieldMemOperand(a3, ExternalArray::kExternalPointerOffset));
4059 
4060  // a3: base pointer of external storage.
4061 
4062  // The WebGL specification leaves the behavior of storing NaN and
4063  // +/-Infinity into integer arrays basically undefined. For more
4064  // reproducible behavior, convert these to zero.
4065 
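 // Rough semantics of the truncating store below (illustrative; the real
 // EmitECMATruncate covers the full double range without C casts):
 //
 //   int32_t EcmaToInt32(double d) {
 //     if (isnan(d) || isinf(d)) return 0;      // non-finite -> 0
 //     return (int32_t)(uint32_t)(int64_t)d;    // truncate, then mod 2^32
 //   }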
4066  if (CpuFeatures::IsSupported(FPU)) {
4067  CpuFeatures::Scope scope(FPU);
4068 
4069  __ ldc1(f0, FieldMemOperand(a0, HeapNumber::kValueOffset));
4070 
4071  if (elements_kind == EXTERNAL_FLOAT_ELEMENTS) {
4072  __ cvt_s_d(f0, f0);
4073  __ sll(t8, key, 1);
4074  __ addu(t8, a3, t8);
4075  __ swc1(f0, MemOperand(t8, 0));
4076  } else if (elements_kind == EXTERNAL_DOUBLE_ELEMENTS) {
4077  __ sll(t8, key, 2);
4078  __ addu(t8, a3, t8);
4079  __ sdc1(f0, MemOperand(t8, 0));
4080  } else {
4081  __ EmitECMATruncate(t3, f0, f2, t2, t1, t5);
4082 
4083  switch (elements_kind) {
4084  case EXTERNAL_BYTE_ELEMENTS:
4085  case EXTERNAL_UNSIGNED_BYTE_ELEMENTS:
4086  __ srl(t8, key, 1);
4087  __ addu(t8, a3, t8);
4088  __ sb(t3, MemOperand(t8, 0));
4089  break;
4090  case EXTERNAL_SHORT_ELEMENTS:
4091  case EXTERNAL_UNSIGNED_SHORT_ELEMENTS:
4092  __ addu(t8, a3, key);
4093  __ sh(t3, MemOperand(t8, 0));
4094  break;
4095  case EXTERNAL_INT_ELEMENTS:
4096  case EXTERNAL_UNSIGNED_INT_ELEMENTS:
4097  __ sll(t8, key, 1);
4098  __ addu(t8, a3, t8);
4099  __ sw(t3, MemOperand(t8, 0));
4100  break;
4101  case EXTERNAL_PIXEL_ELEMENTS:
4102  case EXTERNAL_FLOAT_ELEMENTS:
4103  case EXTERNAL_DOUBLE_ELEMENTS:
4104  case FAST_ELEMENTS:
4105  case FAST_SMI_ELEMENTS:
4106  case FAST_DOUBLE_ELEMENTS:
4107  case FAST_HOLEY_ELEMENTS:
4108  case FAST_HOLEY_SMI_ELEMENTS:
4109  case FAST_HOLEY_DOUBLE_ELEMENTS:
4110  case DICTIONARY_ELEMENTS:
4111  case NON_STRICT_ARGUMENTS_ELEMENTS:
4112  UNREACHABLE();
4113  break;
4114  }
4115  }
4116 
4117  // Entry registers are intact, a0 holds the value
4118  // which is the return value.
4119  __ mov(v0, a0);
4120  __ Ret();
4121  } else {
4122  // FPU is not available, do manual conversions.
4123 
4124  __ lw(t3, FieldMemOperand(value, HeapNumber::kExponentOffset));
4125  __ lw(t4, FieldMemOperand(value, HeapNumber::kMantissaOffset));
4126 
4127  if (elements_kind == EXTERNAL_FLOAT_ELEMENTS) {
4128  Label done, nan_or_infinity_or_zero;
4129  static const int kMantissaInHiWordShift =
4130  kBinary32MantissaBits - HeapNumber::kMantissaBitsInTopWord;
4131 
4132  static const int kMantissaInLoWordShift =
4133  kBitsPerInt - kMantissaInHiWordShift;
4134 
4135  // Test for all special exponent values: zeros, subnormal numbers, NaNs
4136  // and infinities. All these should be converted to 0.
4137  __ li(t5, HeapNumber::kExponentMask);
4138  __ and_(t6, t3, t5);
4139  __ Branch(&nan_or_infinity_or_zero, eq, t6, Operand(zero_reg));
4140 
4141  __ xor_(t1, t6, t5);
4142  __ li(t2, kBinary32ExponentMask);
4143  __ Movz(t6, t2, t1); // Only if t6 is equal to t5.
4144  __ Branch(&nan_or_infinity_or_zero, eq, t1, Operand(zero_reg));
4145 
4146  // Rebias exponent.
4147  __ srl(t6, t6, HeapNumber::kExponentShift);
4148  __ Addu(t6,
4149  t6,
4150  Operand(kBinary32ExponentBias - HeapNumber::kExponentBias));
4151 
4152  __ li(t1, Operand(kBinary32MaxExponent));
4153  __ Slt(t1, t1, t6);
4154  __ And(t2, t3, Operand(HeapNumber::kSignMask));
4155  __ Or(t2, t2, Operand(kBinary32ExponentMask));
4156  __ Movn(t3, t2, t1); // Only if t6 is gt kBinary32MaxExponent.
4157  __ Branch(&done, gt, t6, Operand(kBinary32MaxExponent));
4158 
4159  __ Slt(t1, t6, Operand(kBinary32MinExponent));
4160  __ And(t2, t3, Operand(HeapNumber::kSignMask));
4161  __ Movn(t3, t2, t1); // Only if t6 is lt kBinary32MinExponent.
4162  __ Branch(&done, lt, t6, Operand(kBinary32MinExponent));
4163 
4164  __ And(t7, t3, Operand(HeapNumber::kSignMask));
4165  __ And(t3, t3, Operand(HeapNumber::kMantissaMask));
4166  __ sll(t3, t3, kMantissaInHiWordShift);
4167  __ or_(t7, t7, t3);
4168  __ srl(t4, t4, kMantissaInLoWordShift);
4169  __ or_(t7, t7, t4);
4170  __ sll(t6, t6, kBinary32ExponentShift);
4171  __ or_(t3, t7, t6);
4172 
4173  __ bind(&done);
4174  __ sll(t9, key, 1);
4175  __ addu(t9, a3, t9);
4176  __ sw(t3, MemOperand(t9, 0));
4177 
4178  // Entry registers are intact, a0 holds the value which is the return
4179  // value.
4180  __ mov(v0, a0);
4181  __ Ret();
4182 
4183  __ bind(&nan_or_infinity_or_zero);
4184  __ And(t7, t3, Operand(HeapNumber::kSignMask));
4185  __ And(t3, t3, Operand(HeapNumber::kMantissaMask));
4186  __ or_(t6, t6, t7);
4187  __ sll(t3, t3, kMantissaInHiWordShift);
4188  __ or_(t6, t6, t3);
4189  __ srl(t4, t4, kMantissaInLoWordShift);
4190  __ or_(t3, t6, t4);
4191  __ Branch(&done);
4192  } else if (elements_kind == EXTERNAL_DOUBLE_ELEMENTS) {
4193  __ sll(t8, key, 2);
4194  __ addu(t8, a3, t8);
4195  // t8: effective address of destination element.
4196  __ sw(t4, MemOperand(t8, 0));
4197  __ sw(t3, MemOperand(t8, Register::kSizeInBytes));
4198  __ mov(v0, a0);
4199  __ Ret();
4200  } else {
4201  bool is_signed_type = IsElementTypeSigned(elements_kind);
4202  int meaningful_bits = is_signed_type ? (kBitsPerInt - 1) : kBitsPerInt;
4203  int32_t min_value = is_signed_type ? 0x80000000 : 0x00000000;
4204 
4205  Label done, sign;
4206 
4207  // Test for all special exponent values: zeros, subnormal numbers, NaNs
4208  // and infinities. All these should be converted to 0.
4209  __ li(t5, HeapNumber::kExponentMask);
4210  __ and_(t6, t3, t5);
4211  __ Movz(t3, zero_reg, t6); // Only if t6 is equal to zero.
4212  __ Branch(&done, eq, t6, Operand(zero_reg));
4213 
4214  __ xor_(t2, t6, t5);
4215  __ Movz(t3, zero_reg, t2); // Only if t6 is equal to t5.
4216  __ Branch(&done, eq, t6, Operand(t5));
4217 
4218  // Unbias exponent.
4219  __ srl(t6, t6, HeapNumber::kExponentShift);
4220  __ Subu(t6, t6, Operand(HeapNumber::kExponentBias));
4221  // If exponent is negative then result is 0.
4222  __ slt(t2, t6, zero_reg);
4223  __ Movn(t3, zero_reg, t2); // Only if exponent is negative.
4224  __ Branch(&done, lt, t6, Operand(zero_reg));
4225 
4226  // If exponent is too big then result is minimal value.
4227  __ slti(t1, t6, meaningful_bits - 1);
4228  __ li(t2, min_value);
4229  __ Movz(t3, t2, t1); // Only if t6 is ge meaningful_bits - 1.
4230  __ Branch(&done, ge, t6, Operand(meaningful_bits - 1));
4231 
4232  __ And(t5, t3, Operand(HeapNumber::kSignMask));
4233  __ And(t3, t3, Operand(HeapNumber::kMantissaMask));
4234  __ Or(t3, t3, Operand(1u << HeapNumber::kMantissaBitsInTopWord));
4235 
4236  __ li(t9, HeapNumber::kMantissaBitsInTopWord);
4237  __ subu(t6, t9, t6);
4238  __ slt(t1, t6, zero_reg);
4239  __ srlv(t2, t3, t6);
4240  __ Movz(t3, t2, t1); // Only if t6 is positive.
4241  __ Branch(&sign, ge, t6, Operand(zero_reg));
4242 
4243  __ subu(t6, zero_reg, t6);
4244  __ sllv(t3, t3, t6);
4245  __ li(t9, meaningful_bits);
4246  __ subu(t6, t9, t6);
4247  __ srlv(t4, t4, t6);
4248  __ or_(t3, t3, t4);
4249 
4250  __ bind(&sign);
4251  __ subu(t2, t3, zero_reg);
4252  __ Movz(t3, t2, t5); // Only if t5 is zero.
4253 
4254  __ bind(&done);
4255 
4256  // Result is in t3.
4257  // This switch block should be exactly the same as above (FPU mode).
4258  switch (elements_kind) {
4261  __ srl(t8, key, 1);
4262  __ addu(t8, a3, t8);
4263  __ sb(t3, MemOperand(t8, 0));
4264  break;
4267  __ addu(t8, a3, key);
4268  __ sh(t3, MemOperand(t8, 0));
4269  break;
4270  case EXTERNAL_INT_ELEMENTS:
4272  __ sll(t8, key, 1);
4273  __ addu(t8, a3, t8);
4274  __ sw(t3, MemOperand(t8, 0));
4275  break;
4279  case FAST_ELEMENTS:
4280  case FAST_SMI_ELEMENTS:
4281  case FAST_DOUBLE_ELEMENTS:
4282  case FAST_HOLEY_ELEMENTS:
4285  case DICTIONARY_ELEMENTS:
4287  UNREACHABLE();
4288  break;
4289  }
4290  }
4291  }
4292  }
4293 
4294  // Slow case, key and receiver still in a0 and a1.
4295  __ bind(&slow);
4296  __ IncrementCounter(
4297  masm->isolate()->counters()->keyed_load_external_array_slow(),
4298  1, a2, a3);
4299  // Entry registers are intact.
4300  // ---------- S t a t e --------------
4301  // -- ra : return address
4302  // -- a0 : key
4303  // -- a1 : receiver
4304  // -----------------------------------
4305  Handle<Code> slow_ic =
4306  masm->isolate()->builtins()->KeyedStoreIC_Slow();
4307  __ Jump(slow_ic, RelocInfo::CODE_TARGET);
4308 
4309  // Miss case, call the runtime.
4310  __ bind(&miss_force_generic);
4311 
4312  // ---------- S t a t e --------------
4313  // -- ra : return address
4314  // -- a0 : key
4315  // -- a1 : receiver
4316  // -----------------------------------
4317 
4318  Handle<Code> miss_ic =
4319  masm->isolate()->builtins()->KeyedStoreIC_MissForceGeneric();
4320  __ Jump(miss_ic, RelocInfo::CODE_TARGET);
4321 }
4322 
4323 
4324 void KeyedLoadStubCompiler::GenerateLoadFastElement(MacroAssembler* masm) {
4325  // ----------- S t a t e -------------
4326  // -- ra : return address
4327  // -- a0 : key
4328  // -- a1 : receiver
4329  // -----------------------------------
4330  Label miss_force_generic;
4331 
4332  // This stub is meant to be tail-jumped to, the receiver must already
4333  // have been verified by the caller to not be a smi.
4334 
4335  // Check that the key is a smi or a heap number convertible to a smi.
4336  GenerateSmiKeyCheck(masm, a0, t0, t1, f2, &miss_force_generic);
4337 
4338  // Get the elements array.
4339  __ lw(a2, FieldMemOperand(a1, JSObject::kElementsOffset));
4340  __ AssertFastElements(a2);
4341 
4342  // Check that the key is within bounds.
4343  __ lw(a3, FieldMemOperand(a2, FixedArray::kLengthOffset));
4344  __ Branch(USE_DELAY_SLOT, &miss_force_generic, hs, a0, Operand(a3));
4345 
4346  // Load the result and make sure it's not the hole.
4347  __ Addu(a3, a2, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
4348  STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize < kPointerSizeLog2);
4349  __ sll(t0, a0, kPointerSizeLog2 - kSmiTagSize);
4350  __ Addu(t0, t0, a3);
4351  __ lw(t0, MemOperand(t0));
4352  __ LoadRoot(t1, Heap::kTheHoleValueRootIndex);
4353  __ Branch(&miss_force_generic, eq, t0, Operand(t1));
4354  __ Ret(USE_DELAY_SLOT);
4355  __ mov(v0, t0);
4356 
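 // Address computation sketch (illustrative), with kSmiTagSize == 1 and
 // kPointerSizeLog2 == 2 on MIPS32:
 //   slot = elements + (FixedArray::kHeaderSize - kHeapObjectTag)
 //        + (key_smi << (kPointerSizeLog2 - kSmiTagSize));  // index * 4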
4357  __ bind(&miss_force_generic);
4358  Handle<Code> stub =
4359  masm->isolate()->builtins()->KeyedLoadIC_MissForceGeneric();
4360  __ Jump(stub, RelocInfo::CODE_TARGET);
4361 }
4362 
4363 
4364 void KeyedLoadStubCompiler::GenerateLoadFastDoubleElement(
4365  MacroAssembler* masm) {
4366  // ----------- S t a t e -------------
4367  // -- ra : return address
4368  // -- a0 : key
4369  // -- a1 : receiver
4370  // -----------------------------------
4371  Label miss_force_generic, slow_allocate_heapnumber;
4372 
4373  Register key_reg = a0;
4374  Register receiver_reg = a1;
4375  Register elements_reg = a2;
4376  Register heap_number_reg = a2;
4377  Register indexed_double_offset = a3;
4378  Register scratch = t0;
4379  Register scratch2 = t1;
4380  Register scratch3 = t2;
4381  Register heap_number_map = t3;
4382 
4383  // This stub is meant to be tail-jumped to, the receiver must already
4384  // have been verified by the caller to not be a smi.
4385 
4386  // Check that the key is a smi or a heap number convertible to a smi.
4387  GenerateSmiKeyCheck(masm, key_reg, t0, t1, f2, &miss_force_generic);
4388 
4389  // Get the elements array.
4390  __ lw(elements_reg,
4391  FieldMemOperand(receiver_reg, JSObject::kElementsOffset));
4392 
4393  // Check that the key is within bounds.
4394  __ lw(scratch, FieldMemOperand(elements_reg, FixedArray::kLengthOffset));
4395  __ Branch(&miss_force_generic, hs, key_reg, Operand(scratch));
4396 
4397  // Load the upper word of the double in the fixed array and test for NaN.
4398  __ sll(scratch2, key_reg, kDoubleSizeLog2 - kSmiTagSize);
4399  __ Addu(indexed_double_offset, elements_reg, Operand(scratch2));
4400  uint32_t upper_32_offset = FixedArray::kHeaderSize + sizeof(kHoleNanLower32);
4401  __ lw(scratch, FieldMemOperand(indexed_double_offset, upper_32_offset));
4402  __ Branch(&miss_force_generic, eq, scratch, Operand(kHoleNanUpper32));
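 // Illustrative: the hole is encoded as a NaN with a fixed bit pattern, so
 // inspecting only the upper 32 bits of the element is sufficient.
 //
 //   bool IsTheHole(uint32_t upper_word) {
 //     return upper_word == kHoleNanUpper32;
 //   }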
4403 
4404  // Non-NaN. Allocate a new heap number and copy the double value into it.
4405  __ LoadRoot(heap_number_map, Heap::kHeapNumberMapRootIndex);
4406  __ AllocateHeapNumber(heap_number_reg, scratch2, scratch3,
4407  heap_number_map, &slow_allocate_heapnumber);
4408 
4409  // Don't need to reload the upper 32 bits of the double, it's already in
4410  // scratch.
4411  __ sw(scratch, FieldMemOperand(heap_number_reg,
4412  HeapNumber::kExponentOffset));
4413  __ lw(scratch, FieldMemOperand(indexed_double_offset,
4414  FixedArray::kHeaderSize));
4415  __ sw(scratch, FieldMemOperand(heap_number_reg,
4416  HeapNumber::kMantissaOffset));
4417 
4418  __ mov(v0, heap_number_reg);
4419  __ Ret();
4420 
4421  __ bind(&slow_allocate_heapnumber);
4422  Handle<Code> slow_ic =
4423  masm->isolate()->builtins()->KeyedLoadIC_Slow();
4424  __ Jump(slow_ic, RelocInfo::CODE_TARGET);
4425 
4426  __ bind(&miss_force_generic);
4427  Handle<Code> miss_ic =
4428  masm->isolate()->builtins()->KeyedLoadIC_MissForceGeneric();
4429  __ Jump(miss_ic, RelocInfo::CODE_TARGET);
4430 }
4431 
4432 
4433 void KeyedStoreStubCompiler::GenerateStoreFastElement(
4434  MacroAssembler* masm,
4435  bool is_js_array,
4436  ElementsKind elements_kind,
4437  KeyedAccessGrowMode grow_mode) {
4438  // ----------- S t a t e -------------
4439  // -- a0 : value
4440  // -- a1 : key
4441  // -- a2 : receiver
4442  // -- ra : return address
4443  // -- a3 : scratch (elements)
4444  // -- t0 : scratch
4445  // -----------------------------------
4446  Label miss_force_generic, transition_elements_kind, grow, slow;
4447  Label finish_store, check_capacity;
4448 
4449  Register value_reg = a0;
4450  Register key_reg = a1;
4451  Register receiver_reg = a2;
4452  Register scratch = t0;
4453  Register elements_reg = a3;
4454  Register length_reg = t1;
4455  Register scratch2 = t2;
4456 
4457  // This stub is meant to be tail-jumped to, the receiver must already
4458  // have been verified by the caller to not be a smi.
4459 
4460  // Check that the key is a smi or a heap number convertible to a smi.
4461  GenerateSmiKeyCheck(masm, key_reg, t0, t1, f2, &miss_force_generic);
4462 
4463  if (IsFastSmiElementsKind(elements_kind)) {
4464  __ JumpIfNotSmi(value_reg, &transition_elements_kind);
4465  }
4466 
4467  // Check that the key is within bounds.
4468  __ lw(elements_reg,
4469  FieldMemOperand(receiver_reg, JSObject::kElementsOffset));
4470  if (is_js_array) {
4471  __ lw(scratch, FieldMemOperand(receiver_reg, JSArray::kLengthOffset));
4472  } else {
4473  __ lw(scratch, FieldMemOperand(elements_reg, FixedArray::kLengthOffset));
4474  }
4475  // Compare smis.
4476  if (is_js_array && grow_mode == ALLOW_JSARRAY_GROWTH) {
4477  __ Branch(&grow, hs, key_reg, Operand(scratch));
4478  } else {
4479  __ Branch(&miss_force_generic, hs, key_reg, Operand(scratch));
4480  }
4481 
4482  // Make sure elements is a fast element array, not 'cow'.
4483  __ CheckMap(elements_reg,
4484  scratch,
4485  Heap::kFixedArrayMapRootIndex,
4486  &miss_force_generic,
4487  DONT_DO_SMI_CHECK);
4488 
4489  __ bind(&finish_store);
4490 
4491  if (IsFastSmiElementsKind(elements_kind)) {
4492  __ Addu(scratch,
4493  elements_reg,
4494  Operand(FixedArray::kHeaderSize - kHeapObjectTag));
4495  STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize < kPointerSizeLog2);
4496  __ sll(scratch2, key_reg, kPointerSizeLog2 - kSmiTagSize);
4497  __ Addu(scratch, scratch, scratch2);
4498  __ sw(value_reg, MemOperand(scratch));
4499  } else {
4500  ASSERT(IsFastObjectElementsKind(elements_kind));
4501  __ Addu(scratch,
4502  elements_reg,
4503  Operand(FixedArray::kHeaderSize - kHeapObjectTag));
4504  STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize < kPointerSizeLog2);
4505  __ sll(scratch2, key_reg, kPointerSizeLog2 - kSmiTagSize);
4506  __ Addu(scratch, scratch, scratch2);
4507  __ sw(value_reg, MemOperand(scratch));
4508  __ mov(receiver_reg, value_reg);
4509  __ RecordWrite(elements_reg, // Object.
4510  scratch, // Address.
4511  receiver_reg, // Value.
4512  kRAHasNotBeenSaved,
4513  kDontSaveFPRegs);
4514  }
4515  // value_reg (a0) is preserved.
4516  // Done.
4517  __ Ret();
4518 
4519  __ bind(&miss_force_generic);
4520  Handle<Code> ic =
4521  masm->isolate()->builtins()->KeyedStoreIC_MissForceGeneric();
4522  __ Jump(ic, RelocInfo::CODE_TARGET);
4523 
4524  __ bind(&transition_elements_kind);
4525  Handle<Code> ic_miss = masm->isolate()->builtins()->KeyedStoreIC_Miss();
4526  __ Jump(ic_miss, RelocInfo::CODE_TARGET);
4527 
4528  if (is_js_array && grow_mode == ALLOW_JSARRAY_GROWTH) {
4529  // Grow the array by a single element if possible.
4530  __ bind(&grow);
4531 
4532  // Make sure the array is only growing by a single element; anything else
4533  // must be handled by the runtime.
4534  __ Branch(&miss_force_generic, ne, key_reg, Operand(scratch));
4535 
4536  // Check for the empty array, and preallocate a small backing store if
4537  // possible.
4538  __ lw(length_reg,
4539  FieldMemOperand(receiver_reg, JSArray::kLengthOffset));
4540  __ lw(elements_reg,
4541  FieldMemOperand(receiver_reg, JSObject::kElementsOffset));
4542  __ LoadRoot(at, Heap::kEmptyFixedArrayRootIndex);
4543  __ Branch(&check_capacity, ne, elements_reg, Operand(at));
4544 
4545  int size = FixedArray::SizeFor(JSArray::kPreallocatedArrayElements);
4546  __ AllocateInNewSpace(size, elements_reg, scratch, scratch2, &slow,
4547  TAG_OBJECT);
4548 
4549  __ LoadRoot(scratch, Heap::kFixedArrayMapRootIndex);
4550  __ sw(scratch, FieldMemOperand(elements_reg, JSObject::kMapOffset));
4551  __ li(scratch, Operand(Smi::FromInt(JSArray::kPreallocatedArrayElements)));
4552  __ sw(scratch, FieldMemOperand(elements_reg, FixedArray::kLengthOffset));
4553  __ LoadRoot(scratch, Heap::kTheHoleValueRootIndex);
4554  for (int i = 1; i < JSArray::kPreallocatedArrayElements; ++i) {
4555  __ sw(scratch, FieldMemOperand(elements_reg, FixedArray::SizeFor(i)));
4556  }
4557 
4558  // Store the element at index zero.
4559  __ sw(value_reg, FieldMemOperand(elements_reg, FixedArray::SizeFor(0)));
4560 
4561  // Install the new backing store in the JSArray.
4562  __ sw(elements_reg,
4563  FieldMemOperand(receiver_reg, JSObject::kElementsOffset));
4564  __ RecordWriteField(receiver_reg, JSObject::kElementsOffset, elements_reg,
4565  scratch, kRAHasNotBeenSaved, kDontSaveFPRegs,
4566  EMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
4567 
4568  // Increment the length of the array.
4569  __ li(length_reg, Operand(Smi::FromInt(1)));
4570  __ sw(length_reg, FieldMemOperand(receiver_reg, JSArray::kLengthOffset));
4571  __ Ret();
4572 
4573  __ bind(&check_capacity);
4574  // Check for COW elements; in general they are not handled by this stub.
4575  __ CheckMap(elements_reg,
4576  scratch,
4577  Heap::kFixedCOWArrayMapRootIndex,
4578  &miss_force_generic,
4579  DONT_DO_SMI_CHECK);
4580 
4581  __ lw(scratch, FieldMemOperand(elements_reg, FixedArray::kLengthOffset));
4582  __ Branch(&slow, hs, length_reg, Operand(scratch));
4583 
4584  // Grow the array and finish the store.
4585  __ Addu(length_reg, length_reg, Operand(Smi::FromInt(1)));
4586  __ sw(length_reg, FieldMemOperand(receiver_reg, JSArray::kLengthOffset));
4587  __ jmp(&finish_store);
4588 
4589  __ bind(&slow);
4590  Handle<Code> ic_slow = masm->isolate()->builtins()->KeyedStoreIC_Slow();
4591  __ Jump(ic_slow, RelocInfo::CODE_TARGET);
4592  }
4593 }
4594 
4595 
4596 void KeyedStoreStubCompiler::GenerateStoreFastDoubleElement(
4597  MacroAssembler* masm,
4598  bool is_js_array,
4599  KeyedAccessGrowMode grow_mode) {
4600  // ----------- S t a t e -------------
4601  // -- a0 : value
4602  // -- a1 : key
4603  // -- a2 : receiver
4604  // -- ra : return address
4605  // -- a3 : scratch
4606  // -- t0 : scratch (elements_reg)
4607  // -- t1 : scratch (mantissa_reg)
4608  // -- t2 : scratch (exponent_reg)
4609  // -- t3 : scratch4
4610  // -----------------------------------
4611  Label miss_force_generic, transition_elements_kind, grow, slow;
4612  Label finish_store, check_capacity;
4613 
4614  Register value_reg = a0;
4615  Register key_reg = a1;
4616  Register receiver_reg = a2;
4617  Register elements_reg = a3;
4618  Register scratch1 = t0;
4619  Register scratch2 = t1;
4620  Register scratch3 = t2;
4621  Register scratch4 = t3;
4622  Register length_reg = t3;
4623 
4624  // This stub is meant to be tail-jumped to, the receiver must already
4625  // have been verified by the caller to not be a smi.
4626 
4627  // Check that the key is a smi or a heap number convertible to a smi.
4628  GenerateSmiKeyCheck(masm, key_reg, t0, t1, f2, &miss_force_generic);
4629 
4630  __ lw(elements_reg,
4631  FieldMemOperand(receiver_reg, JSObject::kElementsOffset));
4632 
4633  // Check that the key is within bounds.
4634  if (is_js_array) {
4635  __ lw(scratch1, FieldMemOperand(receiver_reg, JSArray::kLengthOffset));
4636  } else {
4637  __ lw(scratch1,
4638  FieldMemOperand(elements_reg, FixedDoubleArray::kLengthOffset));
4639  }
4640  // Compare smis, unsigned compare catches both negative and out-of-bound
4641  // indexes.
4642  if (grow_mode == ALLOW_JSARRAY_GROWTH) {
4643  __ Branch(&grow, hs, key_reg, Operand(scratch1));
4644  } else {
4645  __ Branch(&miss_force_generic, hs, key_reg, Operand(scratch1));
4646  }
4647 
4648  __ bind(&finish_store);
4649 
4650  __ StoreNumberToDoubleElements(value_reg,
4651  key_reg,
4652  receiver_reg,
4653  elements_reg,
4654  scratch1,
4655  scratch2,
4656  scratch3,
4657  scratch4,
4658  &transition_elements_kind);
4659 
4660  __ Ret(USE_DELAY_SLOT);
4661  __ mov(v0, value_reg); // In delay slot.
4662 
4663  // Handle store cache miss, replacing the ic with the generic stub.
4664  __ bind(&miss_force_generic);
4665  Handle<Code> ic =
4666  masm->isolate()->builtins()->KeyedStoreIC_MissForceGeneric();
4667  __ Jump(ic, RelocInfo::CODE_TARGET);
4668 
4669  __ bind(&transition_elements_kind);
4670  Handle<Code> ic_miss = masm->isolate()->builtins()->KeyedStoreIC_Miss();
4671  __ Jump(ic_miss, RelocInfo::CODE_TARGET);
4672 
4673  if (is_js_array && grow_mode == ALLOW_JSARRAY_GROWTH) {
4674  // Grow the array by a single element if possible.
4675  __ bind(&grow);
4676 
4677  // Make sure the array is only growing by a single element; anything else
4678  // must be handled by the runtime.
4679  __ Branch(&miss_force_generic, ne, key_reg, Operand(scratch1));
4680 
4681  // Transition on values that can't be stored in a FixedDoubleArray.
4682  Label value_is_smi;
4683  __ JumpIfSmi(value_reg, &value_is_smi);
4684  __ lw(scratch1, FieldMemOperand(value_reg, HeapObject::kMapOffset));
4685  __ LoadRoot(at, Heap::kHeapNumberMapRootIndex);
4686  __ Branch(&transition_elements_kind, ne, scratch1, Operand(at));
4687  __ bind(&value_is_smi);
4688 
4689  // Check for the empty array, and preallocate a small backing store if
4690  // possible.
4691  __ lw(length_reg,
4692  FieldMemOperand(receiver_reg, JSArray::kLengthOffset));
4693  __ lw(elements_reg,
4695  __ LoadRoot(at, Heap::kEmptyFixedArrayRootIndex);
4696  __ Branch(&check_capacity, ne, elements_reg, Operand(at));
4697 
4698  int size = FixedDoubleArray::SizeFor(JSArray::kPreallocatedArrayElements);
4699  __ AllocateInNewSpace(size, elements_reg, scratch1, scratch2, &slow,
4700  TAG_OBJECT);
4701 
4702  // Initialize the new FixedDoubleArray. Leave elements uninitialized for
4703  // efficiency; they are guaranteed to be initialized before use.
4704  __ LoadRoot(scratch1, Heap::kFixedDoubleArrayMapRootIndex);
4705  __ sw(scratch1, FieldMemOperand(elements_reg, JSObject::kMapOffset));
4706  __ li(scratch1, Operand(Smi::FromInt(JSArray::kPreallocatedArrayElements)));
4707  __ sw(scratch1,
4708  FieldMemOperand(elements_reg, FixedDoubleArray::kLengthOffset));
4709 
4710  // Install the new backing store in the JSArray.
4711  __ sw(elements_reg,
4712  FieldMemOperand(receiver_reg, JSObject::kElementsOffset));
4713  __ RecordWriteField(receiver_reg, JSObject::kElementsOffset, elements_reg,
4714  scratch1, kRAHasNotBeenSaved, kDontSaveFPRegs,
4715  EMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
4716 
4717  // Increment the length of the array.
4718  __ li(length_reg, Operand(Smi::FromInt(1)));
4719  __ sw(length_reg, FieldMemOperand(receiver_reg, JSArray::kLengthOffset));
4720  __ lw(elements_reg,
4721  FieldMemOperand(receiver_reg, JSObject::kElementsOffset));
4722  __ jmp(&finish_store);
4723 
4724  __ bind(&check_capacity);
4725  // Make sure that the backing store can hold additional elements.
4726  __ lw(scratch1,
4727  FieldMemOperand(elements_reg, FixedDoubleArray::kLengthOffset));
4728  __ Branch(&slow, hs, length_reg, Operand(scratch1));
4729 
4730  // Grow the array and finish the store.
4731  __ Addu(length_reg, length_reg, Operand(Smi::FromInt(1)));
4732  __ sw(length_reg, FieldMemOperand(receiver_reg, JSArray::kLengthOffset));
4733  __ jmp(&finish_store);
4734 
4735  __ bind(&slow);
4736  Handle<Code> ic_slow = masm->isolate()->builtins()->KeyedStoreIC_Slow();
4737  __ Jump(ic_slow, RelocInfo::CODE_TARGET);
4738  }
4739 }
4740 
4741 
4742 #undef __
4743 
4744 } } // namespace v8::internal
4745 
4746 #endif // V8_TARGET_ARCH_MIPS
0,"max size of the old generation (in Mbytes)") DEFINE_int(max_executable_size, 0,"max size of executable memory (in Mbytes)") DEFINE_bool(gc_global, false,"always perform global GCs") DEFINE_int(gc_interval,-1,"garbage collect after <n> allocations") DEFINE_bool(trace_gc, false,"print one trace line following each garbage collection") DEFINE_bool(trace_gc_nvp, false,"print one detailed trace line in name=value format ""after each garbage collection") DEFINE_bool(print_cumulative_gc_stat, false,"print cumulative GC statistics in name=value format on exit") DEFINE_bool(trace_gc_verbose, false,"print more details following each garbage collection") DEFINE_bool(trace_fragmentation, false,"report fragmentation for old pointer and data pages") DEFINE_bool(collect_maps, true,"garbage collect maps from which no objects can be reached") DEFINE_bool(flush_code, true,"flush code that we expect not to use again before full gc") DEFINE_bool(incremental_marking, true,"use incremental marking") DEFINE_bool(incremental_marking_steps, true,"do incremental marking steps") DEFINE_bool(trace_incremental_marking, false,"trace progress of the incremental marking") DEFINE_bool(use_idle_notification, true,"Use idle notification to reduce memory footprint.") DEFINE_bool(send_idle_notification, false,"Send idle notifcation between stress runs.") DEFINE_bool(use_ic, true,"use inline caching") DEFINE_bool(native_code_counters, false,"generate extra code for manipulating stats counters") DEFINE_bool(always_compact, false,"Perform compaction on every full GC") DEFINE_bool(lazy_sweeping, true,"Use lazy sweeping for old pointer and data spaces") DEFINE_bool(never_compact, false,"Never perform compaction on full GC - testing only") DEFINE_bool(compact_code_space, true,"Compact code space on full non-incremental collections") DEFINE_bool(cleanup_code_caches_at_gc, true,"Flush inline caches prior to mark compact collection and ""flush code caches in maps during mark compact cycle.") DEFINE_int(random_seed, 0,"Default seed for initializing random generator ""(0, the default, means to use system random).") DEFINE_bool(use_verbose_printer, true,"allows verbose printing") DEFINE_bool(allow_natives_syntax, false,"allow natives syntax") DEFINE_bool(trace_sim, false,"Trace simulator execution") DEFINE_bool(check_icache, false,"Check icache flushes in ARM and MIPS simulator") DEFINE_int(stop_sim_at, 0,"Simulator stop after x number of instructions") DEFINE_int(sim_stack_alignment, 8,"Stack alingment in bytes in simulator (4 or 8, 8 is default)") DEFINE_bool(trace_exception, false,"print stack trace when throwing exceptions") DEFINE_bool(preallocate_message_memory, false,"preallocate some memory to build stack traces.") DEFINE_bool(randomize_hashes, true,"randomize hashes to avoid predictable hash collisions ""(with snapshots this option cannot override the baked-in seed)") DEFINE_int(hash_seed, 0,"Fixed seed to use to hash property keys (0 means random)""(with snapshots this option cannot override the baked-in seed)") DEFINE_bool(preemption, false,"activate a 100ms timer that switches between V8 threads") DEFINE_bool(regexp_optimization, true,"generate optimized regexp code") DEFINE_bool(testing_bool_flag, true,"testing_bool_flag") DEFINE_int(testing_int_flag, 13,"testing_int_flag") DEFINE_float(testing_float_flag, 2.5,"float-flag") DEFINE_string(testing_string_flag,"Hello, world!","string-flag") DEFINE_int(testing_prng_seed, 42,"Seed used for threading test randomness") DEFINE_string(testing_serialization_file,"/tmp/serdes","file 
in which to serialize heap") DEFINE_bool(help, false,"Print usage message, including flags, on console") DEFINE_bool(dump_counters, false,"Dump counters on exit") DEFINE_string(map_counters,"","Map counters to a file") DEFINE_args(js_arguments, JSARGUMENTS_INIT,"Pass all remaining arguments to the script. Alias for \"--\".") DEFINE_bool(debug_compile_events, true,"Enable debugger compile events") DEFINE_bool(debug_script_collected_events, true,"Enable debugger script collected events") DEFINE_bool(gdbjit, false,"enable GDBJIT interface (disables compacting GC)") DEFINE_bool(gdbjit_full, false,"enable GDBJIT interface for all code objects") DEFINE_bool(gdbjit_dump, false,"dump elf objects with debug info to disk") DEFINE_string(gdbjit_dump_filter,"","dump only objects containing this substring") DEFINE_bool(force_marking_deque_overflows, false,"force overflows of marking deque by reducing it's size ""to 64 words") DEFINE_bool(stress_compaction, false,"stress the GC compactor to flush out bugs (implies ""--force_marking_deque_overflows)")#define FLAG DEFINE_bool(enable_slow_asserts, false,"enable asserts that are slow to execute") DEFINE_bool(trace_codegen, false,"print name of functions for which code is generated") DEFINE_bool(print_source, false,"pretty print source code") DEFINE_bool(print_builtin_source, false,"pretty print source code for builtins") DEFINE_bool(print_ast, false,"print source AST") DEFINE_bool(print_builtin_ast, false,"print source AST for builtins") DEFINE_string(stop_at,"","function name where to insert a breakpoint") DEFINE_bool(print_builtin_scopes, false,"print scopes for builtins") DEFINE_bool(print_scopes, false,"print scopes") DEFINE_bool(trace_contexts, false,"trace contexts operations") DEFINE_bool(gc_greedy, false,"perform GC prior to some allocations") DEFINE_bool(gc_verbose, false,"print stuff during garbage collection") DEFINE_bool(heap_stats, false,"report heap statistics before and after GC") DEFINE_bool(code_stats, false,"report code statistics after GC") DEFINE_bool(verify_heap, false,"verify heap pointers before and after GC") DEFINE_bool(print_handles, false,"report handles after GC") DEFINE_bool(print_global_handles, false,"report global handles after GC") DEFINE_bool(trace_ic, false,"trace inline cache state transitions") DEFINE_bool(print_interfaces, false,"print interfaces") DEFINE_bool(print_interface_details, false,"print interface inference details") DEFINE_int(print_interface_depth, 5,"depth for printing interfaces") DEFINE_bool(trace_normalization, false,"prints when objects are turned into dictionaries.") DEFINE_bool(trace_lazy, false,"trace lazy compilation") DEFINE_bool(collect_heap_spill_statistics, false,"report heap spill statistics along with heap_stats ""(requires heap_stats)") DEFINE_bool(trace_isolates, false,"trace isolate state changes") DEFINE_bool(log_state_changes, false,"Log state changes.") DEFINE_bool(regexp_possessive_quantifier, false,"enable possessive quantifier syntax for testing") DEFINE_bool(trace_regexp_bytecodes, false,"trace regexp bytecode execution") DEFINE_bool(trace_regexp_assembler, false,"trace regexp macro assembler calls.")#define FLAG DEFINE_bool(log, false,"Minimal logging (no API, code, GC, suspect, or handles samples).") DEFINE_bool(log_all, false,"Log all events to the log file.") DEFINE_bool(log_runtime, false,"Activate runtime system %Log call.") DEFINE_bool(log_api, false,"Log API events to the log file.") DEFINE_bool(log_code, false,"Log code events to the log file without profiling.") 
DEFINE_bool(log_gc, false,"Log heap samples on garbage collection for the hp2ps tool.") DEFINE_bool(log_handles, false,"Log global handle events.") DEFINE_bool(log_snapshot_positions, false,"log positions of (de)serialized objects in the snapshot.") DEFINE_bool(log_suspect, false,"Log suspect operations.") DEFINE_bool(prof, false,"Log statistical profiling information (implies --log-code).") DEFINE_bool(prof_auto, true,"Used with --prof, starts profiling automatically") DEFINE_bool(prof_lazy, false,"Used with --prof, only does sampling and logging"" when profiler is active (implies --noprof_auto).") DEFINE_bool(prof_browser_mode, true,"Used with --prof, turns on browser-compatible mode for profiling.") DEFINE_bool(log_regexp, false,"Log regular expression execution.") DEFINE_bool(sliding_state_window, false,"Update sliding state window counters.") DEFINE_string(logfile,"v8.log","Specify the name of the log file.") DEFINE_bool(ll_prof, false,"Enable low-level linux profiler.")#define FLAG DEFINE_bool(trace_elements_transitions, false,"trace elements transitions") DEFINE_bool(print_code_stubs, false,"print code stubs") DEFINE_bool(test_secondary_stub_cache, false,"test secondary stub cache by disabling the primary one") DEFINE_bool(test_primary_stub_cache, false,"test primary stub cache by disabling the secondary one") DEFINE_bool(print_code, false,"print generated code") DEFINE_bool(print_opt_code, false,"print optimized code") DEFINE_bool(print_unopt_code, false,"print unoptimized code before ""printing optimized code based on it") DEFINE_bool(print_code_verbose, false,"print more information for code") DEFINE_bool(print_builtin_code, false,"print generated code for builtins")#47"/Users/thlorenz/dev/dx/v8-perf/build/v8/src/flags.cc"2 namespace{struct Flag{enum FlagType{TYPE_BOOL, TYPE_INT, TYPE_FLOAT, TYPE_STRING, TYPE_ARGS} name
Definition: flags.cc:1349
Handle< Code > CompileStoreViaSetter(Handle< JSObject > receiver, Handle< JSFunction > setter, Handle< String > name)
const FPURegister f22
static const int kGlobalReceiverOffset
Definition: objects.h:6085
static void GenerateLoadFastDoubleElement(MacroAssembler *masm)
static PropertyType ExtractTypeFromFlags(Flags flags)
Definition: objects-inl.h:3359
const int kBinary32MantissaBits
Definition: globals.h:263
const uint32_t kFCSRExceptionFlagMask
Flag flags[]
Definition: flags.cc:1467
static const int kExponentBias
Definition: objects.h:1321
typedef int int32_t
Definition: unicode.cc:47
static bool IsSupported(CpuFeature f)
static const int kExternalPointerOffset
Definition: objects.h:3720
static const int kHasNamedInterceptor
Definition: objects.h:5003
static const int kIsAccessCheckNeeded
Definition: objects.h:5007
List< Handle< Map > > MapHandleList
Definition: list.h:193
#define ASSERT(condition)
Definition: checks.h:270
Handle< Code > CompileStoreCallback(Handle< JSObject > object, Handle< AccessorInfo > callback, Handle< String > name)
const int kPointerSizeLog2
Definition: globals.h:246
static const int kInstanceSizeOffset
Definition: objects.h:4981
static const int kDebugInfoOffset
Definition: objects.h:5614
static const int kGlobalContextOffset
Definition: objects.h:6084
static const int kContextOffset
Definition: objects.h:5986
Handle< Code > CompileLoadField(Handle< JSObject > object, Handle< JSObject > holder, int index, Handle< String > name)
Handle< Code > CompileStoreInterceptor(Handle< JSObject > object, Handle< String > name)
static void GenerateNegativeLookup(MacroAssembler *masm, Label *miss, Label *done, Register receiver, Register properties, Handle< String > name, Register scratch0)
Handle< Code > CompileStoreField(Handle< JSObject > object, int index, Handle< Map > transition, Handle< String > name)
static const int kHashFieldOffset
Definition: objects.h:7099
const Register sp
Handle< Code > CompileLoadInterceptor(Handle< JSObject > object, Handle< JSObject > holder, Handle< String > name)
#define UNREACHABLE()
Definition: checks.h:50
STATIC_ASSERT((FixedDoubleArray::kHeaderSize & kDoubleAlignmentMask) == 0)
static const int kLengthOffset
Definition: objects.h:7098
Handle< Code > CompileCallGlobal(Handle< JSObject > object, Handle< GlobalObject > holder, Handle< JSGlobalPropertyCell > cell, Handle< JSFunction > function, Handle< String > name)
static const int kExponentShift
Definition: objects.h:1322
Handle< Code > CompileLoadField(Handle< String > name, Handle< JSObject > object, Handle< JSObject > holder, int index)
static const int kValueOffset
Definition: objects.h:1307
const uint32_t kHoleNanUpper32
Definition: v8globals.h:476
Handle< Code > CompileStoreGlobal(Handle< GlobalObject > object, Handle< JSGlobalPropertyCell > holder, Handle< String > name)
Handle< Code > CompileLoadViaGetter(Handle< String > name, Handle< JSObject > receiver, Handle< JSObject > holder, Handle< JSFunction > getter)
Handle< Code > CompileLoadConstant(Handle< JSObject > object, Handle< JSObject > holder, Handle< JSFunction > value, Handle< String > name)
Handle< Code > CompileLoadConstant(Handle< String > name, Handle< JSObject > object, Handle< JSObject > holder, Handle< JSFunction > value)
Handle< Code > CompileCallField(Handle< JSObject > object, Handle< JSObject > holder, int index, Handle< String > name)
const int kPointerSize
Definition: globals.h:234
static void GenerateStoreFastElement(MacroAssembler *masm, bool is_js_array, ElementsKind element_kind, KeyedAccessGrowMode grow_mode)
Handle< Code > CompileLoadStringLength(Handle< String > name)
const int kHeapObjectTag
Definition: v8.h:3848
const uint32_t kHoleNanLower32
Definition: v8globals.h:477
#define __
static bool decode(uint32_t value)
Definition: utils.h:272
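The decode(uint32_t) entry above comes from V8's BitField helper in utils.h, which packs small values into a 32-bit word with shift-and-mask arithmetic. A minimal sketch of that pattern follows; the class body is an illustration of the technique under the usual shift/size template parameters, not the verbatim source, and InLoopBit is a hypothetical instantiation.

#include <cstdint>

// Sketch: a value of type T occupies `size` bits starting at bit
// `shift` of a packed 32-bit word.
template <class T, int shift, int size>
struct BitField {
  static const uint32_t kMask = ((1u << size) - 1u) << shift;
  // Pack value into its bit range; callers OR several ranges together.
  static uint32_t encode(T value) {
    return static_cast<uint32_t>(value) << shift;
  }
  // Mask out the range and shift it back down.
  static T decode(uint32_t value) {
    return static_cast<T>((value & kMask) >> shift);
  }
};

int main() {
  // Hypothetical field: a bool stored at bit 3 of a flags word.
  typedef BitField<bool, 3, 1> InLoopBit;
  uint32_t flags = InLoopBit::encode(true);
  return InLoopBit::decode(flags) ? 0 : 1;
}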
static const int kPropertiesOffset
Definition: objects.h:2113
const int kBinary32MinExponent
Definition: globals.h:262
Handle< Code > CompileLoadGlobal(Handle< JSObject > object, Handle< GlobalObject > holder, Handle< JSGlobalPropertyCell > cell, Handle< String > name, bool is_dont_delete)
bool IsFastSmiElementsKind(ElementsKind kind)
static void GenerateMapChangeElementsTransition(MacroAssembler *masm)
const int kBinary32ExponentBias
Definition: globals.h:260
static const int kDataOffset
Definition: objects.h:8326
static int SizeFor(int length)
Definition: objects.h:2369
static const int kElementsOffset
Definition: objects.h:2114
const FPURegister f2
const uint32_t kStringTag
Definition: objects.h:437
#define BASE_EMBEDDED
Definition: allocation.h:68
const int kBitsPerInt
Definition: globals.h:254
static void GenerateLoadDictionaryElement(MacroAssembler *masm)
static void GenerateLoadExternalArray(MacroAssembler *masm, ElementsKind elements_kind)
static const int kLengthOffset
Definition: objects.h:8111
static int SizeFor(int length)
Definition: objects.h:2288
static const int kHeaderSize
Definition: objects.h:2233
static const int kMapOffset
Definition: objects.h:1219
static const int kMantissaBitsInTopWord
Definition: objects.h:1323
const uint32_t kIsNotStringMask
Definition: objects.h:436
List< Handle< Code > > CodeHandleList
Definition: list.h:194
static const int kLengthOffset
Definition: objects.h:2232
static void ConvertIntToDouble(MacroAssembler *masm, Register int_scratch, Destination destination, DwVfpRegister double_dst, Register dst1, Register dst2, Register scratch2, SwVfpRegister single_scratch)
Handle< Code > CompileCallInterceptor(Handle< JSObject > object, Handle< JSObject > holder, Handle< String > name)
MemOperand FieldMemOperand(Register object, int offset)
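FieldMemOperand is the helper stubs use to address a field of a tagged heap object: heap pointers carry kHeapObjectTag (1) in their low bit, and the untagging is folded into the load/store displacement instead of being stripped by a separate instruction. A self-contained sketch of that arithmetic, with stand-in types in place of V8's Register and MemOperand classes:

#include <cassert>

const int kHeapObjectTag = 1;  // low-bit tag carried by heap pointers

// Stand-in for V8's MemOperand: base register plus byte displacement.
struct MemOperand {
  int base;          // base register number (stand-in)
  int displacement;  // byte offset applied by the load/store
  MemOperand(int b, int d) : base(b), displacement(d) {}
};

// Folds -kHeapObjectTag into the displacement, so a single lw/sw
// against the tagged pointer reaches the field directly.
MemOperand FieldMemOperand(int object_reg, int field_offset) {
  return MemOperand(object_reg, field_offset - kHeapObjectTag);
}

int main() {
  MemOperand op = FieldMemOperand(4, 0);  // e.g. the map word at offset 0
  assert(op.displacement == -1);          // tag folded into the offset
  return 0;
}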
static const int kDataOffset
Definition: objects.h:8352
const FPUControlRegister FCSR
static void GenerateLoadFastElement(MacroAssembler *masm)
static const uint32_t kSignMask
Definition: objects.h:1316
friend class Isolate
Definition: stub-cache.h:391
const int kSmiTagSize
Definition: v8.h:3854
static void GenerateStoreFastDoubleElement(MacroAssembler *masm, bool is_js_array, KeyedAccessGrowMode grow_mode)
static const int kHeaderSize
Definition: objects.h:4513
static Handle< T > null()
Definition: handles.h:86
#define ASSERT_EQ(v1, v2)
Definition: checks.h:271
Handle< Code > CompileLoadArrayLength(Handle< String > name)
static const uint32_t kMantissaMask
Definition: objects.h:1318
const int kSmiTag
Definition: v8.h:3853
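kSmiTag (0) and kSmiTagSize (1) describe V8's 32-bit small-integer encoding: a smi is the integer shifted up by one tag bit, leaving the low bit 0, while heap object pointers use low bit 1, so one bit test separates the two. A minimal sketch of that encoding:

#include <cassert>
#include <cstdint>

const int kSmiTag = 0;      // tag value in the low bit of a smi word
const int kSmiTagSize = 1;  // number of tag bits

int32_t SmiEncode(int32_t value) {
  // Shift left; the vacated low bit is the (zero) smi tag.
  return (value << kSmiTagSize) | kSmiTag;
}

int32_t SmiDecode(int32_t word) {
  return word >> kSmiTagSize;  // arithmetic shift restores the value
}

bool IsSmi(int32_t word) {
  return (word & ((1 << kSmiTagSize) - 1)) == kSmiTag;
}

int main() {
  int32_t s = SmiEncode(42);
  assert(IsSmi(s) && SmiDecode(s) == 42);
  return 0;
}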
Handle< Code > CompileCallConstant(Handle< Object > object, Handle< JSObject > holder, Handle< JSFunction > function, Handle< String > name, CheckType check)
static AccessorInfo * cast(Object *obj)
const uint32_t kBinary32ExponentMask
Definition: globals.h:258
const uint32_t kBinary32SignMask
Definition: globals.h:257
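The kBinary32 constants name the IEEE-754 single-precision layout (1 sign bit, 8 exponent bits with bias 127, 23 mantissa bits) that the stubs use when packing values into external float arrays. A sketch of the field decomposition, assuming the standard IEEE-754 values these names denote:

#include <cassert>
#include <cstdint>

const uint32_t kBinary32SignMask = 0x80000000u;
const uint32_t kBinary32ExponentMask = 0x7f800000u;
const int kBinary32ExponentBias = 127;
const int kBinary32MantissaBits = 23;

// Split a raw float bit pattern into its IEEE-754 fields.
void DecomposeBinary32(uint32_t bits, bool* sign, int* exponent,
                       uint32_t* mantissa) {
  *sign = (bits & kBinary32SignMask) != 0;
  *exponent = static_cast<int>((bits & kBinary32ExponentMask) >>
                               kBinary32MantissaBits) - kBinary32ExponentBias;
  *mantissa = bits & ((1u << kBinary32MantissaBits) - 1u);
}

int main() {
  // 1.0f is 0x3f800000: sign 0, biased exponent 127 (unbiased 0), mantissa 0.
  bool sign; int exponent; uint32_t mantissa;
  DecomposeBinary32(0x3f800000u, &sign, &exponent, &mantissa);
  assert(!sign && exponent == 0 && mantissa == 0);
  return 0;
}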
const int kHeapObjectTagSize
Definition: v8.h:3849
static const int kSizeInBytes
Definition: assembler-arm.h:75
static Handle< JSGlobalPropertyCell > EnsurePropertyCell(Handle< GlobalObject > global, Handle< String > name)
Definition: objects.cc:11797
static bool HasCustomCallGenerator(Handle< JSFunction > function)
Definition: stub-cache.cc:1428
static const int kPreallocatedArrayElements
Definition: objects.h:8108
static const int kPrototypeOffset
Definition: objects.h:4953
static const int kFlagsNotUsedInLookup
Definition: objects.h:4557
const char * name_
Definition: flags.cc:1352
const int kInvalidProtoDepth
const Register no_reg
static const int kValueOffset
Definition: objects.h:6188
const Register fp
Handle< Code > CompileLoadCallback(Handle< String > name, Handle< JSObject > object, Handle< JSObject > holder, Handle< AccessorInfo > callback)
Handle< Code > CompileLoadPolymorphic(MapHandleList *receiver_maps, CodeHandleList *handler_ics)
Handle< Code > CompileLoadInterceptor(Handle< JSObject > object, Handle< JSObject > holder, Handle< String > name)
Handle< Code > CompileStorePolymorphic(MapHandleList *receiver_maps, CodeHandleList *handler_stubs, MapHandleList *transitioned_maps)
static const int kSharedFunctionInfoOffset
Definition: objects.h:5984
KeyedAccessGrowMode
Definition: objects.h:141
static const int kMantissaBits
Definition: objects.h:1319
void check(i::Vector< const char > string)
static const int kExponentOffset
Definition: objects.h:1313
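kMantissaOffset and kExponentOffset locate the low and high 32-bit word of the IEEE-754 double stored in a HeapNumber, while kExponentShift, kExponentBias, and kMantissaBitsInTopWord describe the bit layout of the high word. A sketch of extracting the unbiased exponent from a double's high word, assuming the standard layout values (shift 20, bias 1023, exponent mask 0x7ff00000) these names denote:

#include <cassert>
#include <cstdint>
#include <cstring>

// Standard double layout: sign(1) | exponent(11, bias 1023) |
// mantissa(52), of which 20 mantissa bits sit in the top word.
const int kExponentShift = 20;
const int kExponentBias = 1023;
const uint32_t kExponentMask = 0x7ff00000u;

int UnbiasedExponent(double value) {
  uint64_t bits;
  std::memcpy(&bits, &value, sizeof(bits));
  uint32_t hi = static_cast<uint32_t>(bits >> 32);  // the "exponent word"
  return static_cast<int>((hi & kExponentMask) >> kExponentShift) -
         kExponentBias;
}

int main() {
  assert(UnbiasedExponent(1.0) == 0);
  assert(UnbiasedExponent(8.0) == 3);  // 8.0 == 2^3
  return 0;
}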
Handle< Code > CompileLoadElement(Handle< Map > receiver_map)
Handle< Code > CompileConstructStub(Handle< JSFunction > function)
static JSObject * cast(Object *obj)
static const int kInstanceTypeOffset
Definition: objects.h:4992
static const int kMantissaOffset
Definition: objects.h:1312
static JSFunction * cast(Object *obj)