v8 3.11.10 (node 0.8.26)
V8 is Google's open source JavaScript engine.
stub-cache-arm.cc
1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are
4 // met:
5 //
6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided
11 // with the distribution.
12 // * Neither the name of Google Inc. nor the names of its
13 // contributors may be used to endorse or promote products derived
14 // from this software without specific prior written permission.
15 //
16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27 
28 #include "v8.h"
29 
30 #if defined(V8_TARGET_ARCH_ARM)
31 
32 #include "ic-inl.h"
33 #include "codegen.h"
34 #include "stub-cache.h"
35 
36 namespace v8 {
37 namespace internal {
38 
39 #define __ ACCESS_MASM(masm)
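// Note: throughout this file `__` expands to ACCESS_MASM(masm), so a line
// such as `__ add(r0, r1, r2)` is shorthand for `masm->add(r0, r1, r2)`.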
40 
41 
42 static void ProbeTable(Isolate* isolate,
43  MacroAssembler* masm,
44  Code::Flags flags,
45  StubCache::Table table,
46  Register receiver,
47  Register name,
48  // Number of the cache entry, not scaled.
49  Register offset,
50  Register scratch,
51  Register scratch2,
52  Register offset_scratch) {
53  ExternalReference key_offset(isolate->stub_cache()->key_reference(table));
54  ExternalReference value_offset(isolate->stub_cache()->value_reference(table));
55  ExternalReference map_offset(isolate->stub_cache()->map_reference(table));
56 
57  uint32_t key_off_addr = reinterpret_cast<uint32_t>(key_offset.address());
58  uint32_t value_off_addr = reinterpret_cast<uint32_t>(value_offset.address());
59  uint32_t map_off_addr = reinterpret_cast<uint32_t>(map_offset.address());
60 
61  // Check the relative positions of the address fields.
62  ASSERT(value_off_addr > key_off_addr);
63  ASSERT((value_off_addr - key_off_addr) % 4 == 0);
64  ASSERT((value_off_addr - key_off_addr) < (256 * 4));
65  ASSERT(map_off_addr > key_off_addr);
66  ASSERT((map_off_addr - key_off_addr) % 4 == 0);
67  ASSERT((map_off_addr - key_off_addr) < (256 * 4));
68 
69  Label miss;
70  Register base_addr = scratch;
71  scratch = no_reg;
72 
73  // Multiply by 3 because there are 3 fields per entry (name, code, map).
74  __ add(offset_scratch, offset, Operand(offset, LSL, 1));
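// E.g. for cache entry index 5 this computes offset_scratch = 5 + (5 << 1)
// = 15, so the entry base below becomes key_offset + 15 * kPointerSize: the
// address of the 5th (name, code, map) triple in the table.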
75 
76  // Calculate the base address of the entry.
77  __ mov(base_addr, Operand(key_offset));
78  __ add(base_addr, base_addr, Operand(offset_scratch, LSL, kPointerSizeLog2));
79 
80  // Check that the key in the entry matches the name.
81  __ ldr(ip, MemOperand(base_addr, 0));
82  __ cmp(name, ip);
83  __ b(ne, &miss);
84 
85  // Check the map matches.
86  __ ldr(ip, MemOperand(base_addr, map_off_addr - key_off_addr));
87  __ ldr(scratch2, FieldMemOperand(receiver, HeapObject::kMapOffset));
88  __ cmp(ip, scratch2);
89  __ b(ne, &miss);
90 
91  // Get the code entry from the cache.
92  Register code = scratch2;
93  scratch2 = no_reg;
94  __ ldr(code, MemOperand(base_addr, value_off_addr - key_off_addr));
95 
96  // Check that the flags match what we're looking for.
97  Register flags_reg = base_addr;
98  base_addr = no_reg;
99  __ ldr(flags_reg, FieldMemOperand(code, Code::kFlagsOffset));
100  // It's a nice optimization if this constant is encodable in the bic insn.
101 
102  uint32_t mask = Code::kFlagsNotUsedInLookup;
103  ASSERT(__ ImmediateFitsAddrMode1Instruction(mask));
104  __ bic(flags_reg, flags_reg, Operand(mask));
105  // Using cmn and the negative instead of cmp means we can use movw.
106  if (flags < 0) {
107  __ cmn(flags_reg, Operand(-flags));
108  } else {
109  __ cmp(flags_reg, Operand(flags));
110  }
111  __ b(ne, &miss);
112 
113 #ifdef DEBUG
114  if (FLAG_test_secondary_stub_cache && table == StubCache::kPrimary) {
115  __ jmp(&miss);
116  } else if (FLAG_test_primary_stub_cache && table == StubCache::kSecondary) {
117  __ jmp(&miss);
118  }
119 #endif
120 
121  // Jump to the first instruction in the code stub.
122  __ add(pc, code, Operand(Code::kHeaderSize - kHeapObjectTag));
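// `code` holds a tagged Code pointer: adding Code::kHeaderSize and removing
// the heap object tag yields the address of the stub's first instruction,
// and writing that to pc performs the tail jump.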
123 
124  // Miss: fall through.
125  __ bind(&miss);
126 }
127 
128 
129 // Helper function used to check that the dictionary doesn't contain
130 // the property. This function may return false negatives, so miss_label
131 // must always call a backup property check that is complete.
132 // This function is safe to call if the receiver has fast properties.
133 // Name must be a symbol and receiver must be a heap object.
134 static void GenerateDictionaryNegativeLookup(MacroAssembler* masm,
135  Label* miss_label,
136  Register receiver,
137  Handle<String> name,
138  Register scratch0,
139  Register scratch1) {
140  ASSERT(name->IsSymbol());
141  Counters* counters = masm->isolate()->counters();
142  __ IncrementCounter(counters->negative_lookups(), 1, scratch0, scratch1);
143  __ IncrementCounter(counters->negative_lookups_miss(), 1, scratch0, scratch1);
144 
145  Label done;
146 
147  const int kInterceptorOrAccessCheckNeededMask =
148  (1 << Map::kHasNamedInterceptor) | (1 << Map::kIsAccessCheckNeeded);
149 
150  // Bail out if the receiver has a named interceptor or requires access checks.
151  Register map = scratch1;
152  __ ldr(map, FieldMemOperand(receiver, HeapObject::kMapOffset));
153  __ ldrb(scratch0, FieldMemOperand(map, Map::kBitFieldOffset));
154  __ tst(scratch0, Operand(kInterceptorOrAccessCheckNeededMask));
155  __ b(ne, miss_label);
156 
157  // Check that receiver is a JSObject.
158  __ ldrb(scratch0, FieldMemOperand(map, Map::kInstanceTypeOffset));
159  __ cmp(scratch0, Operand(FIRST_SPEC_OBJECT_TYPE));
160  __ b(lt, miss_label);
161 
162  // Load properties array.
163  Register properties = scratch0;
164  __ ldr(properties, FieldMemOperand(receiver, JSObject::kPropertiesOffset));
165  // Check that the properties array is a dictionary.
166  __ ldr(map, FieldMemOperand(properties, HeapObject::kMapOffset));
167  Register tmp = properties;
168  __ LoadRoot(tmp, Heap::kHashTableMapRootIndex);
169  __ cmp(map, tmp);
170  __ b(ne, miss_label);
171 
172  // Restore the temporarily used register.
173  __ ldr(properties, FieldMemOperand(receiver, JSObject::kPropertiesOffset));
174 
175 
176  StringDictionaryLookupStub::GenerateNegativeLookup(masm,
177  miss_label,
178  &done,
179  receiver,
180  properties,
181  name,
182  scratch1);
183  __ bind(&done);
184  __ DecrementCounter(counters->negative_lookups_miss(), 1, scratch0, scratch1);
185 }
186 
187 
188 void StubCache::GenerateProbe(MacroAssembler* masm,
189  Code::Flags flags,
190  Register receiver,
191  Register name,
192  Register scratch,
193  Register extra,
194  Register extra2,
195  Register extra3) {
196  Isolate* isolate = masm->isolate();
197  Label miss;
198 
199  // Make sure that code is valid. The multiplying code relies on the
200  // entry size being 12.
201  ASSERT(sizeof(Entry) == 12);
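// On 32-bit ARM an Entry is three words (name, code, map), hence 12 bytes
// and the multiply-by-3 offset scaling in ProbeTable above.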
202 
203  // Make sure the flags value does not name a specific type.
204  ASSERT(Code::ExtractTypeFromFlags(flags) == 0);
205 
206  // Make sure that there are no register conflicts.
207  ASSERT(!scratch.is(receiver));
208  ASSERT(!scratch.is(name));
209  ASSERT(!extra.is(receiver));
210  ASSERT(!extra.is(name));
211  ASSERT(!extra.is(scratch));
212  ASSERT(!extra2.is(receiver));
213  ASSERT(!extra2.is(name));
214  ASSERT(!extra2.is(scratch));
215  ASSERT(!extra2.is(extra));
216 
217  // Check scratch, extra and extra2 registers are valid.
218  ASSERT(!scratch.is(no_reg));
219  ASSERT(!extra.is(no_reg));
220  ASSERT(!extra2.is(no_reg));
221  ASSERT(!extra3.is(no_reg));
222 
223  Counters* counters = masm->isolate()->counters();
224  __ IncrementCounter(counters->megamorphic_stub_cache_probes(), 1,
225  extra2, extra3);
226 
227  // Check that the receiver isn't a smi.
228  __ JumpIfSmi(receiver, &miss);
229 
230  // Get the map of the receiver and compute the hash.
231  __ ldr(scratch, FieldMemOperand(name, String::kHashFieldOffset));
232  __ ldr(ip, FieldMemOperand(receiver, HeapObject::kMapOffset));
233  __ add(scratch, scratch, Operand(ip));
234  uint32_t mask = kPrimaryTableSize - 1;
235  // We shift out the last two bits because they are not part of the hash and
236  // they are always 01 for maps.
237  __ mov(scratch, Operand(scratch, LSR, kHeapObjectTagSize));
238  // Mask down the eor argument to the minimum to keep the immediate
239  // ARM-encodable.
240  __ eor(scratch, scratch, Operand((flags >> kHeapObjectTagSize) & mask));
241  // Prefer and_ to ubfx here because ubfx takes 2 cycles.
242  __ and_(scratch, scratch, Operand(mask));
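// Roughly, in C-like pseudo-code (with kHeapObjectTagSize == 2):
//   primary = (((hash + map) >> 2) ^ (flags >> 2)) & (kPrimaryTableSize - 1)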
243 
244  // Probe the primary table.
245  ProbeTable(isolate,
246  masm,
247  flags,
248  kPrimary,
249  receiver,
250  name,
251  scratch,
252  extra,
253  extra2,
254  extra3);
255 
256  // Primary miss: Compute hash for secondary probe.
257  __ sub(scratch, scratch, Operand(name, LSR, kHeapObjectTagSize));
258  uint32_t mask2 = kSecondaryTableSize - 1;
259  __ add(scratch, scratch, Operand((flags >> kHeapObjectTagSize) & mask2));
260  __ and_(scratch, scratch, Operand(mask2));
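// Likewise, the secondary index is roughly:
//   secondary = (primary - (name >> 2) + (flags >> 2)) & (kSecondaryTableSize - 1)
// where `name` is the tagged String pointer itself.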
261 
262  // Probe the secondary table.
263  ProbeTable(isolate,
264  masm,
265  flags,
266  kSecondary,
267  receiver,
268  name,
269  scratch,
270  extra,
271  extra2,
272  extra3);
273 
274  // Cache miss: Fall-through and let caller handle the miss by
275  // entering the runtime system.
276  __ bind(&miss);
277  __ IncrementCounter(counters->megamorphic_stub_cache_misses(), 1,
278  extra2, extra3);
279 }
280 
281 
282 void StubCompiler::GenerateLoadGlobalFunctionPrototype(MacroAssembler* masm,
283  int index,
284  Register prototype) {
285  // Load the global or builtins object from the current context.
286  __ ldr(prototype, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_INDEX)));
287  // Load the global context from the global or builtins object.
288  __ ldr(prototype,
289  FieldMemOperand(prototype, GlobalObject::kGlobalContextOffset));
290  // Load the function from the global context.
291  __ ldr(prototype, MemOperand(prototype, Context::SlotOffset(index)));
292  // Load the initial map. The global functions all have initial maps.
293  __ ldr(prototype,
294  FieldMemOperand(prototype, JSFunction::kPrototypeOrInitialMapOffset));
295  // Load the prototype from the initial map.
296  __ ldr(prototype, FieldMemOperand(prototype, Map::kPrototypeOffset));
297 }
298 
299 
300 void StubCompiler::GenerateDirectLoadGlobalFunctionPrototype(
301  MacroAssembler* masm,
302  int index,
303  Register prototype,
304  Label* miss) {
305  Isolate* isolate = masm->isolate();
306  // Check we're still in the same context.
307  __ ldr(prototype, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_INDEX)));
308  __ Move(ip, isolate->global());
309  __ cmp(prototype, ip);
310  __ b(ne, miss);
311  // Get the global function with the given index.
312  Handle<JSFunction> function(
313  JSFunction::cast(isolate->global_context()->get(index)));
314  // Load its initial map. The global functions all have initial maps.
315  __ Move(prototype, Handle<Map>(function->initial_map()));
316  // Load the prototype from the initial map.
317  __ ldr(prototype, FieldMemOperand(prototype, Map::kPrototypeOffset));
318 }
319 
320 
321 // Load a fast property out of a holder object (src). In-object properties
322  // are loaded directly; otherwise the property is loaded from the properties
323 // fixed array.
324 void StubCompiler::GenerateFastPropertyLoad(MacroAssembler* masm,
325  Register dst,
326  Register src,
327  Handle<JSObject> holder,
328  int index) {
329  // Adjust for the number of properties stored in the holder.
330  index -= holder->map()->inobject_properties();
331  if (index < 0) {
332  // Get the property straight out of the holder.
333  int offset = holder->map()->instance_size() + (index * kPointerSize);
334  __ ldr(dst, FieldMemOperand(src, offset));
335  } else {
336  // Calculate the offset into the properties array.
337  int offset = index * kPointerSize + FixedArray::kHeaderSize;
338  __ ldr(dst, FieldMemOperand(src, JSObject::kPropertiesOffset));
339  __ ldr(dst, FieldMemOperand(dst, offset));
340  }
341 }
342 
343 
344 void StubCompiler::GenerateLoadArrayLength(MacroAssembler* masm,
345  Register receiver,
346  Register scratch,
347  Label* miss_label) {
348  // Check that the receiver isn't a smi.
349  __ JumpIfSmi(receiver, miss_label);
350 
351  // Check that the object is a JS array.
352  __ CompareObjectType(receiver, scratch, scratch, JS_ARRAY_TYPE);
353  __ b(ne, miss_label);
354 
355  // Load length directly from the JS array.
356  __ ldr(r0, FieldMemOperand(receiver, JSArray::kLengthOffset));
357  __ Ret();
358 }
359 
360 
361 // Generate code to check if an object is a string. If the object is a
362 // heap object, its map's instance type is left in the scratch1 register.
363 // If this is not needed, scratch1 and scratch2 may be the same register.
364 static void GenerateStringCheck(MacroAssembler* masm,
365  Register receiver,
366  Register scratch1,
367  Register scratch2,
368  Label* smi,
369  Label* non_string_object) {
370  // Check that the receiver isn't a smi.
371  __ JumpIfSmi(receiver, smi);
372 
373  // Check that the object is a string.
374  __ ldr(scratch1, FieldMemOperand(receiver, HeapObject::kMapOffset));
375  __ ldrb(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset));
376  __ and_(scratch2, scratch1, Operand(kIsNotStringMask));
377  // The cast is to resolve the overload for the argument of 0x0.
378  __ cmp(scratch2, Operand(static_cast<int32_t>(kStringTag)));
379  __ b(ne, non_string_object);
380 }
381 
382 
383 // Generate code to load the length from a string object and return the length.
384 // If the receiver object is not a string or a wrapped string object the
385 // execution continues at the miss label. The register containing the
386 // receiver is potentially clobbered.
387 void StubCompiler::GenerateLoadStringLength(MacroAssembler* masm,
388  Register receiver,
389  Register scratch1,
390  Register scratch2,
391  Label* miss,
392  bool support_wrappers) {
393  Label check_wrapper;
394 
395  // Check if the object is a string leaving the instance type in the
396  // scratch1 register.
397  GenerateStringCheck(masm, receiver, scratch1, scratch2, miss,
398  support_wrappers ? &check_wrapper : miss);
399 
400  // Load length directly from the string.
401  __ ldr(r0, FieldMemOperand(receiver, String::kLengthOffset));
402  __ Ret();
403 
404  if (support_wrappers) {
405  // Check if the object is a JSValue wrapper.
406  __ bind(&check_wrapper);
407  __ cmp(scratch1, Operand(JS_VALUE_TYPE));
408  __ b(ne, miss);
409 
410  // Unwrap the value and check if the wrapped value is a string.
411  __ ldr(scratch1, FieldMemOperand(receiver, JSValue::kValueOffset));
412  GenerateStringCheck(masm, scratch1, scratch2, scratch2, miss, miss);
413  __ ldr(r0, FieldMemOperand(scratch1, String::kLengthOffset));
414  __ Ret();
415  }
416 }
417 
418 
419 void StubCompiler::GenerateLoadFunctionPrototype(MacroAssembler* masm,
420  Register receiver,
421  Register scratch1,
422  Register scratch2,
423  Label* miss_label) {
424  __ TryGetFunctionPrototype(receiver, scratch1, scratch2, miss_label);
425  __ mov(r0, scratch1);
426  __ Ret();
427 }
428 
429 
430 // Generate StoreField code, value is passed in r0 register.
431 // When leaving generated code after success, the receiver_reg and name_reg
432 // may be clobbered. Upon branch to miss_label, the receiver and name
433 // registers have their original values.
434 void StubCompiler::GenerateStoreField(MacroAssembler* masm,
435  Handle<JSObject> object,
436  int index,
437  Handle<Map> transition,
438  Handle<String> name,
439  Register receiver_reg,
440  Register name_reg,
441  Register scratch1,
442  Register scratch2,
443  Label* miss_label) {
444  // r0 : value
445  Label exit;
446 
447  LookupResult lookup(masm->isolate());
448  object->Lookup(*name, &lookup);
449  if (lookup.IsFound() && (lookup.IsReadOnly() || !lookup.IsCacheable())) {
450  // In sloppy mode, we could just return the value and be done. However, we
451  // might be in strict mode, where we have to throw. Since we cannot tell,
452  // go into slow case unconditionally.
453  __ jmp(miss_label);
454  return;
455  }
456 
457  // Check that the map of the object hasn't changed.
458  CompareMapMode mode = transition.is_null() ? ALLOW_ELEMENT_TRANSITION_MAPS
459  : REQUIRE_EXACT_MAP;
460  __ CheckMap(receiver_reg, scratch1, Handle<Map>(object->map()), miss_label,
461  DO_SMI_CHECK, mode);
462 
463  // Perform global security token check if needed.
464  if (object->IsJSGlobalProxy()) {
465  __ CheckAccessGlobalProxy(receiver_reg, scratch1, miss_label);
466  }
467 
468  // Check that we are allowed to write this.
469  if (!transition.is_null() && object->GetPrototype()->IsJSObject()) {
470  JSObject* holder;
471  if (lookup.IsFound()) {
472  holder = lookup.holder();
473  } else {
474  // Find the top object.
475  holder = *object;
476  do {
477  holder = JSObject::cast(holder->GetPrototype());
478  } while (holder->GetPrototype()->IsJSObject());
479  }
480  // We need an extra register, so push name_reg and reuse it in the check below.
481  __ push(name_reg);
482  Label miss_pop, done_check;
483  CheckPrototypes(object, receiver_reg, Handle<JSObject>(holder), name_reg,
484  scratch1, scratch2, name, &miss_pop);
485  __ jmp(&done_check);
486  __ bind(&miss_pop);
487  __ pop(name_reg);
488  __ jmp(miss_label);
489  __ bind(&done_check);
490  __ pop(name_reg);
491  }
492 
493  // Stub never generated for non-global objects that require access
494  // checks.
495  ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());
496 
497  // Perform map transition for the receiver if necessary.
498  if (!transition.is_null() && (object->map()->unused_property_fields() == 0)) {
499  // The properties must be extended before we can store the value.
500  // We jump to a runtime call that extends the properties array.
501  __ push(receiver_reg);
502  __ mov(r2, Operand(transition));
503  __ Push(r2, r0);
504  __ TailCallExternalReference(
505  ExternalReference(IC_Utility(IC::kSharedStoreIC_ExtendStorage),
506  masm->isolate()),
507  3,
508  1);
509  return;
510  }
511 
512  if (!transition.is_null()) {
513  // Update the map of the object.
514  __ mov(scratch1, Operand(transition));
515  __ str(scratch1, FieldMemOperand(receiver_reg, HeapObject::kMapOffset));
516 
517  // Update the write barrier for the map field and pass the now unused
518  // name_reg as scratch register.
519  __ RecordWriteField(receiver_reg,
520  HeapObject::kMapOffset,
521  scratch1,
522  name_reg,
523  kLRHasNotBeenSaved,
524  kDontSaveFPRegs,
525  OMIT_REMEMBERED_SET,
526  OMIT_SMI_CHECK);
527  }
528 
529  // Adjust for the number of properties stored in the object. Even in the
530  // face of a transition we can use the old map here because the size of the
531  // object and the number of in-object properties is not going to change.
532  index -= object->map()->inobject_properties();
533 
534  if (index < 0) {
535  // Set the property straight into the object.
536  int offset = object->map()->instance_size() + (index * kPointerSize);
537  __ str(r0, FieldMemOperand(receiver_reg, offset));
538 
539  // Skip updating write barrier if storing a smi.
540  __ JumpIfSmi(r0, &exit);
541 
542  // Update the write barrier for the array address.
543  // Pass the now unused name_reg as a scratch register.
544  __ mov(name_reg, r0);
545  __ RecordWriteField(receiver_reg,
546  offset,
547  name_reg,
548  scratch1,
549  kLRHasNotBeenSaved,
550  kDontSaveFPRegs);
551  } else {
552  // Write to the properties array.
553  int offset = index * kPointerSize + FixedArray::kHeaderSize;
554  // Get the properties array
555  __ ldr(scratch1,
556  FieldMemOperand(receiver_reg, JSObject::kPropertiesOffset));
557  __ str(r0, FieldMemOperand(scratch1, offset));
558 
559  // Skip updating write barrier if storing a smi.
560  __ JumpIfSmi(r0, &exit);
561 
562  // Update the write barrier for the array address.
563  // Ok to clobber receiver_reg and name_reg, since we return.
564  __ mov(name_reg, r0);
565  __ RecordWriteField(scratch1,
566  offset,
567  name_reg,
568  receiver_reg,
569  kLRHasNotBeenSaved,
570  kDontSaveFPRegs);
571  }
572 
573  // Return the value (register r0).
574  __ bind(&exit);
575  __ Ret();
576 }
577 
578 
579 void StubCompiler::GenerateLoadMiss(MacroAssembler* masm, Code::Kind kind) {
580  ASSERT(kind == Code::LOAD_IC || kind == Code::KEYED_LOAD_IC);
581  Handle<Code> code = (kind == Code::LOAD_IC)
582  ? masm->isolate()->builtins()->LoadIC_Miss()
583  : masm->isolate()->builtins()->KeyedLoadIC_Miss();
584  __ Jump(code, RelocInfo::CODE_TARGET);
585 }
586 
587 
588 static void GenerateCallFunction(MacroAssembler* masm,
589  Handle<Object> object,
590  const ParameterCount& arguments,
591  Label* miss,
592  Code::ExtraICState extra_ic_state) {
593  // ----------- S t a t e -------------
594  // -- r0: receiver
595  // -- r1: function to call
596  // -----------------------------------
597 
598  // Check that the function really is a function.
599  __ JumpIfSmi(r1, miss);
600  __ CompareObjectType(r1, r3, r3, JS_FUNCTION_TYPE);
601  __ b(ne, miss);
602 
603  // Patch the receiver on the stack with the global proxy if
604  // necessary.
605  if (object->IsGlobalObject()) {
606  __ ldr(r3, FieldMemOperand(r0, GlobalObject::kGlobalReceiverOffset));
607  __ str(r3, MemOperand(sp, arguments.immediate() * kPointerSize));
608  }
609 
610  // Invoke the function.
611  CallKind call_kind = CallICBase::Contextual::decode(extra_ic_state)
612  ? CALL_AS_FUNCTION
613  : CALL_AS_METHOD;
614  __ InvokeFunction(r1, arguments, JUMP_FUNCTION, NullCallWrapper(), call_kind);
615 }
616 
617 
618 static void PushInterceptorArguments(MacroAssembler* masm,
619  Register receiver,
620  Register holder,
621  Register name,
622  Handle<JSObject> holder_obj) {
623  __ push(name);
624  Handle<InterceptorInfo> interceptor(holder_obj->GetNamedInterceptor());
625  ASSERT(!masm->isolate()->heap()->InNewSpace(*interceptor));
626  Register scratch = name;
627  __ mov(scratch, Operand(interceptor));
628  __ push(scratch);
629  __ push(receiver);
630  __ push(holder);
631  __ ldr(scratch, FieldMemOperand(scratch, InterceptorInfo::kDataOffset));
632  __ push(scratch);
633  __ mov(scratch, Operand(ExternalReference::isolate_address()));
634  __ push(scratch);
635 }
636 
637 
638 static void CompileCallLoadPropertyWithInterceptor(
639  MacroAssembler* masm,
640  Register receiver,
641  Register holder,
642  Register name,
643  Handle<JSObject> holder_obj) {
644  PushInterceptorArguments(masm, receiver, holder, name, holder_obj);
645 
646  ExternalReference ref =
647  ExternalReference(IC_Utility(IC::kLoadPropertyWithInterceptorOnly),
648  masm->isolate());
649  __ mov(r0, Operand(6));
650  __ mov(r1, Operand(ref));
651 
652  CEntryStub stub(1);
653  __ CallStub(&stub);
654 }
655 
656 
657 static const int kFastApiCallArguments = 4;
658 
659  // Reserves space for the extra arguments to the API function in the
660 // caller's frame.
661 //
662 // These arguments are set by CheckPrototypes and GenerateFastApiDirectCall.
663 static void ReserveSpaceForFastApiCall(MacroAssembler* masm,
664  Register scratch) {
665  __ mov(scratch, Operand(Smi::FromInt(0)));
666  for (int i = 0; i < kFastApiCallArguments; i++) {
667  __ push(scratch);
668  }
669 }
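// The four slots pushed here are placeholders: CheckPrototypes later stores
// the holder at sp[0], and GenerateFastApiDirectCall fills in the callee JS
// function, the call data and the isolate (see its frame layout below).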
670 
671 
672 // Undoes the effects of ReserveSpaceForFastApiCall.
673 static void FreeSpaceForFastApiCall(MacroAssembler* masm) {
674  __ Drop(kFastApiCallArguments);
675 }
676 
677 
678 static void GenerateFastApiDirectCall(MacroAssembler* masm,
679  const CallOptimization& optimization,
680  int argc) {
681  // ----------- S t a t e -------------
682  // -- sp[0] : holder (set by CheckPrototypes)
683  // -- sp[4] : callee JS function
684  // -- sp[8] : call data
685  // -- sp[12] : isolate
686  // -- sp[16] : last JS argument
687  // -- ...
688  // -- sp[(argc + 3) * 4] : first JS argument
689  // -- sp[(argc + 4) * 4] : receiver
690  // -----------------------------------
691  // Get the function and setup the context.
692  Handle<JSFunction> function = optimization.constant_function();
693  __ LoadHeapObject(r5, function);
694  __ ldr(cp, FieldMemOperand(r5, JSFunction::kContextOffset));
695 
696  // Pass the additional arguments.
697  Handle<CallHandlerInfo> api_call_info = optimization.api_call_info();
698  Handle<Object> call_data(api_call_info->data());
699  if (masm->isolate()->heap()->InNewSpace(*call_data)) {
700  __ Move(r0, api_call_info);
701  __ ldr(r6, FieldMemOperand(r0, CallHandlerInfo::kDataOffset));
702  } else {
703  __ Move(r6, call_data);
704  }
705  __ mov(r7, Operand(ExternalReference::isolate_address()));
706  // Store JS function, call data and isolate.
707  __ stm(ib, sp, r5.bit() | r6.bit() | r7.bit());
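// stm ib (increment-before) with base sp stores the ascending registers
// r5, r6 and r7 at sp + 4, sp + 8 and sp + 12, which are exactly the callee,
// call data and isolate slots in the frame layout above.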
708 
709  // Prepare arguments.
710  __ add(r2, sp, Operand(3 * kPointerSize));
711 
712  // Allocate the v8::Arguments structure in the arguments' space since
713  // it's not controlled by GC.
714  const int kApiStackSpace = 4;
715 
716  FrameScope frame_scope(masm, StackFrame::MANUAL);
717  __ EnterExitFrame(false, kApiStackSpace);
718 
719  // r0 = v8::Arguments&
720  // Arguments is after the return address.
721  __ add(r0, sp, Operand(1 * kPointerSize));
722  // v8::Arguments::implicit_args_
723  __ str(r2, MemOperand(r0, 0 * kPointerSize));
724  // v8::Arguments::values_
725  __ add(ip, r2, Operand(argc * kPointerSize));
726  __ str(ip, MemOperand(r0, 1 * kPointerSize));
727  // v8::Arguments::length_ = argc
728  __ mov(ip, Operand(argc));
729  __ str(ip, MemOperand(r0, 2 * kPointerSize));
730  // v8::Arguments::is_construct_call = 0
731  __ mov(ip, Operand(0));
732  __ str(ip, MemOperand(r0, 3 * kPointerSize));
733 
734  const int kStackUnwindSpace = argc + kFastApiCallArguments + 1;
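// The unwind space is the argc JS arguments, plus the four slots reserved by
// ReserveSpaceForFastApiCall, plus the receiver.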
735  Address function_address = v8::ToCData<Address>(api_call_info->callback());
736  ApiFunction fun(function_address);
737  ExternalReference ref = ExternalReference(&fun,
738  ExternalReference::DIRECT_API_CALL,
739  masm->isolate());
740  AllowExternalCallThatCantCauseGC scope(masm);
741 
742  __ CallApiFunctionAndReturn(ref, kStackUnwindSpace);
743 }
744 
745 
746 class CallInterceptorCompiler BASE_EMBEDDED {
747  public:
748  CallInterceptorCompiler(StubCompiler* stub_compiler,
749  const ParameterCount& arguments,
750  Register name,
751  Code::ExtraICState extra_ic_state)
752  : stub_compiler_(stub_compiler),
753  arguments_(arguments),
754  name_(name),
755  extra_ic_state_(extra_ic_state) {}
756 
757  void Compile(MacroAssembler* masm,
758  Handle<JSObject> object,
759  Handle<JSObject> holder,
760  Handle<String> name,
761  LookupResult* lookup,
762  Register receiver,
763  Register scratch1,
764  Register scratch2,
765  Register scratch3,
766  Label* miss) {
767  ASSERT(holder->HasNamedInterceptor());
768  ASSERT(!holder->GetNamedInterceptor()->getter()->IsUndefined());
769 
770  // Check that the receiver isn't a smi.
771  __ JumpIfSmi(receiver, miss);
772  CallOptimization optimization(lookup);
773  if (optimization.is_constant_call()) {
774  CompileCacheable(masm, object, receiver, scratch1, scratch2, scratch3,
775  holder, lookup, name, optimization, miss);
776  } else {
777  CompileRegular(masm, object, receiver, scratch1, scratch2, scratch3,
778  name, holder, miss);
779  }
780  }
781 
782  private:
783  void CompileCacheable(MacroAssembler* masm,
784  Handle<JSObject> object,
785  Register receiver,
786  Register scratch1,
787  Register scratch2,
788  Register scratch3,
789  Handle<JSObject> interceptor_holder,
790  LookupResult* lookup,
791  Handle<String> name,
792  const CallOptimization& optimization,
793  Label* miss_label) {
794  ASSERT(optimization.is_constant_call());
795  ASSERT(!lookup->holder()->IsGlobalObject());
796  Counters* counters = masm->isolate()->counters();
797  int depth1 = kInvalidProtoDepth;
798  int depth2 = kInvalidProtoDepth;
799  bool can_do_fast_api_call = false;
800  if (optimization.is_simple_api_call() &&
801  !lookup->holder()->IsGlobalObject()) {
802  depth1 = optimization.GetPrototypeDepthOfExpectedType(
803  object, interceptor_holder);
804  if (depth1 == kInvalidProtoDepth) {
805  depth2 = optimization.GetPrototypeDepthOfExpectedType(
806  interceptor_holder, Handle<JSObject>(lookup->holder()));
807  }
808  can_do_fast_api_call =
809  depth1 != kInvalidProtoDepth || depth2 != kInvalidProtoDepth;
810  }
811 
812  __ IncrementCounter(counters->call_const_interceptor(), 1,
813  scratch1, scratch2);
814 
815  if (can_do_fast_api_call) {
816  __ IncrementCounter(counters->call_const_interceptor_fast_api(), 1,
817  scratch1, scratch2);
818  ReserveSpaceForFastApiCall(masm, scratch1);
819  }
820 
821  // Check that the maps from receiver to interceptor's holder
822  // haven't changed and thus we can invoke interceptor.
823  Label miss_cleanup;
824  Label* miss = can_do_fast_api_call ? &miss_cleanup : miss_label;
825  Register holder =
826  stub_compiler_->CheckPrototypes(object, receiver, interceptor_holder,
827  scratch1, scratch2, scratch3,
828  name, depth1, miss);
829 
830  // Invoke an interceptor and if it provides a value,
831  // branch to |regular_invoke|.
832  Label regular_invoke;
833  LoadWithInterceptor(masm, receiver, holder, interceptor_holder, scratch2,
834  &regular_invoke);
835 
836  // Interceptor returned nothing for this property. Try to use cached
837  // constant function.
838 
839  // Check that the maps from interceptor's holder to constant function's
840  // holder haven't changed and thus we can use cached constant function.
841  if (*interceptor_holder != lookup->holder()) {
842  stub_compiler_->CheckPrototypes(interceptor_holder, receiver,
843  Handle<JSObject>(lookup->holder()),
844  scratch1, scratch2, scratch3,
845  name, depth2, miss);
846  } else {
847  // CheckPrototypes has a side effect of fetching a 'holder'
848  // for API (object which is instanceof for the signature). It's
849  // safe to omit it here, as if present, it should be fetched
850  // by the previous CheckPrototypes.
851  ASSERT(depth2 == kInvalidProtoDepth);
852  }
853 
854  // Invoke function.
855  if (can_do_fast_api_call) {
856  GenerateFastApiDirectCall(masm, optimization, arguments_.immediate());
857  } else {
858  CallKind call_kind = CallICBase::Contextual::decode(extra_ic_state_)
859  ? CALL_AS_FUNCTION
860  : CALL_AS_METHOD;
861  __ InvokeFunction(optimization.constant_function(), arguments_,
862  JUMP_FUNCTION, NullCallWrapper(), call_kind);
863  }
864 
865  // Deferred code for fast API call case---clean preallocated space.
866  if (can_do_fast_api_call) {
867  __ bind(&miss_cleanup);
868  FreeSpaceForFastApiCall(masm);
869  __ b(miss_label);
870  }
871 
872  // Invoke a regular function.
873  __ bind(&regular_invoke);
874  if (can_do_fast_api_call) {
875  FreeSpaceForFastApiCall(masm);
876  }
877  }
878 
879  void CompileRegular(MacroAssembler* masm,
880  Handle<JSObject> object,
881  Register receiver,
882  Register scratch1,
883  Register scratch2,
884  Register scratch3,
885  Handle<String> name,
886  Handle<JSObject> interceptor_holder,
887  Label* miss_label) {
888  Register holder =
889  stub_compiler_->CheckPrototypes(object, receiver, interceptor_holder,
890  scratch1, scratch2, scratch3,
891  name, miss_label);
892 
893  // Call a runtime function to load the interceptor property.
894  FrameScope scope(masm, StackFrame::INTERNAL);
895  // Save the name_ register across the call.
896  __ push(name_);
897  PushInterceptorArguments(masm, receiver, holder, name_, interceptor_holder);
898  __ CallExternalReference(
899  ExternalReference(IC_Utility(IC::kLoadPropertyWithInterceptorForCall),
900  masm->isolate()),
901  6);
902  // Restore the name_ register.
903  __ pop(name_);
904  // Leave the internal frame.
905  }
906 
907  void LoadWithInterceptor(MacroAssembler* masm,
908  Register receiver,
909  Register holder,
910  Handle<JSObject> holder_obj,
911  Register scratch,
912  Label* interceptor_succeeded) {
913  {
914  FrameScope scope(masm, StackFrame::INTERNAL);
915  __ Push(holder, name_);
916  CompileCallLoadPropertyWithInterceptor(masm,
917  receiver,
918  holder,
919  name_,
920  holder_obj);
921  __ pop(name_); // Restore the name.
922  __ pop(receiver); // Restore the holder.
923  }
924  // If interceptor returns no-result sentinel, call the constant function.
925  __ LoadRoot(scratch, Heap::kNoInterceptorResultSentinelRootIndex);
926  __ cmp(r0, scratch);
927  __ b(ne, interceptor_succeeded);
928  }
929 
930  StubCompiler* stub_compiler_;
931  const ParameterCount& arguments_;
932  Register name_;
933  Code::ExtraICState extra_ic_state_;
934 };
935 
936 
937 // Generate code to check that a global property cell is empty. Create
938 // the property cell at compilation time if no cell exists for the
939 // property.
940 static void GenerateCheckPropertyCell(MacroAssembler* masm,
941  Handle<GlobalObject> global,
942  Handle<String> name,
943  Register scratch,
944  Label* miss) {
945  Handle<JSGlobalPropertyCell> cell =
946  GlobalObject::EnsurePropertyCell(global, name);
947  ASSERT(cell->value()->IsTheHole());
948  __ mov(scratch, Operand(cell));
949  __ ldr(scratch,
950  FieldMemOperand(scratch, JSGlobalPropertyCell::kValueOffset));
951  __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
952  __ cmp(scratch, ip);
953  __ b(ne, miss);
954 }
955 
956 
957 // Calls GenerateCheckPropertyCell for each global object in the prototype chain
958 // from object to (but not including) holder.
959 static void GenerateCheckPropertyCells(MacroAssembler* masm,
960  Handle<JSObject> object,
961  Handle<JSObject> holder,
962  Handle<String> name,
963  Register scratch,
964  Label* miss) {
965  Handle<JSObject> current = object;
966  while (!current.is_identical_to(holder)) {
967  if (current->IsGlobalObject()) {
968  GenerateCheckPropertyCell(masm,
969  Handle<GlobalObject>::cast(current),
970  name,
971  scratch,
972  miss);
973  }
974  current = Handle<JSObject>(JSObject::cast(current->GetPrototype()));
975  }
976 }
977 
978 
979  // Convert and store the int passed in register ival to an IEEE 754 single
980  // precision floating point value at memory location (dst + 4 * wordoffset).
981  // If VFP3 is available, use it for the conversion.
982 static void StoreIntAsFloat(MacroAssembler* masm,
983  Register dst,
984  Register wordoffset,
985  Register ival,
986  Register fval,
987  Register scratch1,
988  Register scratch2) {
989  if (CpuFeatures::IsSupported(VFP3)) {
990  CpuFeatures::Scope scope(VFP3);
991  __ vmov(s0, ival);
992  __ add(scratch1, dst, Operand(wordoffset, LSL, 2));
993  __ vcvt_f32_s32(s0, s0);
994  __ vstr(s0, scratch1, 0);
995  } else {
996  Label not_special, done;
997  // Move sign bit from source to destination. This works because the sign
998  // bit in the exponent word of the double has the same position and polarity
999  // as the 2's complement sign bit in a Smi.
1000  ASSERT(kBinary32SignMask == 0x80000000u);
1001 
1002  __ and_(fval, ival, Operand(kBinary32SignMask), SetCC);
1003  // Negate value if it is negative.
1004  __ rsb(ival, ival, Operand(0, RelocInfo::NONE), LeaveCC, ne);
1005 
1006  // We have -1, 0 or 1, which we treat specially. Register ival contains
1007  // absolute value: it is either equal to 1 (special case of -1 and 1),
1008  // greater than 1 (not a special case) or less than 1 (special case of 0).
1009  __ cmp(ival, Operand(1));
1010  __ b(gt, &not_special);
1011 
1012  // For 1 or -1 we need to or in the 0 exponent (biased).
1013  static const uint32_t exponent_word_for_1 =
1014  kBinary32ExponentBias << kBinary32ExponentShift;
1015 
1016  __ orr(fval, fval, Operand(exponent_word_for_1), LeaveCC, eq);
1017  __ b(&done);
1018 
1019  __ bind(&not_special);
1020  // Count leading zeros.
1021  // Gets the wrong answer for 0, but we already checked for that case above.
1022  Register zeros = scratch2;
1023  __ CountLeadingZeros(zeros, ival, scratch1);
1024 
1025  // Compute exponent and or it into the exponent register.
1026  __ rsb(scratch1,
1027  zeros,
1028  Operand((kBitsPerInt - 1) + kBinary32ExponentBias));
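// E.g. for ival = 6 (binary 110): zeros = 29, so scratch1 = 31 + 127 - 29
// = 129, the biased exponent of 6.0f = 1.5 * 2^2.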
1029 
1030  __ orr(fval,
1031  fval,
1032  Operand(scratch1, LSL, kBinary32ExponentShift));
1033 
1034  // Shift up the source chopping the top bit off.
1035  __ add(zeros, zeros, Operand(1));
1036  // This wouldn't work for 1 and -1 as the shift would be 32 which means 0.
1037  __ mov(ival, Operand(ival, LSL, zeros));
1038  // Or in the mantissa (the top kBinary32MantissaBits bits of ival).
1039  __ orr(fval,
1040  fval,
1041  Operand(ival, LSR, kBitsPerInt - kBinary32MantissaBits));
1042 
1043  __ bind(&done);
1044  __ str(fval, MemOperand(dst, wordoffset, LSL, 2));
1045  }
1046 }
1047 
1048 
1049 // Convert unsigned integer with specified number of leading zeroes in binary
1050 // representation to IEEE 754 double.
1051 // Integer to convert is passed in register hiword.
1052 // Resulting double is returned in registers hiword:loword.
1053  // This function does not work correctly for 0.
1054 static void GenerateUInt2Double(MacroAssembler* masm,
1055  Register hiword,
1056  Register loword,
1057  Register scratch,
1058  int leading_zeroes) {
1059  const int meaningful_bits = kBitsPerInt - leading_zeroes - 1;
1060  const int biased_exponent = HeapNumber::kExponentBias + meaningful_bits;
1061 
1062  const int mantissa_shift_for_hi_word =
1063  meaningful_bits - HeapNumber::kMantissaBitsInTopWord;
1064 
1065  const int mantissa_shift_for_lo_word =
1066  kBitsPerInt - mantissa_shift_for_hi_word;
1067 
1068  __ mov(scratch, Operand(biased_exponent << HeapNumber::kExponentShift));
1069  if (mantissa_shift_for_hi_word > 0) {
1070  __ mov(loword, Operand(hiword, LSL, mantissa_shift_for_lo_word));
1071  __ orr(hiword, scratch, Operand(hiword, LSR, mantissa_shift_for_hi_word));
1072  } else {
1073  __ mov(loword, Operand(0, RelocInfo::NONE));
1074  __ orr(hiword, scratch, Operand(hiword, LSL, mantissa_shift_for_hi_word));
1075  }
1076 
1077  // If the least significant bit of the biased exponent was not 1, it was
1078  // corrupted by the most significant bit of the mantissa, so fix that here.
1079  if (!(biased_exponent & 1)) {
1080  __ bic(hiword, hiword, Operand(1 << HeapNumber::kExponentShift));
1081  }
1082 }
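// Worked example: hiword = 0x80000000 with leading_zeroes = 0 gives
// meaningful_bits = 31 and a biased exponent of 1023 + 31 = 1054. The
// implicit leading 1 of the mantissa spills into the exponent's low bit and
// is cleared by the bic above, so the result is exactly 2^31.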
1083 
1084 
1085 #undef __
1086 #define __ ACCESS_MASM(masm())
1087 
1088 
1089 Register StubCompiler::CheckPrototypes(Handle<JSObject> object,
1090  Register object_reg,
1091  Handle<JSObject> holder,
1092  Register holder_reg,
1093  Register scratch1,
1094  Register scratch2,
1095  Handle<String> name,
1096  int save_at_depth,
1097  Label* miss) {
1098  // Make sure there's no overlap between holder and object registers.
1099  ASSERT(!scratch1.is(object_reg) && !scratch1.is(holder_reg));
1100  ASSERT(!scratch2.is(object_reg) && !scratch2.is(holder_reg)
1101  && !scratch2.is(scratch1));
1102 
1103  // Keep track of the current object in register reg.
1104  Register reg = object_reg;
1105  int depth = 0;
1106 
1107  if (save_at_depth == depth) {
1108  __ str(reg, MemOperand(sp));
1109  }
1110 
1111  // Check the maps in the prototype chain.
1112  // Traverse the prototype chain from the object and do map checks.
1113  Handle<JSObject> current = object;
1114  while (!current.is_identical_to(holder)) {
1115  ++depth;
1116 
1117  // Only global objects and objects that do not require access
1118  // checks are allowed in stubs.
1119  ASSERT(current->IsJSGlobalProxy() || !current->IsAccessCheckNeeded());
1120 
1121  Handle<JSObject> prototype(JSObject::cast(current->GetPrototype()));
1122  if (!current->HasFastProperties() &&
1123  !current->IsJSGlobalObject() &&
1124  !current->IsJSGlobalProxy()) {
1125  if (!name->IsSymbol()) {
1126  name = factory()->LookupSymbol(name);
1127  }
1128  ASSERT(current->property_dictionary()->FindEntry(*name) ==
1129  StringDictionary::kNotFound);
1130 
1131  GenerateDictionaryNegativeLookup(masm(), miss, reg, name,
1132  scratch1, scratch2);
1133 
1134  __ ldr(scratch1, FieldMemOperand(reg, HeapObject::kMapOffset));
1135  reg = holder_reg; // From now on the object will be in holder_reg.
1136  __ ldr(reg, FieldMemOperand(scratch1, Map::kPrototypeOffset));
1137  } else {
1138  Handle<Map> current_map(current->map());
1139  __ CheckMap(reg, scratch1, current_map, miss, DONT_DO_SMI_CHECK,
1140  ALLOW_ELEMENT_TRANSITION_MAPS);
1141 
1142  // Check access rights to the global object. This has to happen after
1143  // the map check so that we know that the object is actually a global
1144  // object.
1145  if (current->IsJSGlobalProxy()) {
1146  __ CheckAccessGlobalProxy(reg, scratch2, miss);
1147  }
1148  reg = holder_reg; // From now on the object will be in holder_reg.
1149 
1150  if (heap()->InNewSpace(*prototype)) {
1151  // The prototype is in new space; we cannot store a reference to it
1152  // in the code. Load it from the map.
1153  __ ldr(reg, FieldMemOperand(scratch1, Map::kPrototypeOffset));
1154  } else {
1155  // The prototype is in old space; load it directly.
1156  __ mov(reg, Operand(prototype));
1157  }
1158  }
1159 
1160  if (save_at_depth == depth) {
1161  __ str(reg, MemOperand(sp));
1162  }
1163 
1164  // Go to the next object in the prototype chain.
1165  current = prototype;
1166  }
1167 
1168  // Log the check depth.
1169  LOG(masm()->isolate(), IntEvent("check-maps-depth", depth + 1));
1170 
1171  // Check the holder map.
1172  __ CheckMap(reg, scratch1, Handle<Map>(current->map()), miss,
1173  DONT_DO_SMI_CHECK, ALLOW_ELEMENT_TRANSITION_MAPS);
1174 
1175  // Perform security check for access to the global object.
1176  ASSERT(holder->IsJSGlobalProxy() || !holder->IsAccessCheckNeeded());
1177  if (holder->IsJSGlobalProxy()) {
1178  __ CheckAccessGlobalProxy(reg, scratch1, miss);
1179  }
1180 
1181  // If we've skipped any global objects, it's not enough to verify that
1182  // their maps haven't changed. We also need to check that the property
1183  // cell for the property is still empty.
1184  GenerateCheckPropertyCells(masm(), object, holder, name, scratch1, miss);
1185 
1186  // Return the register containing the holder.
1187  return reg;
1188 }
1189 
1190 
1191 void StubCompiler::GenerateLoadField(Handle<JSObject> object,
1192  Handle<JSObject> holder,
1193  Register receiver,
1194  Register scratch1,
1195  Register scratch2,
1196  Register scratch3,
1197  int index,
1198  Handle<String> name,
1199  Label* miss) {
1200  // Check that the receiver isn't a smi.
1201  __ JumpIfSmi(receiver, miss);
1202 
1203  // Check that the maps haven't changed.
1204  Register reg = CheckPrototypes(
1205  object, receiver, holder, scratch1, scratch2, scratch3, name, miss);
1206  GenerateFastPropertyLoad(masm(), r0, reg, holder, index);
1207  __ Ret();
1208 }
1209 
1210 
1211 void StubCompiler::GenerateLoadConstant(Handle<JSObject> object,
1212  Handle<JSObject> holder,
1213  Register receiver,
1214  Register scratch1,
1215  Register scratch2,
1216  Register scratch3,
1217  Handle<JSFunction> value,
1218  Handle<String> name,
1219  Label* miss) {
1220  // Check that the receiver isn't a smi.
1221  __ JumpIfSmi(receiver, miss);
1222 
1223  // Check that the maps haven't changed.
1224  CheckPrototypes(
1225  object, receiver, holder, scratch1, scratch2, scratch3, name, miss);
1226 
1227  // Return the constant value.
1228  __ LoadHeapObject(r0, value);
1229  __ Ret();
1230 }
1231 
1232 
1233 void StubCompiler::GenerateLoadCallback(Handle<JSObject> object,
1234  Handle<JSObject> holder,
1235  Register receiver,
1236  Register name_reg,
1237  Register scratch1,
1238  Register scratch2,
1239  Register scratch3,
1240  Handle<AccessorInfo> callback,
1241  Handle<String> name,
1242  Label* miss) {
1243  // Check that the receiver isn't a smi.
1244  __ JumpIfSmi(receiver, miss);
1245 
1246  // Check that the maps haven't changed.
1247  Register reg = CheckPrototypes(object, receiver, holder, scratch1,
1248  scratch2, scratch3, name, miss);
1249 
1250  // Build AccessorInfo::args_ list on the stack and push property name below
1251  // the exit frame to make GC aware of them and store pointers to them.
1252  __ push(receiver);
1253  __ mov(scratch2, sp); // scratch2 = AccessorInfo::args_
1254  if (heap()->InNewSpace(callback->data())) {
1255  __ Move(scratch3, callback);
1256  __ ldr(scratch3, FieldMemOperand(scratch3, AccessorInfo::kDataOffset));
1257  } else {
1258  __ Move(scratch3, Handle<Object>(callback->data()));
1259  }
1260  __ Push(reg, scratch3);
1261  __ mov(scratch3, Operand(ExternalReference::isolate_address()));
1262  __ Push(scratch3, name_reg);
1263  __ mov(r0, sp); // r0 = Handle<String>
1264 
1265  const int kApiStackSpace = 1;
1266  FrameScope frame_scope(masm(), StackFrame::MANUAL);
1267  __ EnterExitFrame(false, kApiStackSpace);
1268 
1269  // Create AccessorInfo instance on the stack above the exit frame with
1270  // scratch2 (internal::Object** args_) as the data.
1271  __ str(scratch2, MemOperand(sp, 1 * kPointerSize));
1272  __ add(r1, sp, Operand(1 * kPointerSize)); // r1 = AccessorInfo&
1273 
1274  const int kStackUnwindSpace = 5;
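// The five words to unwind are the receiver, holder, callback data, isolate
// and property name pushed before entering the exit frame.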
1275  Address getter_address = v8::ToCData<Address>(callback->getter());
1276  ApiFunction fun(getter_address);
1277  ExternalReference ref =
1278  ExternalReference(&fun,
1279  ExternalReference::DIRECT_GETTER_CALL,
1280  masm()->isolate());
1281  __ CallApiFunctionAndReturn(ref, kStackUnwindSpace);
1282 }
1283 
1284 
1285 void StubCompiler::GenerateLoadInterceptor(Handle<JSObject> object,
1286  Handle<JSObject> interceptor_holder,
1287  LookupResult* lookup,
1288  Register receiver,
1289  Register name_reg,
1290  Register scratch1,
1291  Register scratch2,
1292  Register scratch3,
1293  Handle<String> name,
1294  Label* miss) {
1295  ASSERT(interceptor_holder->HasNamedInterceptor());
1296  ASSERT(!interceptor_holder->GetNamedInterceptor()->getter()->IsUndefined());
1297 
1298  // Check that the receiver isn't a smi.
1299  __ JumpIfSmi(receiver, miss);
1300 
1301  // So far the most popular follow-ups for interceptor loads are FIELD
1302  // and CALLBACKS, so inline only them; other cases may be added
1303  // later.
1304  bool compile_followup_inline = false;
1305  if (lookup->IsFound() && lookup->IsCacheable()) {
1306  if (lookup->type() == FIELD) {
1307  compile_followup_inline = true;
1308  } else if (lookup->type() == CALLBACKS &&
1309  lookup->GetCallbackObject()->IsAccessorInfo()) {
1310  AccessorInfo* callback = AccessorInfo::cast(lookup->GetCallbackObject());
1311  compile_followup_inline = callback->getter() != NULL &&
1312  callback->IsCompatibleReceiver(*object);
1313  }
1314  }
1315 
1316  if (compile_followup_inline) {
1317  // Compile the interceptor call, followed by inline code to load the
1318  // property from further up the prototype chain if the call fails.
1319  // Check that the maps haven't changed.
1320  Register holder_reg = CheckPrototypes(object, receiver, interceptor_holder,
1321  scratch1, scratch2, scratch3,
1322  name, miss);
1323  ASSERT(holder_reg.is(receiver) || holder_reg.is(scratch1));
1324 
1325  // Preserve the receiver register explicitly whenever it is different from
1326  // the holder and it is needed should the interceptor return without any
1327  // result. The CALLBACKS case needs the receiver to be passed into C++ code;
1328  // the FIELD case might cause a miss during the prototype check.
1329  bool must_perform_prototype_check = *interceptor_holder != lookup->holder();
1330  bool must_preserve_receiver_reg = !receiver.is(holder_reg) &&
1331  (lookup->type() == CALLBACKS || must_perform_prototype_check);
1332 
1333  // Save necessary data before invoking an interceptor.
1334  // Requires a frame to make GC aware of pushed pointers.
1335  {
1336  FrameScope frame_scope(masm(), StackFrame::INTERNAL);
1337  if (must_preserve_receiver_reg) {
1338  __ Push(receiver, holder_reg, name_reg);
1339  } else {
1340  __ Push(holder_reg, name_reg);
1341  }
1342  // Invoke an interceptor. Note: map checks from receiver to
1343  // interceptor's holder have been compiled before (see a caller
1344  // of this method).
1345  CompileCallLoadPropertyWithInterceptor(masm(),
1346  receiver,
1347  holder_reg,
1348  name_reg,
1349  interceptor_holder);
1350  // Check if interceptor provided a value for property. If it's
1351  // the case, return immediately.
1352  Label interceptor_failed;
1353  __ LoadRoot(scratch1, Heap::kNoInterceptorResultSentinelRootIndex);
1354  __ cmp(r0, scratch1);
1355  __ b(eq, &interceptor_failed);
1356  frame_scope.GenerateLeaveFrame();
1357  __ Ret();
1358 
1359  __ bind(&interceptor_failed);
1360  __ pop(name_reg);
1361  __ pop(holder_reg);
1362  if (must_preserve_receiver_reg) {
1363  __ pop(receiver);
1364  }
1365  // Leave the internal frame.
1366  }
1367  // Check that the maps from interceptor's holder to lookup's holder
1368  // haven't changed. And load lookup's holder into |holder| register.
1369  if (must_perform_prototype_check) {
1370  holder_reg = CheckPrototypes(interceptor_holder,
1371  holder_reg,
1372  Handle<JSObject>(lookup->holder()),
1373  scratch1,
1374  scratch2,
1375  scratch3,
1376  name,
1377  miss);
1378  }
1379 
1380  if (lookup->type() == FIELD) {
1381  // We found FIELD property in prototype chain of interceptor's holder.
1382  // Retrieve a field from field's holder.
1383  GenerateFastPropertyLoad(masm(), r0, holder_reg,
1384  Handle<JSObject>(lookup->holder()),
1385  lookup->GetFieldIndex());
1386  __ Ret();
1387  } else {
1388  // We found CALLBACKS property in prototype chain of interceptor's
1389  // holder.
1390  ASSERT(lookup->type() == CALLBACKS);
1391  Handle<AccessorInfo> callback(
1392  AccessorInfo::cast(lookup->GetCallbackObject()));
1393  ASSERT(callback->getter() != NULL);
1394 
1395  // Tail call to runtime.
1396  // Important invariant in CALLBACKS case: the code above must be
1397  // structured to never clobber |receiver| register.
1398  __ Move(scratch2, callback);
1399  // holder_reg is either receiver or scratch1.
1400  if (!receiver.is(holder_reg)) {
1401  ASSERT(scratch1.is(holder_reg));
1402  __ Push(receiver, holder_reg);
1403  } else {
1404  __ push(receiver);
1405  __ push(holder_reg);
1406  }
1407  __ ldr(scratch3,
1408  FieldMemOperand(scratch2, AccessorInfo::kDataOffset));
1409  __ mov(scratch1, Operand(ExternalReference::isolate_address()));
1410  __ Push(scratch3, scratch1, scratch2, name_reg);
1411 
1412  ExternalReference ref =
1413  ExternalReference(IC_Utility(IC::kLoadCallbackProperty),
1414  masm()->isolate());
1415  __ TailCallExternalReference(ref, 6, 1);
1416  }
1417  } else { // !compile_followup_inline
1418  // Call the runtime system to load the interceptor.
1419  // Check that the maps haven't changed.
1420  Register holder_reg = CheckPrototypes(object, receiver, interceptor_holder,
1421  scratch1, scratch2, scratch3,
1422  name, miss);
1423  PushInterceptorArguments(masm(), receiver, holder_reg,
1424  name_reg, interceptor_holder);
1425 
1426  ExternalReference ref =
1427  ExternalReference(IC_Utility(IC::kLoadPropertyWithInterceptorForLoad),
1428  masm()->isolate());
1429  __ TailCallExternalReference(ref, 6, 1);
1430  }
1431 }
1432 
1433 
1434 void CallStubCompiler::GenerateNameCheck(Handle<String> name, Label* miss) {
1435  if (kind_ == Code::KEYED_CALL_IC) {
1436  __ cmp(r2, Operand(name));
1437  __ b(ne, miss);
1438  }
1439 }
1440 
1441 
1442 void CallStubCompiler::GenerateGlobalReceiverCheck(Handle<JSObject> object,
1443  Handle<JSObject> holder,
1444  Handle<String> name,
1445  Label* miss) {
1446  ASSERT(holder->IsGlobalObject());
1447 
1448  // Get the number of arguments.
1449  const int argc = arguments().immediate();
1450 
1451  // Get the receiver from the stack.
1452  __ ldr(r0, MemOperand(sp, argc * kPointerSize));
1453 
1454  // Check that the maps haven't changed.
1455  __ JumpIfSmi(r0, miss);
1456  CheckPrototypes(object, r0, holder, r3, r1, r4, name, miss);
1457 }
1458 
1459 
1460 void CallStubCompiler::GenerateLoadFunctionFromCell(
1461  Handle<JSGlobalPropertyCell> cell,
1462  Handle<JSFunction> function,
1463  Label* miss) {
1464  // Get the value from the cell.
1465  __ mov(r3, Operand(cell));
1466  __ ldr(r1, FieldMemOperand(r3, JSGlobalPropertyCell::kValueOffset));
1467 
1468  // Check that the cell contains the same function.
1469  if (heap()->InNewSpace(*function)) {
1470  // We can't embed a pointer to a function in new space so we have
1471  // to verify that the shared function info is unchanged. This has
1472  // the nice side effect that multiple closures based on the same
1473  // function can all use this call IC. Before we load through the
1474  // function, we have to verify that it still is a function.
1475  __ JumpIfSmi(r1, miss);
1476  __ CompareObjectType(r1, r3, r3, JS_FUNCTION_TYPE);
1477  __ b(ne, miss);
1478 
1479  // Check the shared function info. Make sure it hasn't changed.
1480  __ Move(r3, Handle<SharedFunctionInfo>(function->shared()));
1481  __ ldr(r4, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
1482  __ cmp(r4, r3);
1483  } else {
1484  __ cmp(r1, Operand(function));
1485  }
1486  __ b(ne, miss);
1487 }
1488 
1489 
1490 void CallStubCompiler::GenerateMissBranch() {
1491  Handle<Code> code =
1492  isolate()->stub_cache()->ComputeCallMiss(arguments().immediate(),
1493  kind_,
1494  extra_state_);
1495  __ Jump(code, RelocInfo::CODE_TARGET);
1496 }
1497 
1498 
1499 Handle<Code> CallStubCompiler::CompileCallField(Handle<JSObject> object,
1500  Handle<JSObject> holder,
1501  int index,
1502  Handle<String> name) {
1503  // ----------- S t a t e -------------
1504  // -- r2 : name
1505  // -- lr : return address
1506  // -----------------------------------
1507  Label miss;
1508 
1509  GenerateNameCheck(name, &miss);
1510 
1511  const int argc = arguments().immediate();
1512 
1513  // Get the receiver of the function from the stack into r0.
1514  __ ldr(r0, MemOperand(sp, argc * kPointerSize));
1515  // Check that the receiver isn't a smi.
1516  __ JumpIfSmi(r0, &miss);
1517 
1518  // Do the right check and compute the holder register.
1519  Register reg = CheckPrototypes(object, r0, holder, r1, r3, r4, name, &miss);
1520  GenerateFastPropertyLoad(masm(), r1, reg, holder, index);
1521 
1522  GenerateCallFunction(masm(), object, arguments(), &miss, extra_state_);
1523 
1524  // Handle call cache miss.
1525  __ bind(&miss);
1526  GenerateMissBranch();
1527 
1528  // Return the generated code.
1529  return GetCode(FIELD, name);
1530 }
1531 
1532 
1533 Handle<Code> CallStubCompiler::CompileArrayPushCall(
1534  Handle<Object> object,
1535  Handle<JSObject> holder,
1536  Handle<JSGlobalPropertyCell> cell,
1537  Handle<JSFunction> function,
1538  Handle<String> name) {
1539  // ----------- S t a t e -------------
1540  // -- r2 : name
1541  // -- lr : return address
1542  // -- sp[(argc - n - 1) * 4] : arg[n] (zero-based)
1543  // -- ...
1544  // -- sp[argc * 4] : receiver
1545  // -----------------------------------
1546 
1547  // If object is not an array, bail out to regular call.
1548  if (!object->IsJSArray() || !cell.is_null()) return Handle<Code>::null();
1549 
1550  Label miss;
1551  GenerateNameCheck(name, &miss);
1552 
1553  Register receiver = r1;
1554  // Get the receiver from the stack
1555  const int argc = arguments().immediate();
1556  __ ldr(receiver, MemOperand(sp, argc * kPointerSize));
1557 
1558  // Check that the receiver isn't a smi.
1559  __ JumpIfSmi(receiver, &miss);
1560 
1561  // Check that the maps haven't changed.
1562  CheckPrototypes(Handle<JSObject>::cast(object), receiver, holder, r3, r0, r4,
1563  name, &miss);
1564 
1565  if (argc == 0) {
1566  // Nothing to do, just return the length.
1567  __ ldr(r0, FieldMemOperand(receiver, JSArray::kLengthOffset));
1568  __ Drop(argc + 1);
1569  __ Ret();
1570  } else {
1571  Label call_builtin;
1572 
1573  if (argc == 1) { // Otherwise fall through to call the builtin.
1574  Label attempt_to_grow_elements;
1575 
1576  Register elements = r6;
1577  Register end_elements = r5;
1578  // Get the elements array of the object.
1579  __ ldr(elements, FieldMemOperand(receiver, JSArray::kElementsOffset));
1580 
1581  // Check that the elements are in fast mode and writable.
1582  __ CheckMap(elements,
1583  r0,
1584  Heap::kFixedArrayMapRootIndex,
1585  &call_builtin,
1586  DONT_DO_SMI_CHECK);
1587 
1588 
1589  // Get the array's length into r0 and calculate new length.
1590  __ ldr(r0, FieldMemOperand(receiver, JSArray::kLengthOffset));
1591  STATIC_ASSERT(kSmiTagSize == 1);
1592  STATIC_ASSERT(kSmiTag == 0);
1593  __ add(r0, r0, Operand(Smi::FromInt(argc)));
1594 
1595  // Get the elements' length.
1596  __ ldr(r4, FieldMemOperand(elements, FixedArray::kLengthOffset));
1597 
1598  // Check if we could survive without allocation.
1599  __ cmp(r0, r4);
1600  __ b(gt, &attempt_to_grow_elements);
1601 
1602  // Check if value is a smi.
1603  Label with_write_barrier;
1604  __ ldr(r4, MemOperand(sp, (argc - 1) * kPointerSize));
1605  __ JumpIfNotSmi(r4, &with_write_barrier);
1606 
1607  // Save new length.
1608  __ str(r0, FieldMemOperand(receiver, JSArray::kLengthOffset));
1609 
1610  // Store the value.
1611  // We may need a register containing the address end_elements below,
1612  // so write back the value in end_elements.
1613  __ add(end_elements, elements,
1614  Operand(r0, LSL, kPointerSizeLog2 - kSmiTagSize));
1615  const int kEndElementsOffset =
1616  FixedArray::kHeaderSize - kHeapObjectTag - argc * kPointerSize;
1617  __ str(r4, MemOperand(end_elements, kEndElementsOffset, PreIndex));
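// end_elements = elements + new_length * kPointerSize: the Smi-tagged length
// already equals 2 * length, so one more left shift scales it to a byte
// offset. With argc == 1 the pre-indexed store above therefore wrote the new
// element into the slot at the old length.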
1618 
1619  // The smi case is done; r0 already holds the new length.
1620  __ Drop(argc + 1);
1621  __ Ret();
1622 
1623  __ bind(&with_write_barrier);
1624 
1625  __ ldr(r3, FieldMemOperand(receiver, HeapObject::kMapOffset));
1626 
1627  if (FLAG_smi_only_arrays && !FLAG_trace_elements_transitions) {
1628  Label fast_object, not_fast_object;
1629  __ CheckFastObjectElements(r3, r7, &not_fast_object);
1630  __ jmp(&fast_object);
1631  // In case of fast smi-only, convert to fast object, otherwise bail out.
1632  __ bind(&not_fast_object);
1633  __ CheckFastSmiElements(r3, r7, &call_builtin);
1634  // r1: receiver
1635  // r3: map
1636  Label try_holey_map;
1637  __ LoadTransitionedArrayMapConditional(FAST_SMI_ELEMENTS,
1638  FAST_ELEMENTS,
1639  r3,
1640  r7,
1641  &try_holey_map);
1642  __ mov(r2, receiver);
1643  ElementsTransitionGenerator::
1644      GenerateMapChangeElementsTransition(masm());
1645  __ jmp(&fast_object);
1646 
1647  __ bind(&try_holey_map);
1648  __ LoadTransitionedArrayMapConditional(FAST_HOLEY_SMI_ELEMENTS,
1649  FAST_HOLEY_ELEMENTS,
1650  r3,
1651  r7,
1652  &call_builtin);
1653  __ mov(r2, receiver);
1654  ElementsTransitionGenerator::
1655      GenerateMapChangeElementsTransition(masm());
1656  __ bind(&fast_object);
1657  } else {
1658  __ CheckFastObjectElements(r3, r3, &call_builtin);
1659  }
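  // (Sketch of the transition above in JS terms: pushing a heap object onto
  // a FAST_SMI_ELEMENTS array, e.g. [1, 2, 3].push({}), first migrates the
  // map to FAST_ELEMENTS (or FAST_HOLEY_ELEMENTS for holey arrays) so the
  // store below remains valid for non-smi values.)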
1660 
1661  // Save new length.
1662  __ str(r0, FieldMemOperand(receiver, JSArray::kLengthOffset));
1663 
1664  // Store the value.
1665  // We may need a register containing the address end_elements below,
1666  // so write back the value in end_elements.
1667  __ add(end_elements, elements,
1668  Operand(r0, LSL, kPointerSizeLog2 - kSmiTagSize));
1669  __ str(r4, MemOperand(end_elements, kEndElementsOffset, PreIndex));
1670 
1671  __ RecordWrite(elements,
1672  end_elements,
1673  r4,
1674  kLRHasNotBeenSaved,
1675  kDontSaveFPRegs,
1676  EMIT_REMEMBERED_SET,
1677  OMIT_SMI_CHECK);
1678  __ Drop(argc + 1);
1679  __ Ret();
1680 
1681  __ bind(&attempt_to_grow_elements);
1682  // r0: array's length + 1.
1683  // r4: elements' length.
1684 
1685  if (!FLAG_inline_new) {
1686  __ b(&call_builtin);
1687  }
1688 
1689  __ ldr(r2, MemOperand(sp, (argc - 1) * kPointerSize));
1690  // Growing elements that are SMI-only requires special handling in case
1691  // the new element is non-Smi. For now, delegate to the builtin.
1692  Label no_fast_elements_check;
1693  __ JumpIfSmi(r2, &no_fast_elements_check);
1694  __ ldr(r7, FieldMemOperand(receiver, HeapObject::kMapOffset));
1695  __ CheckFastObjectElements(r7, r7, &call_builtin);
1696  __ bind(&no_fast_elements_check);
1697 
1698  Isolate* isolate = masm()->isolate();
1699  ExternalReference new_space_allocation_top =
1700  ExternalReference::new_space_allocation_top_address(isolate);
1701  ExternalReference new_space_allocation_limit =
1702  ExternalReference::new_space_allocation_limit_address(isolate);
1703 
1704  const int kAllocationDelta = 4;
1705  // Load top and check if it is the end of elements.
1706  __ add(end_elements, elements,
1707  Operand(r0, LSL, kPointerSizeLog2 - kSmiTagSize));
1708  __ add(end_elements, end_elements, Operand(kEndElementsOffset));
1709  __ mov(r7, Operand(new_space_allocation_top));
1710  __ ldr(r3, MemOperand(r7));
1711  __ cmp(end_elements, r3);
1712  __ b(ne, &call_builtin);
1713 
1714  __ mov(r9, Operand(new_space_allocation_limit));
1715  __ ldr(r9, MemOperand(r9));
1716  __ add(r3, r3, Operand(kAllocationDelta * kPointerSize));
1717  __ cmp(r3, r9);
1718  __ b(hi, &call_builtin);
1719 
1720  // We fit and could grow elements.
1721  // Update new_space_allocation_top.
1722  __ str(r3, MemOperand(r7));
1723  // Push the argument.
1724  __ str(r2, MemOperand(end_elements));
1725  // Fill the rest with holes.
1726  __ LoadRoot(r3, Heap::kTheHoleValueRootIndex);
1727  for (int i = 1; i < kAllocationDelta; i++) {
1728  __ str(r3, MemOperand(end_elements, i * kPointerSize));
1729  }
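  // (Growing in place only works when this elements store is the most
  // recently allocated object in new space: its end must coincide with the
  // allocation top, so bumping the top by kAllocationDelta words extends
  // the store without copying; the slack was hole-filled above.)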
1730 
1731  // Update elements' and array's sizes.
1732  __ str(r0, FieldMemOperand(receiver, JSArray::kLengthOffset));
1733  __ add(r4, r4, Operand(Smi::FromInt(kAllocationDelta)));
1734  __ str(r4, FieldMemOperand(elements, FixedArray::kLengthOffset));
1735 
1736  // Elements are in new space, so write barrier is not required.
1737  __ Drop(argc + 1);
1738  __ Ret();
1739  }
1740  __ bind(&call_builtin);
1741  __ TailCallExternalReference(ExternalReference(Builtins::c_ArrayPush,
1742  masm()->isolate()),
1743  argc + 1,
1744  1);
1745  }
1746 
1747  // Handle call cache miss.
1748  __ bind(&miss);
1749  GenerateMissBranch();
1750 
1751  // Return the generated code.
1752  return GetCode(function);
1753 }
1754 
1755 
1756 Handle<Code> CallStubCompiler::CompileArrayPopCall(
1757  Handle<Object> object,
1758  Handle<JSObject> holder,
1759  Handle<JSGlobalPropertyCell> cell,
1760  Handle<JSFunction> function,
1761  Handle<String> name) {
1762  // ----------- S t a t e -------------
1763  // -- r2 : name
1764  // -- lr : return address
1765  // -- sp[(argc - n - 1) * 4] : arg[n] (zero-based)
1766  // -- ...
1767  // -- sp[argc * 4] : receiver
1768  // -----------------------------------
1769 
1770  // If object is not an array, bail out to regular call.
1771  if (!object->IsJSArray() || !cell.is_null()) return Handle<Code>::null();
1772 
1773  Label miss, return_undefined, call_builtin;
1774  Register receiver = r1;
1775  Register elements = r3;
1776  GenerateNameCheck(name, &miss);
1777 
1778  // Get the receiver from the stack
1779  const int argc = arguments().immediate();
1780  __ ldr(receiver, MemOperand(sp, argc * kPointerSize));
1781  // Check that the receiver isn't a smi.
1782  __ JumpIfSmi(receiver, &miss);
1783 
1784  // Check that the maps haven't changed.
1785  CheckPrototypes(Handle<JSObject>::cast(object), receiver, holder, elements,
1786  r4, r0, name, &miss);
1787 
1788  // Get the elements array of the object.
1789  __ ldr(elements, FieldMemOperand(receiver, JSArray::kElementsOffset));
1790 
1791  // Check that the elements are in fast mode and writable.
1792  __ CheckMap(elements,
1793  r0,
1794  Heap::kFixedArrayMapRootIndex,
1795  &call_builtin,
1796  DONT_DO_SMI_CHECK);
1797 
1798  // Get the array's length into r4 and calculate new length.
1799  __ ldr(r4, FieldMemOperand(receiver, JSArray::kLengthOffset));
1800  __ sub(r4, r4, Operand(Smi::FromInt(1)), SetCC);
1801  __ b(lt, &return_undefined);
1802 
1803  // Get the last element.
1804  __ LoadRoot(r6, Heap::kTheHoleValueRootIndex);
1805  STATIC_ASSERT(kSmiTagSize == 1);
1806  STATIC_ASSERT(kSmiTag == 0);
1807  // We can't address the last element in one operation. Compute the more
1808  // expensive shift first, and use an offset later on.
1809  __ add(elements, elements, Operand(r4, LSL, kPointerSizeLog2 - kSmiTagSize));
1810  __ ldr(r0, FieldMemOperand(elements, FixedArray::kHeaderSize));
1811  __ cmp(r0, r6);
1812  __ b(eq, &call_builtin);
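  // (If the last element is the hole, the result may have to come from the
  // prototype chain, so the stub bails out to the C++ builtin instead of
  // returning the hole itself.)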
1813 
1814  // Set the array's length.
1815  __ str(r4, FieldMemOperand(receiver, JSArray::kLengthOffset));
1816 
1817  // Fill with the hole.
1818  __ str(r6, FieldMemOperand(elements, FixedArray::kHeaderSize));
1819  __ Drop(argc + 1);
1820  __ Ret();
1821 
1822  __ bind(&return_undefined);
1823  __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
1824  __ Drop(argc + 1);
1825  __ Ret();
1826 
1827  __ bind(&call_builtin);
1828  __ TailCallExternalReference(ExternalReference(Builtins::c_ArrayPop,
1829  masm()->isolate()),
1830  argc + 1,
1831  1);
1832 
1833  // Handle call cache miss.
1834  __ bind(&miss);
1835  GenerateMissBranch();
1836 
1837  // Return the generated code.
1838  return GetCode(function);
1839 }
1840 
1841 
1842 Handle<Code> CallStubCompiler::CompileStringCharCodeAtCall(
1843  Handle<Object> object,
1844  Handle<JSObject> holder,
1845  Handle<JSGlobalPropertyCell> cell,
1846  Handle<JSFunction> function,
1847  Handle<String> name) {
1848  // ----------- S t a t e -------------
1849  // -- r2 : function name
1850  // -- lr : return address
1851  // -- sp[(argc - n - 1) * 4] : arg[n] (zero-based)
1852  // -- ...
1853  // -- sp[argc * 4] : receiver
1854  // -----------------------------------
1855 
1856  // If object is not a string, bail out to regular call.
1857  if (!object->IsString() || !cell.is_null()) return Handle<Code>::null();
1858 
1859  const int argc = arguments().immediate();
1860  Label miss;
1861  Label name_miss;
1862  Label index_out_of_range;
1863  Label* index_out_of_range_label = &index_out_of_range;
1864 
1865  if (kind_ == Code::CALL_IC &&
1866  (CallICBase::StringStubState::decode(extra_state_) ==
1867  DEFAULT_STRING_STUB)) {
1868  index_out_of_range_label = &miss;
1869  }
1870  GenerateNameCheck(name, &name_miss);
1871 
1872  // Check that the maps starting from the prototype haven't changed.
1873  GenerateDirectLoadGlobalFunctionPrototype(masm(),
1874  Context::STRING_FUNCTION_INDEX,
1875  r0,
1876  &miss);
1877  ASSERT(!object.is_identical_to(holder));
1878  CheckPrototypes(Handle<JSObject>(JSObject::cast(object->GetPrototype())),
1879  r0, holder, r1, r3, r4, name, &miss);
1880 
1881  Register receiver = r1;
1882  Register index = r4;
1883  Register result = r0;
1884  __ ldr(receiver, MemOperand(sp, argc * kPointerSize));
1885  if (argc > 0) {
1886  __ ldr(index, MemOperand(sp, (argc - 1) * kPointerSize));
1887  } else {
1888  __ LoadRoot(index, Heap::kUndefinedValueRootIndex);
1889  }
1890 
1891  StringCharCodeAtGenerator generator(receiver,
1892  index,
1893  result,
1894  &miss, // When not a string.
1895  &miss, // When not a number.
1896  index_out_of_range_label,
1897  STRING_INDEX_IS_NUMBER);
1898  generator.GenerateFast(masm());
1899  __ Drop(argc + 1);
1900  __ Ret();
1901 
1902  StubRuntimeCallHelper call_helper;
1903  generator.GenerateSlow(masm(), call_helper);
1904 
1905  if (index_out_of_range.is_linked()) {
1906  __ bind(&index_out_of_range);
1907  __ LoadRoot(r0, Heap::kNanValueRootIndex);
1908  __ Drop(argc + 1);
1909  __ Ret();
1910  }
1911 
1912  __ bind(&miss);
1913  // Restore function name in r2.
1914  __ Move(r2, name);
1915  __ bind(&name_miss);
1916  GenerateMissBranch();
1917 
1918  // Return the generated code.
1919  return GetCode(function);
1920 }
1921 
1922 
1923 Handle<Code> CallStubCompiler::CompileStringCharAtCall(
1924  Handle<Object> object,
1925  Handle<JSObject> holder,
1926  Handle<JSGlobalPropertyCell> cell,
1927  Handle<JSFunction> function,
1928  Handle<String> name) {
1929  // ----------- S t a t e -------------
1930  // -- r2 : function name
1931  // -- lr : return address
1932  // -- sp[(argc - n - 1) * 4] : arg[n] (zero-based)
1933  // -- ...
1934  // -- sp[argc * 4] : receiver
1935  // -----------------------------------
1936 
1937  // If object is not a string, bail out to regular call.
1938  if (!object->IsString() || !cell.is_null()) return Handle<Code>::null();
1939 
1940  const int argc = arguments().immediate();
1941  Label miss;
1942  Label name_miss;
1943  Label index_out_of_range;
1944  Label* index_out_of_range_label = &index_out_of_range;
1945  if (kind_ == Code::CALL_IC &&
1946  (CallICBase::StringStubState::decode(extra_state_) ==
1947  DEFAULT_STRING_STUB)) {
1948  index_out_of_range_label = &miss;
1949  }
1950  GenerateNameCheck(name, &name_miss);
1951 
1952  // Check that the maps starting from the prototype haven't changed.
1953  GenerateDirectLoadGlobalFunctionPrototype(masm(),
1954  Context::STRING_FUNCTION_INDEX,
1955  r0,
1956  &miss);
1957  ASSERT(!object.is_identical_to(holder));
1958  CheckPrototypes(Handle<JSObject>(JSObject::cast(object->GetPrototype())),
1959  r0, holder, r1, r3, r4, name, &miss);
1960 
1961  Register receiver = r0;
1962  Register index = r4;
1963  Register scratch = r3;
1964  Register result = r0;
1965  __ ldr(receiver, MemOperand(sp, argc * kPointerSize));
1966  if (argc > 0) {
1967  __ ldr(index, MemOperand(sp, (argc - 1) * kPointerSize));
1968  } else {
1969  __ LoadRoot(index, Heap::kUndefinedValueRootIndex);
1970  }
1971 
1972  StringCharAtGenerator generator(receiver,
1973  index,
1974  scratch,
1975  result,
1976  &miss, // When not a string.
1977  &miss, // When not a number.
1978  index_out_of_range_label,
1979  STRING_INDEX_IS_NUMBER);
1980  generator.GenerateFast(masm());
1981  __ Drop(argc + 1);
1982  __ Ret();
1983 
1984  StubRuntimeCallHelper call_helper;
1985  generator.GenerateSlow(masm(), call_helper);
1986 
1987  if (index_out_of_range.is_linked()) {
1988  __ bind(&index_out_of_range);
1989  __ LoadRoot(r0, Heap::kEmptyStringRootIndex);
1990  __ Drop(argc + 1);
1991  __ Ret();
1992  }
1993 
1994  __ bind(&miss);
1995  // Restore function name in r2.
1996  __ Move(r2, name);
1997  __ bind(&name_miss);
1998  GenerateMissBranch();
1999 
2000  // Return the generated code.
2001  return GetCode(function);
2002 }
2003 
2004 
2005 Handle<Code> CallStubCompiler::CompileStringFromCharCodeCall(
2006  Handle<Object> object,
2007  Handle<JSObject> holder,
2008  Handle<JSGlobalPropertyCell> cell,
2009  Handle<JSFunction> function,
2010  Handle<String> name) {
2011  // ----------- S t a t e -------------
2012  // -- r2 : function name
2013  // -- lr : return address
2014  // -- sp[(argc - n - 1) * 4] : arg[n] (zero-based)
2015  // -- ...
2016  // -- sp[argc * 4] : receiver
2017  // -----------------------------------
2018 
2019  const int argc = arguments().immediate();
2020 
2021  // If the object is not a JSObject or we got an unexpected number of
2022  // arguments, bail out to the regular call.
2023  if (!object->IsJSObject() || argc != 1) return Handle<Code>::null();
2024 
2025  Label miss;
2026  GenerateNameCheck(name, &miss);
2027 
2028  if (cell.is_null()) {
2029  __ ldr(r1, MemOperand(sp, 1 * kPointerSize));
2030 
2031  STATIC_ASSERT(kSmiTag == 0);
2032  __ JumpIfSmi(r1, &miss);
2033 
2034  CheckPrototypes(Handle<JSObject>::cast(object), r1, holder, r0, r3, r4,
2035  name, &miss);
2036  } else {
2037  ASSERT(cell->value() == *function);
2038  GenerateGlobalReceiverCheck(Handle<JSObject>::cast(object), holder, name,
2039  &miss);
2040  GenerateLoadFunctionFromCell(cell, function, &miss);
2041  }
2042 
2043  // Load the char code argument.
2044  Register code = r1;
2045  __ ldr(code, MemOperand(sp, 0 * kPointerSize));
2046 
2047  // Check the code is a smi.
2048  Label slow;
2049  STATIC_ASSERT(kSmiTag == 0);
2050  __ JumpIfNotSmi(code, &slow);
2051 
2052  // Convert the smi code to uint16.
2053  __ and_(code, code, Operand(Smi::FromInt(0xffff)));
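  // (Smi::FromInt(0xffff) is the tagged constant 0x1fffe, so the mask
  // reduces the char code mod 2^16 while keeping a valid smi, matching
  // String.fromCharCode's ToUint16 semantics, e.g. 0x10041 -> 0x41 ('A').)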
2054 
2055  StringCharFromCodeGenerator generator(code, r0);
2056  generator.GenerateFast(masm());
2057  __ Drop(argc + 1);
2058  __ Ret();
2059 
2060  StubRuntimeCallHelper call_helper;
2061  generator.GenerateSlow(masm(), call_helper);
2062 
2063  // Tail call the full function. We do not have to patch the receiver
2064  // because the function makes no use of it.
2065  __ bind(&slow);
2066  __ InvokeFunction(
2067  function, arguments(), JUMP_FUNCTION, NullCallWrapper(), CALL_AS_METHOD);
2068 
2069  __ bind(&miss);
2070  // r2: function name.
2071  GenerateMissBranch();
2072 
2073  // Return the generated code.
2074  return cell.is_null() ? GetCode(function) : GetCode(NORMAL, name);
2075 }
2076 
2077 
2078 Handle<Code> CallStubCompiler::CompileMathFloorCall(
2079  Handle<Object> object,
2080  Handle<JSObject> holder,
2081  Handle<JSGlobalPropertyCell> cell,
2082  Handle<JSFunction> function,
2083  Handle<String> name) {
2084  // ----------- S t a t e -------------
2085  // -- r2 : function name
2086  // -- lr : return address
2087  // -- sp[(argc - n - 1) * 4] : arg[n] (zero-based)
2088  // -- ...
2089  // -- sp[argc * 4] : receiver
2090  // -----------------------------------
2091 
2092  if (!CpuFeatures::IsSupported(VFP3)) {
2093  return Handle<Code>::null();
2094  }
2095 
2096  CpuFeatures::Scope scope_vfp3(VFP3);
2097  const int argc = arguments().immediate();
2098  // If the object is not a JSObject or we got an unexpected number of
2099  // arguments, bail out to the regular call.
2100  if (!object->IsJSObject() || argc != 1) return Handle<Code>::null();
2101 
2102  Label miss, slow;
2103  GenerateNameCheck(name, &miss);
2104 
2105  if (cell.is_null()) {
2106  __ ldr(r1, MemOperand(sp, 1 * kPointerSize));
2107  STATIC_ASSERT(kSmiTag == 0);
2108  __ JumpIfSmi(r1, &miss);
2109  CheckPrototypes(Handle<JSObject>::cast(object), r1, holder, r0, r3, r4,
2110  name, &miss);
2111  } else {
2112  ASSERT(cell->value() == *function);
2113  GenerateGlobalReceiverCheck(Handle<JSObject>::cast(object), holder, name,
2114  &miss);
2115  GenerateLoadFunctionFromCell(cell, function, &miss);
2116  }
2117 
2118  // Load the (only) argument into r0.
2119  __ ldr(r0, MemOperand(sp, 0 * kPointerSize));
2120 
2121  // If the argument is a smi, just return.
2122  STATIC_ASSERT(kSmiTag == 0);
2123  __ tst(r0, Operand(kSmiTagMask));
2124  __ Drop(argc + 1, eq);
2125  __ Ret(eq);
2126 
2127  __ CheckMap(r0, r1, Heap::kHeapNumberMapRootIndex, &slow, DONT_DO_SMI_CHECK);
2128 
2129  Label wont_fit_smi, no_vfp_exception, restore_fpscr_and_return;
2130 
2131  // If vfp3 is enabled, we use the fpu rounding with the RM (round towards
2132  // minus infinity) mode.
2133 
2134  // Load the HeapNumber value.
2135  // We will need access to the value in the core registers, so we load it
2136  // with ldrd and move it to the fpu. It also spares a sub instruction for
2137  // updating the HeapNumber value address, as vldr expects a multiple
2138  // of 4 offset.
2139  __ Ldrd(r4, r5, FieldMemOperand(r0, HeapNumber::kValueOffset));
2140  __ vmov(d1, r4, r5);
2141 
2142  // Backup FPSCR.
2143  __ vmrs(r3);
2144  // Set custom FPSCR:
2145  // - Set rounding mode to "Round towards Minus Infinity"
2146  // (i.e. bits [23:22] = 0b10).
2147  // - Clear vfp cumulative exception flags (bits [3:0]).
2148  // - Make sure Flush-to-zero mode control bit is unset (bit 24).
2149  __ bic(r9, r3,
2150  Operand(kVFPExceptionMask | kVFPRoundingModeMask | kVFPFlushToZeroMask));
2151  __ orr(r9, r9, Operand(kRoundToMinusInf));
2152  __ vmsr(r9);
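  // (With RM set, the vcvt below floors in hardware: 1.5 converts to 1 and
  // -1.5 to -2, exactly Math.floor, whereas the default round-to-nearest
  // mode would turn 1.5 into 2.)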
2153 
2154  // Convert the argument to an integer.
2155  __ vcvt_s32_f64(s0, d1, kFPSCRRounding);
2156 
2157  // Use vcvt latency to start checking for special cases.
2158  // Get the argument exponent and clear the sign bit.
2159  __ bic(r6, r5, Operand(HeapNumber::kSignMask));
2160  __ mov(r6, Operand(r6, LSR, HeapNumber::kMantissaBitsInTopWord));
2161 
2162  // Retrieve FPSCR and check for vfp exceptions.
2163  __ vmrs(r9);
2164  __ tst(r9, Operand(kVFPExceptionMask));
2165  __ b(&no_vfp_exception, eq);
2166 
2167  // Check for NaN, Infinity, and -Infinity.
2168  // They are invariant through a Math.Floor call, so just
2169  // return the original argument.
2170  __ sub(r7, r6, Operand(HeapNumber::kExponentMask
2171  >> HeapNumber::kMantissaBitsInTopWord), SetCC);
2172  __ b(&restore_fpscr_and_return, eq);
2173  // We had an overflow or underflow in the conversion. Check if we
2174  // have a big exponent.
2175  __ cmp(r7, Operand(HeapNumber::kMantissaBits));
2176  // If greater or equal, the argument is already rounded and in r0.
2177  __ b(&restore_fpscr_and_return, ge);
2178  __ b(&wont_fit_smi);
2179 
2180  __ bind(&no_vfp_exception);
2181  // Move the result back to general purpose register r0.
2182  __ vmov(r0, s0);
2183  // Check if the result fits into a smi.
2184  __ add(r1, r0, Operand(0x40000000), SetCC);
2185  __ b(&wont_fit_smi, mi);
2186  // Tag the result.
2187  STATIC_ASSERT(kSmiTag == 0);
2188  __ mov(r0, Operand(r0, LSL, kSmiTagSize));
2189 
2190  // Check for -0.
2191  __ cmp(r0, Operand(0, RelocInfo::NONE));
2192  __ b(&restore_fpscr_and_return, ne);
2193  // r5 already holds the HeapNumber exponent.
2194  __ tst(r5, Operand(HeapNumber::kSignMask));
2195  // If our HeapNumber is negative it was -0, so load its address and return.
2196  // Else r0 is loaded with 0, so we can also just return.
2197  __ ldr(r0, MemOperand(sp, 0 * kPointerSize), ne);
2198 
2199  __ bind(&restore_fpscr_and_return);
2200  // Restore FPSCR and return.
2201  __ vmsr(r3);
2202  __ Drop(argc + 1);
2203  __ Ret();
2204 
2205  __ bind(&wont_fit_smi);
2206  // Restore FPSCR and fall to slow case.
2207  __ vmsr(r3);
2208 
2209  __ bind(&slow);
2210  // Tail call the full function. We do not have to patch the receiver
2211  // because the function makes no use of it.
2212  __ InvokeFunction(
2213  function, arguments(), JUMP_FUNCTION, NullCallWrapper(), CALL_AS_METHOD);
2214 
2215  __ bind(&miss);
2216  // r2: function name.
2217  GenerateMissBranch();
2218 
2219  // Return the generated code.
2220  return cell.is_null() ? GetCode(function) : GetCode(NORMAL, name);
2221 }
2222 
2223 
2224 Handle<Code> CallStubCompiler::CompileMathAbsCall(
2225  Handle<Object> object,
2226  Handle<JSObject> holder,
2227  Handle<JSGlobalPropertyCell> cell,
2228  Handle<JSFunction> function,
2229  Handle<String> name) {
2230  // ----------- S t a t e -------------
2231  // -- r2 : function name
2232  // -- lr : return address
2233  // -- sp[(argc - n - 1) * 4] : arg[n] (zero-based)
2234  // -- ...
2235  // -- sp[argc * 4] : receiver
2236  // -----------------------------------
2237 
2238  const int argc = arguments().immediate();
2239  // If the object is not a JSObject or we got an unexpected number of
2240  // arguments, bail out to the regular call.
2241  if (!object->IsJSObject() || argc != 1) return Handle<Code>::null();
2242 
2243  Label miss;
2244  GenerateNameCheck(name, &miss);
2245  if (cell.is_null()) {
2246  __ ldr(r1, MemOperand(sp, 1 * kPointerSize));
2247  STATIC_ASSERT(kSmiTag == 0);
2248  __ JumpIfSmi(r1, &miss);
2249  CheckPrototypes(Handle<JSObject>::cast(object), r1, holder, r0, r3, r4,
2250  name, &miss);
2251  } else {
2252  ASSERT(cell->value() == *function);
2253  GenerateGlobalReceiverCheck(Handle<JSObject>::cast(object), holder, name,
2254  &miss);
2255  GenerateLoadFunctionFromCell(cell, function, &miss);
2256  }
2257 
2258  // Load the (only) argument into r0.
2259  __ ldr(r0, MemOperand(sp, 0 * kPointerSize));
2260 
2261  // Check if the argument is a smi.
2262  Label not_smi;
2263  STATIC_ASSERT(kSmiTag == 0);
2264  __ JumpIfNotSmi(r0, &not_smi);
2265 
2266  // Do bitwise not or do nothing depending on the sign of the
2267  // argument.
2268  __ eor(r1, r0, Operand(r0, ASR, kBitsPerInt - 1));
2269 
2270  // Add 1 or do nothing depending on the sign of the argument.
2271  __ sub(r0, r1, Operand(r0, ASR, kBitsPerInt - 1), SetCC);
2272 
2273  // If the result is still negative, go to the slow case.
2274  // This only happens for the most negative smi.
2275  Label slow;
2276  __ b(mi, &slow);
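  // (The eor/sub pair is the branch-free two's-complement abs: with
  // sign = x >> 31, abs = (x ^ sign) - sign. It works on the tagged smi
  // unchanged, and only the most negative smi overflows, leaving a
  // negative result that the branch above sends to the slow case.)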
2277 
2278  // Smi case done.
2279  __ Drop(argc + 1);
2280  __ Ret();
2281 
2282  // Check if the argument is a heap number and load its exponent and
2283  // sign.
2284  __ bind(&not_smi);
2285  __ CheckMap(r0, r1, Heap::kHeapNumberMapRootIndex, &slow, DONT_DO_SMI_CHECK);
2286  __ ldr(r1, FieldMemOperand(r0, HeapNumber::kExponentOffset));
2287 
2288  // Check the sign of the argument. If the argument is positive,
2289  // just return it.
2290  Label negative_sign;
2291  __ tst(r1, Operand(HeapNumber::kSignMask));
2292  __ b(ne, &negative_sign);
2293  __ Drop(argc + 1);
2294  __ Ret();
2295 
2296  // If the argument is negative, clear the sign, and return a new
2297  // number.
2298  __ bind(&negative_sign);
2299  __ eor(r1, r1, Operand(HeapNumber::kSignMask));
2300  __ ldr(r3, FieldMemOperand(r0, HeapNumber::kMantissaOffset));
2301  __ LoadRoot(r6, Heap::kHeapNumberMapRootIndex);
2302  __ AllocateHeapNumber(r0, r4, r5, r6, &slow);
2303  __ str(r1, FieldMemOperand(r0, HeapNumber::kExponentOffset));
2304  __ str(r3, FieldMemOperand(r0, HeapNumber::kMantissaOffset));
2305  __ Drop(argc + 1);
2306  __ Ret();
2307 
2308  // Tail call the full function. We do not have to patch the receiver
2309  // because the function makes no use of it.
2310  __ bind(&slow);
2311  __ InvokeFunction(
2312  function, arguments(), JUMP_FUNCTION, NullCallWrapper(), CALL_AS_METHOD);
2313 
2314  __ bind(&miss);
2315  // r2: function name.
2316  GenerateMissBranch();
2317 
2318  // Return the generated code.
2319  return cell.is_null() ? GetCode(function) : GetCode(NORMAL, name);
2320 }
2321 
2322 
2323 Handle<Code> CallStubCompiler::CompileFastApiCall(
2324  const CallOptimization& optimization,
2325  Handle<Object> object,
2326  Handle<JSObject> holder,
2327  Handle<JSGlobalPropertyCell> cell,
2328  Handle<JSFunction> function,
2329  Handle<String> name) {
2330  Counters* counters = isolate()->counters();
2331 
2332  ASSERT(optimization.is_simple_api_call());
2333  // Bail out if object is a global object as we don't want to
2334  // repatch it to global receiver.
2335  if (object->IsGlobalObject()) return Handle<Code>::null();
2336  if (!cell.is_null()) return Handle<Code>::null();
2337  if (!object->IsJSObject()) return Handle<Code>::null();
2338  int depth = optimization.GetPrototypeDepthOfExpectedType(
2339  Handle<JSObject>::cast(object), holder);
2340  if (depth == kInvalidProtoDepth) return Handle<Code>::null();
2341 
2342  Label miss, miss_before_stack_reserved;
2343  GenerateNameCheck(name, &miss_before_stack_reserved);
2344 
2345  // Get the receiver from the stack.
2346  const int argc = arguments().immediate();
2347  __ ldr(r1, MemOperand(sp, argc * kPointerSize));
2348 
2349  // Check that the receiver isn't a smi.
2350  __ JumpIfSmi(r1, &miss_before_stack_reserved);
2351 
2352  __ IncrementCounter(counters->call_const(), 1, r0, r3);
2353  __ IncrementCounter(counters->call_const_fast_api(), 1, r0, r3);
2354 
2355  ReserveSpaceForFastApiCall(masm(), r0);
2356 
2357  // Check that the maps haven't changed and find a Holder as a side effect.
2358  CheckPrototypes(Handle<JSObject>::cast(object), r1, holder, r0, r3, r4, name,
2359  depth, &miss);
2360 
2361  GenerateFastApiDirectCall(masm(), optimization, argc);
2362 
2363  __ bind(&miss);
2364  FreeSpaceForFastApiCall(masm());
2365 
2366  __ bind(&miss_before_stack_reserved);
2367  GenerateMissBranch();
2368 
2369  // Return the generated code.
2370  return GetCode(function);
2371 }
2372 
2373 
2374 Handle<Code> CallStubCompiler::CompileCallConstant(Handle<Object> object,
2375  Handle<JSObject> holder,
2376  Handle<JSFunction> function,
2377  Handle<String> name,
2378  CheckType check) {
2379  // ----------- S t a t e -------------
2380  // -- r2 : name
2381  // -- lr : return address
2382  // -----------------------------------
2383  if (HasCustomCallGenerator(function)) {
2384  Handle<Code> code = CompileCustomCall(object, holder,
2385  Handle<JSGlobalPropertyCell>::null(),
2386  function, name);
2387  // A null handle means bail out to the regular compiler code below.
2388  if (!code.is_null()) return code;
2389  }
2390 
2391  Label miss;
2392  GenerateNameCheck(name, &miss);
2393 
2394  // Get the receiver from the stack
2395  const int argc = arguments().immediate();
2396  __ ldr(r1, MemOperand(sp, argc * kPointerSize));
2397 
2398  // Check that the receiver isn't a smi.
2399  if (check != NUMBER_CHECK) {
2400  __ JumpIfSmi(r1, &miss);
2401  }
2402 
2403  // Make sure that it's okay not to patch the on stack receiver
2404  // unless we're doing a receiver map check.
2405  ASSERT(!object->IsGlobalObject() || check == RECEIVER_MAP_CHECK);
2406  switch (check) {
2407  case RECEIVER_MAP_CHECK:
2408  __ IncrementCounter(masm()->isolate()->counters()->call_const(),
2409  1, r0, r3);
2410 
2411  // Check that the maps haven't changed.
2412  CheckPrototypes(Handle<JSObject>::cast(object), r1, holder, r0, r3, r4,
2413  name, &miss);
2414 
2415  // Patch the receiver on the stack with the global proxy if
2416  // necessary.
2417  if (object->IsGlobalObject()) {
2418  __ ldr(r3, FieldMemOperand(r1, GlobalObject::kGlobalReceiverOffset));
2419  __ str(r3, MemOperand(sp, argc * kPointerSize));
2420  }
2421  break;
2422 
2423  case STRING_CHECK:
2424  if (function->IsBuiltin() || !function->shared()->is_classic_mode()) {
2425  // Check that the object is a two-byte string or a symbol.
2426  __ CompareObjectType(r1, r3, r3, FIRST_NONSTRING_TYPE);
2427  __ b(ge, &miss);
2428  // Check that the maps starting from the prototype haven't changed.
2429  GenerateDirectLoadGlobalFunctionPrototype(
2430  masm(), Context::STRING_FUNCTION_INDEX, r0, &miss);
2431  CheckPrototypes(
2432  Handle<JSObject>(JSObject::cast(object->GetPrototype())),
2433  r0, holder, r3, r1, r4, name, &miss);
2434  } else {
2435  // Calling non-strict non-builtins with a value as the receiver
2436  // requires boxing.
2437  __ jmp(&miss);
2438  }
2439  break;
2440 
2441  case NUMBER_CHECK:
2442  if (function->IsBuiltin() || !function->shared()->is_classic_mode()) {
2443  Label fast;
2444  // Check that the object is a smi or a heap number.
2445  __ JumpIfSmi(r1, &fast);
2446  __ CompareObjectType(r1, r0, r0, HEAP_NUMBER_TYPE);
2447  __ b(ne, &miss);
2448  __ bind(&fast);
2449  // Check that the maps starting from the prototype haven't changed.
2450  GenerateDirectLoadGlobalFunctionPrototype(
2451  masm(), Context::NUMBER_FUNCTION_INDEX, r0, &miss);
2452  CheckPrototypes(
2453  Handle<JSObject>(JSObject::cast(object->GetPrototype())),
2454  r0, holder, r3, r1, r4, name, &miss);
2455  } else {
2456  // Calling non-strict non-builtins with a value as the receiver
2457  // requires boxing.
2458  __ jmp(&miss);
2459  }
2460  break;
2461 
2462  case BOOLEAN_CHECK:
2463  if (function->IsBuiltin() || !function->shared()->is_classic_mode()) {
2464  Label fast;
2465  // Check that the object is a boolean.
2466  __ LoadRoot(ip, Heap::kTrueValueRootIndex);
2467  __ cmp(r1, ip);
2468  __ b(eq, &fast);
2469  __ LoadRoot(ip, Heap::kFalseValueRootIndex);
2470  __ cmp(r1, ip);
2471  __ b(ne, &miss);
2472  __ bind(&fast);
2473  // Check that the maps starting from the prototype haven't changed.
2474  GenerateDirectLoadGlobalFunctionPrototype(
2475  masm(), Context::BOOLEAN_FUNCTION_INDEX, r0, &miss);
2476  CheckPrototypes(
2477  Handle<JSObject>(JSObject::cast(object->GetPrototype())),
2478  r0, holder, r3, r1, r4, name, &miss);
2479  } else {
2480  // Calling non-strict non-builtins with a value as the receiver
2481  // requires boxing.
2482  __ jmp(&miss);
2483  }
2484  break;
2485  }
2486 
2487  CallKind call_kind = CallICBase::Contextual::decode(extra_state_)
2488  ? CALL_AS_FUNCTION
2489  : CALL_AS_METHOD;
2490  __ InvokeFunction(
2491  function, arguments(), JUMP_FUNCTION, NullCallWrapper(), call_kind);
2492 
2493  // Handle call cache miss.
2494  __ bind(&miss);
2495  GenerateMissBranch();
2496 
2497  // Return the generated code.
2498  return GetCode(function);
2499 }
2500 
2501 
2502 Handle<Code> CallStubCompiler::CompileCallInterceptor(Handle<JSObject> object,
2503  Handle<JSObject> holder,
2504  Handle<String> name) {
2505  // ----------- S t a t e -------------
2506  // -- r2 : name
2507  // -- lr : return address
2508  // -----------------------------------
2509  Label miss;
2510  GenerateNameCheck(name, &miss);
2511 
2512  // Get the number of arguments.
2513  const int argc = arguments().immediate();
2514  LookupResult lookup(isolate());
2515  LookupPostInterceptor(holder, name, &lookup);
2516 
2517  // Get the receiver from the stack.
2518  __ ldr(r1, MemOperand(sp, argc * kPointerSize));
2519 
2520  CallInterceptorCompiler compiler(this, arguments(), r2, extra_state_);
2521  compiler.Compile(masm(), object, holder, name, &lookup, r1, r3, r4, r0,
2522  &miss);
2523 
2524  // Move returned value, the function to call, to r1.
2525  __ mov(r1, r0);
2526  // Restore receiver.
2527  __ ldr(r0, MemOperand(sp, argc * kPointerSize));
2528 
2529  GenerateCallFunction(masm(), object, arguments(), &miss, extra_state_);
2530 
2531  // Handle call cache miss.
2532  __ bind(&miss);
2533  GenerateMissBranch();
2534 
2535  // Return the generated code.
2536  return GetCode(INTERCEPTOR, name);
2537 }
2538 
2539 
2540 Handle<Code> CallStubCompiler::CompileCallGlobal(
2541  Handle<JSObject> object,
2542  Handle<GlobalObject> holder,
2543  Handle<JSGlobalPropertyCell> cell,
2544  Handle<JSFunction> function,
2545  Handle<String> name) {
2546  // ----------- S t a t e -------------
2547  // -- r2 : name
2548  // -- lr : return address
2549  // -----------------------------------
2550  if (HasCustomCallGenerator(function)) {
2551  Handle<Code> code = CompileCustomCall(object, holder, cell, function, name);
2552  // A null handle means bail out to the regular compiler code below.
2553  if (!code.is_null()) return code;
2554  }
2555 
2556  Label miss;
2557  GenerateNameCheck(name, &miss);
2558 
2559  // Get the number of arguments.
2560  const int argc = arguments().immediate();
2561  GenerateGlobalReceiverCheck(object, holder, name, &miss);
2562  GenerateLoadFunctionFromCell(cell, function, &miss);
2563 
2564  // Patch the receiver on the stack with the global proxy if
2565  // necessary.
2566  if (object->IsGlobalObject()) {
2567  __ ldr(r3, FieldMemOperand(r1, GlobalObject::kGlobalReceiverOffset));
2568  __ str(r3, MemOperand(sp, argc * kPointerSize));
2569  }
2570 
2571  // Set up the context (function already in r1).
2572  __ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset));
2573 
2574  // Jump to the cached code (tail call).
2575  Counters* counters = masm()->isolate()->counters();
2576  __ IncrementCounter(counters->call_global_inline(), 1, r3, r4);
2577  ParameterCount expected(function->shared()->formal_parameter_count());
2578  CallKind call_kind = CallICBase::Contextual::decode(extra_state_)
2580  : CALL_AS_METHOD;
2581  // We call indirectly through the code field in the function to
2582  // allow recompilation to take effect without changing any of the
2583  // call sites.
2584  __ ldr(r3, FieldMemOperand(r1, JSFunction::kCodeEntryOffset));
2585  __ InvokeCode(r3, expected, arguments(), JUMP_FUNCTION,
2586  NullCallWrapper(), call_kind);
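  // (InvokeCode compares the expected and actual argument counts and goes
  // through the arguments adaptor trampoline when they differ, so the stub
  // handles calls of any arity.)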
2587 
2588  // Handle call cache miss.
2589  __ bind(&miss);
2590  __ IncrementCounter(counters->call_global_inline_miss(), 1, r1, r3);
2591  GenerateMissBranch();
2592 
2593  // Return the generated code.
2594  return GetCode(NORMAL, name);
2595 }
2596 
2597 
2598 Handle<Code> StoreStubCompiler::CompileStoreField(Handle<JSObject> object,
2599  int index,
2600  Handle<Map> transition,
2601  Handle<String> name) {
2602  // ----------- S t a t e -------------
2603  // -- r0 : value
2604  // -- r1 : receiver
2605  // -- r2 : name
2606  // -- lr : return address
2607  // -----------------------------------
2608  Label miss;
2609 
2610  GenerateStoreField(masm(),
2611  object,
2612  index,
2613  transition,
2614  name,
2615  r1, r2, r3, r4,
2616  &miss);
2617  __ bind(&miss);
2618  Handle<Code> ic = masm()->isolate()->builtins()->StoreIC_Miss();
2619  __ Jump(ic, RelocInfo::CODE_TARGET);
2620 
2621  // Return the generated code.
2622  return GetCode(transition.is_null() ? FIELD : MAP_TRANSITION, name);
2623 }
2624 
2625 
2626 Handle<Code> StoreStubCompiler::CompileStoreCallback(
2627  Handle<JSObject> object,
2628  Handle<AccessorInfo> callback,
2629  Handle<String> name) {
2630  // ----------- S t a t e -------------
2631  // -- r0 : value
2632  // -- r1 : receiver
2633  // -- r2 : name
2634  // -- lr : return address
2635  // -----------------------------------
2636  Label miss;
2637 
2638  // Check that the map of the object hasn't changed.
2639  __ CheckMap(r1, r3, Handle<Map>(object->map()), &miss,
2640  DO_SMI_CHECK, ALLOW_ELEMENT_TRANSITION_MAPS);
2641 
2642  // Perform global security token check if needed.
2643  if (object->IsJSGlobalProxy()) {
2644  __ CheckAccessGlobalProxy(r1, r3, &miss);
2645  }
2646 
2647  // Stub never generated for non-global objects that require access
2648  // checks.
2649  ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());
2650 
2651  __ push(r1); // receiver
2652  __ mov(ip, Operand(callback)); // callback info
2653  __ Push(ip, r2, r0);
2654 
2655  // Do tail-call to the runtime system.
2656  ExternalReference store_callback_property =
2657  ExternalReference(IC_Utility(IC::kStoreCallbackProperty),
2658  masm()->isolate());
2659  __ TailCallExternalReference(store_callback_property, 4, 1);
2660 
2661  // Handle store cache miss.
2662  __ bind(&miss);
2663  Handle<Code> ic = masm()->isolate()->builtins()->StoreIC_Miss();
2664  __ Jump(ic, RelocInfo::CODE_TARGET);
2665 
2666  // Return the generated code.
2667  return GetCode(CALLBACKS, name);
2668 }
2669 
2670 
2671 Handle<Code> StoreStubCompiler::CompileStoreViaSetter(
2672  Handle<JSObject> receiver,
2673  Handle<JSFunction> setter,
2674  Handle<String> name) {
2675  // ----------- S t a t e -------------
2676  // -- r0 : value
2677  // -- r1 : receiver
2678  // -- r2 : name
2679  // -- lr : return address
2680  // -----------------------------------
2681  Label miss;
2682 
2683  // Check that the map of the object hasn't changed.
2684  __ CheckMap(r1, r3, Handle<Map>(receiver->map()), &miss, DO_SMI_CHECK,
2685  ALLOW_ELEMENT_TRANSITION_MAPS);
2686 
2687  {
2688  FrameScope scope(masm(), StackFrame::INTERNAL);
2689 
2690  // Save value register, so we can restore it later.
2691  __ push(r0);
2692 
2693  // Call the JavaScript setter with the receiver and the value on the stack.
2694  __ Push(r1, r0);
2695  ParameterCount actual(1);
2696  __ InvokeFunction(setter, actual, CALL_FUNCTION, NullCallWrapper(),
2697  CALL_AS_METHOD);
2698 
2699  // We have to return the passed value, not the return value of the setter.
2700  __ pop(r0);
2701 
2702  // Restore context register.
2703  __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2704 
2705  __ Ret();
2706 
2707  __ bind(&miss);
2708  Handle<Code> ic = masm()->isolate()->builtins()->StoreIC_Miss();
2709  __ Jump(ic, RelocInfo::CODE_TARGET);
2710 
2711  // Return the generated code.
2712  return GetCode(CALLBACKS, name);
2713 }
2714 
2715 
2716 Handle<Code> StoreStubCompiler::CompileStoreInterceptor(
2717  Handle<JSObject> receiver,
2718  Handle<String> name) {
2719  // ----------- S t a t e -------------
2720  // -- r0 : value
2721  // -- r1 : receiver
2722  // -- r2 : name
2723  // -- lr : return address
2724  // -----------------------------------
2725  Label miss;
2726 
2727  // Check that the map of the object hasn't changed.
2728  __ CheckMap(r1, r3, Handle<Map>(receiver->map()), &miss,
2729  DO_SMI_CHECK, ALLOW_ELEMENT_TRANSITION_MAPS);
2730 
2731  // Perform global security token check if needed.
2732  if (receiver->IsJSGlobalProxy()) {
2733  __ CheckAccessGlobalProxy(r1, r3, &miss);
2734  }
2735 
2736  // Stub is never generated for non-global objects that require access
2737  // checks.
2738  ASSERT(receiver->IsJSGlobalProxy() || !receiver->IsAccessCheckNeeded());
2739 
2740  __ Push(r1, r2, r0); // Receiver, name, value.
2741 
2742  __ mov(r0, Operand(Smi::FromInt(strict_mode_)));
2743  __ push(r0); // strict mode
2744 
2745  // Do tail-call to the runtime system.
2746  ExternalReference store_ic_property =
2747  ExternalReference(IC_Utility(IC::kStoreInterceptorProperty),
2748  masm()->isolate());
2749  __ TailCallExternalReference(store_ic_property, 4, 1);
2750 
2751  // Handle store cache miss.
2752  __ bind(&miss);
2753  Handle<Code> ic = masm()->isolate()->builtins()->StoreIC_Miss();
2754  __ Jump(ic, RelocInfo::CODE_TARGET);
2755 
2756  // Return the generated code.
2757  return GetCode(INTERCEPTOR, name);
2758 }
2759 
2760 
2761 Handle<Code> StoreStubCompiler::CompileStoreGlobal(
2762  Handle<GlobalObject> object,
2763  Handle<JSGlobalPropertyCell> cell,
2764  Handle<String> name) {
2765  // ----------- S t a t e -------------
2766  // -- r0 : value
2767  // -- r1 : receiver
2768  // -- r2 : name
2769  // -- lr : return address
2770  // -----------------------------------
2771  Label miss;
2772 
2773  // Check that the map of the global has not changed.
2774  __ ldr(r3, FieldMemOperand(r1, HeapObject::kMapOffset));
2775  __ cmp(r3, Operand(Handle<Map>(object->map())));
2776  __ b(ne, &miss);
2777 
2778  // Check that the value in the cell is not the hole. If it is, this
2779  // cell could have been deleted and reintroducing the global needs
2780  // to update the property details in the property dictionary of the
2781  // global object. We bail out to the runtime system to do that.
2782  __ mov(r4, Operand(cell));
2783  __ LoadRoot(r5, Heap::kTheHoleValueRootIndex);
2784  __ ldr(r6, FieldMemOperand(r4, JSGlobalPropertyCell::kValueOffset));
2785  __ cmp(r5, r6);
2786  __ b(eq, &miss);
2787 
2788  // Store the value in the cell.
2789  __ str(r0, FieldMemOperand(r4, JSGlobalPropertyCell::kValueOffset));
2790  // Cells are always rescanned, so no write barrier here.
2791 
2792  Counters* counters = masm()->isolate()->counters();
2793  __ IncrementCounter(counters->named_store_global_inline(), 1, r4, r3);
2794  __ Ret();
2795 
2796  // Handle store cache miss.
2797  __ bind(&miss);
2798  __ IncrementCounter(counters->named_store_global_inline_miss(), 1, r4, r3);
2799  Handle<Code> ic = masm()->isolate()->builtins()->StoreIC_Miss();
2800  __ Jump(ic, RelocInfo::CODE_TARGET);
2801 
2802  // Return the generated code.
2803  return GetCode(NORMAL, name);
2804 }
2805 
2806 
2807 Handle<Code> LoadStubCompiler::CompileLoadNonexistent(Handle<String> name,
2808  Handle<JSObject> object,
2809  Handle<JSObject> last) {
2810  // ----------- S t a t e -------------
2811  // -- r0 : receiver
2812  // -- lr : return address
2813  // -----------------------------------
2814  Label miss;
2815 
2816  // Check that receiver is not a smi.
2817  __ JumpIfSmi(r0, &miss);
2818 
2819  // Check the maps of the full prototype chain.
2820  CheckPrototypes(object, r0, last, r3, r1, r4, name, &miss);
2821 
2822  // If the last object in the prototype chain is a global object,
2823  // check that the global property cell is empty.
2824  if (last->IsGlobalObject()) {
2825  GenerateCheckPropertyCell(
2826  masm(), Handle<GlobalObject>::cast(last), name, r1, &miss);
2827  }
2828 
2829  // Return undefined if maps of the full prototype chain are still the
2830  // same and no global property with this name contains a value.
2831  __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
2832  __ Ret();
2833 
2834  __ bind(&miss);
2835  GenerateLoadMiss(masm(), Code::LOAD_IC);
2836 
2837  // Return the generated code.
2838  return GetCode(NONEXISTENT, factory()->empty_string());
2839 }
2840 
2841 
2842 Handle<Code> LoadStubCompiler::CompileLoadField(Handle<JSObject> object,
2843  Handle<JSObject> holder,
2844  int index,
2845  Handle<String> name) {
2846  // ----------- S t a t e -------------
2847  // -- r0 : receiver
2848  // -- r2 : name
2849  // -- lr : return address
2850  // -----------------------------------
2851  Label miss;
2852 
2853  GenerateLoadField(object, holder, r0, r3, r1, r4, index, name, &miss);
2854  __ bind(&miss);
2855  GenerateLoadMiss(masm(), Code::LOAD_IC);
2856 
2857  // Return the generated code.
2858  return GetCode(FIELD, name);
2859 }
2860 
2861 
2862 Handle<Code> LoadStubCompiler::CompileLoadCallback(
2863  Handle<String> name,
2864  Handle<JSObject> object,
2865  Handle<JSObject> holder,
2866  Handle<AccessorInfo> callback) {
2867  // ----------- S t a t e -------------
2868  // -- r0 : receiver
2869  // -- r2 : name
2870  // -- lr : return address
2871  // -----------------------------------
2872  Label miss;
2873  GenerateLoadCallback(object, holder, r0, r2, r3, r1, r4, callback, name,
2874  &miss);
2875  __ bind(&miss);
2876  GenerateLoadMiss(masm(), Code::LOAD_IC);
2877 
2878  // Return the generated code.
2879  return GetCode(CALLBACKS, name);
2880 }
2881 
2882 
2883 Handle<Code> LoadStubCompiler::CompileLoadViaGetter(
2884  Handle<String> name,
2885  Handle<JSObject> receiver,
2886  Handle<JSObject> holder,
2887  Handle<JSFunction> getter) {
2888  // ----------- S t a t e -------------
2889  // -- r0 : receiver
2890  // -- r2 : name
2891  // -- lr : return address
2892  // -----------------------------------
2893  Label miss;
2894 
2895  // Check that the maps haven't changed.
2896  __ JumpIfSmi(r0, &miss);
2897  CheckPrototypes(receiver, r0, holder, r3, r4, r1, name, &miss);
2898 
2899  {
2900  FrameScope scope(masm(), StackFrame::INTERNAL);
2901 
2902  // Call the JavaScript getter with the receiver on the stack.
2903  __ push(r0);
2904  ParameterCount actual(0);
2905  __ InvokeFunction(getter, actual, CALL_FUNCTION, NullCallWrapper(),
2906  CALL_AS_METHOD);
2907 
2908  // Restore context register.
2909  __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2910 
2911  __ Ret();
2912 
2913  __ bind(&miss);
2914  GenerateLoadMiss(masm(), Code::LOAD_IC);
2915 
2916  // Return the generated code.
2917  return GetCode(CALLBACKS, name);
2918 }
2919 
2920 
2921 Handle<Code> LoadStubCompiler::CompileLoadConstant(Handle<JSObject> object,
2922  Handle<JSObject> holder,
2923  Handle<JSFunction> value,
2924  Handle<String> name) {
2925  // ----------- S t a t e -------------
2926  // -- r0 : receiver
2927  // -- r2 : name
2928  // -- lr : return address
2929  // -----------------------------------
2930  Label miss;
2931 
2932  GenerateLoadConstant(object, holder, r0, r3, r1, r4, value, name, &miss);
2933  __ bind(&miss);
2934  GenerateLoadMiss(masm(), Code::LOAD_IC);
2935 
2936  // Return the generated code.
2937  return GetCode(CONSTANT_FUNCTION, name);
2938 }
2939 
2940 
2941 Handle<Code> LoadStubCompiler::CompileLoadInterceptor(Handle<JSObject> object,
2942  Handle<JSObject> holder,
2943  Handle<String> name) {
2944  // ----------- S t a t e -------------
2945  // -- r0 : receiver
2946  // -- r2 : name
2947  // -- lr : return address
2948  // -----------------------------------
2949  Label miss;
2950 
2951  LookupResult lookup(isolate());
2952  LookupPostInterceptor(holder, name, &lookup);
2953  GenerateLoadInterceptor(object, holder, &lookup, r0, r2, r3, r1, r4, name,
2954  &miss);
2955  __ bind(&miss);
2956  GenerateLoadMiss(masm(), Code::LOAD_IC);
2957 
2958  // Return the generated code.
2959  return GetCode(INTERCEPTOR, name);
2960 }
2961 
2962 
2963 Handle<Code> LoadStubCompiler::CompileLoadGlobal(
2964  Handle<JSObject> object,
2965  Handle<GlobalObject> holder,
2966  Handle<JSGlobalPropertyCell> cell,
2967  Handle<String> name,
2968  bool is_dont_delete) {
2969  // ----------- S t a t e -------------
2970  // -- r0 : receiver
2971  // -- r2 : name
2972  // -- lr : return address
2973  // -----------------------------------
2974  Label miss;
2975 
2976  // Check that the map of the global has not changed.
2977  __ JumpIfSmi(r0, &miss);
2978  CheckPrototypes(object, r0, holder, r3, r4, r1, name, &miss);
2979 
2980  // Get the value from the cell.
2981  __ mov(r3, Operand(cell));
2982  __ ldr(r4, FieldMemOperand(r3, JSGlobalPropertyCell::kValueOffset));
2983 
2984  // Check for deleted property if property can actually be deleted.
2985  if (!is_dont_delete) {
2986  __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
2987  __ cmp(r4, ip);
2988  __ b(eq, &miss);
2989  }
2990 
2991  __ mov(r0, r4);
2992  Counters* counters = masm()->isolate()->counters();
2993  __ IncrementCounter(counters->named_load_global_stub(), 1, r1, r3);
2994  __ Ret();
2995 
2996  __ bind(&miss);
2997  __ IncrementCounter(counters->named_load_global_stub_miss(), 1, r1, r3);
2998  GenerateLoadMiss(masm(), Code::LOAD_IC);
2999 
3000  // Return the generated code.
3001  return GetCode(NORMAL, name);
3002 }
3003 
3004 
3005 Handle<Code> KeyedLoadStubCompiler::CompileLoadField(Handle<String> name,
3006  Handle<JSObject> receiver,
3007  Handle<JSObject> holder,
3008  int index) {
3009  // ----------- S t a t e -------------
3010  // -- lr : return address
3011  // -- r0 : key
3012  // -- r1 : receiver
3013  // -----------------------------------
3014  Label miss;
3015 
3016  // Check the key is the cached one.
3017  __ cmp(r0, Operand(name));
3018  __ b(ne, &miss);
3019 
3020  GenerateLoadField(receiver, holder, r1, r2, r3, r4, index, name, &miss);
3021  __ bind(&miss);
3022  GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
3023 
3024  return GetCode(FIELD, name);
3025 }
3026 
3027 
3028 Handle<Code> KeyedLoadStubCompiler::CompileLoadCallback(
3029  Handle<String> name,
3030  Handle<JSObject> receiver,
3031  Handle<JSObject> holder,
3032  Handle<AccessorInfo> callback) {
3033  // ----------- S t a t e -------------
3034  // -- lr : return address
3035  // -- r0 : key
3036  // -- r1 : receiver
3037  // -----------------------------------
3038  Label miss;
3039 
3040  // Check the key is the cached one.
3041  __ cmp(r0, Operand(name));
3042  __ b(ne, &miss);
3043 
3044  GenerateLoadCallback(receiver, holder, r1, r0, r2, r3, r4, callback, name,
3045  &miss);
3046  __ bind(&miss);
3047  GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
3048 
3049  return GetCode(CALLBACKS, name);
3050 }
3051 
3052 
3053 Handle<Code> KeyedLoadStubCompiler::CompileLoadConstant(
3054  Handle<String> name,
3055  Handle<JSObject> receiver,
3056  Handle<JSObject> holder,
3057  Handle<JSFunction> value) {
3058  // ----------- S t a t e -------------
3059  // -- lr : return address
3060  // -- r0 : key
3061  // -- r1 : receiver
3062  // -----------------------------------
3063  Label miss;
3064 
3065  // Check the key is the cached one.
3066  __ cmp(r0, Operand(name));
3067  __ b(ne, &miss);
3068 
3069  GenerateLoadConstant(receiver, holder, r1, r2, r3, r4, value, name, &miss);
3070  __ bind(&miss);
3071  GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
3072 
3073  // Return the generated code.
3074  return GetCode(CONSTANT_FUNCTION, name);
3075 }
3076 
3077 
3078 Handle<Code> KeyedLoadStubCompiler::CompileLoadInterceptor(
3079  Handle<JSObject> receiver,
3080  Handle<JSObject> holder,
3081  Handle<String> name) {
3082  // ----------- S t a t e -------------
3083  // -- lr : return address
3084  // -- r0 : key
3085  // -- r1 : receiver
3086  // -----------------------------------
3087  Label miss;
3088 
3089  // Check the key is the cached one.
3090  __ cmp(r0, Operand(name));
3091  __ b(ne, &miss);
3092 
3093  LookupResult lookup(isolate());
3094  LookupPostInterceptor(holder, name, &lookup);
3095  GenerateLoadInterceptor(receiver, holder, &lookup, r1, r0, r2, r3, r4, name,
3096  &miss);
3097  __ bind(&miss);
3098  GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
3099 
3100  return GetCode(INTERCEPTOR, name);
3101 }
3102 
3103 
3104 Handle<Code> KeyedLoadStubCompiler::CompileLoadArrayLength(
3105  Handle<String> name) {
3106  // ----------- S t a t e -------------
3107  // -- lr : return address
3108  // -- r0 : key
3109  // -- r1 : receiver
3110  // -----------------------------------
3111  Label miss;
3112 
3113  // Check the key is the cached one.
3114  __ cmp(r0, Operand(name));
3115  __ b(ne, &miss);
3116 
3117  GenerateLoadArrayLength(masm(), r1, r2, &miss);
3118  __ bind(&miss);
3119  GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
3120 
3121  return GetCode(CALLBACKS, name);
3122 }
3123 
3124 
3125 Handle<Code> KeyedLoadStubCompiler::CompileLoadStringLength(
3126  Handle<String> name) {
3127  // ----------- S t a t e -------------
3128  // -- lr : return address
3129  // -- r0 : key
3130  // -- r1 : receiver
3131  // -----------------------------------
3132  Label miss;
3133 
3134  Counters* counters = masm()->isolate()->counters();
3135  __ IncrementCounter(counters->keyed_load_string_length(), 1, r2, r3);
3136 
3137  // Check the key is the cached one.
3138  __ cmp(r0, Operand(name));
3139  __ b(ne, &miss);
3140 
3141  GenerateLoadStringLength(masm(), r1, r2, r3, &miss, true);
3142  __ bind(&miss);
3143  __ DecrementCounter(counters->keyed_load_string_length(), 1, r2, r3);
3144 
3145  GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
3146 
3147  return GetCode(CALLBACKS, name);
3148 }
3149 
3150 
3151 Handle<Code> KeyedLoadStubCompiler::CompileLoadFunctionPrototype(
3152  Handle<String> name) {
3153  // ----------- S t a t e -------------
3154  // -- lr : return address
3155  // -- r0 : key
3156  // -- r1 : receiver
3157  // -----------------------------------
3158  Label miss;
3159 
3160  Counters* counters = masm()->isolate()->counters();
3161  __ IncrementCounter(counters->keyed_load_function_prototype(), 1, r2, r3);
3162 
3163  // Check the name hasn't changed.
3164  __ cmp(r0, Operand(name));
3165  __ b(ne, &miss);
3166 
3167  GenerateLoadFunctionPrototype(masm(), r1, r2, r3, &miss);
3168  __ bind(&miss);
3169  __ DecrementCounter(counters->keyed_load_function_prototype(), 1, r2, r3);
3170  GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
3171 
3172  return GetCode(CALLBACKS, name);
3173 }
3174 
3175 
3176 Handle<Code> KeyedLoadStubCompiler::CompileLoadElement(
3177  Handle<Map> receiver_map) {
3178  // ----------- S t a t e -------------
3179  // -- lr : return address
3180  // -- r0 : key
3181  // -- r1 : receiver
3182  // -----------------------------------
3183  ElementsKind elements_kind = receiver_map->elements_kind();
3184  Handle<Code> stub = KeyedLoadElementStub(elements_kind).GetCode();
3185 
3186  __ DispatchMap(r1, r2, receiver_map, stub, DO_SMI_CHECK);
3187 
3188  Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Miss();
3189  __ Jump(ic, RelocInfo::CODE_TARGET);
3190 
3191  // Return the generated code.
3192  return GetCode(NORMAL, factory()->empty_string());
3193 }
3194 
3195 
3196 Handle<Code> KeyedLoadStubCompiler::CompileLoadPolymorphic(
3197  MapHandleList* receiver_maps,
3198  CodeHandleList* handler_ics) {
3199  // ----------- S t a t e -------------
3200  // -- lr : return address
3201  // -- r0 : key
3202  // -- r1 : receiver
3203  // -----------------------------------
3204  Label miss;
3205  __ JumpIfSmi(r1, &miss);
3206 
3207  int receiver_count = receiver_maps->length();
3208  __ ldr(r2, FieldMemOperand(r1, HeapObject::kMapOffset));
3209  for (int current = 0; current < receiver_count; ++current) {
3210  __ mov(ip, Operand(receiver_maps->at(current)));
3211  __ cmp(r2, ip);
3212  __ Jump(handler_ics->at(current), RelocInfo::CODE_TARGET, eq);
3213  }
3214 
3215  __ bind(&miss);
3216  Handle<Code> miss_ic = isolate()->builtins()->KeyedLoadIC_Miss();
3217  __ Jump(miss_ic, RelocInfo::CODE_TARGET, al);
3218 
3219  // Return the generated code.
3220  return GetCode(NORMAL, factory()->empty_string(), MEGAMORPHIC);
3221 }
3222 
3223 
3224 Handle<Code> KeyedStoreStubCompiler::CompileStoreField(Handle<JSObject> object,
3225  int index,
3226  Handle<Map> transition,
3227  Handle<String> name) {
3228  // ----------- S t a t e -------------
3229  // -- r0 : value
3230  // -- r1 : name
3231  // -- r2 : receiver
3232  // -- lr : return address
3233  // -----------------------------------
3234  Label miss;
3235 
3236  Counters* counters = masm()->isolate()->counters();
3237  __ IncrementCounter(counters->keyed_store_field(), 1, r3, r4);
3238 
3239  // Check that the name has not changed.
3240  __ cmp(r1, Operand(name));
3241  __ b(ne, &miss);
3242 
3243  // r3 is used as scratch register. r1 and r2 keep their values if a jump to
3244  // the miss label is generated.
3245  GenerateStoreField(masm(),
3246  object,
3247  index,
3248  transition,
3249  name,
3250  r2, r1, r3, r4,
3251  &miss);
3252  __ bind(&miss);
3253 
3254  __ DecrementCounter(counters->keyed_store_field(), 1, r3, r4);
3255  Handle<Code> ic = masm()->isolate()->builtins()->KeyedStoreIC_Miss();
3256  __ Jump(ic, RelocInfo::CODE_TARGET);
3257 
3258  // Return the generated code.
3259  return GetCode(transition.is_null() ? FIELD : MAP_TRANSITION, name);
3260 }
3261 
3262 
3263 Handle<Code> KeyedStoreStubCompiler::CompileStoreElement(
3264  Handle<Map> receiver_map) {
3265  // ----------- S t a t e -------------
3266  // -- r0 : value
3267  // -- r1 : key
3268  // -- r2 : receiver
3269  // -- lr : return address
3270  // -- r3 : scratch
3271  // -----------------------------------
3272  ElementsKind elements_kind = receiver_map->elements_kind();
3273  bool is_js_array = receiver_map->instance_type() == JS_ARRAY_TYPE;
3274  Handle<Code> stub =
3275  KeyedStoreElementStub(is_js_array, elements_kind, grow_mode_).GetCode();
3276 
3277  __ DispatchMap(r2, r3, receiver_map, stub, DO_SMI_CHECK);
3278 
3279  Handle<Code> ic = isolate()->builtins()->KeyedStoreIC_Miss();
3280  __ Jump(ic, RelocInfo::CODE_TARGET);
3281 
3282  // Return the generated code.
3283  return GetCode(NORMAL, factory()->empty_string());
3284 }
3285 
3286 
3287 Handle<Code> KeyedStoreStubCompiler::CompileStorePolymorphic(
3288  MapHandleList* receiver_maps,
3289  CodeHandleList* handler_stubs,
3290  MapHandleList* transitioned_maps) {
3291  // ----------- S t a t e -------------
3292  // -- r0 : value
3293  // -- r1 : key
3294  // -- r2 : receiver
3295  // -- lr : return address
3296  // -- r3 : scratch
3297  // -----------------------------------
3298  Label miss;
3299  __ JumpIfSmi(r2, &miss);
3300 
3301  int receiver_count = receiver_maps->length();
3302  __ ldr(r3, FieldMemOperand(r2, HeapObject::kMapOffset));
3303  for (int i = 0; i < receiver_count; ++i) {
3304  __ mov(ip, Operand(receiver_maps->at(i)));
3305  __ cmp(r3, ip);
3306  if (transitioned_maps->at(i).is_null()) {
3307  __ Jump(handler_stubs->at(i), RelocInfo::CODE_TARGET, eq);
3308  } else {
3309  Label next_map;
3310  __ b(ne, &next_map);
3311  __ mov(r3, Operand(transitioned_maps->at(i)));
3312  __ Jump(handler_stubs->at(i), RelocInfo::CODE_TARGET, al);
3313  __ bind(&next_map);
3314  }
3315  }
3316 
3317  __ bind(&miss);
3318  Handle<Code> miss_ic = isolate()->builtins()->KeyedStoreIC_Miss();
3319  __ Jump(miss_ic, RelocInfo::CODE_TARGET, al);
3320 
3321  // Return the generated code.
3322  return GetCode(NORMAL, factory()->empty_string(), MEGAMORPHIC);
3323 }
3324 
3325 
3326 Handle<Code> ConstructStubCompiler::CompileConstructStub(
3327  Handle<JSFunction> function) {
3328  // ----------- S t a t e -------------
3329  // -- r0 : argc
3330  // -- r1 : constructor
3331  // -- lr : return address
3332  // -- [sp] : last argument
3333  // -----------------------------------
3334  Label generic_stub_call;
3335 
3336  // Use r7 for holding undefined which is used in several places below.
3337  __ LoadRoot(r7, Heap::kUndefinedValueRootIndex);
3338 
3339 #ifdef ENABLE_DEBUGGER_SUPPORT
3340  // Check to see whether there are any break points in the function code. If
3341  // there are, jump to the generic constructor stub which calls the actual
3342  // code for the function thereby hitting the break points.
3343  __ ldr(r2, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
3344  __ ldr(r2, FieldMemOperand(r2, SharedFunctionInfo::kDebugInfoOffset));
3345  __ cmp(r2, r7);
3346  __ b(ne, &generic_stub_call);
3347 #endif
3348 
3349  // Load the initial map and verify that it is in fact a map.
3350  // r1: constructor function
3351  // r7: undefined
3352  __ ldr(r2, FieldMemOperand(r1, JSFunction::kPrototypeOrInitialMapOffset));
3353  __ JumpIfSmi(r2, &generic_stub_call);
3354  __ CompareObjectType(r2, r3, r4, MAP_TYPE);
3355  __ b(ne, &generic_stub_call);
3356 
3357 #ifdef DEBUG
3358  // Cannot construct functions this way.
3359  // r0: argc
3360  // r1: constructor function
3361  // r2: initial map
3362  // r7: undefined
3363  __ CompareInstanceType(r2, r3, JS_FUNCTION_TYPE);
3364  __ Check(ne, "Function constructed by construct stub.");
3365 #endif
3366 
3367  // Now allocate the JSObject in new space.
3368  // r0: argc
3369  // r1: constructor function
3370  // r2: initial map
3371  // r7: undefined
3372  __ ldrb(r3, FieldMemOperand(r2, Map::kInstanceSizeOffset));
3373  __ AllocateInNewSpace(r3, r4, r5, r6, &generic_stub_call, SIZE_IN_WORDS);
3374 
3375  // Allocated the JSObject, now initialize the fields. Map is set to initial
3376  // map and properties and elements are set to empty fixed array.
3377  // r0: argc
3378  // r1: constructor function
3379  // r2: initial map
3380  // r3: object size (in words)
3381  // r4: JSObject (not tagged)
3382  // r7: undefined
3383  __ LoadRoot(r6, Heap::kEmptyFixedArrayRootIndex);
3384  __ mov(r5, r4);
3385  ASSERT_EQ(0 * kPointerSize, JSObject::kMapOffset);
3386  __ str(r2, MemOperand(r5, kPointerSize, PostIndex));
3387  ASSERT_EQ(1 * kPointerSize, JSObject::kPropertiesOffset);
3388  __ str(r6, MemOperand(r5, kPointerSize, PostIndex));
3389  ASSERT_EQ(2 * kPointerSize, JSObject::kElementsOffset);
3390  __ str(r6, MemOperand(r5, kPointerSize, PostIndex));
3391 
3392  // Calculate the location of the first argument. The stack contains only the
3393  // argc arguments.
3394  __ add(r1, sp, Operand(r0, LSL, kPointerSizeLog2));
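  // (Arguments were pushed left to right, so with r1 pointing just above
  // the argument area, argument n lives at r1 - (n + 1) * kPointerSize;
  // that is the negative offset used in the loop below.)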
3395 
3396  // Fill all the in-object properties with undefined.
3397  // r0: argc
3398  // r1: first argument
3399  // r3: object size (in words)
3400  // r4: JSObject (not tagged)
3401  // r5: First in-object property of JSObject (not tagged)
3402  // r7: undefined
3403  // Fill the initialized properties with a constant value or a passed argument
3404  // depending on the this.x = ...; assignment in the function.
3405  Handle<SharedFunctionInfo> shared(function->shared());
3406  for (int i = 0; i < shared->this_property_assignments_count(); i++) {
3407  if (shared->IsThisPropertyAssignmentArgument(i)) {
3408  Label not_passed, next;
3409  // Check if the argument assigned to the property is actually passed.
3410  int arg_number = shared->GetThisPropertyAssignmentArgument(i);
3411  __ cmp(r0, Operand(arg_number));
3412  __ b(le, &not_passed);
3413  // Argument passed - find it on the stack.
3414  __ ldr(r2, MemOperand(r1, (arg_number + 1) * -kPointerSize));
3415  __ str(r2, MemOperand(r5, kPointerSize, PostIndex));
3416  __ b(&next);
3417  __ bind(&not_passed);
3418  // Set the property to undefined.
3419  __ str(r7, MemOperand(r5, kPointerSize, PostIndex));
3420  __ bind(&next);
3421  } else {
3422  // Set the property to the constant value.
3423  Handle<Object> constant(shared->GetThisPropertyAssignmentConstant(i));
3424  __ mov(r2, Operand(constant));
3425  __ str(r2, MemOperand(r5, kPointerSize, PostIndex));
3426  }
3427  }
3428 
3429  // Fill the unused in-object property fields with undefined.
3430  ASSERT(function->has_initial_map());
3431  for (int i = shared->this_property_assignments_count();
3432  i < function->initial_map()->inobject_properties();
3433  i++) {
3434  __ str(r7, MemOperand(r5, kPointerSize, PostIndex));
3435  }
3436 
3437  // r0: argc
3438  // r4: JSObject (not tagged)
3439  // Move argc to r1 and the JSObject to return to r0 and tag it.
3440  __ mov(r1, r0);
3441  __ mov(r0, r4);
3442  __ orr(r0, r0, Operand(kHeapObjectTag));
3443 
3444  // r0: JSObject
3445  // r1: argc
3446  // Remove caller arguments and receiver from the stack and return.
3447  __ add(sp, sp, Operand(r1, LSL, kPointerSizeLog2));
3448  __ add(sp, sp, Operand(kPointerSize));
3449  Counters* counters = masm()->isolate()->counters();
3450  __ IncrementCounter(counters->constructed_objects(), 1, r1, r2);
3451  __ IncrementCounter(counters->constructed_objects_stub(), 1, r1, r2);
3452  __ Jump(lr);
3453 
3454  // Jump to the generic stub in case the specialized code cannot handle the
3455  // construction.
3456  __ bind(&generic_stub_call);
3457  Handle<Code> code = masm()->isolate()->builtins()->JSConstructStubGeneric();
3458  __ Jump(code, RelocInfo::CODE_TARGET);
3459 
3460  // Return the generated code.
3461  return GetCode();
3462 }
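// The construct stub above inlines the allocation: map word, then empty
// properties and elements arrays, then one word per in-object property
// (a passed argument or undefined, per the this.x = ... assignments). A
// host-side sketch of that layout, assuming only that Word stands in for a
// tagged pointer:
#include <cstddef>
#include <vector>

typedef void* Word;

static std::vector<Word> SketchInitializeObject(Word map,
                                                Word empty_fixed_array,
                                                Word undefined,
                                                const std::vector<Word>& fields,
                                                size_t size_in_words) {
  std::vector<Word> object(size_in_words, undefined);  // unused fields -> undefined
  object[0] = map;                // JSObject::kMapOffset == 0 * kPointerSize
  object[1] = empty_fixed_array;  // JSObject::kPropertiesOffset == 1 * kPointerSize
  object[2] = empty_fixed_array;  // JSObject::kElementsOffset == 2 * kPointerSize
  for (size_t i = 0; i < fields.size() && 3 + i < size_in_words; ++i) {
    object[3 + i] = fields[i];    // this-property assignments, in order
  }
  return object;
}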
3463 
3464 
3465 #undef __
3466 #define __ ACCESS_MASM(masm)
3467 
3468 
3469 void KeyedLoadStubCompiler::GenerateLoadDictionaryElement(
3470  MacroAssembler* masm) {
3471  // ---------- S t a t e --------------
3472  // -- lr : return address
3473  // -- r0 : key
3474  // -- r1 : receiver
3475  // -----------------------------------
3476  Label slow, miss_force_generic;
3477 
3478  Register key = r0;
3479  Register receiver = r1;
3480 
3481  __ JumpIfNotSmi(key, &miss_force_generic);
3482  __ mov(r2, Operand(key, ASR, kSmiTagSize));
3483  __ ldr(r4, FieldMemOperand(receiver, JSObject::kElementsOffset));
3484  __ LoadFromNumberDictionary(&slow, r4, key, r0, r2, r3, r5);
3485  __ Ret();
3486 
3487  __ bind(&slow);
3488  __ IncrementCounter(
3489  masm->isolate()->counters()->keyed_load_external_array_slow(),
3490  1, r2, r3);
3491 
3492  // ---------- S t a t e --------------
3493  // -- lr : return address
3494  // -- r0 : key
3495  // -- r1 : receiver
3496  // -----------------------------------
3497  Handle<Code> slow_ic =
3498  masm->isolate()->builtins()->KeyedLoadIC_Slow();
3499  __ Jump(slow_ic, RelocInfo::CODE_TARGET);
3500 
3501  // Miss case, call the runtime.
3502  __ bind(&miss_force_generic);
3503 
3504  // ---------- S t a t e --------------
3505  // -- lr : return address
3506  // -- r0 : key
3507  // -- r1 : receiver
3508  // -----------------------------------
3509 
3510  Handle<Code> miss_ic =
3511  masm->isolate()->builtins()->KeyedLoadIC_MissForceGeneric();
3512  __ Jump(miss_ic, RelocInfo::CODE_TARGET);
3513 }
3514 
3515 
3516 static bool IsElementTypeSigned(ElementsKind elements_kind) {
3517  switch (elements_kind) {
3518  case EXTERNAL_BYTE_ELEMENTS:
3519  case EXTERNAL_SHORT_ELEMENTS:
3520  case EXTERNAL_INT_ELEMENTS:
3521  return true;
3522 
3523  case EXTERNAL_UNSIGNED_BYTE_ELEMENTS:
3524  case EXTERNAL_UNSIGNED_SHORT_ELEMENTS:
3525  case EXTERNAL_UNSIGNED_INT_ELEMENTS:
3526  case EXTERNAL_PIXEL_ELEMENTS:
3527  return false;
3528 
3529  case EXTERNAL_FLOAT_ELEMENTS:
3530  case EXTERNAL_DOUBLE_ELEMENTS:
3531  case FAST_ELEMENTS:
3532  case FAST_SMI_ELEMENTS:
3533  case FAST_DOUBLE_ELEMENTS:
3534  case FAST_HOLEY_ELEMENTS:
3535  case FAST_HOLEY_SMI_ELEMENTS:
3536  case FAST_HOLEY_DOUBLE_ELEMENTS:
3537  case DICTIONARY_ELEMENTS:
3538  case NON_STRICT_ARGUMENTS_ELEMENTS:
3539  UNREACHABLE();
3540  return false;
3541  }
3542  return false;
3543 }
3544 
3545 
3546 static void GenerateSmiKeyCheck(MacroAssembler* masm,
3547  Register key,
3548  Register scratch0,
3549  Register scratch1,
3550  DwVfpRegister double_scratch0,
3551  Label* fail) {
3552  if (CpuFeatures::IsSupported(VFP3)) {
3553  CpuFeatures::Scope scope(VFP3);
3554  Label key_ok;
3555  // Check for smi or a smi inside a heap number. We convert the heap
3556  // number and check if the conversion is exact and fits into the smi
3557  // range.
3558  __ JumpIfSmi(key, &key_ok);
3559  __ CheckMap(key,
3560  scratch0,
3561  Heap::kHeapNumberMapRootIndex,
3562  fail,
3563  DONT_DO_SMI_CHECK);
3564  __ sub(ip, key, Operand(kHeapObjectTag));
3565  __ vldr(double_scratch0, ip, HeapNumber::kValueOffset);
3566  __ EmitVFPTruncate(kRoundToZero,
3567  double_scratch0.low(),
3568  double_scratch0,
3569  scratch0,
3570  scratch1,
3571  kCheckForInexactConversion);
3572  __ b(ne, fail);
3573  __ vmov(scratch0, double_scratch0.low());
3574  __ TrySmiTag(scratch0, fail, scratch1);
3575  __ mov(key, scratch0);
3576  __ bind(&key_ok);
3577  } else {
3578  // Check that the key is a smi.
3579  __ JumpIfNotSmi(key, fail);
3580  }
3581 }
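// Host-side sketch of the key check above, assuming a 31-bit smi payload
// and kSmiTagSize == 1 as on 32-bit ARM: a heap-number key is usable only
// when truncation toward zero is exact and the result survives TrySmiTag.
// (The -0.0 corner case is glossed over in this sketch.)
#include <cstdint>

static bool SketchKeyConvertsToSmi(double key, int32_t* smi_out) {
  if (!(key >= -2147483648.0 && key < 2147483648.0)) return false;  // also rejects NaN
  int32_t truncated = static_cast<int32_t>(key);            // kRoundToZero
  if (static_cast<double>(truncated) != key) return false;  // inexact conversion
  if (truncated < -0x40000000 || truncated > 0x3FFFFFFF) return false;  // TrySmiTag
  *smi_out = truncated * 2;                                 // tag: kSmiTag == 0
  return true;
}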
3582 
3583 
3584 void KeyedLoadStubCompiler::GenerateLoadExternalArray(
3585  MacroAssembler* masm,
3586  ElementsKind elements_kind) {
3587  // ---------- S t a t e --------------
3588  // -- lr : return address
3589  // -- r0 : key
3590  // -- r1 : receiver
3591  // -----------------------------------
3592  Label miss_force_generic, slow, failed_allocation;
3593 
3594  Register key = r0;
3595  Register receiver = r1;
3596 
3597  // This stub is meant to be tail-jumped to; the receiver must already
3598  // have been verified by the caller to not be a smi.
3599 
3600  // Check that the key is a smi or a heap number convertible to a smi.
3601  GenerateSmiKeyCheck(masm, key, r4, r5, d1, &miss_force_generic);
3602 
3603  __ ldr(r3, FieldMemOperand(receiver, JSObject::kElementsOffset));
3604  // r3: elements array
3605 
3606  // Check that the index is in range.
3607  __ ldr(ip, FieldMemOperand(r3, ExternalArray::kLengthOffset));
3608  __ cmp(key, ip);
3609  // Unsigned comparison catches both negative and too-large values.
3610  __ b(hs, &miss_force_generic);
3611 
3612  __ ldr(r3, FieldMemOperand(r3, ExternalArray::kExternalPointerOffset));
3613  // r3: base pointer of external storage
3614 
3615  // We are not untagging the smi key; instead we work with it
3616  // as if it were premultiplied by 2.
3617  STATIC_ASSERT((kSmiTag == 0) && (kSmiTagSize == 1));
3618 
3619  Register value = r2;
3620  switch (elements_kind) {
3621  case EXTERNAL_BYTE_ELEMENTS:
3622  __ ldrsb(value, MemOperand(r3, key, LSR, 1));
3623  break;
3624  case EXTERNAL_PIXEL_ELEMENTS:
3625  case EXTERNAL_UNSIGNED_BYTE_ELEMENTS:
3626  __ ldrb(value, MemOperand(r3, key, LSR, 1));
3627  break;
3628  case EXTERNAL_SHORT_ELEMENTS:
3629  __ ldrsh(value, MemOperand(r3, key, LSL, 0));
3630  break;
3631  case EXTERNAL_UNSIGNED_SHORT_ELEMENTS:
3632  __ ldrh(value, MemOperand(r3, key, LSL, 0));
3633  break;
3634  case EXTERNAL_INT_ELEMENTS:
3635  case EXTERNAL_UNSIGNED_INT_ELEMENTS:
3636  __ ldr(value, MemOperand(r3, key, LSL, 1));
3637  break;
3638  case EXTERNAL_FLOAT_ELEMENTS:
3639  if (CpuFeatures::IsSupported(VFP3)) {
3640  CpuFeatures::Scope scope(VFP3);
3641  __ add(r2, r3, Operand(key, LSL, 1));
3642  __ vldr(s0, r2, 0);
3643  } else {
3644  __ ldr(value, MemOperand(r3, key, LSL, 1));
3645  }
3646  break;
3647  case EXTERNAL_DOUBLE_ELEMENTS:
3648  if (CpuFeatures::IsSupported(VFP3)) {
3649  CpuFeatures::Scope scope(VFP3);
3650  __ add(r2, r3, Operand(key, LSL, 2));
3651  __ vldr(d0, r2, 0);
3652  } else {
3653  __ add(r4, r3, Operand(key, LSL, 2));
3654  // r4: pointer to the beginning of the double we want to load.
3655  __ ldr(r2, MemOperand(r4, 0));
3656  __ ldr(r3, MemOperand(r4, Register::kSizeInBytes));
3657  }
3658  break;
3659  case FAST_ELEMENTS:
3660  case FAST_SMI_ELEMENTS:
3661  case FAST_DOUBLE_ELEMENTS:
3662  case FAST_HOLEY_ELEMENTS:
3663  case FAST_HOLEY_SMI_ELEMENTS:
3664  case FAST_HOLEY_DOUBLE_ELEMENTS:
3665  case DICTIONARY_ELEMENTS:
3666  case NON_STRICT_ARGUMENTS_ELEMENTS:
3667  UNREACHABLE();
3668  break;
3669  }
3670 
3671  // For integer array types:
3672  // r2: value
3673  // For float array type:
3674  // s0: value (if VFP3 is supported)
3675  // r2: value (if VFP3 is not supported)
3676  // For double array type:
3677  // d0: value (if VFP3 is supported)
3678  // r2/r3: value (if VFP3 is not supported)
3679 
3680  if (elements_kind == EXTERNAL_INT_ELEMENTS) {
3681  // For the Int and UnsignedInt array types, we need to see whether
3682  // the value can be represented in a Smi. If not, we need to convert
3683  // it to a HeapNumber.
3684  Label box_int;
3685  __ cmp(value, Operand(0xC0000000));
3686  __ b(mi, &box_int);
3687  // Tag integer as smi and return it.
3688  __ mov(r0, Operand(value, LSL, kSmiTagSize));
3689  __ Ret();
3690 
3691  __ bind(&box_int);
3692  // Allocate a HeapNumber for the result and perform int-to-double
3693  // conversion. Don't touch r0 or r1 as they are needed if allocation
3694  // fails.
3695  __ LoadRoot(r6, Heap::kHeapNumberMapRootIndex);
3696  __ AllocateHeapNumber(r5, r3, r4, r6, &slow);
3697  // Now we can use r0 for the result as key is not needed any more.
3698  __ mov(r0, r5);
3699 
3700  if (CpuFeatures::IsSupported(VFP3)) {
3701  CpuFeatures::Scope scope(VFP3);
3702  __ vmov(s0, value);
3703  __ vcvt_f64_s32(d0, s0);
3704  __ sub(r3, r0, Operand(kHeapObjectTag));
3705  __ vstr(d0, r3, HeapNumber::kValueOffset);
3706  __ Ret();
3707  } else {
3708  Register dst1 = r1;
3709  Register dst2 = r3;
3710  FloatingPointHelper::Destination dest =
3711  FloatingPointHelper::kCoreRegisters;
3712  FloatingPointHelper::ConvertIntToDouble(masm,
3713  value,
3714  dest,
3715  d0,
3716  dst1,
3717  dst2,
3718  r9,
3719  s0);
3720  __ str(dst1, FieldMemOperand(r0, HeapNumber::kMantissaOffset));
3721  __ str(dst2, FieldMemOperand(r0, HeapNumber::kExponentOffset));
3722  __ Ret();
3723  }
3724  } else if (elements_kind == EXTERNAL_UNSIGNED_INT_ELEMENTS) {
3725  // The test is different for unsigned int values. Since we need
3726  // the value to be in the range of a positive smi, we can't
3727  // handle either of the top two bits being set in the value.
3728  if (CpuFeatures::IsSupported(VFP3)) {
3729  CpuFeatures::Scope scope(VFP3);
3730  Label box_int, done;
3731  __ tst(value, Operand(0xC0000000));
3732  __ b(ne, &box_int);
3733  // Tag integer as smi and return it.
3734  __ mov(r0, Operand(value, LSL, kSmiTagSize));
3735  __ Ret();
3736 
3737  __ bind(&box_int);
3738  __ vmov(s0, value);
3739  // Allocate a HeapNumber for the result and perform int-to-double
3740  // conversion. Don't use r0 and r1 as AllocateHeapNumber clobbers all
3741  // registers - also when jumping due to exhausted young space.
3742  __ LoadRoot(r6, Heap::kHeapNumberMapRootIndex);
3743  __ AllocateHeapNumber(r2, r3, r4, r6, &slow);
3744 
3745  __ vcvt_f64_u32(d0, s0);
3746  __ sub(r1, r2, Operand(kHeapObjectTag));
3747  __ vstr(d0, r1, HeapNumber::kValueOffset);
3748 
3749  __ mov(r0, r2);
3750  __ Ret();
3751  } else {
3752  // Check whether unsigned integer fits into smi.
3753  Label box_int_0, box_int_1, done;
3754  __ tst(value, Operand(0x80000000));
3755  __ b(ne, &box_int_0);
3756  __ tst(value, Operand(0x40000000));
3757  __ b(ne, &box_int_1);
3758  // Tag integer as smi and return it.
3759  __ mov(r0, Operand(value, LSL, kSmiTagSize));
3760  __ Ret();
3761 
3762  Register hiword = value; // r2.
3763  Register loword = r3;
3764 
3765  __ bind(&box_int_0);
3766  // Integer does not have leading zeros.
3767  GenerateUInt2Double(masm, hiword, loword, r4, 0);
3768  __ b(&done);
3769 
3770  __ bind(&box_int_1);
3771  // Integer has one leading zero.
3772  GenerateUInt2Double(masm, hiword, loword, r4, 1);
3773 
3774 
3775  __ bind(&done);
3776  // Integer was converted to double in registers hiword:loword.
3777  // Wrap it into a HeapNumber. Don't use r0 and r1 as AllocateHeapNumber
3778  // clobbers all registers - also when jumping due to exhausted young
3779  // space.
3780  __ LoadRoot(r6, Heap::kHeapNumberMapRootIndex);
3781  __ AllocateHeapNumber(r4, r5, r7, r6, &slow);
3782 
3783  __ str(hiword, FieldMemOperand(r4, HeapNumber::kExponentOffset));
3784  __ str(loword, FieldMemOperand(r4, HeapNumber::kMantissaOffset));
3785 
3786  __ mov(r0, r4);
3787  __ Ret();
3788  }
3789  } else if (elements_kind == EXTERNAL_FLOAT_ELEMENTS) {
3790  // For the floating-point array type, we need to always allocate a
3791  // HeapNumber.
3792  if (CpuFeatures::IsSupported(VFP3)) {
3793  CpuFeatures::Scope scope(VFP3);
3794  // Allocate a HeapNumber for the result. Don't use r0 and r1 as
3795  // AllocateHeapNumber clobbers all registers - also when jumping due to
3796  // exhausted young space.
3797  __ LoadRoot(r6, Heap::kHeapNumberMapRootIndex);
3798  __ AllocateHeapNumber(r2, r3, r4, r6, &slow);
3799  __ vcvt_f64_f32(d0, s0);
3800  __ sub(r1, r2, Operand(kHeapObjectTag));
3801  __ vstr(d0, r1, HeapNumber::kValueOffset);
3802 
3803  __ mov(r0, r2);
3804  __ Ret();
3805  } else {
3806  // Allocate a HeapNumber for the result. Don't use r0 and r1 as
3807  // AllocateHeapNumber clobbers all registers - also when jumping due to
3808  // exhausted young space.
3809  __ LoadRoot(r6, Heap::kHeapNumberMapRootIndex);
3810  __ AllocateHeapNumber(r3, r4, r5, r6, &slow);
3811  // VFP is not available, do manual single to double conversion.
3812 
3813  // r2: floating point value (binary32)
3814  // r3: heap number for result
3815 
3816  // Extract mantissa to r0. OK to clobber r0 now as there are no jumps to
3817  // the slow case from here.
3818  __ and_(r0, value, Operand(kBinary32MantissaMask));
3819 
3820  // Extract exponent to r1. OK to clobber r1 now as there are no jumps to
3821  // the slow case from here.
3822  __ mov(r1, Operand(value, LSR, kBinary32MantissaBits));
3823  __ and_(r1, r1, Operand(kBinary32ExponentMask >> kBinary32MantissaBits));
3824 
3825  Label exponent_rebiased;
3826  __ teq(r1, Operand(0x00));
3827  __ b(eq, &exponent_rebiased);
3828 
3829  __ teq(r1, Operand(0xff));
3830  __ mov(r1, Operand(0x7ff), LeaveCC, eq);
3831  __ b(eq, &exponent_rebiased);
3832 
3833  // Rebias exponent.
3834  __ add(r1,
3835  r1,
3836  Operand(-kBinary32ExponentBias + HeapNumber::kExponentBias));
3837 
3838  __ bind(&exponent_rebiased);
3839  __ and_(r2, value, Operand(kBinary32SignMask));
3840  value = no_reg;
3841  __ orr(r2, r2, Operand(r1, LSL, HeapNumber::kMantissaBitsInTopWord));
3842 
3843  // Shift mantissa.
3844  static const int kMantissaShiftForHiWord =
3845  kBinary32MantissaBits - HeapNumber::kMantissaBitsInTopWord;
3846 
3847  static const int kMantissaShiftForLoWord =
3848  kBitsPerInt - kMantissaShiftForHiWord;
3849 
3850  __ orr(r2, r2, Operand(r0, LSR, kMantissaShiftForHiWord));
3851  __ mov(r0, Operand(r0, LSL, kMantissaShiftForLoWord));
3852 
3853  __ str(r2, FieldMemOperand(r3, HeapNumber::kExponentOffset));
3854  __ str(r0, FieldMemOperand(r3, HeapNumber::kMantissaOffset));
3855 
3856  __ mov(r0, r3);
3857  __ Ret();
3858  }
3859  } else if (elements_kind == EXTERNAL_DOUBLE_ELEMENTS) {
3860  if (CpuFeatures::IsSupported(VFP3)) {
3861  CpuFeatures::Scope scope(VFP3);
3862  // Allocate a HeapNumber for the result. Don't use r0 and r1 as
3863  // AllocateHeapNumber clobbers all registers - also when jumping due to
3864  // exhausted young space.
3865  __ LoadRoot(r6, Heap::kHeapNumberMapRootIndex);
3866  __ AllocateHeapNumber(r2, r3, r4, r6, &slow);
3867  __ sub(r1, r2, Operand(kHeapObjectTag));
3868  __ vstr(d0, r1, HeapNumber::kValueOffset);
3869 
3870  __ mov(r0, r2);
3871  __ Ret();
3872  } else {
3873  // Allocate a HeapNumber for the result. Don't use r0 and r1 as
3874  // AllocateHeapNumber clobbers all registers - also when jumping due to
3875  // exhausted young space.
3876  __ LoadRoot(r7, Heap::kHeapNumberMapRootIndex);
3877  __ AllocateHeapNumber(r4, r5, r6, r7, &slow);
3878 
3879  __ str(r2, FieldMemOperand(r4, HeapNumber::kMantissaOffset));
3880  __ str(r3, FieldMemOperand(r4, HeapNumber::kExponentOffset));
3881  __ mov(r0, r4);
3882  __ Ret();
3883  }
3884 
3885  } else {
3886  // Tag integer as smi and return it.
3887  __ mov(r0, Operand(value, LSL, kSmiTagSize));
3888  __ Ret();
3889  }
3890 
3891  // Slow case, key and receiver still in r0 and r1.
3892  __ bind(&slow);
3893  __ IncrementCounter(
3894  masm->isolate()->counters()->keyed_load_external_array_slow(),
3895  1, r2, r3);
3896 
3897  // ---------- S t a t e --------------
3898  // -- lr : return address
3899  // -- r0 : key
3900  // -- r1 : receiver
3901  // -----------------------------------
3902 
3903  __ Push(r1, r0);
3904 
3905  __ TailCallRuntime(Runtime::kKeyedGetProperty, 2, 1);
3906 
3907  __ bind(&miss_force_generic);
3908  Handle<Code> stub =
3909  masm->isolate()->builtins()->KeyedLoadIC_MissForceGeneric();
3910  __ Jump(stub, RelocInfo::CODE_TARGET);
3911 }
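// The two smi range tests above in one place (a host-side sketch): a signed
// 32-bit element fits a smi iff it lies in [-2^30, 2^30), which the stub
// detects with one cmp against 0xC0000000 and a branch on the N flag; an
// unsigned element fits iff both of its top two bits are clear (the tst).
#include <cstdint>

static bool SketchSignedFitsSmi(int32_t v) {
  return v >= -0x40000000 && v < 0x40000000;  // top two bits agree
}

static bool SketchUnsignedFitsSmi(uint32_t v) {
  return (v & 0xC0000000u) == 0;              // top two bits clear
}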
3912 
3913 
3914 void KeyedStoreStubCompiler::GenerateStoreExternalArray(
3915  MacroAssembler* masm,
3916  ElementsKind elements_kind) {
3917  // ---------- S t a t e --------------
3918  // -- r0 : value
3919  // -- r1 : key
3920  // -- r2 : receiver
3921  // -- lr : return address
3922  // -----------------------------------
3923  Label slow, check_heap_number, miss_force_generic;
3924 
3925  // Register usage.
3926  Register value = r0;
3927  Register key = r1;
3928  Register receiver = r2;
3929  // r3 mostly holds the elements array or the destination external array.
3930 
3931  // This stub is meant to be tail-jumped to; the receiver must already
3932  // have been verified by the caller to not be a smi.
3933 
3934  // Check that the key is a smi or a heap number convertible to a smi.
3935  GenerateSmiKeyCheck(masm, key, r4, r5, d1, &miss_force_generic);
3936 
3937  __ ldr(r3, FieldMemOperand(receiver, JSObject::kElementsOffset));
3938 
3939  // Check that the index is in range
3940  __ ldr(ip, FieldMemOperand(r3, ExternalArray::kLengthOffset));
3941  __ cmp(key, ip);
3942  // Unsigned comparison catches both negative and too-large values.
3943  __ b(hs, &miss_force_generic);
3944 
3945  // Handle both smis and HeapNumbers in the fast path. Go to the
3946  // runtime for all other kinds of values.
3947  // r3: external array.
3948  if (elements_kind == EXTERNAL_PIXEL_ELEMENTS) {
3949  // Double to pixel conversion is only implemented in the runtime for now.
3950  __ JumpIfNotSmi(value, &slow);
3951  } else {
3952  __ JumpIfNotSmi(value, &check_heap_number);
3953  }
3954  __ SmiUntag(r5, value);
3955  __ ldr(r3, FieldMemOperand(r3, ExternalArray::kExternalPointerOffset));
3956 
3957  // r3: base pointer of external storage.
3958  // r5: value (integer).
3959  switch (elements_kind) {
3960  case EXTERNAL_PIXEL_ELEMENTS:
3961  // Clamp the value to [0..255].
3962  __ Usat(r5, 8, Operand(r5));
3963  __ strb(r5, MemOperand(r3, key, LSR, 1));
3964  break;
3965  case EXTERNAL_BYTE_ELEMENTS:
3966  case EXTERNAL_UNSIGNED_BYTE_ELEMENTS:
3967  __ strb(r5, MemOperand(r3, key, LSR, 1));
3968  break;
3969  case EXTERNAL_SHORT_ELEMENTS:
3970  case EXTERNAL_UNSIGNED_SHORT_ELEMENTS:
3971  __ strh(r5, MemOperand(r3, key, LSL, 0));
3972  break;
3973  case EXTERNAL_INT_ELEMENTS:
3974  case EXTERNAL_UNSIGNED_INT_ELEMENTS:
3975  __ str(r5, MemOperand(r3, key, LSL, 1));
3976  break;
3977  case EXTERNAL_FLOAT_ELEMENTS:
3978  // Perform int-to-float conversion and store to memory.
3979  __ SmiUntag(r4, key);
3980  StoreIntAsFloat(masm, r3, r4, r5, r6, r7, r9);
3981  break;
3982  case EXTERNAL_DOUBLE_ELEMENTS:
3983  __ add(r3, r3, Operand(key, LSL, 2));
3984  // r3: effective address of the double element
3985  FloatingPointHelper::Destination destination;
3986  if (CpuFeatures::IsSupported(VFP3)) {
3987  destination = FloatingPointHelper::kVFPRegisters;
3988  } else {
3989  destination = FloatingPointHelper::kCoreRegisters;
3990  }
3991  FloatingPointHelper::ConvertIntToDouble(
3992  masm, r5, destination,
3993  d0, r6, r7, // These are: double_dst, dst1, dst2.
3994  r4, s2); // These are: scratch2, single_scratch.
3995  if (destination == FloatingPointHelper::kVFPRegisters) {
3996  CpuFeatures::Scope scope(VFP3);
3997  __ vstr(d0, r3, 0);
3998  } else {
3999  __ str(r6, MemOperand(r3, 0));
4000  __ str(r7, MemOperand(r3, Register::kSizeInBytes));
4001  }
4002  break;
4003  case FAST_ELEMENTS:
4004  case FAST_SMI_ELEMENTS:
4005  case FAST_DOUBLE_ELEMENTS:
4006  case FAST_HOLEY_ELEMENTS:
4007  case FAST_HOLEY_SMI_ELEMENTS:
4008  case FAST_HOLEY_DOUBLE_ELEMENTS:
4009  case DICTIONARY_ELEMENTS:
4010  case NON_STRICT_ARGUMENTS_ELEMENTS:
4011  UNREACHABLE();
4012  break;
4013  }
4014 
4015  // Entry registers are intact, r0 holds the value which is the return value.
4016  __ Ret();
4017 
4018  if (elements_kind != EXTERNAL_PIXEL_ELEMENTS) {
4019  // r3: external array.
4020  __ bind(&check_heap_number);
4021  __ CompareObjectType(value, r5, r6, HEAP_NUMBER_TYPE);
4022  __ b(ne, &slow);
4023 
4024  __ ldr(r3, FieldMemOperand(r3, ExternalArray::kExternalPointerOffset));
4025 
4026  // r3: base pointer of external storage.
4027 
4028  // The WebGL specification leaves the behavior of storing NaN and
4029  // +/-Infinity into integer arrays basically undefined. For more
4030  // reproducible behavior, convert these to zero.
4031  if (CpuFeatures::IsSupported(VFP3)) {
4032  CpuFeatures::Scope scope(VFP3);
4033 
4034  if (elements_kind == EXTERNAL_FLOAT_ELEMENTS) {
4035  // vldr requires offset to be a multiple of 4 so we can not
4036  // include -kHeapObjectTag into it.
4037  __ sub(r5, r0, Operand(kHeapObjectTag));
4038  __ vldr(d0, r5, HeapNumber::kValueOffset);
4039  __ add(r5, r3, Operand(key, LSL, 1));
4040  __ vcvt_f32_f64(s0, d0);
4041  __ vstr(s0, r5, 0);
4042  } else if (elements_kind == EXTERNAL_DOUBLE_ELEMENTS) {
4043  __ sub(r5, r0, Operand(kHeapObjectTag));
4044  __ vldr(d0, r5, HeapNumber::kValueOffset);
4045  __ add(r5, r3, Operand(key, LSL, 2));
4046  __ vstr(d0, r5, 0);
4047  } else {
4048  // Hoisted load. vldr requires offset to be a multiple of 4 so we can
4049  // not include -kHeapObjectTag into it.
4050  __ sub(r5, value, Operand(kHeapObjectTag));
4051  __ vldr(d0, r5, HeapNumber::kValueOffset);
4052  __ EmitECMATruncate(r5, d0, s2, r6, r7, r9);
4053 
4054  switch (elements_kind) {
4055  case EXTERNAL_BYTE_ELEMENTS:
4056  case EXTERNAL_UNSIGNED_BYTE_ELEMENTS:
4057  __ strb(r5, MemOperand(r3, key, LSR, 1));
4058  break;
4059  case EXTERNAL_SHORT_ELEMENTS:
4060  case EXTERNAL_UNSIGNED_SHORT_ELEMENTS:
4061  __ strh(r5, MemOperand(r3, key, LSL, 0));
4062  break;
4063  case EXTERNAL_INT_ELEMENTS:
4064  case EXTERNAL_UNSIGNED_INT_ELEMENTS:
4065  __ str(r5, MemOperand(r3, key, LSL, 1));
4066  break;
4067  case EXTERNAL_PIXEL_ELEMENTS:
4068  case EXTERNAL_FLOAT_ELEMENTS:
4069  case EXTERNAL_DOUBLE_ELEMENTS:
4070  case FAST_ELEMENTS:
4071  case FAST_SMI_ELEMENTS:
4072  case FAST_DOUBLE_ELEMENTS:
4073  case FAST_HOLEY_ELEMENTS:
4074  case FAST_HOLEY_SMI_ELEMENTS:
4075  case FAST_HOLEY_DOUBLE_ELEMENTS:
4076  case DICTIONARY_ELEMENTS:
4077  case NON_STRICT_ARGUMENTS_ELEMENTS:
4078  UNREACHABLE();
4079  break;
4080  }
4081  }
4082 
4083  // Entry registers are intact, r0 holds the value which is the return
4084  // value.
4085  __ Ret();
4086  } else {
4087  // VFP3 is not available, do manual conversions.
4088  __ ldr(r5, FieldMemOperand(value, HeapNumber::kExponentOffset));
4089  __ ldr(r6, FieldMemOperand(value, HeapNumber::kMantissaOffset));
4090 
4091  if (elements_kind == EXTERNAL_FLOAT_ELEMENTS) {
4092  Label done, nan_or_infinity_or_zero;
4093  static const int kMantissaInHiWordShift =
4094  kBinary32MantissaBits - HeapNumber::kMantissaBitsInTopWord;
4095 
4096  static const int kMantissaInLoWordShift =
4097  kBitsPerInt - kMantissaInHiWordShift;
4098 
4099  // Test for all special exponent values: zeros, subnormal numbers, NaNs
4100  // and infinities. All these should be converted to 0.
4101  __ mov(r7, Operand(HeapNumber::kExponentMask));
4102  __ and_(r9, r5, Operand(r7), SetCC);
4103  __ b(eq, &nan_or_infinity_or_zero);
4104 
4105  __ teq(r9, Operand(r7));
4106  __ mov(r9, Operand(kBinary32ExponentMask), LeaveCC, eq);
4107  __ b(eq, &nan_or_infinity_or_zero);
4108 
4109  // Rebias exponent.
4110  __ mov(r9, Operand(r9, LSR, HeapNumber::kExponentShift));
4111  __ add(r9,
4112  r9,
4113  Operand(kBinary32ExponentBias - HeapNumber::kExponentBias));
4114 
4115  __ cmp(r9, Operand(kBinary32MaxExponent));
4116  __ and_(r5, r5, Operand(HeapNumber::kSignMask), LeaveCC, gt);
4117  __ orr(r5, r5, Operand(kBinary32ExponentMask), LeaveCC, gt);
4118  __ b(gt, &done);
4119 
4120  __ cmp(r9, Operand(kBinary32MinExponent));
4121  __ and_(r5, r5, Operand(HeapNumber::kSignMask), LeaveCC, lt);
4122  __ b(lt, &done);
4123 
4124  __ and_(r7, r5, Operand(HeapNumber::kSignMask));
4125  __ and_(r5, r5, Operand(HeapNumber::kMantissaMask));
4126  __ orr(r7, r7, Operand(r5, LSL, kMantissaInHiWordShift));
4127  __ orr(r7, r7, Operand(r6, LSR, kMantissaInLoWordShift));
4128  __ orr(r5, r7, Operand(r9, LSL, kBinary32ExponentShift));
4129 
4130  __ bind(&done);
4131  __ str(r5, MemOperand(r3, key, LSL, 1));
4132  // Entry registers are intact, r0 holds the value which is the return
4133  // value.
4134  __ Ret();
4135 
4136  __ bind(&nan_or_infinity_or_zero);
4137  __ and_(r7, r5, Operand(HeapNumber::kSignMask));
4138  __ and_(r5, r5, Operand(HeapNumber::kMantissaMask));
4139  __ orr(r9, r9, r7);
4140  __ orr(r9, r9, Operand(r5, LSL, kMantissaInHiWordShift));
4141  __ orr(r5, r9, Operand(r6, LSR, kMantissaInLoWordShift));
4142  __ b(&done);
4143  } else if (elements_kind == EXTERNAL_DOUBLE_ELEMENTS) {
4144  __ add(r7, r3, Operand(key, LSL, 2));
4145  // r7: effective address of destination element.
4146  __ str(r6, MemOperand(r7, 0));
4147  __ str(r5, MemOperand(r7, Register::kSizeInBytes));
4148  __ Ret();
4149  } else {
4150  bool is_signed_type = IsElementTypeSigned(elements_kind);
4151  int meaningfull_bits = is_signed_type ? (kBitsPerInt - 1) : kBitsPerInt;
4152  int32_t min_value = is_signed_type ? 0x80000000 : 0x00000000;
4153 
4154  Label done, sign;
4155 
4156  // Test for all special exponent values: zeros, subnormal numbers, NaNs
4157  // and infinities. All these should be converted to 0.
4158  __ mov(r7, Operand(HeapNumber::kExponentMask));
4159  __ and_(r9, r5, Operand(r7), SetCC);
4160  __ mov(r5, Operand(0, RelocInfo::NONE), LeaveCC, eq);
4161  __ b(eq, &done);
4162 
4163  __ teq(r9, Operand(r7));
4164  __ mov(r5, Operand(0, RelocInfo::NONE), LeaveCC, eq);
4165  __ b(eq, &done);
4166 
4167  // Unbias exponent.
4168  __ mov(r9, Operand(r9, LSR, HeapNumber::kExponentShift));
4169  __ sub(r9, r9, Operand(HeapNumber::kExponentBias), SetCC);
4170  // If exponent is negative then result is 0.
4171  __ mov(r5, Operand(0, RelocInfo::NONE), LeaveCC, mi);
4172  __ b(mi, &done);
4173 
4174  // If exponent is too big then result is minimal value.
4175  __ cmp(r9, Operand(meaningfull_bits - 1));
4176  __ mov(r5, Operand(min_value), LeaveCC, ge);
4177  __ b(ge, &done);
4178 
4179  __ and_(r7, r5, Operand(HeapNumber::kSignMask), SetCC);
4180  __ and_(r5, r5, Operand(HeapNumber::kMantissaMask));
4181  __ orr(r5, r5, Operand(1u << HeapNumber::kMantissaBitsInTopWord));
4182 
4183  __ rsb(r9, r9, Operand(HeapNumber::kMantissaBitsInTopWord), SetCC);
4184  __ mov(r5, Operand(r5, LSR, r9), LeaveCC, pl);
4185  __ b(pl, &sign);
4186 
4187  __ rsb(r9, r9, Operand(0, RelocInfo::NONE));
4188  __ mov(r5, Operand(r5, LSL, r9));
4189  __ rsb(r9, r9, Operand(meaningfull_bits));
4190  __ orr(r5, r5, Operand(r6, LSR, r9));
4191 
4192  __ bind(&sign);
4193  __ teq(r7, Operand(0, RelocInfo::NONE));
4194  __ rsb(r5, r5, Operand(0, RelocInfo::NONE), LeaveCC, ne);
4195 
4196  __ bind(&done);
4197  switch (elements_kind) {
4198  case EXTERNAL_BYTE_ELEMENTS:
4199  case EXTERNAL_UNSIGNED_BYTE_ELEMENTS:
4200  __ strb(r5, MemOperand(r3, key, LSR, 1));
4201  break;
4202  case EXTERNAL_SHORT_ELEMENTS:
4203  case EXTERNAL_UNSIGNED_SHORT_ELEMENTS:
4204  __ strh(r5, MemOperand(r3, key, LSL, 0));
4205  break;
4206  case EXTERNAL_INT_ELEMENTS:
4207  case EXTERNAL_UNSIGNED_INT_ELEMENTS:
4208  __ str(r5, MemOperand(r3, key, LSL, 1));
4209  break;
4210  case EXTERNAL_PIXEL_ELEMENTS:
4211  case EXTERNAL_FLOAT_ELEMENTS:
4212  case EXTERNAL_DOUBLE_ELEMENTS:
4213  case FAST_ELEMENTS:
4214  case FAST_SMI_ELEMENTS:
4215  case FAST_DOUBLE_ELEMENTS:
4216  case FAST_HOLEY_ELEMENTS:
4217  case FAST_HOLEY_SMI_ELEMENTS:
4218  case FAST_HOLEY_DOUBLE_ELEMENTS:
4219  case DICTIONARY_ELEMENTS:
4220  case NON_STRICT_ARGUMENTS_ELEMENTS:
4221  UNREACHABLE();
4222  break;
4223  }
4224  }
4225  }
4226  }
4227 
4228  // Slow case; the value, key and receiver are still in r0, r1 and r2.
4229  __ bind(&slow);
4230  __ IncrementCounter(
4231  masm->isolate()->counters()->keyed_load_external_array_slow(),
4232  1, r2, r3);
4233 
4234  // ---------- S t a t e --------------
4235  // -- lr : return address
4236  // -- r0 : value
4237  // -- r1 : key, r2 : receiver
4238  // -----------------------------------
4239  Handle<Code> slow_ic =
4240  masm->isolate()->builtins()->KeyedStoreIC_Slow();
4241  __ Jump(slow_ic, RelocInfo::CODE_TARGET);
4242 
4243  // Miss case, call the runtime.
4244  __ bind(&miss_force_generic);
4245 
4246  // ---------- S t a t e --------------
4247  // -- lr : return address
4248  // -- r0 : value
4249  // -- r1 : key, r2 : receiver
4250  // -----------------------------------
4251 
4252  Handle<Code> miss_ic =
4253  masm->isolate()->builtins()->KeyedStoreIC_MissForceGeneric();
4254  __ Jump(miss_ic, RelocInfo::CODE_TARGET);
4255 }
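// Sketch of the storage policy described in the comments above: non-finite
// doubles written into integer-typed external arrays become 0 so behavior
// stays reproducible across the VFP and non-VFP paths. Out-of-range finite
// values are handled differently by the real stub (reduced to a minimal
// value); they are folded to 0 here only to keep the sketch free of
// undefined casts.
#include <cmath>
#include <cstdint>

static int32_t SketchReproducibleInt32(double value) {
  if (!std::isfinite(value)) return 0;                           // NaN, +/-Inf -> 0
  if (value >= 2147483648.0 || value < -2147483648.0) return 0;  // sketch-only clamp
  return static_cast<int32_t>(value);                            // truncate toward zero
}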
4256 
4257 
4258 void KeyedLoadStubCompiler::GenerateLoadFastElement(MacroAssembler* masm) {
4259  // ----------- S t a t e -------------
4260  // -- lr : return address
4261  // -- r0 : key
4262  // -- r1 : receiver
4263  // -----------------------------------
4264  Label miss_force_generic;
4265 
4266  // This stub is meant to be tail-jumped to; the receiver must already
4267  // have been verified by the caller to not be a smi.
4268 
4269  // Check that the key is a smi or a heap number convertible to a smi.
4270  GenerateSmiKeyCheck(masm, r0, r4, r5, d1, &miss_force_generic);
4271 
4272  // Get the elements array.
4273  __ ldr(r2, FieldMemOperand(r1, JSObject::kElementsOffset));
4274  __ AssertFastElements(r2);
4275 
4276  // Check that the key is within bounds.
4277  __ ldr(r3, FieldMemOperand(r2, FixedArray::kLengthOffset));
4278  __ cmp(r0, Operand(r3));
4279  __ b(hs, &miss_force_generic);
4280 
4281  // Load the result and make sure it's not the hole.
4282  __ add(r3, r2, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
4283  STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize < kPointerSizeLog2);
4284  __ ldr(r4,
4285  MemOperand(r3, r0, LSL, kPointerSizeLog2 - kSmiTagSize));
4286  __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
4287  __ cmp(r4, ip);
4288  __ b(eq, &miss_force_generic);
4289  __ mov(r0, r4);
4290  __ Ret();
4291 
4292  __ bind(&miss_force_generic);
4293  Handle<Code> stub =
4294  masm->isolate()->builtins()->KeyedLoadIC_MissForceGeneric();
4295  __ Jump(stub, RelocInfo::CODE_TARGET);
4296 }
4297 
4298 
4299 void KeyedLoadStubCompiler::GenerateLoadFastDoubleElement(
4300  MacroAssembler* masm) {
4301  // ----------- S t a t e -------------
4302  // -- lr : return address
4303  // -- r0 : key
4304  // -- r1 : receiver
4305  // -----------------------------------
4306  Label miss_force_generic, slow_allocate_heapnumber;
4307 
4308  Register key_reg = r0;
4309  Register receiver_reg = r1;
4310  Register elements_reg = r2;
4311  Register heap_number_reg = r2;
4312  Register indexed_double_offset = r3;
4313  Register scratch = r4;
4314  Register scratch2 = r5;
4315  Register scratch3 = r6;
4316  Register heap_number_map = r7;
4317 
4318  // This stub is meant to be tail-jumped to; the receiver must already
4319  // have been verified by the caller to not be a smi.
4320 
4321  // Check that the key is a smi or a heap number convertible to a smi.
4322  GenerateSmiKeyCheck(masm, key_reg, r4, r5, d1, &miss_force_generic);
4323 
4324  // Get the elements array.
4325  __ ldr(elements_reg,
4326  FieldMemOperand(receiver_reg, JSObject::kElementsOffset));
4327 
4328  // Check that the key is within bounds.
4329  __ ldr(scratch, FieldMemOperand(elements_reg, FixedArray::kLengthOffset));
4330  __ cmp(key_reg, Operand(scratch));
4331  __ b(hs, &miss_force_generic);
4332 
4333  // Load the upper word of the double in the fixed array and test for NaN.
4334  __ add(indexed_double_offset, elements_reg,
4335  Operand(key_reg, LSL, kDoubleSizeLog2 - kSmiTagSize));
4336  uint32_t upper_32_offset = FixedArray::kHeaderSize + sizeof(kHoleNanLower32);
4337  __ ldr(scratch, FieldMemOperand(indexed_double_offset, upper_32_offset));
4338  __ cmp(scratch, Operand(kHoleNanUpper32));
4339  __ b(&miss_force_generic, eq);
4340 
4341  // Non-NaN. Allocate a new heap number and copy the double value into it.
4342  __ LoadRoot(heap_number_map, Heap::kHeapNumberMapRootIndex);
4343  __ AllocateHeapNumber(heap_number_reg, scratch2, scratch3,
4344  heap_number_map, &slow_allocate_heapnumber);
4345 
4346  // Don't need to reload the upper 32 bits of the double, it's already in
4347  // scratch.
4348  __ str(scratch, FieldMemOperand(heap_number_reg,
4349  HeapNumber::kExponentOffset));
4350  __ ldr(scratch, FieldMemOperand(indexed_double_offset,
4351  FixedArray::kHeaderSize));
4352  __ str(scratch, FieldMemOperand(heap_number_reg,
4353  HeapNumber::kMantissaOffset));
4354 
4355  __ mov(r0, heap_number_reg);
4356  __ Ret();
4357 
4358  __ bind(&slow_allocate_heapnumber);
4359  Handle<Code> slow_ic =
4360  masm->isolate()->builtins()->KeyedLoadIC_Slow();
4361  __ Jump(slow_ic, RelocInfo::CODE_TARGET);
4362 
4363  __ bind(&miss_force_generic);
4364  Handle<Code> miss_ic =
4365  masm->isolate()->builtins()->KeyedLoadIC_MissForceGeneric();
4366  __ Jump(miss_ic, RelocInfo::CODE_TARGET);
4367 }
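// Sketch of the hole test above: the hole in a FixedDoubleArray is a NaN
// with a reserved bit pattern, so comparing only the upper 32 bits of the
// element against kHoleNanUpper32 suffices. The constant is passed in here
// rather than hard-coded.
#include <cstdint>
#include <cstring>

static bool SketchIsTheHoleNan(double element, uint32_t hole_nan_upper32) {
  uint64_t bits;
  std::memcpy(&bits, &element, sizeof(bits));  // bit-exact view of the double
  return static_cast<uint32_t>(bits >> 32) == hole_nan_upper32;
}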
4368 
4369 
4370 void KeyedStoreStubCompiler::GenerateStoreFastElement(
4371  MacroAssembler* masm,
4372  bool is_js_array,
4373  ElementsKind elements_kind,
4374  KeyedAccessGrowMode grow_mode) {
4375  // ----------- S t a t e -------------
4376  // -- r0 : value
4377  // -- r1 : key
4378  // -- r2 : receiver
4379  // -- lr : return address
4380  // -- r3 : scratch
4381  // -- r4 : scratch (elements)
4382  // -----------------------------------
4383  Label miss_force_generic, transition_elements_kind, grow, slow;
4384  Label finish_store, check_capacity;
4385 
4386  Register value_reg = r0;
4387  Register key_reg = r1;
4388  Register receiver_reg = r2;
4389  Register scratch = r4;
4390  Register elements_reg = r3;
4391  Register length_reg = r5;
4392  Register scratch2 = r6;
4393 
4394  // This stub is meant to be tail-jumped to; the receiver must already
4395  // have been verified by the caller to not be a smi.
4396 
4397  // Check that the key is a smi or a heap number convertible to a smi.
4398  GenerateSmiKeyCheck(masm, key_reg, r4, r5, d1, &miss_force_generic);
4399 
4400  if (IsFastSmiElementsKind(elements_kind)) {
4401  __ JumpIfNotSmi(value_reg, &transition_elements_kind);
4402  }
4403 
4404  // Check that the key is within bounds.
4405  __ ldr(elements_reg,
4406  FieldMemOperand(receiver_reg, JSObject::kElementsOffset));
4407  if (is_js_array) {
4408  __ ldr(scratch, FieldMemOperand(receiver_reg, JSArray::kLengthOffset));
4409  } else {
4410  __ ldr(scratch, FieldMemOperand(elements_reg, FixedArray::kLengthOffset));
4411  }
4412  // Compare smis.
4413  __ cmp(key_reg, scratch);
4414  if (is_js_array && grow_mode == ALLOW_JSARRAY_GROWTH) {
4415  __ b(hs, &grow);
4416  } else {
4417  __ b(hs, &miss_force_generic);
4418  }
4419 
4420  // Make sure elements is a fast element array, not 'cow'.
4421  __ CheckMap(elements_reg,
4422  scratch,
4423  Heap::kFixedArrayMapRootIndex,
4424  &miss_force_generic,
4425  DONT_DO_SMI_CHECK);
4426 
4427  __ bind(&finish_store);
4428  if (IsFastSmiElementsKind(elements_kind)) {
4429  __ add(scratch,
4430  elements_reg,
4431  Operand(FixedArray::kHeaderSize - kHeapObjectTag));
4432  STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize < kPointerSizeLog2);
4433  __ add(scratch,
4434  scratch,
4435  Operand(key_reg, LSL, kPointerSizeLog2 - kSmiTagSize));
4436  __ str(value_reg, MemOperand(scratch));
4437  } else {
4438  ASSERT(IsFastObjectElementsKind(elements_kind));
4439  __ add(scratch,
4440  elements_reg,
4441  Operand(FixedArray::kHeaderSize - kHeapObjectTag));
4442  STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize < kPointerSizeLog2);
4443  __ add(scratch,
4444  scratch,
4445  Operand(key_reg, LSL, kPointerSizeLog2 - kSmiTagSize));
4446  __ str(value_reg, MemOperand(scratch));
4447  __ mov(receiver_reg, value_reg);
4448  __ RecordWrite(elements_reg, // Object.
4449  scratch, // Address.
4450  receiver_reg, // Value.
4451  kLRHasNotBeenSaved,
4452  kDontSaveFPRegs);
4453  }
4454  // value_reg (r0) is preserved.
4455  // Done.
4456  __ Ret();
4457 
4458  __ bind(&miss_force_generic);
4459  Handle<Code> ic =
4460  masm->isolate()->builtins()->KeyedStoreIC_MissForceGeneric();
4461  __ Jump(ic, RelocInfo::CODE_TARGET);
4462 
4463  __ bind(&transition_elements_kind);
4464  Handle<Code> ic_miss = masm->isolate()->builtins()->KeyedStoreIC_Miss();
4465  __ Jump(ic_miss, RelocInfo::CODE_TARGET);
4466 
4467  if (is_js_array && grow_mode == ALLOW_JSARRAY_GROWTH) {
4468  // Grow the array by a single element if possible.
4469  __ bind(&grow);
4470 
4471  // Make sure the array is only growing by a single element; anything else
4472  // must be handled by the runtime. Flags already set by previous compare.
4473  __ b(ne, &miss_force_generic);
4474 
4475  // Check for the empty array, and preallocate a small backing store if
4476  // possible.
4477  __ ldr(length_reg,
4478  FieldMemOperand(receiver_reg, JSArray::kLengthOffset));
4479  __ ldr(elements_reg,
4480  FieldMemOperand(receiver_reg, JSObject::kElementsOffset));
4481  __ CompareRoot(elements_reg, Heap::kEmptyFixedArrayRootIndex);
4482  __ b(ne, &check_capacity);
4483 
4484  int size = FixedArray::SizeFor(JSArray::kPreallocatedArrayElements);
4485  __ AllocateInNewSpace(size, elements_reg, scratch, scratch2, &slow,
4486  TAG_OBJECT);
4487 
4488  __ LoadRoot(scratch, Heap::kFixedArrayMapRootIndex);
4489  __ str(scratch, FieldMemOperand(elements_reg, JSObject::kMapOffset));
4490  __ mov(scratch, Operand(Smi::FromInt(JSArray::kPreallocatedArrayElements)));
4491  __ str(scratch, FieldMemOperand(elements_reg, FixedArray::kLengthOffset));
4492  __ LoadRoot(scratch, Heap::kTheHoleValueRootIndex);
4493  for (int i = 1; i < JSArray::kPreallocatedArrayElements; ++i) {
4494  __ str(scratch, FieldMemOperand(elements_reg, FixedArray::SizeFor(i)));
4495  }
4496 
4497  // Store the element at index zero.
4498  __ str(value_reg, FieldMemOperand(elements_reg, FixedArray::SizeFor(0)));
4499 
4500  // Install the new backing store in the JSArray.
4501  __ str(elements_reg,
4502  FieldMemOperand(receiver_reg, JSObject::kElementsOffset));
4503  __ RecordWriteField(receiver_reg, JSObject::kElementsOffset, elements_reg,
4504  scratch, kLRHasNotBeenSaved, kDontSaveFPRegs,
4505  EMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
4506 
4507  // Increment the length of the array.
4508  __ mov(length_reg, Operand(Smi::FromInt(1)));
4509  __ str(length_reg, FieldMemOperand(receiver_reg, JSArray::kLengthOffset));
4510  __ Ret();
4511 
4512  __ bind(&check_capacity);
4513  // Check for COW elements; in general they are not handled by this stub.
4514  __ CheckMap(elements_reg,
4515  scratch,
4516  Heap::kFixedCOWArrayMapRootIndex,
4517  &miss_force_generic,
4518  DONT_DO_SMI_CHECK);
4519 
4520  __ ldr(scratch, FieldMemOperand(elements_reg, FixedArray::kLengthOffset));
4521  __ cmp(length_reg, scratch);
4522  __ b(hs, &slow);
4523 
4524  // Grow the array and finish the store.
4525  __ add(length_reg, length_reg, Operand(Smi::FromInt(1)));
4526  __ str(length_reg, FieldMemOperand(receiver_reg, JSArray::kLengthOffset));
4527  __ jmp(&finish_store);
4528 
4529  __ bind(&slow);
4530  Handle<Code> ic_slow = masm->isolate()->builtins()->KeyedStoreIC_Slow();
4531  __ Jump(ic_slow, RelocInfo::CODE_TARGET);
4532  }
4533 }
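// Sketch of the element address arithmetic in finish_store above, with the
// 32-bit ARM constants assumed (kHeapObjectTag == 1, kPointerSizeLog2 == 2,
// kSmiTagSize == 1, FixedArray header == map word + length word): the key
// is still a smi (index << 1), so one further shift scales it to bytes.
#include <cstdint>

static uintptr_t SketchFastElementAddress(uintptr_t tagged_elements,
                                          uint32_t smi_key) {  // already tagged index
  const uintptr_t kHeapObjectTag = 1;
  const uintptr_t kFixedArrayHeaderSize = 2 * 4;  // two 32-bit words
  const int kPointerSizeLog2 = 2;
  const int kSmiTagSize = 1;
  return tagged_elements - kHeapObjectTag + kFixedArrayHeaderSize +
         (static_cast<uintptr_t>(smi_key) << (kPointerSizeLog2 - kSmiTagSize));
}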
4534 
4535 
4536 void KeyedStoreStubCompiler::GenerateStoreFastDoubleElement(
4537  MacroAssembler* masm,
4538  bool is_js_array,
4539  KeyedAccessGrowMode grow_mode) {
4540  // ----------- S t a t e -------------
4541  // -- r0 : value
4542  // -- r1 : key
4543  // -- r2 : receiver
4544  // -- lr : return address
4545  // -- r3 : scratch
4546  // -- r4 : scratch
4547  // -- r5 : scratch
4548  // -----------------------------------
4549  Label miss_force_generic, transition_elements_kind, grow, slow;
4550  Label finish_store, check_capacity;
4551 
4552  Register value_reg = r0;
4553  Register key_reg = r1;
4554  Register receiver_reg = r2;
4555  Register elements_reg = r3;
4556  Register scratch1 = r4;
4557  Register scratch2 = r5;
4558  Register scratch3 = r6;
4559  Register scratch4 = r7;
4560  Register length_reg = r7;
4561 
4562  // This stub is meant to be tail-jumped to; the receiver must already
4563  // have been verified by the caller to not be a smi.
4564 
4565  // Check that the key is a smi or a heap number convertible to a smi.
4566  GenerateSmiKeyCheck(masm, key_reg, r4, r5, d1, &miss_force_generic);
4567 
4568  __ ldr(elements_reg,
4569  FieldMemOperand(receiver_reg, JSObject::kElementsOffset));
4570 
4571  // Check that the key is within bounds.
4572  if (is_js_array) {
4573  __ ldr(scratch1, FieldMemOperand(receiver_reg, JSArray::kLengthOffset));
4574  } else {
4575  __ ldr(scratch1,
4576  FieldMemOperand(elements_reg, FixedDoubleArray::kLengthOffset));
4577  }
4578  // Compare smis, unsigned compare catches both negative and out-of-bound
4579  // indexes.
4580  __ cmp(key_reg, scratch1);
4581  if (grow_mode == ALLOW_JSARRAY_GROWTH) {
4582  __ b(hs, &grow);
4583  } else {
4584  __ b(hs, &miss_force_generic);
4585  }
4586 
4587  __ bind(&finish_store);
4588  __ StoreNumberToDoubleElements(value_reg,
4589  key_reg,
4590  receiver_reg,
4591  elements_reg,
4592  scratch1,
4593  scratch2,
4594  scratch3,
4595  scratch4,
4596  &transition_elements_kind);
4597  __ Ret();
4598 
4599  // Handle store cache miss, replacing the ic with the generic stub.
4600  __ bind(&miss_force_generic);
4601  Handle<Code> ic =
4602  masm->isolate()->builtins()->KeyedStoreIC_MissForceGeneric();
4603  __ Jump(ic, RelocInfo::CODE_TARGET);
4604 
4605  __ bind(&transition_elements_kind);
4606  Handle<Code> ic_miss = masm->isolate()->builtins()->KeyedStoreIC_Miss();
4607  __ Jump(ic_miss, RelocInfo::CODE_TARGET);
4608 
4609  if (is_js_array && grow_mode == ALLOW_JSARRAY_GROWTH) {
4610  // Grow the array by a single element if possible.
4611  __ bind(&grow);
4612 
4613  // Make sure the array is only growing by a single element; anything else
4614  // must be handled by the runtime. Flags already set by previous compare.
4615  __ b(ne, &miss_force_generic);
4616 
4617  // Transition on values that can't be stored in a FixedDoubleArray.
4618  Label value_is_smi;
4619  __ JumpIfSmi(value_reg, &value_is_smi);
4620  __ ldr(scratch1, FieldMemOperand(value_reg, HeapObject::kMapOffset));
4621  __ CompareRoot(scratch1, Heap::kHeapNumberMapRootIndex);
4622  __ b(ne, &transition_elements_kind);
4623  __ bind(&value_is_smi);
4624 
4625  // Check for the empty array, and preallocate a small backing store if
4626  // possible.
4627  __ ldr(length_reg,
4628  FieldMemOperand(receiver_reg, JSArray::kLengthOffset));
4629  __ ldr(elements_reg,
4630  FieldMemOperand(receiver_reg, JSObject::kElementsOffset));
4631  __ CompareRoot(elements_reg, Heap::kEmptyFixedArrayRootIndex);
4632  __ b(ne, &check_capacity);
4633 
4634  int size = FixedDoubleArray::SizeFor(JSArray::kPreallocatedArrayElements);
4635  __ AllocateInNewSpace(size, elements_reg, scratch1, scratch2, &slow,
4636  TAG_OBJECT);
4637 
4638  // Initialize the new FixedDoubleArray. Leave elements uninitialized for
4639  // efficiency; they are guaranteed to be initialized before use.
4640  __ LoadRoot(scratch1, Heap::kFixedDoubleArrayMapRootIndex);
4641  __ str(scratch1, FieldMemOperand(elements_reg, JSObject::kMapOffset));
4642  __ mov(scratch1,
4643  Operand(Smi::FromInt(JSArray::kPreallocatedArrayElements)));
4644  __ str(scratch1,
4645  FieldMemOperand(elements_reg, FixedDoubleArray::kLengthOffset));
4646 
4647  // Install the new backing store in the JSArray.
4648  __ str(elements_reg,
4649  FieldMemOperand(receiver_reg, JSObject::kElementsOffset));
4650  __ RecordWriteField(receiver_reg, JSObject::kElementsOffset, elements_reg,
4651  scratch1, kLRHasNotBeenSaved, kDontSaveFPRegs,
4652  EMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
4653 
4654  // Increment the length of the array.
4655  __ mov(length_reg, Operand(Smi::FromInt(1)));
4656  __ str(length_reg, FieldMemOperand(receiver_reg, JSArray::kLengthOffset));
4657  __ ldr(elements_reg,
4658  FieldMemOperand(receiver_reg, JSObject::kElementsOffset));
4659  __ jmp(&finish_store);
4660 
4661  __ bind(&check_capacity);
4662  // Make sure that the backing store can hold additional elements.
4663  __ ldr(scratch1,
4664  FieldMemOperand(elements_reg, FixedDoubleArray::kLengthOffset));
4665  __ cmp(length_reg, scratch1);
4666  __ b(hs, &slow);
4667 
4668  // Grow the array and finish the store.
4669  __ add(length_reg, length_reg, Operand(Smi::FromInt(1)));
4670  __ str(length_reg, FieldMemOperand(receiver_reg, JSArray::kLengthOffset));
4671  __ jmp(&finish_store);
4672 
4673  __ bind(&slow);
4674  Handle<Code> ic_slow = masm->isolate()->builtins()->KeyedStoreIC_Slow();
4675  __ Jump(ic_slow, RelocInfo::CODE_TARGET);
4676  }
4677 }
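// Sketch of the grow-by-one policy shared by the two store stubs above:
// only a store exactly one past the current length may grow the array
// inline, and only while the backing store still has spare capacity;
// everything else is deferred to the runtime or the generic IC.
#include <cstdint>

static bool SketchCanGrowInline(uint32_t key, uint32_t length, uint32_t capacity) {
  return key == length && length < capacity;
}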
4678 
4679 
4680 #undef __
4681 
4682 } } // namespace v8::internal
4683 
4684 #endif // V8_TARGET_ARCH_ARM
in which to serialize heap") DEFINE_bool(help, false,"Print usage message, including flags, on console") DEFINE_bool(dump_counters, false,"Dump counters on exit") DEFINE_string(map_counters,"","Map counters to a file") DEFINE_args(js_arguments, JSARGUMENTS_INIT,"Pass all remaining arguments to the script. Alias for \"--\".") DEFINE_bool(debug_compile_events, true,"Enable debugger compile events") DEFINE_bool(debug_script_collected_events, true,"Enable debugger script collected events") DEFINE_bool(gdbjit, false,"enable GDBJIT interface (disables compacting GC)") DEFINE_bool(gdbjit_full, false,"enable GDBJIT interface for all code objects") DEFINE_bool(gdbjit_dump, false,"dump elf objects with debug info to disk") DEFINE_string(gdbjit_dump_filter,"","dump only objects containing this substring") DEFINE_bool(force_marking_deque_overflows, false,"force overflows of marking deque by reducing it's size ""to 64 words") DEFINE_bool(stress_compaction, false,"stress the GC compactor to flush out bugs (implies ""--force_marking_deque_overflows)")#define FLAG DEFINE_bool(enable_slow_asserts, false,"enable asserts that are slow to execute") DEFINE_bool(trace_codegen, false,"print name of functions for which code is generated") DEFINE_bool(print_source, false,"pretty print source code") DEFINE_bool(print_builtin_source, false,"pretty print source code for builtins") DEFINE_bool(print_ast, false,"print source AST") DEFINE_bool(print_builtin_ast, false,"print source AST for builtins") DEFINE_string(stop_at,"","function name where to insert a breakpoint") DEFINE_bool(print_builtin_scopes, false,"print scopes for builtins") DEFINE_bool(print_scopes, false,"print scopes") DEFINE_bool(trace_contexts, false,"trace contexts operations") DEFINE_bool(gc_greedy, false,"perform GC prior to some allocations") DEFINE_bool(gc_verbose, false,"print stuff during garbage collection") DEFINE_bool(heap_stats, false,"report heap statistics before and after GC") DEFINE_bool(code_stats, false,"report code statistics after GC") DEFINE_bool(verify_heap, false,"verify heap pointers before and after GC") DEFINE_bool(print_handles, false,"report handles after GC") DEFINE_bool(print_global_handles, false,"report global handles after GC") DEFINE_bool(trace_ic, false,"trace inline cache state transitions") DEFINE_bool(print_interfaces, false,"print interfaces") DEFINE_bool(print_interface_details, false,"print interface inference details") DEFINE_int(print_interface_depth, 5,"depth for printing interfaces") DEFINE_bool(trace_normalization, false,"prints when objects are turned into dictionaries.") DEFINE_bool(trace_lazy, false,"trace lazy compilation") DEFINE_bool(collect_heap_spill_statistics, false,"report heap spill statistics along with heap_stats ""(requires heap_stats)") DEFINE_bool(trace_isolates, false,"trace isolate state changes") DEFINE_bool(log_state_changes, false,"Log state changes.") DEFINE_bool(regexp_possessive_quantifier, false,"enable possessive quantifier syntax for testing") DEFINE_bool(trace_regexp_bytecodes, false,"trace regexp bytecode execution") DEFINE_bool(trace_regexp_assembler, false,"trace regexp macro assembler calls.")#define FLAG DEFINE_bool(log, false,"Minimal logging (no API, code, GC, suspect, or handles samples).") DEFINE_bool(log_all, false,"Log all events to the log file.") DEFINE_bool(log_runtime, false,"Activate runtime system %Log call.") DEFINE_bool(log_api, false,"Log API events to the log file.") DEFINE_bool(log_code, false,"Log code events to the log file without profiling.") 
DEFINE_bool(log_gc, false,"Log heap samples on garbage collection for the hp2ps tool.") DEFINE_bool(log_handles, false,"Log global handle events.") DEFINE_bool(log_snapshot_positions, false,"log positions of (de)serialized objects in the snapshot.") DEFINE_bool(log_suspect, false,"Log suspect operations.") DEFINE_bool(prof, false,"Log statistical profiling information (implies --log-code).") DEFINE_bool(prof_auto, true,"Used with --prof, starts profiling automatically") DEFINE_bool(prof_lazy, false,"Used with --prof, only does sampling and logging"" when profiler is active (implies --noprof_auto).") DEFINE_bool(prof_browser_mode, true,"Used with --prof, turns on browser-compatible mode for profiling.") DEFINE_bool(log_regexp, false,"Log regular expression execution.") DEFINE_bool(sliding_state_window, false,"Update sliding state window counters.") DEFINE_string(logfile,"v8.log","Specify the name of the log file.") DEFINE_bool(ll_prof, false,"Enable low-level linux profiler.")#define FLAG DEFINE_bool(trace_elements_transitions, false,"trace elements transitions") DEFINE_bool(print_code_stubs, false,"print code stubs") DEFINE_bool(test_secondary_stub_cache, false,"test secondary stub cache by disabling the primary one") DEFINE_bool(test_primary_stub_cache, false,"test primary stub cache by disabling the secondary one") DEFINE_bool(print_code, false,"print generated code") DEFINE_bool(print_opt_code, false,"print optimized code") DEFINE_bool(print_unopt_code, false,"print unoptimized code before ""printing optimized code based on it") DEFINE_bool(print_code_verbose, false,"print more information for code") DEFINE_bool(print_builtin_code, false,"print generated code for builtins")#47"/Users/thlorenz/dev/dx/v8-perf/build/v8/src/flags.cc"2 namespace{struct Flag{enum FlagType{TYPE_BOOL, TYPE_INT, TYPE_FLOAT, TYPE_STRING, TYPE_ARGS} name
Definition: flags.cc:1349
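The DEFINE_* run above is V8's flag table (flags.cc / flag-definitions.h) flattened into this page by the documentation generator. As a minimal sketch of the pattern, an illustrative standalone re-implementation rather than V8's actual preprocessor setup, each DEFINE_bool(name, default, comment) amounts to a FLAG_<name> variable that runtime code tests:

// Illustrative sketch only; not V8's real flags.cc machinery.
#include <cstdio>

#define DEFINE_bool(nam, def, cmt) bool FLAG_##nam = def;  // cmt would feed --help

DEFINE_bool(trace_gc, false, "print one trace line following each garbage collection")

int main() {
  // Runtime code guards optional behavior on the flag, e.g. --trace_gc.
  if (FLAG_trace_gc) std::printf("GC trace line would go here\n");
  return 0;
}

V8's real implementation additionally parses --flag / --noflag command-line forms into these variables (see the Flag struct and flags.cc entries above).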
Handle< Code > CompileStoreViaSetter(Handle< JSObject > receiver, Handle< JSFunction > setter, Handle< String > name)
const DwVfpRegister d0
static const int kGlobalReceiverOffset
Definition: objects.h:6085
static void GenerateLoadFastDoubleElement(MacroAssembler *masm)
static PropertyType ExtractTypeFromFlags(Flags flags)
Definition: objects-inl.h:3359
const Register r6
const int kBinary32MantissaBits
Definition: globals.h:263
Flag flags[]
Definition: flags.cc:1467
static const int kExponentBias
Definition: objects.h:1321
typedef int int32_t
Definition: unicode.cc:47
static bool IsSupported(CpuFeature f)
static const int kExternalPointerOffset
Definition: objects.h:3720
static const int kHasNamedInterceptor
Definition: objects.h:5003
static const int kIsAccessCheckNeeded
Definition: objects.h:5007
List< Handle< Map > > MapHandleList
Definition: list.h:193
#define ASSERT(condition)
Definition: checks.h:270
Handle< Code > CompileStoreCallback(Handle< JSObject > object, Handle< AccessorInfo > callback, Handle< String > name)
const int kPointerSizeLog2
Definition: globals.h:246
static const int kInstanceSizeOffset
Definition: objects.h:4981
static const int kDebugInfoOffset
Definition: objects.h:5614
const Register r2
static const int kGlobalContextOffset
Definition: objects.h:6084
static const int kContextOffset
Definition: objects.h:5986
Handle< Code > CompileLoadField(Handle< JSObject > object, Handle< JSObject > holder, int index, Handle< String > name)
Handle< Code > CompileStoreInterceptor(Handle< JSObject > object, Handle< String > name)
static void GenerateNegativeLookup(MacroAssembler *masm, Label *miss, Label *done, Register receiver, Register properties, Handle< String > name, Register scratch0)
Handle< Code > CompileStoreField(Handle< JSObject > object, int index, Handle< Map > transition, Handle< String > name)
static const int kHashFieldOffset
Definition: objects.h:7099
const uint32_t kVFPFlushToZeroMask
const Register sp
Handle< Code > CompileLoadInterceptor(Handle< JSObject > object, Handle< JSObject > holder, Handle< String > name)
#define UNREACHABLE()
Definition: checks.h:50
STATIC_ASSERT((FixedDoubleArray::kHeaderSize & kDoubleAlignmentMask) == 0)
static const int kLengthOffset
Definition: objects.h:7098
Handle< Code > CompileCallGlobal(Handle< JSObject > object, Handle< GlobalObject > holder, Handle< JSGlobalPropertyCell > cell, Handle< JSFunction > function, Handle< String > name)
static const int kExponentShift
Definition: objects.h:1322
Handle< Code > CompileLoadField(Handle< String > name, Handle< JSObject > object, Handle< JSObject > holder, int index)
static const int kValueOffset
Definition: objects.h:1307
const uint32_t kHoleNanUpper32
Definition: v8globals.h:476
Handle< Code > CompileStoreGlobal(Handle< GlobalObject > object, Handle< JSGlobalPropertyCell > holder, Handle< String > name)
Handle< Code > CompileLoadViaGetter(Handle< String > name, Handle< JSObject > receiver, Handle< JSObject > holder, Handle< JSFunction > getter)
Handle< Code > CompileLoadConstant(Handle< JSObject > object, Handle< JSObject > holder, Handle< JSFunction > value, Handle< String > name)
Handle< Code > CompileLoadConstant(Handle< String > name, Handle< JSObject > object, Handle< JSObject > holder, Handle< JSFunction > value)
const Register ip
Handle< Code > CompileCallField(Handle< JSObject > object, Handle< JSObject > holder, int index, Handle< String > name)
const Register r9
const int kPointerSize
Definition: globals.h:234
static void GenerateStoreFastElement(MacroAssembler *masm, bool is_js_array, ElementsKind element_kind, KeyedAccessGrowMode grow_mode)
Handle< Code > CompileLoadStringLength(Handle< String > name)
const int kHeapObjectTag
Definition: v8.h:3848
const uint32_t kHoleNanLower32
Definition: v8globals.h:477
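kHoleNanUpper32 and kHoleNanLower32 (just above) give the bit pattern of the NaN that marks "the hole" in fast double arrays. A minimal sketch of the membership test, assuming a little-endian target and taking the constant as a parameter rather than restating it from v8globals.h:

#include <cstdint>
#include <cstring>

// `hole_nan_upper32` stands in for kHoleNanUpper32; IsTheHoleNan is a
// hypothetical helper for illustration.
bool IsTheHoleNan(double value, uint32_t hole_nan_upper32) {
  uint64_t bits;
  std::memcpy(&bits, &value, sizeof bits);  // safe type-pun
  // Comparing only the upper word separates the hole's NaN payload from
  // doubles produced by ordinary arithmetic.
  return static_cast<uint32_t>(bits >> 32) == hole_nan_upper32;
}

int main() {
  const uint32_t kAssumedHoleUpper = 0x7FFFFFFFu;  // placeholder; see v8globals.h
  return IsTheHoleNan(1.5, kAssumedHoleUpper) ? 1 : 0;  // ordinary double: 0
}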
#define __
static bool decode(uint32_t value)
Definition: utils.h:272
const Register pc
static const int kPropertiesOffset
Definition: objects.h:2113
const int kBinary32MinExponent
Definition: globals.h:262
Handle< Code > CompileLoadGlobal(Handle< JSObject > object, Handle< GlobalObject > holder, Handle< JSGlobalPropertyCell > cell, Handle< String > name, bool is_dont_delete)
bool IsFastSmiElementsKind(ElementsKind kind)
const uint32_t kVFPExceptionMask
const SwVfpRegister s0
static void GenerateMapChangeElementsTransition(MacroAssembler *masm)
const int kBinary32ExponentBias
Definition: globals.h:260
static const int kDataOffset
Definition: objects.h:8326
static int SizeFor(int length)
Definition: objects.h:2369
const Register r0
static const int kElementsOffset
Definition: objects.h:2114
const uint32_t kStringTag
Definition: objects.h:437
#define BASE_EMBEDDED
Definition: allocation.h:68
const int kBitsPerInt
Definition: globals.h:254
static void GenerateLoadDictionaryElement(MacroAssembler *masm)
static void GenerateLoadExternalArray(MacroAssembler *masm, ElementsKind elements_kind)
static const int kLengthOffset
Definition: objects.h:8111
static int SizeFor(int length)
Definition: objects.h:2288
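Both SizeFor(int length) entries follow the same arithmetic: the byte size of a fixed array is its header plus one slot per element. A worked sketch with illustrative constants (the kIllustrative* values are assumptions; the real ones live in objects.h):

#include <cassert>

const int kIllustrativePointerSize = 4;  // 32-bit ARM target
const int kIllustrativeHeaderSize = 8;   // map pointer + length field

// Mirrors the SizeFor() pattern: header plus one slot per element.
int SizeFor(int length) {
  return kIllustrativeHeaderSize + length * kIllustrativePointerSize;
}

int main() {
  assert(SizeFor(0) == 8);   // an empty array is just the header
  assert(SizeFor(3) == 20);  // 8 + 3 * 4
  return 0;
}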
static const int kHeaderSize
Definition: objects.h:2233
const Register lr
static const int kMapOffset
Definition: objects.h:1219
static const int kMantissaBitsInTopWord
Definition: objects.h:1323
const uint32_t kIsNotStringMask
Definition: objects.h:436
List< Handle< Code > > CodeHandleList
Definition: list.h:194
const Register r1
static const int kLengthOffset
Definition: objects.h:2232
static void ConvertIntToDouble(MacroAssembler *masm, Register int_scratch, Destination destination, DwVfpRegister double_dst, Register dst1, Register dst2, Register scratch2, SwVfpRegister single_scratch)
Handle< Code > CompileCallInterceptor(Handle< JSObject > object, Handle< JSObject > holder, Handle< String > name)
MemOperand FieldMemOperand(Register object, int offset)
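FieldMemOperand builds the operand for a field access off a tagged pointer: heap pointers carry kHeapObjectTag in the low bit, so the tag is folded into the displacement instead of being stripped from the base register. A sketch of the address arithmetic (FieldAddress is a hypothetical helper; kHeapObjectTag == 1 is assumed from this vintage of V8):

#include <cstdint>
#include <cstdio>

const intptr_t kTag = 1;  // assumed kHeapObjectTag: heap pointers have the low bit set

// The tag is subtracted from the field offset so the tagged base can be
// dereferenced directly, exactly one instruction's worth of displacement.
intptr_t FieldAddress(intptr_t tagged_object, int offset) {
  return tagged_object + offset - kTag;
}

int main() {
  intptr_t raw = 0x1000;         // real start of the object
  intptr_t tagged = raw + kTag;  // what a receiver register holds
  std::printf("%#lx\n", static_cast<unsigned long>(FieldAddress(tagged, 8)));
  return 0;  // prints 0x1008: the tag cancels out
}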
static const int kDataOffset
Definition: objects.h:8352
static void GenerateLoadFastElement(MacroAssembler *masm)
static const uint32_t kSignMask
Definition: objects.h:1316
friend class Isolate
Definition: stub-cache.h:391
const int kSmiTagSize
Definition: v8.h:3854
static void GenerateStoreFastDoubleElement(MacroAssembler *masm, bool is_js_array, KeyedAccessGrowMode grow_mode)
static const int kHeaderSize
Definition: objects.h:4513
static Handle< T > null()
Definition: handles.h:86
#define ASSERT_EQ(v1, v2)
Definition: checks.h:271
Handle< Code > CompileLoadArrayLength(Handle< String > name)
static const uint32_t kMantissaMask
Definition: objects.h:1318
const int kSmiTag
Definition: v8.h:3853
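kSmiTag and kSmiTagSize describe the small-integer encoding assumed throughout this file: on 32-bit targets a smi is the integer shifted left by one, leaving a clear low bit, while heap pointers have the low bit set. A sketch (ToSmi, FromSmi, and IsSmiValue are hypothetical names for illustration):

#include <cstdint>
#include <cstdio>

const int kTagSize = 1;  // kSmiTagSize
const int kTag = 0;      // kSmiTag: smis have a clear low bit

int32_t ToSmi(int32_t n)   { return n << kTagSize; }    // tag
int32_t FromSmi(int32_t s) { return s >> kTagSize; }    // untag
bool IsSmiValue(int32_t v) { return (v & 1) == kTag; }  // low-bit test

int main() {
  int32_t smi = ToSmi(21);
  std::printf("smi=%d value=%d is_smi=%d\n", smi, FromSmi(smi), IsSmiValue(smi));
  return 0;  // smi=42 value=21 is_smi=1
}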
Handle< Code > CompileCallConstant(Handle< Object > object, Handle< JSObject > holder, Handle< JSFunction > function, Handle< String > name, CheckType check)
static AccessorInfo * cast(Object *obj)
const uint32_t kBinary32ExponentMask
Definition: globals.h:258
const uint32_t kBinary32SignMask
Definition: globals.h:257
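The kBinary32* constants describe the standard IEEE-754 single-precision layout (1 sign bit, 8 exponent bits with bias 127, 23 mantissa bits). A worked decomposition, restating only the standard layout rather than V8's declarations:

#include <cstdint>
#include <cstdio>
#include <cstring>

const uint32_t kSignMask = 0x80000000u;      // kBinary32SignMask
const uint32_t kExponentMask = 0x7F800000u;  // kBinary32ExponentMask
const int kMantissaBits = 23;                // kBinary32MantissaBits
const int kExponentBias = 127;               // kBinary32ExponentBias

int main() {
  float f = -6.5f;  // -1 * 1.625 * 2^2
  uint32_t bits;
  std::memcpy(&bits, &f, sizeof bits);
  int sign = (bits & kSignMask) ? -1 : 1;
  int exponent = static_cast<int>((bits & kExponentMask) >> kMantissaBits) - kExponentBias;
  uint32_t mantissa = bits & ((1u << kMantissaBits) - 1);
  // Expected: sign=-1 exponent=2 mantissa=0x500000 (fraction .101b)
  std::printf("sign=%d exponent=%d mantissa=%#x\n",
              sign, exponent, static_cast<unsigned>(mantissa));
  return 0;
}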
const int kHeapObjectTagSize
Definition: v8.h:3849
static const int kSizeInBytes
Definition: assembler-arm.h:75
static Handle< JSGlobalPropertyCell > EnsurePropertyCell(Handle< GlobalObject > global, Handle< String > name)
Definition: objects.cc:11797
static bool HasCustomCallGenerator(Handle< JSFunction > function)
Definition: stub-cache.cc:1428
static const int kPreallocatedArrayElements
Definition: objects.h:8108
static const int kPrototypeOffset
Definition: objects.h:4953
static const int kFlagsNotUsedInLookup
Definition: objects.h:4557
const char * name_
Definition: flags.cc:1352
const int kInvalidProtoDepth
const uint32_t kVFPRoundingModeMask
const Register no_reg
static const int kValueOffset
Definition: objects.h:6188
const DwVfpRegister d1
const Register fp
Handle< Code > CompileLoadCallback(Handle< String > name, Handle< JSObject > object, Handle< JSObject > holder, Handle< AccessorInfo > callback)
Handle< Code > CompileLoadPolymorphic(MapHandleList *receiver_maps, CodeHandleList *handler_ics)
Handle< Code > CompileLoadInterceptor(Handle< JSObject > object, Handle< JSObject > holder, Handle< String > name)
Handle< Code > CompileStorePolymorphic(MapHandleList *receiver_maps, CodeHandleList *handler_stubs, MapHandleList *transitioned_maps)
static const int kSharedFunctionInfoOffset
Definition: objects.h:5984
KeyedAccessGrowMode
Definition: objects.h:141
static const int kMantissaBits
Definition: objects.h:1319
void check(i::Vector< const char > string)
static const int kExponentOffset
Definition: objects.h:1313
Handle< Code > CompileLoadElement(Handle< Map > receiver_map)
Handle< Code > CompileConstructStub(Handle< JSFunction > function)
static JSObject * cast(Object *obj)
const Register r5
static const int kInstanceTypeOffset
Definition: objects.h:4992
static const int kMantissaOffset
Definition: objects.h:1312
const Register r4
const Register r7
static JSFunction * cast(Object *obj)