v8 3.11.10 (node 0.8.26)
V8 is Google's open source JavaScript engine
stub-cache-x64.cc
1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are
4 // met:
5 //
6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided
11 // with the distribution.
12 // * Neither the name of Google Inc. nor the names of its
13 // contributors may be used to endorse or promote products derived
14 // from this software without specific prior written permission.
15 //
16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27 
28 #include "v8.h"
29 
30 #if defined(V8_TARGET_ARCH_X64)
31 
32 #include "ic-inl.h"
33 #include "codegen.h"
34 #include "stub-cache.h"
35 
36 namespace v8 {
37 namespace internal {
38 
39 #define __ ACCESS_MASM(masm)
40 
41 
42 static void ProbeTable(Isolate* isolate,
43  MacroAssembler* masm,
44  Code::Flags flags,
45  StubCache::Table table,
46  Register receiver,
47  Register name,
48  // The offset is scaled by 4, based on
49  // kHeapObjectTagSize, which is two bits
50  Register offset) {
51  // We need to scale up the pointer by 2 because the offset is scaled by less
52  // than the pointer size.
53  ASSERT(kPointerSizeLog2 == kHeapObjectTagSize + 1);
54  ScaleFactor scale_factor = times_2;
55 
56  ASSERT_EQ(24, sizeof(StubCache::Entry));
57  // The offset register holds the entry offset times four (due to masking
58  // and shifting optimizations).
59  ExternalReference key_offset(isolate->stub_cache()->key_reference(table));
60  ExternalReference value_offset(isolate->stub_cache()->value_reference(table));
61  Label miss;
62 
63  // Multiply by 3 because there are 3 fields per entry (name, code, map).
64  __ lea(offset, Operand(offset, offset, times_2, 0));
65 
66  __ LoadAddress(kScratchRegister, key_offset);
67 
68  // Check that the key in the entry matches the name.
69  // Multiply entry offset by 16 to get the entry address. Since the
70  // offset register already holds the entry offset times four, multiply
71  // by a further four.
72  __ cmpl(name, Operand(kScratchRegister, offset, scale_factor, 0));
73  __ j(not_equal, &miss);
74 
75  // Get the map entry from the cache.
76  // Use key_offset + kPointerSize * 2, rather than loading map_offset.
77  __ movq(kScratchRegister,
78  Operand(kScratchRegister, offset, scale_factor, kPointerSize * 2));
79  __ cmpq(kScratchRegister, FieldOperand(receiver, HeapObject::kMapOffset));
80  __ j(not_equal, &miss);
81 
82  // Get the code entry from the cache.
83  __ LoadAddress(kScratchRegister, value_offset);
84  __ movq(kScratchRegister,
85  Operand(kScratchRegister, offset, scale_factor, 0));
86 
87  // Check that the flags match what we're looking for.
88  __ movl(offset, FieldOperand(kScratchRegister, Code::kFlagsOffset));
89  __ and_(offset, Immediate(~Code::kFlagsNotUsedInLookup));
90  __ cmpl(offset, Immediate(flags));
91  __ j(not_equal, &miss);
92 
93 #ifdef DEBUG
94  if (FLAG_test_secondary_stub_cache && table == StubCache::kPrimary) {
95  __ jmp(&miss);
96  } else if (FLAG_test_primary_stub_cache && table == StubCache::kSecondary) {
97  __ jmp(&miss);
98  }
99 #endif
100 
101  // Jump to the first instruction in the code stub.
102  __ addq(kScratchRegister, Immediate(Code::kHeaderSize - kHeapObjectTag));
103  __ jmp(kScratchRegister);
104 
105  __ bind(&miss);
106 }
107 
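// Note on the scaling above (a minimal sketch with hypothetical names, not
// V8 API): the offset register arrives holding index << kHeapObjectTagSize,
// i.e. index * 4, so the lea (offset * 3) followed by the times_2 operand
// scale yields index * 4 * 3 * 2 == index * 24 == index * sizeof(Entry).
//
//   uintptr_t EntryAddress(uintptr_t table_base, uintptr_t offset_times_4) {
//     uintptr_t offset_times_12 = offset_times_4 + offset_times_4 * 2;  // lea
//     return table_base + offset_times_12 * 2;  // scale_factor == times_2
//   }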
108 
109 // Helper function used to check that the dictionary doesn't contain
110 // the property. This function may return false negatives, so miss_label
111 // must always call a backup property check that is complete.
112 // This function is safe to call if the receiver has fast properties.
113 // Name must be a symbol and receiver must be a heap object.
114 static void GenerateDictionaryNegativeLookup(MacroAssembler* masm,
115  Label* miss_label,
116  Register receiver,
117  Handle<String> name,
118  Register r0,
119  Register r1) {
120  ASSERT(name->IsSymbol());
121  Counters* counters = masm->isolate()->counters();
122  __ IncrementCounter(counters->negative_lookups(), 1);
123  __ IncrementCounter(counters->negative_lookups_miss(), 1);
124 
125  __ movq(r0, FieldOperand(receiver, HeapObject::kMapOffset));
126 
127  const int kInterceptorOrAccessCheckNeededMask =
128  (1 << Map::kHasNamedInterceptor) | (1 << Map::kIsAccessCheckNeeded);
129 
130  // Bail out if the receiver has a named interceptor or requires access checks.
131  __ testb(FieldOperand(r0, Map::kBitFieldOffset),
132  Immediate(kInterceptorOrAccessCheckNeededMask));
133  __ j(not_zero, miss_label);
134 
135  // Check that receiver is a JSObject.
136  __ CmpInstanceType(r0, FIRST_SPEC_OBJECT_TYPE);
137  __ j(below, miss_label);
138 
139  // Load properties array.
140  Register properties = r0;
141  __ movq(properties, FieldOperand(receiver, JSObject::kPropertiesOffset));
142 
143  // Check that the properties array is a dictionary.
144  __ CompareRoot(FieldOperand(properties, HeapObject::kMapOffset),
145  Heap::kHashTableMapRootIndex);
146  __ j(not_equal, miss_label);
147 
148  Label done;
149  StringDictionaryLookupStub::GenerateNegativeLookup(masm,
150  miss_label,
151  &done,
152  properties,
153  name,
154  r1);
155  __ bind(&done);
156  __ DecrementCounter(counters->negative_lookups_miss(), 1);
157 }
158 
159 
160 void StubCache::GenerateProbe(MacroAssembler* masm,
161  Code::Flags flags,
162  Register receiver,
163  Register name,
164  Register scratch,
165  Register extra,
166  Register extra2,
167  Register extra3) {
168  Isolate* isolate = masm->isolate();
169  Label miss;
170  USE(extra); // The register extra is not used on the X64 platform.
171  USE(extra2); // The register extra2 is not used on the X64 platform.
172  USE(extra3); // The register extra3 is not used on the X64 platform.
173  // Make sure that code is valid. The multiplying code relies on the
174  // entry size being 24.
175  ASSERT(sizeof(Entry) == 24);
176 
177  // Make sure the flags do not name a specific type.
178  ASSERT(Code::ExtractTypeFromFlags(flags) == 0);
179 
180  // Make sure that there are no register conflicts.
181  ASSERT(!scratch.is(receiver));
182  ASSERT(!scratch.is(name));
183 
184  // Check scratch register is valid, extra and extra2 are unused.
185  ASSERT(!scratch.is(no_reg));
186  ASSERT(extra2.is(no_reg));
187  ASSERT(extra3.is(no_reg));
188 
189  Counters* counters = masm->isolate()->counters();
190  __ IncrementCounter(counters->megamorphic_stub_cache_probes(), 1);
191 
192  // Check that the receiver isn't a smi.
193  __ JumpIfSmi(receiver, &miss);
194 
195  // Get the map of the receiver and compute the hash.
196  __ movl(scratch, FieldOperand(name, String::kHashFieldOffset));
197  // Use only the low 32 bits of the map pointer.
198  __ addl(scratch, FieldOperand(receiver, HeapObject::kMapOffset));
199  __ xor_(scratch, Immediate(flags));
200  // We mask out the last two bits because they are not part of the hash and
201  // they are always 01 for maps. Also in the two 'and' instructions below.
202  __ and_(scratch, Immediate((kPrimaryTableSize - 1) << kHeapObjectTagSize));
203 
204  // Probe the primary table.
205  ProbeTable(isolate, masm, flags, kPrimary, receiver, name, scratch);
206 
207  // Primary miss: Compute hash for secondary probe.
208  __ movl(scratch, FieldOperand(name, String::kHashFieldOffset));
209  __ addl(scratch, FieldOperand(receiver, HeapObject::kMapOffset));
210  __ xor_(scratch, Immediate(flags));
211  __ and_(scratch, Immediate((kPrimaryTableSize - 1) << kHeapObjectTagSize));
212  __ subl(scratch, name);
213  __ addl(scratch, Immediate(flags));
214  __ and_(scratch, Immediate((kSecondaryTableSize - 1) << kHeapObjectTagSize));
215 
216  // Probe the secondary table.
217  ProbeTable(isolate, masm, flags, kSecondary, receiver, name, scratch);
218 
219  // Cache miss: Fall-through and let caller handle the miss by
220  // entering the runtime system.
221  __ bind(&miss);
222  __ IncrementCounter(counters->megamorphic_stub_cache_misses(), 1);
223 }
224 
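// A rough C++ restatement (hypothetical sketch, not V8 API) of the two hash
// computations emitted above, where hash, map and name stand for the low 32
// bits of the name's hash field, the receiver's map pointer and the name
// pointer respectively:
//
//   uint32_t primary = ((hash + map) ^ flags) &
//                      ((kPrimaryTableSize - 1) << kHeapObjectTagSize);
//   uint32_t secondary = ((primary - name) + flags) &
//                        ((kSecondaryTableSize - 1) << kHeapObjectTagSize);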
225 
226 void StubCompiler::GenerateLoadGlobalFunctionPrototype(MacroAssembler* masm,
227  int index,
228  Register prototype) {
229  // Load the global or builtins object from the current context.
230  __ movq(prototype,
231  Operand(rsi, Context::SlotOffset(Context::GLOBAL_INDEX)));
232  // Load the global context from the global or builtins object.
233  __ movq(prototype,
234  FieldOperand(prototype, GlobalObject::kGlobalContextOffset));
235  // Load the function from the global context.
236  __ movq(prototype, Operand(prototype, Context::SlotOffset(index)));
237  // Load the initial map. The global functions all have initial maps.
238  __ movq(prototype,
239  FieldOperand(prototype, JSFunction::kPrototypeOrInitialMapOffset));
240  // Load the prototype from the initial map.
241  __ movq(prototype, FieldOperand(prototype, Map::kPrototypeOffset));
242 }
243 
244 
245 void StubCompiler::GenerateDirectLoadGlobalFunctionPrototype(
246  MacroAssembler* masm,
247  int index,
248  Register prototype,
249  Label* miss) {
250  Isolate* isolate = masm->isolate();
251  // Check we're still in the same context.
252  __ Move(prototype, isolate->global());
253  __ cmpq(Operand(rsi, Context::SlotOffset(Context::GLOBAL_INDEX)),
254  prototype);
255  __ j(not_equal, miss);
256  // Get the global function with the given index.
257  Handle<JSFunction> function(
258  JSFunction::cast(isolate->global_context()->get(index)));
259  // Load its initial map. The global functions all have initial maps.
260  __ Move(prototype, Handle<Map>(function->initial_map()));
261  // Load the prototype from the initial map.
262  __ movq(prototype, FieldOperand(prototype, Map::kPrototypeOffset));
263 }
264 
265 
266 void StubCompiler::GenerateLoadArrayLength(MacroAssembler* masm,
267  Register receiver,
268  Register scratch,
269  Label* miss_label) {
270  // Check that the receiver isn't a smi.
271  __ JumpIfSmi(receiver, miss_label);
272 
273  // Check that the object is a JS array.
274  __ CmpObjectType(receiver, JS_ARRAY_TYPE, scratch);
275  __ j(not_equal, miss_label);
276 
277  // Load length directly from the JS array.
278  __ movq(rax, FieldOperand(receiver, JSArray::kLengthOffset));
279  __ ret(0);
280 }
281 
282 
283 // Generate code to check if an object is a string. If the object is
284 // a string, the map's instance type is left in the scratch register.
285 static void GenerateStringCheck(MacroAssembler* masm,
286  Register receiver,
287  Register scratch,
288  Label* smi,
289  Label* non_string_object) {
290  // Check that the object isn't a smi.
291  __ JumpIfSmi(receiver, smi);
292 
293  // Check that the object is a string.
294  __ movq(scratch, FieldOperand(receiver, HeapObject::kMapOffset));
295  __ movzxbq(scratch, FieldOperand(scratch, Map::kInstanceTypeOffset));
296  STATIC_ASSERT(kNotStringTag != 0);
297  __ testl(scratch, Immediate(kNotStringTag));
298  __ j(not_zero, non_string_object);
299 }
300 
301 
302 void StubCompiler::GenerateLoadStringLength(MacroAssembler* masm,
303  Register receiver,
304  Register scratch1,
305  Register scratch2,
306  Label* miss,
307  bool support_wrappers) {
308  Label check_wrapper;
309 
310  // Check if the object is a string leaving the instance type in the
311  // scratch register.
312  GenerateStringCheck(masm, receiver, scratch1, miss,
313  support_wrappers ? &check_wrapper : miss);
314 
315  // Load length directly from the string.
316  __ movq(rax, FieldOperand(receiver, String::kLengthOffset));
317  __ ret(0);
318 
319  if (support_wrappers) {
320  // Check if the object is a JSValue wrapper.
321  __ bind(&check_wrapper);
322  __ cmpl(scratch1, Immediate(JS_VALUE_TYPE));
323  __ j(not_equal, miss);
324 
325  // Check if the wrapped value is a string and load the length
326  // directly if it is.
327  __ movq(scratch2, FieldOperand(receiver, JSValue::kValueOffset));
328  GenerateStringCheck(masm, scratch2, scratch1, miss, miss);
329  __ movq(rax, FieldOperand(scratch2, String::kLengthOffset));
330  __ ret(0);
331  }
332 }
333 
334 
335 void StubCompiler::GenerateLoadFunctionPrototype(MacroAssembler* masm,
336  Register receiver,
337  Register result,
338  Register scratch,
339  Label* miss_label) {
340  __ TryGetFunctionPrototype(receiver, result, miss_label);
341  if (!result.is(rax)) __ movq(rax, result);
342  __ ret(0);
343 }
344 
345 
346 // Load a fast property out of a holder object (src). In-object properties
347 // are loaded directly otherwise the property is loaded from the properties
348 // fixed array.
349 void StubCompiler::GenerateFastPropertyLoad(MacroAssembler* masm,
350  Register dst,
351  Register src,
352  Handle<JSObject> holder,
353  int index) {
354  // Adjust for the number of properties stored in the holder.
355  index -= holder->map()->inobject_properties();
356  if (index < 0) {
357  // Get the property straight out of the holder.
358  int offset = holder->map()->instance_size() + (index * kPointerSize);
359  __ movq(dst, FieldOperand(src, offset));
360  } else {
361  // Calculate the offset into the properties array.
362  int offset = index * kPointerSize + FixedArray::kHeaderSize;
363  __ movq(dst, FieldOperand(src, JSObject::kPropertiesOffset));
364  __ movq(dst, FieldOperand(dst, offset));
365  }
366 }
367 
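// Worked example for the index adjustment above (hypothetical numbers): with
// inobject_properties() == 4, index 2 adjusts to -2 and resolves to an
// in-object slot at instance_size - 2 * kPointerSize, while index 6 adjusts
// to 2 and is read from the properties FixedArray:
//
//   int adjusted = index - inobject_properties;
//   int offset = adjusted < 0
//       ? instance_size + adjusted * kPointerSize              // in-object
//       : FixedArray::kHeaderSize + adjusted * kPointerSize;   // backing store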
368 
369 static void PushInterceptorArguments(MacroAssembler* masm,
370  Register receiver,
371  Register holder,
372  Register name,
373  Handle<JSObject> holder_obj) {
374  __ push(name);
375  Handle<InterceptorInfo> interceptor(holder_obj->GetNamedInterceptor());
376  ASSERT(!masm->isolate()->heap()->InNewSpace(*interceptor));
377  __ Move(kScratchRegister, interceptor);
378  __ push(kScratchRegister);
379  __ push(receiver);
380  __ push(holder);
381  __ push(FieldOperand(kScratchRegister, InterceptorInfo::kDataOffset));
382  __ PushAddress(ExternalReference::isolate_address());
383 }
384 
385 
386 static void CompileCallLoadPropertyWithInterceptor(
387  MacroAssembler* masm,
388  Register receiver,
389  Register holder,
390  Register name,
391  Handle<JSObject> holder_obj) {
392  PushInterceptorArguments(masm, receiver, holder, name, holder_obj);
393 
394  ExternalReference ref =
395  ExternalReference(IC_Utility(IC::kLoadPropertyWithInterceptorOnly),
396  masm->isolate());
397  __ Set(rax, 6);
398  __ LoadAddress(rbx, ref);
399 
400  CEntryStub stub(1);
401  __ CallStub(&stub);
402 }
403 
404 
405 // Number of pointers to be reserved on stack for fast API call.
406 static const int kFastApiCallArguments = 4;
407 
408 
409 // Reserves space for the extra arguments to API function in the
410 // caller's frame.
411 //
412 // These arguments are set by CheckPrototypes and GenerateFastApiCall.
413 static void ReserveSpaceForFastApiCall(MacroAssembler* masm, Register scratch) {
414  // ----------- S t a t e -------------
415  // -- rsp[0] : return address
416  // -- rsp[8] : last argument in the internal frame of the caller
417  // -----------------------------------
418  __ movq(scratch, Operand(rsp, 0));
419  __ subq(rsp, Immediate(kFastApiCallArguments * kPointerSize));
420  __ movq(Operand(rsp, 0), scratch);
421  __ Move(scratch, Smi::FromInt(0));
422  for (int i = 1; i <= kFastApiCallArguments; i++) {
423  __ movq(Operand(rsp, i * kPointerSize), scratch);
424  }
425 }
426 
427 
428 // Undoes the effects of ReserveSpaceForFastApiCall.
429 static void FreeSpaceForFastApiCall(MacroAssembler* masm, Register scratch) {
430  // ----------- S t a t e -------------
431  // -- rsp[0] : return address.
432  // -- rsp[8] : last fast api call extra argument.
433  // -- ...
434  // -- rsp[kFastApiCallArguments * 8] : first fast api call extra argument.
435  // -- rsp[kFastApiCallArguments * 8 + 8] : last argument in the internal
436  // frame.
437  // -----------------------------------
438  __ movq(scratch, Operand(rsp, 0));
439  __ movq(Operand(rsp, kFastApiCallArguments * kPointerSize), scratch);
440  __ addq(rsp, Immediate(kPointerSize * kFastApiCallArguments));
441 }
442 
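// Net effect of the Reserve/Free pair above, sketched as stack layouts
// (informal diagram, 8-byte slots, top of stack on the left):
//
//   before ReserveSpaceForFastApiCall: [ret] [caller args...]
//   after  ReserveSpaceForFastApiCall: [ret] [Smi 0] x4 [caller args...]
//   after  FreeSpaceForFastApiCall:    [ret] [caller args...]
//
// The four Smi-zero slots are later filled by CheckPrototypes and
// GenerateFastApiCall with the type-check object, function, call data and
// isolate pointer.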
443 
444 // Generates call to API function.
445 static void GenerateFastApiCall(MacroAssembler* masm,
446  const CallOptimization& optimization,
447  int argc) {
448  // ----------- S t a t e -------------
449  // -- rsp[0] : return address
450  // -- rsp[8] : object passing the type check
451  // (last fast api call extra argument,
452  // set by CheckPrototypes)
453  // -- rsp[16] : api function
454  // (first fast api call extra argument)
455  // -- rsp[24] : api call data
456  // -- rsp[32] : isolate
457  // -- rsp[40] : last argument
458  // -- ...
459  // -- rsp[(argc + 4) * 8] : first argument
460  // -- rsp[(argc + 5) * 8] : receiver
461  // -----------------------------------
462  // Get the function and setup the context.
463  Handle<JSFunction> function = optimization.constant_function();
464  __ LoadHeapObject(rdi, function);
465  __ movq(rsi, FieldOperand(rdi, JSFunction::kContextOffset));
466 
467  // Pass the additional arguments.
468  __ movq(Operand(rsp, 2 * kPointerSize), rdi);
469  Handle<CallHandlerInfo> api_call_info = optimization.api_call_info();
470  Handle<Object> call_data(api_call_info->data());
471  if (masm->isolate()->heap()->InNewSpace(*call_data)) {
472  __ Move(rcx, api_call_info);
473  __ movq(rbx, FieldOperand(rcx, CallHandlerInfo::kDataOffset));
474  __ movq(Operand(rsp, 3 * kPointerSize), rbx);
475  } else {
476  __ Move(Operand(rsp, 3 * kPointerSize), call_data);
477  }
478  __ movq(kScratchRegister, ExternalReference::isolate_address());
479  __ movq(Operand(rsp, 4 * kPointerSize), kScratchRegister);
480 
481  // Prepare arguments.
482  __ lea(rbx, Operand(rsp, 4 * kPointerSize));
483 
484 #if defined(__MINGW64__)
485  Register arguments_arg = rcx;
486 #elif defined(_WIN64)
487  // Win64 uses first register--rcx--for returned value.
488  Register arguments_arg = rdx;
489 #else
490  Register arguments_arg = rdi;
491 #endif
492 
493  // Allocate the v8::Arguments structure in the arguments' space since
494  // it's not controlled by GC.
495  const int kApiStackSpace = 4;
496 
497  __ PrepareCallApiFunction(kApiStackSpace);
498 
499  __ movq(StackSpaceOperand(0), rbx); // v8::Arguments::implicit_args_.
500  __ addq(rbx, Immediate(argc * kPointerSize));
501  __ movq(StackSpaceOperand(1), rbx); // v8::Arguments::values_.
502  __ Set(StackSpaceOperand(2), argc); // v8::Arguments::length_.
503  // v8::Arguments::is_construct_call_.
504  __ Set(StackSpaceOperand(3), 0);
505 
506  // v8::InvocationCallback's argument.
507  __ lea(arguments_arg, StackSpaceOperand(0));
508 
509  // Function address is a foreign pointer outside V8's heap.
510  Address function_address = v8::ToCData<Address>(api_call_info->callback());
511  __ CallApiFunctionAndReturn(function_address,
512  argc + kFastApiCallArguments + 1);
513 }
514 
515 
516 class CallInterceptorCompiler BASE_EMBEDDED {
517  public:
518  CallInterceptorCompiler(StubCompiler* stub_compiler,
519  const ParameterCount& arguments,
520  Register name,
521  Code::ExtraICState extra_ic_state)
522  : stub_compiler_(stub_compiler),
523  arguments_(arguments),
524  name_(name),
525  extra_ic_state_(extra_ic_state) {}
526 
527  void Compile(MacroAssembler* masm,
528  Handle<JSObject> object,
529  Handle<JSObject> holder,
530  Handle<String> name,
531  LookupResult* lookup,
532  Register receiver,
533  Register scratch1,
534  Register scratch2,
535  Register scratch3,
536  Label* miss) {
537  ASSERT(holder->HasNamedInterceptor());
538  ASSERT(!holder->GetNamedInterceptor()->getter()->IsUndefined());
539 
540  // Check that the receiver isn't a smi.
541  __ JumpIfSmi(receiver, miss);
542 
543  CallOptimization optimization(lookup);
544  if (optimization.is_constant_call()) {
545  CompileCacheable(masm, object, receiver, scratch1, scratch2, scratch3,
546  holder, lookup, name, optimization, miss);
547  } else {
548  CompileRegular(masm, object, receiver, scratch1, scratch2, scratch3,
549  name, holder, miss);
550  }
551  }
552 
553  private:
554  void CompileCacheable(MacroAssembler* masm,
555  Handle<JSObject> object,
556  Register receiver,
557  Register scratch1,
558  Register scratch2,
559  Register scratch3,
560  Handle<JSObject> interceptor_holder,
561  LookupResult* lookup,
562  Handle<String> name,
563  const CallOptimization& optimization,
564  Label* miss_label) {
565  ASSERT(optimization.is_constant_call());
566  ASSERT(!lookup->holder()->IsGlobalObject());
567 
568  int depth1 = kInvalidProtoDepth;
569  int depth2 = kInvalidProtoDepth;
570  bool can_do_fast_api_call = false;
571  if (optimization.is_simple_api_call() &&
572  !lookup->holder()->IsGlobalObject()) {
573  depth1 = optimization.GetPrototypeDepthOfExpectedType(
574  object, interceptor_holder);
575  if (depth1 == kInvalidProtoDepth) {
576  depth2 = optimization.GetPrototypeDepthOfExpectedType(
577  interceptor_holder, Handle<JSObject>(lookup->holder()));
578  }
579  can_do_fast_api_call =
580  depth1 != kInvalidProtoDepth || depth2 != kInvalidProtoDepth;
581  }
582 
583  Counters* counters = masm->isolate()->counters();
584  __ IncrementCounter(counters->call_const_interceptor(), 1);
585 
586  if (can_do_fast_api_call) {
587  __ IncrementCounter(counters->call_const_interceptor_fast_api(), 1);
588  ReserveSpaceForFastApiCall(masm, scratch1);
589  }
590 
591  // Check that the maps from receiver to interceptor's holder
592  // haven't changed and thus we can invoke interceptor.
593  Label miss_cleanup;
594  Label* miss = can_do_fast_api_call ? &miss_cleanup : miss_label;
595  Register holder =
596  stub_compiler_->CheckPrototypes(object, receiver, interceptor_holder,
597  scratch1, scratch2, scratch3,
598  name, depth1, miss);
599 
600  // Invoke an interceptor and if it provides a value,
601  // branch to |regular_invoke|.
602  Label regular_invoke;
603  LoadWithInterceptor(masm, receiver, holder, interceptor_holder,
604  &regular_invoke);
605 
606  // Interceptor returned nothing for this property. Try to use cached
607  // constant function.
608 
609  // Check that the maps from interceptor's holder to constant function's
610  // holder haven't changed and thus we can use cached constant function.
611  if (*interceptor_holder != lookup->holder()) {
612  stub_compiler_->CheckPrototypes(interceptor_holder, receiver,
613  Handle<JSObject>(lookup->holder()),
614  scratch1, scratch2, scratch3,
615  name, depth2, miss);
616  } else {
617  // CheckPrototypes has a side effect of fetching a 'holder'
618  // for API (object which is instanceof for the signature). It's
619  // safe to omit it here, as if present, it should be fetched
620  // by the previous CheckPrototypes.
621  ASSERT(depth2 == kInvalidProtoDepth);
622  }
623 
624  // Invoke function.
625  if (can_do_fast_api_call) {
626  GenerateFastApiCall(masm, optimization, arguments_.immediate());
627  } else {
628  CallKind call_kind = CallICBase::Contextual::decode(extra_ic_state_)
629  ? CALL_AS_FUNCTION
630  : CALL_AS_METHOD;
631  __ InvokeFunction(optimization.constant_function(), arguments_,
632  JUMP_FUNCTION, NullCallWrapper(), call_kind);
633  }
634 
635  // Deferred code for fast API call case---clean preallocated space.
636  if (can_do_fast_api_call) {
637  __ bind(&miss_cleanup);
638  FreeSpaceForFastApiCall(masm, scratch1);
639  __ jmp(miss_label);
640  }
641 
642  // Invoke a regular function.
643  __ bind(&regular_invoke);
644  if (can_do_fast_api_call) {
645  FreeSpaceForFastApiCall(masm, scratch1);
646  }
647  }
648 
649  void CompileRegular(MacroAssembler* masm,
650  Handle<JSObject> object,
651  Register receiver,
652  Register scratch1,
653  Register scratch2,
654  Register scratch3,
655  Handle<String> name,
656  Handle<JSObject> interceptor_holder,
657  Label* miss_label) {
658  Register holder =
659  stub_compiler_->CheckPrototypes(object, receiver, interceptor_holder,
660  scratch1, scratch2, scratch3,
661  name, miss_label);
662 
663  FrameScope scope(masm, StackFrame::INTERNAL);
664  // Save the name_ register across the call.
665  __ push(name_);
666 
667  PushInterceptorArguments(masm, receiver, holder, name_, interceptor_holder);
668 
669  __ CallExternalReference(
670  ExternalReference(IC_Utility(IC::kLoadPropertyWithInterceptorForCall),
671  masm->isolate()),
672  6);
673 
674  // Restore the name_ register.
675  __ pop(name_);
676 
677  // Leave the internal frame.
678  }
679 
680  void LoadWithInterceptor(MacroAssembler* masm,
681  Register receiver,
682  Register holder,
683  Handle<JSObject> holder_obj,
684  Label* interceptor_succeeded) {
685  {
686  FrameScope scope(masm, StackFrame::INTERNAL);
687  __ push(holder); // Save the holder.
688  __ push(name_); // Save the name.
689 
690  CompileCallLoadPropertyWithInterceptor(masm,
691  receiver,
692  holder,
693  name_,
694  holder_obj);
695 
696  __ pop(name_); // Restore the name.
697  __ pop(receiver); // Restore the holder.
698  // Leave the internal frame.
699  }
700 
701  __ CompareRoot(rax, Heap::kNoInterceptorResultSentinelRootIndex);
702  __ j(not_equal, interceptor_succeeded);
703  }
704 
705  StubCompiler* stub_compiler_;
706  const ParameterCount& arguments_;
707  Register name_;
708  Code::ExtraICState extra_ic_state_;
709 };
710 
711 
712 void StubCompiler::GenerateLoadMiss(MacroAssembler* masm, Code::Kind kind) {
713  ASSERT(kind == Code::LOAD_IC || kind == Code::KEYED_LOAD_IC);
714  Handle<Code> code = (kind == Code::LOAD_IC)
715  ? masm->isolate()->builtins()->LoadIC_Miss()
716  : masm->isolate()->builtins()->KeyedLoadIC_Miss();
717  __ Jump(code, RelocInfo::CODE_TARGET);
718 }
719 
720 
721 void StubCompiler::GenerateKeyedLoadMissForceGeneric(MacroAssembler* masm) {
722  Handle<Code> code =
723  masm->isolate()->builtins()->KeyedLoadIC_MissForceGeneric();
724  __ Jump(code, RelocInfo::CODE_TARGET);
725 }
726 
727 
728 // Both name_reg and receiver_reg are preserved on jumps to miss_label,
729 // but may be destroyed if store is successful.
730 void StubCompiler::GenerateStoreField(MacroAssembler* masm,
731  Handle<JSObject> object,
732  int index,
733  Handle<Map> transition,
734  Handle<String> name,
735  Register receiver_reg,
736  Register name_reg,
737  Register scratch1,
738  Register scratch2,
739  Label* miss_label) {
740  LookupResult lookup(masm->isolate());
741  object->Lookup(*name, &lookup);
742  if (lookup.IsFound() && (lookup.IsReadOnly() || !lookup.IsCacheable())) {
743  // In sloppy mode, we could just return the value and be done. However, we
744  // might be in strict mode, where we have to throw. Since we cannot tell,
745  // go into slow case unconditionally.
746  __ jmp(miss_label);
747  return;
748  }
749 
750  // Check that the map of the object hasn't changed.
751  CompareMapMode mode = transition.is_null() ? ALLOW_ELEMENT_TRANSITION_MAPS
752  : REQUIRE_EXACT_MAP;
753  __ CheckMap(receiver_reg, Handle<Map>(object->map()),
754  miss_label, DO_SMI_CHECK, mode);
755 
756  // Perform global security token check if needed.
757  if (object->IsJSGlobalProxy()) {
758  __ CheckAccessGlobalProxy(receiver_reg, scratch1, miss_label);
759  }
760 
761  // Check that we are allowed to write this.
762  if (!transition.is_null() && object->GetPrototype()->IsJSObject()) {
763  JSObject* holder;
764  if (lookup.IsFound()) {
765  holder = lookup.holder();
766  } else {
767  // Find the top object.
768  holder = *object;
769  do {
770  holder = JSObject::cast(holder->GetPrototype());
771  } while (holder->GetPrototype()->IsJSObject());
772  }
773  // We need an extra register, push
774  __ push(name_reg);
775  Label miss_pop, done_check;
776  CheckPrototypes(object, receiver_reg, Handle<JSObject>(holder), name_reg,
777  scratch1, scratch2, name, &miss_pop);
778  __ jmp(&done_check);
779  __ bind(&miss_pop);
780  __ pop(name_reg);
781  __ jmp(miss_label);
782  __ bind(&done_check);
783  __ pop(name_reg);
784  }
785 
786  // Stub never generated for non-global objects that require access
787  // checks.
788  ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());
789 
790  // Perform map transition for the receiver if necessary.
791  if (!transition.is_null() && (object->map()->unused_property_fields() == 0)) {
792  // The properties must be extended before we can store the value.
793  // We jump to a runtime call that extends the properties array.
794  __ pop(scratch1); // Return address.
795  __ push(receiver_reg);
796  __ Push(transition);
797  __ push(rax);
798  __ push(scratch1);
799  __ TailCallExternalReference(
800  ExternalReference(IC_Utility(IC::kSharedStoreIC_ExtendStorage),
801  masm->isolate()),
802  3,
803  1);
804  return;
805  }
806 
807  if (!transition.is_null()) {
808  // Update the map of the object.
809  __ Move(scratch1, transition);
810  __ movq(FieldOperand(receiver_reg, HeapObject::kMapOffset), scratch1);
811 
812  // Update the write barrier for the map field and pass the now unused
813  // name_reg as scratch register.
814  __ RecordWriteField(receiver_reg,
815  HeapObject::kMapOffset,
816  scratch1,
817  name_reg,
818  kDontSaveFPRegs,
819  OMIT_REMEMBERED_SET,
820  OMIT_SMI_CHECK);
821  }
822 
823  // Adjust for the number of properties stored in the object. Even in the
824  // face of a transition we can use the old map here because the size of the
825  // object and the number of in-object properties is not going to change.
826  index -= object->map()->inobject_properties();
827 
828  if (index < 0) {
829  // Set the property straight into the object.
830  int offset = object->map()->instance_size() + (index * kPointerSize);
831  __ movq(FieldOperand(receiver_reg, offset), rax);
832 
833  // Update the write barrier for the array address.
834  // Pass the value being stored in the now unused name_reg.
835  __ movq(name_reg, rax);
836  __ RecordWriteField(
837  receiver_reg, offset, name_reg, scratch1, kDontSaveFPRegs);
838  } else {
839  // Write to the properties array.
840  int offset = index * kPointerSize + FixedArray::kHeaderSize;
841  // Get the properties array (optimistically).
842  __ movq(scratch1, FieldOperand(receiver_reg, JSObject::kPropertiesOffset));
843  __ movq(FieldOperand(scratch1, offset), rax);
844 
845  // Update the write barrier for the array address.
846  // Pass the value being stored in the now unused name_reg.
847  __ movq(name_reg, rax);
848  __ RecordWriteField(
849  scratch1, offset, name_reg, receiver_reg, kDontSaveFPRegs);
850  }
851 
852  // Return the value (register rax).
853  __ ret(0);
854 }
855 
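// Condensed outline (informal sketch, not V8 API) of the store paths above.
// The value arrives in rax and is copied into the now-free name_reg so the
// write barrier has a register it may clobber:
//
//   if (needs transition) { receiver->map = *transition; barrier on map slot; }
//   slot = in-object or properties-array slot (same arithmetic as the fast
//          property load earlier in this file);
//   *slot = value;
//   record write for slot;   // skipped on the extend-storage runtime path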
856 
857 // Generate code to check that a global property cell is empty. Create
858 // the property cell at compilation time if no cell exists for the
859 // property.
860 static void GenerateCheckPropertyCell(MacroAssembler* masm,
861  Handle<GlobalObject> global,
862  Handle<String> name,
863  Register scratch,
864  Label* miss) {
865  Handle<JSGlobalPropertyCell> cell =
866  GlobalObject::EnsurePropertyCell(global, name);
867  ASSERT(cell->value()->IsTheHole());
868  __ Move(scratch, cell);
869  __ Cmp(FieldOperand(scratch, JSGlobalPropertyCell::kValueOffset),
870  masm->isolate()->factory()->the_hole_value());
871  __ j(not_equal, miss);
872 }
873 
874 
875 // Calls GenerateCheckPropertyCell for each global object in the prototype chain
876 // from object to (but not including) holder.
877 static void GenerateCheckPropertyCells(MacroAssembler* masm,
878  Handle<JSObject> object,
879  Handle<JSObject> holder,
880  Handle<String> name,
881  Register scratch,
882  Label* miss) {
883  Handle<JSObject> current = object;
884  while (!current.is_identical_to(holder)) {
885  if (current->IsGlobalObject()) {
886  GenerateCheckPropertyCell(masm,
887  Handle<GlobalObject>::cast(current),
888  name,
889  scratch,
890  miss);
891  }
892  current = Handle<JSObject>(JSObject::cast(current->GetPrototype()));
893  }
894 }
895 
896 #undef __
897 #define __ ACCESS_MASM((masm()))
898 
899 
900 Register StubCompiler::CheckPrototypes(Handle<JSObject> object,
901  Register object_reg,
902  Handle<JSObject> holder,
903  Register holder_reg,
904  Register scratch1,
905  Register scratch2,
906  Handle<String> name,
907  int save_at_depth,
908  Label* miss) {
909  // Make sure there's no overlap between holder and object registers.
910  ASSERT(!scratch1.is(object_reg) && !scratch1.is(holder_reg));
911  ASSERT(!scratch2.is(object_reg) && !scratch2.is(holder_reg)
912  && !scratch2.is(scratch1));
913 
914  // Keep track of the current object in register reg. On the first
915  // iteration, reg is an alias for object_reg, on later iterations,
916  // it is an alias for holder_reg.
917  Register reg = object_reg;
918  int depth = 0;
919 
920  if (save_at_depth == depth) {
921  __ movq(Operand(rsp, kPointerSize), object_reg);
922  }
923 
924  // Check the maps in the prototype chain.
925  // Traverse the prototype chain from the object and do map checks.
926  Handle<JSObject> current = object;
927  while (!current.is_identical_to(holder)) {
928  ++depth;
929 
930  // Only global objects and objects that do not require access
931  // checks are allowed in stubs.
932  ASSERT(current->IsJSGlobalProxy() || !current->IsAccessCheckNeeded());
933 
934  Handle<JSObject> prototype(JSObject::cast(current->GetPrototype()));
935  if (!current->HasFastProperties() &&
936  !current->IsJSGlobalObject() &&
937  !current->IsJSGlobalProxy()) {
938  if (!name->IsSymbol()) {
939  name = factory()->LookupSymbol(name);
940  }
941  ASSERT(current->property_dictionary()->FindEntry(*name) ==
942  StringDictionary::kNotFound);
943 
944  GenerateDictionaryNegativeLookup(masm(), miss, reg, name,
945  scratch1, scratch2);
946 
947  __ movq(scratch1, FieldOperand(reg, HeapObject::kMapOffset));
948  reg = holder_reg; // From now on the object will be in holder_reg.
949  __ movq(reg, FieldOperand(scratch1, Map::kPrototypeOffset));
950  } else {
951  bool in_new_space = heap()->InNewSpace(*prototype);
952  Handle<Map> current_map(current->map());
953  if (in_new_space) {
954  // Save the map in scratch1 for later.
955  __ movq(scratch1, FieldOperand(reg, HeapObject::kMapOffset));
956  }
957  __ CheckMap(reg, Handle<Map>(current_map),
958  miss, DONT_DO_SMI_CHECK, ALLOW_ELEMENT_TRANSITION_MAPS);
959 
960  // Check access rights to the global object. This has to happen after
961  // the map check so that we know that the object is actually a global
962  // object.
963  if (current->IsJSGlobalProxy()) {
964  __ CheckAccessGlobalProxy(reg, scratch2, miss);
965  }
966  reg = holder_reg; // From now on the object will be in holder_reg.
967 
968  if (in_new_space) {
969  // The prototype is in new space; we cannot store a reference to it
970  // in the code. Load it from the map.
971  __ movq(reg, FieldOperand(scratch1, Map::kPrototypeOffset));
972  } else {
973  // The prototype is in old space; load it directly.
974  __ Move(reg, prototype);
975  }
976  }
977 
978  if (save_at_depth == depth) {
979  __ movq(Operand(rsp, kPointerSize), reg);
980  }
981 
982  // Go to the next object in the prototype chain.
983  current = prototype;
984  }
985  ASSERT(current.is_identical_to(holder));
986 
987  // Log the check depth.
988  LOG(isolate(), IntEvent("check-maps-depth", depth + 1));
989 
990  // Check the holder map.
991  __ CheckMap(reg, Handle<Map>(holder->map()),
992  miss, DONT_DO_SMI_CHECK, ALLOW_ELEMENT_TRANSITION_MAPS);
993 
994  // Perform security check for access to the global object.
995  ASSERT(current->IsJSGlobalProxy() || !current->IsAccessCheckNeeded());
996  if (current->IsJSGlobalProxy()) {
997  __ CheckAccessGlobalProxy(reg, scratch1, miss);
998  }
999 
1000  // If we've skipped any global objects, it's not enough to verify that
1001  // their maps haven't changed. We also need to check that the property
1002  // cell for the property is still empty.
1003  GenerateCheckPropertyCells(masm(), object, holder, name, scratch1, miss);
1004 
1005  // Return the register containing the holder.
1006  return reg;
1007 }
1008 
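// Condensed outline (informal sketch, not V8 API) of the walk CheckPrototypes
// emits for a chain object -> ... -> holder:
//
//   Register reg = object_reg;
//   for (each object from *object up to, but not including, *holder) {
//     slow-properties, non-global object: negative dictionary lookup of name;
//     otherwise: map check (plus access check for global proxies), then load
//                the prototype either from the map or as a constant;
//     reg = holder_reg;   // the chain is tracked in holder_reg from here on
//   }
//   map-check the holder, re-check any skipped global property cells, return reg;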
1009 
1010 void StubCompiler::GenerateLoadField(Handle<JSObject> object,
1011  Handle<JSObject> holder,
1012  Register receiver,
1013  Register scratch1,
1014  Register scratch2,
1015  Register scratch3,
1016  int index,
1017  Handle<String> name,
1018  Label* miss) {
1019  // Check that the receiver isn't a smi.
1020  __ JumpIfSmi(receiver, miss);
1021 
1022  // Check the prototype chain.
1023  Register reg = CheckPrototypes(
1024  object, receiver, holder, scratch1, scratch2, scratch3, name, miss);
1025 
1026  // Get the value from the properties.
1027  GenerateFastPropertyLoad(masm(), rax, reg, holder, index);
1028  __ ret(0);
1029 }
1030 
1031 
1032 void StubCompiler::GenerateLoadCallback(Handle<JSObject> object,
1033  Handle<JSObject> holder,
1034  Register receiver,
1035  Register name_reg,
1036  Register scratch1,
1037  Register scratch2,
1038  Register scratch3,
1039  Handle<AccessorInfo> callback,
1040  Handle<String> name,
1041  Label* miss) {
1042  // Check that the receiver isn't a smi.
1043  __ JumpIfSmi(receiver, miss);
1044 
1045  // Check that the maps haven't changed.
1046  Register reg = CheckPrototypes(object, receiver, holder, scratch1,
1047  scratch2, scratch3, name, miss);
1048 
1049  // Insert additional parameters into the stack frame above return address.
1050  ASSERT(!scratch2.is(reg));
1051  __ pop(scratch2); // Get return address to place it below.
1052 
1053  __ push(receiver); // receiver
1054  __ push(reg); // holder
1055  if (heap()->InNewSpace(callback->data())) {
1056  __ Move(scratch1, callback);
1057  __ push(FieldOperand(scratch1, AccessorInfo::kDataOffset)); // data
1058  } else {
1059  __ Push(Handle<Object>(callback->data()));
1060  }
1061  __ PushAddress(ExternalReference::isolate_address()); // isolate
1062  __ push(name_reg); // name
1063  // Save a pointer to where we pushed the arguments pointer.
1064  // This will be passed as the const AccessorInfo& to the C++ callback.
1065 
1066 #if defined(__MINGW64__)
1067  Register accessor_info_arg = rdx;
1068  Register name_arg = rcx;
1069 #elif defined(_WIN64)
1070  // Win64 uses first register--rcx--for returned value.
1071  Register accessor_info_arg = r8;
1072  Register name_arg = rdx;
1073 #else
1074  Register accessor_info_arg = rsi;
1075  Register name_arg = rdi;
1076 #endif
1077 
1078  ASSERT(!name_arg.is(scratch2));
1079  __ movq(name_arg, rsp);
1080  __ push(scratch2); // Restore return address.
1081 
1082  // 4 elements array for v8::Arguments::values_ and handler for name.
1083  const int kStackSpace = 5;
1084 
1085  // Allocate v8::AccessorInfo in non-GCed stack space.
1086  const int kArgStackSpace = 1;
1087 
1088  __ PrepareCallApiFunction(kArgStackSpace);
1089  __ lea(rax, Operand(name_arg, 4 * kPointerSize));
1090 
1091  // v8::AccessorInfo::args_.
1092  __ movq(StackSpaceOperand(0), rax);
1093 
1094  // The context register (rsi) has been saved in PrepareCallApiFunction and
1095  // could be used to pass arguments.
1096  __ lea(accessor_info_arg, StackSpaceOperand(0));
1097 
1098  Address getter_address = v8::ToCData<Address>(callback->getter());
1099  __ CallApiFunctionAndReturn(getter_address, kStackSpace);
1100 }
1101 
1102 
1103 void StubCompiler::GenerateLoadConstant(Handle<JSObject> object,
1104  Handle<JSObject> holder,
1105  Register receiver,
1106  Register scratch1,
1107  Register scratch2,
1108  Register scratch3,
1109  Handle<JSFunction> value,
1110  Handle<String> name,
1111  Label* miss) {
1112  // Check that the receiver isn't a smi.
1113  __ JumpIfSmi(receiver, miss);
1114 
1115  // Check that the maps haven't changed.
1116  CheckPrototypes(
1117  object, receiver, holder, scratch1, scratch2, scratch3, name, miss);
1118 
1119  // Return the constant value.
1120  __ LoadHeapObject(rax, value);
1121  __ ret(0);
1122 }
1123 
1124 
1125 void StubCompiler::GenerateLoadInterceptor(Handle<JSObject> object,
1126  Handle<JSObject> interceptor_holder,
1127  LookupResult* lookup,
1128  Register receiver,
1129  Register name_reg,
1130  Register scratch1,
1131  Register scratch2,
1132  Register scratch3,
1133  Handle<String> name,
1134  Label* miss) {
1135  ASSERT(interceptor_holder->HasNamedInterceptor());
1136  ASSERT(!interceptor_holder->GetNamedInterceptor()->getter()->IsUndefined());
1137 
1138  // Check that the receiver isn't a smi.
1139  __ JumpIfSmi(receiver, miss);
1140 
1141  // So far the most popular follow ups for interceptor loads are FIELD
1142  // and CALLBACKS, so inline only them, other cases may be added
1143  // later.
1144  bool compile_followup_inline = false;
1145  if (lookup->IsFound() && lookup->IsCacheable()) {
1146  if (lookup->type() == FIELD) {
1147  compile_followup_inline = true;
1148  } else if (lookup->type() == CALLBACKS &&
1149  lookup->GetCallbackObject()->IsAccessorInfo()) {
1150  AccessorInfo* callback = AccessorInfo::cast(lookup->GetCallbackObject());
1151  compile_followup_inline = callback->getter() != NULL &&
1152  callback->IsCompatibleReceiver(*object);
1153  }
1154  }
1155 
1156  if (compile_followup_inline) {
1157  // Compile the interceptor call, followed by inline code to load the
1158  // property from further up the prototype chain if the call fails.
1159  // Check that the maps haven't changed.
1160  Register holder_reg = CheckPrototypes(object, receiver, interceptor_holder,
1161  scratch1, scratch2, scratch3,
1162  name, miss);
1163  ASSERT(holder_reg.is(receiver) || holder_reg.is(scratch1));
1164 
1165  // Preserve the receiver register explicitly whenever it is different from
1166  // the holder and it is needed should the interceptor return without any
1167  // result. The CALLBACKS case needs the receiver to be passed into C++ code,
1168  // the FIELD case might cause a miss during the prototype check.
1169  bool must_perform_prototype_check = *interceptor_holder != lookup->holder();
1170  bool must_preserve_receiver_reg = !receiver.is(holder_reg) &&
1171  (lookup->type() == CALLBACKS || must_perform_prototype_check);
1172 
1173  // Save necessary data before invoking an interceptor.
1174  // Requires a frame to make GC aware of pushed pointers.
1175  {
1176  FrameScope frame_scope(masm(), StackFrame::INTERNAL);
1177 
1178  if (must_preserve_receiver_reg) {
1179  __ push(receiver);
1180  }
1181  __ push(holder_reg);
1182  __ push(name_reg);
1183 
1184  // Invoke an interceptor. Note: map checks from receiver to
1185  // interceptor's holder has been compiled before (see a caller
1186  // of this method.)
1187  CompileCallLoadPropertyWithInterceptor(masm(),
1188  receiver,
1189  holder_reg,
1190  name_reg,
1191  interceptor_holder);
1192 
1193  // Check if interceptor provided a value for property. If it's
1194  // the case, return immediately.
1195  Label interceptor_failed;
1196  __ CompareRoot(rax, Heap::kNoInterceptorResultSentinelRootIndex);
1197  __ j(equal, &interceptor_failed);
1198  frame_scope.GenerateLeaveFrame();
1199  __ ret(0);
1200 
1201  __ bind(&interceptor_failed);
1202  __ pop(name_reg);
1203  __ pop(holder_reg);
1204  if (must_preserve_receiver_reg) {
1205  __ pop(receiver);
1206  }
1207 
1208  // Leave the internal frame.
1209  }
1210 
1211  // Check that the maps from interceptor's holder to lookup's holder
1212  // haven't changed. And load lookup's holder into |holder| register.
1213  if (must_perform_prototype_check) {
1214  holder_reg = CheckPrototypes(interceptor_holder,
1215  holder_reg,
1216  Handle<JSObject>(lookup->holder()),
1217  scratch1,
1218  scratch2,
1219  scratch3,
1220  name,
1221  miss);
1222  }
1223 
1224  if (lookup->type() == FIELD) {
1225  // We found FIELD property in prototype chain of interceptor's holder.
1226  // Retrieve a field from field's holder.
1227  GenerateFastPropertyLoad(masm(), rax, holder_reg,
1228  Handle<JSObject>(lookup->holder()),
1229  lookup->GetFieldIndex());
1230  __ ret(0);
1231  } else {
1232  // We found CALLBACKS property in prototype chain of interceptor's
1233  // holder.
1234  ASSERT(lookup->type() == CALLBACKS);
1235  Handle<AccessorInfo> callback(
1236  AccessorInfo::cast(lookup->GetCallbackObject()));
1237  ASSERT(callback->getter() != NULL);
1238 
1239  // Tail call to runtime.
1240  // Important invariant in CALLBACKS case: the code above must be
1241  // structured to never clobber |receiver| register.
1242  __ pop(scratch2); // return address
1243  __ push(receiver);
1244  __ push(holder_reg);
1245  __ Move(holder_reg, callback);
1246  __ push(FieldOperand(holder_reg, AccessorInfo::kDataOffset));
1247  __ PushAddress(ExternalReference::isolate_address());
1248  __ push(holder_reg);
1249  __ push(name_reg);
1250  __ push(scratch2); // restore return address
1251 
1252  ExternalReference ref =
1253  ExternalReference(IC_Utility(IC::kLoadCallbackProperty),
1254  isolate());
1255  __ TailCallExternalReference(ref, 6, 1);
1256  }
1257  } else { // !compile_followup_inline
1258  // Call the runtime system to load the interceptor.
1259  // Check that the maps haven't changed.
1260  Register holder_reg = CheckPrototypes(object, receiver, interceptor_holder,
1261  scratch1, scratch2, scratch3,
1262  name, miss);
1263  __ pop(scratch2); // save old return address
1264  PushInterceptorArguments(masm(), receiver, holder_reg,
1265  name_reg, interceptor_holder);
1266  __ push(scratch2); // restore old return address
1267 
1268  ExternalReference ref = ExternalReference(
1269  IC_Utility(IC::kLoadPropertyWithInterceptorForLoad), isolate());
1270  __ TailCallExternalReference(ref, 6, 1);
1271  }
1272 }
1273 
1274 
1275 void CallStubCompiler::GenerateNameCheck(Handle<String> name, Label* miss) {
1276  if (kind_ == Code::KEYED_CALL_IC) {
1277  __ Cmp(rcx, name);
1278  __ j(not_equal, miss);
1279  }
1280 }
1281 
1282 
1283 void CallStubCompiler::GenerateGlobalReceiverCheck(Handle<JSObject> object,
1284  Handle<JSObject> holder,
1285  Handle<String> name,
1286  Label* miss) {
1287  ASSERT(holder->IsGlobalObject());
1288 
1289  // Get the number of arguments.
1290  const int argc = arguments().immediate();
1291 
1292  // Get the receiver from the stack.
1293  __ movq(rdx, Operand(rsp, (argc + 1) * kPointerSize));
1294 
1295 
1296  // Check that the maps haven't changed.
1297  __ JumpIfSmi(rdx, miss);
1298  CheckPrototypes(object, rdx, holder, rbx, rax, rdi, name, miss);
1299 }
1300 
1301 
1302 void CallStubCompiler::GenerateLoadFunctionFromCell(
1303  Handle<JSGlobalPropertyCell> cell,
1304  Handle<JSFunction> function,
1305  Label* miss) {
1306  // Get the value from the cell.
1307  __ Move(rdi, cell);
1308  __ movq(rdi, FieldOperand(rdi, JSGlobalPropertyCell::kValueOffset));
1309 
1310  // Check that the cell contains the same function.
1311  if (heap()->InNewSpace(*function)) {
1312  // We can't embed a pointer to a function in new space so we have
1313  // to verify that the shared function info is unchanged. This has
1314  // the nice side effect that multiple closures based on the same
1315  // function can all use this call IC. Before we load through the
1316  // function, we have to verify that it still is a function.
1317  __ JumpIfSmi(rdi, miss);
1318  __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rax);
1319  __ j(not_equal, miss);
1320 
1321  // Check the shared function info. Make sure it hasn't changed.
1322  __ Move(rax, Handle<SharedFunctionInfo>(function->shared()));
1323  __ cmpq(FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset), rax);
1324  } else {
1325  __ Cmp(rdi, function);
1326  }
1327  __ j(not_equal, miss);
1328 }
1329 
1330 
1331 void CallStubCompiler::GenerateMissBranch() {
1332  Handle<Code> code =
1333  isolate()->stub_cache()->ComputeCallMiss(arguments().immediate(),
1334  kind_,
1335  extra_state_);
1336  __ Jump(code, RelocInfo::CODE_TARGET);
1337 }
1338 
1339 
1340 Handle<Code> CallStubCompiler::CompileCallField(Handle<JSObject> object,
1341  Handle<JSObject> holder,
1342  int index,
1343  Handle<String> name) {
1344  // ----------- S t a t e -------------
1345  // rcx : function name
1346  // rsp[0] : return address
1347  // rsp[8] : argument argc
1348  // rsp[16] : argument argc - 1
1349  // ...
1350  // rsp[argc * 8] : argument 1
1351  // rsp[(argc + 1) * 8] : argument 0 = receiver
1352  // -----------------------------------
1353  Label miss;
1354 
1355  GenerateNameCheck(name, &miss);
1356 
1357  // Get the receiver from the stack.
1358  const int argc = arguments().immediate();
1359  __ movq(rdx, Operand(rsp, (argc + 1) * kPointerSize));
1360 
1361  // Check that the receiver isn't a smi.
1362  __ JumpIfSmi(rdx, &miss);
1363 
1364  // Do the right check and compute the holder register.
1365  Register reg = CheckPrototypes(object, rdx, holder, rbx, rax, rdi,
1366  name, &miss);
1367 
1368  GenerateFastPropertyLoad(masm(), rdi, reg, holder, index);
1369 
1370  // Check that the function really is a function.
1371  __ JumpIfSmi(rdi, &miss);
1372  __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rbx);
1373  __ j(not_equal, &miss);
1374 
1375  // Patch the receiver on the stack with the global proxy if
1376  // necessary.
1377  if (object->IsGlobalObject()) {
1378  __ movq(rdx, FieldOperand(rdx, GlobalObject::kGlobalReceiverOffset));
1379  __ movq(Operand(rsp, (argc + 1) * kPointerSize), rdx);
1380  }
1381 
1382  // Invoke the function.
1383  CallKind call_kind = CallICBase::Contextual::decode(extra_state_)
1384  ? CALL_AS_FUNCTION
1385  : CALL_AS_METHOD;
1386  __ InvokeFunction(rdi, arguments(), JUMP_FUNCTION,
1387  NullCallWrapper(), call_kind);
1388 
1389  // Handle call cache miss.
1390  __ bind(&miss);
1391  GenerateMissBranch();
1392 
1393  // Return the generated code.
1394  return GetCode(FIELD, name);
1395 }
1396 
1397 
1398 Handle<Code> CallStubCompiler::CompileArrayPushCall(
1399  Handle<Object> object,
1400  Handle<JSObject> holder,
1401  Handle<JSGlobalPropertyCell> cell,
1402  Handle<JSFunction> function,
1403  Handle<String> name) {
1404  // ----------- S t a t e -------------
1405  // -- rcx : name
1406  // -- rsp[0] : return address
1407  // -- rsp[(argc - n) * 8] : arg[n] (zero-based)
1408  // -- ...
1409  // -- rsp[(argc + 1) * 8] : receiver
1410  // -----------------------------------
1411 
1412  // If object is not an array, bail out to regular call.
1413  if (!object->IsJSArray() || !cell.is_null()) return Handle<Code>::null();
1414 
1415  Label miss;
1416  GenerateNameCheck(name, &miss);
1417 
1418  // Get the receiver from the stack.
1419  const int argc = arguments().immediate();
1420  __ movq(rdx, Operand(rsp, (argc + 1) * kPointerSize));
1421 
1422  // Check that the receiver isn't a smi.
1423  __ JumpIfSmi(rdx, &miss);
1424 
1425  CheckPrototypes(Handle<JSObject>::cast(object), rdx, holder, rbx, rax, rdi,
1426  name, &miss);
1427 
1428  if (argc == 0) {
1429  // Noop, return the length.
1430  __ movq(rax, FieldOperand(rdx, JSArray::kLengthOffset));
1431  __ ret((argc + 1) * kPointerSize);
1432  } else {
1433  Label call_builtin;
1434 
1435  if (argc == 1) { // Otherwise fall through to call builtin.
1436  Label attempt_to_grow_elements, with_write_barrier;
1437 
1438  // Get the elements array of the object.
1439  __ movq(rdi, FieldOperand(rdx, JSArray::kElementsOffset));
1440 
1441  // Check that the elements are in fast mode and writable.
1442  __ Cmp(FieldOperand(rdi, HeapObject::kMapOffset),
1443  factory()->fixed_array_map());
1444  __ j(not_equal, &call_builtin);
1445 
1446  // Get the array's length into rax and calculate new length.
1447  __ SmiToInteger32(rax, FieldOperand(rdx, JSArray::kLengthOffset));
1448  STATIC_ASSERT(FixedArray::kMaxLength < Smi::kMaxValue);
1449  __ addl(rax, Immediate(argc));
1450 
1451  // Get the elements' length into rcx.
1452  __ SmiToInteger32(rcx, FieldOperand(rdi, FixedArray::kLengthOffset));
1453 
1454  // Check if we could survive without allocation.
1455  __ cmpl(rax, rcx);
1456  __ j(greater, &attempt_to_grow_elements);
1457 
1458  // Check if value is a smi.
1459  __ movq(rcx, Operand(rsp, argc * kPointerSize));
1460  __ JumpIfNotSmi(rcx, &with_write_barrier);
1461 
1462  // Save new length.
1463  __ Integer32ToSmiField(FieldOperand(rdx, JSArray::kLengthOffset), rax);
1464 
1465  // Store the value.
1466  __ movq(FieldOperand(rdi,
1467  rax,
1468  times_pointer_size,
1469  FixedArray::kHeaderSize - argc * kPointerSize),
1470  rcx);
1471 
1472  __ Integer32ToSmi(rax, rax); // Return new length as smi.
1473  __ ret((argc + 1) * kPointerSize);
1474 
1475  __ bind(&with_write_barrier);
1476 
1476 
1477  __ movq(rbx, FieldOperand(rdx, HeapObject::kMapOffset));
1478 
1479  if (FLAG_smi_only_arrays && !FLAG_trace_elements_transitions) {
1480  Label fast_object, not_fast_object;
1481  __ CheckFastObjectElements(rbx, &not_fast_object, Label::kNear);
1482  __ jmp(&fast_object);
1483  // In case of fast smi-only, convert to fast object, otherwise bail out.
1484  __ bind(&not_fast_object);
1485  __ CheckFastSmiElements(rbx, &call_builtin);
1486  // rdx: receiver
1487  // rbx: map
1488 
1489  Label try_holey_map;
1490  __ LoadTransitionedArrayMapConditional(FAST_SMI_ELEMENTS,
1491  FAST_ELEMENTS,
1492  rbx,
1493  rdi,
1494  &try_holey_map);
1495 
1496  ElementsTransitionGenerator::
1497  GenerateMapChangeElementsTransition(masm());
1498  // Restore rdi.
1499  __ movq(rdi, FieldOperand(rdx, JSArray::kElementsOffset));
1500  __ jmp(&fast_object);
1501 
1502  __ bind(&try_holey_map);
1503  __ LoadTransitionedArrayMapConditional(FAST_HOLEY_SMI_ELEMENTS,
1504  FAST_HOLEY_ELEMENTS,
1505  rbx,
1506  rdi,
1507  &call_builtin);
1508  ElementsTransitionGenerator::
1509  GenerateMapChangeElementsTransition(masm());
1510  __ movq(rdi, FieldOperand(rdx, JSArray::kElementsOffset));
1511  __ bind(&fast_object);
1512  } else {
1513  __ CheckFastObjectElements(rbx, &call_builtin);
1514  }
1515 
1516  // Save new length.
1517  __ Integer32ToSmiField(FieldOperand(rdx, JSArray::kLengthOffset), rax);
1518 
1519  // Store the value.
1520  __ lea(rdx, FieldOperand(rdi,
1521  rax, times_pointer_size,
1522  FixedArray::kHeaderSize - argc * kPointerSize));
1523  __ movq(Operand(rdx, 0), rcx);
1524 
1525  __ RecordWrite(rdi, rdx, rcx, kDontSaveFPRegs, EMIT_REMEMBERED_SET,
1526  OMIT_SMI_CHECK);
1527 
1528  __ Integer32ToSmi(rax, rax); // Return new length as smi.
1529  __ ret((argc + 1) * kPointerSize);
1530 
1531  __ bind(&attempt_to_grow_elements);
1532  if (!FLAG_inline_new) {
1533  __ jmp(&call_builtin);
1534  }
1535 
1536  __ movq(rbx, Operand(rsp, argc * kPointerSize));
1537  // Growing elements that are SMI-only requires special handling in case
1538  // the new element is non-Smi. For now, delegate to the builtin.
1539  Label no_fast_elements_check;
1540  __ JumpIfSmi(rbx, &no_fast_elements_check);
1541  __ movq(rcx, FieldOperand(rdx, HeapObject::kMapOffset));
1542  __ CheckFastObjectElements(rcx, &call_builtin, Label::kFar);
1543  __ bind(&no_fast_elements_check);
1544 
1545  ExternalReference new_space_allocation_top =
1546  ExternalReference::new_space_allocation_top_address(isolate());
1547  ExternalReference new_space_allocation_limit =
1548  ExternalReference::new_space_allocation_limit_address(isolate());
1549 
1550  const int kAllocationDelta = 4;
1551  // Load top.
1552  __ Load(rcx, new_space_allocation_top);
1553 
1554  // Check if it's the end of elements.
1555  __ lea(rdx, FieldOperand(rdi,
1556  rax, times_pointer_size,
1557  FixedArray::kHeaderSize - argc * kPointerSize));
1558  __ cmpq(rdx, rcx);
1559  __ j(not_equal, &call_builtin);
1560  __ addq(rcx, Immediate(kAllocationDelta * kPointerSize));
1561  Operand limit_operand =
1562  masm()->ExternalOperand(new_space_allocation_limit);
1563  __ cmpq(rcx, limit_operand);
1564  __ j(above, &call_builtin);
1565 
1566  // We fit and could grow elements.
1567  __ Store(new_space_allocation_top, rcx);
1568 
1569  // Push the argument...
1570  __ movq(Operand(rdx, 0), rbx);
1571  // ... and fill the rest with holes.
1572  __ LoadRoot(kScratchRegister, Heap::kTheHoleValueRootIndex);
1573  for (int i = 1; i < kAllocationDelta; i++) {
1574  __ movq(Operand(rdx, i * kPointerSize), kScratchRegister);
1575  }
1576 
1577  // We know the elements array is in new space so we don't need the
1578  // remembered set, but we just pushed a value onto it so we may have to
1579  // tell the incremental marker to rescan the object that we just grew. We
1580  // don't need to worry about the holes because they are in old space and
1581  // already marked black.
1582  __ RecordWrite(rdi, rdx, rbx, kDontSaveFPRegs, OMIT_REMEMBERED_SET);
1583 
1584  // Restore receiver to rdx as finish sequence assumes it's here.
1585  __ movq(rdx, Operand(rsp, (argc + 1) * kPointerSize));
1586 
1587  // Increment element's and array's sizes.
1588  __ SmiAddConstant(FieldOperand(rdi, FixedArray::kLengthOffset),
1589  Smi::FromInt(kAllocationDelta));
1590 
1591  // Make new length a smi before returning it.
1592  __ Integer32ToSmi(rax, rax);
1593  __ movq(FieldOperand(rdx, JSArray::kLengthOffset), rax);
1594 
1595  __ ret((argc + 1) * kPointerSize);
1596  }
1597 
1598  __ bind(&call_builtin);
1599  __ TailCallExternalReference(ExternalReference(Builtins::c_ArrayPush,
1600  isolate()),
1601  argc + 1,
1602  1);
1603  }
1604 
1605  __ bind(&miss);
1606  GenerateMissBranch();
1607 
1608  // Return the generated code.
1609  return GetCode(function);
1610 }
1611 
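// Summary of the Array.prototype.push fast paths compiled above, in order
// (informal sketch):
//
//   if (argc == 0)            return the current length;
//   if (new_len <= capacity)  store the element and bump the length,
//                             transitioning smi-only elements if needed;
//   else if (elements array ends exactly at the new-space allocation top
//            and kAllocationDelta more slots fit under the limit)
//                             grow the backing store in place;
//   else                      tail-call Builtins::c_ArrayPush;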
1612 
1613 Handle<Code> CallStubCompiler::CompileArrayPopCall(
1614  Handle<Object> object,
1615  Handle<JSObject> holder,
1616  Handle<JSGlobalPropertyCell> cell,
1617  Handle<JSFunction> function,
1618  Handle<String> name) {
1619  // ----------- S t a t e -------------
1620  // -- rcx : name
1621  // -- rsp[0] : return address
1622  // -- rsp[(argc - n) * 8] : arg[n] (zero-based)
1623  // -- ...
1624  // -- rsp[(argc + 1) * 8] : receiver
1625  // -----------------------------------
1626 
1627  // If object is not an array, bail out to regular call.
1628  if (!object->IsJSArray() || !cell.is_null()) return Handle<Code>::null();
1629 
1630  Label miss, return_undefined, call_builtin;
1631  GenerateNameCheck(name, &miss);
1632 
1633  // Get the receiver from the stack.
1634  const int argc = arguments().immediate();
1635  __ movq(rdx, Operand(rsp, (argc + 1) * kPointerSize));
1636 
1637  // Check that the receiver isn't a smi.
1638  __ JumpIfSmi(rdx, &miss);
1639 
1640  CheckPrototypes(Handle<JSObject>::cast(object), rdx, holder, rbx, rax, rdi,
1641  name, &miss);
1642 
1643  // Get the elements array of the object.
1644  __ movq(rbx, FieldOperand(rdx, JSArray::kElementsOffset));
1645 
1646  // Check that the elements are in fast mode and writable.
1647  __ CompareRoot(FieldOperand(rbx, HeapObject::kMapOffset),
1648  Heap::kFixedArrayMapRootIndex);
1649  __ j(not_equal, &call_builtin);
1650 
1651  // Get the array's length into rcx and calculate new length.
1652  __ SmiToInteger32(rcx, FieldOperand(rdx, JSArray::kLengthOffset));
1653  __ subl(rcx, Immediate(1));
1654  __ j(negative, &return_undefined);
1655 
1656  // Get the last element.
1657  __ LoadRoot(r9, Heap::kTheHoleValueRootIndex);
1658  __ movq(rax, FieldOperand(rbx,
1659  rcx, times_pointer_size,
1660  FixedArray::kHeaderSize));
1661  // Check if element is already the hole.
1662  __ cmpq(rax, r9);
1663  // If so, call slow-case to also check prototypes for value.
1664  __ j(equal, &call_builtin);
1665 
1666  // Set the array's length.
1667  __ Integer32ToSmiField(FieldOperand(rdx, JSArray::kLengthOffset), rcx);
1668 
1669  // Fill with the hole and return original value.
1670  __ movq(FieldOperand(rbx,
1671  rcx, times_pointer_size,
1672  FixedArray::kHeaderSize),
1673  r9);
1674  __ ret((argc + 1) * kPointerSize);
1675 
1676  __ bind(&return_undefined);
1677  __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
1678  __ ret((argc + 1) * kPointerSize);
1679 
1680  __ bind(&call_builtin);
1681  __ TailCallExternalReference(
1682  ExternalReference(Builtins::c_ArrayPop, isolate()),
1683  argc + 1,
1684  1);
1685 
1686  __ bind(&miss);
1687  GenerateMissBranch();
1688 
1689  // Return the generated code.
1690  return GetCode(function);
1691 }
1692 
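// --- Editorial sketch, not part of the original file ---
// The fast path of CompileArrayPopCall above, in C++ terms (hypothetical
// helper; 'the_hole' marks deleted elements, which force the builtin since
// the result might then have to come from the prototype chain):
static inline bool TryFastPop(void** elements, int* length, void* the_hole,
                              void* undefined, void** value_out) {
  int index = *length - 1;
  if (index < 0) { *value_out = undefined; return true; }  // empty array
  void* value = elements[index];
  if (value == the_hole) return false;  // slow case: consult prototypes
  *length = index;                      // shrink the array
  elements[index] = the_hole;           // clear the vacated slot
  *value_out = value;
  return true;
}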
1693 
1694 Handle<Code> CallStubCompiler::CompileStringCharCodeAtCall(
1695  Handle<Object> object,
1696  Handle<JSObject> holder,
1697  Handle<JSGlobalPropertyCell> cell,
1698  Handle<JSFunction> function,
1699  Handle<String> name) {
1700  // ----------- S t a t e -------------
1701  // -- rcx : function name
1702  // -- rsp[0] : return address
1703  // -- rsp[(argc - n) * 8] : arg[n] (zero-based)
1704  // -- ...
1705  // -- rsp[(argc + 1) * 8] : receiver
1706  // -----------------------------------
1707 
1708  // If object is not a string, bail out to regular call.
1709  if (!object->IsString() || !cell.is_null()) return Handle<Code>::null();
1710 
1711  const int argc = arguments().immediate();
1712 
1713  Label miss;
1714  Label name_miss;
1715  Label index_out_of_range;
1716  Label* index_out_of_range_label = &index_out_of_range;
1717  if (kind_ == Code::CALL_IC &&
1718  (CallICBase::StringStubState::decode(extra_state_) ==
1719  DEFAULT_STRING_STUB)) {
1720  index_out_of_range_label = &miss;
1721  }
1722  GenerateNameCheck(name, &name_miss);
1723 
1724  // Check that the maps starting from the prototype haven't changed.
1725  GenerateDirectLoadGlobalFunctionPrototype(masm(),
1726  Context::STRING_FUNCTION_INDEX,
1727  rax,
1728  &miss);
1729  ASSERT(!object.is_identical_to(holder));
1730  CheckPrototypes(Handle<JSObject>(JSObject::cast(object->GetPrototype())),
1731  rax, holder, rbx, rdx, rdi, name, &miss);
1732 
1733  Register receiver = rbx;
1734  Register index = rdi;
1735  Register result = rax;
1736  __ movq(receiver, Operand(rsp, (argc + 1) * kPointerSize));
1737  if (argc > 0) {
1738  __ movq(index, Operand(rsp, (argc - 0) * kPointerSize));
1739  } else {
1740  __ LoadRoot(index, Heap::kUndefinedValueRootIndex);
1741  }
1742 
1743  StringCharCodeAtGenerator generator(receiver,
1744  index,
1745  result,
1746  &miss, // When not a string.
1747  &miss, // When not a number.
1748  index_out_of_range_label,
1749  STRING_INDEX_IS_NUMBER);
1750  generator.GenerateFast(masm());
1751  __ ret((argc + 1) * kPointerSize);
1752 
1753  StubRuntimeCallHelper call_helper;
1754  generator.GenerateSlow(masm(), call_helper);
1755 
1756  if (index_out_of_range.is_linked()) {
1757  __ bind(&index_out_of_range);
1758  __ LoadRoot(rax, Heap::kNanValueRootIndex);
1759  __ ret((argc + 1) * kPointerSize);
1760  }
1761 
1762  __ bind(&miss);
1763  // Restore function name in rcx.
1764  __ Move(rcx, name);
1765  __ bind(&name_miss);
1766  GenerateMissBranch();
1767 
1768  // Return the generated code.
1769  return GetCode(function);
1770 }
1771 
1772 
1773 Handle<Code> CallStubCompiler::CompileStringCharAtCall(
1774  Handle<Object> object,
1775  Handle<JSObject> holder,
1776  Handle<JSGlobalPropertyCell> cell,
1777  Handle<JSFunction> function,
1778  Handle<String> name) {
1779  // ----------- S t a t e -------------
1780  // -- rcx : function name
1781  // -- rsp[0] : return address
1782  // -- rsp[(argc - n) * 8] : arg[n] (zero-based)
1783  // -- ...
1784  // -- rsp[(argc + 1) * 8] : receiver
1785  // -----------------------------------
1786 
1787  // If object is not a string, bail out to regular call.
1788  if (!object->IsString() || !cell.is_null()) return Handle<Code>::null();
1789 
1790  const int argc = arguments().immediate();
1791  Label miss;
1792  Label name_miss;
1793  Label index_out_of_range;
1794  Label* index_out_of_range_label = &index_out_of_range;
1795  if (kind_ == Code::CALL_IC &&
1796  (CallICBase::StringStubState::decode(extra_state_) ==
1797  DEFAULT_STRING_STUB)) {
1798  index_out_of_range_label = &miss;
1799  }
1800  GenerateNameCheck(name, &name_miss);
1801 
1802  // Check that the maps starting from the prototype haven't changed.
1803  GenerateDirectLoadGlobalFunctionPrototype(masm(),
1804  Context::STRING_FUNCTION_INDEX,
1805  rax,
1806  &miss);
1807  ASSERT(!object.is_identical_to(holder));
1808  CheckPrototypes(Handle<JSObject>(JSObject::cast(object->GetPrototype())),
1809  rax, holder, rbx, rdx, rdi, name, &miss);
1810 
1811  Register receiver = rax;
1812  Register index = rdi;
1813  Register scratch = rdx;
1814  Register result = rax;
1815  __ movq(receiver, Operand(rsp, (argc + 1) * kPointerSize));
1816  if (argc > 0) {
1817  __ movq(index, Operand(rsp, (argc - 0) * kPointerSize));
1818  } else {
1819  __ LoadRoot(index, Heap::kUndefinedValueRootIndex);
1820  }
1821 
1822  StringCharAtGenerator generator(receiver,
1823  index,
1824  scratch,
1825  result,
1826  &miss, // When not a string.
1827  &miss, // When not a number.
1828  index_out_of_range_label,
1829  STRING_INDEX_IS_NUMBER);
1830  generator.GenerateFast(masm());
1831  __ ret((argc + 1) * kPointerSize);
1832 
1833  StubRuntimeCallHelper call_helper;
1834  generator.GenerateSlow(masm(), call_helper);
1835 
1836  if (index_out_of_range.is_linked()) {
1837  __ bind(&index_out_of_range);
1838  __ LoadRoot(rax, Heap::kEmptyStringRootIndex);
1839  __ ret((argc + 1) * kPointerSize);
1840  }
1841  __ bind(&miss);
1842  // Restore function name in rcx.
1843  __ Move(rcx, name);
1844  __ bind(&name_miss);
1845  GenerateMissBranch();
1846 
1847  // Return the generated code.
1848  return GetCode(function);
1849 }
1850 
1851 
1852 Handle<Code> CallStubCompiler::CompileStringFromCharCodeCall(
1853  Handle<Object> object,
1854  Handle<JSObject> holder,
1855  Handle<JSGlobalPropertyCell> cell,
1856  Handle<JSFunction> function,
1857  Handle<String> name) {
1858  // ----------- S t a t e -------------
1859  // -- rcx : function name
1860  // -- rsp[0] : return address
1861  // -- rsp[(argc - n) * 8] : arg[n] (zero-based)
1862  // -- ...
1863  // -- rsp[(argc + 1) * 8] : receiver
1864  // -----------------------------------
1865 
1866  // If the object is not a JSObject or we got an unexpected number of
1867  // arguments, bail out to the regular call.
1868  const int argc = arguments().immediate();
1869  if (!object->IsJSObject() || argc != 1) return Handle<Code>::null();
1870 
1871  Label miss;
1872  GenerateNameCheck(name, &miss);
1873 
1874  if (cell.is_null()) {
1875  __ movq(rdx, Operand(rsp, 2 * kPointerSize));
1876  __ JumpIfSmi(rdx, &miss);
1877  CheckPrototypes(Handle<JSObject>::cast(object), rdx, holder, rbx, rax, rdi,
1878  name, &miss);
1879  } else {
1880  ASSERT(cell->value() == *function);
1881  GenerateGlobalReceiverCheck(Handle<JSObject>::cast(object), holder, name,
1882  &miss);
1883  GenerateLoadFunctionFromCell(cell, function, &miss);
1884  }
1885 
1886  // Load the char code argument.
1887  Register code = rbx;
1888  __ movq(code, Operand(rsp, 1 * kPointerSize));
1889 
1890  // Check the code is a smi.
1891  Label slow;
1892  __ JumpIfNotSmi(code, &slow);
1893 
1894  // Convert the smi code to uint16.
1895  __ SmiAndConstant(code, code, Smi::FromInt(0xffff));
1896 
1897  StringCharFromCodeGenerator generator(code, rax);
1898  generator.GenerateFast(masm());
1899  __ ret(2 * kPointerSize);
1900 
1901  StubRuntimeCallHelper call_helper;
1902  generator.GenerateSlow(masm(), call_helper);
1903 
1904  // Tail call the full function. We do not have to patch the receiver
1905  // because the function makes no use of it.
1906  __ bind(&slow);
1907  CallKind call_kind = CallICBase::Contextual::decode(extra_state_)
1908  ? CALL_AS_FUNCTION
1909  : CALL_AS_METHOD;
1910  __ InvokeFunction(function, arguments(), JUMP_FUNCTION,
1911  NullCallWrapper(), call_kind);
1912 
1913  __ bind(&miss);
1914  // rcx: function name.
1915  GenerateMissBranch();
1916 
1917  // Return the generated code.
1918  return cell.is_null() ? GetCode(function) : GetCode(NORMAL, name);
1919 }
1920 
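// --- Editorial sketch, not part of the original file ---
// The SmiAndConstant above implements String.fromCharCode's ToUint16
// conversion: the code unit is the argument modulo 2^16, so for example
// String.fromCharCode(0x10041) yields 'A' (0x10041 & 0xffff == 0x41).
static inline unsigned ToCharCodeSketch(int code) {
  return static_cast<unsigned>(code) & 0xffffu;
}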
1921 
1922 Handle<Code> CallStubCompiler::CompileMathFloorCall(
1923  Handle<Object> object,
1924  Handle<JSObject> holder,
1925  Handle<JSGlobalPropertyCell> cell,
1926  Handle<JSFunction> function,
1927  Handle<String> name) {
1928  // TODO(872): implement this.
1929  return Handle<Code>::null();
1930 }
1931 
1932 
1933 Handle<Code> CallStubCompiler::CompileMathAbsCall(
1934  Handle<Object> object,
1935  Handle<JSObject> holder,
1936  Handle<JSGlobalPropertyCell> cell,
1937  Handle<JSFunction> function,
1938  Handle<String> name) {
1939  // ----------- S t a t e -------------
1940  // -- rcx : function name
1941  // -- rsp[0] : return address
1942  // -- rsp[(argc - n) * 8] : arg[n] (zero-based)
1943  // -- ...
1944  // -- rsp[(argc + 1) * 8] : receiver
1945  // -----------------------------------
1946 
1947  // If the object is not a JSObject or we got an unexpected number of
1948  // arguments, bail out to the regular call.
1949  const int argc = arguments().immediate();
1950  if (!object->IsJSObject() || argc != 1) return Handle<Code>::null();
1951 
1952  Label miss;
1953  GenerateNameCheck(name, &miss);
1954 
1955  if (cell.is_null()) {
1956  __ movq(rdx, Operand(rsp, 2 * kPointerSize));
1957  __ JumpIfSmi(rdx, &miss);
1958  CheckPrototypes(Handle<JSObject>::cast(object), rdx, holder, rbx, rax, rdi,
1959  name, &miss);
1960  } else {
1961  ASSERT(cell->value() == *function);
1962  GenerateGlobalReceiverCheck(Handle<JSObject>::cast(object), holder, name,
1963  &miss);
1964  GenerateLoadFunctionFromCell(cell, function, &miss);
1965  }
1966  // Load the (only) argument into rax.
1967  __ movq(rax, Operand(rsp, 1 * kPointerSize));
1968 
1969  // Check if the argument is a smi.
1970  Label not_smi;
1971  STATIC_ASSERT(kSmiTag == 0);
1972  __ JumpIfNotSmi(rax, &not_smi);
1973  __ SmiToInteger32(rax, rax);
1974 
1975  // Set rbx to 1...1 (== -1) if the argument is negative, or to 0...0
1976  // otherwise.
1977  __ movl(rbx, rax);
1978  __ sarl(rbx, Immediate(kBitsPerInt - 1));
1979 
1980  // Do bitwise not or do nothing depending on rbx.
1981  __ xorl(rax, rbx);
1982 
1983  // Add 1 or do nothing depending on rbx.
1984  __ subl(rax, rbx);
1985 
1986  // If the result is still negative, go to the slow case.
1987  // This only happens for the most negative smi.
1988  Label slow;
1989  __ j(negative, &slow);
1990 
1991  // Smi case done.
1992  __ Integer32ToSmi(rax, rax);
1993  __ ret(2 * kPointerSize);
1994 
1995  // Check if the argument is a heap number and load its value.
1996  __ bind(&not_smi);
1997  __ CheckMap(rax, factory()->heap_number_map(), &slow, DONT_DO_SMI_CHECK);
1998  __ movq(rbx, FieldOperand(rax, HeapNumber::kValueOffset));
1999 
2000  // Check the sign of the argument. If the argument is positive,
2001  // just return it.
2002  Label negative_sign;
2003  const int sign_mask_shift =
2004  (HeapNumber::kExponentOffset - HeapNumber::kValueOffset) * kBitsPerByte;
2005  __ movq(rdi, static_cast<int64_t>(HeapNumber::kSignMask) << sign_mask_shift,
2006  RelocInfo::NONE);
2007  __ testq(rbx, rdi);
2008  __ j(not_zero, &negative_sign);
2009  __ ret(2 * kPointerSize);
2010 
2011  // If the argument is negative, clear the sign, and return a new
2012  // number. We still have the sign mask in rdi.
2013  __ bind(&negative_sign);
2014  __ xor_(rbx, rdi);
2015  __ AllocateHeapNumber(rax, rdx, &slow);
2016  __ movq(FieldOperand(rax, HeapNumber::kValueOffset), rbx);
2017  __ ret(2 * kPointerSize);
2018 
2019  // Tail call the full function. We do not have to patch the receiver
2020  // because the function makes no use of it.
2021  __ bind(&slow);
2022  CallKind call_kind = CallICBase::Contextual::decode(extra_state_)
2023  ? CALL_AS_FUNCTION
2024  : CALL_AS_METHOD;
2025  __ InvokeFunction(function, arguments(), JUMP_FUNCTION,
2026  NullCallWrapper(), call_kind);
2027 
2028  __ bind(&miss);
2029  // rcx: function name.
2030  GenerateMissBranch();
2031 
2032  // Return the generated code.
2033  return cell.is_null() ? GetCode(function) : GetCode(NORMAL, name);
2034 }
2035 
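// --- Editorial sketch, not part of the original file ---
// The smi fast path of CompileMathAbsCall is the classic branchless
// absolute value: an arithmetic right shift yields an all-ones mask for
// negative inputs, and (x ^ mask) - mask flips the bits and adds one
// exactly when x was negative. Assuming two's complement and an
// arithmetic shift, as the generated code does:
static inline int AbsSketch(int x) {
  int mask = x >> 31;  // 0 if x >= 0, -1 (all ones) if x < 0
  int result = (x ^ mask) - mask;
  // result stays negative only for INT_MIN, which has no positive
  // counterpart; the stub above branches to the slow case for that input.
  return result;
}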
2036 
2037 Handle<Code> CallStubCompiler::CompileFastApiCall(
2038  const CallOptimization& optimization,
2039  Handle<Object> object,
2040  Handle<JSObject> holder,
2041  Handle<JSGlobalPropertyCell> cell,
2042  Handle<JSFunction> function,
2043  Handle<String> name) {
2044  ASSERT(optimization.is_simple_api_call());
2045  // Bail out if object is a global object as we don't want to
2046  // repatch it to global receiver.
2047  if (object->IsGlobalObject()) return Handle<Code>::null();
2048  if (!cell.is_null()) return Handle<Code>::null();
2049  if (!object->IsJSObject()) return Handle<Code>::null();
2050  int depth = optimization.GetPrototypeDepthOfExpectedType(
2051  Handle<JSObject>::cast(object), holder);
2052  if (depth == kInvalidProtoDepth) return Handle<Code>::null();
2053 
2054  Label miss, miss_before_stack_reserved;
2055  GenerateNameCheck(name, &miss_before_stack_reserved);
2056 
2057  // Get the receiver from the stack.
2058  const int argc = arguments().immediate();
2059  __ movq(rdx, Operand(rsp, (argc + 1) * kPointerSize));
2060 
2061  // Check that the receiver isn't a smi.
2062  __ JumpIfSmi(rdx, &miss_before_stack_reserved);
2063 
2064  Counters* counters = isolate()->counters();
2065  __ IncrementCounter(counters->call_const(), 1);
2066  __ IncrementCounter(counters->call_const_fast_api(), 1);
2067 
2068  // Allocate space for v8::Arguments implicit values. Must be initialized
2069  // before calling any runtime function.
2070  __ subq(rsp, Immediate(kFastApiCallArguments * kPointerSize));
2071 
2072  // Check that the maps haven't changed and find a Holder as a side effect.
2073  CheckPrototypes(Handle<JSObject>::cast(object), rdx, holder, rbx, rax, rdi,
2074  name, depth, &miss);
2075 
2076  // Move the return address on top of the stack.
2077  __ movq(rax, Operand(rsp, 4 * kPointerSize));
2078  __ movq(Operand(rsp, 0 * kPointerSize), rax);
2079 
2080  GenerateFastApiCall(masm(), optimization, argc);
2081 
2082  __ bind(&miss);
2083  __ addq(rsp, Immediate(kFastApiCallArguments * kPointerSize));
2084 
2085  __ bind(&miss_before_stack_reserved);
2086  GenerateMissBranch();
2087 
2088  // Return the generated code.
2089  return GetCode(function);
2090 }
2091 
2092 
2093 Handle<Code> CallStubCompiler::CompileCallConstant(Handle<Object> object,
2094  Handle<JSObject> holder,
2095  Handle<JSFunction> function,
2096  Handle<String> name,
2097  CheckType check) {
2098  // ----------- S t a t e -------------
2099  // rcx : function name
2100  // rsp[0] : return address
2101  // rsp[8] : argument argc
2102  // rsp[16] : argument argc - 1
2103  // ...
2104  // rsp[argc * 8] : argument 1
2105  // rsp[(argc + 1) * 8] : argument 0 = receiver
2106  // -----------------------------------
2107 
2108  if (HasCustomCallGenerator(function)) {
2109  Handle<Code> code = CompileCustomCall(object, holder,
2110  Handle<JSGlobalPropertyCell>::null(),
2111  function, name);
2112  // A null handle means bail out to the regular compiler code below.
2113  if (!code.is_null()) return code;
2114  }
2115 
2116  Label miss;
2117  GenerateNameCheck(name, &miss);
2118 
2119  // Get the receiver from the stack.
2120  const int argc = arguments().immediate();
2121  __ movq(rdx, Operand(rsp, (argc + 1) * kPointerSize));
2122 
2123  // Check that the receiver isn't a smi.
2124  if (check != NUMBER_CHECK) {
2125  __ JumpIfSmi(rdx, &miss);
2126  }
2127 
2128  // Make sure that it's okay not to patch the on stack receiver
2129  // unless we're doing a receiver map check.
2130  ASSERT(!object->IsGlobalObject() || check == RECEIVER_MAP_CHECK);
2131 
2132  Counters* counters = isolate()->counters();
2133  switch (check) {
2134  case RECEIVER_MAP_CHECK:
2135  __ IncrementCounter(counters->call_const(), 1);
2136 
2137  // Check that the maps haven't changed.
2138  CheckPrototypes(Handle<JSObject>::cast(object), rdx, holder, rbx, rax,
2139  rdi, name, &miss);
2140 
2141  // Patch the receiver on the stack with the global proxy if
2142  // necessary.
2143  if (object->IsGlobalObject()) {
2144  __ movq(rdx, FieldOperand(rdx, GlobalObject::kGlobalReceiverOffset));
2145  __ movq(Operand(rsp, (argc + 1) * kPointerSize), rdx);
2146  }
2147  break;
2148 
2149  case STRING_CHECK:
2150  if (function->IsBuiltin() || !function->shared()->is_classic_mode()) {
2151  // Check that the object is a two-byte string or a symbol.
2152  __ CmpObjectType(rdx, FIRST_NONSTRING_TYPE, rax);
2153  __ j(above_equal, &miss);
2154  // Check that the maps starting from the prototype haven't changed.
2155  GenerateDirectLoadGlobalFunctionPrototype(
2156  masm(), Context::STRING_FUNCTION_INDEX, rax, &miss);
2157  CheckPrototypes(
2158  Handle<JSObject>(JSObject::cast(object->GetPrototype())),
2159  rax, holder, rbx, rdx, rdi, name, &miss);
2160  } else {
2161  // Calling non-strict non-builtins with a value as the receiver
2162  // requires boxing.
2163  __ jmp(&miss);
2164  }
2165  break;
2166 
2167  case NUMBER_CHECK:
2168  if (function->IsBuiltin() || !function->shared()->is_classic_mode()) {
2169  Label fast;
2170  // Check that the object is a smi or a heap number.
2171  __ JumpIfSmi(rdx, &fast);
2172  __ CmpObjectType(rdx, HEAP_NUMBER_TYPE, rax);
2173  __ j(not_equal, &miss);
2174  __ bind(&fast);
2175  // Check that the maps starting from the prototype haven't changed.
2176  GenerateDirectLoadGlobalFunctionPrototype(
2177  masm(), Context::NUMBER_FUNCTION_INDEX, rax, &miss);
2178  CheckPrototypes(
2179  Handle<JSObject>(JSObject::cast(object->GetPrototype())),
2180  rax, holder, rbx, rdx, rdi, name, &miss);
2181  } else {
2182  // Calling non-strict non-builtins with a value as the receiver
2183  // requires boxing.
2184  __ jmp(&miss);
2185  }
2186  break;
2187 
2188  case BOOLEAN_CHECK:
2189  if (function->IsBuiltin() || !function->shared()->is_classic_mode()) {
2190  Label fast;
2191  // Check that the object is a boolean.
2192  __ CompareRoot(rdx, Heap::kTrueValueRootIndex);
2193  __ j(equal, &fast);
2194  __ CompareRoot(rdx, Heap::kFalseValueRootIndex);
2195  __ j(not_equal, &miss);
2196  __ bind(&fast);
2197  // Check that the maps starting from the prototype haven't changed.
2198  GenerateDirectLoadGlobalFunctionPrototype(
2199  masm(), Context::BOOLEAN_FUNCTION_INDEX, rax, &miss);
2200  CheckPrototypes(
2201  Handle<JSObject>(JSObject::cast(object->GetPrototype())),
2202  rax, holder, rbx, rdx, rdi, name, &miss);
2203  } else {
2204  // Calling non-strict non-builtins with a value as the receiver
2205  // requires boxing.
2206  __ jmp(&miss);
2207  }
2208  break;
2209  }
2210 
2211  CallKind call_kind = CallICBase::Contextual::decode(extra_state_)
2212  ? CALL_AS_FUNCTION
2213  : CALL_AS_METHOD;
2214  __ InvokeFunction(function, arguments(), JUMP_FUNCTION,
2215  NullCallWrapper(), call_kind);
2216 
2217  // Handle call cache miss.
2218  __ bind(&miss);
2219  GenerateMissBranch();
2220 
2221  // Return the generated code.
2222  return GetCode(function);
2223 }
2224 
2225 
2226 Handle<Code> CallStubCompiler::CompileCallInterceptor(Handle<JSObject> object,
2227  Handle<JSObject> holder,
2228  Handle<String> name) {
2229  // ----------- S t a t e -------------
2230  // rcx : function name
2231  // rsp[0] : return address
2232  // rsp[8] : argument argc
2233  // rsp[16] : argument argc - 1
2234  // ...
2235  // rsp[argc * 8] : argument 1
2236  // rsp[(argc + 1) * 8] : argument 0 = receiver
2237  // -----------------------------------
2238  Label miss;
2239  GenerateNameCheck(name, &miss);
2240 
2241  // Get the number of arguments.
2242  const int argc = arguments().immediate();
2243 
2244  LookupResult lookup(isolate());
2245  LookupPostInterceptor(holder, name, &lookup);
2246 
2247  // Get the receiver from the stack.
2248  __ movq(rdx, Operand(rsp, (argc + 1) * kPointerSize));
2249 
2250  CallInterceptorCompiler compiler(this, arguments(), rcx, extra_state_);
2251  compiler.Compile(masm(), object, holder, name, &lookup, rdx, rbx, rdi, rax,
2252  &miss);
2253 
2254  // Restore receiver.
2255  __ movq(rdx, Operand(rsp, (argc + 1) * kPointerSize));
2256 
2257  // Check that the function really is a function.
2258  __ JumpIfSmi(rax, &miss);
2259  __ CmpObjectType(rax, JS_FUNCTION_TYPE, rbx);
2260  __ j(not_equal, &miss);
2261 
2262  // Patch the receiver on the stack with the global proxy if
2263  // necessary.
2264  if (object->IsGlobalObject()) {
2265  __ movq(rdx, FieldOperand(rdx, GlobalObject::kGlobalReceiverOffset));
2266  __ movq(Operand(rsp, (argc + 1) * kPointerSize), rdx);
2267  }
2268 
2269  // Invoke the function.
2270  __ movq(rdi, rax);
2271  CallKind call_kind = CallICBase::Contextual::decode(extra_state_)
2272  ? CALL_AS_FUNCTION
2273  : CALL_AS_METHOD;
2274  __ InvokeFunction(rdi, arguments(), JUMP_FUNCTION,
2275  NullCallWrapper(), call_kind);
2276 
2277  // Handle load cache miss.
2278  __ bind(&miss);
2279  GenerateMissBranch();
2280 
2281  // Return the generated code.
2282  return GetCode(INTERCEPTOR, name);
2283 }
2284 
2285 
2286 Handle<Code> CallStubCompiler::CompileCallGlobal(
2287  Handle<JSObject> object,
2288  Handle<GlobalObject> holder,
2289  Handle<JSGlobalPropertyCell> cell,
2290  Handle<JSFunction> function,
2291  Handle<String> name) {
2292  // ----------- S t a t e -------------
2293  // rcx : function name
2294  // rsp[0] : return address
2295  // rsp[8] : argument argc
2296  // rsp[16] : argument argc - 1
2297  // ...
2298  // rsp[argc * 8] : argument 1
2299  // rsp[(argc + 1) * 8] : argument 0 = receiver
2300  // -----------------------------------
2301 
2302  if (HasCustomCallGenerator(function)) {
2303  Handle<Code> code = CompileCustomCall(object, holder, cell, function, name);
2304  // A null handle means bail out to the regular compiler code below.
2305  if (!code.is_null()) return code;
2306  }
2307 
2308  Label miss;
2309  GenerateNameCheck(name, &miss);
2310 
2311  // Get the number of arguments.
2312  const int argc = arguments().immediate();
2313  GenerateGlobalReceiverCheck(object, holder, name, &miss);
2314  GenerateLoadFunctionFromCell(cell, function, &miss);
2315 
2316  // Patch the receiver on the stack with the global proxy.
2317  if (object->IsGlobalObject()) {
2318  __ movq(rdx, FieldOperand(rdx, GlobalObject::kGlobalReceiverOffset));
2319  __ movq(Operand(rsp, (argc + 1) * kPointerSize), rdx);
2320  }
2321 
2322  // Set up the context (function already in rdi).
2323  __ movq(rsi, FieldOperand(rdi, JSFunction::kContextOffset));
2324 
2325  // Jump to the cached code (tail call).
2326  Counters* counters = isolate()->counters();
2327  __ IncrementCounter(counters->call_global_inline(), 1);
2328  ParameterCount expected(function->shared()->formal_parameter_count());
2329  CallKind call_kind = CallICBase::Contextual::decode(extra_state_)
2330  ? CALL_AS_FUNCTION
2331  : CALL_AS_METHOD;
2332  // We call indirectly through the code field in the function to
2333  // allow recompilation to take effect without changing any of the
2334  // call sites.
2335  __ movq(rdx, FieldOperand(rdi, JSFunction::kCodeEntryOffset));
2336  __ InvokeCode(rdx, expected, arguments(), JUMP_FUNCTION,
2337  NullCallWrapper(), call_kind);
2338 
2339  // Handle call cache miss.
2340  __ bind(&miss);
2341  __ IncrementCounter(counters->call_global_inline_miss(), 1);
2342  GenerateMissBranch();
2343 
2344  // Return the generated code.
2345  return GetCode(NORMAL, name);
2346 }
2347 
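// --- Editorial sketch, not part of the original file ---
// CompileCallGlobal above calls through the function's code-entry field
// rather than a baked-in code address, so recompilation retargets every
// call site without patching it. Schematically:
struct FunctionSketch {
  void (*code_entry)();  // stands in for JSFunction::kCodeEntryOffset
};
static inline void CallThroughCodeEntry(FunctionSketch* f) {
  f->code_entry();  // always reads the current code, never a stale copy
}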
2348 
2349 Handle<Code> StoreStubCompiler::CompileStoreField(Handle<JSObject> object,
2350  int index,
2351  Handle<Map> transition,
2352  Handle<String> name) {
2353  // ----------- S t a t e -------------
2354  // -- rax : value
2355  // -- rcx : name
2356  // -- rdx : receiver
2357  // -- rsp[0] : return address
2358  // -----------------------------------
2359  Label miss;
2360 
2361  // Generate store field code. Preserves receiver and name on jump to miss.
2362  GenerateStoreField(masm(),
2363  object,
2364  index,
2365  transition,
2366  name,
2367  rdx, rcx, rbx, rdi,
2368  &miss);
2369 
2370  // Handle store cache miss.
2371  __ bind(&miss);
2372  Handle<Code> ic = isolate()->builtins()->StoreIC_Miss();
2373  __ Jump(ic, RelocInfo::CODE_TARGET);
2374 
2375  // Return the generated code.
2376  return GetCode(transition.is_null() ? FIELD : MAP_TRANSITION, name);
2377 }
2378 
2379 
2380 Handle<Code> StoreStubCompiler::CompileStoreCallback(
2381  Handle<JSObject> object,
2382  Handle<AccessorInfo> callback,
2383  Handle<String> name) {
2384  // ----------- S t a t e -------------
2385  // -- rax : value
2386  // -- rcx : name
2387  // -- rdx : receiver
2388  // -- rsp[0] : return address
2389  // -----------------------------------
2390  Label miss;
2391 
2392  // Check that the map of the object hasn't changed.
2393  __ CheckMap(rdx, Handle<Map>(object->map()), &miss,
2394  DO_SMI_CHECK, ALLOW_ELEMENT_TRANSITION_MAPS);
2395 
2396  // Perform global security token check if needed.
2397  if (object->IsJSGlobalProxy()) {
2398  __ CheckAccessGlobalProxy(rdx, rbx, &miss);
2399  }
2400 
2401  // Stub never generated for non-global objects that require access
2402  // checks.
2403  ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());
2404 
2405  __ pop(rbx); // remove the return address
2406  __ push(rdx); // receiver
2407  __ Push(callback); // callback info
2408  __ push(rcx); // name
2409  __ push(rax); // value
2410  __ push(rbx); // restore return address
2411 
2412  // Do tail-call to the runtime system.
2413  ExternalReference store_callback_property =
2414  ExternalReference(IC_Utility(IC::kStoreCallbackProperty), isolate());
2415  __ TailCallExternalReference(store_callback_property, 4, 1);
2416 
2417  // Handle store cache miss.
2418  __ bind(&miss);
2419  Handle<Code> ic = isolate()->builtins()->StoreIC_Miss();
2420  __ Jump(ic, RelocInfo::CODE_TARGET);
2421 
2422  // Return the generated code.
2423  return GetCode(CALLBACKS, name);
2424 }
2425 
2426 
2427 Handle<Code> StoreStubCompiler::CompileStoreViaSetter(
2428  Handle<JSObject> receiver,
2429  Handle<JSFunction> setter,
2430  Handle<String> name) {
2431  // ----------- S t a t e -------------
2432  // -- rax : value
2433  // -- rcx : name
2434  // -- rdx : receiver
2435  // -- rsp[0] : return address
2436  // -----------------------------------
2437  Label miss;
2438 
2439  // Check that the map of the object hasn't changed.
2440  __ CheckMap(rdx, Handle<Map>(receiver->map()), &miss, DO_SMI_CHECK,
2441  ALLOW_ELEMENT_TRANSITION_MAPS);
2442 
2443  {
2444  FrameScope scope(masm(), StackFrame::INTERNAL);
2445 
2446  // Save value register, so we can restore it later.
2447  __ push(rax);
2448 
2449  // Call the JavaScript setter with the receiver and the value on the stack.
2450  __ push(rdx);
2451  __ push(rax);
2452  ParameterCount actual(1);
2453  __ InvokeFunction(setter, actual, CALL_FUNCTION, NullCallWrapper(),
2454  CALL_AS_METHOD);
2455 
2456  // We have to return the passed value, not the return value of the setter.
2457  __ pop(rax);
2458 
2459  // Restore context register.
2460  __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
2461  }
2462  __ ret(0);
2463 
2464  __ bind(&miss);
2465  Handle<Code> ic = isolate()->builtins()->StoreIC_Miss();
2466  __ Jump(ic, RelocInfo::CODE_TARGET);
2467 
2468  // Return the generated code.
2469  return GetCode(CALLBACKS, name);
2470 }
2471 
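// --- Editorial note, not part of the original file ---
// The value register is saved and restored around the setter call above
// because in JavaScript an assignment expression evaluates to the assigned
// value; the setter's own return value is discarded. For example:
//   var o = { set p(v) { return 42; } };
//   (o.p = 7);  // evaluates to 7, not 42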
2472 
2473 Handle<Code> StoreStubCompiler::CompileStoreInterceptor(
2474  Handle<JSObject> receiver,
2475  Handle<String> name) {
2476  // ----------- S t a t e -------------
2477  // -- rax : value
2478  // -- rcx : name
2479  // -- rdx : receiver
2480  // -- rsp[0] : return address
2481  // -----------------------------------
2482  Label miss;
2483 
2484  // Check that the map of the object hasn't changed.
2485  __ CheckMap(rdx, Handle<Map>(receiver->map()), &miss,
2486  DO_SMI_CHECK, ALLOW_ELEMENT_TRANSITION_MAPS);
2487 
2488  // Perform global security token check if needed.
2489  if (receiver->IsJSGlobalProxy()) {
2490  __ CheckAccessGlobalProxy(rdx, rbx, &miss);
2491  }
2492 
2493  // Stub never generated for non-global objects that require access
2494  // checks.
2495  ASSERT(receiver->IsJSGlobalProxy() || !receiver->IsAccessCheckNeeded());
2496 
2497  __ pop(rbx); // remove the return address
2498  __ push(rdx); // receiver
2499  __ push(rcx); // name
2500  __ push(rax); // value
2501  __ Push(Smi::FromInt(strict_mode_));
2502  __ push(rbx); // restore return address
2503 
2504  // Do tail-call to the runtime system.
2505  ExternalReference store_ic_property =
2506  ExternalReference(IC_Utility(IC::kStoreInterceptorProperty), isolate());
2507  __ TailCallExternalReference(store_ic_property, 4, 1);
2508 
2509  // Handle store cache miss.
2510  __ bind(&miss);
2511  Handle<Code> ic = isolate()->builtins()->StoreIC_Miss();
2512  __ Jump(ic, RelocInfo::CODE_TARGET);
2513 
2514  // Return the generated code.
2515  return GetCode(INTERCEPTOR, name);
2516 }
2517 
2518 
2519 Handle<Code> StoreStubCompiler::CompileStoreGlobal(
2520  Handle<GlobalObject> object,
2521  Handle<JSGlobalPropertyCell> cell,
2522  Handle<String> name) {
2523  // ----------- S t a t e -------------
2524  // -- rax : value
2525  // -- rcx : name
2526  // -- rdx : receiver
2527  // -- rsp[0] : return address
2528  // -----------------------------------
2529  Label miss;
2530 
2531  // Check that the map of the global has not changed.
2532  __ Cmp(FieldOperand(rdx, HeapObject::kMapOffset),
2533  Handle<Map>(object->map()));
2534  __ j(not_equal, &miss);
2535 
2536  // Compute the cell operand to use.
2537  __ Move(rbx, cell);
2538  Operand cell_operand = FieldOperand(rbx, JSGlobalPropertyCell::kValueOffset);
2539 
2540  // Check that the value in the cell is not the hole. If it is, this
2541  // cell could have been deleted and reintroducing the global needs
2542  // to update the property details in the property dictionary of the
2543  // global object. We bail out to the runtime system to do that.
2544  __ CompareRoot(cell_operand, Heap::kTheHoleValueRootIndex);
2545  __ j(equal, &miss);
2546 
2547  // Store the value in the cell.
2548  __ movq(cell_operand, rax);
2549  // Cells are always rescanned, so no write barrier here.
2550 
2551  // Return the value (register rax).
2552  Counters* counters = isolate()->counters();
2553  __ IncrementCounter(counters->named_store_global_inline(), 1);
2554  __ ret(0);
2555 
2556  // Handle store cache miss.
2557  __ bind(&miss);
2558  __ IncrementCounter(counters->named_store_global_inline_miss(), 1);
2559  Handle<Code> ic = isolate()->builtins()->StoreIC_Miss();
2560  __ Jump(ic, RelocInfo::CODE_TARGET);
2561 
2562  // Return the generated code.
2563  return GetCode(NORMAL, name);
2564 }
2565 
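// --- Editorial sketch, not part of the original file ---
// CompileStoreGlobal above, in C++ terms: the cell's value slot is updated
// only if it does not hold the hole; a hole means the property was deleted
// and reintroducing it must go through the runtime (hypothetical names):
static inline bool TryStoreGlobalCell(void** cell_value_slot, void* the_hole,
                                      void* value) {
  if (*cell_value_slot == the_hole) return false;  // deleted: call runtime
  *cell_value_slot = value;  // cells are always rescanned: no write barrier
  return true;
}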
2566 
2567 Handle<Code> KeyedStoreStubCompiler::CompileStoreField(Handle<JSObject> object,
2568  int index,
2569  Handle<Map> transition,
2570  Handle<String> name) {
2571  // ----------- S t a t e -------------
2572  // -- rax : value
2573  // -- rcx : key
2574  // -- rdx : receiver
2575  // -- rsp[0] : return address
2576  // -----------------------------------
2577  Label miss;
2578 
2579  Counters* counters = isolate()->counters();
2580  __ IncrementCounter(counters->keyed_store_field(), 1);
2581 
2582  // Check that the name has not changed.
2583  __ Cmp(rcx, name);
2584  __ j(not_equal, &miss);
2585 
2586  // Generate store field code. Preserves receiver and name on jump to miss.
2587  GenerateStoreField(masm(),
2588  object,
2589  index,
2590  transition,
2591  name,
2592  rdx, rcx, rbx, rdi,
2593  &miss);
2594 
2595  // Handle store cache miss.
2596  __ bind(&miss);
2597  __ DecrementCounter(counters->keyed_store_field(), 1);
2598  Handle<Code> ic = isolate()->builtins()->KeyedStoreIC_Miss();
2599  __ Jump(ic, RelocInfo::CODE_TARGET);
2600 
2601  // Return the generated code.
2602  return GetCode(transition.is_null() ? FIELD : MAP_TRANSITION, name);
2603 }
2604 
2605 
2606 Handle<Code> KeyedStoreStubCompiler::CompileStoreElement(
2607  Handle<Map> receiver_map) {
2608  // ----------- S t a t e -------------
2609  // -- rax : value
2610  // -- rcx : key
2611  // -- rdx : receiver
2612  // -- rsp[0] : return address
2613  // -----------------------------------
2614 
2615  ElementsKind elements_kind = receiver_map->elements_kind();
2616  bool is_js_array = receiver_map->instance_type() == JS_ARRAY_TYPE;
2617  Handle<Code> stub =
2618  KeyedStoreElementStub(is_js_array, elements_kind, grow_mode_).GetCode();
2619 
2620  __ DispatchMap(rdx, receiver_map, stub, DO_SMI_CHECK);
2621 
2622  Handle<Code> ic = isolate()->builtins()->KeyedStoreIC_Miss();
2623  __ jmp(ic, RelocInfo::CODE_TARGET);
2624 
2625  // Return the generated code.
2626  return GetCode(NORMAL, factory()->empty_string());
2627 }
2628 
2629 
2630 Handle<Code> KeyedStoreStubCompiler::CompileStorePolymorphic(
2631  MapHandleList* receiver_maps,
2632  CodeHandleList* handler_stubs,
2633  MapHandleList* transitioned_maps) {
2634  // ----------- S t a t e -------------
2635  // -- rax : value
2636  // -- rcx : key
2637  // -- rdx : receiver
2638  // -- rsp[0] : return address
2639  // -----------------------------------
2640  Label miss;
2641  __ JumpIfSmi(rdx, &miss, Label::kNear);
2642 
2643  __ movq(rdi, FieldOperand(rdx, HeapObject::kMapOffset));
2644  int receiver_count = receiver_maps->length();
2645  for (int i = 0; i < receiver_count; ++i) {
2646  // Check map and tail call if there's a match
2647  __ Cmp(rdi, receiver_maps->at(i));
2648  if (transitioned_maps->at(i).is_null()) {
2649  __ j(equal, handler_stubs->at(i), RelocInfo::CODE_TARGET);
2650  } else {
2651  Label next_map;
2652  __ j(not_equal, &next_map, Label::kNear);
2653  __ movq(rbx, transitioned_maps->at(i), RelocInfo::EMBEDDED_OBJECT);
2654  __ jmp(handler_stubs->at(i), RelocInfo::CODE_TARGET);
2655  __ bind(&next_map);
2656  }
2657  }
2658 
2659  __ bind(&miss);
2660  Handle<Code> ic = isolate()->builtins()->KeyedStoreIC_Miss();
2661  __ jmp(ic, RelocInfo::CODE_TARGET);
2662 
2663  // Return the generated code.
2664  return GetCode(NORMAL, factory()->empty_string(), MEGAMORPHIC);
2665 }
2666 
2667 
2668 Handle<Code> LoadStubCompiler::CompileLoadNonexistent(Handle<String> name,
2669  Handle<JSObject> object,
2670  Handle<JSObject> last) {
2671  // ----------- S t a t e -------------
2672  // -- rax : receiver
2673  // -- rcx : name
2674  // -- rsp[0] : return address
2675  // -----------------------------------
2676  Label miss;
2677 
2678  // Check that receiver is not a smi.
2679  __ JumpIfSmi(rax, &miss);
2680 
2681  // Check the maps of the full prototype chain. Also check that
2682  // global property cells up to (but not including) the last object
2683  // in the prototype chain are empty.
2684  CheckPrototypes(object, rax, last, rbx, rdx, rdi, name, &miss);
2685 
2686  // If the last object in the prototype chain is a global object,
2687  // check that the global property cell is empty.
2688  if (last->IsGlobalObject()) {
2689  GenerateCheckPropertyCell(
2690  masm(), Handle<GlobalObject>::cast(last), name, rdx, &miss);
2691  }
2692 
2693  // Return undefined if maps of the full prototype chain are still the
2694  // same and no global property with this name contains a value.
2695  __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
2696  __ ret(0);
2697 
2698  __ bind(&miss);
2699  GenerateLoadMiss(masm(), Code::LOAD_IC);
2700 
2701  // Return the generated code.
2702  return GetCode(NONEXISTENT, factory()->empty_string());
2703 }
2704 
2705 
2706 Handle<Code> LoadStubCompiler::CompileLoadField(Handle<JSObject> object,
2707  Handle<JSObject> holder,
2708  int index,
2709  Handle<String> name) {
2710  // ----------- S t a t e -------------
2711  // -- rax : receiver
2712  // -- rcx : name
2713  // -- rsp[0] : return address
2714  // -----------------------------------
2715  Label miss;
2716 
2717  GenerateLoadField(object, holder, rax, rbx, rdx, rdi, index, name, &miss);
2718  __ bind(&miss);
2719  GenerateLoadMiss(masm(), Code::LOAD_IC);
2720 
2721  // Return the generated code.
2722  return GetCode(FIELD, name);
2723 }
2724 
2725 
2726 Handle<Code> LoadStubCompiler::CompileLoadCallback(
2727  Handle<String> name,
2728  Handle<JSObject> object,
2729  Handle<JSObject> holder,
2730  Handle<AccessorInfo> callback) {
2731  // ----------- S t a t e -------------
2732  // -- rax : receiver
2733  // -- rcx : name
2734  // -- rsp[0] : return address
2735  // -----------------------------------
2736  Label miss;
2737  GenerateLoadCallback(object, holder, rax, rcx, rdx, rbx, rdi, callback,
2738  name, &miss);
2739  __ bind(&miss);
2740  GenerateLoadMiss(masm(), Code::LOAD_IC);
2741 
2742  // Return the generated code.
2743  return GetCode(CALLBACKS, name);
2744 }
2745 
2746 
2747 Handle<Code> LoadStubCompiler::CompileLoadViaGetter(
2748  Handle<String> name,
2749  Handle<JSObject> receiver,
2750  Handle<JSObject> holder,
2751  Handle<JSFunction> getter) {
2752  // ----------- S t a t e -------------
2753  // -- rax : receiver
2754  // -- rcx : name
2755  // -- rsp[0] : return address
2756  // -----------------------------------
2757  Label miss;
2758 
2759  // Check that the maps haven't changed.
2760  __ JumpIfSmi(rax, &miss);
2761  CheckPrototypes(receiver, rax, holder, rbx, rdx, rdi, name, &miss);
2762 
2763  {
2764  FrameScope scope(masm(), StackFrame::INTERNAL);
2765 
2766  // Call the JavaScript getter with the receiver on the stack.
2767  __ push(rax);
2768  ParameterCount actual(0);
2769  __ InvokeFunction(getter, actual, CALL_FUNCTION, NullCallWrapper(),
2770  CALL_AS_METHOD);
2771 
2772  // Restore context register.
2773  __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
2774  }
2775  __ ret(0);
2776 
2777  __ bind(&miss);
2778  GenerateLoadMiss(masm(), Code::LOAD_IC);
2779 
2780  // Return the generated code.
2781  return GetCode(CALLBACKS, name);
2782 }
2783 
2784 
2785 Handle<Code> LoadStubCompiler::CompileLoadConstant(Handle<JSObject> object,
2786  Handle<JSObject> holder,
2787  Handle<JSFunction> value,
2788  Handle<String> name) {
2789  // ----------- S t a t e -------------
2790  // -- rax : receiver
2791  // -- rcx : name
2792  // -- rsp[0] : return address
2793  // -----------------------------------
2794  Label miss;
2795 
2796  GenerateLoadConstant(object, holder, rax, rbx, rdx, rdi, value, name, &miss);
2797  __ bind(&miss);
2798  GenerateLoadMiss(masm(), Code::LOAD_IC);
2799 
2800  // Return the generated code.
2801  return GetCode(CONSTANT_FUNCTION, name);
2802 }
2803 
2804 
2805 Handle<Code> LoadStubCompiler::CompileLoadInterceptor(Handle<JSObject> receiver,
2806  Handle<JSObject> holder,
2807  Handle<String> name) {
2808  // ----------- S t a t e -------------
2809  // -- rax : receiver
2810  // -- rcx : name
2811  // -- rsp[0] : return address
2812  // -----------------------------------
2813  Label miss;
2814  LookupResult lookup(isolate());
2815  LookupPostInterceptor(holder, name, &lookup);
2816 
2817  // TODO(368): Compile in the whole chain: all the interceptors in
2818  // prototypes and ultimate answer.
2819  GenerateLoadInterceptor(receiver, holder, &lookup, rax, rcx, rdx, rbx, rdi,
2820  name, &miss);
2821  __ bind(&miss);
2822  GenerateLoadMiss(masm(), Code::LOAD_IC);
2823 
2824  // Return the generated code.
2825  return GetCode(INTERCEPTOR, name);
2826 }
2827 
2828 
2829 Handle<Code> LoadStubCompiler::CompileLoadGlobal(
2830  Handle<JSObject> object,
2831  Handle<GlobalObject> holder,
2832  Handle<JSGlobalPropertyCell> cell,
2833  Handle<String> name,
2834  bool is_dont_delete) {
2835  // ----------- S t a t e -------------
2836  // -- rax : receiver
2837  // -- rcx : name
2838  // -- rsp[0] : return address
2839  // -----------------------------------
2840  Label miss;
2841 
2842  // Check that the maps haven't changed.
2843  __ JumpIfSmi(rax, &miss);
2844  CheckPrototypes(object, rax, holder, rbx, rdx, rdi, name, &miss);
2845 
2846  // Get the value from the cell.
2847  __ Move(rbx, cell);
2848  __ movq(rbx, FieldOperand(rbx, JSGlobalPropertyCell::kValueOffset));
2849 
2850  // Check for deleted property if property can actually be deleted.
2851  if (!is_dont_delete) {
2852  __ CompareRoot(rbx, Heap::kTheHoleValueRootIndex);
2853  __ j(equal, &miss);
2854  } else if (FLAG_debug_code) {
2855  __ CompareRoot(rbx, Heap::kTheHoleValueRootIndex);
2856  __ Check(not_equal, "DontDelete cells can't contain the hole");
2857  }
2858 
2859  Counters* counters = isolate()->counters();
2860  __ IncrementCounter(counters->named_load_global_stub(), 1);
2861  __ movq(rax, rbx);
2862  __ ret(0);
2863 
2864  __ bind(&miss);
2865  __ IncrementCounter(counters->named_load_global_stub_miss(), 1);
2866  GenerateLoadMiss(masm(), Code::LOAD_IC);
2867 
2868  // Return the generated code.
2869  return GetCode(NORMAL, name);
2870 }
2871 
2872 
2873 Handle<Code> KeyedLoadStubCompiler::CompileLoadField(Handle<String> name,
2874  Handle<JSObject> receiver,
2875  Handle<JSObject> holder,
2876  int index) {
2877  // ----------- S t a t e -------------
2878  // -- rax : key
2879  // -- rdx : receiver
2880  // -- rsp[0] : return address
2881  // -----------------------------------
2882  Label miss;
2883 
2884  Counters* counters = isolate()->counters();
2885  __ IncrementCounter(counters->keyed_load_field(), 1);
2886 
2887  // Check that the name has not changed.
2888  __ Cmp(rax, name);
2889  __ j(not_equal, &miss);
2890 
2891  GenerateLoadField(receiver, holder, rdx, rbx, rcx, rdi, index, name, &miss);
2892 
2893  __ bind(&miss);
2894  __ DecrementCounter(counters->keyed_load_field(), 1);
2895  GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
2896 
2897  // Return the generated code.
2898  return GetCode(FIELD, name);
2899 }
2900 
2901 
2902 Handle<Code> KeyedLoadStubCompiler::CompileLoadCallback(
2903  Handle<String> name,
2904  Handle<JSObject> receiver,
2905  Handle<JSObject> holder,
2906  Handle<AccessorInfo> callback) {
2907  // ----------- S t a t e -------------
2908  // -- rax : key
2909  // -- rdx : receiver
2910  // -- rsp[0] : return address
2911  // -----------------------------------
2912  Label miss;
2913  Counters* counters = isolate()->counters();
2914  __ IncrementCounter(counters->keyed_load_callback(), 1);
2915 
2916  // Check that the name has not changed.
2917  __ Cmp(rax, name);
2918  __ j(not_equal, &miss);
2919 
2920  GenerateLoadCallback(receiver, holder, rdx, rax, rbx, rcx, rdi, callback,
2921  name, &miss);
2922  __ bind(&miss);
2923  __ DecrementCounter(counters->keyed_load_callback(), 1);
2924  GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
2925 
2926  // Return the generated code.
2927  return GetCode(CALLBACKS, name);
2928 }
2929 
2930 
2931 Handle<Code> KeyedLoadStubCompiler::CompileLoadConstant(
2932  Handle<String> name,
2933  Handle<JSObject> receiver,
2934  Handle<JSObject> holder,
2935  Handle<JSFunction> value) {
2936  // ----------- S t a t e -------------
2937  // -- rax : key
2938  // -- rdx : receiver
2939  // -- rsp[0] : return address
2940  // -----------------------------------
2941  Label miss;
2942 
2943  Counters* counters = isolate()->counters();
2944  __ IncrementCounter(counters->keyed_load_constant_function(), 1);
2945 
2946  // Check that the name has not changed.
2947  __ Cmp(rax, name);
2948  __ j(not_equal, &miss);
2949 
2950  GenerateLoadConstant(receiver, holder, rdx, rbx, rcx, rdi,
2951  value, name, &miss);
2952  __ bind(&miss);
2953  __ DecrementCounter(counters->keyed_load_constant_function(), 1);
2954  GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
2955 
2956  // Return the generated code.
2957  return GetCode(CONSTANT_FUNCTION, name);
2958 }
2959 
2960 
2961 Handle<Code> KeyedLoadStubCompiler::CompileLoadInterceptor(
2962  Handle<JSObject> receiver,
2963  Handle<JSObject> holder,
2964  Handle<String> name) {
2965  // ----------- S t a t e -------------
2966  // -- rax : key
2967  // -- rdx : receiver
2968  // -- rsp[0] : return address
2969  // -----------------------------------
2970  Label miss;
2971  Counters* counters = isolate()->counters();
2972  __ IncrementCounter(counters->keyed_load_interceptor(), 1);
2973 
2974  // Check that the name has not changed.
2975  __ Cmp(rax, name);
2976  __ j(not_equal, &miss);
2977 
2978  LookupResult lookup(isolate());
2979  LookupPostInterceptor(holder, name, &lookup);
2980  GenerateLoadInterceptor(receiver, holder, &lookup, rdx, rax, rcx, rbx, rdi,
2981  name, &miss);
2982  __ bind(&miss);
2983  __ DecrementCounter(counters->keyed_load_interceptor(), 1);
2984  GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
2985 
2986  // Return the generated code.
2987  return GetCode(INTERCEPTOR, name);
2988 }
2989 
2990 
2991 Handle<Code> KeyedLoadStubCompiler::CompileLoadArrayLength(
2992  Handle<String> name) {
2993  // ----------- S t a t e -------------
2994  // -- rax : key
2995  // -- rdx : receiver
2996  // -- rsp[0] : return address
2997  // -----------------------------------
2998  Label miss;
2999 
3000  Counters* counters = isolate()->counters();
3001  __ IncrementCounter(counters->keyed_load_array_length(), 1);
3002 
3003  // Check that the name has not changed.
3004  __ Cmp(rax, name);
3005  __ j(not_equal, &miss);
3006 
3007  GenerateLoadArrayLength(masm(), rdx, rcx, &miss);
3008  __ bind(&miss);
3009  __ DecrementCounter(counters->keyed_load_array_length(), 1);
3010  GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
3011 
3012  // Return the generated code.
3013  return GetCode(CALLBACKS, name);
3014 }
3015 
3016 
3017 Handle<Code> KeyedLoadStubCompiler::CompileLoadStringLength(
3018  Handle<String> name) {
3019  // ----------- S t a t e -------------
3020  // -- rax : key
3021  // -- rdx : receiver
3022  // -- rsp[0] : return address
3023  // -----------------------------------
3024  Label miss;
3025 
3026  Counters* counters = isolate()->counters();
3027  __ IncrementCounter(counters->keyed_load_string_length(), 1);
3028 
3029  // Check that the name has not changed.
3030  __ Cmp(rax, name);
3031  __ j(not_equal, &miss);
3032 
3033  GenerateLoadStringLength(masm(), rdx, rcx, rbx, &miss, true);
3034  __ bind(&miss);
3035  __ DecrementCounter(counters->keyed_load_string_length(), 1);
3036  GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
3037 
3038  // Return the generated code.
3039  return GetCode(CALLBACKS, name);
3040 }
3041 
3042 
3043 Handle<Code> KeyedLoadStubCompiler::CompileLoadFunctionPrototype(
3044  Handle<String> name) {
3045  // ----------- S t a t e -------------
3046  // -- rax : key
3047  // -- rdx : receiver
3048  // -- rsp[0] : return address
3049  // -----------------------------------
3050  Label miss;
3051 
3052  Counters* counters = isolate()->counters();
3053  __ IncrementCounter(counters->keyed_load_function_prototype(), 1);
3054 
3055  // Check that the name has not changed.
3056  __ Cmp(rax, name);
3057  __ j(not_equal, &miss);
3058 
3059  GenerateLoadFunctionPrototype(masm(), rdx, rcx, rbx, &miss);
3060  __ bind(&miss);
3061  __ DecrementCounter(counters->keyed_load_function_prototype(), 1);
3062  GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
3063 
3064  // Return the generated code.
3065  return GetCode(CALLBACKS, name);
3066 }
3067 
3068 
3069 Handle<Code> KeyedLoadStubCompiler::CompileLoadElement(
3070  Handle<Map> receiver_map) {
3071  // ----------- S t a t e -------------
3072  // -- rax : key
3073  // -- rdx : receiver
3074  // -- rsp[0] : return address
3075  // -----------------------------------
3076  ElementsKind elements_kind = receiver_map->elements_kind();
3077  Handle<Code> stub = KeyedLoadElementStub(elements_kind).GetCode();
3078 
3079  __ DispatchMap(rdx, receiver_map, stub, DO_SMI_CHECK);
3080 
3081  Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Miss();
3082  __ jmp(ic, RelocInfo::CODE_TARGET);
3083 
3084  // Return the generated code.
3085  return GetCode(NORMAL, factory()->empty_string());
3086 }
3087 
3088 
3089 Handle<Code> KeyedLoadStubCompiler::CompileLoadPolymorphic(
3090  MapHandleList* receiver_maps,
3091  CodeHandleList* handler_ics) {
3092  // ----------- S t a t e -------------
3093  // -- rax : key
3094  // -- rdx : receiver
3095  // -- rsp[0] : return address
3096  // -----------------------------------
3097  Label miss;
3098  __ JumpIfSmi(rdx, &miss);
3099 
3100  Register map_reg = rbx;
3101  __ movq(map_reg, FieldOperand(rdx, HeapObject::kMapOffset));
3102  int receiver_count = receiver_maps->length();
3103  for (int current = 0; current < receiver_count; ++current) {
3104  // Check map and tail call if there's a match
3105  __ Cmp(map_reg, receiver_maps->at(current));
3106  __ j(equal, handler_ics->at(current), RelocInfo::CODE_TARGET);
3107  }
3108 
3109  __ bind(&miss);
3110  GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
3111 
3112  // Return the generated code.
3113  return GetCode(NORMAL, factory()->empty_string(), MEGAMORPHIC);
3114 }
3115 
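// --- Editorial sketch, not part of the original file ---
// CompileLoadPolymorphic above emits a linear chain of map compares, one
// per cached receiver map, each tail-calling its handler on a match:
static inline const void* PolymorphicDispatchSketch(
    const void* receiver_map, const void* const maps[],
    const void* const handlers[], int count, const void* miss_handler) {
  for (int i = 0; i < count; ++i) {
    if (maps[i] == receiver_map) return handlers[i];  // tail call on match
  }
  return miss_handler;  // no cached map matched: generic miss
}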
3116 
3117 // Specialized stub for constructing objects from functions which have only
3118 // simple assignments of the form this.x = ...; in their body.
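// For example (illustrative), a constructor like
//   function Point(x, y) { this.x = x; this.y = y; }
// qualifies: every statement assigns an argument or a constant to a
// property of 'this', so the stub below can allocate the object and copy
// the arguments (or undefined, when too few were passed) directly into the
// in-object fields without ever running the function's code.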
3119 Handle<Code> ConstructStubCompiler::CompileConstructStub(
3120  Handle<JSFunction> function) {
3121  // ----------- S t a t e -------------
3122  // -- rax : argc
3123  // -- rdi : constructor
3124  // -- rsp[0] : return address
3125  // -- rsp[8] : last argument
3126  // -----------------------------------
3127  Label generic_stub_call;
3128 
3129  // Use r8 for holding undefined which is used in several places below.
3130  __ Move(r8, factory()->undefined_value());
3131 
3132 #ifdef ENABLE_DEBUGGER_SUPPORT
3133  // Check to see whether there are any break points in the function code. If
3134  // there are, jump to the generic constructor stub, which calls the actual
3135  // code for the function thereby hitting the break points.
3136  __ movq(rbx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
3137  __ movq(rbx, FieldOperand(rbx, SharedFunctionInfo::kDebugInfoOffset));
3138  __ cmpq(rbx, r8);
3139  __ j(not_equal, &generic_stub_call);
3140 #endif
3141 
3142  // Load the initial map and verify that it is in fact a map.
3143  __ movq(rbx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset));
3144  // The smi check below catches both a NULL value and a smi.
3145  STATIC_ASSERT(kSmiTag == 0);
3146  __ JumpIfSmi(rbx, &generic_stub_call);
3147  __ CmpObjectType(rbx, MAP_TYPE, rcx);
3148  __ j(not_equal, &generic_stub_call);
3149 
3150 #ifdef DEBUG
3151  // Cannot construct functions this way.
3152  // rdi: constructor
3153  // rbx: initial map
3154  __ CmpInstanceType(rbx, JS_FUNCTION_TYPE);
3155  __ Assert(not_equal, "Function constructed by construct stub.");
3156 #endif
3157 
3158  // Now allocate the JSObject in new space.
3159  // rdi: constructor
3160  // rbx: initial map
3161  __ movzxbq(rcx, FieldOperand(rbx, Map::kInstanceSizeOffset));
3162  __ shl(rcx, Immediate(kPointerSizeLog2));
3163  __ AllocateInNewSpace(rcx, rdx, rcx, no_reg,
3164  &generic_stub_call, NO_ALLOCATION_FLAGS);
3165 
3166  // Allocated the JSObject, now initialize the fields and add the heap tag.
3167  // rbx: initial map
3168  // rdx: JSObject (untagged)
3169  __ movq(Operand(rdx, JSObject::kMapOffset), rbx);
3170  __ Move(rbx, factory()->empty_fixed_array());
3171  __ movq(Operand(rdx, JSObject::kPropertiesOffset), rbx);
3172  __ movq(Operand(rdx, JSObject::kElementsOffset), rbx);
3173 
3174  // rax: argc
3175  // rdx: JSObject (untagged)
3176  // Load the address of the first in-object property into r9.
3177  __ lea(r9, Operand(rdx, JSObject::kHeaderSize));
3178  // Calculate the location of the first argument. The stack contains only the
3179  // return address on top of the argc arguments.
3180  __ lea(rcx, Operand(rsp, rax, times_pointer_size, 0));
3181 
3182  // rax: argc
3183  // rcx: first argument
3184  // rdx: JSObject (untagged)
3185  // r8: undefined
3186  // r9: first in-object property of the JSObject
3187  // Fill the initialized properties with a constant value or a passed argument
3188  // depending on the this.x = ...; assignment in the function.
3189  Handle<SharedFunctionInfo> shared(function->shared());
3190  for (int i = 0; i < shared->this_property_assignments_count(); i++) {
3191  if (shared->IsThisPropertyAssignmentArgument(i)) {
3192  // Check if the argument assigned to the property is actually passed.
3193  // If argument is not passed the property is set to undefined,
3194  // otherwise find it on the stack.
3195  int arg_number = shared->GetThisPropertyAssignmentArgument(i);
3196  __ movq(rbx, r8);
3197  __ cmpq(rax, Immediate(arg_number));
3198  __ cmovq(above, rbx, Operand(rcx, arg_number * -kPointerSize));
3199  // Store value in the property.
3200  __ movq(Operand(r9, i * kPointerSize), rbx);
3201  } else {
3202  // Set the property to the constant value.
3203  Handle<Object> constant(shared->GetThisPropertyAssignmentConstant(i));
3204  __ Move(Operand(r9, i * kPointerSize), constant);
3205  }
3206  }
3207 
3208  // Fill the unused in-object property fields with undefined.
3209  ASSERT(function->has_initial_map());
3210  for (int i = shared->this_property_assignments_count();
3211  i < function->initial_map()->inobject_properties();
3212  i++) {
3213  __ movq(Operand(r9, i * kPointerSize), r8);
3214  }
3215 
3216  // rax: argc
3217  // rdx: JSObject (untagged)
3218  // Move argc to rbx and the JSObject to return to rax and tag it.
3219  __ movq(rbx, rax);
3220  __ movq(rax, rdx);
3221  __ or_(rax, Immediate(kHeapObjectTag));
3222 
3223  // rax: JSObject
3224  // rbx: argc
3225  // Remove caller arguments and receiver from the stack and return.
3226  __ pop(rcx);
3227  __ lea(rsp, Operand(rsp, rbx, times_pointer_size, 1 * kPointerSize));
3228  __ push(rcx);
3229  Counters* counters = isolate()->counters();
3230  __ IncrementCounter(counters->constructed_objects(), 1);
3231  __ IncrementCounter(counters->constructed_objects_stub(), 1);
3232  __ ret(0);
3233 
3234  // Jump to the generic stub in case the specialized code cannot handle the
3235  // construction.
3236  __ bind(&generic_stub_call);
3237  Handle<Code> code = isolate()->builtins()->JSConstructStubGeneric();
3238  __ Jump(code, RelocInfo::CODE_TARGET);
3239 
3240  // Return the generated code.
3241  return GetCode();
3242 }
3243 
3244 
3245 #undef __
3246 #define __ ACCESS_MASM(masm)
3247 
3248 
3249 void KeyedLoadStubCompiler::GenerateLoadDictionaryElement(
3250  MacroAssembler* masm) {
3251  // ----------- S t a t e -------------
3252  // -- rax : key
3253  // -- rdx : receiver
3254  // -- rsp[0] : return address
3255  // -----------------------------------
3256  Label slow, miss_force_generic;
3257 
3258  // This stub is meant to be tail-jumped to; the receiver must already
3259  // have been verified by the caller to not be a smi.
3260 
3261  __ JumpIfNotSmi(rax, &miss_force_generic);
3262  __ SmiToInteger32(rbx, rax);
3263  __ movq(rcx, FieldOperand(rdx, JSObject::kElementsOffset));
3264 
3265  // Check whether the elements is a number dictionary.
3266  // rdx: receiver
3267  // rax: key
3268  // rbx: key as untagged int32
3269  // rcx: elements
3270  __ LoadFromNumberDictionary(&slow, rcx, rax, rbx, r9, rdi, rax);
3271  __ ret(0);
3272 
3273  __ bind(&slow);
3274  // ----------- S t a t e -------------
3275  // -- rax : key
3276  // -- rdx : receiver
3277  // -- rsp[0] : return address
3278  // -----------------------------------
3279  Handle<Code> slow_ic =
3280  masm->isolate()->builtins()->KeyedLoadIC_Slow();
3281  __ jmp(slow_ic, RelocInfo::CODE_TARGET);
3282 
3283  __ bind(&miss_force_generic);
3284  // ----------- S t a t e -------------
3285  // -- rax : key
3286  // -- rdx : receiver
3287  // -- rsp[0] : return address
3288  // -----------------------------------
3289  Handle<Code> miss_ic =
3290  masm->isolate()->builtins()->KeyedLoadIC_MissForceGeneric();
3291  __ jmp(miss_ic, RelocInfo::CODE_TARGET);
3292 }
3293 
3294 
3295 static void GenerateSmiKeyCheck(MacroAssembler* masm,
3296  Register key,
3297  Register scratch,
3298  XMMRegister xmm_scratch0,
3299  XMMRegister xmm_scratch1,
3300  Label* fail) {
3301  // Check that key is a smi or a heap number containing a smi and branch
3302  // if the check fails.
3303  Label key_ok;
3304  __ JumpIfSmi(key, &key_ok);
3305  __ CheckMap(key,
3306  masm->isolate()->factory()->heap_number_map(),
3307  fail,
3308  DONT_DO_SMI_CHECK);
3309  __ movsd(xmm_scratch0, FieldOperand(key, HeapNumber::kValueOffset));
3310  __ cvttsd2si(scratch, xmm_scratch0);
3311  __ cvtlsi2sd(xmm_scratch1, scratch);
3312  __ ucomisd(xmm_scratch1, xmm_scratch0);
3313  __ j(not_equal, fail);
3314  __ j(parity_even, fail); // NaN.
3315  __ Integer32ToSmi(key, scratch);
3316  __ bind(&key_ok);
3317 }
3318 
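// --- Editorial sketch, not part of the original file ---
// GenerateSmiKeyCheck accepts a heap-number key only when truncating it to
// an integer and converting back reproduces the original value; NaN fails
// the final compare (the assembly tests the parity flag for that). Roughly:
static inline bool DoubleIsSmiKeySketch(double key, int* out) {
  int truncated = static_cast<int>(key);  // like cvttsd2si; the hardware
                                          // yields INT_MIN when out of range
  if (static_cast<double>(truncated) != key) return false;  // also for NaN
  *out = truncated;
  return true;
}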
3319 
3320 void KeyedLoadStubCompiler::GenerateLoadExternalArray(
3321  MacroAssembler* masm,
3322  ElementsKind elements_kind) {
3323  // ----------- S t a t e -------------
3324  // -- rax : key
3325  // -- rdx : receiver
3326  // -- rsp[0] : return address
3327  // -----------------------------------
3328  Label slow, miss_force_generic;
3329 
3330  // This stub is meant to be tail-jumped to; the receiver must already
3331  // have been verified by the caller to not be a smi.
3332 
3333  // Check that the key is a smi or a heap number convertible to a smi.
3334  GenerateSmiKeyCheck(masm, rax, rcx, xmm0, xmm1, &miss_force_generic);
3335 
3336  // Check that the index is in range.
3337  __ movq(rbx, FieldOperand(rdx, JSObject::kElementsOffset));
3338  __ SmiToInteger32(rcx, rax);
3339  __ cmpq(rax, FieldOperand(rbx, ExternalArray::kLengthOffset));
3340  // Unsigned comparison catches both negative and too-large values.
3341  __ j(above_equal, &miss_force_generic);
3342 
3343  // rax: index (as a smi)
3344  // rdx: receiver (JSObject)
3345  // rcx: untagged index
3346  // rbx: elements array
3347  __ movq(rbx, FieldOperand(rbx, ExternalArray::kExternalPointerOffset));
3348  // rbx: base pointer of external storage
3349  switch (elements_kind) {
3350  case EXTERNAL_BYTE_ELEMENTS:
3351  __ movsxbq(rcx, Operand(rbx, rcx, times_1, 0));
3352  break;
3353  case EXTERNAL_PIXEL_ELEMENTS:
3354  case EXTERNAL_UNSIGNED_BYTE_ELEMENTS:
3355  __ movzxbq(rcx, Operand(rbx, rcx, times_1, 0));
3356  break;
3357  case EXTERNAL_SHORT_ELEMENTS:
3358  __ movsxwq(rcx, Operand(rbx, rcx, times_2, 0));
3359  break;
3360  case EXTERNAL_UNSIGNED_SHORT_ELEMENTS:
3361  __ movzxwq(rcx, Operand(rbx, rcx, times_2, 0));
3362  break;
3363  case EXTERNAL_INT_ELEMENTS:
3364  __ movsxlq(rcx, Operand(rbx, rcx, times_4, 0));
3365  break;
3366  case EXTERNAL_UNSIGNED_INT_ELEMENTS:
3367  __ movl(rcx, Operand(rbx, rcx, times_4, 0));
3368  break;
3369  case EXTERNAL_FLOAT_ELEMENTS:
3370  __ cvtss2sd(xmm0, Operand(rbx, rcx, times_4, 0));
3371  break;
3372  case EXTERNAL_DOUBLE_ELEMENTS:
3373  __ movsd(xmm0, Operand(rbx, rcx, times_8, 0));
3374  break;
3375  default:
3376  UNREACHABLE();
3377  break;
3378  }
3379 
3380  // rax: index
3381  // rdx: receiver
3382  // For integer array types:
3383  // rcx: value
3384  // For floating-point array type:
3385  // xmm0: value as double.
3386 
3387  ASSERT(kSmiValueSize == 32);
3388  if (elements_kind == EXTERNAL_UNSIGNED_INT_ELEMENTS) {
3389  // For the UnsignedInt array type, we need to see whether
3390  // the value can be represented in a Smi. If not, we need to convert
3391  // it to a HeapNumber.
3392  Label box_int;
3393 
3394  __ JumpIfUIntNotValidSmiValue(rcx, &box_int, Label::kNear);
3395 
3396  __ Integer32ToSmi(rax, rcx);
3397  __ ret(0);
3398 
3399  __ bind(&box_int);
3400 
3401  // Allocate a HeapNumber for the int and perform int-to-double
3402  // conversion.
3403  // The value is zero-extended since we loaded the value from memory
3404  // with movl.
3405  __ cvtqsi2sd(xmm0, rcx);
3406 
3407  __ AllocateHeapNumber(rcx, rbx, &slow);
3408  // Set the value.
3409  __ movsd(FieldOperand(rcx, HeapNumber::kValueOffset), xmm0);
3410  __ movq(rax, rcx);
3411  __ ret(0);
3412  } else if (elements_kind == EXTERNAL_FLOAT_ELEMENTS ||
3413  elements_kind == EXTERNAL_DOUBLE_ELEMENTS) {
3414  // For the floating-point array type, we need to always allocate a
3415  // HeapNumber.
3416  __ AllocateHeapNumber(rcx, rbx, &slow);
3417  // Set the value.
3418  __ movsd(FieldOperand(rcx, HeapNumber::kValueOffset), xmm0);
3419  __ movq(rax, rcx);
3420  __ ret(0);
3421  } else {
3422  __ Integer32ToSmi(rax, rcx);
3423  __ ret(0);
3424  }
3425 
3426  // Slow case: Jump to runtime.
3427  __ bind(&slow);
3428  Counters* counters = masm->isolate()->counters();
3429  __ IncrementCounter(counters->keyed_load_external_array_slow(), 1);
3430 
3431  // ----------- S t a t e -------------
3432  // -- rax : key
3433  // -- rdx : receiver
3434  // -- rsp[0] : return address
3435  // -----------------------------------
3436 
3437  Handle<Code> ic = masm->isolate()->builtins()->KeyedLoadIC_Slow();
3438  __ jmp(ic, RelocInfo::CODE_TARGET);
3439 
3440  // Miss case: Jump to runtime.
3441  __ bind(&miss_force_generic);
3442 
3443  // ----------- S t a t e -------------
3444  // -- rax : key
3445  // -- rdx : receiver
3446  // -- rsp[0] : return address
3447  // -----------------------------------
3448  Handle<Code> miss_ic =
3449  masm->isolate()->builtins()->KeyedLoadIC_MissForceGeneric();
3450  __ jmp(miss_ic, RelocInfo::CODE_TARGET);
3451 }
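// Aside (editor's sketch, not from this file): with a 32-bit smi payload,
// every int8/uint8/int16/uint16/int32 load fits in a smi, float and double
// loads always box into a HeapNumber, and uint32 is the one integer kind
// that may overflow the smi range. The predicate behind
// JumpIfUIntNotValidSmiValue is effectively:
static inline bool SimplifiedFitsInSmi(unsigned value) {
  return value <= 0x7FFFFFFFu;  // values >= 2^31 must be boxed
}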
3452 
3453 
3454 void KeyedStoreStubCompiler::GenerateStoreExternalArray(
3455  MacroAssembler* masm,
3456  ElementsKind elements_kind) {
3457  // ----------- S t a t e -------------
3458  // -- rax : value
3459  // -- rcx : key
3460  // -- rdx : receiver
3461  // -- rsp[0] : return address
3462  // -----------------------------------
3463  Label slow, miss_force_generic;
3464 
3465  // This stub is meant to be tail-jumped to; the receiver must already
3466  // have been verified by the caller to not be a smi.
3467 
3468  // Check that the key is a smi or a heap number convertible to a smi.
3469  GenerateSmiKeyCheck(masm, rcx, rbx, xmm0, xmm1, &miss_force_generic);
3470 
3471  // Check that the index is in range.
3472  __ movq(rbx, FieldOperand(rdx, JSObject::kElementsOffset));
3473  __ SmiToInteger32(rdi, rcx); // Untag the index.
3474  __ cmpq(rcx, FieldOperand(rbx, ExternalArray::kLengthOffset));
3475  // Unsigned comparison catches both negative and too-large values.
3476  __ j(above_equal, &miss_force_generic);
3477 
3478  // Handle both smis and HeapNumbers in the fast path. Go to the
3479  // runtime for all other kinds of values.
3480  // rax: value
3481  // rcx: key (a smi)
3482  // rdx: receiver (a JSObject)
3483  // rbx: elements array
3484  // rdi: untagged key
3485  Label check_heap_number;
3486  if (elements_kind == EXTERNAL_PIXEL_ELEMENTS) {
3487  // Float to pixel conversion is only implemented in the runtime for now.
3488  __ JumpIfNotSmi(rax, &slow);
3489  } else {
3490  __ JumpIfNotSmi(rax, &check_heap_number, Label::kNear);
3491  }
3492  // No more branches to slow case on this path. Key and receiver not needed.
3493  __ SmiToInteger32(rdx, rax);
3494  __ movq(rbx, FieldOperand(rbx, ExternalArray::kExternalPointerOffset));
3495  // rbx: base pointer of external storage
3496  switch (elements_kind) {
3497  case EXTERNAL_PIXEL_ELEMENTS:
3498  { // Clamp the value to [0..255].
3499  Label done;
3500  __ testl(rdx, Immediate(0xFFFFFF00));
3501  __ j(zero, &done, Label::kNear);
3502  __ setcc(negative, rdx); // 1 if negative, 0 if positive.
3503  __ decb(rdx); // 0 if negative, 255 if positive.
3504  __ bind(&done);
3505  }
3506  __ movb(Operand(rbx, rdi, times_1, 0), rdx);
3507  break;
3508  case EXTERNAL_BYTE_ELEMENTS:
3509  case EXTERNAL_UNSIGNED_BYTE_ELEMENTS:
3510  __ movb(Operand(rbx, rdi, times_1, 0), rdx);
3511  break;
3512  case EXTERNAL_SHORT_ELEMENTS:
3513  case EXTERNAL_UNSIGNED_SHORT_ELEMENTS:
3514  __ movw(Operand(rbx, rdi, times_2, 0), rdx);
3515  break;
3516  case EXTERNAL_INT_ELEMENTS:
3517  case EXTERNAL_UNSIGNED_INT_ELEMENTS:
3518  __ movl(Operand(rbx, rdi, times_4, 0), rdx);
3519  break;
3520  case EXTERNAL_FLOAT_ELEMENTS:
3521  // Need to perform int-to-float conversion.
3522  __ cvtlsi2ss(xmm0, rdx);
3523  __ movss(Operand(rbx, rdi, times_4, 0), xmm0);
3524  break;
3525  case EXTERNAL_DOUBLE_ELEMENTS:
3526  // Need to perform int-to-double conversion.
3527  __ cvtlsi2sd(xmm0, rdx);
3528  __ movsd(Operand(rbx, rdi, times_8, 0), xmm0);
3529  break;
3530  case FAST_ELEMENTS:
3531  case FAST_SMI_ELEMENTS:
3532  case FAST_DOUBLE_ELEMENTS:
3533  case FAST_HOLEY_ELEMENTS:
3534  case FAST_HOLEY_SMI_ELEMENTS:
3535  case FAST_HOLEY_DOUBLE_ELEMENTS:
3536  case DICTIONARY_ELEMENTS:
3537  case NON_STRICT_ARGUMENTS_ELEMENTS:
3538  UNREACHABLE();
3539  break;
3540  }
3541  __ ret(0);
3542 
3543  // TODO(danno): handle heap number -> pixel array conversion
3544  if (elements_kind != EXTERNAL_PIXEL_ELEMENTS) {
3545  __ bind(&check_heap_number);
3546  // rax: value
3547  // rcx: key (a smi)
3548  // rdx: receiver (a JSObject)
3549  // rbx: elements array
3550  // rdi: untagged key
3551  __ CmpObjectType(rax, HEAP_NUMBER_TYPE, kScratchRegister);
3552  __ j(not_equal, &slow);
3553  // No more branches to slow case on this path.
3554 
3555  // The WebGL specification leaves the behavior of storing NaN and
3556  // +/-Infinity into integer arrays basically undefined. For more
3557  // reproducible behavior, convert these to zero.
3558  __ movsd(xmm0, FieldOperand(rax, HeapNumber::kValueOffset));
3559  __ movq(rbx, FieldOperand(rbx, ExternalArray::kExternalPointerOffset));
3560  // rdi: untagged index
3561  // rbx: base pointer of external storage
3562  // xmm0: value (double)
3563  if (elements_kind == EXTERNAL_FLOAT_ELEMENTS) {
3564  __ cvtsd2ss(xmm0, xmm0);
3565  __ movss(Operand(rbx, rdi, times_4, 0), xmm0);
3566  __ ret(0);
3567  } else if (elements_kind == EXTERNAL_DOUBLE_ELEMENTS) {
3568  __ movsd(Operand(rbx, rdi, times_8, 0), xmm0);
3569  __ ret(0);
3570  } else {
3571  // Perform float-to-int conversion with truncation (round-to-zero)
3572  // behavior.
3573  // Fast path: use machine instruction to convert to int64. If that
3574  // fails (out-of-range), go into the runtime.
3575  __ cvttsd2siq(r8, xmm0);
3576  __ Set(kScratchRegister, V8_UINT64_C(0x8000000000000000));
3577  __ cmpq(r8, kScratchRegister);
3578  __ j(equal, &slow);
3579 
3580  // r8: value (converted to an untagged integer)
3581  // rdi: untagged index
3582  // rbx: base pointer of external storage
3583  switch (elements_kind) {
3584  case EXTERNAL_BYTE_ELEMENTS:
3585  case EXTERNAL_UNSIGNED_BYTE_ELEMENTS:
3586  __ movb(Operand(rbx, rdi, times_1, 0), r8);
3587  break;
3588  case EXTERNAL_SHORT_ELEMENTS:
3589  case EXTERNAL_UNSIGNED_SHORT_ELEMENTS:
3590  __ movw(Operand(rbx, rdi, times_2, 0), r8);
3591  break;
3592  case EXTERNAL_INT_ELEMENTS:
3593  case EXTERNAL_UNSIGNED_INT_ELEMENTS:
3594  __ movl(Operand(rbx, rdi, times_4, 0), r8);
3595  break;
3596  case EXTERNAL_PIXEL_ELEMENTS:
3597  case EXTERNAL_FLOAT_ELEMENTS:
3598  case EXTERNAL_DOUBLE_ELEMENTS:
3599  case FAST_ELEMENTS:
3600  case FAST_SMI_ELEMENTS:
3601  case FAST_DOUBLE_ELEMENTS:
3602  case FAST_HOLEY_ELEMENTS:
3603  case FAST_HOLEY_SMI_ELEMENTS:
3604  case FAST_HOLEY_DOUBLE_ELEMENTS:
3605  case DICTIONARY_ELEMENTS:
3606  case NON_STRICT_ARGUMENTS_ELEMENTS:
3607  UNREACHABLE();
3608  break;
3609  }
3610  __ ret(0);
3611  }
3612  }
3613 
3614  // Slow case: call runtime.
3615  __ bind(&slow);
3616 
3617  // ----------- S t a t e -------------
3618  // -- rax : value
3619  // -- rcx : key
3620  // -- rdx : receiver
3621  // -- rsp[0] : return address
3622  // -----------------------------------
3623 
3624  Handle<Code> ic = masm->isolate()->builtins()->KeyedStoreIC_Slow();
3625  __ jmp(ic, RelocInfo::CODE_TARGET);
3626 
3627  // Miss case: call runtime.
3628  __ bind(&miss_force_generic);
3629 
3630  // ----------- S t a t e -------------
3631  // -- rax : value
3632  // -- rcx : key
3633  // -- rdx : receiver
3634  // -- rsp[0] : return address
3635  // -----------------------------------
3636 
3637  Handle<Code> miss_ic =
3638  masm->isolate()->builtins()->KeyedStoreIC_MissForceGeneric();
3639  __ jmp(miss_ic, RelocInfo::CODE_TARGET);
3640 }
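// Aside (editor's sketch, not from this file): the pixel-store clamp above
// (testl/setcc/decb) is a branch-light clamp of an int32 into [0..255].
// Equivalent C++ (the helper name is hypothetical):
static inline int SimplifiedClampToUint8(int value) {
  if ((value & ~0xFF) == 0) return value;  // testl: already in range
  // setcc(negative) produces 1 for negative input, 0 otherwise;
  // decb then turns that into 0 (negative) or 255 (too large).
  return (value < 0) ? 0 : 255;
}
// The heap-number store path relies on cvttsd2siq returning the sentinel
// 0x8000000000000000 for NaN, +/-Infinity, and out-of-range doubles, which
// is exactly the case the stub hands off to the runtime.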
3641 
3642 
3643 void KeyedLoadStubCompiler::GenerateLoadFastElement(MacroAssembler* masm) {
3644  // ----------- S t a t e -------------
3645  // -- rax : key
3646  // -- rdx : receiver
3647  // -- rsp[0] : return address
3648  // -----------------------------------
3649  Label miss_force_generic;
3650 
3651  // This stub is meant to be tail-jumped to; the receiver must already
3652  // have been verified by the caller to not be a smi.
3653 
3654  // Check that the key is a smi or a heap number convertible to a smi.
3655  GenerateSmiKeyCheck(masm, rax, rcx, xmm0, xmm1, &miss_force_generic);
3656 
3657  // Get the elements array.
3658  __ movq(rcx, FieldOperand(rdx, JSObject::kElementsOffset));
3659  __ AssertFastElements(rcx);
3660 
3661  // Check that the key is within bounds.
3662  __ SmiCompare(rax, FieldOperand(rcx, FixedArray::kLengthOffset));
3663  __ j(above_equal, &miss_force_generic);
3664 
3665  // Load the result and make sure it's not the hole.
3666  SmiIndex index = masm->SmiToIndex(rbx, rax, kPointerSizeLog2);
3667  __ movq(rbx, FieldOperand(rcx,
3668  index.reg,
3669  index.scale,
3670  FixedArray::kHeaderSize));
3671  __ CompareRoot(rbx, Heap::kTheHoleValueRootIndex);
3672  __ j(equal, &miss_force_generic);
3673  __ movq(rax, rbx);
3674  __ ret(0);
3675 
3676  __ bind(&miss_force_generic);
3677  Code* code = masm->isolate()->builtins()->builtin(
3678  Builtins::kKeyedLoadIC_MissForceGeneric);
3679  Handle<Code> ic(code);
3680  __ jmp(ic, RelocInfo::CODE_TARGET);
3681 }
3682 
3683 
3684 void KeyedLoadStubCompiler::GenerateLoadFastDoubleElement(
3685  MacroAssembler* masm) {
3686  // ----------- S t a t e -------------
3687  // -- rax : key
3688  // -- rdx : receiver
3689  // -- rsp[0] : return address
3690  // -----------------------------------
3691  Label miss_force_generic, slow_allocate_heapnumber;
3692 
3693  // This stub is meant to be tail-jumped to; the receiver must already
3694  // have been verified by the caller to not be a smi.
3695 
3696  // Check that the key is a smi or a heap number convertible to a smi.
3697  GenerateSmiKeyCheck(masm, rax, rcx, xmm0, xmm1, &miss_force_generic);
3698 
3699  // Get the elements array.
3700  __ movq(rcx, FieldOperand(rdx, JSObject::kElementsOffset));
3701  __ AssertFastElements(rcx);
3702 
3703  // Check that the key is within bounds.
3704  __ SmiCompare(rax, FieldOperand(rcx, FixedDoubleArray::kLengthOffset));
3705  __ j(above_equal, &miss_force_generic);
3706 
3707  // Check for the hole
3708  __ SmiToInteger32(kScratchRegister, rax);
3709  uint32_t offset = FixedDoubleArray::kHeaderSize + sizeof(kHoleNanLower32);
3710  __ cmpl(FieldOperand(rcx, kScratchRegister, times_8, offset),
3711  Immediate(kHoleNanUpper32));
3712  __ j(equal, &miss_force_generic);
3713 
3714  // Always allocate a heap number for the result.
3715  __ movsd(xmm0, FieldOperand(rcx, kScratchRegister, times_8,
3716  FixedDoubleArray::kHeaderSize));
3717  __ AllocateHeapNumber(rcx, rbx, &slow_allocate_heapnumber);
3718  // Set the value.
3719  __ movq(rax, rcx);
3720  __ movsd(FieldOperand(rax, HeapNumber::kValueOffset), xmm0);
3721  __ ret(0);
3722 
3723  __ bind(&slow_allocate_heapnumber);
3724  Handle<Code> slow_ic =
3725  masm->isolate()->builtins()->KeyedLoadIC_Slow();
3726  __ jmp(slow_ic, RelocInfo::CODE_TARGET);
3727 
3728  __ bind(&miss_force_generic);
3729  Handle<Code> miss_ic =
3730  masm->isolate()->builtins()->KeyedLoadIC_MissForceGeneric();
3731  __ jmp(miss_ic, RelocInfo::CODE_TARGET);
3732 }
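// Aside (editor's sketch, not from this file): a hole in a FixedDoubleArray
// is a reserved NaN bit pattern, so only the upper 32 bits of the element
// need to be compared against kHoleNanUpper32 (hence the cmpl at offset
// kHeaderSize + sizeof(kHoleNanLower32) above). Equivalent C++, assuming the
// 64-bit element has been read into an integer:
static inline bool SimplifiedIsHole(unsigned long long element_bits,
                                    unsigned hole_nan_upper32) {
  return static_cast<unsigned>(element_bits >> 32) == hole_nan_upper32;
}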
3733 
3734 
3735 void KeyedStoreStubCompiler::GenerateStoreFastElement(
3736  MacroAssembler* masm,
3737  bool is_js_array,
3738  ElementsKind elements_kind,
3739  KeyedAccessGrowMode grow_mode) {
3740  // ----------- S t a t e -------------
3741  // -- rax : value
3742  // -- rcx : key
3743  // -- rdx : receiver
3744  // -- rsp[0] : return address
3745  // -----------------------------------
3746  Label miss_force_generic, transition_elements_kind, finish_store, grow;
3747  Label check_capacity, slow;
3748 
3749  // This stub is meant to be tail-jumped to; the receiver must already
3750  // have been verified by the caller to not be a smi.
3751 
3752  // Check that the key is a smi or a heap number convertible to a smi.
3753  GenerateSmiKeyCheck(masm, rcx, rbx, xmm0, xmm1, &miss_force_generic);
3754 
3755  if (IsFastSmiElementsKind(elements_kind)) {
3756  __ JumpIfNotSmi(rax, &transition_elements_kind);
3757  }
3758 
3759  // Get the elements array and make sure it is a fast elements array, not a copy-on-write ('cow') array.
3760  __ movq(rdi, FieldOperand(rdx, JSObject::kElementsOffset));
3761  // Check that the key is within bounds.
3762  if (is_js_array) {
3763  __ SmiCompare(rcx, FieldOperand(rdx, JSArray::kLengthOffset));
3764  if (grow_mode == ALLOW_JSARRAY_GROWTH) {
3765  __ j(above_equal, &grow);
3766  } else {
3767  __ j(above_equal, &miss_force_generic);
3768  }
3769  } else {
3770  __ SmiCompare(rcx, FieldOperand(rdi, FixedArray::kLengthOffset));
3771  __ j(above_equal, &miss_force_generic);
3772  }
3773 
3774  __ CompareRoot(FieldOperand(rdi, HeapObject::kMapOffset),
3775  Heap::kFixedArrayMapRootIndex);
3776  __ j(not_equal, &miss_force_generic);
3777 
3778  __ bind(&finish_store);
3779  if (IsFastSmiElementsKind(elements_kind)) {
3780  __ SmiToInteger32(rcx, rcx);
3781  __ movq(FieldOperand(rdi, rcx, times_pointer_size, FixedArray::kHeaderSize),
3782  rax);
3783  } else {
3784  // Do the store and update the write barrier.
3785  ASSERT(IsFastObjectElementsKind(elements_kind));
3786  __ SmiToInteger32(rcx, rcx);
3787  __ lea(rcx,
3788  FieldOperand(rdi, rcx, times_pointer_size, FixedArray::kHeaderSize));
3789  __ movq(Operand(rcx, 0), rax);
3790  // Make sure to preserve the value in register rax.
3791  __ movq(rbx, rax);
3792  __ RecordWrite(rdi, rcx, rbx, kDontSaveFPRegs);
3793  }
3794 
3795  // Done.
3796  __ ret(0);
3797 
3798  // Handle store cache miss.
3799  __ bind(&miss_force_generic);
3800  Handle<Code> ic_force_generic =
3801  masm->isolate()->builtins()->KeyedStoreIC_MissForceGeneric();
3802  __ jmp(ic_force_generic, RelocInfo::CODE_TARGET);
3803 
3804  __ bind(&transition_elements_kind);
3805  Handle<Code> ic_miss = masm->isolate()->builtins()->KeyedStoreIC_Miss();
3806  __ jmp(ic_miss, RelocInfo::CODE_TARGET);
3807 
3808  if (is_js_array && grow_mode == ALLOW_JSARRAY_GROWTH) {
3809  // Grow the array by a single element if possible.
3810  __ bind(&grow);
3811 
3812  // Make sure the array is only growing by a single element; anything else
3813  // must be handled by the runtime. Flags are already set by the previous
3814  // compare.
3815  __ j(not_equal, &miss_force_generic);
3816 
3817  // Check for the empty array, and preallocate a small backing store if
3818  // possible.
3819  __ movq(rdi, FieldOperand(rdx, JSObject::kElementsOffset));
3820  __ CompareRoot(rdi, Heap::kEmptyFixedArrayRootIndex);
3821  __ j(not_equal, &check_capacity);
3822 
3823  int size = FixedArray::SizeFor(JSArray::kPreallocatedArrayElements);
3824  __ AllocateInNewSpace(size, rdi, rbx, r8, &slow, TAG_OBJECT);
3825 
3826  // rax: value
3827  // rcx: key
3828  // rdx: receiver
3829  // rdi: elements
3830  // Make sure that the backing store can hold additional elements.
3831  __ Move(FieldOperand(rdi, HeapObject::kMapOffset),
3832  masm->isolate()->factory()->fixed_array_map());
3833  __ Move(FieldOperand(rdi, FixedArray::kLengthOffset),
3834  Smi::FromInt(JSArray::kPreallocatedArrayElements));
3835  __ LoadRoot(rbx, Heap::kTheHoleValueRootIndex);
3836  for (int i = 1; i < JSArray::kPreallocatedArrayElements; ++i) {
3837  __ movq(FieldOperand(rdi, FixedArray::SizeFor(i)), rbx);
3838  }
3839 
3840  // Store the element at index zero.
3841  __ movq(FieldOperand(rdi, FixedArray::SizeFor(0)), rax);
3842 
3843  // Install the new backing store in the JSArray.
3844  __ movq(FieldOperand(rdx, JSObject::kElementsOffset), rdi);
3845  __ RecordWriteField(rdx, JSObject::kElementsOffset, rdi, rbx,
3846  kDontSaveFPRegs, EMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
3847 
3848  // Increment the length of the array.
3849  __ Move(FieldOperand(rdx, JSArray::kLengthOffset), Smi::FromInt(1));
3850  __ ret(0);
3851 
3852  __ bind(&check_capacity);
3853  // Check for COW (copy-on-write) elements; in general they are not handled by this stub.
3854  __ CompareRoot(FieldOperand(rdi, HeapObject::kMapOffset),
3855  Heap::kFixedCOWArrayMapRootIndex);
3856  __ j(equal, &miss_force_generic);
3857 
3858  // rax: value
3859  // rcx: key
3860  // rdx: receiver
3861  // rdi: elements
3862  // Make sure that the backing store can hold additional elements.
3863  __ SmiCompare(rcx, FieldOperand(rdi, FixedArray::kLengthOffset));
3864  __ j(above_equal, &slow);
3865 
3866  // Grow the array and finish the store.
3867  __ SmiAddConstant(FieldOperand(rdx, JSArray::kLengthOffset),
3868  Smi::FromInt(1));
3869  __ jmp(&finish_store);
3870 
3871  __ bind(&slow);
3872  Handle<Code> ic_slow = masm->isolate()->builtins()->KeyedStoreIC_Slow();
3873  __ jmp(ic_slow, RelocInfo::CODE_TARGET);
3874  }
3875 }
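// Aside (editor's sketch, not from this file): the grow path above only
// services appends of exactly one element; the bounds check has already set
// the flags, so j(not_equal) filters out any key beyond the current length:
static inline bool SimplifiedMayGrowInStub(unsigned key, unsigned length) {
  return key == length;  // append by one; anything larger goes to the runtime
}
// For an array still backed by the empty fixed array, the stub preallocates
// JSArray::kPreallocatedArrayElements slots and hole-fills all but slot 0,
// so a short run of subsequent appends stays on this fast path.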
3876 
3877 
3878 void KeyedStoreStubCompiler::GenerateStoreFastDoubleElement(
3879  MacroAssembler* masm,
3880  bool is_js_array,
3881  KeyedAccessGrowMode grow_mode) {
3882  // ----------- S t a t e -------------
3883  // -- rax : value
3884  // -- rcx : key
3885  // -- rdx : receiver
3886  // -- rsp[0] : return address
3887  // -----------------------------------
3888  Label miss_force_generic, transition_elements_kind, finish_store;
3889  Label grow, slow, check_capacity;
3890 
3891  // This stub is meant to be tail-jumped to; the receiver must already
3892  // have been verified by the caller to not be a smi.
3893 
3894  // Check that the key is a smi or a heap number convertible to a smi.
3895  GenerateSmiKeyCheck(masm, rcx, rbx, xmm0, xmm1, &miss_force_generic);
3896 
3897  // Get the elements array.
3898  __ movq(rdi, FieldOperand(rdx, JSObject::kElementsOffset));
3899  __ AssertFastElements(rdi);
3900 
3901  // Check that the key is within bounds.
3902  if (is_js_array) {
3903  __ SmiCompare(rcx, FieldOperand(rdx, JSArray::kLengthOffset));
3904  if (grow_mode == ALLOW_JSARRAY_GROWTH) {
3905  __ j(above_equal, &grow);
3906  } else {
3907  __ j(above_equal, &miss_force_generic);
3908  }
3909  } else {
3910  __ SmiCompare(rcx, FieldOperand(rdi, FixedDoubleArray::kLengthOffset));
3911  __ j(above_equal, &miss_force_generic);
3912  }
3913 
3914  // Handle smi values specially
3915  __ bind(&finish_store);
3916  __ SmiToInteger32(rcx, rcx);
3917  __ StoreNumberToDoubleElements(rax, rdi, rcx, xmm0,
3918  &transition_elements_kind);
3919  __ ret(0);
3920 
3921  // Handle store cache miss, replacing the ic with the generic stub.
3922  __ bind(&miss_force_generic);
3923  Handle<Code> ic_force_generic =
3924  masm->isolate()->builtins()->KeyedStoreIC_MissForceGeneric();
3925  __ jmp(ic_force_generic, RelocInfo::CODE_TARGET);
3926 
3927  __ bind(&transition_elements_kind);
3928  // Restore smi-tagging of rcx.
3929  __ Integer32ToSmi(rcx, rcx);
3930  Handle<Code> ic_miss = masm->isolate()->builtins()->KeyedStoreIC_Miss();
3931  __ jmp(ic_miss, RelocInfo::CODE_TARGET);
3932 
3933  if (is_js_array && grow_mode == ALLOW_JSARRAY_GROWTH) {
3934  // Grow the array by a single element if possible.
3935  __ bind(&grow);
3936 
3937  // Make sure the array is only growing by a single element; anything else
3938  // must be handled by the runtime. Flags are already set by the previous
3939  // compare.
3940  __ j(not_equal, &miss_force_generic);
3941 
3942  // Transition on values that can't be stored in a FixedDoubleArray.
3943  Label value_is_smi;
3944  __ JumpIfSmi(rax, &value_is_smi);
3945  __ CompareRoot(FieldOperand(rax, HeapObject::kMapOffset),
3946  Heap::kHeapNumberMapRootIndex);
3947  __ j(not_equal, &transition_elements_kind);
3948  __ bind(&value_is_smi);
3949 
3950  // Check for the empty array, and preallocate a small backing store if
3951  // possible.
3952  __ movq(rdi, FieldOperand(rdx, JSObject::kElementsOffset));
3953  __ CompareRoot(rdi, Heap::kEmptyFixedArrayRootIndex);
3954  __ j(not_equal, &check_capacity);
3955 
3956  int size = FixedDoubleArray::SizeFor(JSArray::kPreallocatedArrayElements);
3957  __ AllocateInNewSpace(size, rdi, rbx, r8, &slow, TAG_OBJECT);
3958 
3959  // rax: value
3960  // rcx: key
3961  // rdx: receiver
3962  // rdi: elements
3963  // Initialize the new FixedDoubleArray. Leave elements uninitialized for
3964  // efficiency; they are guaranteed to be initialized before use.
3965  __ Move(FieldOperand(rdi, HeapObject::kMapOffset),
3966  masm->isolate()->factory()->fixed_double_array_map());
3967  __ Move(FieldOperand(rdi, FixedDoubleArray::kLengthOffset),
3968  Smi::FromInt(JSArray::kPreallocatedArrayElements));
3969 
3970  // Install the new backing store in the JSArray.
3971  __ movq(FieldOperand(rdx, JSObject::kElementsOffset), rdi);
3972  __ RecordWriteField(rdx, JSObject::kElementsOffset, rdi, rbx,
3973  kDontSaveFPRegs, EMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
3974 
3975  // Increment the length of the array.
3976  __ Move(FieldOperand(rdx, JSArray::kLengthOffset), Smi::FromInt(1));
3977  __ movq(rdi, FieldOperand(rdx, JSObject::kElementsOffset));
3978  __ jmp(&finish_store);
3979 
3980  __ bind(&check_capacity);
3981  // rax: value
3982  // rcx: key
3983  // rdx: receiver
3984  // rdi: elements
3985  // Make sure that the backing store can hold additional elements.
3986  __ SmiCompare(rcx, FieldOperand(rdi, FixedDoubleArray::kLengthOffset));
3987  __ j(above_equal, &slow);
3988 
3989  // Grow the array and finish the store.
3990  __ SmiAddConstant(FieldOperand(rdx, JSArray::kLengthOffset),
3991  Smi::FromInt(1));
3992  __ jmp(&finish_store);
3993 
3994  __ bind(&slow);
3995  Handle<Code> ic_slow = masm->isolate()->builtins()->KeyedStoreIC_Slow();
3996  __ jmp(ic_slow, RelocInfo::CODE_TARGET);
3997  }
3998 }
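// Aside (editor's sketch, not from this file): the Integer32ToSmi /
// SmiToInteger32 pairs used throughout these stubs are cheap on x64 because
// the smi payload occupies the upper half of the 64-bit word
// (kSmiValueSize == 32, tag bits zero in the lower half):
static inline long long SimplifiedInteger32ToSmi(int value) {
  return static_cast<long long>(value) << 32;  // tag: move payload up
}
static inline int SimplifiedSmiToInteger32(long long smi) {
  return static_cast<int>(smi >> 32);  // untag: arithmetic shift back down
}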
3999 
4000 
4001 #undef __
4002 
4003 } } // namespace v8::internal
4004 
4005 #endif // V8_TARGET_ARCH_X64