// code-stubs-hydrogen.cc (v8 3.25.30, node 0.11.13)
// V8 is Google's open source JavaScript engine.
// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following
// disclaimer in the documentation and/or other materials provided
// with the distribution.
// * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived
// from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "v8.h"

#include "code-stubs.h"
#include "hydrogen.h"
#include "lithium.h"

namespace v8 {
namespace internal {

static LChunk* OptimizeGraph(HGraph* graph) {
  DisallowHeapAllocation no_allocation;
  DisallowHandleAllocation no_handles;
  DisallowHandleDereference no_deref;

  ASSERT(graph != NULL);
  BailoutReason bailout_reason = kNoReason;
  if (!graph->Optimize(&bailout_reason)) {
    FATAL(GetBailoutReason(bailout_reason));
  }
  LChunk* chunk = LChunk::NewChunk(graph);
  if (chunk == NULL) {
    FATAL(GetBailoutReason(graph->info()->bailout_reason()));
  }
  return chunk;
}

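// Orientation note: DoGenerateCode() further down drives this helper; the
// overall stub pipeline, quoted from that function, is:
//
//   CodeStubGraphBuilder<Stub> builder(isolate, stub);
//   LChunk* chunk = OptimizeGraph(builder.CreateGraph());
//   Handle<Code> code = chunk->Codegen();
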
class CodeStubGraphBuilderBase : public HGraphBuilder {
 public:
  CodeStubGraphBuilderBase(Isolate* isolate, HydrogenCodeStub* stub)
      : HGraphBuilder(&info_),
        arguments_length_(NULL),
        info_(stub, isolate),
        context_(NULL) {
    descriptor_ = stub->GetInterfaceDescriptor(isolate);
    parameters_.Reset(new HParameter*[descriptor_->register_param_count_]);
  }
  virtual bool BuildGraph();

 protected:
  virtual HValue* BuildCodeStub() = 0;
  HParameter* GetParameter(int parameter) {
    ASSERT(parameter < descriptor_->register_param_count_);
    return parameters_[parameter];
  }
  HValue* GetArgumentsLength() {
    // This is initialized in BuildGraph().
    ASSERT(arguments_length_ != NULL);
    return arguments_length_;
  }
  CompilationInfo* info() { return &info_; }
  HydrogenCodeStub* stub() { return info_.code_stub(); }
  HContext* context() { return context_; }
  Isolate* isolate() { return info_.isolate(); }

  HLoadNamedField* BuildLoadNamedField(HValue* object,
                                       Representation representation,
                                       int offset,
                                       bool is_inobject);

  enum ArgumentClass {
    NONE,
    SINGLE,
    MULTIPLE
  };

  HValue* BuildArrayConstructor(ElementsKind kind,
                                AllocationSiteOverrideMode override_mode,
                                ArgumentClass argument_class);
  HValue* BuildInternalArrayConstructor(ElementsKind kind,
                                        ArgumentClass argument_class);

  // BuildCheckAndInstallOptimizedCode emits code to install the optimized
  // function found in the optimized code map at map_index in js_function, if
  // the function at map_index matches the given native_context. The builder
  // is left in the "Then()" state after the install.
  void BuildCheckAndInstallOptimizedCode(HValue* js_function,
                                         HValue* native_context,
                                         IfBuilder* builder,
                                         HValue* optimized_map,
                                         HValue* map_index);
  void BuildInstallCode(HValue* js_function, HValue* shared_info);

  HInstruction* LoadFromOptimizedCodeMap(HValue* optimized_map,
                                         HValue* iterator,
                                         int field_offset);
  void BuildInstallFromOptimizedCodeMap(HValue* js_function,
                                        HValue* shared_info,
                                        HValue* native_context);

 private:
  HValue* BuildArraySingleArgumentConstructor(JSArrayBuilder* builder);
  HValue* BuildArrayNArgumentsConstructor(JSArrayBuilder* builder,
                                          ElementsKind kind);

  SmartArrayPointer<HParameter*> parameters_;
  HValue* arguments_length_;
  CompilationInfoWithZone info_;
  CodeStubInterfaceDescriptor* descriptor_;
  HContext* context_;
};

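// Every stub below follows the same pattern: specialize the templated
// CodeStubGraphBuilder<Stub> (defined after BuildGraph()) and fill in
// BuildCodeStub(), for example:
//
//   template <>
//   HValue* CodeStubGraphBuilder<ToNumberStub>::BuildCodeStub() { ... }
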
bool CodeStubGraphBuilderBase::BuildGraph() {
  // Update the static counter each time a new code stub is generated.
  isolate()->counters()->code_stubs()->Increment();

  if (FLAG_trace_hydrogen_stubs) {
    const char* name = CodeStub::MajorName(stub()->MajorKey(), false);
    PrintF("-----------------------------------------------------------\n");
    PrintF("Compiling stub %s using hydrogen\n", name);
    isolate()->GetHTracer()->TraceCompilation(&info_);
  }

  int param_count = descriptor_->register_param_count_;
  HEnvironment* start_environment = graph()->start_environment();
  HBasicBlock* next_block = CreateBasicBlock(start_environment);
  Goto(next_block);
  next_block->SetJoinId(BailoutId::StubEntry());
  set_current_block(next_block);

  bool runtime_stack_params = descriptor_->stack_parameter_count_.is_valid();
  HInstruction* stack_parameter_count = NULL;
  for (int i = 0; i < param_count; ++i) {
    Representation r = descriptor_->IsParameterCountRegister(i)
        ? Representation::Integer32()
        : Representation::Tagged();
    HParameter* param = Add<HParameter>(i, HParameter::REGISTER_PARAMETER, r);
    start_environment->Bind(i, param);
    parameters_[i] = param;
    if (descriptor_->IsParameterCountRegister(i)) {
      param->set_type(HType::Smi());
      stack_parameter_count = param;
      arguments_length_ = stack_parameter_count;
    }
  }

  ASSERT(!runtime_stack_params || arguments_length_ != NULL);
  if (!runtime_stack_params) {
    stack_parameter_count = graph()->GetConstantMinus1();
    arguments_length_ = graph()->GetConstant0();
  }

  context_ = Add<HContext>();
  start_environment->BindContext(context_);

  Add<HSimulate>(BailoutId::StubEntry());

  NoObservableSideEffectsScope no_effects(this);

  HValue* return_value = BuildCodeStub();

  // We might have extra expressions to pop from the stack in addition to the
  // arguments above.
  HInstruction* stack_pop_count = stack_parameter_count;
  if (descriptor_->function_mode_ == JS_FUNCTION_STUB_MODE) {
    if (!stack_parameter_count->IsConstant() &&
        descriptor_->hint_stack_parameter_count_ < 0) {
      HInstruction* constant_one = graph()->GetConstant1();
      stack_pop_count = AddUncasted<HAdd>(stack_parameter_count, constant_one);
      stack_pop_count->ClearFlag(HValue::kCanOverflow);
      // TODO(mvstanton): verify that stack_parameter_count+1 really fits in a
      // smi.
    } else {
      int count = descriptor_->hint_stack_parameter_count_;
      stack_pop_count = Add<HConstant>(count);
    }
  }

  if (current_block() != NULL) {
    HReturn* hreturn_instruction = New<HReturn>(return_value,
                                                stack_pop_count);
    FinishCurrentBlock(hreturn_instruction);
  }
  return true;
}

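// Note on the epilogue above: in JS_FUNCTION_STUB_MODE with a dynamic
// argument count, one extra slot beyond the arguments is popped on return,
// presumably the receiver; that is the +1 the TODO above asks to verify
// against the smi range.
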
template <class Stub>
class CodeStubGraphBuilder: public CodeStubGraphBuilderBase {
 public:
  CodeStubGraphBuilder(Isolate* isolate, Stub* stub)
      : CodeStubGraphBuilderBase(isolate, stub) {}

 protected:
  virtual HValue* BuildCodeStub() {
    if (casted_stub()->IsUninitialized()) {
      return BuildCodeUninitializedStub();
    } else {
      return BuildCodeInitializedStub();
    }
  }

  virtual HValue* BuildCodeInitializedStub() {
    UNIMPLEMENTED();
    return NULL;
  }

  virtual HValue* BuildCodeUninitializedStub() {
    // Force a deopt that falls back to the runtime.
    HValue* undefined = graph()->GetConstantUndefined();
    IfBuilder builder(this);
    builder.IfNot<HCompareObjectEqAndBranch, HValue*>(undefined, undefined);
    builder.Then();
    builder.ElseDeopt("Forced deopt to runtime");
    return undefined;
  }

  Stub* casted_stub() { return static_cast<Stub*>(stub()); }
};

Handle<Code> HydrogenCodeStub::GenerateLightweightMissCode(Isolate* isolate) {
  Factory* factory = isolate->factory();

  // Generate the new code.
  MacroAssembler masm(isolate, NULL, 256);

  {
    // Update the static counter each time a new code stub is generated.
    isolate->counters()->code_stubs()->Increment();

    // Generate the code for the stub.
    masm.set_generating_stub(true);
    NoCurrentFrameScope scope(&masm);
    GenerateLightweightMiss(&masm);
  }

  // Create the code object.
  CodeDesc desc;
  masm.GetCode(&desc);

  // Copy the generated code into a heap object.
  Code::Flags flags = Code::ComputeFlags(
      GetCodeKind(),
      GetICState(),
      GetExtraICState(),
      GetStubType());
  Handle<Code> new_object = factory->NewCode(
      desc, flags, masm.CodeObject(), NeedsImmovableCode());
  return new_object;
}

template <class Stub>
static Handle<Code> DoGenerateCode(Isolate* isolate, Stub* stub) {
  CodeStub::Major major_key =
      static_cast<HydrogenCodeStub*>(stub)->MajorKey();
  CodeStubInterfaceDescriptor* descriptor =
      isolate->code_stub_interface_descriptor(major_key);
  if (descriptor->register_param_count_ < 0) {
    stub->InitializeInterfaceDescriptor(isolate, descriptor);
  }

  // If we are uninitialized we can use a light-weight stub to enter
  // the runtime that is significantly faster than using the standard
  // stub-failure deopt mechanism.
  if (stub->IsUninitialized() && descriptor->has_miss_handler()) {
    ASSERT(!descriptor->stack_parameter_count_.is_valid());
    return stub->GenerateLightweightMissCode(isolate);
  }
  ElapsedTimer timer;
  if (FLAG_profile_hydrogen_code_stub_compilation) {
    timer.Start();
  }
  CodeStubGraphBuilder<Stub> builder(isolate, stub);
  LChunk* chunk = OptimizeGraph(builder.CreateGraph());
  Handle<Code> code = chunk->Codegen();
  if (FLAG_profile_hydrogen_code_stub_compilation) {
    double ms = timer.Elapsed().InMillisecondsF();
    PrintF("[Lazy compilation of %s took %0.3f ms]\n",
           stub->GetName().get(), ms);
  }
  return code;
}

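// Two code paths above: an uninitialized stub with a registered miss handler
// gets the hand-written lightweight-miss code; everything else goes through
// the full Hydrogen pipeline (CreateGraph -> OptimizeGraph -> Codegen).
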
template <>
HValue* CodeStubGraphBuilder<ToNumberStub>::BuildCodeStub() {
  HValue* value = GetParameter(0);

  // Check if the parameter is already a SMI or heap number.
  IfBuilder if_number(this);
  if_number.If<HIsSmiAndBranch>(value);
  if_number.OrIf<HCompareMap>(value, isolate()->factory()->heap_number_map());
  if_number.Then();

  // Return the number.
  Push(value);

  if_number.Else();

  // Convert the parameter to number using the builtin.
  HValue* function = AddLoadJSBuiltin(Builtins::TO_NUMBER);
  Add<HPushArgument>(value);
  Push(Add<HInvokeFunction>(function, 1));

  if_number.End();

  return Pop();
}


Handle<Code> ToNumberStub::GenerateCode(Isolate* isolate) {
  return DoGenerateCode(isolate, this);
}

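// In JS terms the graph above implements, roughly (illustrative only):
//
//   function ToNumber(x) {
//     if (typeof x == 'number') return x;  // smi or heap number fast path
//     return %ToNumber(x);                 // the TO_NUMBER builtin
//   }
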
template <>
HValue* CodeStubGraphBuilder<NumberToStringStub>::BuildCodeStub() {
  info()->MarkAsSavesCallerDoubles();
  HValue* number = GetParameter(NumberToStringStub::kNumber);
  return BuildNumberToString(number, Type::Number(zone()));
}


Handle<Code> NumberToStringStub::GenerateCode(Isolate* isolate) {
  return DoGenerateCode(isolate, this);
}

template <>
HValue* CodeStubGraphBuilder<FastCloneShallowArrayStub>::BuildCodeStub() {
  Factory* factory = isolate()->factory();
  HValue* undefined = graph()->GetConstantUndefined();
  AllocationSiteMode alloc_site_mode = casted_stub()->allocation_site_mode();
  FastCloneShallowArrayStub::Mode mode = casted_stub()->mode();
  int length = casted_stub()->length();

  HInstruction* allocation_site = Add<HLoadKeyed>(GetParameter(0),
                                                  GetParameter(1),
                                                  static_cast<HValue*>(NULL),
                                                  FAST_ELEMENTS);
  IfBuilder checker(this);
  checker.IfNot<HCompareObjectEqAndBranch, HValue*>(allocation_site,
                                                    undefined);
  checker.Then();

  HObjectAccess access = HObjectAccess::ForAllocationSiteOffset(
      AllocationSite::kTransitionInfoOffset);
  HInstruction* boilerplate = Add<HLoadNamedField>(
      allocation_site, static_cast<HValue*>(NULL), access);
  HValue* push_value;
  if (mode == FastCloneShallowArrayStub::CLONE_ANY_ELEMENTS) {
    HValue* elements = AddLoadElements(boilerplate);

    IfBuilder if_fixed_cow(this);
    if_fixed_cow.If<HCompareMap>(elements, factory->fixed_cow_array_map());
    if_fixed_cow.Then();
    push_value = BuildCloneShallowArray(boilerplate,
                                        allocation_site,
                                        alloc_site_mode,
                                        FAST_ELEMENTS,
                                        0/*copy-on-write*/);
    environment()->Push(push_value);
    if_fixed_cow.Else();

    IfBuilder if_fixed(this);
    if_fixed.If<HCompareMap>(elements, factory->fixed_array_map());
    if_fixed.Then();
    push_value = BuildCloneShallowArray(boilerplate,
                                        allocation_site,
                                        alloc_site_mode,
                                        FAST_ELEMENTS,
                                        length);
    environment()->Push(push_value);
    if_fixed.Else();
    push_value = BuildCloneShallowArray(boilerplate,
                                        allocation_site,
                                        alloc_site_mode,
                                        FAST_DOUBLE_ELEMENTS,
                                        length);
    environment()->Push(push_value);
  } else {
    ElementsKind elements_kind = casted_stub()->ComputeElementsKind();
    push_value = BuildCloneShallowArray(boilerplate,
                                        allocation_site,
                                        alloc_site_mode,
                                        elements_kind,
                                        length);
    environment()->Push(push_value);
  }

  checker.ElseDeopt("Uninitialized boilerplate literals");
  checker.End();

  return environment()->Pop();
}


Handle<Code> FastCloneShallowArrayStub::GenerateCode(Isolate* isolate) {
  return DoGenerateCode(isolate, this);
}

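// The three-way dispatch above keys off the boilerplate's elements map:
// a copy-on-write FixedArray is cloned with length 0 (the backing store can
// be shared), a plain FixedArray is cloned as FAST_ELEMENTS, and anything
// else is treated as a double array and cloned as FAST_DOUBLE_ELEMENTS.
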
template <>
HValue* CodeStubGraphBuilder<FastCloneShallowObjectStub>::BuildCodeStub() {
  HValue* undefined = graph()->GetConstantUndefined();

  HInstruction* allocation_site = Add<HLoadKeyed>(GetParameter(0),
                                                  GetParameter(1),
                                                  static_cast<HValue*>(NULL),
                                                  FAST_ELEMENTS);

  IfBuilder checker(this);
  checker.IfNot<HCompareObjectEqAndBranch, HValue*>(allocation_site,
                                                    undefined);
  checker.And();

  HObjectAccess access = HObjectAccess::ForAllocationSiteOffset(
      AllocationSite::kTransitionInfoOffset);
  HInstruction* boilerplate = Add<HLoadNamedField>(
      allocation_site, static_cast<HValue*>(NULL), access);

  int size = JSObject::kHeaderSize + casted_stub()->length() * kPointerSize;
  int object_size = size;
  if (FLAG_allocation_site_pretenuring) {
    size += AllocationMemento::kSize;
  }

  HValue* boilerplate_map = Add<HLoadNamedField>(
      boilerplate, static_cast<HValue*>(NULL),
      HObjectAccess::ForMap());
  HValue* boilerplate_size = Add<HLoadNamedField>(
      boilerplate_map, static_cast<HValue*>(NULL),
      HObjectAccess::ForMapInstanceSize());
  HValue* size_in_words = Add<HConstant>(object_size >> kPointerSizeLog2);
  checker.If<HCompareNumericAndBranch>(boilerplate_size,
                                       size_in_words, Token::EQ);
  checker.Then();

  HValue* size_in_bytes = Add<HConstant>(size);

  HInstruction* object = Add<HAllocate>(size_in_bytes, HType::JSObject(),
                                        NOT_TENURED, JS_OBJECT_TYPE);

  for (int i = 0; i < object_size; i += kPointerSize) {
    HObjectAccess access = HObjectAccess::ForObservableJSObjectOffset(i);
    Add<HStoreNamedField>(
        object, access, Add<HLoadNamedField>(
            boilerplate, static_cast<HValue*>(NULL), access));
  }

  ASSERT(FLAG_allocation_site_pretenuring || (size == object_size));
  if (FLAG_allocation_site_pretenuring) {
    BuildCreateAllocationMemento(
        object, Add<HConstant>(object_size), allocation_site);
  }

  environment()->Push(object);
  checker.ElseDeopt("Uninitialized boilerplate in fast clone");
  checker.End();

  return environment()->Pop();
}


Handle<Code> FastCloneShallowObjectStub::GenerateCode(Isolate* isolate) {
  return DoGenerateCode(isolate, this);
}

template <>
HValue* CodeStubGraphBuilder<CreateAllocationSiteStub>::BuildCodeStub() {
  HValue* size = Add<HConstant>(AllocationSite::kSize);
  HInstruction* object = Add<HAllocate>(size, HType::JSObject(), TENURED,
                                        JS_OBJECT_TYPE);

  // Store the map.
  Handle<Map> allocation_site_map = isolate()->factory()->allocation_site_map();
  AddStoreMapConstant(object, allocation_site_map);

  // Store the payload (smi elements kind).
  HValue* initial_elements_kind = Add<HConstant>(GetInitialFastElementsKind());
  Add<HStoreNamedField>(object,
                        HObjectAccess::ForAllocationSiteOffset(
                            AllocationSite::kTransitionInfoOffset),
                        initial_elements_kind);

  // Unlike literals, constructed arrays don't have nested sites.
  Add<HStoreNamedField>(object,
                        HObjectAccess::ForAllocationSiteOffset(
                            AllocationSite::kNestedSiteOffset),
                        graph()->GetConstant0());

  // Pretenuring calculation field.
  Add<HStoreNamedField>(object,
                        HObjectAccess::ForAllocationSiteOffset(
                            AllocationSite::kPretenureDataOffset),
                        graph()->GetConstant0());

  // Pretenuring memento creation count field.
  Add<HStoreNamedField>(object,
                        HObjectAccess::ForAllocationSiteOffset(
                            AllocationSite::kPretenureCreateCountOffset),
                        graph()->GetConstant0());

  // Store an empty fixed array for the code dependency.
  HConstant* empty_fixed_array =
      Add<HConstant>(isolate()->factory()->empty_fixed_array());
  HStoreNamedField* store = Add<HStoreNamedField>(
      object,
      HObjectAccess::ForAllocationSiteOffset(
          AllocationSite::kDependentCodeOffset),
      empty_fixed_array);

  // Link the object to the allocation site list.
  HValue* site_list = Add<HConstant>(
      ExternalReference::allocation_sites_list_address(isolate()));
  HValue* site = Add<HLoadNamedField>(
      site_list, static_cast<HValue*>(NULL),
      HObjectAccess::ForAllocationSiteList());
  store = Add<HStoreNamedField>(object,
      HObjectAccess::ForAllocationSiteOffset(AllocationSite::kWeakNextOffset),
      site);
  store->SkipWriteBarrier();
  Add<HStoreNamedField>(site_list, HObjectAccess::ForAllocationSiteList(),
                        object);

  HInstruction* feedback_vector = GetParameter(0);
  HInstruction* slot = GetParameter(1);
  Add<HStoreKeyed>(feedback_vector, slot, object, FAST_ELEMENTS,
                   INITIALIZING_STORE);
  return feedback_vector;
}


Handle<Code> CreateAllocationSiteStub::GenerateCode(Isolate* isolate) {
  return DoGenerateCode(isolate, this);
}

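// Field-by-field, the stores above initialize the new AllocationSite to:
//   transition info  <- initial fast elements kind (smi payload)
//   nested site      <- 0
//   pretenure data   <- 0
//   create count     <- 0
//   dependent code   <- empty_fixed_array
//   weak next        <- old head of the isolate's allocation site list
// after which the site becomes the new list head and is recorded in the
// feedback vector slot.
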
template <>
HValue* CodeStubGraphBuilder<KeyedLoadFastElementStub>::BuildCodeStub() {
  HInstruction* load = BuildUncheckedMonomorphicElementAccess(
      GetParameter(0), GetParameter(1), NULL,
      casted_stub()->is_js_array(), casted_stub()->elements_kind(),
      LOAD, NEVER_RETURN_HOLE, STANDARD_STORE);
  return load;
}


Handle<Code> KeyedLoadFastElementStub::GenerateCode(Isolate* isolate) {
  return DoGenerateCode(isolate, this);
}

HLoadNamedField* CodeStubGraphBuilderBase::BuildLoadNamedField(
    HValue* object,
    Representation representation,
    int offset,
    bool is_inobject) {
  HObjectAccess access = is_inobject
      ? HObjectAccess::ForObservableJSObjectOffset(offset, representation)
      : HObjectAccess::ForBackingStoreOffset(offset, representation);
  if (representation.IsDouble()) {
    // Load the heap number.
    object = Add<HLoadNamedField>(
        object, static_cast<HValue*>(NULL),
        access.WithRepresentation(Representation::Tagged()));
    // Load the double value from it.
    access = HObjectAccess::ForHeapNumberValue();
  }
  return Add<HLoadNamedField>(object, static_cast<HValue*>(NULL), access);
}

template<>
HValue* CodeStubGraphBuilder<LoadFieldStub>::BuildCodeStub() {
  return BuildLoadNamedField(GetParameter(0),
                             casted_stub()->representation(),
                             casted_stub()->offset(),
                             casted_stub()->is_inobject());
}


Handle<Code> LoadFieldStub::GenerateCode(Isolate* isolate) {
  return DoGenerateCode(isolate, this);
}

template<>
HValue* CodeStubGraphBuilder<StringLengthStub>::BuildCodeStub() {
  HValue* string = BuildLoadNamedField(
      GetParameter(0), Representation::Tagged(), JSValue::kValueOffset, true);
  return BuildLoadNamedField(
      string, Representation::Tagged(), String::kLengthOffset, true);
}


Handle<Code> StringLengthStub::GenerateCode(Isolate* isolate) {
  return DoGenerateCode(isolate, this);
}

template <>
HValue* CodeStubGraphBuilder<KeyedStoreFastElementStub>::BuildCodeStub() {
  BuildUncheckedMonomorphicElementAccess(
      GetParameter(0), GetParameter(1), GetParameter(2),
      casted_stub()->is_js_array(), casted_stub()->elements_kind(),
      STORE, NEVER_RETURN_HOLE, casted_stub()->store_mode());

  return GetParameter(2);
}


Handle<Code> KeyedStoreFastElementStub::GenerateCode(Isolate* isolate) {
  return DoGenerateCode(isolate, this);
}

template <>
HValue* CodeStubGraphBuilder<TransitionElementsKindStub>::BuildCodeStub() {
  info()->MarkAsSavesCallerDoubles();

  BuildTransitionElementsKind(GetParameter(0),
                              GetParameter(1),
                              casted_stub()->from_kind(),
                              casted_stub()->to_kind(),
                              casted_stub()->is_js_array());

  return GetParameter(0);
}


Handle<Code> TransitionElementsKindStub::GenerateCode(Isolate* isolate) {
  return DoGenerateCode(isolate, this);
}

HValue* CodeStubGraphBuilderBase::BuildArrayConstructor(
    ElementsKind kind,
    AllocationSiteOverrideMode override_mode,
    ArgumentClass argument_class) {
  HValue* constructor = GetParameter(ArrayConstructorStubBase::kConstructor);
  HValue* alloc_site = GetParameter(ArrayConstructorStubBase::kAllocationSite);
  JSArrayBuilder array_builder(this, kind, alloc_site, constructor,
                               override_mode);
  HValue* result = NULL;
  switch (argument_class) {
    case NONE:
      result = array_builder.AllocateEmptyArray();
      break;
    case SINGLE:
      result = BuildArraySingleArgumentConstructor(&array_builder);
      break;
    case MULTIPLE:
      result = BuildArrayNArgumentsConstructor(&array_builder, kind);
      break;
  }

  return result;
}

HValue* CodeStubGraphBuilderBase::BuildInternalArrayConstructor(
    ElementsKind kind, ArgumentClass argument_class) {
  HValue* constructor = GetParameter(
      InternalArrayConstructorStubBase::kConstructor);
  JSArrayBuilder array_builder(this, kind, constructor);

  HValue* result = NULL;
  switch (argument_class) {
    case NONE:
      result = array_builder.AllocateEmptyArray();
      break;
    case SINGLE:
      result = BuildArraySingleArgumentConstructor(&array_builder);
      break;
    case MULTIPLE:
      result = BuildArrayNArgumentsConstructor(&array_builder, kind);
      break;
  }
  return result;
}

HValue* CodeStubGraphBuilderBase::BuildArraySingleArgumentConstructor(
    JSArrayBuilder* array_builder) {
  // Smi check and range check on the input arg.
  HValue* constant_one = graph()->GetConstant1();
  HValue* constant_zero = graph()->GetConstant0();

  HInstruction* elements = Add<HArgumentsElements>(false);
  HInstruction* argument = Add<HAccessArgumentsAt>(
      elements, constant_one, constant_zero);

  return BuildAllocateArrayFromLength(array_builder, argument);
}

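// This is the 'new Array(len)' shape: the single argument is read off the
// caller's stack and handed to BuildAllocateArrayFromLength() as the
// requested length.
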
HValue* CodeStubGraphBuilderBase::BuildArrayNArgumentsConstructor(
    JSArrayBuilder* array_builder, ElementsKind kind) {
  // Insert a bounds check because the number of arguments might exceed
  // the kInitialMaxFastElementArray limit. This cannot happen for code
  // that was parsed, but calling via Array.apply(thisArg, [...]) might
  // trigger it.
  HValue* length = GetArgumentsLength();
  HConstant* max_alloc_length =
      Add<HConstant>(JSObject::kInitialMaxFastElementArray);
  HValue* checked_length = Add<HBoundsCheck>(length, max_alloc_length);

  // We need to fill with the hole if it's a smi array in the multi-argument
  // case because we might have to bail out while copying arguments into
  // the array because they aren't compatible with a smi array.
  // If it's a double array, no problem, and if it's fast then no
  // problem either because doubles are boxed.
  //
  // TODO(mvstanton): consider an instruction to memset fill the array
  // with zero in this case instead.
  JSArrayBuilder::FillMode fill_mode = IsFastSmiElementsKind(kind)
      ? JSArrayBuilder::FILL_WITH_HOLE
      : JSArrayBuilder::DONT_FILL_WITH_HOLE;
  HValue* new_object = array_builder->AllocateArray(checked_length,
                                                    checked_length,
                                                    fill_mode);
  HValue* elements = array_builder->GetElementsLocation();
  ASSERT(elements != NULL);

  // Now populate the elements correctly.
  LoopBuilder builder(this,
                      context(),
                      LoopBuilder::kPostIncrement);
  HValue* start = graph()->GetConstant0();
  HValue* key = builder.BeginBody(start, checked_length, Token::LT);
  HInstruction* argument_elements = Add<HArgumentsElements>(false);
  HInstruction* argument = Add<HAccessArgumentsAt>(
      argument_elements, checked_length, key);

  Add<HStoreKeyed>(elements, key, argument, kind);
  builder.EndBody();
  return new_object;
}

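// Illustrative trigger for the bounds check above (see the comment at the
// top of the function): parsed code like 'new Array(1, 2, 3)' has a fixed,
// small argument count, but
//
//   Array.apply(null, huge_array)   // argument count == huge_array.length
//
// can exceed kInitialMaxFastElementArray and must hit the HBoundsCheck.
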
template <>
HValue* CodeStubGraphBuilder<ArrayNoArgumentConstructorStub>::BuildCodeStub() {
  ElementsKind kind = casted_stub()->elements_kind();
  AllocationSiteOverrideMode override_mode = casted_stub()->override_mode();
  return BuildArrayConstructor(kind, override_mode, NONE);
}


Handle<Code> ArrayNoArgumentConstructorStub::GenerateCode(Isolate* isolate) {
  return DoGenerateCode(isolate, this);
}

template <>
HValue* CodeStubGraphBuilder<ArraySingleArgumentConstructorStub>::
    BuildCodeStub() {
  ElementsKind kind = casted_stub()->elements_kind();
  AllocationSiteOverrideMode override_mode = casted_stub()->override_mode();
  return BuildArrayConstructor(kind, override_mode, SINGLE);
}


Handle<Code> ArraySingleArgumentConstructorStub::GenerateCode(
    Isolate* isolate) {
  return DoGenerateCode(isolate, this);
}

template <>
HValue* CodeStubGraphBuilder<ArrayNArgumentsConstructorStub>::BuildCodeStub() {
  ElementsKind kind = casted_stub()->elements_kind();
  AllocationSiteOverrideMode override_mode = casted_stub()->override_mode();
  return BuildArrayConstructor(kind, override_mode, MULTIPLE);
}


Handle<Code> ArrayNArgumentsConstructorStub::GenerateCode(Isolate* isolate) {
  return DoGenerateCode(isolate, this);
}

template <>
HValue* CodeStubGraphBuilder<InternalArrayNoArgumentConstructorStub>::
    BuildCodeStub() {
  ElementsKind kind = casted_stub()->elements_kind();
  return BuildInternalArrayConstructor(kind, NONE);
}


Handle<Code> InternalArrayNoArgumentConstructorStub::GenerateCode(
    Isolate* isolate) {
  return DoGenerateCode(isolate, this);
}

template <>
HValue* CodeStubGraphBuilder<InternalArraySingleArgumentConstructorStub>::
    BuildCodeStub() {
  ElementsKind kind = casted_stub()->elements_kind();
  return BuildInternalArrayConstructor(kind, SINGLE);
}


Handle<Code> InternalArraySingleArgumentConstructorStub::GenerateCode(
    Isolate* isolate) {
  return DoGenerateCode(isolate, this);
}

template <>
HValue* CodeStubGraphBuilder<InternalArrayNArgumentsConstructorStub>::
    BuildCodeStub() {
  ElementsKind kind = casted_stub()->elements_kind();
  return BuildInternalArrayConstructor(kind, MULTIPLE);
}


Handle<Code> InternalArrayNArgumentsConstructorStub::GenerateCode(
    Isolate* isolate) {
  return DoGenerateCode(isolate, this);
}

template <>
HValue* CodeStubGraphBuilder<CompareNilICStub>::BuildCodeInitializedStub() {
  Isolate* isolate = graph()->isolate();
  CompareNilICStub* stub = casted_stub();
  HIfContinuation continuation;
  Handle<Map> sentinel_map(isolate->heap()->meta_map());
  Type* type = stub->GetType(zone(), sentinel_map);
  BuildCompareNil(GetParameter(0), type, &continuation);
  IfBuilder if_nil(this, &continuation);
  if_nil.Then();
  if (continuation.IsFalseReachable()) {
    if_nil.Else();
    if_nil.Return(graph()->GetConstant0());
  }
  if_nil.End();
  return continuation.IsTrueReachable()
      ? graph()->GetConstant1()
      : graph()->GetConstantUndefined();
}


Handle<Code> CompareNilICStub::GenerateCode(Isolate* isolate) {
  return DoGenerateCode(isolate, this);
}

template <>
HValue* CodeStubGraphBuilder<BinaryOpICStub>::BuildCodeInitializedStub() {
  BinaryOpIC::State state = casted_stub()->state();

  HValue* left = GetParameter(BinaryOpICStub::kLeft);
  HValue* right = GetParameter(BinaryOpICStub::kRight);

  Type* left_type = state.GetLeftType(zone());
  Type* right_type = state.GetRightType(zone());
  Type* result_type = state.GetResultType(zone());

  ASSERT(!left_type->Is(Type::None()) && !right_type->Is(Type::None()) &&
         (state.HasSideEffects() || !result_type->Is(Type::None())));

  HValue* result = NULL;
  HAllocationMode allocation_mode(NOT_TENURED);
  if (state.op() == Token::ADD &&
      (left_type->Maybe(Type::String()) || right_type->Maybe(Type::String())) &&
      !left_type->Is(Type::String()) && !right_type->Is(Type::String())) {
    // For the generic add stub a fast case for string addition is performance
    // critical.
    if (left_type->Maybe(Type::String())) {
      IfBuilder if_leftisstring(this);
      if_leftisstring.If<HIsStringAndBranch>(left);
      if_leftisstring.Then();
      {
        Push(BuildBinaryOperation(
            state.op(), left, right,
            Type::String(zone()), right_type,
            result_type, state.fixed_right_arg(),
            allocation_mode));
      }
      if_leftisstring.Else();
      {
        Push(BuildBinaryOperation(
            state.op(), left, right,
            left_type, right_type, result_type,
            state.fixed_right_arg(), allocation_mode));
      }
      if_leftisstring.End();
      result = Pop();
    } else {
      IfBuilder if_rightisstring(this);
      if_rightisstring.If<HIsStringAndBranch>(right);
      if_rightisstring.Then();
      {
        Push(BuildBinaryOperation(
            state.op(), left, right,
            left_type, Type::String(zone()),
            result_type, state.fixed_right_arg(),
            allocation_mode));
      }
      if_rightisstring.Else();
      {
        Push(BuildBinaryOperation(
            state.op(), left, right,
            left_type, right_type, result_type,
            state.fixed_right_arg(), allocation_mode));
      }
      if_rightisstring.End();
      result = Pop();
    }
  } else {
    result = BuildBinaryOperation(
        state.op(), left, right,
        left_type, right_type, result_type,
        state.fixed_right_arg(), allocation_mode);
  }

  // If we encounter a generic argument, the number conversion is
  // observable, thus we cannot afford to bail out after the fact.
  if (!state.HasSideEffects()) {
    if (result_type->Is(Type::SignedSmall())) {
      if (state.op() == Token::SHR) {
        // TODO(olivf) Replace this by a SmiTagU Instruction.
        // 0x40000000: this number would convert to negative when interpreting
        // the register as signed value;
        IfBuilder if_of(this);
        if_of.IfNot<HCompareNumericAndBranch>(result,
            Add<HConstant>(static_cast<int>(SmiValuesAre32Bits()
                ? 0x80000000 : 0x40000000)), Token::EQ_STRICT);
        if_of.Then();
        if_of.ElseDeopt("UInt->Smi overflow");
        if_of.End();
      }
    }
    result = EnforceNumberType(result, result_type);
  }

  // Reuse the double box of one of the operands if we are allowed to (i.e.
  // chained binops).
  if (state.CanReuseDoubleBox()) {
    HValue* operand = (state.mode() == OVERWRITE_LEFT) ? left : right;
    IfBuilder if_heap_number(this);
    if_heap_number.IfNot<HIsSmiAndBranch>(operand);
    if_heap_number.Then();
    Add<HStoreNamedField>(operand, HObjectAccess::ForHeapNumberValue(), result);
    Push(operand);
    if_heap_number.Else();
    Push(result);
    if_heap_number.End();
    result = Pop();
  }

  return result;
}


Handle<Code> BinaryOpICStub::GenerateCode(Isolate* isolate) {
  return DoGenerateCode(isolate, this);
}

984 
985 template <>
987  BinaryOpIC::State state = casted_stub()->state();
988 
989  HValue* allocation_site = GetParameter(
990  BinaryOpWithAllocationSiteStub::kAllocationSite);
991  HValue* left = GetParameter(BinaryOpWithAllocationSiteStub::kLeft);
992  HValue* right = GetParameter(BinaryOpWithAllocationSiteStub::kRight);
993 
994  Type* left_type = state.GetLeftType(zone());
995  Type* right_type = state.GetRightType(zone());
996  Type* result_type = state.GetResultType(zone());
997  HAllocationMode allocation_mode(allocation_site);
998 
999  return BuildBinaryOperation(state.op(), left, right,
1000  left_type, right_type, result_type,
1001  state.fixed_right_arg(), allocation_mode);
1002 }
1003 
1004 
1005 Handle<Code> BinaryOpWithAllocationSiteStub::GenerateCode(Isolate* isolate) {
1006  return DoGenerateCode(isolate, this);
1007 }
1008 
1009 
template <>
HValue* CodeStubGraphBuilder<StringAddStub>::BuildCodeInitializedStub() {
  StringAddStub* stub = casted_stub();
  StringAddFlags flags = stub->flags();
  PretenureFlag pretenure_flag = stub->pretenure_flag();

  HValue* left = GetParameter(StringAddStub::kLeft);
  HValue* right = GetParameter(StringAddStub::kRight);

  // Make sure that both arguments are strings if not known in advance.
  if ((flags & STRING_ADD_CHECK_LEFT) == STRING_ADD_CHECK_LEFT) {
    left = BuildCheckString(left);
  }
  if ((flags & STRING_ADD_CHECK_RIGHT) == STRING_ADD_CHECK_RIGHT) {
    right = BuildCheckString(right);
  }

  return BuildStringAdd(left, right, HAllocationMode(pretenure_flag));
}


Handle<Code> StringAddStub::GenerateCode(Isolate* isolate) {
  return DoGenerateCode(isolate, this);
}

template <>
HValue* CodeStubGraphBuilder<ToBooleanStub>::BuildCodeInitializedStub() {
  ToBooleanStub* stub = casted_stub();

  IfBuilder if_true(this);
  if_true.If<HBranch>(GetParameter(0), stub->GetTypes());
  if_true.Then();
  if_true.Return(graph()->GetConstant1());
  if_true.Else();
  if_true.End();
  return graph()->GetConstant0();
}


Handle<Code> ToBooleanStub::GenerateCode(Isolate* isolate) {
  return DoGenerateCode(isolate, this);
}

template <>
HValue* CodeStubGraphBuilder<StoreGlobalStub>::BuildCodeInitializedStub() {
  StoreGlobalStub* stub = casted_stub();
  Handle<Object> hole(isolate()->heap()->the_hole_value(), isolate());
  Handle<Object> placeholder_value(Smi::FromInt(0), isolate());
  Handle<PropertyCell> placeholder_cell =
      isolate()->factory()->NewPropertyCell(placeholder_value);

  HParameter* value = GetParameter(2);

  if (stub->check_global()) {
    // Check that the map of the global has not changed: use a placeholder map
    // that will be replaced later with the global object's map.
    Handle<Map> placeholder_map = isolate()->factory()->meta_map();
    HValue* global = Add<HConstant>(
        StoreGlobalStub::global_placeholder(isolate()));
    Add<HCheckMaps>(global, placeholder_map, top_info());
  }

  HValue* cell = Add<HConstant>(placeholder_cell);
  HObjectAccess access(HObjectAccess::ForCellPayload(isolate()));
  HValue* cell_contents = Add<HLoadNamedField>(
      cell, static_cast<HValue*>(NULL), access);

  if (stub->is_constant()) {
    IfBuilder builder(this);
    builder.If<HCompareObjectEqAndBranch>(cell_contents, value);
    builder.Then();
    builder.ElseDeopt("Unexpected cell contents in constant global store");
    builder.End();
  } else {
    // Load the payload of the global parameter cell. A hole indicates that the
    // property has been deleted and that the store must be handled by the
    // runtime.
    IfBuilder builder(this);
    HValue* hole_value = Add<HConstant>(hole);
    builder.If<HCompareObjectEqAndBranch>(cell_contents, hole_value);
    builder.Then();
    builder.Deopt("Unexpected cell contents in global store");
    builder.Else();
    Add<HStoreNamedField>(cell, access, value);
    builder.End();
  }

  return value;
}


Handle<Code> StoreGlobalStub::GenerateCode(Isolate* isolate) {
  return DoGenerateCode(isolate, this);
}

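// Note the placeholder scheme above: the stub is compiled once against
// meta_map and a dummy PropertyCell, and the embedded constants are patched
// with the real global map and cell when the stub is specialized for a
// particular global property (see the in-code comments).
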
template<>
HValue* CodeStubGraphBuilder<ElementsTransitionAndStoreStub>::BuildCodeStub() {
  HValue* value = GetParameter(0);
  HValue* map = GetParameter(1);
  HValue* key = GetParameter(2);
  HValue* object = GetParameter(3);

  if (FLAG_trace_elements_transitions) {
    // Tracing elements transitions is the job of the runtime.
    Add<HDeoptimize>("Tracing elements transitions", Deoptimizer::EAGER);
  } else {
    info()->MarkAsSavesCallerDoubles();

    BuildTransitionElementsKind(object, map,
                                casted_stub()->from_kind(),
                                casted_stub()->to_kind(),
                                casted_stub()->is_jsarray());

    BuildUncheckedMonomorphicElementAccess(object, key, value,
                                           casted_stub()->is_jsarray(),
                                           casted_stub()->to_kind(),
                                           STORE, ALLOW_RETURN_HOLE,
                                           casted_stub()->store_mode());
  }

  return value;
}


Handle<Code> ElementsTransitionAndStoreStub::GenerateCode(Isolate* isolate) {
  return DoGenerateCode(isolate, this);
}

void CodeStubGraphBuilderBase::BuildCheckAndInstallOptimizedCode(
    HValue* js_function,
    HValue* native_context,
    IfBuilder* builder,
    HValue* optimized_map,
    HValue* map_index) {
  HValue* osr_ast_id_none = Add<HConstant>(BailoutId::None().ToInt());
  HValue* context_slot = LoadFromOptimizedCodeMap(
      optimized_map, map_index, SharedFunctionInfo::kContextOffset);
  HValue* osr_ast_slot = LoadFromOptimizedCodeMap(
      optimized_map, map_index, SharedFunctionInfo::kOsrAstIdOffset);
  builder->If<HCompareObjectEqAndBranch>(native_context,
                                         context_slot);
  builder->AndIf<HCompareObjectEqAndBranch>(osr_ast_slot, osr_ast_id_none);
  builder->Then();
  HValue* code_object = LoadFromOptimizedCodeMap(optimized_map,
      map_index, SharedFunctionInfo::kCachedCodeOffset);
  // and the literals
  HValue* literals = LoadFromOptimizedCodeMap(optimized_map,
      map_index, SharedFunctionInfo::kLiteralsOffset);

  Counters* counters = isolate()->counters();
  AddIncrementCounter(counters->fast_new_closure_install_optimized());

  // TODO(fschneider): Idea: store proper code pointers in the optimized code
  // map and either unmangle them on marking or do nothing as the whole map is
  // discarded on major GC anyway.
  Add<HStoreCodeEntry>(js_function, code_object);
  Add<HStoreNamedField>(js_function, HObjectAccess::ForLiteralsPointer(),
                        literals);

  // Now link a function into a list of optimized functions.
  HValue* optimized_functions_list = Add<HLoadNamedField>(
      native_context, static_cast<HValue*>(NULL),
      HObjectAccess::ForContextSlot(Context::OPTIMIZED_FUNCTIONS_LIST));
  Add<HStoreNamedField>(js_function,
                        HObjectAccess::ForNextFunctionLinkPointer(),
                        optimized_functions_list);

  // This store is the only one that should have a write barrier.
  Add<HStoreNamedField>(native_context,
      HObjectAccess::ForContextSlot(Context::OPTIMIZED_FUNCTIONS_LIST),
      js_function);

  // The builder continues in the "then" state after this function.
}

void CodeStubGraphBuilderBase::BuildInstallCode(HValue* js_function,
                                                HValue* shared_info) {
  Add<HStoreNamedField>(js_function,
                        HObjectAccess::ForNextFunctionLinkPointer(),
                        graph()->GetConstantUndefined());
  HValue* code_object = Add<HLoadNamedField>(
      shared_info, static_cast<HValue*>(NULL), HObjectAccess::ForCodeOffset());
  Add<HStoreCodeEntry>(js_function, code_object);
}

HInstruction* CodeStubGraphBuilderBase::LoadFromOptimizedCodeMap(
    HValue* optimized_map,
    HValue* iterator,
    int field_offset) {
  // By making sure to express these loads in the form [<hvalue> + constant]
  // the keyed load can be hoisted.
  ASSERT(field_offset >= 0 && field_offset < SharedFunctionInfo::kEntryLength);
  HValue* field_slot = iterator;
  if (field_offset > 0) {
    HValue* field_offset_value = Add<HConstant>(field_offset);
    field_slot = AddUncasted<HAdd>(iterator, field_offset_value);
  }
  HInstruction* field_entry = Add<HLoadKeyed>(optimized_map, field_slot,
      static_cast<HValue*>(NULL), FAST_ELEMENTS);
  return field_entry;
}

void CodeStubGraphBuilderBase::BuildInstallFromOptimizedCodeMap(
    HValue* js_function,
    HValue* shared_info,
    HValue* native_context) {
  Counters* counters = isolate()->counters();
  IfBuilder is_optimized(this);
  HInstruction* optimized_map = Add<HLoadNamedField>(
      shared_info, static_cast<HValue*>(NULL),
      HObjectAccess::ForOptimizedCodeMap());
  HValue* null_constant = Add<HConstant>(0);
  is_optimized.If<HCompareObjectEqAndBranch>(optimized_map, null_constant);
  is_optimized.Then();
  {
    BuildInstallCode(js_function, shared_info);
  }
  is_optimized.Else();
  {
    AddIncrementCounter(counters->fast_new_closure_try_optimized());
    // optimized_map points to a fixed array of kEntryLength-element entries
    // (native context, optimized code, literals, OSR AST id).
    // The map must never be empty, so check the first entry.
    HValue* first_entry_index =
        Add<HConstant>(SharedFunctionInfo::kEntriesStart);
    IfBuilder already_in(this);
    BuildCheckAndInstallOptimizedCode(js_function, native_context, &already_in,
                                      optimized_map, first_entry_index);
    already_in.Else();
    {
      // Iterate through the rest of the map backwards. Do not double check
      // the first entry. After the loop, if no matching optimized code was
      // found, install unoptimized code.
      // for(i = map.length() - SharedFunctionInfo::kEntryLength;
      //     i > SharedFunctionInfo::kEntriesStart;
      //     i -= SharedFunctionInfo::kEntryLength) { .. }
      HValue* shared_function_entry_length =
          Add<HConstant>(SharedFunctionInfo::kEntryLength);
      LoopBuilder loop_builder(this,
                               context(),
                               LoopBuilder::kPostDecrement,
                               shared_function_entry_length);
      HValue* array_length = Add<HLoadNamedField>(
          optimized_map, static_cast<HValue*>(NULL),
          HObjectAccess::ForFixedArrayLength());
      HValue* start_pos = AddUncasted<HSub>(array_length,
                                            shared_function_entry_length);
      HValue* slot_iterator = loop_builder.BeginBody(start_pos,
                                                     first_entry_index,
                                                     Token::GT);
      {
        IfBuilder done_check(this);
        BuildCheckAndInstallOptimizedCode(js_function, native_context,
                                          &done_check,
                                          optimized_map,
                                          slot_iterator);
        // Fall out of the loop.
        loop_builder.Break();
      }
      loop_builder.EndBody();

      // If slot_iterator equals the first entry index, we failed to find and
      // install optimized code.
      IfBuilder no_optimized_code_check(this);
      no_optimized_code_check.If<HCompareNumericAndBranch>(
          slot_iterator, first_entry_index, Token::EQ);
      no_optimized_code_check.Then();
      {
        // Store the unoptimized code.
        BuildInstallCode(js_function, shared_info);
      }
    }
  }
}

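// Assumed layout of the SharedFunctionInfo optimized code map, pieced
// together from the offsets used above (a header up to kEntriesStart, then
// one kEntryLength-slot group per cached context):
//
//   [ ...header... |
//     context, code, literals, osr ast id |   // entry 0
//     context, code, literals, osr ast id |   // entry 1
//     ... ]
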
template<>
HValue* CodeStubGraphBuilder<FastNewClosureStub>::BuildCodeStub() {
  Counters* counters = isolate()->counters();
  Factory* factory = isolate()->factory();
  HInstruction* empty_fixed_array =
      Add<HConstant>(factory->empty_fixed_array());
  HValue* shared_info = GetParameter(0);

  AddIncrementCounter(counters->fast_new_closure_total());

  // Create a new closure from the given function info in new space.
  HValue* size = Add<HConstant>(JSFunction::kSize);
  HInstruction* js_function = Add<HAllocate>(size, HType::JSObject(),
                                             NOT_TENURED, JS_FUNCTION_TYPE);

  int map_index = Context::FunctionMapIndex(casted_stub()->strict_mode(),
                                            casted_stub()->is_generator());

  // Compute the function map in the current native context and set that
  // as the map of the allocated object.
  HInstruction* native_context = BuildGetNativeContext();
  HInstruction* map_slot_value = Add<HLoadNamedField>(
      native_context, static_cast<HValue*>(NULL),
      HObjectAccess::ForContextSlot(map_index));
  Add<HStoreNamedField>(js_function, HObjectAccess::ForMap(), map_slot_value);

  // Initialize the rest of the function.
  Add<HStoreNamedField>(js_function, HObjectAccess::ForPropertiesPointer(),
                        empty_fixed_array);
  Add<HStoreNamedField>(js_function, HObjectAccess::ForElementsPointer(),
                        empty_fixed_array);
  Add<HStoreNamedField>(js_function, HObjectAccess::ForLiteralsPointer(),
                        empty_fixed_array);
  Add<HStoreNamedField>(js_function, HObjectAccess::ForPrototypeOrInitialMap(),
                        graph()->GetConstantHole());
  Add<HStoreNamedField>(js_function,
                        HObjectAccess::ForSharedFunctionInfoPointer(),
                        shared_info);
  Add<HStoreNamedField>(js_function, HObjectAccess::ForFunctionContextPointer(),
                        context());

  // Initialize the code pointer in the function to be the one
  // found in the shared function info object.
  // But first check if there is an optimized version for our context.
  if (FLAG_cache_optimized_code) {
    BuildInstallFromOptimizedCodeMap(js_function, shared_info, native_context);
  } else {
    BuildInstallCode(js_function, shared_info);
  }

  return js_function;
}


Handle<Code> FastNewClosureStub::GenerateCode(Isolate* isolate) {
  return DoGenerateCode(isolate, this);
}

template<>
HValue* CodeStubGraphBuilder<FastNewContextStub>::BuildCodeStub() {
  int length = casted_stub()->slots() + Context::MIN_CONTEXT_SLOTS;

  // Get the function.
  HParameter* function = GetParameter(FastNewContextStub::kFunction);

  // Allocate the context in new space.
  HAllocate* function_context = Add<HAllocate>(
      Add<HConstant>(length * kPointerSize + FixedArray::kHeaderSize),
      HType::Tagged(), NOT_TENURED, FIXED_ARRAY_TYPE);

  // Set up the object header.
  AddStoreMapConstant(function_context,
                      isolate()->factory()->function_context_map());
  Add<HStoreNamedField>(function_context,
                        HObjectAccess::ForFixedArrayLength(),
                        Add<HConstant>(length));

  // Set up the fixed slots.
  Add<HStoreNamedField>(function_context,
                        HObjectAccess::ForContextSlot(Context::CLOSURE_INDEX),
                        function);
  Add<HStoreNamedField>(function_context,
                        HObjectAccess::ForContextSlot(Context::PREVIOUS_INDEX),
                        context());
  Add<HStoreNamedField>(function_context,
                        HObjectAccess::ForContextSlot(Context::EXTENSION_INDEX),
                        graph()->GetConstant0());

  // Copy the global object from the previous context.
  HValue* global_object = Add<HLoadNamedField>(
      context(), static_cast<HValue*>(NULL),
      HObjectAccess::ForContextSlot(Context::GLOBAL_OBJECT_INDEX));
  Add<HStoreNamedField>(function_context,
                        HObjectAccess::ForContextSlot(
                            Context::GLOBAL_OBJECT_INDEX),
                        global_object);

  // Initialize the rest of the slots to undefined.
  for (int i = Context::MIN_CONTEXT_SLOTS; i < length; ++i) {
    Add<HStoreNamedField>(function_context,
                          HObjectAccess::ForContextSlot(i),
                          graph()->GetConstantUndefined());
  }

  return function_context;
}


Handle<Code> FastNewContextStub::GenerateCode(Isolate* isolate) {
  return DoGenerateCode(isolate, this);
}

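// Resulting layout of the allocated function context, per the stores above:
//   map                  <- function_context_map
//   length               <- slots() + Context::MIN_CONTEXT_SLOTS
//   CLOSURE_INDEX        <- function
//   PREVIOUS_INDEX       <- caller context
//   EXTENSION_INDEX      <- 0
//   GLOBAL_OBJECT_INDEX  <- copied from the previous context
//   remaining slots      <- undefined
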
template<>
HValue* CodeStubGraphBuilder<KeyedLoadDictionaryElementStub>::BuildCodeStub() {
  HValue* receiver = GetParameter(0);
  HValue* key = GetParameter(1);

  Add<HCheckSmi>(key);

  return BuildUncheckedDictionaryElementLoad(receiver, key);
}


Handle<Code> KeyedLoadDictionaryElementStub::GenerateCode(Isolate* isolate) {
  return DoGenerateCode(isolate, this);
}

template<>
HValue* CodeStubGraphBuilder<RegExpConstructResultStub>::BuildCodeStub() {
  // Determine the parameters.
  HValue* length = GetParameter(RegExpConstructResultStub::kLength);
  HValue* index = GetParameter(RegExpConstructResultStub::kIndex);
  HValue* input = GetParameter(RegExpConstructResultStub::kInput);

  return BuildRegExpConstructResult(length, index, input);
}


Handle<Code> RegExpConstructResultStub::GenerateCode(Isolate* isolate) {
  return DoGenerateCode(isolate, this);
}

} }  // namespace v8::internal
virtual Handle< Code > GenerateCode(Isolate *isolate)
enable upcoming ES6 features enable harmony block scoping enable harmony enable harmony proxies enable harmony generators enable harmony numeric enable harmony string enable harmony math functions harmony_scoping harmony_symbols harmony_collections harmony_iteration harmony_strings harmony_scoping harmony_maths tracks arrays with only smi values Optimize object Array DOM strings and string pretenure call new trace pretenuring decisions of HAllocate instructions track fields with only smi values track fields with heap values track_fields track_fields Enables optimizations which favor memory size over execution speed use string slices optimization filter maximum number of GVN fix point iterations use function inlining use allocation folding eliminate write barriers targeting allocations in optimized code maximum source size in bytes considered for a single inlining maximum cumulative number of AST nodes considered for inlining crankshaft harvests type feedback from stub cache trace check elimination phase hydrogen tracing filter NULL
Definition: flags.cc:269
virtual Handle< Code > GenerateCode(Isolate *isolate)
void PrintF(const char *format,...)
Definition: v8utils.cc:40
enable upcoming ES6 features enable harmony block scoping enable harmony enable harmony proxies enable harmony generators enable harmony numeric enable harmony string enable harmony math functions harmony_scoping harmony_symbols harmony_collections harmony_iteration harmony_strings harmony_scoping harmony_maths tracks arrays with only smi values Optimize object Array DOM strings and string pretenure call new trace pretenuring decisions of HAllocate instructions track fields with only smi values track fields with heap values track_fields track_fields Enables optimizations which favor memory size over execution speed use string slices optimization filter maximum number of GVN fix point iterations use function inlining use allocation folding eliminate write barriers targeting allocations in optimized code maximum source size in bytes considered for a single inlining maximum cumulative number of AST nodes considered for inlining crankshaft harvests type feedback from stub cache trace check elimination phase hydrogen tracing filter trace hydrogen to given file name trace inlining decisions trace store elimination trace all use positions trace global value numbering trace hydrogen escape analysis trace the tracking of allocation sites trace map generalization environment for every instruction deoptimize every n garbage collections put a break point before deoptimizing deoptimize uncommon cases use on stack replacement trace array bounds check elimination perform array index dehoisting use load elimination use store elimination use constant folding eliminate unreachable code number of stress runs when picking a function to watch for shared function not JSFunction itself flushes the cache of optimized code for closures on every GC functions with arguments object maximum number of escape analysis fix point iterations allow uint32 values on optimize frames if they are used only in safe operations track concurrent recompilation artificial compilation delay in ms concurrent on stack replacement do not emit check maps for constant values that have a leaf map
Definition: flags.cc:350
virtual Handle< Code > GenerateCode(Isolate *isolate)
static int FunctionMapIndex(StrictMode strict_mode, bool is_generator)
Definition: contexts.h:502
Handle< PropertyCell > NewPropertyCell(Handle< Object > value)
Definition: factory.cc:793
#define FATAL(msg)
Definition: checks.h:48
virtual Handle< Code > GenerateCode(Isolate *isolate)
Type * GetType(Zone *zone, Handle< Map > map=Handle< Map >())
Definition: code-stubs.cc:474
virtual Handle< Code > GenerateCode(Isolate *isolate)
static Smi * FromInt(int value)
Definition: objects-inl.h:1209
static const int kContextOffset
Definition: objects.h:6743
void AddIncrementCounter(StatsCounter *counter)
Definition: hydrogen.cc:1214
virtual Code::Kind GetCodeKind() const
Definition: code-stubs.h:393
static Representation Integer32()
static BailoutId StubEntry()
Definition: utils.h:1168
virtual Handle< Code > GenerateCode(Isolate *isolate)
virtual Handle< Code > GenerateCode(Isolate *isolate)
PerThreadAssertScopeDebugOnly< HANDLE_DEREFERENCE_ASSERT, false > DisallowHandleDereference
Definition: assert-scope.h:222
CodeStubInterfaceDescriptor * code_stub_interface_descriptor(int index)
Definition: isolate.cc:2274
uint32_t Flags
Definition: objects.h:5184
AllocationSiteOverrideMode
Definition: code-stubs.h:759
virtual Handle< Code > GenerateCode(Isolate *isolate)
Isolate * isolate() const
Definition: compiler.h:67
#define ASSERT(condition)
Definition: checks.h:329
HBasicBlock * current_block() const
Definition: hydrogen.h:1066
const int kPointerSizeLog2
Definition: globals.h:281
HLoadNamedField * BuildLoadNamedField(HValue *object, Representation representation, int offset, bool is_inobject)
virtual Handle< Code > GenerateCode(Isolate *isolate)
virtual Handle< Code > GenerateCode(Isolate *isolate)
virtual Handle< Code > GenerateCode(Isolate *isolate) V8_OVERRIDE
bool Maybe(TypeImpl *that)
Definition: types.cc:339
Factory * factory()
Definition: isolate.h:995
int ToInt() const
Definition: utils.h:1162
static const int kDependentCodeOffset
Definition: objects.h:8416
static const int kOsrAstIdOffset
Definition: objects.h:6746
static const int kPretenureCreateCountOffset
Definition: objects.h:8414
virtual Handle< Code > GenerateCode(Isolate *isolate)
static const int kNestedSiteOffset
Definition: objects.h:8412
static const int kPretenureDataOffset
Definition: objects.h:8413
enable upcoming ES6 features enable harmony block scoping enable harmony enable harmony proxies enable harmony generators enable harmony numeric enable harmony string enable harmony math functions harmony_scoping harmony_symbols harmony_collections harmony_iteration harmony_strings harmony_scoping harmony_maths tracks arrays with only smi values Optimize object Array DOM strings and string pretenure call new trace pretenuring decisions of HAllocate instructions track fields with only smi values track fields with heap values track_fields track_fields Enables optimizations which favor memory size over execution speed use string slices optimization filter maximum number of GVN fix point iterations use function inlining use allocation folding eliminate write barriers targeting allocations in optimized code maximum source size in bytes considered for a single inlining maximum cumulative number of AST nodes considered for inlining crankshaft harvests type feedback from stub cache trace check elimination phase hydrogen tracing filter trace hydrogen to given file name trace inlining decisions trace store elimination trace all use positions trace global value numbering trace hydrogen escape analysis trace the tracking of allocation sites trace map generalization environment for every instruction deoptimize every n garbage collections put a break point before deoptimizing deoptimize uncommon cases use on stack replacement trace array bounds check elimination perform array index dehoisting use load elimination use store elimination use constant folding eliminate unreachable code number of stress runs when picking a function to watch for shared function not JSFunction itself flushes the cache of optimized code for closures on every GC functions with arguments object maximum number of escape analysis fix point iterations allow uint32 values on optimize frames if they are used only in safe operations track concurrent recompilation artificial compilation delay in ms concurrent on stack replacement do not emit check maps for constant values that have a leaf deoptimize the optimized code if the layout of the maps changes number of stack frames inspected by the profiler percentage of ICs that must have type info to allow optimization extra verbose compilation tracing generate extra emit comments in code disassembly enable use of SSE3 instructions if available enable use of CMOV instruction if available enable use of VFP3 instructions if available enable use of NEON instructions if enable use of SDIV and UDIV instructions if enable loading bit constant by means of movw movt instruction enable unaligned accesses for enable use of d16 d31 registers on ARM this requires VFP3 force all emitted branches to be in long mode(MIPS only)") DEFINE_string(expose_natives_as
enable upcoming ES6 features enable harmony block scoping enable harmony enable harmony proxies enable harmony generators enable harmony numeric enable harmony string enable harmony math functions harmony_scoping harmony_symbols harmony_collections harmony_iteration harmony_strings harmony_scoping harmony_maths tracks arrays with only smi values Optimize object size
Definition: flags.cc:211
static const int kLengthOffset
Definition: objects.h:8905
Print usage including flags
Definition: flags.cc:665
virtual Handle< Code > GenerateCode(Isolate *isolate) V8_OVERRIDE
void GetCode(CodeDesc *desc)
const int kPointerSize
Definition: globals.h:268
static const int kTransitionInfoOffset
Definition: objects.h:8411
void set_current_block(HBasicBlock *block)
Definition: hydrogen.h:1067
HValue * BuildArrayConstructor(ElementsKind kind, AllocationSiteOverrideMode override_mode, ArgumentClass argument_class)
PerThreadAssertScopeDebugOnly< HANDLE_ALLOCATION_ASSERT, false > DisallowHandleAllocation
Definition: assert-scope.h:206
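For context, a minimal usage sketch of the per-thread assert scopes indexed here (the enclosing function is hypothetical): while such a scope is alive, heap or handle allocation on the current thread trips an assertion.

void HypotheticalNoAllocationSection() {
  // RAII scopes: any heap/handle allocation ASSERTs until scope exit.
  DisallowHeapAllocation no_allocation;
  DisallowHandleAllocation no_handles;
  // ... work that must not allocate on the V8 heap or create handles ...
}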
DEFINE_bool(code_comments
CodeStubGraphBuilder(Isolate *isolate, Stub *stub)
virtual Handle< Code > GenerateCode(Isolate *isolate)
static LChunk * NewChunk(HGraph *graph)
Definition: lithium.cc:398
bool IsFastSmiElementsKind(ElementsKind kind)
static const int kCachedCodeOffset
Definition: objects.h:6744
HValue * BuildAllocateArrayFromLength(JSArrayBuilder *array_builder, HValue *length_argument)
Definition: hydrogen.cc:2255
Handle< Code > GenerateLightweightMissCode(Isolate *isolate)
static BailoutId None()
Definition: utils.h:1164
virtual Handle< Code > GenerateCode(Isolate *isolate)
bool Is(TypeImpl *that)
Definition: types.h:246
Definition: v8.h:2107
static const int kHeaderSize
Definition: objects.h:3016
bool is_valid() const
HydrogenCodeStub * code_stub() const
Definition: compiler.h:84
static const int kEntriesStart
Definition: objects.h:6742
const char * GetBailoutReason(BailoutReason reason)
Definition: objects.cc:16437
virtual Handle< Code > GenerateCode(Isolate *isolate)
ElementsKind GetInitialFastElementsKind()
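A hedged sketch combining the two ElementsKind entries in this index; the branch body is a placeholder:

ElementsKind kind = GetInitialFastElementsKind();
if (IsFastSmiElementsKind(kind)) {
  // elements are known to contain only small integers (smis)
}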
#define UNIMPLEMENTED()
Definition: checks.h:50
static Flags ComputeFlags(Kind kind, InlineCacheState ic_state=UNINITIALIZED, ExtraICState extra_ic_state=kNoExtraICState, StubType type=NORMAL, InlineCacheHolderFlag holder=OWN_MAP)
Definition: objects-inl.h:4601
HBasicBlock * CreateBasicBlock(HEnvironment *env)
Definition: hydrogen.cc:1235
Handle< Code > NewCode(const CodeDesc &desc, Code::Flags flags, Handle< Object > self_reference, bool immovable=false, bool crankshafted=false, int prologue_offset=Code::kPrologueOffsetNotSet)
Definition: factory.cc:1291
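A hedged sketch of how ComputeFlags() and NewCode() above compose, assuming desc (a populated CodeDesc), factory (the isolate's Factory), and self_ref are already in scope; Code::STUB is used purely as an illustrative kind:

Code::Flags flags = Code::ComputeFlags(Code::STUB);
Handle<Code> code = factory->NewCode(desc, flags, self_ref,
                                     false,  // immovable
                                     true);  // crankshafted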
void BuildInstallCode(HValue *js_function, HValue *shared_info)
CodeStubInterfaceDescriptor * GetInterfaceDescriptor(Isolate *isolate)
Definition: code-stubs.h:395
Counters * counters()
Definition: isolate.h:859
PerThreadAssertScopeDebugOnly< HEAP_ALLOCATION_ASSERT, false > DisallowHeapAllocation
Definition: assert-scope.h:214
static const int kHeaderSize
Definition: objects.h:2757
virtual Handle< Code > GenerateCode(Isolate *isolate)
HValue * BuildInternalArrayConstructor(ElementsKind kind, ArgumentClass argument_class)
static const int kEntryLength
Definition: objects.h:6747
void BuildCheckAndInstallOptimizedCode(HValue *js_function, HValue *native_context, IfBuilder *builder, HValue *optimized_map, HValue *map_index)
static Handle< HeapObject > global_placeholder(Isolate *isolate)
Definition: code-stubs.h:1001
static const int kWeakNextOffset
Definition: objects.h:8418
static const int kSize
Definition: objects.h:7527
static const int kSize
Definition: objects.h:8419
DEFINE_bool(harmony_strings
static const int kValueOffset
Definition: objects.h:7701
HParameter * GetParameter(int parameter)
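A hedged sketch of GetParameter() inside a stub builder's BuildCodeStub() override; MyStub is a hypothetical stub type, and returning the parameter unchanged stands in for real graph construction:

template <>
HValue* CodeStubGraphBuilder<MyStub>::BuildCodeStub() {
  // Indexes into the stub's interface-descriptor register parameters.
  HValue* receiver = GetParameter(0);
  return receiver;
}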
static Representation Tagged()
virtual Handle< Code > GenerateCode(Isolate *isolate)
CodeStubGraphBuilderBase(Isolate *isolate, HydrogenCodeStub *stub)
void BuildInstallFromOptimizedCodeMap(HValue *js_function, HValue *shared_info, HValue *native_context)
void FinishCurrentBlock(HControlInstruction *last)
Definition: hydrogen.cc:1193
static const int kInitialMaxFastElementArray
Definition: objects.h:2744
virtual Handle< Code > GenerateCode(Isolate *isolate)
static const int kLiteralsOffset
Definition: objects.h:6745
HGraph * graph() const
Definition: hydrogen.h:1072
virtual Handle< Code > GenerateCode(Isolate *isolate)
virtual Handle< Code > GenerateCode(Isolate *isolate)
HInstruction * LoadFromOptimizedCodeMap(HValue *optimized_map, HValue *iterator, int field_offset)
void Goto(HBasicBlock *from, HBasicBlock *target, FunctionState *state=NULL, bool add_simulate=true)
Definition: hydrogen.h:1089
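A hedged sketch of how CreateBasicBlock(), Goto(), set_current_block(), and FinishCurrentBlock() from this index cooperate when emitting control flow; env (an HEnvironment*), branch (an HControlInstruction*), and a current_block() accessor paired with set_current_block() are assumed:

HBasicBlock* body = CreateBasicBlock(env);  // new block entered with 'env'
Goto(current_block(), body);                // end the current block with a goto
set_current_block(body);                    // continue emitting into 'body'
FinishCurrentBlock(branch);                 // later, end 'body' with a branch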
virtual Handle< Code > GenerateCode(Isolate *isolate)
HTracer * GetHTracer()
Definition: isolate.cc:2223