V8 3.14.5 (node 0.10.28)
V8 is Google's open source JavaScript engine
 All Data Structures Namespaces Files Functions Variables Typedefs Enumerations Enumerator Friends Macros Pages
macro-assembler-ia32.h
Go to the documentation of this file.
1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are
4 // met:
5 //
6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided
11 // with the distribution.
12 // * Neither the name of Google Inc. nor the names of its
13 // contributors may be used to endorse or promote products derived
14 // from this software without specific prior written permission.
15 //
16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27 
28 #ifndef V8_IA32_MACRO_ASSEMBLER_IA32_H_
29 #define V8_IA32_MACRO_ASSEMBLER_IA32_H_
30 
31 #include "assembler.h"
32 #include "frames.h"
33 #include "v8globals.h"
34 
35 namespace v8 {
36 namespace internal {
37 
38 // Flags used for the AllocateInNewSpace functions.
40  // No special flags.
42  // Return the pointer to the allocated already tagged as a heap object.
43  TAG_OBJECT = 1 << 0,
44  // The content of the result register already contains the allocation top in
45  // new space.
46  RESULT_CONTAINS_TOP = 1 << 1
47 };
48 
49 
50 // Convenience for platform-independent signatures. We do not normally
51 // distinguish memory operands from other operands on ia32.
52 typedef Operand MemOperand;
53 
56 
57 
58 bool AreAliased(Register r1, Register r2, Register r3, Register r4);
59 
60 
61 // MacroAssembler implements a collection of frequently used macros.
62 class MacroAssembler: public Assembler {
63  public:
64  // The isolate parameter can be NULL if the macro assembler should
65  // not use isolate-dependent functionality. In this case, it's the
66  // responsibility of the caller to never invoke such function on the
67  // macro assembler.
68  MacroAssembler(Isolate* isolate, void* buffer, int size);
69 
70  // ---------------------------------------------------------------------------
71  // GC Support
75  };
76 
77  // Record in the remembered set the fact that we have a pointer to new space
78  // at the address pointed to by the addr register. Only works if addr is not
79  // in new space.
80  void RememberedSetHelper(Register object, // Used for debug code.
81  Register addr,
82  Register scratch,
83  SaveFPRegsMode save_fp,
84  RememberedSetFinalAction and_then);
85 
86  void CheckPageFlag(Register object,
87  Register scratch,
88  int mask,
89  Condition cc,
90  Label* condition_met,
91  Label::Distance condition_met_distance = Label::kFar);
92 
94  Handle<Map> map,
95  int mask,
96  Condition cc,
97  Label* condition_met,
98  Label::Distance condition_met_distance = Label::kFar);
99 
100  // Check if object is in new space. Jumps if the object is not in new space.
101  // The register scratch can be object itself, but scratch will be clobbered.
103  Register scratch,
104  Label* branch,
105  Label::Distance distance = Label::kFar) {
106  InNewSpace(object, scratch, zero, branch, distance);
107  }
108 
109  // Check if object is in new space. Jumps if the object is in new space.
110  // The register scratch can be object itself, but it will be clobbered.
112  Register scratch,
113  Label* branch,
114  Label::Distance distance = Label::kFar) {
115  InNewSpace(object, scratch, not_zero, branch, distance);
116  }
117 
118  // Check if an object has a given incremental marking color. Also uses ecx!
119  void HasColor(Register object,
120  Register scratch0,
121  Register scratch1,
122  Label* has_color,
123  Label::Distance has_color_distance,
124  int first_bit,
125  int second_bit);
126 
127  void JumpIfBlack(Register object,
128  Register scratch0,
129  Register scratch1,
130  Label* on_black,
131  Label::Distance on_black_distance = Label::kFar);
132 
133  // Checks the color of an object. If the object is already grey or black
134  // then we just fall through, since it is already live. If it is white and
135  // we can determine that it doesn't need to be scanned, then we just mark it
136  // black and fall through. For the rest we jump to the label so the
137  // incremental marker can fix its assumptions.
138  void EnsureNotWhite(Register object,
139  Register scratch1,
140  Register scratch2,
141  Label* object_is_white_and_not_data,
142  Label::Distance distance);
143 
144  // Notify the garbage collector that we wrote a pointer into an object.
145  // |object| is the object being stored into, |value| is the object being
146  // stored. value and scratch registers are clobbered by the operation.
147  // The offset is the offset from the start of the object, not the offset from
148  // the tagged HeapObject pointer. For use with FieldOperand(reg, off).
149  void RecordWriteField(
150  Register object,
151  int offset,
152  Register value,
153  Register scratch,
154  SaveFPRegsMode save_fp,
155  RememberedSetAction remembered_set_action = EMIT_REMEMBERED_SET,
156  SmiCheck smi_check = INLINE_SMI_CHECK);
157 
158  // As above, but the offset has the tag presubtracted. For use with
159  // Operand(reg, off).
161  Register context,
162  int offset,
163  Register value,
164  Register scratch,
165  SaveFPRegsMode save_fp,
166  RememberedSetAction remembered_set_action = EMIT_REMEMBERED_SET,
167  SmiCheck smi_check = INLINE_SMI_CHECK) {
168  RecordWriteField(context,
169  offset + kHeapObjectTag,
170  value,
171  scratch,
172  save_fp,
173  remembered_set_action,
174  smi_check);
175  }
176 
177  // Notify the garbage collector that we wrote a pointer into a fixed array.
178  // |array| is the array being stored into, |value| is the
179  // object being stored. |index| is the array index represented as a
180  // Smi. All registers are clobbered by the operation RecordWriteArray
181  // filters out smis so it does not update the write barrier if the
182  // value is a smi.
183  void RecordWriteArray(
184  Register array,
185  Register value,
186  Register index,
187  SaveFPRegsMode save_fp,
188  RememberedSetAction remembered_set_action = EMIT_REMEMBERED_SET,
189  SmiCheck smi_check = INLINE_SMI_CHECK);
190 
191  // For page containing |object| mark region covering |address|
192  // dirty. |object| is the object being stored into, |value| is the
193  // object being stored. The address and value registers are clobbered by the
194  // operation. RecordWrite filters out smis so it does not update the
195  // write barrier if the value is a smi.
196  void RecordWrite(
197  Register object,
198  Register address,
199  Register value,
200  SaveFPRegsMode save_fp,
201  RememberedSetAction remembered_set_action = EMIT_REMEMBERED_SET,
202  SmiCheck smi_check = INLINE_SMI_CHECK);
203 
204  // For page containing |object| mark the region covering the object's map
205  // dirty. |object| is the object being stored into, |map| is the Map object
206  // that was stored.
207  void RecordWriteForMap(
208  Register object,
209  Handle<Map> map,
210  Register scratch1,
211  Register scratch2,
212  SaveFPRegsMode save_fp);
213 
214 #ifdef ENABLE_DEBUGGER_SUPPORT
215  // ---------------------------------------------------------------------------
216  // Debugger Support
217 
218  void DebugBreak();
219 #endif
220 
221  // Enter specific kind of exit frame. Expects the number of
222  // arguments in register eax and sets up the number of arguments in
223  // register edi and the pointer to the first argument in register
224  // esi.
225  void EnterExitFrame(bool save_doubles);
226 
227  void EnterApiExitFrame(int argc);
228 
229  // Leave the current exit frame. Expects the return value in
230  // register eax:edx (untouched) and the pointer to the first
231  // argument in register esi.
232  void LeaveExitFrame(bool save_doubles);
233 
234  // Leave the current exit frame. Expects the return value in
235  // register eax (untouched).
236  void LeaveApiExitFrame();
237 
238  // Find the function context up the context chain.
239  void LoadContext(Register dst, int context_chain_length);
240 
241  // Conditionally load the cached Array transitioned map of type
242  // transitioned_kind from the native context if the map in register
243  // map_in_out is the cached Array map in the native context of
244  // expected_kind.
246  ElementsKind expected_kind,
247  ElementsKind transitioned_kind,
248  Register map_in_out,
249  Register scratch,
250  Label* no_map_match);
251 
252  // Load the initial map for new Arrays from a JSFunction.
253  void LoadInitialArrayMap(Register function_in,
254  Register scratch,
255  Register map_out,
256  bool can_have_holes);
257 
258  // Load the global function with the given index.
259  void LoadGlobalFunction(int index, Register function);
260 
261  // Load the initial map from the global function. The registers
262  // function and map can be the same.
263  void LoadGlobalFunctionInitialMap(Register function, Register map);
264 
265  // Push and pop the registers that can hold pointers.
268  // Store the value in register/immediate src in the safepoint
269  // register stack slot for register dst.
271  void StoreToSafepointRegisterSlot(Register dst, Immediate src);
273 
274  void LoadHeapObject(Register result, Handle<HeapObject> object);
275  void PushHeapObject(Handle<HeapObject> object);
276 
277  void LoadObject(Register result, Handle<Object> object) {
278  if (object->IsHeapObject()) {
279  LoadHeapObject(result, Handle<HeapObject>::cast(object));
280  } else {
281  Set(result, Immediate(object));
282  }
283  }
284 
285  // ---------------------------------------------------------------------------
286  // JavaScript invokes
287 
288  // Set up call kind marking in ecx. The method takes ecx as an
289  // explicit first parameter to make the code more readable at the
290  // call sites.
291  void SetCallKind(Register dst, CallKind kind);
292 
293  // Invoke the JavaScript function code by either calling or jumping.
295  const ParameterCount& expected,
296  const ParameterCount& actual,
298  const CallWrapper& call_wrapper,
299  CallKind call_kind) {
300  InvokeCode(Operand(code), expected, actual, flag, call_wrapper, call_kind);
301  }
302 
303  void InvokeCode(const Operand& code,
304  const ParameterCount& expected,
305  const ParameterCount& actual,
307  const CallWrapper& call_wrapper,
308  CallKind call_kind);
309 
311  const ParameterCount& expected,
312  const ParameterCount& actual,
313  RelocInfo::Mode rmode,
315  const CallWrapper& call_wrapper,
316  CallKind call_kind);
317 
318  // Invoke the JavaScript function in the given register. Changes the
319  // current context to the context in the function before invoking.
320  void InvokeFunction(Register function,
321  const ParameterCount& actual,
323  const CallWrapper& call_wrapper,
324  CallKind call_kind);
325 
326  void InvokeFunction(Handle<JSFunction> function,
327  const ParameterCount& actual,
329  const CallWrapper& call_wrapper,
330  CallKind call_kind);
331 
332  // Invoke specified builtin JavaScript function. Adds an entry to
333  // the unresolved list if the name does not resolve.
336  const CallWrapper& call_wrapper = NullCallWrapper());
337 
338  // Store the function for the given builtin in the target register.
340 
341  // Store the code object for the given builtin in the target register.
343 
344  // Expression support
345  void Set(Register dst, const Immediate& x);
346  void Set(const Operand& dst, const Immediate& x);
347 
348  // Support for constant splitting.
349  bool IsUnsafeImmediate(const Immediate& x);
350  void SafeSet(Register dst, const Immediate& x);
351  void SafePush(const Immediate& x);
352 
353  // Compare against a known root, e.g. undefined, null, true, ...
354  void CompareRoot(Register with, Heap::RootListIndex index);
355  void CompareRoot(const Operand& with, Heap::RootListIndex index);
356 
357  // Compare object type for heap object.
358  // Incoming register is heap_object and outgoing register is map.
359  void CmpObjectType(Register heap_object, InstanceType type, Register map);
360 
361  // Compare instance type for map.
362  void CmpInstanceType(Register map, InstanceType type);
363 
364  // Check if a map for a JSObject indicates that the object has fast elements.
365  // Jump to the specified label if it does not.
366  void CheckFastElements(Register map,
367  Label* fail,
368  Label::Distance distance = Label::kFar);
369 
370  // Check if a map for a JSObject indicates that the object can have both smi
371  // and HeapObject elements. Jump to the specified label if it does not.
373  Label* fail,
374  Label::Distance distance = Label::kFar);
375 
376  // Check if a map for a JSObject indicates that the object has fast smi only
377  // elements. Jump to the specified label if it does not.
379  Label* fail,
380  Label::Distance distance = Label::kFar);
381 
382  // Check to see if maybe_number can be stored as a double in
383  // FastDoubleElements. If it can, store it at the index specified by key in
384  // the FastDoubleElements array elements, otherwise jump to fail.
385  void StoreNumberToDoubleElements(Register maybe_number,
386  Register elements,
387  Register key,
388  Register scratch1,
389  XMMRegister scratch2,
390  Label* fail,
391  bool specialize_for_processor);
392 
393  // Compare an object's map with the specified map and its transitioned
394  // elements maps if mode is ALLOW_ELEMENT_TRANSITION_MAPS. FLAGS are set with
395  // result of map compare. If multiple map compares are required, the compare
396  // sequences branches to early_success.
397  void CompareMap(Register obj,
398  Handle<Map> map,
399  Label* early_success,
401 
402  // Check if the map of an object is equal to a specified map and branch to
403  // label if not. Skip the smi check if not required (object is known to be a
404  // heap object). If mode is ALLOW_ELEMENT_TRANSITION_MAPS, then also match
405  // against maps that are ElementsKind transition maps of the specified map.
406  void CheckMap(Register obj,
407  Handle<Map> map,
408  Label* fail,
409  SmiCheckType smi_check_type,
411 
412  // Check if the map of an object is equal to a specified map and branch to a
413  // specified target if equal. Skip the smi check if not required (object is
414  // known to be a heap object)
415  void DispatchMap(Register obj,
416  Handle<Map> map,
417  Handle<Code> success,
418  SmiCheckType smi_check_type);
419 
420  // Check if the object in register heap_object is a string. Afterwards the
421  // register map contains the object map and the register instance_type
422  // contains the instance_type. The registers map and instance_type can be the
423  // same in which case it contains the instance type afterwards. Either of the
424  // registers map and instance_type can be the same as heap_object.
426  Register map,
427  Register instance_type);
428 
429  // Check if a heap object's type is in the JSObject range, not including
430  // JSFunction. The object's map will be loaded in the map register.
431  // Any or all of the three registers may be the same.
432  // The contents of the scratch register will always be overwritten.
433  void IsObjectJSObjectType(Register heap_object,
434  Register map,
435  Register scratch,
436  Label* fail);
437 
438  // The contents of the scratch register will be overwritten.
439  void IsInstanceJSObjectType(Register map, Register scratch, Label* fail);
440 
441  // FCmp is similar to integer cmp, but requires unsigned
442  // jcc instructions (je, ja, jae, jb, jbe, je, and jz).
443  void FCmp();
444 
445  void ClampUint8(Register reg);
446 
447  void ClampDoubleToUint8(XMMRegister input_reg,
448  XMMRegister scratch_reg,
449  Register result_reg);
450 
451 
452  // Smi tagging support.
453  void SmiTag(Register reg) {
454  STATIC_ASSERT(kSmiTag == 0);
456  add(reg, reg);
457  }
458  void SmiUntag(Register reg) {
459  sar(reg, kSmiTagSize);
460  }
461 
462  // Modifies the register even if it does not contain a Smi!
463  void SmiUntag(Register reg, Label* is_smi) {
465  sar(reg, kSmiTagSize);
466  STATIC_ASSERT(kSmiTag == 0);
467  j(not_carry, is_smi);
468  }
469 
470  void LoadUint32(XMMRegister dst, Register src, XMMRegister scratch);
471 
472  // Jump if the register contains a smi.
473  inline void JumpIfSmi(Register value,
474  Label* smi_label,
475  Label::Distance distance = Label::kFar) {
476  test(value, Immediate(kSmiTagMask));
477  j(zero, smi_label, distance);
478  }
479  // Jump if the operand is a smi.
480  inline void JumpIfSmi(Operand value,
481  Label* smi_label,
482  Label::Distance distance = Label::kFar) {
483  test(value, Immediate(kSmiTagMask));
484  j(zero, smi_label, distance);
485  }
486  // Jump if the register contains a non-smi.
487  inline void JumpIfNotSmi(Register value,
488  Label* not_smi_label,
489  Label::Distance distance = Label::kFar) {
490  test(value, Immediate(kSmiTagMask));
491  j(not_zero, not_smi_label, distance);
492  }
493 
494  void LoadInstanceDescriptors(Register map, Register descriptors);
495  void EnumLength(Register dst, Register map);
497 
498  template<typename Field>
499  void DecodeField(Register reg) {
500  static const int shift = Field::kShift;
501  static const int mask = (Field::kMask >> Field::kShift) << kSmiTagSize;
502  sar(reg, shift);
503  and_(reg, Immediate(mask));
504  }
505  void LoadPowerOf2(XMMRegister dst, Register scratch, int power);
506 
507  // Abort execution if argument is not a number, enabled via --debug-code.
508  void AssertNumber(Register object);
509 
510  // Abort execution if argument is not a smi, enabled via --debug-code.
511  void AssertSmi(Register object);
512 
513  // Abort execution if argument is a smi, enabled via --debug-code.
514  void AssertNotSmi(Register object);
515 
516  // Abort execution if argument is not a string, enabled via --debug-code.
517  void AssertString(Register object);
518 
519  // ---------------------------------------------------------------------------
520  // Exception handling
521 
522  // Push a new try handler and link it into try handler chain.
523  void PushTryHandler(StackHandler::Kind kind, int handler_index);
524 
525  // Unlink the stack handler on top of the stack from the try handler chain.
526  void PopTryHandler();
527 
528  // Throw to the top handler in the try handler chain.
529  void Throw(Register value);
530 
531  // Throw past all JS frames to the top JS entry frame.
532  void ThrowUncatchable(Register value);
533 
534  // ---------------------------------------------------------------------------
535  // Inline caching support
536 
537  // Generate code for checking access rights - used for security checks
538  // on access to global objects across environments. The holder register
539  // is left untouched, but the scratch register is clobbered.
540  void CheckAccessGlobalProxy(Register holder_reg,
541  Register scratch,
542  Label* miss);
543 
544  void GetNumberHash(Register r0, Register scratch);
545 
546  void LoadFromNumberDictionary(Label* miss,
547  Register elements,
548  Register key,
549  Register r0,
550  Register r1,
551  Register r2,
552  Register result);
553 
554 
555  // ---------------------------------------------------------------------------
556  // Allocation support
557 
558  // Allocate an object in new space. If the new space is exhausted control
559  // continues at the gc_required label. The allocated object is returned in
560  // result and end of the new object is returned in result_end. The register
561  // scratch can be passed as no_reg in which case an additional object
562  // reference will be added to the reloc info. The returned pointers in result
563  // and result_end have not yet been tagged as heap objects. If
564  // result_contains_top_on_entry is true the content of result is known to be
565  // the allocation top on entry (could be result_end from a previous call to
566  // AllocateInNewSpace). If result_contains_top_on_entry is true scratch
567  // should be no_reg as it is never used.
568  void AllocateInNewSpace(int object_size,
569  Register result,
570  Register result_end,
571  Register scratch,
572  Label* gc_required,
574 
575  void AllocateInNewSpace(int header_size,
576  ScaleFactor element_size,
577  Register element_count,
578  Register result,
579  Register result_end,
580  Register scratch,
581  Label* gc_required,
583 
584  void AllocateInNewSpace(Register object_size,
585  Register result,
586  Register result_end,
587  Register scratch,
588  Label* gc_required,
590 
591  // Undo allocation in new space. The object passed and objects allocated after
592  // it will no longer be allocated. Make sure that no pointers are left to the
593  // object(s) no longer allocated as they would be invalid when allocation is
594  // un-done.
595  void UndoAllocationInNewSpace(Register object);
596 
597  // Allocate a heap number in new space with undefined value. The
598  // register scratch2 can be passed as no_reg; the others must be
599  // valid registers. Returns tagged pointer in result register, or
600  // jumps to gc_required if new space is full.
601  void AllocateHeapNumber(Register result,
602  Register scratch1,
603  Register scratch2,
604  Label* gc_required);
605 
606  // Allocate a sequential string. All the header fields of the string object
607  // are initialized.
608  void AllocateTwoByteString(Register result,
609  Register length,
610  Register scratch1,
611  Register scratch2,
612  Register scratch3,
613  Label* gc_required);
614  void AllocateAsciiString(Register result,
615  Register length,
616  Register scratch1,
617  Register scratch2,
618  Register scratch3,
619  Label* gc_required);
620  void AllocateAsciiString(Register result,
621  int length,
622  Register scratch1,
623  Register scratch2,
624  Label* gc_required);
625 
626  // Allocate a raw cons string object. Only the map field of the result is
627  // initialized.
629  Register scratch1,
630  Register scratch2,
631  Label* gc_required);
632  void AllocateAsciiConsString(Register result,
633  Register scratch1,
634  Register scratch2,
635  Label* gc_required);
636 
637  // Allocate a raw sliced string object. Only the map field of the result is
638  // initialized.
640  Register scratch1,
641  Register scratch2,
642  Label* gc_required);
644  Register scratch1,
645  Register scratch2,
646  Label* gc_required);
647 
648  // Copy memory, byte-by-byte, from source to destination. Not optimized for
649  // long or aligned copies.
650  // The contents of index and scratch are destroyed.
651  void CopyBytes(Register source,
652  Register destination,
653  Register length,
654  Register scratch);
655 
656  // Initialize fields with filler values. Fields starting at |start_offset|
657  // not including end_offset are overwritten with the value in |filler|. At
658  // the end the loop, |start_offset| takes the value of |end_offset|.
659  void InitializeFieldsWithFiller(Register start_offset,
660  Register end_offset,
661  Register filler);
662 
663  // ---------------------------------------------------------------------------
664  // Support functions.
665 
666  // Check a boolean-bit of a Smi field.
667  void BooleanBitTest(Register object, int field_offset, int bit_index);
668 
669  // Check if result is zero and op is negative.
670  void NegativeZeroTest(Register result, Register op, Label* then_label);
671 
672  // Check if result is zero and any of op1 and op2 are negative.
673  // Register scratch is destroyed, and it must be different from op2.
674  void NegativeZeroTest(Register result, Register op1, Register op2,
675  Register scratch, Label* then_label);
676 
677  // Try to get function prototype of a function and puts the value in
678  // the result register. Checks that the function really is a
679  // function and jumps to the miss label if the fast checks fail. The
680  // function register will be untouched; the other registers may be
681  // clobbered.
682  void TryGetFunctionPrototype(Register function,
683  Register result,
684  Register scratch,
685  Label* miss,
686  bool miss_on_bound_function = false);
687 
688  // Generates code for reporting that an illegal operation has
689  // occurred.
690  void IllegalOperation(int num_arguments);
691 
692  // Picks out an array index from the hash field.
693  // Register use:
694  // hash - holds the index's hash. Clobbered.
695  // index - holds the overwritten index on exit.
696  void IndexFromHash(Register hash, Register index);
697 
698  // ---------------------------------------------------------------------------
699  // Runtime calls
700 
701  // Call a code stub. Generate the code if necessary.
702  void CallStub(CodeStub* stub, TypeFeedbackId ast_id = TypeFeedbackId::None());
703 
704  // Tail call a code stub (jump). Generate the code if necessary.
705  void TailCallStub(CodeStub* stub);
706 
707  // Return from a code stub after popping its arguments.
708  void StubReturn(int argc);
709 
710  // Call a runtime routine.
711  void CallRuntime(const Runtime::Function* f, int num_arguments);
713 
714  // Convenience function: Same as above, but takes the fid instead.
715  void CallRuntime(Runtime::FunctionId id, int num_arguments);
716 
717  // Convenience function: call an external reference.
718  void CallExternalReference(ExternalReference ref, int num_arguments);
719 
720  // Tail call of a runtime routine (jump).
721  // Like JumpToExternalReference, but also takes care of passing the number
722  // of parameters.
723  void TailCallExternalReference(const ExternalReference& ext,
724  int num_arguments,
725  int result_size);
726 
727  // Convenience function: tail call a runtime routine (jump).
729  int num_arguments,
730  int result_size);
731 
732  // Before calling a C-function from generated code, align arguments on stack.
733  // After aligning the frame, arguments must be stored in esp[0], esp[4],
734  // etc., not pushed. The argument count assumes all arguments are word sized.
735  // Some compilers/platforms require the stack to be aligned when calling
736  // C++ code.
737  // Needs a scratch register to do some arithmetic. This register will be
738  // trashed.
739  void PrepareCallCFunction(int num_arguments, Register scratch);
740 
741  // Calls a C function and cleans up the space for arguments allocated
742  // by PrepareCallCFunction. The called function is not allowed to trigger a
743  // garbage collection, since that might move the code and invalidate the
744  // return address (unless this is somehow accounted for by the called
745  // function).
746  void CallCFunction(ExternalReference function, int num_arguments);
747  void CallCFunction(Register function, int num_arguments);
748 
749  // Prepares stack to put arguments (aligns and so on). Reserves
750  // space for return value if needed (assumes the return value is a handle).
751  // Arguments must be stored in ApiParameterOperand(0), ApiParameterOperand(1)
752  // etc. Saves context (esi). If space was reserved for return value then
753  // stores the pointer to the reserved slot into esi.
754  void PrepareCallApiFunction(int argc);
755 
756  // Calls an API function. Allocates HandleScope, extracts returned value
757  // from handle and propagates exceptions. Clobbers ebx, edi and
758  // caller-save registers. Restores context. On return removes
759  // stack_space * kPointerSize (GCed).
760  void CallApiFunctionAndReturn(Address function_address, int stack_space);
761 
762  // Jump to a runtime routine.
763  void JumpToExternalReference(const ExternalReference& ext);
764 
765  // ---------------------------------------------------------------------------
766  // Utilities
767 
768  void Ret();
769 
770  // Return and drop arguments from stack, where the number of arguments
771  // may be bigger than 2^16 - 1. Requires a scratch register.
772  void Ret(int bytes_dropped, Register scratch);
773 
774  // Emit code to discard a non-negative number of pointer-sized elements
775  // from the stack, clobbering only the esp register.
776  void Drop(int element_count);
777 
778  void Call(Label* target) { call(target); }
779 
780  // Emit call to the code we are currently generating.
781  void CallSelf() {
782  Handle<Code> self(reinterpret_cast<Code**>(CodeObject().location()));
783  call(self, RelocInfo::CODE_TARGET);
784  }
785 
786  // Move if the registers are not identical.
787  void Move(Register target, Register source);
788 
789  // Push a handle value.
790  void Push(Handle<Object> handle) { push(Immediate(handle)); }
791 
793  ASSERT(!code_object_.is_null());
794  return code_object_;
795  }
796 
797 
798  // ---------------------------------------------------------------------------
799  // StatsCounter support
800 
801  void SetCounter(StatsCounter* counter, int value);
// NOTE(review): this span is the tail of the MacroAssembler class declaration
// as rendered by Doxygen; the leading integers on each line are listing line
// numbers, not code. Several hyperlinked identifier lines (original listing
// lines 841, 847, 853 and 892) were lost in extraction, leaving parameter
// lists without their introducing declaration — flagged inline below.
802  void IncrementCounter(StatsCounter* counter, int value);
803  void DecrementCounter(StatsCounter* counter, int value);
 // Conditional variants: only touch the counter when condition cc holds.
804  void IncrementCounter(Condition cc, StatsCounter* counter, int value);
805  void DecrementCounter(Condition cc, StatsCounter* counter, int value);
806 
807 
808  // ---------------------------------------------------------------------------
809  // Debugging
810 
811  // Calls Abort(msg) if the condition cc is not satisfied.
812  // Use --debug_code to enable.
813  void Assert(Condition cc, const char* msg);
814 
 // Asserts (under --debug_code) that |elements| holds a fast-elements
 // backing store.
815  void AssertFastElements(Register elements);
816 
817  // Like Assert(), but always enabled.
818  void Check(Condition cc, const char* msg);
819 
820  // Print a message to stdout and abort execution.
821  void Abort(const char* msg);
822 
823  // Check that the stack is aligned.
824  void CheckStackAlignment();
825 
826  // Verify restrictions about code generated in stubs.
827  void set_generating_stub(bool value) { generating_stub_ = value; }
828  bool generating_stub() { return generating_stub_; }
829  void set_allow_stub_calls(bool value) { allow_stub_calls_ = value; }
830  bool allow_stub_calls() { return allow_stub_calls_; }
831  void set_has_frame(bool value) { has_frame_ = value; }
832  bool has_frame() { return has_frame_; }
833  inline bool AllowThisStubCall(CodeStub* stub);
834 
835  // ---------------------------------------------------------------------------
836  // String utilities.
837 
838  // Check whether the instance type represents a flat ASCII string. Jump to the
839  // label if not. If the instance type can be scratched specify same register
840  // for both instance type and scratch.
 // NOTE(review): the declaration name on original listing line 841 was
 // dropped by the extractor — presumably
 // JumpIfInstanceTypeIsNotSequentialAscii(Register instance_type, ...);
 // confirm against the original header before relying on this listing.
842  Register scratch,
843  Label* on_not_flat_ascii_string);
844 
845  // Checks if both objects are sequential ASCII strings, and jumps to label
846  // if either is not.
 // NOTE(review): declaration name on original listing line 847 dropped —
 // presumably JumpIfNotBothSequentialAsciiStrings(Register object1, ...);
 // confirm against the original header.
848  Register object2,
849  Register scratch1,
850  Register scratch2,
851  Label* on_not_flat_ascii_strings);
852 
 // NOTE(review): the signature on original listing line 853 was dropped —
 // presumably "static int SafepointRegisterStackIndex(Register reg) {";
 // the surviving body maps a register to its safepoint stack slot index.
854  return SafepointRegisterStackIndex(reg.code());
855  }
856 
857  // Activation support.
858  void EnterFrame(StackFrame::Type type);
859  void LeaveFrame(StackFrame::Type type);
860 
861  // Expects object in eax and returns map with validated enum cache
862  // in eax. Assumes that any other register can be used as a scratch.
863  void CheckEnumCache(Label* call_runtime);
864 
865  private:
866  bool generating_stub_;
867  bool allow_stub_calls_;
868  bool has_frame_;
869  // This handle will be patched with the code object on installation.
870  Handle<Object> code_object_;
871 
872  // Helper functions for generating invokes.
873  void InvokePrologue(const ParameterCount& expected,
874  const ParameterCount& actual,
875  Handle<Code> code_constant,
876  const Operand& code_operand,
877  Label* done,
878  bool* definitely_mismatches,
879  InvokeFlag flag,
880  Label::Distance done_distance,
881  const CallWrapper& call_wrapper = NullCallWrapper(),
882  CallKind call_kind = CALL_AS_METHOD);
883 
884  void EnterExitFramePrologue();
885  void EnterExitFrameEpilogue(int argc, bool save_doubles);
886 
887  void LeaveExitFrameEpilogue();
888 
889  // Allocation support helpers.
890  void LoadAllocationTopHelper(Register result,
891  Register scratch,
 // NOTE(review): original listing line 892 (the trailing parameter and
 // closing of LoadAllocationTopHelper, presumably "AllocationFlags flags);")
 // was dropped by the extractor.
893  void UpdateAllocationTopHelper(Register result_end, Register scratch);
894 
895  // Helper for PopHandleScope. Allowed to perform a GC and returns
896  // NULL if gc_allowed. Does not perform a GC if !gc_allowed, and
897  // possibly returns a failure object indicating an allocation failure.
898  MUST_USE_RESULT MaybeObject* PopHandleScopeHelper(Register saved,
899  Register scratch,
900  bool gc_allowed);
901 
902  // Helper for implementing JumpIfNotInNewSpace and JumpIfInNewSpace.
903  void InNewSpace(Register object,
904  Register scratch,
905  Condition cc,
906  Label* condition_met,
907  Label::Distance condition_met_distance = Label::kFar);
908 
909  // Helper for finding the mark bits for an address. Afterwards, the
910  // bitmap register points at the word with the mark bits and the mask
911  // the position of the first bit. Uses ecx as scratch and leaves addr_reg
912  // unchanged.
913  inline void GetMarkBits(Register addr_reg,
914  Register bitmap_reg,
915  Register mask_reg);
916 
917  // Helper for throwing exceptions. Compute a handler address and jump to
918  // it. See the implementation for register usage.
919  void JumpToHandlerEntry();
920 
921  // Compute memory operands for safepoint stack slots.
922  Operand SafepointRegisterSlot(Register reg);
923  static int SafepointRegisterStackIndex(int reg_code);
924 
925  // Needs access to SafepointRegisterStackIndex for optimized frame
926  // traversal.
927  friend class OptimizedFrame;
928 };
929 
930 
931 // The code patcher is used to patch (typically) small parts of code e.g. for
932 // debugging and other types of instrumentation. When using the code patcher
933 // the exact number of bytes specified must be emitted. Is not legal to emit
934 // relocation information. If any of these constraints are violated it causes
935 // an assertion.
936 class CodePatcher {
937  public:
 // Starts patching |size| bytes of code at |address|. The class-level
 // comment above requires that exactly |size| bytes be emitted and that no
 // relocation information is produced.
938  CodePatcher(byte* address, int size);
 // NOTE(review): the destructor is defined out-of-line; presumably it
 // verifies the emitted size and flushes the instruction cache — confirm
 // in macro-assembler-ia32.cc.
939  virtual ~CodePatcher();
940 
941  // Macro assembler to emit code.
942  MacroAssembler* masm() { return &masm_; }
943 
944  private:
945  byte* address_; // The address of the code being patched.
946  int size_; // Number of bytes of the expected patch size.
947  MacroAssembler masm_; // Macro assembler used to generate the code.
948 };
949 
950 
951 // -----------------------------------------------------------------------------
952 // Static helper functions.
953 
954 // Generate an Operand for loading a field from an object.
955 inline Operand FieldOperand(Register object, int offset) {
956  return Operand(object, offset - kHeapObjectTag);
957 }
958 
959 
960 // Generate an Operand for loading an indexed field from an object.
961 inline Operand FieldOperand(Register object,
962  Register index,
963  ScaleFactor scale,
964  int offset) {
965  return Operand(object, index, scale, offset - kHeapObjectTag);
966 }
967 
968 
969 inline Operand ContextOperand(Register context, int index) {
970  return Operand(context, Context::SlotOffset(index));
971 }
972 
973 
974 inline Operand GlobalObjectOperand() {
976 }
977 
978 
979 // Generates an Operand for saving parameters after PrepareCallApiFunction.
980 Operand ApiParameterOperand(int index);
981 
982 
// Code-coverage instrumentation: when GENERATED_CODE_COVERAGE is defined,
// every ACCESS_MASM(masm)->op() site first emits a runtime call to
// LogGeneratedCodeCoverage with a "__FILE__:LINE" string, saving and
// restoring EFLAGS (pushfd/popfd) and all general-purpose registers
// (pushad/popad) around the call so the instrumented code is unaffected.
983 #ifdef GENERATED_CODE_COVERAGE
984 extern void LogGeneratedCodeCoverage(const char* file_line);
985 #define CODE_COVERAGE_STRINGIFY(x) #x
986 #define CODE_COVERAGE_TOSTRING(x) CODE_COVERAGE_STRINGIFY(x)
987 #define __FILE_LINE__ __FILE__ ":" CODE_COVERAGE_TOSTRING(__LINE__)
// Emits the logging sequence, then expands to "masm->" so the assembler op
// written at the macro's call site is appended directly after it.
988 #define ACCESS_MASM(masm) { \
989  byte* ia32_coverage_function = \
990  reinterpret_cast<byte*>(FUNCTION_ADDR(LogGeneratedCodeCoverage)); \
991  masm->pushfd(); \
992  masm->pushad(); \
993  masm->push(Immediate(reinterpret_cast<int>(&__FILE_LINE__))); \
994  masm->call(ia32_coverage_function, RelocInfo::RUNTIME_ENTRY); \
995  masm->pop(eax); \
996  masm->popad(); \
997  masm->popfd(); \
998  } \
999  masm->
// Without coverage instrumentation the macro is a plain member access.
1000 #else
1001 #define ACCESS_MASM(masm) masm->
1002 #endif
1003 
1004 
1005 } } // namespace v8::internal
1006 
1007 #endif // V8_IA32_MACRO_ASSEMBLER_IA32_H_
byte * Address
Definition: globals.h:157
void CallRuntime(const Runtime::Function *f, int num_arguments)
void ClampDoubleToUint8(Register result_reg, DoubleRegister input_reg, DoubleRegister temp_double_reg)
void LoadUint32(XMMRegister dst, Register src, XMMRegister scratch)
void Push(Handle< Object > handle)
void ClampUint8(Register output_reg, Register input_reg)
void LoadPowerOf2(XMMRegister dst, Register scratch, int power)
void SmiUntag(Register reg, Label *is_smi)
Isolate * isolate() const
Definition: assembler.h:61
const intptr_t kSmiTagMask
Definition: v8.h:4016
void Assert(Condition cond, const char *msg)
static int SlotOffset(int index)
Definition: contexts.h:425
void RecordWriteContextSlot(Register context, int offset, Register value, Register scratch, SaveFPRegsMode save_fp, RememberedSetAction remembered_set_action=EMIT_REMEMBERED_SET, SmiCheck smi_check=INLINE_SMI_CHECK)
const Register r3
void AllocateTwoByteSlicedString(Register result, Register length, Register scratch1, Register scratch2, Label *gc_required)
void LeaveExitFrame(bool save_doubles, Register argument_count)
void AssertString(Register object)
static TypeFeedbackId None()
Definition: utils.h:999
void RecordWriteArray(Register array, Register value, Register index, SaveFPRegsMode save_fp, RememberedSetAction remembered_set_action=EMIT_REMEMBERED_SET, SmiCheck smi_check=INLINE_SMI_CHECK)
void JumpToExternalReference(const ExternalReference &builtin)
void LoadInstanceDescriptors(Register map, Register descriptors)
void AllocateAsciiString(Register result, Register length, Register scratch1, Register scratch2, Register scratch3, Label *gc_required)
void JumpIfNotSmi(Register value, Label *not_smi_label, Label::Distance distance=Label::kFar)
void JumpIfNotBothSequentialAsciiStrings(Register first, Register second, Register scratch1, Register scratch2, Label *not_flat_ascii_strings)
void LoadFromNumberDictionary(Label *miss, Register elements, Register key, Register result, Register t0, Register t1, Register t2)
void GetBuiltinEntry(Register target, Builtins::JavaScript id)
void DispatchMap(Register obj, Register scratch, Handle< Map > map, Handle< Code > success, SmiCheckType smi_check_type)
bool AllowThisStubCall(CodeStub *stub)
void StoreToSafepointRegisterSlot(Register src, Register dst)
void CheckFastObjectElements(Register map, Register scratch, Label *fail)
void j(Condition cc, Label *L, Label::Distance distance=Label::kFar)
bool AreAliased(Register r1, Register r2, Register r3, Register r4)
#define ASSERT(condition)
Definition: checks.h:270
void RecordWriteForMap(Register object, Handle< Map > map, Register scratch1, Register scratch2, SaveFPRegsMode save_fp)
void AssertNotSmi(Register object)
void RecordWriteField(Register object, int offset, Register value, Register scratch, LinkRegisterStatus lr_status, SaveFPRegsMode save_fp, RememberedSetAction remembered_set_action=EMIT_REMEMBERED_SET, SmiCheck smi_check=INLINE_SMI_CHECK)
void PushTryHandler(StackHandler::Kind kind, int handler_index)
void LoadTransitionedArrayMapConditional(ElementsKind expected_kind, ElementsKind transitioned_kind, Register map_in_out, Register scratch, Label *no_map_match)
void CheckPageFlagForMap(Handle< Map > map, int mask, Condition cc, Label *condition_met, Label::Distance condition_met_distance=Label::kFar)
void NumberOfOwnDescriptors(Register dst, Register map)
MemOperand GlobalObjectOperand()
void IncrementCounter(StatsCounter *counter, int value, Register scratch1, Register scratch2)
const Register r2
void Abort(const char *msg)
void sar(Register dst, uint8_t imm8)
MemOperand ContextOperand(Register context, int index)
void CheckMap(Register obj, Register scratch, Handle< Map > map, Label *fail, SmiCheckType smi_check_type, CompareMapMode mode=REQUIRE_EXACT_MAP)
void AssertSmi(Register object)
void CompareRoot(Register obj, Heap::RootListIndex index)
void PushHeapObject(Handle< HeapObject > object)
void DecrementCounter(StatsCounter *counter, int value, Register scratch1, Register scratch2)
static int SafepointRegisterStackIndex(Register reg)
void JumpIfInNewSpace(Register object, Register scratch, Label *branch, Label::Distance distance=Label::kFar)
uint8_t byte
Definition: globals.h:156
void JumpIfInstanceTypeIsNotSequentialAscii(Register type, Register scratch, Label *failure)
T ** location() const
Definition: handles.h:75
void IsObjectJSObjectType(Register heap_object, Register map, Register scratch, Label *fail)
void EnumLength(Register dst, Register map)
STATIC_ASSERT((FixedDoubleArray::kHeaderSize &kDoubleAlignmentMask)==0)
void LeaveFrame(StackFrame::Type type)
void CheckFastElements(Register map, Register scratch, Label *fail)
#define MUST_USE_RESULT
Definition: globals.h:346
void LoadGlobalFunction(int index, Register function)
void CheckPageFlag(Register object, Register scratch, int mask, Condition cc, Label *condition_met)
void TryGetFunctionPrototype(Register function, Register result, Register scratch, Label *miss, bool miss_on_bound_function=false)
void CallCFunction(ExternalReference function, int num_arguments)
Condition IsObjectStringType(Register obj, Register type)
void SafePush(const Immediate &x)
void AllocateAsciiConsString(Register result, Register length, Register scratch1, Register scratch2, Label *gc_required)
Operand FieldOperand(Register object, int offset)
void IsInstanceJSObjectType(Register map, Register scratch, Label *fail)
void CheckFastSmiElements(Register map, Register scratch, Label *fail)
const int kHeapObjectTag
Definition: v8.h:4009
void RecordWrite(Register object, Register address, Register value, LinkRegisterStatus lr_status, SaveFPRegsMode save_fp, RememberedSetAction remembered_set_action=EMIT_REMEMBERED_SET, SmiCheck smi_check=INLINE_SMI_CHECK)
bool IsUnsafeImmediate(const Immediate &x)
void AllocateHeapNumber(Register result, Register scratch1, Register scratch2, Register heap_number_map, Label *gc_required, TaggingMode tagging_mode=TAG_RESULT)
void CopyBytes(Register src, Register dst, Register length, Register scratch)
void LoadHeapObject(Register dst, Handle< HeapObject > object)
void Throw(Register value)
void Move(Register dst, Handle< Object > value)
void SafeSet(Register dst, const Immediate &x)
void EnterApiExitFrame(int argc)
void PrepareCallApiFunction(int argc)
void SetCounter(StatsCounter *counter, int value, Register scratch1, Register scratch2)
void InvokeCode(Register code, const ParameterCount &expected, const ParameterCount &actual, InvokeFlag flag, const CallWrapper &call_wrapper, CallKind call_kind)
const Register r0
void NegativeZeroTest(Register result, Register op, Label *then_label)
MacroAssembler(Isolate *isolate, void *buffer, int size)
void LoadContext(Register dst, int context_chain_length)
void CallExternalReference(const ExternalReference &ext, int num_arguments)
void StoreNumberToDoubleElements(Register value_reg, Register key_reg, Register receiver_reg, Register elements_reg, Register scratch1, Register scratch2, Register scratch3, Register scratch4, Label *fail)
void JumpIfNotInNewSpace(Register object, Register scratch, Label *branch, Label::Distance distance=Label::kFar)
void AssertFastElements(Register elements)
void add(Register dst, Register src1, const Operand &src2, SBit s=LeaveCC, Condition cond=al)
void JumpIfBlack(Register object, Register scratch0, Register scratch1, Label *on_black)
void AllocateTwoByteConsString(Register result, Register length, Register scratch1, Register scratch2, Label *gc_required)
void Drop(int count, Condition cond=al)
InvokeFlag
void GetBuiltinFunction(Register target, Builtins::JavaScript id)
void IllegalOperation(int num_arguments)
void CheckAccessGlobalProxy(Register holder_reg, Register scratch, Label *miss)
void CallApiFunctionAndReturn(ExternalReference function, int stack_space)
void LoadObject(Register result, Handle< Object > object)
const Register r1
void CallRuntimeSaveDoubles(Runtime::FunctionId id)
bool is_null() const
Definition: handles.h:87
void ThrowUncatchable(Register value)
void AllocateInNewSpace(int object_size, Register result, Register scratch1, Register scratch2, Label *gc_required, AllocationFlags flags)
void PrepareCallCFunction(int num_reg_arguments, int num_double_registers, Register scratch)
void CompareMap(Register obj, Register scratch, Handle< Map > map, Label *early_success, CompareMapMode mode=REQUIRE_EXACT_MAP)
void LoadGlobalFunctionInitialMap(Register function, Register map, Register scratch)
void GetNumberHash(Register t0, Register scratch)
void InvokeFunction(Register function, const ParameterCount &actual, InvokeFlag flag, const CallWrapper &call_wrapper, CallKind call_kind)
void JumpIfSmi(Operand value, Label *smi_label, Label::Distance distance=Label::kFar)
Operand ApiParameterOperand(int index)
const int kSmiTagSize
Definition: v8.h:4015
void UndoAllocationInNewSpace(Register object, Register scratch)
void LoadFromSafepointRegisterSlot(Register dst, Register src)
void test(Register reg, const Immediate &imm)
const Register esi
void Set(Register dst, const Immediate &x)
void AllocateAsciiSlicedString(Register result, Register length, Register scratch1, Register scratch2, Label *gc_required)
const int kSmiTag
Definition: v8.h:4014
void Check(Condition cond, const char *msg)
void LoadInitialArrayMap(Register function_in, Register scratch, Register map_out, bool can_have_holes)
void InvokeBuiltin(Builtins::JavaScript id, InvokeFlag flag, const CallWrapper &call_wrapper=NullCallWrapper())
void TailCallStub(CodeStub *stub, Condition cond=al)
CodePatcher(byte *address, int instructions)
void BooleanBitTest(Register object, int field_offset, int bit_index)
activate correct semantics for inheriting readonliness enable harmony semantics for typeof enable harmony enable harmony proxies enable all harmony harmony_scoping harmony_proxies harmony_scoping tracks arrays with only smi values automatically unbox arrays of doubles use crankshaft use hydrogen range analysis use hydrogen global value numbering use function inlining maximum number of AST nodes considered for a single inlining loop invariant code motion print statistics for hydrogen trace generated IR for specified phases trace register allocator trace range analysis trace representation types environment for every instruction put a break point before deoptimizing polymorphic inlining perform array bounds checks elimination use dead code elimination trace on stack replacement optimize closures cache optimized code for closures functions with arguments object loop weight for representation inference allow uint32 values on optimize frames if they are used only in safe operations track parallel recompilation enable all profiler experiments number of stack frames inspected by the profiler call recompile stub directly when self optimizing trigger profiler ticks based on counting instead of timing weight back edges by jump distance for interrupt triggering percentage of ICs that must have type info to allow optimization watch_ic_patching retry_self_opt interrupt_at_exit extra verbose compilation tracing generate extra code(assertions) for debugging") DEFINE_bool(code_comments
void EnsureNotWhite(Register object, Register scratch1, Register scratch2, Register scratch3, Label *object_is_white_and_not_data)
void and_(Register dst, Register src1, const Operand &src2, SBit s=LeaveCC, Condition cond=al)
void CallStub(CodeStub *stub, Condition cond=al)
void IndexFromHash(Register hash, Register index)
void TailCallExternalReference(const ExternalReference &ext, int num_arguments, int result_size)
void EnterExitFrame(bool save_doubles, int stack_space=0)
void InitializeFieldsWithFiller(Register start_offset, Register end_offset, Register filler)
activate correct semantics for inheriting readonliness enable harmony semantics for typeof enable harmony enable harmony proxies enable all harmony harmony_scoping harmony_proxies harmony_scoping tracks arrays with only smi values automatically unbox arrays of doubles use crankshaft use hydrogen range analysis use hydrogen global value numbering use function inlining maximum number of AST nodes considered for a single inlining loop invariant code motion print statistics for hydrogen trace generated IR for specified phases trace register allocator trace range analysis trace representation types environment for every instruction put a break point before deoptimizing polymorphic inlining perform array bounds checks elimination use dead code elimination trace on stack replacement optimize closures cache optimized code for closures functions with arguments object loop weight for representation inference allow uint32 values on optimize frames if they are used only in safe operations track parallel recompilation enable all profiler experiments number of stack frames inspected by the profiler call recompile stub directly when self optimizing trigger profiler ticks based on counting instead of timing weight back edges by jump distance for interrupt triggering percentage of ICs that must have type info to allow optimization watch_ic_patching retry_self_opt interrupt_at_exit extra verbose compilation tracing generate extra emit comments in code disassembly enable use of SSE3 instructions if available enable use of CMOV instruction if available enable use of SAHF instruction if enable use of VFP3 instructions if available this implies enabling ARMv7 and VFP2 enable use of VFP2 instructions if available enable use of SDIV and UDIV instructions if enable loading bit constant by means of movw movt instruction enable unaligned accesses for enable use of MIPS FPU instructions if expose natives in global object expose gc extension number of stack frames to capture disable builtin 
natives files print a stack trace if an assertion failure occurs use random jit cookie to mask large constants trace lazy optimization use adaptive optimizations prepare for turning on always opt minimum length for automatic enable preparsing maximum number of optimization attempts before giving up cache prototype transitions automatically set the debug break flag when debugger commands are in the queue always cause a debug break before aborting maximum length of function source code printed in a stack trace max size of the new max size of the old max size of executable always perform global GCs print one trace line following each garbage collection do not print trace line after scavenger collection print more details following each garbage collection print amount of external allocated memory after each time it is adjusted flush code that we expect not to use again before full gc do incremental marking steps track object counts and memory usage use caching Perform compaction on every full GC Never perform compaction on full GC testing only Compact code space on full incremental collections Default seed for initializing random allows verbose printing trace parsing and preparsing Check icache flushes in ARM and MIPS simulator Stack alingment in bytes in print stack trace when throwing exceptions randomize hashes to avoid predictable hash Fixed seed to use to hash property activate a timer that switches between V8 threads testing_bool_flag float flag Seed used for threading test randomness A filename with extra code to be included in the Print usage including flags
Definition: flags.cc:495
void TailCallRuntime(Runtime::FunctionId fid, int num_arguments, int result_size)
void SetCallKind(Register dst, CallKind kind)
void AllocateTwoByteString(Register result, Register length, Register scratch1, Register scratch2, Register scratch3, Label *gc_required)
void RememberedSetHelper(Register object, Register addr, Register scratch, SaveFPRegsMode save_fp, RememberedSetFinalAction and_then)
void HasColor(Register object, Register scratch0, Register scratch1, Label *has_color, int first_bit, int second_bit)
void CmpObjectType(Register heap_object, InstanceType type, Register map)
void CmpInstanceType(Register map, InstanceType type)
void EnterFrame(StackFrame::Type type)
void JumpIfSmi(Register value, Label *smi_label, Label::Distance distance=Label::kFar)
void CheckEnumCache(Register null_value, Label *call_runtime)
void AssertNumber(Register object)
const Register r4
kPropertyAccessorsOffset kNamedPropertyHandlerOffset kInstanceTemplateOffset kAccessCheckInfoOffset kEvalFrominstructionsOffsetOffset kInstanceClassNameOffset flag
Definition: objects-inl.h:3923