v8 3.14.5 (node 0.10.28)
V8 is Google's open source JavaScript engine.
compilation-cache.cc
// Copyright 2011 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "v8.h"

#include "assembler.h"
#include "compilation-cache.h"
#include "serialize.h"

namespace v8 {
namespace internal {


// The number of generations for each sub cache.
// The number of ScriptGenerations is carefully chosen based on histograms.
// See issue 458: http://code.google.com/p/v8/issues/detail?id=458
static const int kScriptGenerations = 5;
static const int kEvalGlobalGenerations = 2;
static const int kEvalContextualGenerations = 2;
static const int kRegExpGenerations = 2;

// Initial size of each compilation cache table allocated.
static const int kInitialCacheSize = 64;

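// ---------------------------------------------------------------------------
// Illustrative note (not part of the original file): each sub cache keeps an
// array of tables, tables_[0] (youngest generation) .. tables_[N-1] (oldest).
// Lookups probe generation 0 first; Age() -- called from
// MarkCompactPrologue() below -- shifts every table one slot toward the old
// end and drops the last one. With kScriptGenerations == 5, a script entry
// that is never looked up again is evicted by the fifth aging cycle:
//
//   after Put:    entry lives in tables_[0]
//   after Age #1: entry lives in tables_[1]
//   ...
//   after Age #4: entry lives in tables_[4]   (still a cache hit)
//   after Age #5: entry dropped with the old tables_[4]
// ---------------------------------------------------------------------------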

CompilationCache::CompilationCache(Isolate* isolate)
    : isolate_(isolate),
      script_(isolate, kScriptGenerations),
      eval_global_(isolate, kEvalGlobalGenerations),
      eval_contextual_(isolate, kEvalContextualGenerations),
      reg_exp_(isolate, kRegExpGenerations),
      enabled_(true) {
  CompilationSubCache* subcaches[kSubCacheCount] =
      {&script_, &eval_global_, &eval_contextual_, &reg_exp_};
  for (int i = 0; i < kSubCacheCount; ++i) {
    subcaches_[i] = subcaches[i];
  }
}


CompilationCache::~CompilationCache() {}

static Handle<CompilationCacheTable> AllocateTable(Isolate* isolate, int size) {
  CALL_HEAP_FUNCTION(isolate,
                     CompilationCacheTable::Allocate(size),
                     CompilationCacheTable);
}


Handle<CompilationCacheTable> CompilationSubCache::GetTable(int generation) {
  ASSERT(generation < generations_);
  Handle<CompilationCacheTable> result;
  if (tables_[generation]->IsUndefined()) {
    result = AllocateTable(isolate(), kInitialCacheSize);
    tables_[generation] = *result;
  } else {
    CompilationCacheTable* table =
        CompilationCacheTable::cast(tables_[generation]);
    result = Handle<CompilationCacheTable>(table, isolate());
  }
  return result;
}


void CompilationSubCache::Age() {
  // Age the generations implicitly killing off the oldest.
  for (int i = generations_ - 1; i > 0; i--) {
    tables_[i] = tables_[i - 1];
  }

  // Set the first generation as unborn.
  tables_[0] = isolate()->heap()->undefined_value();
}
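
// Illustrative sketch (not part of the original file): the effect of one
// Age() call on a three-generation sub cache, where T0..T2 denote the
// table objects held in tables_:
//
//   before:  tables_ = { T0, T1, T2 }
//   after:   tables_ = { undefined, T0, T1 }   // T2 becomes unreachable
//
// Generation 0 is re-allocated lazily by GetTable() on the next access.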


void CompilationSubCache::IterateFunctions(ObjectVisitor* v) {
  Object* undefined = isolate()->heap()->raw_unchecked_undefined_value();
  for (int i = 0; i < generations_; i++) {
    if (tables_[i] != undefined) {
      reinterpret_cast<CompilationCacheTable*>(tables_[i])->IterateElements(v);
    }
  }
}


void CompilationSubCache::Iterate(ObjectVisitor* v) {
  v->VisitPointers(&tables_[0], &tables_[generations_]);
}


void CompilationSubCache::Clear() {
  MemsetPointer(tables_, isolate()->heap()->undefined_value(), generations_);
}


void CompilationSubCache::Remove(Handle<SharedFunctionInfo> function_info) {
  // Probe the script generation tables. Make sure not to leak handles
  // into the caller's handle scope.
  { HandleScope scope(isolate());
    for (int generation = 0; generation < generations(); generation++) {
      Handle<CompilationCacheTable> table = GetTable(generation);
      table->Remove(*function_info);
    }
  }
}


CompilationCacheScript::CompilationCacheScript(Isolate* isolate,
                                               int generations)
    : CompilationSubCache(isolate, generations),
      script_histogram_(NULL),
      script_histogram_initialized_(false) { }


// We only re-use a cached function for some script source code if the
// script originates from the same place. This is to avoid issues
// when reporting errors, etc.
bool CompilationCacheScript::HasOrigin(
    Handle<SharedFunctionInfo> function_info,
    Handle<Object> name,
    int line_offset,
    int column_offset) {
  Handle<Script> script =
      Handle<Script>(Script::cast(function_info->script()), isolate());
  // If the script name isn't set, the boilerplate script should have
  // an undefined name to have the same origin.
  if (name.is_null()) {
    return script->name()->IsUndefined();
  }
  // Do the fast bailout checks first.
  if (line_offset != script->line_offset()->value()) return false;
  if (column_offset != script->column_offset()->value()) return false;
  // Check that both names are strings. If not, no match.
  if (!name->IsString() || !script->name()->IsString()) return false;
  // Compare the two name strings for equality.
  return String::cast(*name)->Equals(String::cast(script->name()));
}
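
// Illustrative examples (not part of the original file) for HasOrigin(),
// assuming the cached function was compiled from ("a.js", line 0, col 0):
//
//   HasOrigin(info, "a.js", 0, 0)   -> true   (same name and offsets)
//   HasOrigin(info, "b.js", 0, 0)   -> false  (different name)
//   HasOrigin(info, "a.js", 10, 0)  -> false  (line offset differs)
//   HasOrigin(info, null, 0, 0)     -> true only if the cached script's
//                                      name is also undefined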


// TODO(245): Need to allow identical code from different contexts to
// be cached in the same script generation. Currently the first use
// will be cached, but subsequent code from different source / line
// won't.
Handle<SharedFunctionInfo> CompilationCacheScript::Lookup(
    Handle<String> source,
    Handle<Object> name,
    int line_offset,
    int column_offset,
    Handle<Context> context) {
  Object* result = NULL;
  int generation;

  // Probe the script generation tables. Make sure not to leak handles
  // into the caller's handle scope.
  { HandleScope scope(isolate());
    for (generation = 0; generation < generations(); generation++) {
      Handle<CompilationCacheTable> table = GetTable(generation);
      Handle<Object> probe(table->Lookup(*source, *context), isolate());
      if (probe->IsSharedFunctionInfo()) {
        Handle<SharedFunctionInfo> function_info =
            Handle<SharedFunctionInfo>::cast(probe);
        // Break when we've found a suitable shared function info that
        // matches the origin.
        if (HasOrigin(function_info, name, line_offset, column_offset)) {
          result = *function_info;
          break;
        }
      }
    }
  }

  if (!script_histogram_initialized_) {
    script_histogram_ = isolate()->stats_table()->CreateHistogram(
        "V8.ScriptCache",
        0,
        kScriptGenerations,
        kScriptGenerations + 1);
    script_histogram_initialized_ = true;
  }

  if (script_histogram_ != NULL) {
    // The level NUMBER_OF_SCRIPT_GENERATIONS is equivalent to a cache miss.
    isolate()->stats_table()->AddHistogramSample(script_histogram_, generation);
  }

  // Once outside the manacles of the handle scope, we need to recheck
  // to see if we actually found a cached script. If so, we return a
  // handle created in the caller's handle scope.
  if (result != NULL) {
    Handle<SharedFunctionInfo> shared(SharedFunctionInfo::cast(result),
                                      isolate());
    ASSERT(HasOrigin(shared, name, line_offset, column_offset));
    // If the script was found in a later generation, we promote it to
    // the first generation to let it survive longer in the cache.
    if (generation != 0) Put(source, context, shared);
    isolate()->counters()->compilation_cache_hits()->Increment();
    return shared;
  } else {
    isolate()->counters()->compilation_cache_misses()->Increment();
    return Handle<SharedFunctionInfo>::null();
  }
}
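
// Illustrative note (not part of the original file): on a hit in an old
// generation, the entry is promoted so it survives longer; e.g. a source
// found in tables_[2] is re-Put() into tables_[0], and the stale copy in
// tables_[2] simply ages away. The histogram sample records how deep the
// probe went; a sample equal to generations() means a miss.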


MaybeObject* CompilationCacheScript::TryTablePut(
    Handle<String> source,
    Handle<Context> context,
    Handle<SharedFunctionInfo> function_info) {
  Handle<CompilationCacheTable> table = GetFirstTable();
  return table->Put(*source, *context, *function_info);
}


Handle<CompilationCacheTable> CompilationCacheScript::TablePut(
    Handle<String> source,
    Handle<Context> context,
    Handle<SharedFunctionInfo> function_info) {
  CALL_HEAP_FUNCTION(isolate(),
                     TryTablePut(source, context, function_info),
                     CompilationCacheTable);
}


void CompilationCacheScript::Put(Handle<String> source,
                                 Handle<Context> context,
                                 Handle<SharedFunctionInfo> function_info) {
  HandleScope scope(isolate());
  SetFirstTable(TablePut(source, context, function_info));
}


Handle<SharedFunctionInfo> CompilationCacheEval::Lookup(
    Handle<String> source,
    Handle<Context> context,
    LanguageMode language_mode,
    int scope_position) {
  // Make sure not to leak the table into the surrounding handle
  // scope. Otherwise, we risk keeping old tables around even after
  // having cleared the cache.
  Object* result = NULL;
  int generation;
  { HandleScope scope(isolate());
    for (generation = 0; generation < generations(); generation++) {
      Handle<CompilationCacheTable> table = GetTable(generation);
      result = table->LookupEval(
          *source, *context, language_mode, scope_position);
      if (result->IsSharedFunctionInfo()) {
        break;
      }
    }
  }
  if (result->IsSharedFunctionInfo()) {
    Handle<SharedFunctionInfo>
        function_info(SharedFunctionInfo::cast(result), isolate());
    if (generation != 0) {
      Put(source, context, function_info, scope_position);
    }
    isolate()->counters()->compilation_cache_hits()->Increment();
    return function_info;
  } else {
    isolate()->counters()->compilation_cache_misses()->Increment();
    return Handle<SharedFunctionInfo>::null();
  }
}
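
// Illustrative note (not part of the original file): eval entries are keyed
// on the whole tuple (source, context, language_mode, scope_position), so
// the same source string evaluated under a different context, language mode
// or scope position misses the cache. The script cache above instead keys
// on (source, context) and then filters candidates with HasOrigin().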


MaybeObject* CompilationCacheEval::TryTablePut(
    Handle<String> source,
    Handle<Context> context,
    Handle<SharedFunctionInfo> function_info,
    int scope_position) {
  Handle<CompilationCacheTable> table = GetFirstTable();
  return table->PutEval(*source, *context, *function_info, scope_position);
}


Handle<CompilationCacheTable> CompilationCacheEval::TablePut(
    Handle<String> source,
    Handle<Context> context,
    Handle<SharedFunctionInfo> function_info,
    int scope_position) {
  CALL_HEAP_FUNCTION(isolate(),
                     TryTablePut(
                         source, context, function_info, scope_position),
                     CompilationCacheTable);
}


void CompilationCacheEval::Put(Handle<String> source,
                               Handle<Context> context,
                               Handle<SharedFunctionInfo> function_info,
                               int scope_position) {
  HandleScope scope(isolate());
  SetFirstTable(TablePut(source, context, function_info, scope_position));
}


Handle<FixedArray> CompilationCacheRegExp::Lookup(Handle<String> source,
                                                  JSRegExp::Flags flags) {
  // Make sure not to leak the table into the surrounding handle
  // scope. Otherwise, we risk keeping old tables around even after
  // having cleared the cache.
  Object* result = NULL;
  int generation;
  { HandleScope scope(isolate());
    for (generation = 0; generation < generations(); generation++) {
      Handle<CompilationCacheTable> table = GetTable(generation);
      result = table->LookupRegExp(*source, flags);
      if (result->IsFixedArray()) {
        break;
      }
    }
  }
  if (result->IsFixedArray()) {
    Handle<FixedArray> data(FixedArray::cast(result), isolate());
    if (generation != 0) {
      Put(source, flags, data);
    }
    isolate()->counters()->compilation_cache_hits()->Increment();
    return data;
  } else {
    isolate()->counters()->compilation_cache_misses()->Increment();
    return Handle<FixedArray>::null();
  }
}


MaybeObject* CompilationCacheRegExp::TryTablePut(
    Handle<String> source,
    JSRegExp::Flags flags,
    Handle<FixedArray> data) {
  Handle<CompilationCacheTable> table = GetFirstTable();
  return table->PutRegExp(*source, flags, *data);
}


Handle<CompilationCacheTable> CompilationCacheRegExp::TablePut(
    Handle<String> source,
    JSRegExp::Flags flags,
    Handle<FixedArray> data) {
  CALL_HEAP_FUNCTION(isolate(),
                     TryTablePut(source, flags, data),
                     CompilationCacheTable);
}


void CompilationCacheRegExp::Put(Handle<String> source,
                                 JSRegExp::Flags flags,
                                 Handle<FixedArray> data) {
  HandleScope scope(isolate());
  SetFirstTable(TablePut(source, flags, data));
}


void CompilationCache::Remove(Handle<SharedFunctionInfo> function_info) {
  if (!IsEnabled()) return;

  eval_global_.Remove(function_info);
  eval_contextual_.Remove(function_info);
  script_.Remove(function_info);
}


Handle<SharedFunctionInfo> CompilationCache::LookupScript(
    Handle<String> source,
    Handle<Object> name,
    int line_offset,
    int column_offset,
    Handle<Context> context) {
  if (!IsEnabled()) {
    return Handle<SharedFunctionInfo>::null();
  }

  return script_.Lookup(source, name, line_offset, column_offset, context);
}


Handle<SharedFunctionInfo> CompilationCache::LookupEval(
    Handle<String> source,
    Handle<Context> context,
    bool is_global,
    LanguageMode language_mode,
    int scope_position) {
  if (!IsEnabled()) {
    return Handle<SharedFunctionInfo>::null();
  }

  Handle<SharedFunctionInfo> result;
  if (is_global) {
    result = eval_global_.Lookup(
        source, context, language_mode, scope_position);
  } else {
    ASSERT(scope_position != RelocInfo::kNoPosition);
    result = eval_contextual_.Lookup(
        source, context, language_mode, scope_position);
  }
  return result;
}
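
// Illustrative note (not part of the original file): global and contextual
// evals go to separate sub caches, and the contextual path additionally
// requires a real scope position (see the ASSERT above), since code for a
// contextual eval is specific to where in the enclosing scope it appears.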


Handle<FixedArray> CompilationCache::LookupRegExp(Handle<String> source,
                                                  JSRegExp::Flags flags) {
  if (!IsEnabled()) {
    return Handle<FixedArray>::null();
  }

  return reg_exp_.Lookup(source, flags);
}


void CompilationCache::PutScript(Handle<String> source,
                                 Handle<Context> context,
                                 Handle<SharedFunctionInfo> function_info) {
  if (!IsEnabled()) {
    return;
  }

  script_.Put(source, context, function_info);
}


void CompilationCache::PutEval(Handle<String> source,
                               Handle<Context> context,
                               bool is_global,
                               Handle<SharedFunctionInfo> function_info,
                               int scope_position) {
  if (!IsEnabled()) {
    return;
  }

  HandleScope scope(isolate());
  if (is_global) {
    eval_global_.Put(source, context, function_info, scope_position);
  } else {
    ASSERT(scope_position != RelocInfo::kNoPosition);
    eval_contextual_.Put(source, context, function_info, scope_position);
  }
}



void CompilationCache::PutRegExp(Handle<String> source,
                                 JSRegExp::Flags flags,
                                 Handle<FixedArray> data) {
  if (!IsEnabled()) {
    return;
  }

  reg_exp_.Put(source, flags, data);
}


void CompilationCache::Clear() {
  for (int i = 0; i < kSubCacheCount; i++) {
    subcaches_[i]->Clear();
  }
}


void CompilationCache::Iterate(ObjectVisitor* v) {
  for (int i = 0; i < kSubCacheCount; i++) {
    subcaches_[i]->Iterate(v);
  }
}


void CompilationCache::IterateFunctions(ObjectVisitor* v) {
  for (int i = 0; i < kSubCacheCount; i++) {
    subcaches_[i]->IterateFunctions(v);
  }
}


void CompilationCache::MarkCompactPrologue() {
  for (int i = 0; i < kSubCacheCount; i++) {
    subcaches_[i]->Age();
  }
}


void CompilationCache::Enable() {
  enabled_ = true;
}


void CompilationCache::Disable() {
  enabled_ = false;
  Clear();
}


} }  // namespace v8::internal
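
// ---------------------------------------------------------------------------
// Usage sketch (not part of the original file): how a caller inside V8 might
// drive the script cache when compiling. compile_script() is a hypothetical
// placeholder; the cache calls match the API defined above.
//
//   CompilationCache* cache = isolate->compilation_cache();
//   Handle<SharedFunctionInfo> info = cache->LookupScript(
//       source, name, line_offset, column_offset, context);
//   if (info.is_null()) {                   // miss: compile and remember
//     info = compile_script(source, ...);   // hypothetical helper
//     if (!info.is_null()) cache->PutScript(source, context, info);
//   }
// ---------------------------------------------------------------------------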