Coverage Report

Created: 2025-10-31 09:06

/src/node/deps/v8/include/v8-array-buffer.h
Source (execution counts from the report appear as trailing "// coverage: N" annotations; all other lines were not instrumented):

// Copyright 2021 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef INCLUDE_V8_ARRAY_BUFFER_H_
#define INCLUDE_V8_ARRAY_BUFFER_H_

#include <stddef.h>

#include <memory>

#include "v8-local-handle.h"  // NOLINT(build/include_directory)
#include "v8-memory-span.h"   // NOLINT(build/include_directory)
#include "v8-object.h"        // NOLINT(build/include_directory)
#include "v8-platform.h"      // NOLINT(build/include_directory)
#include "v8config.h"         // NOLINT(build/include_directory)

namespace v8 {

class SharedArrayBuffer;

#if defined(V8_COMPRESS_POINTERS) && \
    !defined(V8_COMPRESS_POINTERS_IN_SHARED_CAGE)
class IsolateGroup;
#endif

#ifndef V8_ARRAY_BUFFER_INTERNAL_FIELD_COUNT
// Defined using gn arg `v8_array_buffer_internal_field_count`.
#define V8_ARRAY_BUFFER_INTERNAL_FIELD_COUNT 2
#endif

enum class ArrayBufferCreationMode { kInternalized, kExternalized };
enum class BackingStoreInitializationMode { kZeroInitialized, kUninitialized };
enum class BackingStoreOnFailureMode { kReturnNull, kOutOfMemory };

/**
 * A wrapper around the backing store (i.e. the raw memory) of an array buffer.
 * See a document linked in http://crbug.com/v8/9908 for more information.
 *
 * The allocation and destruction of backing stores is generally managed by
 * V8. Clients should always use standard C++ memory ownership types (i.e.
 * std::unique_ptr and std::shared_ptr) to manage lifetimes of backing stores
 * properly, since V8 internal objects may alias backing stores.
 *
 * This object does not keep the underlying |ArrayBuffer::Allocator| alive by
 * default. Use Isolate::CreateParams::array_buffer_allocator_shared when
 * creating the Isolate to make it hold a reference to the allocator itself.
 */
class V8_EXPORT BackingStore : public v8::internal::BackingStoreBase {
 public:
  ~BackingStore();

  /**
   * Return a pointer to the beginning of the memory block for this backing
   * store. The pointer is only valid as long as this backing store object
   * lives.
   */
  void* Data() const;

  /**
   * The length (in bytes) of this backing store.
   */
  size_t ByteLength() const;

  /**
   * The maximum length (in bytes) that this backing store may grow to.
   *
   * If this backing store was created for a resizable ArrayBuffer or a growable
   * SharedArrayBuffer, it is >= ByteLength(). Otherwise it is ==
   * ByteLength().
   */
  size_t MaxByteLength() const;

  /**
   * Indicates whether the backing store was created for an ArrayBuffer or
   * a SharedArrayBuffer.
   */
  bool IsShared() const;

  /**
   * Indicates whether the backing store was created for a resizable ArrayBuffer
   * or a growable SharedArrayBuffer, and thus may be resized by user JavaScript
   * code.
   */
  bool IsResizableByUserJavaScript() const;

  /**
   * Prevent implicit instantiation of operator delete with size_t argument.
   * The size_t argument would be incorrect because ptr points to the
   * internal BackingStore object.
   */
  void operator delete(void* ptr) { ::operator delete(ptr); }  // coverage: 0

  /**
   * This callback is used only if the memory block for a BackingStore cannot be
   * allocated with an ArrayBuffer::Allocator. In such cases the destructor of
   * the BackingStore invokes the callback to free the memory block.
   */
  using DeleterCallback = void (*)(void* data, size_t length,
                                   void* deleter_data);

  /**
   * If the memory block of a BackingStore is static or is managed manually,
   * then this empty deleter along with nullptr deleter_data can be passed to
   * ArrayBuffer::NewBackingStore to indicate that.
   *
   * The manually managed case should be used with caution and only when it
   * is guaranteed that the memory block freeing happens after detaching its
   * ArrayBuffer.
   */
  static void EmptyDeleter(void* data, size_t length, void* deleter_data);

 private:
  /**
   * See [Shared]ArrayBuffer::GetBackingStore and
   * [Shared]ArrayBuffer::NewBackingStore.
   */
  BackingStore();
};

#if !defined(V8_IMMINENT_DEPRECATION_WARNINGS)
// Use v8::BackingStore::DeleterCallback instead.
using BackingStoreDeleterCallback = void (*)(void* data, size_t length,
                                             void* deleter_data);

#endif
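
The DeleterCallback and EmptyDeleter contracts above are easiest to see in embedder code. Below is a minimal sketch, assuming caller-owned memory; the names FreeHeapBuffer, WrapMallocedBuffer, WrapStaticBuffer and the buffer sizes are illustrative, and ArrayBuffer::NewBackingStore is the overload declared later in this header.

// Usage sketch (illustrative, not part of v8-array-buffer.h).
#include <cstdlib>
#include <memory>
#include "v8-array-buffer.h"

// Deleter invoked by the BackingStore destructor for heap-allocated memory.
static void FreeHeapBuffer(void* data, size_t length, void* deleter_data) {
  // |length| and |deleter_data| are unused here; std::free only needs the pointer.
  std::free(data);
}

std::unique_ptr<v8::BackingStore> WrapMallocedBuffer(size_t byte_length) {
  void* data = std::malloc(byte_length);
  if (data == nullptr) return nullptr;
  // Ownership of |data| moves to the BackingStore; its destructor calls FreeHeapBuffer.
  return v8::ArrayBuffer::NewBackingStore(data, byte_length, FreeHeapBuffer,
                                          /*deleter_data=*/nullptr);
}

// For static or manually managed memory, pass BackingStore::EmptyDeleter and a
// nullptr deleter_data, and free the block only after its ArrayBuffer has been
// detached.
static unsigned char kStaticBuffer[64];
std::unique_ptr<v8::BackingStore> WrapStaticBuffer() {
  return v8::ArrayBuffer::NewBackingStore(kStaticBuffer, sizeof(kStaticBuffer),
                                          v8::BackingStore::EmptyDeleter,
                                          /*deleter_data=*/nullptr);
}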

/**
 * An instance of the built-in ArrayBuffer constructor (ES6 draft 15.13.5).
 */
class V8_EXPORT ArrayBuffer : public Object {
 public:
  /**
   * A thread-safe allocator that V8 uses to allocate |ArrayBuffer|'s memory.
   * The allocator is a global V8 setting. It has to be set via
   * Isolate::CreateParams.
   *
   * Memory allocated through this allocator by V8 is accounted for as external
   * memory by V8. Note that V8 keeps track of the memory for all internalized
   * |ArrayBuffer|s. Responsibility for tracking external memory (using
   * Isolate::AdjustAmountOfExternalAllocatedMemory) is handed over to the
   * embedder upon externalization and taken over upon internalization (creating
   * an internalized buffer from an existing buffer).
   *
   * Note that it is unsafe to call back into V8 from any of the allocator
   * functions.
   */
  class V8_EXPORT Allocator {
   public:
    virtual ~Allocator() = default;  // coverage: 35

    /**
     * Allocate |length| bytes. Return nullptr if allocation is not successful.
     * Memory should be initialized to zeroes.
     */
    virtual void* Allocate(size_t length) = 0;

    /**
     * Allocate |length| bytes. Return nullptr if allocation is not successful.
     * Memory does not have to be initialized.
     */
    virtual void* AllocateUninitialized(size_t length) = 0;

    /**
     * Free the memory block of size |length|, pointed to by |data|.
     * That memory is guaranteed to be previously allocated by |Allocate|.
     */
    virtual void Free(void* data, size_t length) = 0;

    /**
     * Returns a size_t that determines the largest ArrayBuffer that can be
     * allocated.  Override if your Allocator is more restrictive than the
     * default.  Will only be called once, and the value returned will be
     * cached.
     * Should not return a value that is larger than kMaxByteLength.
     */
    virtual size_t MaxAllocationSize() const { return kMaxByteLength; }  // coverage: 2.24k

    /**
     * ArrayBuffer allocation mode. kNormal is a malloc/free style allocation,
     * while kReservation is for larger allocations with the ability to set
     * access permissions.
     */
    enum class AllocationMode { kNormal, kReservation };

    /**
     * Returns the page allocator used by this Allocator instance.
     *
     * When the sandbox is used by the Allocator, this is expected to return
     * the sandbox's page allocator.
     * Otherwise, it should return the system page allocator.
     */
    virtual PageAllocator* GetPageAllocator() { return nullptr; }  // coverage: 0

#if defined(V8_COMPRESS_POINTERS) && \
    !defined(V8_COMPRESS_POINTERS_IN_SHARED_CAGE)
    /**
     * Convenience allocator.
     *
     * When the sandbox is enabled, this allocator will allocate its backing
     * memory inside the sandbox that belongs to the passed isolate group.
     * Otherwise, it will rely on malloc/free.
     *
     * Caller takes ownership, i.e. the returned object needs to be freed using
     * |delete allocator| once it is no longer in use.
     */
    static Allocator* NewDefaultAllocator(const IsolateGroup& group);
#endif  // defined(V8_COMPRESS_POINTERS) &&
        // !defined(V8_COMPRESS_POINTERS_IN_SHARED_CAGE)

    /**
     * Convenience allocator.
     *
     * When the sandbox is enabled, this allocator will allocate its backing
     * memory inside the default global sandbox. Otherwise, it will rely on
     * malloc/free.
     *
     * Caller takes ownership, i.e. the returned object needs to be freed using
     * |delete allocator| once it is no longer in use.
     */
    static Allocator* NewDefaultAllocator();
  };
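
As a concrete illustration of the Allocator contract just declared, here is a hedged sketch of handing the convenience allocator to Isolate::CreateParams, plus a deliberately simple subclass built on the C heap. It assumes V8 and the platform have already been initialized; Isolate::CreateParams and Isolate::New come from v8-isolate.h, and the name MallocAllocator is illustrative.

// Usage sketch (illustrative, not part of v8-array-buffer.h).
#include <cstdlib>
#include "v8-array-buffer.h"
#include "v8-isolate.h"

// The common case: use the convenience allocator and hand it to the isolate.
v8::Isolate* CreateIsolateWithDefaultAllocator() {
  v8::Isolate::CreateParams params;
  params.array_buffer_allocator =
      v8::ArrayBuffer::Allocator::NewDefaultAllocator();
  // The raw allocator must outlive the isolate and be deleted by the embedder;
  // array_buffer_allocator_shared can be used instead to share ownership.
  return v8::Isolate::New(params);
}

// A minimal custom allocator; real embedders would add limits and accounting.
class MallocAllocator : public v8::ArrayBuffer::Allocator {
 public:
  void* Allocate(size_t length) override {
    return std::calloc(length, 1);  // zero-initialized, as Allocate() requires
  }
  void* AllocateUninitialized(size_t length) override {
    return std::malloc(length);
  }
  void Free(void* data, size_t length) override { std::free(data); }
};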

  /**
   * Data length in bytes.
   */
  size_t ByteLength() const;

  /**
   * Maximum length in bytes.
   */
  size_t MaxByteLength() const;

  /**
   * Attempt to create a new ArrayBuffer. Allocate |byte_length| bytes.
   * Allocated memory will be owned by a created ArrayBuffer and
   * will be deallocated when it is garbage-collected,
   * unless the object is externalized. If allocation fails, the Maybe
   * returned will be empty.
   */
  static MaybeLocal<ArrayBuffer> MaybeNew(
      Isolate* isolate, size_t byte_length,
      BackingStoreInitializationMode initialization_mode =
          BackingStoreInitializationMode::kZeroInitialized);

  /**
   * Create a new ArrayBuffer. Allocate |byte_length| bytes, which are either
   * zero-initialized or uninitialized. Allocated memory will be owned by a
   * created ArrayBuffer and will be deallocated when it is garbage-collected,
   * unless the object is externalized.
   */
  static Local<ArrayBuffer> New(
      Isolate* isolate, size_t byte_length,
      BackingStoreInitializationMode initialization_mode =
          BackingStoreInitializationMode::kZeroInitialized);

  /**
   * Create a new ArrayBuffer with an existing backing store.
   * The created array keeps a reference to the backing store until the array
   * is garbage collected. Note that the IsExternal bit does not affect this
   * reference from the array to the backing store.
   *
   * In the future the IsExternal bit will be removed. Until then the bit is set
   * as follows. If the backing store does not own the underlying buffer, then
   * the array is created in externalized state. Otherwise, the array is created
   * in internalized state. In the latter case the array can be transitioned
   * to the externalized state using Externalize(backing_store).
   */
  static Local<ArrayBuffer> New(Isolate* isolate,
                                std::shared_ptr<BackingStore> backing_store);

  /**
   * Returns a new standalone BackingStore that is allocated using the array
   * buffer allocator of the isolate. The allocation can be either
   * zero-initialized or uninitialized. The result can be later passed to
   * ArrayBuffer::New.
   *
   * If the allocator returns nullptr, then the function may cause GCs in the
   * given isolate and re-try the allocation.
   *
   * If GCs do not help and on_failure is kOutOfMemory, then the
   * function will crash with an out-of-memory error.
   *
   * Otherwise if GCs do not help (or the allocation is too large for GCs to
   * help) and on_failure is kReturnNull, then a null result is returned.
   */
  static std::unique_ptr<BackingStore> NewBackingStore(
      Isolate* isolate, size_t byte_length,
      BackingStoreInitializationMode initialization_mode =
          BackingStoreInitializationMode::kZeroInitialized,
      BackingStoreOnFailureMode on_failure =
          BackingStoreOnFailureMode::kOutOfMemory);

  /**
   * Returns a new standalone BackingStore that takes over the ownership of
   * the given buffer. The destructor of the BackingStore invokes the given
   * deleter callback.
   *
   * The result can be later passed to ArrayBuffer::New. The raw pointer
   * to the buffer must not be passed again to any V8 API function.
   */
  static std::unique_ptr<BackingStore> NewBackingStore(
      void* data, size_t byte_length, v8::BackingStore::DeleterCallback deleter,
      void* deleter_data);
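
Putting the NewBackingStore overloads together with the New(isolate, backing_store) overload above, here is a hedged sketch of building an ArrayBuffer over an isolate-allocated store. It assumes a live Isolate with an entered Context; the function name MakeFilledArrayBuffer is illustrative.

// Usage sketch (illustrative, not part of v8-array-buffer.h).
#include <cstring>
#include <memory>
#include "v8-array-buffer.h"
#include "v8-isolate.h"
#include "v8-local-handle.h"

v8::Local<v8::ArrayBuffer> MakeFilledArrayBuffer(v8::Isolate* isolate,
                                                 const void* src, size_t size) {
  v8::EscapableHandleScope scope(isolate);
  // Allocate an uninitialized store via the isolate's ArrayBuffer::Allocator,
  // asking for nullptr (rather than an OOM crash) on failure.
  std::unique_ptr<v8::BackingStore> store = v8::ArrayBuffer::NewBackingStore(
      isolate, size, v8::BackingStoreInitializationMode::kUninitialized,
      v8::BackingStoreOnFailureMode::kReturnNull);
  if (!store) return v8::Local<v8::ArrayBuffer>();
  std::memcpy(store->Data(), src, size);
  // The shared_ptr takes over the unique_ptr; the ArrayBuffer then keeps the
  // store alive until it is garbage-collected.
  v8::Local<v8::ArrayBuffer> buffer = v8::ArrayBuffer::New(
      isolate, std::shared_ptr<v8::BackingStore>(std::move(store)));
  return scope.Escape(buffer);
}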

  /**
   * Returns a new resizable standalone BackingStore that is allocated using the
   * array buffer allocator of the isolate. The result can be later passed to
   * ArrayBuffer::New.
   *
   * |byte_length| must be <= |max_byte_length|.
   *
   * This function is usable without an isolate. Unlike |NewBackingStore| calls
   * with an isolate, GCs cannot be triggered, and there are no
   * retries. Allocation failure will cause the function to crash with an
   * out-of-memory error.
   */
  static std::unique_ptr<BackingStore> NewResizableBackingStore(
      size_t byte_length, size_t max_byte_length);
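
NewResizableBackingStore pairs with the New(isolate, backing_store) overload above. A brief sketch, assuming a live isolate; the sizes and the name MakeResizableBuffer are illustrative.

// Usage sketch (illustrative, not part of v8-array-buffer.h).
#include <memory>
#include "v8-array-buffer.h"
#include "v8-isolate.h"
#include "v8-local-handle.h"

v8::Local<v8::ArrayBuffer> MakeResizableBuffer(v8::Isolate* isolate) {
  v8::EscapableHandleScope scope(isolate);
  // Starts at 1 KiB; JavaScript may later resize it up to 64 KiB.
  std::unique_ptr<v8::BackingStore> store =
      v8::ArrayBuffer::NewResizableBackingStore(/*byte_length=*/1024,
                                                /*max_byte_length=*/64 * 1024);
  v8::Local<v8::ArrayBuffer> buffer = v8::ArrayBuffer::New(
      isolate, std::shared_ptr<v8::BackingStore>(std::move(store)));
  // buffer->MaxByteLength() should now report the 64 KiB maximum.
  return scope.Escape(buffer);
}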

  /**
   * Returns true if this ArrayBuffer may be detached.
   */
  bool IsDetachable() const;

  /**
   * Returns true if this ArrayBuffer has been detached.
   */
  bool WasDetached() const;

  /**
   * Detaches this ArrayBuffer and all its views (typed arrays).
   * Detaching sets the byte length of the buffer and all typed arrays to zero,
   * preventing JavaScript from ever accessing the underlying backing store.
   * The ArrayBuffer should have been externalized and must be detachable.
   */
  V8_DEPRECATED(
      "Use the version which takes a key parameter (passing a null handle is "
      "ok).")
  void Detach();

  /**
   * Detaches this ArrayBuffer and all its views (typed arrays).
   * Detaching sets the byte length of the buffer and all typed arrays to zero,
   * preventing JavaScript from ever accessing the underlying backing store.
   * The ArrayBuffer should have been externalized and must be detachable.
   * Returns Nothing if the key didn't pass the [[ArrayBufferDetachKey]] check,
   * Just(true) otherwise.
   */
  V8_WARN_UNUSED_RESULT Maybe<bool> Detach(v8::Local<v8::Value> key);

  /**
   * Sets the ArrayBufferDetachKey.
   */
  void SetDetachKey(v8::Local<v8::Value> key);
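
A hedged sketch of the detach-key flow described above, assuming a live isolate and entered context; using a Symbol as the key and the name DetachWithKey are illustrative choices.

// Usage sketch (illustrative, not part of v8-array-buffer.h).
#include "v8-array-buffer.h"
#include "v8-isolate.h"
#include "v8-local-handle.h"
#include "v8-maybe.h"
#include "v8-primitive.h"

bool DetachWithKey(v8::Isolate* isolate, v8::Local<v8::ArrayBuffer> buffer) {
  v8::HandleScope scope(isolate);
  if (!buffer->IsDetachable()) return false;
  // Install a detach key; later detach attempts must present the same key.
  v8::Local<v8::Value> key = v8::Symbol::New(isolate);
  buffer->SetDetachKey(key);
  // Detach(key) returns Nothing<bool>() if the key does not match
  // [[ArrayBufferDetachKey]], Just(true) otherwise.
  v8::Maybe<bool> result = buffer->Detach(key);
  return result.IsJust() && buffer->WasDetached();
}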

  /**
   * Get a shared pointer to the backing store of this array buffer. This
   * pointer coordinates the lifetime management of the internal storage
   * with any live ArrayBuffers on the heap, even across isolates. The embedder
   * should not attempt to manage lifetime of the storage through other means.
   *
   * The returned shared pointer will not be empty, even if the ArrayBuffer has
   * been detached. Use |WasDetached| to tell if it has been detached instead.
   */
  std::shared_ptr<BackingStore> GetBackingStore();

  /**
   * More efficient shortcut for
   * GetBackingStore()->IsResizableByUserJavaScript().
   */
  bool IsResizableByUserJavaScript() const;

  /**
   * More efficient shortcut for GetBackingStore()->Data(). The returned pointer
   * is valid as long as the ArrayBuffer is alive.
   */
  void* Data() const;

  V8_INLINE static ArrayBuffer* Cast(Value* value) {  // coverage: 0
#ifdef V8_ENABLE_CHECKS  // coverage: 0
    CheckCast(value);  // coverage: 0
#endif  // coverage: 0
    return static_cast<ArrayBuffer*>(value);  // coverage: 0
  }  // coverage: 0

  static constexpr int kInternalFieldCount =
      V8_ARRAY_BUFFER_INTERNAL_FIELD_COUNT;
  static constexpr int kEmbedderFieldCount = kInternalFieldCount;

#if V8_ENABLE_SANDBOX
  static constexpr size_t kMaxByteLength =
      internal::kMaxSafeBufferSizeForSandbox;
#elif V8_HOST_ARCH_32_BIT
  static constexpr size_t kMaxByteLength = std::numeric_limits<int>::max();
#else
  // The maximum safe integer (2^53 - 1).
  static constexpr size_t kMaxByteLength =
      static_cast<size_t>((uint64_t{1} << 53) - 1);
#endif

 private:
  ArrayBuffer();
  static void CheckCast(Value* obj);
  friend class TypedArray;
};
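
Tying the ArrayBuffer surface together, a short sketch of allocating with MaybeNew (handling failure), touching the contents through Data(), and down-casting a generic Value. It assumes a live isolate with an entered context; FillBufferWithZeros and InspectValue are illustrative names.

// Usage sketch (illustrative, not part of v8-array-buffer.h).
#include <cstring>
#include "v8-array-buffer.h"
#include "v8-isolate.h"
#include "v8-local-handle.h"

bool FillBufferWithZeros(v8::Isolate* isolate, size_t byte_length) {
  v8::HandleScope scope(isolate);
  // MaybeNew returns an empty MaybeLocal instead of crashing if the
  // allocation cannot be satisfied.
  v8::Local<v8::ArrayBuffer> buffer;
  if (!v8::ArrayBuffer::MaybeNew(
           isolate, byte_length,
           v8::BackingStoreInitializationMode::kUninitialized)
           .ToLocal(&buffer)) {
    return false;  // allocation failed
  }
  std::memset(buffer->Data(), 0, buffer->ByteLength());
  return true;
}

void InspectValue(v8::Local<v8::Value> value) {
  if (value->IsArrayBuffer()) {
    // Cast verifies the type only when V8_ENABLE_CHECKS is defined.
    v8::ArrayBuffer* buffer = v8::ArrayBuffer::Cast(*value);
    (void)buffer->ByteLength();
  }
}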

#ifndef V8_ARRAY_BUFFER_VIEW_INTERNAL_FIELD_COUNT
// Defined using gn arg `v8_array_buffer_view_internal_field_count`.
#define V8_ARRAY_BUFFER_VIEW_INTERNAL_FIELD_COUNT 2
#endif

/**
 * A base class for an instance of one of "views" over ArrayBuffer,
 * including TypedArrays and DataView (ES6 draft 15.13).
 */
class V8_EXPORT ArrayBufferView : public Object {
 public:
  /**
   * Returns underlying ArrayBuffer.
   */
  Local<ArrayBuffer> Buffer();
  /**
   * Byte offset in |Buffer|.
   */
  size_t ByteOffset();
  /**
   * Size of a view in bytes.
   */
  size_t ByteLength();

  /**
   * Copy the contents of the ArrayBufferView's buffer to embedder-defined
   * memory, without the additional overhead that calling
   * ArrayBufferView::Buffer might incur.
   *
   * Will write at most min(|byte_length|, ByteLength) bytes starting at
   * ByteOffset of the underlying buffer to the memory starting at |dest|.
   * Returns the number of bytes actually written.
   */
  size_t CopyContents(void* dest, size_t byte_length);
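
For instance, CopyContents lets an embedder snapshot a view's bytes without materializing the ArrayBuffer handle. A minimal sketch, assuming a live isolate; SnapshotView is an illustrative name.

// Usage sketch (illustrative, not part of v8-array-buffer.h).
#include <cstdint>
#include <vector>
#include "v8-array-buffer.h"
#include "v8-local-handle.h"

std::vector<uint8_t> SnapshotView(v8::Local<v8::ArrayBufferView> view) {
  std::vector<uint8_t> bytes(view->ByteLength());
  // Writes at most min(bytes.size(), view->ByteLength()) bytes and returns the
  // number actually written; here the two are equal by construction.
  size_t written = view->CopyContents(bytes.data(), bytes.size());
  bytes.resize(written);
  return bytes;
}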

  /**
   * Returns the contents of the ArrayBufferView's buffer as a MemorySpan. If
   * the contents are on the V8 heap, they get copied into `storage`. Otherwise
   * a view into the off-heap backing store is returned. The provided storage
   * should be at least as large as the maximum on-heap size of a TypedArray,
   * as defined by the gn arg `typed_array_max_size_in_heap`. The default value
   * is 64 bytes.
   */
  v8::MemorySpan<uint8_t> GetContents(v8::MemorySpan<uint8_t> storage);

  /**
   * Returns true if ArrayBufferView's backing ArrayBuffer has already been
   * allocated.
   */
  bool HasBuffer() const;

  V8_INLINE static ArrayBufferView* Cast(Value* value) {  // coverage: 0
#ifdef V8_ENABLE_CHECKS  // coverage: 0
    CheckCast(value);  // coverage: 0
#endif  // coverage: 0
    return static_cast<ArrayBufferView*>(value);  // coverage: 0
  }  // coverage: 0

  static constexpr int kInternalFieldCount =
      V8_ARRAY_BUFFER_VIEW_INTERNAL_FIELD_COUNT;
  static const int kEmbedderFieldCount = kInternalFieldCount;

 private:
  ArrayBufferView();
  static void CheckCast(Value* obj);
};
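
A hedged sketch of the GetContents storage contract for ArrayBufferView; the 64-byte buffer matches the default `typed_array_max_size_in_heap` mentioned above, and SumBytes is an illustrative name.

// Usage sketch (illustrative, not part of v8-array-buffer.h).
#include <cstdint>
#include "v8-array-buffer.h"
#include "v8-local-handle.h"
#include "v8-memory-span.h"

uint32_t SumBytes(v8::Local<v8::ArrayBufferView> view) {
  // Storage for the on-heap case; must cover the maximum on-heap TypedArray
  // size (64 bytes with the default gn settings).
  uint8_t on_stack[64];
  v8::MemorySpan<uint8_t> contents =
      view->GetContents(v8::MemorySpan<uint8_t>(on_stack, sizeof(on_stack)));
  // |contents| either aliases |on_stack| (on-heap data was copied) or points
  // directly into the off-heap backing store.
  uint32_t sum = 0;
  for (uint8_t byte : contents) sum += byte;
  return sum;
}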

/**
 * An instance of DataView constructor (ES6 draft 15.13.7).
 */
class V8_EXPORT DataView : public ArrayBufferView {
 public:
  static Local<DataView> New(Local<ArrayBuffer> array_buffer,
                             size_t byte_offset, size_t length);
  static Local<DataView> New(Local<SharedArrayBuffer> shared_array_buffer,
                             size_t byte_offset, size_t length);
  V8_INLINE static DataView* Cast(Value* value) {  // coverage: 0
#ifdef V8_ENABLE_CHECKS  // coverage: 0
    CheckCast(value);  // coverage: 0
#endif  // coverage: 0
    return static_cast<DataView*>(value);  // coverage: 0
  }  // coverage: 0

 private:
  DataView();
  static void CheckCast(Value* obj);
};
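
The DataView factory mirrors the JavaScript constructor. A short sketch creating a view over part of an ArrayBuffer, assuming a live isolate; the offset, length, and the name ViewMiddle are illustrative.

// Usage sketch (illustrative, not part of v8-array-buffer.h).
#include "v8-array-buffer.h"
#include "v8-isolate.h"
#include "v8-local-handle.h"

v8::Local<v8::DataView> ViewMiddle(v8::Isolate* isolate,
                                   v8::Local<v8::ArrayBuffer> buffer) {
  v8::EscapableHandleScope scope(isolate);
  // A DataView over bytes [8, 8 + 16) of |buffer|; the caller must ensure the
  // buffer is at least 24 bytes long.
  v8::Local<v8::DataView> view =
      v8::DataView::New(buffer, /*byte_offset=*/8, /*length=*/16);
  return scope.Escape(view);
}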

/**
 * An instance of the built-in SharedArrayBuffer constructor.
 */
class V8_EXPORT SharedArrayBuffer : public Object {
 public:
  /**
   * Data length in bytes.
   */
  size_t ByteLength() const;

  /**
   * Maximum length in bytes.
   */
  size_t MaxByteLength() const;

  /**
   * Create a new SharedArrayBuffer. Allocate |byte_length| bytes, which are
   * either zero-initialized or uninitialized. Allocated memory will be owned by
   * a created SharedArrayBuffer and will be deallocated when it is
   * garbage-collected, unless the object is externalized.
   */
  static Local<SharedArrayBuffer> New(
      Isolate* isolate, size_t byte_length,
      BackingStoreInitializationMode initialization_mode =
          BackingStoreInitializationMode::kZeroInitialized);

  /**
   * Create a new SharedArrayBuffer. Allocate |byte_length| bytes, which are
   * either zero-initialized or uninitialized. Allocated memory will be owned by
   * a created SharedArrayBuffer and will be deallocated when it is
   * garbage-collected, unless the object is externalized. If allocation
   * fails, the Maybe returned will be empty.
   */
  static MaybeLocal<SharedArrayBuffer> MaybeNew(
      Isolate* isolate, size_t byte_length,
      BackingStoreInitializationMode initialization_mode =
          BackingStoreInitializationMode::kZeroInitialized);

  /**
   * Create a new SharedArrayBuffer with an existing backing store.
   * The created array keeps a reference to the backing store until the array
   * is garbage collected. Note that the IsExternal bit does not affect this
   * reference from the array to the backing store.
   *
   * In the future the IsExternal bit will be removed. Until then the bit is set
   * as follows. If the backing store does not own the underlying buffer, then
   * the array is created in externalized state. Otherwise, the array is created
   * in internalized state. In the latter case the array can be transitioned
   * to the externalized state using Externalize(backing_store).
   */
  static Local<SharedArrayBuffer> New(
      Isolate* isolate, std::shared_ptr<BackingStore> backing_store);

  /**
   * Returns a new standalone BackingStore that is allocated using the array
   * buffer allocator of the isolate. The allocation can be either
   * zero-initialized or uninitialized. The result can be later passed to
   * SharedArrayBuffer::New.
   *
   * If the allocator returns nullptr, then the function may cause GCs in the
   * given isolate and re-try the allocation.
   *
   * If on_failure is kOutOfMemory and GCs do not help, then the function will
   * crash with an out-of-memory error.
   *
   * Otherwise, if on_failure is kReturnNull and GCs do not help (or the
   * byte_length is so large that the allocation cannot succeed), then a null
   * result is returned.
   */
  static std::unique_ptr<BackingStore> NewBackingStore(
      Isolate* isolate, size_t byte_length,
      BackingStoreInitializationMode initialization_mode =
          BackingStoreInitializationMode::kZeroInitialized,
      BackingStoreOnFailureMode on_failure =
          BackingStoreOnFailureMode::kOutOfMemory);

  /**
   * Returns a new standalone BackingStore that takes over the ownership of
   * the given buffer. The destructor of the BackingStore invokes the given
   * deleter callback.
   *
   * The result can be later passed to SharedArrayBuffer::New. The raw pointer
   * to the buffer must not be passed again to any V8 functions.
   */
  static std::unique_ptr<BackingStore> NewBackingStore(
      void* data, size_t byte_length, v8::BackingStore::DeleterCallback deleter,
      void* deleter_data);

  /**
   * Get a shared pointer to the backing store of this array buffer. This
   * pointer coordinates the lifetime management of the internal storage
   * with any live ArrayBuffers on the heap, even across isolates. The embedder
   * should not attempt to manage lifetime of the storage through other means.
   */
  std::shared_ptr<BackingStore> GetBackingStore();

  /**
   * More efficient shortcut for GetBackingStore()->Data(). The returned pointer
   * is valid as long as the ArrayBuffer is alive.
   */
  void* Data() const;

  V8_INLINE static SharedArrayBuffer* Cast(Value* value) {  // coverage: 0
#ifdef V8_ENABLE_CHECKS  // coverage: 0
    CheckCast(value);  // coverage: 0
#endif  // coverage: 0
    return static_cast<SharedArrayBuffer*>(value);  // coverage: 0
  }  // coverage: 0

  static constexpr int kInternalFieldCount =
      V8_ARRAY_BUFFER_INTERNAL_FIELD_COUNT;

 private:
  SharedArrayBuffer();
  static void CheckCast(Value* obj);
};
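
Finally, the SharedArrayBuffer path mirrors ArrayBuffer: a backing store can be created once and handed to SharedArrayBuffer::New, and GetBackingStore() can then be shared with another isolate so both wrappers alias the same memory. A minimal sketch, assuming live isolates on their respective threads; MakeSharedStore and WrapInWorker are illustrative names.

// Usage sketch (illustrative, not part of v8-array-buffer.h).
#include <memory>
#include "v8-array-buffer.h"
#include "v8-isolate.h"
#include "v8-local-handle.h"

// Create a SharedArrayBuffer in |isolate| and return its backing store so a
// second isolate can wrap the same memory.
std::shared_ptr<v8::BackingStore> MakeSharedStore(v8::Isolate* isolate,
                                                  size_t byte_length) {
  v8::HandleScope scope(isolate);
  v8::Local<v8::SharedArrayBuffer> sab =
      v8::SharedArrayBuffer::New(isolate, byte_length);
  return sab->GetBackingStore();
}

// On another isolate (e.g. a worker thread), wrap the shared store again; both
// SharedArrayBuffers now alias the same memory.
v8::Local<v8::SharedArrayBuffer> WrapInWorker(
    v8::Isolate* worker_isolate, std::shared_ptr<v8::BackingStore> store) {
  v8::EscapableHandleScope scope(worker_isolate);
  return scope.Escape(
      v8::SharedArrayBuffer::New(worker_isolate, std::move(store)));
}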

}  // namespace v8

#endif  // INCLUDE_V8_ARRAY_BUFFER_H_