feat(android): embed local node runtime

This commit is contained in:
DeskClaw Bot
2026-04-21 17:22:37 +00:00
Unverified
parent 8512d32079
commit a929b32b97
690 changed files with 213507 additions and 0 deletions

View File

@@ -0,0 +1,310 @@
// Copyright 2020 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef INCLUDE_CPPGC_ALLOCATION_H_
#define INCLUDE_CPPGC_ALLOCATION_H_
#include <atomic>
#include <cstddef>
#include <cstdint>
#include <new>
#include <type_traits>
#include <utility>
#include "cppgc/custom-space.h"
#include "cppgc/internal/api-constants.h"
#include "cppgc/internal/gc-info.h"
#include "cppgc/type-traits.h"
#include "v8config.h" // NOLINT(build/include_directory)
#if defined(__has_attribute)
#if __has_attribute(assume_aligned)
#define CPPGC_DEFAULT_ALIGNED \
__attribute__((assume_aligned(api_constants::kDefaultAlignment)))
#define CPPGC_DOUBLE_WORD_ALIGNED \
__attribute__((assume_aligned(2 * api_constants::kDefaultAlignment)))
#endif // __has_attribute(assume_aligned)
#endif // defined(__has_attribute)
#if !defined(CPPGC_DEFAULT_ALIGNED)
#define CPPGC_DEFAULT_ALIGNED
#endif
#if !defined(CPPGC_DOUBLE_WORD_ALIGNED)
#define CPPGC_DOUBLE_WORD_ALIGNED
#endif
namespace cppgc {
/**
* AllocationHandle is used to allocate garbage-collected objects.
*/
class AllocationHandle;
namespace internal {
// Strongly-typed alignment value, similar to C++17's std::align_val_t.
enum class AlignVal : size_t {};
// Internal helper performing the actual allocation for MakeGarbageCollected().
// Statically dispatches to the matching Allocate() overload based on the
// requested custom space and alignment, and provides the post-construction
// bookkeeping hook.
class V8_EXPORT MakeGarbageCollectedTraitInternal {
 protected:
  // Flips the "fully constructed" bit in the object's header so that the GC
  // may treat the object precisely from now on.
  static inline void MarkObjectAsFullyConstructed(const void* payload) {
    // See api_constants for an explanation of the constants.
    std::atomic<uint16_t>* atomic_mutable_bitfield =
        reinterpret_cast<std::atomic<uint16_t>*>(
            const_cast<uint16_t*>(reinterpret_cast<const uint16_t*>(
                reinterpret_cast<const uint8_t*>(payload) -
                api_constants::kFullyConstructedBitFieldOffsetFromPayload)));
    // It's safe to use a split load+store here (instead of a read-modify-write
    // operation), since it's guaranteed that this 16-bit bitfield is only
    // modified by a single thread. This is cheaper in terms of code bloat (on
    // ARM) and performance.
    uint16_t value = atomic_mutable_bitfield->load(std::memory_order_relaxed);
    value |= api_constants::kFullyConstructedBitMask;
    atomic_mutable_bitfield->store(value, std::memory_order_release);
  }

  // Dispatch based on compile-time information.
  //
  // Default implementation is for a custom space with >`kDefaultAlignment` byte
  // alignment.
  template <typename GCInfoType, typename CustomSpace, size_t alignment>
  struct AllocationDispatcher final {
    static void* Invoke(AllocationHandle& handle, size_t size) {
      static_assert(std::is_base_of<CustomSpaceBase, CustomSpace>::value,
                    "Custom space must inherit from CustomSpaceBase.");
      static_assert(
          !CustomSpace::kSupportsCompaction,
          "Custom spaces that support compaction do not support allocating "
          "objects with non-default (i.e. word-sized) alignment.");
      return MakeGarbageCollectedTraitInternal::Allocate(
          handle, size, static_cast<AlignVal>(alignment),
          internal::GCInfoTrait<GCInfoType>::Index(), CustomSpace::kSpaceIndex);
    }
  };

  // Fast path for regular allocations for the default space with
  // `kDefaultAlignment` byte alignment.
  template <typename GCInfoType>
  struct AllocationDispatcher<GCInfoType, void,
                              api_constants::kDefaultAlignment>
      final {
    static void* Invoke(AllocationHandle& handle, size_t size) {
      return MakeGarbageCollectedTraitInternal::Allocate(
          handle, size, internal::GCInfoTrait<GCInfoType>::Index());
    }
  };

  // Default space with >`kDefaultAlignment` byte alignment.
  template <typename GCInfoType, size_t alignment>
  struct AllocationDispatcher<GCInfoType, void, alignment> final {
    static void* Invoke(AllocationHandle& handle, size_t size) {
      return MakeGarbageCollectedTraitInternal::Allocate(
          handle, size, static_cast<AlignVal>(alignment),
          internal::GCInfoTrait<GCInfoType>::Index());
    }
  };

  // Custom space with `kDefaultAlignment` byte alignment.
  template <typename GCInfoType, typename CustomSpace>
  struct AllocationDispatcher<GCInfoType, CustomSpace,
                              api_constants::kDefaultAlignment>
      final {
    static void* Invoke(AllocationHandle& handle, size_t size) {
      static_assert(std::is_base_of<CustomSpaceBase, CustomSpace>::value,
                    "Custom space must inherit from CustomSpaceBase.");
      return MakeGarbageCollectedTraitInternal::Allocate(
          handle, size, internal::GCInfoTrait<GCInfoType>::Index(),
          CustomSpace::kSpaceIndex);
    }
  };

 private:
  // Allocate() overloads implemented by the GC. The CPPGC_*_ALIGNED attributes
  // tell the compiler which alignment the returned memory is guaranteed to
  // have.
  static void* CPPGC_DEFAULT_ALIGNED Allocate(cppgc::AllocationHandle&, size_t,
                                              GCInfoIndex);
  static void* CPPGC_DOUBLE_WORD_ALIGNED Allocate(cppgc::AllocationHandle&,
                                                  size_t, AlignVal,
                                                  GCInfoIndex);
  static void* CPPGC_DEFAULT_ALIGNED Allocate(cppgc::AllocationHandle&, size_t,
                                              GCInfoIndex, CustomSpaceIndex);
  static void* CPPGC_DOUBLE_WORD_ALIGNED Allocate(cppgc::AllocationHandle&,
                                                  size_t, AlignVal, GCInfoIndex,
                                                  CustomSpaceIndex);

  friend class HeapObjectHeader;
};
} // namespace internal
/**
 * Base trait that provides utilities for advanced users that have custom
* allocation needs (e.g., overriding size). It's expected that users override
* MakeGarbageCollectedTrait (see below) and inherit from
* MakeGarbageCollectedTraitBase and make use of the low-level primitives
* offered to allocate and construct an object.
*/
template <typename T>
class MakeGarbageCollectedTraitBase
    : private internal::MakeGarbageCollectedTraitInternal {
 private:
  static_assert(internal::IsGarbageCollectedType<T>::value,
                "T needs to be a garbage collected object");
  static_assert(!IsGarbageCollectedWithMixinTypeV<T> ||
                    sizeof(T) <=
                        internal::api_constants::kLargeObjectSizeThreshold,
                "GarbageCollectedMixin may not be a large object");

 protected:
  /**
   * Allocates memory for an object of type T.
   *
   * \param handle AllocationHandle identifying the heap to allocate the object
   * on.
   * \param size The size that should be reserved for the object.
   * \returns the memory to construct an object of type T on.
   */
  V8_INLINE static void* Allocate(AllocationHandle& handle, size_t size) {
    static_assert(
        std::is_base_of<typename T::ParentMostGarbageCollectedType, T>::value,
        "U of GarbageCollected<U> must be a base of T. Check "
        "GarbageCollected<T> base class inheritance.");
    // Round the requested alignment up to at least the GC's default alignment.
    static constexpr size_t kWantedAlignment =
        alignof(T) < internal::api_constants::kDefaultAlignment
            ? internal::api_constants::kDefaultAlignment
            : alignof(T);
    static_assert(
        kWantedAlignment <= internal::api_constants::kMaxSupportedAlignment,
        "Requested alignment larger than alignof(std::max_align_t) bytes. "
        "Please file a bug to possibly get this restriction lifted.");
    // Dispatch on (folded GCInfo type, target space, alignment); see
    // AllocationDispatcher specializations above.
    return AllocationDispatcher<
        typename internal::GCInfoFolding<
            T, typename T::ParentMostGarbageCollectedType>::ResultType,
        typename SpaceTrait<T>::Space, kWantedAlignment>::Invoke(handle, size);
  }

  /**
   * Marks an object as fully constructed, resulting in precise handling by the
   * garbage collector.
   *
   * \param payload The base pointer the object is allocated at.
   */
  V8_INLINE static void MarkObjectAsFullyConstructed(const void* payload) {
    internal::MakeGarbageCollectedTraitInternal::MarkObjectAsFullyConstructed(
        payload);
  }
};
/**
* Passed to MakeGarbageCollected to specify how many bytes should be appended
* to the allocated object.
*
* Example:
* \code
* class InlinedArray final : public GarbageCollected<InlinedArray> {
* public:
* explicit InlinedArray(size_t bytes) : size(bytes), byte_array(this + 1) {}
* void Trace(Visitor*) const {}
* size_t size;
* char* byte_array;
* };
*
 * auto* inlined_array = MakeGarbageCollected<InlinedArray>(
 *     GetAllocationHandle(), AdditionalBytes(4), 4);
* for (size_t i = 0; i < 4; i++) {
* Process(inlined_array->byte_array[i]);
* }
* \endcode
*/
struct AdditionalBytes {
  // Stores the requested number of extra trailing bytes; `explicit` prevents
  // accidental conversions from plain integers at call sites.
  constexpr explicit AdditionalBytes(size_t count) : value(count) {}
  const size_t value;
};
/**
* Default trait class that specifies how to construct an object of type T.
* Advanced users may override how an object is constructed using the utilities
* that are provided through MakeGarbageCollectedTraitBase.
*
* Any trait overriding construction must
* - allocate through `MakeGarbageCollectedTraitBase<T>::Allocate`;
* - mark the object as fully constructed using
* `MakeGarbageCollectedTraitBase<T>::MarkObjectAsFullyConstructed`;
*/
template <typename T>
class MakeGarbageCollectedTrait : public MakeGarbageCollectedTraitBase<T> {
 public:
  // Allocates storage for `T`, constructs the object in place, and flags it
  // as fully constructed for the garbage collector.
  template <typename... Args>
  static T* Call(AllocationHandle& handle, Args&&... args) {
    void* storage =
        MakeGarbageCollectedTraitBase<T>::Allocate(handle, sizeof(T));
    T* result = ::new (storage) T(std::forward<Args>(args)...);
    MakeGarbageCollectedTraitBase<T>::MarkObjectAsFullyConstructed(result);
    return result;
  }

  // Same as above but reserves `additional_bytes.value` extra bytes of
  // trailing storage behind the object.
  template <typename... Args>
  static T* Call(AllocationHandle& handle, AdditionalBytes additional_bytes,
                 Args&&... args) {
    void* storage = MakeGarbageCollectedTraitBase<T>::Allocate(
        handle, sizeof(T) + additional_bytes.value);
    T* result = ::new (storage) T(std::forward<Args>(args)...);
    MakeGarbageCollectedTraitBase<T>::MarkObjectAsFullyConstructed(result);
    return result;
  }
};
/**
* Allows users to specify a post-construction callback for specific types. The
* callback is invoked on the instance of type T right after it has been
* constructed. This can be useful when the callback requires a
* fully-constructed object to be able to dispatch to virtual methods.
*/
template <typename T, typename = void>
struct PostConstructionCallbackTrait {
  // Default: no post-construction callback. Specialize this trait (the second
  // template parameter allows SFINAE) to install one for specific types.
  static void Call(T*) {}
};
/**
* Constructs a managed object of type T where T transitively inherits from
* GarbageCollected.
*
* \param args List of arguments with which an instance of T will be
* constructed.
* \returns an instance of type T.
*/
template <typename T, typename... Args>
V8_INLINE T* MakeGarbageCollected(AllocationHandle& handle, Args&&... args) {
  // Delegate construction to the (possibly user-overridden) trait, then let
  // PostConstructionCallbackTrait observe the fully constructed instance.
  T* instance =
      MakeGarbageCollectedTrait<T>::Call(handle, std::forward<Args>(args)...);
  PostConstructionCallbackTrait<T>::Call(instance);
  return instance;
}
/**
* Constructs a managed object of type T where T transitively inherits from
 * GarbageCollected. Created objects will have additional bytes appended to
 * them. Allocated memory will suffice for `sizeof(T) + additional_bytes`.
*
* \param additional_bytes Denotes how many bytes to append to T.
* \param args List of arguments with which an instance of T will be
* constructed.
* \returns an instance of type T.
*/
template <typename T, typename... Args>
V8_INLINE T* MakeGarbageCollected(AllocationHandle& handle,
                                  AdditionalBytes additional_bytes,
                                  Args&&... args) {
  // Forward the extra-byte request to the trait; the post-construction
  // callback runs on the fully constructed instance afterwards.
  T* instance = MakeGarbageCollectedTrait<T>::Call(handle, additional_bytes,
                                                   std::forward<Args>(args)...);
  PostConstructionCallbackTrait<T>::Call(instance);
  return instance;
}
} // namespace cppgc
#undef CPPGC_DEFAULT_ALIGNED
#undef CPPGC_DOUBLE_WORD_ALIGNED
#endif // INCLUDE_CPPGC_ALLOCATION_H_

View File

@@ -0,0 +1,29 @@
// Copyright 2020 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef INCLUDE_CPPGC_COMMON_H_
#define INCLUDE_CPPGC_COMMON_H_
// TODO(chromium:1056170): Remove dependency on v8.
#include "v8config.h" // NOLINT(build/include_directory)
namespace cppgc {
/**
* Indicator for the stack state of the embedder.
*/
enum class EmbedderStackState {
  /**
   * Stack may contain interesting heap pointers.
   */
  // NOTE(review): presumably forces the GC to treat the stack conservatively —
  // confirm against the heap implementation.
  kMayContainHeapPointers,
  /**
   * Stack does not contain any interesting heap pointers.
   */
  kNoHeapPointers,
};
} // namespace cppgc
#endif // INCLUDE_CPPGC_COMMON_H_

View File

@@ -0,0 +1,465 @@
// Copyright 2020 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef INCLUDE_CPPGC_CROSS_THREAD_PERSISTENT_H_
#define INCLUDE_CPPGC_CROSS_THREAD_PERSISTENT_H_
#include <atomic>
#include "cppgc/internal/persistent-node.h"
#include "cppgc/internal/pointer-policies.h"
#include "cppgc/persistent.h"
#include "cppgc/visitor.h"
namespace cppgc {
namespace internal {
// Wrapper around PersistentBase that allows accessing poisoned memory when
// using ASAN. This is needed as the GC of the heap that owns the value
// of a CTP, may clear it (heap termination, weakness) while the object
// holding the CTP may be poisoned as itself may be deemed dead.
class CrossThreadPersistentBase : public PersistentBase {
 public:
  CrossThreadPersistentBase() = default;
  explicit CrossThreadPersistentBase(const void* raw) : PersistentBase(raw) {}

  // The *FromGC() accessors below bypass ASAN poisoning checks; see the class
  // comment for why the GC may touch poisoned handles.
  V8_CLANG_NO_SANITIZE("address") const void* GetValueFromGC() const {
    return raw_;
  }

  V8_CLANG_NO_SANITIZE("address")
  PersistentNode* GetNodeFromGC() const { return node_; }

  V8_CLANG_NO_SANITIZE("address")
  void ClearFromGC() const {
    raw_ = nullptr;
    SetNodeSafe(nullptr);
  }

  // GetNodeSafe() can be used for a thread-safe IsValid() check in a
  // double-checked locking pattern. See ~BasicCrossThreadPersistent.
  PersistentNode* GetNodeSafe() const {
    return reinterpret_cast<std::atomic<PersistentNode*>*>(&node_)->load(
        std::memory_order_acquire);
  }

  // The GC writes using SetNodeSafe() while holding the lock.
  V8_CLANG_NO_SANITIZE("address")
  void SetNodeSafe(PersistentNode* value) const {
#if defined(__has_feature)
#if __has_feature(address_sanitizer)
#define V8_IS_ASAN 1
#endif
#endif

#ifdef V8_IS_ASAN
    // Under ASAN, use the compiler intrinsic so the (possibly poisoned) store
    // is not instrumented.
    __atomic_store(&node_, &value, __ATOMIC_RELEASE);
#else   // !V8_IS_ASAN
    // Non-ASAN builds can use atomics. This also covers MSVC which does not
    // have the __atomic_store intrinsic.
    reinterpret_cast<std::atomic<PersistentNode*>*>(&node_)->store(
        value, std::memory_order_release);
#endif  // !V8_IS_ASAN

#undef V8_IS_ASAN
  }
};
// A persistent handle that may be created, assigned, and destroyed on threads
// other than the one the owning heap runs on. Mutating operations synchronize
// via PersistentRegionLock unless documented as "Not thread-safe".
template <typename T, typename WeaknessPolicy, typename LocationPolicy,
          typename CheckingPolicy>
class BasicCrossThreadPersistent final : public CrossThreadPersistentBase,
                                         public LocationPolicy,
                                         private WeaknessPolicy,
                                         private CheckingPolicy {
 public:
  using typename WeaknessPolicy::IsStrongPersistent;
  using PointeeType = T;

  ~BasicCrossThreadPersistent() {
    // This implements fast path for destroying empty/sentinel.
    //
    // Simplified version of `AssignUnsafe()` to allow calling without a
    // complete type `T`. Uses double-checked locking with a simple thread-safe
    // check for a valid handle based on a node.
    if (GetNodeSafe()) {
      PersistentRegionLock guard;
      const void* old_value = GetValue();
      // The fast path check (GetNodeSafe()) does not acquire the lock. Recheck
      // validity while holding the lock to ensure the reference has not been
      // cleared.
      if (IsValid(old_value)) {
        CrossThreadPersistentRegion& region =
            this->GetPersistentRegion(old_value);
        region.FreeNode(GetNode());
        SetNode(nullptr);
      } else {
        CPPGC_DCHECK(!GetNode());
      }
    }
    // No need to call SetValue() as the handle is not used anymore. This can
    // leave behind stale sentinel values but will always destroy the underlying
    // node.
  }

  // Empty, nullptr, and sentinel constructors allocate no node and therefore
  // do not take the region lock.
  BasicCrossThreadPersistent(
      const SourceLocation& loc = SourceLocation::Current())
      : LocationPolicy(loc) {}

  BasicCrossThreadPersistent(
      std::nullptr_t, const SourceLocation& loc = SourceLocation::Current())
      : LocationPolicy(loc) {}

  BasicCrossThreadPersistent(
      SentinelPointer s, const SourceLocation& loc = SourceLocation::Current())
      : CrossThreadPersistentBase(s), LocationPolicy(loc) {}

  // Constructing from a raw pointer allocates a trace node and thus takes the
  // region lock.
  BasicCrossThreadPersistent(
      T* raw, const SourceLocation& loc = SourceLocation::Current())
      : CrossThreadPersistentBase(raw), LocationPolicy(loc) {
    if (!IsValid(raw)) return;
    PersistentRegionLock guard;
    CrossThreadPersistentRegion& region = this->GetPersistentRegion(raw);
    SetNode(region.AllocateNode(this, &Trace));
    this->CheckPointer(raw);
  }

  // Tag restricting the lock-assuming constructor below to other
  // BasicCrossThreadPersistent instantiations (used by To()).
  class UnsafeCtorTag {
   private:
    UnsafeCtorTag() = default;
    template <typename U, typename OtherWeaknessPolicy,
              typename OtherLocationPolicy, typename OtherCheckingPolicy>
    friend class BasicCrossThreadPersistent;
  };

  // "Unsafe" raw-pointer constructor: the caller must already hold the
  // PersistentRegionLock.
  BasicCrossThreadPersistent(
      UnsafeCtorTag, T* raw,
      const SourceLocation& loc = SourceLocation::Current())
      : CrossThreadPersistentBase(raw), LocationPolicy(loc) {
    if (!IsValid(raw)) return;
    CrossThreadPersistentRegion& region = this->GetPersistentRegion(raw);
    SetNode(region.AllocateNode(this, &Trace));
    this->CheckPointer(raw);
  }

  BasicCrossThreadPersistent(
      T& raw, const SourceLocation& loc = SourceLocation::Current())
      : BasicCrossThreadPersistent(&raw, loc) {}

  // Constructing from a Member handle reads the member's raw pointer.
  template <typename U, typename MemberBarrierPolicy,
            typename MemberWeaknessTag, typename MemberCheckingPolicy,
            typename = std::enable_if_t<std::is_base_of<T, U>::value>>
  BasicCrossThreadPersistent(
      internal::BasicMember<U, MemberBarrierPolicy, MemberWeaknessTag,
                            MemberCheckingPolicy>
          member,
      const SourceLocation& loc = SourceLocation::Current())
      : BasicCrossThreadPersistent(member.Get(), loc) {}

  BasicCrossThreadPersistent(
      const BasicCrossThreadPersistent& other,
      const SourceLocation& loc = SourceLocation::Current())
      : BasicCrossThreadPersistent(loc) {
    // Invoke operator=.
    *this = other;
  }

  // Heterogeneous ctor.
  template <typename U, typename OtherWeaknessPolicy,
            typename OtherLocationPolicy, typename OtherCheckingPolicy,
            typename = std::enable_if_t<std::is_base_of<T, U>::value>>
  BasicCrossThreadPersistent(
      const BasicCrossThreadPersistent<U, OtherWeaknessPolicy,
                                       OtherLocationPolicy,
                                       OtherCheckingPolicy>& other,
      const SourceLocation& loc = SourceLocation::Current())
      : BasicCrossThreadPersistent(loc) {
    *this = other;
  }

  BasicCrossThreadPersistent(
      BasicCrossThreadPersistent&& other,
      const SourceLocation& loc = SourceLocation::Current()) noexcept {
    // Invoke operator=.
    *this = std::move(other);
  }

  BasicCrossThreadPersistent& operator=(
      const BasicCrossThreadPersistent& other) {
    PersistentRegionLock guard;
    AssignSafe(guard, other.Get());
    return *this;
  }

  template <typename U, typename OtherWeaknessPolicy,
            typename OtherLocationPolicy, typename OtherCheckingPolicy,
            typename = std::enable_if_t<std::is_base_of<T, U>::value>>
  BasicCrossThreadPersistent& operator=(
      const BasicCrossThreadPersistent<U, OtherWeaknessPolicy,
                                       OtherLocationPolicy,
                                       OtherCheckingPolicy>& other) {
    PersistentRegionLock guard;
    AssignSafe(guard, other.Get());
    return *this;
  }

  // Move assignment transfers ownership of `other`'s node instead of
  // allocating a fresh one.
  BasicCrossThreadPersistent& operator=(BasicCrossThreadPersistent&& other) {
    if (this == &other) return *this;
    Clear();
    PersistentRegionLock guard;
    PersistentBase::operator=(std::move(other));
    LocationPolicy::operator=(std::move(other));
    if (!IsValid(GetValue())) return *this;
    GetNode()->UpdateOwner(this);
    other.SetValue(nullptr);
    other.SetNode(nullptr);
    this->CheckPointer(Get());
    return *this;
  }

  /**
   * Assigns a raw pointer.
   *
   * Note: **Not thread-safe.**
   */
  BasicCrossThreadPersistent& operator=(T* other) {
    AssignUnsafe(other);
    return *this;
  }

  // Assignment from member.
  template <typename U, typename MemberBarrierPolicy,
            typename MemberWeaknessTag, typename MemberCheckingPolicy,
            typename = std::enable_if_t<std::is_base_of<T, U>::value>>
  BasicCrossThreadPersistent& operator=(
      internal::BasicMember<U, MemberBarrierPolicy, MemberWeaknessTag,
                            MemberCheckingPolicy>
          member) {
    return operator=(member.Get());
  }

  /**
   * Assigns a nullptr.
   *
   * \returns the handle.
   */
  BasicCrossThreadPersistent& operator=(std::nullptr_t) {
    Clear();
    return *this;
  }

  /**
   * Assigns the sentinel pointer.
   *
   * \returns the handle.
   */
  BasicCrossThreadPersistent& operator=(SentinelPointer s) {
    PersistentRegionLock guard;
    AssignSafe(guard, s);
    return *this;
  }

  /**
   * Returns a pointer to the stored object.
   *
   * Note: **Not thread-safe.**
   *
   * \returns a pointer to the stored object.
   */
  // CFI cast exemption to allow passing SentinelPointer through T* and support
  // heterogeneous assignments between different Member and Persistent handles
  // based on their actual types.
  V8_CLANG_NO_SANITIZE("cfi-unrelated-cast") T* Get() const {
    return static_cast<T*>(const_cast<void*>(GetValue()));
  }

  /**
   * Clears the stored object.
   */
  void Clear() {
    PersistentRegionLock guard;
    AssignSafe(guard, nullptr);
  }

  /**
   * Returns a pointer to the stored object and releases it.
   *
   * Note: **Not thread-safe.**
   *
   * \returns a pointer to the stored object.
   */
  T* Release() {
    T* result = Get();
    Clear();
    return result;
  }

  /**
   * Conversion to boolean.
   *
   * Note: **Not thread-safe.**
   *
   * \returns true if an actual object has been stored and false otherwise.
   */
  explicit operator bool() const { return Get(); }

  /**
   * Conversion to object of type T.
   *
   * Note: **Not thread-safe.**
   *
   * \returns the object.
   */
  operator T*() const { return Get(); }

  /**
   * Dereferences the stored object.
   *
   * Note: **Not thread-safe.**
   */
  T* operator->() const { return Get(); }
  T& operator*() const { return *Get(); }

  // Converts this handle into a (possibly differently parameterized) handle of
  // type `U`, allocating the new node under the region lock.
  template <typename U, typename OtherWeaknessPolicy = WeaknessPolicy,
            typename OtherLocationPolicy = LocationPolicy,
            typename OtherCheckingPolicy = CheckingPolicy>
  BasicCrossThreadPersistent<U, OtherWeaknessPolicy, OtherLocationPolicy,
                             OtherCheckingPolicy>
  To() const {
    using OtherBasicCrossThreadPersistent =
        BasicCrossThreadPersistent<U, OtherWeaknessPolicy, OtherLocationPolicy,
                                   OtherCheckingPolicy>;
    PersistentRegionLock guard;
    return OtherBasicCrossThreadPersistent(
        typename OtherBasicCrossThreadPersistent::UnsafeCtorTag(),
        static_cast<U*>(Get()));
  }

  // Obtains a strong handle from this handle; only available on weak handles.
  template <typename U = T,
            typename = typename std::enable_if<!BasicCrossThreadPersistent<
                U, WeaknessPolicy>::IsStrongPersistent::value>::type>
  BasicCrossThreadPersistent<U, internal::StrongCrossThreadPersistentPolicy>
  Lock() const {
    return BasicCrossThreadPersistent<
        U, internal::StrongCrossThreadPersistentPolicy>(*this);
  }

 private:
  // A handle is valid iff it stores an actual object, i.e., neither nullptr
  // nor the sentinel.
  static bool IsValid(const void* ptr) {
    return ptr && ptr != kSentinelPointer;
  }

  // Trace callback registered with the persistent node.
  static void Trace(Visitor* v, const void* ptr) {
    const auto* handle = static_cast<const BasicCrossThreadPersistent*>(ptr);
    v->TraceRoot(*handle, handle->Location());
  }

  // Assignment for callers that do not hold the region lock; acquires it only
  // when nodes may need to be freed or allocated.
  void AssignUnsafe(T* ptr) {
    const void* old_value = GetValue();
    if (IsValid(old_value)) {
      PersistentRegionLock guard;
      old_value = GetValue();
      // The fast path check (IsValid()) does not acquire the lock. Reload
      // the value to ensure the reference has not been cleared.
      if (IsValid(old_value)) {
        CrossThreadPersistentRegion& region =
            this->GetPersistentRegion(old_value);
        if (IsValid(ptr) && (&region == &this->GetPersistentRegion(ptr))) {
          // Same region: the existing node can be reused.
          SetValue(ptr);
          this->CheckPointer(ptr);
          return;
        }
        region.FreeNode(GetNode());
        SetNode(nullptr);
      } else {
        CPPGC_DCHECK(!GetNode());
      }
    }
    SetValue(ptr);
    if (!IsValid(ptr)) return;
    PersistentRegionLock guard;
    SetNode(this->GetPersistentRegion(ptr).AllocateNode(this, &Trace));
    this->CheckPointer(ptr);
  }

  // Assignment with the region lock already held by the caller.
  void AssignSafe(PersistentRegionLock&, T* ptr) {
    PersistentRegionLock::AssertLocked();
    const void* old_value = GetValue();
    if (IsValid(old_value)) {
      CrossThreadPersistentRegion& region =
          this->GetPersistentRegion(old_value);
      if (IsValid(ptr) && (&region == &this->GetPersistentRegion(ptr))) {
        // Same region: the existing node can be reused.
        SetValue(ptr);
        this->CheckPointer(ptr);
        return;
      }
      region.FreeNode(GetNode());
      SetNode(nullptr);
    }
    SetValue(ptr);
    if (!IsValid(ptr)) return;
    SetNode(this->GetPersistentRegion(ptr).AllocateNode(this, &Trace));
    this->CheckPointer(ptr);
  }

  // Invoked by the GC to clear the handle, e.g. on heap termination or when
  // processing weakness.
  void ClearFromGC() const {
    if (IsValid(GetValueFromGC())) {
      WeaknessPolicy::GetPersistentRegion(GetValueFromGC())
          .FreeNode(GetNodeFromGC());
      CrossThreadPersistentBase::ClearFromGC();
    }
  }

  // See Get() for details.
  V8_CLANG_NO_SANITIZE("cfi-unrelated-cast")
  T* GetFromGC() const {
    return static_cast<T*>(const_cast<void*>(GetValueFromGC()));
  }

  friend class cppgc::Visitor;
};
// Marks handles using the weak policy as weak for type-trait queries.
template <typename T, typename LocationPolicy, typename CheckingPolicy>
struct IsWeak<
    BasicCrossThreadPersistent<T, internal::WeakCrossThreadPersistentPolicy,
                               LocationPolicy, CheckingPolicy>>
    : std::true_type {};
} // namespace internal
namespace subtle {
/**
 * **DO NOT USE: Has known caveats, see below.**
 *
 * CrossThreadPersistent allows retaining objects from threads other than the
 * thread the owning heap is operating on.
 *
 * Known caveats:
 * - Does not protect the heap owning an object from terminating.
 * - Reaching transitively through the graph is unsupported as objects may be
 *   moved concurrently on the thread owning the object.
 */
template <typename T>
using CrossThreadPersistent = internal::BasicCrossThreadPersistent<
    T, internal::StrongCrossThreadPersistentPolicy>;
/**
 * **DO NOT USE: Has known caveats, see below.**
 *
 * WeakCrossThreadPersistent allows weakly retaining objects from threads other
 * than the thread the owning heap is operating on.
 *
 * Known caveats:
 * - Does not protect the heap owning an object from terminating.
 * - Reaching transitively through the graph is unsupported as objects may be
 *   moved concurrently on the thread owning the object.
 */
template <typename T>
using WeakCrossThreadPersistent = internal::BasicCrossThreadPersistent<
    T, internal::WeakCrossThreadPersistentPolicy>;
} // namespace subtle
} // namespace cppgc
#endif // INCLUDE_CPPGC_CROSS_THREAD_PERSISTENT_H_

View File

@@ -0,0 +1,97 @@
// Copyright 2020 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef INCLUDE_CPPGC_CUSTOM_SPACE_H_
#define INCLUDE_CPPGC_CUSTOM_SPACE_H_
#include <stddef.h>
namespace cppgc {
/**
* Index identifying a custom space.
*/
struct CustomSpaceIndex {
  // Implicit conversion from a raw size_t is intentional here, hence the
  // single-argument constructor is deliberately non-explicit.
  constexpr CustomSpaceIndex(size_t index) : value(index) {}  // NOLINT
  size_t value;
};
/**
* Top-level base class for custom spaces. Users must inherit from CustomSpace
* below.
*/
class CustomSpaceBase {
 public:
  virtual ~CustomSpaceBase() = default;
  // Returns the unique index identifying this space.
  virtual CustomSpaceIndex GetCustomSpaceIndex() const = 0;
  // Returns whether the GC may compact objects in this space.
  virtual bool IsCompactable() const = 0;
};
/**
* Base class custom spaces should directly inherit from. The class inheriting
* from `CustomSpace` must define `kSpaceIndex` as unique space index. These
 * indices need to form a sequence starting at 0.
*
* Example:
* \code
* class CustomSpace1 : public CustomSpace<CustomSpace1> {
* public:
* static constexpr CustomSpaceIndex kSpaceIndex = 0;
* };
* class CustomSpace2 : public CustomSpace<CustomSpace2> {
* public:
* static constexpr CustomSpaceIndex kSpaceIndex = 1;
* };
* \endcode
*/
template <typename ConcreteCustomSpace>
class CustomSpace : public CustomSpaceBase {
 public:
  /**
   * Compaction is only supported on spaces that manually manage slots
   * recording.
   */
  static constexpr bool kSupportsCompaction = false;

  // Surfaces the derived class' statically-declared index at runtime.
  CustomSpaceIndex GetCustomSpaceIndex() const final {
    return ConcreteCustomSpace::kSpaceIndex;
  }
  // Surfaces the derived class' compaction support at runtime.
  bool IsCompactable() const final {
    return ConcreteCustomSpace::kSupportsCompaction;
  }
};
/**
* User-overridable trait that allows pinning types to custom spaces.
*/
template <typename T, typename = void>
struct SpaceTrait {
  // By default a type is not pinned to any custom space (`void` selects the
  // default spaces). Specializations may set `Space` to a CustomSpace type.
  using Space = void;
};
namespace internal {

// Statically determines whether a custom space supports compaction.
template <typename CustomSpace>
struct IsAllocatedOnCompactableSpaceImpl {
  static constexpr bool value = CustomSpace::kSupportsCompaction;
};

template <>
struct IsAllocatedOnCompactableSpaceImpl<void> {
  // Non-custom spaces are by default not compactable.
  static constexpr bool value = false;
};

// Evaluates to true when `T` is allocated on a compactable (custom) space
// according to SpaceTrait<T>.
template <typename T>
struct IsAllocatedOnCompactableSpace {
 public:
  static constexpr bool value =
      IsAllocatedOnCompactableSpaceImpl<typename SpaceTrait<T>::Space>::value;
};

}  // namespace internal
} // namespace cppgc
#endif // INCLUDE_CPPGC_CUSTOM_SPACE_H_

View File

@@ -0,0 +1,67 @@
// Copyright 2020 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef INCLUDE_CPPGC_DEFAULT_PLATFORM_H_
#define INCLUDE_CPPGC_DEFAULT_PLATFORM_H_
#include <memory>
#include "cppgc/platform.h"
#include "libplatform/libplatform.h"
#include "v8config.h" // NOLINT(build/include_directory)
namespace cppgc {
/**
* Platform provided by cppgc. Uses V8's DefaultPlatform provided by
* libplatform internally. Exception: `GetForegroundTaskRunner()`, see below.
*/
class V8_EXPORT DefaultPlatform : public Platform {
 public:
  using IdleTaskSupport = v8::platform::IdleTaskSupport;

  // Wraps a freshly created v8::platform default platform. All parameters are
  // forwarded to v8::platform::NewDefaultPlatform(); in-process stack dumping
  // is always disabled.
  explicit DefaultPlatform(
      int thread_pool_size = 0,
      IdleTaskSupport idle_task_support = IdleTaskSupport::kDisabled,
      std::unique_ptr<TracingController> tracing_controller = {})
      : v8_platform_(v8::platform::NewDefaultPlatform(
            thread_pool_size, idle_task_support,
            v8::platform::InProcessStackDumping::kDisabled,
            std::move(tracing_controller))) {}

  // Platform implementation: forwards to the wrapped v8::Platform.
  cppgc::PageAllocator* GetPageAllocator() override {
    return v8_platform_->GetPageAllocator();
  }

  double MonotonicallyIncreasingTime() override {
    return v8_platform_->MonotonicallyIncreasingTime();
  }

  std::shared_ptr<cppgc::TaskRunner> GetForegroundTaskRunner() override {
    // V8's default platform creates a new task runner when passed the
    // `v8::Isolate` pointer the first time. For non-default platforms this will
    // require getting the appropriate task runner.
    return v8_platform_->GetForegroundTaskRunner(kNoIsolate);
  }

  std::unique_ptr<cppgc::JobHandle> PostJob(
      cppgc::TaskPriority priority,
      std::unique_ptr<cppgc::JobTask> job_task) override {
    return v8_platform_->PostJob(priority, std::move(job_task));
  }

  TracingController* GetTracingController() override {
    return v8_platform_->GetTracingController();
  }

  // Grants access to the wrapped v8::Platform, e.g. for v8::Isolate setup.
  v8::Platform* GetV8Platform() const { return v8_platform_.get(); }

 protected:
  static constexpr v8::Isolate* kNoIsolate = nullptr;

  std::unique_ptr<v8::Platform> v8_platform_;
};
} // namespace cppgc
#endif // INCLUDE_CPPGC_DEFAULT_PLATFORM_H_

View File

@@ -0,0 +1,30 @@
// Copyright 2020 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef INCLUDE_CPPGC_EPHEMERON_PAIR_H_
#define INCLUDE_CPPGC_EPHEMERON_PAIR_H_
#include "cppgc/liveness-broker.h"
#include "cppgc/member.h"
namespace cppgc {
/**
* An ephemeron pair is used to conditionally retain an object.
* The `value` will be kept alive only if the `key` is alive.
*/
template <typename K, typename V>
struct EphemeronPair {
  EphemeronPair(K* k, V* v) : key(k), value(v) {}
  // Weak reference: does not keep the key alive on its own.
  WeakMember<K> key;
  // Strong reference to the conditionally-retained value.
  Member<V> value;

  // Drops `value` when the GC has found `key` to be dead.
  void ClearValueIfKeyIsDead(const LivenessBroker& broker) {
    if (!broker.IsHeapObjectAlive(key)) value = nullptr;
  }
};
} // namespace cppgc
#endif // INCLUDE_CPPGC_EPHEMERON_PAIR_H_

View File

@@ -0,0 +1,100 @@
// Copyright 2021 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef INCLUDE_CPPGC_EXPLICIT_MANAGEMENT_H_
#define INCLUDE_CPPGC_EXPLICIT_MANAGEMENT_H_
#include <cstddef>
#include "cppgc/allocation.h"
#include "cppgc/internal/logging.h"
#include "cppgc/type-traits.h"
namespace cppgc {
class HeapHandle;
namespace subtle {

// Forward declarations of the public API so that ExplicitManagementImpl can
// befriend the templates below.
template <typename T>
void FreeUnreferencedObject(HeapHandle& heap_handle, T& object);
template <typename T>
bool Resize(T& object, AdditionalBytes additional_bytes);

}  // namespace subtle
namespace internal {

// Type-erased entry points into the GC implementation. Private on purpose:
// only the subtle:: templates below may call them.
class ExplicitManagementImpl final {
 private:
  V8_EXPORT static void FreeUnreferencedObject(HeapHandle&, void*);
  V8_EXPORT static bool Resize(void*, size_t);

  template <typename T>
  friend void subtle::FreeUnreferencedObject(HeapHandle&, T&);
  template <typename T>
  friend bool subtle::Resize(T&, AdditionalBytes);
};

}  // namespace internal
namespace subtle {
/**
* Informs the garbage collector that `object` can be immediately reclaimed. The
* destructor may not be invoked immediately but only on next garbage
* collection.
*
* It is up to the embedder to guarantee that no other object holds a reference
* to `object` after calling `FreeUnreferencedObject()`. In case such a
 * reference exists, its use results in a use-after-free.
*
* To aid in using the API, `FreeUnreferencedObject()` may be called from
* destructors on objects that would be reclaimed in the same garbage collection
* cycle.
*
* \param heap_handle The corresponding heap.
* \param object Reference to an object that is of type `GarbageCollected` and
* should be immediately reclaimed.
*/
template <typename T>
void FreeUnreferencedObject(HeapHandle& heap_handle, T& object) {
  static_assert(IsGarbageCollectedTypeV<T>,
                "Object must be of type GarbageCollected.");
  // Forward to the type-erased implementation.
  internal::ExplicitManagementImpl::FreeUnreferencedObject(heap_handle,
                                                           &object);
}
/**
* Tries to resize `object` of type `T` with additional bytes on top of
* sizeof(T). Resizing is only useful with trailing inlined storage, see e.g.
* `MakeGarbageCollected(AllocationHandle&, AdditionalBytes)`.
*
* `Resize()` performs growing or shrinking as needed and may skip the operation
* for internal reasons, see return value.
*
* It is up to the embedder to guarantee that in case of shrinking a larger
* object down, the reclaimed area is not used anymore. Any subsequent use
* results in a use-after-free.
*
* The `object` must be live when calling `Resize()`.
*
* \param object Reference to an object that is of type `GarbageCollected` and
* should be resized.
* \param additional_bytes Bytes in addition to sizeof(T) that the object should
* provide.
* \returns true when the operation was successful and the result can be relied
* on, and false otherwise.
*/
template <typename T>
bool Resize(T& object, AdditionalBytes additional_bytes) {
  // Compile-time guard: only cppgc-managed objects may be resized.
  static_assert(IsGarbageCollectedTypeV<T>,
                "Object must be of type GarbageCollected.");
  // The requested total size is the base object plus the trailing inline
  // storage the caller asked for.
  return internal::ExplicitManagementImpl::Resize(
      &object, sizeof(T) + additional_bytes.value);
}
} // namespace subtle
} // namespace cppgc
#endif // INCLUDE_CPPGC_EXPLICIT_MANAGEMENT_H_

View File

@@ -0,0 +1,106 @@
// Copyright 2020 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef INCLUDE_CPPGC_GARBAGE_COLLECTED_H_
#define INCLUDE_CPPGC_GARBAGE_COLLECTED_H_
#include "cppgc/internal/api-constants.h"
#include "cppgc/platform.h"
#include "cppgc/trace-trait.h"
#include "cppgc/type-traits.h"
namespace cppgc {
class Visitor;
/**
* Base class for managed objects. Only descendent types of `GarbageCollected`
* can be constructed using `MakeGarbageCollected()`. Must be inherited from as
* left-most base class.
*
* Types inheriting from GarbageCollected must provide a method of
 * signature `void Trace(cppgc::Visitor*) const` that dispatches all managed
* pointers to the visitor and delegates to garbage-collected base classes.
* The method must be virtual if the type is not directly a child of
* GarbageCollected and marked as final.
*
* \code
* // Example using final class.
* class FinalType final : public GarbageCollected<FinalType> {
* public:
* void Trace(cppgc::Visitor* visitor) const {
* // Dispatch using visitor->Trace(...);
* }
* };
*
* // Example using non-final base class.
* class NonFinalBase : public GarbageCollected<NonFinalBase> {
* public:
* virtual void Trace(cppgc::Visitor*) const {}
* };
*
* class FinalChild final : public NonFinalBase {
* public:
* void Trace(cppgc::Visitor* visitor) const final {
* // Dispatch using visitor->Trace(...);
* NonFinalBase::Trace(visitor);
* }
* };
* \endcode
*/
template <typename T>
class GarbageCollected {
 public:
  // Marker alias detected by cppgc's type traits (see type-traits.h) to
  // identify garbage-collected types at compile time.
  using IsGarbageCollectedTypeMarker = void;
  // The left-most GarbageCollected class of the hierarchy, i.e., `T` itself.
  using ParentMostGarbageCollectedType = T;

  // Must use MakeGarbageCollected.
  void* operator new(size_t) = delete;
  void* operator new[](size_t) = delete;
  // The garbage collector is taking care of reclaiming the object. Also,
  // virtual destructor requires an unambiguous, accessible 'operator delete'.
  void operator delete(void*) {
    // In checked builds, manual deletion is a fatal error; in regular builds
    // it is silently a no-op (the GC owns the object's lifetime).
#ifdef V8_ENABLE_CHECKS
    internal::Fatal(
        "Manually deleting a garbage collected object is not allowed");
#endif  // V8_ENABLE_CHECKS
  }
  void operator delete[](void*) = delete;

 protected:
  // Only constructible through MakeGarbageCollected() in derived types.
  GarbageCollected() = default;
};
/**
* Base class for managed mixin objects. Such objects cannot be constructed
* directly but must be mixed into the inheritance hierarchy of a
* GarbageCollected object.
*
* Types inheriting from GarbageCollectedMixin must override a virtual method
 * of signature `void Trace(cppgc::Visitor*) const` that dispatches all managed
* pointers to the visitor and delegates to base classes.
*
* \code
* class Mixin : public GarbageCollectedMixin {
* public:
* void Trace(cppgc::Visitor* visitor) const override {
* // Dispatch using visitor->Trace(...);
* }
* };
* \endcode
*/
class GarbageCollectedMixin {
 public:
  // Marker alias detected by cppgc's type traits (see type-traits.h) to
  // identify mixin types at compile time.
  using IsGarbageCollectedMixinTypeMarker = void;

  /**
   * This Trace method must be overridden by objects inheriting from
   * GarbageCollectedMixin.
   */
  virtual void Trace(cppgc::Visitor*) const {}
};
} // namespace cppgc
#endif // INCLUDE_CPPGC_GARBAGE_COLLECTED_H_

View File

@@ -0,0 +1,266 @@
// Copyright 2020 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef INCLUDE_CPPGC_HEAP_CONSISTENCY_H_
#define INCLUDE_CPPGC_HEAP_CONSISTENCY_H_
#include <cstddef>
#include "cppgc/internal/write-barrier.h"
#include "cppgc/macros.h"
#include "cppgc/trace-trait.h"
#include "v8config.h" // NOLINT(build/include_directory)
namespace cppgc {
class HeapHandle;
namespace subtle {
/**
* **DO NOT USE: Use the appropriate managed types.**
*
* Consistency helpers that aid in maintaining a consistent internal state of
* the garbage collector.
*/
class HeapConsistency final {
 public:
  using WriteBarrierParams = internal::WriteBarrier::Params;
  using WriteBarrierType = internal::WriteBarrier::Type;

  /**
   * Gets the required write barrier type for a specific write.
   *
   * \param slot Slot containing the pointer to the object. The slot itself
   * must reside in an object that has been allocated using
   * `MakeGarbageCollected()`.
   * \param value The pointer to the object. May be an interior pointer to an
   * interface of the actual object.
   * \param params Parameters that may be used for actual write barrier calls.
   * Only filled if return value indicates that a write barrier is needed. The
   * contents of the `params` are an implementation detail.
   * \returns whether a write barrier is needed and which barrier to invoke.
   */
  static V8_INLINE WriteBarrierType GetWriteBarrierType(
      const void* slot, const void* value, WriteBarrierParams& params) {
    return internal::WriteBarrier::GetWriteBarrierType(slot, value, params);
  }

  /**
   * Gets the required write barrier type for a specific write.
   *
   * \param slot Slot to some part of an object. The object must not necessarily
   have been allocated using `MakeGarbageCollected()` but can also live
   off-heap or on stack.
   * \param params Parameters that may be used for actual write barrier calls.
   * Only filled if return value indicates that a write barrier is needed. The
   * contents of the `params` are an implementation detail.
   * \param callback Callback returning the corresponding heap handle. The
   * callback is only invoked if the heap cannot otherwise be figured out. The
   * callback must not allocate.
   * \returns whether a write barrier is needed and which barrier to invoke.
   */
  template <typename HeapHandleCallback>
  static V8_INLINE WriteBarrierType
  GetWriteBarrierType(const void* slot, WriteBarrierParams& params,
                      HeapHandleCallback callback) {
    return internal::WriteBarrier::GetWriteBarrierType(slot, params, callback);
  }

  /**
   * Gets the required write barrier type for a specific write.
   * This version is meant to be used in conjunction with a marking write
   * barrier which doesn't consider the slot.
   *
   * \param value The pointer to the object. May be an interior pointer to an
   * interface of the actual object.
   * \param params Parameters that may be used for actual write barrier calls.
   * Only filled if return value indicates that a write barrier is needed. The
   * contents of the `params` are an implementation detail.
   * \returns whether a write barrier is needed and which barrier to invoke.
   */
  static V8_INLINE WriteBarrierType
  GetWriteBarrierType(const void* value, WriteBarrierParams& params) {
    return internal::WriteBarrier::GetWriteBarrierType(value, params);
  }

  /**
   * Conservative Dijkstra-style write barrier that processes an object if it
   * has not yet been processed.
   *
   * \param params The parameters retrieved from `GetWriteBarrierType()`.
   * \param object The pointer to the object. May be an interior pointer to
   * an interface of the actual object.
   */
  static V8_INLINE void DijkstraWriteBarrier(const WriteBarrierParams& params,
                                             const void* object) {
    internal::WriteBarrier::DijkstraMarkingBarrier(params, object);
  }

  /**
   * Conservative Dijkstra-style write barrier that processes a range of
   * elements if they have not yet been processed.
   *
   * \param params The parameters retrieved from `GetWriteBarrierType()`.
   * \param first_element Pointer to the first element that should be processed.
   * The slot itself must reside in an object that has been allocated using
   * `MakeGarbageCollected()`.
   * \param element_size Size of the element in bytes.
   * \param number_of_elements Number of elements that should be processed,
   * starting with `first_element`.
   * \param trace_callback The trace callback that should be invoked for each
   * element if necessary.
   */
  static V8_INLINE void DijkstraWriteBarrierRange(
      const WriteBarrierParams& params, const void* first_element,
      size_t element_size, size_t number_of_elements,
      TraceCallback trace_callback) {
    internal::WriteBarrier::DijkstraMarkingBarrierRange(
        params, first_element, element_size, number_of_elements,
        trace_callback);
  }

  /**
   * Steele-style write barrier that re-processes an object if it has already
   * been processed.
   *
   * \param params The parameters retrieved from `GetWriteBarrierType()`.
   * \param object The pointer to the object which must point to an object that
   * has been allocated using `MakeGarbageCollected()`. Interior pointers are
   * not supported.
   */
  static V8_INLINE void SteeleWriteBarrier(const WriteBarrierParams& params,
                                           const void* object) {
    internal::WriteBarrier::SteeleMarkingBarrier(params, object);
  }

  /**
   * Generational barrier for maintaining consistency when running with multiple
   * generations.
   *
   * \param params The parameters retrieved from `GetWriteBarrierType()`.
   * \param slot Slot containing the pointer to the object. The slot itself
   * must reside in an object that has been allocated using
   * `MakeGarbageCollected()`.
   */
  static V8_INLINE void GenerationalBarrier(const WriteBarrierParams& params,
                                            const void* slot) {
    internal::WriteBarrier::GenerationalBarrier(params, slot);
  }

  /**
   * Generational barrier for source object that may contain outgoing pointers
   * to objects in young generation.
   *
   * \param params The parameters retrieved from `GetWriteBarrierType()`.
   * \param inner_pointer Pointer to the source object.
   */
  static V8_INLINE void GenerationalBarrierForSourceObject(
      const WriteBarrierParams& params, const void* inner_pointer) {
    internal::WriteBarrier::GenerationalBarrierForSourceObject(params,
                                                               inner_pointer);
  }

 private:
  // Static-only helper class; not meant to be instantiated.
  HeapConsistency() = delete;
};
/**
* Disallows garbage collection finalizations. Any garbage collection triggers
* result in a crash when in this scope.
*
* Note that the garbage collector already covers paths that can lead to garbage
* collections, so user code does not require checking
* `IsGarbageCollectionAllowed()` before allocations.
*/
class V8_EXPORT V8_NODISCARD DisallowGarbageCollectionScope final {
  CPPGC_STACK_ALLOCATED();

 public:
  /**
   * \returns whether garbage collections are currently allowed.
   */
  static bool IsGarbageCollectionAllowed(HeapHandle& heap_handle);

  /**
   * Enters a disallow garbage collection scope. Must be paired with `Leave()`.
   * Prefer a scope instance of `DisallowGarbageCollectionScope`.
   *
   * \param heap_handle The corresponding heap.
   */
  static void Enter(HeapHandle& heap_handle);

  /**
   * Leaves a disallow garbage collection scope. Must be paired with `Enter()`.
   * Prefer a scope instance of `DisallowGarbageCollectionScope`.
   *
   * \param heap_handle The corresponding heap.
   */
  static void Leave(HeapHandle& heap_handle);

  /**
   * Constructs a scoped object that automatically enters and leaves a disallow
   * garbage collection scope based on its lifetime.
   *
   * \param heap_handle The corresponding heap.
   */
  explicit DisallowGarbageCollectionScope(HeapHandle& heap_handle);
  ~DisallowGarbageCollectionScope();

  DisallowGarbageCollectionScope(const DisallowGarbageCollectionScope&) =
      delete;
  DisallowGarbageCollectionScope& operator=(
      const DisallowGarbageCollectionScope&) = delete;

 private:
  // Heap the scope was entered for; used to leave the scope on destruction.
  HeapHandle& heap_handle_;
};
/**
* Avoids invoking garbage collection finalizations. Already running garbage
 * collection phases are unaffected by this scope.
*
* Should only be used temporarily as the scope has an impact on memory usage
* and follow up garbage collections.
*/
class V8_EXPORT V8_NODISCARD NoGarbageCollectionScope final {
  CPPGC_STACK_ALLOCATED();

 public:
  /**
   * Enters a no garbage collection scope. Must be paired with `Leave()`. Prefer
   * a scope instance of `NoGarbageCollectionScope`.
   *
   * \param heap_handle The corresponding heap.
   */
  static void Enter(HeapHandle& heap_handle);

  /**
   * Leaves a no garbage collection scope. Must be paired with `Enter()`. Prefer
   * a scope instance of `NoGarbageCollectionScope`.
   *
   * \param heap_handle The corresponding heap.
   */
  static void Leave(HeapHandle& heap_handle);

  /**
   * Constructs a scoped object that automatically enters and leaves a no
   * garbage collection scope based on its lifetime.
   *
   * \param heap_handle The corresponding heap.
   */
  explicit NoGarbageCollectionScope(HeapHandle& heap_handle);
  ~NoGarbageCollectionScope();

  NoGarbageCollectionScope(const NoGarbageCollectionScope&) = delete;
  NoGarbageCollectionScope& operator=(const NoGarbageCollectionScope&) = delete;

 private:
  // Heap the scope was entered for; used to leave the scope on destruction.
  HeapHandle& heap_handle_;
};
} // namespace subtle
} // namespace cppgc
#endif // INCLUDE_CPPGC_HEAP_CONSISTENCY_H_

View File

@@ -0,0 +1,82 @@
// Copyright 2021 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef INCLUDE_CPPGC_HEAP_STATE_H_
#define INCLUDE_CPPGC_HEAP_STATE_H_
#include "v8config.h" // NOLINT(build/include_directory)
namespace cppgc {
class HeapHandle;
namespace subtle {
/**
* Helpers to peek into heap-internal state.
*/
class V8_EXPORT HeapState final {
 public:
  /**
   * Returns whether the garbage collector is marking. This API is experimental
   * and is expected to be removed in future.
   *
   * \param heap_handle The corresponding heap.
   * \returns true if the garbage collector is currently marking, and false
   * otherwise.
   */
  static bool IsMarking(const HeapHandle& heap_handle);

  /**
   * Returns whether the garbage collector is sweeping. This API is experimental
   * and is expected to be removed in future.
   *
   * \param heap_handle The corresponding heap.
   * \returns true if the garbage collector is currently sweeping, and false
   * otherwise.
   */
  static bool IsSweeping(const HeapHandle& heap_handle);

  /**
   * Returns whether the garbage collector is currently sweeping on the thread
   * owning this heap. This API allows the caller to determine whether it has
   * been called from a destructor of a managed object. This API is experimental
   * and may be removed in future.
   *
   * \param heap_handle The corresponding heap.
   * \returns true if the garbage collector is currently sweeping on this
   * thread, and false otherwise.
   */
  static bool IsSweepingOnOwningThread(const HeapHandle& heap_handle);

  /**
   * Returns whether the garbage collector is in the atomic pause, i.e., the
   * mutator is stopped from running. This API is experimental and is expected
   * to be removed in future.
   *
   * \param heap_handle The corresponding heap.
   * \returns true if the garbage collector is currently in the atomic pause,
   * and false otherwise.
   */
  static bool IsInAtomicPause(const HeapHandle& heap_handle);

  /**
   * Returns whether the last garbage collection was finalized conservatively
   * (i.e., with a non-empty stack). This API is experimental and is expected to
   * be removed in future.
   *
   * \param heap_handle The corresponding heap.
   * \returns true if the last garbage collection was finalized conservatively,
   * and false otherwise.
   */
  static bool PreviousGCWasConservative(const HeapHandle& heap_handle);

 private:
  // Static-only helper class; not meant to be instantiated.
  HeapState() = delete;
};
} // namespace subtle
} // namespace cppgc
#endif // INCLUDE_CPPGC_HEAP_STATE_H_

View File

@@ -0,0 +1,120 @@
// Copyright 2021 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef INCLUDE_CPPGC_HEAP_STATISTICS_H_
#define INCLUDE_CPPGC_HEAP_STATISTICS_H_
#include <cstddef>
#include <cstdint>
#include <string>
#include <vector>
namespace cppgc {
/**
* `HeapStatistics` contains memory consumption and utilization statistics for a
* cppgc heap.
*/
struct HeapStatistics final {
  /**
   * Specifies the detail level of the heap statistics. Brief statistics contain
   * only the top-level allocated and used memory statistics for the entire
   * heap. Detailed statistics also contain a break down per space and page, as
   * well as freelist statistics and object type histograms. Note that used
   * memory reported by brief statistics and detailed statistics might differ
   * slightly.
   */
  enum DetailLevel : uint8_t {
    kBrief,
    kDetailed,
  };

  /**
   * Object statistics for a single type.
   */
  struct ObjectStatsEntry {
    /**
     * Number of allocated bytes.
     */
    size_t allocated_bytes;
    /**
     * Number of allocated objects.
     */
    size_t object_count;
  };

  /**
   * Page granularity statistics. For each page the statistics record the
   * allocated memory size and overall used memory size for the page.
   */
  struct PageStatistics {
    /** Overall committed amount of memory for the page. */
    size_t committed_size_bytes = 0;
    /** Resident amount of memory held by the page. */
    size_t resident_size_bytes = 0;
    /** Amount of memory actually used on the page. */
    size_t used_size_bytes = 0;
    /** Statistics for objects allocated on the page. Filled only when
     * NameProvider::HideInternalNames() is false. */
    std::vector<ObjectStatsEntry> object_statistics;
  };

  /**
   * Statistics of the freelist (used only in non-large object spaces). For
   * each bucket in the freelist the statistics record the bucket size, the
   * number of freelist entries in the bucket, and the overall allocated memory
   * consumed by these freelist entries.
   */
  struct FreeListStatistics {
    /** Bucket sizes in the freelist. */
    std::vector<size_t> bucket_size;
    /** Number of freelist entries per bucket. */
    std::vector<size_t> free_count;
    /** Memory size consumed by freelist entries per size. */
    std::vector<size_t> free_size;
  };

  /**
   * Space granularity statistics. For each space the statistics record the
   * space name, the amount of allocated memory and overall used memory for the
   * space. The statistics also contain statistics for each of the space's
   * pages, its freelist and the objects allocated on the space.
   */
  struct SpaceStatistics {
    /** The space name. */
    std::string name;
    /** Overall committed amount of memory for the space. */
    size_t committed_size_bytes = 0;
    /** Resident amount of memory held by the space. */
    size_t resident_size_bytes = 0;
    /** Amount of memory actually used on the space. */
    size_t used_size_bytes = 0;
    /** Statistics for each of the pages in the space. */
    std::vector<PageStatistics> page_stats;
    /** Statistics for the freelist of the space. */
    FreeListStatistics free_list_stats;
  };

  /** Overall committed amount of memory for the heap. */
  size_t committed_size_bytes = 0;
  /** Resident amount of memory held by the heap. */
  size_t resident_size_bytes = 0;
  /** Amount of memory actually used on the heap. */
  size_t used_size_bytes = 0;
  /** Detail level of this HeapStatistics. */
  DetailLevel detail_level;

  /** Statistics for each of the spaces in the heap. Filled only when
   * `detail_level` is `DetailLevel::kDetailed`. */
  std::vector<SpaceStatistics> space_stats;

  /**
   * Vector of `cppgc::GarbageCollected` type names.
   */
  std::vector<std::string> type_names;
};
} // namespace cppgc
#endif // INCLUDE_CPPGC_HEAP_STATISTICS_H_

View File

@@ -0,0 +1,206 @@
// Copyright 2020 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef INCLUDE_CPPGC_HEAP_H_
#define INCLUDE_CPPGC_HEAP_H_
#include <cstddef>
#include <cstdint>
#include <memory>
#include <vector>
#include "cppgc/common.h"
#include "cppgc/custom-space.h"
#include "cppgc/platform.h"
#include "v8config.h" // NOLINT(build/include_directory)
/**
* cppgc - A C++ garbage collection library.
*/
namespace cppgc {
class AllocationHandle;
/**
* Implementation details of cppgc. Those details are considered internal and
* may change at any point in time without notice. Users should never rely on
* the contents of this namespace.
*/
namespace internal {
class Heap;
} // namespace internal
/**
* Used for additional heap APIs.
*/
class HeapHandle;
class V8_EXPORT Heap {
 public:
  /**
   * Specifies the stack state the embedder is in.
   */
  using StackState = EmbedderStackState;

  /**
   * Specifies whether conservative stack scanning is supported.
   */
  enum class StackSupport : uint8_t {
    /**
     * Conservative stack scan is supported.
     */
    kSupportsConservativeStackScan,
    /**
     * Conservative stack scan is not supported. Embedders may use this option
     * when using custom infrastructure that is unsupported by the library.
     */
    kNoConservativeStackScan,
  };

  /**
   * Specifies supported marking types.
   */
  enum class MarkingType : uint8_t {
    /**
     * Atomic stop-the-world marking. This option does not require any write
     * barriers but is the most intrusive in terms of jank.
     */
    kAtomic,
    /**
     * Incremental marking interleaves marking with the rest of the application
     * workload on the same thread.
     */
    kIncremental,
    /**
     * Incremental and concurrent marking.
     */
    kIncrementalAndConcurrent
  };

  /**
   * Specifies supported sweeping types.
   */
  enum class SweepingType : uint8_t {
    /**
     * Atomic stop-the-world sweeping. All of sweeping is performed at once.
     */
    kAtomic,
    /**
     * Incremental sweeping interleaves sweeping with the rest of the
     * application workload on the same thread.
     */
    kIncremental,
    /**
     * Incremental and concurrent sweeping. Sweeping is split and interleaved
     * with the rest of the application.
     */
    kIncrementalAndConcurrent
  };

  /**
   * Constraints for a Heap setup.
   */
  struct ResourceConstraints {
    /**
     * Allows the heap to grow to some initial size in bytes before triggering
     * garbage collections. This is useful when it is known that applications
     * need a certain minimum heap to run to avoid repeatedly invoking the
     * garbage collector when growing the heap.
     */
    size_t initial_heap_size_bytes = 0;
  };

  /**
   * Options specifying Heap properties (e.g. custom spaces) when initializing a
   * heap through `Heap::Create()`.
   */
  struct HeapOptions {
    /**
     * Creates reasonable defaults for instantiating a Heap.
     *
     * \returns the HeapOptions that can be passed to `Heap::Create()`.
     */
    static HeapOptions Default() { return {}; }

    /**
     * Custom spaces added to heap are required to have indices forming a
     * numbered sequence starting at 0, i.e., their `kSpaceIndex` must
     * correspond to the index they reside in the vector.
     */
    std::vector<std::unique_ptr<CustomSpaceBase>> custom_spaces;

    /**
     * Specifies whether conservative stack scan is supported. When conservative
     * stack scan is not supported, the collector may try to invoke
     * garbage collections using non-nestable tasks, which are guaranteed to
     * have no interesting stack, through the provided Platform. If such tasks
     * are not supported by the Platform, the embedder must take care of
     * invoking the GC through `ForceGarbageCollectionSlow()`.
     */
    StackSupport stack_support = StackSupport::kSupportsConservativeStackScan;

    /**
     * Specifies which types of marking are supported by the heap.
     */
    MarkingType marking_support = MarkingType::kIncrementalAndConcurrent;

    /**
     * Specifies which types of sweeping are supported by the heap.
     */
    SweepingType sweeping_support = SweepingType::kIncrementalAndConcurrent;

    /**
     * Resource constraints specifying various properties that the internal
     * GC scheduler follows.
     */
    ResourceConstraints resource_constraints;
  };

  /**
   * Creates a new heap that can be used for object allocation.
   *
   * \param platform implemented and provided by the embedder.
   * \param options HeapOptions specifying various properties for the Heap.
   * \returns a new Heap instance.
   */
  static std::unique_ptr<Heap> Create(
      std::shared_ptr<Platform> platform,
      HeapOptions options = HeapOptions::Default());

  virtual ~Heap() = default;

  /**
   * Forces garbage collection.
   *
   * \param source String specifying the source (or caller) triggering a
   * forced garbage collection.
   * \param reason String specifying the reason for the forced garbage
   * collection.
   * \param stack_state The embedder stack state, see StackState.
   */
  void ForceGarbageCollectionSlow(
      const char* source, const char* reason,
      StackState stack_state = StackState::kMayContainHeapPointers);

  /**
   * \returns the opaque handle for allocating objects using
   * `MakeGarbageCollected()`.
   */
  AllocationHandle& GetAllocationHandle();

  /**
   * \returns the opaque heap handle which may be used to refer to this heap in
   * other APIs. Valid as long as the underlying `Heap` is alive.
   */
  HeapHandle& GetHeapHandle();

 private:
  // Only the internal heap implementation may construct/derive from this API
  // surface.
  Heap() = default;

  friend class internal::Heap;
};
} // namespace cppgc
#endif // INCLUDE_CPPGC_HEAP_H_

View File

@@ -0,0 +1,77 @@
// Copyright 2020 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef INCLUDE_CPPGC_LIVENESS_BROKER_H_
#define INCLUDE_CPPGC_LIVENESS_BROKER_H_
#include "cppgc/heap.h"
#include "cppgc/member.h"
#include "cppgc/trace-trait.h"
#include "v8config.h" // NOLINT(build/include_directory)
namespace cppgc {
namespace internal {
class LivenessBrokerFactory;
} // namespace internal
/**
* The broker is passed to weak callbacks to allow (temporarily) querying
* the liveness state of an object. References to non-live objects must be
* cleared when `IsHeapObjectAlive()` returns false.
*
* \code
* class GCedWithCustomWeakCallback final
* : public GarbageCollected<GCedWithCustomWeakCallback> {
* public:
* UntracedMember<Bar> bar;
*
* void CustomWeakCallbackMethod(const LivenessBroker& broker) {
* if (!broker.IsHeapObjectAlive(bar))
* bar = nullptr;
* }
*
* void Trace(cppgc::Visitor* visitor) const {
* visitor->RegisterWeakCallbackMethod<
* GCedWithCustomWeakCallback,
* &GCedWithCustomWeakCallback::CustomWeakCallbackMethod>(this);
* }
* };
* \endcode
*/
class V8_EXPORT LivenessBroker final {
 public:
  template <typename T>
  bool IsHeapObjectAlive(const T* object) const {
    // nullptr objects are considered alive to allow weakness to be used from
    // stack while running into a conservative GC. Treating nullptr as dead
    // would mean that e.g. custom collections could not be strongified on
    // stack.
    return !object ||
           IsHeapObjectAliveImpl(
               TraceTrait<T>::GetTraceDescriptor(object).base_object_payload);
  }

  template <typename T>
  bool IsHeapObjectAlive(const WeakMember<T>& weak_member) const {
    // A member holding kSentinelPointer is reported as dead.
    return (weak_member != kSentinelPointer) &&
           IsHeapObjectAlive<T>(weak_member.Get());
  }

  template <typename T>
  bool IsHeapObjectAlive(const UntracedMember<T>& untraced_member) const {
    // A member holding kSentinelPointer is reported as dead.
    return (untraced_member != kSentinelPointer) &&
           IsHeapObjectAlive<T>(untraced_member.Get());
  }

 private:
  // Only constructible by the internal factory; handed to weak callbacks.
  LivenessBroker() = default;

  bool IsHeapObjectAliveImpl(const void*) const;

  friend class internal::LivenessBrokerFactory;
};
} // namespace cppgc
#endif // INCLUDE_CPPGC_LIVENESS_BROKER_H_

View File

@@ -0,0 +1,26 @@
// Copyright 2020 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef INCLUDE_CPPGC_MACROS_H_
#define INCLUDE_CPPGC_MACROS_H_
#include <cstddef>
#include "cppgc/internal/compiler-specific.h"
namespace cppgc {
// Use if the object is only stack allocated.
// Expanding CPPGC_STACK_ALLOCATED() in a class body injects a marker alias
// (recognized by cppgc's type traits) and deletes regular and placement
// `operator new`, so instances cannot be heap allocated. The trailing
// static_assert forces a semicolon at the use site. Note that the macro
// leaves the class in a `private:` access section.
#define CPPGC_STACK_ALLOCATED()                        \
 public:                                               \
  using IsStackAllocatedTypeMarker CPPGC_UNUSED = int; \
                                                       \
 private:                                              \
  void* operator new(size_t) = delete;                 \
  void* operator new(size_t, void*) = delete;          \
  static_assert(true, "Force semicolon.")
} // namespace cppgc
#endif // INCLUDE_CPPGC_MACROS_H_

View File

@@ -0,0 +1,291 @@
// Copyright 2020 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef INCLUDE_CPPGC_MEMBER_H_
#define INCLUDE_CPPGC_MEMBER_H_
#include <atomic>
#include <cstddef>
#include <type_traits>
#include "cppgc/internal/pointer-policies.h"
#include "cppgc/sentinel-pointer.h"
#include "cppgc/type-traits.h"
#include "v8config.h" // NOLINT(build/include_directory)
namespace cppgc {
class Visitor;
namespace internal {
// MemberBase always refers to the object as const object and defers to
// BasicMember on casting to the right type as needed.
class MemberBase {
 protected:
  // Tag selecting constructor overloads that initialize `raw_` with an atomic
  // store instead of a plain write (see BasicMember's atomic constructors).
  struct AtomicInitializerTag {};

  MemberBase() : raw_(nullptr) {}
  explicit MemberBase(const void* value) : raw_(value) {}
  MemberBase(const void* value, AtomicInitializerTag) { SetRawAtomic(value); }

  // Address of the slot itself, e.g. for write-barrier bookkeeping.
  const void** GetRawSlot() const { return &raw_; }
  const void* GetRaw() const { return raw_; }
  void SetRaw(void* value) { raw_ = value; }

  // Relaxed atomic accessors for reads/writes that may race with concurrent
  // marking (see the atomic-constructor comment in BasicMember).
  const void* GetRawAtomic() const {
    return reinterpret_cast<const std::atomic<const void*>*>(&raw_)->load(
        std::memory_order_relaxed);
  }
  void SetRawAtomic(const void* value) {
    reinterpret_cast<std::atomic<const void*>*>(&raw_)->store(
        value, std::memory_order_relaxed);
  }

  // Invoked by the GC to clear a slot; const so it can be applied to
  // logically-const members (enabled by `mutable raw_` below).
  void ClearFromGC() const { raw_ = nullptr; }

 private:
  // All constructors initialize `raw_`. Do not add a default value here as it
  // results in a non-atomic write on some builds, even when the atomic version
  // of the constructor is used.
  mutable const void* raw_;
};
// The basic class from which all Member classes are 'generated'.
template <typename T, typename WeaknessTag, typename WriteBarrierPolicy,
typename CheckingPolicy>
class BasicMember final : private MemberBase, private CheckingPolicy {
public:
using PointeeType = T;
constexpr BasicMember() = default;
constexpr BasicMember(std::nullptr_t) {} // NOLINT
BasicMember(SentinelPointer s) : MemberBase(s) {} // NOLINT
BasicMember(T* raw) : MemberBase(raw) { // NOLINT
InitializingWriteBarrier();
this->CheckPointer(Get());
}
BasicMember(T& raw) : BasicMember(&raw) {} // NOLINT
// Atomic ctor. Using the AtomicInitializerTag forces BasicMember to
// initialize using atomic assignments. This is required for preventing
// data races with concurrent marking.
using AtomicInitializerTag = MemberBase::AtomicInitializerTag;
BasicMember(std::nullptr_t, AtomicInitializerTag atomic)
: MemberBase(nullptr, atomic) {}
BasicMember(SentinelPointer s, AtomicInitializerTag atomic)
: MemberBase(s, atomic) {}
BasicMember(T* raw, AtomicInitializerTag atomic) : MemberBase(raw, atomic) {
InitializingWriteBarrier();
this->CheckPointer(Get());
}
BasicMember(T& raw, AtomicInitializerTag atomic)
: BasicMember(&raw, atomic) {}
// Copy ctor.
BasicMember(const BasicMember& other) : BasicMember(other.Get()) {}
// Allow heterogeneous construction.
template <typename U, typename OtherBarrierPolicy, typename OtherWeaknessTag,
typename OtherCheckingPolicy,
typename = std::enable_if_t<std::is_base_of<T, U>::value>>
BasicMember( // NOLINT
const BasicMember<U, OtherWeaknessTag, OtherBarrierPolicy,
OtherCheckingPolicy>& other)
: BasicMember(other.Get()) {}
// Move ctor.
BasicMember(BasicMember&& other) noexcept : BasicMember(other.Get()) {
other.Clear();
}
// Allow heterogeneous move construction.
template <typename U, typename OtherBarrierPolicy, typename OtherWeaknessTag,
typename OtherCheckingPolicy,
typename = std::enable_if_t<std::is_base_of<T, U>::value>>
BasicMember(BasicMember<U, OtherWeaknessTag, OtherBarrierPolicy,
OtherCheckingPolicy>&& other) noexcept
: BasicMember(other.Get()) {
other.Clear();
}
// Construction from Persistent.
template <typename U, typename PersistentWeaknessPolicy,
typename PersistentLocationPolicy,
typename PersistentCheckingPolicy,
typename = std::enable_if_t<std::is_base_of<T, U>::value>>
BasicMember(const BasicPersistent<U, PersistentWeaknessPolicy,
PersistentLocationPolicy,
PersistentCheckingPolicy>& p)
: BasicMember(p.Get()) {}
// Copy assignment. All assignment paths funnel into operator=(T*), which
// performs the atomic store and the assigning write barrier.
BasicMember& operator=(const BasicMember& other) {
return operator=(other.Get());
}
// Allow heterogeneous copy assignment.
template <typename U, typename OtherWeaknessTag, typename OtherBarrierPolicy,
typename OtherCheckingPolicy,
typename = std::enable_if_t<std::is_base_of<T, U>::value>>
BasicMember& operator=(
const BasicMember<U, OtherWeaknessTag, OtherBarrierPolicy,
OtherCheckingPolicy>& other) {
return operator=(other.Get());
}
// Move assignment. Clears the moved-from member after the pointer is taken.
BasicMember& operator=(BasicMember&& other) noexcept {
operator=(other.Get());
other.Clear();
return *this;
}
// Heterogeneous move assignment.
template <typename U, typename OtherWeaknessTag, typename OtherBarrierPolicy,
typename OtherCheckingPolicy,
typename = std::enable_if_t<std::is_base_of<T, U>::value>>
BasicMember& operator=(BasicMember<U, OtherWeaknessTag, OtherBarrierPolicy,
OtherCheckingPolicy>&& other) noexcept {
operator=(other.Get());
other.Clear();
return *this;
}
// Assignment from Persistent.
template <typename U, typename PersistentWeaknessPolicy,
typename PersistentLocationPolicy,
typename PersistentCheckingPolicy,
typename = std::enable_if_t<std::is_base_of<T, U>::value>>
BasicMember& operator=(
const BasicPersistent<U, PersistentWeaknessPolicy,
PersistentLocationPolicy, PersistentCheckingPolicy>&
other) {
return operator=(other.Get());
}
// Assignment from a raw pointer: atomic store (the slot may be read by a
// concurrent marker), then the assigning write barrier and pointer check.
BasicMember& operator=(T* other) {
SetRawAtomic(other);
AssigningWriteBarrier();
this->CheckPointer(Get());
return *this;
}
// Null assignment; equivalent to Clear().
BasicMember& operator=(std::nullptr_t) {
Clear();
return *this;
}
// Sentinel assignment. No write barrier: the sentinel is a tag value, not a
// traceable heap object (see sentinel-pointer.h).
BasicMember& operator=(SentinelPointer s) {
SetRawAtomic(s);
return *this;
}
// Swaps pointees with `other`, which may use different barrier/checking
// policies. Implemented via assignment so each store goes through the
// appropriate write barrier.
template <typename OtherWeaknessTag, typename OtherBarrierPolicy,
typename OtherCheckingPolicy>
void Swap(BasicMember<T, OtherWeaknessTag, OtherBarrierPolicy,
OtherCheckingPolicy>& other) {
T* tmp = Get();
*this = other;
other = tmp;
}
// Smart-pointer-like accessors; all read through Get().
explicit operator bool() const { return Get(); }
operator T*() const { return Get(); }
T* operator->() const { return Get(); }
T& operator*() const { return *Get(); }
// CFI cast exemption to allow passing SentinelPointer through T* and support
// heterogeneous assignments between different Member and Persistent handles
// based on their actual types.
V8_CLANG_NO_SANITIZE("cfi-unrelated-cast") T* Get() const {
// Executed by the mutator, hence non atomic load.
//
// The const_cast below removes the constness from MemberBase storage. The
// following static_cast re-adds any constness if specified through the
// user-visible template parameter T.
return static_cast<T*>(const_cast<void*>(MemberBase::GetRaw()));
}
// Resets the member to null via an atomic store.
void Clear() { SetRawAtomic(nullptr); }
// Returns the current pointee and clears the member.
T* Release() {
T* result = Get();
Clear();
return result;
}
// Test-only access to the underlying slot.
const T** GetSlotForTesting() const {
return reinterpret_cast<const T**>(GetRawSlot());
}
private:
// Atomic read of the pointee, used when racing with concurrent marking.
const T* GetRawAtomic() const {
return static_cast<const T*>(MemberBase::GetRawAtomic());
}
// Barrier emitted when a slot is first initialized.
void InitializingWriteBarrier() const {
WriteBarrierPolicy::InitializingBarrier(GetRawSlot(), GetRaw());
}
// Barrier emitted when an existing slot is overwritten.
void AssigningWriteBarrier() const {
WriteBarrierPolicy::AssigningBarrier(GetRawSlot(), GetRaw());
}
// GC-internal clear that bypasses barriers; only the GC may call this.
void ClearFromGC() const { MemberBase::ClearFromGC(); }
T* GetFromGC() const { return Get(); }
friend class cppgc::Visitor;
template <typename U>
friend struct cppgc::TraceTrait;
};
// Equality over raw pointee values. Weakness tags, barrier policies, and
// checking policies intentionally do not participate in the comparison.
template <typename T1, typename WeaknessTag1, typename WriteBarrierPolicy1,
typename CheckingPolicy1, typename T2, typename WeaknessTag2,
typename WriteBarrierPolicy2, typename CheckingPolicy2>
bool operator==(const BasicMember<T1, WeaknessTag1, WriteBarrierPolicy1,
CheckingPolicy1>& member1,
const BasicMember<T2, WeaknessTag2, WriteBarrierPolicy2,
CheckingPolicy2>& member2) {
return member1.Get() == member2.Get();
}
// Negation of operator== above.
template <typename T1, typename WeaknessTag1, typename WriteBarrierPolicy1,
typename CheckingPolicy1, typename T2, typename WeaknessTag2,
typename WriteBarrierPolicy2, typename CheckingPolicy2>
bool operator!=(const BasicMember<T1, WeaknessTag1, WriteBarrierPolicy1,
CheckingPolicy1>& member1,
const BasicMember<T2, WeaknessTag2, WriteBarrierPolicy2,
CheckingPolicy2>& member2) {
return !(member1 == member2);
}
// Marks WeakMember instantiations as weak for trait-based dispatch.
template <typename T, typename WriteBarrierPolicy, typename CheckingPolicy>
struct IsWeak<
internal::BasicMember<T, WeakMemberTag, WriteBarrierPolicy, CheckingPolicy>>
: std::true_type {};
} // namespace internal
/**
* Members are used in classes to contain strong pointers to other garbage
* collected objects. All Member fields of a class must be traced in the
* class' trace method.
*/
template <typename T>
using Member = internal::BasicMember<T, internal::StrongMemberTag,
internal::DijkstraWriteBarrierPolicy>;
/**
* WeakMember is similar to Member in that it is used to point to other garbage
* collected objects. However instead of creating a strong pointer to the
* object, the WeakMember creates a weak pointer, which does not keep the
* pointee alive. Hence, if all pointers to a heap allocated object are weak,
* the object will be garbage collected. At the time of GC the weak pointers
* will automatically be set to null.
*/
template <typename T>
using WeakMember = internal::BasicMember<T, internal::WeakMemberTag,
internal::DijkstraWriteBarrierPolicy>;
/**
* UntracedMember is a pointer to an on-heap object that is not traced for some
* reason. Do not use this unless you know what you are doing. Keeping raw
* pointers to on-heap objects is prohibited unless used from stack. Pointee
* must be kept alive through other means.
*/
template <typename T>
using UntracedMember = internal::BasicMember<T, internal::UntracedMemberTag,
internal::NoWriteBarrierPolicy>;
} // namespace cppgc
#endif // INCLUDE_CPPGC_MEMBER_H_

View File

@@ -0,0 +1,65 @@
// Copyright 2020 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef INCLUDE_CPPGC_NAME_PROVIDER_H_
#define INCLUDE_CPPGC_NAME_PROVIDER_H_
#include "v8config.h" // NOLINT(build/include_directory)
namespace cppgc {
/**
* NameProvider allows for providing a human-readable name for garbage-collected
* objects.
*
* There are two cases of names to distinguish:
* a. Explicitly specified names via using NameProvider. Such names are always
* preserved in the system.
* b. Internal names that Oilpan infers from a C++ type on the class hierarchy
* of the object. This is not necessarily the type of the actually
* instantiated object.
*
* Depending on the build configuration, Oilpan may hide names of case b.,
* i.e., represent them with kHiddenName, to avoid exposing internal details.
*/
class V8_EXPORT NameProvider {
public:
/**
* Name that is used when hiding internals.
*/
static constexpr const char kHiddenName[] = "InternalNode";
/**
* Name that is used in case compiler support is missing for composing a name
* from C++ types.
*/
static constexpr const char kNoNameDeducible[] = "<No name>";
/**
* Indicating whether internal names are hidden or not.
*
* @returns true if C++ names should be hidden and represented by kHiddenName.
*/
static constexpr bool HideInternalNames() {
#if CPPGC_SUPPORTS_OBJECT_NAMES
return false;
#else // !CPPGC_SUPPORTS_OBJECT_NAMES
return true;
#endif // !CPPGC_SUPPORTS_OBJECT_NAMES
}
virtual ~NameProvider() = default;
/**
* Specifies a name for the garbage-collected object. Such names will never
* be hidden, as they are explicitly specified by the user of this API.
*
* @returns a human readable name for the object.
*/
virtual const char* GetHumanReadableName() const = 0;
};
} // namespace cppgc
#endif // INCLUDE_CPPGC_NAME_PROVIDER_H_

View File

@@ -0,0 +1,58 @@
// Copyright 2021 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef INCLUDE_CPPGC_OBJECT_SIZE_TRAIT_H_
#define INCLUDE_CPPGC_OBJECT_SIZE_TRAIT_H_
#include <cstddef>
#include "cppgc/type-traits.h"
#include "v8config.h" // NOLINT(build/include_directory)
namespace cppgc {
namespace internal {
// Non-template base providing the out-of-line size queries shared by all
// ObjectSizeTrait instantiations.
struct V8_EXPORT BaseObjectSizeTrait {
protected:
static size_t GetObjectSizeForGarbageCollected(const void*);
static size_t GetObjectSizeForGarbageCollectedMixin(const void*);
};
} // namespace internal
namespace subtle {
/**
* Trait specifying how to get the size of an object that was allocated using
* `MakeGarbageCollected()`. Also supports querying the size with an inner
* pointer to a mixin.
*/
template <typename T, bool = IsGarbageCollectedMixinTypeV<T>>
struct ObjectSizeTrait;
// Specialization for regular GarbageCollected types (non-mixin).
template <typename T>
struct ObjectSizeTrait<T, false> : cppgc::internal::BaseObjectSizeTrait {
static_assert(sizeof(T), "T must be fully defined");
static_assert(IsGarbageCollectedTypeV<T>,
"T must be of type GarbageCollected or GarbageCollectedMixin");
static size_t GetSize(const T& object) {
return GetObjectSizeForGarbageCollected(&object);
}
};
// Specialization for GarbageCollectedMixin types, where `&object` may be an
// inner pointer into the allocated object.
template <typename T>
struct ObjectSizeTrait<T, true> : cppgc::internal::BaseObjectSizeTrait {
static_assert(sizeof(T), "T must be fully defined");
static size_t GetSize(const T& object) {
return GetObjectSizeForGarbageCollectedMixin(&object);
}
};
} // namespace subtle
} // namespace cppgc
#endif // INCLUDE_CPPGC_OBJECT_SIZE_TRAIT_H_

View File

@@ -0,0 +1,370 @@
// Copyright 2020 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef INCLUDE_CPPGC_PERSISTENT_H_
#define INCLUDE_CPPGC_PERSISTENT_H_
#include <type_traits>
#include "cppgc/internal/persistent-node.h"
#include "cppgc/internal/pointer-policies.h"
#include "cppgc/sentinel-pointer.h"
#include "cppgc/source-location.h"
#include "cppgc/type-traits.h"
#include "cppgc/visitor.h"
#include "v8config.h" // NOLINT(build/include_directory)
namespace cppgc {
class Visitor;
namespace internal {
// PersistentBase always refers to the object as const object and defers to
// BasicPersistent on casting to the right type as needed.
class PersistentBase {
protected:
PersistentBase() = default;
explicit PersistentBase(const void* raw) : raw_(raw) {}
// Raw pointee access; BasicPersistent casts back to the user-visible type.
const void* GetValue() const { return raw_; }
void SetValue(const void* value) { raw_ = value; }
// Node in the persistent region that keeps this handle registered as a root.
PersistentNode* GetNode() const { return node_; }
void SetNode(PersistentNode* node) { node_ = node; }
// Performs a shallow clear which assumes that internal persistent nodes are
// destroyed elsewhere.
void ClearFromGC() const {
raw_ = nullptr;
node_ = nullptr;
}
protected:
// mutable so the GC can clear through const handles (see ClearFromGC()).
mutable const void* raw_ = nullptr;
mutable PersistentNode* node_ = nullptr;
friend class PersistentRegionBase;
};
// The basic class from which all Persistent classes are generated.
// A BasicPersistent is a GC root: while IsValid(), it owns a PersistentNode in
// the region selected by WeaknessPolicy, and that node traces the pointee.
template <typename T, typename WeaknessPolicy, typename LocationPolicy,
typename CheckingPolicy>
class BasicPersistent final : public PersistentBase,
public LocationPolicy,
private WeaknessPolicy,
private CheckingPolicy {
public:
using typename WeaknessPolicy::IsStrongPersistent;
using PointeeType = T;
// Null-state/sentinel constructors. No node is allocated for these states.
BasicPersistent( // NOLINT
const SourceLocation& loc = SourceLocation::Current())
: LocationPolicy(loc) {}
BasicPersistent(std::nullptr_t, // NOLINT
const SourceLocation& loc = SourceLocation::Current())
: LocationPolicy(loc) {}
BasicPersistent( // NOLINT
SentinelPointer s, const SourceLocation& loc = SourceLocation::Current())
: PersistentBase(s), LocationPolicy(loc) {}
// Raw value constructors. Allocates a node (registering the root) only for
// valid, non-sentinel pointers.
BasicPersistent(T* raw, // NOLINT
const SourceLocation& loc = SourceLocation::Current())
: PersistentBase(raw), LocationPolicy(loc) {
if (!IsValid()) return;
SetNode(WeaknessPolicy::GetPersistentRegion(GetValue())
.AllocateNode(this, &BasicPersistent::Trace));
this->CheckPointer(Get());
}
BasicPersistent(T& raw, // NOLINT
const SourceLocation& loc = SourceLocation::Current())
: BasicPersistent(&raw, loc) {}
// Copy ctor. Allocates a fresh node for this handle.
BasicPersistent(const BasicPersistent& other,
const SourceLocation& loc = SourceLocation::Current())
: BasicPersistent(other.Get(), loc) {}
// Heterogeneous ctor.
template <typename U, typename OtherWeaknessPolicy,
typename OtherLocationPolicy, typename OtherCheckingPolicy,
typename = std::enable_if_t<std::is_base_of<T, U>::value>>
BasicPersistent(
const BasicPersistent<U, OtherWeaknessPolicy, OtherLocationPolicy,
OtherCheckingPolicy>& other,
const SourceLocation& loc = SourceLocation::Current())
: BasicPersistent(other.Get(), loc) {}
// Move ctor. The heterogeneous move ctor is not supported since e.g.
// persistent can't reuse persistent node from weak persistent.
BasicPersistent(
BasicPersistent&& other,
const SourceLocation& loc = SourceLocation::Current()) noexcept
: PersistentBase(std::move(other)), LocationPolicy(std::move(other)) {
if (!IsValid()) return;
// The node is taken over from `other`; repoint it at this handle.
GetNode()->UpdateOwner(this);
other.SetValue(nullptr);
other.SetNode(nullptr);
this->CheckPointer(Get());
}
// Constructor from member.
template <typename U, typename MemberBarrierPolicy,
typename MemberWeaknessTag, typename MemberCheckingPolicy,
typename = std::enable_if_t<std::is_base_of<T, U>::value>>
BasicPersistent(
const internal::BasicMember<U, MemberBarrierPolicy, MemberWeaknessTag,
MemberCheckingPolicy>& member,
const SourceLocation& loc = SourceLocation::Current())
: BasicPersistent(member.Get(), loc) {}
// Frees the node, unregistering the root.
~BasicPersistent() { Clear(); }
// Copy assignment. All assignment paths funnel into Assign() via
// operator=(T*).
BasicPersistent& operator=(const BasicPersistent& other) {
return operator=(other.Get());
}
template <typename U, typename OtherWeaknessPolicy,
typename OtherLocationPolicy, typename OtherCheckingPolicy,
typename = std::enable_if_t<std::is_base_of<T, U>::value>>
BasicPersistent& operator=(
const BasicPersistent<U, OtherWeaknessPolicy, OtherLocationPolicy,
OtherCheckingPolicy>& other) {
return operator=(other.Get());
}
// Move assignment. Frees this handle's node, then takes over `other`'s.
BasicPersistent& operator=(BasicPersistent&& other) noexcept {
if (this == &other) return *this;
Clear();
PersistentBase::operator=(std::move(other));
LocationPolicy::operator=(std::move(other));
if (!IsValid()) return *this;
GetNode()->UpdateOwner(this);
other.SetValue(nullptr);
other.SetNode(nullptr);
this->CheckPointer(Get());
return *this;
}
// Assignment from member.
template <typename U, typename MemberBarrierPolicy,
typename MemberWeaknessTag, typename MemberCheckingPolicy,
typename = std::enable_if_t<std::is_base_of<T, U>::value>>
BasicPersistent& operator=(
const internal::BasicMember<U, MemberBarrierPolicy, MemberWeaknessTag,
MemberCheckingPolicy>& member) {
return operator=(member.Get());
}
BasicPersistent& operator=(T* other) {
Assign(other);
return *this;
}
BasicPersistent& operator=(std::nullptr_t) {
Clear();
return *this;
}
BasicPersistent& operator=(SentinelPointer s) {
Assign(s);
return *this;
}
// Smart-pointer-like accessors.
explicit operator bool() const { return Get(); }
operator T*() const { return Get(); }
T* operator->() const { return Get(); }
T& operator*() const { return *Get(); }
// CFI cast exemption to allow passing SentinelPointer through T* and support
// heterogeneous assignments between different Member and Persistent handles
// based on their actual types.
V8_CLANG_NO_SANITIZE("cfi-unrelated-cast") T* Get() const {
// The const_cast below removes the constness from PersistentBase storage.
// The following static_cast re-adds any constness if specified through the
// user-visible template parameter T.
return static_cast<T*>(const_cast<void*>(GetValue()));
}
void Clear() {
// Simplified version of `Assign()` to allow calling without a complete type
// `T`.
if (IsValid()) {
WeaknessPolicy::GetPersistentRegion(GetValue()).FreeNode(GetNode());
SetNode(nullptr);
}
SetValue(nullptr);
}
// Returns the pointee and clears the handle (freeing its node).
T* Release() {
T* result = Get();
Clear();
return result;
}
// Creates a new persistent of a (possibly different) type/policy combination
// pointing at the same object, using static_cast for the pointee.
template <typename U, typename OtherWeaknessPolicy = WeaknessPolicy,
typename OtherLocationPolicy = LocationPolicy,
typename OtherCheckingPolicy = CheckingPolicy>
BasicPersistent<U, OtherWeaknessPolicy, OtherLocationPolicy,
OtherCheckingPolicy>
To() const {
return BasicPersistent<U, OtherWeaknessPolicy, OtherLocationPolicy,
OtherCheckingPolicy>(static_cast<U*>(Get()));
}
private:
// Trace callback registered with the persistent node; reports this handle
// as a root to the visitor.
static void Trace(Visitor* v, const void* ptr) {
const auto* persistent = static_cast<const BasicPersistent*>(ptr);
v->TraceRoot(*persistent, persistent->Location());
}
bool IsValid() const {
// Ideally, handling kSentinelPointer would be done by the embedder. On the
// other hand, having Persistent aware of it is beneficial since no node
// gets wasted.
return GetValue() != nullptr && GetValue() != kSentinelPointer;
}
// Central assignment: reuses the existing node when transitioning from one
// valid pointer to another; otherwise frees/allocates the node as needed.
void Assign(T* ptr) {
if (IsValid()) {
if (ptr && ptr != kSentinelPointer) {
// Simply assign the pointer reusing the existing node.
SetValue(ptr);
this->CheckPointer(ptr);
return;
}
WeaknessPolicy::GetPersistentRegion(GetValue()).FreeNode(GetNode());
SetNode(nullptr);
}
SetValue(ptr);
if (!IsValid()) return;
SetNode(WeaknessPolicy::GetPersistentRegion(GetValue())
.AllocateNode(this, &BasicPersistent::Trace));
this->CheckPointer(Get());
}
// GC-internal clear; frees the node and shallow-clears the base.
void ClearFromGC() const {
if (IsValid()) {
WeaknessPolicy::GetPersistentRegion(GetValue()).FreeNode(GetNode());
PersistentBase::ClearFromGC();
}
}
// See Get() for details.
V8_CLANG_NO_SANITIZE("cfi-unrelated-cast")
T* GetFromGC() const {
return static_cast<T*>(const_cast<void*>(GetValue()));
}
friend class cppgc::Visitor;
};
// Equality over raw pointee values; policies do not participate.
template <typename T1, typename WeaknessPolicy1, typename LocationPolicy1,
typename CheckingPolicy1, typename T2, typename WeaknessPolicy2,
typename LocationPolicy2, typename CheckingPolicy2>
bool operator==(const BasicPersistent<T1, WeaknessPolicy1, LocationPolicy1,
CheckingPolicy1>& p1,
const BasicPersistent<T2, WeaknessPolicy2, LocationPolicy2,
CheckingPolicy2>& p2) {
return p1.Get() == p2.Get();
}
template <typename T1, typename WeaknessPolicy1, typename LocationPolicy1,
typename CheckingPolicy1, typename T2, typename WeaknessPolicy2,
typename LocationPolicy2, typename CheckingPolicy2>
bool operator!=(const BasicPersistent<T1, WeaknessPolicy1, LocationPolicy1,
CheckingPolicy1>& p1,
const BasicPersistent<T2, WeaknessPolicy2, LocationPolicy2,
CheckingPolicy2>& p2) {
return !(p1 == p2);
}
// Mixed Persistent/Member comparisons, again over raw pointee values.
template <typename T1, typename PersistentWeaknessPolicy,
typename PersistentLocationPolicy, typename PersistentCheckingPolicy,
typename T2, typename MemberWriteBarrierPolicy,
typename MemberWeaknessTag, typename MemberCheckingPolicy>
bool operator==(
const BasicPersistent<T1, PersistentWeaknessPolicy,
PersistentLocationPolicy, PersistentCheckingPolicy>&
p,
const BasicMember<T2, MemberWeaknessTag, MemberWriteBarrierPolicy,
MemberCheckingPolicy>& m) {
return p.Get() == m.Get();
}
template <typename T1, typename PersistentWeaknessPolicy,
typename PersistentLocationPolicy, typename PersistentCheckingPolicy,
typename T2, typename MemberWriteBarrierPolicy,
typename MemberWeaknessTag, typename MemberCheckingPolicy>
bool operator!=(
const BasicPersistent<T1, PersistentWeaknessPolicy,
PersistentLocationPolicy, PersistentCheckingPolicy>&
p,
const BasicMember<T2, MemberWeaknessTag, MemberWriteBarrierPolicy,
MemberCheckingPolicy>& m) {
return !(p == m);
}
// Symmetric Member/Persistent overloads.
template <typename T1, typename MemberWriteBarrierPolicy,
typename MemberWeaknessTag, typename MemberCheckingPolicy,
typename T2, typename PersistentWeaknessPolicy,
typename PersistentLocationPolicy, typename PersistentCheckingPolicy>
bool operator==(
const BasicMember<T2, MemberWeaknessTag, MemberWriteBarrierPolicy,
MemberCheckingPolicy>& m,
const BasicPersistent<T1, PersistentWeaknessPolicy,
PersistentLocationPolicy, PersistentCheckingPolicy>&
p) {
return m.Get() == p.Get();
}
template <typename T1, typename MemberWriteBarrierPolicy,
typename MemberWeaknessTag, typename MemberCheckingPolicy,
typename T2, typename PersistentWeaknessPolicy,
typename PersistentLocationPolicy, typename PersistentCheckingPolicy>
bool operator!=(
const BasicMember<T2, MemberWeaknessTag, MemberWriteBarrierPolicy,
MemberCheckingPolicy>& m,
const BasicPersistent<T1, PersistentWeaknessPolicy,
PersistentLocationPolicy, PersistentCheckingPolicy>&
p) {
return !(m == p);
}
// Marks WeakPersistent instantiations as weak for trait-based dispatch.
template <typename T, typename LocationPolicy, typename CheckingPolicy>
struct IsWeak<BasicPersistent<T, internal::WeakPersistentPolicy, LocationPolicy,
CheckingPolicy>> : std::true_type {};
} // namespace internal
/**
* Persistent is a way to create a strong pointer from an off-heap object to
* another on-heap object. As long as the Persistent handle is alive the GC will
* keep the object pointed to alive. The Persistent handle is always a GC root
* from the point of view of the GC. Persistent must be constructed and
* destructed in the same thread.
*/
template <typename T>
using Persistent =
internal::BasicPersistent<T, internal::StrongPersistentPolicy>;
/**
* WeakPersistent is a way to create a weak pointer from an off-heap object to
* an on-heap object. The pointer is automatically cleared when the pointee gets
* collected. WeakPersistent must be constructed and destructed in the same
* thread.
*/
template <typename T>
using WeakPersistent =
internal::BasicPersistent<T, internal::WeakPersistentPolicy>;
} // namespace cppgc
#endif // INCLUDE_CPPGC_PERSISTENT_H_

View File

@@ -0,0 +1,156 @@
// Copyright 2020 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef INCLUDE_CPPGC_PLATFORM_H_
#define INCLUDE_CPPGC_PLATFORM_H_
#include <memory>
#include "cppgc/source-location.h"
#include "v8-platform.h" // NOLINT(build/include_directory)
#include "v8config.h" // NOLINT(build/include_directory)
namespace cppgc {
// TODO(v8:10346): Create separate includes for concepts that are not
// V8-specific.
using IdleTask = v8::IdleTask;
using JobHandle = v8::JobHandle;
using JobDelegate = v8::JobDelegate;
using JobTask = v8::JobTask;
using PageAllocator = v8::PageAllocator;
using Task = v8::Task;
using TaskPriority = v8::TaskPriority;
using TaskRunner = v8::TaskRunner;
using TracingController = v8::TracingController;
/**
* Platform interface used by Heap. Contains allocators and executors.
* All virtuals other than GetPageAllocator() and
* MonotonicallyIncreasingTime() have working defaults, so a minimal embedder
* only needs to provide those two.
*/
class V8_EXPORT Platform {
public:
virtual ~Platform() = default;
/**
* Returns the allocator used by cppgc to allocate its heap and various
* support structures.
*/
virtual PageAllocator* GetPageAllocator() = 0;
/**
* Monotonically increasing time in seconds from an arbitrary fixed point in
* the past. This function is expected to return at least
* millisecond-precision values. For this reason,
* it is recommended that the fixed point be no further in the past than
* the epoch.
**/
virtual double MonotonicallyIncreasingTime() = 0;
/**
* Foreground task runner that should be used by a Heap.
* Default: nullptr, i.e. no foreground task posting available.
*/
virtual std::shared_ptr<TaskRunner> GetForegroundTaskRunner() {
return nullptr;
}
/**
* Posts `job_task` to run in parallel. Returns a `JobHandle` associated with
* the `Job`, which can be joined or canceled.
* This avoids degenerate cases:
* - Calling `CallOnWorkerThread()` for each work item, causing significant
* overhead.
* - Fixed number of `CallOnWorkerThread()` calls that split the work and
* might run for a long time. This is problematic when many components post
* "num cores" tasks and all expect to use all the cores. In these cases,
* the scheduler lacks context to be fair to multiple same-priority requests
* and/or ability to request lower priority work to yield when high priority
* work comes in.
* A canonical implementation of `job_task` looks like:
* \code
* class MyJobTask : public JobTask {
* public:
* MyJobTask(...) : worker_queue_(...) {}
* // JobTask implementation.
* void Run(JobDelegate* delegate) override {
* while (!delegate->ShouldYield()) {
* // Smallest unit of work.
* auto work_item = worker_queue_.TakeWorkItem(); // Thread safe.
* if (!work_item) return;
* ProcessWork(work_item);
* }
* }
*
* size_t GetMaxConcurrency() const override {
* return worker_queue_.GetSize(); // Thread safe.
* }
* };
*
* // ...
* auto handle = PostJob(TaskPriority::kUserVisible,
* std::make_unique<MyJobTask>(...));
* handle->Join();
* \endcode
*
* `PostJob()` and methods of the returned JobHandle/JobDelegate, must never
* be called while holding a lock that could be acquired by `JobTask::Run()`
* or `JobTask::GetMaxConcurrency()` -- that could result in a deadlock. This
* is because (1) `JobTask::GetMaxConcurrency()` may be invoked while holding
* internal lock (A), hence `JobTask::GetMaxConcurrency()` can only use a lock
* (B) if that lock is *never* held while calling back into `JobHandle` from
* any thread (A=>B/B=>A deadlock) and (2) `JobTask::Run()` or
* `JobTask::GetMaxConcurrency()` may be invoked synchronously from
* `JobHandle` (B=>JobHandle::foo=>B deadlock).
*
* A sufficient `PostJob()` implementation that uses the default Job provided
* in libplatform looks like:
* \code
* std::unique_ptr<JobHandle> PostJob(
* TaskPriority priority, std::unique_ptr<JobTask> job_task) override {
* return std::make_unique<DefaultJobHandle>(
* std::make_shared<DefaultJobState>(
* this, std::move(job_task), kNumThreads));
* }
* \endcode
*
* Default: nullptr, i.e. no parallel job support available.
*/
virtual std::unique_ptr<JobHandle> PostJob(
TaskPriority priority, std::unique_ptr<JobTask> job_task) {
return nullptr;
}
/**
* Returns an instance of a `TracingController`. This must be non-nullptr. The
* default implementation returns an empty `TracingController` that consumes
* trace data without effect.
*/
virtual TracingController* GetTracingController();
};
/**
* Process-global initialization of the garbage collector. Must be called before
* creating a Heap.
*
* Can be called multiple times when paired with `ShutdownProcess()`.
*
* \param page_allocator The allocator used for maintaining meta data. Must not
* change between multiple calls to InitializeProcess.
*/
V8_EXPORT void InitializeProcess(PageAllocator* page_allocator);
/**
* Must be called after destroying the last used heap. Some process-global
* metadata may not be returned and reused upon a subsequent
* `InitializeProcess()` call.
*/
V8_EXPORT void ShutdownProcess();
namespace internal {
// Aborts the process with an optional reason and the call site's location.
V8_EXPORT void Fatal(const std::string& reason = std::string(),
const SourceLocation& = SourceLocation::Current());
} // namespace internal
} // namespace cppgc
#endif // INCLUDE_CPPGC_PLATFORM_H_

View File

@@ -0,0 +1,75 @@
// Copyright 2020 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef INCLUDE_CPPGC_PREFINALIZER_H_
#define INCLUDE_CPPGC_PREFINALIZER_H_
#include "cppgc/internal/compiler-specific.h"
#include "cppgc/liveness-broker.h"
namespace cppgc {
namespace internal {
// Registers a prefinalizer callback for an object at construction time.
// Instantiated as a field by CPPGC_USING_PRE_FINALIZER below; heap allocation
// of the registration itself is disallowed (deleted operator new).
class V8_EXPORT PrefinalizerRegistration final {
public:
// Callback signature: returns true if the prefinalizer ran (object dead).
using Callback = bool (*)(const cppgc::LivenessBroker&, void*);
PrefinalizerRegistration(void*, Callback);
void* operator new(size_t, void* location) = delete;
void* operator new(size_t) = delete;
};
} // namespace internal
/**
* Macro must be used in the private section of `Class` and registers a
* prefinalization callback `void Class::PreFinalizer()`. The callback is
* invoked on garbage collection after the collector has found an object to be
* dead.
*
* Callback properties:
* - The callback is invoked before a possible destructor for the corresponding
* object.
* - The callback may access the whole object graph, irrespective of whether
* objects are considered dead or alive.
* - The callback is invoked on the same thread as the object was created on.
*
* Example:
* \code
* class WithPrefinalizer : public GarbageCollected<WithPrefinalizer> {
* CPPGC_USING_PRE_FINALIZER(WithPrefinalizer, Dispose);
*
* public:
* void Trace(Visitor*) const {}
* void Dispose() { prefinalizer_called = true; }
* ~WithPrefinalizer() {
* // prefinalizer_called == true
* }
* private:
* bool prefinalizer_called = false;
* };
* \endcode
*/
// Implementation notes (comments cannot live inside the macro body, as line
// splicing would merge `//` with the continuation backslashes):
// - InvokePreFinalizer() is the registered callback: it runs `PreFinalizer`
//   only when the object is found dead and reports whether it ran.
// - The CPPGC_NO_UNIQUE_ADDRESS registration member performs the
//   registration as a side effect of constructing the enclosing object.
// - The trailing static_assert forces a semicolon at the use site.
#define CPPGC_USING_PRE_FINALIZER(Class, PreFinalizer) \
public: \
static bool InvokePreFinalizer(const cppgc::LivenessBroker& liveness_broker, \
void* object) { \
static_assert(cppgc::IsGarbageCollectedOrMixinTypeV<Class>, \
"Only garbage collected objects can have prefinalizers"); \
Class* self = static_cast<Class*>(object); \
if (liveness_broker.IsHeapObjectAlive(self)) return false; \
self->PreFinalizer(); \
return true; \
} \
\
private: \
CPPGC_NO_UNIQUE_ADDRESS cppgc::internal::PrefinalizerRegistration \
prefinalizer_dummy_{this, Class::InvokePreFinalizer}; \
static_assert(true, "Force semicolon.")
} // namespace cppgc
#endif // INCLUDE_CPPGC_PREFINALIZER_H_

View File

@@ -0,0 +1,36 @@
// Copyright 2020 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef INCLUDE_CPPGC_PROCESS_HEAP_STATISTICS_H_
#define INCLUDE_CPPGC_PROCESS_HEAP_STATISTICS_H_
#include <atomic>
#include <cstddef>
#include "v8config.h" // NOLINT(build/include_directory)
namespace cppgc {
namespace internal {
class ProcessHeapStatisticsUpdater;
} // namespace internal
// Process-wide allocation counters across all cppgc heaps. Reads are relaxed
// atomics: values are monotonic snapshots, not synchronized with any GC phase.
class V8_EXPORT ProcessHeapStatistics final {
public:
// Bytes of live object payload allocated process-wide.
static size_t TotalAllocatedObjectSize() {
return total_allocated_object_size_.load(std::memory_order_relaxed);
}
// Bytes of memory reserved for heaps process-wide.
static size_t TotalAllocatedSpace() {
return total_allocated_space_.load(std::memory_order_relaxed);
}
private:
static std::atomic_size_t total_allocated_space_;
static std::atomic_size_t total_allocated_object_size_;
// Only the updater may write the counters.
friend class internal::ProcessHeapStatisticsUpdater;
};
} // namespace cppgc
#endif // INCLUDE_CPPGC_PROCESS_HEAP_STATISTICS_H_

View File

@@ -0,0 +1,32 @@
// Copyright 2021 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef INCLUDE_CPPGC_SENTINEL_POINTER_H_
#define INCLUDE_CPPGC_SENTINEL_POINTER_H_
#include <cstdint>
namespace cppgc {
namespace internal {
// Special tag type used to denote some sentinel member. The semantics of the
// sentinel is defined by the embedder.
// Empty tag type standing in for a sentinel pointer value. It implicitly
// converts to any pointer type, always yielding the fixed address 0x1; the
// meaning of that sentinel is defined by the embedder. All instances are
// interchangeable and compare equal.
struct SentinelPointer {
  // Implicit conversion producing the sentinel address as a T*.
  template <typename T>
  operator T*() const {
    constexpr intptr_t kSentinelValue = 1;
    return reinterpret_cast<T*>(kSentinelValue);
  }
  // Hidden friends: every sentinel is the same sentinel.
  friend bool operator==(SentinelPointer, SentinelPointer) { return true; }
  friend bool operator!=(SentinelPointer, SentinelPointer) { return false; }
};
} // namespace internal
constexpr internal::SentinelPointer kSentinelPointer;
} // namespace cppgc
#endif // INCLUDE_CPPGC_SENTINEL_POINTER_H_

View File

@@ -0,0 +1,92 @@
// Copyright 2020 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef INCLUDE_CPPGC_SOURCE_LOCATION_H_
#define INCLUDE_CPPGC_SOURCE_LOCATION_H_
#include <cstddef>
#include <string>
#include "v8config.h" // NOLINT(build/include_directory)
#if defined(__has_builtin)
#define CPPGC_SUPPORTS_SOURCE_LOCATION \
(__has_builtin(__builtin_FUNCTION) && __has_builtin(__builtin_FILE) && \
__has_builtin(__builtin_LINE)) // NOLINT
#elif defined(V8_CC_GNU) && __GNUC__ >= 7
#define CPPGC_SUPPORTS_SOURCE_LOCATION 1
#elif defined(V8_CC_INTEL) && __ICC >= 1800
#define CPPGC_SUPPORTS_SOURCE_LOCATION 1
#else
#define CPPGC_SUPPORTS_SOURCE_LOCATION 0
#endif
namespace cppgc {
/**
* Encapsulates source location information. Mimics C++20's
* `std::source_location`.
*/
class V8_EXPORT SourceLocation final {
public:
/**
* Construct source location information corresponding to the location of the
* call site. When the compiler lacks the __builtin_FUNCTION/FILE/LINE
* intrinsics, an unspecified (empty) location is returned instead.
*/
#if CPPGC_SUPPORTS_SOURCE_LOCATION
static constexpr SourceLocation Current(
const char* function = __builtin_FUNCTION(),
const char* file = __builtin_FILE(), size_t line = __builtin_LINE()) {
return SourceLocation(function, file, line);
}
#else
static constexpr SourceLocation Current() { return SourceLocation(); }
#endif // CPPGC_SUPPORTS_SOURCE_LOCATION
/**
* Constructs unspecified source location information.
*/
constexpr SourceLocation() = default;
/**
* Returns the name of the function associated with the position represented
* by this object, if any.
*
* \returns the function name as cstring.
*/
constexpr const char* Function() const { return function_; }
/**
* Returns the name of the current source file represented by this object.
*
* \returns the file name as cstring.
*/
constexpr const char* FileName() const { return file_; }
/**
* Returns the line number represented by this object.
*
* \returns the line number.
*/
constexpr size_t Line() const { return line_; }
/**
* Returns a human-readable string representing this object.
*
* \returns a human-readable string representing source location information.
*/
std::string ToString() const;
private:
// Only Current() may construct a populated location.
constexpr SourceLocation(const char* function, const char* file, size_t line)
: function_(function), file_(file), line_(line) {}
const char* function_ = nullptr;
const char* file_ = nullptr;
size_t line_ = 0u;
};
} // namespace cppgc
#endif // INCLUDE_CPPGC_SOURCE_LOCATION_H_

View File

@@ -0,0 +1,106 @@
// Copyright 2021 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef INCLUDE_CPPGC_TESTING_H_
#define INCLUDE_CPPGC_TESTING_H_
#include "cppgc/common.h"
#include "cppgc/macros.h"
#include "v8config.h" // NOLINT(build/include_directory)
namespace cppgc {
class HeapHandle;
/**
* Namespace contains testing helpers.
*/
namespace testing {
/**
 * Overrides the state of the stack with the provided value. Parameters passed
 * to explicit garbage collection calls still take precedence. Must not be
 * nested.
 *
 * This scope is useful to make the garbage collector consider the stack when
 * tasks that invoke garbage collection (through the provided platform) contain
 * interesting pointers on its stack.
 */
class V8_EXPORT V8_NODISCARD OverrideEmbedderStackStateScope final {
  CPPGC_STACK_ALLOCATED();
 public:
  /**
   * Constructs a scoped object that automatically enters and leaves the scope.
   *
   * \param heap_handle The corresponding heap.
   * \param state The stack state to report while the scope is active.
   */
  explicit OverrideEmbedderStackStateScope(HeapHandle& heap_handle,
                                           EmbedderStackState state);
  /**
   * Leaves the scope on destruction (declaration only; defined out of line).
   */
  ~OverrideEmbedderStackStateScope();
  // The scope is tied to a stack frame and must not be copied.
  OverrideEmbedderStackStateScope(const OverrideEmbedderStackStateScope&) =
      delete;
  OverrideEmbedderStackStateScope& operator=(
      const OverrideEmbedderStackStateScope&) = delete;
 private:
  HeapHandle& heap_handle_;
};
/**
 * Testing interface for managed heaps that allows for controlling garbage
 * collection timings. Embedders should use this class when testing the
 * interaction of their code with incremental/concurrent garbage collection.
 */
class V8_EXPORT StandaloneTestingHeap final {
 public:
  /**
   * Creates a testing interface bound to the given heap.
   *
   * \param heap_handle The heap to control.
   */
  explicit StandaloneTestingHeap(HeapHandle&);
  /**
   * Start an incremental garbage collection.
   */
  void StartGarbageCollection();
  /**
   * Perform an incremental step. This will also schedule concurrent steps if
   * needed.
   *
   * \param stack_state The state of the stack during the step.
   * \returns a bool — NOTE(review): declaration only; presumably whether
   *   marking has completed. Confirm against the implementation.
   */
  bool PerformMarkingStep(EmbedderStackState stack_state);
  /**
   * Finalize the current garbage collection cycle atomically.
   * Assumes that garbage collection is in progress.
   *
   * \param stack_state The state of the stack for finalizing the garbage
   * collection cycle.
   */
  void FinalizeGarbageCollection(EmbedderStackState stack_state);
  /**
   * Toggle main thread marking on/off. Allows to stress concurrent marking
   * (e.g. to better detect data races).
   *
   * \param should_mark Denotes whether the main thread should contribute to
   * marking. Defaults to true.
   */
  void ToggleMainThreadMarking(bool should_mark);
  /**
   * Force enable compaction for the next garbage collection cycle.
   */
  void ForceCompactionForNextGarbageCollection();
 private:
  HeapHandle& heap_handle_;
};
// Returns whether the given heap object is considered old by the GC.
// NOTE(review): declaration only; the exact aging semantics live in the
// implementation — confirm there.
V8_EXPORT bool IsHeapObjectOld(void*);
} // namespace testing
} // namespace cppgc
#endif // INCLUDE_CPPGC_TESTING_H_

View File

@@ -0,0 +1,116 @@
// Copyright 2020 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef INCLUDE_CPPGC_TRACE_TRAIT_H_
#define INCLUDE_CPPGC_TRACE_TRAIT_H_
#include <type_traits>
#include "cppgc/type-traits.h"
#include "v8config.h" // NOLINT(build/include_directory)
namespace cppgc {
class Visitor;
namespace internal {
// Implementation of the default TraceTrait handling GarbageCollected and
// GarbageCollectedMixin. The second (bool) parameter defaults to whether T
// (ignoring const) is a mixin type and selects between the two
// specializations defined further below in this header.
template <typename T,
          bool =
              IsGarbageCollectedMixinTypeV<typename std::remove_const<T>::type>>
struct TraceTraitImpl;
} // namespace internal
/**
 * Callback for invoking tracing on a given object.
 *
 * \param visitor The visitor to dispatch to.
 * \param object The object to invoke tracing on.
 */
using TraceCallback = void (*)(Visitor* visitor, const void* object);
/**
 * Describes how to trace an object, i.e., how to visit all Oilpan-relevant
 * fields of an object.
 */
struct TraceDescriptor {
  /**
   * Adjusted base pointer, i.e., the pointer to the class inheriting directly
   * from GarbageCollected, of the object that is being traced.
   */
  const void* base_object_payload;
  /**
   * Callback for tracing the object.
   */
  TraceCallback callback;
};
namespace internal {
// Computes a TraceDescriptor from an arbitrary address inside an object
// (declaration only; implemented in the heap library).
struct V8_EXPORT TraceTraitFromInnerAddressImpl {
  static TraceDescriptor GetTraceDescriptor(const void* address);
};
/**
 * Trait specifying how the garbage collector processes an object of type T.
 *
 * Advanced users may override handling by creating a specialization for their
 * type.
 */
template <typename T>
struct TraceTraitBase {
  static_assert(internal::IsTraceableV<T>, "T must have a Trace() method");

  /**
   * Retrieves a TraceDescriptor for processing an object of type T.
   *
   * \param self The object to be processed.
   * \returns a TraceDescriptor to process the object.
   */
  static TraceDescriptor GetTraceDescriptor(const void* self) {
    const T* typed_self = static_cast<const T*>(self);
    return internal::TraceTraitImpl<T>::GetTraceDescriptor(typed_self);
  }

  /**
   * Invokes tracing for an object of type T.
   *
   * \param visitor The visitor to dispatch to.
   * \param self The object to invoke tracing on.
   */
  static void Trace(Visitor* visitor, const void* self) {
    const T* typed_self = static_cast<const T*>(self);
    typed_self->Trace(visitor);
  }
};
} // namespace internal
// Public trait for tracing. Defaults to TraceTraitBase; advanced users may
// specialize it to customize handling of their type (see comment above).
template <typename T>
struct TraceTrait : public internal::TraceTraitBase<T> {};
namespace internal {
// Non-mixin case: `self` already points at the base object payload, so the
// descriptor can be formed directly from the pointer and TraceTrait's Trace.
template <typename T>
struct TraceTraitImpl<T, false> {
  static_assert(IsGarbageCollectedTypeV<T>,
                "T must be of type GarbageCollected or GarbageCollectedMixin");
  static TraceDescriptor GetTraceDescriptor(const void* self) {
    return {self, TraceTrait<T>::Trace};
  }
};
// Mixin case: `self` is an inner address of the full object, so the adjusted
// base pointer and callback must be looked up from that inner address.
template <typename T>
struct TraceTraitImpl<T, true> {
  static TraceDescriptor GetTraceDescriptor(const void* self) {
    return internal::TraceTraitFromInnerAddressImpl::GetTraceDescriptor(self);
  }
};
} // namespace internal
} // namespace cppgc
#endif // INCLUDE_CPPGC_TRACE_TRAIT_H_

View File

@@ -0,0 +1,240 @@
// Copyright 2020 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef INCLUDE_CPPGC_TYPE_TRAITS_H_
#define INCLUDE_CPPGC_TYPE_TRAITS_H_
// This file should stay with minimal dependencies to allow embedder to check
// against Oilpan types without including any other parts.
#include <cstddef>
#include <type_traits>
namespace cppgc {
class Visitor;
namespace internal {
// Forward declarations of the Member machinery referenced by the detection
// traits below; the definitions live in other cppgc headers.
template <typename T, typename WeaknessTag, typename WriteBarrierPolicy,
          typename CheckingPolicy>
class BasicMember;
struct DijkstraWriteBarrierPolicy;
struct NoWriteBarrierPolicy;
class StrongMemberTag;
class UntracedMemberTag;
class WeakMemberTag;
// Trait that is true for weak reference types; defaults to false.
// Not supposed to be specialized by the user.
template <typename T>
struct IsWeak : std::false_type {};
// IsTraceMethodConst is used to verify that all Trace methods are marked as
// const. It is equivalent to IsTraceable but for a non-const object.
template <typename T, typename = void>
struct IsTraceMethodConst : std::false_type {};
template <typename T>
struct IsTraceMethodConst<T, std::void_t<decltype(std::declval<const T>().Trace(
                                 std::declval<Visitor*>()))>> : std::true_type {
};
// Detects the presence of a `Trace(Visitor*)` method on T.
template <typename T, typename = void>
struct IsTraceable : std::false_type {
  static_assert(sizeof(T), "T must be fully defined");
};
template <typename T>
struct IsTraceable<
    T, std::void_t<decltype(std::declval<T>().Trace(std::declval<Visitor*>()))>>
    : std::true_type {
  // All Trace methods should be marked as const. If an object of type
  // 'T' is traceable then any object of type 'const T' should also
  // be traceable.
  static_assert(IsTraceMethodConst<T>(),
                "Trace methods should be marked as const.");
};
// Convenience variable template for IsTraceable.
template <typename T>
constexpr bool IsTraceableV = IsTraceable<T>::value;
// Detects the nested `IsGarbageCollectedMixinTypeMarker` type; const is
// stripped before the lookup.
template <typename T, typename = void>
struct HasGarbageCollectedMixinTypeMarker : std::false_type {
  static_assert(sizeof(T), "T must be fully defined");
};
template <typename T>
struct HasGarbageCollectedMixinTypeMarker<
    T, std::void_t<
           typename std::remove_const_t<T>::IsGarbageCollectedMixinTypeMarker>>
    : std::true_type {
  static_assert(sizeof(T), "T must be fully defined");
};
// Detects the nested `IsGarbageCollectedTypeMarker` type; const is stripped
// before the lookup.
template <typename T, typename = void>
struct HasGarbageCollectedTypeMarker : std::false_type {
  static_assert(sizeof(T), "T must be fully defined");
};
template <typename T>
struct HasGarbageCollectedTypeMarker<
    T,
    std::void_t<typename std::remove_const_t<T>::IsGarbageCollectedTypeMarker>>
    : std::true_type {
  static_assert(sizeof(T), "T must be fully defined");
};
// True only for "free" mixins: the mixin marker is present but the
// GarbageCollected marker is not (the <T, false, true> specialization).
template <typename T, bool = HasGarbageCollectedTypeMarker<T>::value,
          bool = HasGarbageCollectedMixinTypeMarker<T>::value>
struct IsGarbageCollectedMixinType : std::false_type {
  static_assert(sizeof(T), "T must be fully defined");
};
template <typename T>
struct IsGarbageCollectedMixinType<T, false, true> : std::true_type {
  static_assert(sizeof(T), "T must be fully defined");
};
// True for types carrying the GarbageCollected marker.
template <typename T, bool = HasGarbageCollectedTypeMarker<T>::value>
struct IsGarbageCollectedType : std::false_type {
  static_assert(sizeof(T), "T must be fully defined");
};
template <typename T>
struct IsGarbageCollectedType<T, true> : std::true_type {
  static_assert(sizeof(T), "T must be fully defined");
};
// Disjunction of the two traits above.
template <typename T>
struct IsGarbageCollectedOrMixinType
    : std::integral_constant<bool, IsGarbageCollectedType<T>::value ||
                                       IsGarbageCollectedMixinType<T>::value> {
  static_assert(sizeof(T), "T must be fully defined");
};
// True when both markers are present, i.e. a GarbageCollected type that also
// inherits a mixin.
template <typename T, bool = (HasGarbageCollectedTypeMarker<T>::value &&
                              HasGarbageCollectedMixinTypeMarker<T>::value)>
struct IsGarbageCollectedWithMixinType : std::false_type {
  static_assert(sizeof(T), "T must be fully defined");
};
template <typename T>
struct IsGarbageCollectedWithMixinType<T, true> : std::true_type {
  static_assert(sizeof(T), "T must be fully defined");
};
// Detects whether a candidate type derives from some BasicMember
// instantiation with the given weakness tag and write barrier policy (any
// checking policy), using overload resolution on a pointer to the candidate.
template <typename BasicMemberCandidate, typename WeaknessTag,
          typename WriteBarrierPolicy>
struct IsSubclassOfBasicMemberTemplate {
 private:
  template <typename T, typename CheckingPolicy>
  static std::true_type SubclassCheck(
      BasicMember<T, WeaknessTag, WriteBarrierPolicy, CheckingPolicy>*);
  static std::false_type SubclassCheck(...);
 public:
  static constexpr bool value =
      decltype(SubclassCheck(std::declval<BasicMemberCandidate*>()))::value;
};
// Member<T> detection: strong member tag + Dijkstra write barrier.
template <typename T,
          bool = IsSubclassOfBasicMemberTemplate<
              T, StrongMemberTag, DijkstraWriteBarrierPolicy>::value>
struct IsMemberType : std::false_type {};
template <typename T>
struct IsMemberType<T, true> : std::true_type {};
// WeakMember<T> detection: weak member tag + Dijkstra write barrier.
template <typename T, bool = IsSubclassOfBasicMemberTemplate<
                          T, WeakMemberTag, DijkstraWriteBarrierPolicy>::value>
struct IsWeakMemberType : std::false_type {};
template <typename T>
struct IsWeakMemberType<T, true> : std::true_type {};
// UntracedMember<T> detection: untraced member tag + no write barrier.
template <typename T, bool = IsSubclassOfBasicMemberTemplate<
                          T, UntracedMemberTag, NoWriteBarrierPolicy>::value>
struct IsUntracedMemberType : std::false_type {};
template <typename T>
struct IsUntracedMemberType<T, true> : std::true_type {};
// Determines at compile time whether `T` is a complete (fully defined) type.
// `sizeof(U)` is only well-formed for complete types, so the templated
// pointer overload is viable exactly when `T` is complete; otherwise overload
// resolution falls back to the ellipsis overload.
template <typename T>
struct IsComplete {
 private:
  // Preferred overload; SFINAEd out when sizeof(U) is ill-formed.
  template <typename U, decltype(sizeof(U)) = sizeof(U)>
  static std::true_type Check(U*);
  // Fallback for incomplete types.
  static std::false_type Check(...);

 public:
  static constexpr bool value = decltype(Check(std::declval<T*>()))::value;
};
} // namespace internal
/**
 * Value is true for types that inherit from `GarbageCollectedMixin` but not
 * `GarbageCollected<T>` (i.e., they are free mixins), and false otherwise.
 */
template <typename T>
constexpr bool IsGarbageCollectedMixinTypeV =
    internal::IsGarbageCollectedMixinType<T>::value;
/**
 * Value is true for types that inherit from `GarbageCollected<T>`, and false
 * otherwise.
 */
template <typename T>
constexpr bool IsGarbageCollectedTypeV =
    internal::IsGarbageCollectedType<T>::value;
/**
 * Value is true for types that inherit from either `GarbageCollected<T>` or
 * `GarbageCollectedMixin`, and false otherwise.
 */
template <typename T>
constexpr bool IsGarbageCollectedOrMixinTypeV =
    internal::IsGarbageCollectedOrMixinType<T>::value;
/**
 * Value is true for types that inherit from both `GarbageCollected<T>` and
 * `GarbageCollectedMixin`, and false otherwise.
 */
template <typename T>
constexpr bool IsGarbageCollectedWithMixinTypeV =
    internal::IsGarbageCollectedWithMixinType<T>::value;
/**
 * Value is true for types of type `Member<T>`, and false otherwise.
 */
template <typename T>
constexpr bool IsMemberTypeV = internal::IsMemberType<T>::value;
/**
 * Value is true for types of type `UntracedMember<T>`, and false otherwise.
 */
template <typename T>
constexpr bool IsUntracedMemberTypeV = internal::IsUntracedMemberType<T>::value;
/**
 * Value is true for types of type `WeakMember<T>`, and false otherwise.
 */
template <typename T>
constexpr bool IsWeakMemberTypeV = internal::IsWeakMemberType<T>::value;
/**
 * Value is true for types that are considered weak references, and false
 * otherwise.
 */
template <typename T>
constexpr bool IsWeakV = internal::IsWeak<T>::value;
/**
 * Value is true for types that are complete (fully defined at the point of
 * instantiation), and false otherwise.
 */
template <typename T>
constexpr bool IsCompleteV = internal::IsComplete<T>::value;
} // namespace cppgc
#endif // INCLUDE_CPPGC_TYPE_TRAITS_H_

View File

@@ -0,0 +1,379 @@
// Copyright 2020 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef INCLUDE_CPPGC_VISITOR_H_
#define INCLUDE_CPPGC_VISITOR_H_
#include "cppgc/custom-space.h"
#include "cppgc/ephemeron-pair.h"
#include "cppgc/garbage-collected.h"
#include "cppgc/internal/logging.h"
#include "cppgc/internal/pointer-policies.h"
#include "cppgc/liveness-broker.h"
#include "cppgc/member.h"
#include "cppgc/sentinel-pointer.h"
#include "cppgc/source-location.h"
#include "cppgc/trace-trait.h"
#include "cppgc/type-traits.h"
namespace cppgc {
namespace internal {
template <typename T, typename WeaknessPolicy, typename LocationPolicy,
typename CheckingPolicy>
class BasicCrossThreadPersistent;
template <typename T, typename WeaknessPolicy, typename LocationPolicy,
typename CheckingPolicy>
class BasicPersistent;
class ConservativeTracingVisitor;
class VisitorBase;
class VisitorFactory;
} // namespace internal
/**
 * Callback for weak processing. Invoked with the custom `data` pointer that
 * was supplied when the callback was registered.
 */
using WeakCallback = void (*)(const LivenessBroker&, const void*);
/**
 * Visitor passed to trace methods. All managed pointers must have called the
 * Visitor's trace method on them.
 *
 * \code
 * class Foo final : public GarbageCollected<Foo> {
 *  public:
 *   void Trace(Visitor* visitor) const {
 *     visitor->Trace(foo_);
 *     visitor->Trace(weak_foo_);
 *   }
 *  private:
 *   Member<Foo> foo_;
 *   WeakMember<Foo> weak_foo_;
 * };
 * \endcode
 */
class V8_EXPORT Visitor {
 public:
  /**
   * Access-control token: only internal::VisitorFactory can create a Key, so
   * only it can construct Visitor instances.
   */
  class Key {
   private:
    Key() = default;
    friend class internal::VisitorFactory;
  };
  explicit Visitor(Key) {}
  virtual ~Visitor() = default;
  /**
   * Trace method for raw pointers. Prefer the versions for managed pointers.
   *
   * \param t Raw pointer to a garbage-collected object; null pointers are
   *   ignored.
   */
  template <typename T>
  void Trace(const T* t) {
    static_assert(sizeof(T), "Pointee type must be fully defined.");
    static_assert(internal::IsGarbageCollectedOrMixinType<T>::value,
                  "T must be GarbageCollected or GarbageCollectedMixin type");
    if (!t) {
      return;
    }
    Visit(t, TraceTrait<T>::GetTraceDescriptor(t));
  }
  /**
   * Trace method for Member.
   *
   * \param member Member reference retaining an object.
   */
  template <typename T>
  void Trace(const Member<T>& member) {
    const T* value = member.GetRawAtomic();
    // Sentinel values mark cleared slots and must not be traced.
    CPPGC_DCHECK(value != kSentinelPointer);
    Trace(value);
  }
  /**
   * Trace method for WeakMember.
   *
   * \param weak_member WeakMember reference weakly retaining an object.
   */
  template <typename T>
  void Trace(const WeakMember<T>& weak_member) {
    static_assert(sizeof(T), "Pointee type must be fully defined.");
    static_assert(internal::IsGarbageCollectedOrMixinType<T>::value,
                  "T must be GarbageCollected or GarbageCollectedMixin type");
    static_assert(!internal::IsAllocatedOnCompactableSpace<T>::value,
                  "Weak references to compactable objects are not allowed");
    const T* value = weak_member.GetRawAtomic();
    // Bailout assumes that WeakMember emits write barrier.
    if (!value) {
      return;
    }
    CPPGC_DCHECK(value != kSentinelPointer);
    VisitWeak(value, TraceTrait<T>::GetTraceDescriptor(value),
              &HandleWeak<WeakMember<T>>, &weak_member);
  }
  /**
   * Trace method for inlined objects that are not allocated themselves but
   * otherwise follow managed heap layout and have a Trace() method.
   *
   * \param object reference of the inlined object.
   */
  template <typename T>
  void Trace(const T& object) {
#if V8_ENABLE_CHECKS
    // This object is embedded in potentially multiple nested objects. The
    // outermost object must not be in construction as such objects are (a) not
    // processed immediately, and (b) only processed conservatively if not
    // otherwise possible.
    CheckObjectNotInConstruction(&object);
#endif // V8_ENABLE_CHECKS
    TraceTrait<T>::Trace(this, &object);
  }
  /**
   * Registers a weak callback method on the object of type T. See
   * LivenessBroker for an usage example.
   *
   * \param object of type T specifying a weak callback method.
   */
  template <typename T, void (T::*method)(const LivenessBroker&)>
  void RegisterWeakCallbackMethod(const T* object) {
    RegisterWeakCallback(&WeakCallbackMethodDelegate<T, method>, object);
  }
  /**
   * Trace method for EphemeronPair.
   *
   * \param ephemeron_pair EphemeronPair reference weakly retaining a key object
   * and strongly retaining a value object in case the key object is alive.
   */
  template <typename K, typename V>
  void Trace(const EphemeronPair<K, V>& ephemeron_pair) {
    TraceEphemeron(ephemeron_pair.key, &ephemeron_pair.value);
    // The registered method clears the value once the key is found dead.
    RegisterWeakCallbackMethod<EphemeronPair<K, V>,
                               &EphemeronPair<K, V>::ClearValueIfKeyIsDead>(
        &ephemeron_pair);
  }
  /**
   * Trace method for a single ephemeron. Used for tracing a raw ephemeron in
   * which the `key` and `value` are kept separately.
   *
   * \param weak_member_key WeakMember reference weakly retaining a key object.
   * \param member_value Member reference with ephemeron semantics.
   */
  template <typename KeyType, typename ValueType>
  void TraceEphemeron(const WeakMember<KeyType>& weak_member_key,
                      const Member<ValueType>* member_value) {
    const KeyType* key = weak_member_key.GetRawAtomic();
    if (!key) return;
    // `value` must always be non-null.
    CPPGC_DCHECK(member_value);
    const ValueType* value = member_value->GetRawAtomic();
    if (!value) return;
    // KeyType and ValueType may refer to GarbageCollectedMixin.
    TraceDescriptor value_desc =
        TraceTrait<ValueType>::GetTraceDescriptor(value);
    CPPGC_DCHECK(value_desc.base_object_payload);
    const void* key_base_object_payload =
        TraceTrait<KeyType>::GetTraceDescriptor(key).base_object_payload;
    CPPGC_DCHECK(key_base_object_payload);
    VisitEphemeron(key_base_object_payload, value, value_desc);
  }
  /**
   * Trace method for a single ephemeron. Used for tracing a raw ephemeron in
   * which the `key` and `value` are kept separately. Note that this overload
   * is for non-GarbageCollected `value`s that can be traced though.
   *
   * \param key `WeakMember` reference weakly retaining a key object.
   * \param value Reference weakly retaining a value object. Note that
   *   `ValueType` here should not be `Member`. It is expected that
   *   `TraceTrait<ValueType>::GetTraceDescriptor(value)` returns a
   *   `TraceDescriptor` with a null base pointer but a valid trace method.
   */
  template <typename KeyType, typename ValueType>
  void TraceEphemeron(const WeakMember<KeyType>& weak_member_key,
                      const ValueType* value) {
    static_assert(!IsGarbageCollectedOrMixinTypeV<ValueType>,
                  "garbage-collected types must use WeakMember and Member");
    const KeyType* key = weak_member_key.GetRawAtomic();
    if (!key) return;
    // `value` must always be non-null.
    CPPGC_DCHECK(value);
    TraceDescriptor value_desc =
        TraceTrait<ValueType>::GetTraceDescriptor(value);
    // `value_desc.base_object_payload` must be null as this override is only
    // taken for non-garbage-collected values.
    CPPGC_DCHECK(!value_desc.base_object_payload);
    // KeyType might be a GarbageCollectedMixin.
    const void* key_base_object_payload =
        TraceTrait<KeyType>::GetTraceDescriptor(key).base_object_payload;
    CPPGC_DCHECK(key_base_object_payload);
    VisitEphemeron(key_base_object_payload, value, value_desc);
  }
  /**
   * Trace method that strongifies a WeakMember.
   *
   * \param weak_member WeakMember reference retaining an object.
   */
  template <typename T>
  void TraceStrongly(const WeakMember<T>& weak_member) {
    const T* value = weak_member.GetRawAtomic();
    CPPGC_DCHECK(value != kSentinelPointer);
    // Dispatches through the strong raw-pointer overload above.
    Trace(value);
  }
  /**
   * Trace method for weak containers.
   *
   * \param object reference of the weak container; null is ignored.
   * \param callback to be invoked.
   * \param data custom data that is passed to the callback.
   */
  template <typename T>
  void TraceWeakContainer(const T* object, WeakCallback callback,
                          const void* data) {
    if (!object) return;
    VisitWeakContainer(object, TraceTrait<T>::GetTraceDescriptor(object),
                       TraceTrait<T>::GetWeakTraceDescriptor(object), callback,
                       data);
  }
  /**
   * Registers a slot containing a reference to an object allocated on a
   * compactable space. Such references maybe be arbitrarily moved by the GC.
   *
   * \param slot location of reference to object that might be moved by the GC.
   */
  template <typename T>
  void RegisterMovableReference(const T** slot) {
    static_assert(internal::IsAllocatedOnCompactableSpace<T>::value,
                  "Only references to objects allocated on compactable spaces "
                  "should be registered as movable slots.");
    static_assert(!IsGarbageCollectedMixinTypeV<T>,
                  "Mixin types do not support compaction.");
    HandleMovableReference(reinterpret_cast<const void**>(slot));
  }
  /**
   * Registers a weak callback that is invoked during garbage collection.
   *
   * \param callback to be invoked.
   * \param data custom data that is passed to the callback.
   */
  virtual void RegisterWeakCallback(WeakCallback callback, const void* data) {}
  /**
   * Defers tracing an object from a concurrent thread to the mutator thread.
   * Should be called by Trace methods of types that are not safe to trace
   * concurrently.
   *
   * \param parameter tells the trace callback which object was deferred.
   * \param callback to be invoked for tracing on the mutator thread.
   * \param deferred_size size of deferred object.
   *
   * \returns false if the object does not need to be deferred (i.e. currently
   * traced on the mutator thread) and true otherwise (i.e. currently traced on
   * a concurrent thread).
   */
  virtual V8_WARN_UNUSED_RESULT bool DeferTraceToMutatorThreadIfConcurrent(
      const void* parameter, TraceCallback callback, size_t deferred_size) {
    // By default tracing is not deferred.
    return false;
  }
 protected:
  // Dispatch points for subclasses. The default implementations are no-ops,
  // so a concrete visitor only overrides the hooks it actually handles.
  virtual void Visit(const void* self, TraceDescriptor) {}
  virtual void VisitWeak(const void* self, TraceDescriptor, WeakCallback,
                         const void* weak_member) {}
  virtual void VisitRoot(const void*, TraceDescriptor, const SourceLocation&) {}
  virtual void VisitWeakRoot(const void* self, TraceDescriptor, WeakCallback,
                             const void* weak_root, const SourceLocation&) {}
  virtual void VisitEphemeron(const void* key, const void* value,
                              TraceDescriptor value_desc) {}
  virtual void VisitWeakContainer(const void* self, TraceDescriptor strong_desc,
                                  TraceDescriptor weak_desc,
                                  WeakCallback callback, const void* data) {}
  virtual void HandleMovableReference(const void**) {}
 private:
  // Adapts a member-function weak callback to the free-function WeakCallback
  // signature expected by RegisterWeakCallback.
  template <typename T, void (T::*method)(const LivenessBroker&)>
  static void WeakCallbackMethodDelegate(const LivenessBroker& info,
                                         const void* self) {
    // Callback is registered through a potential const Trace method but needs
    // to be able to modify fields. See HandleWeak.
    (const_cast<T*>(static_cast<const T*>(self))->*method)(info);
  }
  // Weak callback that clears a weak slot (Member or Persistent handle) when
  // its referent did not survive the garbage collection cycle.
  template <typename PointerType>
  static void HandleWeak(const LivenessBroker& info, const void* object) {
    const PointerType* weak = static_cast<const PointerType*>(object);
    auto* raw_ptr = weak->GetFromGC();
    // Sentinel values are preserved for weak pointers.
    if (raw_ptr == kSentinelPointer) return;
    if (!info.IsHeapObjectAlive(raw_ptr)) {
      weak->ClearFromGC();
    }
  }
  // Root-tracing entry point used by strong Persistent handles (see the
  // friend declarations below); null handles are skipped.
  template <typename Persistent,
            std::enable_if_t<Persistent::IsStrongPersistent::value>* = nullptr>
  void TraceRoot(const Persistent& p, const SourceLocation& loc) {
    using PointeeType = typename Persistent::PointeeType;
    static_assert(sizeof(PointeeType),
                  "Persistent's pointee type must be fully defined");
    static_assert(internal::IsGarbageCollectedOrMixinType<PointeeType>::value,
                  "Persistent's pointee type must be GarbageCollected or "
                  "GarbageCollectedMixin");
    auto* ptr = p.GetFromGC();
    if (!ptr) {
      return;
    }
    VisitRoot(ptr, TraceTrait<PointeeType>::GetTraceDescriptor(ptr), loc);
  }
  // Root-tracing entry point used by weak Persistent handles; unlike the
  // strong overload, a null handle is still forwarded to VisitWeakRoot.
  template <
      typename WeakPersistent,
      std::enable_if_t<!WeakPersistent::IsStrongPersistent::value>* = nullptr>
  void TraceRoot(const WeakPersistent& p, const SourceLocation& loc) {
    using PointeeType = typename WeakPersistent::PointeeType;
    static_assert(sizeof(PointeeType),
                  "Persistent's pointee type must be fully defined");
    static_assert(internal::IsGarbageCollectedOrMixinType<PointeeType>::value,
                  "Persistent's pointee type must be GarbageCollected or "
                  "GarbageCollectedMixin");
    static_assert(!internal::IsAllocatedOnCompactableSpace<PointeeType>::value,
                  "Weak references to compactable objects are not allowed");
    auto* ptr = p.GetFromGC();
    VisitWeakRoot(ptr, TraceTrait<PointeeType>::GetTraceDescriptor(ptr),
                  &HandleWeak<WeakPersistent>, &p, loc);
  }
#if V8_ENABLE_CHECKS
  // Verifies that `address` does not point into an object that is still in
  // construction (declaration only; implemented in the heap library).
  void CheckObjectNotInConstruction(const void* address);
#endif // V8_ENABLE_CHECKS
  template <typename T, typename WeaknessPolicy, typename LocationPolicy,
            typename CheckingPolicy>
  friend class internal::BasicCrossThreadPersistent;
  template <typename T, typename WeaknessPolicy, typename LocationPolicy,
            typename CheckingPolicy>
  friend class internal::BasicPersistent;
  friend class internal::ConservativeTracingVisitor;
  friend class internal::VisitorBase;
};
} // namespace cppgc
#endif // INCLUDE_CPPGC_VISITOR_H_