// Copyright 2022 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include <cstdint>
#include <memory>
#include <unordered_set>
#include <vector>

#include "src/handles/handles.h"
#include "src/handles/traced-handles.h"
#include "test/unittests/heap/cppgc-js/unified-heap-utils.h"
#include "test/unittests/heap/heap-utils.h"

namespace v8::internal {

namespace {

constexpr uint16_t kClassIdToOptimize = 23;

using EmbedderRootsHandlerTest = TestWithHeapInternalsAndContext;

class V8_NODISCARD TemporaryEmbedderRootsHandleScope final {
 public:
  TemporaryEmbedderRootsHandleScope(v8::Isolate* isolate,
                                    v8::EmbedderRootsHandler* handler)
      : isolate_(isolate) {
    isolate_->SetEmbedderRootsHandler(handler);
  }
  ~TemporaryEmbedderRootsHandleScope() {
    isolate_->SetEmbedderRootsHandler(nullptr);
  }

 private:
  v8::Isolate* const isolate_;
};

// EmbedderRootsHandler that can optimize Scavenger handling when used with
// TracedReference.
class ClearingEmbedderRootsHandler final : public v8::EmbedderRootsHandler {
 public:
  explicit ClearingEmbedderRootsHandler(uint16_t class_id_to_optimize)
      : class_id_to_optimize_(class_id_to_optimize) {}

  bool IsRoot(const v8::TracedReference<v8::Value>& handle) final {
    return handle.WrapperClassId() != class_id_to_optimize_;
  }

  void ResetRoot(const v8::TracedReference<v8::Value>& handle) final {
    if (handle.WrapperClassId() != class_id_to_optimize_) return;

    // Convention (for test): Objects that are optimized have their first field
    // set as a back pointer.
    BasicTracedReference<v8::Value>* original_handle =
        reinterpret_cast<BasicTracedReference<v8::Value>*>(
            v8::Object::GetAlignedPointerFromInternalField(
                handle.As<v8::Object>(), 0));
    original_handle->Reset();
  }

 private:
  const uint16_t class_id_to_optimize_;
};
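
// Note: The young-GC optimization exercised below relies on a convention
// between the handler above and the wrapper objects created by the tests (see
// SetupOptimizedAndNonOptimizedHandle() and the JSApiObject tests): the
// embedder-owned TracedReference is tagged with kClassIdToOptimize and the
// wrapper's internal field 0 stores a back pointer to that reference. During a
// young GC, IsRoot() returning false allows the unmodified wrapper to be
// reclaimed, and ResetRoot() then clears the embedder's reference through the
// back pointer. A minimal sketch of the embedder-side setup (identifiers are
// illustrative only, not part of this test):
//
//   v8::TracedReference<v8::Object> ref;
//   v8::Local<v8::Object> wrapper = CreateWrapperObject(context);
//   ref = v8::TracedReference<v8::Object>(isolate, wrapper);
//   ref.SetWrapperClassId(kClassIdToOptimize);
//   wrapper->SetAlignedPointerInInternalField(0, &ref);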

// EmbedderRootsHandler that forwards handle processing to the main thread and
// makes sure that concurrently processed handles get reprocessed if needed.
class MainThreadForwardingRootHandler final : public v8::EmbedderRootsHandler {
 public:
  bool IsRoot(const v8::TracedReference<v8::Value>& handle) final {
    return false;
  }

  void ResetRoot(const v8::TracedReference<v8::Value>& handle) final {
    {
      auto* address = *reinterpret_cast<Address* const*>(&handle);
      base::LockGuard<base::Mutex> _(&mutex);
      if (auto it = unprocessed_.find(address); it != unprocessed_.end())
        unprocessed_.erase(it);
    }
    // Convention (for test): Set first field as a back pointer.
    BasicTracedReference<v8::Value>* original_handle =
        reinterpret_cast<BasicTracedReference<v8::Value>*>(
            v8::Object::GetAlignedPointerFromInternalField(
                handle.As<v8::Object>(), 0));
    original_handle->Reset();
  }

  bool TryResetRoot(const v8::TracedReference<v8::Value>& handle) final {
    base::LockGuard<base::Mutex> _(&mutex);
    unprocessed_.insert(*reinterpret_cast<Address* const*>(&handle));
    return false;
  }

  const auto& unprocessed() const { return unprocessed_; }

 private:
  base::Mutex mutex;
  std::unordered_set<Address*> unprocessed_;
};

template <typename T>
void SetupOptimizedAndNonOptimizedHandle(v8::Isolate* isolate,
                                         uint16_t optimized_class_id,
                                         T* optimized_handle,
                                         T* non_optimized_handle) {
  v8::HandleScope scope(isolate);

  v8::Local<v8::Object> optimized_object = WrapperHelper::CreateWrapper(
      isolate->GetCurrentContext(), optimized_handle, nullptr);
  EXPECT_TRUE(optimized_handle->IsEmpty());
  *optimized_handle = T(isolate, optimized_object);
  EXPECT_FALSE(optimized_handle->IsEmpty());
  optimized_handle->SetWrapperClassId(optimized_class_id);

  v8::Local<v8::Object> non_optimized_object = WrapperHelper::CreateWrapper(
      isolate->GetCurrentContext(), nullptr, nullptr);
  EXPECT_TRUE(non_optimized_handle->IsEmpty());
  *non_optimized_handle = T(isolate, non_optimized_object);
  EXPECT_FALSE(non_optimized_handle->IsEmpty());
}

}  // namespace

TEST_F(EmbedderRootsHandlerTest,
       TracedReferenceNoDestructorReclaimedOnScavenge) {
  if (v8_flags.single_generation) return;

  ManualGCScope manual_gc(i_isolate());
  v8::HandleScope scope(v8_isolate());
  DisableConservativeStackScanningScopeForTesting no_stack_scanning(heap());
  ClearingEmbedderRootsHandler handler(kClassIdToOptimize);
  TemporaryEmbedderRootsHandleScope roots_handler_scope(v8_isolate(),
                                                        &handler);

  auto* traced_handles = i_isolate()->traced_handles();
  const size_t initial_count = traced_handles->used_node_count();
  auto* optimized_handle = new v8::TracedReference<v8::Object>();
  auto* non_optimized_handle = new v8::TracedReference<v8::Object>();
  SetupOptimizedAndNonOptimizedHandle(v8_isolate(), kClassIdToOptimize,
                                      optimized_handle, non_optimized_handle);
  EXPECT_EQ(initial_count + 2, traced_handles->used_node_count());
  YoungGC();
  EXPECT_EQ(initial_count + 1, traced_handles->used_node_count());
  EXPECT_TRUE(optimized_handle->IsEmpty());
  delete optimized_handle;
  EXPECT_FALSE(non_optimized_handle->IsEmpty());
  non_optimized_handle->Reset();
  delete non_optimized_handle;
  EXPECT_EQ(initial_count, traced_handles->used_node_count());
}

namespace {

void ConstructJSObject(v8::Isolate* isolate, v8::Local<v8::Context> context,
                       v8::TracedReference<v8::Object>* handle) {
  v8::HandleScope scope(isolate);
  v8::Local<v8::Object> object(v8::Object::New(isolate));
  EXPECT_FALSE(object.IsEmpty());
  *handle = v8::TracedReference<v8::Object>(isolate, object);
  EXPECT_FALSE(handle->IsEmpty());
}

template <typename T>
void ConstructJSApiObject(v8::Isolate* isolate, v8::Local<v8::Context> context,
                          T* global) {
  v8::HandleScope scope(isolate);
  v8::Local<v8::Object> object =
      WrapperHelper::CreateWrapper(context, nullptr, nullptr);
  EXPECT_FALSE(object.IsEmpty());
  *global = T(isolate, object);
  EXPECT_FALSE(global->IsEmpty());
}

enum class SurvivalMode { kSurvives, kDies };
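
// Test harness used by the tests below: `construct_function` creates a
// TracedReference that lives on the C++ heap (conservative stack scanning is
// disabled and the reference is kept off the stack so that the test frame does
// not accidentally keep it alive), `modifier_function` adjusts the handle, and
// `gc_function` triggers the GC under test. Survival is inferred from
// TracedHandles::used_node_count() because a reclaimed reference frees its
// node without the handle itself being explicitly cleared.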
template <typename ConstructTracedReferenceFunction, typename ModifierFunction,
          typename GCFunction>
void TracedReferenceTest(v8::Isolate* isolate,
                         ConstructTracedReferenceFunction construct_function,
                         ModifierFunction modifier_function,
                         GCFunction gc_function, SurvivalMode survives) {
  auto i_isolate = reinterpret_cast<Isolate*>(isolate);
  DisableConservativeStackScanningScopeForTesting no_stack_scanning(
      i_isolate->heap());
  v8::HandleScope scope(isolate);
  auto* traced_handles = i_isolate->traced_handles();
  const size_t initial_count = traced_handles->used_node_count();
  auto gc_invisible_handle =
      std::make_unique<v8::TracedReference<v8::Object>>();
  construct_function(isolate, isolate->GetCurrentContext(),
                     gc_invisible_handle.get());
  ASSERT_TRUE(IsNewObjectInCorrectGeneration(isolate, *gc_invisible_handle));
  modifier_function(*gc_invisible_handle);
  const size_t after_modification_count = traced_handles->used_node_count();
  gc_function();
  // Cannot check the handle as it is not explicitly cleared by the GC. Instead
  // check the handles count.
  CHECK_IMPLIES(survives == SurvivalMode::kSurvives,
                after_modification_count == traced_handles->used_node_count());
  CHECK_IMPLIES(survives == SurvivalMode::kDies,
                initial_count == traced_handles->used_node_count());
}

}  // namespace

TEST_F(EmbedderRootsHandlerTest, TracedReferenceWrapperClassId) {
  ManualGCScope manual_gc(i_isolate());
  v8::HandleScope scope(v8_isolate());

  v8::TracedReference<v8::Object> traced;
  ConstructJSObject(v8_isolate(), v8_isolate()->GetCurrentContext(), &traced);
  EXPECT_EQ(0, traced.WrapperClassId());
  traced.SetWrapperClassId(17);
  EXPECT_EQ(17, traced.WrapperClassId());
}

// EmbedderRootsHandler does not affect full GCs.
TEST_F(EmbedderRootsHandlerTest,
       TracedReferenceToUnmodifiedJSObjectDiesOnFullGC) {
  // When stressing incremental marking, a write barrier may keep the object
  // alive.
  if (v8_flags.stress_incremental_marking) return;

  ClearingEmbedderRootsHandler handler(kClassIdToOptimize);
  TemporaryEmbedderRootsHandleScope roots_handler_scope(v8_isolate(),
                                                        &handler);
  TracedReferenceTest(
      v8_isolate(), ConstructJSObject,
      [](const TracedReference<v8::Object>&) {}, [this]() { FullGC(); },
      SurvivalMode::kDies);
}

// EmbedderRootsHandler does not affect full GCs.
TEST_F(
    EmbedderRootsHandlerTest,
    TracedReferenceToUnmodifiedJSObjectDiesOnFullGCEvenWhenPointeeIsHeldAlive) {
  ManualGCScope manual_gcs(i_isolate());
  ClearingEmbedderRootsHandler handler(kClassIdToOptimize);
  TemporaryEmbedderRootsHandleScope roots_handler_scope(v8_isolate(),
                                                        &handler);
  // The TracedReference itself will die as it's not found by the full GC. The
  // pointee will be kept alive through other means.
  v8::Global<v8::Object> strong_global;
  TracedReferenceTest(
      v8_isolate(), ConstructJSObject,
      [this, &strong_global](const TracedReference<v8::Object>& handle) {
        v8::HandleScope scope(v8_isolate());
        strong_global =
            v8::Global<v8::Object>(v8_isolate(), handle.Get(v8_isolate()));
      },
      [this, &strong_global]() {
        FullGC();
        strong_global.Reset();
      },
      SurvivalMode::kDies);
}

// EmbedderRootsHandler does not affect non-API objects.
TEST_F(EmbedderRootsHandlerTest,
       TracedReferenceToUnmodifiedJSObjectSurvivesYoungGC) {
  if (v8_flags.single_generation) return;

  ManualGCScope manual_gc(i_isolate());
  ClearingEmbedderRootsHandler handler(kClassIdToOptimize);
  TemporaryEmbedderRootsHandleScope roots_handler_scope(v8_isolate(),
                                                        &handler);
  TracedReferenceTest(
      v8_isolate(), ConstructJSObject,
      [](const TracedReference<v8::Object>&) {}, [this]() { YoungGC(); },
      SurvivalMode::kSurvives);
}

// EmbedderRootsHandler does not affect non-API objects, even when the handle
// has a wrapper class id that allows for reclamation.
TEST_F(
    EmbedderRootsHandlerTest,
    TracedReferenceToUnmodifiedJSObjectSurvivesYoungGCWhenExcludedFromRoots) {
  if (v8_flags.single_generation) return;

  ManualGCScope manual_gc(i_isolate());
  ClearingEmbedderRootsHandler handler(kClassIdToOptimize);
  TemporaryEmbedderRootsHandleScope roots_handler_scope(v8_isolate(),
                                                        &handler);
  TracedReferenceTest(
      v8_isolate(), ConstructJSObject,
      [](TracedReference<v8::Object>& handle) {
        handle.SetWrapperClassId(kClassIdToOptimize);
      },
      [this]() { YoungGC(); }, SurvivalMode::kSurvives);
}
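
// The remaining young-GC tests use JS API objects (wrappers created through
// WrapperHelper). The wrapper class id selects a handle for the optimization,
// since IsRoot() above returns false only for kClassIdToOptimize, while
// internal field 0 must hold the back pointer through which ResetRoot()
// clears the embedder's handle. Handles without the optimized class id are
// treated as roots and survive the scavenge.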

// EmbedderRootsHandler does not affect API objects for handles that have
// their class ids not set up.
TEST_F(EmbedderRootsHandlerTest,
       TracedReferenceToUnmodifiedJSApiObjectSurvivesScavengePerDefault) {
  if (v8_flags.single_generation) return;

  ManualGCScope manual_gc(i_isolate());
  ClearingEmbedderRootsHandler handler(kClassIdToOptimize);
  TemporaryEmbedderRootsHandleScope roots_handler_scope(v8_isolate(),
                                                        &handler);
  TracedReferenceTest(
      v8_isolate(), ConstructJSApiObject<v8::TracedReference<v8::Object>>,
      [](const TracedReference<v8::Object>&) {}, [this]() { YoungGC(); },
      SurvivalMode::kSurvives);
}

// EmbedderRootsHandler resets API objects for handles that have their class
// ids set to being optimized.
TEST_F(
    EmbedderRootsHandlerTest,
    TracedReferenceToUnmodifiedJSApiObjectDiesOnScavengeWhenExcludedFromRoots) {
  if (v8_flags.single_generation) return;

  ManualGCScope manual_gc(i_isolate());
  ClearingEmbedderRootsHandler handler(kClassIdToOptimize);
  TemporaryEmbedderRootsHandleScope roots_handler_scope(v8_isolate(),
                                                        &handler);
  TracedReferenceTest(
      v8_isolate(), ConstructJSApiObject<v8::TracedReference<v8::Object>>,
      [this](TracedReference<v8::Object>& handle) {
        handle.SetWrapperClassId(kClassIdToOptimize);
        {
          HandleScope handles(i_isolate());
          auto local = handle.Get(v8_isolate());
          local->SetAlignedPointerInInternalField(0, &handle);
        }
      },
      [this]() { YoungGC(); }, SurvivalMode::kDies);
}

// Test that concurrently processed handles are reprocessed on the main thread.
TEST_F(EmbedderRootsHandlerTest, ReprocessConcurrentHandlesOnTheMainThread) {
  static constexpr size_t kHandlesToConstruct = 1 << 14;

  if (v8_flags.single_generation) return;

  ManualGCScope manual_gc(i_isolate());
  MainThreadForwardingRootHandler handler;
  TemporaryEmbedderRootsHandleScope roots_handler_scope(v8_isolate(),
                                                        &handler);
  auto i_isolate = reinterpret_cast<Isolate*>(v8_isolate());
  DisableConservativeStackScanningScopeForTesting no_stack_scanning(
      i_isolate->heap());

  std::vector<std::unique_ptr<v8::TracedReference<v8::Object>>> traced_refs;
  {
    v8::HandleScope scope(v8_isolate());
    for (size_t i = 0; i < kHandlesToConstruct; ++i) {
      auto gc_invisible_handle =
          std::make_unique<v8::TracedReference<v8::Object>>();
      v8::Local<v8::Object> object = WrapperHelper::CreateWrapper(
          v8_isolate()->GetCurrentContext(), nullptr, nullptr);
      EXPECT_FALSE(object.IsEmpty());
      *gc_invisible_handle =
          v8::TracedReference<v8::Object>(v8_isolate(), object);
      object->SetAlignedPointerInInternalField(0, gc_invisible_handle.get());
      EXPECT_FALSE(gc_invisible_handle->IsEmpty());
      ASSERT_TRUE(
          IsNewObjectInCorrectGeneration(v8_isolate(), *gc_invisible_handle));
      traced_refs.push_back(std::move(gc_invisible_handle));
    }
  }
  YoungGC();
  // Every handle that was queued by TryResetRoot() on a concurrent thread
  // must have been reprocessed (and thus removed) on the main thread.
  EXPECT_TRUE(handler.unprocessed().empty());
}

}  // namespace v8::internal