diff --git a/impeller/renderer/backend/vulkan/command_buffer_vk.cc b/impeller/renderer/backend/vulkan/command_buffer_vk.cc index 780b423b54a41..d5186c184263c 100644 --- a/impeller/renderer/backend/vulkan/command_buffer_vk.cc +++ b/impeller/renderer/backend/vulkan/command_buffer_vk.cc @@ -155,13 +155,14 @@ bool CommandBufferVK::Track(const std::shared_ptr& texture) { fml::StatusOr CommandBufferVK::AllocateDescriptorSets( const vk::DescriptorSetLayout& layout, + uint64_t pipeline_key, const ContextVK& context) { if (!IsValid()) { return fml::Status(fml::StatusCode::kUnknown, "command encoder invalid"); } - return tracked_objects_->GetDescriptorPool().AllocateDescriptorSets(layout, - context); + return tracked_objects_->GetDescriptorPool().AllocateDescriptorSets( + layout, pipeline_key, context); } void CommandBufferVK::PushDebugGroup(std::string_view label) const { diff --git a/impeller/renderer/backend/vulkan/command_buffer_vk.h b/impeller/renderer/backend/vulkan/command_buffer_vk.h index 47ded4867d47a..85120c19211d3 100644 --- a/impeller/renderer/backend/vulkan/command_buffer_vk.h +++ b/impeller/renderer/backend/vulkan/command_buffer_vk.h @@ -74,6 +74,7 @@ class CommandBufferVK final /// @brief Allocate a new descriptor set for the given [layout]. fml::StatusOr AllocateDescriptorSets( const vk::DescriptorSetLayout& layout, + uint64_t pipeline_key, const ContextVK& context); // Visible for testing. 
diff --git a/impeller/renderer/backend/vulkan/compute_pass_vk.cc b/impeller/renderer/backend/vulkan/compute_pass_vk.cc index f4b61cdba643f..d5b6345d9c599 100644 --- a/impeller/renderer/backend/vulkan/compute_pass_vk.cc +++ b/impeller/renderer/backend/vulkan/compute_pass_vk.cc @@ -58,7 +58,8 @@ void ComputePassVK::SetPipeline( pipeline_layout_ = pipeline_vk.GetPipelineLayout(); auto descriptor_result = command_buffer_->AllocateDescriptorSets( - pipeline_vk.GetDescriptorSetLayout(), ContextVK::Cast(*context_)); + pipeline_vk.GetDescriptorSetLayout(), pipeline_vk.GetPipelineKey(), + ContextVK::Cast(*context_)); if (!descriptor_result.ok()) { return; } diff --git a/impeller/renderer/backend/vulkan/compute_pipeline_vk.cc b/impeller/renderer/backend/vulkan/compute_pipeline_vk.cc index a6806561873d8..6b647012d33a4 100644 --- a/impeller/renderer/backend/vulkan/compute_pipeline_vk.cc +++ b/impeller/renderer/backend/vulkan/compute_pipeline_vk.cc @@ -12,12 +12,14 @@ ComputePipelineVK::ComputePipelineVK( const ComputePipelineDescriptor& desc, vk::UniquePipeline pipeline, vk::UniquePipelineLayout layout, - vk::UniqueDescriptorSetLayout descriptor_set_layout) + vk::UniqueDescriptorSetLayout descriptor_set_layout, + uint64_t pipeline_key) : Pipeline(std::move(library), desc), device_holder_(std::move(device_holder)), pipeline_(std::move(pipeline)), layout_(std::move(layout)), - descriptor_set_layout_(std::move(descriptor_set_layout)) { + descriptor_set_layout_(std::move(descriptor_set_layout)), + pipeline_key_(pipeline_key) { is_valid_ = pipeline_ && layout_ && descriptor_set_layout_; } @@ -51,4 +53,8 @@ const vk::DescriptorSetLayout& ComputePipelineVK::GetDescriptorSetLayout() return *descriptor_set_layout_; } +uint64_t ComputePipelineVK::GetPipelineKey() const { + return pipeline_key_; +} + } // namespace impeller diff --git a/impeller/renderer/backend/vulkan/compute_pipeline_vk.h b/impeller/renderer/backend/vulkan/compute_pipeline_vk.h index 1516d0eeb9fbf..a86cc601aae90 100644 
--- a/impeller/renderer/backend/vulkan/compute_pipeline_vk.h +++ b/impeller/renderer/backend/vulkan/compute_pipeline_vk.h @@ -24,7 +24,8 @@ class ComputePipelineVK final const ComputePipelineDescriptor& desc, vk::UniquePipeline pipeline, vk::UniquePipelineLayout layout, - vk::UniqueDescriptorSetLayout descriptor_set_layout); + vk::UniqueDescriptorSetLayout descriptor_set_layout, + uint64_t pipeline_key); // |Pipeline| ~ComputePipelineVK() override; @@ -35,6 +36,8 @@ class ComputePipelineVK final const vk::DescriptorSetLayout& GetDescriptorSetLayout() const; + uint64_t GetPipelineKey() const; + private: friend class PipelineLibraryVK; @@ -42,6 +45,7 @@ class ComputePipelineVK final vk::UniquePipeline pipeline_; vk::UniquePipelineLayout layout_; vk::UniqueDescriptorSetLayout descriptor_set_layout_; + uint64_t pipeline_key_; bool is_valid_ = false; // |Pipeline| diff --git a/impeller/renderer/backend/vulkan/context_vk.cc b/impeller/renderer/backend/vulkan/context_vk.cc index c72f944f48fb6..3e3cec9113e01 100644 --- a/impeller/renderer/backend/vulkan/context_vk.cc +++ b/impeller/renderer/backend/vulkan/context_vk.cc @@ -537,9 +537,8 @@ std::shared_ptr ContextVK::CreateCommandBuffer() const { DescriptorPoolMap::iterator current_pool = cached_descriptor_pool_.find(std::this_thread::get_id()); if (current_pool == cached_descriptor_pool_.end()) { - descriptor_pool = - (cached_descriptor_pool_[std::this_thread::get_id()] = - std::make_shared(weak_from_this())); + descriptor_pool = (cached_descriptor_pool_[std::this_thread::get_id()] = + descriptor_pool_recycler_->GetDescriptorPool()); } else { descriptor_pool = current_pool->second; } diff --git a/impeller/renderer/backend/vulkan/descriptor_pool_vk.cc b/impeller/renderer/backend/vulkan/descriptor_pool_vk.cc index c7545ea9a156d..77c5cbfd1bc7c 100644 --- a/impeller/renderer/backend/vulkan/descriptor_pool_vk.cc +++ b/impeller/renderer/backend/vulkan/descriptor_pool_vk.cc @@ -4,10 +4,7 @@ #include 
"impeller/renderer/backend/vulkan/descriptor_pool_vk.h" -#include - #include "impeller/base/validation.h" -#include "impeller/renderer/backend/vulkan/resource_manager_vk.h" #include "vulkan/vulkan_enums.hpp" #include "vulkan/vulkan_handles.hpp" @@ -29,44 +26,20 @@ static const constexpr DescriptorPoolSize kDefaultBindingSize = .subpass_bindings = 4u // Subpass Bindings }; -// Holds the command pool in a background thread, recyling it when not in use. -class BackgroundDescriptorPoolVK final { - public: - BackgroundDescriptorPoolVK(BackgroundDescriptorPoolVK&&) = default; - - explicit BackgroundDescriptorPoolVK( - vk::UniqueDescriptorPool&& pool, - std::weak_ptr recycler) - : pool_(std::move(pool)), recycler_(std::move(recycler)) {} - - ~BackgroundDescriptorPoolVK() { - auto const recycler = recycler_.lock(); - - // Not only does this prevent recycling when the context is being destroyed, - // but it also prevents the destructor from effectively being called twice; - // once for the original BackgroundCommandPoolVK() and once for the moved - // BackgroundCommandPoolVK(). 
- if (!recycler) { - return; - } - - recycler->Reclaim(std::move(pool_)); - } - - private: - BackgroundDescriptorPoolVK(const BackgroundDescriptorPoolVK&) = delete; - - BackgroundDescriptorPoolVK& operator=(const BackgroundDescriptorPoolVK&) = - delete; - - vk::UniqueDescriptorPool pool_; - uint32_t allocated_capacity_; - std::weak_ptr recycler_; -}; - DescriptorPoolVK::DescriptorPoolVK(std::weak_ptr context) : context_(std::move(context)) {} +void DescriptorPoolVK::Destroy() { + pools_.clear(); +} + +DescriptorPoolVK::DescriptorPoolVK(std::weak_ptr context, + DescriptorCacheMap descriptor_sets, + std::vector pools) + : context_(std::move(context)), + descriptor_sets_(std::move(descriptor_sets)), + pools_(std::move(pools)) {} + DescriptorPoolVK::~DescriptorPoolVK() { if (pools_.empty()) { return; @@ -81,19 +54,21 @@ DescriptorPoolVK::~DescriptorPoolVK() { return; } - for (auto i = 0u; i < pools_.size(); i++) { - auto reset_pool_when_dropped = - BackgroundDescriptorPoolVK(std::move(pools_[i]), recycler); - - UniqueResourceVKT pool( - context->GetResourceManager(), std::move(reset_pool_when_dropped)); - } - pools_.clear(); + recycler->Reclaim(std::move(descriptor_sets_), std::move(pools_)); } fml::StatusOr DescriptorPoolVK::AllocateDescriptorSets( const vk::DescriptorSetLayout& layout, + uint64_t pipeline_key, const ContextVK& context_vk) { + auto existing = descriptor_sets_.find(pipeline_key); + if (existing != descriptor_sets_.end() && !existing->second.unused.empty()) { + auto descriptor_set = existing->second.unused.back(); + existing->second.unused.pop_back(); + existing->second.used.push_back(descriptor_set); + return descriptor_set; + } + if (pools_.empty()) { CreateNewPool(context_vk); } @@ -111,6 +86,10 @@ fml::StatusOr DescriptorPoolVK::AllocateDescriptorSets( set_info.setDescriptorPool(pools_.back().get()); result = context_vk.GetDevice().allocateDescriptorSets(&set_info, &set); } + if (existing == descriptor_sets_.end()) { + descriptor_sets_[pipeline_key] 
= DescriptorCache{}; + } + descriptor_sets_[pipeline_key].used.push_back(set); if (result != vk::Result::eSuccess) { VALIDATION_LOG << "Could not allocate descriptor sets: " @@ -130,30 +109,47 @@ fml::Status DescriptorPoolVK::CreateNewPool(const ContextVK& context_vk) { return fml::Status(); } -void DescriptorPoolRecyclerVK::Reclaim(vk::UniqueDescriptorPool&& pool) { +void DescriptorPoolRecyclerVK::Reclaim( + DescriptorCacheMap descriptor_sets, + std::vector pools) { // Reset the pool on a background thread. auto strong_context = context_.lock(); if (!strong_context) { return; } - auto device = strong_context->GetDevice(); - device.resetDescriptorPool(pool.get()); + for (auto& [_, cache] : descriptor_sets) { + cache.unused.insert(cache.unused.end(), cache.used.begin(), + cache.used.end()); + cache.used.clear(); + } - // Move the pool to the recycled list. + // Move the pool to the recycled list. If more than 32 pools are + cached then delete the oldest entry. Lock recycled_lock(recycled_mutex_); + if (recycled_.size() >= kMaxRecycledPools) { + auto& front = recycled_.front(); + front->Destroy(); + recycled_.erase(recycled_.begin()); + } + recycled_.push_back(std::make_shared( + context_, std::move(descriptor_sets), std::move(pools))); +} - if (recycled_.size() < kMaxRecycledPools) { - recycled_.push_back(std::move(pool)); - return; +std::shared_ptr +DescriptorPoolRecyclerVK::GetDescriptorPool() { + { + Lock recycled_lock(recycled_mutex_); + if (!recycled_.empty()) { + auto result = recycled_.back(); + recycled_.pop_back(); + return result; + } } + return std::make_shared(context_); } vk::UniqueDescriptorPool DescriptorPoolRecyclerVK::Get() { // Recycle a pool with a matching minumum capcity if it is available. 
- auto recycled_pool = Reuse(); - if (recycled_pool.has_value()) { - return std::move(recycled_pool.value()); - } return Create(); } @@ -187,15 +183,4 @@ vk::UniqueDescriptorPool DescriptorPoolRecyclerVK::Create() { return std::move(pool); } -std::optional DescriptorPoolRecyclerVK::Reuse() { - Lock lock(recycled_mutex_); - if (recycled_.empty()) { - return std::nullopt; - } - - auto recycled = std::move(recycled_[recycled_.size() - 1]); - recycled_.pop_back(); - return recycled; -} - } // namespace impeller diff --git a/impeller/renderer/backend/vulkan/descriptor_pool_vk.h b/impeller/renderer/backend/vulkan/descriptor_pool_vk.h index 53f35a94ab890..defa1846cd8ee 100644 --- a/impeller/renderer/backend/vulkan/descriptor_pool_vk.h +++ b/impeller/renderer/backend/vulkan/descriptor_pool_vk.h @@ -6,6 +6,7 @@ #define FLUTTER_IMPELLER_RENDERER_BACKEND_VULKAN_DESCRIPTOR_POOL_VK_H_ #include +#include #include "fml/status_or.h" #include "impeller/renderer/backend/vulkan/context_vk.h" @@ -13,6 +14,13 @@ namespace impeller { +struct DescriptorCache { + std::vector unused; + std::vector used; +}; + +using DescriptorCacheMap = std::unordered_map; + //------------------------------------------------------------------------------ /// @brief A per-frame descriptor pool. Descriptors /// from this pool don't need to be freed individually. 
Instead, the @@ -28,18 +36,28 @@ class DescriptorPoolVK { public: explicit DescriptorPoolVK(std::weak_ptr context); + DescriptorPoolVK(std::weak_ptr context, + DescriptorCacheMap descriptor_sets, + std::vector pools); + ~DescriptorPoolVK(); fml::StatusOr AllocateDescriptorSets( const vk::DescriptorSetLayout& layout, + uint64_t pipeline_key, const ContextVK& context_vk); private: + friend class DescriptorPoolRecyclerVK; + std::weak_ptr context_; + DescriptorCacheMap descriptor_sets_; std::vector pools_; fml::Status CreateNewPool(const ContextVK& context_vk); + void Destroy(); + DescriptorPoolVK(const DescriptorPoolVK&) = delete; DescriptorPoolVK& operator=(const DescriptorPoolVK&) = delete; @@ -68,17 +86,16 @@ class DescriptorPoolRecyclerVK final /// the necessary capacity. vk::UniqueDescriptorPool Get(); - /// @brief Returns the descriptor pool to be reset on a background - /// thread. - /// - /// @param[in] pool The pool to recycler. - void Reclaim(vk::UniqueDescriptorPool&& pool); + std::shared_ptr GetDescriptorPool(); + + void Reclaim(DescriptorCacheMap descriptor_sets, + std::vector pools); private: std::weak_ptr context_; Mutex recycled_mutex_; - std::vector recycled_ IPLR_GUARDED_BY( + std::vector> recycled_ IPLR_GUARDED_BY( recycled_mutex_); /// @brief Creates a new |vk::CommandPool|. @@ -86,11 +103,6 @@ class DescriptorPoolRecyclerVK final /// @returns Returns a |std::nullopt| if a pool could not be created. vk::UniqueDescriptorPool Create(); - /// @brief Reuses a recycled |vk::CommandPool|, if available. - /// - /// @returns Returns a |std::nullopt| if a pool was not available. 
- std::optional Reuse(); - DescriptorPoolRecyclerVK(const DescriptorPoolRecyclerVK&) = delete; DescriptorPoolRecyclerVK& operator=(const DescriptorPoolRecyclerVK&) = delete; diff --git a/impeller/renderer/backend/vulkan/descriptor_pool_vk_unittests.cc b/impeller/renderer/backend/vulkan/descriptor_pool_vk_unittests.cc index fea47418dcdac..2978b09a831aa 100644 --- a/impeller/renderer/backend/vulkan/descriptor_pool_vk_unittests.cc +++ b/impeller/renderer/backend/vulkan/descriptor_pool_vk_unittests.cc @@ -3,11 +3,8 @@ // found in the LICENSE file. #include "flutter/testing/testing.h" // IWYU pragma: keep. -#include "fml/closure.h" -#include "fml/synchronization/waitable_event.h" #include "impeller/renderer/backend/vulkan/command_buffer_vk.h" #include "impeller/renderer/backend/vulkan/descriptor_pool_vk.h" -#include "impeller/renderer/backend/vulkan/resource_manager_vk.h" #include "impeller/renderer/backend/vulkan/test/mock_vulkan.h" namespace impeller { @@ -31,29 +28,10 @@ TEST(DescriptorPoolRecyclerVKTest, ReclaimMakesDescriptorPoolAvailable) { { // Fetch a pool (which will be created). auto pool = DescriptorPoolVK(context); - pool.AllocateDescriptorSets({}, *context); + pool.AllocateDescriptorSets({}, 0, *context); } - // There is a chance that the first death rattle item below is destroyed in - // the same reclaim cycle as the pool allocation above. These items are placed - // into a std::vector and free'd, which may free in reverse order. That would - // imply that the death rattle and subsequent waitable event fires before the - // pool is reset. To work around this, we can either manually remove items - // from the vector or use two death rattles. - for (auto i = 0u; i < 2; i++) { - // Add something to the resource manager and have it notify us when it's - // destroyed. That should give us a non-flaky signal that the pool has been - // reclaimed as well. 
- auto waiter = fml::AutoResetWaitableEvent(); - auto rattle = fml::ScopedCleanupClosure([&waiter]() { waiter.Signal(); }); - { - UniqueResourceVKT resource( - context->GetResourceManager(), std::move(rattle)); - } - waiter.Wait(); - } - - auto const pool = context->GetDescriptorPoolRecycler()->Get(); + auto const pool = context->GetDescriptorPoolRecycler()->GetDescriptorPool(); // Now check that we only ever created one pool. auto const called = GetMockVulkanFunctions(context->GetDevice()); @@ -71,50 +49,27 @@ TEST(DescriptorPoolRecyclerVKTest, ReclaimDropsDescriptorPoolIfSizeIsExceeded) { std::vector> pools; for (auto i = 0u; i < 33; i++) { auto pool = std::make_unique(context); - pool->AllocateDescriptorSets({}, *context); + pool->AllocateDescriptorSets({}, 0, *context); pools.push_back(std::move(pool)); } } - // See note above. - for (auto i = 0u; i < 2; i++) { - auto waiter = fml::AutoResetWaitableEvent(); - auto rattle = fml::ScopedCleanupClosure([&waiter]() { waiter.Signal(); }); - { - UniqueResourceVKT resource( - context->GetResourceManager(), std::move(rattle)); - } - waiter.Wait(); - } - auto const called = GetMockVulkanFunctions(context->GetDevice()); EXPECT_EQ( std::count(called->begin(), called->end(), "vkCreateDescriptorPool"), 33u); - EXPECT_EQ(std::count(called->begin(), called->end(), "vkResetDescriptorPool"), - 33u); // Now create 33 more descriptor pools and observe that only one more is // allocated. 
{ - std::vector> pools; + std::vector> pools; for (auto i = 0u; i < 33; i++) { - auto pool = std::make_unique(context); - pool->AllocateDescriptorSets({}, *context); + auto pool = context->GetDescriptorPoolRecycler()->GetDescriptorPool(); + pool->AllocateDescriptorSets({}, 0, *context); pools.push_back(std::move(pool)); } } - for (auto i = 0u; i < 2; i++) { - auto waiter = fml::AutoResetWaitableEvent(); - auto rattle = fml::ScopedCleanupClosure([&waiter]() { waiter.Signal(); }); - { - UniqueResourceVKT resource( - context->GetResourceManager(), std::move(rattle)); - } - waiter.Wait(); - } - auto const called_twice = GetMockVulkanFunctions(context->GetDevice()); // 32 of the descriptor pools were recycled, so only one more is created. EXPECT_EQ( diff --git a/impeller/renderer/backend/vulkan/pipeline_library_vk.cc b/impeller/renderer/backend/vulkan/pipeline_library_vk.cc index daa57a37139c1..7976a105ef03d 100644 --- a/impeller/renderer/backend/vulkan/pipeline_library_vk.cc +++ b/impeller/renderer/backend/vulkan/pipeline_library_vk.cc @@ -4,22 +4,16 @@ #include "impeller/renderer/backend/vulkan/pipeline_library_vk.h" -#include #include -#include -#include #include "flutter/fml/container.h" #include "flutter/fml/trace_event.h" #include "impeller/base/promise.h" -#include "impeller/base/timing.h" #include "impeller/base/validation.h" #include "impeller/renderer/backend/vulkan/context_vk.h" #include "impeller/renderer/backend/vulkan/formats_vk.h" #include "impeller/renderer/backend/vulkan/pipeline_vk.h" #include "impeller/renderer/backend/vulkan/shader_function_vk.h" -#include "impeller/renderer/backend/vulkan/vertex_descriptor_vk.h" -#include "vulkan/vulkan_core.h" #include "vulkan/vulkan_enums.hpp" namespace impeller { @@ -151,7 +145,8 @@ std::unique_ptr PipelineLibraryVK::CreateComputePipeline( desc, // std::move(pipeline), // std::move(pipeline_layout.value), // - std::move(descs_layout) // + std::move(descs_layout), // + pipeline_key_++ // ); } @@ -179,7 +174,8 @@ 
PipelineFuture PipelineLibraryVK::GetPipeline( auto weak_this = weak_from_this(); - auto generation_task = [descriptor, weak_this, promise]() { + uint64_t next_key = pipeline_key_++; + auto generation_task = [descriptor, weak_this, promise, next_key]() { auto thiz = weak_this.lock(); if (!thiz) { promise->set_value(nullptr); @@ -191,8 +187,8 @@ PipelineFuture PipelineLibraryVK::GetPipeline( promise->set_value(PipelineVK::Create( descriptor, // PipelineLibraryVK::Cast(*thiz).device_holder_.lock(), // - weak_this // - )); + weak_this, // + next_key)); }; if (async) { diff --git a/impeller/renderer/backend/vulkan/pipeline_library_vk.h b/impeller/renderer/backend/vulkan/pipeline_library_vk.h index 4396cdcab867e..da5ff5169873f 100644 --- a/impeller/renderer/backend/vulkan/pipeline_library_vk.h +++ b/impeller/renderer/backend/vulkan/pipeline_library_vk.h @@ -46,6 +46,8 @@ class PipelineLibraryVK final ComputePipelineMap compute_pipelines_ IPLR_GUARDED_BY( compute_pipelines_mutex_); std::atomic_size_t frames_acquired_ = 0u; + uint64_t pipeline_key_ = 0; + bool is_valid_ = false; bool cache_dirty_ = false; diff --git a/impeller/renderer/backend/vulkan/pipeline_vk.cc b/impeller/renderer/backend/vulkan/pipeline_vk.cc index 4ad838498244a..eb2b24360a65a 100644 --- a/impeller/renderer/backend/vulkan/pipeline_vk.cc +++ b/impeller/renderer/backend/vulkan/pipeline_vk.cc @@ -464,6 +464,7 @@ std::unique_ptr PipelineVK::Create( const PipelineDescriptor& desc, const std::shared_ptr& device_holder, const std::weak_ptr& weak_library, + uint64_t pipeline_key, std::shared_ptr immutable_sampler) { TRACE_EVENT1("flutter", "PipelineVK::Create", "Name", desc.GetLabel().data()); @@ -509,7 +510,8 @@ std::unique_ptr PipelineVK::Create( std::move(render_pass), // std::move(pipeline_layout.value()), // std::move(descs_layout.value()), // - std::move(immutable_sampler) // + pipeline_key, + std::move(immutable_sampler) // )); if (!pipeline_vk->IsValid()) { VALIDATION_LOG << "Could not create a valid 
pipeline."; @@ -525,6 +527,7 @@ PipelineVK::PipelineVK(std::weak_ptr device_holder, vk::UniqueRenderPass render_pass, vk::UniquePipelineLayout layout, vk::UniqueDescriptorSetLayout descriptor_set_layout, + uint64_t pipeline_key, std::shared_ptr immutable_sampler) : Pipeline(std::move(library), desc), device_holder_(std::move(device_holder)), @@ -532,7 +535,8 @@ PipelineVK::PipelineVK(std::weak_ptr device_holder, render_pass_(std::move(render_pass)), layout_(std::move(layout)), descriptor_set_layout_(std::move(descriptor_set_layout)), - immutable_sampler_(std::move(immutable_sampler)) { + immutable_sampler_(std::move(immutable_sampler)), + pipeline_key_(pipeline_key) { is_valid_ = pipeline_ && render_pass_ && layout_ && descriptor_set_layout_; } @@ -577,7 +581,8 @@ std::shared_ptr PipelineVK::CreateVariantForImmutableSamplers( return nullptr; } return (immutable_sampler_variants_[cache_key] = - Create(desc_, device_holder, library_, immutable_sampler)); + Create(desc_, device_holder, library_, pipeline_key_, + immutable_sampler)); } } // namespace impeller diff --git a/impeller/renderer/backend/vulkan/pipeline_vk.h b/impeller/renderer/backend/vulkan/pipeline_vk.h index 5ab4feccfde89..877d2d01c0a9d 100644 --- a/impeller/renderer/backend/vulkan/pipeline_vk.h +++ b/impeller/renderer/backend/vulkan/pipeline_vk.h @@ -33,6 +33,7 @@ class PipelineVK final const PipelineDescriptor& desc, const std::shared_ptr& device_holder, const std::weak_ptr& weak_library, + uint64_t pipeline_key, std::shared_ptr immutable_sampler = {}); // |Pipeline| @@ -47,6 +48,8 @@ class PipelineVK final std::shared_ptr CreateVariantForImmutableSamplers( const std::shared_ptr& immutable_sampler) const; + uint64_t GetPipelineKey() const { return pipeline_key_; } + private: friend class PipelineLibraryVK; @@ -62,6 +65,7 @@ class PipelineVK final vk::UniquePipelineLayout layout_; vk::UniqueDescriptorSetLayout descriptor_set_layout_; std::shared_ptr immutable_sampler_; + uint64_t pipeline_key_; mutable 
Mutex immutable_sampler_variants_mutex_; mutable ImmutableSamplerVariants immutable_sampler_variants_ IPLR_GUARDED_BY( immutable_sampler_variants_mutex_); @@ -74,6 +78,7 @@ class PipelineVK final vk::UniqueRenderPass render_pass, vk::UniquePipelineLayout layout, vk::UniqueDescriptorSetLayout descriptor_set_layout, + uint64_t pipeline_key, std::shared_ptr immutable_sampler); // |Pipeline| diff --git a/impeller/renderer/backend/vulkan/render_pass_vk.cc b/impeller/renderer/backend/vulkan/render_pass_vk.cc index f8a169adfb670..ed785803f4fd5 100644 --- a/impeller/renderer/backend/vulkan/render_pass_vk.cc +++ b/impeller/renderer/backend/vulkan/render_pass_vk.cc @@ -479,7 +479,8 @@ fml::Status RenderPassVK::Draw() { const auto& pipeline_vk = PipelineVK::Cast(*pipeline_); auto descriptor_result = command_buffer_->AllocateDescriptorSets( - pipeline_vk.GetDescriptorSetLayout(), context_vk); + pipeline_vk.GetDescriptorSetLayout(), pipeline_vk.GetPipelineKey(), + context_vk); if (!descriptor_result.ok()) { return fml::Status(fml::StatusCode::kAborted, "Could not allocate descriptor sets.");