Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

[Impeller] cache descriptor set layouts. #57103

Closed
wants to merge 2 commits into from
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
5 changes: 3 additions & 2 deletions impeller/renderer/backend/vulkan/command_buffer_vk.cc
Original file line number Diff line number Diff line change
Expand Up @@ -155,13 +155,14 @@ bool CommandBufferVK::Track(const std::shared_ptr<const Texture>& texture) {

/// @brief Allocate a descriptor set compatible with [layout].
///
/// @param layout        The descriptor set layout the set must match.
/// @param pipeline_key  Stable key identifying the pipeline; lets the
///                      per-frame descriptor pool recycle sets previously
///                      allocated for the same pipeline.
/// @param context       The Vulkan context used for the allocation.
///
/// @return The descriptor set, or an error status if this command buffer is
///         no longer valid.
fml::StatusOr<vk::DescriptorSet> CommandBufferVK::AllocateDescriptorSets(
    const vk::DescriptorSetLayout& layout,
    uint64_t pipeline_key,
    const ContextVK& context) {
  if (!IsValid()) {
    return fml::Status(fml::StatusCode::kUnknown, "command encoder invalid");
  }

  // NOTE: the diff text carried both the old two-argument call and this new
  // three-argument call; only the pipeline_key-aware overload is kept.
  return tracked_objects_->GetDescriptorPool().AllocateDescriptorSets(
      layout, pipeline_key, context);
}

void CommandBufferVK::PushDebugGroup(std::string_view label) const {
Expand Down
1 change: 1 addition & 0 deletions impeller/renderer/backend/vulkan/command_buffer_vk.h
Original file line number Diff line number Diff line change
Expand Up @@ -74,6 +74,7 @@ class CommandBufferVK final
/// @brief Allocate a new descriptor set for the given [layout].
fml::StatusOr<vk::DescriptorSet> AllocateDescriptorSets(
const vk::DescriptorSetLayout& layout,
uint64_t pipeline_key,
const ContextVK& context);

// Visible for testing.
Expand Down
3 changes: 2 additions & 1 deletion impeller/renderer/backend/vulkan/compute_pass_vk.cc
Original file line number Diff line number Diff line change
Expand Up @@ -58,7 +58,8 @@ void ComputePassVK::SetPipeline(
pipeline_layout_ = pipeline_vk.GetPipelineLayout();

auto descriptor_result = command_buffer_->AllocateDescriptorSets(
pipeline_vk.GetDescriptorSetLayout(), ContextVK::Cast(*context_));
pipeline_vk.GetDescriptorSetLayout(), pipeline_vk.GetPipelineKey(),
ContextVK::Cast(*context_));
if (!descriptor_result.ok()) {
return;
}
Expand Down
10 changes: 8 additions & 2 deletions impeller/renderer/backend/vulkan/compute_pipeline_vk.cc
Original file line number Diff line number Diff line change
Expand Up @@ -12,12 +12,14 @@ ComputePipelineVK::ComputePipelineVK(
const ComputePipelineDescriptor& desc,
vk::UniquePipeline pipeline,
vk::UniquePipelineLayout layout,
vk::UniqueDescriptorSetLayout descriptor_set_layout)
vk::UniqueDescriptorSetLayout descriptor_set_layout,
uint64_t pipeline_key)
: Pipeline(std::move(library), desc),
device_holder_(std::move(device_holder)),
pipeline_(std::move(pipeline)),
layout_(std::move(layout)),
descriptor_set_layout_(std::move(descriptor_set_layout)) {
descriptor_set_layout_(std::move(descriptor_set_layout)),
pipeline_key_(pipeline_key) {
is_valid_ = pipeline_ && layout_ && descriptor_set_layout_;
}

Expand Down Expand Up @@ -51,4 +53,8 @@ const vk::DescriptorSetLayout& ComputePipelineVK::GetDescriptorSetLayout()
return *descriptor_set_layout_;
}

// Returns the stable key identifying this pipeline. Callers (e.g. compute
// passes) pass it to AllocateDescriptorSets so the descriptor pool can reuse
// sets cached for this pipeline.
uint64_t ComputePipelineVK::GetPipelineKey() const {
return pipeline_key_;
}

} // namespace impeller
6 changes: 5 additions & 1 deletion impeller/renderer/backend/vulkan/compute_pipeline_vk.h
Original file line number Diff line number Diff line change
Expand Up @@ -24,7 +24,8 @@ class ComputePipelineVK final
const ComputePipelineDescriptor& desc,
vk::UniquePipeline pipeline,
vk::UniquePipelineLayout layout,
vk::UniqueDescriptorSetLayout descriptor_set_layout);
vk::UniqueDescriptorSetLayout descriptor_set_layout,
uint64_t pipeline_key);

// |Pipeline|
~ComputePipelineVK() override;
Expand All @@ -35,13 +36,16 @@ class ComputePipelineVK final

const vk::DescriptorSetLayout& GetDescriptorSetLayout() const;

uint64_t GetPipelineKey() const;

private:
friend class PipelineLibraryVK;

std::weak_ptr<DeviceHolderVK> device_holder_;
vk::UniquePipeline pipeline_;
vk::UniquePipelineLayout layout_;
vk::UniqueDescriptorSetLayout descriptor_set_layout_;
uint64_t pipeline_key_;
bool is_valid_ = false;

// |Pipeline|
Expand Down
5 changes: 2 additions & 3 deletions impeller/renderer/backend/vulkan/context_vk.cc
Original file line number Diff line number Diff line change
Expand Up @@ -537,9 +537,8 @@ std::shared_ptr<CommandBuffer> ContextVK::CreateCommandBuffer() const {
DescriptorPoolMap::iterator current_pool =
cached_descriptor_pool_.find(std::this_thread::get_id());
if (current_pool == cached_descriptor_pool_.end()) {
descriptor_pool =
(cached_descriptor_pool_[std::this_thread::get_id()] =
std::make_shared<DescriptorPoolVK>(weak_from_this()));
descriptor_pool = (cached_descriptor_pool_[std::this_thread::get_id()] =
descriptor_pool_recycler_->GetDescriptorPool());
} else {
descriptor_pool = current_pool->second;
}
Expand Down
121 changes: 53 additions & 68 deletions impeller/renderer/backend/vulkan/descriptor_pool_vk.cc
Original file line number Diff line number Diff line change
Expand Up @@ -4,10 +4,7 @@

#include "impeller/renderer/backend/vulkan/descriptor_pool_vk.h"

#include <optional>

#include "impeller/base/validation.h"
#include "impeller/renderer/backend/vulkan/resource_manager_vk.h"
#include "vulkan/vulkan_enums.hpp"
#include "vulkan/vulkan_handles.hpp"

Expand All @@ -29,44 +26,20 @@ static const constexpr DescriptorPoolSize kDefaultBindingSize =
.subpass_bindings = 4u // Subpass Bindings
};

// Holds a descriptor pool in a background thread, recycling it when no
// longer in use.
class BackgroundDescriptorPoolVK final {
 public:
  BackgroundDescriptorPoolVK(BackgroundDescriptorPoolVK&&) = default;

  // Takes ownership of [pool]; the pool is handed back to [recycler] (if it
  // is still alive) when this object is destroyed.
  explicit BackgroundDescriptorPoolVK(
      vk::UniqueDescriptorPool&& pool,
      std::weak_ptr<DescriptorPoolRecyclerVK> recycler)
      : pool_(std::move(pool)), recycler_(std::move(recycler)) {}

  ~BackgroundDescriptorPoolVK() {
    auto const recycler = recycler_.lock();

    // Not only does this prevent recycling when the context is being
    // destroyed, but it also prevents the destructor from effectively being
    // called twice; once for the original BackgroundDescriptorPoolVK() and
    // once for the moved BackgroundDescriptorPoolVK() (moving the weak_ptr
    // leaves the source empty, so its lock() fails here).
    if (!recycler) {
      return;
    }

    recycler->Reclaim(std::move(pool_));
  }

 private:
  BackgroundDescriptorPoolVK(const BackgroundDescriptorPoolVK&) = delete;

  BackgroundDescriptorPoolVK& operator=(const BackgroundDescriptorPoolVK&) =
      delete;

  // Removed: `uint32_t allocated_capacity_;` — it was never initialized,
  // never read, and never written.
  vk::UniqueDescriptorPool pool_;
  std::weak_ptr<DescriptorPoolRecyclerVK> recycler_;
};

// Creates an empty per-frame pool bound to [context]. Backing
// vk::DescriptorPools are created lazily on the first allocation.
DescriptorPoolVK::DescriptorPoolVK(std::weak_ptr<const ContextVK> context)
: context_(std::move(context)) {}

// Releases the backing vk::DescriptorPool handles. Called by the recycler
// when evicting this entry from its cache; clearing pools_ also makes the
// destructor an early-return no-op, so an evicted pool is not reclaimed a
// second time.
void DescriptorPoolVK::Destroy() {
pools_.clear();
}

// Used by DescriptorPoolRecyclerVK to resurrect a previously reclaimed pool
// together with its cached descriptor sets, so subsequent allocations for
// the same pipeline keys can reuse them.
DescriptorPoolVK::DescriptorPoolVK(std::weak_ptr<const ContextVK> context,
DescriptorCacheMap descriptor_sets,
std::vector<vk::UniqueDescriptorPool> pools)
: context_(std::move(context)),
descriptor_sets_(std::move(descriptor_sets)),
pools_(std::move(pools)) {}

DescriptorPoolVK::~DescriptorPoolVK() {
if (pools_.empty()) {
return;
Expand All @@ -81,19 +54,21 @@ DescriptorPoolVK::~DescriptorPoolVK() {
return;
}

for (auto i = 0u; i < pools_.size(); i++) {
auto reset_pool_when_dropped =
BackgroundDescriptorPoolVK(std::move(pools_[i]), recycler);

UniqueResourceVKT<BackgroundDescriptorPoolVK> pool(
context->GetResourceManager(), std::move(reset_pool_when_dropped));
}
pools_.clear();
recycler->Reclaim(std::move(descriptor_sets_), std::move(pools_));
}

fml::StatusOr<vk::DescriptorSet> DescriptorPoolVK::AllocateDescriptorSets(
const vk::DescriptorSetLayout& layout,
uint64_t pipeline_key,
const ContextVK& context_vk) {
auto existing = descriptor_sets_.find(pipeline_key);
if (existing != descriptor_sets_.end() && !existing->second.unused.empty()) {
auto descriptor_set = existing->second.unused.back();
existing->second.unused.pop_back();
existing->second.used.push_back(descriptor_set);
return descriptor_set;
}

if (pools_.empty()) {
CreateNewPool(context_vk);
}
Expand All @@ -111,6 +86,10 @@ fml::StatusOr<vk::DescriptorSet> DescriptorPoolVK::AllocateDescriptorSets(
set_info.setDescriptorPool(pools_.back().get());
result = context_vk.GetDevice().allocateDescriptorSets(&set_info, &set);
}
if (existing == descriptor_sets_.end()) {
descriptor_sets_[pipeline_key] = DescriptorCache{};
}
descriptor_sets_[pipeline_key].used.push_back(set);

if (result != vk::Result::eSuccess) {
VALIDATION_LOG << "Could not allocate descriptor sets: "
Expand All @@ -130,30 +109,47 @@ fml::Status DescriptorPoolVK::CreateNewPool(const ContextVK& context_vk) {
return fml::Status();
}

void DescriptorPoolRecyclerVK::Reclaim(vk::UniqueDescriptorPool&& pool) {
void DescriptorPoolRecyclerVK::Reclaim(
DescriptorCacheMap descriptor_sets,
std::vector<vk::UniqueDescriptorPool> pools) {
// Reset the pool on a background thread.
auto strong_context = context_.lock();
if (!strong_context) {
return;
}
auto device = strong_context->GetDevice();
device.resetDescriptorPool(pool.get());
for (auto& [_, cache] : descriptor_sets) {
cache.unused.insert(cache.unused.end(), cache.used.begin(),
cache.used.end());
cache.used.clear();
}

// Move the pool to the recycled list.
// Move the pool to the recycled list. If more than 32 pool are
// cached then delete the oldest entry.
Lock recycled_lock(recycled_mutex_);
if (recycled_.size() >= kMaxRecycledPools) {
auto& front = recycled_.front();
front->Destroy();
recycled_.erase(recycled_.begin());
}
recycled_.push_back(std::make_shared<DescriptorPoolVK>(
context_, std::move(descriptor_sets), std::move(pools)));
}

if (recycled_.size() < kMaxRecycledPools) {
recycled_.push_back(std::move(pool));
return;
// Hands out a descriptor pool for a new frame: prefer the most recently
// reclaimed pool (with its warm descriptor-set cache), otherwise create a
// fresh, empty one.
std::shared_ptr<DescriptorPoolVK>
DescriptorPoolRecyclerVK::GetDescriptorPool() {
  {
    // Scope the lock so it is released before the fallback allocation below.
    Lock recycled_lock(recycled_mutex_);
    if (!recycled_.empty()) {
      std::shared_ptr<DescriptorPoolVK> reused = recycled_.back();
      recycled_.pop_back();
      return reused;
    }
  }
  return std::make_shared<DescriptorPoolVK>(context_);
}

// Returns a raw vk::UniqueDescriptorPool, reusing a recycled one when
// available and creating a new one otherwise.
// NOTE(review): this still calls Reuse(), which another hunk of this diff
// removes — confirm which version of Get() is intended to survive.
vk::UniqueDescriptorPool DescriptorPoolRecyclerVK::Get() {
// Recycle a pool with a matching minimum capacity if it is available.
auto recycled_pool = Reuse();
if (recycled_pool.has_value()) {
return std::move(recycled_pool.value());
}
return Create();
}

Expand Down Expand Up @@ -187,15 +183,4 @@ vk::UniqueDescriptorPool DescriptorPoolRecyclerVK::Create() {
return std::move(pool);
}

std::optional<vk::UniqueDescriptorPool> DescriptorPoolRecyclerVK::Reuse() {
Lock lock(recycled_mutex_);
if (recycled_.empty()) {
return std::nullopt;
}

auto recycled = std::move(recycled_[recycled_.size() - 1]);
recycled_.pop_back();
return recycled;
}

} // namespace impeller
34 changes: 23 additions & 11 deletions impeller/renderer/backend/vulkan/descriptor_pool_vk.h
Original file line number Diff line number Diff line change
Expand Up @@ -6,13 +6,21 @@
#define FLUTTER_IMPELLER_RENDERER_BACKEND_VULKAN_DESCRIPTOR_POOL_VK_H_

#include <cstdint>
#include <unordered_map>

#include "fml/status_or.h"
#include "impeller/renderer/backend/vulkan/context_vk.h"
#include "vulkan/vulkan_handles.hpp"

namespace impeller {

// Per-pipeline cache of descriptor sets. Sets migrate from [unused] to
// [used] as they are handed out during a frame, and back to [unused] when
// the frame's pool is reclaimed by the recycler.
struct DescriptorCache {
// Descriptor sets available for reuse.
std::vector<vk::DescriptorSet> unused;
// Descriptor sets handed out since this cache was last reclaimed.
std::vector<vk::DescriptorSet> used;
};

// Maps a pipeline key (see GetPipelineKey) to that pipeline's cached sets.
using DescriptorCacheMap = std::unordered_map<uint64_t, DescriptorCache>;

//------------------------------------------------------------------------------
/// @brief A per-frame descriptor pool. Descriptors
/// from this pool don't need to be freed individually. Instead, the
Expand All @@ -28,18 +36,28 @@ class DescriptorPoolVK {
public:
explicit DescriptorPoolVK(std::weak_ptr<const ContextVK> context);

DescriptorPoolVK(std::weak_ptr<const ContextVK> context,
DescriptorCacheMap descriptor_sets,
std::vector<vk::UniqueDescriptorPool> pools);

~DescriptorPoolVK();

fml::StatusOr<vk::DescriptorSet> AllocateDescriptorSets(
const vk::DescriptorSetLayout& layout,
uint64_t pipeline_key,
const ContextVK& context_vk);

private:
friend class DescriptorPoolRecyclerVK;

std::weak_ptr<const ContextVK> context_;
DescriptorCacheMap descriptor_sets_;
std::vector<vk::UniqueDescriptorPool> pools_;

fml::Status CreateNewPool(const ContextVK& context_vk);

void Destroy();

DescriptorPoolVK(const DescriptorPoolVK&) = delete;

DescriptorPoolVK& operator=(const DescriptorPoolVK&) = delete;
Expand Down Expand Up @@ -68,29 +86,23 @@ class DescriptorPoolRecyclerVK final
/// the necessary capacity.
vk::UniqueDescriptorPool Get();

/// @brief Returns the descriptor pool to be reset on a background
/// thread.
///
/// @param[in] pool The pool to recycler.
void Reclaim(vk::UniqueDescriptorPool&& pool);
std::shared_ptr<DescriptorPoolVK> GetDescriptorPool();

void Reclaim(DescriptorCacheMap descriptor_sets,
std::vector<vk::UniqueDescriptorPool> pools);

private:
std::weak_ptr<ContextVK> context_;

Mutex recycled_mutex_;
std::vector<vk::UniqueDescriptorPool> recycled_ IPLR_GUARDED_BY(
std::vector<std::shared_ptr<DescriptorPoolVK>> recycled_ IPLR_GUARDED_BY(
recycled_mutex_);

/// @brief Creates a new |vk::CommandPool|.
///
/// @returns Returns a |std::nullopt| if a pool could not be created.
vk::UniqueDescriptorPool Create();

/// @brief Reuses a recycled |vk::CommandPool|, if available.
///
/// @returns Returns a |std::nullopt| if a pool was not available.
std::optional<vk::UniqueDescriptorPool> Reuse();

DescriptorPoolRecyclerVK(const DescriptorPoolRecyclerVK&) = delete;

DescriptorPoolRecyclerVK& operator=(const DescriptorPoolRecyclerVK&) = delete;
Expand Down
Loading
Loading