vulkan: Add memory pressure handling and pipeline eviction

Signed-off-by: Zephyron <zephyron@citron-emu.org>
This commit is contained in:
Zephyron
2025-12-16 19:56:06 +10:00
parent 180606a166
commit b1192de0c4
8 changed files with 145 additions and 2 deletions

View File

@@ -426,6 +426,59 @@ PipelineCache::~PipelineCache() {
}
}
void PipelineCache::EvictOldPipelines() {
constexpr u64 FRAMES_TO_KEEP = 2000;
const u64 current_frame = scheduler.CurrentTick();
if (current_frame - last_memory_pressure_frame < MEMORY_PRESSURE_COOLDOWN) {
return;
}
last_memory_pressure_frame = current_frame;
const u64 evict_before_frame = current_frame > FRAMES_TO_KEEP ? current_frame - FRAMES_TO_KEEP : 0;
size_t evicted_graphics = 0;
size_t evicted_compute = 0;
for (auto it = graphics_cache.begin(); it != graphics_cache.end();) {
const GraphicsPipeline* pipeline = it->second.get();
if (pipeline && pipeline != current_pipeline) {
auto use_it = graphics_pipeline_last_use.find(pipeline);
if (use_it == graphics_pipeline_last_use.end() || use_it->second < evict_before_frame) {
graphics_pipeline_last_use.erase(pipeline);
it = graphics_cache.erase(it);
evicted_graphics++;
} else {
++it;
}
} else {
++it;
}
}
for (auto it = compute_cache.begin(); it != compute_cache.end();) {
const ComputePipeline* pipeline = it->second.get();
if (pipeline) {
auto use_it = compute_pipeline_last_use.find(pipeline);
if (use_it == compute_pipeline_last_use.end() || use_it->second < evict_before_frame) {
compute_pipeline_last_use.erase(pipeline);
it = compute_cache.erase(it);
evicted_compute++;
} else {
++it;
}
} else {
++it;
}
}
if (evicted_graphics > 0 || evicted_compute > 0) {
LOG_INFO(Render_Vulkan, "Evicted {} graphics and {} compute pipelines to free memory",
evicted_graphics, evicted_compute);
}
}
GraphicsPipeline* PipelineCache::CurrentGraphicsPipeline() {
MICROPROFILE_SCOPE(Vulkan_PipelineCache);
@@ -439,10 +492,16 @@ GraphicsPipeline* PipelineCache::CurrentGraphicsPipeline() {
GraphicsPipeline* const next{current_pipeline->Next(graphics_key)};
if (next) {
current_pipeline = next;
// Update last use frame
graphics_pipeline_last_use[current_pipeline] = scheduler.CurrentTick();
return BuiltPipeline(current_pipeline);
}
}
return CurrentGraphicsPipelineSlowPath();
GraphicsPipeline* result = CurrentGraphicsPipelineSlowPath();
if (result) {
graphics_pipeline_last_use[result] = scheduler.CurrentTick();
}
return result;
}
ComputePipeline* PipelineCache::CurrentComputePipeline() {
@@ -460,10 +519,14 @@ ComputePipeline* PipelineCache::CurrentComputePipeline() {
};
const auto [pair, is_new]{compute_cache.try_emplace(key)};
auto& pipeline{pair->second};
if (!is_new) {
if (!is_new && pipeline) {
compute_pipeline_last_use[pipeline.get()] = scheduler.CurrentTick();
return pipeline.get();
}
pipeline = CreateComputePipeline(key, shader);
if (pipeline) {
compute_pipeline_last_use[pipeline.get()] = scheduler.CurrentTick();
}
return pipeline.get();
}
@@ -705,6 +768,13 @@ std::unique_ptr<GraphicsPipeline> PipelineCache::CreateGraphicsPipeline(
descriptor_pool, guest_descriptor_queue, thread_worker, statistics, render_pass_cache, key,
std::move(modules), infos);
} catch (const vk::Exception& exception) {
if (exception.GetResult() == VK_ERROR_OUT_OF_DEVICE_MEMORY) {
LOG_ERROR(Render_Vulkan, "Out of device memory during graphics pipeline creation, attempting recovery");
EvictOldPipelines();
return nullptr;
}
throw;
} catch (const Shader::Exception& exception) {
auto hash = key.Hash();
size_t env_index{0};
@@ -801,6 +871,13 @@ std::unique_ptr<ComputePipeline> PipelineCache::CreateComputePipeline(
guest_descriptor_queue, thread_worker, statistics,
&shader_notify, program.info, std::move(spv_module));
} catch (const vk::Exception& exception) {
if (exception.GetResult() == VK_ERROR_OUT_OF_DEVICE_MEMORY) {
LOG_ERROR(Render_Vulkan, "Out of device memory during compute pipeline creation, attempting recovery");
EvictOldPipelines();
return nullptr;
}
throw;
} catch (const Shader::Exception& exception) {
LOG_ERROR(Render_Vulkan, "{}", exception.what());
return nullptr;

View File

@@ -1,4 +1,5 @@
// SPDX-FileCopyrightText: Copyright 2019 yuzu Emulator Project
// SPDX-FileCopyrightText: Copyright 2025 Citron Emulator Project
// SPDX-License-Identifier: GPL-2.0-or-later
#pragma once
@@ -140,6 +141,15 @@ private:
vk::PipelineCache LoadVulkanPipelineCache(const std::filesystem::path& filename,
u32 expected_cache_version);
/// Evicts old unused pipelines to free memory when under pressure
void EvictOldPipelines();
public:
/// Public interface to evict old pipelines (for memory pressure handling)
void TriggerPipelineEviction() {
EvictOldPipelines();
}
const Device& device;
Scheduler& scheduler;
DescriptorPool& descriptor_pool;
@@ -157,6 +167,12 @@ private:
std::unordered_map<ComputePipelineCacheKey, std::unique_ptr<ComputePipeline>> compute_cache;
std::unordered_map<GraphicsPipelineCacheKey, std::unique_ptr<GraphicsPipeline>> graphics_cache;
std::unordered_map<const GraphicsPipeline*, u64> graphics_pipeline_last_use;
std::unordered_map<const ComputePipeline*, u64> compute_pipeline_last_use;
u64 last_memory_pressure_frame{0};
static constexpr u64 MEMORY_PRESSURE_COOLDOWN = 300;
ShaderPools main_pools;
Shader::Profile profile;
@@ -170,6 +186,7 @@ private:
Common::ThreadWorker workers;
Common::ThreadWorker serialization_thread;
DynamicFeatures dynamic_features;
};
} // namespace Vulkan

View File

@@ -202,6 +202,14 @@ RasterizerVulkan::RasterizerVulkan(Core::Frontend::EmuWindow& emu_window_, Tegra
fence_manager(*this, gpu, texture_cache, buffer_cache, query_cache, device, scheduler),
wfi_event(device.GetLogical().CreateEvent()) {
scheduler.SetQueryCache(query_cache);
// Register a handler that sheds cached GPU resources when the allocator reports
// memory pressure: evict cold pipelines, garbage-collect the texture and buffer
// caches, and release both staging buffer pools (upload and download).
// NOTE(review): the lambda captures `this`; this assumes memory_allocator does
// not outlive the rasterizer — confirm the callback's lifetime.
memory_allocator.SetMemoryPressureCallback([this]() {
pipeline_cache.TriggerPipelineEviction();
texture_cache.TriggerGarbageCollection();
buffer_cache.TriggerGarbageCollection();
staging_pool.TriggerCacheRelease(MemoryUsage::Upload);
staging_pool.TriggerCacheRelease(MemoryUsage::Download);
});
}
RasterizerVulkan::~RasterizerVulkan() = default;

View File

@@ -101,6 +101,12 @@ private:
void ReleaseCache(MemoryUsage usage);
void ReleaseLevel(StagingBuffersCache& cache, size_t log2);
public:
/// Public interface to release staging buffer cache
void TriggerCacheRelease(MemoryUsage usage) {
ReleaseCache(usage);
}
size_t Region(size_t iter) const noexcept {
return iter / region_size;
}