rework flexible memory usage tracking

This commit is contained in:
ParantezTech 2026-02-20 17:58:25 +03:00
parent 06b901a47b
commit 6df7bea014
2 changed files with 157 additions and 3 deletions

View File

@ -1,6 +1,8 @@
// SPDX-FileCopyrightText: Copyright 2025-2026 shadPS4 Emulator Project
// SPDX-License-Identifier: GPL-2.0-or-later
#include <limits>
#include "common/alignment.h"
#include "common/assert.h"
#include "common/config.h"
@ -68,8 +70,25 @@ void MemoryManager::SetupMemoryRegions(u64 flexible_size, bool use_extended_mem1
fmem_map.emplace(total_direct_size,
PhysicalMemoryArea{total_direct_size, remaining_physical_space});
flexible_virtual_base = impl.SystemReservedVirtualBase();
const u64 flexible_virtual_size =
std::min<u64>(total_flexible_size, impl.SystemReservedVirtualSize());
flexible_virtual_end = flexible_virtual_base + flexible_virtual_size;
{
std::scoped_lock lk{mutex};
RecalculateFlexibleMappedUsageLocked();
}
LOG_INFO(Kernel_Vmm, "Configured memory regions: flexible size = {:#x}, direct size = {:#x}",
total_flexible_size, total_direct_size);
if (Config::debugDump()) {
LOG_DEBUG(
Kernel_Vmm,
"Flexible accounting region: [{:#x}, {:#x}), total = {:#x}, used = {:#x}, free = {:#x}",
flexible_virtual_base, flexible_virtual_end, total_flexible_size, flexible_mapped_usage,
GetAvailableFlexibleSize());
}
}
u64 MemoryManager::ClampRangeSize(VAddr virtual_addr, u64 size) {
@ -557,6 +576,7 @@ s32 MemoryManager::MapMemory(void** out_addr, VAddr virtual_addr, u64 size, Memo
// Acquire writer lock.
std::unique_lock lk2{mutex};
const u64 flexible_before = GetFlexibleMappedBytesInRangeLocked(virtual_addr, size);
// Create VMA representing this mapping.
auto new_vma_handle = CreateArea(virtual_addr, size, prot, flags, type, name, alignment);
@ -643,6 +663,9 @@ s32 MemoryManager::MapMemory(void** out_addr, VAddr virtual_addr, u64 size, Memo
MergeAdjacent(vma_map, new_vma_handle);
}
const u64 flexible_after = GetFlexibleMappedBytesInRangeLocked(mapped_addr, size);
AdjustFlexibleMappedUsageLocked(flexible_before, flexible_after);
*out_addr = std::bit_cast<void*>(mapped_addr);
if (type != VMAType::Reserved && type != VMAType::PoolReserved) {
// Flexible address space mappings were performed while finding direct memory areas.
@ -731,6 +754,7 @@ s32 MemoryManager::MapFile(void** out_addr, VAddr virtual_addr, u64 size, Memory
// Acquire writer lock
std::scoped_lock lk2{mutex};
const u64 flexible_before = GetFlexibleMappedBytesInRangeLocked(virtual_addr, size);
// Update VMA map and map to address space.
auto new_vma_handle = CreateArea(virtual_addr, size, prot, flags, VMAType::File, "anon", 0);
@ -742,6 +766,9 @@ s32 MemoryManager::MapFile(void** out_addr, VAddr virtual_addr, u64 size, Memory
impl.MapFile(mapped_addr, size, phys_addr, std::bit_cast<u32>(prot), handle);
const u64 flexible_after = GetFlexibleMappedBytesInRangeLocked(mapped_addr, size);
AdjustFlexibleMappedUsageLocked(flexible_before, flexible_after);
*out_addr = std::bit_cast<void*>(mapped_addr);
return ORBIS_OK;
}
@ -848,7 +875,13 @@ s32 MemoryManager::UnmapMemory(VAddr virtual_addr, u64 size) {
// Acquire writer lock.
std::scoped_lock lk2{mutex};
return UnmapMemoryImpl(virtual_addr, size);
const u64 flexible_before = GetFlexibleMappedBytesInRangeLocked(virtual_addr, size);
const s32 result = UnmapMemoryImpl(virtual_addr, size);
if (result == ORBIS_OK) {
const u64 flexible_after = GetFlexibleMappedBytesInRangeLocked(virtual_addr, size);
AdjustFlexibleMappedUsageLocked(flexible_before, flexible_after);
}
return result;
}
u64 MemoryManager::UnmapBytesFromEntry(VAddr virtual_addr, VirtualMemoryArea vma_base, u64 size) {
@ -1344,6 +1377,92 @@ void MemoryManager::InvalidateMemory(const VAddr addr, const u64 size) const {
}
}
// Debug-only entry point: takes the memory mutex and rebuilds the cached
// flexible usage counter from the current VMA map.
void MemoryManager::RecalculateFlexibleUsageForDebug() {
    std::scoped_lock guard{mutex};
    RecalculateFlexibleMappedUsageLocked();
}
// Tells whether mappings of this VMA type are charged against the flexible
// memory budget.
bool MemoryManager::IsFlexibleCountedVmaType(VMAType type) const {
    switch (type) {
    case VMAType::Flexible:
    case VMAType::Code:
        return true;
    default:
        return false;
    }
}
bool MemoryManager::IsFlexibleCommittedVma(const VirtualMemoryArea& vma) const {
if (!vma.IsMapped()) {
return false;
}
const bool has_physical_tracking =
vma.type == VMAType::Direct || vma.type == VMAType::Flexible || vma.type == VMAType::Pooled;
if (has_physical_tracking) {
// Direct/flexible/pooled mappings should expose at least one physical sub-area when
// committed.
return !vma.phys_areas.empty();
}
// Non-phys-tracked mappings (code/stack/file) are committed through address-space map calls.
return vma.type == VMAType::Code || vma.type == VMAType::Stack || vma.type == VMAType::File;
}
// Counts how many bytes inside [virtual_addr, virtual_addr + size) fall within
// the flexible accounting window and belong to committed, flexible-counted VMAs.
// Caller must hold `mutex`. The range is widened to 16 KiB page granularity
// before clamping to the window.
u64 MemoryManager::GetFlexibleMappedBytesInRangeLocked(VAddr virtual_addr, u64 size) const {
    if (!IsFlexibleRegionConfigured() || size == 0) {
        return 0;
    }
    const VAddr aligned_start = Common::AlignDown(virtual_addr, 16_KB);
    const u64 page_offset = virtual_addr - aligned_start;
    if (size > std::numeric_limits<u64>::max() - page_offset) {
        // size + page_offset would overflow; treat as an empty range.
        return 0;
    }
    const u64 aligned_size = Common::AlignUp(size + page_offset, 16_KB);
    if (aligned_size == 0) {
        // AlignUp wrapped around; nothing sensible to count.
        return 0;
    }
    const VAddr aligned_end = aligned_start + aligned_size;
    // Clamp the query to the flexible accounting window.
    const VAddr range_start = std::max(aligned_start, flexible_virtual_base);
    const VAddr range_end = std::min(aligned_end, flexible_virtual_end);
    if (range_start >= range_end) {
        return 0;
    }
    u64 mapped_bytes = 0;
    // Use the shared FindVMA helper rather than open-coding prev(upper_bound(...)),
    // and skip the (potentially non-trivial) committed/counted checks for VMAs
    // that do not overlap the clamped range at all.
    for (auto it = FindVMA(range_start); it != vma_map.end() && it->second.base < range_end;
         ++it) {
        const auto& vma = it->second;
        const VAddr overlap_start = std::max(range_start, vma.base);
        const VAddr overlap_end = std::min(range_end, vma.base + vma.size);
        if (overlap_start >= overlap_end) {
            continue;
        }
        if (IsFlexibleCountedVmaType(vma.type) && IsFlexibleCommittedVma(vma)) {
            mapped_bytes += overlap_end - overlap_start;
        }
    }
    return mapped_bytes;
}
// Applies the delta between two flexible-usage samples to the running counter,
// saturating at zero so a mismatched decrease can never underflow the u64.
// Caller must hold `mutex`.
void MemoryManager::AdjustFlexibleMappedUsageLocked(u64 mapped_before, u64 mapped_after) {
    if (mapped_before <= mapped_after) {
        flexible_mapped_usage += mapped_after - mapped_before;
        return;
    }
    const u64 decrease = mapped_before - mapped_after;
    // Subtract at most the current value (saturating subtraction).
    flexible_mapped_usage -= decrease < flexible_mapped_usage ? decrease : flexible_mapped_usage;
}
// Rebuilds the cached flexible usage counter from scratch by scanning the whole
// accounting window; resets it to zero when no flexible region is configured.
// Caller must hold `mutex`.
void MemoryManager::RecalculateFlexibleMappedUsageLocked() {
    flexible_mapped_usage =
        IsFlexibleRegionConfigured()
            ? GetFlexibleMappedBytesInRangeLocked(flexible_virtual_base,
                                                  flexible_virtual_end - flexible_virtual_base)
            : 0;
}
VAddr MemoryManager::SearchFree(VAddr virtual_addr, u64 size, u32 alignment) {
// Calculate the minimum and maximum addresses present in our address space.
auto min_search_address = impl.SystemManagedVirtualBase();

View File

@ -157,9 +157,11 @@ struct VirtualMemoryArea {
class MemoryManager {
using PhysMap = std::map<PAddr, PhysicalMemoryArea>;
using PhysHandle = PhysMap::iterator;
using PhysConstHandle = PhysMap::const_iterator;
using VMAMap = std::map<VAddr, VirtualMemoryArea>;
using VMAHandle = VMAMap::iterator;
using VMAConstHandle = VMAMap::const_iterator;
public:
explicit MemoryManager();
@ -181,8 +183,17 @@ public:
return total_flexible_size;
}
// Returns the cached count of flexible-region bytes currently mapped.
// NOTE(review): this reads the counter without taking `mutex`; presumably a
// slightly stale value is acceptable for reporting — confirm with callers.
u64 GetUsedFlexibleSize() const {
return flexible_mapped_usage;
}
u64 GetAvailableFlexibleSize() const {
return total_flexible_size - flexible_usage;
const u64 used = GetUsedFlexibleSize();
return used < total_flexible_size ? total_flexible_size - used : 0;
}
// A flexible accounting window exists only when it spans a non-empty virtual range.
bool IsFlexibleRegionConfigured() const {
    return flexible_virtual_base < flexible_virtual_end;
}
VAddr SystemReservedVirtualBase() noexcept {
@ -288,20 +299,31 @@ public:
void InvalidateMemory(VAddr addr, u64 size) const;
void RecalculateFlexibleUsageForDebug();
private:
VMAHandle FindVMA(VAddr target) {
return std::prev(vma_map.upper_bound(target));
}
VMAConstHandle FindVMA(VAddr target) const {
return std::prev(vma_map.upper_bound(target));
}
PhysHandle FindDmemArea(PAddr target) {
return std::prev(dmem_map.upper_bound(target));
}
PhysConstHandle FindDmemArea(PAddr target) const {
return std::prev(dmem_map.upper_bound(target));
}
PhysHandle FindFmemArea(PAddr target) {
return std::prev(fmem_map.upper_bound(target));
}
PhysConstHandle FindFmemArea(PAddr target) const {
return std::prev(fmem_map.upper_bound(target));
}
bool HasPhysicalBacking(VirtualMemoryArea vma) {
bool HasPhysicalBacking(const VirtualMemoryArea& vma) const {
return vma.type == VMAType::Direct || vma.type == VMAType::Flexible ||
vma.type == VMAType::Pooled;
}
@ -327,6 +349,16 @@ private:
s32 UnmapMemoryImpl(VAddr virtual_addr, u64 size);
bool IsFlexibleCountedVmaType(VMAType type) const;
bool IsFlexibleCommittedVma(const VirtualMemoryArea& vma) const;
u64 GetFlexibleMappedBytesInRangeLocked(VAddr virtual_addr, u64 size) const;
void AdjustFlexibleMappedUsageLocked(u64 mapped_before, u64 mapped_after);
void RecalculateFlexibleMappedUsageLocked();
private:
AddressSpace impl;
PhysMap dmem_map;
@ -337,6 +369,9 @@ private:
u64 total_direct_size{};
u64 total_flexible_size{};
u64 flexible_usage{};
VAddr flexible_virtual_base{};
VAddr flexible_virtual_end{};
u64 flexible_mapped_usage{};
u64 pool_budget{};
s32 sdk_version{};
Vulkan::Rasterizer* rasterizer{};