| author | bunnei <bunneidev@gmail.com> | 2018-02-12 13:51:52 -0500 |
|---|---|---|
| committer | GitHub <noreply@github.com> | 2018-02-12 13:51:52 -0500 |
| commit | be5ba4d95215217930e57976386adff6de79322f (patch) | |
| tree | 299b1096450b0284a489900280a28819aa4fb349 /src/video_core/memory_manager.cpp | |
| parent | 890e98a33e4afa3d7374c7951ee2bde7cc8849c5 (diff) | |
| parent | 6cddf9d88e7fc49919fda92bcd4235797c56f07f (diff) | |
Merge pull request #178 from Subv/command_buffers
GPU: Added a command processor to decode the GPU pushbuffers and forward the commands to their respective engines
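The command processor itself sits outside this file-limited view. As a rough, hypothetical sketch of the idea in the commit message (a pushbuffer is a stream of command words whose headers name a method, a subchannel, and an argument count, and the subchannel selects the engine that receives the arguments), decoding and forwarding could look like the following; the names, the bit layout, and the callback scheme are assumptions for illustration, not the PR's actual code.

```cpp
#include <cstddef>
#include <cstdint>
#include <functional>
#include <unordered_map>
#include <vector>

// Hypothetical command header; this field layout is assumed for illustration,
// not the layout used by the command processor added in this PR.
struct CommandHeader {
    uint32_t method;      // register index inside the target engine
    uint32_t subchannel;  // selects which engine is bound
    uint32_t arg_count;   // number of argument words that follow
};

CommandHeader DecodeHeader(uint32_t word) {
    return {word & 0x1FFF, (word >> 13) & 0x7, (word >> 16) & 0x1FFF};
}

using EngineCallback = std::function<void(uint32_t method, uint32_t argument)>;

// Walk the pushbuffer and forward each (method, argument) pair to the engine
// bound to the command's subchannel.
void ProcessPushbuffer(const std::vector<uint32_t>& pushbuffer,
                       const std::unordered_map<uint32_t, EngineCallback>& engines) {
    std::size_t cursor = 0;
    while (cursor < pushbuffer.size()) {
        const CommandHeader header = DecodeHeader(pushbuffer[cursor++]);
        const auto engine = engines.find(header.subchannel);
        for (uint32_t i = 0; i < header.arg_count && cursor < pushbuffer.size(); ++i, ++cursor) {
            if (engine != engines.end()) {
                engine->second(header.method + i, pushbuffer[cursor]);
            }
        }
    }
}
```

Each engine then interprets the forwarded (method, argument) writes as updates to its own register state, which is what "forward the commands to their respective engines" refers to.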
Diffstat (limited to 'src/video_core/memory_manager.cpp')
| -rw-r--r-- | src/video_core/memory_manager.cpp | 110 |
1 files changed, 110 insertions, 0 deletions
```diff
diff --git a/src/video_core/memory_manager.cpp b/src/video_core/memory_manager.cpp
new file mode 100644
index 000000000..2789a4ca1
--- /dev/null
+++ b/src/video_core/memory_manager.cpp
@@ -0,0 +1,110 @@
+// Copyright 2018 yuzu emulator team
+// Licensed under GPLv2 or any later version
+// Refer to the license.txt file included.
+
+#include "common/assert.h"
+#include "video_core/memory_manager.h"
+
+namespace Tegra {
+
+PAddr MemoryManager::AllocateSpace(u64 size, u64 align) {
+    boost::optional<PAddr> paddr = FindFreeBlock(size, align);
+    ASSERT(paddr);
+
+    for (u64 offset = 0; offset < size; offset += Memory::PAGE_SIZE) {
+        PageSlot(*paddr + offset) = static_cast<u64>(PageStatus::Allocated);
+    }
+
+    return *paddr;
+}
+
+PAddr MemoryManager::AllocateSpace(PAddr paddr, u64 size, u64 align) {
+    for (u64 offset = 0; offset < size; offset += Memory::PAGE_SIZE) {
+        if (IsPageMapped(paddr + offset)) {
+            return AllocateSpace(size, align);
+        }
+    }
+
+    for (u64 offset = 0; offset < size; offset += Memory::PAGE_SIZE) {
+        PageSlot(paddr + offset) = static_cast<u64>(PageStatus::Allocated);
+    }
+
+    return paddr;
+}
+
+PAddr MemoryManager::MapBufferEx(VAddr vaddr, u64 size) {
+    vaddr &= ~Memory::PAGE_MASK;
+
+    boost::optional<PAddr> paddr = FindFreeBlock(size);
+    ASSERT(paddr);
+
+    for (u64 offset = 0; offset < size; offset += Memory::PAGE_SIZE) {
+        PageSlot(*paddr + offset) = vaddr + offset;
+    }
+
+    return *paddr;
+}
+
+PAddr MemoryManager::MapBufferEx(VAddr vaddr, PAddr paddr, u64 size) {
+    vaddr &= ~Memory::PAGE_MASK;
+    paddr &= ~Memory::PAGE_MASK;
+
+    for (u64 offset = 0; offset < size; offset += Memory::PAGE_SIZE) {
+        if (PageSlot(paddr + offset) != static_cast<u64>(PageStatus::Allocated)) {
+            return MapBufferEx(vaddr, size);
+        }
+    }
+
+    for (u64 offset = 0; offset < size; offset += Memory::PAGE_SIZE) {
+        PageSlot(paddr + offset) = vaddr + offset;
+    }
+
+    return paddr;
+}
+
+boost::optional<PAddr> MemoryManager::FindFreeBlock(u64 size, u64 align) {
+    PAddr paddr{};
+    u64 free_space{};
+    align = (align + Memory::PAGE_MASK) & ~Memory::PAGE_MASK;
+
+    while (paddr + free_space < MAX_ADDRESS) {
+        if (!IsPageMapped(paddr + free_space)) {
+            free_space += Memory::PAGE_SIZE;
+            if (free_space >= size) {
+                return paddr;
+            }
+        } else {
+            paddr += free_space + Memory::PAGE_SIZE;
+            free_space = 0;
+            const u64 remainder{paddr % align};
+            if (remainder) {
+                paddr = (paddr - remainder) + align;
+            }
+        }
+    }
+
+    return {};
+}
+
+VAddr MemoryManager::PhysicalToVirtualAddress(PAddr paddr) {
+    VAddr base_addr = PageSlot(paddr);
+    ASSERT(base_addr != static_cast<u64>(PageStatus::Unmapped));
+    return base_addr + (paddr & Memory::PAGE_MASK);
+}
+
+bool MemoryManager::IsPageMapped(PAddr paddr) {
+    return PageSlot(paddr) != static_cast<u64>(PageStatus::Unmapped);
+}
+
+VAddr& MemoryManager::PageSlot(PAddr paddr) {
+    auto& block = page_table[(paddr >> (Memory::PAGE_BITS + PAGE_TABLE_BITS)) & PAGE_TABLE_MASK];
+    if (!block) {
+        block = std::make_unique<PageBlock>();
+        for (unsigned index = 0; index < PAGE_BLOCK_SIZE; index++) {
+            (*block)[index] = static_cast<u64>(PageStatus::Unmapped);
+        }
+    }
+    return (*block)[(paddr >> Memory::PAGE_BITS) & PAGE_BLOCK_MASK];
+}
+
+} // namespace Tegra
```
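PageSlot resolves a physical GPU address with a lazily allocated two-level table: the upper address bits select a PageBlock in page_table, and the bits just above the page offset select the slot inside that block. Each slot holds either the guest VAddr mapped at that page or a PageStatus sentinel (Unmapped or Allocated), which is why PhysicalToVirtualAddress only has to add the page offset to the stored base. A standalone sketch of the index arithmetic, with illustrative constants standing in for the real ones declared in memory_manager.h (not shown in this diff):

```cpp
#include <cstdint>
#include <cstdio>

// Illustrative constants; the real values are defined alongside the class.
constexpr uint64_t PAGE_BITS = 12;        // assumed 4 KiB pages (Memory::PAGE_SIZE)
constexpr uint64_t PAGE_TABLE_BITS = 10;  // assumed number of top-level entries: 1 << 10
constexpr uint64_t PAGE_BLOCK_BITS = 14;  // assumed slots per PageBlock: 1 << 14
constexpr uint64_t PAGE_TABLE_MASK = (1ULL << PAGE_TABLE_BITS) - 1;
constexpr uint64_t PAGE_BLOCK_MASK = (1ULL << PAGE_BLOCK_BITS) - 1;

int main() {
    const uint64_t paddr = 0x12345678ULL;

    // First level: which PageBlock in page_table the address falls into.
    const uint64_t table_index = (paddr >> (PAGE_BITS + PAGE_TABLE_BITS)) & PAGE_TABLE_MASK;
    // Second level: which VAddr slot inside that block.
    const uint64_t block_index = (paddr >> PAGE_BITS) & PAGE_BLOCK_MASK;

    std::printf("paddr 0x%llx -> page_table[%llu], slot %llu\n",
                static_cast<unsigned long long>(paddr),
                static_cast<unsigned long long>(table_index),
                static_cast<unsigned long long>(block_index));
    return 0;
}
```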
