2019-03-02 20:20:28 +00:00
|
|
|
// Copyright 2019 yuzu Emulator Project
|
|
|
|
// Licensed under GPLv2 or any later version
|
|
|
|
// Refer to the license.txt file included.
|
|
|
|
|
|
|
|
#pragma once
|
|
|
|
|
|
|
|
#include <atomic>
|
2020-11-18 00:58:41 +00:00
|
|
|
|
2019-03-02 20:20:28 +00:00
|
|
|
#include "common/common_types.h"
|
2020-04-09 02:49:51 +00:00
|
|
|
#include "common/virtual_buffer.h"
|
2019-03-02 20:20:28 +00:00
|
|
|
|
|
|
|
namespace Common {
|
|
|
|
|
|
|
|
/// Classifies how a single page in the page table is backed.
enum class PageType : u8 {
    /// Page is unmapped and should cause an access error.
    Unmapped = 0,
    /// Page is mapped to regular memory. This is the only type you can get pointers to.
    Memory = 1,
    /// Page is mapped to regular memory, but also needs to check for rasterizer cache flushing
    /// and invalidation.
    RasterizerCachedMemory = 2,
};
|
|
|
|
|
|
|
|
/**
|
|
|
|
* A (reasonably) fast way of allowing switchable and remappable process address spaces. It loosely
|
|
|
|
* mimics the way a real CPU page table works.
|
|
|
|
*/
|
|
|
|
struct PageTable {
    /// Result of a single traversal step: the physical address of the current
    /// block and its size. Filled in by BeginTraversal/ContinueTraversal.
    struct TraversalEntry {
        u64 phys_addr{};
        std::size_t block_size{};
    };

    /// Iteration state threaded between BeginTraversal and ContinueTraversal calls.
    struct TraversalContext {
        u64 next_page{};
        u64 next_offset{};
    };

    /// Number of bits reserved for attribute tagging.
    /// This can be at most the guaranteed alignment of the pointers in the page table.
    static constexpr int ATTRIBUTE_BITS = 2;

    /**
     * Pair of host pointer and page type attribute.
     * This uses the lower bits of a given pointer to store the attribute tag.
     * Writing and reading the pointer attribute pair is guaranteed to be atomic for the same method
     * call. In other words, they are guaranteed to be synchronized at all times.
     */
    class PageInfo {
    public:
        /// Returns the page pointer
        [[nodiscard]] u8* Pointer() const noexcept {
            return ExtractPointer(raw.load(std::memory_order_relaxed));
        }

        /// Returns the page type attribute
        [[nodiscard]] PageType Type() const noexcept {
            return ExtractType(raw.load(std::memory_order_relaxed));
        }

        /// Returns the page pointer and attribute pair, extracted from the same atomic read
        [[nodiscard]] std::pair<u8*, PageType> PointerType() const noexcept {
            // Single load so pointer and type can never be observed out of sync.
            const uintptr_t non_atomic_raw = raw.load(std::memory_order_relaxed);
            return {ExtractPointer(non_atomic_raw), ExtractType(non_atomic_raw)};
        }

        /// Returns the raw representation of the page information.
        /// Use ExtractPointer and ExtractType to unpack the value.
        [[nodiscard]] uintptr_t Raw() const noexcept {
            return raw.load(std::memory_order_relaxed);
        }

        /// Write a page pointer and type pair atomically
        /// (default seq_cst store; the tag occupies the pointer's low ATTRIBUTE_BITS).
        void Store(u8* pointer, PageType type) noexcept {
            raw.store(reinterpret_cast<uintptr_t>(pointer) | static_cast<uintptr_t>(type));
        }

        /// Unpack a pointer from a page info raw representation
        /// (masks off the low ATTRIBUTE_BITS that hold the type tag).
        [[nodiscard]] static u8* ExtractPointer(uintptr_t raw) noexcept {
            return reinterpret_cast<u8*>(raw & (~uintptr_t{0} << ATTRIBUTE_BITS));
        }

        /// Unpack a page type from a page info raw representation
        /// (keeps only the low ATTRIBUTE_BITS).
        [[nodiscard]] static PageType ExtractType(uintptr_t raw) noexcept {
            return static_cast<PageType>(raw & ((uintptr_t{1} << ATTRIBUTE_BITS) - 1));
        }

    private:
        // Packed (pointer | type-tag) value; unpack with ExtractPointer/ExtractType.
        std::atomic<uintptr_t> raw;
    };

    PageTable();
    ~PageTable() noexcept;

    // Non-copyable: the table owns large virtual buffers.
    PageTable(const PageTable&) = delete;
    PageTable& operator=(const PageTable&) = delete;

    PageTable(PageTable&&) noexcept = default;
    PageTable& operator=(PageTable&&) noexcept = default;

    // Begin/continue a walk over contiguous mappings starting at `address`.
    // Defined out-of-line; presumably returns false when no (further) mapping
    // exists — TODO confirm against page_table.cpp.
    bool BeginTraversal(TraversalEntry& out_entry, TraversalContext& out_context,
                        u64 address) const;
    bool ContinueTraversal(TraversalEntry& out_entry, TraversalContext& context) const;

    /**
     * Resizes the page table to be able to accommodate enough pages within
     * a given address space.
     *
     * @param address_space_width_in_bits The address size width in bits.
     * @param page_size_in_bits The page size in bits.
     */
    void Resize(std::size_t address_space_width_in_bits, std::size_t page_size_in_bits);

    /// Returns the width in bits of the currently configured address space (set by Resize).
    std::size_t GetAddressSpaceBits() const {
        return current_address_space_width_in_bits;
    }

    /**
     * Vector of memory pointers backing each page. An entry can only be non-null if the
     * corresponding attribute element is of type `Memory`.
     */
    VirtualBuffer<PageInfo> pointers;

    // Per-page backing value; presumably the physical/base address used to
    // translate back from host pointers — writer lives in the .cpp, confirm there.
    VirtualBuffer<u64> backing_addr;

    // Cached result of the last Resize; read by GetAddressSpaceBits.
    std::size_t current_address_space_width_in_bits{};

    // NOTE(review): base of the fastmem arena mapping; assignment is not visible
    // in this header — verify against the .cpp before relying on its semantics.
    u8* fastmem_arena{};

    // Page size in bytes (derived from page_size_in_bits in Resize — TODO confirm).
    std::size_t page_size{};
};
|
|
|
|
|
2019-03-02 20:20:28 +00:00
|
|
|
} // namespace Common
|