hle: kernel: k_memory_layout: Derive memory regions based on board layout.
		| @@ -172,6 +172,8 @@ add_library(core STATIC | |||||||
|     hle/kernel/k_memory_block.h |     hle/kernel/k_memory_block.h | ||||||
|     hle/kernel/k_memory_block_manager.cpp |     hle/kernel/k_memory_block_manager.cpp | ||||||
|     hle/kernel/k_memory_block_manager.h |     hle/kernel/k_memory_block_manager.h | ||||||
|  |     hle/kernel/k_memory_layout.cpp | ||||||
|  |     hle/kernel/k_memory_layout.board.nintendo_nx.cpp | ||||||
|     hle/kernel/k_memory_layout.h |     hle/kernel/k_memory_layout.h | ||||||
|     hle/kernel/k_memory_manager.cpp |     hle/kernel/k_memory_manager.cpp | ||||||
|     hle/kernel/k_memory_manager.h |     hle/kernel/k_memory_manager.h | ||||||
|   | |||||||
							
								
								
									
src/core/hle/kernel/k_memory_layout.board.nintendo_nx.cpp (new file, 199 lines)
							| @@ -0,0 +1,199 @@ | |||||||
|  | // Copyright 2021 yuzu Emulator Project | ||||||
|  | // Licensed under GPLv2 or any later version | ||||||
|  | // Refer to the license.txt file included. | ||||||
|  |  | ||||||
|  | #include "common/alignment.h" | ||||||
|  | #include "core/hle/kernel/k_memory_layout.h" | ||||||
|  | #include "core/hle/kernel/k_memory_manager.h" | ||||||
|  | #include "core/hle/kernel/k_system_control.h" | ||||||
|  | #include "core/hle/kernel/k_trace.h" | ||||||
|  |  | ||||||
|  | namespace Kernel { | ||||||
|  |  | ||||||
|  | namespace { | ||||||
|  |  | ||||||
|  | constexpr size_t CarveoutAlignment = 0x20000; | ||||||
|  | constexpr size_t CarveoutSizeMax = (512ULL * 1024 * 1024) - CarveoutAlignment; | ||||||
|  |  | ||||||
|  | bool SetupPowerManagementControllerMemoryRegion(KMemoryLayout& memory_layout) { | ||||||
|  |     // Above firmware 2.0.0, the PMC is not mappable. | ||||||
|  |     return memory_layout.GetPhysicalMemoryRegionTree().Insert( | ||||||
|  |                0x7000E000, 0x400, KMemoryRegionType_None | KMemoryRegionAttr_NoUserMap) && | ||||||
|  |            memory_layout.GetPhysicalMemoryRegionTree().Insert( | ||||||
|  |                0x7000E400, 0xC00, | ||||||
|  |                KMemoryRegionType_PowerManagementController | KMemoryRegionAttr_NoUserMap); | ||||||
|  | } | ||||||
|  |  | ||||||
|  | void InsertPoolPartitionRegionIntoBothTrees(KMemoryLayout& memory_layout, size_t start, size_t size, | ||||||
|  |                                             KMemoryRegionType phys_type, | ||||||
|  |                                             KMemoryRegionType virt_type, u32& cur_attr) { | ||||||
|  |     const u32 attr = cur_attr++; | ||||||
|  |     ASSERT(memory_layout.GetPhysicalMemoryRegionTree().Insert(start, size, | ||||||
|  |                                                               static_cast<u32>(phys_type), attr)); | ||||||
|  |     const KMemoryRegion* phys = memory_layout.GetPhysicalMemoryRegionTree().FindByTypeAndAttribute( | ||||||
|  |         static_cast<u32>(phys_type), attr); | ||||||
|  |     ASSERT(phys != nullptr); | ||||||
|  |     ASSERT(phys->GetEndAddress() != 0); | ||||||
|  |     ASSERT(memory_layout.GetVirtualMemoryRegionTree().Insert(phys->GetPairAddress(), size, | ||||||
|  |                                                              static_cast<u32>(virt_type), attr)); | ||||||
|  | } | ||||||
|  |  | ||||||
|  | } // namespace | ||||||
|  |  | ||||||
|  | namespace Init { | ||||||
|  |  | ||||||
|  | void SetupDevicePhysicalMemoryRegions(KMemoryLayout& memory_layout) { | ||||||
|  |     ASSERT(SetupPowerManagementControllerMemoryRegion(memory_layout)); | ||||||
|  |     ASSERT(memory_layout.GetPhysicalMemoryRegionTree().Insert( | ||||||
|  |         0x70019000, 0x1000, KMemoryRegionType_MemoryController | KMemoryRegionAttr_NoUserMap)); | ||||||
|  |     ASSERT(memory_layout.GetPhysicalMemoryRegionTree().Insert( | ||||||
|  |         0x7001C000, 0x1000, KMemoryRegionType_MemoryController0 | KMemoryRegionAttr_NoUserMap)); | ||||||
|  |     ASSERT(memory_layout.GetPhysicalMemoryRegionTree().Insert( | ||||||
|  |         0x7001D000, 0x1000, KMemoryRegionType_MemoryController1 | KMemoryRegionAttr_NoUserMap)); | ||||||
|  |     ASSERT(memory_layout.GetPhysicalMemoryRegionTree().Insert( | ||||||
|  |         0x50040000, 0x1000, KMemoryRegionType_None | KMemoryRegionAttr_NoUserMap)); | ||||||
|  |     ASSERT(memory_layout.GetPhysicalMemoryRegionTree().Insert( | ||||||
|  |         0x50041000, 0x1000, | ||||||
|  |         KMemoryRegionType_InterruptDistributor | KMemoryRegionAttr_ShouldKernelMap)); | ||||||
|  |     ASSERT(memory_layout.GetPhysicalMemoryRegionTree().Insert( | ||||||
|  |         0x50042000, 0x1000, | ||||||
|  |         KMemoryRegionType_InterruptCpuInterface | KMemoryRegionAttr_ShouldKernelMap)); | ||||||
|  |     ASSERT(memory_layout.GetPhysicalMemoryRegionTree().Insert( | ||||||
|  |         0x50043000, 0x1D000, KMemoryRegionType_None | KMemoryRegionAttr_NoUserMap)); | ||||||
|  |  | ||||||
|  |     // Map IRAM unconditionally, to support debug-logging-to-iram build config. | ||||||
|  |     ASSERT(memory_layout.GetPhysicalMemoryRegionTree().Insert( | ||||||
|  |         0x40000000, 0x40000, KMemoryRegionType_LegacyLpsIram | KMemoryRegionAttr_ShouldKernelMap)); | ||||||
|  |  | ||||||
|  |     // Above firmware 2.0.0, prevent mapping the bpmp exception vectors or the ipatch region. | ||||||
|  |     ASSERT(memory_layout.GetPhysicalMemoryRegionTree().Insert( | ||||||
|  |         0x6000F000, 0x1000, KMemoryRegionType_None | KMemoryRegionAttr_NoUserMap)); | ||||||
|  |     ASSERT(memory_layout.GetPhysicalMemoryRegionTree().Insert( | ||||||
|  |         0x6001DC00, 0x400, KMemoryRegionType_None | KMemoryRegionAttr_NoUserMap)); | ||||||
|  | } | ||||||
|  |  | ||||||
|  | void SetupDramPhysicalMemoryRegions(KMemoryLayout& memory_layout) { | ||||||
|  |     const size_t intended_memory_size = KSystemControl::Init::GetIntendedMemorySize(); | ||||||
|  |     const PAddr physical_memory_base_address = | ||||||
|  |         KSystemControl::Init::GetKernelPhysicalBaseAddress(DramPhysicalAddress); | ||||||
|  |  | ||||||
|  |     // Insert blocks into the tree. | ||||||
|  |     ASSERT(memory_layout.GetPhysicalMemoryRegionTree().Insert( | ||||||
|  |         physical_memory_base_address, intended_memory_size, KMemoryRegionType_Dram)); | ||||||
|  |     ASSERT(memory_layout.GetPhysicalMemoryRegionTree().Insert( | ||||||
|  |         physical_memory_base_address, ReservedEarlyDramSize, KMemoryRegionType_DramReservedEarly)); | ||||||
|  |  | ||||||
|  |     // Insert the KTrace block at the end of Dram, if KTrace is enabled. | ||||||
|  |     static_assert(!IsKTraceEnabled || KTraceBufferSize > 0); | ||||||
|  |     if constexpr (IsKTraceEnabled) { | ||||||
|  |         const PAddr ktrace_buffer_phys_addr = | ||||||
|  |             physical_memory_base_address + intended_memory_size - KTraceBufferSize; | ||||||
|  |         ASSERT(memory_layout.GetPhysicalMemoryRegionTree().Insert( | ||||||
|  |             ktrace_buffer_phys_addr, KTraceBufferSize, KMemoryRegionType_KernelTraceBuffer)); | ||||||
|  |     } | ||||||
|  | } | ||||||
|  |  | ||||||
|  | void SetupPoolPartitionMemoryRegions(KMemoryLayout& memory_layout) { | ||||||
|  |     // Start by identifying the extents of the DRAM memory region. | ||||||
|  |     const auto dram_extents = memory_layout.GetMainMemoryPhysicalExtents(); | ||||||
|  |     ASSERT(dram_extents.GetEndAddress() != 0); | ||||||
|  |  | ||||||
|  |     // Determine the end of the pool region. | ||||||
|  |     const u64 pool_end = dram_extents.GetEndAddress() - KTraceBufferSize; | ||||||
|  |  | ||||||
|  |     // Find the start of the kernel DRAM region. | ||||||
|  |     const KMemoryRegion* kernel_dram_region = | ||||||
|  |         memory_layout.GetPhysicalMemoryRegionTree().FindFirstDerived( | ||||||
|  |             KMemoryRegionType_DramKernelBase); | ||||||
|  |     ASSERT(kernel_dram_region != nullptr); | ||||||
|  |  | ||||||
|  |     const u64 kernel_dram_start = kernel_dram_region->GetAddress(); | ||||||
|  |     ASSERT(Common::IsAligned(kernel_dram_start, CarveoutAlignment)); | ||||||
|  |  | ||||||
|  |     // Find the start of the pool partitions region. | ||||||
|  |     const KMemoryRegion* pool_partitions_region = | ||||||
|  |         memory_layout.GetPhysicalMemoryRegionTree().FindByTypeAndAttribute( | ||||||
|  |             KMemoryRegionType_DramPoolPartition, 0); | ||||||
|  |     ASSERT(pool_partitions_region != nullptr); | ||||||
|  |     const u64 pool_partitions_start = pool_partitions_region->GetAddress(); | ||||||
|  |  | ||||||
|  |     // Setup the pool partition layouts. | ||||||
|  |     // On 5.0.0+, setup modern 4-pool-partition layout. | ||||||
|  |  | ||||||
|  |     // Get Application and Applet pool sizes. | ||||||
|  |     const size_t application_pool_size = KSystemControl::Init::GetApplicationPoolSize(); | ||||||
|  |     const size_t applet_pool_size = KSystemControl::Init::GetAppletPoolSize(); | ||||||
|  |     const size_t unsafe_system_pool_min_size = | ||||||
|  |         KSystemControl::Init::GetMinimumNonSecureSystemPoolSize(); | ||||||
|  |  | ||||||
|  |     // Decide on starting addresses for our pools. | ||||||
|  |     const u64 application_pool_start = pool_end - application_pool_size; | ||||||
|  |     const u64 applet_pool_start = application_pool_start - applet_pool_size; | ||||||
|  |     const u64 unsafe_system_pool_start = std::min( | ||||||
|  |         kernel_dram_start + CarveoutSizeMax, | ||||||
|  |         Common::AlignDown(applet_pool_start - unsafe_system_pool_min_size, CarveoutAlignment)); | ||||||
|  |     const size_t unsafe_system_pool_size = applet_pool_start - unsafe_system_pool_start; | ||||||
|  |  | ||||||
|  |     // We want to arrange the application pool depending on where the middle of DRAM is. | ||||||
|  |     const u64 dram_midpoint = (dram_extents.GetAddress() + dram_extents.GetEndAddress()) / 2; | ||||||
|  |     u32 cur_pool_attr = 0; | ||||||
|  |     size_t total_overhead_size = 0; | ||||||
|  |     if (dram_extents.GetEndAddress() <= dram_midpoint || dram_midpoint <= application_pool_start) { | ||||||
|  |         InsertPoolPartitionRegionIntoBothTrees( | ||||||
|  |             memory_layout, application_pool_start, application_pool_size, | ||||||
|  |             KMemoryRegionType_DramApplicationPool, KMemoryRegionType_VirtualDramApplicationPool, | ||||||
|  |             cur_pool_attr); | ||||||
|  |         total_overhead_size += | ||||||
|  |             KMemoryManager::CalculateManagementOverheadSize(application_pool_size); | ||||||
|  |     } else { | ||||||
|  |         const size_t first_application_pool_size = dram_midpoint - application_pool_start; | ||||||
|  |         const size_t second_application_pool_size = | ||||||
|  |             application_pool_start + application_pool_size - dram_midpoint; | ||||||
|  |         InsertPoolPartitionRegionIntoBothTrees( | ||||||
|  |             memory_layout, application_pool_start, first_application_pool_size, | ||||||
|  |             KMemoryRegionType_DramApplicationPool, KMemoryRegionType_VirtualDramApplicationPool, | ||||||
|  |             cur_pool_attr); | ||||||
|  |         InsertPoolPartitionRegionIntoBothTrees( | ||||||
|  |             memory_layout, dram_midpoint, second_application_pool_size, | ||||||
|  |             KMemoryRegionType_DramApplicationPool, KMemoryRegionType_VirtualDramApplicationPool, | ||||||
|  |             cur_pool_attr); | ||||||
|  |         total_overhead_size += | ||||||
|  |             KMemoryManager::CalculateManagementOverheadSize(first_application_pool_size); | ||||||
|  |         total_overhead_size += | ||||||
|  |             KMemoryManager::CalculateManagementOverheadSize(second_application_pool_size); | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     // Insert the applet pool. | ||||||
|  |     InsertPoolPartitionRegionIntoBothTrees(memory_layout, applet_pool_start, applet_pool_size, | ||||||
|  |                                            KMemoryRegionType_DramAppletPool, | ||||||
|  |                                            KMemoryRegionType_VirtualDramAppletPool, cur_pool_attr); | ||||||
|  |     total_overhead_size += KMemoryManager::CalculateManagementOverheadSize(applet_pool_size); | ||||||
|  |  | ||||||
|  |     // Insert the nonsecure system pool. | ||||||
|  |     InsertPoolPartitionRegionIntoBothTrees( | ||||||
|  |         memory_layout, unsafe_system_pool_start, unsafe_system_pool_size, | ||||||
|  |         KMemoryRegionType_DramSystemNonSecurePool, KMemoryRegionType_VirtualDramSystemNonSecurePool, | ||||||
|  |         cur_pool_attr); | ||||||
|  |     total_overhead_size += KMemoryManager::CalculateManagementOverheadSize(unsafe_system_pool_size); | ||||||
|  |  | ||||||
|  |     // Insert the pool management region. | ||||||
|  |     total_overhead_size += KMemoryManager::CalculateManagementOverheadSize( | ||||||
|  |         (unsafe_system_pool_start - pool_partitions_start) - total_overhead_size); | ||||||
|  |     const u64 pool_management_start = unsafe_system_pool_start - total_overhead_size; | ||||||
|  |     const size_t pool_management_size = total_overhead_size; | ||||||
|  |     u32 pool_management_attr = 0; | ||||||
|  |     InsertPoolPartitionRegionIntoBothTrees( | ||||||
|  |         memory_layout, pool_management_start, pool_management_size, | ||||||
|  |         KMemoryRegionType_DramPoolManagement, KMemoryRegionType_VirtualDramPoolManagement, | ||||||
|  |         pool_management_attr); | ||||||
|  |  | ||||||
|  |     // Insert the system pool. | ||||||
|  |     const u64 system_pool_size = pool_management_start - pool_partitions_start; | ||||||
|  |     InsertPoolPartitionRegionIntoBothTrees(memory_layout, pool_partitions_start, system_pool_size, | ||||||
|  |                                            KMemoryRegionType_DramSystemPool, | ||||||
|  |                                            KMemoryRegionType_VirtualDramSystemPool, cur_pool_attr); | ||||||
|  | } | ||||||
|  |  | ||||||
|  | } // namespace Init | ||||||
|  |  | ||||||
|  | } // namespace Kernel | ||||||
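The board file above carves the upper end of DRAM from the top down: the application pool ends where the pool area ends (DRAM end minus the KTrace buffer), the applet pool sits directly below it, the non-secure system pool below that with its start clamped by the kernel carveout limit, the pool-management metadata below that, and whatever remains down to the pool-partition start becomes the secure system pool. A minimal standalone sketch of that arithmetic follows; the DRAM base and the application/applet sizes are taken from constants visible in this commit, while the other values are made up for illustration rather than what KSystemControl::Init would report.

    #include <algorithm>
    #include <cstdint>
    #include <cstdio>

    // Stand-ins: DRAM base (0x80000000) and the 0xCD500000 / 0x1FB00000 pool sizes
    // come from this commit's constants; the rest are hypothetical.
    constexpr uint64_t DramEnd             = 0x1'8000'0000ULL; // 0x80000000 + 4 GiB
    constexpr uint64_t PoolPartitionsStart = 0x8100'0000ULL;   // hypothetical
    constexpr uint64_t KernelDramStart     = 0x8006'0000ULL;   // hypothetical
    constexpr uint64_t KTraceBufferSize    = 0;                // KTrace disabled
    constexpr uint64_t ApplicationPoolSize = 0xCD50'0000ULL;
    constexpr uint64_t AppletPoolSize      = 0x1FB0'0000ULL;
    constexpr uint64_t UnsafeSystemPoolMin = 0x2A0'0000ULL;    // hypothetical
    constexpr uint64_t CarveoutAlignment   = 0x20000;
    constexpr uint64_t CarveoutSizeMax     = (512ULL << 20) - CarveoutAlignment;

    constexpr uint64_t AlignDown(uint64_t value, uint64_t align) {
        return value - (value % align);
    }

    int main() {
        const uint64_t pool_end          = DramEnd - KTraceBufferSize;
        const uint64_t application_start = pool_end - ApplicationPoolSize;
        const uint64_t applet_start      = application_start - AppletPoolSize;
        // The non-secure pool start is clamped so the protected carveout below it
        // never exceeds CarveoutSizeMax; either operand of std::min can win.
        const uint64_t unsafe_start      = std::min(
            KernelDramStart + CarveoutSizeMax,
            AlignDown(applet_start - UnsafeSystemPoolMin, CarveoutAlignment));
        // Management metadata (size omitted here) sits below unsafe_start, and the
        // remainder down to PoolPartitionsStart is the secure system pool.
        std::printf("application pool @ %#llx\n", (unsigned long long)application_start);
        std::printf("applet pool      @ %#llx\n", (unsigned long long)applet_start);
        std::printf("non-secure pool  @ %#llx\n", (unsigned long long)unsafe_start);
        std::printf("system pool      @ %#llx\n", (unsigned long long)PoolPartitionsStart);
        return 0;
    }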
							
								
								
									
src/core/hle/kernel/k_memory_layout.cpp (new file, 183 lines)
							| @@ -0,0 +1,183 @@ | |||||||
|  | // Copyright 2021 yuzu Emulator Project | ||||||
|  | // Licensed under GPLv2 or any later version | ||||||
|  | // Refer to the license.txt file included. | ||||||
|  |  | ||||||
|  | #include "common/alignment.h" | ||||||
|  | #include "core/hle/kernel/k_memory_layout.h" | ||||||
|  | #include "core/hle/kernel/k_system_control.h" | ||||||
|  |  | ||||||
|  | namespace Kernel { | ||||||
|  |  | ||||||
|  | namespace { | ||||||
|  |  | ||||||
|  | class KMemoryRegionAllocator final : NonCopyable { | ||||||
|  | public: | ||||||
|  |     static constexpr size_t MaxMemoryRegions = 200; | ||||||
|  |  | ||||||
|  | private: | ||||||
|  |     KMemoryRegion region_heap[MaxMemoryRegions]{}; | ||||||
|  |     size_t num_regions{}; | ||||||
|  |  | ||||||
|  | public: | ||||||
|  |     constexpr KMemoryRegionAllocator() = default; | ||||||
|  |  | ||||||
|  | public: | ||||||
|  |     template <typename... Args> | ||||||
|  |     KMemoryRegion* Allocate(Args&&... args) { | ||||||
|  |         // Ensure we stay within the bounds of our heap. | ||||||
|  |         ASSERT(this->num_regions < MaxMemoryRegions); | ||||||
|  |  | ||||||
|  |         // Create the new region. | ||||||
|  |         KMemoryRegion* region = std::addressof(this->region_heap[this->num_regions++]); | ||||||
|  |         new (region) KMemoryRegion(std::forward<Args>(args)...); | ||||||
|  |  | ||||||
|  |         return region; | ||||||
|  |     } | ||||||
|  | }; | ||||||
|  |  | ||||||
|  | KMemoryRegionAllocator g_memory_region_allocator; | ||||||
|  |  | ||||||
|  | template <typename... Args> | ||||||
|  | KMemoryRegion* AllocateRegion(Args&&... args) { | ||||||
|  |     return g_memory_region_allocator.Allocate(std::forward<Args>(args)...); | ||||||
|  | } | ||||||
|  |  | ||||||
|  | } // namespace | ||||||
|  |  | ||||||
|  | void KMemoryRegionTree::InsertDirectly(u64 address, u64 last_address, u32 attr, u32 type_id) { | ||||||
|  |     this->insert(*AllocateRegion(address, last_address, attr, type_id)); | ||||||
|  | } | ||||||
|  |  | ||||||
|  | bool KMemoryRegionTree::Insert(u64 address, size_t size, u32 type_id, u32 new_attr, u32 old_attr) { | ||||||
|  |     // Locate the memory region that contains the address. | ||||||
|  |     KMemoryRegion* found = this->FindModifiable(address); | ||||||
|  |  | ||||||
|  |     // We require that the old attr is correct. | ||||||
|  |     if (found->GetAttributes() != old_attr) { | ||||||
|  |         return false; | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     // We further require that the region can be split from the old region. | ||||||
|  |     const u64 inserted_region_end = address + size; | ||||||
|  |     const u64 inserted_region_last = inserted_region_end - 1; | ||||||
|  |     if (found->GetLastAddress() < inserted_region_last) { | ||||||
|  |         return false; | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     // Further, we require that the type id is a valid transformation. | ||||||
|  |     if (!found->CanDerive(type_id)) { | ||||||
|  |         return false; | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     // Cache information from the region before we remove it. | ||||||
|  |     const u64 old_address = found->GetAddress(); | ||||||
|  |     const u64 old_last = found->GetLastAddress(); | ||||||
|  |     const u64 old_pair = found->GetPairAddress(); | ||||||
|  |     const u32 old_type = found->GetType(); | ||||||
|  |  | ||||||
|  |     // Erase the existing region from the tree. | ||||||
|  |     this->erase(this->iterator_to(*found)); | ||||||
|  |  | ||||||
|  |     // Insert the new region into the tree. | ||||||
|  |     if (old_address == address) { | ||||||
|  |         // Reuse the old object for the new region, if we can. | ||||||
|  |         found->Reset(address, inserted_region_last, old_pair, new_attr, type_id); | ||||||
|  |         this->insert(*found); | ||||||
|  |     } else { | ||||||
|  |         // If we can't re-use, adjust the old region. | ||||||
|  |         found->Reset(old_address, address - 1, old_pair, old_attr, old_type); | ||||||
|  |         this->insert(*found); | ||||||
|  |  | ||||||
|  |         // Insert a new region for the split. | ||||||
|  |         const u64 new_pair = (old_pair != std::numeric_limits<u64>::max()) | ||||||
|  |                                  ? old_pair + (address - old_address) | ||||||
|  |                                  : old_pair; | ||||||
|  |         this->insert(*AllocateRegion(address, inserted_region_last, new_pair, new_attr, type_id)); | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     // If a portion of the old region remains after the inserted region, insert a region for it. | ||||||
|  |     if (old_last != inserted_region_last) { | ||||||
|  |         const u64 after_pair = (old_pair != std::numeric_limits<u64>::max()) | ||||||
|  |                                    ? old_pair + (inserted_region_end - old_address) | ||||||
|  |                                    : old_pair; | ||||||
|  |         this->insert( | ||||||
|  |             *AllocateRegion(inserted_region_end, old_last, after_pair, old_attr, old_type)); | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     return true; | ||||||
|  | } | ||||||
|  |  | ||||||
|  | VAddr KMemoryRegionTree::GetRandomAlignedRegion(size_t size, size_t alignment, u32 type_id) { | ||||||
|  |     // We want to find the total extents of the type id. | ||||||
|  |     const auto extents = this->GetDerivedRegionExtents(static_cast<KMemoryRegionType>(type_id)); | ||||||
|  |  | ||||||
|  |     // Ensure that our alignment is correct. | ||||||
|  |     ASSERT(Common::IsAligned(extents.GetAddress(), alignment)); | ||||||
|  |  | ||||||
|  |     const u64 first_address = extents.GetAddress(); | ||||||
|  |     const u64 last_address = extents.GetLastAddress(); | ||||||
|  |  | ||||||
|  |     const u64 first_index = first_address / alignment; | ||||||
|  |     const u64 last_index = last_address / alignment; | ||||||
|  |  | ||||||
|  |     while (true) { | ||||||
|  |         const u64 candidate = | ||||||
|  |             KSystemControl::GenerateRandomRange(first_index, last_index) * alignment; | ||||||
|  |  | ||||||
|  |         // Ensure that the candidate doesn't overflow with the size. | ||||||
|  |         if (!(candidate < candidate + size)) { | ||||||
|  |             continue; | ||||||
|  |         } | ||||||
|  |  | ||||||
|  |         const u64 candidate_last = candidate + size - 1; | ||||||
|  |  | ||||||
|  |         // Ensure that the candidate fits within the region. | ||||||
|  |         if (candidate_last > last_address) { | ||||||
|  |             continue; | ||||||
|  |         } | ||||||
|  |  | ||||||
|  |         // Locate the candidate region, and ensure it fits and has the correct type id. | ||||||
|  |         if (const auto& candidate_region = *this->Find(candidate); | ||||||
|  |             !(candidate_last <= candidate_region.GetLastAddress() && | ||||||
|  |               candidate_region.GetType() == type_id)) { | ||||||
|  |             continue; | ||||||
|  |         } | ||||||
|  |  | ||||||
|  |         return candidate; | ||||||
|  |     } | ||||||
|  | } | ||||||
|  |  | ||||||
|  | void KMemoryLayout::InitializeLinearMemoryRegionTrees(PAddr aligned_linear_phys_start, | ||||||
|  |                                                       VAddr linear_virtual_start) { | ||||||
|  |     // Set static differences. | ||||||
|  |     linear_phys_to_virt_diff = linear_virtual_start - aligned_linear_phys_start; | ||||||
|  |     linear_virt_to_phys_diff = aligned_linear_phys_start - linear_virtual_start; | ||||||
|  |  | ||||||
|  |     // Initialize linear trees. | ||||||
|  |     for (auto& region : GetPhysicalMemoryRegionTree()) { | ||||||
|  |         if (region.HasTypeAttribute(KMemoryRegionAttr_LinearMapped)) { | ||||||
|  |             GetPhysicalLinearMemoryRegionTree().InsertDirectly( | ||||||
|  |                 region.GetAddress(), region.GetLastAddress(), region.GetAttributes(), | ||||||
|  |                 region.GetType()); | ||||||
|  |         } | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     for (auto& region : GetVirtualMemoryRegionTree()) { | ||||||
|  |         if (region.IsDerivedFrom(KMemoryRegionType_Dram)) { | ||||||
|  |             GetVirtualLinearMemoryRegionTree().InsertDirectly( | ||||||
|  |                 region.GetAddress(), region.GetLastAddress(), region.GetAttributes(), | ||||||
|  |                 region.GetType()); | ||||||
|  |         } | ||||||
|  |     } | ||||||
|  | } | ||||||
|  |  | ||||||
|  | size_t KMemoryLayout::GetResourceRegionSizeForInit() { | ||||||
|  |     // Calculate resource region size based on whether we allow extra threads. | ||||||
|  |     const bool use_extra_resources = KSystemControl::Init::ShouldIncreaseThreadResourceLimit(); | ||||||
|  |     size_t resource_region_size = | ||||||
|  |         KernelResourceSize + (use_extra_resources ? KernelSlabHeapAdditionalSize : 0); | ||||||
|  |  | ||||||
|  |     return resource_region_size; | ||||||
|  | } | ||||||
|  |  | ||||||
|  | } // namespace Kernel | ||||||
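KMemoryRegionTree::Insert only succeeds when the requested range lies wholly inside one existing region whose attributes equal old_attr and whose type can derive type_id; the containing region is then split into up to three pieces (the part before the insert, the inserted range itself, and the part after), with pair addresses offset accordingly. The small helper below is illustrative only, not part of the commit: it reproduces just the splitting geometry with hypothetical addresses and ignores the type/attribute checks and pair-address bookkeeping. The hunks that follow it rework the existing KMemoryLayout header around these trees.

    #include <array>
    #include <cstdint>
    #include <cstdio>
    #include <optional>
    #include <utility>

    using Range = std::pair<uint64_t, uint64_t>; // [address, last_address]

    // Illustrative only: mirrors the before/inserted/after pieces produced by
    // KMemoryRegionTree::Insert when the range fits inside one region.
    std::optional<std::array<std::optional<Range>, 3>> Split(Range region, uint64_t address,
                                                             uint64_t size) {
        const uint64_t last = address + size - 1;
        if (address < region.first || last > region.second) {
            return std::nullopt; // does not fit in one region; Insert() would return false
        }
        std::array<std::optional<Range>, 3> pieces{};
        if (address > region.first) {
            pieces[0] = Range{region.first, address - 1}; // keeps the old type/attr
        }
        pieces[1] = Range{address, last};                 // gets the new type/attr
        if (last < region.second) {
            pieces[2] = Range{last + 1, region.second};   // keeps the old type/attr
        }
        return pieces;
    }

    int main() {
        // Hypothetical: split a 256 MiB DRAM region with a 1 MiB insert at 0x80100000.
        const auto pieces = Split({0x8000'0000ULL, 0x8FFF'FFFFULL}, 0x8010'0000ULL, 0x10'0000ULL);
        for (const auto& piece : *pieces) {
            if (piece) {
                std::printf("[%#llx, %#llx]\n", (unsigned long long)piece->first,
                            (unsigned long long)piece->second);
            }
        }
        return 0;
    }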
| @@ -1,24 +1,67 @@ | |||||||
| // Copyright 2020 yuzu Emulator Project | // Copyright 2021 yuzu Emulator Project | ||||||
| // Licensed under GPLv2 or any later version | // Licensed under GPLv2 or any later version | ||||||
| // Refer to the license.txt file included. | // Refer to the license.txt file included. | ||||||
|  |  | ||||||
| #pragma once | #pragma once | ||||||
|  |  | ||||||
|  | #include "common/alignment.h" | ||||||
|  | #include "common/common_sizes.h" | ||||||
| #include "common/common_types.h" | #include "common/common_types.h" | ||||||
| #include "core/device_memory.h" | #include "core/device_memory.h" | ||||||
| #include "core/hle/kernel/k_memory_region.h" | #include "core/hle/kernel/k_memory_region.h" | ||||||
|  | #include "core/hle/kernel/k_memory_region_type.h" | ||||||
|  | #include "core/hle/kernel/memory_types.h" | ||||||
|  |  | ||||||
| namespace Kernel { | namespace Kernel { | ||||||
|  |  | ||||||
| constexpr std::size_t KernelAslrAlignment = 2 * 1024 * 1024; | constexpr std::size_t L1BlockSize = Size_1_GB; | ||||||
|  | constexpr std::size_t L2BlockSize = Size_2_MB; | ||||||
|  |  | ||||||
|  | constexpr std::size_t GetMaximumOverheadSize(std::size_t size) { | ||||||
|  |     return (Common::DivideUp(size, L1BlockSize) + Common::DivideUp(size, L2BlockSize)) * PageSize; | ||||||
|  | } | ||||||
|  |  | ||||||
|  | constexpr std::size_t MainMemorySize = Size_4_GB; | ||||||
|  | constexpr std::size_t MainMemorySizeMax = Size_8_GB; | ||||||
|  |  | ||||||
|  | constexpr std::size_t ReservedEarlyDramSize = 0x60000; | ||||||
|  | constexpr std::size_t DramPhysicalAddress = 0x80000000; | ||||||
|  |  | ||||||
|  | constexpr std::size_t KernelAslrAlignment = Size_2_MB; | ||||||
| constexpr std::size_t KernelVirtualAddressSpaceWidth = 1ULL << 39; | constexpr std::size_t KernelVirtualAddressSpaceWidth = 1ULL << 39; | ||||||
| constexpr std::size_t KernelPhysicalAddressSpaceWidth = 1ULL << 48; | constexpr std::size_t KernelPhysicalAddressSpaceWidth = 1ULL << 48; | ||||||
|  |  | ||||||
| constexpr std::size_t KernelVirtualAddressSpaceBase = 0ULL - KernelVirtualAddressSpaceWidth; | constexpr std::size_t KernelVirtualAddressSpaceBase = 0ULL - KernelVirtualAddressSpaceWidth; | ||||||
| constexpr std::size_t KernelVirtualAddressSpaceEnd = | constexpr std::size_t KernelVirtualAddressSpaceEnd = | ||||||
|     KernelVirtualAddressSpaceBase + (KernelVirtualAddressSpaceWidth - KernelAslrAlignment); |     KernelVirtualAddressSpaceBase + (KernelVirtualAddressSpaceWidth - KernelAslrAlignment); | ||||||
| constexpr std::size_t KernelVirtualAddressSpaceLast = KernelVirtualAddressSpaceEnd - 1; | constexpr std::size_t KernelVirtualAddressSpaceLast = KernelVirtualAddressSpaceEnd - 1ULL; | ||||||
| constexpr std::size_t KernelVirtualAddressSpaceSize = | constexpr std::size_t KernelVirtualAddressSpaceSize = | ||||||
|     KernelVirtualAddressSpaceEnd - KernelVirtualAddressSpaceBase; |     KernelVirtualAddressSpaceEnd - KernelVirtualAddressSpaceBase; | ||||||
|  | constexpr std::size_t KernelVirtualAddressCodeBase = KernelVirtualAddressSpaceBase; | ||||||
|  | constexpr std::size_t KernelVirtualAddressCodeSize = 0x62000; | ||||||
|  | constexpr std::size_t KernelVirtualAddressCodeEnd = | ||||||
|  |     KernelVirtualAddressCodeBase + KernelVirtualAddressCodeSize; | ||||||
|  |  | ||||||
|  | constexpr std::size_t KernelPhysicalAddressSpaceBase = 0ULL; | ||||||
|  | constexpr std::size_t KernelPhysicalAddressSpaceEnd = | ||||||
|  |     KernelPhysicalAddressSpaceBase + KernelPhysicalAddressSpaceWidth; | ||||||
|  | constexpr std::size_t KernelPhysicalAddressSpaceLast = KernelPhysicalAddressSpaceEnd - 1ULL; | ||||||
|  | constexpr std::size_t KernelPhysicalAddressSpaceSize = | ||||||
|  |     KernelPhysicalAddressSpaceEnd - KernelPhysicalAddressSpaceBase; | ||||||
|  | constexpr std::size_t KernelPhysicalAddressCodeBase = DramPhysicalAddress + ReservedEarlyDramSize; | ||||||
|  |  | ||||||
|  | constexpr std::size_t KernelPageTableHeapSize = GetMaximumOverheadSize(MainMemorySizeMax); | ||||||
|  | constexpr std::size_t KernelInitialPageHeapSize = Size_128_KB; | ||||||
|  |  | ||||||
|  | constexpr std::size_t KernelSlabHeapDataSize = Size_5_MB; | ||||||
|  | constexpr std::size_t KernelSlabHeapGapsSize = Size_2_MB - Size_64_KB; | ||||||
|  | constexpr std::size_t KernelSlabHeapSize = KernelSlabHeapDataSize + KernelSlabHeapGapsSize; | ||||||
|  |  | ||||||
|  | // NOTE: This is calculated from KThread slab counts, assuming KThread size <= 0x860. | ||||||
|  | constexpr std::size_t KernelSlabHeapAdditionalSize = 0x68000ULL; | ||||||
|  |  | ||||||
|  | constexpr std::size_t KernelResourceSize = | ||||||
|  |     KernelPageTableHeapSize + KernelInitialPageHeapSize + KernelSlabHeapSize; | ||||||
|  |  | ||||||
| constexpr bool IsKernelAddressKey(VAddr key) { | constexpr bool IsKernelAddressKey(VAddr key) { | ||||||
|     return KernelVirtualAddressSpaceBase <= key && key <= KernelVirtualAddressSpaceLast; |     return KernelVirtualAddressSpaceBase <= key && key <= KernelVirtualAddressSpaceLast; | ||||||
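The GetMaximumOverheadSize() helper above budgets the kernel page-table heap at one page of management overhead per 1 GiB L1 block plus one page per 2 MiB L2 block of the largest supported memory size; KernelResourceSize then stacks the initial page heap and the slab heap on top of that. A quick compile-time check of the arithmetic, assuming PageSize is 0x1000 and that the Size_* constants carry their literal values:

    #include <cstddef>

    // Assumed literal values for the constants referenced above.
    constexpr std::size_t PageSize  = 0x1000;               // 4 KiB
    constexpr std::size_t Size_2_MB = 2ULL * 1024 * 1024;
    constexpr std::size_t Size_1_GB = 1024ULL * 1024 * 1024;
    constexpr std::size_t Size_8_GB = 8ULL * Size_1_GB;

    constexpr std::size_t DivideUp(std::size_t a, std::size_t b) {
        return (a + b - 1) / b;
    }

    constexpr std::size_t GetMaximumOverheadSize(std::size_t size) {
        return (DivideUp(size, Size_1_GB) + DivideUp(size, Size_2_MB)) * PageSize;
    }

    // 8 L1 blocks + 4096 L2 blocks -> 4104 pages -> 0x1008000 bytes (~16 MiB).
    // Compile-time only; there is nothing to run.
    static_assert(GetMaximumOverheadSize(Size_8_GB) == (8 + 4096) * 0x1000);
    static_assert(GetMaximumOverheadSize(Size_8_GB) == 0x1008000);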
| @@ -30,41 +73,324 @@ constexpr bool IsKernelAddress(VAddr address) { | |||||||
|  |  | ||||||
| class KMemoryLayout final { | class KMemoryLayout final { | ||||||
| public: | public: | ||||||
|     constexpr const KMemoryRegion& Application() const { |     KMemoryLayout() = default; | ||||||
|         return application; |  | ||||||
|  |     KMemoryRegionTree& GetVirtualMemoryRegionTree() { | ||||||
|  |         return virtual_tree; | ||||||
|  |     } | ||||||
|  |     const KMemoryRegionTree& GetVirtualMemoryRegionTree() const { | ||||||
|  |         return virtual_tree; | ||||||
|  |     } | ||||||
|  |     KMemoryRegionTree& GetPhysicalMemoryRegionTree() { | ||||||
|  |         return physical_tree; | ||||||
|  |     } | ||||||
|  |     const KMemoryRegionTree& GetPhysicalMemoryRegionTree() const { | ||||||
|  |         return physical_tree; | ||||||
|  |     } | ||||||
|  |     KMemoryRegionTree& GetVirtualLinearMemoryRegionTree() { | ||||||
|  |         return virtual_linear_tree; | ||||||
|  |     } | ||||||
|  |     const KMemoryRegionTree& GetVirtualLinearMemoryRegionTree() const { | ||||||
|  |         return virtual_linear_tree; | ||||||
|  |     } | ||||||
|  |     KMemoryRegionTree& GetPhysicalLinearMemoryRegionTree() { | ||||||
|  |         return physical_linear_tree; | ||||||
|  |     } | ||||||
|  |     const KMemoryRegionTree& GetPhysicalLinearMemoryRegionTree() const { | ||||||
|  |         return physical_linear_tree; | ||||||
|     } |     } | ||||||
|  |  | ||||||
|     constexpr const KMemoryRegion& Applet() const { |     VAddr GetLinearVirtualAddress(PAddr address) const { | ||||||
|         return applet; |         return address + linear_phys_to_virt_diff; | ||||||
|  |     } | ||||||
|  |     PAddr GetLinearPhysicalAddress(VAddr address) const { | ||||||
|  |         return address + linear_virt_to_phys_diff; | ||||||
|     } |     } | ||||||
|  |  | ||||||
|     constexpr const KMemoryRegion& System() const { |     const KMemoryRegion* FindVirtual(VAddr address) const { | ||||||
|         return system; |         return Find(address, GetVirtualMemoryRegionTree()); | ||||||
|  |     } | ||||||
|  |     const KMemoryRegion* FindPhysical(PAddr address) const { | ||||||
|  |         return Find(address, GetPhysicalMemoryRegionTree()); | ||||||
|     } |     } | ||||||
|  |  | ||||||
|     static constexpr KMemoryLayout GetDefaultLayout() { |     const KMemoryRegion* FindVirtualLinear(VAddr address) const { | ||||||
|         constexpr std::size_t application_size{0xcd500000}; |         return Find(address, GetVirtualLinearMemoryRegionTree()); | ||||||
|         constexpr std::size_t applet_size{0x1fb00000}; |     } | ||||||
|         constexpr PAddr application_start_address{Core::DramMemoryMap::End - application_size}; |     const KMemoryRegion* FindPhysicalLinear(PAddr address) const { | ||||||
|         constexpr PAddr application_end_address{Core::DramMemoryMap::End}; |         return Find(address, GetPhysicalLinearMemoryRegionTree()); | ||||||
|         constexpr PAddr applet_start_address{application_start_address - applet_size}; |     } | ||||||
|         constexpr PAddr applet_end_address{applet_start_address + applet_size}; |  | ||||||
|         constexpr PAddr system_start_address{Core::DramMemoryMap::SlabHeapEnd}; |     VAddr GetMainStackTopAddress(s32 core_id) const { | ||||||
|         constexpr PAddr system_end_address{applet_start_address}; |         return GetStackTopAddress(core_id, KMemoryRegionType_KernelMiscMainStack); | ||||||
|         return {application_start_address, application_end_address, applet_start_address, |     } | ||||||
|                 applet_end_address,        system_start_address,    system_end_address}; |     VAddr GetIdleStackTopAddress(s32 core_id) const { | ||||||
|  |         return GetStackTopAddress(core_id, KMemoryRegionType_KernelMiscIdleStack); | ||||||
|  |     } | ||||||
|  |     VAddr GetExceptionStackTopAddress(s32 core_id) const { | ||||||
|  |         return GetStackTopAddress(core_id, KMemoryRegionType_KernelMiscExceptionStack); | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     VAddr GetSlabRegionAddress() const { | ||||||
|  |         return Dereference(GetVirtualMemoryRegionTree().FindByType(KMemoryRegionType_KernelSlab)) | ||||||
|  |             .GetAddress(); | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     const KMemoryRegion& GetDeviceRegion(KMemoryRegionType type) const { | ||||||
|  |         return Dereference(GetPhysicalMemoryRegionTree().FindFirstDerived(type)); | ||||||
|  |     } | ||||||
|  |     PAddr GetDevicePhysicalAddress(KMemoryRegionType type) const { | ||||||
|  |         return GetDeviceRegion(type).GetAddress(); | ||||||
|  |     } | ||||||
|  |     VAddr GetDeviceVirtualAddress(KMemoryRegionType type) const { | ||||||
|  |         return GetDeviceRegion(type).GetPairAddress(); | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     const KMemoryRegion& GetPoolManagementRegion() const { | ||||||
|  |         return Dereference( | ||||||
|  |             GetVirtualMemoryRegionTree().FindByType(KMemoryRegionType_VirtualDramPoolManagement)); | ||||||
|  |     } | ||||||
|  |     const KMemoryRegion& GetPageTableHeapRegion() const { | ||||||
|  |         return Dereference( | ||||||
|  |             GetVirtualMemoryRegionTree().FindByType(KMemoryRegionType_VirtualDramKernelPtHeap)); | ||||||
|  |     } | ||||||
|  |     const KMemoryRegion& GetKernelStackRegion() const { | ||||||
|  |         return Dereference(GetVirtualMemoryRegionTree().FindByType(KMemoryRegionType_KernelStack)); | ||||||
|  |     } | ||||||
|  |     const KMemoryRegion& GetTempRegion() const { | ||||||
|  |         return Dereference(GetVirtualMemoryRegionTree().FindByType(KMemoryRegionType_KernelTemp)); | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     const KMemoryRegion& GetKernelTraceBufferRegion() const { | ||||||
|  |         return Dereference(GetVirtualLinearMemoryRegionTree().FindByType( | ||||||
|  |             KMemoryRegionType_VirtualDramKernelTraceBuffer)); | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     const KMemoryRegion& GetVirtualLinearRegion(VAddr address) const { | ||||||
|  |         return Dereference(FindVirtualLinear(address)); | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     const KMemoryRegion* GetPhysicalKernelTraceBufferRegion() const { | ||||||
|  |         return GetPhysicalMemoryRegionTree().FindFirstDerived(KMemoryRegionType_KernelTraceBuffer); | ||||||
|  |     } | ||||||
|  |     const KMemoryRegion* GetPhysicalOnMemoryBootImageRegion() const { | ||||||
|  |         return GetPhysicalMemoryRegionTree().FindFirstDerived(KMemoryRegionType_OnMemoryBootImage); | ||||||
|  |     } | ||||||
|  |     const KMemoryRegion* GetPhysicalDTBRegion() const { | ||||||
|  |         return GetPhysicalMemoryRegionTree().FindFirstDerived(KMemoryRegionType_DTB); | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     bool IsHeapPhysicalAddress(const KMemoryRegion*& region, PAddr address) const { | ||||||
|  |         return IsTypedAddress(region, address, GetPhysicalLinearMemoryRegionTree(), | ||||||
|  |                               KMemoryRegionType_DramUserPool); | ||||||
|  |     } | ||||||
|  |     bool IsHeapVirtualAddress(const KMemoryRegion*& region, VAddr address) const { | ||||||
|  |         return IsTypedAddress(region, address, GetVirtualLinearMemoryRegionTree(), | ||||||
|  |                               KMemoryRegionType_VirtualDramUserPool); | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     bool IsHeapPhysicalAddress(const KMemoryRegion*& region, PAddr address, size_t size) const { | ||||||
|  |         return IsTypedAddress(region, address, size, GetPhysicalLinearMemoryRegionTree(), | ||||||
|  |                               KMemoryRegionType_DramUserPool); | ||||||
|  |     } | ||||||
|  |     bool IsHeapVirtualAddress(const KMemoryRegion*& region, VAddr address, size_t size) const { | ||||||
|  |         return IsTypedAddress(region, address, size, GetVirtualLinearMemoryRegionTree(), | ||||||
|  |                               KMemoryRegionType_VirtualDramUserPool); | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     bool IsLinearMappedPhysicalAddress(const KMemoryRegion*& region, PAddr address) const { | ||||||
|  |         return IsTypedAddress(region, address, GetPhysicalLinearMemoryRegionTree(), | ||||||
|  |                               static_cast<KMemoryRegionType>(KMemoryRegionAttr_LinearMapped)); | ||||||
|  |     } | ||||||
|  |     bool IsLinearMappedPhysicalAddress(const KMemoryRegion*& region, PAddr address, | ||||||
|  |                                        size_t size) const { | ||||||
|  |         return IsTypedAddress(region, address, size, GetPhysicalLinearMemoryRegionTree(), | ||||||
|  |                               static_cast<KMemoryRegionType>(KMemoryRegionAttr_LinearMapped)); | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     std::tuple<size_t, size_t> GetTotalAndKernelMemorySizes() const { | ||||||
|  |         size_t total_size = 0, kernel_size = 0; | ||||||
|  |         for (const auto& region : GetPhysicalMemoryRegionTree()) { | ||||||
|  |             if (region.IsDerivedFrom(KMemoryRegionType_Dram)) { | ||||||
|  |                 total_size += region.GetSize(); | ||||||
|  |                 if (!region.IsDerivedFrom(KMemoryRegionType_DramUserPool)) { | ||||||
|  |                     kernel_size += region.GetSize(); | ||||||
|  |                 } | ||||||
|  |             } | ||||||
|  |         } | ||||||
|  |         return std::make_tuple(total_size, kernel_size); | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     void InitializeLinearMemoryRegionTrees(PAddr aligned_linear_phys_start, | ||||||
|  |                                            VAddr linear_virtual_start); | ||||||
|  |     static size_t GetResourceRegionSizeForInit(); | ||||||
|  |  | ||||||
|  |     auto GetKernelRegionExtents() const { | ||||||
|  |         return GetVirtualMemoryRegionTree().GetDerivedRegionExtents(KMemoryRegionType_Kernel); | ||||||
|  |     } | ||||||
|  |     auto GetKernelCodeRegionExtents() const { | ||||||
|  |         return GetVirtualMemoryRegionTree().GetDerivedRegionExtents(KMemoryRegionType_KernelCode); | ||||||
|  |     } | ||||||
|  |     auto GetKernelStackRegionExtents() const { | ||||||
|  |         return GetVirtualMemoryRegionTree().GetDerivedRegionExtents(KMemoryRegionType_KernelStack); | ||||||
|  |     } | ||||||
|  |     auto GetKernelMiscRegionExtents() const { | ||||||
|  |         return GetVirtualMemoryRegionTree().GetDerivedRegionExtents(KMemoryRegionType_KernelMisc); | ||||||
|  |     } | ||||||
|  |     auto GetKernelSlabRegionExtents() const { | ||||||
|  |         return GetVirtualMemoryRegionTree().GetDerivedRegionExtents(KMemoryRegionType_KernelSlab); | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     auto GetLinearRegionPhysicalExtents() const { | ||||||
|  |         return GetPhysicalMemoryRegionTree().GetDerivedRegionExtents( | ||||||
|  |             KMemoryRegionAttr_LinearMapped); | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     auto GetLinearRegionVirtualExtents() const { | ||||||
|  |         const auto physical = GetLinearRegionPhysicalExtents(); | ||||||
|  |         return KMemoryRegion(GetLinearVirtualAddress(physical.GetAddress()), | ||||||
|  |                              GetLinearVirtualAddress(physical.GetLastAddress()), 0, | ||||||
|  |                              KMemoryRegionType_None); | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     auto GetMainMemoryPhysicalExtents() const { | ||||||
|  |         return GetPhysicalMemoryRegionTree().GetDerivedRegionExtents(KMemoryRegionType_Dram); | ||||||
|  |     } | ||||||
|  |     auto GetCarveoutRegionExtents() const { | ||||||
|  |         return GetPhysicalMemoryRegionTree().GetDerivedRegionExtents( | ||||||
|  |             KMemoryRegionAttr_CarveoutProtected); | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     auto GetKernelRegionPhysicalExtents() const { | ||||||
|  |         return GetPhysicalMemoryRegionTree().GetDerivedRegionExtents( | ||||||
|  |             KMemoryRegionType_DramKernelBase); | ||||||
|  |     } | ||||||
|  |     auto GetKernelCodeRegionPhysicalExtents() const { | ||||||
|  |         return GetPhysicalMemoryRegionTree().GetDerivedRegionExtents( | ||||||
|  |             KMemoryRegionType_DramKernelCode); | ||||||
|  |     } | ||||||
|  |     auto GetKernelSlabRegionPhysicalExtents() const { | ||||||
|  |         return GetPhysicalMemoryRegionTree().GetDerivedRegionExtents( | ||||||
|  |             KMemoryRegionType_DramKernelSlab); | ||||||
|  |     } | ||||||
|  |     auto GetKernelPageTableHeapRegionPhysicalExtents() const { | ||||||
|  |         return GetPhysicalMemoryRegionTree().GetDerivedRegionExtents( | ||||||
|  |             KMemoryRegionType_DramKernelPtHeap); | ||||||
|  |     } | ||||||
|  |     auto GetKernelInitPageTableRegionPhysicalExtents() const { | ||||||
|  |         return GetPhysicalMemoryRegionTree().GetDerivedRegionExtents( | ||||||
|  |             KMemoryRegionType_DramKernelInitPt); | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     auto GetKernelPoolManagementRegionPhysicalExtents() const { | ||||||
|  |         return GetPhysicalMemoryRegionTree().GetDerivedRegionExtents( | ||||||
|  |             KMemoryRegionType_DramPoolManagement); | ||||||
|  |     } | ||||||
|  |     auto GetKernelPoolPartitionRegionPhysicalExtents() const { | ||||||
|  |         return GetPhysicalMemoryRegionTree().GetDerivedRegionExtents( | ||||||
|  |             KMemoryRegionType_DramPoolPartition); | ||||||
|  |     } | ||||||
|  |     auto GetKernelSystemPoolRegionPhysicalExtents() const { | ||||||
|  |         return GetPhysicalMemoryRegionTree().GetDerivedRegionExtents( | ||||||
|  |             KMemoryRegionType_DramSystemPool); | ||||||
|  |     } | ||||||
|  |     auto GetKernelSystemNonSecurePoolRegionPhysicalExtents() const { | ||||||
|  |         return GetPhysicalMemoryRegionTree().GetDerivedRegionExtents( | ||||||
|  |             KMemoryRegionType_DramSystemNonSecurePool); | ||||||
|  |     } | ||||||
|  |     auto GetKernelAppletPoolRegionPhysicalExtents() const { | ||||||
|  |         return GetPhysicalMemoryRegionTree().GetDerivedRegionExtents( | ||||||
|  |             KMemoryRegionType_DramAppletPool); | ||||||
|  |     } | ||||||
|  |     auto GetKernelApplicationPoolRegionPhysicalExtents() const { | ||||||
|  |         return GetPhysicalMemoryRegionTree().GetDerivedRegionExtents( | ||||||
|  |             KMemoryRegionType_DramApplicationPool); | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     auto GetKernelTraceBufferRegionPhysicalExtents() const { | ||||||
|  |         return GetPhysicalMemoryRegionTree().GetDerivedRegionExtents( | ||||||
|  |             KMemoryRegionType_KernelTraceBuffer); | ||||||
|     } |     } | ||||||
|  |  | ||||||
| private: | private: | ||||||
|     constexpr KMemoryLayout(PAddr application_start_address, std::size_t application_size, |     template <typename AddressType> | ||||||
|                             PAddr applet_start_address, std::size_t applet_size, |     static bool IsTypedAddress(const KMemoryRegion*& region, AddressType address, | ||||||
|                             PAddr system_start_address, std::size_t system_size) |                                const KMemoryRegionTree& tree, KMemoryRegionType type) { | ||||||
|         : application{application_start_address, application_size}, |         // Check if the cached region already contains the address. | ||||||
|           applet{applet_start_address, applet_size}, system{system_start_address, system_size} {} |         if (region != nullptr && region->Contains(address)) { | ||||||
|  |             return true; | ||||||
|  |         } | ||||||
|  |  | ||||||
|     const KMemoryRegion application; |         // Find the containing region, and update the cache. | ||||||
|     const KMemoryRegion applet; |         if (const KMemoryRegion* found = tree.Find(address); | ||||||
|     const KMemoryRegion system; |             found != nullptr && found->IsDerivedFrom(type)) { | ||||||
|  |             region = found; | ||||||
|  |             return true; | ||||||
|  |         } else { | ||||||
|  |             return false; | ||||||
|  |         } | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     template <typename AddressType> | ||||||
|  |     static bool IsTypedAddress(const KMemoryRegion*& region, AddressType address, size_t size, | ||||||
|  |                                const KMemoryRegionTree& tree, KMemoryRegionType type) { | ||||||
|  |         // Get the end of the checked region. | ||||||
|  |         const u64 last_address = address + size - 1; | ||||||
|  |  | ||||||
|  |         // Walk the tree to verify the region is correct. | ||||||
|  |         const KMemoryRegion* cur = | ||||||
|  |             (region != nullptr && region->Contains(address)) ? region : tree.Find(address); | ||||||
|  |         while (cur != nullptr && cur->IsDerivedFrom(type)) { | ||||||
|  |             if (last_address <= cur->GetLastAddress()) { | ||||||
|  |                 region = cur; | ||||||
|  |                 return true; | ||||||
|  |             } | ||||||
|  |  | ||||||
|  |             cur = cur->GetNext(); | ||||||
|  |         } | ||||||
|  |         return false; | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     template <typename AddressType> | ||||||
|  |     static const KMemoryRegion* Find(AddressType address, const KMemoryRegionTree& tree) { | ||||||
|  |         return tree.Find(address); | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     static KMemoryRegion& Dereference(KMemoryRegion* region) { | ||||||
|  |         ASSERT(region != nullptr); | ||||||
|  |         return *region; | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     static const KMemoryRegion& Dereference(const KMemoryRegion* region) { | ||||||
|  |         ASSERT(region != nullptr); | ||||||
|  |         return *region; | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     VAddr GetStackTopAddress(s32 core_id, KMemoryRegionType type) const { | ||||||
|  |         const auto& region = Dereference( | ||||||
|  |             GetVirtualMemoryRegionTree().FindByTypeAndAttribute(type, static_cast<u32>(core_id))); | ||||||
|  |         ASSERT(region.GetEndAddress() != 0); | ||||||
|  |         return region.GetEndAddress(); | ||||||
|  |     } | ||||||
|  |  | ||||||
|  | private: | ||||||
|  |     u64 linear_phys_to_virt_diff{}; | ||||||
|  |     u64 linear_virt_to_phys_diff{}; | ||||||
|  |     KMemoryRegionTree virtual_tree; | ||||||
|  |     KMemoryRegionTree physical_tree; | ||||||
|  |     KMemoryRegionTree virtual_linear_tree; | ||||||
|  |     KMemoryRegionTree physical_linear_tree; | ||||||
| }; | }; | ||||||
|  |  | ||||||
|  | namespace Init { | ||||||
|  |  | ||||||
|  | // These should be generic, regardless of board. | ||||||
|  | void SetupPoolPartitionMemoryRegions(KMemoryLayout& memory_layout); | ||||||
|  |  | ||||||
|  | // These may be implemented in a board-specific manner. | ||||||
|  | void SetupDevicePhysicalMemoryRegions(KMemoryLayout& memory_layout); | ||||||
|  | void SetupDramPhysicalMemoryRegions(KMemoryLayout& memory_layout); | ||||||
|  |  | ||||||
|  | } // namespace Init | ||||||
|  |  | ||||||
| } // namespace Kernel | } // namespace Kernel | ||||||
|   | |||||||
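With the trees populated and InitializeLinearMemoryRegionTrees() having recorded the linear-mapping deltas, the rest of the kernel can translate linear-mapped addresses and classify regions through the layout object instead of hard-coded constants; the KernelCore::Impl changes that follow build exactly such a layout at boot. A usage sketch against the accessors declared above, assuming (not showing) a fully populated KMemoryLayout and a physical address known to lie in the user pool:

    #include "common/assert.h"
    #include "common/common_types.h"
    #include "common/logging/log.h"
    #include "core/hle/kernel/k_memory_layout.h"

    // Sketch only: the surrounding setup (a populated memory_layout) is assumed.
    void DumpHeapRegion(const Kernel::KMemoryLayout& memory_layout, PAddr heap_phys_addr) {
        // Translate a linear-mapped physical address to its kernel virtual alias and back.
        const VAddr virt = memory_layout.GetLinearVirtualAddress(heap_phys_addr);
        ASSERT(memory_layout.GetLinearPhysicalAddress(virt) == heap_phys_addr);

        // Region classification caches the containing region in the passed pointer,
        // so repeated queries for nearby addresses skip the tree walk.
        const Kernel::KMemoryRegion* hint = nullptr;
        if (memory_layout.IsHeapPhysicalAddress(hint, heap_phys_addr)) {
            LOG_DEBUG(Kernel, "heap region: {:#x}..{:#x}", hint->GetAddress(),
                      hint->GetLastAddress());
        }
    }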
| @@ -1,4 +1,4 @@ | |||||||
| // Copyright 2014 Citra Emulator Project | // Copyright 2021 yuzu Emulator Project | ||||||
| // Licensed under GPLv2 or any later version | // Licensed under GPLv2 or any later version | ||||||
| // Refer to the license.txt file included. | // Refer to the license.txt file included. | ||||||
|  |  | ||||||
| @@ -12,6 +12,7 @@ | |||||||
| #include <utility> | #include <utility> | ||||||
|  |  | ||||||
| #include "common/assert.h" | #include "common/assert.h" | ||||||
|  | #include "common/common_sizes.h" | ||||||
| #include "common/logging/log.h" | #include "common/logging/log.h" | ||||||
| #include "common/microprofile.h" | #include "common/microprofile.h" | ||||||
| #include "common/thread.h" | #include "common/thread.h" | ||||||
| @@ -269,44 +270,310 @@ struct KernelCore::Impl { | |||||||
|     } |     } | ||||||
|  |  | ||||||
|     void InitializeMemoryLayout() { |     void InitializeMemoryLayout() { | ||||||
|         // Initialize memory layout |         KMemoryLayout memory_layout; | ||||||
|         constexpr KMemoryLayout layout{KMemoryLayout::GetDefaultLayout()}; |  | ||||||
|  |         // Insert the root region for the virtual memory tree, from which all other regions will | ||||||
|  |         // derive. | ||||||
|  |         memory_layout.GetVirtualMemoryRegionTree().InsertDirectly( | ||||||
|  |             KernelVirtualAddressSpaceBase, | ||||||
|  |             KernelVirtualAddressSpaceBase + KernelVirtualAddressSpaceSize - 1); | ||||||
|  |  | ||||||
|  |         // Insert the root region for the physical memory tree, from which all other regions will | ||||||
|  |         // derive. | ||||||
|  |         memory_layout.GetPhysicalMemoryRegionTree().InsertDirectly( | ||||||
|  |             KernelPhysicalAddressSpaceBase, | ||||||
|  |             KernelPhysicalAddressSpaceBase + KernelPhysicalAddressSpaceSize - 1); | ||||||
|  |  | ||||||
|  |         // Save start and end for ease of use. | ||||||
|  |         const VAddr code_start_virt_addr = KernelVirtualAddressCodeBase; | ||||||
|  |         const VAddr code_end_virt_addr = KernelVirtualAddressCodeEnd; | ||||||
|  |  | ||||||
|  |         // Setup the containing kernel region. | ||||||
|  |         constexpr size_t KernelRegionSize = Size_1_GB; | ||||||
|  |         constexpr size_t KernelRegionAlign = Size_1_GB; | ||||||
|  |         constexpr VAddr kernel_region_start = | ||||||
|  |             Common::AlignDown(code_start_virt_addr, KernelRegionAlign); | ||||||
|  |         size_t kernel_region_size = KernelRegionSize; | ||||||
|  |         if (!(kernel_region_start + KernelRegionSize - 1 <= KernelVirtualAddressSpaceLast)) { | ||||||
|  |             kernel_region_size = KernelVirtualAddressSpaceEnd - kernel_region_start; | ||||||
|  |         } | ||||||
|  |         ASSERT(memory_layout.GetVirtualMemoryRegionTree().Insert( | ||||||
|  |             kernel_region_start, kernel_region_size, KMemoryRegionType_Kernel)); | ||||||
|  |  | ||||||
|  |         // Setup the code region. | ||||||
|  |         constexpr size_t CodeRegionAlign = PageSize; | ||||||
|  |         constexpr VAddr code_region_start = | ||||||
|  |             Common::AlignDown(code_start_virt_addr, CodeRegionAlign); | ||||||
|  |         constexpr VAddr code_region_end = Common::AlignUp(code_end_virt_addr, CodeRegionAlign); | ||||||
|  |         constexpr size_t code_region_size = code_region_end - code_region_start; | ||||||
|  |         ASSERT(memory_layout.GetVirtualMemoryRegionTree().Insert( | ||||||
|  |             code_region_start, code_region_size, KMemoryRegionType_KernelCode)); | ||||||
|  |  | ||||||
|  |         // Setup board-specific device physical regions. | ||||||
|  |         Init::SetupDevicePhysicalMemoryRegions(memory_layout); | ||||||
|  |  | ||||||
|  |         // Determine the amount of space needed for the misc region. | ||||||
|  |         size_t misc_region_needed_size; | ||||||
|  |         { | ||||||
|  |             // Each core has a one-page stack for all three stack types (Main, Idle, Exception). | ||||||
|  |             misc_region_needed_size = Core::Hardware::NUM_CPU_CORES * (3 * (PageSize + PageSize)); | ||||||
|  |  | ||||||
|  |             // Account for each auto-map device. | ||||||
|  |             for (const auto& region : memory_layout.GetPhysicalMemoryRegionTree()) { | ||||||
|  |                 if (region.HasTypeAttribute(KMemoryRegionAttr_ShouldKernelMap)) { | ||||||
|  |                     // Check that the region is valid. | ||||||
|  |                     ASSERT(region.GetEndAddress() != 0); | ||||||
|  |  | ||||||
|  |                     // Account for the region. | ||||||
|  |                     misc_region_needed_size += | ||||||
|  |                         PageSize + (Common::AlignUp(region.GetLastAddress(), PageSize) - | ||||||
|  |                                     Common::AlignDown(region.GetAddress(), PageSize)); | ||||||
|  |                 } | ||||||
|  |             } | ||||||
|  |  | ||||||
|  |             // Multiply the needed size by three, to account for the need for guard space. | ||||||
|  |             misc_region_needed_size *= 3; | ||||||
|  |         } | ||||||
|  |  | ||||||
|  |         // Decide on the actual size for the misc region. | ||||||
|  |         constexpr size_t MiscRegionAlign = KernelAslrAlignment; | ||||||
|  |         constexpr size_t MiscRegionMinimumSize = Size_32_MB; | ||||||
|  |         const size_t misc_region_size = Common::AlignUp( | ||||||
|  |             std::max(misc_region_needed_size, MiscRegionMinimumSize), MiscRegionAlign); | ||||||
|  |         ASSERT(misc_region_size > 0); | ||||||
|  |  | ||||||
|  |         // Setup the misc region. | ||||||
|  |         const VAddr misc_region_start = | ||||||
|  |             memory_layout.GetVirtualMemoryRegionTree().GetRandomAlignedRegion( | ||||||
|  |                 misc_region_size, MiscRegionAlign, KMemoryRegionType_Kernel); | ||||||
|  |         ASSERT(memory_layout.GetVirtualMemoryRegionTree().Insert( | ||||||
|  |             misc_region_start, misc_region_size, KMemoryRegionType_KernelMisc)); | ||||||
|  |  | ||||||
|  |         // Setup the stack region. | ||||||
|  |         constexpr size_t StackRegionSize = Size_14_MB; | ||||||
|  |         constexpr size_t StackRegionAlign = KernelAslrAlignment; | ||||||
|  |         const VAddr stack_region_start = | ||||||
|  |             memory_layout.GetVirtualMemoryRegionTree().GetRandomAlignedRegion( | ||||||
|  |                 StackRegionSize, StackRegionAlign, KMemoryRegionType_Kernel); | ||||||
|  |         ASSERT(memory_layout.GetVirtualMemoryRegionTree().Insert( | ||||||
|  |             stack_region_start, StackRegionSize, KMemoryRegionType_KernelStack)); | ||||||
|  |  | ||||||
|  |         // Determine the size of the resource region. | ||||||
|  |         const size_t resource_region_size = memory_layout.GetResourceRegionSizeForInit(); | ||||||
|  |  | ||||||
|  |         // Determine the size of the slab region. | ||||||
|  |         const size_t slab_region_size = Common::AlignUp(KernelSlabHeapSize, PageSize); | ||||||
|  |         ASSERT(slab_region_size <= resource_region_size); | ||||||
|  |  | ||||||
|  |         // Setup the slab region. | ||||||
|  |         const PAddr code_start_phys_addr = KernelPhysicalAddressCodeBase; | ||||||
|  |         const PAddr code_end_phys_addr = code_start_phys_addr + code_region_size; | ||||||
|  |         const PAddr slab_start_phys_addr = code_end_phys_addr; | ||||||
|  |         const PAddr slab_end_phys_addr = slab_start_phys_addr + slab_region_size; | ||||||
|  |         constexpr size_t SlabRegionAlign = KernelAslrAlignment; | ||||||
|  |         const size_t slab_region_needed_size = | ||||||
|  |             Common::AlignUp(code_end_phys_addr + slab_region_size, SlabRegionAlign) - | ||||||
|  |             Common::AlignDown(code_end_phys_addr, SlabRegionAlign); | ||||||
|  |         const VAddr slab_region_start = | ||||||
|  |             memory_layout.GetVirtualMemoryRegionTree().GetRandomAlignedRegion( | ||||||
|  |                 slab_region_needed_size, SlabRegionAlign, KMemoryRegionType_Kernel) + | ||||||
|  |             (code_end_phys_addr % SlabRegionAlign); | ||||||
|  |         ASSERT(memory_layout.GetVirtualMemoryRegionTree().Insert( | ||||||
|  |             slab_region_start, slab_region_size, KMemoryRegionType_KernelSlab)); | ||||||
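The slab placement above reserves a window large enough to cover the slab even though the slab's physical start is generally not aligned to KernelAslrAlignment, then offsets the randomly chosen aligned virtual base by the physical start's misalignment, so the slab's virtual and physical addresses share the same low bits. A small sketch of that arithmetic, with invented addresses and a 2 MiB alignment assumed purely for illustration:

    #include <cstdint>
    #include <cstdio>

    constexpr std::uint64_t AlignUp(std::uint64_t v, std::uint64_t a) { return (v + a - 1) & ~(a - 1); }
    constexpr std::uint64_t AlignDown(std::uint64_t v, std::uint64_t a) { return v & ~(a - 1); }

    int main() {
        constexpr std::uint64_t SlabRegionAlign = 0x200000;       // assumed 2 MiB ASLR alignment
        constexpr std::uint64_t slab_region_size = 0x150000;      // hypothetical slab heap size
        constexpr std::uint64_t code_end_phys_addr = 0x80560000;  // hypothetical end of kernel code

        // Window that must be reserved so the unaligned slab fits at a fixed offset inside it.
        const std::uint64_t needed = AlignUp(code_end_phys_addr + slab_region_size, SlabRegionAlign) -
                                     AlignDown(code_end_phys_addr, SlabRegionAlign);

        // Pretend the randomized, aligned virtual window starts here.
        const std::uint64_t window_virt = 0xFFFFFF8000A00000ull;
        const std::uint64_t slab_virt = window_virt + (code_end_phys_addr % SlabRegionAlign);

        // The low 21 bits of slab_virt now match those of code_end_phys_addr.
        std::printf("window size 0x%llx, slab virt 0x%llx\n",
                    static_cast<unsigned long long>(needed),
                    static_cast<unsigned long long>(slab_virt));
        return 0;
    }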
|  |  | ||||||
|  |         // Setup the temp region. | ||||||
|  |         constexpr size_t TempRegionSize = Size_128_MB; | ||||||
|  |         constexpr size_t TempRegionAlign = KernelAslrAlignment; | ||||||
|  |         const VAddr temp_region_start = | ||||||
|  |             memory_layout.GetVirtualMemoryRegionTree().GetRandomAlignedRegion( | ||||||
|  |                 TempRegionSize, TempRegionAlign, KMemoryRegionType_Kernel); | ||||||
|  |         ASSERT(memory_layout.GetVirtualMemoryRegionTree().Insert(temp_region_start, TempRegionSize, | ||||||
|  |                                                                  KMemoryRegionType_KernelTemp)); | ||||||
|  |  | ||||||
|  |         // Automatically map in devices that have auto-map attributes. | ||||||
|  |         for (auto& region : memory_layout.GetPhysicalMemoryRegionTree()) { | ||||||
|  |             // We only care about kernel regions. | ||||||
|  |             if (!region.IsDerivedFrom(KMemoryRegionType_Kernel)) { | ||||||
|  |                 continue; | ||||||
|  |             } | ||||||
|  |  | ||||||
|  |             // Check whether we should map the region. | ||||||
|  |             if (!region.HasTypeAttribute(KMemoryRegionAttr_ShouldKernelMap)) { | ||||||
|  |                 continue; | ||||||
|  |             } | ||||||
|  |  | ||||||
|  |             // If this region has already been mapped, no need to consider it. | ||||||
|  |             if (region.HasTypeAttribute(KMemoryRegionAttr_DidKernelMap)) { | ||||||
|  |                 continue; | ||||||
|  |             } | ||||||
|  |  | ||||||
|  |             // Check that the region is valid. | ||||||
|  |             ASSERT(region.GetEndAddress() != 0); | ||||||
|  |  | ||||||
|  |             // Set the attribute to note we've mapped this region. | ||||||
|  |             region.SetTypeAttribute(KMemoryRegionAttr_DidKernelMap); | ||||||
|  |  | ||||||
|  |             // Create a virtual pair region and insert it into the tree. | ||||||
|  |             const PAddr map_phys_addr = Common::AlignDown(region.GetAddress(), PageSize); | ||||||
|  |             const size_t map_size = | ||||||
|  |                 Common::AlignUp(region.GetEndAddress(), PageSize) - map_phys_addr; | ||||||
|  |             const VAddr map_virt_addr = | ||||||
|  |                 memory_layout.GetVirtualMemoryRegionTree().GetRandomAlignedRegionWithGuard( | ||||||
|  |                     map_size, PageSize, KMemoryRegionType_KernelMisc, PageSize); | ||||||
|  |             ASSERT(memory_layout.GetVirtualMemoryRegionTree().Insert( | ||||||
|  |                 map_virt_addr, map_size, KMemoryRegionType_KernelMiscMappedDevice)); | ||||||
|  |             region.SetPairAddress(map_virt_addr + region.GetAddress() - map_phys_addr); | ||||||
|  |         } | ||||||
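For the device auto-mapping above, the physical window is widened to page granularity before a guarded virtual window is reserved, and the pair address records where the (possibly unaligned) device start lands inside that window. A self-contained sketch of just the address math, using made-up MMIO addresses rather than any real device:

    #include <cstdint>
    #include <cstdio>

    constexpr std::uint64_t PageSize = 0x1000;
    constexpr std::uint64_t AlignUp(std::uint64_t v, std::uint64_t a) { return (v + a - 1) & ~(a - 1); }
    constexpr std::uint64_t AlignDown(std::uint64_t v, std::uint64_t a) { return v & ~(a - 1); }

    int main() {
        // Hypothetical device region: starts mid-page.
        const std::uint64_t dev_start = 0x60006400;
        const std::uint64_t dev_end = 0x60007000; // exclusive end

        const std::uint64_t map_phys_addr = AlignDown(dev_start, PageSize);        // 0x60006000
        const std::uint64_t map_size = AlignUp(dev_end, PageSize) - map_phys_addr; // 0x1000

        // Pretend this is the randomized, page-aligned virtual window the tree handed back.
        const std::uint64_t map_virt_addr = 0xFFFFFF8010000000ull;

        // Pair address: where dev_start itself becomes visible in the mapped window.
        const std::uint64_t pair_addr = map_virt_addr + dev_start - map_phys_addr;
        std::printf("map 0x%llx bytes at virt 0x%llx, device registers at 0x%llx\n",
                    static_cast<unsigned long long>(map_size),
                    static_cast<unsigned long long>(map_virt_addr),
                    static_cast<unsigned long long>(pair_addr));
        return 0;
    }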
|  |  | ||||||
|  |         Init::SetupDramPhysicalMemoryRegions(memory_layout); | ||||||
|  |  | ||||||
|  |         // Insert a physical region for the kernel code region. | ||||||
|  |         ASSERT(memory_layout.GetPhysicalMemoryRegionTree().Insert( | ||||||
|  |             code_start_phys_addr, code_region_size, KMemoryRegionType_DramKernelCode)); | ||||||
|  |  | ||||||
|  |         // Insert a physical region for the kernel slab region. | ||||||
|  |         ASSERT(memory_layout.GetPhysicalMemoryRegionTree().Insert( | ||||||
|  |             slab_start_phys_addr, slab_region_size, KMemoryRegionType_DramKernelSlab)); | ||||||
|  |  | ||||||
|  |         // Determine size available for kernel page table heaps, requiring > 8 MB. | ||||||
|  |         const PAddr resource_end_phys_addr = slab_start_phys_addr + resource_region_size; | ||||||
|  |         const size_t page_table_heap_size = resource_end_phys_addr - slab_end_phys_addr; | ||||||
|  |         ASSERT(page_table_heap_size / Size_4_MB > 2); | ||||||
|  |  | ||||||
|  |         // Insert a physical region for the kernel page table heap region. | ||||||
|  |         ASSERT(memory_layout.GetPhysicalMemoryRegionTree().Insert( | ||||||
|  |             slab_end_phys_addr, page_table_heap_size, KMemoryRegionType_DramKernelPtHeap)); | ||||||
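The three physical insertions above carve one contiguous DRAM span: kernel code at the code base, the slab heap immediately after the code, and the page-table heap filling the rest of the resource region. A quick arithmetic sketch with hypothetical sizes (the code base value is an assumption, not taken from this diff):

    #include <cstdint>
    #include <cstdio>

    int main() {
        // All values are hypothetical, chosen only to show how the regions tile the span.
        const std::uint64_t code_start = 0x80060000;    // assumed kernel code physical base
        const std::uint64_t code_size = 0x200000;
        const std::uint64_t slab_size = 0x500000;
        const std::uint64_t resource_size = 0x2000000;  // hypothetical resource region size

        const std::uint64_t code_end = code_start + code_size;       // == slab start
        const std::uint64_t slab_end = code_end + slab_size;
        const std::uint64_t resource_end = code_end + resource_size; // resource region is measured from slab start
        const std::uint64_t pt_heap_size = resource_end - slab_end;

        // 0x1B00000 (27 MiB) / 4 MiB = 6 > 2, so the size check above would pass.
        std::printf("code   [0x%llx, 0x%llx)\n", static_cast<unsigned long long>(code_start),
                    static_cast<unsigned long long>(code_end));
        std::printf("slab   [0x%llx, 0x%llx)\n", static_cast<unsigned long long>(code_end),
                    static_cast<unsigned long long>(slab_end));
        std::printf("ptheap [0x%llx, 0x%llx) size 0x%llx\n", static_cast<unsigned long long>(slab_end),
                    static_cast<unsigned long long>(resource_end),
                    static_cast<unsigned long long>(pt_heap_size));
        return 0;
    }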
|  |  | ||||||
|  |         // All DRAM regions that we haven't tagged by this point will be mapped under the linear | ||||||
|  |         // mapping. Tag them. | ||||||
|  |         for (auto& region : memory_layout.GetPhysicalMemoryRegionTree()) { | ||||||
|  |             if (region.GetType() == KMemoryRegionType_Dram) { | ||||||
|  |                 // Check that the region is valid. | ||||||
|  |                 ASSERT(region.GetEndAddress() != 0); | ||||||
|  |  | ||||||
|  |                 // Set the linear map attribute. | ||||||
|  |                 region.SetTypeAttribute(KMemoryRegionAttr_LinearMapped); | ||||||
|  |             } | ||||||
|  |         } | ||||||
|  |  | ||||||
|  |         // Get the linear region extents. | ||||||
|  |         const auto linear_extents = | ||||||
|  |             memory_layout.GetPhysicalMemoryRegionTree().GetDerivedRegionExtents( | ||||||
|  |                 KMemoryRegionAttr_LinearMapped); | ||||||
|  |         ASSERT(linear_extents.GetEndAddress() != 0); | ||||||
|  |  | ||||||
|  |         // Setup the linear mapping region. | ||||||
|  |         constexpr size_t LinearRegionAlign = Size_1_GB; | ||||||
|  |         const PAddr aligned_linear_phys_start = | ||||||
|  |             Common::AlignDown(linear_extents.GetAddress(), LinearRegionAlign); | ||||||
|  |         const size_t linear_region_size = | ||||||
|  |             Common::AlignUp(linear_extents.GetEndAddress(), LinearRegionAlign) - | ||||||
|  |             aligned_linear_phys_start; | ||||||
|  |         const VAddr linear_region_start = | ||||||
|  |             memory_layout.GetVirtualMemoryRegionTree().GetRandomAlignedRegionWithGuard( | ||||||
|  |                 linear_region_size, LinearRegionAlign, KMemoryRegionType_None, LinearRegionAlign); | ||||||
|  |  | ||||||
|  |         const u64 linear_region_phys_to_virt_diff = linear_region_start - aligned_linear_phys_start; | ||||||
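The constant difference computed above is what makes linear-map translation a single addition in either direction: a linearly mapped physical address maps to phys + diff, and the inverse is a subtraction. A minimal sketch with invented, 1 GiB-aligned bases:

    #include <cstdint>
    #include <cstdio>

    int main() {
        constexpr std::uint64_t GiB = 1024ull * 1024 * 1024;

        // Hypothetical bases, both 1 GiB aligned as the setup above guarantees.
        const std::uint64_t aligned_linear_phys_start = 2 * GiB;          // 0x80000000
        const std::uint64_t linear_region_start = 0xFFFFFF8040000000ull;  // randomized window

        const std::uint64_t diff = linear_region_start - aligned_linear_phys_start;

        // Translate a DRAM address both ways. Because both bases are 1 GiB aligned,
        // diff is a multiple of 1 GiB and the low 30 bits of phys and virt agree.
        const std::uint64_t some_phys = 0x85634000ull;
        const std::uint64_t some_virt = some_phys + diff;
        std::printf("phys 0x%llx <-> virt 0x%llx\n",
                    static_cast<unsigned long long>(some_virt - diff),
                    static_cast<unsigned long long>(some_virt));
        return 0;
    }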
|  |  | ||||||
|  |         // Map and create regions for all the linearly-mapped data. | ||||||
|  |         { | ||||||
|  |             PAddr cur_phys_addr = 0; | ||||||
|  |             u64 cur_size = 0; | ||||||
|  |             for (auto& region : memory_layout.GetPhysicalMemoryRegionTree()) { | ||||||
|  |                 if (!region.HasTypeAttribute(KMemoryRegionAttr_LinearMapped)) { | ||||||
|  |                     continue; | ||||||
|  |                 } | ||||||
|  |  | ||||||
|  |                 ASSERT(region.GetEndAddress() != 0); | ||||||
|  |  | ||||||
|  |                 if (cur_size == 0) { | ||||||
|  |                     cur_phys_addr = region.GetAddress(); | ||||||
|  |                     cur_size = region.GetSize(); | ||||||
|  |                 } else if (cur_phys_addr + cur_size == region.GetAddress()) { | ||||||
|  |                     cur_size += region.GetSize(); | ||||||
|  |                 } else { | ||||||
|  |                     const VAddr cur_virt_addr = cur_phys_addr + linear_region_phys_to_virt_diff; | ||||||
|  |                     cur_phys_addr = region.GetAddress(); | ||||||
|  |                     cur_size = region.GetSize(); | ||||||
|  |                 } | ||||||
|  |  | ||||||
|  |                 const VAddr region_virt_addr = | ||||||
|  |                     region.GetAddress() + linear_region_phys_to_virt_diff; | ||||||
|  |                 ASSERT(memory_layout.GetVirtualMemoryRegionTree().Insert( | ||||||
|  |                     region_virt_addr, region.GetSize(), | ||||||
|  |                     GetTypeForVirtualLinearMapping(region.GetType()))); | ||||||
|  |                 region.SetPairAddress(region_virt_addr); | ||||||
|  |  | ||||||
|  |                 KMemoryRegion* virt_region = | ||||||
|  |                     memory_layout.GetVirtualMemoryRegionTree().FindModifiable(region_virt_addr); | ||||||
|  |                 ASSERT(virt_region != nullptr); | ||||||
|  |                 virt_region->SetPairAddress(region.GetAddress()); | ||||||
|  |             } | ||||||
|  |         } | ||||||
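The cur_phys_addr / cur_size bookkeeping in that loop is a run-merging pass: physically adjacent linear-mapped regions are coalesced into one contiguous span (the cur_virt_addr computed in the else branch is where such a run would start in the linear window), while each region still gets its own virtual tree entry and pair address. A standalone sketch of just the merging logic over a made-up region list:

    #include <cstdint>
    #include <cstdio>
    #include <vector>

    struct Region {
        std::uint64_t addr;
        std::uint64_t size;
    };

    int main() {
        // Hypothetical linear-mapped physical regions, sorted by address.
        const std::vector<Region> regions{
            {0x80000000, 0x01000000}, {0x81000000, 0x00200000}, // adjacent -> one run
            {0x90000000, 0x00400000},                           // gap -> new run
        };

        std::uint64_t cur_addr = 0;
        std::uint64_t cur_size = 0;
        const auto flush = [&] {
            if (cur_size != 0) {
                std::printf("run [0x%llx, 0x%llx)\n", static_cast<unsigned long long>(cur_addr),
                            static_cast<unsigned long long>(cur_addr + cur_size));
            }
        };

        for (const auto& r : regions) {
            if (cur_size == 0) {
                cur_addr = r.addr;
                cur_size = r.size;
            } else if (cur_addr + cur_size == r.addr) {
                cur_size += r.size; // physically contiguous: extend the current run
            } else {
                flush();            // gap: emit the finished run and start a new one
                cur_addr = r.addr;
                cur_size = r.size;
            }
        }
        flush(); // emit the final run
        return 0;
    }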
|  |  | ||||||
|  |         // Insert regions for the initial page table region. | ||||||
|  |         ASSERT(memory_layout.GetPhysicalMemoryRegionTree().Insert( | ||||||
|  |             resource_end_phys_addr, KernelPageTableHeapSize, KMemoryRegionType_DramKernelInitPt)); | ||||||
|  |         ASSERT(memory_layout.GetVirtualMemoryRegionTree().Insert( | ||||||
|  |             resource_end_phys_addr + linear_region_phys_to_virt_diff, KernelPageTableHeapSize, | ||||||
|  |             KMemoryRegionType_VirtualDramKernelInitPt)); | ||||||
|  |  | ||||||
|  |         // All linear-mapped DRAM regions that we haven't tagged by this point will be allocated to | ||||||
|  |         // some pool partition. Tag them. | ||||||
|  |         for (auto& region : memory_layout.GetPhysicalMemoryRegionTree()) { | ||||||
|  |             if (region.GetType() == (KMemoryRegionType_Dram | KMemoryRegionAttr_LinearMapped)) { | ||||||
|  |                 region.SetType(KMemoryRegionType_DramPoolPartition); | ||||||
|  |             } | ||||||
|  |         } | ||||||
|  |  | ||||||
|  |         // Setup all other memory regions needed to arrange the pool partitions. | ||||||
|  |         Init::SetupPoolPartitionMemoryRegions(memory_layout); | ||||||
|  |  | ||||||
|  |         // Cache all linear regions in their own trees for faster access, later. | ||||||
|  |         memory_layout.InitializeLinearMemoryRegionTrees(aligned_linear_phys_start, | ||||||
|  |                                                         linear_region_start); | ||||||
|  |  | ||||||
|  |         const auto system_pool = memory_layout.GetKernelSystemPoolRegionPhysicalExtents(); | ||||||
|  |         const auto applet_pool = memory_layout.GetKernelAppletPoolRegionPhysicalExtents(); | ||||||
|  |         const auto application_pool = memory_layout.GetKernelApplicationPoolRegionPhysicalExtents(); | ||||||
|  |  | ||||||
|  |         // Initialize memory managers | ||||||
|  |         memory_manager = std::make_unique<KMemoryManager>(); | ||||||
|  |         memory_manager->InitializeManager(KMemoryManager::Pool::Application, | ||||||
|  |                                           application_pool.GetAddress(), | ||||||
|  |                                           application_pool.GetEndAddress()); | ||||||
|  |         memory_manager->InitializeManager(KMemoryManager::Pool::Applet, applet_pool.GetAddress(), | ||||||
|  |                                           applet_pool.GetEndAddress()); | ||||||
|  |         memory_manager->InitializeManager(KMemoryManager::Pool::System, system_pool.GetAddress(), | ||||||
|  |                                           system_pool.GetEndAddress()); | ||||||
|  |  | ||||||
|  |         // Setup memory regions for emulated processes | ||||||
|  |         // TODO(bunnei): These should not be hardcoded regions initialized within the kernel | ||||||
|         constexpr std::size_t hid_size{0x40000}; |         constexpr std::size_t hid_size{0x40000}; | ||||||
|         constexpr std::size_t font_size{0x1100000}; |         constexpr std::size_t font_size{0x1100000}; | ||||||
|         constexpr std::size_t irs_size{0x8000}; |         constexpr std::size_t irs_size{0x8000}; | ||||||
|         constexpr std::size_t time_size{0x1000}; |         constexpr std::size_t time_size{0x1000}; | ||||||
|         constexpr PAddr hid_addr{layout.System().GetAddress()}; |  | ||||||
|         constexpr PAddr font_pa{layout.System().GetAddress() + hid_size}; |  | ||||||
|         constexpr PAddr irs_addr{layout.System().GetAddress() + hid_size + font_size}; |  | ||||||
|         constexpr PAddr time_addr{layout.System().GetAddress() + hid_size + font_size + irs_size}; |  | ||||||
|  |  | ||||||
|         // Initialize memory manager |         const PAddr hid_phys_addr{system_pool.GetAddress()}; | ||||||
|         memory_manager = std::make_unique<KMemoryManager>(); |         const PAddr font_phys_addr{system_pool.GetAddress() + hid_size}; | ||||||
|         memory_manager->InitializeManager(KMemoryManager::Pool::Application, |         const PAddr irs_phys_addr{system_pool.GetAddress() + hid_size + font_size}; | ||||||
|                                           layout.Application().GetAddress(), |         const PAddr time_phys_addr{system_pool.GetAddress() + hid_size + font_size + irs_size}; | ||||||
|                                           layout.Application().GetLastAddress()); |  | ||||||
|         memory_manager->InitializeManager(KMemoryManager::Pool::Applet, |  | ||||||
|                                           layout.Applet().GetAddress(), |  | ||||||
|                                           layout.Applet().GetLastAddress()); |  | ||||||
|         memory_manager->InitializeManager(KMemoryManager::Pool::System, |  | ||||||
|                                           layout.System().GetAddress(), |  | ||||||
|                                           layout.System().GetLastAddress()); |  | ||||||
|  |  | ||||||
|         hid_shared_mem = Kernel::KSharedMemory::Create( |         hid_shared_mem = Kernel::KSharedMemory::Create( | ||||||
|             system.Kernel(), system.DeviceMemory(), nullptr, {hid_addr, hid_size / PageSize}, |             system.Kernel(), system.DeviceMemory(), nullptr, {hid_phys_addr, hid_size / PageSize}, | ||||||
|             KMemoryPermission::None, KMemoryPermission::Read, hid_addr, hid_size, |             KMemoryPermission::None, KMemoryPermission::Read, hid_phys_addr, hid_size, | ||||||
|             "HID:SharedMemory"); |             "HID:SharedMemory"); | ||||||
|         font_shared_mem = Kernel::KSharedMemory::Create( |         font_shared_mem = Kernel::KSharedMemory::Create( | ||||||
|             system.Kernel(), system.DeviceMemory(), nullptr, {font_pa, font_size / PageSize}, |             system.Kernel(), system.DeviceMemory(), nullptr, {font_phys_addr, font_size / PageSize}, | ||||||
|             KMemoryPermission::None, KMemoryPermission::Read, font_pa, font_size, |             KMemoryPermission::None, KMemoryPermission::Read, font_phys_addr, font_size, | ||||||
|             "Font:SharedMemory"); |             "Font:SharedMemory"); | ||||||
|         irs_shared_mem = Kernel::KSharedMemory::Create( |         irs_shared_mem = Kernel::KSharedMemory::Create( | ||||||
|             system.Kernel(), system.DeviceMemory(), nullptr, {irs_addr, irs_size / PageSize}, |             system.Kernel(), system.DeviceMemory(), nullptr, {irs_phys_addr, irs_size / PageSize}, | ||||||
|             KMemoryPermission::None, KMemoryPermission::Read, irs_addr, irs_size, |             KMemoryPermission::None, KMemoryPermission::Read, irs_phys_addr, irs_size, | ||||||
|             "IRS:SharedMemory"); |             "IRS:SharedMemory"); | ||||||
|         time_shared_mem = Kernel::KSharedMemory::Create( |         time_shared_mem = Kernel::KSharedMemory::Create( | ||||||
|             system.Kernel(), system.DeviceMemory(), nullptr, {time_addr, time_size / PageSize}, |             system.Kernel(), system.DeviceMemory(), nullptr, {time_phys_addr, time_size / PageSize}, | ||||||
|             KMemoryPermission::None, KMemoryPermission::Read, time_addr, time_size, |             KMemoryPermission::None, KMemoryPermission::Read, time_phys_addr, time_size, | ||||||
|             "Time:SharedMemory"); |             "Time:SharedMemory"); | ||||||
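The shared-memory backing for HID, font, IRS and time is carved sequentially from the front of the system pool, so the physical offsets follow directly from the sizes above: HID at +0, font at +0x40000, IRS at +0x1140000 and time at +0x1148000. A tiny sketch of that bookkeeping; the pool base address is invented:

    #include <cstddef>
    #include <cstdint>
    #include <cstdio>

    int main() {
        // Sizes as in the kernel init above.
        constexpr std::size_t hid_size = 0x40000;
        constexpr std::size_t font_size = 0x1100000;
        constexpr std::size_t irs_size = 0x8000;
        constexpr std::size_t time_size = 0x1000;

        const std::uint64_t system_pool_base = 0xF0000000ull; // hypothetical pool start

        std::uint64_t cursor = system_pool_base;
        const auto carve = [&](const char* name, std::size_t size) {
            std::printf("%-4s at +0x%07llx (0x%llx), size 0x%zx\n", name,
                        static_cast<unsigned long long>(cursor - system_pool_base),
                        static_cast<unsigned long long>(cursor), size);
            cursor += size;
        };

        carve("hid", hid_size);   // +0x0000000
        carve("font", font_size); // +0x0040000
        carve("irs", irs_size);   // +0x1140000
        carve("time", time_size); // +0x1148000
        return 0;
    }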
|  |  | ||||||
|         // Allocate slab heaps |         // Allocate slab heaps | ||||||
|   | |||||||
| @@ -1,4 +1,4 @@ | |||||||
| // Copyright 2014 Citra Emulator Project / PPSSPP Project | // Copyright 2021 yuzu Emulator Project | ||||||
| // Licensed under GPLv2 or any later version | // Licensed under GPLv2 or any later version | ||||||
| // Refer to the license.txt file included. | // Refer to the license.txt file included. | ||||||
|  |  | ||||||
|   | |||||||