// Copyright 2017 Citra Emulator Project
// Licensed under GPLv2 or any later version
// Refer to the license.txt file included.

#include <algorithm>
#include <vector>
#include "common/assert.h"
#include "common/common_types.h"
#include "core/core.h"
#include "core/hle/kernel/event.h"
#include "core/hle/kernel/handle_table.h"
#include "core/hle/kernel/hle_ipc.h"
#include "core/hle/kernel/kernel.h"
#include "core/hle/kernel/process.h"

namespace Kernel {

SessionRequestHandler::SessionInfo::SessionInfo(SharedPtr<ServerSession> session,
                                                std::unique_ptr<SessionDataBase> data)
    : session(std::move(session)), data(std::move(data)) {}
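
// Called when a client connects to this service handler: register the handler with the
// ServerSession and create the per-session data block used by the service implementation.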
void SessionRequestHandler::ClientConnected(SharedPtr<ServerSession> server_session) {
    server_session->SetHleHandler(shared_from_this());
    connected_sessions.emplace_back(std::move(server_session), MakeSessionData());
}
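
// Called when the client endpoint closes: unregister the handler and drop the session entry
// (along with its per-session data) from the list of connected sessions.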
void SessionRequestHandler::ClientDisconnected(SharedPtr<ServerSession> server_session) {
    server_session->SetHleHandler(nullptr);
    connected_sessions.erase(
        std::remove_if(connected_sessions.begin(), connected_sessions.end(),
                       [&](const SessionInfo& info) { return info.session == server_session; }),
        connected_sessions.end());
}
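
// Puts the client thread to sleep and returns the event that will wake it up. When that event
// is signaled (or the optional timeout expires), the stored wakeup callback runs and the
// translated response is written back into the thread's command buffer.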
SharedPtr<Event> HLERequestContext::SleepClientThread(SharedPtr<Thread> thread,
                                                      const std::string& reason,
                                                      std::chrono::nanoseconds timeout,
                                                      WakeupCallback&& callback) {
    // Put the client thread to sleep until the wait event is signaled or the timeout expires.
    thread->wakeup_callback = [context = *this, callback](ThreadWakeupReason reason,
                                                          SharedPtr<Thread> thread,
                                                          SharedPtr<WaitObject> object) mutable {
        ASSERT(thread->status == ThreadStatus::WaitHleEvent);
        callback(thread, context, reason);

        auto& process = thread->owner_process;
        // We must copy the entire command buffer *plus* the entire static buffers area, since
        // the translation might need to read from it in order to retrieve the StaticBuffer
        // target addresses.
        std::array<u32_le, IPC::COMMAND_BUFFER_LENGTH + 2 * IPC::MAX_STATIC_BUFFERS> cmd_buff;
        Memory::MemorySystem& memory = context.kernel.memory;
        memory.ReadBlock(*process, thread->GetCommandBufferAddress(), cmd_buff.data(),
                         cmd_buff.size() * sizeof(u32));
        context.WriteToOutgoingCommandBuffer(cmd_buff.data(), *process);
        // Copy the translated command buffer back into the thread's command buffer area.
        memory.WriteBlock(*process, thread->GetCommandBufferAddress(), cmd_buff.data(),
                          cmd_buff.size() * sizeof(u32));
    };

    auto event = kernel.CreateEvent(Kernel::ResetType::OneShot, "HLE Pause Event: " + reason);
    thread->status = ThreadStatus::WaitHleEvent;
    thread->wait_objects = {event};
    event->AddWaitingThread(thread);

    if (timeout.count() > 0)
        thread->WakeAfterDelay(timeout.count());

    return event;
}

HLERequestContext::HLERequestContext(KernelSystem& kernel, SharedPtr<ServerSession> session)
    : kernel(kernel), session(std::move(session)) {
    cmd_buf[0] = 0;
}

HLERequestContext::~HLERequestContext() = default;
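
// During HLE translation, kernel object handles in the command buffer are replaced by small
// indices into request_handles; these helpers map between an index and its kernel object.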
SharedPtr<Object> HLERequestContext::GetIncomingHandle(u32 id_from_cmdbuf) const {
    ASSERT(id_from_cmdbuf < request_handles.size());
    return request_handles[id_from_cmdbuf];
}

u32 HLERequestContext::AddOutgoingHandle(SharedPtr<Object> object) {
    request_handles.push_back(std::move(object));
    return static_cast<u32>(request_handles.size() - 1);
}

void HLERequestContext::ClearIncomingObjects() {
    request_handles.clear();
}

const std::vector<u8>& HLERequestContext::GetStaticBuffer(u8 buffer_id) const {
    return static_buffers[buffer_id];
}

void HLERequestContext::AddStaticBuffer(u8 buffer_id, std::vector<u8> data) {
    static_buffers[buffer_id] = std::move(data);
}
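
// Reads a request from the source process's IPC command buffer into this context, translating
// each descriptor as it goes: handles become indices into request_handles, CallingPid is filled
// in with the source process ID, static buffers are copied into local vectors, and mapped
// buffers are recorded for later access through GetMappedBuffer().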
ResultCode HLERequestContext::PopulateFromIncomingCommandBuffer(const u32_le* src_cmdbuf,
                                                                Process& src_process) {
    IPC::Header header{src_cmdbuf[0]};

    std::size_t untranslated_size = 1u + header.normal_params_size;
    std::size_t command_size = untranslated_size + header.translate_params_size;
    ASSERT(command_size <= IPC::COMMAND_BUFFER_LENGTH); // TODO(yuriks): Return error

    std::copy_n(src_cmdbuf, untranslated_size, cmd_buf.begin());

    std::size_t i = untranslated_size;
    while (i < command_size) {
        u32 descriptor = cmd_buf[i] = src_cmdbuf[i];
        i += 1;

        switch (IPC::GetDescriptorType(descriptor)) {
        case IPC::DescriptorType::CopyHandle:
        case IPC::DescriptorType::MoveHandle: {
            u32 num_handles = IPC::HandleNumberFromDesc(descriptor);
            ASSERT(i + num_handles <= command_size); // TODO(yuriks): Return error
            for (u32 j = 0; j < num_handles; ++j) {
                Handle handle = src_cmdbuf[i];
                SharedPtr<Object> object = nullptr;
                if (handle != 0) {
                    object = src_process.handle_table.GetGeneric(handle);
                    ASSERT(object != nullptr); // TODO(yuriks): Return error
                    if (descriptor == IPC::DescriptorType::MoveHandle) {
                        src_process.handle_table.Close(handle);
                    }
                }

                cmd_buf[i++] = AddOutgoingHandle(std::move(object));
            }
            break;
        }
        case IPC::DescriptorType::CallingPid: {
            cmd_buf[i++] = src_process.process_id;
            break;
        }
        case IPC::DescriptorType::StaticBuffer: {
            VAddr source_address = src_cmdbuf[i];
            IPC::StaticBufferDescInfo buffer_info{descriptor};

            // Copy the input buffer into our own vector and store it.
            std::vector<u8> data(buffer_info.size);
            kernel.memory.ReadBlock(src_process, source_address, data.data(), data.size());

            AddStaticBuffer(buffer_info.buffer_id, std::move(data));
            cmd_buf[i++] = source_address;
            break;
        }
        case IPC::DescriptorType::MappedBuffer: {
            u32 next_id = static_cast<u32>(request_mapped_buffers.size());
            request_mapped_buffers.emplace_back(kernel.memory, src_process, descriptor,
                                                src_cmdbuf[i], next_id);
            cmd_buf[i++] = next_id;
            break;
        }
        default:
            UNIMPLEMENTED_MSG("Unsupported handle translation: {:#010X}", descriptor);
        }
    }

    return RESULT_SUCCESS;
}
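
// Writes the response stored in this context back into the destination process's command
// buffer, performing the reverse translation: handle indices become real handles created in the
// destination handle table, and static buffer data is copied to the address the target set up.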
ResultCode HLERequestContext::WriteToOutgoingCommandBuffer(u32_le* dst_cmdbuf,
                                                           Process& dst_process) const {
    IPC::Header header{cmd_buf[0]};

    std::size_t untranslated_size = 1u + header.normal_params_size;
    std::size_t command_size = untranslated_size + header.translate_params_size;
    ASSERT(command_size <= IPC::COMMAND_BUFFER_LENGTH);

    std::copy_n(cmd_buf.begin(), untranslated_size, dst_cmdbuf);

    std::size_t i = untranslated_size;
    while (i < command_size) {
        u32 descriptor = dst_cmdbuf[i] = cmd_buf[i];
        i += 1;

        switch (IPC::GetDescriptorType(descriptor)) {
        case IPC::DescriptorType::CopyHandle:
        case IPC::DescriptorType::MoveHandle: {
            // HLE services don't use handles, so we treat both CopyHandle and MoveHandle equally
            u32 num_handles = IPC::HandleNumberFromDesc(descriptor);
            ASSERT(i + num_handles <= command_size);
            for (u32 j = 0; j < num_handles; ++j) {
                SharedPtr<Object> object = GetIncomingHandle(cmd_buf[i]);
                Handle handle = 0;
                if (object != nullptr) {
                    // TODO(yuriks): Figure out the proper error handling for if this fails
                    handle = dst_process.handle_table.Create(object).Unwrap();
                }
                dst_cmdbuf[i++] = handle;
            }
            break;
        }
        case IPC::DescriptorType::StaticBuffer: {
            IPC::StaticBufferDescInfo buffer_info{descriptor};

            const auto& data = GetStaticBuffer(buffer_info.buffer_id);

            // Grab the address that the target thread set up to receive the response static
            // buffer and write our data there. The static buffers area is located right after
            // the command buffer area.
            std::size_t static_buffer_offset =
                IPC::COMMAND_BUFFER_LENGTH + 2 * buffer_info.buffer_id;
            IPC::StaticBufferDescInfo target_descriptor{dst_cmdbuf[static_buffer_offset]};
            VAddr target_address = dst_cmdbuf[static_buffer_offset + 1];

            ASSERT_MSG(target_descriptor.size >= data.size(), "Static buffer data is too big");

            kernel.memory.WriteBlock(dst_process, target_address, data.data(), data.size());

            dst_cmdbuf[i++] = target_address;
            break;
        }
        case IPC::DescriptorType::MappedBuffer: {
            VAddr addr = request_mapped_buffers[cmd_buf[i]].address;
            dst_cmdbuf[i++] = addr;
            break;
        }
        default:
            UNIMPLEMENTED_MSG("Unsupported handle translation: {:#010X}", descriptor);
        }
    }

    return RESULT_SUCCESS;
}

MappedBuffer& HLERequestContext::GetMappedBuffer(u32 id_from_cmdbuf) {
    ASSERT_MSG(id_from_cmdbuf < request_mapped_buffers.size(), "Mapped Buffer ID out of range!");
    return request_mapped_buffers[id_from_cmdbuf];
}
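
// A MappedBuffer records where in the client process a buffer descriptor points; the size and
// permission bits are unpacked from the descriptor word.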
MappedBuffer::MappedBuffer(Memory::MemorySystem& memory, const Process& process, u32 descriptor,
                           VAddr address, u32 id)
    : memory(&memory), id(id), address(address), process(&process) {
    IPC::MappedBufferDescInfo desc{descriptor};
    size = desc.size;
    perms = desc.perms;
}
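
// Read/Write validate the requested permissions and bounds before copying data out of or into
// the client process's memory.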
void MappedBuffer::Read(void* dest_buffer, std::size_t offset, std::size_t size) {
    ASSERT(perms & IPC::R);
    ASSERT(offset + size <= this->size);
    memory->ReadBlock(*process, address + static_cast<VAddr>(offset), dest_buffer, size);
}

void MappedBuffer::Write(const void* src_buffer, std::size_t offset, std::size_t size) {
    ASSERT(perms & IPC::W);
    ASSERT(offset + size <= this->size);
    memory->WriteBlock(*process, address + static_cast<VAddr>(offset), src_buffer, size);
}

} // namespace Kernel