#include <algorithm>
#include <cmath>
#include <codecvt>
#include <locale>

#include <d3dcompiler.h>

extern "C" {
#include <libavcodec/avcodec.h>
#include <libavutil/hwcontext_d3d11va.h>
}

#include "display.h"
#include "sunshine/main.h"
#include "sunshine/video.h"

#define SUNSHINE_SHADERS_DIR SUNSHINE_ASSETS_DIR "/shaders/directx"
namespace platf {
using namespace std::literals;
}

static void free_frame(AVFrame *frame) {
  av_frame_free(&frame);
}

using frame_t = util::safe_ptr<AVFrame, free_frame>;

namespace platf::dxgi {
// D3D/DXGI smart-pointer typedefs. The Release<T> deleter and the device_t/device_ctx_t/
// texture2d_t/resource_t types are expected to come from display.h.
using input_layout_t        = util::safe_ptr<ID3D11InputLayout, Release<ID3D11InputLayout>>;
using render_target_t       = util::safe_ptr<ID3D11RenderTargetView, Release<ID3D11RenderTargetView>>;
using shader_res_t          = util::safe_ptr<ID3D11ShaderResourceView, Release<ID3D11ShaderResourceView>>;
using buf_t                 = util::safe_ptr<ID3D11Buffer, Release<ID3D11Buffer>>;
using blend_t               = util::safe_ptr<ID3D11BlendState, Release<ID3D11BlendState>>;
using raster_state_t        = util::safe_ptr<ID3D11RasterizerState, Release<ID3D11RasterizerState>>;
using sampler_state_t       = util::safe_ptr<ID3D11SamplerState, Release<ID3D11SamplerState>>;
using vs_t                  = util::safe_ptr<ID3D11VertexShader, Release<ID3D11VertexShader>>;
using ps_t                  = util::safe_ptr<ID3D11PixelShader, Release<ID3D11PixelShader>>;
using blob_t                = util::safe_ptr<ID3DBlob, Release<ID3DBlob>>;
using depth_stencil_state_t = util::safe_ptr<ID3D11DepthStencilState, Release<ID3D11DepthStencilState>>;
using depth_stencil_view_t  = util::safe_ptr<ID3D11DepthStencilView, Release<ID3D11DepthStencilView>>;

template<class T>
buf_t make_buffer(device_t::pointer device, const T &t) {
  static_assert(sizeof(T) % 16 == 0, "Constant buffers must be aligned to a 16-byte boundary");

  D3D11_BUFFER_DESC buffer_desc {
    sizeof(T),
    D3D11_USAGE_IMMUTABLE,
    D3D11_BIND_CONSTANT_BUFFER
  };

  D3D11_SUBRESOURCE_DATA init_data {
    &t
  };

  buf_t::pointer buf_p;
  auto status = device->CreateBuffer(&buffer_desc, &init_data, &buf_p);
  if(status) {
    BOOST_LOG(error) << "Failed to create buffer: [0x"sv << util::hex(status).to_string_view() << ']';
    return nullptr;
  }

  return buf_t { buf_p };
}

blend_t make_blend(device_t::pointer device, bool enable) {
  D3D11_BLEND_DESC bdesc {};
  auto &rt = bdesc.RenderTarget[0];

  rt.BlendEnable           = enable;
  rt.RenderTargetWriteMask = D3D11_COLOR_WRITE_ENABLE_ALL;

  if(enable) {
    rt.BlendOp      = D3D11_BLEND_OP_ADD;
    rt.BlendOpAlpha = D3D11_BLEND_OP_ADD;

    rt.SrcBlend  = D3D11_BLEND_SRC_ALPHA;
    rt.DestBlend = D3D11_BLEND_INV_SRC_ALPHA;

    rt.SrcBlendAlpha  = D3D11_BLEND_ZERO;
    rt.DestBlendAlpha = D3D11_BLEND_ZERO;
  }

  blend_t blend;
  auto status = device->CreateBlendState(&bdesc, &blend);
  if(status) {
    BOOST_LOG(error) << "Failed to create blend state: [0x"sv << util::hex(status).to_string_view() << ']';
    return nullptr;
  }

  return blend;
}

blob_t convert_UV_vs_hlsl;
blob_t convert_UV_ps_hlsl;
blob_t scene_vs_hlsl;
blob_t convert_Y_ps_hlsl;
blob_t scene_ps_hlsl;

struct img_d3d_t : public platf::img_t {
  shader_res_t input_res;
  texture2d_t texture;

  std::shared_ptr<platf::display_t> display;

  ~img_d3d_t() override = default;
};

util::buffer_t<std::uint8_t> make_cursor_image(util::buffer_t<std::uint8_t> &&img_data, DXGI_OUTDUPL_POINTER_SHAPE_INFO shape_info) {
  constexpr std::uint32_t black       = 0xFF000000;
  constexpr std::uint32_t white       = 0xFFFFFFFF;
  constexpr std::uint32_t transparent = 0;

  switch(shape_info.Type) {
  case DXGI_OUTDUPL_POINTER_SHAPE_TYPE_MASKED_COLOR:
    std::for_each((std::uint32_t *)std::begin(img_data), (std::uint32_t *)std::end(img_data), [](auto &pixel) {
      if(pixel & 0xFF000000) {
        pixel = transparent;
      }
    });
    // fallthrough: after masking, the image is returned just like a regular color cursor
  case DXGI_OUTDUPL_POINTER_SHAPE_TYPE_COLOR:
    return std::move(img_data);
  default:
    break;
  }

  // Monochrome cursors stack the AND mask on top of the XOR mask,
  // so the reported height covers both masks
  shape_info.Height /= 2;

  util::buffer_t<std::uint8_t> cursor_img { shape_info.Width * shape_info.Height * 4 };

  auto bytes       = shape_info.Pitch * shape_info.Height;
  auto pixel_begin = (std::uint32_t *)std::begin(cursor_img);
  auto pixel_data  = pixel_begin;
  auto and_mask    = std::begin(img_data);
  auto xor_mask    = std::begin(img_data) + bytes;
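  // The loop below decodes the classic Windows monochrome cursor format: for each pixel,
  // one bit from the AND mask and one bit from the XOR mask form a 2-bit code.
  //   0 -> opaque black, 2 -> opaque white, 1 -> transparent,
  //   3 -> "invert the screen pixel underneath".
  // Since this pipeline has no destination pixel to invert, code 3 is approximated by a
  // white pixel with a black outline so the cursor remains visible on any background.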
  for(auto x = 0; x < bytes; ++x) {
    for(auto c = 7; c >= 0; --c) {
      auto bit        = 1 << c;
      auto color_type = ((*and_mask & bit) ? 1 : 0) + ((*xor_mask & bit) ? 2 : 0);

      switch(color_type) {
      case 0: //black
        *pixel_data = black;
        break;
      case 2: //white
        *pixel_data = white;
        break;
      case 1: //transparent
        *pixel_data = transparent;
        break;
      case 3: //inverse
      {
        auto top_p    = pixel_data - shape_info.Width;
        auto left_p   = pixel_data - 1;
        auto right_p  = pixel_data + 1;
        auto bottom_p = pixel_data + shape_info.Width;

        // Get the x coordinate of the pixel
        auto column = (pixel_data - pixel_begin) % shape_info.Width;

        if(top_p >= pixel_begin && *top_p == transparent) {
          *top_p = black;
        }

        if(column != 0 && left_p >= pixel_begin && *left_p == transparent) {
          *left_p = black;
        }

        if(bottom_p < (std::uint32_t *)std::end(cursor_img)) {
          *bottom_p = black;
        }

        if(column != shape_info.Width - 1) {
          *right_p = black;
        }

        *pixel_data = white;
      }
      }

      ++pixel_data;
    }

    ++and_mask;
    ++xor_mask;
  }

  return cursor_img;
}

blob_t compile_shader(LPCSTR file, LPCSTR entrypoint, LPCSTR shader_model) {
  blob_t::pointer msg_p = nullptr;
  blob_t::pointer compiled_p;

  DWORD flags = D3DCOMPILE_ENABLE_STRICTNESS;

#ifndef NDEBUG
  flags |= D3DCOMPILE_DEBUG | D3DCOMPILE_SKIP_OPTIMIZATION;
#endif

  std::wstring_convert<std::codecvt_utf8_utf16<wchar_t>, wchar_t> converter;

  auto wFile  = converter.from_bytes(file);
  auto status = D3DCompileFromFile(wFile.c_str(), nullptr, nullptr, entrypoint, shader_model, flags, 0, &compiled_p, &msg_p);

  if(msg_p) {
    BOOST_LOG(warning) << std::string_view { (const char *)msg_p->GetBufferPointer(), msg_p->GetBufferSize() - 1 };
    msg_p->Release();
  }

  if(status) {
    BOOST_LOG(error) << "Couldn't compile ["sv << file << "] [0x"sv << util::hex(status).to_string_view() << ']';
    return nullptr;
  }

  return blob_t { compiled_p };
}

blob_t compile_pixel_shader(LPCSTR file) {
  return compile_shader(file, "main_ps", "ps_5_0");
}

blob_t compile_vertex_shader(LPCSTR file) {
  return compile_shader(file, "main_vs", "vs_5_0");
}

class hwdevice_t : public platf::hwdevice_t {
public:
  hwdevice_t(std::vector<hwdevice_t *> *hwdevices_p) : hwdevices_p { hwdevices_p } {}
  hwdevice_t() = delete;

  void set_cursor_pos(LONG rel_x, LONG rel_y, bool visible) {
    cursor_visible = visible;

    if(!visible) {
      return;
    }

    cursor_view.TopLeftX = (float)rel_x;
    cursor_view.TopLeftY = (float)rel_y;
  }

  int set_cursor_texture(texture2d_t::pointer texture, LONG width, LONG height) {
    auto device = (device_t::pointer)data;

    cursor_view.Width  = width;
    cursor_view.Height = height;

    D3D11_SHADER_RESOURCE_VIEW_DESC desc {
      DXGI_FORMAT_B8G8R8A8_UNORM,
      D3D11_SRV_DIMENSION_TEXTURE2D
    };
    desc.Texture2D.MipLevels = 1;

    auto status = device->CreateShaderResourceView(texture, &desc, &img.input_res);
    if(FAILED(status)) {
      BOOST_LOG(error) << "Failed to create cursor shader resource view [0x"sv << util::hex(status).to_string_view() << ']';
      return -1;
    }

    return 0;
  }
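  // convert() turns the BGRA capture into the NV12/P010 output texture in two passes:
  // the first pass writes the Y plane through an R8_UNORM render target view, the second
  // writes the interleaved UV plane through an R8G8_UNORM view at half resolution.
  // Each pass issues Draw(3, 0) with no vertex buffer bound, which suggests the vertex
  // shaders synthesize a fullscreen triangle from the vertex index. When the cursor is
  // visible, the desktop and cursor are first alpha-blended into the intermediate scene
  // render target, and that texture feeds the conversion passes instead of the raw capture.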
  int convert(platf::img_t &img_base) override {
    auto &img = (img_d3d_t &)img_base;

    if(!img.input_res) {
      auto device = (device_t::pointer)data;

      D3D11_SHADER_RESOURCE_VIEW_DESC desc {
        DXGI_FORMAT_B8G8R8A8_UNORM,
        D3D11_SRV_DIMENSION_TEXTURE2D
      };
      desc.Texture2D.MipLevels = 1;

      auto status = device->CreateShaderResourceView(img.texture.get(), &desc, &img.input_res);
      if(FAILED(status)) {
        BOOST_LOG(error) << "Failed to create input shader resource view [0x"sv << util::hex(status).to_string_view() << ']';
        return -1;
      }
    }

    auto input_res_p = img.input_res.get();

    if(cursor_visible) {
      _init_view_port(img.width, img.height);

      device_ctx_p->OMSetRenderTargets(1, &scene_rt, nullptr);
      device_ctx_p->VSSetShader(scene_vs.get(), nullptr, 0);
      device_ctx_p->PSSetShader(scene_ps.get(), nullptr, 0);
      device_ctx_p->PSSetShaderResources(0, 1, &input_res_p);
      device_ctx_p->Draw(3, 0);

      device_ctx_p->OMSetBlendState(blend_enable.get(), nullptr, 0xFFFFFFFFu);
      device_ctx_p->RSSetViewports(1, &cursor_view);
      device_ctx_p->PSSetShaderResources(0, 1, &this->img.input_res);
      device_ctx_p->Draw(3, 0);
      device_ctx_p->OMSetBlendState(blend_disable.get(), nullptr, 0xFFFFFFFFu);

      input_res_p = scene_sr.get();
    }

    _init_view_port(this->img.width, this->img.height);
    device_ctx_p->OMSetRenderTargets(1, &nv12_Y_rt, nullptr);
    device_ctx_p->VSSetShader(scene_vs.get(), nullptr, 0);
    device_ctx_p->PSSetShader(convert_Y_ps.get(), nullptr, 0);
    device_ctx_p->PSSetShaderResources(0, 1, &back_img.input_res);
    device_ctx_p->Draw(3, 0);

    device_ctx_p->RSSetViewports(1, &outY_view);
    device_ctx_p->PSSetShaderResources(0, 1, &input_res_p);
    device_ctx_p->Draw(3, 0);

    // Artifacts start appearing on the rendered image if Sunshine doesn't flush
    // before rendering on the UV part of the image.
    device_ctx_p->Flush();

    _init_view_port(this->img.width / 2, this->img.height / 2);
    device_ctx_p->OMSetRenderTargets(1, &nv12_UV_rt, nullptr);
    device_ctx_p->VSSetShader(convert_UV_vs.get(), nullptr, 0);
    device_ctx_p->PSSetShader(convert_UV_ps.get(), nullptr, 0);
    device_ctx_p->PSSetShaderResources(0, 1, &back_img.input_res);
    device_ctx_p->Draw(3, 0);

    device_ctx_p->RSSetViewports(1, &outUV_view);
    device_ctx_p->PSSetShaderResources(0, 1, &input_res_p);
    device_ctx_p->Draw(3, 0);

    device_ctx_p->Flush();

    return 0;
  }

  void set_colorspace(std::uint32_t colorspace, std::uint32_t color_range) override {
    switch(colorspace) {
    case 5: // SWS_CS_SMPTE170M
      color_p = &::video::colors[0];
      break;
    case 1: // SWS_CS_ITU709
      color_p = &::video::colors[2];
      break;
    case 9: // SWS_CS_BT2020
    default:
      BOOST_LOG(warning) << "Colorspace: ["sv << colorspace << "] not yet supported: switching to default"sv;
      color_p = &::video::colors[0];
    }

    if(color_range > 1) {
      // Full range
      ++color_p;
    }

    auto color_matrix = make_buffer((device_t::pointer)data, *color_p);
    if(!color_matrix) {
      BOOST_LOG(warning) << "Failed to create color matrix"sv;
      return;
    }

    device_ctx_p->PSSetConstantBuffers(0, 1, &color_matrix);
    this->color_matrix = std::move(color_matrix);
  }
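  // set_frame() sizes the output to the encoder's frame while keeping the capture's aspect
  // ratio: the Y/UV viewports are centered inside the frame (letterbox/pillarbox), and the
  // UV viewport is half the Y viewport in both dimensions because NV12 subsamples chroma 2x2.
  // It also attaches an AVD3D11FrameDescriptor to the AVFrame so a D3D11-based hardware
  // encoder can read the texture directly without a copy through system memory.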
  int set_frame(AVFrame *frame) {
    this->hwframe.reset(frame);

    this->frame = frame;

    auto device_p = (device_t::pointer)data;

    auto out_width  = frame->width;
    auto out_height = frame->height;

    float in_width  = img.display->width;
    float in_height = img.display->height;

    // Ensure aspect ratio is maintained
    auto scalar       = std::fminf(out_width / in_width, out_height / in_height);
    auto out_width_f  = in_width * scalar;
    auto out_height_f = in_height * scalar;

    // result is always positive
    auto offsetX = (out_width - out_width_f) / 2;
    auto offsetY = (out_height - out_height_f) / 2;

    outY_view  = D3D11_VIEWPORT { offsetX, offsetY, out_width_f, out_height_f, 0.0f, 1.0f };
    outUV_view = D3D11_VIEWPORT { offsetX / 2, offsetY / 2, out_width_f / 2, out_height_f / 2, 0.0f, 1.0f };

    D3D11_TEXTURE2D_DESC t {};
    t.Width            = out_width;
    t.Height           = out_height;
    t.MipLevels        = 1;
    t.ArraySize        = 1;
    t.SampleDesc.Count = 1;
    t.Usage            = D3D11_USAGE_DEFAULT;
    t.Format           = format;
    t.BindFlags        = D3D11_BIND_RENDER_TARGET;

    auto status = device_p->CreateTexture2D(&t, nullptr, &img.texture);
    if(FAILED(status)) {
      BOOST_LOG(error) << "Failed to create render target texture [0x"sv << util::hex(status).to_string_view() << ']';
      return -1;
    }

    img.width       = out_width;
    img.height      = out_height;
    img.data        = (std::uint8_t *)img.texture.get();
    img.row_pitch   = out_width;
    img.pixel_pitch = 1;

    float info_in[16 / sizeof(float)] { 1.0f / (float)out_width }; // aligned to 16 bytes
    info_scene = make_buffer(device_p, info_in);

    if(!info_scene) {
      BOOST_LOG(error) << "Failed to create info scene buffer"sv;
      return -1;
    }

    D3D11_RENDER_TARGET_VIEW_DESC nv12_rt_desc {
      DXGI_FORMAT_R8_UNORM,
      D3D11_RTV_DIMENSION_TEXTURE2D
    };

    status = device_p->CreateRenderTargetView(img.texture.get(), &nv12_rt_desc, &nv12_Y_rt);
    if(FAILED(status)) {
      BOOST_LOG(error) << "Failed to create Y render target view [0x"sv << util::hex(status).to_string_view() << ']';
      return -1;
    }

    nv12_rt_desc.Format = DXGI_FORMAT_R8G8_UNORM;

    status = device_p->CreateRenderTargetView(img.texture.get(), &nv12_rt_desc, &nv12_UV_rt);
    if(FAILED(status)) {
      BOOST_LOG(error) << "Failed to create UV render target view [0x"sv << util::hex(status).to_string_view() << ']';
      return -1;
    }

    // The frame needs something refcounted in buf[0] to manage its lifetime
    if(!frame->buf[0]) {
      frame->buf[0] = av_buffer_allocz(sizeof(AVD3D11FrameDescriptor));
    }

    auto desc     = (AVD3D11FrameDescriptor *)frame->buf[0]->data;
    desc->texture = (ID3D11Texture2D *)img.data;
    desc->index   = 0;

    frame->data[0] = img.data;
    frame->data[1] = 0;

    frame->linesize[0] = img.row_pitch;

    frame->height = img.height;
    frame->width  = img.width;

    return 0;
  }
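  // init() creates all pipeline state that does not depend on the output frame size:
  // shaders from the pre-compiled HLSL blobs, the blend states used for cursor compositing,
  // the intermediate scene render target, the color matrix, the sampler and the input layout.
  // The bindings that never change (sampler, constant buffers, topology, input layout) are
  // set on the immediate context once here and reused by every convert() call.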
  int init(
    std::shared_ptr<platf::display_t> display, device_t::pointer device_p, device_ctx_t::pointer device_ctx_p,
    pix_fmt_e pix_fmt) {

    HRESULT status;

    device_p->AddRef();
    data = device_p;

    this->device_ctx_p = device_ctx_p;

    cursor_visible       = false;
    cursor_view.MinDepth = 0.0f;
    cursor_view.MaxDepth = 1.0f;

    format = (pix_fmt == pix_fmt_e::nv12 ? DXGI_FORMAT_NV12 : DXGI_FORMAT_P010);

    status = device_p->CreateVertexShader(scene_vs_hlsl->GetBufferPointer(), scene_vs_hlsl->GetBufferSize(), nullptr, &scene_vs);
    if(status) {
      BOOST_LOG(error) << "Failed to create scene vertex shader [0x"sv << util::hex(status).to_string_view() << ']';
      return -1;
    }

    status = device_p->CreatePixelShader(convert_Y_ps_hlsl->GetBufferPointer(), convert_Y_ps_hlsl->GetBufferSize(), nullptr, &convert_Y_ps);
    if(status) {
      BOOST_LOG(error) << "Failed to create convertY pixel shader [0x"sv << util::hex(status).to_string_view() << ']';
      return -1;
    }

    status = device_p->CreatePixelShader(convert_UV_ps_hlsl->GetBufferPointer(), convert_UV_ps_hlsl->GetBufferSize(), nullptr, &convert_UV_ps);
    if(status) {
      BOOST_LOG(error) << "Failed to create convertUV pixel shader [0x"sv << util::hex(status).to_string_view() << ']';
      return -1;
    }

    status = device_p->CreateVertexShader(convert_UV_vs_hlsl->GetBufferPointer(), convert_UV_vs_hlsl->GetBufferSize(), nullptr, &convert_UV_vs);
    if(status) {
      BOOST_LOG(error) << "Failed to create convertUV vertex shader [0x"sv << util::hex(status).to_string_view() << ']';
      return -1;
    }

    status = device_p->CreatePixelShader(scene_ps_hlsl->GetBufferPointer(), scene_ps_hlsl->GetBufferSize(), nullptr, &scene_ps);
    if(status) {
      BOOST_LOG(error) << "Failed to create scene pixel shader [0x"sv << util::hex(status).to_string_view() << ']';
      return -1;
    }

    blend_disable = make_blend(device_p, false);
    blend_enable  = make_blend(device_p, true);

    if(!blend_disable || !blend_enable) {
      return -1;
    }

    if(_init_rt(scene_sr, scene_rt, display->width, display->height, DXGI_FORMAT_B8G8R8A8_UNORM)) {
      return -1;
    }

    color_matrix = make_buffer(device_p, ::video::colors[0]);
    if(!color_matrix) {
      BOOST_LOG(error) << "Failed to create color matrix buffer"sv;
      return -1;
    }

    D3D11_INPUT_ELEMENT_DESC layout_desc {
      "SV_Position", 0, DXGI_FORMAT_R32G32B32_FLOAT, 0, 0, D3D11_INPUT_PER_VERTEX_DATA, 0
    };

    status = device_p->CreateInputLayout(
      &layout_desc, 1,
      convert_UV_vs_hlsl->GetBufferPointer(), convert_UV_vs_hlsl->GetBufferSize(),
      &input_layout);

    img.display = std::move(display);

    D3D11_SAMPLER_DESC sampler_desc {};
    sampler_desc.Filter         = D3D11_FILTER_MIN_MAG_MIP_LINEAR;
    sampler_desc.AddressU       = D3D11_TEXTURE_ADDRESS_CLAMP;
    sampler_desc.AddressV       = D3D11_TEXTURE_ADDRESS_CLAMP;
    sampler_desc.AddressW       = D3D11_TEXTURE_ADDRESS_WRAP;
    sampler_desc.ComparisonFunc = D3D11_COMPARISON_NEVER;
    sampler_desc.MinLOD         = 0;
    sampler_desc.MaxLOD         = D3D11_FLOAT32_MAX;

    status = device_p->CreateSamplerState(&sampler_desc, &sampler_linear);
    if(FAILED(status)) {
      BOOST_LOG(error) << "Failed to create linear sampler state [0x"sv << util::hex(status).to_string_view() << ']';
      return -1;
    }

    // Color the background black, so that the padding added to keep the aspect ratio
    // is black
    if(img.display->dummy_img(&back_img)) {
      BOOST_LOG(warning) << "Couldn't create an image to set the background color to black"sv;
      return -1;
    }

    D3D11_SHADER_RESOURCE_VIEW_DESC desc {
      DXGI_FORMAT_B8G8R8A8_UNORM,
      D3D11_SRV_DIMENSION_TEXTURE2D
    };
    desc.Texture2D.MipLevels = 1;

    status = device_p->CreateShaderResourceView(back_img.texture.get(), &desc, &back_img.input_res);
    if(FAILED(status)) {
      BOOST_LOG(error) << "Failed to create input shader resource view [0x"sv << util::hex(status).to_string_view() << ']';
      return -1;
    }

    device_ctx_p->OMSetBlendState(blend_disable.get(), nullptr, 0xFFFFFFFFu);
    device_ctx_p->PSSetSamplers(0, 1, &sampler_linear);
    device_ctx_p->PSSetConstantBuffers(0, 1, &color_matrix);
    device_ctx_p->VSSetConstantBuffers(0, 1, &info_scene);
    device_ctx_p->IASetPrimitiveTopology(D3D11_PRIMITIVE_TOPOLOGY_TRIANGLESTRIP);
    device_ctx_p->IASetInputLayout(input_layout.get());

    return 0;
  }

  ~hwdevice_t() override {
    if(data) {
      ((ID3D11Device *)data)->Release();
    }

    auto it = std::find(std::begin(*hwdevices_p), std::end(*hwdevices_p), this);
    if(it != std::end(*hwdevices_p)) {
      hwdevices_p->erase(it);
    }
  }

private:
  void _init_view_port(float x, float y, float width, float height) {
    D3D11_VIEWPORT view {
      x, y,
      width, height,
      0.0f, 1.0f
    };

    device_ctx_p->RSSetViewports(1, &view);
  }

  void _init_view_port(float width, float height) {
    _init_view_port(0.0f, 0.0f, width, height);
  }
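  // _init_rt() creates the intermediate "scene" texture used for cursor compositing. It is
  // bound both as a render target (so the desktop and cursor can be drawn into it) and as a
  // shader resource (so the NV12 conversion passes can sample it afterwards), hence
  // D3D11_BIND_RENDER_TARGET | D3D11_BIND_SHADER_RESOURCE.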
  int _init_rt(shader_res_t &shader_res, render_target_t &render_target, int width, int height, DXGI_FORMAT format) {
    D3D11_TEXTURE2D_DESC desc {};

    desc.Width            = width;
    desc.Height           = height;
    desc.Format           = format;
    desc.Usage            = D3D11_USAGE_DEFAULT;
    desc.BindFlags        = D3D11_BIND_RENDER_TARGET | D3D11_BIND_SHADER_RESOURCE;
    desc.MipLevels        = 1;
    desc.ArraySize        = 1;
    desc.SampleDesc.Count = 1;

    auto device = (device_t::pointer)data;

    texture2d_t tex;
    auto status = device->CreateTexture2D(&desc, nullptr, &tex);
    if(status) {
      BOOST_LOG(error) << "Failed to create render target texture [0x"sv << util::hex(status).to_string_view() << ']';
      return -1;
    }

    D3D11_SHADER_RESOURCE_VIEW_DESC shader_resource_desc {
      format,
      D3D11_SRV_DIMENSION_TEXTURE2D
    };
    shader_resource_desc.Texture2D.MipLevels = 1;

    status = device->CreateShaderResourceView(tex.get(), &shader_resource_desc, &shader_res);
    if(status) {
      BOOST_LOG(error) << "Failed to create shader resource view [0x"sv << util::hex(status).to_string_view() << ']';
      return -1;
    }

    D3D11_RENDER_TARGET_VIEW_DESC render_target_desc {
      format,
      D3D11_RTV_DIMENSION_TEXTURE2D
    };

    status = device->CreateRenderTargetView(tex.get(), &render_target_desc, &render_target);
    if(status) {
      BOOST_LOG(error) << "Failed to create render target view [0x"sv << util::hex(status).to_string_view() << ']';
      return -1;
    }

    return 0;
  }

public:
  frame_t hwframe;

  ::video::color_t *color_p;

  blend_t blend_enable;
  blend_t blend_disable;

  buf_t info_scene;
  buf_t color_matrix;

  sampler_state_t sampler_linear;
  input_layout_t input_layout;

  render_target_t nv12_Y_rt;
  render_target_t nv12_UV_rt;

  render_target_t scene_rt;
  shader_res_t scene_sr;

  img_d3d_t img;

  // Used to clear the NV12 render target to black before the actual frame is drawn
  img_d3d_t back_img;

  vs_t convert_UV_vs;
  ps_t convert_UV_ps;
  ps_t convert_Y_ps;
  ps_t scene_ps;
  vs_t scene_vs;

  D3D11_VIEWPORT outY_view;
  D3D11_VIEWPORT outUV_view;

  D3D11_VIEWPORT cursor_view;
  bool cursor_visible;

  DXGI_FORMAT format;

  device_ctx_t::pointer device_ctx_p;

  // The destructor removes itself from the list of hardware devices; this is done synchronously
  std::vector<hwdevice_t *> *hwdevices_p;
};

capture_e display_vram_t::capture(snapshot_cb_t &&snapshot_cb, std::shared_ptr<::platf::img_t> img, bool *cursor) {
  auto next_frame = std::chrono::steady_clock::now();

  while(img) {
    auto now = std::chrono::steady_clock::now();

    // Busy-wait until the next frame is due
    while(next_frame > now) {
      now = std::chrono::steady_clock::now();
    }
    next_frame = now + delay;

    auto status = snapshot(img.get(), 1000ms, *cursor);
    switch(status) {
    case platf::capture_e::reinit:
    case platf::capture_e::error:
      return status;
    case platf::capture_e::timeout:
      std::this_thread::sleep_for(1ms);
      continue;
    case platf::capture_e::ok:
      img = snapshot_cb(img);
      break;
    default:
      BOOST_LOG(error) << "Unrecognized capture status ["sv << (int)status << ']';
      return status;
    }
  }

  return capture_e::ok;
}
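// snapshot() pulls the next update from the DXGI desktop duplication interface. An update may
// carry a new desktop image, a new cursor shape or position, or both; if it carries neither,
// it is reported as a timeout. A new pointer shape is decoded into a BGRA texture (see
// make_cursor_image) and pushed to every active hwdevice_t; a new desktop image is copied into
// the caller's capture texture.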
capture_e display_vram_t::snapshot(platf::img_t *img_base, std::chrono::milliseconds timeout, bool cursor_visible) {
  auto img = (img_d3d_t *)img_base;

  HRESULT status;

  DXGI_OUTDUPL_FRAME_INFO frame_info;

  resource_t::pointer res_p {};
  auto capture_status = dup.next_frame(frame_info, timeout, &res_p);
  resource_t res { res_p };

  if(capture_status != capture_e::ok) {
    return capture_status;
  }

  const bool mouse_update_flag = frame_info.LastMouseUpdateTime.QuadPart != 0 || frame_info.PointerShapeBufferSize > 0;
  const bool frame_update_flag = frame_info.AccumulatedFrames != 0 || frame_info.LastPresentTime.QuadPart != 0;
  const bool update_flag       = mouse_update_flag || frame_update_flag;

  if(!update_flag) {
    return capture_e::timeout;
  }

  if(frame_info.PointerShapeBufferSize > 0) {
    DXGI_OUTDUPL_POINTER_SHAPE_INFO shape_info {};

    util::buffer_t<std::uint8_t> img_data { frame_info.PointerShapeBufferSize };

    UINT dummy;
    status = dup.dup->GetFramePointerShape(img_data.size(), std::begin(img_data), &dummy, &shape_info);
    if(FAILED(status)) {
      BOOST_LOG(error) << "Failed to get new pointer shape [0x"sv << util::hex(status).to_string_view() << ']';
      return capture_e::error;
    }

    auto cursor_img = make_cursor_image(std::move(img_data), shape_info);

    D3D11_SUBRESOURCE_DATA data {
      std::begin(cursor_img),
      4 * shape_info.Width,
      0
    };

    // Create texture for cursor
    D3D11_TEXTURE2D_DESC t {};
    t.Width            = shape_info.Width;
    t.Height           = cursor_img.size() / data.SysMemPitch;
    t.MipLevels        = 1;
    t.ArraySize        = 1;
    t.SampleDesc.Count = 1;
    t.Usage            = D3D11_USAGE_DEFAULT;
    t.Format           = DXGI_FORMAT_B8G8R8A8_UNORM;
    t.BindFlags        = D3D11_BIND_SHADER_RESOURCE;

    texture2d_t texture;
    auto status = device->CreateTexture2D(&t, &data, &texture);
    if(FAILED(status)) {
      BOOST_LOG(error) << "Failed to create mouse texture [0x"sv << util::hex(status).to_string_view() << ']';
      return capture_e::error;
    }

    for(auto *hwdevice : hwdevices) {
      if(hwdevice->set_cursor_texture(texture.get(), t.Width, t.Height)) {
        return capture_e::error;
      }
    }

    cursor.texture = std::move(texture);
    cursor.width   = t.Width;
    cursor.height  = t.Height;
  }

  if(frame_info.LastMouseUpdateTime.QuadPart) {
    for(auto *hwdevice : hwdevices) {
      hwdevice->set_cursor_pos(frame_info.PointerPosition.Position.x, frame_info.PointerPosition.Position.y, frame_info.PointerPosition.Visible && cursor_visible);
    }
  }

  if(frame_update_flag) {
    texture2d_t src;
    status = res->QueryInterface(IID_ID3D11Texture2D, (void **)&src);

    if(FAILED(status)) {
      BOOST_LOG(error) << "Couldn't query interface [0x"sv << util::hex(status).to_string_view() << ']';
      return capture_e::error;
    }

    device_ctx->CopyResource(img->texture.get(), src.get());
  }

  return capture_e::ok;
}

std::shared_ptr<platf::img_t> display_vram_t::alloc_img() {
  auto img = std::make_shared<img_d3d_t>();

  D3D11_TEXTURE2D_DESC t {};
  t.Width            = width;
  t.Height           = height;
  t.MipLevels        = 1;
  t.ArraySize        = 1;
  t.SampleDesc.Count = 1;
  t.Usage            = D3D11_USAGE_DEFAULT;
  t.Format           = format;
  t.BindFlags        = D3D11_BIND_SHADER_RESOURCE;

  auto status = device->CreateTexture2D(&t, nullptr, &img->texture);
  if(FAILED(status)) {
    BOOST_LOG(error) << "Failed to create img buf texture [0x"sv << util::hex(status).to_string_view() << ']';
    return nullptr;
  }

  img->data        = (std::uint8_t *)img->texture.get();
  img->row_pitch   = 0;
  img->pixel_pitch = 4;
  img->width       = 0;
  img->height      = 0;
  img->display     = shared_from_this();

  return img;
}

int display_vram_t::dummy_img(platf::img_t *img_base) {
  auto img = (img_d3d_t *)img_base;

  img->row_pitch  = width * 4;
  auto dummy_data = std::make_unique<int[]>(width * height);
  D3D11_SUBRESOURCE_DATA data {
    dummy_data.get(),
    (UINT)img->row_pitch
  };
  std::fill_n(dummy_data.get(), width * height, 0);

  D3D11_TEXTURE2D_DESC t {};
  t.Width            = width;
  t.Height           = height;
  t.MipLevels        = 1;
  t.ArraySize        = 1;
  t.SampleDesc.Count = 1;
  t.Usage            = D3D11_USAGE_DEFAULT;
  t.Format           = format;
  t.BindFlags        = D3D11_BIND_SHADER_RESOURCE;

  dxgi::texture2d_t tex;
  auto status = device->CreateTexture2D(&t, &data, &tex);
  if(FAILED(status)) {
    BOOST_LOG(error) << "Failed to create dummy texture [0x"sv << util::hex(status).to_string_view() << ']';
    return -1;
  }

  img->texture     = std::move(tex);
  img->data        = (std::uint8_t *)img->texture.get();
  img->height      = height;
  img->width       = width;
  img->pixel_pitch = 4;

  return 0;
}

std::shared_ptr<platf::hwdevice_t> display_vram_t::make_hwdevice(pix_fmt_e pix_fmt) {
  if(pix_fmt != platf::pix_fmt_e::nv12) {
    BOOST_LOG(error) << "display_vram_t doesn't support pixel format ["sv << from_pix_fmt(pix_fmt) << ']';

    return nullptr;
  }

  auto hwdevice = std::make_shared<hwdevice_t>(&hwdevices);

  auto ret = hwdevice->init(
    shared_from_this(),
    device.get(),
    device_ctx.get(),
    pix_fmt);

  if(ret) {
    return nullptr;
  }

  if(cursor.texture && hwdevice->set_cursor_texture(cursor.texture.get(), cursor.width, cursor.height)) {
    return nullptr;
  }

  hwdevices.emplace_back(hwdevice.get());

  return hwdevice;
}

int init() {
  BOOST_LOG(info) << "Compiling shaders..."sv;

  scene_vs_hlsl = compile_vertex_shader(SUNSHINE_SHADERS_DIR "/SceneVS.hlsl");
  if(!scene_vs_hlsl) {
    return -1;
  }

  convert_Y_ps_hlsl = compile_pixel_shader(SUNSHINE_SHADERS_DIR "/ConvertYPS.hlsl");
  if(!convert_Y_ps_hlsl) {
    return -1;
  }

  convert_UV_ps_hlsl = compile_pixel_shader(SUNSHINE_SHADERS_DIR "/ConvertUVPS.hlsl");
  if(!convert_UV_ps_hlsl) {
    return -1;
  }

  convert_UV_vs_hlsl = compile_vertex_shader(SUNSHINE_SHADERS_DIR "/ConvertUVVS.hlsl");
  if(!convert_UV_vs_hlsl) {
    return -1;
  }

  scene_ps_hlsl = compile_pixel_shader(SUNSHINE_SHADERS_DIR "/ScenePS.hlsl");
  if(!scene_ps_hlsl) {
    return -1;
  }

  BOOST_LOG(info) << "Compiled shaders"sv;

  return 0;
}
} // namespace platf::dxgi
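// Rough call order, inferred from how the functions above are used rather than from any
// additional API: platf::dxgi::init() compiles the HLSL blobs once at startup,
// display_vram_t::make_hwdevice() creates a hwdevice_t per encoding session, the encode
// loop is expected to call set_frame() when the output frame is (re)allocated and
// convert() once per captured frame, while capture()/snapshot() keep feeding new desktop
// images and cursor updates.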