/* RetroArch - A frontend for libretro.
 *  Copyright (C) 2016-2017 - Hans-Kristian Arntzen
 *  Copyright (C) 2011-2017 - Daniel De Matteis
 *
 *  RetroArch is free software: you can redistribute it and/or modify it under the terms
 *  of the GNU General Public License as published by the Free Software Found-
 *  ation, either version 3 of the License, or (at your option) any later version.
 *
 *  RetroArch is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
 *  without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
 *  PURPOSE.  See the GNU General Public License for more details.
 *
 *  You should have received a copy of the GNU General Public License along with RetroArch.
 *  If not, see <http://www.gnu.org/licenses/>.
 */
#include <stdint.h>
#include <math.h>
#include <string.h>

#include <retro_assert.h>
#include <encodings/utf.h>
#include <compat/strl.h>
#include <gfx/scaler/scaler.h>
#include <gfx/video_frame.h>
#include <formats/image.h>
#include <retro_inline.h>
#include <retro_miscellaneous.h>
#include <retro_math.h>
#include <string/stdstring.h>

#include <libretro.h>

#ifdef HAVE_CONFIG_H
#include "../../config.h"
#endif

#ifdef HAVE_MENU
#include "../../menu/menu_driver.h"
#endif

#ifdef HAVE_GFX_WIDGETS
#include "../gfx_widgets.h"
#endif

#include "../font_driver.h"
#include "../video_driver.h"

#include "../common/vulkan_common.h"

#include "../../configuration.h"

#ifdef HAVE_REWIND
#include "../../state_manager.h"
#endif

#include "../../record/record_driver.h"
#include "../../retroarch.h"
#include "../../verbosity.h"
2016-02-16 20:24:00 +01:00
2023-05-31 19:38:49 +02:00
/* Some hardware cannot sample RGB565 textures directly; remap that format
 * to RGBA8888 so a compute-shader conversion path can be used instead.
 * Macro arguments are fully parenthesized to avoid precedence surprises. */
#define VK_REMAP_TO_TEXFMT(fmt) (((fmt) == VK_FORMAT_R5G6B5_UNORM_PACK16) ? VK_FORMAT_R8G8B8A8_UNORM : (fmt))
2023-06-01 21:34:34 +02:00
/* Per-font state for the Vulkan raster font renderer. */
typedef struct
{
/* Owning Vulkan driver instance. */
vk_t * vk ;
/* Opaque handle owned by font_driver. */
void * font_data ;
/* Glyph atlas produced by the font renderer backend. */
struct font_atlas * atlas ;
/* Backend that rasterizes glyphs into the atlas. */
const font_renderer_driver_t * font_driver ;
/* Current write pointer into the vertex range below. */
struct vk_vertex * pv ;
/* Staging (linear) atlas texture. */
struct vk_texture texture ;
/* Device-local copy of the atlas used for sampling. */
struct vk_texture texture_optimal ;
/* VBO range the glyph quads are written into. */
struct vk_buffer_range range ;
/* Number of vertices currently queued for drawing. */
unsigned vertices ;
/* Set when the atlas changed and texture_optimal must be refreshed. */
bool needs_update ;
} vulkan_raster_t ;
2023-05-31 19:32:33 +02:00
#ifdef VULKAN_DEBUG_TEXTURE_ALLOC
/* Debug-only bookkeeping of live VkImage handles, used to catch leaks. */
static VkImage vk_images[4 * 1024];
static unsigned vk_count;   /* Number of live images tracked. */
static unsigned track_seq;  /* Monotonic allocation counter for log output. */
#endif
2023-05-31 19:32:33 +02:00
2023-06-01 21:34:34 +02:00
/*
* VULKAN COMMON
*/
#ifdef VULKAN_DEBUG_TEXTURE_ALLOC
#if 0
/* Logs (and clears) any VkImage handles still tracked as live. */
void vulkan_log_textures(void)
{
   unsigned i;
   for (i = 0; i < vk_count; i++)
   {
      RARCH_WARN("[Vulkan]: Found leaked texture %llu.\n",
            (unsigned long long)vk_images[i]);
   }
   vk_count = 0;
}
#endif

/* Records a newly created VkImage in the debug tracking table. */
static void vulkan_track_alloc(VkImage image)
{
   vk_images[vk_count++] = image;
   RARCH_LOG("[Vulkan]: Alloc %llu (%u).\n",
         (unsigned long long)image, track_seq);
   track_seq++;
}

/* Removes a destroyed VkImage from the tracking table;
 * asserts if the handle was never tracked (double free / untracked image). */
static void vulkan_track_dealloc(VkImage image)
{
   unsigned i;
   for (i = 0; i < vk_count; i++)
   {
      if (image == vk_images[i])
      {
         vk_count--;
         /* Compact the table over the removed slot. */
         memmove(vk_images + i, vk_images + 1 + i,
               sizeof(VkImage) * (vk_count - i));
         return;
      }
   }
   retro_assert(0 && "Couldn't find VkImage in dealloc!");
}
#endif
/* Returns bytes-per-pixel for the VkFormats this driver uses,
 * or 0 for an unknown format. */
static INLINE unsigned vulkan_format_to_bpp(VkFormat format)
{
   switch (format)
   {
      case VK_FORMAT_B8G8R8A8_UNORM:
      case VK_FORMAT_R8G8B8A8_UNORM: /* Produced by VK_REMAP_TO_TEXFMT. */
         return 4;
      case VK_FORMAT_R4G4B4A4_UNORM_PACK16:
      case VK_FORMAT_B4G4R4A4_UNORM_PACK16:
      case VK_FORMAT_R5G6B5_UNORM_PACK16:
         return 2;
      case VK_FORMAT_R8_UNORM:
         return 1;
      default: /* Unknown format */
         break;
   }
   return 0;
}
/* Returns the number of mip levels of a full mip chain for a
 * width x height image, i.e. floor(log2(max(width, height))) + 1.
 * Returns 0 when both dimensions are 0. */
static unsigned vulkan_num_miplevels(unsigned width, unsigned height)
{
   unsigned size   = (width > height) ? width : height;
   unsigned levels = 0;
   while (size)
   {
      levels++;
      size >>= 1;
   }
   return levels;
}
2023-05-31 23:58:06 +02:00
/* Writes the descriptors used by the quad pipelines:
 * binding 0 = UBO slice (buffer/offset/range), and, when a texture is
 * supplied, binding 1 = combined image sampler for it. */
static void vulkan_write_quad_descriptors(
      VkDevice device,
      VkDescriptorSet set,
      VkBuffer buffer,
      VkDeviceSize offset,
      VkDeviceSize range,
      const struct vk_texture *texture,
      VkSampler sampler)
{
   VkWriteDescriptorSet write;
   VkDescriptorBufferInfo buffer_info;

   buffer_info.buffer     = buffer;
   buffer_info.offset     = offset;
   buffer_info.range      = range;

   write.sType            = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
   write.pNext            = NULL;
   write.dstSet           = set;
   write.dstBinding       = 0;
   write.dstArrayElement  = 0;
   write.descriptorCount  = 1;
   write.descriptorType   = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
   write.pImageInfo       = NULL;
   write.pBufferInfo      = &buffer_info;
   write.pTexelBufferView = NULL;
   vkUpdateDescriptorSets(device, 1, &write, 0, NULL);

   if (texture)
   {
      VkDescriptorImageInfo image_info;
      image_info.sampler     = sampler;
      image_info.imageView   = texture->view;
      image_info.imageLayout = texture->layout;

      /* Reuse the write above; only the differing fields change. */
      write.dstSet           = set;
      write.dstBinding       = 1;
      write.descriptorCount  = 1;
      write.descriptorType   = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
      write.pImageInfo       = &image_info;
      vkUpdateDescriptorSets(device, 1, &write, 0, NULL);
   }
}
/* Transitions a linear streamed texture to GENERAL layout before sampling.
 * Linear textures only support the GENERAL layout here. If the texture is
 * already GENERAL, the barrier still serves as a host -> shader-read memory
 * barrier to invalidate texture caches. Textures in any other layout are
 * left untouched. */
static void vulkan_transition_texture(vk_t *vk, VkCommandBuffer cmd,
      struct vk_texture *texture)
{
   if (     (texture->layout != VK_IMAGE_LAYOUT_PREINITIALIZED)
         && (texture->layout != VK_IMAGE_LAYOUT_GENERAL))
      return;

   switch (texture->type)
   {
      case VULKAN_TEXTURE_STREAMED:
         VULKAN_IMAGE_LAYOUT_TRANSITION(cmd, texture->image,
               texture->layout, VK_IMAGE_LAYOUT_GENERAL,
               VK_ACCESS_HOST_WRITE_BIT, VK_ACCESS_SHADER_READ_BIT,
               VK_PIPELINE_STAGE_HOST_BIT,
               VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT);
         break;

      default:
         /* Only streamed textures are expected to be linear/GENERAL. */
         retro_assert(0 && "Attempting to transition invalid texture type.\n");
         break;
   }

   texture->layout = VK_IMAGE_LAYOUT_GENERAL;
}
/* Draws a triangle batch with the given pipeline/texture/uniforms.
 * The VBO needs to be written to before calling this.
 * Use vulkan_buffer_chain_alloc. */
static void vulkan_draw_triangles(vk_t *vk, const struct vk_draw_triangles *call)
{
   if (call->texture && call->texture->image)
      vulkan_transition_texture(vk, vk->cmd, call->texture);

   if (call->pipeline != vk->tracker.pipeline)
   {
      vkCmdBindPipeline(vk->cmd,
            VK_PIPELINE_BIND_POINT_GRAPHICS, call->pipeline);
      vk->tracker.pipeline = call->pipeline;
      /* Changing pipeline invalidates dynamic state. */
      vk->tracker.dirty   |= VULKAN_DIRTY_DYNAMIC_BIT;
   }

   /* Re-emit viewport/scissor when dynamic state is dirty
    * (either from the pipeline change above or an earlier invalidation). */
   if (vk->tracker.dirty & VULKAN_DIRTY_DYNAMIC_BIT)
   {
      VkRect2D sci;
      if (vk->flags & VK_FLAG_TRACKER_USE_SCISSOR)
         sci = vk->tracker.scissor;
      else
      {
         /* No scissor -> viewport */
         sci.offset.x      = vk->vp.x;
         sci.offset.y      = vk->vp.y;
         sci.extent.width  = vk->vp.width;
         sci.extent.height = vk->vp.height;
      }

      vkCmdSetViewport(vk->cmd, 0, 1, &vk->vk_vp);
      vkCmdSetScissor(vk->cmd, 0, 1, &sci);
      vk->tracker.dirty &= ~VULKAN_DIRTY_DYNAMIC_BIT;
   }

   /* Upload descriptors */
   {
      VkDescriptorSet set;
      /* Upload UBO */
      struct vk_buffer_range range;
      float *mvp_data_ptr = NULL;

      if (!vulkan_buffer_chain_alloc(vk->context, &vk->chain->ubo,
               call->uniform_size, &range))
         return;
      memcpy(range.data, call->uniform, call->uniform_size);

      set = vulkan_descriptor_manager_alloc(
            vk->context->device,
            &vk->chain->descriptor_manager);

      vulkan_write_quad_descriptors(
            vk->context->device,
            set,
            range.buffer,
            range.offset,
            call->uniform_size,
            call->texture,
            call->sampler);

      vkCmdBindDescriptorSets(vk->cmd,
            VK_PIPELINE_BIND_POINT_GRAPHICS,
            vk->pipelines.layout, 0,
            1, &set, 0, NULL);

      vk->tracker.view    = VK_NULL_HANDLE;
      vk->tracker.sampler = VK_NULL_HANDLE;

      /* Clear the cached MVP so the next draw forces a re-upload. */
      for (
              mvp_data_ptr = &vk->tracker.mvp.data[0]
            ; mvp_data_ptr < vk->tracker.mvp.data + 16
            ; mvp_data_ptr++)
         *mvp_data_ptr = 0.0f;
   }

   /* VBO is already uploaded. */
   vkCmdBindVertexBuffers(vk->cmd, 0, 1,
         &call->vbo->buffer, &call->vbo->offset);

   /* Draw the quad */
   vkCmdDraw(vk->cmd, call->vertices, 1, 0, 0);
}
2023-05-31 19:32:33 +02:00
/* Destroys all Vulkan objects owned by *tex and resets the struct to a
 * known-empty state so stale handles cannot be freed twice later. */
static void vulkan_destroy_texture(
      VkDevice device,
      struct vk_texture *tex)
{
   if (tex->mapped)
      vkUnmapMemory(device, tex->memory);
   if (tex->view)
      vkDestroyImageView(device, tex->view, NULL);
   if (tex->image)
      vkDestroyImage(device, tex->image, NULL);
   if (tex->buffer)
      vkDestroyBuffer(device, tex->buffer, NULL);
   if (tex->memory)
      vkFreeMemory(device, tex->memory, NULL);

#ifdef VULKAN_DEBUG_TEXTURE_ALLOC
   if (tex->image)
      vulkan_track_dealloc(tex->image);
#endif

   tex->type        = VULKAN_TEXTURE_STREAMED;
   tex->flags       = 0;
   tex->memory_type = 0;
   tex->width       = 0;
   tex->height      = 0;
   tex->offset      = 0;
   tex->stride      = 0;
   tex->size        = 0;
   tex->mapped      = NULL;
   tex->image       = VK_NULL_HANDLE;
   tex->view        = VK_NULL_HANDLE;
   tex->memory      = VK_NULL_HANDLE;
   tex->buffer      = VK_NULL_HANDLE;
   tex->format      = VK_FORMAT_UNDEFINED;
   tex->memory_size = 0;
   tex->layout      = VK_IMAGE_LAYOUT_UNDEFINED;
}
/* Creates a texture of the requested type, optionally recycling memory from
 * *old (which is destroyed and zeroed), and uploads *initial when given.
 *
 * Notes:
 *  - STAGING/READBACK "textures" are backed by buffers, since linear images
 *    are not guaranteed to be supported.
 *  - STREAMED falls back to STAGING when linear sampling or DEVICE_LOCAL
 *    host-visible memory is unavailable.
 *  - RGB565 dynamic textures are remapped to RGBA8888 (compute upload path).
 *  - STATIC textures always get a full mip chain and are uploaded through a
 *    temporary staging buffer on the transfer queue (blocking; init-time only).
 */
static struct vk_texture vulkan_create_texture(vk_t *vk,
      struct vk_texture *old,
      unsigned width, unsigned height,
      VkFormat format,
      const void *initial,
      const VkComponentMapping *swizzle,
      enum vk_texture_type type)
{
   unsigned i;
   uint32_t buffer_width;
   struct vk_texture tex;
   VkImageCreateInfo info;
   VkFormat remap_tex_fmt;
   VkMemoryRequirements mem_reqs;
   VkSubresourceLayout layout;
   VkMemoryAllocateInfo alloc;
   VkBufferCreateInfo buffer_info;
   VkDevice device                = vk->context->device;
   VkImageSubresource subresource = { VK_IMAGE_ASPECT_COLOR_BIT };

   memset(&tex, 0, sizeof(tex));

   info.sType                 = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
   info.pNext                 = NULL;
   info.flags                 = 0;
   info.imageType             = VK_IMAGE_TYPE_2D;
   info.format                = format;
   info.extent.width          = width;
   info.extent.height         = height;
   info.extent.depth          = 1;
   info.mipLevels             = 1;
   info.arrayLayers           = 1;
   info.samples               = VK_SAMPLE_COUNT_1_BIT;
   info.tiling                = VK_IMAGE_TILING_OPTIMAL;
   info.usage                 = 0;
   info.sharingMode           = VK_SHARING_MODE_EXCLUSIVE;
   info.queueFamilyIndexCount = 0;
   info.pQueueFamilyIndices   = NULL;
   info.initialLayout         = VK_IMAGE_LAYOUT_UNDEFINED;

   /* Align stride to 4 bytes to make sure we can use
    * compute shader uploads without too many problems. */
   buffer_width = width * vulkan_format_to_bpp(format);
   buffer_width = (buffer_width + 3u) & ~3u;

   buffer_info.sType                 = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
   buffer_info.pNext                 = NULL;
   buffer_info.flags                 = 0;
   buffer_info.size                  = buffer_width * height;
   buffer_info.usage                 = 0;
   buffer_info.sharingMode           = VK_SHARING_MODE_EXCLUSIVE;
   buffer_info.queueFamilyIndexCount = 0;
   buffer_info.pQueueFamilyIndices   = NULL;

   remap_tex_fmt = VK_REMAP_TO_TEXFMT(format);

   /* Compatibility concern. Some Apple hardware does not support rgb565.
    * Use compute shader uploads instead.
    * If we attempt to use streamed texture, force staging path.
    * If we're creating fallback dynamic texture, force RGBA8888. */
   if (remap_tex_fmt != format)
   {
      if (type == VULKAN_TEXTURE_STREAMED)
         type = VULKAN_TEXTURE_STAGING;
      else if (type == VULKAN_TEXTURE_DYNAMIC)
      {
         format       = remap_tex_fmt;
         info.format  = format;
         info.usage  |= VK_IMAGE_USAGE_STORAGE_BIT;
      }
   }

   if (type == VULKAN_TEXTURE_STREAMED)
   {
      VkFormatProperties format_properties;
      const VkFormatFeatureFlags required =
           VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT
         | VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT;

      vkGetPhysicalDeviceFormatProperties(
            vk->context->gpu, format, &format_properties);

      if ((format_properties.linearTilingFeatures & required) != required)
      {
#ifdef VULKAN_DEBUG
         RARCH_DBG("[Vulkan]: GPU does not support using linear images as textures. Falling back to copy path.\n");
#endif
         type = VULKAN_TEXTURE_STAGING;
      }
   }

   switch (type)
   {
      case VULKAN_TEXTURE_STATIC:
         /* For simplicity, always build mipmaps for static textures,
          * samplers can be used to enable it dynamically. */
         info.mipLevels     = vulkan_num_miplevels(width, height);
         tex.flags         |= VK_TEX_FLAG_MIPMAP;
         retro_assert(initial && "Static textures must have initial data.\n");
         info.tiling        = VK_IMAGE_TILING_OPTIMAL;
         info.usage         = VK_IMAGE_USAGE_SAMPLED_BIT
                            | VK_IMAGE_USAGE_TRANSFER_DST_BIT
                            | VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
         info.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
         break;

      case VULKAN_TEXTURE_DYNAMIC:
         retro_assert(!initial && "Dynamic textures must not have initial data.\n");
         info.tiling        = VK_IMAGE_TILING_OPTIMAL;
         /* |= keeps the STORAGE bit set by the RGB565 remap above. */
         info.usage        |= VK_IMAGE_USAGE_SAMPLED_BIT
                            | VK_IMAGE_USAGE_TRANSFER_DST_BIT
                            | VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
         info.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
         break;

      case VULKAN_TEXTURE_STREAMED:
         info.usage         = VK_IMAGE_USAGE_SAMPLED_BIT
                            | VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
         info.tiling        = VK_IMAGE_TILING_LINEAR;
         info.initialLayout = VK_IMAGE_LAYOUT_PREINITIALIZED;
         break;

      case VULKAN_TEXTURE_STAGING:
         buffer_info.usage  = VK_BUFFER_USAGE_TRANSFER_SRC_BIT
                            | VK_BUFFER_USAGE_STORAGE_BUFFER_BIT;
         info.initialLayout = VK_IMAGE_LAYOUT_GENERAL;
         info.tiling        = VK_IMAGE_TILING_LINEAR;
         break;

      case VULKAN_TEXTURE_READBACK:
         buffer_info.usage  = VK_BUFFER_USAGE_TRANSFER_DST_BIT;
         info.initialLayout = VK_IMAGE_LAYOUT_GENERAL;
         info.tiling        = VK_IMAGE_TILING_LINEAR;
         break;
   }

   if (     (type != VULKAN_TEXTURE_STAGING)
         && (type != VULKAN_TEXTURE_READBACK))
   {
      vkCreateImage(device, &info, NULL, &tex.image);
      vulkan_debug_mark_image(device, tex.image);
#if 0
      vulkan_track_alloc(tex.image);
#endif
      vkGetImageMemoryRequirements(device, tex.image, &mem_reqs);
   }
   else
   {
      /* Linear staging textures are not guaranteed to be supported,
       * use buffers instead. */
      vkCreateBuffer(device, &buffer_info, NULL, &tex.buffer);
      vulkan_debug_mark_buffer(device, tex.buffer);
      vkGetBufferMemoryRequirements(device, tex.buffer, &mem_reqs);
   }

   alloc.sType           = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
   alloc.pNext           = NULL;
   alloc.allocationSize  = mem_reqs.size;
   alloc.memoryTypeIndex = 0;

   switch (type)
   {
      case VULKAN_TEXTURE_STATIC:
      case VULKAN_TEXTURE_DYNAMIC:
         alloc.memoryTypeIndex = vulkan_find_memory_type_fallback(
               &vk->context->memory_properties,
               mem_reqs.memoryTypeBits,
               VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT, 0);
         break;

      default:
         /* Try to find a memory type which is cached,
          * even if it means manual cache management. */
         alloc.memoryTypeIndex = vulkan_find_memory_type_fallback(
               &vk->context->memory_properties,
               mem_reqs.memoryTypeBits,
                 VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT
               | VK_MEMORY_PROPERTY_HOST_CACHED_BIT,
                 VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT
               | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT);

         if ((vk->context->memory_properties.memoryTypes
                  [alloc.memoryTypeIndex].propertyFlags
               & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) == 0)
            tex.flags |= VK_TEX_FLAG_NEED_MANUAL_CACHE_MANAGEMENT;

         /* If the texture is STREAMED and it's not DEVICE_LOCAL, we expect
          * to hit a slower path, so fallback to copy path. */
         if (     (type == VULKAN_TEXTURE_STREAMED)
               && (vk->context->memory_properties.memoryTypes[
                     alloc.memoryTypeIndex].propertyFlags
                  & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) == 0)
         {
            /* Recreate texture but for STAGING this time ... */
#ifdef VULKAN_DEBUG
            RARCH_DBG("[Vulkan]: GPU supports linear images as textures, but not DEVICE_LOCAL. Falling back to copy path.\n");
#endif
            type               = VULKAN_TEXTURE_STAGING;
            vkDestroyImage(device, tex.image, NULL);
            tex.image          = VK_NULL_HANDLE;
            info.initialLayout = VK_IMAGE_LAYOUT_GENERAL;

            buffer_info.usage  = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
            vkCreateBuffer(device, &buffer_info, NULL, &tex.buffer);
            vulkan_debug_mark_buffer(device, tex.buffer);
            vkGetBufferMemoryRequirements(device, tex.buffer, &mem_reqs);

            alloc.allocationSize  = mem_reqs.size;
            alloc.memoryTypeIndex = vulkan_find_memory_type_fallback(
                  &vk->context->memory_properties,
                  mem_reqs.memoryTypeBits,
                    VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT
                  | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT
                  | VK_MEMORY_PROPERTY_HOST_CACHED_BIT,
                    VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT
                  | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT);
         }
         break;
   }

   /* We're not reusing the objects themselves. */
   if (old)
   {
      if (old->view != VK_NULL_HANDLE)
         vkDestroyImageView(vk->context->device, old->view, NULL);
      if (old->image != VK_NULL_HANDLE)
      {
         vkDestroyImage(vk->context->device, old->image, NULL);
#ifdef VULKAN_DEBUG_TEXTURE_ALLOC
         vulkan_track_dealloc(old->image);
#endif
      }
      if (old->buffer != VK_NULL_HANDLE)
         vkDestroyBuffer(vk->context->device, old->buffer, NULL);
   }

   /* We can pilfer the old memory and move it over to the new texture. */
   if (     old
         && old->memory_size >= mem_reqs.size
         && old->memory_type == alloc.memoryTypeIndex)
   {
      tex.memory      = old->memory;
      tex.memory_size = old->memory_size;
      tex.memory_type = old->memory_type;

      if (old->mapped)
         vkUnmapMemory(device, old->memory);

      old->memory     = VK_NULL_HANDLE;
   }
   else
   {
      vkAllocateMemory(device, &alloc, NULL, &tex.memory);
      vulkan_debug_mark_memory(device, tex.memory);
      tex.memory_size = alloc.allocationSize;
      tex.memory_type = alloc.memoryTypeIndex;
   }

   if (old)
   {
      if (old->memory != VK_NULL_HANDLE)
         vkFreeMemory(device, old->memory, NULL);
      memset(old, 0, sizeof(*old));
   }

   if (tex.image)
      vkBindImageMemory(device, tex.image, tex.memory, 0);
   if (tex.buffer)
      vkBindBufferMemory(device, tex.buffer, tex.memory, 0);

   if (     type != VULKAN_TEXTURE_STAGING
         && type != VULKAN_TEXTURE_READBACK)
   {
      VkImageViewCreateInfo view;
      view.sType    = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO;
      view.pNext    = NULL;
      view.flags    = 0;
      view.image    = tex.image;
      view.viewType = VK_IMAGE_VIEW_TYPE_2D;
      view.format   = format;
      if (swizzle)
         view.components = *swizzle;
      else
      {
         view.components.r = VK_COMPONENT_SWIZZLE_R;
         view.components.g = VK_COMPONENT_SWIZZLE_G;
         view.components.b = VK_COMPONENT_SWIZZLE_B;
         view.components.a = VK_COMPONENT_SWIZZLE_A;
      }
      view.subresourceRange.aspectMask     = VK_IMAGE_ASPECT_COLOR_BIT;
      view.subresourceRange.baseMipLevel   = 0;
      view.subresourceRange.levelCount     = info.mipLevels;
      view.subresourceRange.baseArrayLayer = 0;
      view.subresourceRange.layerCount     = 1;

      vkCreateImageView(device, &view, NULL, &tex.view);
   }
   else
      tex.view = VK_NULL_HANDLE;

   if (     tex.image
         && info.tiling == VK_IMAGE_TILING_LINEAR)
      vkGetImageSubresourceLayout(device, tex.image, &subresource, &layout);
   else if (tex.buffer)
   {
      layout.offset   = 0;
      layout.size     = buffer_info.size;
      layout.rowPitch = buffer_width;
   }
   else
      memset(&layout, 0, sizeof(layout));

   tex.stride = layout.rowPitch;
   tex.offset = layout.offset;
   tex.size   = layout.size;
   tex.layout = info.initialLayout;
   tex.width  = width;
   tex.height = height;
   tex.format = format;
   tex.type   = type;

   if (initial)
   {
      switch (type)
      {
         case VULKAN_TEXTURE_STREAMED:
         case VULKAN_TEXTURE_STAGING:
            {
               unsigned y;
               uint8_t *dst       = NULL;
               const uint8_t *src = NULL;
               void *ptr          = NULL;
               unsigned bpp       = vulkan_format_to_bpp(tex.format);
               unsigned stride    = tex.width * bpp;

               vkMapMemory(device, tex.memory, tex.offset, tex.size, 0, &ptr);

               /* Copy row by row; tex.stride may be larger than the
                * tightly-packed source stride. */
               dst = (uint8_t*)ptr;
               src = (const uint8_t*)initial;
               for (y = 0; y < tex.height; y++, dst += tex.stride, src += stride)
                  memcpy(dst, src, width * bpp);

               if (     (tex.flags & VK_TEX_FLAG_NEED_MANUAL_CACHE_MANAGEMENT)
                     && (tex.memory != VK_NULL_HANDLE))
                  VULKAN_SYNC_TEXTURE_TO_GPU(vk->context->device, tex.memory);

               vkUnmapMemory(device, tex.memory);
            }
            break;

         case VULKAN_TEXTURE_STATIC:
            {
               VkBufferImageCopy region;
               VkCommandBuffer staging;
               VkSubmitInfo submit_info;
               VkCommandBufferBeginInfo begin_info;
               VkCommandBufferAllocateInfo cmd_info;
               VkImageLayout layout_fmt =
                     (tex.flags & VK_TEX_FLAG_MIPMAP)
                   ? VK_IMAGE_LAYOUT_GENERAL
                   : VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL;
               struct vk_texture tmp = vulkan_create_texture(vk, NULL,
                     width, height, format, initial, NULL, VULKAN_TEXTURE_STAGING);

               cmd_info.sType              = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO;
               cmd_info.pNext              = NULL;
               cmd_info.commandPool        = vk->staging_pool;
               cmd_info.level              = VK_COMMAND_BUFFER_LEVEL_PRIMARY;
               cmd_info.commandBufferCount = 1;

               vkAllocateCommandBuffers(vk->context->device,
                     &cmd_info, &staging);

               begin_info.sType            = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
               begin_info.pNext            = NULL;
               begin_info.flags            = VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT;
               begin_info.pInheritanceInfo = NULL;

               vkBeginCommandBuffer(staging, &begin_info);

               /* If doing mipmapping on upload, keep in general
                * so we can easily do transfers to
                * and transfers from the images without having to
                * mess around with lots of extra transitions at
                * per-level granularity.
                */
               VULKAN_IMAGE_LAYOUT_TRANSITION(
                     staging,
                     tex.image,
                     VK_IMAGE_LAYOUT_UNDEFINED,
                     layout_fmt,
                     0, VK_ACCESS_TRANSFER_WRITE_BIT,
                     VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT,
                     VK_PIPELINE_STAGE_TRANSFER_BIT);

               memset(&region, 0, sizeof(region));
               region.imageSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
               region.imageSubresource.layerCount = 1;
               region.imageExtent.width           = width;
               region.imageExtent.height          = height;
               region.imageExtent.depth           = 1;

               vkCmdCopyBufferToImage(staging, tmp.buffer,
                     tex.image, layout_fmt, 1, &region);

               if (tex.flags & VK_TEX_FLAG_MIPMAP)
               {
                  /* Generate each level by blitting from the previous one. */
                  for (i = 1; i < info.mipLevels; i++)
                  {
                     VkImageBlit blit_region;
                     unsigned src_width     = MAX(width  >> (i - 1), 1);
                     unsigned src_height    = MAX(height >> (i - 1), 1);
                     unsigned target_width  = MAX(width  >> i, 1);
                     unsigned target_height = MAX(height >> i, 1);

                     memset(&blit_region, 0, sizeof(blit_region));
                     blit_region.srcSubresource.aspectMask     = VK_IMAGE_ASPECT_COLOR_BIT;
                     blit_region.srcSubresource.mipLevel       = i - 1;
                     blit_region.srcSubresource.baseArrayLayer = 0;
                     blit_region.srcSubresource.layerCount     = 1;
                     blit_region.dstSubresource                = blit_region.srcSubresource;
                     blit_region.dstSubresource.mipLevel       = i;
                     blit_region.srcOffsets[1].x               = src_width;
                     blit_region.srcOffsets[1].y               = src_height;
                     blit_region.srcOffsets[1].z               = 1;
                     blit_region.dstOffsets[1].x               = target_width;
                     blit_region.dstOffsets[1].y               = target_height;
                     blit_region.dstOffsets[1].z               = 1;

                     /* Only injects execution and memory barriers,
                      * not actual transition. */
                     VULKAN_IMAGE_LAYOUT_TRANSITION(
                           staging,
                           tex.image,
                           VK_IMAGE_LAYOUT_GENERAL,
                           VK_IMAGE_LAYOUT_GENERAL,
                           VK_ACCESS_TRANSFER_WRITE_BIT,
                           VK_ACCESS_TRANSFER_READ_BIT,
                           VK_PIPELINE_STAGE_TRANSFER_BIT,
                           VK_PIPELINE_STAGE_TRANSFER_BIT);

                     vkCmdBlitImage(
                           staging,
                           tex.image,
                           VK_IMAGE_LAYOUT_GENERAL,
                           tex.image,
                           VK_IMAGE_LAYOUT_GENERAL,
                           1,
                           &blit_region,
                           VK_FILTER_LINEAR);
                  }
               }

               /* Complete our texture. */
               VULKAN_IMAGE_LAYOUT_TRANSITION(
                     staging,
                     tex.image,
                     layout_fmt,
                     VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL,
                     VK_ACCESS_TRANSFER_WRITE_BIT,
                     VK_ACCESS_SHADER_READ_BIT,
                     VK_PIPELINE_STAGE_TRANSFER_BIT,
                     VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT);

               vkEndCommandBuffer(staging);

               submit_info.sType                = VK_STRUCTURE_TYPE_SUBMIT_INFO;
               submit_info.pNext                = NULL;
               submit_info.waitSemaphoreCount   = 0;
               submit_info.pWaitSemaphores      = NULL;
               submit_info.pWaitDstStageMask    = NULL;
               submit_info.commandBufferCount   = 1;
               submit_info.pCommandBuffers      = &staging;
               submit_info.signalSemaphoreCount = 0;
               submit_info.pSignalSemaphores    = NULL;

#ifdef HAVE_THREADS
               slock_lock(vk->context->queue_lock);
#endif
               vkQueueSubmit(vk->context->queue,
                     1, &submit_info, VK_NULL_HANDLE);

               /* TODO: Very crude, but texture uploads only happen
                * during init, so waiting for GPU to complete transfer
                * and blocking isn't a big deal. */
               vkQueueWaitIdle(vk->context->queue);
#ifdef HAVE_THREADS
               slock_unlock(vk->context->queue_lock);
#endif

               vkFreeCommandBuffers(vk->context->device,
                     vk->staging_pool, 1, &staging);
               vulkan_destroy_texture(
                     vk->context->device, &tmp);
               tex.layout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
            }
            break;

         case VULKAN_TEXTURE_DYNAMIC:
         case VULKAN_TEXTURE_READBACK:
            /* TODO/FIXME - stubs */
            break;
      }
   }

   return tex;
}
/* Copies/converts a staging texture into a dynamic texture on the GPU.
 * Dynamic texture type should be set to: VULKAN_TEXTURE_DYNAMIC
 * Staging texture type should be set to: VULKAN_TEXTURE_STAGING
 *
 * When the formats differ (RGB565 staging remapped to RGBA8888 dynamic),
 * a compute shader performs the conversion; otherwise a plain
 * buffer-to-image copy is recorded. Either way the dynamic texture ends
 * in SHADER_READ_ONLY_OPTIMAL layout. */
static void vulkan_copy_staging_to_dynamic(vk_t *vk, VkCommandBuffer cmd,
      struct vk_texture *dynamic, struct vk_texture *staging)
{
   bool compute_upload = dynamic->format != staging->format;

   if (compute_upload)
   {
      const uint32_t ubo[3] = {
         dynamic->width, dynamic->height,
         (uint32_t)(staging->stride / 4) /* in terms of u32 words */
      };
      VkWriteDescriptorSet write;
      VkDescriptorBufferInfo buffer_info;
      VkDescriptorImageInfo image_info;
      struct vk_buffer_range range;
      VkDescriptorSet set;

      VULKAN_IMAGE_LAYOUT_TRANSITION(
            cmd,
            dynamic->image,
            VK_IMAGE_LAYOUT_UNDEFINED,
            VK_IMAGE_LAYOUT_GENERAL,
            0,
            VK_ACCESS_SHADER_WRITE_BIT,
            VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT,
            VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT);

      /* staging->format is always RGB565 here.
       * Can be expanded as needed if more cases are added
       * to VK_REMAP_TO_TEXFMT. */
      retro_assert(staging->format == VK_FORMAT_R5G6B5_UNORM_PACK16);

      set = vulkan_descriptor_manager_alloc(
            vk->context->device,
            &vk->chain->descriptor_manager);

      if (!vulkan_buffer_chain_alloc(vk->context, &vk->chain->ubo,
               sizeof(ubo), &range))
         return;
      memcpy(range.data, ubo, sizeof(ubo));

      VULKAN_SET_UNIFORM_BUFFER(vk->context->device,
            set,
            0,
            range.buffer,
            range.offset,
            sizeof(ubo));

      image_info.imageLayout = VK_IMAGE_LAYOUT_GENERAL;
      image_info.imageView   = dynamic->view;
      image_info.sampler     = VK_NULL_HANDLE;

      buffer_info.buffer     = staging->buffer;
      buffer_info.offset     = 0;
      buffer_info.range      = VK_WHOLE_SIZE;

      /* Binding 3: storage image (destination). */
      write.sType            = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
      write.pNext            = NULL;
      write.dstSet           = set;
      write.dstBinding       = 3;
      write.dstArrayElement  = 0;
      write.descriptorCount  = 1;
      write.descriptorType   = VK_DESCRIPTOR_TYPE_STORAGE_IMAGE;
      write.pImageInfo       = &image_info;
      write.pBufferInfo      = NULL;
      write.pTexelBufferView = NULL;
      vkUpdateDescriptorSets(vk->context->device, 1, &write, 0, NULL);

      /* Binding 4: storage buffer (RGB565 source data). */
      write.descriptorType   = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER;
      write.dstBinding       = 4;
      write.pImageInfo       = NULL;
      write.pBufferInfo      = &buffer_info;
      vkUpdateDescriptorSets(vk->context->device, 1, &write, 0, NULL);

      vkCmdBindPipeline(cmd, VK_PIPELINE_BIND_POINT_COMPUTE,
            vk->pipelines.rgb565_to_rgba8888);
      vkCmdBindDescriptorSets(cmd, VK_PIPELINE_BIND_POINT_COMPUTE,
            vk->pipelines.layout, 0, 1, &set, 0, NULL);
      /* Dispatch matches a 16x8 workgroup size. */
      vkCmdDispatch(cmd, (dynamic->width + 15) / 16,
            (dynamic->height + 7) / 8, 1);

      VULKAN_IMAGE_LAYOUT_TRANSITION(
            cmd,
            dynamic->image,
            VK_IMAGE_LAYOUT_GENERAL,
            VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL,
            VK_ACCESS_SHADER_WRITE_BIT,
            VK_ACCESS_SHADER_READ_BIT,
            VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT,
            VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT);
   }
   else
   {
      VkBufferImageCopy region;

      VULKAN_IMAGE_LAYOUT_TRANSITION(
            cmd,
            dynamic->image,
            VK_IMAGE_LAYOUT_UNDEFINED,
            VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
            0,
            VK_ACCESS_TRANSFER_WRITE_BIT,
            VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT,
            VK_PIPELINE_STAGE_TRANSFER_BIT);

      region.bufferOffset                    = 0;
      region.bufferRowLength                 = 0;
      region.bufferImageHeight               = 0;
      region.imageSubresource.aspectMask     = VK_IMAGE_ASPECT_COLOR_BIT;
      region.imageSubresource.mipLevel       = 0;
      region.imageSubresource.baseArrayLayer = 0;
      region.imageSubresource.layerCount     = 1;
      region.imageOffset.x                   = 0;
      region.imageOffset.y                   = 0;
      region.imageOffset.z                   = 0;
      region.imageExtent.width               = dynamic->width;
      region.imageExtent.height              = dynamic->height;
      region.imageExtent.depth               = 1;

      vkCmdCopyBufferToImage(
            cmd,
            staging->buffer,
            dynamic->image,
            VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
            1,
            &region);

      VULKAN_IMAGE_LAYOUT_TRANSITION(
            cmd,
            dynamic->image,
            VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
            VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL,
            VK_ACCESS_TRANSFER_WRITE_BIT,
            VK_ACCESS_SHADER_READ_BIT,
            VK_PIPELINE_STAGE_TRANSFER_BIT,
            VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT);
   }

   dynamic->layout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
}
2023-05-31 19:03:45 +02:00
/**
* FORWARD DECLARATIONS
*/
static void vulkan_set_viewport ( void * data , unsigned viewport_width ,
unsigned viewport_height , bool force_full , bool allow_rotate ) ;
static bool vulkan_is_mapped_swapchain_texture_ptr ( const vk_t * vk ,
const void * ptr ) ;
# ifdef HAVE_OVERLAY
static void vulkan_overlay_free ( vk_t * vk ) ;
static void vulkan_render_overlay ( vk_t * vk , unsigned width , unsigned height ) ;
# endif
static void vulkan_viewport_info ( void * data , struct video_viewport * vp ) ;
2023-05-31 23:58:06 +02:00
/**
* DISPLAY DRIVER
*/
/* Will do Y-flip later, but try to make it similar to GL. */
static const float vk_vertexes [ 8 ] = {
0 , 0 ,
1 , 0 ,
0 , 1 ,
1 , 1
} ;
static const float vk_tex_coords [ 8 ] = {
0 , 1 ,
1 , 1 ,
0 , 0 ,
1 , 0
} ;
static const float vk_colors [ 16 ] = {
1.0f , 1.0f , 1.0f , 1.0f ,
1.0f , 1.0f , 1.0f , 1.0f ,
1.0f , 1.0f , 1.0f , 1.0f ,
1.0f , 1.0f , 1.0f , 1.0f ,
} ;
static void * gfx_display_vk_get_default_mvp ( void * data )
{
vk_t * vk = ( vk_t * ) data ;
if ( ! vk )
return NULL ;
return & vk - > mvp_no_rot ;
}
/* Returns the default unit-quad vertex array. */
static const float *gfx_display_vk_get_default_vertices(void)
{
   return vk_vertexes;
}
/* Returns the default (V-flipped) texture coordinate array. */
static const float *gfx_display_vk_get_default_tex_coords(void)
{
   return vk_tex_coords;
}
# ifdef HAVE_SHADERPIPELINE
/* Maps a menu shader-pipeline ID to an index into vk->display.pipelines.
 * Each menu shader owns a pair of pipelines: the base index is used for
 * triangle lists, base + 1 for triangle strips. Unknown IDs map to 0. */
static unsigned to_menu_pipeline(
      enum gfx_display_prim_type type, unsigned pipeline)
{
   unsigned base;
   switch (pipeline)
   {
      case VIDEO_SHADER_MENU:
         base = 6;
         break;
      case VIDEO_SHADER_MENU_2:
         base = 8;
         break;
      case VIDEO_SHADER_MENU_3:
         base = 10;
         break;
      case VIDEO_SHADER_MENU_4:
         base = 12;
         break;
      case VIDEO_SHADER_MENU_5:
         base = 14;
         break;
      default:
         return 0;
   }
   if (type == GFX_DISPLAY_PRIM_TRIANGLESTRIP)
      base++;
   return base;
}
/* Prepares UBO contents and coordinates for the animated menu shader
 * pipelines (ribbon / snow). Packs a small scratch UBO whose byte layout
 * must match the corresponding shader's uniform block exactly. */
static void gfx_display_vk_draw_pipeline(
      gfx_display_ctx_draw_t *draw,
      gfx_display_t *p_disp,
      void *data, unsigned video_width, unsigned video_height)
{
   /* Scratch storage reused across calls; large enough for
    * mat4 + 4 floats with room to spare. */
   static uint8_t ubo_scratch_data[768];
   static struct video_coords blank_coords;
   /* Animation time, advanced a fixed step per call. */
   static float t          = 0.0f;
   float output_size[2];
   float yflip             = 1.0f;
   video_coord_array_t *ca = NULL;
   vk_t *vk                = (vk_t*)data;

   if (!vk || !draw)
      return;

   draw->x           = 0;
   draw->y           = 0;
   draw->matrix_data = NULL;

   output_size[0]    = (float)vk->context->swapchain_width;
   output_size[1]    = (float)vk->context->swapchain_height;

   switch (draw->pipeline_id)
   {
      /* Ribbon */
      default:
      case VIDEO_SHADER_MENU:
      case VIDEO_SHADER_MENU_2:
         ca                      = &p_disp->dispca;
         draw->coords            = (struct video_coords*)&ca->coords;
         draw->backend_data      = ubo_scratch_data;
         draw->backend_data_size = 2 * sizeof(float);

         /* Match UBO layout in shader: { float time; float yflip; } */
         memcpy(ubo_scratch_data, &t, sizeof(t));
         memcpy(ubo_scratch_data + sizeof(float), &yflip, sizeof(yflip));
         break;

      /* Snow simple */
      case VIDEO_SHADER_MENU_3:
      case VIDEO_SHADER_MENU_4:
      case VIDEO_SHADER_MENU_5:
         draw->backend_data      = ubo_scratch_data;
         draw->backend_data_size = sizeof(math_matrix_4x4)
               + 4 * sizeof(float);

         /* Match UBO layout in shader:
          * { mat4 mvp; vec2 output_size; float time; float yflip; } */
         memcpy(ubo_scratch_data,
               &vk->mvp_no_rot,
               sizeof(math_matrix_4x4));
         memcpy(ubo_scratch_data + sizeof(math_matrix_4x4),
               output_size,
               sizeof(output_size));

         /* Shader uses FragCoord, need to fix up. */
         if (draw->pipeline_id == VIDEO_SHADER_MENU_5)
            yflip = -1.0f;

         memcpy(ubo_scratch_data + sizeof(math_matrix_4x4)
               + 2 * sizeof(float), &t, sizeof(t));
         memcpy(ubo_scratch_data + sizeof(math_matrix_4x4)
               + 3 * sizeof(float), &yflip, sizeof(yflip));
         /* Draw a full-screen strip; shader generates everything else. */
         draw->coords          = &blank_coords;
         blank_coords.vertices = 4;
         draw->prim_type       = GFX_DISPLAY_PRIM_TRIANGLESTRIP;
         break;
   }

   t += 0.01;
}
# endif
/* Generic menu/widget draw entry point. Fills in defaults for any missing
 * coordinate/color/texture data, bakes an interleaved vertex buffer from
 * the separate input arrays, and dispatches either a menu-shader pipeline
 * or the plain textured-quad pipeline. */
static void gfx_display_vk_draw(gfx_display_ctx_draw_t *draw,
      void *data, unsigned video_width, unsigned video_height)
{
   unsigned i;
   struct vk_buffer_range range;
   struct vk_texture *texture = NULL;
   const float *vertex        = NULL;
   const float *tex_coord     = NULL;
   const float *color         = NULL;
   struct vk_vertex *pv       = NULL;
   vk_t *vk                   = (vk_t*)data;

   if (!vk || !draw)
      return;

   texture   = (struct vk_texture*)draw->texture;
   vertex    = draw->coords->vertex;
   tex_coord = draw->coords->tex_coord;
   color     = draw->coords->color;

   /* Substitute defaults for anything the caller left unset. */
   if (!vertex)
      vertex = &vk_vertexes[0];
   if (!tex_coord)
      tex_coord = &vk_tex_coords[0];
   if (!draw->coords->lut_tex_coord)
      draw->coords->lut_tex_coord = &vk_tex_coords[0];
   if (!texture)
      texture = &vk->display.blank_texture;
   if (!color)
      color = &vk_colors[0];

   /* Viewport: convert from top-left menu coordinates to Vulkan's
    * bottom-up framebuffer Y. */
   vk->vk_vp.x        = draw->x;
   vk->vk_vp.y        = vk->context->swapchain_height - draw->y - draw->height;
   vk->vk_vp.width    = draw->width;
   vk->vk_vp.height   = draw->height;
   vk->vk_vp.minDepth = 0.0f;
   vk->vk_vp.maxDepth = 1.0f;

   vk->tracker.dirty |= VULKAN_DIRTY_DYNAMIC_BIT;

   /* Bake interleaved VBO. Kinda ugly, we should probably try to move to
    * an interleaved model to begin with ... */
   if (!vulkan_buffer_chain_alloc(vk->context, &vk->chain->vbo,
            draw->coords->vertices * sizeof(struct vk_vertex), &range))
      return;

   pv = (struct vk_vertex*)range.data;
   for (i = 0; i < draw->coords->vertices; i++, pv++)
   {
      pv->x       = *vertex++;
      /* Y-flip. Vulkan is top-left clip space */
      pv->y       = 1.0f - (*vertex++);
      pv->tex_x   = *tex_coord++;
      pv->tex_y   = *tex_coord++;
      pv->color.r = *color++;
      pv->color.g = *color++;
      pv->color.b = *color++;
      pv->color.a = *color++;
   }

   switch (draw->pipeline_id)
   {
#ifdef HAVE_SHADERPIPELINE
      case VIDEO_SHADER_MENU:
      case VIDEO_SHADER_MENU_2:
      case VIDEO_SHADER_MENU_3:
      case VIDEO_SHADER_MENU_4:
      case VIDEO_SHADER_MENU_5:
         {
            /* Animated menu shaders: untextured, UBO prepared by
             * gfx_display_vk_draw_pipeline. */
            struct vk_draw_triangles call;
            call.pipeline     = vk->display.pipelines[
                  to_menu_pipeline(draw->prim_type, draw->pipeline_id)];
            call.texture      = NULL;
            call.sampler      = VK_NULL_HANDLE;
            call.uniform      = draw->backend_data;
            call.uniform_size = draw->backend_data_size;
            call.vbo          = &range;
            call.vertices     = draw->coords->vertices;
            vulkan_draw_triangles(vk, &call);
         }
         break;
#endif

      default:
         {
            struct vk_draw_triangles call;
            /* Pipeline index: bit 1 = triangle strip, bit 0 = blending. */
            unsigned disp_pipeline =
                  ((draw->prim_type == GFX_DISPLAY_PRIM_TRIANGLESTRIP) << 1)
                | (((vk->flags & VK_FLAG_DISPLAY_BLEND) > 0) << 0);
            call.pipeline     = vk->display.pipelines[disp_pipeline];
            call.texture      = texture;
            /* Sampler choice mirrors the texture's filtering flags. */
            call.sampler      = (texture->flags & VK_TEX_FLAG_MIPMAP)
                  ? vk->samplers.mipmap_linear
                  : ((texture->flags & VK_TEX_FLAG_DEFAULT_SMOOTH)
                        ? vk->samplers.linear
                        : vk->samplers.nearest);
            call.uniform      = draw->matrix_data
                  ? draw->matrix_data : &vk->mvp_no_rot;
            call.uniform_size = sizeof(math_matrix_4x4);
            call.vbo          = &range;
            call.vertices     = draw->coords->vertices;
            vulkan_draw_triangles(vk, &call);
         }
         break;
   }
}
static void gfx_display_vk_blend_begin ( void * data )
{
vk_t * vk = ( vk_t * ) data ;
if ( vk )
vk - > flags | = VK_FLAG_DISPLAY_BLEND ;
}
static void gfx_display_vk_blend_end ( void * data )
{
vk_t * vk = ( vk_t * ) data ;
if ( vk )
vk - > flags & = ~ VK_FLAG_DISPLAY_BLEND ;
}
static void gfx_display_vk_scissor_begin (
void * data ,
unsigned video_width ,
unsigned video_height ,
int x , int y , unsigned width , unsigned height )
{
vk_t * vk = ( vk_t * ) data ;
vk - > tracker . scissor . offset . x = x ;
vk - > tracker . scissor . offset . y = y ;
vk - > tracker . scissor . extent . width = width ;
vk - > tracker . scissor . extent . height = height ;
vk - > flags | = VK_FLAG_TRACKER_USE_SCISSOR ;
vk - > tracker . dirty | = VULKAN_DIRTY_DYNAMIC_BIT ;
}
static void gfx_display_vk_scissor_end ( void * data ,
unsigned video_width ,
unsigned video_height )
{
vk_t * vk = ( vk_t * ) data ;
vk - > flags & = ~ VK_FLAG_TRACKER_USE_SCISSOR ;
vk - > tracker . dirty | = VULKAN_DIRTY_DYNAMIC_BIT ;
}
/* Display-context driver vtable for the Vulkan backend. */
gfx_display_ctx_driver_t gfx_display_ctx_vulkan = {
   gfx_display_vk_draw,
#ifdef HAVE_SHADERPIPELINE
   gfx_display_vk_draw_pipeline,
#else
   NULL, /* draw_pipeline */
#endif
   gfx_display_vk_blend_begin,
   gfx_display_vk_blend_end,
   gfx_display_vk_get_default_mvp,
   gfx_display_vk_get_default_vertices,
   gfx_display_vk_get_default_tex_coords,
   FONT_DRIVER_RENDER_VULKAN_API,
   GFX_VIDEO_DRIVER_VULKAN,
   "vulkan",
   false, /* NOTE(review): presumably handles_transform — confirm field name
           * against gfx_display_ctx_driver_t declaration */
   gfx_display_vk_scissor_begin,
   gfx_display_vk_scissor_end
};
2023-05-31 19:03:45 +02:00
/**
* FONT DRIVER
*/
2023-05-31 23:58:06 +02:00
2023-05-31 19:03:45 +02:00
/* Copies one glyph's rows from the CPU font atlas into the persistently
 * mapped staging texture (row-by-row since strides differ). */
static INLINE void vulkan_font_update_glyph(
      vulkan_raster_t *font, const struct font_glyph *glyph)
{
   unsigned y;
   unsigned col     = glyph->atlas_offset_x;
   unsigned row_end = glyph->atlas_offset_y + glyph->height;

   for (y = glyph->atlas_offset_y; y < row_end; y++)
   {
      uint8_t *src = font->atlas->buffer
            + y * font->atlas->width + col;
      uint8_t *dst = (uint8_t*)font->texture.mapped
            + y * font->texture.stride + col;
      memcpy(dst, src, glyph->width);
   }
}
static void vulkan_font_free ( void * data , bool is_threaded )
{
vulkan_raster_t * font = ( vulkan_raster_t * ) data ;
if ( ! font )
return ;
if ( font - > font_driver & & font - > font_data )
font - > font_driver - > free ( font - > font_data ) ;
vkQueueWaitIdle ( font - > vk - > context - > queue ) ;
vulkan_destroy_texture (
font - > vk - > context - > device , & font - > texture ) ;
vulkan_destroy_texture (
font - > vk - > context - > device , & font - > texture_optimal ) ;
free ( font ) ;
}
/* Creates a raster font instance backed by two R8 textures: a persistently
 * mapped staging texture the atlas is copied into, and a device-local
 * "optimal" texture that is actually sampled during rendering.
 * Returns the font handle, or NULL on failure. Caller frees via
 * vulkan_font_free(). */
static void *vulkan_font_init(void *data,
      const char *font_path, float font_size,
      bool is_threaded)
{
   vulkan_raster_t *font =
         (vulkan_raster_t*)calloc(1, sizeof(*font));

   if (!font)
      return NULL;

   font->vk = (vk_t*)data;

   if (!font_renderer_create_default(
            &font->font_driver,
            &font->font_data, font_path, font_size))
   {
      free(font);
      return NULL;
   }

   /* NOTE(review): get_atlas/vulkan_create_texture results are not
    * checked here — assumes they cannot fail once the renderer was
    * created; confirm. */
   font->atlas   = font->font_driver->get_atlas(font->font_data);
   font->texture = vulkan_create_texture(font->vk, NULL,
         font->atlas->width, font->atlas->height, VK_FORMAT_R8_UNORM, font->atlas->buffer,
         NULL, VULKAN_TEXTURE_STAGING);

   {
      struct vk_texture *texture = &font->texture;
      /* Keep the staging texture mapped for the font's lifetime. */
      VK_MAP_PERSISTENT_TEXTURE(font->vk->context->device, texture);
   }

   font->texture_optimal = vulkan_create_texture(font->vk, NULL,
         font->atlas->width, font->atlas->height, VK_FORMAT_R8_UNORM, NULL,
         NULL, VULKAN_TEXTURE_DYNAMIC);

   /* Force an initial staging -> optimal upload on first flush. */
   font->needs_update = true;

   return font;
}
static int vulkan_get_message_width ( void * data , const char * msg ,
size_t msg_len , float scale )
{
const struct font_glyph * glyph_q = NULL ;
vulkan_raster_t * font = ( vulkan_raster_t * ) data ;
const char * msg_end = msg + msg_len ;
int delta_x = 0 ;
if ( ! font
| | ! font - > font_driver
| | ! font - > font_data )
return 0 ;
glyph_q = font - > font_driver - > get_glyph ( font - > font_data , ' ? ' ) ;
while ( msg < msg_end )
{
const struct font_glyph * glyph ;
uint32_t code = utf8_walk ( & msg ) ;
/* Do something smarter here ... */
if ( ! ( glyph = font - > font_driver - > get_glyph (
font - > font_data , code ) ) )
if ( ! ( glyph = glyph_q ) )
continue ;
if ( font - > atlas - > dirty )
{
vulkan_font_update_glyph ( font , glyph ) ;
font - > atlas - > dirty = false ;
font - > needs_update = true ;
}
delta_x + = glyph - > advance_x ;
}
return delta_x * scale ;
}
/* Emits the vertex data for one line of text into font->pv.
 * pos_x/pos_y are normalized [0,1] window coordinates; pre_x is the
 * precomputed pixel X origin shared by all lines of a message; the
 * inv_* parameters are reciprocal texture/window dimensions used to
 * normalize pixel quantities. Appends 6 vertices per glyph and advances
 * font->vertices accordingly. */
static void vulkan_font_render_line(vk_t *vk,
      vulkan_raster_t *font,
      const struct font_glyph *glyph_q,
      const char *msg, size_t msg_len,
      float scale,
      const float color[4],
      float pos_x,
      float pos_y,
      int pre_x,
      float inv_tex_size_x,
      float inv_tex_size_y,
      float inv_win_width,
      float inv_win_height,
      unsigned text_align)
{
   struct vk_color vk_color;
   const char *msg_end = msg + msg_len;
   int x               = pre_x;
   /* Window Y grows downward from the top here; flip pos_y. */
   int y               = roundf((1.0f - pos_y) * vk->vp.height);
   int delta_x         = 0;
   int delta_y         = 0;

   vk_color.r          = color[0];
   vk_color.g          = color[1];
   vk_color.b          = color[2];
   vk_color.a          = color[3];

   /* Shift the origin left by the full/half line width for
    * right/center alignment. */
   switch (text_align)
   {
      case TEXT_ALIGN_RIGHT:
         x -= vulkan_get_message_width(font, msg, msg_len, scale);
         break;
      case TEXT_ALIGN_CENTER:
         x -= vulkan_get_message_width(font, msg, msg_len, scale) / 2;
         break;
   }

   while (msg < msg_end)
   {
      const struct font_glyph *glyph;
      int off_x, off_y, tex_x, tex_y, width, height;
      unsigned code = utf8_walk(&msg);

      /* Do something smarter here ... */
      if (!(glyph =
               font->font_driver->get_glyph(font->font_data, code)))
         if (!(glyph = glyph_q))
            continue;

      /* Newly rasterized glyph: sync atlas into the staging texture
       * and schedule a GPU upload. */
      if (font->atlas->dirty)
      {
         vulkan_font_update_glyph(font, glyph);
         font->atlas->dirty = false;
         font->needs_update = true;
      }

      off_x  = glyph->draw_offset_x;
      off_y  = glyph->draw_offset_y;
      tex_x  = glyph->atlas_offset_x;
      tex_y  = glyph->atlas_offset_y;
      width  = glyph->width;
      height = glyph->height;

      {
         struct vk_vertex *pv = font->pv + font->vertices;
         /* Normalize pen position + glyph extents to [0,1] window
          * space and atlas texels to [0,1] texture space. */
         float _x             = (x + (off_x + delta_x) * scale)
               * inv_win_width;
         float _y             = (y + (off_y + delta_y) * scale)
               * inv_win_height;
         float _width         = width * scale * inv_win_width;
         float _height        = height * scale * inv_win_height;
         float _tex_x         = tex_x * inv_tex_size_x;
         float _tex_y         = tex_y * inv_tex_size_y;
         float _tex_width     = width * inv_tex_size_x;
         float _tex_height    = height * inv_tex_size_y;
         const struct vk_color *_color = &vk_color;

         VULKAN_WRITE_QUAD_VBO(pv, _x, _y, _width, _height,
               _tex_x, _tex_y, _tex_width, _tex_height, _color);
      }

      font->vertices += 6;
      delta_x        += glyph->advance_x;
      delta_y        += glyph->advance_y;
   }
}
2023-06-03 20:56:19 +02:00
static void vulkan_font_render_message ( vk_t * vk ,
2023-05-31 19:03:45 +02:00
vulkan_raster_t * font , const char * msg , float scale ,
const float color [ 4 ] , float pos_x , float pos_y ,
unsigned text_align )
{
2023-06-03 18:34:47 +02:00
float line_height ;
2023-05-31 19:03:45 +02:00
struct font_line_metrics * line_metrics = NULL ;
2023-06-03 20:56:19 +02:00
const struct font_glyph * glyph_q = font - > font_driver - > get_glyph ( font - > font_data , ' ? ' ) ;
int x = roundf ( pos_x * vk - > vp . width ) ;
2023-05-31 19:03:45 +02:00
int lines = 0 ;
2023-06-03 20:56:19 +02:00
float inv_tex_size_x = 1.0f / font - > texture . width ;
float inv_tex_size_y = 1.0f / font - > texture . height ;
float inv_win_width = 1.0f / vk - > vp . width ;
float inv_win_height = 1.0f / vk - > vp . height ;
2023-06-03 18:34:47 +02:00
font - > font_driver - > get_line_metrics ( font - > font_data , & line_metrics ) ;
2023-06-03 20:56:19 +02:00
line_height = line_metrics - > height * scale / vk - > vp . height ;
2023-05-31 19:03:45 +02:00
for ( ; ; )
{
const char * delim = strchr ( msg , ' \n ' ) ;
2023-06-14 19:51:31 +02:00
size_t msg_len = delim ? ( size_t ) ( delim - msg ) : strlen ( msg ) ;
2023-05-31 19:03:45 +02:00
/* Draw the line */
2023-06-03 20:56:19 +02:00
vulkan_font_render_line ( vk , font , glyph_q , msg , msg_len ,
scale , color ,
pos_x ,
pos_y - ( float ) lines * line_height ,
x ,
inv_tex_size_x ,
inv_tex_size_y ,
inv_win_width ,
inv_win_height ,
2023-05-31 19:03:45 +02:00
text_align ) ;
if ( ! delim )
break ;
msg + = msg_len + 1 ;
lines + + ;
}
}
2023-06-03 20:56:19 +02:00
/* Submits the accumulated font vertices for drawing. If glyphs changed
 * since the last flush, first records and synchronously submits a one-off
 * command buffer that copies the staging texture into the device-local
 * texture that gets sampled. */
static void vulkan_font_flush(vk_t *vk, vulkan_raster_t *font)
{
   struct vk_draw_triangles call;

   call.pipeline     = vk->pipelines.font;
   call.texture      = &font->texture_optimal;
   call.sampler      = vk->samplers.mipmap_linear;
   call.uniform      = &vk->mvp;
   call.uniform_size = sizeof(vk->mvp);
   call.vbo          = &font->range;
   call.vertices     = font->vertices;

   if (font->needs_update)
   {
      VkCommandBuffer staging;
      VkSubmitInfo submit_info;
      VkCommandBufferAllocateInfo cmd_info;
      VkCommandBufferBeginInfo begin_info;
      struct vk_texture *dynamic_tex = NULL;
      struct vk_texture *staging_tex = NULL;

      /* One-shot command buffer from the staging pool. */
      cmd_info.sType              = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO;
      cmd_info.pNext              = NULL;
      cmd_info.commandPool        = vk->staging_pool;
      cmd_info.level              = VK_COMMAND_BUFFER_LEVEL_PRIMARY;
      cmd_info.commandBufferCount = 1;
      vkAllocateCommandBuffers(vk->context->device, &cmd_info, &staging);

      begin_info.sType            = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
      begin_info.pNext            = NULL;
      begin_info.flags            = VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT;
      begin_info.pInheritanceInfo = NULL;
      vkBeginCommandBuffer(staging, &begin_info);

      /* Flush CPU writes to the mapped staging texture if needed. */
      VULKAN_SYNC_TEXTURE_TO_GPU_COND_OBJ(vk, font->texture);

      dynamic_tex = &font->texture_optimal;
      staging_tex = &font->texture;

      vulkan_copy_staging_to_dynamic(vk, staging,
            dynamic_tex, staging_tex);

      vkEndCommandBuffer(staging);

#ifdef HAVE_THREADS
      slock_lock(vk->context->queue_lock);
#endif

      submit_info.sType                = VK_STRUCTURE_TYPE_SUBMIT_INFO;
      submit_info.pNext                = NULL;
      submit_info.waitSemaphoreCount   = 0;
      submit_info.pWaitSemaphores      = NULL;
      submit_info.pWaitDstStageMask    = NULL;
      submit_info.commandBufferCount   = 1;
      submit_info.pCommandBuffers      = &staging;
      submit_info.signalSemaphoreCount = 0;
      submit_info.pSignalSemaphores    = NULL;
      vkQueueSubmit(vk->context->queue,
            1, &submit_info, VK_NULL_HANDLE);

      /* Synchronous upload: simple, but stalls the queue. */
      vkQueueWaitIdle(vk->context->queue);

#ifdef HAVE_THREADS
      slock_unlock(vk->context->queue_lock);
#endif

      vkFreeCommandBuffers(vk->context->device,
            vk->staging_pool, 1, &staging);

      font->needs_update = false;
   }

   vulkan_draw_triangles(vk, &call);
}
/* Font-driver entry point: renders msg with optional drop shadow.
 * userdata is the vk_t instance, data the vulkan_raster_t font; params
 * overrides position/scale/color/shadow, otherwise settings defaults
 * are used. */
static void vulkan_font_render_msg(
      void *userdata,
      void *data,
      const char *msg,
      const struct font_params *params)
{
   float color[4];
   int drop_x, drop_y;
   bool full_screen;
   size_t max_glyphs;
   unsigned width, height;
   enum text_alignment text_align;
   float x, y, scale, drop_mod, drop_alpha;
   vulkan_raster_t *font    = (vulkan_raster_t*)data;
   /* NOTE(review): settings is dereferenced below before the NULL checks
    * on font/msg/vk — assumes config_get_ptr() never returns NULL. */
   settings_t *settings     = config_get_ptr();
   float video_msg_pos_x    = settings->floats.video_msg_pos_x;
   float video_msg_pos_y    = settings->floats.video_msg_pos_y;
   float video_msg_color_r  = settings->floats.video_msg_color_r;
   float video_msg_color_g  = settings->floats.video_msg_color_g;
   float video_msg_color_b  = settings->floats.video_msg_color_b;
   vk_t *vk                 = (vk_t*)userdata;

   if (!font || !msg || !*msg || !vk)
      return;

   width  = vk->video_width;
   height = vk->video_height;

   if (params)
   {
      x           = params->x;
      y           = params->y;
      scale       = params->scale;
      full_screen = params->full_screen;
      text_align  = params->text_align;
      drop_x      = params->drop_x;
      drop_y      = params->drop_y;
      drop_mod    = params->drop_mod;
      drop_alpha  = params->drop_alpha;

      color[0]    = FONT_COLOR_GET_RED(params->color)   / 255.0f;
      color[1]    = FONT_COLOR_GET_GREEN(params->color) / 255.0f;
      color[2]    = FONT_COLOR_GET_BLUE(params->color)  / 255.0f;
      color[3]    = FONT_COLOR_GET_ALPHA(params->color) / 255.0f;

      /* If alpha is 0.0f, turn it into default 1.0f */
      if (color[3] <= 0.0f)
         color[3] = 1.0f;
   }
   else
   {
      /* Defaults from user settings. */
      x           = video_msg_pos_x;
      y           = video_msg_pos_y;
      scale       = 1.0f;
      full_screen = true;
      text_align  = TEXT_ALIGN_LEFT;
      drop_x      = -2;
      drop_y      = -2;
      drop_mod    = 0.3f;
      drop_alpha  = 1.0f;

      color[0]    = video_msg_color_r;
      color[1]    = video_msg_color_g;
      color[2]    = video_msg_color_b;
      color[3]    = 1.0f;
   }

   vulkan_set_viewport(vk, width, height, full_screen, false);

   /* Worst-case vertex count: every byte a glyph, doubled for shadow. */
   max_glyphs = strlen(msg);
   if (drop_x || drop_y)
      max_glyphs *= 2;

   if (!vulkan_buffer_chain_alloc(vk->context, &vk->chain->vbo,
            6 * sizeof(struct vk_vertex) * max_glyphs, &font->range))
      return;

   font->vertices = 0;
   font->pv       = (struct vk_vertex*)font->range.data;

   if (drop_x || drop_y)
   {
      /* Shadow pass first: darkened color, offset position. */
      float color_dark[4];
      color_dark[0] = color[0] * drop_mod;
      color_dark[1] = color[1] * drop_mod;
      color_dark[2] = color[2] * drop_mod;
      color_dark[3] = color[3] * drop_alpha;

      vulkan_font_render_message(vk, font, msg, scale, color_dark,
            x + scale * drop_x / vk->vp.width, y +
            scale * drop_y / vk->vp.height, text_align);
   }

   vulkan_font_render_message(vk, font, msg, scale,
         color, x, y, text_align);

   vulkan_font_flush(vk, font);
}
static const struct font_glyph * vulkan_font_get_glyph (
void * data , uint32_t code )
{
const struct font_glyph * glyph ;
vulkan_raster_t * font = ( vulkan_raster_t * ) data ;
2023-06-03 17:21:19 +02:00
if ( ! font | | ! font - > font_driver )
2023-05-31 19:03:45 +02:00
return NULL ;
glyph = font - > font_driver - > get_glyph ( ( void * ) font - > font_driver , code ) ;
if ( glyph & & font - > atlas - > dirty )
{
vulkan_font_update_glyph ( font , glyph ) ;
font - > atlas - > dirty = false ;
font - > needs_update = true ;
}
return glyph ;
}
static bool vulkan_get_line_metrics ( void * data ,
struct font_line_metrics * * metrics )
{
vulkan_raster_t * font = ( vulkan_raster_t * ) data ;
if ( font & & font - > font_driver & & font - > font_data )
2023-06-03 18:34:47 +02:00
{
font - > font_driver - > get_line_metrics ( font - > font_data , metrics ) ;
return true ;
}
2023-05-31 19:03:45 +02:00
return false ;
}
/* Raster font renderer vtable for the Vulkan video driver. */
font_renderer_t vulkan_raster_font = {
   vulkan_font_init,
   vulkan_font_free,
   vulkan_font_render_msg,
   "vulkan",
   vulkan_font_get_glyph,
   NULL, /* bind_block */
   NULL, /* flush_block */
   vulkan_get_message_width,
   vulkan_get_line_metrics
};
/*
* VIDEO DRIVER
*/
2022-05-21 01:39:55 +02:00
/* Builds a descriptor manager: records the pool sizes/layout and
 * allocates the first descriptor pool.
 * Fix: num_sizes was copied into the fixed-size manager.sizes array
 * without any bound check — a caller passing more than
 * VULKAAN-limit entries would overflow it. Assert the contract
 * (retro_assert.h is already included by this file). */
static struct vk_descriptor_manager vulkan_create_descriptor_manager(
      VkDevice device,
      const VkDescriptorPoolSize *sizes,
      unsigned num_sizes,
      VkDescriptorSetLayout set_layout)
{
   int i;
   struct vk_descriptor_manager manager;

   retro_assert(num_sizes <= VULKAN_MAX_DESCRIPTOR_POOL_SIZES);

   manager.current = NULL;
   manager.count   = 0;

   /* Pre-fill with harmless defaults, then overwrite the used slots. */
   for (i = 0; i < VULKAN_MAX_DESCRIPTOR_POOL_SIZES; i++)
   {
      manager.sizes[i].type            = VK_DESCRIPTOR_TYPE_SAMPLER;
      manager.sizes[i].descriptorCount = 0;
   }
   memcpy(manager.sizes, sizes, num_sizes * sizeof(*sizes));

   manager.set_layout = set_layout;
   manager.num_sizes  = num_sizes;
   manager.head       = vulkan_alloc_descriptor_pool(device, &manager);
   return manager;
}
/* Frees every descriptor pool in the manager's chain and resets the
 * manager to an all-zero state. */
static void vulkan_destroy_descriptor_manager(
      VkDevice device,
      struct vk_descriptor_manager *manager)
{
   struct vk_descriptor_pool *next = NULL;
   struct vk_descriptor_pool *node;

   for (node = manager->head; node; node = next)
   {
      next = node->next;

      vkFreeDescriptorSets(device, node->pool,
            VULKAN_DESCRIPTOR_MANAGER_BLOCK_SETS, node->sets);
      vkDestroyDescriptorPool(device, node->pool, NULL);

      free(node);
   }

   memset(manager, 0, sizeof(*manager));
}
/* Returns an empty buffer chain descriptor: no buffers are allocated yet;
 * blocks of block_size bytes with the given alignment/usage are created
 * lazily by vulkan_buffer_chain_alloc(). */
static struct vk_buffer_chain vulkan_buffer_chain_init(
      VkDeviceSize block_size,
      VkDeviceSize alignment,
      VkBufferUsageFlags usage)
{
   struct vk_buffer_chain chain;

   chain.block_size = block_size;
   chain.alignment  = alignment;
   chain.offset     = 0;
   chain.usage      = usage;
   chain.head       = NULL;
   chain.current    = NULL;

   return chain;
}
2023-05-29 19:13:47 +02:00
/* NULL-terminated list of Vulkan-capable context drivers, in probe order;
 * gfx_ctx_null is the always-available last resort. */
static const gfx_ctx_driver_t *gfx_ctx_vk_drivers[] = {
#if defined(__APPLE__)
   &gfx_ctx_cocoavk,
#endif
#if defined(_WIN32) && !defined(__WINRT__)
   &gfx_ctx_w_vk,
#endif
#if defined(ANDROID)
   &gfx_ctx_vk_android,
#endif
#if defined(HAVE_WAYLAND)
   &gfx_ctx_vk_wayland,
#endif
#if defined(HAVE_X11)
   &gfx_ctx_vk_x,
#endif
#if defined(HAVE_VULKAN_DISPLAY)
   &gfx_ctx_khr_display,
#endif
   &gfx_ctx_null,
   NULL
};
/* Initializes the first working Vulkan context driver.
 * First tries the driver whose ident matches the user's configured
 * context driver; if that is absent or fails, probes every driver in
 * gfx_ctx_vk_drivers[] in order. On success stores the context data in
 * the video driver state and returns the driver; returns NULL if none
 * initialize. */
static const gfx_ctx_driver_t *vk_context_driver_init_first(
      uint32_t runloop_flags,
      settings_t *settings,
      void *data,
      const char *ident, enum gfx_ctx_api api, unsigned major,
      unsigned minor, bool hw_render_ctx, void **ctx_data)
{
   unsigned j;
   int i = -1;
   video_driver_state_t *video_st = video_state_get_ptr();

   /* Look up the user-preferred driver by ident. */
   for (j = 0; gfx_ctx_vk_drivers[j]; j++)
   {
      if (string_is_equal_noncase(ident, gfx_ctx_vk_drivers[j]->ident))
      {
         i = j;
         break;
      }
   }

   if (i >= 0)
   {
      const gfx_ctx_driver_t *ctx = video_context_driver_init(
            (runloop_flags & RUNLOOP_FLAG_CORE_SET_SHARED_CONTEXT) ? true : false,
            settings,
            data,
            gfx_ctx_vk_drivers[i], ident,
            api, major, minor, hw_render_ctx, ctx_data);
      if (ctx)
      {
         video_st->context_data = *ctx_data;
         return ctx;
      }
   }

   /* Fallback: probe all drivers in declaration order.
    * NOTE(review): this also re-tries the preferred driver that just
    * failed above — presumably harmless; confirm intentional. */
   for (i = 0; gfx_ctx_vk_drivers[i]; i++)
   {
      const gfx_ctx_driver_t *ctx =
         video_context_driver_init(
               (runloop_flags & RUNLOOP_FLAG_CORE_SET_SHARED_CONTEXT) ? true : false,
               settings,
               data,
               gfx_ctx_vk_drivers[i], ident,
               api, major, minor, hw_render_ctx, ctx_data);
      if (ctx)
      {
         video_st->context_data = *ctx_data;
         return ctx;
      }
   }

   return NULL;
}
2022-05-19 15:28:26 +02:00
/* Acquires a Vulkan context driver (API version 1.0, no HW render
 * context) based on the configured context driver ident, and stashes
 * the resulting context data on the vk instance. */
static const gfx_ctx_driver_t *vulkan_get_context(vk_t *vk, settings_t *settings)
{
   void *ctx_data                  = NULL;
   uint32_t runloop_flags          = runloop_get_flags();
   const gfx_ctx_driver_t *gfx_ctx = vk_context_driver_init_first(
         runloop_flags, settings, vk,
         settings->arrays.video_context_driver,
         GFX_CTX_VULKAN_API, 1, 0, false, &ctx_data);

   if (ctx_data)
      vk->ctx_data = ctx_data;

   return gfx_ctx;
}
2016-02-29 20:07:11 +01:00
/* Creates the main render pass: one single-sampled color attachment in
 * the swapchain format, cleared on load, stored on completion, and left
 * in COLOR_ATTACHMENT_OPTIMAL layout. */
static void vulkan_init_render_pass(vk_t *vk)
{
   VkAttachmentDescription attachment = {0};
   VkAttachmentReference color_ref    = {0};
   VkSubpassDescription subpass       = {0};
   VkRenderPassCreateInfo rp_info     =
         { VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO };

   /* Backbuffer format, not multisampled. */
   attachment.format         = vk->context->swapchain_format;
   attachment.samples        = VK_SAMPLE_COUNT_1_BIT;
   /* Clear tiles when the frame starts, write them out when it ends. */
   attachment.loadOp         = VK_ATTACHMENT_LOAD_OP_CLEAR;
   attachment.storeOp        = VK_ATTACHMENT_STORE_OP_STORE;
   /* Stencil is unused. */
   attachment.stencilLoadOp  = VK_ATTACHMENT_LOAD_OP_DONT_CARE;
   attachment.stencilStoreOp = VK_ATTACHMENT_STORE_OP_DONT_CARE;
   /* Contents are fully overwritten, so the initial layout is
    * irrelevant; transition to attachment-optimal when done. */
   attachment.initialLayout  = VK_IMAGE_LAYOUT_UNDEFINED;
   attachment.finalLayout    = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;

   /* Reference to attachment 0 for the single subpass. */
   color_ref.attachment      = 0;
   color_ref.layout          = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;

   /* One graphics subpass with a single color attachment; all other
    * attachment arrays stay empty (zero-initialized above). */
   subpass.pipelineBindPoint    = VK_PIPELINE_BIND_POINT_GRAPHICS;
   subpass.colorAttachmentCount = 1;
   subpass.pColorAttachments    = &color_ref;

   rp_info.attachmentCount = 1;
   rp_info.pAttachments    = &attachment;
   rp_info.subpassCount    = 1;
   rp_info.pSubpasses      = &subpass;

   vkCreateRenderPass(vk->context->device,
         &rp_info, NULL, &vk->render_pass);
}
2024-05-13 18:10:25 +02:00
/* Creates the render pass used for HDR readback: identical to the main
 * render pass except the attachment is fixed BGRA8 (so the CPU can
 * memcpy the transfer result directly) and ends up in
 * TRANSFER_SRC_OPTIMAL layout, ready to be copied out. */
static void vulkan_init_hdr_readback_render_pass(vk_t *vk)
{
   VkAttachmentDescription attachment = {0};
   VkAttachmentReference color_ref    = {0};
   VkSubpassDescription subpass       = {0};
   VkRenderPassCreateInfo rp_info     =
         { VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO };

   /* BGRA backbuffer so the CPU can consume the readback as-is. */
   attachment.format         = VK_FORMAT_B8G8R8A8_UNORM;
   attachment.samples        = VK_SAMPLE_COUNT_1_BIT;
   /* Clear tiles when the frame starts, write them out when it ends. */
   attachment.loadOp         = VK_ATTACHMENT_LOAD_OP_CLEAR;
   attachment.storeOp        = VK_ATTACHMENT_STORE_OP_STORE;
   /* Stencil is unused. */
   attachment.stencilLoadOp  = VK_ATTACHMENT_LOAD_OP_DONT_CARE;
   attachment.stencilStoreOp = VK_ATTACHMENT_STORE_OP_DONT_CARE;
   /* Contents are fully overwritten, so the initial layout is
    * irrelevant; transition to a transfer source when done. */
   attachment.initialLayout  = VK_IMAGE_LAYOUT_UNDEFINED;
   attachment.finalLayout    = VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL;

   /* Reference to attachment 0 for the single subpass. */
   color_ref.attachment      = 0;
   color_ref.layout          = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;

   /* One graphics subpass with a single color attachment; all other
    * attachment arrays stay empty (zero-initialized above). */
   subpass.pipelineBindPoint    = VK_PIPELINE_BIND_POINT_GRAPHICS;
   subpass.colorAttachmentCount = 1;
   subpass.pColorAttachments    = &color_ref;

   rp_info.attachmentCount = 1;
   rp_info.pAttachments    = &attachment;
   rp_info.subpassCount    = 1;
   rp_info.pSubpasses      = &subpass;

   vkCreateRenderPass(vk->context->device,
         &rp_info, NULL, &vk->readback_render_pass);
}
2016-02-29 19:43:54 +01:00
/* Creates one image view and one framebuffer per swapchain image so
 * each backbuffer can be rendered into with vk->render_pass. */
static void vulkan_init_framebuffers(vk_t *vk)
{
   int i;

   for (i = 0; i < (int)vk->num_swapchain_images; i++)
   {
      VkImageViewCreateInfo view_info =
         { VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO };
      VkFramebufferCreateInfo fb_info =
         { VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO };

      vk->backbuffers[i].image = vk->context->swapchain_images[i];

      /* Slot without an actual swapchain image: park dummy handles
       * so the deinit path can skip them safely. */
      if (vk->context->swapchain_images[i] == VK_NULL_HANDLE)
      {
         vk->backbuffers[i].view        = VK_NULL_HANDLE;
         vk->backbuffers[i].framebuffer = VK_NULL_HANDLE;
         continue;
      }

      /* Create an image view which we can render into. */
      view_info.image                           = vk->backbuffers[i].image;
      view_info.viewType                        = VK_IMAGE_VIEW_TYPE_2D;
      view_info.format                          = vk->context->swapchain_format;
      view_info.components.r                    = VK_COMPONENT_SWIZZLE_R;
      view_info.components.g                    = VK_COMPONENT_SWIZZLE_G;
      view_info.components.b                    = VK_COMPONENT_SWIZZLE_B;
      view_info.components.a                    = VK_COMPONENT_SWIZZLE_A;
      view_info.subresourceRange.aspectMask     = VK_IMAGE_ASPECT_COLOR_BIT;
      view_info.subresourceRange.baseMipLevel   = 0;
      view_info.subresourceRange.levelCount     = 1;
      view_info.subresourceRange.baseArrayLayer = 0;
      view_info.subresourceRange.layerCount     = 1;

      vkCreateImageView(vk->context->device,
            &view_info, NULL, &vk->backbuffers[i].view);

      /* Wrap the view in a framebuffer matching the swapchain extent. */
      fb_info.renderPass      = vk->render_pass;
      fb_info.attachmentCount = 1;
      fb_info.pAttachments    = &vk->backbuffers[i].view;
      fb_info.width           = vk->context->swapchain_width;
      fb_info.height          = vk->context->swapchain_height;
      fb_info.layers          = 1;

      vkCreateFramebuffer(vk->context->device,
            &fb_info, NULL, &vk->backbuffers[i].framebuffer);
   }
}
2016-02-29 19:43:54 +01:00
/* Builds the descriptor-set layout shared by the internal pipelines
 * (one UBO, two combined image samplers, plus a storage image and a
 * storage buffer for the compute path), then the pipeline layout
 * wrapping it. */
static void vulkan_init_pipeline_layout(vk_t *vk)
{
   VkPipelineLayoutCreateInfo layout_info;
   VkDescriptorSetLayoutCreateInfo set_layout_info;
   /* { binding, descriptorType, descriptorCount,
    *   stageFlags, pImmutableSamplers } */
   const VkDescriptorSetLayoutBinding bindings[5] = {
      { 0, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER,         1,
          VK_SHADER_STAGE_VERTEX_BIT
        | VK_SHADER_STAGE_FRAGMENT_BIT
        | VK_SHADER_STAGE_COMPUTE_BIT,  NULL },
      { 1, VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, 1,
        VK_SHADER_STAGE_FRAGMENT_BIT,   NULL },
      { 2, VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, 1,
        VK_SHADER_STAGE_FRAGMENT_BIT,   NULL },
      { 3, VK_DESCRIPTOR_TYPE_STORAGE_IMAGE,          1,
        VK_SHADER_STAGE_COMPUTE_BIT,    NULL },
      { 4, VK_DESCRIPTOR_TYPE_STORAGE_BUFFER,         1,
        VK_SHADER_STAGE_COMPUTE_BIT,    NULL },
   };

   set_layout_info.sType        =
      VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO;
   set_layout_info.pNext        = NULL;
   set_layout_info.flags        = 0;
   set_layout_info.bindingCount = 5;
   set_layout_info.pBindings    = bindings;

   vkCreateDescriptorSetLayout(vk->context->device,
         &set_layout_info, NULL, &vk->pipelines.set_layout);

   layout_info.sType                  =
      VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO;
   layout_info.pNext                  = NULL;
   layout_info.flags                  = 0;
   layout_info.setLayoutCount         = 1;
   layout_info.pSetLayouts            = &vk->pipelines.set_layout;
   layout_info.pushConstantRangeCount = 0;
   layout_info.pPushConstantRanges    = NULL;

   vkCreatePipelineLayout(vk->context->device,
         &layout_info, NULL, &vk->pipelines.layout);
}
2018-04-12 02:20:17 +02:00
/* Builds every internal pipeline used by the driver:
 * font/glyph, alpha-blend, the display (menu shader) pipelines,
 * the optional HDR + HDR->SDR tonemap pipelines, and the
 * RGB565 -> RGBA8888 conversion compute pipeline.
 *
 * Fix: the two vertex-module switches previously carried three
 * byte-identical case bodies (2, 3 and 4 all selecting
 * alpha_blend_vert); these are now merged via fallthrough. */
static void vulkan_init_pipelines(vk_t *vk)
{
#ifdef VULKAN_HDR_SWAPCHAIN
   static const uint32_t hdr_frag[] =
#include "vulkan_shaders/hdr.frag.inc"
      ;

   static const uint32_t hdr_tonemap_frag[] =
#include "vulkan_shaders/hdr_tonemap.frag.inc"
      ;
#endif /* VULKAN_HDR_SWAPCHAIN */

   static const uint32_t alpha_blend_vert[] =
#include "vulkan_shaders/alpha_blend.vert.inc"
      ;
   static const uint32_t alpha_blend_frag[] =
#include "vulkan_shaders/alpha_blend.frag.inc"
      ;
   static const uint32_t font_frag[] =
#include "vulkan_shaders/font.frag.inc"
      ;
   static const uint32_t rgb565_to_rgba8888_comp[] =
#include "vulkan_shaders/rgb565_to_rgba8888.comp.inc"
      ;
   static const uint32_t pipeline_ribbon_vert[] =
#include "vulkan_shaders/pipeline_ribbon.vert.inc"
      ;
   static const uint32_t pipeline_ribbon_frag[] =
#include "vulkan_shaders/pipeline_ribbon.frag.inc"
      ;
   static const uint32_t pipeline_ribbon_simple_vert[] =
#include "vulkan_shaders/pipeline_ribbon_simple.vert.inc"
      ;
   static const uint32_t pipeline_ribbon_simple_frag[] =
#include "vulkan_shaders/pipeline_ribbon_simple.frag.inc"
      ;
   static const uint32_t pipeline_snow_simple_frag[] =
#include "vulkan_shaders/pipeline_snow_simple.frag.inc"
      ;
   static const uint32_t pipeline_snow_frag[] =
#include "vulkan_shaders/pipeline_snow.frag.inc"
      ;
   static const uint32_t pipeline_bokeh_frag[] =
#include "vulkan_shaders/pipeline_bokeh.frag.inc"
      ;

   int i;
   VkPipelineMultisampleStateCreateInfo multisample;
   VkPipelineInputAssemblyStateCreateInfo input_assembly = {
      VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO };
   VkPipelineVertexInputStateCreateInfo vertex_input = {
      VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO };
   VkPipelineRasterizationStateCreateInfo raster = {
      VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO };
   VkPipelineColorBlendAttachmentState blend_attachment = { 0 };
   VkPipelineColorBlendStateCreateInfo blend = {
      VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO };
   VkPipelineViewportStateCreateInfo viewport = {
      VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO };
   VkPipelineDepthStencilStateCreateInfo depth_stencil = {
      VK_STRUCTURE_TYPE_PIPELINE_DEPTH_STENCIL_STATE_CREATE_INFO };
   VkPipelineDynamicStateCreateInfo dynamic = {
      VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO };
   VkPipelineShaderStageCreateInfo shader_stages[2] = {
      { VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO },
      { VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO },
   };
   VkGraphicsPipelineCreateInfo pipe = {
      VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO };
   VkComputePipelineCreateInfo cpipe = {
      VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_CREATE_INFO };
   VkShaderModuleCreateInfo module_info = {
      VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO };
   VkVertexInputAttributeDescription attributes[3] = { { 0 } };
   VkVertexInputBindingDescription binding = { 0 };

   static const VkDynamicState dynamics[] = {
      VK_DYNAMIC_STATE_VIEWPORT,
      VK_DYNAMIC_STATE_SCISSOR,
   };

   vulkan_init_pipeline_layout(vk);

   /* Input assembly */
   input_assembly.topology = VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST;

   /* VAO state: struct vk_vertex is { vec2 pos; vec2 tex; vec4 color; } */
   attributes[0].location = 0;
   attributes[0].binding  = 0;
   attributes[0].format   = VK_FORMAT_R32G32_SFLOAT;
   attributes[0].offset   = 0;
   attributes[1].location = 1;
   attributes[1].binding  = 0;
   attributes[1].format   = VK_FORMAT_R32G32_SFLOAT;
   attributes[1].offset   = 2 * sizeof(float);
   attributes[2].location = 2;
   attributes[2].binding  = 0;
   attributes[2].format   = VK_FORMAT_R32G32B32A32_SFLOAT;
   attributes[2].offset   = 4 * sizeof(float);

   binding.binding   = 0;
   binding.stride    = sizeof(struct vk_vertex);
   binding.inputRate = VK_VERTEX_INPUT_RATE_VERTEX;

   vertex_input.vertexBindingDescriptionCount   = 1;
   vertex_input.pVertexBindingDescriptions      = &binding;
   vertex_input.vertexAttributeDescriptionCount = 3;
   vertex_input.pVertexAttributeDescriptions    = attributes;

   /* Raster state */
   raster.polygonMode             = VK_POLYGON_MODE_FILL;
   raster.cullMode                = VK_CULL_MODE_NONE;
   raster.frontFace               = VK_FRONT_FACE_COUNTER_CLOCKWISE;
   raster.depthClampEnable        = VK_FALSE;
   raster.rasterizerDiscardEnable = VK_FALSE;
   raster.depthBiasEnable         = VK_FALSE;
   raster.lineWidth               = 1.0f;

   /* Blend state (blending enabled later for alpha-blended pipelines) */
   blend_attachment.blendEnable    = VK_FALSE;
   blend_attachment.colorWriteMask = 0xf;
   blend.attachmentCount           = 1;
   blend.pAttachments              = &blend_attachment;

   /* Viewport state (actual viewport/scissor set dynamically) */
   viewport.viewportCount = 1;
   viewport.scissorCount  = 1;

   /* Depth-stencil state: fully disabled */
   depth_stencil.depthTestEnable       = VK_FALSE;
   depth_stencil.depthWriteEnable      = VK_FALSE;
   depth_stencil.depthBoundsTestEnable = VK_FALSE;
   depth_stencil.stencilTestEnable     = VK_FALSE;
   depth_stencil.minDepthBounds        = 0.0f;
   depth_stencil.maxDepthBounds        = 1.0f;

   /* Multisample state: single-sampled */
   multisample.sType                 = VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO;
   multisample.pNext                 = NULL;
   multisample.flags                 = 0;
   multisample.rasterizationSamples  = VK_SAMPLE_COUNT_1_BIT;
   multisample.sampleShadingEnable   = VK_FALSE;
   multisample.minSampleShading      = 0.0f;
   multisample.pSampleMask           = NULL;
   multisample.alphaToCoverageEnable = VK_FALSE;
   multisample.alphaToOneEnable      = VK_FALSE;

   /* Dynamic state */
   dynamic.pDynamicStates    = dynamics;
   dynamic.dynamicStateCount = ARRAY_SIZE(dynamics);

   pipe.stageCount          = 2;
   pipe.pStages             = shader_stages;
   pipe.pVertexInputState   = &vertex_input;
   pipe.pInputAssemblyState = &input_assembly;
   pipe.pRasterizationState = &raster;
   pipe.pColorBlendState    = &blend;
   pipe.pMultisampleState   = &multisample;
   pipe.pViewportState      = &viewport;
   pipe.pDepthStencilState  = &depth_stencil;
   pipe.pDynamicState       = &dynamic;
   pipe.renderPass          = vk->render_pass;
   pipe.layout              = vk->pipelines.layout;

   /* Shared vertex shader for font/alpha-blend/display pipelines. */
   module_info.codeSize   = sizeof(alpha_blend_vert);
   module_info.pCode      = alpha_blend_vert;
   shader_stages[0].stage = VK_SHADER_STAGE_VERTEX_BIT;
   shader_stages[0].pName = "main";
   vkCreateShaderModule(vk->context->device,
         &module_info, NULL, &shader_stages[0].module);

   /* Standard premultiplied-style alpha blending for the UI pipelines. */
   blend_attachment.blendEnable         = VK_TRUE;
   blend_attachment.colorWriteMask      = 0xf;
   blend_attachment.srcColorBlendFactor = VK_BLEND_FACTOR_SRC_ALPHA;
   blend_attachment.dstColorBlendFactor = VK_BLEND_FACTOR_ONE_MINUS_SRC_ALPHA;
   blend_attachment.colorBlendOp        = VK_BLEND_OP_ADD;
   blend_attachment.srcAlphaBlendFactor = VK_BLEND_FACTOR_SRC_ALPHA;
   blend_attachment.dstAlphaBlendFactor = VK_BLEND_FACTOR_ONE_MINUS_SRC_ALPHA;
   blend_attachment.alphaBlendOp        = VK_BLEND_OP_ADD;

   /* Glyph pipeline */
   module_info.codeSize   = sizeof(font_frag);
   module_info.pCode      = font_frag;
   shader_stages[1].stage = VK_SHADER_STAGE_FRAGMENT_BIT;
   shader_stages[1].pName = "main";
   vkCreateShaderModule(vk->context->device,
         &module_info, NULL, &shader_stages[1].module);
   vkCreateGraphicsPipelines(vk->context->device, vk->pipelines.cache,
         1, &pipe, NULL, &vk->pipelines.font);
   vkDestroyShaderModule(vk->context->device, shader_stages[1].module, NULL);

   /* Alpha-blended pipeline. */
   module_info.codeSize   = sizeof(alpha_blend_frag);
   module_info.pCode      = alpha_blend_frag;
   shader_stages[1].stage = VK_SHADER_STAGE_FRAGMENT_BIT;
   shader_stages[1].pName = "main";
   vkCreateShaderModule(vk->context->device,
         &module_info, NULL, &shader_stages[1].module);
   vkCreateGraphicsPipelines(vk->context->device, vk->pipelines.cache,
         1, &pipe, NULL, &vk->pipelines.alpha_blend);

   /* Build display pipelines.
    * Bit 0 of the index selects blending, bit 1 selects strips. */
   for (i = 0; i < 4; i++)
   {
      input_assembly.topology = i & 2 ?
         VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP :
         VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST;
      blend_attachment.blendEnable = i & 1;
      vkCreateGraphicsPipelines(vk->context->device, vk->pipelines.cache,
            1, &pipe, NULL, &vk->display.pipelines[i]);
   }
   vkDestroyShaderModule(vk->context->device, shader_stages[1].module, NULL);

#ifdef VULKAN_HDR_SWAPCHAIN
   if (vk->context->flags & VK_CTX_FLAG_HDR_SUPPORT)
   {
      blend_attachment.blendEnable = VK_FALSE;

      /* HDR pipeline. */
      module_info.codeSize   = sizeof(hdr_frag);
      module_info.pCode      = hdr_frag;
      shader_stages[1].stage = VK_SHADER_STAGE_FRAGMENT_BIT;
      shader_stages[1].pName = "main";
      vkCreateShaderModule(vk->context->device,
            &module_info, NULL, &shader_stages[1].module);
      vkCreateGraphicsPipelines(vk->context->device, vk->pipelines.cache,
            1, &pipe, NULL, &vk->pipelines.hdr);

      /* Build display hdr pipelines. */
      for (i = 4; i < 6; i++)
      {
         input_assembly.topology = VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP;
         vkCreateGraphicsPipelines(vk->context->device, vk->pipelines.cache,
               1, &pipe, NULL, &vk->display.pipelines[i]);
      }
      vkDestroyShaderModule(vk->context->device, shader_stages[1].module, NULL);

      /* HDR->SDR tonemapping readback pipeline; renders into the
       * readback render pass rather than the swapchain pass. */
      module_info.codeSize   = sizeof(hdr_tonemap_frag);
      module_info.pCode      = hdr_tonemap_frag;
      shader_stages[1].stage = VK_SHADER_STAGE_FRAGMENT_BIT;
      shader_stages[1].pName = "main";
      vkCreateShaderModule(vk->context->device,
            &module_info, NULL, &shader_stages[1].module);
      pipe.renderPass = vk->readback_render_pass;
      vkCreateGraphicsPipelines(vk->context->device, vk->pipelines.cache,
            1, &pipe, NULL, &vk->pipelines.hdr_to_sdr);
      vkDestroyShaderModule(vk->context->device, shader_stages[1].module, NULL);
      /* Restore state shared with the pipelines built below. */
      pipe.renderPass              = vk->render_pass;
      blend_attachment.blendEnable = VK_TRUE;
   }
#endif /* VULKAN_HDR_SWAPCHAIN */

   vkDestroyShaderModule(vk->context->device, shader_stages[0].module, NULL);

   /* Other menu pipelines (slots 6..): ribbon, simple ribbon,
    * simple snow, snow, bokeh — two topology variants each. */
   for (i = 0; i < (int)ARRAY_SIZE(vk->display.pipelines) - 6; i++)
   {
      switch (i >> 1)
      {
         case 0:
            module_info.codeSize = sizeof(pipeline_ribbon_vert);
            module_info.pCode    = pipeline_ribbon_vert;
            break;
         case 1:
            module_info.codeSize = sizeof(pipeline_ribbon_simple_vert);
            module_info.pCode    = pipeline_ribbon_simple_vert;
            break;
         /* Snow/bokeh effects all share the alpha-blend vertex shader. */
         case 2: /* fallthrough */
         case 3: /* fallthrough */
         case 4:
            module_info.codeSize = sizeof(alpha_blend_vert);
            module_info.pCode    = alpha_blend_vert;
            break;
         default:
            break;
      }
      shader_stages[0].stage = VK_SHADER_STAGE_VERTEX_BIT;
      shader_stages[0].pName = "main";
      vkCreateShaderModule(vk->context->device,
            &module_info, NULL, &shader_stages[0].module);

      switch (i >> 1)
      {
         case 0:
            module_info.codeSize = sizeof(pipeline_ribbon_frag);
            module_info.pCode    = pipeline_ribbon_frag;
            break;
         case 1:
            module_info.codeSize = sizeof(pipeline_ribbon_simple_frag);
            module_info.pCode    = pipeline_ribbon_simple_frag;
            break;
         case 2:
            module_info.codeSize = sizeof(pipeline_snow_simple_frag);
            module_info.pCode    = pipeline_snow_simple_frag;
            break;
         case 3:
            module_info.codeSize = sizeof(pipeline_snow_frag);
            module_info.pCode    = pipeline_snow_frag;
            break;
         case 4:
            module_info.codeSize = sizeof(pipeline_bokeh_frag);
            module_info.pCode    = pipeline_bokeh_frag;
            break;
         default:
            break;
      }
      shader_stages[1].stage = VK_SHADER_STAGE_FRAGMENT_BIT;
      shader_stages[1].pName = "main";
      vkCreateShaderModule(vk->context->device,
            &module_info, NULL, &shader_stages[1].module);

      /* Ribbons use additive-style blending; the rest alpha-blend. */
      switch (i >> 1)
      {
         case 0:
         case 1:
            blend_attachment.srcColorBlendFactor = VK_BLEND_FACTOR_DST_COLOR;
            blend_attachment.dstColorBlendFactor = VK_BLEND_FACTOR_ONE;
            break;
         default:
            blend_attachment.srcColorBlendFactor = VK_BLEND_FACTOR_SRC_ALPHA;
            blend_attachment.dstColorBlendFactor = VK_BLEND_FACTOR_ONE_MINUS_SRC_ALPHA;
            break;
      }

      input_assembly.topology = i & 1 ?
         VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP :
         VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST;

      vkCreateGraphicsPipelines(vk->context->device, vk->pipelines.cache,
            1, &pipe, NULL, &vk->display.pipelines[6 + i]);

      vkDestroyShaderModule(vk->context->device, shader_stages[0].module, NULL);
      vkDestroyShaderModule(vk->context->device, shader_stages[1].module, NULL);
   }

   /* RGB565 -> RGBA8888 conversion compute pipeline. */
   cpipe.layout      = vk->pipelines.layout;
   cpipe.stage.sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO;
   cpipe.stage.pName = "main";
   cpipe.stage.stage = VK_SHADER_STAGE_COMPUTE_BIT;

   module_info.codeSize = sizeof(rgb565_to_rgba8888_comp);
   module_info.pCode    = rgb565_to_rgba8888_comp;
   vkCreateShaderModule(vk->context->device,
         &module_info, NULL, &cpipe.stage.module);
   vkCreateComputePipelines(vk->context->device, vk->pipelines.cache,
         1, &cpipe, NULL, &vk->pipelines.rgb565_to_rgba8888);
   vkDestroyShaderModule(vk->context->device, cpipe.stage.module, NULL);
}
2016-03-01 02:21:53 +01:00
/* Creates the four texture samplers (nearest, linear, and their
 * mipmapped variants) used throughout the driver. */
static void vulkan_init_samplers(vk_t *vk)
{
   VkSamplerCreateInfo sampler;

   /* State shared by all four samplers. */
   sampler.sType                   = VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO;
   sampler.pNext                   = NULL;
   sampler.flags                   = 0;
   sampler.addressModeU            = VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE;
   sampler.addressModeV            = VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE;
   sampler.addressModeW            = VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE;
   sampler.mipLodBias              = 0.0f;
   sampler.anisotropyEnable        = VK_FALSE;
   sampler.maxAnisotropy           = 1.0f;
   sampler.compareEnable           = VK_FALSE;
   sampler.minLod                  = 0.0f;
   sampler.maxLod                  = 0.0f;
   sampler.borderColor             = VK_BORDER_COLOR_FLOAT_OPAQUE_WHITE;
   sampler.unnormalizedCoordinates = VK_FALSE;

   /* Point sampling, base mip level only. */
   sampler.magFilter               = VK_FILTER_NEAREST;
   sampler.minFilter               = VK_FILTER_NEAREST;
   sampler.mipmapMode              = VK_SAMPLER_MIPMAP_MODE_NEAREST;
   vkCreateSampler(vk->context->device,
         &sampler, NULL, &vk->samplers.nearest);

   /* Bilinear, base mip level only. */
   sampler.magFilter               = VK_FILTER_LINEAR;
   sampler.minFilter               = VK_FILTER_LINEAR;
   vkCreateSampler(vk->context->device,
         &sampler, NULL, &vk->samplers.linear);

   /* Mipmapped variants may sample the whole mip chain. */
   sampler.maxLod                  = VK_LOD_CLAMP_NONE;
   sampler.magFilter               = VK_FILTER_NEAREST;
   sampler.minFilter               = VK_FILTER_NEAREST;
   sampler.mipmapMode              = VK_SAMPLER_MIPMAP_MODE_NEAREST;
   vkCreateSampler(vk->context->device,
         &sampler, NULL, &vk->samplers.mipmap_nearest);

   sampler.magFilter               = VK_FILTER_LINEAR;
   sampler.minFilter               = VK_FILTER_LINEAR;
   sampler.mipmapMode              = VK_SAMPLER_MIPMAP_MODE_LINEAR;
   vkCreateSampler(vk->context->device,
         &sampler, NULL, &vk->samplers.mipmap_linear);
}
2022-05-21 01:39:55 +02:00
/* Destroys every buffer node in the chain, frees the list nodes,
 * and resets the chain to its zeroed (empty) state. */
static void vulkan_buffer_chain_free(
      VkDevice device,
      struct vk_buffer_chain *chain)
{
   struct vk_buffer_node *node = chain->head;
   struct vk_buffer_node *next = NULL;

   for (; node; node = next)
   {
      /* Grab the link before the node is freed. */
      next = node->next;
      vulkan_destroy_buffer(device, &node->buffer);
      free(node);
   }
   memset(chain, 0, sizeof(*chain));
}
2016-02-16 20:24:00 +01:00
/* Frees the per-swapchain-image vertex and uniform buffer chains. */
static void vulkan_deinit_buffers(vk_t *vk)
{
   int idx;

   for (idx = 0; idx < (int)vk->num_swapchain_images; idx++)
   {
      vulkan_buffer_chain_free(vk->context->device,
            &vk->swapchain[idx].vbo);
      vulkan_buffer_chain_free(vk->context->device,
            &vk->swapchain[idx].ubo);
   }
}
/* Tears down the descriptor manager of every swapchain image. */
static void vulkan_deinit_descriptor_pool(vk_t *vk)
{
   int idx;

   for (idx = 0; idx < (int)vk->num_swapchain_images; idx++)
      vulkan_destroy_descriptor_manager(vk->context->device,
            &vk->swapchain[idx].descriptor_manager);
}
/* Allocates the per-swapchain streaming textures (software-rendered
 * cores only) plus a 1x1 black fallback texture. */
static void vulkan_init_textures(vk_t *vk)
{
   const uint32_t zero = 0;

   /* HW-rendered cores provide their own images; nothing to stream. */
   if (!(vk->flags & VK_FLAG_HW_ENABLE))
   {
      int idx;

      for (idx = 0; idx < (int)vk->num_swapchain_images; idx++)
      {
         vk->swapchain[idx].texture = vulkan_create_texture(
               vk, NULL, vk->tex_w, vk->tex_h, vk->tex_fmt,
               NULL, NULL, VULKAN_TEXTURE_STREAMED);

         /* Keep the streamed texture persistently mapped so frames
          * can be memcpy'd straight in. */
         {
            struct vk_texture *texture = &vk->swapchain[idx].texture;
            VK_MAP_PERSISTENT_TEXTURE(vk->context->device, texture);
         }

         /* Staging textures additionally need a device-local copy. */
         if (vk->swapchain[idx].texture.type == VULKAN_TEXTURE_STAGING)
            vk->swapchain[idx].texture_optimal = vulkan_create_texture(
                  vk, NULL, vk->tex_w, vk->tex_h, vk->tex_fmt,
                  NULL, NULL, VULKAN_TEXTURE_DYNAMIC);
      }
   }

   /* 1x1 opaque-black default texture for unbound samplers. */
   vk->default_texture = vulkan_create_texture(vk, NULL,
         1, 1, VK_FORMAT_B8G8R8A8_UNORM,
         &zero, NULL, VULKAN_TEXTURE_STATIC);
}
/* Destroys all samplers and per-swapchain textures, plus the
 * default fallback texture. */
static void vulkan_deinit_textures(vk_t *vk)
{
   int idx;
   video_driver_state_t *video_st = video_state_get_ptr();
   const void *cached_frame       = video_st->frame_cache_data;

   /* Avoid memcpying from a destroyed/unmapped texture later on. */
   if (vulkan_is_mapped_swapchain_texture_ptr(vk, cached_frame))
      video_st->frame_cache_data = NULL;

   vkDestroySampler(vk->context->device, vk->samplers.nearest,        NULL);
   vkDestroySampler(vk->context->device, vk->samplers.linear,         NULL);
   vkDestroySampler(vk->context->device, vk->samplers.mipmap_nearest, NULL);
   vkDestroySampler(vk->context->device, vk->samplers.mipmap_linear,  NULL);

   for (idx = 0; idx < (int)vk->num_swapchain_images; idx++)
   {
      if (vk->swapchain[idx].texture.memory != VK_NULL_HANDLE)
         vulkan_destroy_texture(vk->context->device,
               &vk->swapchain[idx].texture);

      if (vk->swapchain[idx].texture_optimal.memory != VK_NULL_HANDLE)
         vulkan_destroy_texture(vk->context->device,
               &vk->swapchain[idx].texture_optimal);
   }

   if (vk->default_texture.memory != VK_NULL_HANDLE)
      vulkan_destroy_texture(vk->context->device, &vk->default_texture);
}
2016-03-01 02:21:53 +01:00
/* Frees each swapchain image's command buffer (if allocated)
 * and destroys its command pool. */
static void vulkan_deinit_command_buffers(vk_t *vk)
{
   int idx;

   for (idx = 0; idx < (int)vk->num_swapchain_images; idx++)
   {
      if (vk->swapchain[idx].cmd)
         vkFreeCommandBuffers(vk->context->device,
               vk->swapchain[idx].cmd_pool, 1, &vk->swapchain[idx].cmd);

      vkDestroyCommandPool(vk->context->device,
            vk->swapchain[idx].cmd_pool, NULL);
   }
}
2022-05-19 15:28:26 +02:00
/* Destroys every pipeline built by vulkan_init_pipelines together
 * with the shared pipeline/descriptor-set layouts. */
static void vulkan_deinit_pipelines(vk_t *vk)
{
   int idx;

   vkDestroyPipelineLayout(vk->context->device,
         vk->pipelines.layout, NULL);
   vkDestroyDescriptorSetLayout(vk->context->device,
         vk->pipelines.set_layout, NULL);
   vkDestroyPipeline(vk->context->device,
         vk->pipelines.alpha_blend, NULL);
   vkDestroyPipeline(vk->context->device,
         vk->pipelines.font, NULL);
   vkDestroyPipeline(vk->context->device,
         vk->pipelines.rgb565_to_rgba8888, NULL);

#ifdef VULKAN_HDR_SWAPCHAIN
   /* HDR pipelines only exist when the context reported HDR support. */
   if (vk->context->flags & VK_CTX_FLAG_HDR_SUPPORT)
   {
      vkDestroyPipeline(vk->context->device,
            vk->pipelines.hdr, NULL);
      vkDestroyPipeline(vk->context->device,
            vk->pipelines.hdr_to_sdr, NULL);
   }
#endif /* VULKAN_HDR_SWAPCHAIN */

   for (idx = 0; idx < (int)ARRAY_SIZE(vk->display.pipelines); idx++)
      vkDestroyPipeline(vk->context->device,
            vk->display.pipelines[idx], NULL);
}
2016-03-01 02:21:53 +01:00
/* Destroys the per-backbuffer framebuffers and image views,
 * then the main render pass itself. */
static void vulkan_deinit_framebuffers(vk_t *vk)
{
   int idx;

   for (idx = 0; idx < (int)vk->num_swapchain_images; idx++)
   {
      /* Slots skipped during init hold VK_NULL_HANDLE. */
      if (vk->backbuffers[idx].framebuffer)
         vkDestroyFramebuffer(vk->context->device,
               vk->backbuffers[idx].framebuffer, NULL);

      if (vk->backbuffers[idx].view)
         vkDestroyImageView(vk->context->device,
               vk->backbuffers[idx].view, NULL);
   }

   vkDestroyRenderPass(vk->context->device, vk->render_pass, NULL);
}
2022-01-08 12:22:34 +00:00
# ifdef VULKAN_HDR_SWAPCHAIN
2024-05-13 18:10:25 +02:00
/* Destroys the render pass used for HDR readback.
 * Counterpart of vulkan_init_hdr_readback_render_pass; only called
 * when the context reported VK_CTX_FLAG_HDR_SUPPORT. */
static void vulkan_deinit_hdr_readback_render_pass(vk_t *vk)
{
   vkDestroyRenderPass(vk->context->device, vk->readback_render_pass, NULL);
}
2022-01-08 12:22:34 +00:00
static void vulkan_set_hdr_max_nits ( void * data , float max_nits )
{
2022-03-30 15:13:28 +02:00
vk_t * vk = ( vk_t * ) data ;
2022-01-08 12:22:34 +00:00
vulkan_hdr_uniform_t * mapped_ubo = ( vulkan_hdr_uniform_t * ) vk - > hdr . ubo . mapped ;
2022-03-30 15:13:28 +02:00
vk - > hdr . max_output_nits = max_nits ;
mapped_ubo - > max_nits = max_nits ;
2022-01-08 12:22:34 +00:00
}
static void vulkan_set_hdr_paper_white_nits ( void * data , float paper_white_nits )
{
vk_t * vk = ( vk_t * ) data ;
vulkan_hdr_uniform_t * mapped_ubo = ( vulkan_hdr_uniform_t * ) vk - > hdr . ubo . mapped ;
mapped_ubo - > paper_white_nits = paper_white_nits ;
}
static void vulkan_set_hdr_contrast ( void * data , float contrast )
{
vk_t * vk = ( vk_t * ) data ;
vulkan_hdr_uniform_t * mapped_ubo = ( vulkan_hdr_uniform_t * ) vk - > hdr . ubo . mapped ;
2022-03-30 15:13:28 +02:00
mapped_ubo - > contrast = contrast ;
2022-01-08 12:22:34 +00:00
}
/* Toggle gamut expansion in the HDR shader; the UBO stores the
 * flag as a float (1.0 = on, 0.0 = off). */
static void vulkan_set_hdr_expand_gamut(void *data, bool expand_gamut)
{
   vulkan_hdr_uniform_t *ubo =
      (vulkan_hdr_uniform_t*)((vk_t*)data)->hdr.ubo.mapped;
   ubo->expand_gamut         = expand_gamut ? 1.0f : 0.0f;
}
/* Toggle the inverse tonemapper in the HDR shader; stored as a
 * float flag (1.0 = on, 0.0 = off) in the mapped UBO. */
static void vulkan_set_hdr_inverse_tonemap(vk_t *vk, bool inverse_tonemap)
{
   vulkan_hdr_uniform_t *ubo =
      (vulkan_hdr_uniform_t*)vk->hdr.ubo.mapped;
   ubo->inverse_tonemap      = inverse_tonemap ? 1.0f : 0.0f;
}
/* Toggle the HDR10 colorspace conversion in the HDR shader;
 * stored as a float flag (1.0 = on, 0.0 = off) in the mapped UBO. */
static void vulkan_set_hdr10(vk_t *vk, bool hdr10)
{
   vulkan_hdr_uniform_t *ubo =
      (vulkan_hdr_uniform_t*)vk->hdr.ubo.mapped;
   ubo->hdr10                = hdr10 ? 1.0f : 0.0f;
}
2022-01-10 05:26:02 +00:00
# endif /* VULKAN_HDR_SWAPCHAIN */
2022-01-08 12:22:34 +00:00
2016-02-16 20:24:00 +01:00
/* Creates the stock (pass-through) filter chain for the current
 * swapchain/render-pass state and stores it in vk->filter_chain.
 *
 * Returns false if there is no context or chain creation fails.
 * When HDR is enabled, inspects the chain's final render-target format
 * to decide whether the inverse tonemapper / HDR10 conversion steps
 * should run in the final HDR blit. */
static bool vulkan_init_default_filter_chain(vk_t *vk)
{
   struct vulkan_filter_chain_create_info info;

   if (!vk->context)
      return false;

   info.device                = vk->context->device;
   info.gpu                   = vk->context->gpu;
   info.memory_properties     = &vk->context->memory_properties;
   info.pipeline_cache        = vk->pipelines.cache;
   info.queue                 = vk->context->queue;
   /* Use the command pool of the frame currently in flight. */
   info.command_pool          = vk->swapchain[vk->context->current_frame_index].cmd_pool;
   info.num_passes            = 0;
   info.original_format       = VK_REMAP_TO_TEXFMT(vk->tex_fmt);
   info.max_input_size.width  = vk->tex_w;
   info.max_input_size.height = vk->tex_h;
   info.swapchain.viewport    = vk->vk_vp;
   info.swapchain.format      = vk->context->swapchain_format;
   info.swapchain.render_pass = vk->render_pass;
   info.swapchain.num_indices = vk->context->num_swapchain_images;

   /* Filtering mode of the stock chain follows the video settings. */
   vk->filter_chain           = vulkan_filter_chain_create_default(
         &info,
         vk->video.smooth
         ? GLSLANG_FILTER_CHAIN_LINEAR
         : GLSLANG_FILTER_CHAIN_NEAREST);

   if (!vk->filter_chain)
   {
      RARCH_ERR("Failed to create filter chain.\n");
      return false;
   }

#ifdef VULKAN_HDR_SWAPCHAIN
   if (vk->context->flags & VK_CTX_FLAG_HDR_ENABLE)
   {
      struct video_shader *shader_preset = vulkan_filter_chain_get_preset(
            vk->filter_chain);
      /* Render-target format of the last shader pass, if any. */
      VkFormat rt_format = (shader_preset && shader_preset->passes)
         ? vulkan_filter_chain_get_pass_rt_format(vk->filter_chain, shader_preset->passes - 1)
         : VK_FORMAT_UNDEFINED;
      bool emits_hdr10 = shader_preset && shader_preset->passes && vulkan_filter_chain_emits_hdr10(vk->filter_chain);

      if (vulkan_is_hdr10_format(rt_format))
      {
         /* If the last shader pass uses a RGB10A2 back buffer
          * and HDR has been enabled, assume we want to skip
          * the inverse tonemapper and HDR10 conversion.
          * If we just inherited HDR10 format based on backbuffer,
          * we would have used RGBA8, and thus we should do inverse tonemap as expected. */
         vulkan_set_hdr_inverse_tonemap(vk, !emits_hdr10);
         vulkan_set_hdr10(vk, !emits_hdr10);
         vk->flags |= VK_FLAG_SHOULD_RESIZE;
      }
      else if (rt_format == VK_FORMAT_R16G16B16A16_SFLOAT)
      {
         /* If the last shader pass uses a RGBA16 backbuffer
          * and HDR has been enabled, assume we want to
          * skip the inverse tonemapper */
         vulkan_set_hdr_inverse_tonemap(vk, false);
         vulkan_set_hdr10(vk, true);
         vk->flags |= VK_FLAG_SHOULD_RESIZE;
      }
      else
      {
         /* SDR-format backbuffer: run the full inverse tonemap +
          * HDR10 conversion in the final blit. */
         vulkan_set_hdr_inverse_tonemap(vk, true);
         vulkan_set_hdr10(vk, true);
      }
   }
#endif /* VULKAN_HDR_SWAPCHAIN */

   return true;
}
/* Creates a filter chain from a shader preset file (slangp) and stores
 * it in vk->filter_chain. Returns false if preset creation fails.
 *
 * Mirrors vulkan_init_default_filter_chain, including the HDR
 * render-target-format inspection after creation.
 * NOTE(review): unlike the default-chain variant, this does not guard
 * against a NULL vk->context — callers appear to ensure it; confirm. */
static bool vulkan_init_filter_chain_preset(vk_t *vk, const char *shader_path)
{
   struct vulkan_filter_chain_create_info info;

   info.device                = vk->context->device;
   info.gpu                   = vk->context->gpu;
   info.memory_properties     = &vk->context->memory_properties;
   info.pipeline_cache        = vk->pipelines.cache;
   info.queue                 = vk->context->queue;
   /* Use the command pool of the frame currently in flight. */
   info.command_pool          = vk->swapchain[vk->context->current_frame_index].cmd_pool;
   info.num_passes            = 0;
   info.original_format       = VK_REMAP_TO_TEXFMT(vk->tex_fmt);
   info.max_input_size.width  = vk->tex_w;
   info.max_input_size.height = vk->tex_h;
   info.swapchain.viewport    = vk->vk_vp;
   info.swapchain.format      = vk->context->swapchain_format;
   info.swapchain.render_pass = vk->render_pass;
   info.swapchain.num_indices = vk->context->num_swapchain_images;

   vk->filter_chain           = vulkan_filter_chain_create_from_preset(
         &info, shader_path,
         vk->video.smooth
         ? GLSLANG_FILTER_CHAIN_LINEAR
         : GLSLANG_FILTER_CHAIN_NEAREST);

   if (!vk->filter_chain)
   {
      RARCH_ERR("[Vulkan]: Failed to create preset: \"%s\".\n", shader_path);
      return false;
   }

#ifdef VULKAN_HDR_SWAPCHAIN
   if (vk->context->flags & VK_CTX_FLAG_HDR_ENABLE)
   {
      struct video_shader *shader_preset = vulkan_filter_chain_get_preset(vk->filter_chain);
      /* Render-target format of the last shader pass, if any. */
      VkFormat rt_format = (shader_preset && shader_preset->passes)
         ? vulkan_filter_chain_get_pass_rt_format(vk->filter_chain, shader_preset->passes - 1)
         : VK_FORMAT_UNDEFINED;
      bool emits_hdr10 = shader_preset && shader_preset->passes && vulkan_filter_chain_emits_hdr10(vk->filter_chain);

      if (vulkan_is_hdr10_format(rt_format))
      {
         /* If the last shader pass uses a RGB10A2 back buffer
          * and HDR has been enabled, assume we want to skip
          * the inverse tonemapper and HDR10 conversion.
          * If we just inherited HDR10 format based on backbuffer,
          * we would have used RGBA8, and thus we should do inverse tonemap as expected. */
         vulkan_set_hdr_inverse_tonemap(vk, !emits_hdr10);
         vulkan_set_hdr10(vk, !emits_hdr10);
         vk->flags |= VK_FLAG_SHOULD_RESIZE;
      }
      else if (rt_format == VK_FORMAT_R16G16B16A16_SFLOAT)
      {
         /* If the last shader pass uses a RGBA16 backbuffer
          * and HDR has been enabled, assume we want to
          * skip the inverse tonemapper */
         vulkan_set_hdr_inverse_tonemap(vk, false);
         vulkan_set_hdr10(vk, true);
         vk->flags |= VK_FLAG_SHOULD_RESIZE;
      }
      else
      {
         /* SDR-format backbuffer: run the full inverse tonemap +
          * HDR10 conversion in the final blit. */
         vulkan_set_hdr_inverse_tonemap(vk, true);
         vulkan_set_hdr10(vk, true);
      }
   }
#endif /* VULKAN_HDR_SWAPCHAIN */

   return true;
}
/* Selects and creates the active filter chain:
 * - no preset configured            -> stock chain
 * - non-Slang preset                -> stock chain (only Slang supported)
 * - Slang preset that fails to load -> stock chain as fallback
 *
 * Returns false only when even the stock chain cannot be created.
 * (Previously the fallback's result was discarded and true was returned
 * unconditionally, which could leave vk->filter_chain NULL while
 * reporting success.) */
static bool vulkan_init_filter_chain(vk_t *vk)
{
   const char *shader_path      = video_shader_get_current_shader_preset();
   enum rarch_shader_type type  = video_shader_parse_type(shader_path);

   if (string_is_empty(shader_path))
   {
      RARCH_LOG("[Vulkan]: Loading stock shader.\n");
      return vulkan_init_default_filter_chain(vk);
   }

   if (type != RARCH_SHADER_SLANG)
   {
      RARCH_LOG("[Vulkan]: Only Slang shaders are supported, falling back to stock.\n");
      return vulkan_init_default_filter_chain(vk);
   }

   /* string_is_empty() above already rejects NULL shader_path. */
   if (!vulkan_init_filter_chain_preset(vk, shader_path))
      return vulkan_init_default_filter_chain(vk);

   return true;
}
/* Creates resources that live for the whole lifetime of the driver:
 * the pipeline cache, the staging command pool and a 4x4 opaque-white
 * "blank" texture used as a placeholder. */
static void vulkan_init_static_resources(vk_t *vk)
{
   int i;
   uint32_t blank[4 * 4];
   VkCommandPoolCreateInfo pool_info;
   VkPipelineCacheCreateInfo cache;

   /* Create the pipeline cache. */
   cache.sType                = VK_STRUCTURE_TYPE_PIPELINE_CACHE_CREATE_INFO;
   cache.pNext                = NULL;
   cache.flags                = 0;
   cache.initialDataSize      = 0;
   cache.pInitialData         = NULL;

   vkCreatePipelineCache(vk->context->device,
         &cache, NULL, &vk->pipelines.cache);

   /* Staging pool: buffers are reset individually, hence the
    * RESET_COMMAND_BUFFER flag. */
   pool_info.sType            = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO;
   pool_info.pNext            = NULL;
   pool_info.flags            = VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT;
   pool_info.queueFamilyIndex = vk->context->graphics_queue_index;

   vkCreateCommandPool(vk->context->device,
         &pool_info, NULL, &vk->staging_pool);

   /* All-ones pixels == opaque white in B8G8R8A8. */
   for (i = 0; i < 4 * 4; i++)
      blank[i] = -1u;

   vk->display.blank_texture = vulkan_create_texture(vk, NULL,
         4, 4, VK_FORMAT_B8G8R8A8_UNORM,
         blank, NULL, VULKAN_TEXTURE_STATIC);
}
2016-03-01 02:21:53 +01:00
/* Destroys everything created by vulkan_init_static_resources, plus
 * the HW-render bookkeeping arrays and any readback staging textures
 * that were created on demand. */
static void vulkan_deinit_static_resources(vk_t *vk)
{
   int i;
   vkDestroyPipelineCache(vk->context->device,
         vk->pipelines.cache, NULL);
   vulkan_destroy_texture(
         vk->context->device,
         &vk->display.blank_texture);

   vkDestroyCommandPool(vk->context->device,
         vk->staging_pool, NULL);

   /* Arrays grown by vulkan_set_image / vulkan_set_command_buffers;
    * free(NULL) is a no-op so no guards are needed. */
   free(vk->hw.cmd);
   free(vk->hw.wait_dst_stages);
   free(vk->hw.semaphores);

   /* Readback staging textures are created lazily, so only the
    * ones with backing memory exist. */
   for (i = 0; i < VULKAN_MAX_SWAPCHAIN_IMAGES; i++)
      if (vk->readback.staging[i].memory != VK_NULL_HANDLE)
         vulkan_destroy_texture(
               vk->context->device,
               &vk->readback.staging[i]);
}
/* Destroys any menu textures (linear and optimal-tiling copies)
 * that were created on demand. */
static void vulkan_deinit_menu(vk_t *vk)
{
   unsigned index;

   for (index = 0; index < VULKAN_MAX_SWAPCHAIN_IMAGES; index++)
   {
      if (vk->menu.textures[index].memory != VK_NULL_HANDLE)
         vulkan_destroy_texture(
               vk->context->device, &vk->menu.textures[index]);
      if (vk->menu.textures_optimal[index].memory != VK_NULL_HANDLE)
         vulkan_destroy_texture(
               vk->context->device, &vk->menu.textures_optimal[index]);
   }
}
2022-08-31 20:56:04 +02:00
# ifdef VULKAN_HDR_SWAPCHAIN
/* Releases all Vulkan objects owned by an HDR backbuffer image
 * (view, image, framebuffer, device memory) and zeroes the struct
 * so a double destroy becomes a harmless no-op (destroying
 * VK_NULL_HANDLE is valid in Vulkan). */
static void vulkan_destroy_hdr_buffer(VkDevice device, struct vk_image *img)
{
   vkDestroyImageView(device, img->view, NULL);
   vkDestroyImage(device, img->image, NULL);
   vkDestroyFramebuffer(device, img->framebuffer, NULL);
   vkFreeMemory(device, img->memory, NULL);
   memset(img, 0, sizeof(*img));
}
# endif
2016-02-16 20:24:00 +01:00
/* Full driver teardown. Waits for the GPU to go idle, destroys all
 * driver-owned Vulkan objects (order matters: pipelines/framebuffers
 * before the static resources they were built from), releases the
 * context driver, and finally frees the vk_t itself. Safe to call
 * with data == NULL. */
static void vulkan_free(void *data)
{
   vk_t *vk = (vk_t*)data;
   if (!vk)
      return;

   if (vk->context && vk->context->device)
   {
      /* The queue may be shared with a HW-rendering core, so take
       * the queue lock around the idle wait. */
#ifdef HAVE_THREADS
      slock_lock(vk->context->queue_lock);
#endif
      vkQueueWaitIdle(vk->context->queue);
#ifdef HAVE_THREADS
      slock_unlock(vk->context->queue_lock);
#endif
      vulkan_deinit_pipelines(vk);
      vulkan_deinit_framebuffers(vk);
      vulkan_deinit_descriptor_pool(vk);
      vulkan_deinit_textures(vk);
      vulkan_deinit_buffers(vk);
      vulkan_deinit_command_buffers(vk);

      /* No need to init this since textures are create on-demand. */
      vulkan_deinit_menu(vk);

      font_driver_free_osd();

      vulkan_deinit_static_resources(vk);
#ifdef HAVE_OVERLAY
      vulkan_overlay_free(vk);
#endif

      if (vk->filter_chain)
         vulkan_filter_chain_free((vulkan_filter_chain_t*)vk->filter_chain);

#ifdef VULKAN_HDR_SWAPCHAIN
      /* HDR objects only exist when the context reported support. */
      if (vk->context->flags & VK_CTX_FLAG_HDR_SUPPORT)
      {
         vulkan_destroy_buffer(vk->context->device, &vk->hdr.ubo);
         vulkan_destroy_hdr_buffer(vk->context->device, &vk->main_buffer);
         vulkan_destroy_hdr_buffer(vk->context->device, &vk->readback_image);
         vulkan_deinit_hdr_readback_render_pass(vk);
         video_driver_unset_hdr_support();
      }
#endif /* VULKAN_HDR_SWAPCHAIN */

      /* Destroy the windowing context last; it owns the device. */
      if (vk->ctx_driver && vk->ctx_driver->destroy)
         vk->ctx_driver->destroy(vk->ctx_data);
      video_context_driver_free();
   }

   scaler_ctx_gen_reset(&vk->readback.scaler_bgr);
   scaler_ctx_gen_reset(&vk->readback.scaler_rgb);
   free(vk);
}
static uint32_t vulkan_get_sync_index ( void * handle )
{
vk_t * vk = ( vk_t * ) handle ;
2020-06-06 13:23:24 +02:00
return vk - > context - > current_frame_index ;
2016-02-16 20:24:00 +01:00
}
static uint32_t vulkan_get_sync_index_mask ( void * handle )
{
vk_t * vk = ( vk_t * ) handle ;
return ( 1 < < vk - > context - > num_swapchain_images ) - 1 ;
}
static void vulkan_set_image ( void * handle ,
const struct retro_vulkan_image * image ,
uint32_t num_semaphores ,
2016-06-25 11:39:52 +02:00
const VkSemaphore * semaphores ,
uint32_t src_queue_family )
2016-02-16 20:24:00 +01:00
{
2016-02-20 20:15:46 +01:00
vk_t * vk = ( vk_t * ) handle ;
2016-02-16 20:24:00 +01:00
2016-02-20 20:15:46 +01:00
vk - > hw . image = image ;
2016-02-16 20:24:00 +01:00
vk - > hw . num_semaphores = num_semaphores ;
if ( num_semaphores > 0 )
{
2022-12-04 15:29:48 +01:00
int i ;
2020-06-06 14:24:13 +02:00
/* Allocate one extra in case we need to use WSI acquire semaphores. */
VkPipelineStageFlags * stage_flags = ( VkPipelineStageFlags * ) realloc ( vk - > hw . wait_dst_stages ,
sizeof ( VkPipelineStageFlags ) * ( vk - > hw . num_semaphores + 1 ) ) ;
VkSemaphore * new_semaphores = ( VkSemaphore * ) realloc ( vk - > hw . semaphores ,
sizeof ( VkSemaphore ) * ( vk - > hw . num_semaphores + 1 ) ) ;
2016-02-20 20:15:46 +01:00
2019-01-10 21:47:15 +01:00
vk - > hw . wait_dst_stages = stage_flags ;
2023-01-09 00:50:43 +01:00
vk - > hw . semaphores = new_semaphores ;
2016-02-16 20:24:00 +01:00
2022-12-22 21:36:32 +01:00
for ( i = 0 ; i < ( int ) vk - > hw . num_semaphores ; i + + )
2020-06-06 14:24:13 +02:00
{
2016-02-16 20:24:00 +01:00
vk - > hw . wait_dst_stages [ i ] = VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT ;
2022-11-02 21:34:00 +01:00
vk - > hw . semaphores [ i ] = semaphores [ i ] ;
2020-06-06 14:24:13 +02:00
}
2016-06-25 11:39:52 +02:00
2022-11-02 21:34:00 +01:00
vk - > flags | = VK_FLAG_HW_VALID_SEMAPHORE ;
vk - > hw . src_queue_family = src_queue_family ;
2016-02-16 20:24:00 +01:00
}
}
static void vulkan_wait_sync_index ( void * handle )
{
/* no-op. RetroArch already waits for this
* in gfx_ctx_swap_buffers ( ) . */
}
static void vulkan_set_command_buffers ( void * handle , uint32_t num_cmd ,
const VkCommandBuffer * cmd )
{
2016-02-20 20:15:46 +01:00
vk_t * vk = ( vk_t * ) handle ;
2016-02-16 20:24:00 +01:00
unsigned required_capacity = num_cmd + 1 ;
if ( required_capacity > vk - > hw . capacity_cmd )
{
2019-01-10 21:47:15 +01:00
VkCommandBuffer * hw_cmd = ( VkCommandBuffer * )
realloc ( vk - > hw . cmd ,
2016-02-16 20:24:00 +01:00
sizeof ( VkCommandBuffer ) * required_capacity ) ;
2016-02-20 20:15:46 +01:00
2023-01-09 00:50:43 +01:00
vk - > hw . cmd = hw_cmd ;
vk - > hw . capacity_cmd = required_capacity ;
2016-02-16 20:24:00 +01:00
}
2023-01-09 00:50:43 +01:00
vk - > hw . num_cmd = num_cmd ;
2016-02-16 20:24:00 +01:00
memcpy ( vk - > hw . cmd , cmd , sizeof ( VkCommandBuffer ) * num_cmd ) ;
}
/* libretro HW-render callback: lets the core serialize its own queue
 * submissions against the frontend's. No-op without thread support. */
static void vulkan_lock_queue(void *handle)
{
#ifdef HAVE_THREADS
   vk_t *vk = (vk_t*)handle;
   slock_lock(vk->context->queue_lock);
#endif
}
/* Counterpart of vulkan_lock_queue. No-op without thread support. */
static void vulkan_unlock_queue(void *handle)
{
#ifdef HAVE_THREADS
   vk_t *vk = (vk_t*)handle;
   slock_unlock(vk->context->queue_lock);
#endif
}
2016-06-26 18:58:59 +02:00
/* libretro HW-render callback: records the semaphore the frontend
 * should signal when the core's image has been consumed. */
static void vulkan_set_signal_semaphore(void *handle, VkSemaphore semaphore)
{
   ((vk_t*)handle)->hw.signal_semaphore = semaphore;
}
2016-02-16 20:24:00 +01:00
/* Populates the retro_hw_render_interface_vulkan that is handed to a
 * libretro core doing Vulkan hardware rendering. Does nothing unless
 * the core requested a Vulkan context. */
static void vulkan_init_hw_render(vk_t *vk)
{
   struct retro_hw_render_interface_vulkan *iface =
      &vk->hw.iface;
   struct retro_hw_render_callback *hwr =
      video_driver_get_hw_context();

   if (hwr->context_type != RETRO_HW_CONTEXT_VULKAN)
      return;

   vk->flags |= VK_FLAG_HW_ENABLE;

   iface->interface_type         = RETRO_HW_RENDER_INTERFACE_VULKAN;
   iface->interface_version      = RETRO_HW_RENDER_INTERFACE_VULKAN_VERSION;
   iface->instance               = vk->context->instance;
   iface->gpu                    = vk->context->gpu;
   iface->device                 = vk->context->device;

   /* Core shares our graphics queue. */
   iface->queue                  = vk->context->queue;
   iface->queue_index            = vk->context->graphics_queue_index;

   /* Callback table; 'handle' is passed back as first argument. */
   iface->handle                 = vk;
   iface->set_image              = vulkan_set_image;
   iface->get_sync_index         = vulkan_get_sync_index;
   iface->get_sync_index_mask    = vulkan_get_sync_index_mask;
   iface->wait_sync_index        = vulkan_wait_sync_index;
   iface->set_command_buffers    = vulkan_set_command_buffers;
   iface->lock_queue             = vulkan_lock_queue;
   iface->unlock_queue           = vulkan_unlock_queue;
   iface->set_signal_semaphore   = vulkan_set_signal_semaphore;

   iface->get_device_proc_addr   = vkGetDeviceProcAddr;
   iface->get_instance_proc_addr = vulkan_symbol_wrapper_instance_proc_addr();
}
2022-05-19 15:28:26 +02:00
static void vulkan_init_readback ( vk_t * vk , settings_t * settings )
2016-02-16 20:24:00 +01:00
{
/* Only bother with this if we're doing GPU recording.
2021-11-10 02:34:04 +01:00
* Check recording_st - > enable and not
2017-12-11 23:55:31 -08:00
* driver . recording_data , because recording is
2016-02-16 20:24:00 +01:00
* not initialized yet .
*/
2023-08-14 23:36:19 +02:00
recording_state_t
2021-11-10 02:34:04 +01:00
* recording_st = recording_state_get_ptr ( ) ;
bool recording_enabled = recording_st - > enable ;
2020-02-18 14:51:40 +01:00
bool video_gpu_record = settings - > bools . video_gpu_record ;
2016-02-16 20:24:00 +01:00
2022-11-02 21:34:00 +01:00
if ( ! ( video_gpu_record & & recording_enabled ) )
{
vk - > flags & = ~ VK_FLAG_READBACK_STREAMED ;
2016-02-16 20:24:00 +01:00
return ;
2022-11-02 21:34:00 +01:00
}
vk - > flags | = VK_FLAG_READBACK_STREAMED ;
2016-02-16 20:24:00 +01:00
2019-02-15 20:25:31 +01:00
vk - > readback . scaler_bgr . in_width = vk - > vp . width ;
vk - > readback . scaler_bgr . in_height = vk - > vp . height ;
vk - > readback . scaler_bgr . out_width = vk - > vp . width ;
vk - > readback . scaler_bgr . out_height = vk - > vp . height ;
vk - > readback . scaler_bgr . in_fmt = SCALER_FMT_ARGB8888 ;
vk - > readback . scaler_bgr . out_fmt = SCALER_FMT_BGR24 ;
vk - > readback . scaler_bgr . scaler_type = SCALER_TYPE_POINT ;
vk - > readback . scaler_rgb . in_width = vk - > vp . width ;
vk - > readback . scaler_rgb . in_height = vk - > vp . height ;
vk - > readback . scaler_rgb . out_width = vk - > vp . width ;
vk - > readback . scaler_rgb . out_height = vk - > vp . height ;
vk - > readback . scaler_rgb . in_fmt = SCALER_FMT_ABGR8888 ;
vk - > readback . scaler_rgb . out_fmt = SCALER_FMT_BGR24 ;
vk - > readback . scaler_rgb . scaler_type = SCALER_TYPE_POINT ;
if ( ! scaler_ctx_gen_filter ( & vk - > readback . scaler_bgr ) )
{
2022-11-02 21:34:00 +01:00
vk - > flags & = ~ VK_FLAG_READBACK_STREAMED ;
2019-02-15 20:25:31 +01:00
RARCH_ERR ( " [Vulkan]: Failed to initialize scaler context. \n " ) ;
}
2016-02-16 20:24:00 +01:00
2019-02-15 20:25:31 +01:00
if ( ! scaler_ctx_gen_filter ( & vk - > readback . scaler_rgb ) )
2016-02-16 20:24:00 +01:00
{
2022-11-02 21:34:00 +01:00
vk - > flags & = ~ VK_FLAG_READBACK_STREAMED ;
2016-02-16 20:24:00 +01:00
RARCH_ERR ( " [Vulkan]: Failed to initialize scaler context. \n " ) ;
}
}
2017-04-24 12:25:14 +02:00
static void * vulkan_init ( const video_info_t * video ,
2019-07-27 02:21:24 +02:00
input_driver_t * * input ,
2016-02-16 20:24:00 +01:00
void * * input_data )
{
2016-03-29 15:28:13 +02:00
unsigned full_x , full_y ;
2016-02-20 20:15:46 +01:00
unsigned win_width ;
unsigned win_height ;
2020-07-27 14:33:21 +02:00
unsigned mode_width = 0 ;
unsigned mode_height = 0 ;
2018-09-12 00:07:43 +02:00
int interval = 0 ;
2016-02-20 20:15:46 +01:00
unsigned temp_width = 0 ;
unsigned temp_height = 0 ;
2024-06-13 21:20:19 +02:00
bool force_fullscreen = false ;
2016-02-20 20:15:46 +01:00
const gfx_ctx_driver_t * ctx_driver = NULL ;
2020-08-03 00:47:58 +02:00
settings_t * settings = config_get_ptr ( ) ;
2022-03-30 15:13:28 +02:00
# ifdef VULKAN_HDR_SWAPCHAIN
vulkan_hdr_uniform_t * mapped_ubo = NULL ;
# endif
2016-02-20 20:15:46 +01:00
vk_t * vk = ( vk_t * ) calloc ( 1 , sizeof ( * vk ) ) ;
2016-02-16 20:24:00 +01:00
if ( ! vk )
return NULL ;
2022-05-19 15:28:26 +02:00
ctx_driver = vulkan_get_context ( vk , settings ) ;
2016-02-16 20:24:00 +01:00
if ( ! ctx_driver )
2016-03-29 17:36:13 +02:00
{
RARCH_ERR ( " [Vulkan]: Failed to get Vulkan context. \n " ) ;
2016-02-16 20:24:00 +01:00
goto error ;
2016-03-29 17:36:13 +02:00
}
2016-02-16 20:24:00 +01:00
2022-01-08 12:22:34 +00:00
# ifdef VULKAN_HDR_SWAPCHAIN
vk - > hdr . max_output_nits = settings - > floats . video_hdr_max_nits ;
vk - > hdr . min_output_nits = 0.001f ;
vk - > hdr . max_cll = 0.0f ;
vk - > hdr . max_fall = 0.0f ;
2022-01-10 05:26:02 +00:00
# endif /* VULKAN_HDR_SWAPCHAIN */
2022-01-08 12:22:34 +00:00
2018-10-15 01:26:54 +02:00
vk - > video = * video ;
vk - > ctx_driver = ctx_driver ;
2016-05-09 05:48:47 +02:00
video_context_driver_set ( ( const gfx_ctx_driver_t * ) ctx_driver ) ;
2023-08-14 23:36:19 +02:00
2021-12-04 15:21:12 +02:00
RARCH_LOG ( " [Vulkan]: Found vulkan context: \" %s \" . \n " , ctx_driver - > ident ) ;
2016-02-16 20:24:00 +01:00
2020-07-27 14:33:21 +02:00
if ( vk - > ctx_driver - > get_video_size )
vk - > ctx_driver - > get_video_size ( vk - > ctx_data ,
& mode_width , & mode_height ) ;
2024-06-13 21:20:19 +02:00
if ( ! video - > fullscreen & & ! vk - > ctx_driver - > has_windowed )
{
RARCH_DBG ( " [Vulkan]: Config requires windowed mode, but context driver does not support it. "
" Forcing fullscreen for this session. \n " ) ;
force_fullscreen = true ;
}
2020-07-27 14:33:21 +02:00
full_x = mode_width ;
full_y = mode_height ;
mode_width = 0 ;
mode_height = 0 ;
2016-02-16 20:24:00 +01:00
2021-12-04 15:21:12 +02:00
RARCH_LOG ( " [Vulkan]: Detecting screen resolution: %ux%u. \n " , full_x , full_y ) ;
2017-01-10 18:16:22 +01:00
interval = video - > vsync ? video - > swap_interval : 0 ;
2019-08-28 21:12:51 +02:00
if ( ctx_driver - > swap_interval )
{
bool adaptive_vsync_enabled = video_driver_test_all_flags (
GFX_CTX_FLAGS_ADAPTIVE_VSYNC ) & & video - > adaptive_vsync ;
if ( adaptive_vsync_enabled & & interval = = 1 )
interval = - 1 ;
ctx_driver - > swap_interval ( vk - > ctx_data , interval ) ;
}
2016-02-16 20:24:00 +01:00
win_width = video - > width ;
win_height = video - > height ;
if ( video - > fullscreen & & ( win_width = = 0 ) & & ( win_height = = 0 ) )
{
2016-03-29 15:28:13 +02:00
win_width = full_x ;
win_height = full_y ;
2016-02-16 20:24:00 +01:00
}
2024-06-13 21:20:19 +02:00
/* If fullscreen had to be forced, video->width/height is incorrect */
else if ( force_fullscreen )
{
win_width = settings - > uints . video_fullscreen_x ;
win_height = settings - > uints . video_fullscreen_y ;
}
2016-02-16 20:24:00 +01:00
2020-07-27 11:08:34 +02:00
if ( ! vk - > ctx_driver - > set_video_mode
| | ! vk - > ctx_driver - > set_video_mode ( vk - > ctx_data ,
2024-06-13 21:20:19 +02:00
win_width , win_height , ( video - > fullscreen | | force_fullscreen ) ) )
2016-03-29 17:36:13 +02:00
{
RARCH_ERR ( " [Vulkan]: Failed to set video mode. \n " ) ;
2016-02-16 20:24:00 +01:00
goto error ;
2016-03-29 17:36:13 +02:00
}
2016-02-16 20:24:00 +01:00
2020-07-27 14:33:21 +02:00
if ( vk - > ctx_driver - > get_video_size )
vk - > ctx_driver - > get_video_size ( vk - > ctx_data ,
& mode_width , & mode_height ) ;
temp_width = mode_width ;
temp_height = mode_height ;
2016-02-16 20:24:00 +01:00
2017-04-24 12:25:07 +02:00
if ( temp_width ! = 0 & & temp_height ! = 0 )
2020-01-31 03:47:50 +01:00
video_driver_set_size ( temp_width , temp_height ) ;
2017-04-24 12:25:07 +02:00
video_driver_get_size ( & temp_width , & temp_height ) ;
2019-08-13 12:28:16 +02:00
vk - > video_width = temp_width ;
vk - > video_height = temp_height ;
2024-07-12 11:11:25 -07:00
vk - > translate_x = 0.0 ;
vk - > translate_y = 0.0 ;
2016-02-16 20:24:00 +01:00
2021-12-04 15:21:12 +02:00
RARCH_LOG ( " [Vulkan]: Using resolution %ux%u. \n " , temp_width , temp_height ) ;
2016-02-16 20:24:00 +01:00
2020-06-30 01:33:19 +02:00
if ( ! vk - > ctx_driver | | ! vk - > ctx_driver - > get_context_data )
{
RARCH_ERR ( " [Vulkan]: Failed to get context data. \n " ) ;
goto error ;
}
2018-10-15 01:26:54 +02:00
* ( void * * ) & vk - > context = vk - > ctx_driver - > get_context_data ( vk - > ctx_data ) ;
2016-02-16 20:24:00 +01:00
2022-11-02 21:34:00 +01:00
if ( video - > vsync )
vk - > flags | = VK_FLAG_VSYNC ;
else
vk - > flags & = ~ VK_FLAG_VSYNC ;
2024-06-13 21:20:19 +02:00
if ( video - > fullscreen | | force_fullscreen )
2022-11-02 21:34:00 +01:00
vk - > flags | = VK_FLAG_FULLSCREEN ;
else
vk - > flags & = ~ VK_FLAG_FULLSCREEN ;
2016-02-20 20:15:46 +01:00
vk - > tex_w = RARCH_SCALE_BASE * video - > input_scale ;
vk - > tex_h = RARCH_SCALE_BASE * video - > input_scale ;
2023-02-05 19:23:48 +01:00
vk - > tex_fmt = video - > rgb32 ? VK_FORMAT_B8G8R8A8_UNORM : VK_FORMAT_R5G6B5_UNORM_PACK16 ;
2022-11-02 21:34:00 +01:00
if ( video - > force_aspect )
vk - > flags | = VK_FLAG_KEEP_ASPECT ;
else
vk - > flags & = ~ VK_FLAG_KEEP_ASPECT ;
2016-03-21 09:29:41 +01:00
RARCH_LOG ( " [Vulkan]: Using %s format. \n " , video - > rgb32 ? " BGRA8888 " : " RGB565 " ) ;
2016-02-16 20:24:00 +01:00
2016-02-20 13:44:25 +01:00
/* Set the viewport to fix recording, since it needs to know
* the viewport sizes before we start running . */
vulkan_set_viewport ( vk , temp_width , temp_height , false , true ) ;
2022-01-08 12:22:34 +00:00
# ifdef VULKAN_HDR_SWAPCHAIN
2022-03-30 15:13:28 +02:00
vk - > hdr . ubo = vulkan_create_buffer ( vk - > context , sizeof ( vulkan_hdr_uniform_t ) , VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT ) ;
2022-01-08 12:22:34 +00:00
2022-03-30 15:13:28 +02:00
mapped_ubo = ( vulkan_hdr_uniform_t * ) vk - > hdr . ubo . mapped ;
2022-01-08 12:22:34 +00:00
2023-08-14 23:36:19 +02:00
mapped_ubo - > mvp = vk - > mvp_no_rot ;
2022-01-08 12:22:34 +00:00
mapped_ubo - > max_nits = settings - > floats . video_hdr_max_nits ;
mapped_ubo - > paper_white_nits = settings - > floats . video_hdr_paper_white_nits ;
mapped_ubo - > contrast = VIDEO_HDR_MAX_CONTRAST - settings - > floats . video_hdr_display_contrast ;
mapped_ubo - > expand_gamut = settings - > bools . video_hdr_expand_gamut ;
mapped_ubo - > inverse_tonemap = 1.0f ; /* Use this to turn on/off the inverse tonemap */
mapped_ubo - > hdr10 = 1.0f ; /* Use this to turn on/off the hdr10 */
2022-01-10 05:26:02 +00:00
# endif /* VULKAN_HDR_SWAPCHAIN */
2022-01-08 12:22:34 +00:00
2016-02-16 20:24:00 +01:00
vulkan_init_hw_render ( vk ) ;
2022-05-19 15:28:26 +02:00
if ( vk - > context )
{
2022-12-04 15:29:48 +01:00
int i ;
2023-02-05 19:23:48 +01:00
static const VkDescriptorPoolSize pool_sizes [ 4 ] = {
2023-07-17 17:30:17 +02:00
{ VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER , VULKAN_DESCRIPTOR_MANAGER_BLOCK_SETS } ,
2022-05-19 15:43:51 +02:00
{ VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER , VULKAN_DESCRIPTOR_MANAGER_BLOCK_SETS * 2 } ,
2023-07-17 17:30:17 +02:00
{ VK_DESCRIPTOR_TYPE_STORAGE_IMAGE , VULKAN_DESCRIPTOR_MANAGER_BLOCK_SETS } ,
{ VK_DESCRIPTOR_TYPE_STORAGE_BUFFER , VULKAN_DESCRIPTOR_MANAGER_BLOCK_SETS } ,
2022-05-19 15:43:51 +02:00
} ;
2023-07-17 17:30:17 +02:00
vulkan_init_static_resources ( vk ) ;
2022-05-19 15:28:26 +02:00
vk - > num_swapchain_images = vk - > context - > num_swapchain_images ;
vulkan_init_render_pass ( vk ) ;
2024-05-13 18:10:25 +02:00
# ifdef VULKAN_HDR_SWAPCHAIN
if ( vk - > context - > flags & VK_CTX_FLAG_HDR_SUPPORT )
vulkan_init_hdr_readback_render_pass ( vk ) ;
# endif
2022-05-19 15:28:26 +02:00
vulkan_init_framebuffers ( vk ) ;
vulkan_init_pipelines ( vk ) ;
vulkan_init_samplers ( vk ) ;
vulkan_init_textures ( vk ) ;
2022-12-22 21:36:32 +01:00
for ( i = 0 ; i < ( int ) vk - > num_swapchain_images ; i + + )
2022-05-19 15:28:26 +02:00
{
VkCommandPoolCreateInfo pool_info ;
VkCommandBufferAllocateInfo info ;
2022-05-19 15:43:51 +02:00
vk - > swapchain [ i ] . descriptor_manager =
vulkan_create_descriptor_manager (
vk - > context - > device ,
2023-02-05 19:23:48 +01:00
pool_sizes , 4 , vk - > pipelines . set_layout ) ;
2023-08-14 23:36:19 +02:00
vk - > swapchain [ i ] . vbo =
2022-05-19 15:43:51 +02:00
vulkan_buffer_chain_init (
2022-05-19 15:28:26 +02:00
VULKAN_BUFFER_BLOCK_SIZE , 16 ,
VK_BUFFER_USAGE_VERTEX_BUFFER_BIT ) ;
2023-08-14 23:36:19 +02:00
vk - > swapchain [ i ] . ubo =
2022-05-19 15:43:51 +02:00
vulkan_buffer_chain_init (
2022-05-19 15:28:26 +02:00
VULKAN_BUFFER_BLOCK_SIZE ,
vk - > context - > gpu_properties . limits . minUniformBufferOffsetAlignment ,
VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT ) ;
pool_info . sType =
VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO ;
pool_info . pNext = NULL ;
/* RESET_COMMAND_BUFFER_BIT allows command buffer to be reset. */
pool_info . flags =
VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT ;
pool_info . queueFamilyIndex = vk - > context - > graphics_queue_index ;
vkCreateCommandPool ( vk - > context - > device ,
& pool_info , NULL , & vk - > swapchain [ i ] . cmd_pool ) ;
info . sType =
VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO ;
info . pNext = NULL ;
info . commandPool = vk - > swapchain [ i ] . cmd_pool ;
info . level = VK_COMMAND_BUFFER_LEVEL_PRIMARY ;
info . commandBufferCount = 1 ;
vkAllocateCommandBuffers ( vk - > context - > device ,
& info , & vk - > swapchain [ i ] . cmd ) ;
}
}
2016-02-16 20:24:00 +01:00
if ( ! vulkan_init_filter_chain ( vk ) )
2016-03-29 17:36:13 +02:00
{
RARCH_ERR ( " [Vulkan]: Failed to init filter chain. \n " ) ;
2016-02-16 20:24:00 +01:00
goto error ;
2016-03-29 17:36:13 +02:00
}
2016-02-16 20:24:00 +01:00
2020-08-03 00:47:58 +02:00
if ( vk - > ctx_driver - > input_driver )
{
const char * joypad_name = settings - > arrays . input_joypad_driver ;
vk - > ctx_driver - > input_driver (
vk - > ctx_data , joypad_name ,
input , input_data ) ;
}
2016-02-16 20:24:00 +01:00
2017-01-10 18:16:22 +01:00
if ( video - > font_enable )
2020-02-16 21:59:03 +01:00
font_driver_init_osd ( vk ,
video ,
false ,
2017-04-29 16:52:52 +02:00
video - > is_threaded ,
FONT_DRIVER_RENDER_VULKAN_API ) ;
2016-02-16 20:24:00 +01:00
2024-02-21 17:39:26 +01:00
/* The MoltenVK driver needs this, particularly after driver reinit
Also it is required for HDR to not break during reinit , while not ideal it
is the simplest solution unless reinit tracking is done */
2023-01-05 00:33:52 -08:00
vk - > flags | = VK_FLAG_SHOULD_RESIZE ;
2022-05-19 15:28:26 +02:00
vulkan_init_readback ( vk , settings ) ;
2016-02-16 20:24:00 +01:00
return vk ;
error :
vulkan_free ( vk ) ;
return NULL ;
}
/* Rebuilds every swapchain-dependent resource after the context has
 * recreated its swapchain (e.g. after a resize or vsync change).
 * Must drain the GPU first, since the objects being destroyed may still
 * be referenced by in-flight command buffers. Finally notifies the
 * filter chain of the new swapchain properties. */
static void vulkan_check_swapchain(vk_t *vk)
{
   struct vulkan_filter_chain_swapchain_info filter_info;

   /* Wait for all pending GPU work before tearing anything down.
    * The queue is shared, so serialize access to it when threading
    * is compiled in. */
#ifdef HAVE_THREADS
   slock_lock(vk->context->queue_lock);
#endif
   vkQueueWaitIdle(vk->context->queue);
#ifdef HAVE_THREADS
   slock_unlock(vk->context->queue_lock);
#endif

   /* Destroy everything tied to the old swapchain images. */
   vulkan_deinit_pipelines(vk);
   vulkan_deinit_framebuffers(vk);
   vulkan_deinit_descriptor_pool(vk);
   vulkan_deinit_textures(vk);
   vulkan_deinit_buffers(vk);
   vulkan_deinit_command_buffers(vk);
#ifdef VULKAN_HDR_SWAPCHAIN
   if (vk->context->flags & VK_CTX_FLAG_HDR_SUPPORT)
      vulkan_deinit_hdr_readback_render_pass(vk);
#endif

   /* NOTE(review): vk->context is dereferenced above (queue_lock, queue,
    * flags) before this NULL check, so the check below can only matter if
    * the earlier derefs cannot be reached with a NULL context — confirm. */
   if (vk->context)
   {
      int i;
      /* Descriptor pool shape per frame: one UBO, two combined
       * image samplers, one storage image, one storage buffer slot
       * per block set. Mirrors the pool created at init time. */
      static const VkDescriptorPoolSize pool_sizes[4] = {
         { VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER,         VULKAN_DESCRIPTOR_MANAGER_BLOCK_SETS },
         { VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, VULKAN_DESCRIPTOR_MANAGER_BLOCK_SETS * 2 },
         { VK_DESCRIPTOR_TYPE_STORAGE_IMAGE,          VULKAN_DESCRIPTOR_MANAGER_BLOCK_SETS },
         { VK_DESCRIPTOR_TYPE_STORAGE_BUFFER,         VULKAN_DESCRIPTOR_MANAGER_BLOCK_SETS },
      };

      vk->num_swapchain_images = vk->context->num_swapchain_images;

      vulkan_init_render_pass(vk);
#ifdef VULKAN_HDR_SWAPCHAIN
      if (vk->context->flags & VK_CTX_FLAG_HDR_SUPPORT)
         vulkan_init_hdr_readback_render_pass(vk);
#endif
      vulkan_init_framebuffers(vk);
      vulkan_init_pipelines(vk);
      /* NOTE(review): samplers are re-created here without a matching
       * deinit in the teardown list above — confirm vulkan_init_samplers
       * tolerates being called again without leaking handles. */
      vulkan_init_samplers(vk);
      vulkan_init_textures(vk);

      /* Recreate per-frame streaming state: descriptor manager,
       * vertex/uniform buffer chains, command pool and one primary
       * command buffer per swapchain image. */
      for (i = 0; i < (int)vk->num_swapchain_images; i++)
      {
         VkCommandPoolCreateInfo pool_info;
         VkCommandBufferAllocateInfo info;

         vk->swapchain[i].descriptor_manager =
            vulkan_create_descriptor_manager(
                  vk->context->device,
                  pool_sizes, 4, vk->pipelines.set_layout);

         vk->swapchain[i].vbo = vulkan_buffer_chain_init(
               VULKAN_BUFFER_BLOCK_SIZE,
               16,
               VK_BUFFER_USAGE_VERTEX_BUFFER_BIT);
         /* UBO chain must respect the device's dynamic-offset
          * alignment requirement. */
         vk->swapchain[i].ubo = vulkan_buffer_chain_init(
               VULKAN_BUFFER_BLOCK_SIZE,
               vk->context->gpu_properties.limits.minUniformBufferOffsetAlignment,
               VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT);

         pool_info.sType            = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO;
         pool_info.pNext            = NULL;
         /* RESET_COMMAND_BUFFER_BIT allows command buffer to be reset. */
         pool_info.flags            =
            VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT;
         pool_info.queueFamilyIndex = vk->context->graphics_queue_index;
         vkCreateCommandPool(vk->context->device,
               &pool_info, NULL, &vk->swapchain[i].cmd_pool);

         info.sType              = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO;
         info.pNext              = NULL;
         info.commandPool        = vk->swapchain[i].cmd_pool;
         info.level              = VK_COMMAND_BUFFER_LEVEL_PRIMARY;
         info.commandBufferCount = 1;
         vkAllocateCommandBuffers(vk->context->device,
               &info, &vk->swapchain[i].cmd);
      }
   }

   /* Swapchain is valid again. */
   vk->context->flags &= ~VK_CTX_FLAG_INVALID_SWAPCHAIN;

   /* Tell the filter chain about the new swapchain so its internal
    * render targets and pipelines can be rebuilt to match. */
   filter_info.viewport    = vk->vk_vp;
   filter_info.format      = vk->context->swapchain_format;
   filter_info.render_pass = vk->render_pass;
   filter_info.num_indices = vk->context->num_swapchain_images;
   if (
         !vulkan_filter_chain_update_swapchain_info(
            (vulkan_filter_chain_t*)vk->filter_chain,
            &filter_info)
      )
      RARCH_ERR("Failed to update filter chain info. This will probably lead to a crash ...\n");
}
2020-02-16 22:26:07 +01:00
/* Toggles vsync via the context driver. 'state' == true means
 * non-blocking (vsync off). A swap interval of -1 requests adaptive
 * vsync when the context supports it. Changing the interval may
 * invalidate the swapchain, in which case it is rebuilt here. */
static void vulkan_set_nonblock_state(void *data, bool state,
      bool adaptive_vsync_enabled,
      unsigned swap_interval)
{
   vk_t *vk = (vk_t*)data;

   if (!vk)
      return;

   if (vk->ctx_driver->swap_interval)
   {
      /* Non-blocking -> interval 0; otherwise honor the configured
       * swap interval. */
      int interval = state ? 0 : (int)swap_interval;

      /* Adaptive vsync is expressed as interval -1. */
      if (interval == 1 && adaptive_vsync_enabled)
         interval = -1;

      vk->ctx_driver->swap_interval(vk->ctx_data, interval);
   }

   /* Changing vsync might require recreating the swapchain,
    * which means new VkImages to render into. */
   if (vk->context->flags & VK_CTX_FLAG_INVALID_SWAPCHAIN)
      vulkan_check_swapchain(vk);
}
static bool vulkan_alive ( void * data )
{
2016-02-20 20:15:46 +01:00
bool ret = false ;
bool quit = false ;
bool resize = false ;
vk_t * vk = ( vk_t * ) data ;
2019-08-13 12:28:16 +02:00
unsigned temp_width = vk - > video_width ;
unsigned temp_height = vk - > video_height ;
2016-02-16 20:24:00 +01:00
2018-10-14 17:49:10 +02:00
vk - > ctx_driver - > check_window ( vk - > ctx_data ,
2020-03-06 20:29:15 +01:00
& quit , & resize , & temp_width , & temp_height ) ;
2016-02-16 20:24:00 +01:00
2018-10-14 17:49:10 +02:00
if ( quit )
2022-11-02 21:34:00 +01:00
vk - > flags | = VK_FLAG_QUITTING ;
2018-10-14 17:49:10 +02:00
else if ( resize )
2022-11-02 21:34:00 +01:00
vk - > flags | = VK_FLAG_SHOULD_RESIZE ;
2016-02-16 20:24:00 +01:00
2022-11-02 21:34:00 +01:00
ret = ( ! ( vk - > flags & VK_FLAG_QUITTING ) ) ;
2016-02-16 20:24:00 +01:00
if ( temp_width ! = 0 & & temp_height ! = 0 )
2019-08-13 12:28:16 +02:00
{
2020-01-31 03:47:50 +01:00
video_driver_set_size ( temp_width , temp_height ) ;
2019-08-13 12:28:16 +02:00
vk - > video_width = temp_width ;
vk - > video_height = temp_height ;
}
2016-02-16 20:24:00 +01:00
return ret ;
}
/* Forwards the screensaver-suppression request to the context driver.
 * Returns false when the context does not implement the hook. */
static bool vulkan_suppress_screensaver(void *data, bool enable)
{
   vk_t *vk = (vk_t*)data;

   if (!vk->ctx_data || !vk->ctx_driver->suppress_screensaver)
      return false;

   return vk->ctx_driver->suppress_screensaver(vk->ctx_data, enable);
}
2016-02-20 20:15:46 +01:00
static bool vulkan_set_shader ( void * data ,
enum rarch_shader_type type , const char * path )
2016-02-16 20:24:00 +01:00
{
vk_t * vk = ( vk_t * ) data ;
if ( ! vk )
return false ;
if ( vk - > filter_chain )
2016-09-01 18:26:01 +02:00
vulkan_filter_chain_free ( ( vulkan_filter_chain_t * ) vk - > filter_chain ) ;
2016-02-16 20:24:00 +01:00
vk - > filter_chain = NULL ;
2019-06-04 21:52:58 +02:00
if ( ! string_is_empty ( path ) & & type ! = RARCH_SHADER_SLANG )
{
RARCH_WARN ( " [Vulkan]: Only Slang shaders are supported. Falling back to stock. \n " ) ;
path = NULL ;
}
if ( string_is_empty ( path ) )
2016-02-16 20:24:00 +01:00
{
vulkan_init_default_filter_chain ( vk ) ;
return true ;
}
if ( ! vulkan_init_filter_chain_preset ( vk , path ) )
{
RARCH_ERR ( " [Vulkan]: Failed to create filter chain: \" %s \" . Falling back to stock. \n " , path ) ;
vulkan_init_default_filter_chain ( vk ) ;
return false ;
}
return true ;
}
2016-02-20 20:15:46 +01:00
/* Rebuilds vk->mvp and vk->mvp_no_rot from the given orthographic
 * bounds. When allow_rotate is set, the display rotation stored in
 * vk->rotation is baked into vk->mvp. A final translation derived
 * from vk->translate_x/y (set by vulkan_set_viewport for negative
 * viewport offsets) is applied on top. */
static void vulkan_set_projection(vk_t *vk,
      struct video_ortho *ortho, bool allow_rotate)
{
   float radians, cosine, sine;
   /* Static on purpose: only the upper-left 2x2 rotation block is
    * rewritten on each rotated call; the rest stays zero with the
    * bottom-right element fixed at 1. */
   static math_matrix_4x4 rot     = {
      { 0.0f,  0.0f,  0.0f,  0.0f,
        0.0f,  0.0f,  0.0f,  0.0f,
        0.0f,  0.0f,  0.0f,  0.0f,
        0.0f,  0.0f,  0.0f,  1.0f }
   };
   /* Translation matrix: offsets are normalized against the current
    * viewport size so they are expressed in clip-space units. */
   math_matrix_4x4 trn            = {
      { 1.0f,  0.0f,  0.0f,  0.0f,
        0.0f,  1.0f,  0.0f,  0.0f,
        0.0f,  0.0f,  1.0f,  0.0f,
        vk->translate_x / (float)vk->vp.width,
        vk->translate_y / (float)vk->vp.height,
        0.0f,
        1.0f }
   };
   /* Identity scratch matrix; holds the (possibly rotated) projection
    * before translation is applied. */
   math_matrix_4x4 tmp            = {
      { 1.0f,  0.0f,  0.0f,  0.0f,
        0.0f,  1.0f,  0.0f,  0.0f,
        0.0f,  0.0f,  1.0f,  0.0f,
        0.0f,  0.0f,  0.0f,  1.0f }
   };

   /* Calculate projection. */
   matrix_4x4_ortho(vk->mvp_no_rot, ortho->left, ortho->right,
         ortho->bottom, ortho->top, ortho->znear, ortho->zfar);

   if (!allow_rotate)
      tmp = vk->mvp_no_rot;
   else
   {
      /* Build the 2D rotation for vk->rotation degrees and apply it
       * to the un-rotated projection. */
      radians                 = M_PI * vk->rotation / 180.0f;
      cosine                  = cosf(radians);
      sine                    = sinf(radians);
      MAT_ELEM_4X4(rot, 0, 0) = cosine;
      MAT_ELEM_4X4(rot, 0, 1) = -sine;
      MAT_ELEM_4X4(rot, 1, 0) = sine;
      MAT_ELEM_4X4(rot, 1, 1) = cosine;
      matrix_4x4_multiply(tmp, rot, vk->mvp_no_rot);
   }

   matrix_4x4_multiply(vk->mvp, trn, tmp);
}
static void vulkan_set_rotation ( void * data , unsigned rotation )
{
2016-02-20 20:15:46 +01:00
vk_t * vk = ( vk_t * ) data ;
2016-05-10 02:39:09 +02:00
struct video_ortho ortho = { 0 , 1 , 0 , 1 , - 1 , 1 } ;
2016-02-16 20:24:00 +01:00
if ( ! vk )
return ;
2016-12-11 10:53:02 +01:00
vk - > rotation = 270 * rotation ;
2016-02-16 20:24:00 +01:00
vulkan_set_projection ( vk , & ortho , true ) ;
}
2016-02-20 20:15:46 +01:00
static void vulkan_set_video_mode ( void * data ,
unsigned width , unsigned height ,
2016-02-16 20:24:00 +01:00
bool fullscreen )
{
2020-07-27 11:08:34 +02:00
vk_t * vk = ( vk_t * ) data ;
if ( vk - > ctx_driver - > set_video_mode )
vk - > ctx_driver - > set_video_mode ( vk - > ctx_data ,
width , height , fullscreen ) ;
2016-02-16 20:24:00 +01:00
}
/* Computes vk->vp (the logical viewport) from the window size and the
 * configured scaling mode (integer scale / keep-aspect / stretch),
 * derives the VkViewport (vk->vk_vp), and rebuilds the projection.
 * force_full bypasses aspect handling and fills the whole surface. */
static void vulkan_set_viewport(void *data, unsigned viewport_width,
      unsigned viewport_height, bool force_full, bool allow_rotate)
{
   float device_aspect      = (float)viewport_width / viewport_height;
   struct video_ortho ortho = {0, 1, 0, 1, -1, 1};
   settings_t *settings     = config_get_ptr();
   bool video_scale_integer = settings->bools.video_scale_integer;
   vk_t *vk                 = (vk_t*)data;

   /* Let the context correct the aspect ratio (e.g. for non-square
    * pixels) when it knows better than width/height alone. */
   if (vk->ctx_driver->translate_aspect)
      device_aspect = vk->ctx_driver->translate_aspect(
            vk->ctx_data, viewport_width, viewport_height);

   if (video_scale_integer && !force_full)
   {
      video_viewport_get_scaled_integer(&vk->vp,
            viewport_width, viewport_height,
            video_driver_get_aspect_ratio(),
            vk->flags & VK_FLAG_KEEP_ASPECT,
            true);
      viewport_width  = vk->vp.width;
      viewport_height = vk->vp.height;
   }
   else if ((vk->flags & VK_FLAG_KEEP_ASPECT) && !force_full)
   {
      video_viewport_get_scaled_aspect2(&vk->vp, viewport_width, viewport_height,
            true, device_aspect, video_driver_get_aspect_ratio());
      viewport_width  = vk->vp.width;
      viewport_height = vk->vp.height;
   }
   else
   {
      /* Stretch to fill the whole surface. */
      vk->vp.x      = 0;
      vk->vp.y      = 0;
      vk->vp.width  = viewport_width;
      vk->vp.height = viewport_height;
   }

   /* Vulkan viewports cannot have negative offsets; fold any negative
    * x/y into a translation that vulkan_set_projection applies to the
    * MVP instead, and clamp the viewport origin to zero. */
   if (vk->vp.x < 0)
   {
      vk->translate_x = (float)vk->vp.x;
      vk->vp.x        = 0.0;
   }
   else
      vk->translate_x = 0.0;

   if (vk->vp.y < 0)
   {
      vk->translate_y = (float)vk->vp.y;
      vk->vp.y        = 0.0;
   }
   else
      vk->translate_y = 0.0;

   vulkan_set_projection(vk, &ortho, allow_rotate);

   /* Set last backbuffer viewport. */
   if (!force_full)
   {
      vk->vp_out_width  = viewport_width;
      vk->vp_out_height = viewport_height;
   }

   /* Mirror the logical viewport into the VkViewport used for
    * vkCmdSetViewport. */
   vk->vk_vp.x        = (float)vk->vp.x;
   vk->vk_vp.y        = (float)vk->vp.y;
   vk->vk_vp.width    = (float)vk->vp.width;
   vk->vk_vp.height   = (float)vk->vp.height;
   vk->vk_vp.minDepth = 0.0f;
   vk->vk_vp.maxDepth = 1.0f;

   /* Dynamic state (viewport/scissor) must be re-recorded. */
   vk->tracker.dirty |= VULKAN_DIRTY_DYNAMIC_BIT;
}
2024-05-13 18:10:25 +02:00
/* Records commands that copy the current viewport region of
 * 'readback_image' into this frame's host-visible staging texture,
 * followed by a transfer->host memory barrier so the CPU can read the
 * pixels once the submission completes. The image must already be in
 * TRANSFER_SRC_OPTIMAL layout when this is recorded. */
static void vulkan_readback(vk_t *vk, struct vk_image *readback_image)
{
   VkBufferImageCopy region;
   struct vk_texture *staging;
   struct video_viewport vp;
   VkMemoryBarrier barrier;

   vp.x           = 0;
   vp.y           = 0;
   vp.width       = 0;
   vp.height      = 0;
   vp.full_width  = 0;
   vp.full_height = 0;
   /* Query the on-screen viewport; only that region is read back. */
   vulkan_viewport_info(vk, &vp);

   region.bufferOffset                    = 0;
   /* Zero row length / image height => tightly packed per the copied
    * extent. */
   region.bufferRowLength                 = 0;
   region.bufferImageHeight               = 0;
   region.imageSubresource.aspectMask     = VK_IMAGE_ASPECT_COLOR_BIT;
   region.imageSubresource.mipLevel       = 0;
   region.imageSubresource.baseArrayLayer = 0;
   region.imageSubresource.layerCount     = 1;
   region.imageOffset.x                   = vp.x;
   region.imageOffset.y                   = vp.y;
   region.imageOffset.z                   = 0;
   region.imageExtent.width               = vp.width;
   region.imageExtent.height              = vp.height;
   region.imageExtent.depth               = 1;

   /* One staging texture per frame in flight; reuse it when it was
    * already allocated (non-null memory handle). */
   staging  = &vk->readback.staging[vk->context->current_frame_index];
   *staging = vulkan_create_texture(vk,
         staging->memory != VK_NULL_HANDLE ? staging : NULL,
         vk->vp.width, vk->vp.height,
         VK_FORMAT_B8G8R8A8_UNORM, /* Formats don't matter for readback since it's a raw copy. */
         NULL, NULL, VULKAN_TEXTURE_READBACK);

   vkCmdCopyImageToBuffer(vk->cmd, readback_image->image,
         VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
         staging->buffer,
         1, &region);

   /* Make the data visible to host. */
   barrier.sType         = VK_STRUCTURE_TYPE_MEMORY_BARRIER;
   barrier.pNext         = NULL;
   barrier.srcAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
   barrier.dstAccessMask = VK_ACCESS_HOST_READ_BIT;
   vkCmdPipelineBarrier(vk->cmd,
         VK_PIPELINE_STAGE_TRANSFER_BIT,
         VK_PIPELINE_STAGE_HOST_BIT, 0,
         1, &barrier, 0, NULL, 0, NULL);
}
2022-05-19 15:28:26 +02:00
/* Records and submits a minimal command buffer that clears the current
 * backbuffer to opaque black and transitions it for presentation.
 * Used for black frame insertion. Semaphore wiring mirrors the normal
 * frame path: wait on the acquire semaphore and signal the per-image
 * present semaphore, but only while a swapchain is actually acquired.
 * NOTE(review): the video_info parameter is unused in this body —
 * presumably kept for signature symmetry with other frame helpers. */
static void vulkan_inject_black_frame(vk_t *vk, video_frame_info_t *video_info)
{
   VkSubmitInfo submit_info;
   VkCommandBufferBeginInfo begin_info;
   const VkImageSubresourceRange range = { VK_IMAGE_ASPECT_COLOR_BIT, 0, 1, 0, 1 };
   const VkClearColorValue clear_color = {{ 0.0f, 0.0f, 0.0f, 1.0f }};
   unsigned frame_index                = vk->context->current_frame_index;
   unsigned swapchain_index            = vk->context->current_swapchain_index;
   struct vk_per_frame *chain          = &vk->swapchain[frame_index];
   struct vk_image *backbuffer         = &vk->backbuffers[swapchain_index];

   vk->chain = chain;
   vk->cmd   = chain->cmd;

   begin_info.sType            = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
   begin_info.pNext            = NULL;
   begin_info.flags            = VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT;
   begin_info.pInheritanceInfo = NULL;
   vkResetCommandBuffer(vk->cmd, 0);
   vkBeginCommandBuffer(vk->cmd, &begin_info);

   /* UNDEFINED -> TRANSFER_DST: previous contents are discarded,
    * which is fine since the whole image is cleared next. */
   VULKAN_IMAGE_LAYOUT_TRANSITION(vk->cmd, backbuffer->image,
         VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
         0, VK_ACCESS_TRANSFER_WRITE_BIT,
         VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
         VK_PIPELINE_STAGE_TRANSFER_BIT);

   vkCmdClearColorImage(vk->cmd, backbuffer->image, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
         &clear_color, 1, &range);

   /* TRANSFER_DST -> PRESENT_SRC for the presentation engine. */
   VULKAN_IMAGE_LAYOUT_TRANSITION(vk->cmd, backbuffer->image,
         VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, VK_IMAGE_LAYOUT_PRESENT_SRC_KHR,
         VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_MEMORY_READ_BIT,
         VK_PIPELINE_STAGE_TRANSFER_BIT,
         VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT);

   vkEndCommandBuffer(vk->cmd);

   submit_info.sType                = VK_STRUCTURE_TYPE_SUBMIT_INFO;
   submit_info.pNext                = NULL;
   submit_info.waitSemaphoreCount   = 0;
   submit_info.pWaitSemaphores      = NULL;
   submit_info.pWaitDstStageMask    = NULL;
   submit_info.commandBufferCount   = 1;
   submit_info.pCommandBuffers      = &vk->cmd;
   submit_info.signalSemaphoreCount = 0;
   submit_info.pSignalSemaphores    = NULL;

   /* Signal the per-image semaphore that present will wait on, but
    * only when the swapchain is actually acquired. */
   if (
         (vk->context->flags & VK_CTX_FLAG_HAS_ACQUIRED_SWAPCHAIN)
         && (vk->context->swapchain_semaphores[swapchain_index] !=
            VK_NULL_HANDLE))
   {
      submit_info.signalSemaphoreCount = 1;
      submit_info.pSignalSemaphores    = &vk->context->swapchain_semaphores[swapchain_index];
   }

   /* Consume the acquire semaphore (one-shot): move it into the
    * per-frame wait slot and wait on it at the color-output stage. */
   if ((vk->context->flags & VK_CTX_FLAG_HAS_ACQUIRED_SWAPCHAIN)
         && (vk->context->swapchain_acquire_semaphore != VK_NULL_HANDLE))
   {
      static const VkPipelineStageFlags wait_stage =
         VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT;
      vk->context->swapchain_wait_semaphores[frame_index] =
         vk->context->swapchain_acquire_semaphore;
      vk->context->swapchain_acquire_semaphore = VK_NULL_HANDLE;
      submit_info.waitSemaphoreCount = 1;
      submit_info.pWaitSemaphores    = &vk->context->swapchain_wait_semaphores[frame_index];
      submit_info.pWaitDstStageMask  = &wait_stage;
   }

#ifdef HAVE_THREADS
   slock_lock(vk->context->queue_lock);
#endif
   vkQueueSubmit(vk->context->queue, 1,
         &submit_info, vk->context->swapchain_fences[frame_index]);
   vk->context->swapchain_fences_signalled[frame_index] = true;
#ifdef HAVE_THREADS
   slock_unlock(vk->context->queue_lock);
#endif
}
2023-08-14 23:36:19 +02:00
#if defined(HAVE_MENU)
/* VBO will be written to here. */
/* Records commands that draw one textured quad with the requested
 * pipeline, texture, sampler, MVP and color. Vertex and uniform data
 * are streamed from the current per-frame buffer chains; descriptor
 * sets are only (re)written when the cached tracker state does not
 * already cover the request.
 *
 * Fix: the original unconditionally allocated a UBO range here that
 * was immediately shadowed by the re-declared 'range' inside the
 * upload branch and never read — a dead allocation that wasted
 * sizeof(*quad->mvp) bytes of the UBO chain per quad. Removed. */
static void vulkan_draw_quad(vk_t *vk, const struct vk_draw_quad *quad)
{
   if (quad->texture && quad->texture->image)
      vulkan_transition_texture(vk, vk->cmd, quad->texture);

   if (quad->pipeline != vk->tracker.pipeline)
   {
      VkRect2D sci;
      vkCmdBindPipeline(vk->cmd,
            VK_PIPELINE_BIND_POINT_GRAPHICS, quad->pipeline);
      vk->tracker.pipeline = quad->pipeline;
      /* Changing pipeline invalidates dynamic state. */
      vk->tracker.dirty   |= VULKAN_DIRTY_DYNAMIC_BIT;

      if (vk->flags & VK_FLAG_TRACKER_USE_SCISSOR)
         sci = vk->tracker.scissor;
      else
      {
         /* No scissor -> viewport */
         sci.offset.x      = vk->vp.x;
         sci.offset.y      = vk->vp.y;
         sci.extent.width  = vk->vp.width;
         sci.extent.height = vk->vp.height;
      }

      vkCmdSetViewport(vk->cmd, 0, 1, &vk->vk_vp);
      vkCmdSetScissor(vk->cmd, 0, 1, &sci);
      vk->tracker.dirty &= ~VULKAN_DIRTY_DYNAMIC_BIT;
   }
   else if (vk->tracker.dirty & VULKAN_DIRTY_DYNAMIC_BIT)
   {
      VkRect2D sci;
      if (vk->flags & VK_FLAG_TRACKER_USE_SCISSOR)
         sci = vk->tracker.scissor;
      else
      {
         /* No scissor -> viewport */
         sci.offset.x      = vk->vp.x;
         sci.offset.y      = vk->vp.y;
         sci.extent.width  = vk->vp.width;
         sci.extent.height = vk->vp.height;
      }

      vkCmdSetViewport(vk->cmd, 0, 1, &vk->vk_vp);
      vkCmdSetScissor(vk->cmd, 0, 1, &sci);
      vk->tracker.dirty &= ~VULKAN_DIRTY_DYNAMIC_BIT;
   }

   /* Upload descriptors */
   {
      VkDescriptorSet set;

      /* NOTE(review): string_is_equal_fast triggers the upload when the
       * incoming MVP *matches* the cached one; an MVP change alone (same
       * view/sampler) would skip the upload. This matches the original
       * behavior and is kept as-is — confirm the intended condition. */
      if (
               string_is_equal_fast(quad->mvp,
                  &vk->tracker.mvp, sizeof(*quad->mvp))
            || quad->texture->view != vk->tracker.view
            || quad->sampler != vk->tracker.sampler)
      {
         /* Upload UBO */
         struct vk_buffer_range range;

         if (!vulkan_buffer_chain_alloc(vk->context, &vk->chain->ubo,
                  sizeof(*quad->mvp), &range))
            return;

         memcpy(range.data, quad->mvp, sizeof(*quad->mvp));

         set = vulkan_descriptor_manager_alloc(
               vk->context->device,
               &vk->chain->descriptor_manager);

         vulkan_write_quad_descriptors(
               vk->context->device,
               set,
               range.buffer,
               range.offset,
               sizeof(*quad->mvp),
               quad->texture,
               quad->sampler);

         vkCmdBindDescriptorSets(vk->cmd, VK_PIPELINE_BIND_POINT_GRAPHICS,
               vk->pipelines.layout, 0,
               1, &set, 0, NULL);

         vk->tracker.view    = quad->texture->view;
         vk->tracker.sampler = quad->sampler;
         vk->tracker.mvp     = *quad->mvp;
      }
   }

   /* Upload VBO */
   {
      struct vk_buffer_range range;
      if (!vulkan_buffer_chain_alloc(vk->context, &vk->chain->vbo,
               6 * sizeof(struct vk_vertex), &range))
         return;

      {
         struct vk_vertex *pv         = (struct vk_vertex*)range.data;
         const struct vk_color *color = &quad->color;
         VULKAN_WRITE_QUAD_VBO(pv, 0.0f, 0.0f, 1.0f, 1.0f, 0.0f, 0.0f, 1.0f, 1.0f, color);
      }

      vkCmdBindVertexBuffers(vk->cmd, 0, 1,
            &range.buffer, &range.offset);
   }

   /* Draw the quad */
   vkCmdDraw(vk->cmd, 6, 1, 0, 0);
}
#endif
2023-01-24 04:40:17 +01:00
2024-05-13 18:10:25 +02:00
/* Creates a complete render target: a 2D color image (sampleable and
 * usable as transfer src/dst), its device-local backing memory, an
 * image view, and a framebuffer bound to 'render_pass'. The result is
 * written into 'image', which is zeroed first.
 * NOTE(review): the image is created with the width/height parameters
 * but the framebuffer uses ctx->swapchain_width/height — confirm
 * callers always pass the swapchain dimensions, otherwise these can
 * disagree. */
static void vulkan_init_render_target(struct vk_image *image, uint32_t width, uint32_t height, VkFormat format, VkRenderPass render_pass, vulkan_context_t *ctx)
{
   VkMemoryRequirements mem_reqs;
   VkImageCreateInfo image_info;
   VkMemoryAllocateInfo alloc;
   VkImageViewCreateInfo view;
   VkFramebufferCreateInfo info;

   memset(image, 0, sizeof(struct vk_image));

   /* Create the image */
   image_info.sType                 = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
   image_info.pNext                 = NULL;
   image_info.flags                 = 0;
   image_info.imageType             = VK_IMAGE_TYPE_2D;
   image_info.format                = format;
   image_info.extent.width          = width;
   image_info.extent.height         = height;
   image_info.extent.depth          = 1;
   image_info.mipLevels             = 1;
   image_info.arrayLayers           = 1;
   image_info.samples               = VK_SAMPLE_COUNT_1_BIT;
   image_info.tiling                = VK_IMAGE_TILING_OPTIMAL;
   image_info.usage                 = VK_IMAGE_USAGE_SAMPLED_BIT |
      VK_IMAGE_USAGE_TRANSFER_SRC_BIT |
      VK_IMAGE_USAGE_TRANSFER_DST_BIT |
      VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
   image_info.sharingMode           = VK_SHARING_MODE_EXCLUSIVE;
   image_info.queueFamilyIndexCount = 0;
   image_info.pQueueFamilyIndices   = NULL;
   image_info.initialLayout         = VK_IMAGE_LAYOUT_UNDEFINED;

   vkCreateImage(ctx->device, &image_info, NULL, &image->image);
   vulkan_debug_mark_image(ctx->device, image->image);

   /* Allocate and bind device-local memory for the image. */
   vkGetImageMemoryRequirements(ctx->device, image->image, &mem_reqs);
   alloc.sType           = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
   alloc.pNext           = NULL;
   alloc.allocationSize  = mem_reqs.size;
   alloc.memoryTypeIndex = vulkan_find_memory_type(
         &ctx->memory_properties,
         mem_reqs.memoryTypeBits,
         VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT);
   vkAllocateMemory(ctx->device, &alloc, NULL, &image->memory);
   vulkan_debug_mark_memory(ctx->device, image->memory);
   vkBindImageMemory(ctx->device, image->image, image->memory, 0);

   /* Create an image view which we can render into. */
   view.sType                           = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO;
   view.pNext                           = NULL;
   view.flags                           = 0;
   view.image                           = image->image;
   view.viewType                        = VK_IMAGE_VIEW_TYPE_2D;
   view.format                          = format;
   view.components.r                    = VK_COMPONENT_SWIZZLE_R;
   view.components.g                    = VK_COMPONENT_SWIZZLE_G;
   view.components.b                    = VK_COMPONENT_SWIZZLE_B;
   view.components.a                    = VK_COMPONENT_SWIZZLE_A;
   view.subresourceRange.aspectMask     = VK_IMAGE_ASPECT_COLOR_BIT;
   view.subresourceRange.baseMipLevel   = 0;
   view.subresourceRange.levelCount     = 1;
   view.subresourceRange.baseArrayLayer = 0;
   view.subresourceRange.layerCount     = 1;
   vkCreateImageView(ctx->device, &view, NULL, &image->view);

   /* Create the framebuffer */
   info.sType           = VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO;
   info.pNext           = NULL;
   info.flags           = 0;
   info.renderPass      = render_pass;
   info.attachmentCount = 1;
   info.pAttachments    = &image->view;
   info.width           = ctx->swapchain_width;
   info.height          = ctx->swapchain_height;
   info.layers          = 1;
   vkCreateFramebuffer(ctx->device, &info, NULL, &image->framebuffer);
}
/* Records a full-screen pass that samples 'source_image' and renders
 * it into 'render_target' using the given HDR pipeline/render pass
 * (e.g. inverse-tonemap / HDR10 conversion). Uses the pre-mapped HDR
 * UBO for shader parameters and streams one quad from the per-frame
 * VBO chain. Viewport/scissor cover the whole swapchain. */
static void vulkan_run_hdr_pipeline(VkPipeline pipeline, VkRenderPass render_pass, const struct vk_image *source_image, struct vk_image *render_target, vk_t *vk)
{
   vulkan_hdr_uniform_t *mapped_ubo = (vulkan_hdr_uniform_t*)vk->hdr.ubo.mapped;
   VkRenderPassBeginInfo rp_info;
   VkClearValue clear_color;

   /* Refresh the MVP in the persistently-mapped UBO; the other HDR
    * parameters were written at init time. */
   mapped_ubo->mvp                   = vk->mvp_no_rot;

   rp_info.sType                     = VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO;
   rp_info.pNext                     = NULL;
   rp_info.renderPass                = render_pass;
   rp_info.framebuffer               = render_target->framebuffer;
   rp_info.renderArea.offset.x       = 0;
   rp_info.renderArea.offset.y       = 0;
   rp_info.renderArea.extent.width   = vk->context->swapchain_width;
   rp_info.renderArea.extent.height  = vk->context->swapchain_height;
   rp_info.clearValueCount           = 1;
   rp_info.pClearValues              = &clear_color;

   clear_color.color.float32[0]      = 0.0f;
   clear_color.color.float32[1]      = 0.0f;
   clear_color.color.float32[2]      = 0.0f;
   clear_color.color.float32[3]      = 0.0f;

   /* Begin render pass and set up viewport */
   vkCmdBeginRenderPass(vk->cmd, &rp_info, VK_SUBPASS_CONTENTS_INLINE);

   {
      /* Only rebind when the pipeline actually changed. */
      if (pipeline != vk->tracker.pipeline)
      {
         vkCmdBindPipeline(vk->cmd, VK_PIPELINE_BIND_POINT_GRAPHICS, pipeline);
         vk->tracker.pipeline = pipeline;
         /* Changing pipeline invalidates dynamic state. */
         vk->tracker.dirty   |= VULKAN_DIRTY_DYNAMIC_BIT;
      }
   }

   {
      /* Allocate a descriptor set and point it at the HDR UBO
       * (binding 0) and the source image (binding 2). */
      VkWriteDescriptorSet write;
      VkDescriptorImageInfo image_info;
      VkDescriptorSet set = vulkan_descriptor_manager_alloc(
            vk->context->device,
            &vk->chain->descriptor_manager);

      VULKAN_SET_UNIFORM_BUFFER(vk->context->device,
            set,
            0,
            vk->hdr.ubo.buffer,
            0,
            vk->hdr.ubo.size);

      image_info.sampler              = vk->samplers.nearest;
      image_info.imageView            = source_image->view;
      image_info.imageLayout          = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;

      write.sType                     = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
      write.pNext                     = NULL;
      write.dstSet                    = set;
      /* Binding 2 is the combined image sampler in the shared set
       * layout — TODO confirm against the set layout definition. */
      write.dstBinding                = 2;
      write.dstArrayElement           = 0;
      write.descriptorCount           = 1;
      write.descriptorType            = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
      write.pImageInfo                = &image_info;
      write.pBufferInfo               = NULL;
      write.pTexelBufferView          = NULL;

      vkUpdateDescriptorSets(vk->context->device, 1, &write, 0, NULL);

      vkCmdBindDescriptorSets(vk->cmd, VK_PIPELINE_BIND_POINT_GRAPHICS,
            vk->pipelines.layout, 0,
            1, &set, 0, NULL);

      vk->tracker.view    = source_image->view;
      vk->tracker.sampler = vk->samplers.nearest;
   }

   {
      /* Full-swapchain viewport and matching scissor. */
      VkViewport viewport;
      VkRect2D sci;
      viewport.x        = 0.0f;
      viewport.y        = 0.0f;
      viewport.width    = vk->context->swapchain_width;
      viewport.height   = vk->context->swapchain_height;
      viewport.minDepth = 0.0f;
      viewport.maxDepth = 1.0f;
      sci.offset.x      = (int32_t)viewport.x;
      sci.offset.y      = (int32_t)viewport.y;
      sci.extent.width  = (uint32_t)viewport.width;
      sci.extent.height = (uint32_t)viewport.height;
      vkCmdSetViewport(vk->cmd, 0, 1, &viewport);
      vkCmdSetScissor(vk->cmd, 0, 1, &sci);
   }

   /* Upload VBO */
   {
      struct vk_buffer_range range;

      vulkan_buffer_chain_alloc(vk->context, &vk->chain->vbo, 6 * sizeof(struct vk_vertex), &range);

      {
         struct vk_vertex *pv = (struct vk_vertex*)range.data;
         struct vk_color color;

         color.r = 1.0f;
         color.g = 1.0f;
         color.b = 1.0f;
         color.a = 1.0f;

         VULKAN_WRITE_QUAD_VBO(pv, 0.0f, 0.0f, 1.0f, 1.0f, 0.0f, 0.0f, 1.0f, 1.0f, &color);
      }

      vkCmdBindVertexBuffers(vk->cmd, 0, 1,
            &range.buffer, &range.offset);
   }

   vkCmdDraw(vk->cmd, 6, 1, 0, 0);

   vkCmdEndRenderPass(vk->cmd);
}
2016-02-16 20:24:00 +01:00
static bool vulkan_frame ( void * data , const void * frame ,
unsigned frame_width , unsigned frame_height ,
uint64_t frame_count ,
2017-01-18 17:41:27 +01:00
unsigned pitch , const char * msg , video_frame_info_t * video_info )
2016-02-16 20:24:00 +01:00
{
2024-02-09 06:12:55 -05:00
int i , j , k ;
2020-06-28 01:09:33 +02:00
VkSubmitInfo submit_info ;
2020-03-09 15:48:15 +01:00
VkClearValue clear_color ;
2020-06-28 01:09:33 +02:00
VkRenderPassBeginInfo rp_info ;
VkCommandBufferBeginInfo begin_info ;
2017-01-18 23:02:24 +01:00
VkSemaphore signal_semaphores [ 2 ] ;
2016-02-20 20:15:46 +01:00
vk_t * vk = ( vk_t * ) data ;
2016-06-25 11:39:52 +02:00
bool waits_for_semaphores = false ;
2017-01-18 23:02:24 +01:00
unsigned width = video_info - > width ;
unsigned height = video_info - > height ;
2020-03-09 15:48:15 +01:00
bool statistics_show = video_info - > statistics_show ;
const char * stat_text = video_info - > stat_text ;
2020-09-18 11:57:32 -04:00
unsigned black_frame_insertion = video_info - > black_frame_insertion ;
2024-01-20 02:11:31 -05:00
int bfi_light_frames ;
unsigned n ;
2020-03-09 15:48:15 +01:00
bool input_driver_nonblock_state = video_info - > input_driver_nonblock_state ;
bool runloop_is_slowmotion = video_info - > runloop_is_slowmotion ;
bool runloop_is_paused = video_info - > runloop_is_paused ;
unsigned video_width = video_info - > width ;
unsigned video_height = video_info - > height ;
struct font_params * osd_params = ( struct font_params * )
& video_info - > osd_stat_params ;
2020-08-03 16:33:54 +02:00
# ifdef HAVE_MENU
2023-08-16 19:17:04 +02:00
bool menu_is_alive = ( video_info - > menu_st_flags & MENU_ST_FLAG_ALIVE ) ? true : false ;
2020-08-03 16:33:54 +02:00
# endif
# ifdef HAVE_GFX_WIDGETS
bool widgets_active = video_info - > widgets_active ;
# endif
2017-12-11 23:55:31 -08:00
unsigned frame_index =
2020-06-06 13:23:24 +02:00
vk - > context - > current_frame_index ;
unsigned swapchain_index =
2016-02-20 20:29:52 +01:00
vk - > context - > current_swapchain_index ;
2021-12-26 05:56:44 +02:00
bool overlay_behind_menu = video_info - > overlay_behind_menu ;
2016-02-16 20:24:00 +01:00
2022-03-13 17:18:28 +00:00
# ifdef VULKAN_HDR_SWAPCHAIN
2024-05-13 18:10:25 +02:00
bool use_main_buffer =
( vk - > context - > flags & VK_CTX_FLAG_HDR_ENABLE )
2022-10-30 23:07:07 +01:00
& & ( ! vk - > filter_chain | | ! vulkan_filter_chain_emits_hdr10 ( vk - > filter_chain ) ) ;
2022-03-13 17:18:28 +00:00
# endif /* VULKAN_HDR_SWAPCHAIN */
2016-02-16 20:24:00 +01:00
/* Bookkeeping on start of frame. */
2020-06-28 01:09:33 +02:00
struct vk_per_frame * chain = & vk - > swapchain [ frame_index ] ;
struct vk_image * backbuffer = & vk - > backbuffers [ swapchain_index ] ;
2020-10-12 01:26:51 +02:00
struct vk_descriptor_manager * manager = & chain - > descriptor_manager ;
struct vk_buffer_chain * buff_chain_vbo = & chain - > vbo ;
struct vk_buffer_chain * buff_chain_ubo = & chain - > ubo ;
2020-06-28 01:09:33 +02:00
vk - > chain = chain ;
vk - > backbuffer = backbuffer ;
2016-02-16 20:24:00 +01:00
2020-10-12 01:26:51 +02:00
VK_DESCRIPTOR_MANAGER_RESTART ( manager ) ;
VK_BUFFER_CHAIN_DISCARD ( buff_chain_vbo ) ;
VK_BUFFER_CHAIN_DISCARD ( buff_chain_ubo ) ;
2016-02-16 20:24:00 +01:00
2016-02-21 12:33:16 +01:00
/* Start recording the command buffer. */
2020-10-12 01:36:50 +02:00
vk - > cmd = chain - > cmd ;
2023-08-14 23:36:19 +02:00
begin_info . sType =
2020-10-12 01:36:50 +02:00
VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO ;
begin_info . pNext = NULL ;
2023-08-14 23:36:19 +02:00
begin_info . flags =
2020-10-12 01:36:50 +02:00
VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT ;
begin_info . pInheritanceInfo = NULL ;
2020-06-28 01:09:33 +02:00
2016-06-26 13:10:19 +02:00
vkResetCommandBuffer ( vk - > cmd , 0 ) ;
2016-02-29 21:37:35 +01:00
2016-06-26 13:10:19 +02:00
vkBeginCommandBuffer ( vk - > cmd , & begin_info ) ;
2016-03-06 12:11:44 +01:00
2020-06-28 19:18:48 +02:00
vk - > tracker . dirty = 0 ;
vk - > tracker . scissor . offset . x = 0 ;
vk - > tracker . scissor . offset . y = 0 ;
vk - > tracker . scissor . extent . width = 0 ;
vk - > tracker . scissor . extent . height = 0 ;
2022-11-02 21:34:00 +01:00
vk - > flags & = ~ VK_FLAG_TRACKER_USE_SCISSOR ;
2020-06-28 19:18:48 +02:00
vk - > tracker . pipeline = VK_NULL_HANDLE ;
vk - > tracker . view = VK_NULL_HANDLE ;
vk - > tracker . sampler = VK_NULL_HANDLE ;
for ( i = 0 ; i < 16 ; i + + )
vk - > tracker . mvp . data [ i ] = 0.0f ;
2023-08-14 23:36:19 +02:00
waits_for_semaphores =
2023-07-17 17:30:17 +02:00
( vk - > flags & VK_FLAG_HW_ENABLE )
& & frame
2023-08-14 23:36:19 +02:00
& & ! vk - > hw . num_cmd
2023-07-17 17:30:17 +02:00
& & ( vk - > flags & VK_FLAG_HW_VALID_SEMAPHORE ) ;
if ( waits_for_semaphores
& & ( vk - > hw . src_queue_family ! = VK_QUEUE_FAMILY_IGNORED )
& & ( vk - > hw . src_queue_family ! = vk - > context - > graphics_queue_index ) )
2016-06-25 11:39:52 +02:00
{
/* Acquire ownership of image from other queue family. */
2020-07-03 17:48:46 +02:00
VULKAN_TRANSFER_IMAGE_OWNERSHIP ( vk - > cmd ,
2016-06-25 11:39:52 +02:00
vk - > hw . image - > create_info . image ,
vk - > hw . image - > image_layout ,
/* Create a dependency chain from semaphore wait. */
VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT ,
VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT |
VK_PIPELINE_STAGE_TRANSFER_BIT ,
vk - > hw . src_queue_family , vk - > context - > graphics_queue_index ) ;
}
2016-02-16 20:24:00 +01:00
/* Upload texture */
2022-11-02 21:34:00 +01:00
if ( frame & & ( ! ( vk - > flags & VK_FLAG_HW_ENABLE ) ) )
2016-02-16 20:24:00 +01:00
{
unsigned y ;
2016-02-20 20:15:46 +01:00
uint8_t * dst = NULL ;
const uint8_t * src = ( const uint8_t * ) frame ;
unsigned bpp = vk - > video . rgb32 ? 4 : 2 ;
2016-02-16 20:24:00 +01:00
2020-06-28 01:09:33 +02:00
if ( chain - > texture . width ! = frame_width
2016-02-20 20:29:52 +01:00
| | chain - > texture . height ! = frame_height )
2016-02-16 20:24:00 +01:00
{
chain - > texture = vulkan_create_texture ( vk , & chain - > texture ,
2016-02-21 12:33:16 +01:00
frame_width , frame_height , chain - > texture . format , NULL , NULL ,
2017-12-11 23:55:31 -08:00
chain - > texture_optimal . memory
2016-02-29 19:03:18 +01:00
? VULKAN_TEXTURE_STAGING : VULKAN_TEXTURE_STREAMED ) ;
2020-01-03 20:20:59 +01:00
{
struct vk_texture * texture = & chain - > texture ;
VK_MAP_PERSISTENT_TEXTURE ( vk - > context - > device , texture ) ;
}
2016-02-21 12:33:16 +01:00
if ( chain - > texture . type = = VULKAN_TEXTURE_STAGING )
2016-02-29 19:03:18 +01:00
chain - > texture_optimal = vulkan_create_texture (
vk ,
& chain - > texture_optimal ,
2020-06-28 19:18:48 +02:00
frame_width , frame_height ,
2023-02-05 19:23:48 +01:00
chain - > texture . format , /* Ensure we use the original format and not any remapped format. */
2016-02-21 12:33:16 +01:00
NULL , NULL , VULKAN_TEXTURE_DYNAMIC ) ;
2016-02-16 20:24:00 +01:00
}
2023-02-05 19:23:48 +01:00
if ( frame ! = chain - > texture . mapped )
2016-02-16 20:24:00 +01:00
{
dst = ( uint8_t * ) chain - > texture . mapped ;
2020-06-28 19:18:48 +02:00
if ( ( chain - > texture . stride = = pitch )
& & pitch = = frame_width * bpp )
2016-02-16 20:24:00 +01:00
memcpy ( dst , src , frame_width * frame_height * bpp ) ;
else
2017-12-11 23:55:31 -08:00
for ( y = 0 ; y < frame_height ; y + + ,
2016-02-20 20:29:52 +01:00
dst + = chain - > texture . stride , src + = pitch )
2016-02-16 20:24:00 +01:00
memcpy ( dst , src , frame_width * bpp ) ;
}
2020-12-15 07:01:00 +01:00
VULKAN_SYNC_TEXTURE_TO_GPU_COND_OBJ ( vk , chain - > texture ) ;
2016-02-21 12:33:16 +01:00
/* If we have an optimal texture, copy to that now. */
2016-02-21 12:59:11 +01:00
if ( chain - > texture_optimal . memory ! = VK_NULL_HANDLE )
2016-02-21 12:33:16 +01:00
{
2020-12-15 07:20:33 +01:00
struct vk_texture * dynamic = & chain - > texture_optimal ;
struct vk_texture * staging = & chain - > texture ;
2023-02-05 19:23:48 +01:00
vulkan_copy_staging_to_dynamic ( vk , vk - > cmd , dynamic , staging ) ;
2016-02-21 12:33:16 +01:00
}
2016-02-16 20:24:00 +01:00
vk - > last_valid_index = frame_index ;
}
/* Notify filter chain about the new sync index. */
2020-06-28 19:18:48 +02:00
vulkan_filter_chain_notify_sync_index (
( vulkan_filter_chain_t * ) vk - > filter_chain , frame_index ) ;
vulkan_filter_chain_set_frame_count (
( vulkan_filter_chain_t * ) vk - > filter_chain , frame_count ) ;
2024-02-09 06:12:55 -05:00
2024-07-20 16:16:16 +02:00
/* Sub-frame info for multiframe shaders (per real content frame).
2024-02-09 06:12:55 -05:00
Should always be 1 for non - use of subframes */
if ( ! ( vk - > context - > flags & VK_CTX_FLAG_SWAP_INTERVAL_EMULATION_LOCK ) )
{
if ( black_frame_insertion
| | input_driver_nonblock_state
| | runloop_is_slowmotion
| | runloop_is_paused
| | ( vk - > context - > swap_interval > 1 )
| | ( vk - > flags & VK_FLAG_MENU_ENABLE ) )
vulkan_filter_chain_set_shader_subframes (
( vulkan_filter_chain_t * ) vk - > filter_chain , 1 ) ;
else
vulkan_filter_chain_set_shader_subframes (
( vulkan_filter_chain_t * ) vk - > filter_chain , video_info - > shader_subframes ) ;
vulkan_filter_chain_set_current_shader_subframe (
( vulkan_filter_chain_t * ) vk - > filter_chain , 1 ) ;
}
2024-03-19 15:59:36 +00:00
# ifdef VULKAN_ROLLING_SCANLINE_SIMULATION
if ( ( video_info - > shader_subframes > 1 )
& & ( video_info - > scan_subframes )
& & ( backbuffer - > image ! = VK_NULL_HANDLE )
& & ! black_frame_insertion
& & ! input_driver_nonblock_state
& & ! runloop_is_slowmotion
& & ! runloop_is_paused
& & ( ! ( vk - > flags & VK_FLAG_MENU_ENABLE ) )
& & ! ( vk - > context - > swap_interval > 1 ) )
vulkan_filter_chain_set_simulate_scanline (
( vulkan_filter_chain_t * ) vk - > filter_chain , true ) ;
else
vulkan_filter_chain_set_simulate_scanline (
( vulkan_filter_chain_t * ) vk - > filter_chain , false ) ;
2024-07-20 16:16:16 +02:00
# endif /* VULKAN_ROLLING_SCANLINE_SIMULATION */
2024-03-19 15:59:36 +00:00
2020-07-01 21:04:05 +02:00
# ifdef HAVE_REWIND
2020-06-28 19:18:48 +02:00
vulkan_filter_chain_set_frame_direction (
( vulkan_filter_chain_t * ) vk - > filter_chain ,
state_manager_frame_is_reversed ( ) ? - 1 : 1 ) ;
2020-07-01 21:04:05 +02:00
# else
vulkan_filter_chain_set_frame_direction (
( vulkan_filter_chain_t * ) vk - > filter_chain ,
1 ) ;
# endif
2023-03-26 22:20:27 +02:00
vulkan_filter_chain_set_rotation (
( vulkan_filter_chain_t * ) vk - > filter_chain , retroarch_get_rotation ( ) ) ;
2016-02-16 20:24:00 +01:00
/* Render offscreen filter chain passes. */
{
/* Set the source texture in the filter chain */
struct vulkan_filter_chain_texture input ;
2022-11-02 21:34:00 +01:00
if ( vk - > flags & VK_FLAG_HW_ENABLE )
2016-02-16 20:24:00 +01:00
{
/* Does this make that this can happen at all? */
2021-08-29 20:39:22 +02:00
if ( vk - > hw . image & & vk - > hw . image - > create_info . image )
2016-02-16 20:24:00 +01:00
{
2018-12-12 13:22:24 +01:00
if ( frame )
{
input . width = frame_width ;
input . height = frame_height ;
}
else
{
input . width = vk - > hw . last_width ;
input . height = vk - > hw . last_height ;
}
input . image = vk - > hw . image - > create_info . image ;
input . view = vk - > hw . image - > image_view ;
input . layout = vk - > hw . image - > image_layout ;
/* The format can change on a whim. */
input . format = vk - > hw . image - > create_info . format ;
2016-02-16 20:24:00 +01:00
}
else
{
2018-12-12 13:22:24 +01:00
/* Fall back to the default, black texture.
2023-08-14 23:36:19 +02:00
* This can happen if we restart the video
2020-06-28 01:09:33 +02:00
* driver while in the menu . */
2022-12-04 15:29:48 +01:00
input . width = vk - > default_texture . width ;
input . height = vk - > default_texture . height ;
2020-06-28 01:09:33 +02:00
input . image = vk - > default_texture . image ;
input . view = vk - > default_texture . view ;
input . layout = vk - > default_texture . layout ;
input . format = vk - > default_texture . format ;
2016-02-16 20:24:00 +01:00
}
2020-06-28 01:09:33 +02:00
vk - > hw . last_width = input . width ;
vk - > hw . last_height = input . height ;
2016-02-16 20:24:00 +01:00
}
else
{
2022-05-16 21:37:02 +02:00
struct vk_texture * tex = & vk - > swapchain [ vk - > last_valid_index ] . texture ;
2023-08-14 23:36:19 +02:00
if ( vk - > swapchain [ vk - > last_valid_index ] . texture_optimal . memory
2022-05-16 21:37:02 +02:00
! = VK_NULL_HANDLE )
tex = & vk - > swapchain [ vk - > last_valid_index ] . texture_optimal ;
2022-05-18 16:48:23 +02:00
else if ( tex - > image )
2022-05-16 21:37:02 +02:00
vulkan_transition_texture ( vk , vk - > cmd , tex ) ;
2016-02-16 20:24:00 +01:00
2016-03-25 14:51:37 +01:00
input . image = tex - > image ;
2016-02-20 20:15:46 +01:00
input . view = tex - > view ;
2016-02-16 20:24:00 +01:00
input . layout = tex - > layout ;
2016-02-20 20:15:46 +01:00
input . width = tex - > width ;
2016-02-16 20:24:00 +01:00
input . height = tex - > height ;
2018-12-06 11:38:30 +01:00
input . format = VK_FORMAT_UNDEFINED ; /* It's already configured. */
2016-02-16 20:24:00 +01:00
}
2020-06-28 01:09:33 +02:00
vulkan_filter_chain_set_input_texture ( ( vulkan_filter_chain_t * )
vk - > filter_chain , & input ) ;
2016-02-16 20:24:00 +01:00
}
vulkan_set_viewport ( vk , width , height , false , true ) ;
2016-02-20 20:15:46 +01:00
vulkan_filter_chain_build_offscreen_passes (
2016-09-01 18:26:01 +02:00
( vulkan_filter_chain_t * ) vk - > filter_chain ,
vk - > cmd , & vk - > vk_vp ) ;
2018-10-16 23:25:31 +02:00
# if defined(HAVE_MENU)
/* Upload menu texture. */
2022-11-02 21:34:00 +01:00
if ( vk - > flags & VK_FLAG_MENU_ENABLE )
2018-10-16 23:25:31 +02:00
{
2023-08-16 04:18:55 +02:00
if ( vk - > menu . textures [ vk - > menu . last_index ] . image ! = VK_NULL_HANDLE
| | vk - > menu . textures [ vk - > menu . last_index ] . buffer ! = VK_NULL_HANDLE )
2018-10-16 23:25:31 +02:00
{
struct vk_texture * optimal = & vk - > menu . textures_optimal [ vk - > menu . last_index ] ;
struct vk_texture * texture = & vk - > menu . textures [ vk - > menu . last_index ] ;
if ( optimal - > memory ! = VK_NULL_HANDLE )
{
if ( vk - > menu . dirty [ vk - > menu . last_index ] )
{
2020-12-15 07:20:33 +01:00
struct vk_texture * dynamic = optimal ;
struct vk_texture * staging = texture ;
2020-12-15 07:28:39 +01:00
VULKAN_SYNC_TEXTURE_TO_GPU_COND_PTR ( vk , staging ) ;
2023-02-05 19:23:48 +01:00
vulkan_copy_staging_to_dynamic ( vk , vk - > cmd ,
2020-12-15 07:20:33 +01:00
dynamic , staging ) ;
2020-12-15 07:01:00 +01:00
vk - > menu . dirty [ vk - > menu . last_index ] = false ;
2018-10-16 23:25:31 +02:00
}
}
}
}
# endif
2022-01-08 12:22:34 +00:00
# ifdef VULKAN_HDR_SWAPCHAIN
2022-08-31 21:59:25 +02:00
if ( use_main_buffer )
2022-01-08 12:22:34 +00:00
backbuffer = & vk - > main_buffer ;
2022-01-10 05:26:02 +00:00
# endif /* VULKAN_HDR_SWAPCHAIN */
2022-01-08 12:22:34 +00:00
2016-02-16 20:24:00 +01:00
/* Render to backbuffer. */
2020-06-28 01:09:33 +02:00
if ( ( backbuffer - > image ! = VK_NULL_HANDLE )
2022-10-30 23:07:07 +01:00
& & ( vk - > context - > flags & VK_CTX_FLAG_HAS_ACQUIRED_SWAPCHAIN ) )
2016-02-16 20:24:00 +01:00
{
2020-06-28 01:09:33 +02:00
rp_info . sType = VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO ;
rp_info . pNext = NULL ;
2017-12-09 12:58:11 +01:00
rp_info . renderPass = vk - > render_pass ;
2020-06-06 13:23:24 +02:00
rp_info . framebuffer = backbuffer - > framebuffer ;
2020-06-28 01:09:33 +02:00
rp_info . renderArea . offset . x = 0 ;
rp_info . renderArea . offset . y = 0 ;
2017-12-09 12:58:11 +01:00
rp_info . renderArea . extent . width = vk - > context - > swapchain_width ;
rp_info . renderArea . extent . height = vk - > context - > swapchain_height ;
rp_info . clearValueCount = 1 ;
rp_info . pClearValues = & clear_color ;
2020-06-28 01:09:33 +02:00
clear_color . color . float32 [ 0 ] = 0.0f ;
clear_color . color . float32 [ 1 ] = 0.0f ;
clear_color . color . float32 [ 2 ] = 0.0f ;
clear_color . color . float32 [ 3 ] = 0.0f ;
2017-12-09 12:58:11 +01:00
/* Begin render pass and set up viewport */
vkCmdBeginRenderPass ( vk - > cmd , & rp_info , VK_SUBPASS_CONTENTS_INLINE ) ;
vulkan_filter_chain_build_viewport_pass (
( vulkan_filter_chain_t * ) vk - > filter_chain , vk - > cmd ,
& vk - > vk_vp , vk - > mvp . data ) ;
2016-02-16 20:24:00 +01:00
2021-12-26 05:56:44 +02:00
# ifdef HAVE_OVERLAY
2022-11-02 21:34:00 +01:00
if ( ( vk - > flags & VK_FLAG_OVERLAY_ENABLE ) & & overlay_behind_menu )
2021-12-26 05:56:44 +02:00
vulkan_render_overlay ( vk , video_width , video_height ) ;
# endif
2017-12-09 12:58:11 +01:00
# if defined(HAVE_MENU)
2022-11-02 21:34:00 +01:00
if ( vk - > flags & VK_FLAG_MENU_ENABLE )
2016-02-16 20:24:00 +01:00
{
2020-05-19 16:20:43 +02:00
menu_driver_frame ( menu_is_alive , video_info ) ;
2016-02-16 20:24:00 +01:00
2022-05-19 15:28:26 +02:00
if ( vk - > menu . textures [ vk - > menu . last_index ] . image ! = VK_NULL_HANDLE | |
2018-10-16 23:25:31 +02:00
vk - > menu . textures [ vk - > menu . last_index ] . buffer ! = VK_NULL_HANDLE )
2016-02-21 12:33:16 +01:00
{
2017-12-09 12:58:11 +01:00
struct vk_draw_quad quad ;
struct vk_texture * optimal = & vk - > menu . textures_optimal [ vk - > menu . last_index ] ;
2022-05-19 15:28:26 +02:00
settings_t * settings = config_get_ptr ( ) ;
bool menu_linear_filter = settings - > bools . menu_linear_filter ;
2022-11-02 21:34:00 +01:00
vulkan_set_viewport ( vk , width , height , ( ( vk - > flags &
VK_FLAG_MENU_FULLSCREEN ) > 0 ) , false ) ;
2017-12-09 12:58:11 +01:00
2022-05-19 15:28:26 +02:00
quad . pipeline = vk - > pipelines . alpha_blend ;
quad . texture = & vk - > menu . textures [ vk - > menu . last_index ] ;
2017-12-09 12:58:11 +01:00
if ( optimal - > memory ! = VK_NULL_HANDLE )
quad . texture = optimal ;
2016-02-21 12:33:16 +01:00
2020-02-18 14:51:40 +01:00
if ( menu_linear_filter )
2023-08-16 04:18:55 +02:00
quad . sampler = ( optimal - > flags & VK_TEX_FLAG_MIPMAP )
? vk - > samplers . mipmap_linear : vk - > samplers . linear ;
2019-01-23 11:55:17 +00:00
else
2023-08-16 04:18:55 +02:00
quad . sampler = ( optimal - > flags & VK_TEX_FLAG_MIPMAP )
? vk - > samplers . mipmap_nearest : vk - > samplers . nearest ;
2016-07-31 13:47:10 +02:00
2022-10-07 11:08:17 +02:00
quad . mvp = & vk - > mvp_no_rot ;
quad . color . r = 1.0f ;
quad . color . g = 1.0f ;
quad . color . b = 1.0f ;
quad . color . a = vk - > menu . alpha ;
2017-12-09 12:58:11 +01:00
vulkan_draw_quad ( vk , & quad ) ;
}
2016-02-16 20:24:00 +01:00
}
2020-03-09 15:48:15 +01:00
else if ( statistics_show )
2018-03-23 17:43:49 +01:00
{
if ( osd_params )
2020-03-10 03:24:59 +01:00
font_driver_render_msg ( vk ,
2020-03-09 15:48:15 +01:00
stat_text ,
2020-04-23 17:26:00 +01:00
osd_params , NULL ) ;
2018-03-23 17:43:49 +01:00
}
2019-02-07 00:15:32 +01:00
# endif
2019-05-11 06:26:40 +02:00
# ifdef HAVE_OVERLAY
2022-11-02 21:34:00 +01:00
if ( ( vk - > flags & VK_FLAG_OVERLAY_ENABLE ) & & ! overlay_behind_menu )
2020-03-09 15:48:15 +01:00
vulkan_render_overlay ( vk , video_width , video_height ) ;
2016-02-16 20:24:00 +01:00
# endif
2019-02-07 00:15:32 +01:00
if ( ! string_is_empty ( msg ) )
2020-03-10 03:24:59 +01:00
font_driver_render_msg ( vk , msg , NULL , NULL ) ;
2016-02-16 20:24:00 +01:00
2020-02-17 21:28:42 +01:00
# ifdef HAVE_GFX_WIDGETS
2020-08-03 16:33:54 +02:00
if ( widgets_active )
2020-06-08 05:45:15 +02:00
gfx_widgets_frame ( video_info ) ;
2016-02-16 20:24:00 +01:00
# endif
2017-12-09 12:58:11 +01:00
/* End the render pass. We're done rendering to backbuffer now. */
vkCmdEndRenderPass ( vk - > cmd ) ;
2022-01-08 12:22:34 +00:00
# ifdef VULKAN_HDR_SWAPCHAIN
/* Copy over back buffer to swap chain render targets */
2022-08-31 21:59:25 +02:00
if ( use_main_buffer )
2022-01-08 12:22:34 +00:00
{
backbuffer = & vk - > backbuffers [ swapchain_index ] ;
2024-05-13 18:10:25 +02:00
/* Prepare source buffer for reading */
2022-01-08 12:22:34 +00:00
VULKAN_IMAGE_LAYOUT_TRANSITION ( vk - > cmd , vk - > main_buffer . image ,
VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL , VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL ,
2022-08-31 19:57:54 +02:00
VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT , VK_ACCESS_SHADER_READ_BIT ,
VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT ,
2023-08-14 23:36:19 +02:00
VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT ) ;
2022-01-08 12:22:34 +00:00
2024-05-13 18:10:25 +02:00
vulkan_run_hdr_pipeline ( vk - > pipelines . hdr , vk - > render_pass , & vk - > main_buffer , backbuffer , vk ) ;
2022-01-08 12:22:34 +00:00
}
2022-01-10 05:26:02 +00:00
# endif /* VULKAN_HDR_SWAPCHAIN */
2017-12-09 12:58:11 +01:00
}
2016-02-16 20:24:00 +01:00
2016-08-07 01:09:15 +02:00
/* End the filter chain frame.
* This must happen outside a render pass .
*/
2016-09-01 18:26:01 +02:00
vulkan_filter_chain_end_frame ( ( vulkan_filter_chain_t * ) vk - > filter_chain , vk - > cmd ) ;
2016-08-07 01:09:15 +02:00
2023-08-14 23:36:19 +02:00
if (
2022-10-30 23:07:07 +01:00
( backbuffer - > image ! = VK_NULL_HANDLE )
& & ( vk - > context - > flags & VK_CTX_FLAG_HAS_ACQUIRED_SWAPCHAIN )
2020-12-15 06:19:48 +01:00
)
2016-02-16 20:24:00 +01:00
{
2023-08-14 23:36:19 +02:00
if ( ( vk - > flags & VK_FLAG_READBACK_PENDING )
2022-11-02 21:34:00 +01:00
| | ( vk - > flags & VK_FLAG_READBACK_STREAMED ) )
2020-12-15 06:19:48 +01:00
{
2024-05-13 18:10:25 +02:00
VkImageLayout backbuffer_layout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL ;
# ifdef VULKAN_HDR_SWAPCHAIN
struct vk_image * readback_source = backbuffer ;
if ( vk - > context - > flags & VK_CTX_FLAG_HDR_ENABLE )
{
if ( use_main_buffer )
{
/* Read directly from sdr main buffer instead of tonemapping */
readback_source = & vk - > main_buffer ;
/* No need to transition layout, it's already read-only optimal */
}
else
{
/* Prepare backbuffer for reading */
backbuffer_layout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL ;
VULKAN_IMAGE_LAYOUT_TRANSITION ( vk - > cmd , backbuffer - > image ,
VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL , backbuffer_layout ,
VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT , VK_ACCESS_SHADER_READ_BIT ,
VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT ,
VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT ) ;
}
vulkan_run_hdr_pipeline ( vk - > pipelines . hdr_to_sdr , vk - > readback_render_pass , readback_source , & vk - > readback_image , vk ) ;
readback_source = & vk - > readback_image ;
}
# endif /* VULKAN_HDR_SWAPCHAIN */
2020-12-15 06:19:48 +01:00
/* We cannot safely read back from an image which
* has already been presented as we need to
* maintain the PRESENT_SRC_KHR layout .
*
2023-08-14 23:36:19 +02:00
* If we ' re reading back ,
2020-12-15 06:19:48 +01:00
* perform the readback before presenting .
*/
VULKAN_IMAGE_LAYOUT_TRANSITION (
vk - > cmd ,
backbuffer - > image ,
2024-05-13 18:10:25 +02:00
backbuffer_layout ,
2020-12-15 06:19:48 +01:00
VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL ,
VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT ,
VK_ACCESS_TRANSFER_READ_BIT ,
VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT ,
VK_PIPELINE_STAGE_TRANSFER_BIT ) ;
2024-05-13 18:10:25 +02:00
vulkan_readback ( vk , readback_source ) ;
2020-12-15 06:19:48 +01:00
/* Prepare for presentation after transfers are complete. */
VULKAN_IMAGE_LAYOUT_TRANSITION (
vk - > cmd ,
backbuffer - > image ,
VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL ,
VK_IMAGE_LAYOUT_PRESENT_SRC_KHR ,
0 ,
VK_ACCESS_MEMORY_READ_BIT ,
VK_PIPELINE_STAGE_TRANSFER_BIT ,
VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT ) ;
2022-11-02 21:34:00 +01:00
vk - > flags & = ~ VK_FLAG_READBACK_PENDING ;
2020-12-15 06:19:48 +01:00
}
else
{
/* Prepare backbuffer for presentation. */
VULKAN_IMAGE_LAYOUT_TRANSITION (
vk - > cmd ,
backbuffer - > image ,
VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL ,
VK_IMAGE_LAYOUT_PRESENT_SRC_KHR ,
VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT ,
0 ,
VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT ,
VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT ) ;
}
2016-02-16 20:24:00 +01:00
}
2023-07-17 17:30:17 +02:00
if ( waits_for_semaphores
& & ( vk - > hw . src_queue_family ! = VK_QUEUE_FAMILY_IGNORED )
& & ( vk - > hw . src_queue_family ! = vk - > context - > graphics_queue_index ) )
2016-06-25 11:39:52 +02:00
{
/* Release ownership of image back to other queue family. */
2020-07-03 17:48:46 +02:00
VULKAN_TRANSFER_IMAGE_OWNERSHIP ( vk - > cmd ,
2016-06-25 11:39:52 +02:00
vk - > hw . image - > create_info . image ,
vk - > hw . image - > image_layout ,
VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT ,
VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT ,
vk - > context - > graphics_queue_index , vk - > hw . src_queue_family ) ;
}
2016-06-26 13:10:19 +02:00
vkEndCommandBuffer ( vk - > cmd ) ;
2016-02-16 20:24:00 +01:00
/* Submit command buffers to GPU. */
2020-06-28 01:09:33 +02:00
submit_info . sType = VK_STRUCTURE_TYPE_SUBMIT_INFO ;
submit_info . pNext = NULL ;
2016-02-16 20:24:00 +01:00
if ( vk - > hw . num_cmd )
{
/* vk->hw.cmd has already been allocated for this. */
2016-02-20 20:15:46 +01:00
vk - > hw . cmd [ vk - > hw . num_cmd ] = vk - > cmd ;
2016-02-16 20:24:00 +01:00
submit_info . commandBufferCount = vk - > hw . num_cmd + 1 ;
2016-02-20 20:15:46 +01:00
submit_info . pCommandBuffers = vk - > hw . cmd ;
2016-02-16 20:24:00 +01:00
2016-02-20 20:15:46 +01:00
vk - > hw . num_cmd = 0 ;
2016-02-16 20:24:00 +01:00
}
2022-05-19 15:28:26 +02:00
else
{
submit_info . commandBufferCount = 1 ;
submit_info . pCommandBuffers = & vk - > cmd ;
}
2016-02-16 20:24:00 +01:00
2016-06-25 11:39:52 +02:00
if ( waits_for_semaphores )
2016-02-16 20:24:00 +01:00
{
submit_info . waitSemaphoreCount = vk - > hw . num_semaphores ;
2016-02-20 20:15:46 +01:00
submit_info . pWaitSemaphores = vk - > hw . semaphores ;
submit_info . pWaitDstStageMask = vk - > hw . wait_dst_stages ;
2016-06-25 11:39:52 +02:00
/* Consume the semaphores. */
2022-11-02 21:34:00 +01:00
vk - > flags & = ~ VK_FLAG_HW_VALID_SEMAPHORE ;
2020-06-06 14:24:13 +02:00
/* We allocated space for this. */
2022-10-30 23:07:07 +01:00
if ( ( vk - > context - > flags & VK_CTX_FLAG_HAS_ACQUIRED_SWAPCHAIN )
& & ( vk - > context - > swapchain_acquire_semaphore ! = VK_NULL_HANDLE ) )
2020-06-06 14:24:13 +02:00
{
2023-01-09 00:50:43 +01:00
vk - > context - > swapchain_wait_semaphores [ frame_index ] =
2020-06-06 14:24:13 +02:00
vk - > context - > swapchain_acquire_semaphore ;
2023-01-09 00:50:43 +01:00
vk - > context - > swapchain_acquire_semaphore = VK_NULL_HANDLE ;
2020-06-06 14:24:13 +02:00
2023-01-09 00:50:43 +01:00
vk - > hw . semaphores [ submit_info . waitSemaphoreCount ] = vk - > context - > swapchain_wait_semaphores [ frame_index ] ;
2020-06-06 14:24:13 +02:00
vk - > hw . wait_dst_stages [ submit_info . waitSemaphoreCount ] = VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT ;
submit_info . waitSemaphoreCount + + ;
}
}
2022-10-30 23:07:07 +01:00
else if ( ( vk - > context - > flags & VK_CTX_FLAG_HAS_ACQUIRED_SWAPCHAIN )
& & ( vk - > context - > swapchain_acquire_semaphore ! = VK_NULL_HANDLE ) )
2020-06-06 14:24:13 +02:00
{
2023-01-09 00:50:43 +01:00
static const VkPipelineStageFlags wait_stage =
2020-06-06 14:24:13 +02:00
VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT ;
vk - > context - > swapchain_wait_semaphores [ frame_index ] =
vk - > context - > swapchain_acquire_semaphore ;
2023-01-09 00:50:43 +01:00
vk - > context - > swapchain_acquire_semaphore = VK_NULL_HANDLE ;
2020-06-06 14:24:13 +02:00
submit_info . waitSemaphoreCount = 1 ;
2022-05-19 15:28:26 +02:00
submit_info . pWaitSemaphores = & vk - > context - > swapchain_wait_semaphores [ frame_index ] ;
submit_info . pWaitDstStageMask = & wait_stage ;
}
else
{
submit_info . waitSemaphoreCount = 0 ;
submit_info . pWaitSemaphores = NULL ;
submit_info . pWaitDstStageMask = NULL ;
2016-02-16 20:24:00 +01:00
}
2022-05-19 15:28:26 +02:00
submit_info . signalSemaphoreCount = 0 ;
2023-08-14 23:36:19 +02:00
if ( ( vk - > context - > swapchain_semaphores [ swapchain_index ]
2022-10-30 23:07:07 +01:00
! = VK_NULL_HANDLE )
& & ( vk - > context - > flags & VK_CTX_FLAG_HAS_ACQUIRED_SWAPCHAIN ) )
2020-06-06 13:23:24 +02:00
signal_semaphores [ submit_info . signalSemaphoreCount + + ] = vk - > context - > swapchain_semaphores [ swapchain_index ] ;
2016-06-26 18:58:59 +02:00
if ( vk - > hw . signal_semaphore ! = VK_NULL_HANDLE )
{
signal_semaphores [ submit_info . signalSemaphoreCount + + ] = vk - > hw . signal_semaphore ;
vk - > hw . signal_semaphore = VK_NULL_HANDLE ;
}
submit_info . pSignalSemaphores = submit_info . signalSemaphoreCount ? signal_semaphores : NULL ;
2016-02-20 20:15:46 +01:00
2016-05-11 10:10:30 +02:00
# ifdef HAVE_THREADS
2016-02-16 20:24:00 +01:00
slock_lock ( vk - > context - > queue_lock ) ;
2016-05-11 10:10:30 +02:00
# endif
2016-06-26 13:10:19 +02:00
vkQueueSubmit ( vk - > context - > queue , 1 ,
2016-02-20 20:15:46 +01:00
& submit_info , vk - > context - > swapchain_fences [ frame_index ] ) ;
2017-12-08 14:38:57 +01:00
vk - > context - > swapchain_fences_signalled [ frame_index ] = true ;
2016-05-11 10:10:30 +02:00
# ifdef HAVE_THREADS
2016-02-16 20:24:00 +01:00
slock_unlock ( vk - > context - > queue_lock ) ;
2016-05-11 10:10:30 +02:00
# endif
2016-02-16 20:24:00 +01:00
2020-08-02 18:49:31 +02:00
if ( vk - > ctx_driver - > swap_buffers )
vk - > ctx_driver - > swap_buffers ( vk - > ctx_data ) ;
2017-04-23 11:31:11 +02:00
2022-10-30 23:07:07 +01:00
if ( ! ( vk - > context - > flags & VK_CTX_FLAG_SWAP_INTERVAL_EMULATION_LOCK ) )
2020-04-25 17:44:13 +02:00
{
if ( vk - > ctx_driver - > update_window_title )
2020-07-09 07:46:40 +02:00
vk - > ctx_driver - > update_window_title ( vk - > ctx_data ) ;
2020-04-25 17:44:13 +02:00
}
2019-11-19 22:48:35 +01:00
2016-02-16 20:24:00 +01:00
/* Handle spurious swapchain invalidations as soon as we can,
* i . e . right after swap buffers . */
2022-01-08 12:22:34 +00:00
# ifdef VULKAN_HDR_SWAPCHAIN
2024-05-13 18:10:25 +02:00
bool video_hdr_enable = video_driver_supports_hdr ( ) & & video_info - > hdr_enable ;
2022-11-02 21:34:00 +01:00
if ( ( vk - > flags & VK_FLAG_SHOULD_RESIZE )
2023-08-14 23:36:19 +02:00
| | ( ( ( vk - > context - > flags & VK_CTX_FLAG_HDR_ENABLE ) > 0 )
2022-10-30 23:07:07 +01:00
! = video_hdr_enable ) )
2022-01-08 12:22:34 +00:00
# else
2022-11-02 21:34:00 +01:00
if ( vk - > flags & VK_FLAG_SHOULD_RESIZE )
2022-01-10 05:26:02 +00:00
# endif /* VULKAN_HDR_SWAPCHAIN */
2016-02-16 20:24:00 +01:00
{
2022-01-08 12:22:34 +00:00
# ifdef VULKAN_HDR_SWAPCHAIN
2022-10-30 23:07:07 +01:00
if ( video_hdr_enable )
2022-01-08 12:22:34 +00:00
{
2022-10-30 23:07:07 +01:00
vk - > context - > flags | = VK_CTX_FLAG_HDR_ENABLE ;
2022-01-08 12:22:34 +00:00
# ifdef HAVE_THREADS
slock_lock ( vk - > context - > queue_lock ) ;
# endif
vkQueueWaitIdle ( vk - > context - > queue ) ;
# ifdef HAVE_THREADS
slock_unlock ( vk - > context - > queue_lock ) ;
# endif
2022-08-31 20:56:04 +02:00
vulkan_destroy_hdr_buffer ( vk - > context - > device , & vk - > main_buffer ) ;
2024-05-13 18:10:25 +02:00
vulkan_destroy_hdr_buffer ( vk - > context - > device , & vk - > readback_image ) ;
2022-01-08 12:22:34 +00:00
}
2022-10-30 23:07:07 +01:00
else
vk - > context - > flags & = ~ VK_CTX_FLAG_HDR_ENABLE ;
2022-01-10 05:26:02 +00:00
# endif /* VULKAN_HDR_SWAPCHAIN */
2022-01-08 12:22:34 +00:00
2016-02-16 20:24:00 +01:00
gfx_ctx_mode_t mode ;
mode . width = width ;
mode . height = height ;
2017-05-18 03:37:02 +02:00
2020-07-09 05:36:29 +02:00
if ( vk - > ctx_driver - > set_resize )
2020-07-09 07:46:40 +02:00
vk - > ctx_driver - > set_resize ( vk - > ctx_data , mode . width , mode . height ) ;
2016-02-16 20:24:00 +01:00
2022-01-08 12:22:34 +00:00
# ifdef VULKAN_HDR_SWAPCHAIN
2022-10-30 23:07:07 +01:00
if ( vk - > context - > flags & VK_CTX_FLAG_HDR_ENABLE )
2022-01-08 12:22:34 +00:00
{
2024-05-13 18:10:25 +02:00
/* Create intermediary buffer to render filter chain output to */
vulkan_init_render_target ( & vk - > main_buffer , video_width , video_height ,
vk - > context - > swapchain_format , vk - > render_pass , vk - > context ) ;
/* Create image for readback target in bgra8 format */
vulkan_init_render_target ( & vk - > readback_image , video_width , video_height ,
VK_FORMAT_B8G8R8A8_UNORM , vk - > readback_render_pass , vk - > context ) ;
2022-01-08 12:22:34 +00:00
}
2022-01-10 05:26:02 +00:00
# endif /* VULKAN_HDR_SWAPCHAIN */
2022-11-02 21:34:00 +01:00
vk - > flags & = ~ VK_FLAG_SHOULD_RESIZE ;
2016-02-16 20:24:00 +01:00
}
2020-06-28 19:18:48 +02:00
2022-10-30 23:07:07 +01:00
if ( vk - > context - > flags & VK_CTX_FLAG_INVALID_SWAPCHAIN )
2023-08-14 23:36:19 +02:00
vulkan_check_swapchain ( vk ) ;
2016-02-16 20:24:00 +01:00
2016-07-24 00:07:32 +02:00
/* Disable BFI during fast forward, slow-motion,
2024-01-20 02:11:31 -05:00
* pause , and menu to prevent flicker . */
2016-07-24 00:07:32 +02:00
if (
2022-10-30 23:07:07 +01:00
( backbuffer - > image ! = VK_NULL_HANDLE )
& & ( vk - > context - > flags & VK_CTX_FLAG_HAS_ACQUIRED_SWAPCHAIN )
2020-03-09 15:48:15 +01:00
& & black_frame_insertion
& & ! input_driver_nonblock_state
& & ! runloop_is_slowmotion
2020-09-18 11:57:32 -04:00
& & ! runloop_is_paused
2024-02-09 06:12:55 -05:00
& & ! ( vk - > context - > swap_interval > 1 )
& & ! ( video_info - > shader_subframes > 1 )
2022-11-02 21:34:00 +01:00
& & ( ! ( vk - > flags & VK_FLAG_MENU_ENABLE ) ) )
2023-08-14 23:36:19 +02:00
{
2024-01-20 02:11:31 -05:00
if ( video_info - > bfi_dark_frames > video_info - > black_frame_insertion )
video_info - > bfi_dark_frames = video_info - > black_frame_insertion ;
/* BFI now handles variable strobe strength, like on-off-off, vs on-on-off for 180hz.
This needs to be done with duping frames instead of increased swap intervals for
a couple reasons . Swap interval caps out at 4 in most all apis as of coding ,
and seems to be flat ignored > 1 at least in modern Windows for some older APIs . */
bfi_light_frames = video_info - > black_frame_insertion - video_info - > bfi_dark_frames ;
if ( bfi_light_frames > 0 & & ! ( vk - > context - > flags & VK_CTX_FLAG_SWAP_INTERVAL_EMULATION_LOCK ) )
2022-05-19 15:28:26 +02:00
{
2024-01-20 02:11:31 -05:00
vk - > context - > flags | = VK_CTX_FLAG_SWAP_INTERVAL_EMULATION_LOCK ;
while ( bfi_light_frames > 0 )
{
if ( ! ( vulkan_frame ( vk , NULL , 0 , 0 , frame_count , 0 , msg , video_info ) ) )
{
vk - > context - > flags & = ~ VK_CTX_FLAG_SWAP_INTERVAL_EMULATION_LOCK ;
return false ;
}
- - bfi_light_frames ;
}
vk - > context - > flags & = ~ VK_CTX_FLAG_SWAP_INTERVAL_EMULATION_LOCK ;
}
for ( n = 0 ; n < video_info - > bfi_dark_frames ; + + n )
{
if ( ! ( vk - > context - > flags & VK_CTX_FLAG_SWAP_INTERVAL_EMULATION_LOCK ) )
{
vulkan_inject_black_frame ( vk , video_info ) ;
if ( vk - > ctx_driver - > swap_buffers )
vk - > ctx_driver - > swap_buffers ( vk - > ctx_data ) ;
}
2022-05-19 15:28:26 +02:00
}
2020-09-19 13:28:21 -04:00
}
2016-07-24 00:07:32 +02:00
2024-02-09 06:12:55 -05:00
/* Frame duping for Shader Subframes, don't combine with swap_interval > 1, BFI.
Also , a major logical use of shader sub - frames will still be shader implemented BFI
or even rolling scan bfi , so we need to protect the menu / ff / etc from bad flickering
from improper settings , and unnecessary performance overhead for ff , screenshots etc . */
if ( ( video_info - > shader_subframes > 1 )
& & ( backbuffer - > image ! = VK_NULL_HANDLE )
& & ( vk - > context - > flags & VK_CTX_FLAG_HAS_ACQUIRED_SWAPCHAIN )
& & ! black_frame_insertion
& & ! input_driver_nonblock_state
& & ! runloop_is_slowmotion
& & ! runloop_is_paused
& & ( ! ( vk - > flags & VK_FLAG_MENU_ENABLE ) )
& & ! ( vk - > context - > swap_interval > 1 )
& & ( ! ( vk - > context - > flags & VK_CTX_FLAG_SWAP_INTERVAL_EMULATION_LOCK ) ) )
{
vk - > context - > flags | = VK_CTX_FLAG_SWAP_INTERVAL_EMULATION_LOCK ;
for ( j = 1 ; j < ( int ) video_info - > shader_subframes ; j + + )
{
vulkan_filter_chain_set_shader_subframes (
( vulkan_filter_chain_t * ) vk - > filter_chain , video_info - > shader_subframes ) ;
vulkan_filter_chain_set_current_shader_subframe (
( vulkan_filter_chain_t * ) vk - > filter_chain , j + 1 ) ;
if ( ! vulkan_frame ( vk , NULL , 0 , 0 , frame_count , 0 , msg ,
video_info ) )
{
vk - > context - > flags & = ~ VK_CTX_FLAG_SWAP_INTERVAL_EMULATION_LOCK ;
return false ;
}
}
vk - > context - > flags & = ~ VK_CTX_FLAG_SWAP_INTERVAL_EMULATION_LOCK ;
}
2023-08-14 23:36:19 +02:00
/* Vulkan doesn't directly support swap_interval > 1,
2024-02-09 06:12:55 -05:00
* so we fake it by duping out more frames . Shader subframes
uses same concept but split above so sub_frame logic the
same as the other apis that do support real swap_interval */
2022-10-30 23:07:07 +01:00
if ( ( vk - > context - > swap_interval > 1 )
2024-02-09 06:12:55 -05:00
& & ! ( video_info - > shader_subframes > 1 )
& & ! black_frame_insertion
& & ( ! ( vk - > context - > flags & VK_CTX_FLAG_SWAP_INTERVAL_EMULATION_LOCK ) ) )
2016-07-24 00:34:48 +02:00
{
2022-10-30 23:07:07 +01:00
vk - > context - > flags | = VK_CTX_FLAG_SWAP_INTERVAL_EMULATION_LOCK ;
2024-02-09 06:12:55 -05:00
for ( k = 1 ; k < ( int ) vk - > context - > swap_interval ; k + + )
2016-07-24 00:34:48 +02:00
{
2017-01-09 14:25:59 +01:00
if ( ! vulkan_frame ( vk , NULL , 0 , 0 , frame_count , 0 , msg ,
video_info ) )
2016-07-24 00:34:48 +02:00
{
2022-10-30 23:07:07 +01:00
vk - > context - > flags & = ~ VK_CTX_FLAG_SWAP_INTERVAL_EMULATION_LOCK ;
2016-07-24 00:34:48 +02:00
return false ;
}
}
2022-10-30 23:07:07 +01:00
vk - > context - > flags & = ~ VK_CTX_FLAG_SWAP_INTERVAL_EMULATION_LOCK ;
2016-07-24 00:34:48 +02:00
}
2016-02-16 20:24:00 +01:00
return true ;
}
static void vulkan_set_aspect_ratio ( void * data , unsigned aspect_ratio_idx )
{
vk_t * vk = ( vk_t * ) data ;
2022-11-02 21:34:00 +01:00
if ( vk )
vk - > flags | = VK_FLAG_KEEP_ASPECT | VK_FLAG_SHOULD_RESIZE ;
2016-02-16 20:24:00 +01:00
}
static void vulkan_apply_state_changes ( void * data )
{
vk_t * vk = ( vk_t * ) data ;
if ( vk )
2022-11-02 21:34:00 +01:00
vk - > flags | = VK_FLAG_SHOULD_RESIZE ;
2016-02-16 20:24:00 +01:00
}
/* Show or hide the OS mouse cursor via the context driver.
 * NOTE: also guard vk->ctx_driver itself, for consistency with the
 * vulkan_get_video_output_* callbacks which perform the same check. */
static void vulkan_show_mouse(void *data, bool state)
{
   vk_t *vk = (vk_t*)data;
   if (vk && vk->ctx_driver && vk->ctx_driver->show_mouse)
      vk->ctx_driver->show_mouse(vk->ctx_data, state);
}
static struct video_shader * vulkan_get_current_shader ( void * data )
{
vk_t * vk = ( vk_t * ) data ;
2022-05-19 15:28:26 +02:00
if ( vk & & vk - > filter_chain )
return vulkan_filter_chain_get_preset ( ( vulkan_filter_chain_t * ) vk - > filter_chain ) ;
return NULL ;
2016-02-16 20:24:00 +01:00
}
2016-02-20 20:15:46 +01:00
/* libretro GET_CURRENT_SOFTWARE_FRAMEBUFFER callback.
 * Hands the core a direct pointer into the persistently mapped
 * per-frame texture so software cores can render without an extra copy.
 * Recreates the texture (and its GPU-optimal sibling, when staging is
 * used) whenever the requested dimensions change.
 * Always returns true. */
static bool vulkan_get_current_sw_framebuffer(void *data,
      struct retro_framebuffer *framebuffer)
{
   struct vk_per_frame *chain = NULL;
   vk_t *vk                   = (vk_t*)data;
   /* Select the per-frame chain entry for the current swapchain frame. */
   vk->chain                  =
      &vk->swapchain[vk->context->current_frame_index];
   chain                      = vk->chain;

   if (chain->texture.width != framebuffer->width ||
         chain->texture.height != framebuffer->height)
   {
      /* Size changed: recreate the CPU-visible streamed texture. */
      chain->texture = vulkan_create_texture(vk, &chain->texture,
            framebuffer->width, framebuffer->height, chain->texture.format,
            NULL, NULL, VULKAN_TEXTURE_STREAMED);
      {
         /* Keep the new texture persistently mapped so the core can
          * write into it directly. */
         struct vk_texture *texture = &chain->texture;
         VK_MAP_PERSISTENT_TEXTURE(vk->context->device, texture);
      }

      if (chain->texture.type == VULKAN_TEXTURE_STAGING)
      {
         /* Streamed memory fell back to staging; also create a
          * device-local texture to blit into each frame. */
         chain->texture_optimal = vulkan_create_texture(
               vk,
               &chain->texture_optimal,
               framebuffer->width,
               framebuffer->height,
               chain->texture.format, /* Ensure we use the non-remapped format. */
               NULL, NULL, VULKAN_TEXTURE_DYNAMIC);
      }
   }

   framebuffer->data         = chain->texture.mapped;
   framebuffer->pitch        = chain->texture.stride;
   framebuffer->format       = vk->video.rgb32
      ? RETRO_PIXEL_FORMAT_XRGB8888 : RETRO_PIXEL_FORMAT_RGB565;
   framebuffer->memory_flags = 0;

   /* Advertise cached memory so the core may read back efficiently. */
   if (vk->context->memory_properties.memoryTypes[
         chain->texture.memory_type].propertyFlags &
         VK_MEMORY_PROPERTY_HOST_CACHED_BIT)
      framebuffer->memory_flags |= RETRO_MEMORY_TYPE_CACHED;

   return true;
}
2021-05-10 21:18:31 -07:00
static bool vulkan_is_mapped_swapchain_texture_ptr ( const vk_t * vk ,
const void * ptr )
{
2022-12-04 15:29:48 +01:00
int i ;
2022-12-22 21:36:32 +01:00
for ( i = 0 ; i < ( int ) vk - > num_swapchain_images ; i + + )
2021-05-10 21:18:31 -07:00
{
if ( ptr = = vk - > swapchain [ i ] . texture . mapped )
return true ;
}
return false ;
}
2016-02-20 20:15:46 +01:00
static bool vulkan_get_hw_render_interface ( void * data ,
const struct retro_hw_render_interface * * iface )
2016-02-16 20:24:00 +01:00
{
vk_t * vk = ( vk_t * ) data ;
2016-02-20 20:15:46 +01:00
* iface = ( const struct retro_hw_render_interface * ) & vk - > hw . iface ;
2022-11-02 21:34:00 +01:00
return ( ( vk - > flags & VK_FLAG_HW_ENABLE ) > 0 ) ;
2016-02-16 20:24:00 +01:00
}
/* Upload a menu frame into the per-frame menu texture.
 * 16-bit input is kept as B4G4R4A4 (with an image-view swizzle) when the
 * GPU supports that format; otherwise each 4-bit channel is expanded in
 * software into the 32-bit B8G8R8A8 texture.
 * 'alpha' sets the menu blend factor used at draw time. */
static void vulkan_set_texture_frame(void *data,
      const void *frame, bool rgb32, unsigned width, unsigned height,
      float alpha)
{
   size_t y;
   unsigned stride;
   uint8_t *ptr                          = NULL;
   uint8_t *dst                          = NULL;
   const uint8_t *src                    = NULL;
   vk_t *vk                              = (vk_t*)data;
   unsigned idx                          = 0;
   struct vk_texture *texture            = NULL;
   struct vk_texture *texture_optimal    = NULL;
   VkFormat fmt                          = VK_FORMAT_B8G8R8A8_UNORM;
   bool do_memcpy                        = true;
   const VkComponentMapping *ptr_swizzle = NULL;

   if (!vk)
      return;

   if (!rgb32)
   {
      /* Probe for native 16-bit 4444 support before deciding on a
       * software expansion fallback. */
      VkFormatProperties formatProperties;
      vkGetPhysicalDeviceFormatProperties(vk->context->gpu, VK_FORMAT_B4G4R4A4_UNORM_PACK16, &formatProperties);
      if (formatProperties.optimalTilingFeatures != 0)
      {
         static const VkComponentMapping br_swizzle =
         { VK_COMPONENT_SWIZZLE_B, VK_COMPONENT_SWIZZLE_G, VK_COMPONENT_SWIZZLE_R, VK_COMPONENT_SWIZZLE_A };
         /* B4G4R4A4 must be supported, but R4G4B4A4 is optional,
          * just apply the swizzle in the image view instead. */
         fmt         = VK_FORMAT_B4G4R4A4_UNORM_PACK16;
         ptr_swizzle = &br_swizzle;
      }
      else
         do_memcpy = false; /* Expand 4444 -> 8888 on the CPU below. */
   }

   idx             = vk->context->current_frame_index;
   texture         = &vk->menu.textures[idx];
   texture_optimal = &vk->menu.textures_optimal[idx];

   /* Reuse the existing allocation when possible. If a GPU-optimal
    * sibling already exists, keep writing through a staging texture. */
   *texture = vulkan_create_texture(vk,
         texture->memory
         ? texture
         : NULL,
         width,
         height,
         fmt,
         NULL,
         ptr_swizzle,
         texture_optimal->memory
         ? VULKAN_TEXTURE_STAGING
         : VULKAN_TEXTURE_STREAMED);

   /* NOTE(review): vkMapMemory result is not checked here — assumed to
    * succeed for host-visible menu textures; confirm upstream policy. */
   vkMapMemory(vk->context->device, texture->memory,
         texture->offset, texture->size, 0, (void**)&ptr);

   dst    = ptr;
   src    = (const uint8_t*)frame;
   stride = (rgb32 ? sizeof(uint32_t) : sizeof(uint16_t)) * width;

   if (do_memcpy)
   {
      /* Row-by-row copy; texture rows may be padded (texture->stride). */
      for (y = 0; y < height; y++, dst += texture->stride, src += stride)
         memcpy(dst, src, stride);
   }
   else
   {
      /* Software fallback: widen each 4-bit channel of the 16-bit
       * source into the 32-bit destination texture. */
      for (y = 0; y < height; y++, dst += texture->stride, src += stride)
      {
         size_t x;
         uint16_t *srcpix = (uint16_t*)src;
         uint32_t *dstpix = (uint32_t*)dst;
         for (x = 0; x < width; x++, srcpix++, dstpix++)
         {
            uint32_t pix = *srcpix;
            *dstpix      = (
                  (pix & 0xf000) >>  8)
               | ((pix & 0x0f00) <<  4)
               | ((pix & 0x00f0) << 16)
               | ((pix & 0x000f) << 28);
         }
      }
   }

   vk->menu.alpha      = alpha;
   vk->menu.last_index = idx;

   if (texture->type == VULKAN_TEXTURE_STAGING)
      /* Staging path: refresh the device-local texture it feeds. */
      *texture_optimal = vulkan_create_texture(vk,
            texture_optimal->memory
            ? texture_optimal
            : NULL,
            width,
            height,
            fmt,
            NULL,
            ptr_swizzle,
            VULKAN_TEXTURE_DYNAMIC);
   else
   {
      /* Streamed path: flush CPU writes if the memory is not coherent. */
      VULKAN_SYNC_TEXTURE_TO_GPU_COND_PTR(vk, texture);
   }

   vkUnmapMemory(vk->context->device, texture->memory);
   vk->menu.dirty[idx] = true; /* Blit/upload pending for this frame. */
}
2022-11-02 21:34:00 +01:00
/* Toggle menu texture rendering and its fullscreen mode. */
static void vulkan_set_texture_enable(void *data, bool state, bool fullscreen)
{
   vk_t *vk = (vk_t*)data;
   if (!vk)
      return;
   /* Clear both bits first, then set the requested ones. */
   vk->flags &= ~(VK_FLAG_MENU_ENABLE | VK_FLAG_MENU_FULLSCREEN);
   if (state)
      vk->flags |= VK_FLAG_MENU_ENABLE;
   if (fullscreen)
      vk->flags |= VK_FLAG_MENU_FULLSCREEN;
}
2022-12-04 15:29:48 +01:00
# define VK_T0 0xff000000u
# define VK_T1 0xffffffffu
2016-02-16 20:24:00 +01:00
/* Create a static Vulkan texture from a texture_image and return it as
 * an opaque handle (heap-allocated struct vk_texture*), or 0 on failure.
 * An image with no pixels/size yields an 8x8 checkerboard placeholder.
 * Caller releases the handle via vulkan_unload_texture(). */
static uintptr_t vulkan_load_texture(void *video_data, void *data,
      bool threaded, enum texture_filter_type filter_type)
{
   struct vk_texture *texture  = NULL;
   vk_t *vk                    = (vk_t*)video_data;
   struct texture_image *image = (struct texture_image*)data;

   if (!image)
      return 0;

   if (!(texture = (struct vk_texture*)calloc(1, sizeof(*texture))))
      return 0;

   if (!image->pixels || !image->width || !image->height)
   {
      /* Create a dummy texture instead. */
      static const uint32_t checkerboard[] = {
         VK_T0, VK_T1, VK_T0, VK_T1, VK_T0, VK_T1, VK_T0, VK_T1,
         VK_T1, VK_T0, VK_T1, VK_T0, VK_T1, VK_T0, VK_T1, VK_T0,
         VK_T0, VK_T1, VK_T0, VK_T1, VK_T0, VK_T1, VK_T0, VK_T1,
         VK_T1, VK_T0, VK_T1, VK_T0, VK_T1, VK_T0, VK_T1, VK_T0,
         VK_T0, VK_T1, VK_T0, VK_T1, VK_T0, VK_T1, VK_T0, VK_T1,
         VK_T1, VK_T0, VK_T1, VK_T0, VK_T1, VK_T0, VK_T1, VK_T0,
         VK_T0, VK_T1, VK_T0, VK_T1, VK_T0, VK_T1, VK_T0, VK_T1,
         VK_T1, VK_T0, VK_T1, VK_T0, VK_T1, VK_T0, VK_T1, VK_T0,
      };
      *texture = vulkan_create_texture(vk, NULL,
            8, 8, VK_FORMAT_B8G8R8A8_UNORM,
            checkerboard, NULL, VULKAN_TEXTURE_STATIC);
      /* Placeholder is always drawn unfiltered and without mipmaps. */
      texture->flags &= ~(VK_TEX_FLAG_DEFAULT_SMOOTH
                        | VK_TEX_FLAG_MIPMAP);
   }
   else
   {
      *texture = vulkan_create_texture(vk, NULL,
            image->width, image->height, VK_FORMAT_B8G8R8A8_UNORM,
            image->pixels, NULL, VULKAN_TEXTURE_STATIC);
      /* Translate the requested filter into texture flags. */
      if (filter_type == TEXTURE_FILTER_MIPMAP_LINEAR || filter_type ==
            TEXTURE_FILTER_LINEAR)
         texture->flags |= VK_TEX_FLAG_DEFAULT_SMOOTH;
      if (filter_type == TEXTURE_FILTER_MIPMAP_LINEAR)
         texture->flags |= VK_TEX_FLAG_MIPMAP;
   }

   return (uintptr_t)texture;
}
2023-08-14 23:36:19 +02:00
static void vulkan_unload_texture ( void * data ,
2020-07-27 10:15:28 +02:00
bool threaded , uintptr_t handle )
2016-02-16 20:24:00 +01:00
{
2016-02-29 17:06:41 +01:00
vk_t * vk = ( vk_t * ) data ;
struct vk_texture * texture = ( struct vk_texture * ) handle ;
2019-04-08 13:35:35 +01:00
if ( ! texture | | ! vk )
2016-02-16 20:24:00 +01:00
return ;
/* TODO: We really want to defer this deletion instead,
* but this will do for now . */
2018-03-15 09:21:23 +01:00
# ifdef HAVE_THREADS
slock_lock ( vk - > context - > queue_lock ) ;
# endif
2016-06-26 13:10:19 +02:00
vkQueueWaitIdle ( vk - > context - > queue ) ;
2018-03-15 09:21:23 +01:00
# ifdef HAVE_THREADS
slock_unlock ( vk - > context - > queue_lock ) ;
# endif
2016-02-29 19:03:18 +01:00
vulkan_destroy_texture (
vk - > context - > device , texture ) ;
2016-02-16 20:24:00 +01:00
free ( texture ) ;
}
2018-04-15 17:38:00 -05:00
/* Query the context driver's refresh rate; 0.0f when unavailable. */
static float vulkan_get_refresh_rate(void *data)
{
   float rate = 0.0f;
   if (!video_context_driver_get_refresh_rate(&rate))
      return 0.0f;
   return rate;
}
2018-04-23 13:34:30 +02:00
static uint32_t vulkan_get_flags ( void * data )
{
2019-06-17 14:10:55 +02:00
uint32_t flags = 0 ;
2018-04-23 13:34:30 +02:00
BIT32_SET ( flags , GFX_CTX_FLAGS_CUSTOMIZABLE_SWAPCHAIN_IMAGES ) ;
2018-04-23 14:15:21 +02:00
BIT32_SET ( flags , GFX_CTX_FLAGS_BLACK_FRAME_INSERTION ) ;
2019-01-23 11:55:17 +00:00
BIT32_SET ( flags , GFX_CTX_FLAGS_MENU_FRAME_FILTERING ) ;
2019-05-05 13:46:26 +02:00
BIT32_SET ( flags , GFX_CTX_FLAGS_SCREENSHOTS_SUPPORTED ) ;
2021-12-26 05:56:44 +02:00
BIT32_SET ( flags , GFX_CTX_FLAGS_OVERLAY_BEHIND_MENU_SUPPORTED ) ;
2024-02-09 06:12:55 -05:00
BIT32_SET ( flags , GFX_CTX_FLAGS_SUBFRAME_SHADERS ) ;
2018-04-23 13:34:30 +02:00
return flags ;
}
2020-07-25 11:35:42 +02:00
static void vulkan_get_video_output_size ( void * data ,
2021-09-07 20:25:22 +02:00
unsigned * width , unsigned * height , char * desc , size_t desc_len )
2020-07-25 11:35:42 +02:00
{
2020-07-27 09:38:20 +02:00
vk_t * vk = ( vk_t * ) data ;
2022-12-04 15:29:48 +01:00
if ( vk & & vk - > ctx_driver & & vk - > ctx_driver - > get_video_output_size )
vk - > ctx_driver - > get_video_output_size (
vk - > ctx_data ,
width , height , desc , desc_len ) ;
2020-07-25 11:35:42 +02:00
}
static void vulkan_get_video_output_prev ( void * data )
{
2020-07-27 09:38:20 +02:00
vk_t * vk = ( vk_t * ) data ;
2022-12-04 15:29:48 +01:00
if ( vk & & vk - > ctx_driver & & vk - > ctx_driver - > get_video_output_prev )
vk - > ctx_driver - > get_video_output_prev ( vk - > ctx_data ) ;
2020-07-25 11:35:42 +02:00
}
static void vulkan_get_video_output_next ( void * data )
{
2020-07-27 09:38:20 +02:00
vk_t * vk = ( vk_t * ) data ;
2022-12-04 15:29:48 +01:00
if ( vk & & vk - > ctx_driver & & vk - > ctx_driver - > get_video_output_next )
vk - > ctx_driver - > get_video_output_next ( vk - > ctx_data ) ;
2020-07-25 11:35:42 +02:00
}
2016-02-16 20:24:00 +01:00
/* Poke interface vtable for the Vulkan driver.
 * Positional initializer — entry order must match
 * video_poke_interface_t exactly; NULL means "not supported". */
static const video_poke_interface_t vulkan_poke_interface = {
   vulkan_get_flags,
   vulkan_load_texture,
   vulkan_unload_texture,
   vulkan_set_video_mode,
   vulkan_get_refresh_rate,
   NULL, /* set_filtering */
   vulkan_get_video_output_size,
   vulkan_get_video_output_prev,
   vulkan_get_video_output_next,
   NULL, /* get_current_framebuffer */
   NULL, /* get_proc_address */
   vulkan_set_aspect_ratio,
   vulkan_apply_state_changes,
   vulkan_set_texture_frame,
   vulkan_set_texture_enable,
   font_driver_render_msg,
   vulkan_show_mouse,
   NULL, /* grab_mouse_toggle */
   vulkan_get_current_shader,
   vulkan_get_current_sw_framebuffer,
   vulkan_get_hw_render_interface,
#ifdef VULKAN_HDR_SWAPCHAIN
   vulkan_set_hdr_max_nits,
   vulkan_set_hdr_paper_white_nits,
   vulkan_set_hdr_contrast,
   vulkan_set_hdr_expand_gamut
#else
   NULL, /* set_hdr_max_nits */
   NULL, /* set_hdr_paper_white_nits */
   NULL, /* set_hdr_contrast */
   NULL  /* set_hdr_expand_gamut */
#endif /* VULKAN_HDR_SWAPCHAIN */
};
/* Return the (static) poke interface table. */
static void vulkan_get_poke_interface(void *data,
      const video_poke_interface_t **iface)
{
   (void)data; /* Table is static; driver instance not needed. */
   *iface = &vulkan_poke_interface;
}
static void vulkan_viewport_info ( void * data , struct video_viewport * vp )
{
unsigned width , height ;
2016-02-20 20:15:46 +01:00
vk_t * vk = ( vk_t * ) data ;
2016-02-16 20:24:00 +01:00
2018-08-24 18:07:57 -07:00
if ( ! vk )
return ;
2019-08-13 12:28:16 +02:00
width = vk - > video_width ;
height = vk - > video_height ;
2016-02-20 14:04:33 +01:00
/* Make sure we get the correct viewport. */
vulkan_set_viewport ( vk , width , height , false , true ) ;
2016-02-20 20:15:46 +01:00
* vp = vk - > vp ;
vp - > full_width = width ;
2016-02-16 20:24:00 +01:00
vp - > full_height = height ;
}
2017-01-22 18:17:48 +01:00
/* Read the current viewport back into 'buffer' as bottom-up 24-bit BGR.
 * Two paths: a streamed path that reuses an already-recorded readback
 * staging image, and a synchronous fallback that re-renders the cached
 * frame and drains the queue first.
 * Returns false when no staging image/scaler is available. */
static bool vulkan_read_viewport(void *data, uint8_t *buffer, bool is_idle)
{
   struct vk_texture *staging = NULL;
   vk_t *vk                   = (vk_t*)data;

   if (!vk)
      return false;

   staging = &vk->readback.staging[vk->context->current_frame_index];

   VkFormat format = vk->context->swapchain_format;
#ifdef VULKAN_HDR_SWAPCHAIN
   if (vk->context->flags & VK_CTX_FLAG_HDR_ENABLE)
   {
      /* Hdr readback is implemented through format conversion on the GPU */
      format = VK_FORMAT_B8G8R8A8_UNORM;
   }
#endif /* VULKAN_HDR_SWAPCHAIN */

   if (vk->flags & VK_FLAG_READBACK_STREAMED)
   {
      const uint8_t *src     = NULL;
      struct scaler_ctx *ctx = NULL;

      /* Pick the scaler whose input channel order matches the
       * swapchain format. */
      switch (format)
      {
         case VK_FORMAT_R8G8B8A8_UNORM:
         case VK_FORMAT_A8B8G8R8_UNORM_PACK32:
            ctx = &vk->readback.scaler_rgb;
            break;

         case VK_FORMAT_B8G8R8A8_UNORM:
            ctx = &vk->readback.scaler_bgr;
            break;

         default:
            RARCH_ERR("[Vulkan]: Unexpected swapchain format. Cannot readback.\n");
            break;
      }

      if (ctx)
      {
         if (staging->memory == VK_NULL_HANDLE)
            return false;
         /* Start at the last row; negative out_stride flips vertically. */
         buffer += 3 * (vk->vp.height - 1) * vk->vp.width;
         vkMapMemory(vk->context->device, staging->memory,
               staging->offset, staging->size, 0, (void**)&src);

         /* Invalidate CPU caches for non-coherent memory before reading. */
         if (     (staging->flags & VK_TEX_FLAG_NEED_MANUAL_CACHE_MANAGEMENT)
               && (staging->memory != VK_NULL_HANDLE))
            VULKAN_SYNC_TEXTURE_TO_CPU(vk->context->device, staging->memory);

         ctx->in_stride  =  (int)staging->stride;
         ctx->out_stride = -(int)vk->vp.width * 3;
         scaler_ctx_scale_direct(ctx, buffer, src);

         vkUnmapMemory(vk->context->device, staging->memory);
      }
   }
   else
   {
      /* Synchronous path only for now. */
      /* TODO: How will we deal with format conversion?
       * For now, take the simplest route and use image blitting
       * with conversion. */
      vk->flags |= VK_FLAG_READBACK_PENDING;

      /* Re-render the cached frame so a readback blit gets recorded. */
      if (!is_idle)
         video_driver_cached_frame();
#ifdef HAVE_THREADS
      slock_lock(vk->context->queue_lock);
#endif
      vkQueueWaitIdle(vk->context->queue);
#ifdef HAVE_THREADS
      slock_unlock(vk->context->queue_lock);
#endif

      if (!staging->memory)
      {
         RARCH_ERR("[Vulkan]: Attempted to readback synchronously, but no image is present.\nThis can happen if vsync is disabled on Windows systems due to mailbox emulation.\n");
         return false;
      }

      if (!staging->mapped)
      {
         VK_MAP_PERSISTENT_TEXTURE(vk->context->device, staging);
      }

      /* Invalidate CPU caches for non-coherent memory before reading. */
      if (     (staging->flags & VK_TEX_FLAG_NEED_MANUAL_CACHE_MANAGEMENT)
            && (staging->memory != VK_NULL_HANDLE))
         VULKAN_SYNC_TEXTURE_TO_CPU(vk->context->device, staging->memory);

      {
         int y;
         const uint8_t *src = (const uint8_t*)staging->mapped;
         /* Write rows bottom-up: start at the last row, step backwards. */
         buffer += 3 * (vk->vp.height - 1) * vk->vp.width;

         switch (format)
         {
            case VK_FORMAT_B8G8R8A8_UNORM:
               /* BGRA -> BGR: channel order already matches, drop alpha. */
               for (y = 0; y < (int)vk->vp.height; y++,
                     src += staging->stride, buffer -= 3 * vk->vp.width)
               {
                  int x;
                  for (x = 0; x < (int)vk->vp.width; x++)
                  {
                     buffer[3 * x + 0] = src[4 * x + 0];
                     buffer[3 * x + 1] = src[4 * x + 1];
                     buffer[3 * x + 2] = src[4 * x + 2];
                  }
               }
               break;
            case VK_FORMAT_R8G8B8A8_UNORM:
            case VK_FORMAT_A8B8G8R8_UNORM_PACK32:
               /* RGBA -> BGR: swap red/blue while dropping alpha. */
               for (y = 0; y < (int)vk->vp.height; y++,
                     src += staging->stride, buffer -= 3 * vk->vp.width)
               {
                  int x;
                  for (x = 0; x < (int)vk->vp.width; x++)
                  {
                     buffer[3 * x + 2] = src[4 * x + 0];
                     buffer[3 * x + 1] = src[4 * x + 1];
                     buffer[3 * x + 0] = src[4 * x + 2];
                  }
               }
               break;
            default:
               RARCH_ERR("[Vulkan]: Unexpected swapchain format.\n");
               break;
         }
      }
      /* One-shot staging image; destroy it after the copy. */
      vulkan_destroy_texture(
            vk->context->device, staging);
   }
   return true;
}
# ifdef HAVE_OVERLAY
/* Enable/disable overlay rendering; also shows/hides the mouse cursor.
 * NOTE: guard vk->ctx_driver before dereferencing it, for consistency
 * with the vulkan_get_video_output_* callbacks. */
static void vulkan_overlay_enable(void *data, bool enable)
{
   vk_t *vk = (vk_t*)data;
   if (!vk)
      return;
   if (enable)
      vk->flags |= VK_FLAG_OVERLAY_ENABLE;
   else
      vk->flags &= ~VK_FLAG_OVERLAY_ENABLE;
   if (vk->ctx_driver && vk->ctx_driver->show_mouse)
      vk->ctx_driver->show_mouse(vk->ctx_data, enable);
}
/* Toggle fullscreen overlay rendering. */
static void vulkan_overlay_full_screen(void *data, bool enable)
{
   vk_t *vk = (vk_t*)data;
   if (!vk)
      return;
   vk->flags &= ~VK_FLAG_OVERLAY_FULLSCREEN;
   if (enable)
      vk->flags |= VK_FLAG_OVERLAY_FULLSCREEN;
}
/* Release all overlay resources: vertex buffer, every overlay texture,
 * the image array itself, and finally zero the overlay state.
 * free(NULL) is a no-op, so no pointer guards are needed. */
static void vulkan_overlay_free(vk_t *vk)
{
   int i;
   if (!vk)
      return;

   free(vk->overlay.vertex);

   /* Destroy each overlay texture that was actually created. */
   for (i = 0; i < (int)vk->overlay.count; i++)
      if (vk->overlay.images[i].memory != VK_NULL_HANDLE)
         vulkan_destroy_texture(
               vk->context->device,
               &vk->overlay.images[i]);

   free(vk->overlay.images);

   memset(&vk->overlay, 0, sizeof(vk->overlay));
}
2016-02-20 20:29:52 +01:00
static void vulkan_overlay_set_alpha ( void * data ,
unsigned image , float mod )
2016-02-16 20:24:00 +01:00
{
2022-12-04 15:29:48 +01:00
int i ;
2016-02-20 20:29:52 +01:00
struct vk_vertex * pv ;
vk_t * vk = ( vk_t * ) data ;
2016-02-16 20:24:00 +01:00
if ( ! vk )
return ;
pv = & vk - > overlay . vertex [ image * 4 ] ;
for ( i = 0 ; i < 4 ; i + + )
{
pv [ i ] . color . r = 1.0f ;
pv [ i ] . color . g = 1.0f ;
pv [ i ] . color . b = 1.0f ;
pv [ i ] . color . a = mod ;
}
}
2020-03-09 15:48:15 +01:00
/* Draw all loaded overlay images on top of the current frame.
 * Temporarily switches the viewport (fullscreen if the flag is set),
 * issues one blended triangle-strip draw per image, then restores
 * the saved viewport. */
static void vulkan_render_overlay(vk_t *vk, unsigned width,
      unsigned height)
{
   int i;
   struct video_viewport vp;
   if (!vk)
      return;

   /* Save the current viewport; restored at the end. */
   vp = vk->vp;
   vulkan_set_viewport(vk, width, height,
         ((vk->flags & VK_FLAG_OVERLAY_FULLSCREEN) > 0),
         false);

   for (i = 0; i < (int)vk->overlay.count; i++)
   {
      struct vk_draw_triangles call;
      struct vk_buffer_range range;

      /* Grab VBO space for this image's 4 pre-computed vertices;
       * bail out of the loop if the chain cannot allocate. */
      if (!vulkan_buffer_chain_alloc(vk->context, &vk->chain->vbo,
               4 * sizeof(struct vk_vertex), &range))
         break;

      memcpy(range.data, &vk->overlay.vertex[i * 4],
            4 * sizeof(struct vk_vertex));

      call.vertices     = 4;
      call.uniform_size = sizeof(vk->mvp);
      call.uniform      = &vk->mvp;
      call.vbo          = &range;
      call.texture      = &vk->overlay.images[i];
      call.pipeline     = vk->display.pipelines[3]; /* Strip with blend */
      /* Use the mipmapped sampler only for textures built with mipmaps. */
      call.sampler      = (call.texture->flags & VK_TEX_FLAG_MIPMAP)
         ? vk->samplers.mipmap_linear : vk->samplers.linear;
      vulkan_draw_triangles(vk, &call);
   }

   /* Restore the viewport so we don't mess with recording. */
   vk->vp = vp;
}
static void vulkan_overlay_vertex_geom ( void * data , unsigned image ,
float x , float y ,
float w , float h )
{
2016-02-20 20:29:52 +01:00
struct vk_vertex * pv = NULL ;
vk_t * vk = ( vk_t * ) data ;
2016-02-16 20:24:00 +01:00
if ( ! vk )
return ;
2016-02-20 20:29:52 +01:00
pv = & vk - > overlay . vertex [ 4 * image ] ;
2016-02-16 20:24:00 +01:00
pv [ 0 ] . x = x ;
pv [ 0 ] . y = y ;
pv [ 1 ] . x = x ;
pv [ 1 ] . y = y + h ;
pv [ 2 ] . x = x + w ;
pv [ 2 ] . y = y ;
pv [ 3 ] . x = x + w ;
pv [ 3 ] . y = y + h ;
}
static void vulkan_overlay_tex_geom ( void * data , unsigned image ,
float x , float y ,
float w , float h )
{
2016-02-20 20:29:52 +01:00
struct vk_vertex * pv = NULL ;
vk_t * vk = ( vk_t * ) data ;
2016-02-16 20:24:00 +01:00
if ( ! vk )
return ;
2016-02-20 20:29:52 +01:00
pv = & vk - > overlay . vertex [ 4 * image ] ;
2016-02-16 20:24:00 +01:00
pv [ 0 ] . tex_x = x ;
pv [ 0 ] . tex_y = y ;
pv [ 1 ] . tex_x = x ;
pv [ 1 ] . tex_y = y + h ;
pv [ 2 ] . tex_x = x + w ;
pv [ 2 ] . tex_y = y ;
pv [ 3 ] . tex_x = x + w ;
pv [ 3 ] . tex_y = y + h ;
}
static bool vulkan_overlay_load ( void * data ,
const void * image_data , unsigned num_images )
{
2022-12-04 15:29:48 +01:00
int i ;
2023-08-16 04:18:55 +02:00
bool old_enabled = false ;
2017-12-11 23:55:31 -08:00
const struct texture_image * images =
2016-02-20 20:29:52 +01:00
( const struct texture_image * ) image_data ;
vk_t * vk = ( vk_t * ) data ;
2016-02-16 20:24:00 +01:00
static const struct vk_color white = {
1.0f , 1.0f , 1.0f , 1.0f ,
} ;
if ( ! vk )
return false ;
2016-05-11 10:10:30 +02:00
# ifdef HAVE_THREADS
2016-02-16 20:24:00 +01:00
slock_lock ( vk - > context - > queue_lock ) ;
2016-05-11 10:10:30 +02:00
# endif
2016-06-26 13:10:19 +02:00
vkQueueWaitIdle ( vk - > context - > queue ) ;
2016-05-11 10:10:30 +02:00
# ifdef HAVE_THREADS
2016-02-16 20:24:00 +01:00
slock_unlock ( vk - > context - > queue_lock ) ;
2016-05-11 10:10:30 +02:00
# endif
2023-08-16 04:18:55 +02:00
if ( vk - > flags & VK_FLAG_OVERLAY_ENABLE )
old_enabled = true ;
2016-02-16 20:24:00 +01:00
vulkan_overlay_free ( vk ) ;
2022-11-02 21:34:00 +01:00
if ( ! ( vk - > overlay . images = ( struct vk_texture * )
calloc ( num_images , sizeof ( * vk - > overlay . images ) ) ) )
2016-02-16 20:24:00 +01:00
goto error ;
2023-08-16 04:18:55 +02:00
vk - > overlay . count = num_images ;
2016-02-16 20:24:00 +01:00
2022-11-02 21:34:00 +01:00
if ( ! ( vk - > overlay . vertex = ( struct vk_vertex * )
calloc ( 4 * num_images , sizeof ( * vk - > overlay . vertex ) ) ) )
2016-02-16 20:24:00 +01:00
goto error ;
2022-12-22 21:36:32 +01:00
for ( i = 0 ; i < ( int ) num_images ; i + + )
2016-02-16 20:24:00 +01:00
{
2022-12-04 15:29:48 +01:00
int j ;
2016-02-16 20:24:00 +01:00
vk - > overlay . images [ i ] = vulkan_create_texture ( vk , NULL ,
images [ i ] . width , images [ i ] . height ,
VK_FORMAT_B8G8R8A8_UNORM , images [ i ] . pixels ,
NULL , VULKAN_TEXTURE_STATIC ) ;
vulkan_overlay_tex_geom ( vk , i , 0 , 0 , 1 , 1 ) ;
vulkan_overlay_vertex_geom ( vk , i , 0 , 0 , 1 , 1 ) ;
for ( j = 0 ; j < 4 ; j + + )
vk - > overlay . vertex [ 4 * i + j ] . color = white ;
}
2022-11-02 21:34:00 +01:00
if ( old_enabled )
vk - > flags | = VK_FLAG_OVERLAY_ENABLE ;
else
vk - > flags & = ~ VK_FLAG_OVERLAY_ENABLE ;
2016-02-16 20:24:00 +01:00
return true ;
error :
vulkan_overlay_free ( vk ) ;
return false ;
}
/* Overlay callback table handed to the frontend through
 * vulkan_get_overlay_interface(). Order is fixed by
 * video_overlay_interface_t. */
static const video_overlay_interface_t vulkan_overlay_interface = {
   vulkan_overlay_enable,
   vulkan_overlay_load,
   vulkan_overlay_tex_geom,
   vulkan_overlay_vertex_geom,
   vulkan_overlay_full_screen,
   vulkan_overlay_set_alpha,
};
static void vulkan_get_overlay_interface ( void * data ,
2020-07-27 13:16:14 +02:00
const video_overlay_interface_t * * iface ) { * iface = & vulkan_overlay_interface ; }
2016-02-16 20:24:00 +01:00
# endif
2020-02-17 21:28:42 +01:00
# ifdef HAVE_GFX_WIDGETS
2020-07-27 13:16:14 +02:00
/* Graphics widgets are always supported by the Vulkan driver. */
static bool vulkan_gfx_widgets_enabled(void *data)
{
   return true;
}
2019-02-07 00:15:32 +01:00
# endif
2020-07-27 13:39:02 +02:00
static bool vulkan_has_windowed ( void * data )
{
vk_t * vk = ( vk_t * ) data ;
if ( vk & & vk - > ctx_driver )
return vk - > ctx_driver - > has_windowed ;
return false ;
}
2020-07-27 13:46:55 +02:00
static bool vulkan_focus ( void * data )
{
vk_t * vk = ( vk_t * ) data ;
if ( vk & & vk - > ctx_driver & & vk - > ctx_driver - > has_focus )
return vk - > ctx_driver - > has_focus ( vk - > ctx_data ) ;
return true ;
}
2016-02-16 20:24:00 +01:00
/* Top-level libretro video driver descriptor for Vulkan.
 * Entry order is fixed by video_driver_t; optional callbacks that this
 * driver does not implement are NULL. */
video_driver_t video_vulkan = {
   vulkan_init,
   vulkan_frame,
   vulkan_set_nonblock_state,
   vulkan_alive,
   vulkan_focus,
   vulkan_suppress_screensaver,
   vulkan_has_windowed,
   vulkan_set_shader,
   vulkan_free,
   "vulkan", /* Driver ident string used for config matching. */
   vulkan_set_viewport,
   vulkan_set_rotation,
   vulkan_viewport_info,
   vulkan_read_viewport,
   NULL, /* read_frame_raw */
#ifdef HAVE_OVERLAY
   vulkan_get_overlay_interface,
#endif
   vulkan_get_poke_interface,
   NULL, /* wrap_type_to_enum */
#ifdef HAVE_GFX_WIDGETS
   vulkan_gfx_widgets_enabled
#endif
};