Mirror of https://github.com/libretro/RetroArch, synced 2025-03-20 19:21:27 +00:00
(Vulkan) Turn some of these functions into macros
This commit is contained in:
parent 25f682249f
commit 276e86a403
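This commit replaces several small Vulkan helper functions with header macros; call sites take over the guard checks the helpers used to perform internally. A minimal sketch of the resulting call-site pattern, using the staging-texture sync as an example (names as they appear in the diff below):

    /* Before: the helper validated the texture and flushed the mapped range itself. */
    vulkan_sync_texture_to_gpu(vk, staging);

    /* After: the caller guards explicitly, then the macro flushes the mapped range. */
    if (  staging->need_manual_cache_management &&
          staging->memory != VK_NULL_HANDLE)
       VULKAN_SYNC_TEXTURE_TO_GPU(vk->context->device, staging->memory);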
@@ -323,34 +323,6 @@ uint32_t vulkan_find_memory_type_fallback(
         device_reqs, host_reqs_second, 0);
}

void vulkan_transfer_image_ownership(VkCommandBuffer cmd,
      VkImage image, VkImageLayout layout,
      VkPipelineStageFlags src_stages,
      VkPipelineStageFlags dst_stages,
      uint32_t src_queue_family,
      uint32_t dst_queue_family)
{
   VkImageMemoryBarrier barrier;

   barrier.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
   barrier.pNext = NULL;
   barrier.srcAccessMask = 0;
   barrier.dstAccessMask = 0;
   barrier.oldLayout = layout;
   barrier.newLayout = layout;
   barrier.srcQueueFamilyIndex = src_queue_family;
   barrier.dstQueueFamilyIndex = dst_queue_family;
   barrier.image = image;
   barrier.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
   barrier.subresourceRange.baseMipLevel = 0;
   barrier.subresourceRange.levelCount = VK_REMAINING_MIP_LEVELS;
   barrier.subresourceRange.baseArrayLayer = 0;
   barrier.subresourceRange.layerCount = VK_REMAINING_ARRAY_LAYERS;

   vkCmdPipelineBarrier(cmd, src_stages, dst_stages,
         false, 0, NULL, 0, NULL, 1, &barrier);
}

void vulkan_copy_staging_to_dynamic(vk_t *vk, VkCommandBuffer cmd,
      struct vk_texture *dynamic,
      struct vk_texture *staging)
@@ -360,7 +332,9 @@ void vulkan_copy_staging_to_dynamic(vk_t *vk, VkCommandBuffer cmd,
   retro_assert(dynamic->type == VULKAN_TEXTURE_DYNAMIC);
   retro_assert(staging->type == VULKAN_TEXTURE_STAGING);

   vulkan_sync_texture_to_gpu(vk, staging);
   if (  staging->need_manual_cache_management &&
         staging->memory != VK_NULL_HANDLE)
      VULKAN_SYNC_TEXTURE_TO_GPU(vk->context->device, staging->memory);

   /* We don't have to sync against previous TRANSFER,
    * since we observed the completion by fences.
@@ -449,34 +423,6 @@ static void vulkan_track_dealloc(VkImage image)
}
#endif

void vulkan_sync_texture_to_gpu(vk_t *vk, const struct vk_texture *tex)
{
   VkMappedMemoryRange range;
   if (!tex || !tex->need_manual_cache_management || tex->memory == VK_NULL_HANDLE)
      return;

   range.sType = VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE;
   range.pNext = NULL;
   range.memory = tex->memory;
   range.offset = 0;
   range.size = VK_WHOLE_SIZE;
   vkFlushMappedMemoryRanges(vk->context->device, 1, &range);
}

void vulkan_sync_texture_to_cpu(vk_t *vk, const struct vk_texture *tex)
{
   VkMappedMemoryRange range;
   if (!tex || !tex->need_manual_cache_management || tex->memory == VK_NULL_HANDLE)
      return;

   range.sType = VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE;
   range.pNext = NULL;
   range.memory = tex->memory;
   range.offset = 0;
   range.size = VK_WHOLE_SIZE;
   vkInvalidateMappedMemoryRanges(vk->context->device, 1, &range);
}

static unsigned vulkan_num_miplevels(unsigned width, unsigned height)
{
   unsigned size = MAX(width, height);
@@ -828,10 +774,10 @@ struct vk_texture vulkan_create_texture(vk_t *vk,
   tex.size = layout.size;
   tex.layout = info.initialLayout;

   tex.width = width;
   tex.height = height;
   tex.format = format;
   tex.type = type;
   tex.width = width;
   tex.height = height;
   tex.format = format;
   tex.type = type;

   if (initial)
   {
@@ -854,7 +800,9 @@ struct vk_texture vulkan_create_texture(vk_t *vk,
         for (y = 0; y < tex.height; y++, dst += tex.stride, src += stride)
            memcpy(dst, src, width * bpp);

         vulkan_sync_texture_to_gpu(vk, &tex);
         if (  tex.need_manual_cache_management &&
               tex.memory != VK_NULL_HANDLE)
            VULKAN_SYNC_TEXTURE_TO_GPU(vk->context->device, tex.memory);
         vkUnmapMemory(device, tex.memory);
      }
      break;

@@ -261,12 +261,6 @@ struct vk_buffer_chain vulkan_buffer_chain_init(
      VkDeviceSize alignment,
      VkBufferUsageFlags usage);

#define VK_BUFFER_CHAIN_DISCARD(chain) \
{ \
   chain->current = chain->head; \
   chain->offset = 0; \
}

bool vulkan_buffer_chain_alloc(const struct vulkan_context *context,
      struct vk_buffer_chain *chain, size_t size,
      struct vk_buffer_range *range);
@@ -450,6 +444,87 @@ typedef struct vk
   void *filter_chain;
} vk_t;

#define VK_BUFFER_CHAIN_DISCARD(chain) \
{ \
   chain->current = chain->head; \
   chain->offset = 0; \
}

#define VULKAN_SYNC_TEXTURE_TO_GPU(device, tex_memory) \
{ \
   VkMappedMemoryRange range; \
   range.sType = VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE; \
   range.pNext = NULL; \
   range.memory = tex_memory; \
   range.offset = 0; \
   range.size = VK_WHOLE_SIZE; \
   vkFlushMappedMemoryRanges(device, 1, &range); \
}

#define VULKAN_SYNC_TEXTURE_TO_CPU(device, tex_memory) \
{ \
   VkMappedMemoryRange range; \
   range.sType = VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE; \
   range.pNext = NULL; \
   range.memory = tex_memory; \
   range.offset = 0; \
   range.size = VK_WHOLE_SIZE; \
   vkInvalidateMappedMemoryRanges(device, 1, &range); \
}

#define VULKAN_TRANSFER_IMAGE_OWNERSHIP(cmd, img, layout, src_stages, dst_stages, src_queue_family, dst_queue_family) \
{ \
   VkImageMemoryBarrier barrier; \
   barrier.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER; \
   barrier.pNext = NULL; \
   barrier.srcAccessMask = 0; \
   barrier.dstAccessMask = 0; \
   barrier.oldLayout = layout; \
   barrier.newLayout = layout; \
   barrier.srcQueueFamilyIndex = src_queue_family; \
   barrier.dstQueueFamilyIndex = dst_queue_family; \
   barrier.image = img; \
   barrier.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT; \
   barrier.subresourceRange.baseMipLevel = 0; \
   barrier.subresourceRange.levelCount = VK_REMAINING_MIP_LEVELS; \
   barrier.subresourceRange.baseArrayLayer = 0; \
   barrier.subresourceRange.layerCount = VK_REMAINING_ARRAY_LAYERS; \
   vkCmdPipelineBarrier(cmd, src_stages, dst_stages, false, 0, NULL, 0, NULL, 1, &barrier); \
}

#define VULKAN_IMAGE_LAYOUT_TRANSITION_LEVELS(cmd, img, levels, old_layout, new_layout, src_access, dst_access, src_stages, dst_stages) \
{ \
   VkImageMemoryBarrier barrier; \
   barrier.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER; \
   barrier.pNext = NULL; \
   barrier.srcAccessMask = src_access; \
   barrier.dstAccessMask = dst_access; \
   barrier.oldLayout = old_layout; \
   barrier.newLayout = new_layout; \
   barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED; \
   barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED; \
   barrier.image = img; \
   barrier.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT; \
   barrier.subresourceRange.baseMipLevel = 0; \
   barrier.subresourceRange.levelCount = levels; \
   barrier.subresourceRange.baseArrayLayer = 0; \
   barrier.subresourceRange.layerCount = VK_REMAINING_ARRAY_LAYERS; \
   vkCmdPipelineBarrier(cmd, src_stages, dst_stages, 0, 0, NULL, 0, NULL, 1, &barrier); \
}

#define VULKAN_IMAGE_LAYOUT_TRANSITION(cmd, img, old_layout, new_layout, src_access, dst_access, src_stages, dst_stages) VULKAN_IMAGE_LAYOUT_TRANSITION_LEVELS(cmd, img, VK_REMAINING_MIP_LEVELS, old_layout, new_layout, src_access, dst_access, src_stages, dst_stages)

#define VK_DESCRIPTOR_MANAGER_RESTART(manager) \
{ \
   manager->current = manager->head; \
   manager->count = 0; \
}

#define VK_MAP_PERSISTENT_TEXTURE(device, texture) \
{ \
   vkMapMemory(device, texture->memory, texture->offset, texture->size, 0, &texture->mapped); \
}

uint32_t vulkan_find_memory_type(
      const VkPhysicalDeviceMemoryProperties *mem_props,
      uint32_t device_reqs, uint32_t host_reqs);
@@ -466,18 +541,8 @@ struct vk_texture vulkan_create_texture(vk_t *vk,
      const void *initial, const VkComponentMapping *swizzle,
      enum vk_texture_type type);

void vulkan_sync_texture_to_gpu(vk_t *vk, const struct vk_texture *tex);
void vulkan_sync_texture_to_cpu(vk_t *vk, const struct vk_texture *tex);

void vulkan_transition_texture(vk_t *vk, VkCommandBuffer cmd, struct vk_texture *texture);

void vulkan_transfer_image_ownership(VkCommandBuffer cmd,
      VkImage image, VkImageLayout layout,
      VkPipelineStageFlags src_stages,
      VkPipelineStageFlags dst_stages,
      uint32_t src_queue_family,
      uint32_t dst_queue_family);

void vulkan_destroy_texture(
      VkDevice device,
      struct vk_texture *tex);
@@ -494,44 +559,6 @@ void vulkan_draw_quad(vk_t *vk, const struct vk_draw_quad *quad);
 */
void vulkan_draw_triangles(vk_t *vk, const struct vk_draw_triangles *call);

#define VULKAN_IMAGE_LAYOUT_TRANSITION(cmd, img, old_layout, new_layout, srcAccess, dstAccess, srcStages, dstStages) \
{ \
   VkImageMemoryBarrier barrier; \
   barrier.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER; \
   barrier.pNext = NULL; \
   barrier.srcAccessMask = srcAccess; \
   barrier.dstAccessMask = dstAccess; \
   barrier.oldLayout = old_layout; \
   barrier.newLayout = new_layout; \
   barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED; \
   barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED; \
   barrier.image = img; \
   barrier.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT; \
   barrier.subresourceRange.baseMipLevel = 0; \
   barrier.subresourceRange.levelCount = VK_REMAINING_MIP_LEVELS; \
   barrier.subresourceRange.layerCount = VK_REMAINING_ARRAY_LAYERS; \
   vkCmdPipelineBarrier(cmd, srcStages, dstStages, 0, 0, NULL, 0, NULL, 1, &barrier); \
}

#define VULKAN_IMAGE_LAYOUT_TRANSITION_LEVELS(cmd, img, levels, old_layout, new_layout, src_access, dst_access, src_stages, dst_stages) \
{ \
   VkImageMemoryBarrier barrier; \
   barrier.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER; \
   barrier.pNext = NULL; \
   barrier.srcAccessMask = src_access; \
   barrier.dstAccessMask = dst_access; \
   barrier.oldLayout = old_layout; \
   barrier.newLayout = new_layout; \
   barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED; \
   barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED; \
   barrier.image = img; \
   barrier.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT; \
   barrier.subresourceRange.baseMipLevel = 0; \
   barrier.subresourceRange.levelCount = levels; \
   barrier.subresourceRange.layerCount = VK_REMAINING_ARRAY_LAYERS; \
   vkCmdPipelineBarrier(cmd, src_stages, dst_stages, 0, 0, NULL, 0, NULL, 1, &barrier); \
}

static INLINE unsigned vulkan_format_to_bpp(VkFormat format)
{
   switch (format)
@@ -590,17 +617,6 @@ VkDescriptorSet vulkan_descriptor_manager_alloc(
      VkDevice device,
      struct vk_descriptor_manager *manager);

#define VK_DESCRIPTOR_MANAGER_RESTART(manager) \
{ \
   manager->current = manager->head; \
   manager->count = 0; \
}

#define VK_MAP_PERSISTENT_TEXTURE(device, texture) \
{ \
   vkMapMemory(device, texture->memory, texture->offset, texture->size, 0, &texture->mapped); \
}

struct vk_descriptor_manager vulkan_create_descriptor_manager(
      VkDevice device,
      const VkDescriptorPoolSize *sizes, unsigned num_sizes,

@@ -1789,7 +1789,7 @@ static bool vulkan_frame(void *data, const void *frame,
      retro_assert(vk->hw.image);

      /* Acquire ownership of image from other queue family. */
      vulkan_transfer_image_ownership(vk->cmd,
      VULKAN_TRANSFER_IMAGE_OWNERSHIP(vk->cmd,
            vk->hw.image->create_info.image,
            vk->hw.image->image_layout,
            /* Create a dependency chain from semaphore wait. */
@@ -1847,8 +1847,10 @@ static bool vulkan_frame(void *data, const void *frame,
            vulkan_copy_staging_to_dynamic(vk, vk->cmd,
                  &chain->texture_optimal, &chain->texture);
         }
         else
            vulkan_sync_texture_to_gpu(vk, &chain->texture);
         else if (chain->texture.need_manual_cache_management
               && chain->texture.memory != VK_NULL_HANDLE)
            VULKAN_SYNC_TEXTURE_TO_GPU(vk->context->device,
                  chain->texture.memory);

         vk->last_valid_index = frame_index;
      }
@@ -2128,7 +2130,7 @@ static bool vulkan_frame(void *data, const void *frame,
      retro_assert(vk->hw.image);

      /* Release ownership of image back to other queue family. */
      vulkan_transfer_image_ownership(vk->cmd,
      VULKAN_TRANSFER_IMAGE_OWNERSHIP(vk->cmd,
            vk->hw.image->create_info.image,
            vk->hw.image->image_layout,
            VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT,
@@ -2425,8 +2427,9 @@ static void vulkan_set_texture_frame(void *data,
            NULL, rgb32 ? NULL : &br_swizzle,
            VULKAN_TEXTURE_DYNAMIC);
   }
   else
      vulkan_sync_texture_to_gpu(vk, texture);
   else if (texture->need_manual_cache_management
         && texture->memory != VK_NULL_HANDLE)
      VULKAN_SYNC_TEXTURE_TO_GPU(vk->context->device, texture->memory);

   vkUnmapMemory(vk->context->device, texture->memory);
   vk->menu.dirty[index] = true;
@@ -2626,7 +2629,9 @@ static bool vulkan_read_viewport(void *data, uint8_t *buffer, bool is_idle)
      vkMapMemory(vk->context->device, staging->memory,
            staging->offset, staging->size, 0, (void**)&src);

      vulkan_sync_texture_to_cpu(vk, staging);
      if (staging->need_manual_cache_management
            && staging->memory != VK_NULL_HANDLE)
         VULKAN_SYNC_TEXTURE_TO_CPU(vk->context->device, staging->memory);

      ctx->in_stride = staging->stride;
      ctx->out_stride = -(int)vk->vp.width * 3;
@@ -2660,7 +2665,9 @@ static bool vulkan_read_viewport(void *data, uint8_t *buffer, bool is_idle)
      VK_MAP_PERSISTENT_TEXTURE(vk->context->device, staging);
   }

   vulkan_sync_texture_to_cpu(vk, staging);
   if (staging->need_manual_cache_management
         && staging->memory != VK_NULL_HANDLE)
      VULKAN_SYNC_TEXTURE_TO_CPU(vk->context->device, staging->memory);

   {
      unsigned x, y;
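The queue-family ownership transfer follows the same pattern: the old vulkan_transfer_image_ownership() call becomes the VULKAN_TRANSFER_IMAGE_OWNERSHIP macro with the same argument order. Below is a minimal usage sketch for the acquire side; the stage flags and queue-family variables are illustrative placeholders, not values taken from this diff:

    /* Acquire ownership of the core's image from the other queue family.
     * Stage flags and queue-family indices here are placeholders. */
    VULKAN_TRANSFER_IMAGE_OWNERSHIP(vk->cmd,
          vk->hw.image->create_info.image,
          vk->hw.image->image_layout,
          VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT,     /* src_stages (placeholder) */
          VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, /* dst_stages (placeholder) */
          core_queue_family,                     /* src_queue_family (placeholder) */
          frontend_queue_family);                /* dst_queue_family (placeholder) */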