author     Fredrik Höglund <fredrik@kde.org>  2017-03-29 19:19:47 +0200
committer  Fredrik Höglund <fredrik@kde.org>  2017-03-30 00:12:55 +0200
commit     134df65ccd764407f50ac2063bd2d5ca2a44711e
tree       b6375ad77a71a33db02476e6d187a7cd327c5e48
parent     878cd1d7df939b041671c4a16515da2f880bec81
radv: implement VK_KHR_descriptor_update_template (radv-push-descriptor)

All destination offsets and strides are precomputed by vkCreateDescriptorUpdateTemplateKHR and stored in the template, so vkUpdateDescriptorSetWithTemplateKHR doesn't need to look at the binding layout.

Signed-off-by: Fredrik Höglund <fredrik@kde.org>
-rw-r--r--  src/amd/vulkan/radv_cmd_buffer.c        |  26
-rw-r--r--  src/amd/vulkan/radv_descriptor_set.c    | 133
-rw-r--r--  src/amd/vulkan/radv_descriptor_set.h    |  28
-rw-r--r--  src/amd/vulkan/radv_device.c            |   4
-rw-r--r--  src/amd/vulkan/radv_entrypoints_gen.py  |   1
-rw-r--r--  src/amd/vulkan/radv_private.h           |   1
6 files changed, 193 insertions, 0 deletions
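For context, a minimal application-side sketch of how the extension implemented below is driven, assuming the struct names and fields from the VK_KHR_descriptor_update_template spec; MyUpdateData, update_with_template and the handle parameters are hypothetical, and a real application would fetch the vkCreate/Update/DestroyDescriptorUpdateTemplateKHR entry points with vkGetDeviceProcAddr rather than call them directly.

/* Hypothetical sketch: binding 0 of setLayout is assumed to be a single
 * VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, and all handles are assumed valid. */
#include <vulkan/vulkan.h>
#include <stddef.h>

struct MyUpdateData {
	VkDescriptorBufferInfo ubo;   /* read by the driver at the offset declared below */
};

static void update_with_template(VkDevice device,
                                 VkDescriptorSetLayout setLayout,
                                 VkDescriptorSet descSet,
                                 VkBuffer buffer)
{
	const VkDescriptorUpdateTemplateEntryKHR entry = {
		.dstBinding      = 0,
		.dstArrayElement = 0,
		.descriptorCount = 1,
		.descriptorType  = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER,
		.offset          = offsetof(struct MyUpdateData, ubo),
		.stride          = sizeof(VkDescriptorBufferInfo),
	};

	const VkDescriptorUpdateTemplateCreateInfoKHR create_info = {
		.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_CREATE_INFO_KHR,
		.descriptorUpdateEntryCount = 1,
		.pDescriptorUpdateEntries   = &entry,
		.templateType        = VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET_KHR,
		.descriptorSetLayout = setLayout,
	};

	/* radv_CreateDescriptorUpdateTemplateKHR below precomputes the
	 * dst_offset/dst_stride for each entry at this point. */
	VkDescriptorUpdateTemplateKHR templ;
	vkCreateDescriptorUpdateTemplateKHR(device, &create_info, NULL, &templ);

	const struct MyUpdateData data = {
		.ubo = { .buffer = buffer, .offset = 0, .range = VK_WHOLE_SIZE },
	};

	/* radv_UpdateDescriptorSetWithTemplateKHR only walks the precomputed
	 * entries, so it never consults the binding layout at update time. */
	vkUpdateDescriptorSetWithTemplateKHR(device, descSet, templ, &data);

	vkDestroyDescriptorUpdateTemplateKHR(device, templ, NULL);
}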
diff --git a/src/amd/vulkan/radv_cmd_buffer.c b/src/amd/vulkan/radv_cmd_buffer.c
index 22194e2ad5..58f4ee5ccd 100644
--- a/src/amd/vulkan/radv_cmd_buffer.c
+++ b/src/amd/vulkan/radv_cmd_buffer.c
@@ -1788,6 +1788,32 @@ void radv_CmdPushDescriptorSetKHR(
assert(cmd_buffer->cs->cdw <= cdw_max);
}
+void radv_CmdPushDescriptorSetWithTemplateKHR(
+ VkCommandBuffer commandBuffer,
+ VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate,
+ VkPipelineLayout _layout,
+ uint32_t set,
+ const void* pData)
+{
+ RADV_FROM_HANDLE(radv_cmd_buffer, cmd_buffer, commandBuffer);
+ RADV_FROM_HANDLE(radv_pipeline_layout, layout, _layout);
+ VkDescriptorSet desc_set;
+
+ MAYBE_UNUSED unsigned cdw_max = radeon_check_space(cmd_buffer->device->ws,
+ cmd_buffer->cs, MAX_SETS * 4 * 6);
+
+ radv_temp_descriptor_set_create(cmd_buffer->device, cmd_buffer,
+ radv_descriptor_set_layout_to_handle(layout->set[set].layout),
+ &desc_set);
+
+ radv_descriptor_set_from_handle(desc_set)->is_push_descriptor_set = true;
+
+ radv_UpdateDescriptorSetWithTemplateKHR(radv_device_to_handle(cmd_buffer->device), desc_set, descriptorUpdateTemplate, pData);
+ radv_bind_descriptor_set(cmd_buffer, radv_descriptor_set_from_handle(desc_set), set);
+
+ assert(cmd_buffer->cs->cdw <= cdw_max);
+}
+
void radv_CmdPushConstants(VkCommandBuffer commandBuffer,
VkPipelineLayout layout,
VkShaderStageFlags stageFlags,
diff --git a/src/amd/vulkan/radv_descriptor_set.c b/src/amd/vulkan/radv_descriptor_set.c
index c8fbe38849..7906002a55 100644
--- a/src/amd/vulkan/radv_descriptor_set.c
+++ b/src/amd/vulkan/radv_descriptor_set.c
@@ -711,3 +711,136 @@ void radv_UpdateDescriptorSets(
radv_update_descriptor_sets(device, VK_NULL_HANDLE, descriptorWriteCount, pDescriptorWrites,
descriptorCopyCount, pDescriptorCopies);
}
+
+VkResult radv_CreateDescriptorUpdateTemplateKHR(VkDevice _device,
+ const VkDescriptorUpdateTemplateCreateInfoKHR *pCreateInfo,
+ const VkAllocationCallbacks *pAllocator,
+ VkDescriptorUpdateTemplateKHR *pDescriptorUpdateTemplate)
+{
+ RADV_FROM_HANDLE(radv_device, device, _device);
+ RADV_FROM_HANDLE(radv_descriptor_set_layout, set_layout, pCreateInfo->descriptorSetLayout);
+ const uint32_t entry_count = pCreateInfo->descriptorUpdateEntryCount;
+ const size_t size = sizeof(struct radv_descriptor_update_template) +
+ sizeof(struct radv_descriptor_update_template_entry) * entry_count;
+ struct radv_descriptor_update_template *templ;
+ uint32_t i;
+
+ templ = vk_alloc2(&device->alloc, pAllocator, size, 8, VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
+ if (!templ)
+ return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
+
+ templ->entry_count = entry_count;
+
+ for (i = 0; i < entry_count; i++) {
+ const VkDescriptorUpdateTemplateEntryKHR *entry = &pCreateInfo->pDescriptorUpdateEntries[i];
+ const struct radv_descriptor_set_binding_layout *binding_layout =
+ set_layout->binding + entry->dstBinding;
+ const uint32_t buffer_offset = binding_layout->buffer_offset +
+ binding_layout->buffer_count * entry->dstArrayElement;
+ uint16_t dst_offset;
+ uint16_t dst_stride;
+
+ /* dst_offset is an offset into dynamic_descriptors when the descriptor
+ is dynamic, and an offset into mapped_ptr otherwise */
+ switch (entry->descriptorType) {
+ case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
+ case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
+ assert(pCreateInfo->templateType == VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET_KHR);
+ dst_offset = binding_layout->dynamic_offset_offset + entry->dstArrayElement;
+ dst_stride = 0; /* Not used */
+ break;
+ default:
+ dst_offset = binding_layout->offset / 4 + binding_layout->size * entry->dstArrayElement / 4;
+ dst_stride = binding_layout->size / 4;
+ break;
+ }
+
+ templ->entry[i].descriptor_type = entry->descriptorType;
+ templ->entry[i].descriptor_count = entry->descriptorCount;
+ templ->entry[i].src_offset = entry->offset;
+ templ->entry[i].src_stride = entry->stride;
+ templ->entry[i].dst_offset = dst_offset;
+ templ->entry[i].dst_stride = dst_stride;
+ templ->entry[i].buffer_offset = buffer_offset;
+ templ->entry[i].buffer_count = binding_layout->buffer_count;
+ templ->entry[i].has_sampler = !binding_layout->immutable_samplers;
+ }
+
+ *pDescriptorUpdateTemplate = radv_descriptor_update_template_to_handle(templ);
+ return VK_SUCCESS;
+}
+
+void radv_DestroyDescriptorUpdateTemplateKHR(VkDevice _device,
+ VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate,
+ const VkAllocationCallbacks *pAllocator)
+{
+ RADV_FROM_HANDLE(radv_device, device, _device);
+ RADV_FROM_HANDLE(radv_descriptor_update_template, templ, descriptorUpdateTemplate);
+
+ if (!templ)
+ return;
+
+ vk_free2(&device->alloc, pAllocator, templ);
+}
+
+void radv_UpdateDescriptorSetWithTemplateKHR(VkDevice _device,
+ VkDescriptorSet descriptorSet,
+ VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate,
+ const void *pData)
+{
+ RADV_FROM_HANDLE(radv_device, device, _device);
+ RADV_FROM_HANDLE(radv_descriptor_set, set, descriptorSet);
+ RADV_FROM_HANDLE(radv_descriptor_update_template, templ, descriptorUpdateTemplate);
+ uint32_t i;
+
+ for (i = 0; i < templ->entry_count; ++i) {
+ struct radeon_winsys_bo **buffer_list = set->descriptors + templ->entry[i].buffer_offset;
+ uint32_t *pDst = set->mapped_ptr + templ->entry[i].dst_offset;
+ const uint8_t *pSrc = ((const uint8_t *)pData) + templ->entry[i].src_offset;
+ uint32_t j;
+
+ for (j = 0; j < templ->entry[i].descriptor_count; ++j) {
+ switch (templ->entry[i].descriptor_type) {
+ case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
+ case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC: {
+ const unsigned idx = templ->entry[i].dst_offset + j;
+ write_dynamic_buffer_descriptor(device, set->dynamic_descriptors + idx,
+ buffer_list, (struct VkDescriptorBufferInfo *) pSrc);
+ break;
+ }
+ case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
+ case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
+ write_buffer_descriptor(device, pDst, buffer_list,
+ (struct VkDescriptorBufferInfo *) pSrc);
+ break;
+ case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
+ case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
+ write_texel_buffer_descriptor(device, pDst, buffer_list,
+ *(VkBufferView *) pSrc);
+ break;
+ case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
+ case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
+ case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
+ write_image_descriptor(device, pDst, buffer_list,
+ (struct VkDescriptorImageInfo *) pSrc);
+ break;
+ case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
+ write_combined_image_sampler_descriptor(device, pDst, buffer_list,
+ (struct VkDescriptorImageInfo *) pSrc,
+ templ->entry[i].has_sampler);
+ break;
+ case VK_DESCRIPTOR_TYPE_SAMPLER:
+ if (templ->entry[i].has_sampler)
+ write_sampler_descriptor(device, pDst,
+ (struct VkDescriptorImageInfo *) pSrc);
+ break;
+ default:
+ unreachable("unimplemented descriptor type");
+ break;
+ }
+ pSrc += templ->entry[i].src_stride;
+ pDst += templ->entry[i].dst_stride;
+ buffer_list += templ->entry[i].buffer_count;
+ }
+ }
+}
diff --git a/src/amd/vulkan/radv_descriptor_set.h b/src/amd/vulkan/radv_descriptor_set.h
index dae8442686..d87f2b498c 100644
--- a/src/amd/vulkan/radv_descriptor_set.h
+++ b/src/amd/vulkan/radv_descriptor_set.h
@@ -86,4 +86,32 @@ struct radv_pipeline_layout {
unsigned char sha1[20];
};
+struct radv_descriptor_update_template_entry {
+ VkDescriptorType descriptor_type;
+
+ /* The number of descriptors to update */
+ uint16_t descriptor_count;
+
+ /* Into mapped_ptr or dynamic_descriptors, in units of the respective array */
+ uint16_t dst_offset;
+
+ /* In dwords. Not valid/used for dynamic descriptors */
+ uint16_t dst_stride;
+
+ uint16_t buffer_offset;
+ uint16_t buffer_count;
+
+ /* Only valid for combined image samplers and samplers */
+ uint16_t has_sampler;
+
+ /* In bytes */
+ size_t src_offset;
+ size_t src_stride;
+};
+
+struct radv_descriptor_update_template {
+ uint32_t entry_count;
+ struct radv_descriptor_update_template_entry entry[0];
+};
+
#endif /* RADV_DESCRIPTOR_SET_H */
diff --git a/src/amd/vulkan/radv_device.c b/src/amd/vulkan/radv_device.c
index 40369025be..ada4b5eff0 100644
--- a/src/amd/vulkan/radv_device.c
+++ b/src/amd/vulkan/radv_device.c
@@ -92,6 +92,10 @@ static const VkExtensionProperties instance_extensions[] = {
static const VkExtensionProperties common_device_extensions[] = {
{
+ .extensionName = VK_KHR_DESCRIPTOR_UPDATE_TEMPLATE_EXTENSION_NAME,
+ .specVersion = 1,
+ },
+ {
.extensionName = VK_KHR_MAINTENANCE1_EXTENSION_NAME,
.specVersion = 1,
},
diff --git a/src/amd/vulkan/radv_entrypoints_gen.py b/src/amd/vulkan/radv_entrypoints_gen.py
index 64bf42eff0..e75a9d6aa8 100644
--- a/src/amd/vulkan/radv_entrypoints_gen.py
+++ b/src/amd/vulkan/radv_entrypoints_gen.py
@@ -30,6 +30,7 @@ max_api_version = 1.0
supported_extensions = [
'VK_AMD_draw_indirect_count',
'VK_NV_dedicated_allocation',
+ 'VK_KHR_descriptor_update_template',
'VK_KHR_get_physical_device_properties2',
'VK_KHR_maintenance1',
'VK_KHR_push_descriptor',
diff --git a/src/amd/vulkan/radv_private.h b/src/amd/vulkan/radv_private.h
index f41d1e4650..cfb231cddf 100644
--- a/src/amd/vulkan/radv_private.h
+++ b/src/amd/vulkan/radv_private.h
@@ -1383,6 +1383,7 @@ RADV_DEFINE_NONDISP_HANDLE_CASTS(radv_buffer_view, VkBufferView)
RADV_DEFINE_NONDISP_HANDLE_CASTS(radv_descriptor_pool, VkDescriptorPool)
RADV_DEFINE_NONDISP_HANDLE_CASTS(radv_descriptor_set, VkDescriptorSet)
RADV_DEFINE_NONDISP_HANDLE_CASTS(radv_descriptor_set_layout, VkDescriptorSetLayout)
+RADV_DEFINE_NONDISP_HANDLE_CASTS(radv_descriptor_update_template, VkDescriptorUpdateTemplateKHR)
RADV_DEFINE_NONDISP_HANDLE_CASTS(radv_device_memory, VkDeviceMemory)
RADV_DEFINE_NONDISP_HANDLE_CASTS(radv_fence, VkFence)
RADV_DEFINE_NONDISP_HANDLE_CASTS(radv_event, VkEvent)