Add: VK_KHR_map_memory2

commit 20e255b4e6
parent db8512a64f
@ -330,6 +330,7 @@ In addition to core *Vulkan* functionality, **MoltenVK** also supports the foll
 - `VK_KHR_maintenance1`
 - `VK_KHR_maintenance2`
 - `VK_KHR_maintenance3`
+- `VK_KHR_map_memory2`
 - `VK_KHR_multiview`
 - `VK_KHR_portability_subset`
 - `VK_KHR_push_descriptor`
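
For reference, a minimal application-side sketch of the entry points this extension exposes (not part of the commit; `device` and `deviceMemory` are assumed to be a valid `VkDevice` and a host-visible `VkDeviceMemory`, with `VK_KHR_map_memory2` enabled on the device):

    // Map the whole allocation through the struct-based entry point.
    VkMemoryMapInfoKHR mapInfo = {};
    mapInfo.sType  = VK_STRUCTURE_TYPE_MEMORY_MAP_INFO_KHR;
    mapInfo.memory = deviceMemory;      // assumed: host-visible allocation
    mapInfo.offset = 0;
    mapInfo.size   = VK_WHOLE_SIZE;

    void* pData = nullptr;
    if (vkMapMemory2KHR(device, &mapInfo, &pData) == VK_SUCCESS) {
        // ... read or write the mapped range through pData ...

        VkMemoryUnmapInfoKHR unmapInfo = {};
        unmapInfo.sType  = VK_STRUCTURE_TYPE_MEMORY_UNMAP_INFO_KHR;
        unmapInfo.memory = deviceMemory;
        vkUnmapMemory2KHR(device, &unmapInfo);
    }
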
@ -1 +1 @@
-65ad768d8603671fc1085fe115019e72a595ced8
+fa204df59c6caea6b9be3cf0754a88cd89056a87
@ -78,10 +78,10 @@ public:
     * Maps the memory address at the specified offset from the start of this memory allocation,
     * and returns the address in the specified data reference.
     */
-    VkResult map(VkDeviceSize offset, VkDeviceSize size, VkMemoryMapFlags flags, void** ppData);
+    VkResult map(const VkMemoryMapInfoKHR* mapInfo, void** ppData);
 
     /** Unmaps a previously mapped memory range. */
-    void unmap();
+    VkResult unmap(const VkMemoryUnmapInfoKHR* unmapInfo);
 
     /**
     * If this device memory is currently mapped to host memory, returns the range within
@ -36,8 +36,7 @@ void MVKDeviceMemory::propagateDebugName() {
     setLabelIfNotNil(_mtlBuffer, _debugName);
 }
 
-VkResult MVKDeviceMemory::map(VkDeviceSize offset, VkDeviceSize size, VkMemoryMapFlags flags, void** ppData) {
-
+VkResult MVKDeviceMemory::map(const VkMemoryMapInfoKHR* pMemoryMapInfo, void** ppData) {
     if ( !isMemoryHostAccessible() ) {
         return reportError(VK_ERROR_MEMORY_MAP_FAILED, "Private GPU-only memory cannot be mapped to host memory.");
     }
@ -50,25 +49,23 @@ VkResult MVKDeviceMemory::map(VkDeviceSize offset, VkDeviceSize size, VkMemoryMa
         return reportError(VK_ERROR_OUT_OF_HOST_MEMORY, "Could not allocate %llu bytes of host-accessible device memory.", _allocationSize);
     }
 
-    _mappedRange.offset = offset;
-    _mappedRange.size = adjustMemorySize(size, offset);
+    _mappedRange.offset = pMemoryMapInfo->offset;
+    _mappedRange.size = adjustMemorySize(pMemoryMapInfo->size, pMemoryMapInfo->offset);
 
-    *ppData = (void*)((uintptr_t)_pMemory + offset);
+    *ppData = (void*)((uintptr_t)_pMemory + pMemoryMapInfo->offset);
 
     // Coherent memory does not require flushing by app, so we must flush now
     // to support Metal textures that actually reside in non-coherent memory.
     if (mvkIsAnyFlagEnabled(_vkMemPropFlags, VK_MEMORY_PROPERTY_HOST_COHERENT_BIT)) {
-        pullFromDevice(offset, size);
+        pullFromDevice(pMemoryMapInfo->offset, pMemoryMapInfo->size);
     }
 
     return VK_SUCCESS;
 }
 
-void MVKDeviceMemory::unmap() {
-
+VkResult MVKDeviceMemory::unmap(const VkMemoryUnmapInfoKHR* pUnmapMemoryInfo) {
     if ( !isMapped() ) {
-        reportError(VK_ERROR_MEMORY_MAP_FAILED, "Memory is not mapped. Call vkMapMemory() first.");
-        return;
+        return reportError(VK_ERROR_MEMORY_MAP_FAILED, "Memory is not mapped. Call vkMapMemory() first.");
     }
 
     // Coherent memory does not require flushing by app, so we must flush now
@ -79,6 +76,8 @@ void MVKDeviceMemory::unmap() {
 
     _mappedRange.offset = 0;
     _mappedRange.size = 0;
+
+    return VK_SUCCESS;
 }
 
 VkResult MVKDeviceMemory::flushToDevice(VkDeviceSize offset, VkDeviceSize size) {
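
As the comments in map() and unmap() above note, MoltenVK flushes and pulls even for memory it advertises as HOST_COHERENT, because the backing Metal textures may live in non-coherent storage. On the application side, the usual Vulkan rules still apply to memory that is not HOST_COHERENT: host writes must be flushed and device writes invalidated explicitly. A sketch of that standard pattern, independent of this commit (`device`, `deviceMemory`, `mappedOffset`, and `mappedSize` are assumed):

    // Make host writes to a mapped non-coherent range visible to the device.
    VkMappedMemoryRange range = {};
    range.sType  = VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE;
    range.memory = deviceMemory;
    range.offset = mappedOffset;    // must respect nonCoherentAtomSize alignment
    range.size   = mappedSize;      // or VK_WHOLE_SIZE
    vkFlushMappedMemoryRanges(device, 1, &range);

    // Conversely, before reading device writes on the host:
    vkInvalidateMappedMemoryRanges(device, 1, &range);
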
@ -707,6 +707,8 @@ void MVKInstance::initProcAddrs() {
     // Device extension functions:
     ADD_DVC_EXT_ENTRY_POINT(vkCmdPushDescriptorSetKHR, KHR_PUSH_DESCRIPTOR);
     ADD_DVC_EXT2_ENTRY_POINT(vkCmdPushDescriptorSetWithTemplateKHR, KHR_PUSH_DESCRIPTOR, KHR_DESCRIPTOR_UPDATE_TEMPLATE);
+    ADD_DVC_EXT_ENTRY_POINT(vkMapMemory2KHR, KHR_MAP_MEMORY_2);
+    ADD_DVC_EXT_ENTRY_POINT(vkUnmapMemory2KHR, KHR_MAP_MEMORY_2);
     ADD_DVC_EXT_ENTRY_POINT(vkCreateSwapchainKHR, KHR_SWAPCHAIN);
     ADD_DVC_EXT_ENTRY_POINT(vkDestroySwapchainKHR, KHR_SWAPCHAIN);
     ADD_DVC_EXT_ENTRY_POINT(vkGetSwapchainImagesKHR, KHR_SWAPCHAIN);
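
These entries make the new functions resolvable through vkGetDeviceProcAddr once the extension is enabled. A minimal application-side sketch (assumes "VK_KHR_map_memory2" was included in ppEnabledExtensionNames at device creation):

    // Resolve the VK_KHR_map_memory2 entry points from the device.
    PFN_vkMapMemory2KHR pfnMapMemory2KHR =
        (PFN_vkMapMemory2KHR)vkGetDeviceProcAddr(device, "vkMapMemory2KHR");
    PFN_vkUnmapMemory2KHR pfnUnmapMemory2KHR =
        (PFN_vkUnmapMemory2KHR)vkGetDeviceProcAddr(device, "vkUnmapMemory2KHR");
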
@ -70,6 +70,7 @@ MVK_EXTENSION(KHR_image_format_list, KHR_IMAGE_FORMAT_LIST,
 MVK_EXTENSION(KHR_maintenance1, KHR_MAINTENANCE1, DEVICE, 10.11, 8.0)
 MVK_EXTENSION(KHR_maintenance2, KHR_MAINTENANCE2, DEVICE, 10.11, 8.0)
 MVK_EXTENSION(KHR_maintenance3, KHR_MAINTENANCE3, DEVICE, 10.11, 8.0)
+MVK_EXTENSION(KHR_map_memory2, KHR_MAP_MEMORY_2, DEVICE, 10.11, 8.0)
 MVK_EXTENSION(KHR_multiview, KHR_MULTIVIEW, DEVICE, 10.11, 8.0)
 MVK_EXTENSION(KHR_portability_subset, KHR_PORTABILITY_SUBSET, DEVICE, 10.11, 8.0)
 MVK_EXTENSION(KHR_push_descriptor, KHR_PUSH_DESCRIPTOR, DEVICE, 10.11, 8.0)
@ -492,8 +492,16 @@ MVK_PUBLIC_VULKAN_SYMBOL VkResult vkMapMemory(
     void** ppData) {
 
     MVKTraceVulkanCallStart();
+    VkMemoryMapInfoKHR mapInfo = {};
+    mapInfo.sType = VK_STRUCTURE_TYPE_MEMORY_MAP_INFO_KHR;
+    mapInfo.pNext = nullptr;
+    mapInfo.flags = flags;
+    mapInfo.memory = mem;
+    mapInfo.offset = offset;
+    mapInfo.size = size;
+
     MVKDeviceMemory* mvkMem = (MVKDeviceMemory*)mem;
-    VkResult rslt = mvkMem->map(offset, size, flags, ppData);
+    VkResult rslt = mvkMem->map(&mapInfo, ppData);
     MVKTraceVulkanCallEnd();
     return rslt;
 }
@ -503,8 +511,13 @@ MVK_PUBLIC_VULKAN_SYMBOL void vkUnmapMemory(
     VkDeviceMemory mem) {
 
     MVKTraceVulkanCallStart();
+    VkMemoryUnmapInfoKHR unmapInfo = {};
+    unmapInfo.sType = VK_STRUCTURE_TYPE_MEMORY_UNMAP_INFO_KHR;
+    unmapInfo.pNext = nullptr;
+    unmapInfo.flags = 0;
+    unmapInfo.memory = mem;
     MVKDeviceMemory* mvkMem = (MVKDeviceMemory*)mem;
-    mvkMem->unmap();
+    mvkMem->unmap(&unmapInfo);
     MVKTraceVulkanCallEnd();
 }
 
@ -2727,6 +2740,33 @@ MVK_PUBLIC_VULKAN_CORE_ALIAS(vkTrimCommandPool, KHR);
 MVK_PUBLIC_VULKAN_CORE_ALIAS(vkGetDescriptorSetLayoutSupport, KHR);
 
 
+#pragma mark -
+#pragma mark VK_KHR_map_memory2 extension
+
+MVK_PUBLIC_VULKAN_SYMBOL VkResult vkMapMemory2KHR(
+    VkDevice device,
+    const VkMemoryMapInfoKHR* pMemoryMapInfo,
+    void** ppData) {
+
+    MVKTraceVulkanCallStart();
+    MVKDeviceMemory* mvkMem = (MVKDeviceMemory*)pMemoryMapInfo->memory;
+    VkResult rslt = mvkMem->map(pMemoryMapInfo, ppData);
+    MVKTraceVulkanCallEnd();
+    return rslt;
+}
+
+MVK_PUBLIC_VULKAN_SYMBOL VkResult vkUnmapMemory2KHR(
+    VkDevice device,
+    const VkMemoryUnmapInfoKHR* pMemoryUnmapInfo) {
+
+    MVKTraceVulkanCallStart();
+    MVKDeviceMemory* mvkMem = (MVKDeviceMemory*)pMemoryUnmapInfo->memory;
+    VkResult rslt = mvkMem->unmap(pMemoryUnmapInfo);
+    MVKTraceVulkanCallEnd();
+    return rslt;
+}
+
+
 #pragma mark -
 #pragma mark VK_KHR_push_descriptor extension
 