Support for EXT_inline_uniform_block
This commit is contained in:
parent
c5b91b423d
commit
ddf108153d
@ -270,6 +270,7 @@ In addition to the core *Vulkan* API, **MoltenVK** also supports the following
|
||||
- `VK_EXT_debug_utils`
|
||||
- `VK_EXT_fragment_shader_interlock` *(requires Metal 2.0 and Raster Order Groups)*
|
||||
- `VK_EXT_host_query_reset`
|
||||
- `VK_EXT_inline_uniform_block`
|
||||
- `VK_EXT_memory_budget` *(requires Metal 2.0)*
|
||||
- `VK_EXT_metal_surface`
|
||||
- `VK_EXT_post_depth_coverage` *(iOS, requires GPU family 4)*
|
||||
|
@ -424,6 +424,9 @@ public:
|
||||
/** Binds the specified buffer for the specified shader stage. */
|
||||
void bindBuffer(MVKShaderStage stage, const MVKMTLBufferBinding& binding);
|
||||
|
||||
/** Binds the specified inline uniform block for the specified shader stage. */
|
||||
void bindInline(MVKShaderStage stage, const MVKMTLInlineBinding& binding);
|
||||
|
||||
/** Binds the specified texture for the specified shader stage. */
|
||||
void bindTexture(MVKShaderStage stage, const MVKMTLTextureBinding& binding);
|
||||
|
||||
@ -458,7 +461,8 @@ public:
|
||||
std::function<void(MVKCommandEncoder*, MVKMTLBufferBinding&)> bindBuffer,
|
||||
std::function<void(MVKCommandEncoder*, MVKMTLBufferBinding&, MVKVector<uint32_t>&)> bindImplicitBuffer,
|
||||
std::function<void(MVKCommandEncoder*, MVKMTLTextureBinding&)> bindTexture,
|
||||
std::function<void(MVKCommandEncoder*, MVKMTLSamplerStateBinding&)> bindSampler);
|
||||
std::function<void(MVKCommandEncoder*, MVKMTLSamplerStateBinding&)> bindSampler,
|
||||
std::function<void(MVKCommandEncoder*, MVKMTLInlineBinding&)> bindInline);
|
||||
|
||||
#pragma mark Construction
|
||||
|
||||
@ -474,6 +478,7 @@ protected:
|
||||
MVKVectorInline<MVKMTLBufferBinding, 8> bufferBindings;
|
||||
MVKVectorInline<MVKMTLTextureBinding, 8> textureBindings;
|
||||
MVKVectorInline<MVKMTLSamplerStateBinding, 8> samplerStateBindings;
|
||||
MVKVectorInline<MVKMTLInlineBinding, 8> inlineBindings;
|
||||
MVKVectorInline<uint32_t, 8> swizzleConstants;
|
||||
MVKVectorInline<uint32_t, 8> bufferSizes;
|
||||
MVKMTLBufferBinding swizzleBufferBinding;
|
||||
@ -482,6 +487,7 @@ protected:
|
||||
bool areBufferBindingsDirty = false;
|
||||
bool areTextureBindingsDirty = false;
|
||||
bool areSamplerStateBindingsDirty = false;
|
||||
bool areInlineBindingsDirty = false;
|
||||
|
||||
bool needsSwizzle = false;
|
||||
};
|
||||
@ -501,6 +507,9 @@ public:
|
||||
/** Binds the specified buffer. */
|
||||
void bindBuffer(const MVKMTLBufferBinding& binding);
|
||||
|
||||
/** Binds the specified inline uniform block. */
|
||||
void bindInline(const MVKMTLInlineBinding& binding);
|
||||
|
||||
/** Binds the specified texture. */
|
||||
void bindTexture(const MVKMTLTextureBinding& binding);
|
||||
|
||||
@ -526,6 +535,7 @@ protected:
|
||||
MVKVectorInline<MVKMTLBufferBinding, 4> _bufferBindings;
|
||||
MVKVectorInline<MVKMTLTextureBinding, 4> _textureBindings;
|
||||
MVKVectorInline<MVKMTLSamplerStateBinding, 4> _samplerStateBindings;
|
||||
MVKVectorInline<MVKMTLInlineBinding, 4> _inlineBindings;
|
||||
MVKVectorInline<uint32_t, 4> _swizzleConstants;
|
||||
MVKVectorInline<uint32_t, 4> _bufferSizes;
|
||||
MVKMTLBufferBinding _swizzleBufferBinding;
|
||||
@ -534,6 +544,7 @@ protected:
|
||||
bool _areBufferBindingsDirty = false;
|
||||
bool _areTextureBindingsDirty = false;
|
||||
bool _areSamplerStateBindingsDirty = false;
|
||||
bool _areInlineBindingsDirty = false;
|
||||
|
||||
bool _needsSwizzle = false;
|
||||
};
|
||||
|
@ -516,6 +516,10 @@ void MVKGraphicsResourcesCommandEncoderState::bindSamplerState(MVKShaderStage st
|
||||
bind(binding, _shaderStages[stage].samplerStateBindings, _shaderStages[stage].areSamplerStateBindingsDirty);
|
||||
}
|
||||
|
||||
/** Records an inline uniform block binding for the given graphics shader stage and marks it dirty. */
void MVKGraphicsResourcesCommandEncoderState::bindInline(MVKShaderStage stage, const MVKMTLInlineBinding& binding) {
	auto& stageRezs = _shaderStages[stage];
	bind(binding, stageRezs.inlineBindings, stageRezs.areInlineBindingsDirty);
}
|
||||
|
||||
void MVKGraphicsResourcesCommandEncoderState::bindSwizzleBuffer(const MVKShaderImplicitRezBinding& binding,
|
||||
bool needVertexSwizzleBuffer,
|
||||
bool needTessCtlSwizzleBuffer,
|
||||
@ -550,9 +554,11 @@ void MVKGraphicsResourcesCommandEncoderState::encodeBindings(MVKShaderStage stag
|
||||
std::function<void(MVKCommandEncoder*, MVKMTLBufferBinding&)> bindBuffer,
|
||||
std::function<void(MVKCommandEncoder*, MVKMTLBufferBinding&, MVKVector<uint32_t>&)> bindImplicitBuffer,
|
||||
std::function<void(MVKCommandEncoder*, MVKMTLTextureBinding&)> bindTexture,
|
||||
std::function<void(MVKCommandEncoder*, MVKMTLSamplerStateBinding&)> bindSampler) {
|
||||
std::function<void(MVKCommandEncoder*, MVKMTLSamplerStateBinding&)> bindSampler,
|
||||
std::function<void(MVKCommandEncoder*, MVKMTLInlineBinding&)> bindInline) {
|
||||
auto& shaderStage = _shaderStages[stage];
|
||||
encodeBinding<MVKMTLBufferBinding>(shaderStage.bufferBindings, shaderStage.areBufferBindingsDirty, bindBuffer);
|
||||
encodeBinding<MVKMTLInlineBinding>(shaderStage.inlineBindings, shaderStage.areInlineBindingsDirty, bindInline);
|
||||
|
||||
if (shaderStage.swizzleBufferBinding.isDirty) {
|
||||
|
||||
@ -585,6 +591,7 @@ void MVKGraphicsResourcesCommandEncoderState::markDirty() {
|
||||
MVKResourcesCommandEncoderState::markDirty(_shaderStages[i].bufferBindings, _shaderStages[i].areBufferBindingsDirty);
|
||||
MVKResourcesCommandEncoderState::markDirty(_shaderStages[i].textureBindings, _shaderStages[i].areTextureBindingsDirty);
|
||||
MVKResourcesCommandEncoderState::markDirty(_shaderStages[i].samplerStateBindings, _shaderStages[i].areSamplerStateBindingsDirty);
|
||||
MVKResourcesCommandEncoderState::markDirty(_shaderStages[i].inlineBindings, _shaderStages[i].areInlineBindingsDirty);
|
||||
}
|
||||
}
|
||||
|
||||
@ -614,6 +621,12 @@ void MVKGraphicsResourcesCommandEncoderState::encodeImpl(uint32_t stage) {
|
||||
[](MVKCommandEncoder* cmdEncoder, MVKMTLSamplerStateBinding& b)->void {
|
||||
[cmdEncoder->_mtlRenderEncoder setVertexSamplerState: b.mtlSamplerState
|
||||
atIndex: b.index];
|
||||
},
|
||||
[](MVKCommandEncoder* cmdEncoder, MVKMTLInlineBinding& b)->void {
|
||||
cmdEncoder->setVertexBytes(cmdEncoder->_mtlRenderEncoder,
|
||||
b.mtlBytes,
|
||||
b.size,
|
||||
b.index);
|
||||
});
|
||||
|
||||
}
|
||||
@ -638,6 +651,12 @@ void MVKGraphicsResourcesCommandEncoderState::encodeImpl(uint32_t stage) {
|
||||
[](MVKCommandEncoder* cmdEncoder, MVKMTLSamplerStateBinding& b)->void {
|
||||
[cmdEncoder->getMTLComputeEncoder(kMVKCommandUseTessellationControl) setSamplerState: b.mtlSamplerState
|
||||
atIndex: b.index];
|
||||
},
|
||||
[](MVKCommandEncoder* cmdEncoder, MVKMTLInlineBinding& b)->void {
|
||||
cmdEncoder->setComputeBytes(cmdEncoder->getMTLComputeEncoder(kMVKCommandUseTessellationControl),
|
||||
b.mtlBytes,
|
||||
b.size,
|
||||
b.index);
|
||||
});
|
||||
|
||||
}
|
||||
@ -662,6 +681,12 @@ void MVKGraphicsResourcesCommandEncoderState::encodeImpl(uint32_t stage) {
|
||||
[](MVKCommandEncoder* cmdEncoder, MVKMTLSamplerStateBinding& b)->void {
|
||||
[cmdEncoder->_mtlRenderEncoder setVertexSamplerState: b.mtlSamplerState
|
||||
atIndex: b.index];
|
||||
},
|
||||
[](MVKCommandEncoder* cmdEncoder, MVKMTLInlineBinding& b)->void {
|
||||
cmdEncoder->setVertexBytes(cmdEncoder->_mtlRenderEncoder,
|
||||
b.mtlBytes,
|
||||
b.size,
|
||||
b.index);
|
||||
});
|
||||
|
||||
}
|
||||
@ -686,6 +711,12 @@ void MVKGraphicsResourcesCommandEncoderState::encodeImpl(uint32_t stage) {
|
||||
[](MVKCommandEncoder* cmdEncoder, MVKMTLSamplerStateBinding& b)->void {
|
||||
[cmdEncoder->_mtlRenderEncoder setFragmentSamplerState: b.mtlSamplerState
|
||||
atIndex: b.index];
|
||||
},
|
||||
[](MVKCommandEncoder* cmdEncoder, MVKMTLInlineBinding& b)->void {
|
||||
cmdEncoder->setFragmentBytes(cmdEncoder->_mtlRenderEncoder,
|
||||
b.mtlBytes,
|
||||
b.size,
|
||||
b.index);
|
||||
});
|
||||
}
|
||||
}
|
||||
@ -695,12 +726,14 @@ void MVKGraphicsResourcesCommandEncoderState::resetImpl() {
|
||||
_shaderStages[i].bufferBindings.clear();
|
||||
_shaderStages[i].textureBindings.clear();
|
||||
_shaderStages[i].samplerStateBindings.clear();
|
||||
_shaderStages[i].inlineBindings.clear();
|
||||
_shaderStages[i].swizzleConstants.clear();
|
||||
_shaderStages[i].bufferSizes.clear();
|
||||
|
||||
_shaderStages[i].areBufferBindingsDirty = false;
|
||||
_shaderStages[i].areTextureBindingsDirty = false;
|
||||
_shaderStages[i].areSamplerStateBindingsDirty = false;
|
||||
_shaderStages[i].areInlineBindingsDirty = false;
|
||||
_shaderStages[i].swizzleBufferBinding.isDirty = false;
|
||||
_shaderStages[i].bufferSizeBufferBinding.isDirty = false;
|
||||
|
||||
@ -724,6 +757,10 @@ void MVKComputeResourcesCommandEncoderState::bindSamplerState(const MVKMTLSample
|
||||
bind(binding, _samplerStateBindings, _areSamplerStateBindingsDirty);
|
||||
}
|
||||
|
||||
/** Records an inline uniform block binding for the compute stage and marks it dirty. */
void MVKComputeResourcesCommandEncoderState::bindInline(const MVKMTLInlineBinding& binding) {
	auto& inlineRezs = _inlineBindings;
	auto& dirtyFlag = _areInlineBindingsDirty;
	bind(binding, inlineRezs, dirtyFlag);
}
|
||||
|
||||
void MVKComputeResourcesCommandEncoderState::bindSwizzleBuffer(const MVKShaderImplicitRezBinding& binding,
|
||||
bool needSwizzleBuffer) {
|
||||
_swizzleBufferBinding.index = binding.stages[kMVKShaderStageCompute];
|
||||
@ -742,6 +779,7 @@ void MVKComputeResourcesCommandEncoderState::markDirty() {
|
||||
MVKResourcesCommandEncoderState::markDirty(_bufferBindings, _areBufferBindingsDirty);
|
||||
MVKResourcesCommandEncoderState::markDirty(_textureBindings, _areTextureBindingsDirty);
|
||||
MVKResourcesCommandEncoderState::markDirty(_samplerStateBindings, _areSamplerStateBindingsDirty);
|
||||
MVKResourcesCommandEncoderState::markDirty(_inlineBindings, _areInlineBindingsDirty);
|
||||
}
|
||||
|
||||
void MVKComputeResourcesCommandEncoderState::encodeImpl(uint32_t) {
|
||||
@ -758,6 +796,14 @@ void MVKComputeResourcesCommandEncoderState::encodeImpl(uint32_t) {
|
||||
atIndex: b.index];
|
||||
});
|
||||
|
||||
encodeBinding<MVKMTLInlineBinding>(_inlineBindings, _areInlineBindingsDirty,
|
||||
[](MVKCommandEncoder* cmdEncoder, MVKMTLInlineBinding& b)->void {
|
||||
cmdEncoder->setComputeBytes(cmdEncoder->getMTLComputeEncoder(kMVKCommandUseDispatch),
|
||||
b.mtlBytes,
|
||||
b.size,
|
||||
b.index);
|
||||
});
|
||||
|
||||
if (_swizzleBufferBinding.isDirty) {
|
||||
|
||||
for (auto& b : _textureBindings) {
|
||||
@ -802,6 +848,7 @@ void MVKComputeResourcesCommandEncoderState::resetImpl() {
|
||||
_bufferBindings.clear();
|
||||
_textureBindings.clear();
|
||||
_samplerStateBindings.clear();
|
||||
_inlineBindings.clear();
|
||||
_swizzleConstants.clear();
|
||||
_bufferSizes.clear();
|
||||
|
||||
|
@ -51,3 +51,12 @@ typedef struct {
|
||||
MTLIndexType mtlIndexType;
|
||||
bool isDirty = true;
|
||||
} MVKIndexMTLBufferBinding;
|
||||
|
||||
/** Describes host bytes resource binding. */
typedef struct {
	// The two union members alias the same pointer: mtlBytes holds the raw host
	// bytes of an inline uniform block (set from VkWriteDescriptorSetInlineUniformBlockEXT::pData),
	// mtlResource offers the same value under a generic resource name.
	// NOTE(review): the pointer is non-owning from this struct's point of view —
	// confirm the backing data outlives the encoded command buffer.
	union { const void* mtlBytes = nil; const void* mtlResource; }; // aliases
	uint32_t index = 0;   // Metal buffer binding index the bytes are set at
	uint32_t size = 0;    // Size of the inline data, in bytes (from dataSize)
	bool isDirty = true;  // True if this binding changed since the last encoding pass
} MVKMTLInlineBinding;
|
||||
|
||||
|
@ -253,7 +253,8 @@ public:
|
||||
VkDescriptorType& descType,
|
||||
VkDescriptorImageInfo* pImageInfo,
|
||||
VkDescriptorBufferInfo* pBufferInfo,
|
||||
VkBufferView* pTexelBufferView);
|
||||
VkBufferView* pTexelBufferView,
|
||||
VkWriteDescriptorSetInlineUniformBlockEXT* inlineUniformBlock);
|
||||
|
||||
/** Returns whether this instance represents the specified Vulkan binding point. */
|
||||
bool hasBinding(uint32_t binding);
|
||||
@ -274,6 +275,7 @@ protected:
|
||||
MVKDescriptorSetLayoutBinding* _pBindingLayout;
|
||||
std::vector<VkDescriptorImageInfo> _imageBindings;
|
||||
std::vector<VkDescriptorBufferInfo> _bufferBindings;
|
||||
std::vector<VkWriteDescriptorSetInlineUniformBlockEXT> _inlineBindings;
|
||||
std::vector<VkBufferView> _texelBufferBindings;
|
||||
std::vector<id<MTLBuffer>> _mtlBuffers;
|
||||
std::vector<NSUInteger> _mtlBufferOffsets;
|
||||
@ -311,7 +313,8 @@ public:
|
||||
VkDescriptorType& descType,
|
||||
VkDescriptorImageInfo* pImageInfo,
|
||||
VkDescriptorBufferInfo* pBufferInfo,
|
||||
VkBufferView* pTexelBufferView);
|
||||
VkBufferView* pTexelBufferView,
|
||||
VkWriteDescriptorSetInlineUniformBlockEXT* pInlineUniformBlock);
|
||||
|
||||
MVKDescriptorSet(MVKDevice* device) : MVKVulkanAPIDeviceObject(device) {}
|
||||
|
||||
|
@ -90,6 +90,7 @@ void MVKDescriptorSetLayoutBinding::bind(MVKCommandEncoder* cmdEncoder,
|
||||
MVKMTLBufferBinding bb;
|
||||
MVKMTLTextureBinding tb;
|
||||
MVKMTLSamplerStateBinding sb;
|
||||
MVKMTLInlineBinding ib;
|
||||
NSUInteger bufferDynamicOffset = 0;
|
||||
|
||||
// Establish the resource indices to use, by combining the offsets of the DSL and this DSL binding.
|
||||
@ -122,6 +123,22 @@ void MVKDescriptorSetLayoutBinding::bind(MVKCommandEncoder* cmdEncoder,
|
||||
break;
|
||||
}
|
||||
|
||||
case VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT: {
|
||||
ib.mtlBytes = descBinding._inlineBindings[rezIdx].pData;
|
||||
ib.size = descBinding._inlineBindings[rezIdx].dataSize;
|
||||
for (uint32_t i = kMVKShaderStageVertex; i < kMVKShaderStageMax; i++) {
|
||||
if (_applyToStage[i]) {
|
||||
ib.index = mtlIdxs.stages[i].bufferIndex + rezIdx;
|
||||
if (i == kMVKShaderStageCompute) {
|
||||
if (cmdEncoder) { cmdEncoder->_computeResourcesState.bindInline(ib); }
|
||||
} else {
|
||||
if (cmdEncoder) { cmdEncoder->_graphicsResourcesState.bindInline(MVKShaderStage(i), ib); }
|
||||
}
|
||||
}
|
||||
}
|
||||
break;
|
||||
}
|
||||
|
||||
case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
|
||||
case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
|
||||
case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
|
||||
@ -208,6 +225,7 @@ void MVKDescriptorSetLayoutBinding::push(MVKCommandEncoder* cmdEncoder,
|
||||
MVKMTLBufferBinding bb;
|
||||
MVKMTLTextureBinding tb;
|
||||
MVKMTLSamplerStateBinding sb;
|
||||
MVKMTLInlineBinding ib;
|
||||
|
||||
if (dstArrayElement >= _info.descriptorCount) {
|
||||
dstArrayElement -= _info.descriptorCount;
|
||||
@ -255,6 +273,23 @@ void MVKDescriptorSetLayoutBinding::push(MVKCommandEncoder* cmdEncoder,
|
||||
break;
|
||||
}
|
||||
|
||||
case VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT: {
|
||||
const auto& inlineUniformBlock = get<VkWriteDescriptorSetInlineUniformBlockEXT>(pData, stride, rezIdx - dstArrayElement);
|
||||
ib.mtlBytes = inlineUniformBlock.pData;
|
||||
ib.size = inlineUniformBlock.dataSize;
|
||||
for (uint32_t i = kMVKShaderStageVertex; i < kMVKShaderStageMax; i++) {
|
||||
if (_applyToStage[i]) {
|
||||
ib.index = mtlIdxs.stages[i].bufferIndex + rezIdx;
|
||||
if (i == kMVKShaderStageCompute) {
|
||||
if (cmdEncoder) { cmdEncoder->_computeResourcesState.bindInline(ib); }
|
||||
} else {
|
||||
if (cmdEncoder) { cmdEncoder->_graphicsResourcesState.bindInline(MVKShaderStage(i), ib); }
|
||||
}
|
||||
}
|
||||
}
|
||||
break;
|
||||
}
|
||||
|
||||
case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
|
||||
case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
|
||||
case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT: {
|
||||
@ -499,6 +534,7 @@ void MVKDescriptorSetLayoutBinding::initMetalResourceIndexOffsets(MVKShaderStage
|
||||
case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
|
||||
case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
|
||||
case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
|
||||
case VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT:
|
||||
pBindingIndexes->bufferIndex = pDescSetCounts->bufferIndex;
|
||||
pDescSetCounts->bufferIndex += pBinding->descriptorCount;
|
||||
break;
|
||||
@ -532,6 +568,7 @@ void MVKDescriptorSetLayout::bindDescriptorSet(MVKCommandEncoder* cmdEncoder,
|
||||
|
||||
static const void* getWriteParameters(VkDescriptorType type, const VkDescriptorImageInfo* pImageInfo,
|
||||
const VkDescriptorBufferInfo* pBufferInfo, const VkBufferView* pTexelBufferView,
|
||||
const VkWriteDescriptorSetInlineUniformBlockEXT* pInlineUniformBlock,
|
||||
size_t& stride) {
|
||||
const void* pData;
|
||||
switch (type) {
|
||||
@ -558,6 +595,11 @@ static const void* getWriteParameters(VkDescriptorType type, const VkDescriptorI
|
||||
stride = sizeof(MVKBufferView*);
|
||||
break;
|
||||
|
||||
case VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT:
|
||||
pData = pInlineUniformBlock;
|
||||
stride = sizeof(VkWriteDescriptorSetInlineUniformBlockEXT);
|
||||
break;
|
||||
|
||||
default:
|
||||
pData = nullptr;
|
||||
stride = 0;
|
||||
@ -580,6 +622,20 @@ void MVKDescriptorSetLayout::pushDescriptorSet(MVKCommandEncoder* cmdEncoder,
|
||||
const VkDescriptorImageInfo* pImageInfo = descWrite.pImageInfo;
|
||||
const VkDescriptorBufferInfo* pBufferInfo = descWrite.pBufferInfo;
|
||||
const VkBufferView* pTexelBufferView = descWrite.pTexelBufferView;
|
||||
const VkWriteDescriptorSetInlineUniformBlockEXT* pInlineUniformBlock = nullptr;
|
||||
if (_device->_enabledExtensions.vk_EXT_inline_uniform_block.enabled) {
|
||||
for (auto* next = (VkWriteDescriptorSetInlineUniformBlockEXT*)descWrite.pNext; next; next = (VkWriteDescriptorSetInlineUniformBlockEXT*)next->pNext)
|
||||
{
|
||||
switch (next->sType) {
|
||||
case VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_INLINE_UNIFORM_BLOCK_EXT: {
|
||||
pInlineUniformBlock = next;
|
||||
break;
|
||||
}
|
||||
default:
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
if (!_bindingToIndex.count(dstBinding)) continue;
|
||||
// Note: This will result in us walking off the end of the array
|
||||
// in case there are too many updates... but that's ill-defined anyway.
|
||||
@ -587,7 +643,7 @@ void MVKDescriptorSetLayout::pushDescriptorSet(MVKCommandEncoder* cmdEncoder,
|
||||
if (!_bindingToIndex.count(dstBinding)) continue;
|
||||
size_t stride;
|
||||
const void* pData = getWriteParameters(descWrite.descriptorType, pImageInfo,
|
||||
pBufferInfo, pTexelBufferView, stride);
|
||||
pBufferInfo, pTexelBufferView, pInlineUniformBlock, stride);
|
||||
uint32_t descriptorsPushed = 0;
|
||||
uint32_t bindIdx = _bindingToIndex[dstBinding];
|
||||
_bindings[bindIdx].push(cmdEncoder, dstArrayElement, descriptorCount,
|
||||
@ -777,6 +833,26 @@ uint32_t MVKDescriptorBinding::writeBindings(uint32_t srcStartIndex,
|
||||
}
|
||||
}
|
||||
break;
|
||||
|
||||
case VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT:
|
||||
for (uint32_t i = 0; i < dstCnt; i++) {
|
||||
uint32_t dstIdx = dstStartIndex + i;
|
||||
const auto& srcInlineUniformBlock = get<VkWriteDescriptorSetInlineUniformBlockEXT>(pData, stride, srcStartIndex + i);
|
||||
auto& dstInlineUniformBlock = _inlineBindings[dstIdx];
|
||||
if (dstInlineUniformBlock.pData && dstInlineUniformBlock.pData != srcInlineUniformBlock.pData)
|
||||
delete [] reinterpret_cast<const uint8_t*>(dstInlineUniformBlock.pData);
|
||||
if (srcInlineUniformBlock.dataSize != 0) {
|
||||
dstInlineUniformBlock.pData = reinterpret_cast<const void*>(new uint8_t*[srcInlineUniformBlock.dataSize]);
|
||||
if (srcInlineUniformBlock.pData) {
|
||||
memcpy(const_cast<void*>(dstInlineUniformBlock.pData), srcInlineUniformBlock.pData, srcInlineUniformBlock.dataSize);
|
||||
}
|
||||
} else {
|
||||
dstInlineUniformBlock.pData = nullptr;
|
||||
}
|
||||
dstInlineUniformBlock.dataSize = srcInlineUniformBlock.dataSize;
|
||||
}
|
||||
break;
|
||||
|
||||
default:
|
||||
break;
|
||||
}
|
||||
@ -790,7 +866,8 @@ uint32_t MVKDescriptorBinding::readBindings(uint32_t srcStartIndex,
|
||||
VkDescriptorType& descType,
|
||||
VkDescriptorImageInfo* pImageInfo,
|
||||
VkDescriptorBufferInfo* pBufferInfo,
|
||||
VkBufferView* pTexelBufferView) {
|
||||
VkBufferView* pTexelBufferView,
|
||||
VkWriteDescriptorSetInlineUniformBlockEXT* pInlineUniformBlock) {
|
||||
|
||||
uint32_t srcCnt = MIN(count, _pBindingLayout->_info.descriptorCount - srcStartIndex);
|
||||
|
||||
@ -822,6 +899,24 @@ uint32_t MVKDescriptorBinding::readBindings(uint32_t srcStartIndex,
|
||||
}
|
||||
break;
|
||||
|
||||
case VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT:
|
||||
for (uint32_t i = 0; i < srcCnt; i++) {
|
||||
const auto& srcInlineUniformBlock = _inlineBindings[srcStartIndex + i];
|
||||
auto& dstInlineUniformBlock = pInlineUniformBlock[dstStartIndex + i];
|
||||
if (dstInlineUniformBlock.pData && dstInlineUniformBlock.pData != srcInlineUniformBlock.pData)
|
||||
delete [] reinterpret_cast<const uint8_t*>(dstInlineUniformBlock.pData);
|
||||
if (srcInlineUniformBlock.dataSize != 0) {
|
||||
dstInlineUniformBlock.pData = reinterpret_cast<const void*>(new uint8_t*[srcInlineUniformBlock.dataSize]);
|
||||
if (srcInlineUniformBlock.pData) {
|
||||
memcpy(const_cast<void*>(dstInlineUniformBlock.pData), srcInlineUniformBlock.pData, srcInlineUniformBlock.dataSize);
|
||||
}
|
||||
} else {
|
||||
dstInlineUniformBlock.pData = nullptr;
|
||||
}
|
||||
dstInlineUniformBlock.dataSize = srcInlineUniformBlock.dataSize;
|
||||
}
|
||||
break;
|
||||
|
||||
default:
|
||||
break;
|
||||
}
|
||||
@ -866,6 +961,12 @@ MVKDescriptorBinding::MVKDescriptorBinding(MVKDescriptorSet* pDescSet, MVKDescri
|
||||
_mtlBufferOffsets.resize(descCnt, 0);
|
||||
break;
|
||||
|
||||
case VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT: {
|
||||
static const VkWriteDescriptorSetInlineUniformBlockEXT inlineUniformBlock {VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_INLINE_UNIFORM_BLOCK_EXT, nullptr, 0, nullptr};
|
||||
_inlineBindings.resize(descCnt, inlineUniformBlock);
|
||||
break;
|
||||
}
|
||||
|
||||
case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
|
||||
case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
|
||||
_texelBufferBindings.resize(descCnt, nil);
|
||||
@ -902,6 +1003,11 @@ MVKDescriptorBinding::~MVKDescriptorBinding() {
|
||||
((MVKBufferView*)buffView)->release();
|
||||
}
|
||||
}
|
||||
for (VkWriteDescriptorSetInlineUniformBlockEXT& inlineUniformBlock : _inlineBindings) {
|
||||
if (inlineUniformBlock.pData) {
|
||||
delete [] reinterpret_cast<const uint8_t*>(inlineUniformBlock.pData);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
@ -956,7 +1062,8 @@ void MVKDescriptorSet::readDescriptorSets(const VkCopyDescriptorSet* pDescriptor
|
||||
VkDescriptorType& descType,
|
||||
VkDescriptorImageInfo* pImageInfo,
|
||||
VkDescriptorBufferInfo* pBufferInfo,
|
||||
VkBufferView* pTexelBufferView) {
|
||||
VkBufferView* pTexelBufferView,
|
||||
VkWriteDescriptorSetInlineUniformBlockEXT* pInlineUniformBlock) {
|
||||
uint32_t srcStartIdx = pDescriptorCopy->srcArrayElement;
|
||||
uint32_t binding = pDescriptorCopy->srcBinding;
|
||||
uint32_t origCnt = pDescriptorCopy->descriptorCount;
|
||||
@ -966,7 +1073,7 @@ void MVKDescriptorSet::readDescriptorSets(const VkCopyDescriptorSet* pDescriptor
|
||||
while (mvkDescBind && remainCnt > 0) {
|
||||
uint32_t dstStartIdx = origCnt - remainCnt;
|
||||
remainCnt = mvkDescBind->readBindings(srcStartIdx, dstStartIdx, remainCnt, descType,
|
||||
pImageInfo, pBufferInfo, pTexelBufferView);
|
||||
pImageInfo, pBufferInfo, pTexelBufferView, pInlineUniformBlock);
|
||||
binding++; // If not consumed, move to next consecutive binding point
|
||||
mvkDescBind = getBinding(binding);
|
||||
srcStartIdx = 0; // Subsequent bindings start reading at first element
|
||||
@ -1125,10 +1232,26 @@ void mvkUpdateDescriptorSets(uint32_t writeCount,
|
||||
for (uint32_t i = 0; i < writeCount; i++) {
|
||||
const VkWriteDescriptorSet* pDescWrite = &pDescriptorWrites[i];
|
||||
size_t stride;
|
||||
MVKDescriptorSet* dstSet = (MVKDescriptorSet*)pDescWrite->dstSet;
|
||||
|
||||
const VkWriteDescriptorSetInlineUniformBlockEXT* pInlineUniformBlock = nullptr;
|
||||
if (dstSet->getDevice()->_enabledExtensions.vk_EXT_inline_uniform_block.enabled) {
|
||||
for (auto* next = (VkWriteDescriptorSetInlineUniformBlockEXT*)pDescWrite->pNext; next; next = (VkWriteDescriptorSetInlineUniformBlockEXT*)next->pNext)
|
||||
{
|
||||
switch (next->sType) {
|
||||
case VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_INLINE_UNIFORM_BLOCK_EXT: {
|
||||
pInlineUniformBlock = next;
|
||||
break;
|
||||
}
|
||||
default:
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const void* pData = getWriteParameters(pDescWrite->descriptorType, pDescWrite->pImageInfo,
|
||||
pDescWrite->pBufferInfo, pDescWrite->pTexelBufferView,
|
||||
stride);
|
||||
MVKDescriptorSet* dstSet = (MVKDescriptorSet*)pDescWrite->dstSet;
|
||||
pInlineUniformBlock, stride);
|
||||
dstSet->writeDescriptorSets(pDescWrite, stride, pData);
|
||||
}
|
||||
|
||||
@ -1141,13 +1264,14 @@ void mvkUpdateDescriptorSets(uint32_t writeCount,
|
||||
VkDescriptorImageInfo imgInfos[descCnt];
|
||||
VkDescriptorBufferInfo buffInfos[descCnt];
|
||||
VkBufferView texelBuffInfos[descCnt];
|
||||
VkWriteDescriptorSetInlineUniformBlockEXT inlineUniformBlocks[descCnt];
|
||||
|
||||
MVKDescriptorSet* srcSet = (MVKDescriptorSet*)pDescCopy->srcSet;
|
||||
srcSet->readDescriptorSets(pDescCopy, descType, imgInfos, buffInfos, texelBuffInfos);
|
||||
srcSet->readDescriptorSets(pDescCopy, descType, imgInfos, buffInfos, texelBuffInfos, inlineUniformBlocks);
|
||||
|
||||
MVKDescriptorSet* dstSet = (MVKDescriptorSet*)pDescCopy->dstSet;
|
||||
size_t stride;
|
||||
const void* pData = getWriteParameters(descType, imgInfos, buffInfos, texelBuffInfos, stride);
|
||||
const void* pData = getWriteParameters(descType, imgInfos, buffInfos, texelBuffInfos, inlineUniformBlocks, stride);
|
||||
dstSet->writeDescriptorSets(pDescCopy, stride, pData);
|
||||
}
|
||||
}
|
||||
|
@ -143,6 +143,12 @@ void MVKPhysicalDevice::getFeatures(VkPhysicalDeviceFeatures2* features) {
|
||||
shaderIntFuncsFeatures->shaderIntegerFunctions2 = true;
|
||||
break;
|
||||
}
|
||||
case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_FEATURES_EXT: {
|
||||
auto* inlineUniformBlockFeatures = (VkPhysicalDeviceInlineUniformBlockFeaturesEXT*)next;
|
||||
inlineUniformBlockFeatures->inlineUniformBlock = true;
|
||||
inlineUniformBlockFeatures->descriptorBindingInlineUniformBlockUpdateAfterBind = true;
|
||||
break;
|
||||
}
|
||||
default:
|
||||
break;
|
||||
}
|
||||
|
@ -70,6 +70,7 @@ MVK_EXTENSION(EXT_debug_utils, EXT_DEBUG_UTILS, MVK_EXTENSION_INSTANCE)
|
||||
MVK_EXTENSION(EXT_fragment_shader_interlock, EXT_FRAGMENT_SHADER_INTERLOCK, MVK_EXTENSION_DEVICE)
|
||||
MVK_EXTENSION(EXT_hdr_metadata, EXT_HDR_METADATA, MVK_EXTENSION_DEVICE)
|
||||
MVK_EXTENSION(EXT_host_query_reset, EXT_HOST_QUERY_RESET, MVK_EXTENSION_DEVICE)
|
||||
MVK_EXTENSION(EXT_inline_uniform_block, EXT_INLINE_UNIFORM_BLOCK, MVK_EXTENSION_DEVICE)
|
||||
MVK_EXTENSION(EXT_memory_budget, EXT_MEMORY_BUDGET, MVK_EXTENSION_DEVICE)
|
||||
MVK_EXTENSION(EXT_metal_surface, EXT_METAL_SURFACE, MVK_EXTENSION_INSTANCE)
|
||||
MVK_EXTENSION(EXT_post_depth_coverage, EXT_POST_DEPTH_COVERAGE, MVK_EXTENSION_DEVICE)
|
||||
|
Loading…
x
Reference in New Issue
Block a user