Merge remote-tracking branch 'upstream/master' into tvos
commit a0e357fab5
@@ -18,12 +18,17 @@

 #pragma once

+#include "MVKCommonEnvironment.h"
 #include <dispatch/dispatch.h>
 #include <string>
+#include <limits>


 typedef float MVKOSVersion;

+/*** Constant indicating unsupported functionality in an OS. */
+static const MVKOSVersion kMVKOSVersionUnsupported = std::numeric_limits<MVKOSVersion>::max();

 /**
 * Returns the operating system version as an MVKOSVersion, which is a float in which the
 * whole number portion indicates the major version, and the fractional portion indicates
@@ -33,9 +38,29 @@ typedef float MVKOSVersion;
 */
 MVKOSVersion mvkOSVersion();

+/** Returns a MVKOSVersion built from the version components. */
+inline MVKOSVersion mvkMakeOSVersion(uint32_t major, uint32_t minor, uint32_t patch) {
+return (float)major + ((float)minor / 100.0f) + ((float)patch / 10000.0f);
+}

 /** Returns whether the operating system version is at least minVer. */
 inline bool mvkOSVersionIsAtLeast(MVKOSVersion minVer) { return mvkOSVersion() >= minVer; }

+/**
+* Returns whether the operating system version is at least the appropriate min version.
+* The constant kMVKOSVersionUnsupported can be used for either value to cause the test
+* to always fail on that OS, which is useful for indidicating functionalty guarded by
+* this test is not supported on that OS.
+*/
+inline bool mvkOSVersionIsAtLeast(MVKOSVersion macOSMinVer, MVKOSVersion iOSMinVer) {
+#if MVK_MACOS
+return mvkOSVersionIsAtLeast(macOSMinVer);
+#endif
+#if MVK_IOS
+return mvkOSVersionIsAtLeast(iOSMinVer);
+#endif
+}

 /**
 * Returns a monotonic timestamp value for use in Vulkan and performance timestamping.
 *
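Editor's note: a minimal, self-contained sketch of how the helpers added above combine (my own illustration, not part of the commit). The float encoding packs major.minorpatch into one value, so an OS check reduces to a plain float comparison; the version numbers below are made up for the example.

#include <cstdint>
#include <cstdio>

// Mirrors mvkMakeOSVersion() added above: e.g. 10.15.6 -> 10.1506f.
static float makeOSVersion(uint32_t major, uint32_t minor, uint32_t patch) {
	return (float)major + ((float)minor / 100.0f) + ((float)patch / 10000.0f);
}

int main() {
	float osVer = makeOSVersion(10, 15, 6);                    // 10.1506
	bool atLeastMojave = (osVer >= makeOSVersion(10, 14, 0));  // true: 10.1506 >= 10.14
	printf("osVer=%.4f atLeastMojave=%d\n", osVer, atLeastMojave);
	return 0;
}

Per the header comment above, passing kMVKOSVersionUnsupported as one of the two arguments to the new two-parameter mvkOSVersionIsAtLeast() forces the comparison to fail on that OS, which is how per-OS functionality is gated.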
@@ -29,15 +29,11 @@

 using namespace std;

-static const MVKOSVersion kMVKOSVersionUnknown = 0.0f;
-static MVKOSVersion _mvkOSVersion = kMVKOSVersionUnknown;
 MVKOSVersion mvkOSVersion() {
-if (_mvkOSVersion == kMVKOSVersionUnknown) {
+static MVKOSVersion _mvkOSVersion = 0;
+if ( !_mvkOSVersion ) {
 NSOperatingSystemVersion osVer = [[NSProcessInfo processInfo] operatingSystemVersion];
-float maj = osVer.majorVersion;
-float min = osVer.minorVersion;
-float pat = osVer.patchVersion;
-_mvkOSVersion = maj + (min / 100.0f) + + (pat / 10000.0f);
+_mvkOSVersion = mvkMakeOSVersion((uint32_t)osVer.majorVersion, (uint32_t)osVer.minorVersion, (uint32_t)osVer.patchVersion);
 }
 return _mvkOSVersion;
 }
@@ -659,7 +659,6 @@
 PRODUCT_NAME = "API-Samples";
 SDKROOT = iphoneos;
 TARGETED_DEVICE_FAMILY = "1,2";
-VALID_ARCHS = arm64;
 };
 name = Debug;
 };
@@ -681,7 +680,6 @@
 PRODUCT_NAME = "API-Samples";
 SDKROOT = iphoneos;
 TARGETED_DEVICE_FAMILY = "1,2";
-VALID_ARCHS = arm64;
 };
 name = Release;
 };
@@ -738,6 +736,7 @@
 C01FCF4F08A954540054247B /* Debug */ = {
 isa = XCBuildConfiguration;
 buildSettings = {
+ARCHS = "$(ARCHS_STANDARD_64_BIT)";
 CLANG_CXX_LANGUAGE_STANDARD = "c++0x";
 CLANG_CXX_LIBRARY = "libc++";
 CLANG_ENABLE_MODULES = YES;
@@ -768,6 +767,7 @@
 C01FCF5008A954540054247B /* Release */ = {
 isa = XCBuildConfiguration;
 buildSettings = {
+ARCHS = "$(ARCHS_STANDARD_64_BIT)";
 CLANG_CXX_LANGUAGE_STANDARD = "c++0x";
 CLANG_CXX_LIBRARY = "libc++";
 CLANG_ENABLE_MODULES = YES;
@@ -10,27 +10,14 @@
 set -e

 echo
-echo ========== Building MoltenVKShaderConverter tool ==========
+echo ========== Converting API-Samples shader files ==========
 echo

 cd "../../.."

-XC_PROJ="MoltenVKPackaging.xcodeproj"
-XC_SCHEME="MVKShaderConverterTool Package"

-xcodebuild \
--project "MoltenVKPackaging.xcodeproj" \
--scheme "MVKShaderConverterTool Package" \
--quiet \
-build

-echo
-echo ========== Converting API-Samples shader files ==========
-echo

 "Package/Latest/MoltenVKShaderConverter/Tools/MoltenVKShaderConverter" \
 -r -gi -so -oh -xs . -q \
--d "Demos/LunarG-VulkanSamples/VulkanSamples/API-Samples"
+-d "Demos/LunarG-VulkanSamples/VulkanSamples/API-Samples" > /dev/null

 cd - > /dev/null

@@ -359,7 +359,6 @@
 PRODUCT_NAME = Cube;
 SDKROOT = iphoneos;
 TARGETED_DEVICE_FAMILY = "1,2";
-VALID_ARCHS = arm64;
 };
 name = Debug;
 };
@@ -380,13 +379,13 @@
 PRODUCT_NAME = Cube;
 SDKROOT = iphoneos;
 TARGETED_DEVICE_FAMILY = "1,2";
-VALID_ARCHS = arm64;
 };
 name = Release;
 };
 C01FCF4F08A954540054247B /* Debug */ = {
 isa = XCBuildConfiguration;
 buildSettings = {
+ARCHS = "$(ARCHS_STANDARD_64_BIT)";
 CLANG_CXX_LANGUAGE_STANDARD = "c++0x";
 CLANG_CXX_LIBRARY = "libc++";
 CLANG_ENABLE_MODULES = YES;
@@ -417,6 +416,7 @@
 C01FCF5008A954540054247B /* Release */ = {
 isa = XCBuildConfiguration;
 buildSettings = {
+ARCHS = "$(ARCHS_STANDARD_64_BIT)";
 CLANG_CXX_LANGUAGE_STANDARD = "c++0x";
 CLANG_CXX_LIBRARY = "libc++";
 CLANG_ENABLE_MODULES = YES;
@@ -360,7 +360,6 @@
 LIBRARY_SEARCH_PATHS = "\"$(SRCROOT)/../../../MoltenVK/iOS/static\"";
 SDKROOT = iphoneos;
 TARGETED_DEVICE_FAMILY = "1,2";
-VALID_ARCHS = arm64;
 };
 name = Debug;
 };
@@ -375,7 +374,6 @@
 LIBRARY_SEARCH_PATHS = "\"$(SRCROOT)/../../../MoltenVK/iOS/static\"";
 SDKROOT = iphoneos;
 TARGETED_DEVICE_FAMILY = "1,2";
-VALID_ARCHS = arm64;
 };
 name = Release;
 };
@@ -416,6 +414,7 @@
 C01FCF4F08A954540054247B /* Debug */ = {
 isa = XCBuildConfiguration;
 buildSettings = {
+ARCHS = "$(ARCHS_STANDARD_64_BIT)";
 CLANG_CXX_LANGUAGE_STANDARD = "c++0x";
 CLANG_CXX_LIBRARY = "libc++";
 CLANG_ENABLE_MODULES = YES;
@@ -448,6 +447,7 @@
 C01FCF5008A954540054247B /* Release */ = {
 isa = XCBuildConfiguration;
 buildSettings = {
+ARCHS = "$(ARCHS_STANDARD_64_BIT)";
 CLANG_CXX_LANGUAGE_STANDARD = "c++0x";
 CLANG_CXX_LIBRARY = "libc++";
 CLANG_ENABLE_MODULES = YES;
@@ -256,6 +256,7 @@ In addition to core *Vulkan* functionality, **MoltenVK** also supports the foll
 - `VK_KHR_push_descriptor`
 - `VK_KHR_relaxed_block_layout`
 - `VK_KHR_sampler_mirror_clamp_to_edge` *(macOS)*
+- `VK_KHR_sampler_ycbcr_conversion`
 - `VK_KHR_shader_draw_parameters`
 - `VK_KHR_shader_float16_int8`
 - `VK_KHR_storage_buffer_storage_class`
@@ -273,6 +274,7 @@ In addition to core *Vulkan* functionality, **MoltenVK** also supports the foll
 - `VK_EXT_memory_budget` *(requires Metal 2.0)*
 - `VK_EXT_metal_surface`
 - `VK_EXT_post_depth_coverage` *(iOS, requires GPU family 4)*
+- `VK_EXT_robustness2`
 - `VK_EXT_scalar_block_layout`
 - `VK_EXT_shader_stencil_export` *(requires Mac GPU family 2 or iOS GPU family 5)*
 - `VK_EXT_shader_viewport_index_layer`
@@ -13,6 +13,20 @@ For best results, use a Markdown reader.*



+MoltenVK 1.0.44
+---------------
+
+Released TBD
+
+- Add support for extensions:
+	- VK_KHR_sampler_ycbcr_conversion
+	- VK_EXT_robustness2
+- Fix issue where mapped host-coherent device memory not updated from image contents on *macOS*.
+- Remove use of `@available()` directive as it was causing issues in some build environments.
+- Refactor **MoltenVK** *Xcode* build architectures.
+- Demo `API-Samples generateSPIRVShaders` no longer builds `MoltenVKShaderController` tool.
+
+
 MoltenVK 1.0.43
 ---------------

@@ -5077,34 +5077,24 @@
 A90FD89D21CC4EAB00B92BB2 /* Debug */ = {
 isa = XCBuildConfiguration;
 buildSettings = {
-ARCHS = arm64;
-BITCODE_GENERATION_MODE = bitcode;
-CODE_SIGN_IDENTITY = "iPhone Developer";
-"CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "";
 GCC_PREPROCESSOR_DEFINITIONS = (
 "$(inherited)",
 "SPIRV_CROSS_NAMESPACE_OVERRIDE=MVK_spirv_cross",
 );
 PRODUCT_NAME = SPIRVCross;
 SDKROOT = iphoneos;
-VALID_ARCHS = arm64;
 };
 name = Debug;
 };
 A90FD89E21CC4EAB00B92BB2 /* Release */ = {
 isa = XCBuildConfiguration;
 buildSettings = {
-ARCHS = arm64;
-BITCODE_GENERATION_MODE = bitcode;
-CODE_SIGN_IDENTITY = "iPhone Developer";
-"CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "";
 GCC_PREPROCESSOR_DEFINITIONS = (
 "$(inherited)",
 "SPIRV_CROSS_NAMESPACE_OVERRIDE=MVK_spirv_cross",
 );
 PRODUCT_NAME = SPIRVCross;
 SDKROOT = iphoneos;
-VALID_ARCHS = arm64;
 };
 name = Release;
 };
@@ -5185,10 +5175,6 @@
 A972A80D21CECBBF0013AB25 /* Debug */ = {
 isa = XCBuildConfiguration;
 buildSettings = {
-ARCHS = arm64;
-BITCODE_GENERATION_MODE = bitcode;
-CODE_SIGN_IDENTITY = "iPhone Developer";
-"CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "";
 DEPLOYMENT_POSTPROCESSING = YES;
 GCC_WARN_ABOUT_MISSING_PROTOTYPES = NO;
 HEADER_SEARCH_PATHS = (
@@ -5200,17 +5186,12 @@
 );
 PRODUCT_NAME = SPIRVTools;
 SDKROOT = iphoneos;
-VALID_ARCHS = arm64;
 };
 name = Debug;
 };
 A972A80E21CECBBF0013AB25 /* Release */ = {
 isa = XCBuildConfiguration;
 buildSettings = {
-ARCHS = arm64;
-BITCODE_GENERATION_MODE = bitcode;
-CODE_SIGN_IDENTITY = "iPhone Developer";
-"CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "";
 DEPLOYMENT_POSTPROCESSING = YES;
 GCC_WARN_ABOUT_MISSING_PROTOTYPES = NO;
 HEADER_SEARCH_PATHS = (
@@ -5222,7 +5203,6 @@
 );
 PRODUCT_NAME = SPIRVTools;
 SDKROOT = iphoneos;
-VALID_ARCHS = arm64;
 };
 name = Release;
 };
@@ -5263,11 +5243,7 @@
 A972ABDA21CED7BC0013AB25 /* Debug */ = {
 isa = XCBuildConfiguration;
 buildSettings = {
-ARCHS = arm64;
-BITCODE_GENERATION_MODE = bitcode;
 CLANG_WARN_SUSPICIOUS_IMPLICIT_CONVERSION = NO;
-CODE_SIGN_IDENTITY = "iPhone Developer";
-"CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "";
 GCC_PREPROCESSOR_DEFINITIONS = (
 "$(inherited)",
 "AMD_EXTENSIONS=1",
@@ -5283,18 +5259,13 @@
 );
 PRODUCT_NAME = glslang;
 SDKROOT = iphoneos;
-VALID_ARCHS = arm64;
 };
 name = Debug;
 };
 A972ABDB21CED7BC0013AB25 /* Release */ = {
 isa = XCBuildConfiguration;
 buildSettings = {
-ARCHS = arm64;
-BITCODE_GENERATION_MODE = bitcode;
 CLANG_WARN_SUSPICIOUS_IMPLICIT_CONVERSION = NO;
-CODE_SIGN_IDENTITY = "iPhone Developer";
-"CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "";
 GCC_PREPROCESSOR_DEFINITIONS = (
 "$(inherited)",
 "AMD_EXTENSIONS=1",
@@ -5310,7 +5281,6 @@
 );
 PRODUCT_NAME = glslang;
 SDKROOT = iphoneos;
-VALID_ARCHS = arm64;
 };
 name = Release;
 };
@@ -5362,6 +5332,7 @@
 isa = XCBuildConfiguration;
 buildSettings = {
 ALWAYS_SEARCH_USER_PATHS = NO;
+ARCHS = "$(ARCHS_STANDARD_64_BIT)";
 CLANG_CXX_LANGUAGE_STANDARD = "c++0x";
 CLANG_CXX_LIBRARY = "libc++";
 CLANG_ENABLE_MODULES = NO;
@@ -5410,6 +5381,7 @@
 isa = XCBuildConfiguration;
 buildSettings = {
 ALWAYS_SEARCH_USER_PATHS = NO;
+ARCHS = "$(ARCHS_STANDARD_64_BIT)";
 CLANG_CXX_LANGUAGE_STANDARD = "c++0x";
 CLANG_CXX_LIBRARY = "libc++";
 CLANG_ENABLE_MODULES = NO;
@@ -1525,24 +1525,18 @@
 A9B8EE1E1A98D796009C5A02 /* Debug */ = {
 isa = XCBuildConfiguration;
 buildSettings = {
-ARCHS = arm64;
-BITCODE_GENERATION_MODE = bitcode;
 IPHONEOS_DEPLOYMENT_TARGET = 9.0;
 SDKROOT = iphoneos;
 TARGETED_DEVICE_FAMILY = "1,2";
-VALID_ARCHS = arm64;
 };
 name = Debug;
 };
 A9B8EE1F1A98D796009C5A02 /* Release */ = {
 isa = XCBuildConfiguration;
 buildSettings = {
-ARCHS = arm64;
-BITCODE_GENERATION_MODE = bitcode;
 IPHONEOS_DEPLOYMENT_TARGET = 9.0;
 SDKROOT = iphoneos;
 TARGETED_DEVICE_FAMILY = "1,2";
-VALID_ARCHS = arm64;
 };
 name = Release;
 };
@@ -1551,7 +1545,6 @@
 buildSettings = {
 MACOSX_DEPLOYMENT_TARGET = 10.11;
 SDKROOT = macosx;
-VALID_ARCHS = x86_64;
 };
 name = Debug;
 };
@@ -1560,7 +1553,6 @@
 buildSettings = {
 MACOSX_DEPLOYMENT_TARGET = 10.11;
 SDKROOT = macosx;
-VALID_ARCHS = x86_64;
 };
 name = Release;
 };
@@ -1568,6 +1560,7 @@
 isa = XCBuildConfiguration;
 buildSettings = {
 ALWAYS_SEARCH_USER_PATHS = NO;
+ARCHS = "$(ARCHS_STANDARD_64_BIT)";
 CLANG_CXX_LANGUAGE_STANDARD = "c++0x";
 CLANG_CXX_LIBRARY = "libc++";
 CLANG_ENABLE_MODULES = NO;
@@ -1628,6 +1621,7 @@
 isa = XCBuildConfiguration;
 buildSettings = {
 ALWAYS_SEARCH_USER_PATHS = NO;
+ARCHS = "$(ARCHS_STANDARD_64_BIT)";
 CLANG_CXX_LANGUAGE_STANDARD = "c++0x";
 CLANG_CXX_LIBRARY = "libc++";
 CLANG_ENABLE_MODULES = NO;
@@ -50,7 +50,7 @@ typedef unsigned long MTLLanguageVersion;
 */
 #define MVK_VERSION_MAJOR 1
 #define MVK_VERSION_MINOR 0
-#define MVK_VERSION_PATCH 43
+#define MVK_VERSION_PATCH 44

 #define MVK_MAKE_VERSION(major, minor, patch) (((major) * 10000) + ((minor) * 100) + (patch))
 #define MVK_VERSION MVK_MAKE_VERSION(MVK_VERSION_MAJOR, MVK_VERSION_MINOR, MVK_VERSION_PATCH)
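Editor's note: a quick check of the packing arithmetic behind this patch bump (my own worked example, not part of the commit). MVK_MAKE_VERSION folds the three components into a decimal integer, so this change moves the packed MVK_VERSION from 10043 to 10044.

// Worked example of the macro above, evaluated exactly as written:
// MVK_MAKE_VERSION(1, 0, 44) = (1 * 10000) + (0 * 100) + 44 = 10044
static_assert((1 * 10000) + (0 * 100) + 44 == 10044, "MoltenVK 1.0.44 packs to 10044");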
@@ -90,7 +90,9 @@ void MVKCmdPipelineBarrier<N>::encode(MVKCommandEncoder* cmdEncoder) {
 break;

 case MVKPipelineBarrier::Image:
-resources[rezCnt++] = b.mvkImage->getMTLTexture();
+for (uint8_t planeIndex = 0; planeIndex < b.mvkImage->getPlaneCount(); planeIndex++) {
+resources[rezCnt++] = b.mvkImage->getMTLTexture(planeIndex);
+}
 break;

 default:
@@ -103,7 +103,7 @@ public:

 protected:
 MVKCommandTypePool<MVKCommand>* getTypePool(MVKCommandPool* cmdPool) override;
-bool canCopyFormats();
+bool canCopyFormats(const VkImageBlit& region);
 bool canCopy(const VkImageBlit& region);
 void populateVertices(MVKVertexPosTex* vertices, const VkImageBlit& region);

@@ -125,6 +125,7 @@ typedef MVKCmdBlitImage<4> MVKCmdBlitImageMulti;

 /** Describes Metal texture resolve parameters. */
 typedef struct {
+VkImageCopy* copyRegion;
 uint32_t level;
 uint32_t slice;
 } MVKMetalResolveSlice;
@@ -62,16 +62,22 @@ VkResult MVKCmdCopyImage<N>::setContent(MVKCommandBuffer* cmdBuff,
 _dstLayout = dstImageLayout;

 _vkImageCopies.clear(); // Clear for reuse
-for (uint32_t i = 0; i < regionCount; i++) {
-_vkImageCopies.push_back(pRegions[i]);
-}
+for (uint32_t regionIdx = 0; regionIdx < regionCount; regionIdx++) {
+auto& vkIR = pRegions[regionIdx];
+uint8_t srcPlaneIndex = MVKImage::getPlaneFromVkImageAspectFlags(vkIR.srcSubresource.aspectMask);
+uint8_t dstPlaneIndex = MVKImage::getPlaneFromVkImageAspectFlags(vkIR.dstSubresource.aspectMask);

 // Validate
 MVKPixelFormats* pixFmts = cmdBuff->getPixelFormats();
 if ((_dstImage->getSampleCount() != _srcImage->getSampleCount()) ||
-(pixFmts->getBytesPerBlock(_dstImage->getMTLPixelFormat()) != pixFmts->getBytesPerBlock(_srcImage->getMTLPixelFormat()))) {
+(pixFmts->getBytesPerBlock(_dstImage->getMTLPixelFormat(dstPlaneIndex)) != pixFmts->getBytesPerBlock(_srcImage->getMTLPixelFormat(srcPlaneIndex)))) {
 return reportError(VK_ERROR_FEATURE_NOT_PRESENT, "vkCmdCopyImage(): Cannot copy between incompatible formats, such as formats of different pixel sizes, or between images with different sample counts.");
 }

+_vkImageCopies.push_back(vkIR);
+}

+// Validate
 if ((_srcImage->getMTLTextureType() == MTLTextureType3D) != (_dstImage->getMTLTextureType() == MTLTextureType3D)) {
 return reportError(VK_ERROR_FEATURE_NOT_PRESENT, "vkCmdCopyImage(): Metal does not support copying to or from slices of a 3D texture.");
 }
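Editor's note: this and the following hunks route every per-region lookup through MVKImage::getPlaneFromVkImageAspectFlags(), whose body is not shown in this diff. Below is a hedged sketch of the mapping the call sites imply, assuming the standard Vulkan multi-planar aspect bits used by VK_KHR_sampler_ycbcr_conversion; the helper name comes from the diff, but this body is my illustration, not the MoltenVK implementation.

#include <cstdint>
#include <vulkan/vulkan.h>

// Illustrative only: multi-planar aspects select their plane; every
// single-plane aspect (color, depth, stencil, plane 0) maps to plane 0.
static uint8_t planeFromAspectSketch(VkImageAspectFlags aspectMask) {
	if (aspectMask & VK_IMAGE_ASPECT_PLANE_1_BIT) { return 1; }
	if (aspectMask & VK_IMAGE_ASPECT_PLANE_2_BIT) { return 2; }
	return 0;
}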
@@ -81,24 +87,28 @@ VkResult MVKCmdCopyImage<N>::setContent(MVKCommandBuffer* cmdBuff,

 template <size_t N>
 void MVKCmdCopyImage<N>::encode(MVKCommandEncoder* cmdEncoder, MVKCommandUse commandUse) {

-MTLPixelFormat srcMTLPixFmt = _srcImage->getMTLPixelFormat();
-bool isSrcCompressed = _srcImage->getIsCompressed();

-MTLPixelFormat dstMTLPixFmt = _dstImage->getMTLPixelFormat();
-bool isDstCompressed = _dstImage->getIsCompressed();

-// If source and destination have different formats and at least one is compressed, use a temporary intermediary buffer
-bool useTempBuffer = (srcMTLPixFmt != dstMTLPixFmt) && (isSrcCompressed || isDstCompressed);
-if (useTempBuffer) {
 MVKPixelFormats* pixFmts = cmdEncoder->getPixelFormats();
 uint32_t copyCnt = (uint32_t)_vkImageCopies.size();
 VkBufferImageCopy vkSrcCopies[copyCnt];
 VkBufferImageCopy vkDstCopies[copyCnt];
 size_t tmpBuffSize = 0;

 for (uint32_t copyIdx = 0; copyIdx < copyCnt; copyIdx++) {
 auto& vkIC = _vkImageCopies[copyIdx];

+uint8_t srcPlaneIndex = MVKImage::getPlaneFromVkImageAspectFlags(vkIC.srcSubresource.aspectMask);
+uint8_t dstPlaneIndex = MVKImage::getPlaneFromVkImageAspectFlags(vkIC.dstSubresource.aspectMask);

+MTLPixelFormat srcMTLPixFmt = _srcImage->getMTLPixelFormat(srcPlaneIndex);
+bool isSrcCompressed = _srcImage->getIsCompressed();

+MTLPixelFormat dstMTLPixFmt = _dstImage->getMTLPixelFormat(dstPlaneIndex);
+bool isDstCompressed = _dstImage->getIsCompressed();

+// If source and destination have different formats and at least one is compressed, use a temporary intermediary buffer
+bool useTempBuffer = (srcMTLPixFmt != dstMTLPixFmt) && (isSrcCompressed || isDstCompressed);

+if (useTempBuffer) {
 // Add copy from source image to temp buffer.
 auto& srcCpy = vkSrcCopies[copyIdx];
 srcCpy.bufferOffset = tmpBuffSize;
@@ -129,8 +139,42 @@ void MVKCmdCopyImage<N>::encode(MVKCommandEncoder* cmdEncoder, MVKCommandUse com
 size_t bytesPerRow = pixFmts->getBytesPerRow(srcMTLPixFmt, vkIC.extent.width);
 size_t bytesPerRegion = pixFmts->getBytesPerLayer(srcMTLPixFmt, bytesPerRow, vkIC.extent.height);
 tmpBuffSize += bytesPerRegion;
+} else {
+// Map the source pixel format to the dest pixel format through a texture view on the source texture.
+// If the source and dest pixel formats are the same, this will simply degenerate to the source texture itself.
+id<MTLTexture> srcMTLTex = _srcImage->getMTLTexture(srcPlaneIndex, _dstImage->getMTLPixelFormat(dstPlaneIndex));
+id<MTLTexture> dstMTLTex = _dstImage->getMTLTexture(dstPlaneIndex);
+if ( !srcMTLTex || !dstMTLTex ) { return; }

+id<MTLBlitCommandEncoder> mtlBlitEnc = cmdEncoder->getMTLBlitEncoder(commandUse);

+// If copies can be performed using direct texture-texture copying, do so
+uint32_t srcLevel = vkIC.srcSubresource.mipLevel;
+MTLOrigin srcOrigin = mvkMTLOriginFromVkOffset3D(vkIC.srcOffset);
+MTLSize srcSize = mvkClampMTLSize(mvkMTLSizeFromVkExtent3D(vkIC.extent),
+srcOrigin,
+mvkMTLSizeFromVkExtent3D(_srcImage->getExtent3D(srcPlaneIndex, srcLevel)));
+uint32_t dstLevel = vkIC.dstSubresource.mipLevel;
+MTLOrigin dstOrigin = mvkMTLOriginFromVkOffset3D(vkIC.dstOffset);
+uint32_t srcBaseLayer = vkIC.srcSubresource.baseArrayLayer;
+uint32_t dstBaseLayer = vkIC.dstSubresource.baseArrayLayer;
+uint32_t layCnt = vkIC.srcSubresource.layerCount;

+for (uint32_t layIdx = 0; layIdx < layCnt; layIdx++) {
+[mtlBlitEnc copyFromTexture: srcMTLTex
+sourceSlice: srcBaseLayer + layIdx
+sourceLevel: srcLevel
+sourceOrigin: srcOrigin
+sourceSize: srcSize
+toTexture: dstMTLTex
+destinationSlice: dstBaseLayer + layIdx
+destinationLevel: dstLevel
+destinationOrigin: dstOrigin];
+}
+}
 }

+if (tmpBuffSize > 0) {
 MVKBufferDescriptorData tempBuffData;
 tempBuffData.size = tmpBuffSize;
 tempBuffData.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_STORAGE_BUFFER_BIT;
@@ -149,41 +193,6 @@ void MVKCmdCopyImage<N>::encode(MVKCommandEncoder* cmdEncoder, MVKCommandUse com
 // To be threadsafe...do NOT acquire and return the command from the pool.
 cpyCmd.setContent(cmdEncoder->_cmdBuffer, tempBuff, (VkImage)_dstImage, _dstLayout, copyCnt, vkDstCopies, true);
 cpyCmd.encode(cmdEncoder);

-} else {
-// Map the source pixel format to the dest pixel format through a texture view on the source texture.
-// If the source and dest pixel formats are the same, this will simply degenerate to the source texture itself.
-id<MTLTexture> srcMTLTex = _srcImage->getMTLTexture(_dstImage->getMTLPixelFormat());
-id<MTLTexture> dstMTLTex = _dstImage->getMTLTexture();
-if ( !srcMTLTex || !dstMTLTex ) { return; }

-id<MTLBlitCommandEncoder> mtlBlitEnc = cmdEncoder->getMTLBlitEncoder(commandUse);

-// If copies can be performed using direct texture-texture copying, do so
-for (auto& cpyRgn : _vkImageCopies) {
-uint32_t srcLevel = cpyRgn.srcSubresource.mipLevel;
-MTLOrigin srcOrigin = mvkMTLOriginFromVkOffset3D(cpyRgn.srcOffset);
-MTLSize srcSize = mvkClampMTLSize(mvkMTLSizeFromVkExtent3D(cpyRgn.extent),
-srcOrigin,
-mvkMTLSizeFromVkExtent3D(_srcImage->getExtent3D(srcLevel)));
-uint32_t dstLevel = cpyRgn.dstSubresource.mipLevel;
-MTLOrigin dstOrigin = mvkMTLOriginFromVkOffset3D(cpyRgn.dstOffset);
-uint32_t srcBaseLayer = cpyRgn.srcSubresource.baseArrayLayer;
-uint32_t dstBaseLayer = cpyRgn.dstSubresource.baseArrayLayer;
-uint32_t layCnt = cpyRgn.srcSubresource.layerCount;

-for (uint32_t layIdx = 0; layIdx < layCnt; layIdx++) {
-[mtlBlitEnc copyFromTexture: srcMTLTex
-sourceSlice: srcBaseLayer + layIdx
-sourceLevel: srcLevel
-sourceOrigin: srcOrigin
-sourceSize: srcSize
-toTexture: dstMTLTex
-destinationSlice: dstBaseLayer + layIdx
-destinationLevel: dstLevel
-destinationOrigin: dstOrigin];
-}
-}
 }
 }

@@ -208,33 +217,37 @@ VkResult MVKCmdBlitImage<N>::setContent(MVKCommandBuffer* cmdBuff,

 _srcImage = (MVKImage*)srcImage;
 _srcLayout = srcImageLayout;

 _dstImage = (MVKImage*)dstImage;
 _dstLayout = dstImageLayout;

 _filter = filter;

 _vkImageBlits.clear(); // Clear for reuse
-for (uint32_t i = 0; i < regionCount; i++) {
-_vkImageBlits.push_back(pRegions[i]);
-}
+for (uint32_t regionIdx = 0; regionIdx < regionCount; regionIdx++) {
+auto& vkIR = pRegions[regionIdx];
+uint8_t srcPlaneIndex = MVKImage::getPlaneFromVkImageAspectFlags(vkIR.srcSubresource.aspectMask);

 // Validate - depth stencil formats cannot be scaled or inverted
-MTLPixelFormat srcMTLPixFmt = _srcImage->getMTLPixelFormat();
+MTLPixelFormat srcMTLPixFmt = _srcImage->getMTLPixelFormat(srcPlaneIndex);
 if (pixFmts->isDepthFormat(srcMTLPixFmt) || pixFmts->isStencilFormat(srcMTLPixFmt)) {
-bool canCopyFmts = canCopyFormats();
 for (auto& vkIB : _vkImageBlits) {
-if ( !(canCopyFmts && canCopy(vkIB)) ) {
+if ( !(canCopyFormats(vkIB) && canCopy(vkIB)) ) {
 return reportError(VK_ERROR_FEATURE_NOT_PRESENT, "vkCmdBlitImage(): Scaling or inverting depth/stencil images is not supported.");
 }
 }
 }

+_vkImageBlits.push_back(vkIR);
+}

 return VK_SUCCESS;
 }

 template <size_t N>
-bool MVKCmdBlitImage<N>::canCopyFormats() {
-return ((_srcImage->getMTLPixelFormat() == _dstImage->getMTLPixelFormat()) &&
+bool MVKCmdBlitImage<N>::canCopyFormats(const VkImageBlit& region) {
+uint8_t srcPlaneIndex = MVKImage::getPlaneFromVkImageAspectFlags(region.srcSubresource.aspectMask);
+uint8_t dstPlaneIndex = MVKImage::getPlaneFromVkImageAspectFlags(region.dstSubresource.aspectMask);
+return ((_srcImage->getMTLPixelFormat(srcPlaneIndex) == _dstImage->getMTLPixelFormat(dstPlaneIndex)) &&
 (_dstImage->getSampleCount() == _srcImage->getSampleCount()));
 }

@@ -255,8 +268,10 @@ void MVKCmdBlitImage<N>::populateVertices(MVKVertexPosTex* vertices, const VkIma
 const VkOffset3D& do1 = region.dstOffsets[1];

 // Get the extents of the source and destination textures.
-VkExtent3D srcExtent = _srcImage->getExtent3D(region.srcSubresource.mipLevel);
-VkExtent3D dstExtent = _dstImage->getExtent3D(region.dstSubresource.mipLevel);
+uint8_t srcPlaneIndex = MVKImage::getPlaneFromVkImageAspectFlags(region.srcSubresource.aspectMask);
+uint8_t dstPlaneIndex = MVKImage::getPlaneFromVkImageAspectFlags(region.dstSubresource.aspectMask);
+VkExtent3D srcExtent = _srcImage->getExtent3D(srcPlaneIndex, region.srcSubresource.mipLevel);
+VkExtent3D dstExtent = _dstImage->getExtent3D(dstPlaneIndex, region.dstSubresource.mipLevel);

 // Determine the bottom-left and top-right corners of the source and destination
 // texture regions, each as a fraction of the corresponding texture size.
@@ -317,9 +332,8 @@ void MVKCmdBlitImage<N>::encode(MVKCommandEncoder* cmdEncoder, MVKCommandUse com

 // Separate BLITs into those that are really just simple texure region copies,
 // and those that require rendering
-bool canCopyFmts = canCopyFormats();
 for (auto& vkIB : _vkImageBlits) {
-if (canCopyFmts && canCopy(vkIB)) {
+if (canCopyFormats(vkIB) && canCopy(vkIB)) {

 const VkOffset3D& so0 = vkIB.srcOffsets[0];
 const VkOffset3D& so1 = vkIB.srcOffsets[1];
@@ -351,10 +365,15 @@ void MVKCmdBlitImage<N>::encode(MVKCommandEncoder* cmdEncoder, MVKCommandUse com
 }

 // Perform those BLITs that require rendering to destination texture.
-id<MTLTexture> srcMTLTex = _srcImage->getMTLTexture();
-id<MTLTexture> dstMTLTex = _dstImage->getMTLTexture();
-if (blitCnt && srcMTLTex && dstMTLTex) {
+for (uint32_t blitIdx = 0; blitIdx < blitCnt; blitIdx++) {
+auto& mvkIBR = mvkBlitRenders[blitIdx];

+uint8_t srcPlaneIndex = MVKImage::getPlaneFromVkImageAspectFlags(mvkIBR.region.srcSubresource.aspectMask);
+uint8_t dstPlaneIndex = MVKImage::getPlaneFromVkImageAspectFlags(mvkIBR.region.dstSubresource.aspectMask);

+id<MTLTexture> srcMTLTex = _srcImage->getMTLTexture(srcPlaneIndex);
+id<MTLTexture> dstMTLTex = _dstImage->getMTLTexture(dstPlaneIndex);
+if (blitCnt && srcMTLTex && dstMTLTex) {
 cmdEncoder->endCurrentMetalEncoding();

 MTLRenderPassDescriptor* mtlRPD = [MTLRenderPassDescriptor renderPassDescriptor];
@@ -364,18 +383,15 @@ void MVKCmdBlitImage<N>::encode(MVKCommandEncoder* cmdEncoder, MVKCommandUse com
 mtlColorAttDesc.texture = dstMTLTex;

 MVKRPSKeyBlitImg blitKey;
-blitKey.srcMTLPixelFormat = _srcImage->getMTLPixelFormat();
+blitKey.srcMTLPixelFormat = _srcImage->getMTLPixelFormat(srcPlaneIndex);
 blitKey.srcMTLTextureType = _srcImage->getMTLTextureType();
-blitKey.dstMTLPixelFormat = _dstImage->getMTLPixelFormat();
+blitKey.dstMTLPixelFormat = _dstImage->getMTLPixelFormat(dstPlaneIndex);
 blitKey.srcFilter = mvkMTLSamplerMinMagFilterFromVkFilter(_filter);
 blitKey.dstSampleCount = mvkSampleCountFromVkSampleCountFlagBits(_dstImage->getSampleCount());
 id<MTLRenderPipelineState> mtlRPS = cmdEncoder->getCommandEncodingPool()->getCmdBlitImageMTLRenderPipelineState(blitKey);

 uint32_t vtxBuffIdx = cmdEncoder->getDevice()->getMetalBufferIndexForVertexAttributeBinding(kMVKVertexContentBufferIndex);

-for (uint32_t blitIdx = 0; blitIdx < blitCnt; blitIdx++) {
-auto& mvkIBR = mvkBlitRenders[blitIdx];

 mtlColorAttDesc.level = mvkIBR.region.dstSubresource.mipLevel;

 uint32_t layCnt = mvkIBR.region.srcSubresource.layerCount;
@@ -428,16 +444,19 @@ VkResult MVKCmdResolveImage<N>::setContent(MVKCommandBuffer* cmdBuff,

 _vkImageResolves.clear(); // Clear for reuse
 _vkImageResolves.reserve(regionCount);
-for (uint32_t i = 0; i < regionCount; i++) {
-_vkImageResolves.push_back(pRegions[i]);
-}
+for (uint32_t regionIdx = 0; regionIdx < regionCount; regionIdx++) {
+auto& vkIR = pRegions[regionIdx];
+uint8_t dstPlaneIndex = MVKImage::getPlaneFromVkImageAspectFlags(vkIR.dstSubresource.aspectMask);

 // Validate
 MVKPixelFormats* pixFmts = cmdBuff->getPixelFormats();
-if ( !mvkAreAllFlagsEnabled(pixFmts->getCapabilities(_dstImage->getMTLPixelFormat()), kMVKMTLFmtCapsResolve) ) {
+if ( !mvkAreAllFlagsEnabled(pixFmts->getCapabilities(_dstImage->getMTLPixelFormat(dstPlaneIndex)), kMVKMTLFmtCapsResolve) ) {
 return reportError(VK_ERROR_FEATURE_NOT_PRESENT, "vkCmdResolveImage(): %s cannot be used as a resolve destination on this device.", pixFmts->getName(_dstImage->getVkFormat()));
 }

+_vkImageResolves.push_back(vkIR);
+}

 return VK_SUCCESS;
 }

@@ -457,10 +476,12 @@ void MVKCmdResolveImage<N>::encode(MVKCommandEncoder* cmdEncoder) {
 uint32_t sliceCnt = 0;

 for (VkImageResolve& vkIR : _vkImageResolves) {
+uint8_t srcPlaneIndex = MVKImage::getPlaneFromVkImageAspectFlags(vkIR.srcSubresource.aspectMask);
+uint8_t dstPlaneIndex = MVKImage::getPlaneFromVkImageAspectFlags(vkIR.dstSubresource.aspectMask);

 uint32_t mipLvl = vkIR.dstSubresource.mipLevel;
-VkExtent3D srcImgExt = _srcImage->getExtent3D(mipLvl);
-VkExtent3D dstImgExt = _dstImage->getExtent3D(mipLvl);
+VkExtent3D srcImgExt = _srcImage->getExtent3D(srcPlaneIndex, mipLvl);
+VkExtent3D dstImgExt = _dstImage->getExtent3D(dstPlaneIndex, mipLvl);

 // If the region does not cover the entire content of the source level, expand the
 // destination content in the region to the temporary image. The purpose of this
@@ -496,25 +517,23 @@ void MVKCmdResolveImage<N>::encode(MVKCommandEncoder* cmdEncoder) {
 uint32_t layCnt = vkIR.dstSubresource.layerCount;
 for (uint32_t layIdx = 0; layIdx < layCnt; layIdx++) {
 MVKMetalResolveSlice& rslvSlice = mtlResolveSlices[sliceCnt++];
+rslvSlice.copyRegion = &cpyRgn;
 rslvSlice.level = vkIR.dstSubresource.mipLevel;
 rslvSlice.slice = baseLayer + layIdx;
 }
 }

-id<MTLTexture> srcMTLTex;
-if (expCnt == 0) {
 // Expansion and copying is not required. Each mip level of the source image
 // is being resolved entirely. Resolve directly from the source image.
-srcMTLTex = _srcImage->getMTLTexture();
-} else {
+MVKImage* xfrImage = _srcImage;
+if (expCnt) {
 // Expansion and copying is required. Acquire a temporary transfer image, expand
 // the destination image into it, copy from the source image to the temporary image,
 // and then resolve from the temporary image to the destination image.
 MVKImageDescriptorData xferImageData;
 _dstImage->getTransferDescriptorData(xferImageData);
 xferImageData.samples = _srcImage->getSampleCount();
-MVKImage* xfrImage = cmdEncoder->getCommandEncodingPool()->getTransferMVKImage(xferImageData);
+xfrImage = cmdEncoder->getCommandEncodingPool()->getTransferMVKImage(xferImageData);

 // Expand the current content of the destination image to the temporary transfer image.
 MVKCmdBlitImage<N> expCmd;
@@ -530,8 +549,6 @@ void MVKCmdResolveImage<N>::encode(MVKCommandEncoder* cmdEncoder) {
 (VkImage)xfrImage, _dstLayout,
 copyCnt, copyRegions);
 copyCmd.encode(cmdEncoder, kMVKCommandUseResolveCopyImage);

-srcMTLTex = xfrImage->getMTLTexture();
 }

 cmdEncoder->endCurrentMetalEncoding();
@@ -540,13 +557,16 @@ void MVKCmdResolveImage<N>::encode(MVKCommandEncoder* cmdEncoder) {
 MTLRenderPassColorAttachmentDescriptor* mtlColorAttDesc = mtlRPD.colorAttachments[0];
 mtlColorAttDesc.loadAction = MTLLoadActionLoad;
 mtlColorAttDesc.storeAction = MTLStoreActionMultisampleResolve;
-mtlColorAttDesc.texture = srcMTLTex;
-mtlColorAttDesc.resolveTexture = _dstImage->getMTLTexture();

 // For each resolve slice, update the render pass descriptor for
 // the texture level and slice and create a render encoder.
 for (uint32_t sIdx = 0; sIdx < sliceCnt; sIdx++) {
 MVKMetalResolveSlice& rslvSlice = mtlResolveSlices[sIdx];
+uint8_t srcPlaneIndex = MVKImage::getPlaneFromVkImageAspectFlags(rslvSlice.copyRegion->srcSubresource.aspectMask);
+uint8_t dstPlaneIndex = MVKImage::getPlaneFromVkImageAspectFlags(rslvSlice.copyRegion->dstSubresource.aspectMask);

+mtlColorAttDesc.texture = xfrImage->getMTLTexture(srcPlaneIndex);
+mtlColorAttDesc.resolveTexture = _dstImage->getMTLTexture(dstPlaneIndex);
 mtlColorAttDesc.level = rslvSlice.level;
 mtlColorAttDesc.slice = rslvSlice.slice;
 mtlColorAttDesc.resolveLevel = rslvSlice.level;
@@ -675,12 +695,13 @@ VkResult MVKCmdBufferImageCopy<N>::setContent(MVKCommandBuffer* cmdBuff,
 _bufferImageCopyRegions.reserve(regionCount);
 for (uint32_t i = 0; i < regionCount; i++) {
 _bufferImageCopyRegions.push_back(pRegions[i]);
-}

 // Validate
 if ( !_image->hasExpectedTexelSize() ) {
+MTLPixelFormat mtlPixFmt = _image->getMTLPixelFormat(MVKImage::getPlaneFromVkImageAspectFlags(pRegions[i].imageSubresource.aspectMask));
 const char* cmdName = _toImage ? "vkCmdCopyBufferToImage" : "vkCmdCopyImageToBuffer";
-return reportError(VK_ERROR_FORMAT_NOT_SUPPORTED, "%s(): The image is using Metal format %s as a substitute for Vulkan format %s. Since the pixel size is different, content for the image cannot be copied to or from a buffer.", cmdName, cmdBuff->getPixelFormats()->getName(_image->getMTLPixelFormat()), cmdBuff->getPixelFormats()->getName(_image->getVkFormat()));
+return reportError(VK_ERROR_FORMAT_NOT_SUPPORTED, "%s(): The image is using Metal format %s as a substitute for Vulkan format %s. Since the pixel size is different, content for the image cannot be copied to or from a buffer.", cmdName, cmdBuff->getPixelFormats()->getName(mtlPixFmt), cmdBuff->getPixelFormats()->getName(_image->getVkFormat()));
+}
 }

 return VK_SUCCESS;
@@ -689,21 +710,23 @@ VkResult MVKCmdBufferImageCopy<N>::setContent(MVKCommandBuffer* cmdBuff,
 template <size_t N>
 void MVKCmdBufferImageCopy<N>::encode(MVKCommandEncoder* cmdEncoder) {
 id<MTLBuffer> mtlBuffer = _buffer->getMTLBuffer();
-id<MTLTexture> mtlTexture = _image->getMTLTexture();
-if ( !mtlBuffer || !mtlTexture ) { return; }
+if ( !mtlBuffer ) { return; }

 NSUInteger mtlBuffOffsetBase = _buffer->getMTLBufferOffset();
-MTLPixelFormat mtlPixFmt = _image->getMTLPixelFormat();
 MVKCommandUse cmdUse = _toImage ? kMVKCommandUseCopyBufferToImage : kMVKCommandUseCopyImageToBuffer;
 MVKPixelFormats* pixFmts = cmdEncoder->getPixelFormats();

 for (auto& cpyRgn : _bufferImageCopyRegions) {
+uint8_t planeIndex = MVKImage::getPlaneFromVkImageAspectFlags(cpyRgn.imageSubresource.aspectMask);
+MTLPixelFormat mtlPixFmt = _image->getMTLPixelFormat(planeIndex);
+id<MTLTexture> mtlTexture = _image->getMTLTexture(planeIndex);
+if ( !mtlTexture ) { continue; }

 uint32_t mipLevel = cpyRgn.imageSubresource.mipLevel;
 MTLOrigin mtlTxtOrigin = mvkMTLOriginFromVkOffset3D(cpyRgn.imageOffset);
 MTLSize mtlTxtSize = mvkClampMTLSize(mvkMTLSizeFromVkExtent3D(cpyRgn.imageExtent),
 mtlTxtOrigin,
-mvkMTLSizeFromVkExtent3D(_image->getExtent3D(mipLevel)));
+mvkMTLSizeFromVkExtent3D(_image->getExtent3D(planeIndex, mipLevel)));
 NSUInteger mtlBuffOffset = mtlBuffOffsetBase + cpyRgn.bufferOffset;

 uint32_t buffImgWd = cpyRgn.bufferRowLength;
@@ -1075,29 +1098,31 @@ VkResult MVKCmdClearImage<N>::setContent(MVKCommandBuffer* cmdBuff,
 // Add subresource ranges
 _subresourceRanges.clear(); // Clear for reuse
 _subresourceRanges.reserve(rangeCount);
-for (uint32_t i = 0; i < rangeCount; i++) {
-_subresourceRanges.push_back(pRanges[i]);
-}
+bool isDS = isDepthStencilClear();
+for (uint32_t rangeIdx = 0; rangeIdx < rangeCount; rangeIdx++) {
+auto& vkIR = pRanges[rangeIdx];
+uint8_t planeIndex = MVKImage::getPlaneFromVkImageAspectFlags(vkIR.aspectMask);

 // Validate
-bool isDS = isDepthStencilClear();
-if (_image->getImageType() == VK_IMAGE_TYPE_1D) {
-return reportError(VK_ERROR_FEATURE_NOT_PRESENT, "vkCmdClear%sImage(): Native 1D images cannot be cleared on this device. Consider enabling MVK_CONFIG_TEXTURE_1D_AS_2D.", (isDS ? "DepthStencil" : "Color"));
-}
-MVKMTLFmtCaps mtlFmtCaps = cmdBuff->getPixelFormats()->getCapabilities(_image->getMTLPixelFormat());
+MVKMTLFmtCaps mtlFmtCaps = cmdBuff->getPixelFormats()->getCapabilities(_image->getMTLPixelFormat(planeIndex));
 if ((isDS && !mvkAreAllFlagsEnabled(mtlFmtCaps, kMVKMTLFmtCapsDSAtt)) ||
 ( !isDS && !mvkAreAllFlagsEnabled(mtlFmtCaps, kMVKMTLFmtCapsColorAtt))) {
 return reportError(VK_ERROR_FEATURE_NOT_PRESENT, "vkCmdClear%sImage(): Format %s cannot be cleared on this device.", (isDS ? "DepthStencil" : "Color"), cmdBuff->getPixelFormats()->getName(_image->getVkFormat()));
 }

+_subresourceRanges.push_back(vkIR);
+}

+// Validate
+if (_image->getImageType() == VK_IMAGE_TYPE_1D) {
+return reportError(VK_ERROR_FEATURE_NOT_PRESENT, "vkCmdClear%sImage(): Native 1D images cannot be cleared on this device. Consider enabling MVK_CONFIG_TEXTURE_1D_AS_2D.", (isDS ? "DepthStencil" : "Color"));
+}

 return VK_SUCCESS;
 }

 template <size_t N>
 void MVKCmdClearImage<N>::encode(MVKCommandEncoder* cmdEncoder) {
-id<MTLTexture> imgMTLTex = _image->getMTLTexture();
-if ( !imgMTLTex ) { return; }

 bool isDS = isDepthStencilClear();
 NSString* mtlRendEncName = (isDS
 ? mvkMTLRenderCommandEncoderLabel(kMVKCommandUseClearDepthStencilImage)
@ -1107,6 +1132,8 @@ void MVKCmdClearImage<N>::encode(MVKCommandEncoder* cmdEncoder) {
|
|||||||
|
|
||||||
MVKPixelFormats* pixFmts = cmdEncoder->getPixelFormats();
|
MVKPixelFormats* pixFmts = cmdEncoder->getPixelFormats();
|
||||||
for (auto& srRange : _subresourceRanges) {
|
for (auto& srRange : _subresourceRanges) {
|
||||||
|
id<MTLTexture> imgMTLTex = _image->getMTLTexture(MVKImage::getPlaneFromVkImageAspectFlags(srRange.aspectMask));
|
||||||
|
if ( !imgMTLTex ) { continue; }
|
||||||
|
|
||||||
MTLRenderPassDescriptor* mtlRPDesc = [MTLRenderPassDescriptor renderPassDescriptor];
|
MTLRenderPassDescriptor* mtlRPDesc = [MTLRenderPassDescriptor renderPassDescriptor];
|
||||||
MTLRenderPassColorAttachmentDescriptor* mtlRPCADesc = nil;
|
MTLRenderPassColorAttachmentDescriptor* mtlRPCADesc = nil;
|
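The copy and clear paths above select a Metal texture by the plane that the subresource's aspect mask addresses. Below is a standalone sketch of that aspect-to-plane mapping, assuming the standard Vulkan multi-planar aspect bits; the real MVKImage::getPlaneFromVkImageAspectFlags() may handle additional cases.

    #include <vulkan/vulkan.h>
    #include <cstdint>

    // Hypothetical stand-in for an aspect-to-plane lookup: maps a subresource
    // aspect mask to the image plane it addresses.
    static uint8_t planeFromAspectMask(VkImageAspectFlags aspectMask) {
        if (aspectMask & VK_IMAGE_ASPECT_PLANE_1_BIT) { return 1; }
        if (aspectMask & VK_IMAGE_ASPECT_PLANE_2_BIT) { return 2; }
        return 0;   // plane 0, or a non-planar color/depth/stencil aspect
    }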
@@ -369,7 +369,7 @@ MVKImage* MVKCommandResourceFactory::newMVKImage(MVKImageDescriptorData& imgData
 		.initialLayout = VK_IMAGE_LAYOUT_PREINITIALIZED
 	};
 	MVKImage* mvkImg = _device->createImage(&createInfo, nullptr);
-	mvkImg->bindDeviceMemory(_transferImageMemory, 0);
+	mvkImg->bindDeviceMemory(_transferImageMemory, 0, 0);
 	return mvkImg;
 }

@@ -40,10 +40,10 @@ public:
#pragma mark Resource memory

 	/** Returns the memory requirements of this resource by populating the specified structure. */
-	VkResult getMemoryRequirements(VkMemoryRequirements* pMemoryRequirements) override;
+	VkResult getMemoryRequirements(VkMemoryRequirements* pMemoryRequirements);

 	/** Returns the memory requirements of this resource by populating the specified structure. */
-	VkResult getMemoryRequirements(const void* pInfo, VkMemoryRequirements2* pMemoryRequirements) override;
+	VkResult getMemoryRequirements(const void* pInfo, VkMemoryRequirements2* pMemoryRequirements);

 	/** Binds this resource to the specified offset within the specified memory allocation. */
 	VkResult bindDeviceMemory(MVKDeviceMemory* mvkMem, VkDeviceSize memOffset) override;
@@ -86,7 +86,6 @@ public:

protected:
 	friend class MVKDeviceMemory;
-	using MVKResource::needsHostReadSync;

 	void propagateDebugName() override;
 	bool needsHostReadSync(VkPipelineStageFlags srcStageMask,
@@ -71,7 +71,7 @@ public:
 	inline uint32_t getBinding() { return _info.binding; }

 	/** Returns the number of descriptors in this layout. */
-	inline uint32_t getDescriptorCount() { return _info.descriptorCount; }
+	inline uint32_t getDescriptorCount() { return (_info.descriptorType == VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT) ? 1 : _info.descriptorCount; }

 	/** Returns the descriptor type of this layout. */
 	inline VkDescriptorType getDescriptorType() { return _info.descriptorType; }
@@ -115,6 +115,7 @@ public:
 	~MVKDescriptorSetLayoutBinding() override;

protected:
+	friend class MVKInlineUniformBlockDescriptor;
 	void initMetalResourceIndexOffsets(MVKShaderStageResourceBinding* pBindingIndexes,
 									   MVKShaderStageResourceBinding* pDescSetCounts,
 									   const VkDescriptorSetLayoutBinding* pBinding);
@@ -152,11 +153,12 @@ public:

 	/**
 	 * Updates the internal binding from the specified content. The format of the content depends
-	 * on the descriptor type, and is extracted from pData at the location given by srcIndex * stride.
+	 * on the descriptor type, and is extracted from pData at the location given by index * stride.
+	 * MVKInlineUniformBlockDescriptor uses the index as byte offset to write to.
 	 */
 	virtual void write(MVKDescriptorSet* mvkDescSet,
 					   VkDescriptorType descriptorType,
-					   uint32_t srcIndex,
+					   uint32_t index,
 					   size_t stride,
 					   const void* pData) = 0;

@@ -167,12 +169,13 @@ public:
 	 * specified pImageInfo, pBufferInfo, or pTexelBufferView arrays, and the other
 	 * arrays are ignored (and may be a null pointer).
 	 *
-	 * The dstIndex parameter indicates the index of the initial descriptor element
+	 * The index parameter indicates the index of the initial descriptor element
 	 * at which to start writing.
+	 * MVKInlineUniformBlockDescriptor uses the index as byte offset to read from.
 	 */
 	virtual void read(MVKDescriptorSet* mvkDescSet,
 					  VkDescriptorType descriptorType,
-					  uint32_t dstIndex,
+					  uint32_t index,
 					  VkDescriptorImageInfo* pImageInfo,
 					  VkDescriptorBufferInfo* pBufferInfo,
 					  VkBufferView* pTexelBufferView,
@@ -284,25 +287,27 @@ public:

 	void write(MVKDescriptorSet* mvkDescSet,
 			   VkDescriptorType descriptorType,
-			   uint32_t srcIndex,
+			   uint32_t dstOffset, // For inline buffers we are using this parameter as dst offset not as src descIdx
 			   size_t stride,
 			   const void* pData) override;

 	void read(MVKDescriptorSet* mvkDescSet,
 			  VkDescriptorType descriptorType,
-			  uint32_t dstIndex,
+			  uint32_t srcOffset, // For inline buffers we are using this parameter as src offset not as dst descIdx
 			  VkDescriptorImageInfo* pImageInfo,
 			  VkDescriptorBufferInfo* pBufferInfo,
 			  VkBufferView* pTexelBufferView,
 			  VkWriteDescriptorSetInlineUniformBlockEXT* inlineUniformBlock) override;

+	void setLayout(MVKDescriptorSetLayoutBinding* dslBinding, uint32_t index) override;

 	void reset() override;

 	~MVKInlineUniformBlockDescriptor() { reset(); }

protected:
-	id<MTLBuffer> _mtlBuffer = nil;
-	uint32_t _dataSize = 0;
+	uint8_t* _buffer = nullptr;
+	uint32_t _length;
 };

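With this change the inline uniform block descriptor stops owning a dedicated MTLBuffer and instead keeps a plain host-side byte array sized from the binding's descriptorCount, with the index argument of write()/read() reinterpreted as a byte offset into that array. A minimal standalone sketch of that storage model follows; the names are illustrative, not MoltenVK's API.

    #include <cstdint>
    #include <cstring>
    #include <vector>

    // Illustrative model of a descriptor backed by host memory, where writes and
    // reads address a byte offset rather than a descriptor array element.
    class InlineUniformBlockStorage {
    public:
        void setLayout(uint32_t capacityBytes) { _bytes.assign(capacityBytes, 0); }

        void write(uint32_t dstOffset, const void* pData, uint32_t dataSize) {
            if (pData && dstOffset + dataSize <= _bytes.size()) {
                std::memcpy(_bytes.data() + dstOffset, pData, dataSize);
            }
        }

        void read(uint32_t srcOffset, void* pData, uint32_t dataSize) const {
            if (pData && srcOffset + dataSize <= _bytes.size()) {
                std::memcpy(pData, _bytes.data() + srcOffset, dataSize);
            }
        }

    private:
        std::vector<uint8_t> _bytes;   // stands in for the _buffer/_length pair above
    };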
@@ -89,11 +89,10 @@ uint32_t MVKDescriptorSetLayoutBinding::bind(MVKCommandEncoder* cmdEncoder,
 	// Establish the resource indices to use, by combining the offsets of the DSL and this DSL binding.
 	MVKShaderResourceBinding mtlIdxs = _mtlResourceIndexOffsets + dslMTLRezIdxOffsets;

-	uint32_t descCnt = _info.descriptorCount;
+	uint32_t descCnt = getDescriptorCount();
 	for (uint32_t descIdx = 0; descIdx < descCnt; descIdx++) {
 		MVKDescriptor* mvkDesc = descSet->getDescriptor(descStartIndex + descIdx);
-		mvkDesc->bind(cmdEncoder, _info.descriptorType, descIdx, _applyToStage,
-					  mtlIdxs, dynamicOffsets, pDynamicOffsetIndex);
+		mvkDesc->bind(cmdEncoder, _info.descriptorType, descIdx, _applyToStage, mtlIdxs, dynamicOffsets, pDynamicOffsetIndex);
 	}
 	return descCnt;
 }
@@ -167,13 +166,13 @@ void MVKDescriptorSetLayoutBinding::push(MVKCommandEncoder* cmdEncoder,
 		}

 		case VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT: {
-			const auto& inlineUniformBlock = get<VkWriteDescriptorSetInlineUniformBlockEXT>(pData, stride, rezIdx - dstArrayElement);
+			const auto& inlineUniformBlock = *(VkWriteDescriptorSetInlineUniformBlockEXT*)pData;
 			bb.mtlBytes = inlineUniformBlock.pData;
 			bb.size = inlineUniformBlock.dataSize;
 			bb.isInline = true;
 			for (uint32_t i = kMVKShaderStageVertex; i < kMVKShaderStageMax; i++) {
 				if (_applyToStage[i]) {
-					bb.index = mtlIdxs.stages[i].bufferIndex + rezIdx;
+					bb.index = mtlIdxs.stages[i].bufferIndex;
 					if (i == kMVKShaderStageCompute) {
 						if (cmdEncoder) { cmdEncoder->_computeResourcesState.bindBuffer(bb); }
 					} else {
@@ -189,12 +188,10 @@ void MVKDescriptorSetLayoutBinding::push(MVKCommandEncoder* cmdEncoder,
 		case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT: {
 			const auto& imageInfo = get<VkDescriptorImageInfo>(pData, stride, rezIdx - dstArrayElement);
 			MVKImageView* imageView = (MVKImageView*)imageInfo.imageView;
-			tb.mtlTexture = imageView->getMTLTexture();
-			if (_info.descriptorType == VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE) {
-				tb.swizzle = imageView->getPackedSwizzle();
-			} else {
-				tb.swizzle = 0;
-			}
+			uint8_t planeCount = (imageView) ? imageView->getPlaneCount() : 1;
+			for (uint8_t planeIndex = 0; planeIndex < planeCount; planeIndex++) {
+				tb.mtlTexture = imageView->getMTLTexture(planeIndex);
+				tb.swizzle = (_info.descriptorType == VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE) ? imageView->getPackedSwizzle() : 0;
 			if (_info.descriptorType == VK_DESCRIPTOR_TYPE_STORAGE_IMAGE) {
 				id<MTLTexture> mtlTex = tb.mtlTexture;
 				if (mtlTex.parentTexture) { mtlTex = mtlTex.parentTexture; }
@@ -204,7 +201,7 @@ void MVKDescriptorSetLayoutBinding::push(MVKCommandEncoder* cmdEncoder,
 			}
 			for (uint32_t i = kMVKShaderStageVertex; i < kMVKShaderStageMax; i++) {
 				if (_applyToStage[i]) {
-					tb.index = mtlIdxs.stages[i].textureIndex + rezIdx;
+					tb.index = mtlIdxs.stages[i].textureIndex + rezIdx + planeIndex;
 					if (i == kMVKShaderStageCompute) {
 						if (cmdEncoder) { cmdEncoder->_computeResourcesState.bindTexture(tb); }
 					} else {
@@ -220,6 +217,7 @@ void MVKDescriptorSetLayoutBinding::push(MVKCommandEncoder* cmdEncoder,
 				}
 			}
 			}
+			}
 			break;
 		}

@@ -280,12 +278,10 @@ void MVKDescriptorSetLayoutBinding::push(MVKCommandEncoder* cmdEncoder,
 		case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER: {
 			const auto& imageInfo = get<VkDescriptorImageInfo>(pData, stride, rezIdx - dstArrayElement);
 			MVKImageView* imageView = (MVKImageView*)imageInfo.imageView;
-			tb.mtlTexture = imageView->getMTLTexture();
-			if (imageView) {
-				tb.swizzle = imageView->getPackedSwizzle();
-			} else {
-				tb.swizzle = 0;
-			}
+			uint8_t planeCount = (imageView) ? imageView->getPlaneCount() : 1;
+			for (uint8_t planeIndex = 0; planeIndex < planeCount; planeIndex++) {
+				tb.mtlTexture = imageView->getMTLTexture(planeIndex);
+				tb.swizzle = (imageView) ? imageView->getPackedSwizzle() : 0;
 			MVKSampler* sampler;
 			if (_immutableSamplers.empty()) {
 				sampler = (MVKSampler*)imageInfo.sampler;
@@ -296,7 +292,7 @@ void MVKDescriptorSetLayoutBinding::push(MVKCommandEncoder* cmdEncoder,
 			sb.mtlSamplerState = sampler->getMTLSamplerState();
 			for (uint32_t i = kMVKShaderStageVertex; i < kMVKShaderStageMax; i++) {
 				if (_applyToStage[i]) {
-					tb.index = mtlIdxs.stages[i].textureIndex + rezIdx;
+					tb.index = mtlIdxs.stages[i].textureIndex + rezIdx + planeIndex;
 					sb.index = mtlIdxs.stages[i].samplerIndex + rezIdx;
 					if (i == kMVKShaderStageCompute) {
 						if (cmdEncoder) { cmdEncoder->_computeResourcesState.bindTexture(tb); }
@@ -307,6 +303,7 @@ void MVKDescriptorSetLayoutBinding::push(MVKCommandEncoder* cmdEncoder,
 				}
 			}
 			}
+			}
 			break;
 		}

@@ -327,7 +324,7 @@ void MVKDescriptorSetLayoutBinding::push(MVKCommandEncoder* cmdEncoder,
// If depth compare is required, but unavailable on the device, the sampler can only be used as an immutable sampler
bool MVKDescriptorSetLayoutBinding::validate(MVKSampler* mvkSampler) {
 	if (mvkSampler->getRequiresConstExprSampler()) {
-		mvkSampler->reportError(VK_ERROR_FEATURE_NOT_PRESENT, "vkCmdPushDescriptorSet/vkCmdPushDescriptorSetWithTemplate(): Depth texture samplers using a compare operation can only be used as immutable samplers on this device.");
+		mvkSampler->reportError(VK_ERROR_FEATURE_NOT_PRESENT, "vkCmdPushDescriptorSet/vkCmdPushDescriptorSetWithTemplate(): Tried to push an immutable sampler.");
 		return false;
 	}
 	return true;
@@ -437,6 +434,18 @@ void MVKDescriptorSetLayoutBinding::initMetalResourceIndexOffsets(MVKShaderStage
 			if ( !_device->_pMetalFeatures->arrayOfSamplers ) {
 				_layout->setConfigurationResult(reportError(VK_ERROR_FEATURE_NOT_PRESENT, "Device %s does not support arrays of samplers.", _device->getName()));
 			}
+			if ( pBinding->pImmutableSamplers ) {
+				_layout->setConfigurationResult(reportError(VK_ERROR_FEATURE_NOT_PRESENT, "Sampler arrays contaning multi planar samplers are not supported."));
+			}
+		}
+
+		if ( pBinding->pImmutableSamplers ) {
+			for (uint32_t i = 0; i < pBinding->descriptorCount; i++) {
+				uint8_t planeCount = ((MVKSampler*)pBinding->pImmutableSamplers[i])->getPlaneCount();
+				if (planeCount > 1) {
+					pDescSetCounts->textureIndex += planeCount - 1;
+				}
+			}
 		}
 		break;

@@ -460,11 +469,15 @@ void MVKDescriptorSetLayoutBinding::initMetalResourceIndexOffsets(MVKShaderStage
 	case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
 	case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
 	case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
-	case VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT:
 		pBindingIndexes->bufferIndex = pDescSetCounts->bufferIndex;
 		pDescSetCounts->bufferIndex += pBinding->descriptorCount;
 		break;

+	case VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT:
+		pBindingIndexes->bufferIndex = pDescSetCounts->bufferIndex;
+		pDescSetCounts->bufferIndex += 1;
+		break;
+
 	default:
 		break;
 	}
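The index bookkeeping above reserves one extra Metal texture slot for every additional plane of an immutable multi-planar sampler, so each plane of a combined image sampler can land on its own texture binding. A small sketch of that accounting, under the assumption that each sampler can report its plane count:

    #include <cstdint>
    #include <vector>

    // Given the plane counts of the immutable samplers in one binding, returns how
    // many texture slots the binding consumes beyond one slot per descriptor.
    static uint32_t extraTextureSlotsForPlanes(const std::vector<uint8_t>& planeCounts) {
        uint32_t extra = 0;
        for (uint8_t planeCount : planeCounts) {
            if (planeCount > 1) { extra += planeCount - 1; }
        }
        return extra;
    }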
@@ -592,14 +605,14 @@ void MVKInlineUniformBlockDescriptor::bind(MVKCommandEncoder* cmdEncoder,
 										   MVKArrayRef<uint32_t> dynamicOffsets,
 										   uint32_t* pDynamicOffsetIndex) {
 	MVKMTLBufferBinding bb;

 	switch (descriptorType) {
 		case VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT: {
-			bb.mtlBuffer = _mtlBuffer;
-			bb.size = _dataSize;
+			bb.mtlBytes = _buffer;
+			bb.size = _length;
+			bb.isInline = true;
 			for (uint32_t i = kMVKShaderStageVertex; i < kMVKShaderStageMax; i++) {
 				if (stages[i]) {
-					bb.index = mtlIndexes.stages[i].bufferIndex + descriptorIndex;
+					bb.index = mtlIndexes.stages[i].bufferIndex;
 					if (i == kMVKShaderStageCompute) {
 						if (cmdEncoder) { cmdEncoder->_computeResourcesState.bindBuffer(bb); }
 					} else {
@@ -617,24 +630,15 @@ void MVKInlineUniformBlockDescriptor::bind(MVKCommandEncoder* cmdEncoder,

void MVKInlineUniformBlockDescriptor::write(MVKDescriptorSet* mvkDescSet,
 											VkDescriptorType descriptorType,
-											uint32_t srcIndex,
+											uint32_t dstOffset,
 											size_t stride,
 											const void* pData) {
 	switch (descriptorType) {
 		case VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT: {
-			const auto& srcInlineUniformBlock = get<VkWriteDescriptorSetInlineUniformBlockEXT>(pData, stride, srcIndex);
-			_dataSize = srcInlineUniformBlock.dataSize;
-
-			[_mtlBuffer release];
-			if (srcInlineUniformBlock.dataSize > 0) {
-				MTLResourceOptions mtlBuffOpts = MTLResourceStorageModeShared | MTLResourceCPUCacheModeDefaultCache;
-				_mtlBuffer = [mvkDescSet->getMTLDevice() newBufferWithBytes: srcInlineUniformBlock.pData
-																	 length: srcInlineUniformBlock.dataSize
-																	options: mtlBuffOpts];	// retained
-			} else {
-				_mtlBuffer = nil;
-			}
+			const auto& pInlineUniformBlock = *(VkWriteDescriptorSetInlineUniformBlockEXT*)pData;
+			if (pInlineUniformBlock.pData && _buffer) {
+				memcpy(_buffer + dstOffset, pInlineUniformBlock.pData, pInlineUniformBlock.dataSize);
			}

 			break;
 		}

@@ -645,21 +649,15 @@ void MVKInlineUniformBlockDescriptor::write(MVKDescriptorSet* mvkDescSet,

void MVKInlineUniformBlockDescriptor::read(MVKDescriptorSet* mvkDescSet,
 										   VkDescriptorType descriptorType,
-										   uint32_t dstIndex,
+										   uint32_t srcOffset,
 										   VkDescriptorImageInfo* pImageInfo,
 										   VkDescriptorBufferInfo* pBufferInfo,
 										   VkBufferView* pTexelBufferView,
 										   VkWriteDescriptorSetInlineUniformBlockEXT* pInlineUniformBlock) {
 	switch (descriptorType) {
 		case VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT: {
-			auto& dstInlineUniformBlock = pInlineUniformBlock[dstIndex];
-			void* pDstData = const_cast<void*>(dstInlineUniformBlock.pData);
-			void* pSrcData = _mtlBuffer.contents;
-			if (pSrcData && pDstData) {
-				memcpy(pDstData, pSrcData, _dataSize);
-				dstInlineUniformBlock.dataSize = _dataSize;
-			} else {
-				dstInlineUniformBlock.dataSize = 0;
-			}
+			if (_buffer && pInlineUniformBlock->pData) {
+				memcpy((void*)pInlineUniformBlock->pData, _buffer + srcOffset, pInlineUniformBlock->dataSize);
			}
 			break;
 		}

@@ -669,10 +667,15 @@ void MVKInlineUniformBlockDescriptor::read(MVKDescriptorSet* mvkDescSet,
 	}
 }

+void MVKInlineUniformBlockDescriptor::setLayout(MVKDescriptorSetLayoutBinding* dslBinding, uint32_t index) {
+	_length = dslBinding->_info.descriptorCount;
+	_buffer = (uint8_t*)malloc(_length);
+}
+
void MVKInlineUniformBlockDescriptor::reset() {
-	[_mtlBuffer release];
-	_mtlBuffer = nil;
-	_dataSize = 0;
+	free(_buffer);
+	_buffer = nullptr;
+	_length = 0;
 	MVKDescriptor::reset();
 }

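The image-descriptor bind path that follows runs once per plane of the attached image view and offsets the Metal texture (and, for storage images, buffer) index by the plane number. A schematic sketch of how the slots line up, with simplified types rather than the MoltenVK structures:

    #include <cstdint>
    #include <vector>

    // Schematic: a descriptor at element `descriptorIndex` of a binding whose textures
    // start at `baseTextureIndex` places plane p of a multi-planar image view at
    // Metal texture slot baseTextureIndex + descriptorIndex + p.
    static std::vector<uint32_t> planeTextureSlots(uint32_t baseTextureIndex,
                                                   uint32_t descriptorIndex,
                                                   uint8_t planeCount) {
        std::vector<uint32_t> slots;
        for (uint8_t planeIndex = 0; planeIndex < planeCount; planeIndex++) {
            slots.push_back(baseTextureIndex + descriptorIndex + planeIndex);
        }
        return slots;
    }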
@@ -688,22 +691,28 @@ void MVKImageDescriptor::bind(MVKCommandEncoder* cmdEncoder,
 							  MVKShaderResourceBinding& mtlIndexes,
 							  MVKArrayRef<uint32_t> dynamicOffsets,
 							  uint32_t* pDynamicOffsetIndex) {
-	MVKMTLTextureBinding tb;
-	MVKMTLBufferBinding bb;
 	switch (descriptorType) {
 		case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
 		case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
 		case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
-		case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER: {
+		case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
+			break;
+
+		default:
+			return;
+	}
+
+	uint8_t planeCount = (_mvkImageView) ? _mvkImageView->getPlaneCount() : 1;
+	for (uint8_t planeIndex = 0; planeIndex < planeCount; planeIndex++) {
+		MVKMTLTextureBinding tb;
+		MVKMTLBufferBinding bb;

 		if (_mvkImageView) {
-			tb.mtlTexture = _mvkImageView->getMTLTexture();
+			tb.mtlTexture = _mvkImageView->getMTLTexture(planeIndex);
 		}
-		if ((descriptorType == VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE || descriptorType == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER) &&
-			tb.mtlTexture) {
-			tb.swizzle = _mvkImageView->getPackedSwizzle();
-		} else {
-			tb.swizzle = 0;
-		}
+		tb.swizzle = ((descriptorType == VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE ||
+					   descriptorType == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER) &&
+					  tb.mtlTexture) ? _mvkImageView->getPackedSwizzle() : 0;
 		if (descriptorType == VK_DESCRIPTOR_TYPE_STORAGE_IMAGE && tb.mtlTexture) {
 			id<MTLTexture> mtlTex = tb.mtlTexture;
 			if (mtlTex.parentTexture) { mtlTex = mtlTex.parentTexture; }
@@ -713,14 +722,14 @@ void MVKImageDescriptor::bind(MVKCommandEncoder* cmdEncoder,
 		}
 		for (uint32_t i = kMVKShaderStageVertex; i < kMVKShaderStageMax; i++) {
 			if (stages[i]) {
-				tb.index = mtlIndexes.stages[i].textureIndex + descriptorIndex;
+				tb.index = mtlIndexes.stages[i].textureIndex + descriptorIndex + planeIndex;
 				if (i == kMVKShaderStageCompute) {
 					if (cmdEncoder) { cmdEncoder->_computeResourcesState.bindTexture(tb); }
 				} else {
 					if (cmdEncoder) { cmdEncoder->_graphicsResourcesState.bindTexture(MVKShaderStage(i), tb); }
 				}
 				if (descriptorType == VK_DESCRIPTOR_TYPE_STORAGE_IMAGE) {
-					bb.index = mtlIndexes.stages[i].bufferIndex + descriptorIndex;
+					bb.index = mtlIndexes.stages[i].bufferIndex + descriptorIndex + planeIndex;
 					if (i == kMVKShaderStageCompute) {
 						if (cmdEncoder) { cmdEncoder->_computeResourcesState.bindBuffer(bb); }
 					} else {
@@ -729,11 +738,6 @@ void MVKImageDescriptor::bind(MVKCommandEncoder* cmdEncoder,
 				}
 			}
 		}
-		break;
-		}
-
-		default:
-		break;
 	}
 }

@@ -845,7 +849,7 @@ void MVKSamplerDescriptorMixin::write(MVKDescriptorSet* mvkDescSet,
 	const auto* pImgInfo = &get<VkDescriptorImageInfo>(pData, stride, srcIndex);
 	_mvkSampler = (MVKSampler*)pImgInfo->sampler;
 	if (_mvkSampler && _mvkSampler->getRequiresConstExprSampler()) {
-		_mvkSampler->reportError(VK_ERROR_FEATURE_NOT_PRESENT, "vkUpdateDescriptorSets(): Depth texture samplers using a compare operation can only be used as immutable samplers on this device.");
+		_mvkSampler->reportError(VK_ERROR_FEATURE_NOT_PRESENT, "vkUpdateDescriptorSets(): Tried to push an immutable sampler.");
 	}

 	if (_mvkSampler) { _mvkSampler->retain(); }
@@ -209,13 +209,19 @@ void MVKDescriptorSet::write(const DescriptorAction* pDescriptorAction,
 							 const void* pData) {

 	VkDescriptorType descType = getDescriptorType(pDescriptorAction->dstBinding);
+	uint32_t descCnt = pDescriptorAction->descriptorCount;
+	if (descType == VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT) {
+		uint32_t dstStartIdx = _layout->getDescriptorIndex(pDescriptorAction->dstBinding, 0);
+		// For inline buffers we are using the index argument as dst offset not as src descIdx
+		_descriptors[dstStartIdx]->write(this, descType, pDescriptorAction->dstArrayElement, stride, pData);
+	} else {
 	uint32_t dstStartIdx = _layout->getDescriptorIndex(pDescriptorAction->dstBinding,
 													   pDescriptorAction->dstArrayElement);
-	uint32_t descCnt = pDescriptorAction->descriptorCount;
 	for (uint32_t descIdx = 0; descIdx < descCnt; descIdx++) {
 		_descriptors[dstStartIdx + descIdx]->write(this, descType, descIdx, stride, pData);
 	}
 	}
+}

// Create concrete implementations of the three variations of the write() function.
template void MVKDescriptorSet::write<VkWriteDescriptorSet>(const VkWriteDescriptorSet* pDescriptorAction,
@@ -232,12 +238,18 @@ void MVKDescriptorSet::read(const VkCopyDescriptorSet* pDescriptorCopy,
 							VkWriteDescriptorSetInlineUniformBlockEXT* pInlineUniformBlock) {

 	VkDescriptorType descType = getDescriptorType(pDescriptorCopy->srcBinding);
+	uint32_t descCnt = pDescriptorCopy->descriptorCount;
+	if (descType == VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT) {
+		pInlineUniformBlock->dataSize = pDescriptorCopy->descriptorCount;
+		uint32_t srcStartIdx = _layout->getDescriptorIndex(pDescriptorCopy->srcBinding, 0);
+		// For inline buffers we are using the index argument as src offset not as dst descIdx
+		_descriptors[srcStartIdx]->read(this, descType, pDescriptorCopy->srcArrayElement, pImageInfo, pBufferInfo, pTexelBufferView, pInlineUniformBlock);
+	} else {
 	uint32_t srcStartIdx = _layout->getDescriptorIndex(pDescriptorCopy->srcBinding,
 													   pDescriptorCopy->srcArrayElement);
-	uint32_t descCnt = pDescriptorCopy->descriptorCount;
 	for (uint32_t descIdx = 0; descIdx < descCnt; descIdx++) {
-		_descriptors[srcStartIdx + descIdx]->read(this, descType, descIdx, pImageInfo, pBufferInfo,
-												  pTexelBufferView, pInlineUniformBlock);
+		_descriptors[srcStartIdx + descIdx]->read(this, descType, descIdx, pImageInfo, pBufferInfo, pTexelBufferView, pInlineUniformBlock);
+	}
 	}
 }

@@ -249,14 +261,22 @@ MVKDescriptorSet::MVKDescriptorSet(MVKDescriptorSetLayout* layout, MVKDescriptor
 	uint32_t bindCnt = (uint32_t)layout->_bindings.size();
 	for (uint32_t bindIdx = 0; bindIdx < bindCnt; bindIdx++) {
 		MVKDescriptorSetLayoutBinding* mvkDSLBind = &layout->_bindings[bindIdx];
+		MVKDescriptor* mvkDesc = nullptr;
+		if (mvkDSLBind->getDescriptorType() == VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT) {
+			setConfigurationResult(_pool->allocateDescriptor(mvkDSLBind->getDescriptorType(), &mvkDesc));
+			if ( !wasConfigurationSuccessful() ) { break; }
+
+			mvkDesc->setLayout(mvkDSLBind, 0);
+			_descriptors.push_back(mvkDesc);
+		} else {
 		uint32_t descCnt = mvkDSLBind->getDescriptorCount();
 		for (uint32_t descIdx = 0; descIdx < descCnt; descIdx++) {
-			MVKDescriptor* mvkDesc = nullptr;
 			setConfigurationResult(_pool->allocateDescriptor(mvkDSLBind->getDescriptorType(), &mvkDesc));
 			if ( !wasConfigurationSuccessful() ) { break; }

 			mvkDesc->setLayout(mvkDSLBind, descIdx);
 			_descriptors.push_back(mvkDesc);
+		}
 		}
 		if ( !wasConfigurationSuccessful() ) { break; }
 	}
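For context on why the inline-uniform-block path above treats dstArrayElement as a byte offset: under VK_EXT_inline_uniform_block the application supplies the data through a VkWriteDescriptorSetInlineUniformBlockEXT chained to the write, and dstArrayElement/descriptorCount are expressed in bytes. A hedged usage sketch, in which the handles and the 16-byte size are placeholders:

    #include <vulkan/vulkan.h>

    // Writes 16 bytes of inline uniform data at byte offset 0 of binding 0.
    // `device` and `descriptorSet` are assumed to exist already.
    void writeInlineUniformBlock(VkDevice device, VkDescriptorSet descriptorSet, const void* pBytes) {
        VkWriteDescriptorSetInlineUniformBlockEXT inlineWrite = {};
        inlineWrite.sType    = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_INLINE_UNIFORM_BLOCK_EXT;
        inlineWrite.dataSize = 16;
        inlineWrite.pData    = pBytes;

        VkWriteDescriptorSet write = {};
        write.sType           = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
        write.pNext           = &inlineWrite;
        write.dstSet          = descriptorSet;
        write.dstBinding      = 0;
        write.dstArrayElement = 0;    // byte offset into the block
        write.descriptorCount = 16;   // number of bytes written
        write.descriptorType  = VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT;

        vkUpdateDescriptorSets(device, 1, &write, 0, nullptr);
    }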
@@ -57,6 +57,7 @@ class MVKPipelineCache;
class MVKPipelineLayout;
class MVKPipeline;
class MVKSampler;
+class MVKSamplerYcbcrConversion;
class MVKDescriptorSetLayout;
class MVKDescriptorPool;
class MVKDescriptorUpdateTemplate;
@@ -521,6 +522,11 @@ public:
 	void destroySampler(MVKSampler* mvkSamp,
 						const VkAllocationCallbacks* pAllocator);

+	MVKSamplerYcbcrConversion* createSamplerYcbcrConversion(const VkSamplerYcbcrConversionCreateInfo* pCreateInfo,
+															const VkAllocationCallbacks* pAllocator);
+	void destroySamplerYcbcrConversion(MVKSamplerYcbcrConversion* mvkSampConv,
+									   const VkAllocationCallbacks* pAllocator);
+
 	MVKDescriptorSetLayout* createDescriptorSetLayout(const VkDescriptorSetLayoutCreateInfo* pCreateInfo,
 													  const VkAllocationCallbacks* pAllocator);
 	void destroyDescriptorSetLayout(MVKDescriptorSetLayout* mvkDSL,
@@ -652,6 +658,7 @@ public:
 	const VkPhysicalDeviceVariablePointerFeatures _enabledVarPtrFeatures;
 	const VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT _enabledInterlockFeatures;
 	const VkPhysicalDeviceHostQueryResetFeaturesEXT _enabledHostQryResetFeatures;
+	const VkPhysicalDeviceSamplerYcbcrConversionFeatures _enabledSamplerYcbcrConversionFeatures;
 	const VkPhysicalDeviceScalarBlockLayoutFeaturesEXT _enabledScalarLayoutFeatures;
 	const VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT _enabledTexelBuffAlignFeatures;
 	const VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT _enabledVtxAttrDivFeatures;
@@ -114,6 +114,13 @@ void MVKPhysicalDevice::getFeatures(VkPhysicalDeviceFeatures2* features) {
 				hostQueryResetFeatures->hostQueryReset = true;
 				break;
 			}
+			case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ROBUSTNESS_2_FEATURES_EXT: {
+				auto* robustness2Features = (VkPhysicalDeviceRobustness2FeaturesEXT*)next;
+				robustness2Features->robustBufferAccess2 = false;
+				robustness2Features->robustImageAccess2 = true;
+				robustness2Features->nullDescriptor = false;
+				break;
+			}
 			case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SCALAR_BLOCK_LAYOUT_FEATURES_EXT: {
 				auto* scalarLayoutFeatures = (VkPhysicalDeviceScalarBlockLayoutFeaturesEXT*)next;
 				scalarLayoutFeatures->scalarBlockLayout = true;
@@ -139,6 +146,11 @@ void MVKPhysicalDevice::getFeatures(VkPhysicalDeviceFeatures2* features) {
 				portabilityFeatures->samplerMipLodBias = false;
 				break;
 			}
+			case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_YCBCR_CONVERSION_FEATURES: {
+				auto* samplerYcbcrConvFeatures = (VkPhysicalDeviceSamplerYcbcrConversionFeatures*)next;
+				samplerYcbcrConvFeatures->samplerYcbcrConversion = true;
+				break;
+			}
 			case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_INTEGER_FUNCTIONS_2_FEATURES_INTEL: {
 				auto* shaderIntFuncsFeatures = (VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL*)next;
 				shaderIntFuncsFeatures->shaderIntegerFunctions2 = true;
@@ -181,6 +193,14 @@ void MVKPhysicalDevice::getProperties(VkPhysicalDeviceProperties2* properties) {
 				pushDescProps->maxPushDescriptors = _properties.limits.maxPerStageResources;
 				break;
 			}
+			case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ROBUSTNESS_2_PROPERTIES_EXT: {
+				auto* robustness2Props = (VkPhysicalDeviceRobustness2PropertiesEXT*)next;
+				// This isn't implemented yet, but when it is, I expect that we'll wind up
+				// doing it manually.
+				robustness2Props->robustStorageBufferAccessSizeAlignment = 1;
+				robustness2Props->robustUniformBufferAccessSizeAlignment = 1;
+				break;
+			}
 			case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXEL_BUFFER_ALIGNMENT_PROPERTIES_EXT: {
 				auto* texelBuffAlignProps = (VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT*)next;
 				// Save the 'next' pointer; we'll unintentionally overwrite it
@@ -204,6 +224,15 @@ void MVKPhysicalDevice::getProperties(VkPhysicalDeviceProperties2* properties) {
 				portabilityProps->minVertexInputBindingStrideAlignment = 4;
 				break;
 			}
+			case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_PROPERTIES_EXT: {
+				auto* inlineUniformBlockProps = (VkPhysicalDeviceInlineUniformBlockPropertiesEXT*)next;
+				inlineUniformBlockProps->maxInlineUniformBlockSize = _metalFeatures.dynamicMTLBufferSize;
+				inlineUniformBlockProps->maxPerStageDescriptorInlineUniformBlocks = _properties.limits.maxPerStageDescriptorUniformBuffers;
+				inlineUniformBlockProps->maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks = _properties.limits.maxPerStageDescriptorUniformBuffers;
+				inlineUniformBlockProps->maxDescriptorSetInlineUniformBlocks = _properties.limits.maxDescriptorSetUniformBuffers;
+				inlineUniformBlockProps->maxDescriptorSetUpdateAfterBindInlineUniformBlocks = _properties.limits.maxDescriptorSetUniformBuffers;
+				break;
+			}
 			default:
 				break;
 		}
@@ -429,6 +458,18 @@ VkResult MVKPhysicalDevice::getImageFormatProperties(const VkPhysicalDeviceImage
 		}
 	}

+	for (const auto* nextProps = (VkBaseInStructure*)pImageFormatProperties->pNext; nextProps; nextProps = nextProps->pNext) {
+		switch (nextProps->sType) {
+			case VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_IMAGE_FORMAT_PROPERTIES: {
+				auto* samplerYcbcrConvProps = (VkSamplerYcbcrConversionImageFormatProperties*)nextProps;
+				samplerYcbcrConvProps->combinedImageSamplerDescriptorCount = _pixelFormats.getChromaSubsamplingPlaneCount(pImageFormatInfo->format);
+				break;
+			}
+			default:
+				break;
+		}
+	}
+
 	if ( !_pixelFormats.isSupported(pImageFormatInfo->format) ) { return VK_ERROR_FORMAT_NOT_SUPPORTED; }

 	if ( !getImageViewIsSupported(pImageFormatInfo) ) { return VK_ERROR_FORMAT_NOT_SUPPORTED; }
@@ -462,9 +503,9 @@ bool MVKPhysicalDevice::getImageViewIsSupported(const VkPhysicalDeviceImageForma
 							 .baseArrayLayer = 0,
 							 .layerCount = 1},
 	};
-	MTLPixelFormat mtlPixFmt;
+	MTLPixelFormat mtlPixFmt = _pixelFormats.getMTLPixelFormat(viewInfo.format);
 	bool useSwizzle;
-	return (MVKImageView::validateSwizzledMTLPixelFormat(&viewInfo, &_pixelFormats, this,
+	return (MVKImageView::validateSwizzledMTLPixelFormat(&viewInfo, this,
 														 _metalFeatures.nativeTextureSwizzle,
 														 _mvkInstance->getMoltenVKConfiguration()->fullImageViewSwizzle,
 														 mtlPixFmt, useSwizzle) == VK_SUCCESS);
@@ -1356,6 +1397,7 @@ void MVKPhysicalDevice::initProperties() {
 	uint32_t maxStorage = 0, maxUniform = 0;
 	bool singleTexelStorage = true, singleTexelUniform = true;
 	_pixelFormats.enumerateSupportedFormats({0, 0, VK_FORMAT_FEATURE_UNIFORM_TEXEL_BUFFER_BIT | VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_BIT}, true, [&](VkFormat vk) {
+		if ( _pixelFormats.getChromaSubsamplingComponentBits(vk) > 0 ) { return false; }	// Skip chroma subsampling formats
 		MTLPixelFormat mtlFmt = _pixelFormats.getMTLPixelFormat(vk);
 		if ( !mtlFmt ) { return false; }	// If format is invalid, avoid validation errors on MTLDevice format alignment calls

@@ -1535,7 +1577,7 @@ void MVKPhysicalDevice::initProperties() {
 	_properties.limits.maxComputeWorkGroupCount[1] = kMVKUndefinedLargeUInt32;
 	_properties.limits.maxComputeWorkGroupCount[2] = kMVKUndefinedLargeUInt32;

-	_properties.limits.maxDrawIndexedIndexValue = numeric_limits<uint32_t>::max();
+	_properties.limits.maxDrawIndexedIndexValue = numeric_limits<uint32_t>::max();	// Must be (2^32 - 1) to support fullDrawIndexUint32
 	_properties.limits.maxDrawIndirectCount = kMVKUndefinedLargeUInt32;

 	_properties.limits.maxClipDistances = kMVKUndefinedLargeUInt32;
@@ -2314,16 +2356,21 @@ MVKImage* MVKDevice::createImage(const VkImageCreateInfo* pCreateInfo,
 			break;
 		}
 	}
-	if (swapchainInfo) {
-		return (MVKImage*)addResource(new MVKPeerSwapchainImage(this, pCreateInfo, (MVKSwapchain*)swapchainInfo->swapchain, uint32_t(-1)));
+	MVKImage* mvkImg = (swapchainInfo)
+		? new MVKPeerSwapchainImage(this, pCreateInfo, (MVKSwapchain*)swapchainInfo->swapchain, uint32_t(-1))
+		: new MVKImage(this, pCreateInfo);
+	for (auto& memoryBinding : mvkImg->_memoryBindings) {
+		addResource(memoryBinding);
 	}
-	return (MVKImage*)addResource(new MVKImage(this, pCreateInfo));
+	return mvkImg;
 }

void MVKDevice::destroyImage(MVKImage* mvkImg,
 							 const VkAllocationCallbacks* pAllocator) {
 	if (mvkImg) {
-		removeResource(mvkImg);
+		for (auto& memoryBinding : mvkImg->_memoryBindings) {
+			removeResource(memoryBinding);
+		}
 		mvkImg->destroy();
 	}
 }
@@ -2352,13 +2399,19 @@ MVKPresentableSwapchainImage* MVKDevice::createPresentableSwapchainImage(const V
 															   MVKSwapchain* swapchain,
 															   uint32_t swapchainIndex,
 															   const VkAllocationCallbacks* pAllocator) {
-	return (MVKPresentableSwapchainImage*)addResource(new MVKPresentableSwapchainImage(this, pCreateInfo, swapchain, swapchainIndex));
+	MVKPresentableSwapchainImage* mvkImg = new MVKPresentableSwapchainImage(this, pCreateInfo, swapchain, swapchainIndex);
+	for (auto& memoryBinding : mvkImg->_memoryBindings) {
+		addResource(memoryBinding);
+	}
+	return mvkImg;
 }

void MVKDevice::destroyPresentableSwapchainImage(MVKPresentableSwapchainImage* mvkImg,
 												 const VkAllocationCallbacks* pAllocator) {
 	if (mvkImg) {
-		removeResource(mvkImg);
+		for (auto& memoryBinding : mvkImg->_memoryBindings) {
+			removeResource(memoryBinding);
+		}
 		mvkImg->destroy();
 	}
 }
@@ -2522,6 +2575,16 @@ void MVKDevice::destroySampler(MVKSampler* mvkSamp,
 	if (mvkSamp) { mvkSamp->destroy(); }
 }

+MVKSamplerYcbcrConversion* MVKDevice::createSamplerYcbcrConversion(const VkSamplerYcbcrConversionCreateInfo* pCreateInfo,
+																   const VkAllocationCallbacks* pAllocator) {
+	return new MVKSamplerYcbcrConversion(this, pCreateInfo);
+}
+
+void MVKDevice::destroySamplerYcbcrConversion(MVKSamplerYcbcrConversion* mvkSampConv,
+											  const VkAllocationCallbacks* pAllocator) {
+	mvkSampConv->destroy();
+}
+
MVKDescriptorSetLayout* MVKDevice::createDescriptorSetLayout(const VkDescriptorSetLayoutCreateInfo* pCreateInfo,
 															 const VkAllocationCallbacks* pAllocator) {
 	return new MVKDescriptorSetLayout(this, pCreateInfo);
@@ -2773,6 +2836,7 @@ MVKDevice::MVKDevice(MVKPhysicalDevice* physicalDevice, const VkDeviceCreateInfo
 	_enabledVarPtrFeatures(),
 	_enabledInterlockFeatures(),
 	_enabledHostQryResetFeatures(),
+	_enabledSamplerYcbcrConversionFeatures(),
 	_enabledScalarLayoutFeatures(),
 	_enabledTexelBuffAlignFeatures(),
 	_enabledVtxAttrDivFeatures(),
@@ -2934,6 +2998,7 @@ void MVKDevice::enableFeatures(const VkDeviceCreateInfo* pCreateInfo) {
 	mvkClear(&_enabledVarPtrFeatures);
 	mvkClear(&_enabledInterlockFeatures);
 	mvkClear(&_enabledHostQryResetFeatures);
+	mvkClear(&_enabledSamplerYcbcrConversionFeatures);
 	mvkClear(&_enabledScalarLayoutFeatures);
 	mvkClear(&_enabledTexelBuffAlignFeatures);
 	mvkClear(&_enabledVtxAttrDivFeatures);
@@ -2956,9 +3021,13 @@ void MVKDevice::enableFeatures(const VkDeviceCreateInfo* pCreateInfo) {
 	pdScalarLayoutFeatures.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SCALAR_BLOCK_LAYOUT_FEATURES_EXT;
 	pdScalarLayoutFeatures.pNext = &pdTexelBuffAlignFeatures;

+	VkPhysicalDeviceSamplerYcbcrConversionFeatures pdSamplerYcbcrConversionFeatures;
+	pdSamplerYcbcrConversionFeatures.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_YCBCR_CONVERSION_FEATURES;
+	pdSamplerYcbcrConversionFeatures.pNext = &pdScalarLayoutFeatures;
+
 	VkPhysicalDeviceHostQueryResetFeaturesEXT pdHostQryResetFeatures;
 	pdHostQryResetFeatures.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HOST_QUERY_RESET_FEATURES_EXT;
-	pdHostQryResetFeatures.pNext = &pdScalarLayoutFeatures;
+	pdHostQryResetFeatures.pNext = &pdSamplerYcbcrConversionFeatures;

 	VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT pdInterlockFeatures;
 	pdInterlockFeatures.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADER_INTERLOCK_FEATURES_EXT;
@@ -3055,6 +3124,13 @@ void MVKDevice::enableFeatures(const VkDeviceCreateInfo* pCreateInfo) {
 								   &pdHostQryResetFeatures.hostQueryReset, 1);
 				break;
 			}
+			case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_YCBCR_CONVERSION_FEATURES: {
+				auto* requestedFeatures = (VkPhysicalDeviceSamplerYcbcrConversionFeatures*)next;
+				enableFeatures(&_enabledSamplerYcbcrConversionFeatures.samplerYcbcrConversion,
+							   &requestedFeatures->samplerYcbcrConversion,
+							   &pdSamplerYcbcrConversionFeatures.samplerYcbcrConversion, 1);
+				break;
+			}
 			case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SCALAR_BLOCK_LAYOUT_FEATURES_EXT: {
 				auto* requestedFeatures = (VkPhysicalDeviceScalarBlockLayoutFeaturesEXT*)next;
 				enableFeatures(&_enabledScalarLayoutFeatures.scalarBlockLayout,
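The feature plumbing above mirrors how an application discovers the new capability: chain a VkPhysicalDeviceSamplerYcbcrConversionFeatures struct into vkGetPhysicalDeviceFeatures2 and check the resulting flag. A brief sketch, with the physical-device handle assumed to exist:

    #include <vulkan/vulkan.h>

    // Queries whether sampler Ycbcr conversion is supported on `physicalDevice`.
    bool supportsSamplerYcbcrConversion(VkPhysicalDevice physicalDevice) {
        VkPhysicalDeviceSamplerYcbcrConversionFeatures ycbcrFeatures = {};
        ycbcrFeatures.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_YCBCR_CONVERSION_FEATURES;

        VkPhysicalDeviceFeatures2 features2 = {};
        features2.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2;
        features2.pNext = &ycbcrFeatures;

        vkGetPhysicalDeviceFeatures2(physicalDevice, &features2);
        return ycbcrFeatures.samplerYcbcrConversion == VK_TRUE;
    }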
@@ -24,8 +24,7 @@

#import <Metal/Metal.h>

-class MVKBuffer;
-class MVKImage;
+class MVKImageMemoryBinding;

// TODO: These are inoperable placeholders until VK_KHR_external_memory_metal defines them properly
static const VkExternalMemoryHandleTypeFlagBits VK_EXTERNAL_MEMORY_HANDLE_TYPE_MTLBUFFER_BIT_KHR = VK_EXTERNAL_MEMORY_HANDLE_TYPE_FLAG_BITS_MAX_ENUM;
@@ -34,6 +33,12 @@ static const VkExternalMemoryHandleTypeFlagBits VK_EXTERNAL_MEMORY_HANDLE_TYPE_M

#pragma mark MVKDeviceMemory

+typedef struct {
+	VkDeviceSize offset = 0;
+	VkDeviceSize size = 0;
+} MVKMappedMemoryRange;
+
+
/** Represents a Vulkan device-space memory allocation. */
class MVKDeviceMemory : public MVKVulkanAPIDeviceObject {

@@ -78,6 +83,16 @@ public:
 	/** Unmaps a previously mapped memory range. */
 	void unmap();

+	/**
+	 * If this device memory is currently mapped to host memory, returns the range within
+	 * this device memory that is currently mapped to host memory, or returns {0,0} if
+	 * this device memory is not currently mapped to host memory.
+	 */
+	inline const MVKMappedMemoryRange& getMappedRange() { return _mappedRange; }
+
+	/** Returns whether this device memory is currently mapped to host memory. */
+	bool isMapped() { return _mappedRange.size > 0; }
+
 	/**
 	 * If this memory is host-visible, the specified memory range is flushed to the device.
 	 * Normally, flushing will only occur if the device memory is non-coherent, but flushing
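The replacement of the separate _mapOffset/_mapSize/_isMapped fields with a single mapped-range struct boils down to a small pattern in which a zero-sized range doubles as the unmapped state. A standalone sketch, not the MoltenVK types:

    #include <cstdint>

    typedef uint64_t DeviceSize;

    struct MappedMemoryRange {
        DeviceSize offset = 0;
        DeviceSize size   = 0;
    };

    // A zero-sized range means "not mapped", so no separate boolean flag is needed.
    struct MappedState {
        MappedMemoryRange mappedRange;

        bool isMapped() const { return mappedRange.size > 0; }
        void map(DeviceSize offset, DeviceSize size) { mappedRange = {offset, size}; }
        void unmap() { mappedRange = {0, 0}; }
    };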
||||||
@@ -131,15 +146,15 @@ public:
     ~MVKDeviceMemory() override;

 protected:
-    friend MVKBuffer;
-    friend MVKImage;
+    friend class MVKBuffer;
+    friend class MVKImageMemoryBinding;

     void propagateDebugName() override;
     VkDeviceSize adjustMemorySize(VkDeviceSize size, VkDeviceSize offset);
     VkResult addBuffer(MVKBuffer* mvkBuff);
     void removeBuffer(MVKBuffer* mvkBuff);
-    VkResult addImage(MVKImage* mvkImg);
-    void removeImage(MVKImage* mvkImg);
+    VkResult addImageMemoryBinding(MVKImageMemoryBinding* mvkImg);
+    void removeImageMemoryBinding(MVKImageMemoryBinding* mvkImg);
     bool ensureMTLHeap();
     bool ensureMTLBuffer();
     bool ensureHostMemory();
@@ -148,16 +163,14 @@ protected:
     void initExternalMemory(VkExternalMemoryHandleTypeFlags handleTypes);

     MVKSmallVector<MVKBuffer*, 4> _buffers;
-    MVKSmallVector<MVKImage*, 4> _images;
+    MVKSmallVector<MVKImageMemoryBinding*, 4> _imageMemoryBindings;
     std::mutex _rezLock;
     VkDeviceSize _allocationSize = 0;
-    VkDeviceSize _mapOffset = 0;
-    VkDeviceSize _mapSize = 0;
+    MVKMappedMemoryRange _mappedRange;
     id<MTLBuffer> _mtlBuffer = nil;
     id<MTLHeap> _mtlHeap = nil;
     void* _pMemory = nullptr;
     void* _pHostMemory = nullptr;
-    bool _isMapped = false;
     bool _isDedicated = false;
     MTLStorageMode _mtlStorageMode;
     MTLCPUCacheMode _mtlCPUCacheMode;
@@ -43,7 +43,7 @@ VkResult MVKDeviceMemory::map(VkDeviceSize offset, VkDeviceSize size, VkMemoryMa
         return reportError(VK_ERROR_MEMORY_MAP_FAILED, "Private GPU-only memory cannot be mapped to host memory.");
     }

-    if (_isMapped) {
+    if (isMapped()) {
         return reportError(VK_ERROR_MEMORY_MAP_FAILED, "Memory is already mapped. Call vkUnmapMemory() first.");
     }

@@ -51,9 +51,8 @@ VkResult MVKDeviceMemory::map(VkDeviceSize offset, VkDeviceSize size, VkMemoryMa
         return reportError(VK_ERROR_OUT_OF_HOST_MEMORY, "Could not allocate %llu bytes of host-accessible device memory.", _allocationSize);
     }

-    _mapOffset = offset;
-    _mapSize = adjustMemorySize(size, offset);
-    _isMapped = true;
+    _mappedRange.offset = offset;
+    _mappedRange.size = adjustMemorySize(size, offset);

     *ppData = (void*)((uintptr_t)_pMemory + offset);

@@ -65,17 +64,16 @@ VkResult MVKDeviceMemory::map(VkDeviceSize offset, VkDeviceSize size, VkMemoryMa

 void MVKDeviceMemory::unmap() {

-    if ( !_isMapped ) {
+    if ( !isMapped() ) {
         reportError(VK_ERROR_MEMORY_MAP_FAILED, "Memory is not mapped. Call vkMapMemory() first.");
         return;
     }

     // Coherent memory does not require flushing by app, so we must flush now.
-    flushToDevice(_mapOffset, _mapSize, isMemoryHostCoherent());
+    flushToDevice(_mappedRange.offset, _mappedRange.size, isMemoryHostCoherent());

-    _mapOffset = 0;
-    _mapSize = 0;
-    _isMapped = false;
+    _mappedRange.offset = 0;
+    _mappedRange.size = 0;
 }

 VkResult MVKDeviceMemory::flushToDevice(VkDeviceSize offset, VkDeviceSize size, bool evenIfCoherent) {
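As the unmap() path above notes, the implementation flushes the tracked range itself because coherent memory never receives an explicit flush from the application. For reference, the application-side counterpart flushes explicitly only for non-coherent memory. This sketch uses only standard Vulkan calls; the handles and the coherence flag are assumed to come from earlier allocation code.

// Write through a mapped pointer, flushing only when the memory type is not HOST_COHERENT.
#include <vulkan/vulkan.h>
#include <cstring>

void uploadBytes(VkDevice device, VkDeviceMemory memory, bool isHostCoherent,
                 const void* src, VkDeviceSize size) {
    void* dst = nullptr;
    if (vkMapMemory(device, memory, 0, size, 0, &dst) != VK_SUCCESS) { return; }
    std::memcpy(dst, src, (size_t)size);

    if (!isHostCoherent) {
        VkMappedMemoryRange range{};
        range.sType  = VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE;
        range.memory = memory;
        range.offset = 0;
        range.size   = VK_WHOLE_SIZE;
        vkFlushMappedMemoryRanges(device, 1, &range);   // make the writes visible to the device
    }
    vkUnmapMemory(device, memory);
}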
@@ -92,7 +90,7 @@ VkResult MVKDeviceMemory::flushToDevice(VkDeviceSize offset, VkDeviceSize size,
         // If we have an MTLHeap object, there's no need to sync memory manually between images and the buffer.
         if (!_mtlHeap) {
             lock_guard<mutex> lock(_rezLock);
-            for (auto& img : _images) { img->flushToDevice(offset, memSize); }
+            for (auto& img : _imageMemoryBindings) { img->flushToDevice(offset, memSize); }
             for (auto& buf : _buffers) { buf->flushToDevice(offset, memSize); }
         }
     }
@@ -107,7 +105,7 @@ VkResult MVKDeviceMemory::pullFromDevice(VkDeviceSize offset,
     VkDeviceSize memSize = adjustMemorySize(size, offset);
     if (memSize > 0 && isMemoryHostAccessible() && (evenIfCoherent || !isMemoryHostCoherent()) && !_mtlHeap) {
         lock_guard<mutex> lock(_rezLock);
-        for (auto& img : _images) { img->pullFromDevice(offset, memSize); }
+        for (auto& img : _imageMemoryBindings) { img->pullFromDevice(offset, memSize); }
         for (auto& buf : _buffers) { buf->pullFromDevice(offset, memSize); }

 #if MVK_MACOS
@@ -152,23 +150,23 @@ void MVKDeviceMemory::removeBuffer(MVKBuffer* mvkBuff) {
     mvkRemoveAllOccurances(_buffers, mvkBuff);
 }

-VkResult MVKDeviceMemory::addImage(MVKImage* mvkImg) {
+VkResult MVKDeviceMemory::addImageMemoryBinding(MVKImageMemoryBinding* mvkImg) {
     lock_guard<mutex> lock(_rezLock);

     // If a dedicated alloc, ensure this image is the one and only image
     // I am dedicated to.
-    if (_isDedicated && (_images.empty() || _images[0] != mvkImg) ) {
+    if (_isDedicated && (_imageMemoryBindings.empty() || _imageMemoryBindings[0] != mvkImg) ) {
         return reportError(VK_ERROR_OUT_OF_DEVICE_MEMORY, "Could not bind VkImage %p to a VkDeviceMemory dedicated to resource %p. A dedicated allocation may only be used with the resource it was dedicated to.", mvkImg, getDedicatedResource() );
     }

-    if (!_isDedicated) { _images.push_back(mvkImg); }
+    if (!_isDedicated) { _imageMemoryBindings.push_back(mvkImg); }

     return VK_SUCCESS;
 }

-void MVKDeviceMemory::removeImage(MVKImage* mvkImg) {
+void MVKDeviceMemory::removeImageMemoryBinding(MVKImageMemoryBinding* mvkImg) {
     lock_guard<mutex> lock(_rezLock);
-    mvkRemoveAllOccurances(_images, mvkImg);
+    mvkRemoveAllOccurances(_imageMemoryBindings, mvkImg);
 }

 // Ensures that this instance is backed by a MTLHeap object,
@@ -266,7 +264,7 @@ void MVKDeviceMemory::freeHostMemory() {

 MVKResource* MVKDeviceMemory::getDedicatedResource() {
     MVKAssert(_isDedicated, "This method should only be called on dedicated allocations!");
-    return _buffers.empty() ? (MVKResource*)_images[0] : (MVKResource*)_buffers[0];
+    return _buffers.empty() ? (MVKResource*)_imageMemoryBindings[0] : (MVKResource*)_buffers[0];
 }

 MVKDeviceMemory::MVKDeviceMemory(MVKDevice* device,
@@ -319,7 +317,9 @@ MVKDeviceMemory::MVKDeviceMemory(MVKDevice* device,
             }
         }
 #endif
-        _images.push_back((MVKImage*)dedicatedImage);
+        for (auto& memoryBinding : ((MVKImage*)dedicatedImage)->_memoryBindings) {
+            _imageMemoryBindings.push_back(memoryBinding);
+        }
         return;
     }

@@ -367,7 +367,7 @@ MVKDeviceMemory::~MVKDeviceMemory() {
     // to allow the resource to callback to remove itself from the collection.
     auto buffCopies = _buffers;
     for (auto& buf : buffCopies) { buf->bindDeviceMemory(nullptr, 0); }
-    auto imgCopies = _images;
+    auto imgCopies = _imageMemoryBindings;
     for (auto& img : imgCopies) { img->bindDeviceMemory(nullptr, 0); }

     [_mtlBuffer release];
@@ -28,11 +28,15 @@

 #import <IOSurface/IOSurfaceRef.h>

+class MVKImage;
 class MVKImageView;
 class MVKSwapchain;
 class MVKCommandEncoder;


+#pragma mark -
+#pragma mark MVKImagePlane
+
 /** Tracks the state of an image subresource. */
 typedef struct {
     VkImageSubresource subresource;
@@ -40,12 +44,116 @@ typedef struct {
     VkImageLayout layoutState;
 } MVKImageSubresource;

+class MVKImagePlane : public MVKBaseObject {
+
+public:
+
+    /** Returns the Vulkan API opaque object controlling this object. */
+    MVKVulkanAPIObject* getVulkanAPIObject() override;
+
+    /** Returns the Metal texture underlying this image plane. */
+    id<MTLTexture> getMTLTexture();
+
+    /** Returns a Metal texture that interprets the pixels in the specified format. */
+    id<MTLTexture> getMTLTexture(MTLPixelFormat mtlPixFmt);
+
+    void releaseMTLTexture();
+
+    ~MVKImagePlane();
+
+protected:
+    friend class MVKImageMemoryBinding;
+    friend MVKImage;
+
+    MTLTextureDescriptor* newMTLTextureDescriptor();
+    void initSubresources(const VkImageCreateInfo* pCreateInfo);
+    MVKImageSubresource* getSubresource(uint32_t mipLevel, uint32_t arrayLayer);
+    void updateMTLTextureContent(MVKImageSubresource& subresource, VkDeviceSize offset, VkDeviceSize size);
+    void getMTLTextureContent(MVKImageSubresource& subresource, VkDeviceSize offset, VkDeviceSize size);
+    bool overlaps(VkSubresourceLayout& imgLayout, VkDeviceSize offset, VkDeviceSize size);
+    void propagateDebugName();
+    MVKImageMemoryBinding* getMemoryBinding() const;
+    void applyImageMemoryBarrier(VkPipelineStageFlags srcStageMask,
+                                 VkPipelineStageFlags dstStageMask,
+                                 MVKPipelineBarrier& barrier,
+                                 MVKCommandEncoder* cmdEncoder,
+                                 MVKCommandUse cmdUse);
+    void pullFromDeviceOnCompletion(MVKCommandEncoder* cmdEncoder,
+                                    MVKImageSubresource& subresource,
+                                    const MVKMappedMemoryRange& mappedRange);
+
+    MVKImagePlane(MVKImage* image, uint8_t planeIndex);
+
+    MVKImage* _image;
+    uint8_t _planeIndex;
+    VkExtent2D _blockTexelSize;
+    uint32_t _bytesPerBlock;
+    MTLPixelFormat _mtlPixFmt;
+    id<MTLTexture> _mtlTexture;
+    std::unordered_map<NSUInteger, id<MTLTexture>> _mtlTextureViews;
+    MVKSmallVector<MVKImageSubresource, 1> _subresources;
+};
+
+
+#pragma mark -
+#pragma mark MVKImageMemoryBinding
+
+class MVKImageMemoryBinding : public MVKResource {
+
+public:
+
+    /** Returns the Vulkan type of this object. */
+    VkObjectType getVkObjectType() override { return VK_OBJECT_TYPE_UNKNOWN; }
+
+    /** Returns the debug report object type of this object. */
+    VkDebugReportObjectTypeEXT getVkDebugReportObjectType() override { return VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT; }
+
+    /** Returns the memory requirements of this resource by populating the specified structure. */
+    VkResult getMemoryRequirements(VkMemoryRequirements* pMemoryRequirements);
+
+    /** Returns the memory requirements of this resource by populating the specified structure. */
+    VkResult getMemoryRequirements(const void* pInfo, VkMemoryRequirements2* pMemoryRequirements);
+
+    /** Binds this resource to the specified offset within the specified memory allocation. */
+    VkResult bindDeviceMemory(MVKDeviceMemory* mvkMem, VkDeviceSize memOffset) override;
+
+    /** Applies the specified global memory barrier. */
+    void applyMemoryBarrier(VkPipelineStageFlags srcStageMask,
+                            VkPipelineStageFlags dstStageMask,
+                            MVKPipelineBarrier& barrier,
+                            MVKCommandEncoder* cmdEncoder,
+                            MVKCommandUse cmdUse) override;
+
+    ~MVKImageMemoryBinding();
+
+protected:
+    friend MVKDeviceMemory;
+    friend MVKImagePlane;
+    friend MVKImage;
+
+    void propagateDebugName() override;
+    bool needsHostReadSync(VkPipelineStageFlags srcStageMask,
+                           VkPipelineStageFlags dstStageMask,
+                           MVKPipelineBarrier& barrier);
+    bool shouldFlushHostMemory();
+    VkResult flushToDevice(VkDeviceSize offset, VkDeviceSize size);
+    VkResult pullFromDevice(VkDeviceSize offset, VkDeviceSize size);
+    uint8_t beginPlaneIndex() const;
+    uint8_t endPlaneIndex() const;
+
+    MVKImageMemoryBinding(MVKDevice* device, MVKImage* image, uint8_t planeIndex);
+
+    MVKImage* _image;
+    uint8_t _planeIndex;
+    bool _usesTexelBuffer;
+};
+
+
 #pragma mark -
 #pragma mark MVKImage

 /** Represents a Vulkan image. */
-class MVKImage : public MVKResource {
+class MVKImage : public MVKVulkanAPIDeviceObject {

 public:

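The two classes added above split what MVKImage previously handled on its own: MVKImagePlane holds the per-plane Metal texture state, while MVKImageMemoryBinding is the bindable unit that MVKDeviceMemory tracks. A simplified model of that decomposition, using hypothetical names rather than MoltenVK's API: a non-disjoint multi-planar image shares one binding across all planes, a disjoint one gets a binding per plane.

// Illustrative model only; not MoltenVK code.
#include <cstdint>
#include <vector>

struct Plane         { uint8_t planeIndex; };
struct MemoryBinding { uint8_t firstPlane; uint8_t planeCount; };

struct Image {
    std::vector<Plane>         planes;
    std::vector<MemoryBinding> bindings;

    Image(uint8_t planeCount, bool disjoint) {
        for (uint8_t p = 0; p < planeCount; ++p) { planes.push_back({p}); }
        if (disjoint) {
            for (uint8_t p = 0; p < planeCount; ++p) { bindings.push_back({p, 1}); }
        } else {
            bindings.push_back({0, planeCount});    // one binding spans every plane
        }
    }
};

// e.g. Image nv12(2, /*disjoint=*/false) -> 2 planes, 1 binding;
//      Image nv12Disjoint(2, true)       -> 2 planes, 2 bindings.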
@@ -55,6 +163,9 @@ public:
     /** Returns the debug report object type of this object. */
     VkDebugReportObjectTypeEXT getVkDebugReportObjectType() override { return VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT; }

+    /** Returns the plane index of VkImageAspectFlags. */
+    static uint8_t getPlaneFromVkImageAspectFlags(VkImageAspectFlags aspectMask);
+
     /**
      * Returns the Vulkan image type of this image.
      * This may be different than the value originally specified for the image
@@ -63,7 +174,7 @@ public:
     VkImageType getImageType();

     /** Returns the Vulkan image format of this image. */
-    VkFormat getVkFormat();
+    VkFormat getVkFormat() { return _vkFormat; };

     /** Returns whether this image has a depth or stencil format. */
     bool getIsDepthStencil();
@@ -81,7 +192,7 @@ public:
      * Returns the 3D extent of this image at the specified mipmap level.
      * For 2D or cube images, the Z component will be 1.
      */
-    VkExtent3D getExtent3D(uint32_t mipLevel);
+    VkExtent3D getExtent3D(uint8_t planeIndex, uint32_t mipLevel);

     /** Returns the number of mipmap levels in this image. */
     inline uint32_t getMipLevelCount() { return _mipLevels; }
@@ -101,7 +212,7 @@ public:
      * For compressed formats, this is the number of bytes in a row of blocks, which
      * will typically span more than one row of texels.
      */
-    VkDeviceSize getBytesPerRow(uint32_t mipLevel);
+    VkDeviceSize getBytesPerRow(uint8_t planeIndex, uint32_t mipLevel);

     /**
      * Returns the number of bytes per image layer (for cube, array, or 3D images)
@@ -109,7 +220,10 @@ public:
      * of bytes per row (as returned by the getBytesPerRow() function, multiplied by
      * the height of each 2D image.
      */
-    VkDeviceSize getBytesPerLayer(uint32_t mipLevel);
+    VkDeviceSize getBytesPerLayer(uint8_t planeIndex, uint32_t mipLevel);
+
+    /** Returns the number of planes of this image view. */
+    inline uint8_t getPlaneCount() { return _planes.size(); }

     /** Populates the specified layout for the specified sub-resource. */
     VkResult getSubresourceLayout(const VkImageSubresource* pSubresource,
@@ -122,24 +236,17 @@ public:
 #pragma mark Resource memory

     /** Returns the memory requirements of this resource by populating the specified structure. */
-    VkResult getMemoryRequirements(VkMemoryRequirements* pMemoryRequirements) override;
+    VkResult getMemoryRequirements(VkMemoryRequirements* pMemoryRequirements, uint8_t planeIndex);

     /** Returns the memory requirements of this resource by populating the specified structure. */
-    VkResult getMemoryRequirements(const void* pInfo, VkMemoryRequirements2* pMemoryRequirements) override;
+    VkResult getMemoryRequirements(const void* pInfo, VkMemoryRequirements2* pMemoryRequirements);

     /** Binds this resource to the specified offset within the specified memory allocation. */
-    VkResult bindDeviceMemory(MVKDeviceMemory* mvkMem, VkDeviceSize memOffset) override;
+    virtual VkResult bindDeviceMemory(MVKDeviceMemory* mvkMem, VkDeviceSize memOffset, uint8_t planeIndex);

     /** Binds this resource to the specified offset within the specified memory allocation. */
     virtual VkResult bindDeviceMemory2(const VkBindImageMemoryInfo* pBindInfo);

-    /** Applies the specified global memory barrier. */
-    void applyMemoryBarrier(VkPipelineStageFlags srcStageMask,
-                            VkPipelineStageFlags dstStageMask,
-                            MVKPipelineBarrier& barrier,
-                            MVKCommandEncoder* cmdEncoder,
-                            MVKCommandUse cmdUse) override;
-
     /** Applies the specified image memory barrier. */
     void applyImageMemoryBarrier(VkPipelineStageFlags srcStageMask,
                                  VkPipelineStageFlags dstStageMask,
@@ -150,10 +257,10 @@ public:
 #pragma mark Metal

     /** Returns the Metal texture underlying this image. */
-    virtual id<MTLTexture> getMTLTexture();
+    virtual id<MTLTexture> getMTLTexture(uint8_t planeIndex);

     /** Returns a Metal texture that interprets the pixels in the specified format. */
-    id<MTLTexture> getMTLTexture(MTLPixelFormat mtlPixFmt);
+    id<MTLTexture> getMTLTexture(uint8_t planeIndex, MTLPixelFormat mtlPixFmt);

     /**
      * Sets this image to use the specified MTLTexture.
@@ -163,7 +270,7 @@ public:
      *
      * If a MTLTexture has already been created for this image, it will be destroyed.
      */
-    VkResult setMTLTexture(id<MTLTexture> mtlTexture);
+    VkResult setMTLTexture(uint8_t planeIndex, id<MTLTexture> mtlTexture);

     /**
      * Indicates that this VkImage should use an IOSurface to underlay the Metal texture.
@@ -189,7 +296,7 @@ public:
     IOSurfaceRef getIOSurface();

     /** Returns the Metal pixel format of this image. */
-    inline MTLPixelFormat getMTLPixelFormat() { return _mtlPixelFormat; }
+    inline MTLPixelFormat getMTLPixelFormat(uint8_t planeIndex) { return _planes[planeIndex]->_mtlPixFmt; }

     /** Returns the Metal texture type of this image. */
     inline MTLTextureType getMTLTextureType() { return _mtlTextureType; }
@@ -212,12 +319,6 @@ public:
     /** Returns the Metal CPU cache mode used by this image. */
     MTLCPUCacheMode getMTLCPUCacheMode();

-    /**
-     * Returns whether the memory is automatically coherent between device and host.
-     * On macOS, this always returns false because textures cannot use Shared storage mode.
-     */
-    bool isMemoryHostCoherent();
-
 #pragma mark Construction

     MVKImage(MVKDevice* device, const VkImageCreateInfo* pCreateInfo);
@@ -225,50 +326,37 @@ public:
     ~MVKImage() override;

 protected:
-    friend class MVKDeviceMemory;
-    friend class MVKImageView;
-    using MVKResource::needsHostReadSync;
+    friend MVKDeviceMemory;
+    friend MVKDevice;
+    friend MVKImageMemoryBinding;
+    friend MVKImagePlane;
+    friend class MVKImageViewPlane;
+    friend MVKImageView;

     void propagateDebugName() override;
-    MVKImageSubresource* getSubresource(uint32_t mipLevel, uint32_t arrayLayer);
     void validateConfig(const VkImageCreateInfo* pCreateInfo, bool isAttachment);
     VkSampleCountFlagBits validateSamples(const VkImageCreateInfo* pCreateInfo, bool isAttachment);
     uint32_t validateMipLevels(const VkImageCreateInfo* pCreateInfo, bool isAttachment);
     bool validateLinear(const VkImageCreateInfo* pCreateInfo, bool isAttachment);
-    bool validateUseTexelBuffer();
-    void initSubresources(const VkImageCreateInfo* pCreateInfo);
-    void initSubresourceLayout(MVKImageSubresource& imgSubRez);
     void initExternalMemory(VkExternalMemoryHandleTypeFlags handleTypes);
-    id<MTLTexture> newMTLTexture();
-    void releaseMTLTexture();
     void releaseIOSurface();
-    MTLTextureDescriptor* newMTLTextureDescriptor();
-    void updateMTLTextureContent(MVKImageSubresource& subresource, VkDeviceSize offset, VkDeviceSize size);
-    void getMTLTextureContent(MVKImageSubresource& subresource, VkDeviceSize offset, VkDeviceSize size);
-    bool shouldFlushHostMemory();
-    VkResult flushToDevice(VkDeviceSize offset, VkDeviceSize size);
-    VkResult pullFromDevice(VkDeviceSize offset, VkDeviceSize size);
-    bool needsHostReadSync(VkPipelineStageFlags srcStageMask,
-                           VkPipelineStageFlags dstStageMask,
-                           MVKPipelineBarrier& barrier);

-    MVKSmallVector<MVKImageSubresource, 1> _subresources;
-    std::unordered_map<NSUInteger, id<MTLTexture>> _mtlTextureViews;
+    MVKSmallVector<MVKImageMemoryBinding*, 3> _memoryBindings;
+    MVKSmallVector<MVKImagePlane*, 3> _planes;
     VkExtent3D _extent;
     uint32_t _mipLevels;
     uint32_t _arrayLayers;
     VkSampleCountFlagBits _samples;
     VkImageUsageFlags _usage;
-    MTLPixelFormat _mtlPixelFormat;
+    VkFormat _vkFormat;
     MTLTextureType _mtlTextureType;
-    id<MTLTexture> _mtlTexture;
     std::mutex _lock;
     IOSurfaceRef _ioSurface;
     VkDeviceSize _rowByteAlignment;
     bool _isDepthStencilAttachment;
     bool _canSupportMTLTextureView;
     bool _hasExpectedTexelSize;
-    bool _usesTexelBuffer;
+    bool _hasChromaSubsampling;
     bool _isLinear;
     bool _is3DCompressed;
     bool _isAliasable;
@@ -284,12 +372,12 @@ class MVKSwapchainImage : public MVKImage {
 public:

     /** Binds this resource to the specified offset within the specified memory allocation. */
-    VkResult bindDeviceMemory(MVKDeviceMemory* mvkMem, VkDeviceSize memOffset) override;
+    VkResult bindDeviceMemory(MVKDeviceMemory* mvkMem, VkDeviceSize memOffset, uint8_t planeIndex) override;

 #pragma mark Metal

     /** Returns the Metal texture used by the CAMetalDrawable underlying this image. */
-    id<MTLTexture> getMTLTexture() override;
+    id<MTLTexture> getMTLTexture(uint8_t planeIndex) override;


 #pragma mark Construction
@@ -401,6 +489,37 @@ protected:
 };


+#pragma mark -
+#pragma mark MVKImageViewPlane
+
+class MVKImageViewPlane : public MVKBaseDeviceObject {
+
+    /** Returns the Vulkan API opaque object controlling this object. */
+    MVKVulkanAPIObject* getVulkanAPIObject() override;
+
+public:
+    /** Returns the Metal texture underlying this image view. */
+    id<MTLTexture> getMTLTexture();
+
+    void releaseMTLTexture();
+
+    ~MVKImageViewPlane();
+
+protected:
+    void propagateDebugName();
+    id<MTLTexture> newMTLTexture();
+    MVKImageViewPlane(MVKImageView* imageView, uint8_t planeIndex, MTLPixelFormat mtlPixFmt, const VkImageViewCreateInfo* pCreateInfo);
+
+    friend MVKImageView;
+    MVKImageView* _imageView;
+    uint8_t _planeIndex;
+    MTLPixelFormat _mtlPixFmt;
+    uint32_t _packedSwizzle;
+    id<MTLTexture> _mtlTexture;
+    bool _useMTLTextureView;
+};
+
+
 #pragma mark -
 #pragma mark MVKImageView

@@ -418,17 +537,20 @@ public:
 #pragma mark Metal

     /** Returns the Metal texture underlying this image view. */
-    id<MTLTexture> getMTLTexture();
+    id<MTLTexture> getMTLTexture(uint8_t planeIndex) { return _planes[planeIndex]->getMTLTexture(); }

     /** Returns the Metal pixel format of this image view. */
-    inline MTLPixelFormat getMTLPixelFormat() { return _mtlPixelFormat; }
+    inline MTLPixelFormat getMTLPixelFormat(uint8_t planeIndex) { return _planes[planeIndex]->_mtlPixFmt; }
+
+    /** Returns the packed component swizzle of this image view. */
+    inline uint32_t getPackedSwizzle() { return _planes[0]->_packedSwizzle; }
+
+    /** Returns the number of planes of this image view. */
+    inline uint8_t getPlaneCount() { return _planes.size(); }

     /** Returns the Metal texture type of this image view. */
     inline MTLTextureType getMTLTextureType() { return _mtlTextureType; }

-    /** Returns the packed component swizzle of this image view. */
-    inline uint32_t getPackedSwizzle() { return _packedSwizzle; }
-
     /**
      * Populates the texture of the specified render pass descriptor
      * with the Metal texture underlying this image.
@@ -458,7 +580,6 @@ public:
      * This is a static function that can be used to validate image view formats prior to creating one.
      */
     static VkResult validateSwizzledMTLPixelFormat(const VkImageViewCreateInfo* pCreateInfo,
-                                                   MVKPixelFormats* mvkPixFmts,
                                                    MVKVulkanAPIObject* apiObject,
                                                    bool hasNativeSwizzleSupport,
                                                    bool hasShaderSwizzleSupport,
@@ -472,23 +593,56 @@ public:
                                                    const VkImageViewCreateInfo* pCreateInfo,
                                                    const MVKConfiguration* pAltMVKConfig = nullptr);

-    ~MVKImageView() override;
+    ~MVKImageView();

 protected:
+    friend MVKImageViewPlane;
+
     void propagateDebugName() override;
-    id<MTLTexture> newMTLTexture();
-    void initMTLTextureViewSupport();
-    void validateImageViewConfig(const VkImageViewCreateInfo* pCreateInfo);

     MVKImage* _image;
+    MVKSmallVector<MVKImageViewPlane*, 3> _planes;
     VkImageSubresourceRange _subresourceRange;
     VkImageUsageFlags _usage;
-    id<MTLTexture> _mtlTexture;
     std::mutex _lock;
-    MTLPixelFormat _mtlPixelFormat;
     MTLTextureType _mtlTextureType;
-    uint32_t _packedSwizzle;
-    bool _useMTLTextureView;
+};
+
+
+#pragma mark -
+#pragma mark MVKSamplerYcbcrConversion
+
+/** Represents a Vulkan sampler ycbcr conversion. */
+class MVKSamplerYcbcrConversion : public MVKVulkanAPIDeviceObject {
+
+public:
+    /** Returns the Vulkan type of this object. */
+    VkObjectType getVkObjectType() override { return VK_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION; }
+
+    /** Returns the debug report object type of this object. */
+    VkDebugReportObjectTypeEXT getVkDebugReportObjectType() override { return VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_EXT; }
+
+    /** Returns the number of planes of this ycbcr conversion. */
+    inline uint8_t getPlaneCount() { return _planes; }
+
+    /** Writes this conversion settings to a MSL constant sampler */
+    void updateConstExprSampler(SPIRV_CROSS_NAMESPACE::MSLConstexprSampler& constExprSampler) const;
+
+    MVKSamplerYcbcrConversion(MVKDevice* device, const VkSamplerYcbcrConversionCreateInfo* pCreateInfo);
+
+    ~MVKSamplerYcbcrConversion() override {}
+
+protected:
+    void propagateDebugName() override {}
+
+    uint8_t _planes, _bpc;
+    SPIRV_CROSS_NAMESPACE::MSLFormatResolution _resolution;
+    SPIRV_CROSS_NAMESPACE::MSLSamplerFilter _chroma_filter;
+    SPIRV_CROSS_NAMESPACE::MSLChromaLocation _x_chroma_offset, _y_chroma_offset;
+    SPIRV_CROSS_NAMESPACE::MSLComponentSwizzle _swizzle[4];
+    SPIRV_CROSS_NAMESPACE::MSLSamplerYCbCrModelConversion _ycbcr_model;
+    SPIRV_CROSS_NAMESPACE::MSLSamplerYCbCrRange _ycbcr_range;
+    bool _forceExplicitReconstruction;
 };

@@ -509,6 +663,10 @@ public:
     /** Returns the Metal sampler state. */
     inline id<MTLSamplerState> getMTLSamplerState() { return _mtlSamplerState; }

+    /** Returns the number of planes if this is a ycbcr conversion or 0 otherwise. */
+    inline uint8_t getPlaneCount() { return (_ycbcrConversion) ? _ycbcrConversion->getPlaneCount() : 0; }
+
+
     /**
      * If this sampler requires hardcoding in MSL, populates the hardcoded sampler in the resource binding.
      * Returns whether this sampler requires hardcoding in MSL, and the constant sampler was populated.
@@ -529,5 +687,6 @@ protected:

     id<MTLSamplerState> _mtlSamplerState;
     SPIRV_CROSS_NAMESPACE::MSLConstexprSampler _constExprSampler;
+    MVKSamplerYcbcrConversion* _ycbcrConversion;
     bool _requiresConstExprSampler;
 };
(File diff suppressed because it is too large.)
@@ -622,6 +622,8 @@ void MVKInstance::initProcAddrs() {
     ADD_DVC_EXT_ENTRY_POINT(vkGetDescriptorSetLayoutSupportKHR, KHR_MAINTENANCE3);
     ADD_DVC_EXT_ENTRY_POINT(vkCmdPushDescriptorSetKHR, KHR_PUSH_DESCRIPTOR);
     ADD_DVC_EXT2_ENTRY_POINT(vkCmdPushDescriptorSetWithTemplateKHR, KHR_PUSH_DESCRIPTOR, KHR_DESCRIPTOR_UPDATE_TEMPLATE);
+    ADD_DVC_EXT_ENTRY_POINT(vkCreateSamplerYcbcrConversionKHR, KHR_SAMPLER_YCBCR_CONVERSION);
+    ADD_DVC_EXT_ENTRY_POINT(vkDestroySamplerYcbcrConversionKHR, KHR_SAMPLER_YCBCR_CONVERSION);
     ADD_DVC_EXT_ENTRY_POINT(vkCreateSwapchainKHR, KHR_SWAPCHAIN);
     ADD_DVC_EXT_ENTRY_POINT(vkDestroySwapchainKHR, KHR_SWAPCHAIN);
     ADD_DVC_EXT_ENTRY_POINT(vkGetSwapchainImagesKHR, KHR_SWAPCHAIN);
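With the two entry points registered above (the KHR aliases of the Vulkan 1.1 core calls), an application can create a conversion object and chain it into both its sampler and its image view. A hedged sketch using the core names; the format choice and the omission of error handling are assumptions made for brevity.

// Illustrative only: create a VkSamplerYcbcrConversion for an NV12-style format.
#include <vulkan/vulkan.h>

VkSamplerYcbcrConversion makeYcbcrConversion(VkDevice device) {
    VkSamplerYcbcrConversionCreateInfo convInfo{};
    convInfo.sType         = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_CREATE_INFO;
    convInfo.format        = VK_FORMAT_G8_B8R8_2PLANE_420_UNORM;
    convInfo.ycbcrModel    = VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_709;
    convInfo.ycbcrRange    = VK_SAMPLER_YCBCR_RANGE_ITU_NARROW;
    convInfo.components    = { VK_COMPONENT_SWIZZLE_IDENTITY, VK_COMPONENT_SWIZZLE_IDENTITY,
                               VK_COMPONENT_SWIZZLE_IDENTITY, VK_COMPONENT_SWIZZLE_IDENTITY };
    convInfo.xChromaOffset = VK_CHROMA_LOCATION_MIDPOINT;
    convInfo.yChromaOffset = VK_CHROMA_LOCATION_MIDPOINT;
    convInfo.chromaFilter  = VK_FILTER_LINEAR;

    VkSamplerYcbcrConversion conversion = VK_NULL_HANDLE;
    vkCreateSamplerYcbcrConversion(device, &convInfo, nullptr, &conversion);

    // The same VkSamplerYcbcrConversionInfo is chained into VkSamplerCreateInfo::pNext and
    // VkImageViewCreateInfo::pNext so the sampler and the view agree on the conversion.
    VkSamplerYcbcrConversionInfo chained{};
    chained.sType      = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_INFO;
    chained.conversion = conversion;
    (void)chained;

    return conversion;
}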
@@ -21,6 +21,7 @@
 #include "mvk_datatypes.h"
 #include "MVKEnvironment.h"
 #include "MVKBaseObject.h"
+#include <SPIRV-Cross/spirv_msl.hpp>
 #include <unordered_map>

 #import <Metal/Metal.h>
@@ -70,6 +71,8 @@ typedef enum : uint16_t {
     kMVKMTLFmtCapsDRMR = (kMVKMTLFmtCapsDRM | kMVKMTLFmtCapsResolve),
     kMVKMTLFmtCapsDRFMR = (kMVKMTLFmtCapsDRMR | kMVKMTLFmtCapsFilter),

+    kMVKMTLFmtCapsChromaSubsampling = kMVKMTLFmtCapsRF,
+    kMVKMTLFmtCapsMultiPlanar = kMVKMTLFmtCapsChromaSubsampling,
 } MVKMTLFmtCaps;


@@ -83,6 +86,8 @@ typedef struct {
     MTLPixelFormat mtlPixelFormatSubstitute;
     MTLVertexFormat mtlVertexFormat;
     MTLVertexFormat mtlVertexFormatSubstitute;
+    uint8_t chromaSubsamplingPlaneCount;
+    uint8_t chromaSubsamplingComponentBits;
     VkExtent2D blockTexelSize;
     uint32_t bytesPerBlock;
     MVKFormatType formatType;
@@ -92,7 +97,7 @@ typedef struct {

     inline double bytesPerTexel() const { return (double)bytesPerBlock / (double)(blockTexelSize.width * blockTexelSize.height); };

-    inline bool isSupported() const { return (mtlPixelFormat != MTLPixelFormatInvalid); };
+    inline bool isSupported() const { return (mtlPixelFormat != MTLPixelFormatInvalid || chromaSubsamplingPlaneCount > 0); };
     inline bool isSupportedOrSubstitutable() const { return isSupported() || (mtlPixelFormatSubstitute != MTLPixelFormatInvalid); };

     inline bool vertexIsSupported() const { return (mtlVertexFormat != MTLVertexFormatInvalid); };
@@ -174,16 +179,28 @@ public:

     /**
      * Returns the size of the compression block, measured in texels for a Vulkan format.
-     * The returned value will be {1, 1} for non-compressed formats.
+     * The returned value will be {1, 1} for non-compressed formats without chroma-subsampling.
      */
     VkExtent2D getBlockTexelSize(VkFormat vkFormat);

     /**
      * Returns the size of the compression block, measured in texels for a Metal format.
-     * The returned value will be {1, 1} for non-compressed formats.
+     * The returned value will be {1, 1} for non-compressed formats without chroma-subsampling.
      */
     VkExtent2D getBlockTexelSize(MTLPixelFormat mtlFormat);

+    /** Returns the number of planes of the specified chroma-subsampling (YCbCr) VkFormat */
+    uint8_t getChromaSubsamplingPlaneCount(VkFormat vkFormat);
+
+    /** Returns the number of bits per channel of the specified chroma-subsampling (YCbCr) VkFormat */
+    uint8_t getChromaSubsamplingComponentBits(VkFormat vkFormat);
+
+    /** Returns the MSLFormatResolution of the specified chroma-subsampling (YCbCr) VkFormat */
+    SPIRV_CROSS_NAMESPACE::MSLFormatResolution getChromaSubsamplingResolution(VkFormat vkFormat);
+
+    /** Returns the number of planes, blockTexelSize, bytesPerBlock and mtlPixFmt of each plane of the specified chroma-subsampling (YCbCr) VkFormat into the given arrays */
+    uint8_t getChromaSubsamplingPlanes(VkFormat vkFormat, VkExtent2D blockTexelSize[3], uint32_t bytesPerBlock[3], MTLPixelFormat mtlPixFmt[3]);
+
     /**
      * Returns the size, in bytes, of a texel of the specified Vulkan format.
      * The returned value may be fractional for certain compressed formats.
@@ -236,6 +236,61 @@ VkExtent2D MVKPixelFormats::getBlockTexelSize(MTLPixelFormat mtlFormat) {
     return getVkFormatDesc(mtlFormat).blockTexelSize;
 }

+uint8_t MVKPixelFormats::getChromaSubsamplingPlaneCount(VkFormat vkFormat) {
+    return getVkFormatDesc(vkFormat).chromaSubsamplingPlaneCount;
+}
+
+uint8_t MVKPixelFormats::getChromaSubsamplingComponentBits(VkFormat vkFormat) {
+    return getVkFormatDesc(vkFormat).chromaSubsamplingComponentBits;
+}
+
+SPIRV_CROSS_NAMESPACE::MSLFormatResolution MVKPixelFormats::getChromaSubsamplingResolution(VkFormat vkFormat) {
+    VkExtent2D blockTexelSize = getVkFormatDesc(vkFormat).blockTexelSize;
+    return (blockTexelSize.width != 2) ? SPIRV_CROSS_NAMESPACE::MSL_FORMAT_RESOLUTION_444
+         : (blockTexelSize.height != 2) ? SPIRV_CROSS_NAMESPACE::MSL_FORMAT_RESOLUTION_422
+         : SPIRV_CROSS_NAMESPACE::MSL_FORMAT_RESOLUTION_420;
+}
+
+uint8_t MVKPixelFormats::getChromaSubsamplingPlanes(VkFormat vkFormat, VkExtent2D blockTexelSize[3], uint32_t bytesPerBlock[3], MTLPixelFormat mtlPixFmt[3]) {
+    uint8_t planes = getChromaSubsamplingPlaneCount(vkFormat);
+    uint8_t bits = getChromaSubsamplingComponentBits(vkFormat);
+    SPIRV_CROSS_NAMESPACE::MSLFormatResolution resolution = getChromaSubsamplingResolution(vkFormat);
+    bytesPerBlock[0] = mvkCeilingDivide((uint32_t)bits/8U, 1U);
+    switch(resolution) {
+        default:
+            return 0;
+        case SPIRV_CROSS_NAMESPACE::MSL_FORMAT_RESOLUTION_444:
+            blockTexelSize[0] = blockTexelSize[1] = blockTexelSize[2] = VkExtent2D{1, 1};
+            break;
+        case SPIRV_CROSS_NAMESPACE::MSL_FORMAT_RESOLUTION_422:
+            blockTexelSize[0] = blockTexelSize[1] = blockTexelSize[2] = VkExtent2D{2, 1};
+            break;
+        case SPIRV_CROSS_NAMESPACE::MSL_FORMAT_RESOLUTION_420:
+            blockTexelSize[0] = blockTexelSize[1] = blockTexelSize[2] = VkExtent2D{2, 2};
+            break;
+    }
+    switch(planes) {
+        default:
+            return 0;
+        case 1:
+            bytesPerBlock[0] *= 4;
+            mtlPixFmt[0] = (bits == 8) ? MTLPixelFormatRGBA8Unorm : MTLPixelFormatRGBA16Unorm;
+            break;
+        case 2:
+            blockTexelSize[0] = VkExtent2D{1, 1};
+            bytesPerBlock[1] = bytesPerBlock[0]*2;
+            mtlPixFmt[0] = (bits == 8) ? MTLPixelFormatR8Unorm : MTLPixelFormatR16Unorm;
+            mtlPixFmt[1] = (bits == 8) ? MTLPixelFormatRG8Unorm : MTLPixelFormatRG16Unorm;
+            break;
+        case 3:
+            blockTexelSize[0] = VkExtent2D{1, 1};
+            bytesPerBlock[1] = bytesPerBlock[2] = bytesPerBlock[0];
+            mtlPixFmt[0] = mtlPixFmt[1] = mtlPixFmt[2] = (bits == 8) ? MTLPixelFormatR8Unorm : MTLPixelFormatR16Unorm;
+            break;
+    }
+    return planes;
+}
+
 float MVKPixelFormats::getBytesPerTexel(VkFormat vkFormat) {
     return getVkFormatDesc(vkFormat).bytesPerTexel();
 }
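To make the per-plane bookkeeping above concrete, here is a standalone illustration (not MoltenVK's helper) of the plane extents and tightly packed sizes for an 8-bit 2-plane 4:2:0 format such as VK_FORMAT_G8_B8R8_2PLANE_420_UNORM: the luma plane is full resolution at 1 byte per texel, and the interleaved CbCr plane is half resolution in both axes at 2 bytes per texel, giving 1.5 bytes per texel overall.

// Standalone illustration of 2-plane 4:2:0 layout arithmetic.
#include <cstdint>
#include <cstdio>

struct PlaneLayout { uint32_t width, height, bytesPerTexel; };

static uint64_t planeByteSize(const PlaneLayout& p) {
    return (uint64_t)p.width * p.height * p.bytesPerTexel;
}

int main() {
    const uint32_t imgW = 1920, imgH = 1080;
    PlaneLayout planes[2] = {
        { imgW,     imgH,     1 },   // Y plane
        { imgW / 2, imgH / 2, 2 },   // interleaved CbCr plane, subsampled 2x2
    };
    for (int i = 0; i < 2; ++i) {
        std::printf("plane %d: %ux%u, %llu bytes\n", i,
                    (unsigned)planes[i].width, (unsigned)planes[i].height,
                    (unsigned long long)planeByteSize(planes[i]));
    }
    // Total: 1920*1080*1 + 960*540*2 = 2,073,600 + 1,036,800 = 3,110,400 bytes = 1.5 bytes/texel.
    return 0;
}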
@@ -492,10 +547,16 @@ MVKPixelFormats::MVKPixelFormats(MVKPhysicalDevice* physicalDevice) : _physicalD
 //    test();
 }

-#define addVkFormatDesc(VK_FMT, MTL_FMT, MTL_FMT_ALT, MTL_VTX_FMT, MTL_VTX_FMT_ALT, BLK_W, BLK_H, BLK_BYTE_CNT, MVK_FMT_TYPE) \
+#define addVkFormatDescFull(VK_FMT, MTL_FMT, MTL_FMT_ALT, MTL_VTX_FMT, MTL_VTX_FMT_ALT, CSPC, CSCB, BLK_W, BLK_H, BLK_BYTE_CNT, MVK_FMT_TYPE) \
     MVKAssert(fmtIdx < _vkFormatCount, "Attempting to describe %d VkFormats, but only have space for %d. Increase the value of _vkFormatCount", fmtIdx + 1, _vkFormatCount); \
     _vkFormatDescriptions[fmtIdx++] = { VK_FORMAT_ ##VK_FMT, MTLPixelFormat ##MTL_FMT, MTLPixelFormat ##MTL_FMT_ALT, MTLVertexFormat ##MTL_VTX_FMT, MTLVertexFormat ##MTL_VTX_FMT_ALT, \
-                                        { BLK_W, BLK_H }, BLK_BYTE_CNT, kMVKFormat ##MVK_FMT_TYPE, { 0, 0, 0 }, "VK_FORMAT_" #VK_FMT, false }
+                                        CSPC, CSCB, { BLK_W, BLK_H }, BLK_BYTE_CNT, kMVKFormat ##MVK_FMT_TYPE, { 0, 0, 0 }, "VK_FORMAT_" #VK_FMT, false }
+
+#define addVkFormatDesc(VK_FMT, MTL_FMT, MTL_FMT_ALT, MTL_VTX_FMT, MTL_VTX_FMT_ALT, BLK_W, BLK_H, BLK_BYTE_CNT, MVK_FMT_TYPE) \
+    addVkFormatDescFull(VK_FMT, MTL_FMT, MTL_FMT_ALT, MTL_VTX_FMT, MTL_VTX_FMT_ALT, 0, 0, BLK_W, BLK_H, BLK_BYTE_CNT, MVK_FMT_TYPE)
+
+#define addVkFormatDescChromaSubsampling(VK_FMT, MTL_FMT, CSPC, CSCB, BLK_W, BLK_H, BLK_BYTE_CNT) \
+    addVkFormatDescFull(VK_FMT, MTL_FMT, Invalid, Invalid, Invalid, CSPC, CSCB, BLK_W, BLK_H, BLK_BYTE_CNT, ColorFloat)

 void MVKPixelFormats::initVkFormatCapabilities() {

@@ -744,9 +805,41 @@ void MVKPixelFormats::initVkFormatCapabilities() {
     addVkFormatDesc( PVRTC2_2BPP_SRGB_BLOCK_IMG, Invalid, Invalid, Invalid, Invalid, 8, 4, 8, Compressed );
     addVkFormatDesc( PVRTC2_4BPP_SRGB_BLOCK_IMG, Invalid, Invalid, Invalid, Invalid, 4, 4, 8, Compressed );

-    // Future extension VK_KHX_color_conversion and Vulkan 1.1.
-    addVkFormatDesc( UNDEFINED, GBGR422, Invalid, Invalid, Invalid, 2, 1, 4, ColorFloat );
-    addVkFormatDesc( UNDEFINED, BGRG422, Invalid, Invalid, Invalid, 2, 1, 4, ColorFloat );
+    // Extension VK_KHR_sampler_ycbcr_conversion
+    addVkFormatDescChromaSubsampling( G8B8G8R8_422_UNORM, GBGR422, 1, 8, 2, 1, 4 );
+    addVkFormatDescChromaSubsampling( B8G8R8G8_422_UNORM, BGRG422, 1, 8, 2, 1, 4 );
+    addVkFormatDescChromaSubsampling( G8_B8_R8_3PLANE_420_UNORM, Invalid, 3, 8, 2, 2, 6 );
+    addVkFormatDescChromaSubsampling( G8_B8R8_2PLANE_420_UNORM, Invalid, 2, 8, 2, 2, 6 );
+    addVkFormatDescChromaSubsampling( G8_B8_R8_3PLANE_422_UNORM, Invalid, 3, 8, 2, 1, 4 );
+    addVkFormatDescChromaSubsampling( G8_B8R8_2PLANE_422_UNORM, Invalid, 2, 8, 2, 1, 4 );
+    addVkFormatDescChromaSubsampling( G8_B8_R8_3PLANE_444_UNORM, Invalid, 3, 8, 1, 1, 3 );
+    addVkFormatDescChromaSubsampling( R10X6_UNORM_PACK16, Invalid, 0, 10, 1, 1, 2 );
+    addVkFormatDescChromaSubsampling( R10X6G10X6_UNORM_2PACK16, Invalid, 0, 10, 1, 1, 4 );
+    addVkFormatDescChromaSubsampling( R10X6G10X6B10X6A10X6_UNORM_4PACK16, Invalid, 0, 10, 1, 1, 8 );
+    addVkFormatDescChromaSubsampling( G10X6B10X6G10X6R10X6_422_UNORM_4PACK16, Invalid, 1, 10, 2, 1, 8 );
+    addVkFormatDescChromaSubsampling( B10X6G10X6R10X6G10X6_422_UNORM_4PACK16, Invalid, 1, 10, 2, 1, 8 );
+    addVkFormatDescChromaSubsampling( G10X6_B10X6_R10X6_3PLANE_420_UNORM_3PACK16, Invalid, 3, 10, 2, 2, 12 );
+    addVkFormatDescChromaSubsampling( G10X6_B10X6R10X6_2PLANE_420_UNORM_3PACK16, Invalid, 2, 10, 2, 2, 12 );
+    addVkFormatDescChromaSubsampling( G10X6_B10X6_R10X6_3PLANE_422_UNORM_3PACK16, Invalid, 3, 10, 2, 1, 8 );
+    addVkFormatDescChromaSubsampling( G10X6_B10X6R10X6_2PLANE_422_UNORM_3PACK16, Invalid, 2, 10, 2, 1, 8 );
+    addVkFormatDescChromaSubsampling( G10X6_B10X6_R10X6_3PLANE_444_UNORM_3PACK16, Invalid, 3, 10, 1, 1, 6 );
+    addVkFormatDescChromaSubsampling( R12X4_UNORM_PACK16, Invalid, 0, 12, 1, 1, 2 );
+    addVkFormatDescChromaSubsampling( R12X4G12X4_UNORM_2PACK16, Invalid, 0, 12, 1, 1, 4 );
+    addVkFormatDescChromaSubsampling( R12X4G12X4B12X4A12X4_UNORM_4PACK16, Invalid, 0, 12, 1, 1, 8 );
+    addVkFormatDescChromaSubsampling( G12X4B12X4G12X4R12X4_422_UNORM_4PACK16, Invalid, 1, 12, 2, 1, 8 );
+    addVkFormatDescChromaSubsampling( B12X4G12X4R12X4G12X4_422_UNORM_4PACK16, Invalid, 1, 12, 2, 1, 8 );
+    addVkFormatDescChromaSubsampling( G12X4_B12X4_R12X4_3PLANE_420_UNORM_3PACK16, Invalid, 3, 12, 2, 2, 12 );
+    addVkFormatDescChromaSubsampling( G12X4_B12X4R12X4_2PLANE_420_UNORM_3PACK16, Invalid, 2, 12, 2, 2, 12 );
+    addVkFormatDescChromaSubsampling( G12X4_B12X4_R12X4_3PLANE_422_UNORM_3PACK16, Invalid, 3, 12, 2, 1, 8 );
+    addVkFormatDescChromaSubsampling( G12X4_B12X4R12X4_2PLANE_422_UNORM_3PACK16, Invalid, 2, 12, 2, 1, 8 );
+    addVkFormatDescChromaSubsampling( G12X4_B12X4_R12X4_3PLANE_444_UNORM_3PACK16, Invalid, 3, 12, 1, 1, 6 );
+    addVkFormatDescChromaSubsampling( G16B16G16R16_422_UNORM, Invalid, 1, 16, 2, 1, 8 );
+    addVkFormatDescChromaSubsampling( B16G16R16G16_422_UNORM, Invalid, 1, 16, 2, 1, 8 );
+    addVkFormatDescChromaSubsampling( G16_B16_R16_3PLANE_420_UNORM, Invalid, 3, 16, 2, 2, 12 );
+    addVkFormatDescChromaSubsampling( G16_B16R16_2PLANE_420_UNORM, Invalid, 2, 16, 2, 2, 12 );
+    addVkFormatDescChromaSubsampling( G16_B16_R16_3PLANE_422_UNORM, Invalid, 3, 16, 2, 1, 8 );
+    addVkFormatDescChromaSubsampling( G16_B16R16_2PLANE_422_UNORM, Invalid, 2, 16, 2, 1, 8 );
+    addVkFormatDescChromaSubsampling( G16_B16_R16_3PLANE_444_UNORM, Invalid, 3, 16, 1, 1, 6 );

     // When adding to this list, be sure to ensure _vkFormatCount is large enough for the format count
 }
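For the table entries with a non-zero plane count, the bytes-per-block column follows one rule: a block covers blockW x blockH luma samples plus one Cb and one Cr sample, each stored in ceil(bits/8) bytes. A small illustrative check of that arithmetic (the helper name is mine, not MoltenVK's):

// Sanity check of the YCbCr bytes-per-block values listed above.
#include <cassert>
#include <cstdint>

static uint32_t ycbcrBytesPerBlock(uint32_t componentBits, uint32_t blockW, uint32_t blockH) {
    uint32_t bytesPerComponent = (componentBits + 7) / 8;
    return bytesPerComponent * (blockW * blockH + 2);   // luma samples + one Cb + one Cr
}

int main() {
    assert(ycbcrBytesPerBlock( 8, 2, 2) ==  6);   // e.g. G8_B8_R8_3PLANE_420_UNORM
    assert(ycbcrBytesPerBlock( 8, 2, 1) ==  4);   // e.g. G8B8G8R8_422_UNORM
    assert(ycbcrBytesPerBlock( 8, 1, 1) ==  3);   // e.g. G8_B8_R8_3PLANE_444_UNORM
    assert(ycbcrBytesPerBlock(16, 2, 2) == 12);   // e.g. G16_B16_R16_3PLANE_420_UNORM
    assert(ycbcrBytesPerBlock(16, 2, 1) ==  8);   // e.g. G16B16G16R16_422_UNORM
    return 0;
}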
@@ -1328,6 +1421,17 @@ typedef enum : VkFormatFeatureFlags {
                                                VK_FORMAT_FEATURE_BLIT_DST_BIT),
     kMVKVkFormatFeatureFlagsTexDSAtt = (VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT),
     kMVKVkFormatFeatureFlagsTexBlend = (VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BLEND_BIT),
+    kMVKVkFormatFeatureFlagsTexTransfer = (VK_FORMAT_FEATURE_TRANSFER_SRC_BIT |
+                                           VK_FORMAT_FEATURE_TRANSFER_DST_BIT |
+                                           VK_FORMAT_FEATURE_BLIT_SRC_BIT |
+                                           VK_FORMAT_FEATURE_BLIT_DST_BIT),
+    kMVKVkFormatFeatureFlagsTexChromaSubsampling = (VK_FORMAT_FEATURE_MIDPOINT_CHROMA_SAMPLES_BIT_KHR |
+                                                    VK_FORMAT_FEATURE_COSITED_CHROMA_SAMPLES_BIT_KHR |
+                                                    VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER_BIT_KHR |
+                                                    VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_SEPARATE_RECONSTRUCTION_FILTER_BIT_KHR |
+                                                    VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_BIT_KHR |
+                                                    VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_FORCEABLE_BIT_KHR),
+    kMVKVkFormatFeatureFlagsTexMultiPlanar = (VK_FORMAT_FEATURE_DISJOINT_BIT_KHR),
     kMVKVkFormatFeatureFlagsBufRead = (VK_FORMAT_FEATURE_UNIFORM_TEXEL_BUFFER_BIT),
     kMVKVkFormatFeatureFlagsBufWrite = (VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_BIT),
     kMVKVkFormatFeatureFlagsBufAtomic = (VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_ATOMIC_BIT),
@@ -1344,9 +1448,23 @@ void MVKPixelFormats::setFormatProperties(MVKVkFormatDesc& vkDesc) {
 
     VkFormatProperties& vkProps = vkDesc.properties;
     MVKMTLFmtCaps mtlPixFmtCaps = getMTLPixelFormatDesc(vkDesc.mtlPixelFormat).mtlFmtCaps;
 
-    // Set optimal tiling features first
     vkProps.optimalTilingFeatures = kMVKVkFormatFeatureFlagsTexNone;
+    vkProps.linearTilingFeatures = kMVKVkFormatFeatureFlagsTexNone;
+
+    // Chroma subsampling and multi planar features
+    if (getChromaSubsamplingComponentBits(vkDesc.vkFormat) > 0) {
+        vkProps.optimalTilingFeatures = kMVKVkFormatFeatureFlagsTexTransfer;
+    }
+    uint8_t chromaSubsamplingPlaneCount = getChromaSubsamplingPlaneCount(vkDesc.vkFormat);
+    if (chromaSubsamplingPlaneCount > 0) {
+        mtlPixFmtCaps = kMVKMTLFmtCapsRF;
+        enableFormatFeatures(ChromaSubsampling, Tex, mtlPixFmtCaps, vkProps.optimalTilingFeatures);
+    }
+    if (chromaSubsamplingPlaneCount > 1) {
+        enableFormatFeatures(MultiPlanar, Tex, mtlPixFmtCaps, vkProps.optimalTilingFeatures);
+    }
+
+    // Optimal tiling features
     enableFormatFeatures(Read, Tex, mtlPixFmtCaps, vkProps.optimalTilingFeatures);
     enableFormatFeatures(Filter, Tex, mtlPixFmtCaps, vkProps.optimalTilingFeatures);
     enableFormatFeatures(Write, Tex, mtlPixFmtCaps, vkProps.optimalTilingFeatures);
@@ -1355,9 +1473,7 @@ void MVKPixelFormats::setFormatProperties(MVKVkFormatDesc& vkDesc) {
     enableFormatFeatures(Blend, Tex, mtlPixFmtCaps, vkProps.optimalTilingFeatures);
 
     // Linear tiling is not available to depth/stencil or compressed formats.
-    vkProps.linearTilingFeatures = kMVKVkFormatFeatureFlagsTexNone;
     if ( !(vkDesc.formatType == kMVKFormatDepthStencil || vkDesc.formatType == kMVKFormatCompressed) ) {
 
         // Start with optimal tiling features, and modify.
         vkProps.linearTilingFeatures = vkProps.optimalTilingFeatures;
 
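A rough trace of the branch added above for a two-plane 4:2:0 format; this is a reading of the code in this hunk, not additional MoltenVK logic:

// getChromaSubsamplingComponentBits(fmt) > 0  ->  optimalTilingFeatures starts from the transfer-only group
// planeCount == 2  (> 0)                      ->  caps are clamped to kMVKMTLFmtCapsRF and the chroma-subsampling features are enabled
// planeCount == 2  (> 1)                      ->  the multi-planar (disjoint) feature is enabled as well
// The Read/Filter/Write/... calls that follow can then only add what kMVKMTLFmtCapsRF permits.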
@@ -110,7 +110,7 @@ void MVKRenderSubpass::populateMTLRenderPassDescriptor(MTLRenderPassDescriptor*
     if (dsRPAttIdx != VK_ATTACHMENT_UNUSED) {
         MVKRenderPassAttachment* dsMVKRPAtt = &_renderPass->_attachments[dsRPAttIdx];
         MVKImageView* dsImage = framebuffer->getAttachment(dsRPAttIdx);
-        MTLPixelFormat mtlDSFormat = dsImage->getMTLPixelFormat();
+        MTLPixelFormat mtlDSFormat = dsImage->getMTLPixelFormat(0);
 
         if (pixFmts->isDepthFormat(mtlDSFormat)) {
             MTLRenderPassDepthAttachmentDescriptor* mtlDepthAttDesc = mtlRPDesc.depthAttachment;
@@ -39,12 +39,6 @@ public:
     /** Returns the byte offset in the bound device memory. */
     inline VkDeviceSize getDeviceMemoryOffset() { return _deviceMemoryOffset; }
 
-    /** Returns the memory requirements of this resource by populating the specified structure. */
-    virtual VkResult getMemoryRequirements(VkMemoryRequirements* pMemoryRequirements) = 0;
-
-    /** Returns the memory requirements of this resource by populating the specified structure. */
-    virtual VkResult getMemoryRequirements(const void* pInfo, VkMemoryRequirements2* pMemoryRequirements) = 0;
-
     /** Binds this resource to the specified offset within the specified memory allocation. */
     virtual VkResult bindDeviceMemory(MVKDeviceMemory* mvkMem, VkDeviceSize memOffset);
 
@@ -78,10 +72,6 @@ public:
     MVKResource(MVKDevice* device) : MVKVulkanAPIDeviceObject(device) {}
 
 protected:
-    virtual bool needsHostReadSync(VkPipelineStageFlags srcStageMask,
-                                   VkPipelineStageFlags dstStageMask,
-                                   VkMemoryBarrier* pMemoryBarrier);
-
     MVKDeviceMemory* _deviceMemory = nullptr;
     VkDeviceSize _deviceMemoryOffset = 0;
     VkDeviceSize _byteCount = 0;
@@ -34,18 +34,3 @@ VkResult MVKResource::bindDeviceMemory(MVKDeviceMemory* mvkMem, VkDeviceSize mem
     return VK_SUCCESS;
 }
-
-// Returns whether the specified global memory barrier requires a sync between this
-// texture and host memory for the purpose of the host reading texture memory.
-bool MVKResource::needsHostReadSync(VkPipelineStageFlags srcStageMask,
-                                    VkPipelineStageFlags dstStageMask,
-                                    VkMemoryBarrier* pMemoryBarrier) {
-#if MVK_IOS_OR_TVOS
-    return false;
-#endif
-#if MVK_MACOS
-    return (mvkIsAnyFlagEnabled(dstStageMask, (VK_PIPELINE_STAGE_HOST_BIT)) &&
-            mvkIsAnyFlagEnabled(pMemoryBarrier->dstAccessMask, (VK_ACCESS_HOST_READ_BIT)) &&
-            isMemoryHostAccessible() && !isMemoryHostCoherent());
-#endif
-}
 
@@ -384,18 +384,16 @@ void MVKSwapchain::initSurfaceImages(const VkSwapchainCreateInfoKHR* pCreateInfo
 
 VkResult MVKSwapchain::getRefreshCycleDuration(VkRefreshCycleDurationGOOGLE *pRefreshCycleDuration) {
 #if MVK_IOS_OR_TVOS
-    NSInteger framesPerSecond = 60;
-    if (@available(iOS 10.3, tvOS 10.3, *)) {
-        framesPerSecond = [UIScreen mainScreen].maximumFramesPerSecond;
-    } else {
-        // TODO: fallback
+    UIScreen* screen = [UIScreen mainScreen];
+    if ([screen respondsToSelector: @selector(maximumFramesPerSecond)]) {
+        framesPerSecond = screen.maximumFramesPerSecond;
     }
 #endif
 #if MVK_MACOS
     // TODO: hook this up for macOS, probably need to use CGDisplayModeGetRefeshRate
-    NSInteger framesPerSecond = 60;
 #endif
-    pRefreshCycleDuration->refreshDuration = 1e9 / ( uint64_t ) framesPerSecond;
+
+    pRefreshCycleDuration->refreshDuration = (uint64_t)1e9 / framesPerSecond;
     return VK_SUCCESS;
 }
 
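Both forms of the refresh-duration expression land on the same value; the rewrite mainly switches from floating-point to integer division. For the default 60 fps case:

// old: 1e9 / (uint64_t)framesPerSecond  ->  1000000000.0 / 60 = 16666666.67 (double), truncated on assignment
// new: (uint64_t)1e9 / framesPerSecond  ->  1000000000 / 60   = 16666666 ns  (about 16.7 ms per refresh)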
@@ -63,7 +63,7 @@ public:
     * Called when this instance has been retained as a reference by another object,
     * indicating that this instance will not be deleted until that reference is released.
     */
-    void retain();
+    inline void retain() { _refCount++; }
 
    /**
     * Called when this instance has been released as a reference from another object.
@@ -71,14 +71,14 @@ public:
     * If the destroy() function has already been called on this instance by the time
     * this function is called, this instance will be deleted.
     */
-    void release();
+    inline void release() { if (--_refCount == 0) { MVKConfigurableObject::destroy(); } }
 
    /**
     * Marks this instance as destroyed. If all previous references to this instance
     * have been released, this instance will be deleted, otherwise deletion of this
     * instance will automatically be deferred until all references have been released.
     */
-    void destroy() override;
+    void destroy() override { release(); }
 
     /** Gets the debug object name of this instance. */
     inline NSString* getDebugName() { return _debugName; }
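The bodies moved into the header are small, so a self-contained sketch of the pattern they implement may help; the names and the atomic counter type here are assumptions for illustration, not MoltenVK declarations:

#include <atomic>
#include <cstdint>

// Deferred-destruction pattern: destroy() only drops the object's own reference;
// the object is actually deleted by whichever release() brings the count to zero.
struct RefCountedSketch {
    std::atomic<uint32_t> refCount {1};      // the object holds one reference to itself
    void retain()  { refCount++; }
    void release() { if (--refCount == 0) { delete this; } }
    void destroy() { release(); }            // callers that retained it keep it alive until they release it
};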
@@ -24,18 +24,6 @@ using namespace std;
 #pragma mark -
 #pragma mark MVKVulkanAPIObject
 
-void MVKVulkanAPIObject::retain() {
-    _refCount++;
-}
-
-void MVKVulkanAPIObject::release() {
-    if (--_refCount == 0) { MVKConfigurableObject::destroy(); }
-}
-
-void MVKVulkanAPIObject::destroy() {
-    release();
-}
-
 VkResult MVKVulkanAPIObject::setDebugName(const char* pObjectName) {
     if (pObjectName) {
         [_debugName release];
@@ -58,6 +58,7 @@ MVK_EXTENSION(KHR_maintenance3, KHR_MAINTENANCE3, DEVICE)
 MVK_EXTENSION(KHR_push_descriptor, KHR_PUSH_DESCRIPTOR, DEVICE)
 MVK_EXTENSION(KHR_relaxed_block_layout, KHR_RELAXED_BLOCK_LAYOUT, DEVICE)
 MVK_EXTENSION(KHR_sampler_mirror_clamp_to_edge, KHR_SAMPLER_MIRROR_CLAMP_TO_EDGE, DEVICE)
+MVK_EXTENSION(KHR_sampler_ycbcr_conversion, KHR_SAMPLER_YCBCR_CONVERSION, DEVICE)
 MVK_EXTENSION(KHR_shader_draw_parameters, KHR_SHADER_DRAW_PARAMETERS, DEVICE)
 MVK_EXTENSION(KHR_shader_float16_int8, KHR_SHADER_FLOAT16_INT8, DEVICE)
 MVK_EXTENSION(KHR_storage_buffer_storage_class, KHR_STORAGE_BUFFER_STORAGE_CLASS, DEVICE)
@@ -76,6 +77,7 @@ MVK_EXTENSION(EXT_inline_uniform_block, EXT_INLINE_UNIFORM_BLOCK, DEVICE)
 MVK_EXTENSION(EXT_memory_budget, EXT_MEMORY_BUDGET, DEVICE)
 MVK_EXTENSION(EXT_metal_surface, EXT_METAL_SURFACE, INSTANCE)
 MVK_EXTENSION(EXT_post_depth_coverage, EXT_POST_DEPTH_COVERAGE, DEVICE)
+MVK_EXTENSION(EXT_robustness2, EXT_ROBUSTNESS_2, DEVICE)
 MVK_EXTENSION(EXT_scalar_block_layout, EXT_SCALAR_BLOCK_LAYOUT, DEVICE)
 MVK_EXTENSION(EXT_shader_stencil_export, EXT_SHADER_STENCIL_EXPORT, DEVICE)
 MVK_EXTENSION(EXT_shader_viewport_index_layer, EXT_SHADER_VIEWPORT_INDEX_LAYER, DEVICE)
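For reference, an application opts in to the newly advertised device extensions at device-creation time. A minimal sketch, with the physical-device and queue setup assumed and elided:

const char* deviceExtensions[] = {
    VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME,
    VK_EXT_ROBUSTNESS_2_EXTENSION_NAME,
};

VkDeviceCreateInfo deviceInfo = {};
deviceInfo.sType                   = VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO;
deviceInfo.enabledExtensionCount   = 2;
deviceInfo.ppEnabledExtensionNames = deviceExtensions;
// ... fill in queue create infos as usual, then vkCreateDevice(physDev, &deviceInfo, nullptr, &device);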
@@ -523,7 +523,7 @@ void mvkDisableFlags(Tv& value, const Tm bitMask) { value = (Tv)(value & ~(Tv)bi
 
 /** Returns whether the specified value has ANY of the flags specified in bitMask enabled (set to 1). */
 template<typename Tv, typename Tm>
-bool mvkIsAnyFlagEnabled(Tv value, const Tm bitMask) { return !!(value & bitMask); }
+bool mvkIsAnyFlagEnabled(Tv value, const Tm bitMask) { return ((value & bitMask) != 0); }
 
 /** Returns whether the specified value has ALL of the flags specified in bitMask enabled (set to 1). */
 template<typename Tv, typename Tm>
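The behaviour is unchanged; the rewrite only trades the double-negation idiom for an explicit comparison. For example:

// With VkAccessFlags access = VK_ACCESS_HOST_READ_BIT | VK_ACCESS_HOST_WRITE_BIT:
//   mvkIsAnyFlagEnabled(access, VK_ACCESS_HOST_READ_BIT)    -> true
//   mvkIsAnyFlagEnabled(access, VK_ACCESS_SHADER_READ_BIT)  -> false
// Both !!(value & bitMask) and ((value & bitMask) != 0) yield the same bool.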
@@ -115,7 +115,7 @@ MVK_PUBLIC_SYMBOL VkResult vkSetMTLTextureMVK(
     id<MTLTexture>                              mtlTexture) {
 
     MVKImage* mvkImg = (MVKImage*)image;
-    return mvkImg->setMTLTexture(mtlTexture);
+    return mvkImg->setMTLTexture(0, mtlTexture);
 }
 
 MVK_PUBLIC_SYMBOL void vkGetMTLTextureMVK(
@@ -123,7 +123,7 @@ MVK_PUBLIC_SYMBOL void vkGetMTLTextureMVK(
     id<MTLTexture>*                             pMTLTexture) {
 
     MVKImage* mvkImg = (MVKImage*)image;
-    *pMTLTexture = mvkImg->getMTLTexture();
+    *pMTLTexture = mvkImg->getMTLTexture(0);
 }
 
 MVK_PUBLIC_SYMBOL void vkGetMTLBufferMVK(
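The public MoltenVK entry points keep their signatures; only the internal MVKImage accessors now take a plane index, with plane 0 used here. A usage sketch, assuming an existing VkImage handle named vkImage:

id<MTLTexture> mtlTex = nil;
vkGetMTLTextureMVK(vkImage, &mtlTex);   // internally now backed by mvkImg->getMTLTexture(0), i.e. plane 0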
@@ -526,7 +526,7 @@ MVK_PUBLIC_SYMBOL VkResult vkBindImageMemory(
     MVKTraceVulkanCallStart();
     MVKImage* mvkImg = (MVKImage*)image;
     MVKDeviceMemory* mvkMem = (MVKDeviceMemory*)mem;
-    VkResult rslt = mvkImg->bindDeviceMemory(mvkMem, memOffset);
+    VkResult rslt = mvkImg->bindDeviceMemory(mvkMem, memOffset, 0);
     MVKTraceVulkanCallEnd();
     return rslt;
 }
@@ -549,7 +549,7 @@ MVK_PUBLIC_SYMBOL void vkGetImageMemoryRequirements(
 
     MVKTraceVulkanCallStart();
     MVKImage* mvkImg = (MVKImage*)image;
-    mvkImg->getMemoryRequirements(pMemoryRequirements);
+    mvkImg->getMemoryRequirements(pMemoryRequirements, 0);
     MVKTraceVulkanCallEnd();
 }
 
@@ -2218,6 +2218,37 @@ MVK_PUBLIC_SYMBOL void vkCmdPushDescriptorSetWithTemplateKHR(
 }
 
 
+#pragma mark -
+#pragma mark VK_KHR_sampler_ycbcr_conversion extension
+
+MVK_PUBLIC_SYMBOL VkResult vkCreateSamplerYcbcrConversionKHR(
+    VkDevice                                    device,
+    const VkSamplerYcbcrConversionCreateInfo*   pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSamplerYcbcrConversion*                   pYcbcrConversion) {
+
+    MVKTraceVulkanCallStart();
+    MVKDevice* mvkDev = MVKDevice::getMVKDevice(device);
+    MVKSamplerYcbcrConversion* mvkSampConv = mvkDev->createSamplerYcbcrConversion(pCreateInfo, pAllocator);
+    *pYcbcrConversion = (VkSamplerYcbcrConversion)mvkSampConv;
+    VkResult rslt = mvkSampConv->getConfigurationResult();
+    MVKTraceVulkanCallEnd();
+    return rslt;
+}
+
+MVK_PUBLIC_SYMBOL void vkDestroySamplerYcbcrConversionKHR(
+    VkDevice                                    device,
+    VkSamplerYcbcrConversion                    ycbcrConversion,
+    const VkAllocationCallbacks*                pAllocator) {
+
+    MVKTraceVulkanCallStart();
+    if ( !ycbcrConversion ) { return; }
+    MVKDevice* mvkDev = MVKDevice::getMVKDevice(device);
+    mvkDev->destroySamplerYcbcrConversion((MVKSamplerYcbcrConversion*)ycbcrConversion, pAllocator);
+    MVKTraceVulkanCallEnd();
+}
+
+
 #pragma mark -
 #pragma mark VK_KHR_swapchain extension
 
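A minimal application-side sketch of the new entry points; the device handle and the later sampler/image-view plumbing are assumed:

VkSamplerYcbcrConversionCreateInfo convInfo = {};
convInfo.sType         = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_CREATE_INFO;
convInfo.format        = VK_FORMAT_G8_B8R8_2PLANE_420_UNORM;
convInfo.ycbcrModel    = VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_709;
convInfo.ycbcrRange    = VK_SAMPLER_YCBCR_RANGE_ITU_NARROW;
convInfo.xChromaOffset = VK_CHROMA_LOCATION_MIDPOINT;
convInfo.yChromaOffset = VK_CHROMA_LOCATION_MIDPOINT;
convInfo.chromaFilter  = VK_FILTER_LINEAR;

VkSamplerYcbcrConversion ycbcrConv = VK_NULL_HANDLE;
VkResult rslt = vkCreateSamplerYcbcrConversionKHR(device, &convInfo, nullptr, &ycbcrConv);
// ... chain a VkSamplerYcbcrConversionInfo into the pNext of VkSamplerCreateInfo and VkImageViewCreateInfo ...
vkDestroySamplerYcbcrConversionKHR(device, ycbcrConv, nullptr);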
@@ -136,9 +136,22 @@ MVK_PUBLIC_SYMBOL bool mvk::MSLResourceBinding::matches(const MSLResourceBinding
         if (constExprSampler.lod_clamp_min != other.constExprSampler.lod_clamp_min) { return false; }
         if (constExprSampler.lod_clamp_max != other.constExprSampler.lod_clamp_max) { return false; }
         if (constExprSampler.max_anisotropy != other.constExprSampler.max_anisotropy) { return false; }
+
+        if (constExprSampler.planes != other.constExprSampler.planes) { return false; }
+        if (constExprSampler.resolution != other.constExprSampler.resolution) { return false; }
+        if (constExprSampler.chroma_filter != other.constExprSampler.chroma_filter) { return false; }
+        if (constExprSampler.x_chroma_offset != other.constExprSampler.x_chroma_offset) { return false; }
+        if (constExprSampler.y_chroma_offset != other.constExprSampler.y_chroma_offset) { return false; }
+        for(uint32_t i = 0; i < 4; ++i)
+            if (constExprSampler.swizzle[i] != other.constExprSampler.swizzle[i]) { return false; }
+        if (constExprSampler.ycbcr_model != other.constExprSampler.ycbcr_model) { return false; }
+        if (constExprSampler.ycbcr_range != other.constExprSampler.ycbcr_range) { return false; }
+        if (constExprSampler.bpc != other.constExprSampler.bpc) { return false; }
+
         if (constExprSampler.compare_enable != other.constExprSampler.compare_enable) { return false; }
         if (constExprSampler.lod_clamp_enable != other.constExprSampler.lod_clamp_enable) { return false; }
         if (constExprSampler.anisotropy_enable != other.constExprSampler.anisotropy_enable) { return false; }
+        if (constExprSampler.ycbcr_conversion_enable != other.constExprSampler.ycbcr_conversion_enable) { return false; }
     }
 
     return true;
@@ -989,8 +989,6 @@
         A93747401A9A8B2900F29B34 /* Debug */ = {
             isa = XCBuildConfiguration;
             buildSettings = {
-                ARCHS = arm64;
-                BITCODE_GENERATION_MODE = bitcode;
                 CLANG_WARN_SUSPICIOUS_IMPLICIT_CONVERSION = NO;
                 CLANG_WARN_UNREACHABLE_CODE = NO;
                 GCC_WARN_ABOUT_MISSING_FIELD_INITIALIZERS = NO;
@@ -1000,15 +998,12 @@
                 PRODUCT_NAME = MoltenVKGLSLToSPIRVConverter;
                 SDKROOT = iphoneos;
                 TARGETED_DEVICE_FAMILY = "1,2";
-                VALID_ARCHS = arm64;
             };
             name = Debug;
         };
         A93747411A9A8B2900F29B34 /* Release */ = {
             isa = XCBuildConfiguration;
             buildSettings = {
-                ARCHS = arm64;
-                BITCODE_GENERATION_MODE = bitcode;
                 CLANG_WARN_SUSPICIOUS_IMPLICIT_CONVERSION = NO;
                 CLANG_WARN_UNREACHABLE_CODE = NO;
                 GCC_WARN_ABOUT_MISSING_FIELD_INITIALIZERS = NO;
@@ -1018,7 +1013,6 @@
                 PRODUCT_NAME = MoltenVKGLSLToSPIRVConverter;
                 SDKROOT = iphoneos;
                 TARGETED_DEVICE_FAMILY = "1,2";
-                VALID_ARCHS = arm64;
             };
             name = Release;
         };
@@ -1053,8 +1047,6 @@
         A93903BD1C57E9D700FE90DC /* Debug */ = {
             isa = XCBuildConfiguration;
             buildSettings = {
-                ARCHS = arm64;
-                BITCODE_GENERATION_MODE = bitcode;
                 GCC_PREPROCESSOR_DEFINITIONS = (
                     "$(inherited)",
                     "SPIRV_CROSS_NAMESPACE_OVERRIDE=MVK_spirv_cross",
@@ -1064,15 +1056,12 @@
                 PRODUCT_NAME = MoltenVKSPIRVToMSLConverter;
                 SDKROOT = iphoneos;
                 TARGETED_DEVICE_FAMILY = "1,2";
-                VALID_ARCHS = arm64;
             };
             name = Debug;
         };
         A93903BE1C57E9D700FE90DC /* Release */ = {
             isa = XCBuildConfiguration;
             buildSettings = {
-                ARCHS = arm64;
-                BITCODE_GENERATION_MODE = bitcode;
                 GCC_PREPROCESSOR_DEFINITIONS = (
                     "$(inherited)",
                     "SPIRV_CROSS_NAMESPACE_OVERRIDE=MVK_spirv_cross",
@@ -1082,7 +1071,6 @@
                 PRODUCT_NAME = MoltenVKSPIRVToMSLConverter;
                 SDKROOT = iphoneos;
                 TARGETED_DEVICE_FAMILY = "1,2";
-                VALID_ARCHS = arm64;
             };
             name = Release;
         };
@@ -1118,6 +1106,7 @@
             isa = XCBuildConfiguration;
             buildSettings = {
                 ALWAYS_SEARCH_USER_PATHS = NO;
+                ARCHS = "$(ARCHS_STANDARD_64_BIT)";
                 CLANG_CXX_LANGUAGE_STANDARD = "c++0x";
                 CLANG_CXX_LIBRARY = "libc++";
                 CLANG_ENABLE_MODULES = NO;
@@ -1131,9 +1120,10 @@
                 CLANG_WARN_INT_CONVERSION = YES;
                 CLANG_WARN_OBJC_IMPLICIT_RETAIN_SELF = YES;
                 CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR;
-                CLANG_WARN_SUSPICIOUS_IMPLICIT_CONVERSION = YES;
+                CLANG_WARN_SUSPICIOUS_IMPLICIT_CONVERSION = NO;
                 CLANG_WARN_SUSPICIOUS_MOVE = YES;
-                CLANG_WARN_UNREACHABLE_CODE = NO;
+                CLANG_WARN_UNGUARDED_AVAILABILITY = NO;
+                CLANG_WARN_UNREACHABLE_CODE = YES;
                 CLANG_WARN__DUPLICATE_METHOD_MATCH = YES;
                 COPY_PHASE_STRIP = NO;
                 ENABLE_STRICT_OBJC_MSGSEND = YES;
@@ -1150,8 +1140,8 @@
                 GCC_WARN_HIDDEN_VIRTUAL_FUNCTIONS = YES;
                 GCC_WARN_NON_VIRTUAL_DESTRUCTOR = YES;
                 GCC_WARN_UNDECLARED_SELECTOR = YES;
+                GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE;
                 GCC_WARN_UNUSED_FUNCTION = YES;
-                GCC_WARN_UNUSED_PARAMETER = YES;
                 GCC_WARN_UNUSED_VARIABLE = NO;
                 HEADER_SEARCH_PATHS = (
                     "$(inherited)",
@@ -1164,7 +1154,6 @@
                 MACOSX_DEPLOYMENT_TARGET = 10.11;
                 ONLY_ACTIVE_ARCH = YES;
                 SKIP_INSTALL = YES;
-                VALID_ARCHS = x86_64;
             };
             name = Debug;
         };
@@ -1172,6 +1161,7 @@
             isa = XCBuildConfiguration;
             buildSettings = {
                 ALWAYS_SEARCH_USER_PATHS = NO;
+                ARCHS = "$(ARCHS_STANDARD_64_BIT)";
                 CLANG_CXX_LANGUAGE_STANDARD = "c++0x";
                 CLANG_CXX_LIBRARY = "libc++";
                 CLANG_ENABLE_MODULES = NO;
@@ -1185,9 +1175,10 @@
                 CLANG_WARN_INT_CONVERSION = YES;
                 CLANG_WARN_OBJC_IMPLICIT_RETAIN_SELF = YES;
                 CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR;
-                CLANG_WARN_SUSPICIOUS_IMPLICIT_CONVERSION = YES;
+                CLANG_WARN_SUSPICIOUS_IMPLICIT_CONVERSION = NO;
                 CLANG_WARN_SUSPICIOUS_MOVE = YES;
-                CLANG_WARN_UNREACHABLE_CODE = NO;
+                CLANG_WARN_UNGUARDED_AVAILABILITY = NO;
+                CLANG_WARN_UNREACHABLE_CODE = YES;
                 CLANG_WARN__DUPLICATE_METHOD_MATCH = YES;
                 COPY_PHASE_STRIP = YES;
                 ENABLE_NS_ASSERTIONS = NO;
@@ -1204,8 +1195,8 @@
                 GCC_WARN_HIDDEN_VIRTUAL_FUNCTIONS = YES;
                 GCC_WARN_NON_VIRTUAL_DESTRUCTOR = YES;
                 GCC_WARN_UNDECLARED_SELECTOR = YES;
+                GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE;
                 GCC_WARN_UNUSED_FUNCTION = YES;
-                GCC_WARN_UNUSED_PARAMETER = YES;
                 GCC_WARN_UNUSED_VARIABLE = NO;
                 HEADER_SEARCH_PATHS = (
                     "$(inherited)",
@@ -1218,7 +1209,6 @@
                 MACOSX_DEPLOYMENT_TARGET = 10.11;
                 SKIP_INSTALL = YES;
                 VALIDATE_PRODUCT = YES;
-                VALID_ARCHS = x86_64;
             };
             name = Release;
         };
@@ -19,6 +19,7 @@
 #include "OSSupport.h"
 #include "FileSupport.h"
 #include "MoltenVKShaderConverterTool.h"
+#include "MVKOSExtensions.h"
 
 #import <Foundation/Foundation.h>
 #import <Metal/Metal.h>
@@ -71,13 +72,9 @@ bool mvk::compile(const string& mslSourceCode,
 
     MTLLanguageVersion mslVerEnum = (MTLLanguageVersion)0;
     if (mslVer(2, 1, 0)) {
-        if (@available(macOS 10.14, *)) {
         mslVerEnum = MTLLanguageVersion2_1;
-        }
     } else if (mslVer(2, 0, 0)) {
-        if (@available(macOS 10.13, *)) {
         mslVerEnum = MTLLanguageVersion2_0;
-        }
     } else if (mslVer(1, 2, 0)) {
         mslVerEnum = MTLLanguageVersion1_2;
     } else if (mslVer(1, 1, 0)) {
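For context, the selected enum value is the kind of thing Metal's compiler options consume. A sketch of typical consumption, assuming an existing id<MTLDevice> named mtlDevice; this is illustrative and not necessarily the remainder of mvk::compile():

MTLCompileOptions* options = [MTLCompileOptions new];
options.languageVersion = mslVerEnum;                      // e.g. MTLLanguageVersion2_1 for MSL 2.1
NSError* err = nil;
id<MTLLibrary> mtlLib = [mtlDevice newLibraryWithSource: @(mslSourceCode.c_str())
                                                options: options
                                                  error: &err];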