HLSL: Map min types to GLSL 16-bit types
parent 02538357c8
commit cb61eec948
4 changed files with 20 additions and 1 deletion
glslang/MachineIndependent/parseVersions.h
@@ -114,6 +114,7 @@ public:
     bool relaxedErrors()    const { return (messages & EShMsgRelaxedErrors) != 0; }
     bool suppressWarnings() const { return (messages & EShMsgSuppressWarnings) != 0; }
     bool isReadingHLSL()    const { return (messages & EShMsgReadHlsl) == EShMsgReadHlsl; }
+    bool hlslEnable16BitTypes() const { return (messages & EShMsgHlslEnable16BitTypes) != 0; }

     TInfoSink& infoSink;
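The new accessor follows the pattern of its neighbors: test one bit of the messages mask. The HLSL front end can then branch on it when mapping HLSL's minimum-precision types (min16float and friends) to concrete GLSL types. A minimal sketch of that idea, assuming the enclosing class is glslang's TParseVersions and using a hypothetical helper name; EbtFloat16 and EbtFloat are glslang's existing basic-type tags:

    // Hypothetical helper, for illustration only: pick the basic type that
    // represents HLSL's min16float. With EShMsgHlslEnable16BitTypes set it
    // becomes a true 16-bit float; otherwise it stays a 32-bit float.
    glslang::TBasicType mapMin16Float(const glslang::TParseVersions& pv)
    {
        return pv.hlslEnable16BitTypes() ? glslang::EbtFloat16 : glslang::EbtFloat;
    }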
glslang/Public/ShaderLang.h
@@ -200,6 +200,7 @@ enum EShMessages {
     EShMsgKeepUncalled = (1 << 8),          // for testing, don't eliminate uncalled functions
     EShMsgHlslOffsets = (1 << 9),           // allow block offsets to follow HLSL rules instead of GLSL rules
     EShMsgDebugInfo = (1 << 10),            // save debug information
+    EShMsgHlslEnable16BitTypes = (1 << 11), // enable use of 16-bit types in SPIR-V for HLSL
 };

 //
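On the client side, the new bit is OR'd into the EShMessages mask handed to TShader::parse. A minimal sketch, assuming glslang::InitializeProcess() has already been called and that the caller supplies filled-in resource limits; without EShMsgHlslEnable16BitTypes, min-precision types keep their previous mapping to 32-bit (relaxed-precision) types:

    #include <glslang/Public/ShaderLang.h>

    // Sketch: parse an HLSL fragment shader with 16-bit types enabled.
    bool parseHlslWith16BitTypes(const char* hlslSource, const TBuiltInResource& resources)
    {
        glslang::TShader shader(EShLangFragment);
        shader.setStrings(&hlslSource, 1);
        shader.setEntryPoint("main");   // HLSL entry point name

        // Request HLSL parsing, SPIR-V/Vulkan semantics, and 16-bit types.
        EShMessages messages = static_cast<EShMessages>(
            EShMsgReadHlsl | EShMsgSpvRules | EShMsgVulkanRules |
            EShMsgHlslEnable16BitTypes);

        // 100 is the default #version assumed when the source declares none.
        return shader.parse(&resources, 100, false, messages);
    }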