Avoid generating 8/16-bit constants when 8/16-bit arithmetic extensions aren't enabled

Jeff Bolz 2019-07-10 13:06:15 -05:00
parent f970253a5d
commit c1ad396258
10 changed files with 136 additions and 80 deletions
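In effect, the commit makes constant generation consult the extension state before materializing a narrow-typed constant. Below is a minimal sketch of that policy, assuming a folding path that falls back to a 32-bit constant when the needed extension is absent; BasicType and chooseConstantType are illustrative stand-ins, not glslang API. The real checks live in glslang's folding and conversion paths across the other changed files, which this excerpt does not show.

    #include <cstdio>

    enum class BasicType { Int8, Int16, Int32 };

    // Illustrative policy only: pick the type actually used for a constant,
    // widening to 32-bit when the matching arithmetic extension is disabled.
    static BasicType chooseConstantType(BasicType requested,
                                        bool int8Enabled, bool int16Enabled) {
        if (requested == BasicType::Int8 && !int8Enabled)
            return BasicType::Int32;   // no 8-bit arithmetic: don't emit an int8 constant
        if (requested == BasicType::Int16 && !int16Enabled)
            return BasicType::Int32;   // no 16-bit arithmetic: don't emit an int16 constant
        return requested;              // extension enabled: the narrow constant is legal
    }

    int main() {
        bool widened = chooseConstantType(BasicType::Int8, /*int8*/ false, /*int16*/ true)
                       == BasicType::Int32;
        std::printf("int8 constant widened: %d\n", widened);
    }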

@@ -780,6 +780,27 @@ public:
     const char* const implicitThisName;
     const char* const implicitCounterName;
+    // Certain explicit conversions are allowed conditionally
+    bool getArithemeticInt8Enabled() const {
+        return extensionRequested(E_GL_EXT_shader_explicit_arithmetic_types) ||
+               extensionRequested(E_GL_EXT_shader_explicit_arithmetic_types_int8);
+    }
+    bool getArithemeticInt16Enabled() const {
+        return extensionRequested(E_GL_EXT_shader_explicit_arithmetic_types) ||
+#ifdef AMD_EXTENSIONS
+               extensionRequested(E_GL_AMD_gpu_shader_int16) ||
+#endif
+               extensionRequested(E_GL_EXT_shader_explicit_arithmetic_types_int16);
+    }
+    bool getArithemeticFloat16Enabled() const {
+        return extensionRequested(E_GL_EXT_shader_explicit_arithmetic_types) ||
+#ifdef AMD_EXTENSIONS
+               extensionRequested(E_GL_AMD_gpu_shader_half_float) ||
+#endif
+               extensionRequested(E_GL_EXT_shader_explicit_arithmetic_types_float16);
+    }
 protected:
     TIntermSymbol* addSymbol(int Id, const TString&, const TType&, const TConstUnionArray&, TIntermTyped* subtree, const TSourceLoc&);
     void error(TInfoSink& infoSink, const char*);
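As a usage illustration, here is a self-contained mock of the pattern these helpers implement: extension names are recorded as the shader requests them, and a query succeeds if any one enabling extension was seen. MockIntermediate and requestExtension are simplified stand-ins for glslang's TIntermediate machinery, sketched under the assumption that extensionRequested() checks a set of previously requested extension strings.

    #include <iostream>
    #include <set>
    #include <string>

    // Simplified stand-in for TIntermediate's extension tracking; only the
    // gating logic mirrors the diff above, the rest is mocked for illustration.
    class MockIntermediate {
    public:
        void requestExtension(const std::string& ext) { requested.insert(ext); }
        bool extensionRequested(const std::string& ext) const {
            return requested.count(ext) != 0;
        }

        // Same shape as getArithemeticFloat16Enabled() in the hunk above:
        // float16 arithmetic is usable if any enabling extension was requested.
        bool float16ArithmeticEnabled() const {
            return extensionRequested("GL_EXT_shader_explicit_arithmetic_types") ||
                   extensionRequested("GL_AMD_gpu_shader_half_float") ||
                   extensionRequested("GL_EXT_shader_explicit_arithmetic_types_float16");
        }

    private:
        std::set<std::string> requested;
    };

    int main() {
        MockIntermediate im;
        std::cout << im.float16ArithmeticEnabled() << "\n"; // 0: must not emit float16 constants
        im.requestExtension("GL_EXT_shader_explicit_arithmetic_types_float16");
        std::cout << im.float16ArithmeticEnabled() << "\n"; // 1: float16 constants are legal
    }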