diff --git a/glslang/CInterface/glslang_c_interface.cpp b/glslang/CInterface/glslang_c_interface.cpp
index 1dbdeec0..d4e3dff3 100644
--- a/glslang/CInterface/glslang_c_interface.cpp
+++ b/glslang/CInterface/glslang_c_interface.cpp
@@ -345,6 +345,11 @@ GLSLANG_EXPORT glslang_shader_t* glslang_shader_create(const glslang_input_t* in
     shader->shader->setEnvTarget(c_shader_target_language(input->target_language),
                                  c_shader_target_language_version(input->target_language_version));
 
+#ifdef ENABLE_HLSL
+    if (input->hlsl_functionality_1)
+        shader->shader->setEnvTargetHlslFunctionality1();
+#endif
+
     if (input->entrypoint != nullptr)
         shader->shader->setEntryPoint(input->entrypoint);
 
diff --git a/glslang/Include/glslang_c_interface.h b/glslang/Include/glslang_c_interface.h
index fd9d597c..10cebab5 100644
--- a/glslang/Include/glslang_c_interface.h
+++ b/glslang/Include/glslang_c_interface.h
@@ -220,6 +220,7 @@ typedef struct glslang_input_s {
    const char* entrypoint; // This is what actually gets called by the GPU. Best to leave it at 'main' or something so opengl doesn't trip over itself.
    const char* source_entrypoint; // This just renames the source entrypoint in the code to 'entrypoint' and should be what is used to set different entrypoint names.
    int invert_y;
+   int hlsl_functionality_1;
    int default_version;
    glslang_profile_t default_profile;
    int force_default_version_and_profile;
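
For context, a minimal sketch of how a caller might opt into the new flag. This assumes the upstream glslang C API (the GLSLANG_* enum values, glslang_default_resource() from resource_limits_c.h, whose header location varies by glslang version) plus the extra fields this fork already carries; a hypothetical fragment-stage helper is used for illustration, not taken from the patch:

    #include <glslang/Include/glslang_c_interface.h>
    #include <glslang/Public/resource_limits_c.h> /* glslang_default_resource(); path varies by version */

    /* Compile HLSL with the new flag set. hlsl_functionality_1 is only
     * honored when glslang was built with ENABLE_HLSL; otherwise the
     * #ifdef in glslang_shader_create() compiles the check away. */
    glslang_shader_t* create_hlsl_fragment_shader(const char* hlsl_source)
    {
        glslang_input_t input = {0};
        input.language                = GLSLANG_SOURCE_HLSL;
        input.stage                   = GLSLANG_STAGE_FRAGMENT;
        input.client                  = GLSLANG_CLIENT_VULKAN;
        input.client_version          = GLSLANG_TARGET_VULKAN_1_1;
        input.target_language         = GLSLANG_TARGET_SPV;
        input.target_language_version = GLSLANG_TARGET_SPV_1_3;
        input.code                    = hlsl_source;
        input.entrypoint              = "main";
        input.hlsl_functionality_1    = 1;   /* the field added by this patch */
        input.default_version         = 100;
        input.default_profile         = GLSLANG_NO_PROFILE;
        input.messages                = GLSLANG_MSG_DEFAULT_BIT;
        input.resource                = glslang_default_resource();

        return glslang_shader_create(&input);
    }

The int field mirrors the existing TShader::setEnvTargetHlslFunctionality1() setter, which makes the SPIR-V backend emit SPV_GOOGLE_hlsl_functionality1 decorations (e.g. counter-buffer associations and HLSL semantic strings), so C-API users gain the same capability the C++ API already exposes.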