PP: Non-functional: rationalize TPpToken.

Always keep 'token' outside.
Always return the string to upper levels inside.
This commit is contained in:
John Kessenich 2016-12-20 19:42:53 -07:00
parent 1fbb9c1430
commit 54af2de761
6 changed files with 38 additions and 36 deletions

View file

@@ -92,7 +92,7 @@ namespace glslang {
class TPpToken {
public:
TPpToken() : token(0), space(false), ival(0), dval(0.0), i64val(0), atom(0)
TPpToken() : space(false), ival(0), dval(0.0), i64val(0), atom(0)
{
loc.init();
name[0] = 0;
@@ -101,14 +101,13 @@ public:
// This is used for comparing macro definitions, so checks what is relevant for that.
bool operator==(const TPpToken& right)
{
return token == right.token && space == right.space &&
return space == right.space &&
ival == right.ival && dval == right.dval && i64val == right.i64val &&
strncmp(name, right.name, MaxTokenLength) == 0;
}
bool operator!=(const TPpToken& right) { return ! operator==(right); }
TSourceLoc loc;
int token;
bool space; // true if a space (for white space or a removed comment) should also be recognized, in front of the token returned
int ival;
double dval;
@@ -129,7 +128,7 @@ public:
void setPreamble(const char* preamble, size_t length);
const char* tokenize(TPpToken& ppToken);
int tokenize(TPpToken& ppToken);
int tokenPaste(int token, TPpToken&);
class tInput {

View file

@@ -715,33 +715,31 @@ int TPpContext::tStringInput::scan(TPpToken* ppToken)
// The main functional entry point into the preprocessor, which will
// scan the source strings to figure out and return the next processing token.
//
// Return string pointer to next token.
// Return 0 when no more tokens.
// Return the token, or EndOfInput when no more tokens.
//
const char* TPpContext::tokenize(TPpToken& ppToken)
int TPpContext::tokenize(TPpToken& ppToken)
{
for(;;) {
int token = scanToken(&ppToken);
// Handle token-pasting logic
token = tokenPaste(token, ppToken);
ppToken.token = token;
if (token == EndOfInput) {
missingEndifCheck();
return nullptr;
return EndOfInput;
}
if (token == '#') {
if (previous_token == '\n') {
token = readCPPline(&ppToken);
if (token == EndOfInput) {
missingEndifCheck();
return nullptr;
return EndOfInput;
}
continue;
} else {
parseContext.ppError(ppToken.loc, "preprocessor directive cannot be preceded by another token", "#", "");
return nullptr;
return EndOfInput;
}
}
previous_token = token;
@@ -753,7 +751,6 @@ const char* TPpContext::tokenize(TPpToken& ppToken)
if (token == PpAtomIdentifier && MacroExpand(&ppToken, false, true) != 0)
continue;
const char* tokenString = nullptr;
switch (token) {
case PpAtomIdentifier:
case PpAtomConstInt:
@@ -765,26 +762,25 @@ const char* TPpContext::tokenize(TPpToken& ppToken)
#ifdef AMD_EXTENSIONS
case PpAtomConstFloat16:
#endif
tokenString = ppToken.name;
if (ppToken.name[0] == '\0')
continue;
break;
case PpAtomConstString:
if (parseContext.intermediate.getSource() == EShSourceHlsl) {
if (parseContext.intermediate.getSource() != EShSourceHlsl) {
// HLSL allows string literals.
tokenString = ppToken.name;
} else {
parseContext.ppError(ppToken.loc, "string literals not supported", "\"\"", "");
continue;
}
break;
case '\'':
parseContext.ppError(ppToken.loc, "character literals not supported", "\'", "");
break;
continue;
default:
tokenString = GetAtomString(token);
strcpy(ppToken.name, GetAtomString(token));
break;
}
if (tokenString)
return tokenString;
return token;
}
}