Commit 5b6a68e0 by alokp@chromium.org

Replaced pp::Token::value with pp::Token::text. The term value will be used for…

Replaced pp::Token::value with pp::Token::text. The term value will be used for a function which will convert text to integer/float constant.

git-svn-id: https://angleproject.googlecode.com/svn/trunk@1175 736b8ea6-26fd-11df-bfd4-992fa37f6226
parent fadc2058
......@@ -407,9 +407,9 @@ int string_input(char* buf, int max_size, yyscan_t yyscanner) {
#if ANGLE_USE_NEW_PREPROCESSOR
pp::Token token;
yyget_extra(yyscanner)->preprocessor.lex(&token);
len = token.type == pp::Token::LAST ? 0 : token.value.size();
len = token.type == pp::Token::LAST ? 0 : token.text.size();
if ((len > 0) && (len < max_size))
memcpy(buf, token.value.c_str(), len);
memcpy(buf, token.text.c_str(), len);
yyset_lineno(EncodeSourceLoc(token.location.file, token.location.line), yyscanner);
#else
len = yylex_CPP(buf, max_size);
......
......@@ -3083,9 +3083,9 @@ int string_input(char* buf, int max_size, yyscan_t yyscanner) {
#if ANGLE_USE_NEW_PREPROCESSOR
pp::Token token;
yyget_extra(yyscanner)->preprocessor.lex(&token);
len = token.type == pp::Token::LAST ? 0 : token.value.size();
len = token.type == pp::Token::LAST ? 0 : token.text.size();
if ((len > 0) && (len < max_size))
memcpy(buf, token.value.c_str(), len);
memcpy(buf, token.text.c_str(), len);
yyset_lineno(EncodeSourceLoc(token.location.file, token.location.line),yyscanner);
#else
len = yylex_CPP(buf, max_size);
......
......@@ -56,31 +56,31 @@ static DirectiveType getDirective(const pp::Token* token)
if (token->type != pp::Token::IDENTIFIER)
return DIRECTIVE_NONE;
if (token->value == kDirectiveDefine)
if (token->text == kDirectiveDefine)
return DIRECTIVE_DEFINE;
else if (token->value == kDirectiveUndef)
else if (token->text == kDirectiveUndef)
return DIRECTIVE_UNDEF;
else if (token->value == kDirectiveIf)
else if (token->text == kDirectiveIf)
return DIRECTIVE_IF;
else if (token->value == kDirectiveIfdef)
else if (token->text == kDirectiveIfdef)
return DIRECTIVE_IFDEF;
else if (token->value == kDirectiveIfndef)
else if (token->text == kDirectiveIfndef)
return DIRECTIVE_IFNDEF;
else if (token->value == kDirectiveElse)
else if (token->text == kDirectiveElse)
return DIRECTIVE_ELSE;
else if (token->value == kDirectiveElif)
else if (token->text == kDirectiveElif)
return DIRECTIVE_ELIF;
else if (token->value == kDirectiveEndif)
else if (token->text == kDirectiveEndif)
return DIRECTIVE_ENDIF;
else if (token->value == kDirectiveError)
else if (token->text == kDirectiveError)
return DIRECTIVE_ERROR;
else if (token->value == kDirectivePragma)
else if (token->text == kDirectivePragma)
return DIRECTIVE_PRAGMA;
else if (token->value == kDirectiveExtension)
else if (token->text == kDirectiveExtension)
return DIRECTIVE_EXTENSION;
else if (token->value == kDirectiveVersion)
else if (token->text == kDirectiveVersion)
return DIRECTIVE_VERSION;
else if (token->value == kDirectiveLine)
else if (token->text == kDirectiveLine)
return DIRECTIVE_LINE;
return DIRECTIVE_NONE;
......@@ -159,7 +159,7 @@ class DefinedParser : public Lexer
mLexer->lex(token);
if (token->type != Token::IDENTIFIER)
return;
if (token->value != kDefined)
if (token->text != kDefined)
return;
bool paren = false;
......@@ -173,11 +173,11 @@ class DefinedParser : public Lexer
if (token->type != Token::IDENTIFIER)
{
mDiagnostics->report(Diagnostics::UNEXPECTED_TOKEN,
token->location, token->value);
token->location, token->text);
skipUntilEOD(mLexer, token);
return;
}
MacroSet::const_iterator iter = mMacroSet->find(token->value);
MacroSet::const_iterator iter = mMacroSet->find(token->text);
std::string expression = iter != mMacroSet->end() ? "1" : "0";
if (paren)
......@@ -186,7 +186,7 @@ class DefinedParser : public Lexer
if (token->type != ')')
{
mDiagnostics->report(Diagnostics::UNEXPECTED_TOKEN,
token->location, token->value);
token->location, token->text);
skipUntilEOD(mLexer, token);
return;
}
......@@ -195,7 +195,7 @@ class DefinedParser : public Lexer
// We have a valid defined operator.
// Convert the current token into a CONST_INT token.
token->type = Token::CONST_INT;
token->value = expression;
token->text = expression;
}
private:
......@@ -259,7 +259,7 @@ void DirectiveParser::parseDirective(Token* token)
{
case DIRECTIVE_NONE:
mDiagnostics->report(Diagnostics::DIRECTIVE_INVALID_NAME,
token->location, token->value);
token->location, token->text);
skipUntilEOD(mTokenizer, token);
break;
case DIRECTIVE_DEFINE:
......@@ -310,7 +310,7 @@ void DirectiveParser::parseDirective(Token* token)
if (token->type == Token::LAST)
{
mDiagnostics->report(Diagnostics::EOF_IN_DIRECTIVE,
token->location, token->value);
token->location, token->text);
}
}
......@@ -322,25 +322,25 @@ void DirectiveParser::parseDefine(Token* token)
if (token->type != Token::IDENTIFIER)
{
mDiagnostics->report(Diagnostics::UNEXPECTED_TOKEN,
token->location, token->value);
token->location, token->text);
return;
}
if (isMacroPredefined(token->value, *mMacroSet))
if (isMacroPredefined(token->text, *mMacroSet))
{
mDiagnostics->report(Diagnostics::MACRO_PREDEFINED_REDEFINED,
token->location, token->value);
token->location, token->text);
return;
}
if (isMacroNameReserved(token->value))
if (isMacroNameReserved(token->text))
{
mDiagnostics->report(Diagnostics::MACRO_NAME_RESERVED,
token->location, token->value);
token->location, token->text);
return;
}
Macro macro;
macro.type = Macro::kTypeObj;
macro.name = token->value;
macro.name = token->text;
mTokenizer->lex(token);
if (token->type == '(' && !token->hasLeadingSpace())
......@@ -351,7 +351,7 @@ void DirectiveParser::parseDefine(Token* token)
mTokenizer->lex(token);
if (token->type != Token::IDENTIFIER)
break;
macro.parameters.push_back(token->value);
macro.parameters.push_back(token->text);
mTokenizer->lex(token); // Get ','.
} while (token->type == ',');
......@@ -360,7 +360,7 @@ void DirectiveParser::parseDefine(Token* token)
{
mDiagnostics->report(Diagnostics::UNEXPECTED_TOKEN,
token->location,
token->value);
token->text);
return;
}
mTokenizer->lex(token); // Get ')'.
......@@ -402,17 +402,17 @@ void DirectiveParser::parseUndef(Token* token)
if (token->type != Token::IDENTIFIER)
{
mDiagnostics->report(Diagnostics::UNEXPECTED_TOKEN,
token->location, token->value);
token->location, token->text);
return;
}
MacroSet::iterator iter = mMacroSet->find(token->value);
MacroSet::iterator iter = mMacroSet->find(token->text);
if (iter != mMacroSet->end())
{
if (iter->second.predefined)
{
mDiagnostics->report(Diagnostics::MACRO_PREDEFINED_UNDEFINED,
token->location, token->value);
token->location, token->text);
}
else
{
......@@ -448,7 +448,7 @@ void DirectiveParser::parseElse(Token* token)
if (mConditionalStack.empty())
{
mDiagnostics->report(Diagnostics::CONDITIONAL_ELSE_WITHOUT_IF,
token->location, token->value);
token->location, token->text);
skipUntilEOD(mTokenizer, token);
return;
}
......@@ -463,7 +463,7 @@ void DirectiveParser::parseElse(Token* token)
if (block.foundElseGroup)
{
mDiagnostics->report(Diagnostics::CONDITIONAL_ELSE_AFTER_ELSE,
token->location, token->value);
token->location, token->text);
skipUntilEOD(mTokenizer, token);
return;
}
......@@ -477,7 +477,7 @@ void DirectiveParser::parseElse(Token* token)
if (!isEOD(token))
{
mDiagnostics->report(Diagnostics::CONDITIONAL_UNEXPECTED_TOKEN,
token->location, token->value);
token->location, token->text);
skipUntilEOD(mTokenizer, token);
}
}
......@@ -489,7 +489,7 @@ void DirectiveParser::parseElif(Token* token)
if (mConditionalStack.empty())
{
mDiagnostics->report(Diagnostics::CONDITIONAL_ELIF_WITHOUT_IF,
token->location, token->value);
token->location, token->text);
skipUntilEOD(mTokenizer, token);
return;
}
......@@ -504,7 +504,7 @@ void DirectiveParser::parseElif(Token* token)
if (block.foundElseGroup)
{
mDiagnostics->report(Diagnostics::CONDITIONAL_ELIF_AFTER_ELSE,
token->location, token->value);
token->location, token->text);
skipUntilEOD(mTokenizer, token);
return;
}
......@@ -529,7 +529,7 @@ void DirectiveParser::parseEndif(Token* token)
if (mConditionalStack.empty())
{
mDiagnostics->report(Diagnostics::CONDITIONAL_ENDIF_WITHOUT_IF,
token->location, token->value);
token->location, token->text);
skipUntilEOD(mTokenizer, token);
return;
}
......@@ -541,7 +541,7 @@ void DirectiveParser::parseEndif(Token* token)
if (!isEOD(token))
{
mDiagnostics->report(Diagnostics::CONDITIONAL_UNEXPECTED_TOKEN,
token->location, token->value);
token->location, token->text);
skipUntilEOD(mTokenizer, token);
}
}
......@@ -583,14 +583,14 @@ void DirectiveParser::parsePragma(Token* token)
switch(state++)
{
case PRAGMA_NAME:
name = token->value;
name = token->text;
valid = valid && (token->type == Token::IDENTIFIER);
break;
case LEFT_PAREN:
valid = valid && (token->type == '(');
break;
case PRAGMA_VALUE:
value = token->value;
value = token->text;
valid = valid && (token->type == Token::IDENTIFIER);
break;
case RIGHT_PAREN:
......@@ -641,16 +641,16 @@ void DirectiveParser::parseExtension(Token* token)
if (valid && (token->type != Token::IDENTIFIER))
{
mDiagnostics->report(Diagnostics::INVALID_EXTENSION_NAME,
token->location, token->value);
token->location, token->text);
valid = false;
}
if (valid) name = token->value;
if (valid) name = token->text;
break;
case COLON:
if (valid && (token->type != ':'))
{
mDiagnostics->report(Diagnostics::UNEXPECTED_TOKEN,
token->location, token->value);
token->location, token->text);
valid = false;
}
break;
......@@ -658,16 +658,16 @@ void DirectiveParser::parseExtension(Token* token)
if (valid && (token->type != Token::IDENTIFIER))
{
mDiagnostics->report(Diagnostics::INVALID_EXTENSION_BEHAVIOR,
token->location, token->value);
token->location, token->text);
valid = false;
}
if (valid) behavior = token->value;
if (valid) behavior = token->text;
break;
default:
if (valid)
{
mDiagnostics->report(Diagnostics::UNEXPECTED_TOKEN,
token->location, token->value);
token->location, token->text);
valid = false;
}
break;
......@@ -677,7 +677,7 @@ void DirectiveParser::parseExtension(Token* token)
if (valid && (state != EXT_BEHAVIOR + 1))
{
mDiagnostics->report(Diagnostics::INVALID_EXTENSION_DIRECTIVE,
token->location, token->value);
token->location, token->text);
valid = false;
}
if (valid)
......@@ -706,16 +706,16 @@ void DirectiveParser::parseVersion(Token* token)
if (valid && (token->type != Token::CONST_INT))
{
mDiagnostics->report(Diagnostics::INVALID_VERSION_NUMBER,
token->location, token->value);
token->location, token->text);
valid = false;
}
if (valid) version = atoi(token->value.c_str());
if (valid) version = atoi(token->text.c_str());
break;
default:
if (valid)
{
mDiagnostics->report(Diagnostics::UNEXPECTED_TOKEN,
token->location, token->value);
token->location, token->text);
valid = false;
}
break;
......@@ -725,7 +725,7 @@ void DirectiveParser::parseVersion(Token* token)
if (valid && (state != VERSION_NUMBER + 1))
{
mDiagnostics->report(Diagnostics::INVALID_VERSION_DIRECTIVE,
token->location, token->value);
token->location, token->text);
valid = false;
}
if (valid)
......@@ -756,25 +756,25 @@ void DirectiveParser::parseLine(Token* token)
if (valid && (token->type != Token::CONST_INT))
{
mDiagnostics->report(Diagnostics::INVALID_LINE_NUMBER,
token->location, token->value);
token->location, token->text);
valid = false;
}
if (valid) line = atoi(token->value.c_str());
if (valid) line = atoi(token->text.c_str());
break;
case FILE_NUMBER:
if (valid && (token->type != Token::CONST_INT))
{
mDiagnostics->report(Diagnostics::INVALID_FILE_NUMBER,
token->location, token->value);
token->location, token->text);
valid = false;
}
if (valid) file = atoi(token->value.c_str());
if (valid) file = atoi(token->text.c_str());
break;
default:
if (valid)
{
mDiagnostics->report(Diagnostics::UNEXPECTED_TOKEN,
token->location, token->value);
token->location, token->text);
valid = false;
}
break;
......@@ -785,7 +785,7 @@ void DirectiveParser::parseLine(Token* token)
if (valid && (state != FILE_NUMBER) && (state != FILE_NUMBER + 1))
{
mDiagnostics->report(Diagnostics::INVALID_LINE_DIRECTIVE,
token->location, token->value);
token->location, token->text);
valid = false;
}
if (valid)
......@@ -806,7 +806,7 @@ bool DirectiveParser::skipping() const
void DirectiveParser::parseConditionalIf(Token* token)
{
ConditionalBlock block;
block.type = token->value;
block.type = token->text;
block.location = token->location;
if (skipping())
......@@ -861,7 +861,7 @@ int DirectiveParser::parseExpressionIf(Token* token)
if (!isEOD(token))
{
mDiagnostics->report(Diagnostics::CONDITIONAL_UNEXPECTED_TOKEN,
token->location, token->value);
token->location, token->text);
skipUntilEOD(mTokenizer, token);
}
......@@ -877,12 +877,12 @@ int DirectiveParser::parseExpressionIfdef(Token* token)
if (token->type != Token::IDENTIFIER)
{
mDiagnostics->report(Diagnostics::UNEXPECTED_TOKEN,
token->location, token->value);
token->location, token->text);
skipUntilEOD(mTokenizer, token);
return 0;
}
MacroSet::const_iterator iter = mMacroSet->find(token->value);
MacroSet::const_iterator iter = mMacroSet->find(token->text);
int expression = iter != mMacroSet->end() ? 1 : 0;
// Warn if there are tokens after #ifdef expression.
......@@ -890,7 +890,7 @@ int DirectiveParser::parseExpressionIfdef(Token* token)
if (!isEOD(token))
{
mDiagnostics->report(Diagnostics::CONDITIONAL_UNEXPECTED_TOKEN,
token->location, token->value);
token->location, token->text);
skipUntilEOD(mTokenizer, token);
}
return expression;
......
......@@ -1846,7 +1846,7 @@ int yylex(YYSTYPE* lvalp, Context* context)
switch (token->type)
{
case pp::Token::CONST_INT:
*lvalp = strtoll(token->value.c_str(), NULL, 0);
*lvalp = strtoll(token->text.c_str(), NULL, 0);
type = CONST_INT;
break;
......
......@@ -193,7 +193,7 @@ int yylex(YYSTYPE* lvalp, Context* context)
switch (token->type)
{
case pp::Token::CONST_INT:
*lvalp = strtoll(token->value.c_str(), NULL, 0);
*lvalp = strtoll(token->text.c_str(), NULL, 0);
type = CONST_INT;
break;
......
......@@ -73,7 +73,7 @@ void MacroExpander::lex(Token* token)
if (token->expansionDisabled())
break;
MacroSet::const_iterator iter = mMacroSet->find(token->value);
MacroSet::const_iterator iter = mMacroSet->find(token->text);
if (iter == mMacroSet->end())
break;
......@@ -152,7 +152,7 @@ bool MacroExpander::pushMacro(const Macro& macro, const Token& identifier)
assert(!macro.disabled);
assert(!identifier.expansionDisabled());
assert(identifier.type == Token::IDENTIFIER);
assert(identifier.value == macro.name);
assert(identifier.text == macro.name);
std::vector<Token> replacements;
if (!expandMacro(macro, identifier, &replacements))
......@@ -202,13 +202,13 @@ bool MacroExpander::expandMacro(const Macro& macro,
{
std::stringstream stream;
stream << identifier.location.line;
repl.value = stream.str();
repl.text = stream.str();
}
else if (macro.name == kFile)
{
std::stringstream stream;
stream << identifier.location.file;
repl.value = stream.str();
repl.text = stream.str();
}
}
}
......@@ -254,7 +254,7 @@ bool MacroExpander::collectMacroArgs(const Macro& macro,
if (token.type == Token::LAST)
{
mDiagnostics->report(Diagnostics::MACRO_UNTERMINATED_INVOCATION,
identifier.location, identifier.value);
identifier.location, identifier.text);
// Do not lose EOF token.
ungetToken(token);
return false;
......@@ -303,7 +303,7 @@ bool MacroExpander::collectMacroArgs(const Macro& macro,
Diagnostics::ID id = args->size() < macro.parameters.size() ?
Diagnostics::MACRO_TOO_FEW_ARGS :
Diagnostics::MACRO_TOO_MANY_ARGS;
mDiagnostics->report(id, identifier.location, identifier.value);
mDiagnostics->report(id, identifier.location, identifier.text);
return false;
}
......@@ -344,7 +344,7 @@ void MacroExpander::replaceMacroParams(const Macro& macro,
// There is no need to search for macro params every time.
// The param index can be cached with the replacement token.
Macro::Parameters::const_iterator iter = std::find(
macro.parameters.begin(), macro.parameters.end(), repl.value);
macro.parameters.begin(), macro.parameters.end(), repl.text);
if (iter == macro.parameters.end())
{
replacements->push_back(repl);
......
......@@ -70,7 +70,7 @@ void Preprocessor::predefineMacro(const char* name, int value)
Token token;
token.type = Token::CONST_INT;
token.value = stream.str();
token.text = stream.str();
Macro macro;
macro.predefined = true;
......@@ -97,11 +97,11 @@ void Preprocessor::lex(Token* token)
break;
case Token::PP_NUMBER:
mImpl->diagnostics->report(Diagnostics::INVALID_NUMBER,
token->location, token->value);
token->location, token->text);
break;
case Token::PP_OTHER:
mImpl->diagnostics->report(Diagnostics::INVALID_CHARACTER,
token->location, token->value);
token->location, token->text);
break;
default:
validToken = true;
......
......@@ -14,7 +14,7 @@ void Token::reset()
type = 0;
flags = 0;
location = SourceLocation();
value.clear();
text.clear();
}
bool Token::equals(const Token& other) const
......@@ -22,7 +22,7 @@ bool Token::equals(const Token& other) const
return (type == other.type) &&
(flags == other.flags) &&
(location == other.location) &&
(value == other.value);
(text == other.text);
}
void Token::setAtStartOfLine(bool start)
......@@ -54,7 +54,7 @@ std::ostream& operator<<(std::ostream& out, const Token& token)
if (token.hasLeadingSpace())
out << " ";
out << token.value;
out << token.text;
return out;
}
......
......@@ -81,7 +81,7 @@ struct Token
int type;
unsigned int flags;
SourceLocation location;
std::string value;
std::string text;
};
inline bool operator==(const Token& lhs, const Token& rhs)
......
......@@ -2313,7 +2313,7 @@ void Tokenizer::setLineNumber(int line)
void Tokenizer::lex(Token* token)
{
token->type = pplex(&token->value,&token->location,mHandle);
token->type = pplex(&token->text,&token->location,mHandle);
token->flags = 0;
token->setAtStartOfLine(mContext.lineStart);
......
......@@ -297,7 +297,7 @@ void Tokenizer::setLineNumber(int line)
void Tokenizer::lex(Token* token)
{
token->type = yylex(&token->value, &token->location, mHandle);
token->type = yylex(&token->text, &token->location, mHandle);
token->flags = 0;
token->setAtStartOfLine(mContext.lineStart);
......
......@@ -88,7 +88,7 @@ TEST_P(CharTest, Identified)
pp::Token token;
mPreprocessor.lex(&token);
EXPECT_EQ(expectedType, token.type);
EXPECT_EQ(expectedValue, token.value);
EXPECT_EQ(expectedValue, token.text);
};
// Note +1 for the max-value in range. It is there because the max-value
......
......@@ -51,7 +51,7 @@ TEST_F(BlockCommentTest, CommentReplacedWithSpace)
pp::Token token;
mPreprocessor.lex(&token);
EXPECT_EQ(pp::Token::IDENTIFIER, token.type);
EXPECT_EQ("bar", token.value);
EXPECT_EQ("bar", token.text);
EXPECT_TRUE(token.hasLeadingSpace());
}
......
......@@ -855,7 +855,7 @@ TEST_F(DefineTest, Predefined_LINE1)
pp::Token token;
mPreprocessor.lex(&token);
EXPECT_EQ(pp::Token::CONST_INT, token.type);
EXPECT_EQ("3", token.value);
EXPECT_EQ("3", token.text);
}
TEST_F(DefineTest, Predefined_LINE2)
......@@ -867,7 +867,7 @@ TEST_F(DefineTest, Predefined_LINE2)
pp::Token token;
mPreprocessor.lex(&token);
EXPECT_EQ(pp::Token::CONST_INT, token.type);
EXPECT_EQ("10", token.value);
EXPECT_EQ("10", token.text);
}
TEST_F(DefineTest, Predefined_FILE1)
......@@ -878,7 +878,7 @@ TEST_F(DefineTest, Predefined_FILE1)
pp::Token token;
mPreprocessor.lex(&token);
EXPECT_EQ(pp::Token::CONST_INT, token.type);
EXPECT_EQ("2", token.value);
EXPECT_EQ("2", token.text);
}
TEST_F(DefineTest, Predefined_FILE2)
......@@ -889,5 +889,5 @@ TEST_F(DefineTest, Predefined_FILE2)
pp::Token token;
mPreprocessor.lex(&token);
EXPECT_EQ(pp::Token::CONST_INT, token.type);
EXPECT_EQ("21", token.value);
EXPECT_EQ("21", token.text);
}
......@@ -20,7 +20,7 @@ protected:
pp::Token token;
mPreprocessor.lex(&token);
EXPECT_EQ(pp::Token::IDENTIFIER, token.type);
EXPECT_EQ(str, token.value);
EXPECT_EQ(str, token.text);
}
};
......
......@@ -20,7 +20,7 @@ protected:
pp::Token token;
mPreprocessor.lex(&token);
EXPECT_EQ(pp::Token::IDENTIFIER, token.type);
EXPECT_EQ("foo", token.value);
EXPECT_EQ("foo", token.text);
EXPECT_EQ(location.file, token.location.file);
EXPECT_EQ(location.line, token.location.line);
......@@ -114,7 +114,7 @@ TEST_F(LocationTest, EndOfFileWithoutNewline)
pp::Token token;
mPreprocessor.lex(&token);
EXPECT_EQ(pp::Token::IDENTIFIER, token.type);
EXPECT_EQ("foo", token.value);
EXPECT_EQ("foo", token.text);
EXPECT_EQ(0, token.location.file);
EXPECT_EQ(1, token.location.line);
......@@ -132,7 +132,7 @@ TEST_F(LocationTest, EndOfFileAfterNewline)
pp::Token token;
mPreprocessor.lex(&token);
EXPECT_EQ(pp::Token::IDENTIFIER, token.type);
EXPECT_EQ("foo", token.value);
EXPECT_EQ("foo", token.text);
EXPECT_EQ(0, token.location.file);
EXPECT_EQ(1, token.location.line);
......@@ -150,7 +150,7 @@ TEST_F(LocationTest, EndOfFileAfterEmptyString)
pp::Token token;
mPreprocessor.lex(&token);
EXPECT_EQ(pp::Token::IDENTIFIER, token.type);
EXPECT_EQ("foo", token.value);
EXPECT_EQ("foo", token.text);
EXPECT_EQ(0, token.location.file);
EXPECT_EQ(1, token.location.line);
......
......@@ -50,7 +50,7 @@ TEST_P(IntegerTest, Identified)
pp::Token token;
mPreprocessor.lex(&token);
EXPECT_EQ(pp::Token::CONST_INT, token.type);
EXPECT_EQ(str, token.value);
EXPECT_EQ(str, token.text);
}
INSTANTIATE_TEST_CASE_P(DecimalInteger,
......@@ -89,7 +89,7 @@ class FloatTest : public PreprocessorTest
pp::Token token;
mPreprocessor.lex(&token);
EXPECT_EQ(pp::Token::CONST_FLOAT, token.type);
EXPECT_EQ(str, token.value);
EXPECT_EQ(str, token.text);
}
};
......
......@@ -27,7 +27,7 @@ TEST_P(OperatorTest, Identified)
pp::Token token;
mPreprocessor.lex(&token);
EXPECT_EQ(param.op, token.type);
EXPECT_EQ(param.str, token.value);
EXPECT_EQ(param.str, token.text);
}
static const OperatorTestParam kOperators[] = {
......
......@@ -19,7 +19,7 @@ class SpaceTest : public PreprocessorTest
// "foo" is returned after ignoring the whitespace characters.
mPreprocessor.lex(&token);
EXPECT_EQ(pp::Token::IDENTIFIER, token.type);
EXPECT_EQ("foo", token.value);
EXPECT_EQ("foo", token.text);
// The whitespace character is however recorded with the next token.
EXPECT_TRUE(token.hasLeadingSpace());
}
......@@ -88,7 +88,7 @@ TEST_F(SpaceTest, LeadingSpace)
pp::Token token;
mPreprocessor.lex(&token);
EXPECT_EQ(pp::Token::IDENTIFIER, token.type);
EXPECT_EQ("foo", token.value);
EXPECT_EQ("foo", token.text);
EXPECT_TRUE(token.hasLeadingSpace());
mPreprocessor.lex(&token);
......@@ -101,6 +101,6 @@ TEST_F(SpaceTest, LeadingSpace)
mPreprocessor.lex(&token);
EXPECT_EQ(pp::Token::IDENTIFIER, token.type);
EXPECT_EQ("bar", token.value);
EXPECT_EQ("bar", token.text);
EXPECT_FALSE(token.hasLeadingSpace());
}
......@@ -15,7 +15,7 @@ TEST(TokenTest, DefaultConstructor)
EXPECT_EQ(0, token.flags);
EXPECT_EQ(0, token.location.line);
EXPECT_EQ(0, token.location.file);
EXPECT_EQ("", token.value);
EXPECT_EQ("", token.text);
}
TEST(TokenTest, Assignment)
......@@ -25,14 +25,14 @@ TEST(TokenTest, Assignment)
token.flags = 1;
token.location.line = 1;
token.location.file = 1;
token.value.assign("foo");
token.text.assign("foo");
token = pp::Token();
EXPECT_EQ(0, token.type);
EXPECT_EQ(0, token.flags);
EXPECT_EQ(0, token.location.line);
EXPECT_EQ(0, token.location.file);
EXPECT_EQ("", token.value);
EXPECT_EQ("", token.text);
}
TEST(TokenTest, Equals)
......@@ -56,9 +56,9 @@ TEST(TokenTest, Equals)
EXPECT_FALSE(token.equals(pp::Token()));
token.location.file = 0;
token.value.assign("foo");
token.text.assign("foo");
EXPECT_FALSE(token.equals(pp::Token()));
token.value.clear();
token.text.clear();
EXPECT_TRUE(token.equals(pp::Token()));
}
......@@ -76,7 +76,7 @@ TEST(TokenTest, HasLeadingSpace)
TEST(TokenTest, Write)
{
pp::Token token;
token.value.assign("foo");
token.text.assign("foo");
std::stringstream out1;
out1 << token;
EXPECT_TRUE(out1.good());
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment