Commit 04d7d22b by alokp@chromium.org

Refactored Lexer class to allow chaining. The full chain when parsing #if…

Refactored Lexer class to allow chaining. The full chain when parsing #if directive looks like this: Preprocessor -> MacroExpander -> DirectiveHandler -> MacroExpander -> DefinedFilter -> Tokenizer. This chain dynamically changes depending on the current context. Also added an incomplete implementation of #if handling and ExpressionParser to illustrate how this design is supposed to work. Review URL: https://codereview.appspot.com/6203060 git-svn-id: https://angleproject.googlecode.com/svn/trunk@1084 736b8ea6-26fd-11df-bfd4-992fa37f6226
parent 355989bc
......@@ -16,15 +16,21 @@
'include_dirs': [
],
'sources': [
'compiler/preprocessor/new/DirectiveParser.cpp',
'compiler/preprocessor/new/DirectiveParser.h',
'compiler/preprocessor/new/ExpressionParser.cpp',
'compiler/preprocessor/new/ExpressionParser.h',
'compiler/preprocessor/new/Input.cpp',
'compiler/preprocessor/new/Input.h',
'compiler/preprocessor/new/Lexer.cpp',
'compiler/preprocessor/new/Lexer.h',
'compiler/preprocessor/new/pp_lex.cpp',
'compiler/preprocessor/new/MacroExpander.cpp',
'compiler/preprocessor/new/MacroExpander.h',
'compiler/preprocessor/new/Preprocessor.cpp',
'compiler/preprocessor/new/Preprocessor.h',
'compiler/preprocessor/new/Token.cpp',
'compiler/preprocessor/new/Token.h',
'compiler/preprocessor/new/Tokenizer.cpp',
'compiler/preprocessor/new/Tokenizer.h',
],
},
{
......
//
// Copyright (c) 2011 The ANGLE Project Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
//
#include "DirectiveParser.h"
#include <cassert>
#include "ExpressionParser.h"
#include "MacroExpander.h"
#include "Token.h"
#include "Tokenizer.h"
namespace {
// Canonical names of the GLSL ES preprocessor directives. The identifier
// token that follows '#' is compared against these in parseDirective().
static const std::string kDirectiveDefine("define");
static const std::string kDirectiveUndef("undef");
static const std::string kDirectiveIf("if");
static const std::string kDirectiveIfdef("ifdef");
static const std::string kDirectiveIfndef("ifndef");
static const std::string kDirectiveElse("else");
static const std::string kDirectiveElif("elif");
static const std::string kDirectiveEndif("endif");
static const std::string kDirectiveError("error");
static const std::string kDirectivePragma("pragma");
static const std::string kDirectiveExtension("extension");
static const std::string kDirectiveVersion("version");
static const std::string kDirectiveLine("line");
} // namespace
namespace pp
{
// DefinedParser sits between the tokenizer and the MacroExpander while a
// #if/#elif expression is being parsed. Its job is to recognize the
// defined(identifier) operator before macro expansion happens; currently
// it is a pass-through stub that just forwards to the wrapped lexer.
class DefinedParser : public Lexer
{
  public:
    // Wraps |lexer|; does not take ownership. explicit so a Lexer* cannot
    // silently convert to a DefinedParser.
    explicit DefinedParser(Lexer* lexer) : mLexer(lexer) { }

  protected:
    // Lexer implementation.
    virtual void lex(Token* token)
    {
        // TODO(alokp): Implement me.
        mLexer->lex(token);
    }

  private:
    Lexer* mLexer;  // Not owned.
};
// Returns the next token visible outside of directive processing.
// '#' tokens trigger directive handling, and any newline left behind by a
// handled directive is consumed here so downstream lexers never see it.
void DirectiveParser::lex(Token* token)
{
    for (;;)
    {
        mTokenizer->lex(token);
        if (token->type == '#')
            parseDirective(token);
        if (token->type != '\n')
            break;
    }
}
// Dispatches on the identifier that follows '#' to the matching per-directive
// handler, then skips any remaining tokens up to the end of the line so that
// a directive consumes exactly one logical line.
void DirectiveParser::parseDirective(Token* token)
{
assert(token->type == '#');
mTokenizer->lex(token);
if (token->type == pp::Token::IDENTIFIER)
{
if (token->value == kDirectiveDefine)
parseDefine(token);
else if (token->value == kDirectiveUndef)
parseUndef(token);
else if (token->value == kDirectiveIf)
parseIf(token);
else if (token->value == kDirectiveIfdef)
parseIfdef(token);
else if (token->value == kDirectiveIfndef)
parseIfndef(token);
else if (token->value == kDirectiveElse)
parseElse(token);
else if (token->value == kDirectiveElif)
parseElif(token);
else if (token->value == kDirectiveEndif)
parseEndif(token);
else if (token->value == kDirectiveError)
parseError(token);
else if (token->value == kDirectivePragma)
parsePragma(token);
else if (token->value == kDirectiveExtension)
parseExtension(token);
else if (token->value == kDirectiveVersion)
parseVersion(token);
else if (token->value == kDirectiveLine)
parseLine(token);
else
// '#' followed by an unrecognized identifier.
token->type = pp::Token::INVALID_DIRECTIVE;
}
// Skip tokens remaining on the directive line. Token type 0 means EOF.
while (token->type != '\n')
{
if (token->type == 0) {
// EOF reached inside a directive; commented-out error reporting is
// pending implementation.
//token->type = pp::Token::EOF_IN_DIRECTIVE;
break;
}
//token->type = pp::Token::INVALID_DIRECTIVE;
mTokenizer->lex(token);
}
}
// Stub handler for #define: only validates the directive name and consumes
// the next token for now.
void DirectiveParser::parseDefine(Token* token)
{
// TODO(alokp): Implement me.
assert(token->value == kDirectiveDefine);
mTokenizer->lex(token);
}
// Stub handler for #undef: only validates the directive name and consumes
// the next token for now.
void DirectiveParser::parseUndef(Token* token)
{
// TODO(alokp): Implement me.
assert(token->value == kDirectiveUndef);
mTokenizer->lex(token);
}
// Handles #if by assembling the expression lexer chain on the stack:
// ExpressionParser <- MacroExpander <- DefinedParser <- Tokenizer.
// DefinedParser runs before MacroExpander so defined(X) operands are
// intended to be recognized before macro expansion.
void DirectiveParser::parseIf(Token* token)
{
// TODO(alokp): Implement me.
assert(token->value == kDirectiveIf);
DefinedParser definedParser(mTokenizer);
MacroExpander macroExpander(&definedParser);
ExpressionParser expressionParser(&macroExpander);
// Prime the first token of the expression for the parser.
macroExpander.lex(token);
int expression = 0;
if (!expressionParser.parse(token, &expression))
{
// TODO(alokp): Report diagnostic.
return;
}
// We have a valid #if directive. Handle it.
// TODO(alokp): Push conditional block.
}
// Stub handler for #ifdef: only validates the directive name and consumes
// the next token for now.
void DirectiveParser::parseIfdef(Token* token)
{
// TODO(alokp): Implement me.
assert(token->value == kDirectiveIfdef);
mTokenizer->lex(token);
}
// Stub handler for #ifndef: only validates the directive name and consumes
// the next token for now.
void DirectiveParser::parseIfndef(Token* token)
{
// TODO(alokp): Implement me.
assert(token->value == kDirectiveIfndef);
mTokenizer->lex(token);
}
// Stub handler for #else: only validates the directive name and consumes
// the next token for now.
void DirectiveParser::parseElse(Token* token)
{
// TODO(alokp): Implement me.
assert(token->value == kDirectiveElse);
mTokenizer->lex(token);
}
// Stub handler for #elif: only validates the directive name and consumes
// the next token for now.
void DirectiveParser::parseElif(Token* token)
{
// TODO(alokp): Implement me.
assert(token->value == kDirectiveElif);
mTokenizer->lex(token);
}
// Stub handler for #endif: only validates the directive name and consumes
// the next token for now.
void DirectiveParser::parseEndif(Token* token)
{
// TODO(alokp): Implement me.
assert(token->value == kDirectiveEndif);
mTokenizer->lex(token);
}
// Stub handler for #error: only validates the directive name and consumes
// the next token for now.
void DirectiveParser::parseError(Token* token)
{
// TODO(alokp): Implement me.
assert(token->value == kDirectiveError);
mTokenizer->lex(token);
}
// Stub handler for #pragma: only validates the directive name and consumes
// the next token for now.
void DirectiveParser::parsePragma(Token* token)
{
// TODO(alokp): Implement me.
assert(token->value == kDirectivePragma);
mTokenizer->lex(token);
}
// Stub handler for #extension: only validates the directive name and
// consumes the next token for now.
void DirectiveParser::parseExtension(Token* token)
{
// TODO(alokp): Implement me.
assert(token->value == kDirectiveExtension);
mTokenizer->lex(token);
}
// Stub handler for #version: only validates the directive name and consumes
// the next token for now.
void DirectiveParser::parseVersion(Token* token)
{
// TODO(alokp): Implement me.
assert(token->value == kDirectiveVersion);
mTokenizer->lex(token);
}
// Stub handler for #line. Unlike the other stubs it reads through a
// MacroExpander, since #line arguments may be produced by macro expansion,
// but it does not yet apply the new line/file numbers.
void DirectiveParser::parseLine(Token* token)
{
// TODO(alokp): Implement me.
assert(token->value == kDirectiveLine);
MacroExpander macroExpander(mTokenizer);
macroExpander.lex(token);
}
} // namespace pp
//
// Copyright (c) 2012 The ANGLE Project Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
//
#ifndef COMPILER_PREPROCESSOR_DIRECTIVE_PARSER_H_
#define COMPILER_PREPROCESSOR_DIRECTIVE_PARSER_H_
#include "Lexer.h"
#include "pp_utils.h"
namespace pp
{
class Tokenizer;
// DirectiveParser is a chainable Lexer that sits directly above the
// Tokenizer. It intercepts '#' tokens, parses the preprocessor directive on
// that line, and hides directive tokens from downstream lexers.
class DirectiveParser : public Lexer
{
  public:
    // Wraps |tokenizer|; does not take ownership. explicit so a Tokenizer*
    // cannot implicitly convert to a DirectiveParser.
    explicit DirectiveParser(Tokenizer* tokenizer) : mTokenizer(tokenizer) { }

    // Lexer implementation. Yields the next non-directive token.
    virtual void lex(Token* token);

  private:
    PP_DISALLOW_COPY_AND_ASSIGN(DirectiveParser);

    // Dispatches to the per-directive handlers below based on the
    // identifier following '#'.
    void parseDirective(Token* token);

    void parseDefine(Token* token);
    void parseUndef(Token* token);
    void parseIf(Token* token);
    void parseIfdef(Token* token);
    void parseIfndef(Token* token);
    void parseElse(Token* token);
    void parseElif(Token* token);
    void parseEndif(Token* token);
    void parseError(Token* token);
    void parsePragma(Token* token);
    void parseExtension(Token* token);
    void parseVersion(Token* token);
    void parseLine(Token* token);

    Tokenizer* mTokenizer;  // Not owned.
};
} // namespace pp
#endif // COMPILER_PREPROCESSOR_DIRECTIVE_PARSER_H_
//
// Copyright (c) 2012 The ANGLE Project Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
//
#ifndef COMPILER_PREPROCESSOR_EXPRESSION_PARSER_H_
#define COMPILER_PREPROCESSOR_EXPRESSION_PARSER_H_
#include "pp_utils.h"
namespace pp
{
class Lexer;
struct Token;
// ExpressionParser evaluates the integer constant expression of a
// #if/#elif directive, reading tokens from the supplied lexer chain.
class ExpressionParser
{
  public:
    // Reads tokens from |lexer|; does not take ownership. explicit so a
    // Lexer* cannot implicitly convert to an ExpressionParser.
    explicit ExpressionParser(Lexer* lexer) : mLexer(lexer) { }

    // Parses an expression starting at |token| and stores its value in
    // *result. Returns false on error, in which case |token| describes the
    // failure.
    bool parse(Token* token, int* result);

  private:
    PP_DISALLOW_COPY_AND_ASSIGN(ExpressionParser);

    Lexer* mLexer;  // Not owned.
};
} // namespace pp
#endif // COMPILER_PREPROCESSOR_EXPRESSION_PARSER_H_
/*
//
// Copyright (c) 2012 The ANGLE Project Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
//
This file contains the Yacc grammar for GLSL ES preprocessor expression.
IF YOU MODIFY THIS FILE YOU ALSO NEED TO RUN generate_parser.sh,
WHICH GENERATES THE GLSL ES preprocessor expression parser.
*/
%{
//
// Copyright (c) 2012 The ANGLE Project Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
//
// This file is auto-generated by generate_parser.sh. DO NOT EDIT!
#if defined(__GNUC__)
#elif defined(_MSC_VER)
#pragma warning(disable: 4065 4701)
#endif
#include "ExpressionParser.h"
#include <cassert>
#include <sstream>
#include "Lexer.h"
#include "Token.h"
namespace {
struct Context
{
pp::Lexer* lexer;
pp::Token* token;
int* result;
};
} // namespace
%}
%define api.pure
%name-prefix="pp"
%parse-param {Context *context}
%lex-param {Context *context}
%{
static int yylex(int* lvalp, Context* context);
static void yyerror(Context* context, const char* reason);
%}
%token CONST_INT
%left OP_OR
%left OP_AND
%left '|'
%left '^'
%left '&'
%left OP_EQ OP_NE
%left '<' '>' OP_LE OP_GE
%left OP_LEFT OP_RIGHT
%left '+' '-'
%left '*' '/' '%'
%right UNARY
%%
input
: expression {
*(context->result) = $1;
YYACCEPT;
}
;
expression
: CONST_INT
| expression OP_OR expression {
$$ = $1 || $3;
}
| expression OP_AND expression {
$$ = $1 && $3;
}
| expression '|' expression {
$$ = $1 | $3;
}
| expression '^' expression {
$$ = $1 ^ $3;
}
| expression '&' expression {
$$ = $1 & $3;
}
| expression OP_NE expression {
$$ = $1 != $3;
}
| expression OP_EQ expression {
$$ = $1 == $3;
}
| expression OP_GE expression {
$$ = $1 >= $3;
}
| expression OP_LE expression {
$$ = $1 <= $3;
}
| expression '>' expression {
$$ = $1 > $3;
}
| expression '<' expression {
$$ = $1 < $3;
}
| expression OP_RIGHT expression {
$$ = $1 >> $3;
}
| expression OP_LEFT expression {
$$ = $1 << $3;
}
| expression '-' expression {
$$ = $1 - $3;
}
| expression '+' expression {
$$ = $1 + $3;
}
| expression '%' expression {
if ($3 == 0) {
std::stringstream stream;
stream << $1 << " % " << $3;
context->token->type = pp::Token::DIVISION_BY_ZERO;
context->token->value = stream.str();
YYABORT;
} else {
$$ = $1 % $3;
}
}
| expression '/' expression {
if ($3 == 0) {
std::stringstream stream;
stream << $1 << " / " << $3;
context->token->type = pp::Token::DIVISION_BY_ZERO;
context->token->value = stream.str();
YYABORT;
} else {
$$ = $1 / $3;
}
}
| expression '*' expression {
$$ = $1 * $3;
}
| '!' expression %prec UNARY {
$$ = ! $2;
}
| '~' expression %prec UNARY {
$$ = ~ $2;
}
| '-' expression %prec UNARY {
$$ = - $2;
}
| '+' expression %prec UNARY {
$$ = + $2;
}
| '(' expression ')' {
$$ = $2;
}
;
%%
// Bison lexer hook: maps the current pp::Token onto the parser's token ids,
// storing the numeric value of integer constants in *lvalp. Returns 0
// (end of input) for any token that cannot appear in an expression, which
// terminates the parse at that point.
int yylex(int* lvalp, Context* context)
{
int type = 0;
pp::Token* token = context->token;
switch (token->type)
{
case pp::Token::CONST_INT:
// NOTE(review): atoi silently saturates/misbehaves on overflow and
// non-decimal input — TODO confirm the tokenizer only produces valid
// in-range decimal constants here.
*lvalp = atoi(token->value.c_str());
type = CONST_INT;
break;
case pp::Token::OP_OR: type = OP_OR; break;
case pp::Token::OP_AND: type = OP_AND; break;
case pp::Token::OP_NE: type = OP_NE; break;
case pp::Token::OP_EQ: type = OP_EQ; break;
case pp::Token::OP_GE: type = OP_GE; break;
case pp::Token::OP_LE: type = OP_LE; break;
case pp::Token::OP_RIGHT: type = OP_RIGHT; break;
case pp::Token::OP_LEFT: type = OP_LEFT; break;
case '|': type = '|'; break;
case '^': type = '^'; break;
case '&': type = '&'; break;
case '>': type = '>'; break;
case '<': type = '<'; break;
case '-': type = '-'; break;
case '+': type = '+'; break;
case '%': type = '%'; break;
case '/': type = '/'; break;
case '*': type = '*'; break;
case '!': type = '!'; break;
case '~': type = '~'; break;
case '(': type = '('; break;
case ')': type = ')'; break;
default: break;
}
// Advance to the next token if the current one is valid.
if (type != 0) context->lexer->lex(token);
return type;
}
// Bison error callback: records the failure on the current token so the
// caller of ExpressionParser::parse can report it.
void yyerror(Context* context, const char* reason)
{
context->token->type = pp::Token::INVALID_EXPRESSION;
context->token->value = reason;
}
namespace pp {
// Runs the bison-generated parser over the token stream from mLexer,
// starting at |token|, and stores the expression's value in *result.
// Returns true only if the parse succeeded; on failure |token| carries
// the error (set here for memory exhaustion, or by yyerror/yylex).
bool ExpressionParser::parse(Token* token, int* result)
{
    Context context;
    context.lexer = mLexer;
    context.token = token;
    context.result = result;

    const int status = yyparse(&context);
    if (status == 2)
    {
        // Parser ran out of memory.
        token->type = pp::Token::OUT_OF_MEMORY;
        token->value.clear();
    }
    else if ((status != 0) && (status != 1))
    {
        // yyparse only documents return values 0, 1, and 2.
        assert(false);
        token->type = pp::Token::INTERNAL_ERROR;
        token->value.clear();
    }
    return status == 0;
}
} // namespace pp
......@@ -7,10 +7,8 @@
#ifndef COMPILER_PREPROCESSOR_LEXER_H_
#define COMPILER_PREPROCESSOR_LEXER_H_
#include <memory>
#include "Input.h"
#include "pp_utils.h"
#include <cassert>
#include <vector>
namespace pp
{
......@@ -20,28 +18,7 @@ struct Token;
class Lexer
{
public:
struct Context
{
Input input;
// The location where yytext points to. Token location should track
// scanLoc instead of Input::mReadLoc because they may not be the same
// if text is buffered up in the lexer input buffer.
Input::Location scanLoc;
};
Lexer();
~Lexer();
bool init(int count, const char* const string[], const int length[]);
int lex(Token* token);
private:
PP_DISALLOW_COPY_AND_ASSIGN(Lexer);
bool initLexer();
void destroyLexer();
void* mHandle; // Lexer handle.
Context mContext; // Lexer extra.
virtual void lex(Token* token) = 0;
};
} // namespace pp
......
//
// Copyright (c) 2012 The ANGLE Project Authors. All rights reserved.
// Copyright (c) 2011 The ANGLE Project Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
//
#include "Lexer.h"
#include <cassert>
#include "MacroExpander.h"
namespace pp
{
Lexer::Lexer() : mHandle(0)
MacroExpander::MacroExpander(Lexer* lexer) : mLexer(lexer)
{
}
Lexer::~Lexer()
void MacroExpander::lex(Token* token)
{
destroyLexer();
}
bool Lexer::init(int count, const char* const string[], const int length[])
{
if (count < 0) return false;
if ((count > 0) && (string == 0)) return false;
mContext.input = Input(count, string, length);
return initLexer();
// TODO(alokp): Implement me.
mLexer->lex(token);
}
} // namespace pp
......
//
// Copyright (c) 2012 The ANGLE Project Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
//
#ifndef COMPILER_PREPROCESSOR_MACRO_EXPANDER_H_
#define COMPILER_PREPROCESSOR_MACRO_EXPANDER_H_
#include "Lexer.h"
#include "pp_utils.h"
namespace pp
{
// MacroExpander is a chainable Lexer intended to replace macro invocations
// in the token stream with their expansions.
// NOTE(review): currently a pass-through stub (see the TODO in the .cpp).
class MacroExpander : public Lexer
{
  public:
    // Reads tokens from |lexer|; does not take ownership. explicit so a
    // Lexer* cannot implicitly convert to a MacroExpander.
    explicit MacroExpander(Lexer* lexer);

    // Lexer implementation.
    virtual void lex(Token* token);

  private:
    PP_DISALLOW_COPY_AND_ASSIGN(MacroExpander);

    Lexer* mLexer;  // Not owned.
};
} // namespace pp
#endif // COMPILER_PREPROCESSOR_MACRO_EXPANDER_H_
......@@ -6,26 +6,27 @@
#include "Preprocessor.h"
#include "Token.h"
namespace pp
{
Preprocessor::Preprocessor() : mDirectiveParser(&mTokenizer),
mMacroExpander(&mDirectiveParser)
{
}
bool Preprocessor::init(int count,
const char* const string[],
const int length[])
{
return mLexer.init(count, string, length);
return mTokenizer.init(count, string, length);
}
int Preprocessor::lex(Token* token)
{
int ret = mLexer.lex(token);
// TODO(alokp): Handle preprocessor directives. Ignore them for now.
while ((ret == '\n') || (ret == '#'))
{
ret = mLexer.lex(token);
}
return ret;
mMacroExpander.lex(token);
return token->type;
}
} // namespace pp
......
......@@ -7,8 +7,9 @@
#ifndef COMPILER_PREPROCESSOR_PREPROCESSOR_H_
#define COMPILER_PREPROCESSOR_PREPROCESSOR_H_
#include "Lexer.h"
#include "pp_utils.h"
#include "DirectiveParser.h"
#include "MacroExpander.h"
#include "Tokenizer.h"
namespace pp
{
......@@ -16,7 +17,7 @@ namespace pp
class Preprocessor
{
public:
Preprocessor() { }
Preprocessor();
// count: specifies the number of elements in the string and length arrays.
// string: specifies an array of pointers to strings.
......@@ -33,7 +34,10 @@ class Preprocessor
private:
PP_DISALLOW_COPY_AND_ASSIGN(Preprocessor);
Lexer mLexer;
Tokenizer mTokenizer;
DirectiveParser mDirectiveParser;
MacroExpander mMacroExpander;
};
} // namespace pp
......
......@@ -18,9 +18,16 @@ struct Token
enum Type
{
// Token IDs for error conditions are negative.
INVALID_CHARACTER = -1,
INVALID_NUMBER = -2,
EOF_IN_COMMENT = -3,
INTERNAL_ERROR = -1,
OUT_OF_MEMORY = -2,
INVALID_CHARACTER = -3,
INVALID_NUMBER = -4,
INVALID_DIRECTIVE = -5,
INVALID_EXPRESSION = -6,
DIVISION_BY_ZERO = -7,
EOF_IN_COMMENT = -8,
EOF_IN_DIRECTIVE = -9,
UNEXPECTED_TOKEN_IN_DIRECTIVE = -10,
// Indicates EOF.
LAST = 0,
......@@ -70,6 +77,14 @@ struct Token
Token() : type(0), flags(0) { }
void reset()
{
type = 0;
flags = 0;
location = Location();
value.clear();
}
bool equals(const Token& other) const
{
return (type == other.type) &&
......
//
// Copyright (c) 2012 The ANGLE Project Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
//
#ifndef COMPILER_PREPROCESSOR_TOKENIZER_H_
#define COMPILER_PREPROCESSOR_TOKENIZER_H_
#include "Input.h"
#include "Lexer.h"
#include "pp_utils.h"
namespace pp
{
// Tokenizer is the lowest layer of the preprocessor lexer chain: a
// flex-generated reentrant scanner that turns the raw input strings into
// pp::Tokens.
class Tokenizer : public Lexer
{
public:
// Extra state attached to the reentrant scanner (yyextra).
struct Context
{
Input input;
// The location where yytext points to. Token location should track
// scanLoc instead of Input::mReadLoc because they may not be the same
// if text is buffered up in the scanner input buffer.
Input::Location scanLoc;
bool leadingSpace; // Whitespace/comment seen before the current token.
bool lineStart; // True at the beginning of a logical line.
};
Tokenizer();
~Tokenizer();
// Sets the input strings to scan; returns false on invalid arguments or
// scanner initialization failure.
bool init(int count, const char* const string[], const int length[]);
// Lexer implementation.
virtual void lex(Token* token);
private:
PP_DISALLOW_COPY_AND_ASSIGN(Tokenizer);
bool initScanner();
void destroyScanner();
void* mHandle; // Scanner handle.
Context mContext; // Scanner extra.
};
} // namespace pp
#endif // COMPILER_PREPROCESSOR_TOKENIZER_H_
......@@ -23,7 +23,7 @@ IF YOU MODIFY THIS FILE YOU ALSO NEED TO RUN generate_parser.sh.
}
%{
#include "Lexer.h"
#include "Tokenizer.h"
#include "Token.h"
typedef std::string YYSTYPE;
......@@ -32,10 +32,12 @@ typedef pp::Token::Location YYLTYPE;
// Use the unused yycolumn variable to track file (string) number.
#define yyfileno yycolumn
#define YY_USER_INIT \
do { \
yyfileno = 0; \
yylineno = 1; \
#define YY_USER_INIT \
do { \
yyfileno = 0; \
yylineno = 1; \
yyextra->leadingSpace = false; \
yyextra->lineStart = true; \
} while(0);
#define YY_USER_ACTION \
......@@ -60,7 +62,7 @@ typedef pp::Token::Location YYLTYPE;
%option noyywrap nounput never-interactive
%option reentrant bison-bridge bison-locations
%option prefix="pp"
%option extra-type="pp::Lexer::Context*"
%option extra-type="pp::Tokenizer::Context*"
%x COMMENT
NEWLINE \n|\r|\r\n
......@@ -88,9 +90,17 @@ FRACTIONAL_CONSTANT ({DIGIT}*"."{DIGIT}+)|({DIGIT}+".")
<COMMENT>"*"
<COMMENT>{NEWLINE} { ++yylineno; }
<COMMENT><<EOF>> { return pp::Token::EOF_IN_COMMENT; }
<COMMENT>"*/" { BEGIN(INITIAL); return ' '; }
# { return yytext[0]; }
<COMMENT>"*/" { yyextra->leadingSpace = true; BEGIN(INITIAL); }
# {
// # is only valid at start of line for preprocessor directives.
if (yyextra->lineStart) {
return yytext[0];
} else {
yylval->assign(yytext, yyleng);
return pp::Token::INVALID_CHARACTER;
}
}
{IDENTIFIER} {
yylval->assign(yytext, yyleng);
......@@ -137,7 +147,7 @@ FRACTIONAL_CONSTANT ({DIGIT}*"."{DIGIT}+)|({DIGIT}+".")
"|=" { return pp::Token::OP_OR_ASSIGN; }
{PUNCTUATOR} { return yytext[0]; }
[ \t\v\f]+ { return ' '; }
[ \t\v\f]+ { yyextra->leadingSpace = true; }
{NEWLINE} {
++yylineno;
......@@ -155,21 +165,34 @@ FRACTIONAL_CONSTANT ({DIGIT}*"."{DIGIT}+)|({DIGIT}+".")
namespace pp {
int Lexer::lex(Token* token)
Tokenizer::Tokenizer() : mHandle(0)
{
}
Tokenizer::~Tokenizer()
{
destroyScanner();
}
bool Tokenizer::init(int count, const char* const string[], const int length[])
{
if (count < 0) return false;
if ((count > 0) && (string == 0)) return false;
mContext.input = Input(count, string, length);
return initScanner();
}
void Tokenizer::lex(Token* token)
{
bool leadingSpace = false;
token->type = yylex(&token->value, &token->location, mHandle);
while (token->type == ' ')
{
leadingSpace = true;
token->type = yylex(&token->value, &token->location, mHandle);
}
token->setHasLeadingSpace(leadingSpace);
return token->type;
token->setHasLeadingSpace(mContext.leadingSpace);
mContext.leadingSpace = false;
mContext.lineStart = token->type == '\n';
}
bool Lexer::initLexer()
bool Tokenizer::initScanner()
{
if ((mHandle == NULL) && yylex_init_extra(&mContext, &mHandle))
return false;
......@@ -178,7 +201,7 @@ bool Lexer::initLexer()
return true;
}
void Lexer::destroyLexer()
void Tokenizer::destroyScanner()
{
if (mHandle == NULL)
return;
......
#!/bin/bash
# Copyright (c) 2010 The ANGLE Project Authors. All rights reserved.
# Copyright (c) 2012 The ANGLE Project Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# Generates GLSL ES preprocessor - pp_lex.cpp, pp_tab.h, and pp_tab.cpp
# Generates various components of GLSL ES preprocessor.
run_flex()
{
input_file=$script_dir/$1.l
output_source=$script_dir/$1_lex.cpp
input_file=$script_dir/$1
output_source=$script_dir/$2
flex --noline --nounistd --outfile=$output_source $input_file
}
run_bison()
{
input_file=$script_dir/$1.y
output_header=$script_dir/$1_tab.h
output_source=$script_dir/$1_tab.cpp
bison --no-lines --skeleton=yacc.c --defines=$output_header --output=$output_source $input_file
input_file=$script_dir/$1
output_source=$script_dir/$2
bison --no-lines --skeleton=yacc.c --output=$output_source $input_file
}
script_dir=$(dirname $0)
# Generate preprocessor
run_flex pp
run_bison pp
run_flex Tokenizer.l Tokenizer.cpp
run_bison ExpressionParser.y ExpressionParser.cpp
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment