Commit d7d42395 by Geoff Lang Committed by Commit Bot

Format all of ANGLE's python code.

BUG=angleproject:3421 Change-Id: I1d7282ac513c046de5d8ed87f7789290780d30a6 Reviewed-on: https://chromium-review.googlesource.com/c/angle/angle/+/1595440Reviewed-by: 's avatarJamie Madill <jmadill@chromium.org> Commit-Queue: Geoff Lang <geofflang@chromium.org>
parent 8ba78da0
[style] [style]
based_on_style = chromium based_on_style = chromium
column_limit = 99 column_limit = 99
indent_width = 4
# Copyright 2019 The ANGLE Project Authors. All rights reserved. # Copyright 2019 The ANGLE Project Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be # Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file. # found in the LICENSE file.
"""Top-level presubmit script for code generation. """Top-level presubmit script for code generation.
See http://dev.chromium.org/developers/how-tos/depottools/presubmit-scripts See http://dev.chromium.org/developers/how-tos/depottools/presubmit-scripts
...@@ -10,11 +9,9 @@ for more details on the presubmit API built into depot_tools. ...@@ -10,11 +9,9 @@ for more details on the presubmit API built into depot_tools.
from subprocess import call from subprocess import call
# Fragment of a regular expression that matches C++ and Objective-C++ implementation files. # Fragment of a regular expression that matches C++ and Objective-C++ implementation files.
_IMPLEMENTATION_EXTENSIONS = r'\.(cc|cpp|cxx|mm)$' _IMPLEMENTATION_EXTENSIONS = r'\.(cc|cpp|cxx|mm)$'
# Fragment of a regular expression that matches C++ and Objective-C++ header files. # Fragment of a regular expression that matches C++ and Objective-C++ header files.
_HEADER_EXTENSIONS = r'\.(h|hpp|hxx)$' _HEADER_EXTENSIONS = r'\.(h|hpp|hxx)$'
...@@ -23,8 +20,10 @@ def _CheckCodeGeneration(input_api, output_api): ...@@ -23,8 +20,10 @@ def _CheckCodeGeneration(input_api, output_api):
class Msg(output_api.PresubmitError): class Msg(output_api.PresubmitError):
"""Specialized error message""" """Specialized error message"""
def __init__(self, message): def __init__(self, message):
super(output_api.PresubmitError, self).__init__(message, super(output_api.PresubmitError, self).__init__(
message,
long_text='Please ensure your ANGLE repositiory is synced to tip-of-tree\n' long_text='Please ensure your ANGLE repositiory is synced to tip-of-tree\n'
'and you have an up-to-date checkout of all ANGLE dependencies.\n' 'and you have an up-to-date checkout of all ANGLE dependencies.\n'
'If you are using ANGLE inside Chromium you may need to bootstrap ANGLE \n' 'If you are using ANGLE inside Chromium you may need to bootstrap ANGLE \n'
...@@ -34,11 +33,7 @@ def _CheckCodeGeneration(input_api, output_api): ...@@ -34,11 +33,7 @@ def _CheckCodeGeneration(input_api, output_api):
'scripts/run_code_generation.py') 'scripts/run_code_generation.py')
cmd_name = 'run_code_generation' cmd_name = 'run_code_generation'
cmd = [input_api.python_executable, code_gen_path, '--verify-no-dirty'] cmd = [input_api.python_executable, code_gen_path, '--verify-no-dirty']
test_cmd = input_api.Command( test_cmd = input_api.Command(name=cmd_name, cmd=cmd, kwargs={}, message=Msg)
name=cmd_name,
cmd=cmd,
kwargs={},
message=Msg)
if input_api.verbose: if input_api.verbose:
print('Running ' + cmd_name) print('Running ' + cmd_name)
return input_api.RunTests([test_cmd]) return input_api.RunTests([test_cmd])
...@@ -52,8 +47,7 @@ def _CheckNewHeaderWithoutGnChange(input_api, output_api): ...@@ -52,8 +47,7 @@ def _CheckNewHeaderWithoutGnChange(input_api, output_api):
""" """
def headers(f): def headers(f):
return input_api.FilterSourceFile( return input_api.FilterSourceFile(f, white_list=(r'.+%s' % _HEADER_EXTENSIONS,))
f, white_list=(r'.+%s' % _HEADER_EXTENSIONS, ))
new_headers = [] new_headers = []
for f in input_api.AffectedSourceFiles(headers): for f in input_api.AffectedSourceFiles(headers):
...@@ -62,7 +56,7 @@ def _CheckNewHeaderWithoutGnChange(input_api, output_api): ...@@ -62,7 +56,7 @@ def _CheckNewHeaderWithoutGnChange(input_api, output_api):
new_headers.append(f.LocalPath()) new_headers.append(f.LocalPath())
def gn_files(f): def gn_files(f):
return input_api.FilterSourceFile(f, white_list=(r'.+\.gn', )) return input_api.FilterSourceFile(f, white_list=(r'.+\.gn',))
all_gn_changed_contents = '' all_gn_changed_contents = ''
for f in input_api.AffectedSourceFiles(gn_files): for f in input_api.AffectedSourceFiles(gn_files):
...@@ -76,35 +70,32 @@ def _CheckNewHeaderWithoutGnChange(input_api, output_api): ...@@ -76,35 +70,32 @@ def _CheckNewHeaderWithoutGnChange(input_api, output_api):
problems.append(header) problems.append(header)
if problems: if problems:
return [output_api.PresubmitPromptWarning( return [
'Missing GN changes for new header files', items=sorted(problems), output_api.PresubmitPromptWarning(
'Missing GN changes for new header files',
items=sorted(problems),
long_text='Please double check whether newly added header files need ' long_text='Please double check whether newly added header files need '
'corresponding changes in gn or gni files.\nThis checking is only a ' 'corresponding changes in gn or gni files.\nThis checking is only a '
'heuristic. Run build/check_gn_headers.py to be precise.\n' 'heuristic. Run build/check_gn_headers.py to be precise.\n'
'Read https://crbug.com/661774 for more info.')] 'Read https://crbug.com/661774 for more info.')
]
return [] return []
def CheckChangeOnUpload(input_api, output_api): def CheckChangeOnUpload(input_api, output_api):
results = [] results = []
results.extend(_CheckCodeGeneration(input_api, output_api)) results.extend(_CheckCodeGeneration(input_api, output_api))
results.extend(input_api.canned_checks.CheckChangeHasBugField( results.extend(input_api.canned_checks.CheckChangeHasBugField(input_api, output_api))
input_api, output_api)) results.extend(input_api.canned_checks.CheckChangeHasDescription(input_api, output_api))
results.extend(input_api.canned_checks.CheckChangeHasDescription(
input_api, output_api))
results.extend(_CheckNewHeaderWithoutGnChange(input_api, output_api)) results.extend(_CheckNewHeaderWithoutGnChange(input_api, output_api))
results.extend( results.extend(input_api.canned_checks.CheckPatchFormatted(input_api, output_api))
input_api.canned_checks.CheckPatchFormatted(input_api, output_api))
return results return results
def CheckChangeOnCommit(input_api, output_api): def CheckChangeOnCommit(input_api, output_api):
results = [] results = []
results.extend(_CheckCodeGeneration(input_api, output_api)) results.extend(_CheckCodeGeneration(input_api, output_api))
results.extend( results.extend(input_api.canned_checks.CheckPatchFormatted(input_api, output_api))
input_api.canned_checks.CheckPatchFormatted(input_api, output_api)) results.extend(input_api.canned_checks.CheckChangeHasBugField(input_api, output_api))
results.extend(input_api.canned_checks.CheckChangeHasBugField( results.extend(input_api.canned_checks.CheckChangeHasDescription(input_api, output_api))
input_api, output_api))
results.extend(input_api.canned_checks.CheckChangeHasDescription(
input_api, output_api))
return results return results
...@@ -16,22 +16,15 @@ import sys ...@@ -16,22 +16,15 @@ import sys
def main(): def main():
parser = argparse.ArgumentParser(description=__doc__) parser = argparse.ArgumentParser(description=__doc__)
parser.add_argument( parser.add_argument(
'--objcopy', '--objcopy', required=True, help='The objcopy binary to run', metavar='PATH')
required=True, parser.add_argument('--nm', required=True, help='The nm binary to run', metavar='PATH')
help='The objcopy binary to run',
metavar='PATH')
parser.add_argument(
'--nm', required=True, help='The nm binary to run', metavar='PATH')
parser.add_argument( parser.add_argument(
'--sofile', '--sofile',
required=True, required=True,
help='Shared object file produced by linking command', help='Shared object file produced by linking command',
metavar='FILE') metavar='FILE')
parser.add_argument( parser.add_argument(
'--output', '--output', required=True, help='Final output shared object file', metavar='FILE')
required=True,
help='Final output shared object file',
metavar='FILE')
parser.add_argument( parser.add_argument(
'--unstrippedsofile', '--unstrippedsofile',
required=True, required=True,
...@@ -48,20 +41,16 @@ def main(): ...@@ -48,20 +41,16 @@ def main():
objcopy_cmd.append(args.output + '.debug') objcopy_cmd.append(args.output + '.debug')
result = subprocess.call(objcopy_cmd) result = subprocess.call(objcopy_cmd)
nm_cmd = subprocess.Popen( nm_cmd = subprocess.Popen([args.nm, args.unstrippedsofile, '--format=posix', '--defined-only'],
[args.nm, args.unstrippedsofile, '--format=posix', '--defined-only'],
stdout=subprocess.PIPE) stdout=subprocess.PIPE)
awk_cmd = subprocess.Popen(['awk', '{ print $1}'], awk_cmd = subprocess.Popen(['awk', '{ print $1}'], stdin=nm_cmd.stdout, stdout=subprocess.PIPE)
stdin=nm_cmd.stdout,
stdout=subprocess.PIPE)
dynsym_out = open(args.output + '.dynsyms', 'w') dynsym_out = open(args.output + '.dynsyms', 'w')
sort_cmd = subprocess.Popen(['sort'], stdin=awk_cmd.stdout, stdout=dynsym_out) sort_cmd = subprocess.Popen(['sort'], stdin=awk_cmd.stdout, stdout=dynsym_out)
dynsym_out.close() dynsym_out.close()
nm_cmd = subprocess.Popen( nm_cmd = subprocess.Popen([args.nm, args.unstrippedsofile, '--format=posix', '--defined-only'],
[args.nm, args.unstrippedsofile, '--format=posix', '--defined-only'],
stdout=subprocess.PIPE) stdout=subprocess.PIPE)
awk_cmd = subprocess.Popen( awk_cmd = subprocess.Popen(
...@@ -70,9 +59,7 @@ def main(): ...@@ -70,9 +59,7 @@ def main():
stdout=subprocess.PIPE) stdout=subprocess.PIPE)
funcsyms_out = open(args.output + '.funcsyms', 'w') funcsyms_out = open(args.output + '.funcsyms', 'w')
sort_cmd = subprocess.Popen(['sort'], sort_cmd = subprocess.Popen(['sort'], stdin=awk_cmd.stdout, stdout=funcsyms_out)
stdin=awk_cmd.stdout,
stdout=funcsyms_out)
funcsyms_out.close() funcsyms_out.close()
keep_symbols = open(args.output + '.keep_symbols', 'w') keep_symbols = open(args.output + '.keep_symbols', 'w')
...@@ -86,15 +73,14 @@ def main(): ...@@ -86,15 +73,14 @@ def main():
keep_symbols.close() keep_symbols.close()
objcopy_cmd = [ objcopy_cmd = [
args.objcopy, '--rename-section', '.debug_frame=saved_debug_frame', args.objcopy, '--rename-section', '.debug_frame=saved_debug_frame', args.output + '.debug',
args.output + '.debug', args.output + ".mini_debuginfo" args.output + ".mini_debuginfo"
] ]
subprocess.check_call(objcopy_cmd) subprocess.check_call(objcopy_cmd)
objcopy_cmd = [ objcopy_cmd = [
args.objcopy, '-S', '--remove-section', '.gdb_index', '--remove-section', args.objcopy, '-S', '--remove-section', '.gdb_index', '--remove-section', '.comment',
'.comment', '--keep-symbols=' + args.output + '.keep_symbols', '--keep-symbols=' + args.output + '.keep_symbols', args.output + '.mini_debuginfo'
args.output + '.mini_debuginfo'
] ]
subprocess.check_call(objcopy_cmd) subprocess.check_call(objcopy_cmd)
...@@ -108,16 +94,16 @@ def main(): ...@@ -108,16 +94,16 @@ def main():
subprocess.check_call(xz_cmd) subprocess.check_call(xz_cmd)
objcopy_cmd = [ objcopy_cmd = [
args.objcopy, '--add-section', args.objcopy, '--add-section', '.gnu_debugdata=' + args.output + '.mini_debuginfo.xz',
'.gnu_debugdata=' + args.output + '.mini_debuginfo.xz', args.output args.output
] ]
subprocess.check_call(objcopy_cmd) subprocess.check_call(objcopy_cmd)
# Clean out scratch files # Clean out scratch files
rm_cmd = [ rm_cmd = [
'rm', '-f', args.output + '.dynsyms', args.output + '.funcsyms', 'rm', '-f', args.output + '.dynsyms', args.output + '.funcsyms',
args.output + '.keep_symbols', args.output + '.debug', args.output + '.keep_symbols', args.output + '.debug', args.output + '.mini_debuginfo',
args.output + '.mini_debuginfo', args.output + '.mini_debuginfo.xz' args.output + '.mini_debuginfo.xz'
] ]
result = subprocess.call(rm_cmd) result = subprocess.call(rm_cmd)
......
...@@ -49,12 +49,12 @@ bmp_file.close() ...@@ -49,12 +49,12 @@ bmp_file.close()
# convert to YUV 4:4:4 # convert to YUV 4:4:4
converted_pixels = bytearray(pixels) converted_pixels = bytearray(pixels)
for i in range(0, width * height): for i in range(0, width * height):
R, = struct.unpack("B", pixels[i*3+2]) R, = struct.unpack("B", pixels[i * 3 + 2])
G, = struct.unpack("B", pixels[i*3+1]) G, = struct.unpack("B", pixels[i * 3 + 1])
B, = struct.unpack("B", pixels[i*3]) B, = struct.unpack("B", pixels[i * 3])
converted_pixels[i*3] = ((66*R + 129*G + 25*B + 128) >> 8) + 16 converted_pixels[i * 3] = ((66 * R + 129 * G + 25 * B + 128) >> 8) + 16
converted_pixels[i*3+1] = ((-38*R - 74*G + 112*B + 128) >> 8) + 128 converted_pixels[i * 3 + 1] = ((-38 * R - 74 * G + 112 * B + 128) >> 8) + 128
converted_pixels[i*3+2] = ((112*R - 94*G - 18*B + 128) >> 8) + 128 converted_pixels[i * 3 + 2] = ((112 * R - 94 * G - 18 * B + 128) >> 8) + 128
# downsample to packed UV buffer # downsample to packed UV buffer
uv_buffer = bytearray(width * height / 2) uv_buffer = bytearray(width * height / 2)
......
...@@ -3,7 +3,6 @@ ...@@ -3,7 +3,6 @@
# Copyright 2015 Google Inc. All rights reserved. # Copyright 2015 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be # Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file. # found in the LICENSE file.
"""Generate .gclient file for Angle. """Generate .gclient file for Angle.
Because gclient won't accept "--name ." use a different name then edit. Because gclient won't accept "--name ." use a different name then edit.
...@@ -34,5 +33,6 @@ def main(): ...@@ -34,5 +33,6 @@ def main():
print 'created .gclient' print 'created .gclient'
if __name__ == '__main__': if __name__ == '__main__':
main() main()
...@@ -9,6 +9,7 @@ from __future__ import print_function ...@@ -9,6 +9,7 @@ from __future__ import print_function
import os, shutil, sys import os, shutil, sys
def main(): def main():
if len(sys.argv) != 2: if len(sys.argv) != 2:
print("Usage: %s <path>" % sys.argv[0]) print("Usage: %s <path>" % sys.argv[0])
...@@ -20,5 +21,6 @@ def main(): ...@@ -20,5 +21,6 @@ def main():
print("false") print("false")
sys.exit(0) sys.exit(0)
if __name__ == '__main__': if __name__ == '__main__':
main() main()
...@@ -31,8 +31,7 @@ import sys ...@@ -31,8 +31,7 @@ import sys
def get_json_description(gn_out, target_name): def get_json_description(gn_out, target_name):
try: try:
text_desc = subprocess.check_output( text_desc = subprocess.check_output(['gn', 'desc', '--format=json', gn_out, target_name])
['gn', 'desc', '--format=json', gn_out, target_name])
except subprocess.CalledProcessError as e: except subprocess.CalledProcessError as e:
logging.error("e.retcode = %s" % e.returncode) logging.error("e.retcode = %s" % e.returncode)
logging.error("e.cmd = %s" % e.cmd) logging.error("e.cmd = %s" % e.cmd)
...@@ -40,12 +39,12 @@ def get_json_description(gn_out, target_name): ...@@ -40,12 +39,12 @@ def get_json_description(gn_out, target_name):
try: try:
json_out = json.loads(text_desc) json_out = json.loads(text_desc)
except ValueError: except ValueError:
raise ValueError("Unable to decode JSON\ncmd: %s\noutput:\n%s" % raise ValueError("Unable to decode JSON\ncmd: %s\noutput:\n%s" % (subprocess.list2cmdline(
(subprocess.list2cmdline(['gn', 'desc', '--format=json', ['gn', 'desc', '--format=json', gn_out, target_name]), text_desc))
gn_out, target_name]), text_desc))
return json_out return json_out
def load_json_deps(desc, gn_out, target_name, all_desc, indent=" "): def load_json_deps(desc, gn_out, target_name, all_desc, indent=" "):
"""Extracts dependencies from the given target json description """Extracts dependencies from the given target json description
and recursively extracts json descriptions. and recursively extracts json descriptions.
...@@ -60,12 +59,13 @@ def load_json_deps(desc, gn_out, target_name, all_desc, indent=" "): ...@@ -60,12 +59,13 @@ def load_json_deps(desc, gn_out, target_name, all_desc, indent=" "):
text_descriptions = [] text_descriptions = []
for dep in target.get('deps', []): for dep in target.get('deps', []):
if dep not in all_desc: if dep not in all_desc:
logging.debug("dep: %s%s" % (indent,dep)) logging.debug("dep: %s%s" % (indent, dep))
new_desc = get_json_description(gn_out, dep) new_desc = get_json_description(gn_out, dep)
all_desc[dep] = new_desc[dep] all_desc[dep] = new_desc[dep]
load_json_deps(new_desc, gn_out, dep, all_desc, indent+" ") load_json_deps(new_desc, gn_out, dep, all_desc, indent + " ")
else: else:
logging.debug("dup: %s%s" % (indent,dep)) logging.debug("dup: %s%s" % (indent, dep))
def create_build_description(gn_out, targets): def create_build_description(gn_out, targets):
"""Creates the JSON build description by running GN.""" """Creates the JSON build description by running GN."""
...@@ -89,8 +89,7 @@ def main(): ...@@ -89,8 +89,7 @@ def main():
description='Generate json build information from a GN description.') description='Generate json build information from a GN description.')
parser.add_argument( parser.add_argument(
'--gn_out', '--gn_out',
help= help='GN output config to use (e.g., out/Default or out/Debug.)',
'GN output config to use (e.g., out/Default or out/Debug.)',
default='out/Default', default='out/Default',
) )
parser.add_argument( parser.add_argument(
...@@ -105,7 +104,7 @@ def main(): ...@@ -105,7 +104,7 @@ def main():
args = parser.parse_args() args = parser.parse_args()
desc = create_build_description(args.gn_out, args.targets) desc = create_build_description(args.gn_out, args.targets)
fh = open(args.output,"w") fh = open(args.output, "w")
fh.write(json.dumps(desc, indent=4, sort_keys=True)) fh.write(json.dumps(desc, indent=4, sort_keys=True))
fh.close() fh.close()
......
...@@ -263,39 +263,47 @@ EXPORTS ...@@ -263,39 +263,47 @@ EXPORTS
{exports} {exports}
""" """
def script_relative(path): def script_relative(path):
return os.path.join(os.path.dirname(sys.argv[0]), path) return os.path.join(os.path.dirname(sys.argv[0]), path)
with open(script_relative('entry_point_packed_gl_enums.json')) as f: with open(script_relative('entry_point_packed_gl_enums.json')) as f:
cmd_packed_gl_enums = json.loads(f.read()) cmd_packed_gl_enums = json.loads(f.read())
def format_entry_point_decl(cmd_name, proto, params, is_explicit_context): def format_entry_point_decl(cmd_name, proto, params, is_explicit_context):
comma_if_needed = ", " if len(params) > 0 else "" comma_if_needed = ", " if len(params) > 0 else ""
return template_entry_point_decl.format( return template_entry_point_decl.format(
name = cmd_name[2:], name=cmd_name[2:],
return_type = proto[:-len(cmd_name)], return_type=proto[:-len(cmd_name)],
params = ", ".join(params), params=", ".join(params),
comma_if_needed = comma_if_needed, comma_if_needed=comma_if_needed,
explicit_context_suffix = "ContextANGLE" if is_explicit_context else "", explicit_context_suffix="ContextANGLE" if is_explicit_context else "",
explicit_context_param = "GLeglContext ctx" if is_explicit_context else "", explicit_context_param="GLeglContext ctx" if is_explicit_context else "",
explicit_context_comma = ", " if is_explicit_context and len(params) > 0 else "") explicit_context_comma=", " if is_explicit_context and len(params) > 0 else "")
def type_name_sep_index(param): def type_name_sep_index(param):
space = param.rfind(" ") space = param.rfind(" ")
pointer = param.rfind("*") pointer = param.rfind("*")
return max(space, pointer) return max(space, pointer)
def just_the_type(param): def just_the_type(param):
if "*" in param: if "*" in param:
return param[:type_name_sep_index(param) + 1] return param[:type_name_sep_index(param) + 1]
return param[:type_name_sep_index(param)] return param[:type_name_sep_index(param)]
def just_the_name(param): def just_the_name(param):
return param[type_name_sep_index(param)+1:] return param[type_name_sep_index(param) + 1:]
def make_param(param_type, param_name): def make_param(param_type, param_name):
return param_type + " " + param_name return param_type + " " + param_name
def just_the_type_packed(param, entry): def just_the_type_packed(param, entry):
name = just_the_name(param) name = just_the_name(param)
if entry.has_key(name): if entry.has_key(name):
...@@ -303,6 +311,7 @@ def just_the_type_packed(param, entry): ...@@ -303,6 +311,7 @@ def just_the_type_packed(param, entry):
else: else:
return just_the_type(param) return just_the_type(param)
def just_the_name_packed(param, reserved_set): def just_the_name_packed(param, reserved_set):
name = just_the_name(param) name = just_the_name(param)
if name in reserved_set: if name in reserved_set:
...@@ -310,6 +319,7 @@ def just_the_name_packed(param, reserved_set): ...@@ -310,6 +319,7 @@ def just_the_name_packed(param, reserved_set):
else: else:
return name return name
def param_print_argument(param): def param_print_argument(param):
name_only = just_the_name(param) name_only = just_the_name(param)
type_only = just_the_type(param) type_only = just_the_type(param)
...@@ -325,6 +335,7 @@ def param_print_argument(param): ...@@ -325,6 +335,7 @@ def param_print_argument(param):
return name_only return name_only
def param_format_string(param): def param_format_string(param):
if "*" in param: if "*" in param:
return param + " = 0x%016\" PRIxPTR \"" return param + " = 0x%016\" PRIxPTR \""
...@@ -335,11 +346,13 @@ def param_format_string(param): ...@@ -335,11 +346,13 @@ def param_format_string(param):
return param + " = " + format_dict[type_only] return param + " = " + format_dict[type_only]
def default_return_value(cmd_name, return_type): def default_return_value(cmd_name, return_type):
if return_type == "void": if return_type == "void":
return "" return ""
return "GetDefaultReturnValue<EntryPoint::" + cmd_name[2:] + ", " + return_type + ">()" return "GetDefaultReturnValue<EntryPoint::" + cmd_name[2:] + ", " + return_type + ">()"
def get_context_getter_function(cmd_name, is_explicit_context): def get_context_getter_function(cmd_name, is_explicit_context):
if is_explicit_context: if is_explicit_context:
return "static_cast<gl::Context *>(ctx)" return "static_cast<gl::Context *>(ctx)"
...@@ -357,6 +370,7 @@ def get_context_getter_function(cmd_name, is_explicit_context): ...@@ -357,6 +370,7 @@ def get_context_getter_function(cmd_name, is_explicit_context):
return "GetGlobalContext()" return "GetGlobalContext()"
return "GetValidGlobalContext()" return "GetValidGlobalContext()"
def format_entry_point_def(cmd_name, proto, params, is_explicit_context): def format_entry_point_def(cmd_name, proto, params, is_explicit_context):
packed_gl_enums = cmd_packed_gl_enums.get(cmd_name, {}) packed_gl_enums = cmd_packed_gl_enums.get(cmd_name, {})
internal_params = [just_the_name_packed(param, packed_gl_enums) for param in params] internal_params = [just_the_name_packed(param, packed_gl_enums) for param in params]
...@@ -366,8 +380,10 @@ def format_entry_point_def(cmd_name, proto, params, is_explicit_context): ...@@ -366,8 +380,10 @@ def format_entry_point_def(cmd_name, proto, params, is_explicit_context):
if name in packed_gl_enums: if name in packed_gl_enums:
internal_name = name + "Packed" internal_name = name + "Packed"
internal_type = packed_gl_enums[name] internal_type = packed_gl_enums[name]
packed_gl_enum_conversions += ["\n " + internal_type + " " + internal_name +" = FromGLenum<" + packed_gl_enum_conversions += [
internal_type + ">(" + name + ");"] "\n " + internal_type + " " + internal_name + " = FromGLenum<" +
internal_type + ">(" + name + ");"
]
pass_params = [param_print_argument(param) for param in params] pass_params = [param_print_argument(param) for param in params]
format_params = [param_format_string(param) for param in params] format_params = [param_format_string(param) for param in params]
...@@ -381,30 +397,36 @@ def format_entry_point_def(cmd_name, proto, params, is_explicit_context): ...@@ -381,30 +397,36 @@ def format_entry_point_def(cmd_name, proto, params, is_explicit_context):
name_lower_no_suffix = name_lower_no_suffix[0:-len(suffix)] name_lower_no_suffix = name_lower_no_suffix[0:-len(suffix)]
return template_entry_point_def.format( return template_entry_point_def.format(
name = cmd_name[2:], name=cmd_name[2:],
name_lower_no_suffix = name_lower_no_suffix, name_lower_no_suffix=name_lower_no_suffix,
return_type = return_type, return_type=return_type,
params = ", ".join(params), params=", ".join(params),
internal_params = ", ".join(internal_params), internal_params=", ".join(internal_params),
packed_gl_enum_conversions = "".join(packed_gl_enum_conversions), packed_gl_enum_conversions="".join(packed_gl_enum_conversions),
pass_params = ", ".join(pass_params), pass_params=", ".join(pass_params),
comma_if_needed = ", " if len(params) > 0 else "", comma_if_needed=", " if len(params) > 0 else "",
validate_params = ", ".join(["context"] + internal_params), validate_params=", ".join(["context"] + internal_params),
format_params = ", ".join(format_params), format_params=", ".join(format_params),
return_if_needed = "" if default_return == "" else "return ", return_if_needed="" if default_return == "" else "return ",
default_return_if_needed = "" if default_return == "" else "\n return " + default_return + ";\n", default_return_if_needed=""
context_getter = get_context_getter_function(cmd_name, is_explicit_context), if default_return == "" else "\n return " + default_return + ";\n",
event_comment = event_comment, context_getter=get_context_getter_function(cmd_name, is_explicit_context),
explicit_context_suffix = "ContextANGLE" if is_explicit_context else "", event_comment=event_comment,
explicit_context_param = "GLeglContext ctx" if is_explicit_context else "", explicit_context_suffix="ContextANGLE" if is_explicit_context else "",
explicit_context_comma = ", " if is_explicit_context and len(params) > 0 else "", explicit_context_param="GLeglContext ctx" if is_explicit_context else "",
assert_explicit_context = "\nASSERT(context == GetValidGlobalContext());" explicit_context_comma=", " if is_explicit_context and len(params) > 0 else "",
assert_explicit_context="\nASSERT(context == GetValidGlobalContext());"
if is_explicit_context else "") if is_explicit_context else "")
def get_internal_params(cmd_name, params): def get_internal_params(cmd_name, params):
packed_gl_enums = cmd_packed_gl_enums.get(cmd_name, {}) packed_gl_enums = cmd_packed_gl_enums.get(cmd_name, {})
return ", ".join([make_param(just_the_type_packed(param, packed_gl_enums), return ", ".join([
just_the_name_packed(param, packed_gl_enums)) for param in params]) make_param(
just_the_type_packed(param, packed_gl_enums),
just_the_name_packed(param, packed_gl_enums)) for param in params
])
def format_context_gles_decl(cmd_name, proto, params): def format_context_gles_decl(cmd_name, proto, params):
internal_params = get_internal_params(cmd_name, params) internal_params = get_internal_params(cmd_name, params)
...@@ -417,31 +439,35 @@ def format_context_gles_decl(cmd_name, proto, params): ...@@ -417,31 +439,35 @@ def format_context_gles_decl(cmd_name, proto, params):
name_lower_no_suffix = name_lower_no_suffix[0:-len(suffix)] name_lower_no_suffix = name_lower_no_suffix[0:-len(suffix)]
return context_gles_decl.format( return context_gles_decl.format(
return_type = return_type, return_type=return_type,
name_lower_no_suffix = name_lower_no_suffix, name_lower_no_suffix=name_lower_no_suffix,
internal_params = internal_params) internal_params=internal_params)
def format_libgles_entry_point_def(cmd_name, proto, params, is_explicit_context): def format_libgles_entry_point_def(cmd_name, proto, params, is_explicit_context):
internal_params = [just_the_name(param) for param in params] internal_params = [just_the_name(param) for param in params]
return_type = proto[:-len(cmd_name)] return_type = proto[:-len(cmd_name)]
return libgles_entry_point_def.format( return libgles_entry_point_def.format(
name = cmd_name[2:], name=cmd_name[2:],
return_type = return_type, return_type=return_type,
params = ", ".join(params), params=", ".join(params),
internal_params = ", ".join(internal_params), internal_params=", ".join(internal_params),
explicit_context_suffix = "ContextANGLE" if is_explicit_context else "", explicit_context_suffix="ContextANGLE" if is_explicit_context else "",
explicit_context_param = "GLeglContext ctx" if is_explicit_context else "", explicit_context_param="GLeglContext ctx" if is_explicit_context else "",
explicit_context_comma = ", " if is_explicit_context and len(params) > 0 else "", explicit_context_comma=", " if is_explicit_context and len(params) > 0 else "",
explicit_context_internal_param = "ctx" if is_explicit_context else "") explicit_context_internal_param="ctx" if is_explicit_context else "")
def format_validation_proto(cmd_name, params): def format_validation_proto(cmd_name, params):
internal_params = get_internal_params(cmd_name, ["Context *context"] + params) internal_params = get_internal_params(cmd_name, ["Context *context"] + params)
return template_validation_proto % (cmd_name[2:], internal_params) return template_validation_proto % (cmd_name[2:], internal_params)
def path_to(folder, file): def path_to(folder, file):
return os.path.join(script_relative(".."), "src", folder, file) return os.path.join(script_relative(".."), "src", folder, file)
def get_entry_points(all_commands, gles_commands, is_explicit_context): def get_entry_points(all_commands, gles_commands, is_explicit_context):
decls = [] decls = []
defs = [] defs = []
...@@ -457,17 +483,18 @@ def get_entry_points(all_commands, gles_commands, is_explicit_context): ...@@ -457,17 +483,18 @@ def get_entry_points(all_commands, gles_commands, is_explicit_context):
param_text = ["".join(param.itertext()) for param in command.findall('param')] param_text = ["".join(param.itertext()) for param in command.findall('param')]
proto_text = "".join(proto.itertext()) proto_text = "".join(proto.itertext())
decls.append(format_entry_point_decl(cmd_name, proto_text, param_text, decls.append(
is_explicit_context)) format_entry_point_decl(cmd_name, proto_text, param_text, is_explicit_context))
defs.append(format_entry_point_def(cmd_name, proto_text, param_text, is_explicit_context)) defs.append(format_entry_point_def(cmd_name, proto_text, param_text, is_explicit_context))
export_defs.append(format_libgles_entry_point_def(cmd_name, proto_text, param_text, export_defs.append(
is_explicit_context)) format_libgles_entry_point_def(cmd_name, proto_text, param_text, is_explicit_context))
validation_protos.append(format_validation_proto(cmd_name, param_text)) validation_protos.append(format_validation_proto(cmd_name, param_text))
return decls, defs, export_defs, validation_protos return decls, defs, export_defs, validation_protos
def get_gles1_decls(all_commands, gles_commands): def get_gles1_decls(all_commands, gles_commands):
decls = [] decls = []
for command in all_commands: for command in all_commands:
...@@ -486,12 +513,13 @@ def get_gles1_decls(all_commands, gles_commands): ...@@ -486,12 +513,13 @@ def get_gles1_decls(all_commands, gles_commands):
return decls return decls
def get_glext_decls(all_commands, gles_commands, version, is_explicit_context): def get_glext_decls(all_commands, gles_commands, version, is_explicit_context):
glext_ptrs = [] glext_ptrs = []
glext_protos = [] glext_protos = []
is_gles1 = False is_gles1 = False
if(version == ""): if (version == ""):
is_gles1 = True is_gles1 = True
for command in all_commands: for command in all_commands:
...@@ -516,25 +544,25 @@ def get_glext_decls(all_commands, gles_commands, version, is_explicit_context): ...@@ -516,25 +544,25 @@ def get_glext_decls(all_commands, gles_commands, version, is_explicit_context):
"explicit_context_comma": ", " if is_explicit_context and len(params) > 0 else "", "explicit_context_comma": ", " if is_explicit_context and len(params) > 0 else "",
"explicit_context_suffix": "ContextANGLE" if is_explicit_context else "", "explicit_context_suffix": "ContextANGLE" if is_explicit_context else "",
"explicit_context_suffix_upper": "CONTEXTANGLE" if is_explicit_context else "", "explicit_context_suffix_upper": "CONTEXTANGLE" if is_explicit_context else "",
"explicit_context_param": "GLeglContext ctx" if is_explicit_context else ""} "explicit_context_param": "GLeglContext ctx" if is_explicit_context else ""
}
glext_ptrs.append(template_glext_function_pointer.format( glext_ptrs.append(template_glext_function_pointer.format(**format_params))
**format_params)) glext_protos.append(template_glext_function_prototype.format(**format_params))
glext_protos.append(template_glext_function_prototype.format(
**format_params))
return glext_ptrs, glext_protos return glext_ptrs, glext_protos
def write_file(annotation, comment, template, entry_points, suffix, includes, file): def write_file(annotation, comment, template, entry_points, suffix, includes, file):
content = template.format( content = template.format(
script_name = os.path.basename(sys.argv[0]), script_name=os.path.basename(sys.argv[0]),
data_source_name = file, data_source_name=file,
year = date.today().year, year=date.today().year,
annotation_lower = annotation.lower(), annotation_lower=annotation.lower(),
annotation_upper = annotation.upper(), annotation_upper=annotation.upper(),
comment = comment, comment=comment,
includes = includes, includes=includes,
entry_points = entry_points) entry_points=entry_points)
path = path_to("libGLESv2", "entry_points_gles_{}_autogen.{}".format( path = path_to("libGLESv2", "entry_points_gles_{}_autogen.{}".format(
annotation.lower(), suffix)) annotation.lower(), suffix))
...@@ -543,13 +571,14 @@ def write_file(annotation, comment, template, entry_points, suffix, includes, fi ...@@ -543,13 +571,14 @@ def write_file(annotation, comment, template, entry_points, suffix, includes, fi
out.write(content) out.write(content)
out.close() out.close()
def write_export_files(entry_points, includes): def write_export_files(entry_points, includes):
content = template_libgles_entry_point_source.format( content = template_libgles_entry_point_source.format(
script_name = os.path.basename(sys.argv[0]), script_name=os.path.basename(sys.argv[0]),
data_source_name = "gl.xml and gl_angle_ext.xml", data_source_name="gl.xml and gl_angle_ext.xml",
year = date.today().year, year=date.today().year,
includes = includes, includes=includes,
entry_points = entry_points) entry_points=entry_points)
path = path_to("libGLESv2", "libGLESv2_autogen.cpp") path = path_to("libGLESv2", "libGLESv2_autogen.cpp")
...@@ -557,6 +586,7 @@ def write_export_files(entry_points, includes): ...@@ -557,6 +586,7 @@ def write_export_files(entry_points, includes):
out.write(content) out.write(content)
out.close() out.close()
def write_context_api_decls(annotation, template, decls): def write_context_api_decls(annotation, template, decls):
interface_lines = [] interface_lines = []
...@@ -568,12 +598,12 @@ def write_context_api_decls(annotation, template, decls): ...@@ -568,12 +598,12 @@ def write_context_api_decls(annotation, template, decls):
interface_lines.extend(decls['exts'][extname]) interface_lines.extend(decls['exts'][extname])
content = template.format( content = template.format(
annotation_lower = annotation.lower(), annotation_lower=annotation.lower(),
annotation_upper = annotation.upper(), annotation_upper=annotation.upper(),
script_name = os.path.basename(sys.argv[0]), script_name=os.path.basename(sys.argv[0]),
data_source_name = "gl.xml", data_source_name="gl.xml",
year = date.today().year, year=date.today().year,
interface = "\n".join(interface_lines)) interface="\n".join(interface_lines))
path = path_to("libANGLE", "Context_gles_%s_autogen.h" % annotation.lower()) path = path_to("libANGLE", "Context_gles_%s_autogen.h" % annotation.lower())
...@@ -581,32 +611,35 @@ def write_context_api_decls(annotation, template, decls): ...@@ -581,32 +611,35 @@ def write_context_api_decls(annotation, template, decls):
out.write(content) out.write(content)
out.close() out.close()
def write_glext_explicit_context_inc(version, ptrs, protos):
    """Write the gl<version>ext_explicit_context_autogen.inc include file.

    version: GLES version suffix as a string ("", "2", "3", "31", ...).
    ptrs:    preformatted block of function-pointer typedef text.
    protos:  preformatted block of function prototype text.
    """
    # GLES 3.1 headers live in the "GLES3" include folder, not "GLES31".
    folder_version = version if version != "31" else "3"

    content = template_glext_explicit_context_inc.format(
        script_name=os.path.basename(sys.argv[0]),
        data_source_name="gl.xml and gl_angle_ext.xml",
        year=date.today().year,
        version=version,
        function_pointers=ptrs,
        function_prototypes=protos)

    path = os.path.join(
        script_relative(".."), "include", "GLES{}".format(folder_version),
        "gl{}ext_explicit_context_autogen.inc".format(version))

    # The context manager closes the file; the explicit close() was redundant.
    with open(path, "w") as out:
        out.write(content)
def write_validation_header(annotation, comment, protos): def write_validation_header(annotation, comment, protos):
content = template_validation_header.format( content = template_validation_header.format(
script_name = os.path.basename(sys.argv[0]), script_name=os.path.basename(sys.argv[0]),
data_source_name = "gl.xml and gl_angle_ext.xml", data_source_name="gl.xml and gl_angle_ext.xml",
year = date.today().year, year=date.today().year,
annotation = annotation, annotation=annotation,
comment = comment, comment=comment,
prototypes = "\n".join(protos)) prototypes="\n".join(protos))
path = path_to("libANGLE", "validationES%s_autogen.h" % annotation) path = path_to("libANGLE", "validationES%s_autogen.h" % annotation)
...@@ -614,14 +647,15 @@ def write_validation_header(annotation, comment, protos): ...@@ -614,14 +647,15 @@ def write_validation_header(annotation, comment, protos):
out.write(content) out.write(content)
out.close() out.close()
def write_windows_def_file(data_source_name, lib, exports): def write_windows_def_file(data_source_name, lib, exports):
content = template_windows_def_file.format( content = template_windows_def_file.format(
script_name = os.path.basename(sys.argv[0]), script_name=os.path.basename(sys.argv[0]),
data_source_name = data_source_name, data_source_name=data_source_name,
exports = "\n".join(exports), exports="\n".join(exports),
year = date.today().year, year=date.today().year,
lib = lib) lib=lib)
path = path_to(lib, "%s_autogen.def" % lib) path = path_to(lib, "%s_autogen.def" % lib)
...@@ -629,12 +663,14 @@ def write_windows_def_file(data_source_name, lib, exports): ...@@ -629,12 +663,14 @@ def write_windows_def_file(data_source_name, lib, exports):
out.write(content) out.write(content)
out.close() out.close()
def get_exports(commands, fmt=None):
    """Return the sorted command names as indented .def-file export lines.

    fmt, when provided, maps each command name to its export string.
    """
    ordered = sorted(commands)
    # Preserve the original truthiness check on fmt.
    render = fmt if fmt else (lambda cmd: cmd)
    return [" %s" % render(cmd) for cmd in ordered]
# Get EGL exports # Get EGL exports
def get_egl_exports(): def get_egl_exports():
...@@ -671,6 +707,7 @@ def get_egl_exports(): ...@@ -671,6 +707,7 @@ def get_egl_exports():
return exports return exports
def main(): def main():
# auto_script parameters. # auto_script parameters.
...@@ -762,20 +799,19 @@ def main(): ...@@ -762,20 +799,19 @@ def main():
if major_version == 3 and minor_version == 1: if major_version == 3 and minor_version == 1:
header_includes += "\n#include \"common/platform.h\"\n" header_includes += "\n#include \"common/platform.h\"\n"
source_includes = template_sources_includes.format( source_includes = template_sources_includes.format(annotation.lower(), major_version,
annotation.lower(), major_version, minor_if_not_zero) minor_if_not_zero)
write_file(annotation, comment, template_entry_point_header, write_file(annotation, comment, template_entry_point_header, "\n".join(decls), "h",
"\n".join(decls), "h", header_includes, "gl.xml") header_includes, "gl.xml")
write_file(annotation, comment, template_entry_point_source, write_file(annotation, comment, template_entry_point_source, "\n".join(defs), "cpp",
"\n".join(defs), "cpp", source_includes, "gl.xml") source_includes, "gl.xml")
if is_gles1: if is_gles1:
gles1decls['core'] = get_gles1_decls(all_commands, gles_commands) gles1decls['core'] = get_gles1_decls(all_commands, gles_commands)
validation_annotation = "%s%s" % (major_version, minor_if_not_zero) validation_annotation = "%s%s" % (major_version, minor_if_not_zero)
write_validation_header(validation_annotation, comment, validation_protos) write_validation_header(validation_annotation, comment, validation_protos)
# After we finish with the main entry points, we process the extensions. # After we finish with the main entry points, we process the extensions.
extension_defs = [] extension_defs = []
extension_decls = [] extension_decls = []
...@@ -848,27 +884,27 @@ def main(): ...@@ -848,27 +884,27 @@ def main():
version = "{}{}".format(major_if_not_one, minor_if_not_zero) version = "{}{}".format(major_if_not_one, minor_if_not_zero)
glext_ptrs, glext_protos = get_glext_decls(all_commands, glext_ptrs, glext_protos = get_glext_decls(all_commands,
xml.all_cmd_names.get_commands(annotation), version, True) xml.all_cmd_names.get_commands(annotation),
version, True)
glext_ext_ptrs = [] glext_ext_ptrs = []
glext_ext_protos = [] glext_ext_protos = []
# Append extensions for 1.0 and 2.0 # Append extensions for 1.0 and 2.0
if(annotation == "1_0"): if (annotation == "1_0"):
glext_ext_ptrs, glext_ext_protos = get_glext_decls(all_commands, glext_ext_ptrs, glext_ext_protos = get_glext_decls(
xml.all_cmd_names.get_commands("glext"), version, True) all_commands, xml.all_cmd_names.get_commands("glext"), version, True)
elif(annotation == "2_0"): elif (annotation == "2_0"):
glext_ext_ptrs, glext_ext_protos = get_glext_decls(all_commands, glext_ext_ptrs, glext_ext_protos = get_glext_decls(
xml.all_cmd_names.get_commands("gl2ext"), version, True) all_commands, xml.all_cmd_names.get_commands("gl2ext"), version, True)
glext_ptrs += glext_ext_ptrs glext_ptrs += glext_ext_ptrs
glext_protos += glext_ext_protos glext_protos += glext_ext_protos
write_glext_explicit_context_inc(version, "\n".join(glext_ptrs), "\n".join(glext_protos)) write_glext_explicit_context_inc(version, "\n".join(glext_ptrs),
"\n".join(glext_protos))
header_includes = template_header_includes.format( header_includes = template_header_includes.format(major="", minor="")
major="", minor="")
header_includes += """ header_includes += """
#include <GLES/glext.h> #include <GLES/glext.h>
#include <GLES2/gl2.h> #include <GLES2/gl2.h>
...@@ -883,24 +919,23 @@ def main(): ...@@ -883,24 +919,23 @@ def main():
#include "libANGLE/validationES31.h" #include "libANGLE/validationES31.h"
""" """
write_file("ext", "extension", template_entry_point_header, write_file("ext", "extension", template_entry_point_header, "\n".join(
"\n".join([item for item in extension_decls]), "h", header_includes, [item for item in extension_decls]), "h", header_includes, "gl.xml and gl_angle_ext.xml")
"gl.xml and gl_angle_ext.xml") write_file("ext", "extension", template_entry_point_source, "\n".join(
write_file("ext", "extension", template_entry_point_source, [item for item in extension_defs]), "cpp", source_includes, "gl.xml and gl_angle_ext.xml")
"\n".join([item for item in extension_defs]), "cpp", source_includes,
"gl.xml and gl_angle_ext.xml")
write_validation_header("EXT", "extension", ext_validation_protos) write_validation_header("EXT", "extension", ext_validation_protos)
write_context_api_decls("1_0", context_gles_header, gles1decls) write_context_api_decls("1_0", context_gles_header, gles1decls)
sorted_cmd_names = ["Invalid"] + [cmd[2:] for cmd in sorted(xml.all_cmd_names.get_all_commands())] sorted_cmd_names = ["Invalid"
] + [cmd[2:] for cmd in sorted(xml.all_cmd_names.get_all_commands())]
entry_points_enum = template_entry_points_enum_header.format( entry_points_enum = template_entry_points_enum_header.format(
script_name = os.path.basename(sys.argv[0]), script_name=os.path.basename(sys.argv[0]),
data_source_name = "gl.xml and gl_angle_ext.xml", data_source_name="gl.xml and gl_angle_ext.xml",
year = date.today().year, year=date.today().year,
entry_points_list = ",\n".join([" " + cmd for cmd in sorted_cmd_names])) entry_points_list=",\n".join([" " + cmd for cmd in sorted_cmd_names]))
entry_points_enum_header_path = path_to("libGLESv2", "entry_points_enum_autogen.h") entry_points_enum_header_path = path_to("libGLESv2", "entry_points_enum_autogen.h")
with open(entry_points_enum_header_path, "w") as out: with open(entry_points_enum_header_path, "w") as out:
...@@ -926,5 +961,6 @@ def main(): ...@@ -926,5 +961,6 @@ def main():
everything = "Khronos and ANGLE XML files" everything = "Khronos and ANGLE XML files"
write_windows_def_file(everything, "libGLESv2", libgles_ep_exports) write_windows_def_file(everything, "libGLESv2", libgles_ep_exports)
if __name__ == '__main__': if __name__ == '__main__':
sys.exit(main()) sys.exit(main())
...@@ -12,32 +12,47 @@ import sys, os, pprint, json ...@@ -12,32 +12,47 @@ import sys, os, pprint, json
from datetime import date from datetime import date
import registry_xml import registry_xml
def write_header(data_source_name,
                 all_cmds,
                 api,
                 preamble,
                 path,
                 lib,
                 ns="",
                 prefix=None,
                 export=""):
    """Generate the <api>_loader_autogen.h header of loader function pointers.

    data_source_name: human-readable name of the XML inputs, for the banner.
    all_cmds:         list of entry point names (e.g. "eglGetDisplay").
    api:              api name prefix on the commands ("egl", "gles", "wgl").
    preamble:         text inserted before the declarations.
    path:             output directory key understood by registry_xml.path_to.
    lib:              library name used for the include guard (upper-cased).
    ns:               optional namespace prepended to each pointer name.
    prefix:           optional replacement for the api prefix of each command.
    export:           optional storage/export qualifier for each declaration.
    """
    file_name = "%s_loader_autogen.h" % api
    header_path = registry_xml.path_to(path, file_name)

    def pre(cmd):
        # Swap the api prefix (e.g. "egl") for `prefix` when one is given.
        # Fixed: compare to None with `is`, not `==`.
        if prefix is None:
            return cmd
        return prefix + cmd[len(api):]

    with open(header_path, "w") as out:
        var_protos = [
            "%sextern PFN%sPROC %s%s;" % (export, cmd.upper(), ns, pre(cmd)) for cmd in all_cmds
        ]
        loader_header = template_loader_h.format(
            script_name=os.path.basename(sys.argv[0]),
            data_source_name=data_source_name,
            year=date.today().year,
            function_pointers="\n".join(var_protos),
            api_upper=api.upper(),
            api_lower=api,
            preamble=preamble,
            export=export,
            lib=lib.upper())
        out.write(loader_header)
        # Redundant close() removed: the `with` block closes the file.
def write_source(data_source_name, all_cmds, api, path, ns = "", prefix = None, export = ""):
def write_source(data_source_name, all_cmds, api, path, ns="", prefix=None, export=""):
file_name = "%s_loader_autogen.cpp" % api file_name = "%s_loader_autogen.cpp" % api
source_path = registry_xml.path_to(path, file_name) source_path = registry_xml.path_to(path, file_name)
def pre(cmd): def pre(cmd):
if prefix == None: if prefix == None:
return cmd return cmd
...@@ -50,17 +65,18 @@ def write_source(data_source_name, all_cmds, api, path, ns = "", prefix = None, ...@@ -50,17 +65,18 @@ def write_source(data_source_name, all_cmds, api, path, ns = "", prefix = None,
setters = [setter % (ns, pre(cmd), cmd.upper(), pre(cmd)) for cmd in all_cmds] setters = [setter % (ns, pre(cmd), cmd.upper(), pre(cmd)) for cmd in all_cmds]
loader_source = template_loader_cpp.format( loader_source = template_loader_cpp.format(
script_name = os.path.basename(sys.argv[0]), script_name=os.path.basename(sys.argv[0]),
data_source_name = data_source_name, data_source_name=data_source_name,
year = date.today().year, year=date.today().year,
function_pointers = "\n".join(var_defs), function_pointers="\n".join(var_defs),
set_pointers = "\n".join(setters), set_pointers="\n".join(setters),
api_upper = api.upper(), api_upper=api.upper(),
api_lower = api) api_lower=api)
out.write(loader_source) out.write(loader_source)
out.close() out.close()
def gen_libegl_loader(): def gen_libegl_loader():
data_source_name = "egl.xml and egl_angle_ext.xml" data_source_name = "egl.xml and egl_angle_ext.xml"
...@@ -82,6 +98,7 @@ def gen_libegl_loader(): ...@@ -82,6 +98,7 @@ def gen_libegl_loader():
write_header(data_source_name, all_cmds, "egl", libegl_preamble, path, "LIBEGL", "", "EGL_") write_header(data_source_name, all_cmds, "egl", libegl_preamble, path, "LIBEGL", "", "EGL_")
write_source(data_source_name, all_cmds, "egl", path, "", "EGL_") write_source(data_source_name, all_cmds, "egl", path, "", "EGL_")
def gen_gl_loader(): def gen_gl_loader():
data_source_name = "gl.xml and gl_angle_ext.xml" data_source_name = "gl.xml and gl_angle_ext.xml"
...@@ -113,6 +130,7 @@ def gen_gl_loader(): ...@@ -113,6 +130,7 @@ def gen_gl_loader():
write_header(data_source_name, all_cmds, "gles", util_gles_preamble, path, "UTIL", export=ex) write_header(data_source_name, all_cmds, "gles", util_gles_preamble, path, "UTIL", export=ex)
write_source(data_source_name, all_cmds, "gles", path, export=ex) write_source(data_source_name, all_cmds, "gles", path, export=ex)
def gen_egl_loader(): def gen_egl_loader():
data_source_name = "egl.xml and egl_angle_ext.xml" data_source_name = "egl.xml and egl_angle_ext.xml"
...@@ -135,6 +153,7 @@ def gen_egl_loader(): ...@@ -135,6 +153,7 @@ def gen_egl_loader():
write_header(data_source_name, all_cmds, "egl", util_egl_preamble, path, "UTIL", export=ex) write_header(data_source_name, all_cmds, "egl", util_egl_preamble, path, "UTIL", export=ex)
write_source(data_source_name, all_cmds, "egl", path, export=ex) write_source(data_source_name, all_cmds, "egl", path, export=ex)
def gen_wgl_loader(): def gen_wgl_loader():
supported_wgl_extensions = [ supported_wgl_extensions = [
...@@ -162,6 +181,7 @@ def gen_wgl_loader(): ...@@ -162,6 +181,7 @@ def gen_wgl_loader():
write_header(source, all_cmds, "wgl", util_wgl_preamble, path, "UTIL_WINDOWS", "_") write_header(source, all_cmds, "wgl", util_wgl_preamble, path, "UTIL_WINDOWS", "_")
write_source(source, all_cmds, "wgl", path, "_") write_source(source, all_cmds, "wgl", path, "_")
def main(): def main():
# Handle inputs/outputs for run_code_generation.py's auto_script # Handle inputs/outputs for run_code_generation.py's auto_script
......
...@@ -109,11 +109,13 @@ $ImplMethodDefinitions ...@@ -109,11 +109,13 @@ $ImplMethodDefinitions
} // namespace rx } // namespace rx
""" """
def generate_impl_declaration(impl_stub):
    """Turn a base-class method stub into an `override` declaration."""
    # Re-align wrapped lines vertically by stripping the continuation indent.
    aligned = re.sub(r'\n ', '\n', impl_stub)
    return '%s override;\n' % aligned
def generate_impl_definition(impl_stub, typed_impl): def generate_impl_definition(impl_stub, typed_impl):
function_signature = impl_stub.strip() function_signature = impl_stub.strip()
...@@ -150,15 +152,17 @@ def generate_impl_definition(impl_stub, typed_impl): ...@@ -150,15 +152,17 @@ def generate_impl_definition(impl_stub, typed_impl):
else: else:
return_statement = ' return ' + return_type + '();\n' return_statement = ' return ' + return_type + '();\n'
body = '{\n' + ' UNIMPLEMENTED();\n' + return_statement +'}\n' body = '{\n' + ' UNIMPLEMENTED();\n' + return_statement + '}\n'
return '\n' + function_signature + body return '\n' + function_signature + body
def get_constructor_args(constructor):
    """Split 'Ctor(type a, type b)' into its raw parameter text and arg names."""
    params = re.search(r'\((.*)\)', constructor).group(1)
    # Keep only the identifier directly preceding each ',' or the closing ')'.
    names = re.findall(r'[^\w]?(\w+)(?:\,|$)', params)
    return params, ', '.join(names)
def parse_impl_header(base_impl): def parse_impl_header(base_impl):
impl_h_file_path = base_impl + '.h' impl_h_file_path = base_impl + '.h'
impl_h_file = open(impl_h_file_path, 'r') impl_h_file = open(impl_h_file_path, 'r')
...@@ -172,7 +176,7 @@ def parse_impl_header(base_impl): ...@@ -172,7 +176,7 @@ def parse_impl_header(base_impl):
for line in impl_h_file: for line in impl_h_file:
clean_line = line.strip() clean_line = line.strip()
match = re.search(r'^(?:explicit )?(' + base_impl + r'\([^\)]*\))', clean_line); match = re.search(r'^(?:explicit )?(' + base_impl + r'\([^\)]*\))', clean_line)
if match: if match:
constructor = match.group(1) constructor = match.group(1)
...@@ -200,6 +204,7 @@ def parse_impl_header(base_impl): ...@@ -200,6 +204,7 @@ def parse_impl_header(base_impl):
return impl_stubs, private_impl_stubs, constructor return impl_stubs, private_impl_stubs, constructor
def get_base_class(base_impl): def get_base_class(base_impl):
impl_h_file_path = base_impl + '.h' impl_h_file_path = base_impl + '.h'
with open(impl_h_file_path, 'r') as impl_h_file: with open(impl_h_file_path, 'r') as impl_h_file:
...@@ -209,6 +214,7 @@ def get_base_class(base_impl): ...@@ -209,6 +214,7 @@ def get_base_class(base_impl):
return match.group(1) return match.group(1)
return False return False
for impl_class in impl_classes: for impl_class in impl_classes:
base_impl = impl_class + 'Impl' base_impl = impl_class + 'Impl'
......
...@@ -3,7 +3,6 @@ ...@@ -3,7 +3,6 @@
# Copyright 2016 The ANGLE Project Authors. All rights reserved. # Copyright 2016 The ANGLE Project Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be # Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file. # found in the LICENSE file.
"""Generate copies of the Vulkan layers JSON files, with no paths, forcing """Generate copies of the Vulkan layers JSON files, with no paths, forcing
Vulkan to use the default search path to look for layers.""" Vulkan to use the default search path to look for layers."""
...@@ -60,8 +59,7 @@ def main(): ...@@ -60,8 +59,7 @@ def main():
# Update the path. # Update the path.
if not data_key in data: if not data_key in data:
raise Exception( raise Exception("Could not find '%s' key in %s" % (data_key, json_fname))
"Could not find '%s' key in %s" % (data_key, json_fname))
# The standard validation layer has no library path. # The standard validation layer has no library path.
if 'library_path' in data[data_key]: if 'library_path' in data[data_key]:
...@@ -93,8 +91,7 @@ def main(): ...@@ -93,8 +91,7 @@ def main():
# For each *.json.in template files in source dir generate actual json file # For each *.json.in template files in source dir generate actual json file
# in target dir # in target dir
if (set(glob_slash(os.path.join(source_dir, '*.json.in'))) != if (set(glob_slash(os.path.join(source_dir, '*.json.in'))) != set(json_in_files)):
set(json_in_files)):
print('.json.in list in gn file is out-of-date', file=sys.stderr) print('.json.in list in gn file is out-of-date', file=sys.stderr)
return 1 return 1
for json_in_name in json_in_files: for json_in_name in json_in_files:
...@@ -112,5 +109,6 @@ def main(): ...@@ -112,5 +109,6 @@ def main():
line = line.replace('@VK_VERSION@', '1.1.' + vk_version) line = line.replace('@VK_VERSION@', '1.1.' + vk_version)
json_out_file.write(line) json_out_file.write(line)
if __name__ == '__main__': if __name__ == '__main__':
sys.exit(main()) sys.exit(main())
...@@ -22,12 +22,14 @@ os.chdir(os.path.join(script_dir, '..')) ...@@ -22,12 +22,14 @@ os.chdir(os.path.join(script_dir, '..'))
out_dir = 'out' out_dir = 'out'
# Generate the VS solutions for any valid directory. # Generate the VS solutions for any valid directory.
def generate_projects(dirname):
    """Run `gn gen` for one output directory, emitting IDE project files."""
    command = ['gn.bat', 'gen', dirname, '--ide=' + target_ide, '--sln=' + solution_name]
    print('Running "%s"' % ' '.join(command))
    subprocess.call(command)
for potential_dir in os.listdir(out_dir): for potential_dir in os.listdir(out_dir):
path = os.path.join(out_dir, potential_dir) path = os.path.join(out_dir, potential_dir)
build_ninja_d = os.path.join(path, 'build.ninja.d') build_ninja_d = os.path.join(path, 'build.ninja.d')
......
...@@ -29,19 +29,22 @@ if sys.platform == 'win32': ...@@ -29,19 +29,22 @@ if sys.platform == 'win32':
scores = [] scores = []
# Danke to http://stackoverflow.com/a/27758326 # Danke to http://stackoverflow.com/a/27758326
def mean(data):
    """Return the sample arithmetic mean of data."""
    count = len(data)
    if count < 1:
        raise ValueError('mean requires at least one data point')
    # Force float division so the script also behaves under Python 2.
    return float(sum(data)) / float(count)
def sum_of_square_deviations(data, c):
    """Return the sum of square deviations of sequence data about the value c."""
    return sum((float(x) - c)**2 for x in data)
def coefficient_of_variation(data): def coefficient_of_variation(data):
"""Calculates the population coefficient of variation.""" """Calculates the population coefficient of variation."""
n = len(data) n = len(data)
...@@ -49,24 +52,28 @@ def coefficient_of_variation(data): ...@@ -49,24 +52,28 @@ def coefficient_of_variation(data):
raise ValueError('variance requires at least two data points') raise ValueError('variance requires at least two data points')
c = mean(data) c = mean(data)
ss = sum_of_square_deviations(data, c) ss = sum_of_square_deviations(data, c)
pvar = ss/n # the population variance pvar = ss / n # the population variance
stddev = (pvar**0.5) # population standard deviation stddev = (pvar**0.5) # population standard deviation
return stddev / c return stddev / c
def truncated_list(data, n):
    """Compute a truncated list, n is truncation size.

    Drops the n smallest and n largest samples and returns the rest, sorted.
    Raises ValueError when fewer than 2*n samples are available.
    """
    if len(data) < n * 2:
        raise ValueError('list not large enough to truncate')
    # Use an explicit end index: data[n:-n] is wrong for n == 0 because
    # [0:-0] means [0:0] and yields an empty list instead of the whole list.
    return sorted(data)[n:len(data) - n]
def truncated_mean(data, n):
    """Compute a truncated mean, n is truncation size"""
    trimmed = truncated_list(data, n)
    return mean(trimmed)
def truncated_cov(data, n):
    """Compute a truncated coefficient of variation, n is truncation size"""
    trimmed = truncated_list(data, n)
    return coefficient_of_variation(trimmed)
# Find most recent binary # Find most recent binary
newest_binary = None newest_binary = None
newest_mtime = None newest_mtime = None
...@@ -96,8 +103,12 @@ if len(sys.argv) >= 2: ...@@ -96,8 +103,12 @@ if len(sys.argv) >= 2:
print('Using test executable: ' + perftests_path) print('Using test executable: ' + perftests_path)
print('Test name: ' + test_name) print('Test name: ' + test_name)
def get_results(metric, extra_args=[]): def get_results(metric, extra_args=[]):
process = subprocess.Popen([perftests_path, '--gtest_filter=' + test_name] + extra_args, stdout=subprocess.PIPE, stderr=subprocess.PIPE) process = subprocess.Popen(
[perftests_path, '--gtest_filter=' + test_name] + extra_args,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
output, err = process.communicate() output, err = process.communicate()
m = re.search(r'Running (\d+) tests', output) m = re.search(r'Running (\d+) tests', output)
...@@ -115,6 +126,7 @@ def get_results(metric, extra_args=[]): ...@@ -115,6 +126,7 @@ def get_results(metric, extra_args=[]):
return [float(value) for value in m] return [float(value) for value in m]
# Calibrate the number of steps # Calibrate the number of steps
steps = get_results("steps", ["--calibration"])[0] steps = get_results("steps", ["--calibration"])[0]
print("running with %d steps." % steps) print("running with %d steps." % steps)
......
...@@ -108,13 +108,17 @@ strip_suffixes = ["ANGLE", "EXT", "KHR", "OES", "CHROMIUM"] ...@@ -108,13 +108,17 @@ strip_suffixes = ["ANGLE", "EXT", "KHR", "OES", "CHROMIUM"]
# Toggle generation here. # Toggle generation here.
support_EGL_ANGLE_explicit_context = True support_EGL_ANGLE_explicit_context = True
def script_relative(path):
    """Resolve `path` relative to the directory containing this script."""
    script_dir = os.path.dirname(sys.argv[0])
    return os.path.join(script_dir, path)
def path_to(folder, file):
    """Build the path to `file` under src/<folder> in the ANGLE source tree."""
    root = script_relative("..")
    return os.path.join(root, "src", folder, file)
class GLCommandNames: class GLCommandNames:
def __init__(self): def __init__(self):
self.command_names = {} self.command_names = {}
...@@ -136,8 +140,10 @@ class GLCommandNames: ...@@ -136,8 +140,10 @@ class GLCommandNames:
# Add the commands that aren't duplicates # Add the commands that aren't duplicates
self.command_names[version] += commands self.command_names[version] += commands
class RegistryXML: class RegistryXML:
def __init__(self, xml_file, ext_file = None):
def __init__(self, xml_file, ext_file=None):
tree = etree.parse(script_relative(xml_file)) tree = etree.parse(script_relative(xml_file))
self.root = tree.getroot() self.root = tree.getroot()
if (ext_file): if (ext_file):
......
...@@ -18,25 +18,30 @@ root_dir = os.path.abspath(os.path.join(script_dir, '..')) ...@@ -18,25 +18,30 @@ root_dir = os.path.abspath(os.path.join(script_dir, '..'))
# auto_script is a standard way for scripts to return their inputs and outputs. # auto_script is a standard way for scripts to return their inputs and outputs.
def get_child_script_dirname(script):
    """Return the absolute directory of a generator script.

    All script names are relative to ANGLE's root.
    """
    absolute = os.path.abspath(os.path.join(root_dir, script))
    return os.path.dirname(absolute)
# Replace all backslashes with forward slashes to be platform independent # Replace all backslashes with forward slashes to be platform independent
def clean_path_slashes(path):
    """Replace all backslashes with forward slashes to be platform independent."""
    return "/".join(path.split("\\"))
# Takes a script file name which is relative to the code generation script's directory and # Takes a script file name which is relative to the code generation script's directory and
# changes it to be relative to the angle root directory # changes it to be relative to the angle root directory
def rebase_script_path(script_path, relative_path):
    """Rebase a path given relative to `script_path`'s directory onto root_dir."""
    anchored = os.path.join(os.path.dirname(script_path), relative_path)
    return os.path.relpath(anchored, root_dir)
def grab_from_script(script, param):
    """Ask a generator script for its comma-separated `param` list of paths."""
    output = subprocess.check_output(['python', script, param]).strip()
    if output == '':
        return []
    names = output.split(',')
    return [clean_path_slashes(rebase_script_path(script, name)) for name in names]
def auto_script(script): def auto_script(script):
# Set the CWD to the script directory. # Set the CWD to the script directory.
os.chdir(get_child_script_dirname(script)) os.chdir(get_child_script_dirname(script))
...@@ -49,6 +54,7 @@ def auto_script(script): ...@@ -49,6 +54,7 @@ def auto_script(script):
os.chdir(root_dir) os.chdir(root_dir)
return info return info
hash_fname = "run_code_generation_hashes.json" hash_fname = "run_code_generation_hashes.json"
generators = { generators = {
...@@ -183,7 +189,11 @@ def main(): ...@@ -183,7 +189,11 @@ def main():
update_output_hashes(name, info['outputs'], new_hashes) update_output_hashes(name, info['outputs'], new_hashes)
os.chdir(script_dir) os.chdir(script_dir)
json.dump(new_hashes, open(hash_fname, "w"), indent=2, sort_keys=True, json.dump(
new_hashes,
open(hash_fname, "w"),
indent=2,
sort_keys=True,
separators=(',', ':\n ')) separators=(',', ':\n '))
......
...@@ -4,15 +4,15 @@ ...@@ -4,15 +4,15 @@
"ANGLE format:src/libANGLE/renderer/Format_table_autogen.cpp": "ANGLE format:src/libANGLE/renderer/Format_table_autogen.cpp":
"a4cf00b75621bc058c4a1b341bdf6989", "a4cf00b75621bc058c4a1b341bdf6989",
"ANGLE format:src/libANGLE/renderer/angle_format.py": "ANGLE format:src/libANGLE/renderer/angle_format.py":
"b18ca0fe4835114a4a2f54977b19e798", "7ce0869650454e1eebc93658d4d96844",
"ANGLE format:src/libANGLE/renderer/angle_format_data.json": "ANGLE format:src/libANGLE/renderer/angle_format_data.json":
"288d2f350948f8b1928c249234a44b25", "288d2f350948f8b1928c249234a44b25",
"ANGLE format:src/libANGLE/renderer/angle_format_map.json": "ANGLE format:src/libANGLE/renderer/angle_format_map.json":
"be9f9bdbdf785dda05920146e8c55dbb", "be9f9bdbdf785dda05920146e8c55dbb",
"ANGLE format:src/libANGLE/renderer/gen_angle_format_table.py": "ANGLE format:src/libANGLE/renderer/gen_angle_format_table.py":
"3d9f679b65f39ccf19bd7bdf5498f837", "1443d23f2dc1e9d7dc86ae0d512e6814",
"ANGLE load functions table:src/libANGLE/renderer/gen_load_functions_table.py": "ANGLE load functions table:src/libANGLE/renderer/gen_load_functions_table.py":
"2dcc3aa0cd700165b588cf53441e243b", "e65c50e84fc38ad34d0eb0bebb84aab6",
"ANGLE load functions table:src/libANGLE/renderer/load_functions_data.json": "ANGLE load functions table:src/libANGLE/renderer/load_functions_data.json":
"816be111bf4d1995589350dceb367315", "816be111bf4d1995589350dceb367315",
"ANGLE load functions table:src/libANGLE/renderer/load_functions_table_autogen.cpp": "ANGLE load functions table:src/libANGLE/renderer/load_functions_table_autogen.cpp":
...@@ -22,11 +22,11 @@ ...@@ -22,11 +22,11 @@
"D3D11 blit shader selection:src/libANGLE/renderer/d3d/d3d11/d3d11_blit_shaders_autogen.gni": "D3D11 blit shader selection:src/libANGLE/renderer/d3d/d3d11/d3d11_blit_shaders_autogen.gni":
"329dbafc64b0cb578348819198abcfea", "329dbafc64b0cb578348819198abcfea",
"D3D11 blit shader selection:src/libANGLE/renderer/d3d/d3d11/gen_blit11helper.py": "D3D11 blit shader selection:src/libANGLE/renderer/d3d/d3d11/gen_blit11helper.py":
"38bff72bc17ac25c6b42c98d40c76e20", "704a82846928d3e21fc0794dff3a08f8",
"D3D11 format:src/libANGLE/renderer/angle_format.py": "D3D11 format:src/libANGLE/renderer/angle_format.py":
"b18ca0fe4835114a4a2f54977b19e798", "7ce0869650454e1eebc93658d4d96844",
"D3D11 format:src/libANGLE/renderer/d3d/d3d11/gen_texture_format_table.py": "D3D11 format:src/libANGLE/renderer/d3d/d3d11/gen_texture_format_table.py":
"d3260e0390ad2cd8b07420b7426fad43", "bf11e3404d4622059b6e9c4e96abf95e",
"D3D11 format:src/libANGLE/renderer/d3d/d3d11/texture_format_data.json": "D3D11 format:src/libANGLE/renderer/d3d/d3d11/texture_format_data.json":
"d7483ece817e819588f4ca157716dc7b", "d7483ece817e819588f4ca157716dc7b",
"D3D11 format:src/libANGLE/renderer/d3d/d3d11/texture_format_map.json": "D3D11 format:src/libANGLE/renderer/d3d/d3d11/texture_format_map.json":
...@@ -38,9 +38,9 @@ ...@@ -38,9 +38,9 @@
"DXGI format support:src/libANGLE/renderer/d3d/d3d11/dxgi_support_table_autogen.cpp": "DXGI format support:src/libANGLE/renderer/d3d/d3d11/dxgi_support_table_autogen.cpp":
"7ec32ce0ad41450be7493c1db1130e25", "7ec32ce0ad41450be7493c1db1130e25",
"DXGI format support:src/libANGLE/renderer/d3d/d3d11/gen_dxgi_support_tables.py": "DXGI format support:src/libANGLE/renderer/d3d/d3d11/gen_dxgi_support_tables.py":
"389a6358534ebad5e232a44944b6123b", "b464f153f15d60df1c6536adbfafb072",
"DXGI format:src/libANGLE/renderer/angle_format.py": "DXGI format:src/libANGLE/renderer/angle_format.py":
"b18ca0fe4835114a4a2f54977b19e798", "7ce0869650454e1eebc93658d4d96844",
"DXGI format:src/libANGLE/renderer/angle_format_map.json": "DXGI format:src/libANGLE/renderer/angle_format_map.json":
"be9f9bdbdf785dda05920146e8c55dbb", "be9f9bdbdf785dda05920146e8c55dbb",
"DXGI format:src/libANGLE/renderer/d3d/d3d11/dxgi_format_data.json": "DXGI format:src/libANGLE/renderer/d3d/d3d11/dxgi_format_data.json":
...@@ -48,7 +48,7 @@ ...@@ -48,7 +48,7 @@
"DXGI format:src/libANGLE/renderer/d3d/d3d11/dxgi_format_map_autogen.cpp": "DXGI format:src/libANGLE/renderer/d3d/d3d11/dxgi_format_map_autogen.cpp":
"32b9860e3fd8e87a89ff9a09e848e516", "32b9860e3fd8e87a89ff9a09e848e516",
"DXGI format:src/libANGLE/renderer/d3d/d3d11/gen_dxgi_format_table.py": "DXGI format:src/libANGLE/renderer/d3d/d3d11/gen_dxgi_format_table.py":
"bed2688ca828fc9fd1904408d33ba007", "411e6064b916d570fa76949820d34a45",
"ESSL static builtins:src/compiler/translator/ParseContext_autogen.h": "ESSL static builtins:src/compiler/translator/ParseContext_autogen.h":
"6be7f97ce68aa5ba5ecf30b835bc344d", "6be7f97ce68aa5ba5ecf30b835bc344d",
"ESSL static builtins:src/compiler/translator/SymbolTable_autogen.cpp": "ESSL static builtins:src/compiler/translator/SymbolTable_autogen.cpp":
...@@ -58,11 +58,11 @@ ...@@ -58,11 +58,11 @@
"ESSL static builtins:src/compiler/translator/builtin_function_declarations.txt": "ESSL static builtins:src/compiler/translator/builtin_function_declarations.txt":
"e5e567406476306ea06984d885be028d", "e5e567406476306ea06984d885be028d",
"ESSL static builtins:src/compiler/translator/builtin_symbols_hash_autogen.txt": "ESSL static builtins:src/compiler/translator/builtin_symbols_hash_autogen.txt":
"e60e2185718a035adfd19ab91536fdb7", "05cd84d02529a1e83c88caa9097dc0ef",
"ESSL static builtins:src/compiler/translator/builtin_variables.json": "ESSL static builtins:src/compiler/translator/builtin_variables.json":
"a8f3d76c3c395e8f6a35dd22eb2e8416", "a8f3d76c3c395e8f6a35dd22eb2e8416",
"ESSL static builtins:src/compiler/translator/gen_builtin_symbols.py": "ESSL static builtins:src/compiler/translator/gen_builtin_symbols.py":
"f056dba2fdeac5a5dbad9d8f7b17f55f", "5d5467e17ca5ed5bf9938df9a3391e6f",
"ESSL static builtins:src/compiler/translator/tree_util/BuiltIn_autogen.h": "ESSL static builtins:src/compiler/translator/tree_util/BuiltIn_autogen.h":
"6df5ab6576da4f364763b581da839b77", "6df5ab6576da4f364763b581da839b77",
"ESSL static builtins:src/tests/compiler_tests/ImmutableString_test_autogen.cpp": "ESSL static builtins:src/tests/compiler_tests/ImmutableString_test_autogen.cpp":
...@@ -72,13 +72,13 @@ ...@@ -72,13 +72,13 @@
"Emulated HLSL functions:src/compiler/translator/emulated_builtin_functions_hlsl_autogen.cpp": "Emulated HLSL functions:src/compiler/translator/emulated_builtin_functions_hlsl_autogen.cpp":
"1c759ffdd27a86fd8f2d590b2f3dcb56", "1c759ffdd27a86fd8f2d590b2f3dcb56",
"Emulated HLSL functions:src/compiler/translator/gen_emulated_builtin_function_tables.py": "Emulated HLSL functions:src/compiler/translator/gen_emulated_builtin_function_tables.py":
"c24de0c9ce5f201985c852d2b4b12b98", "5991de4f43758f59d9d042581ae04eab",
"GL copy conversion table:src/libANGLE/es3_copy_conversion_formats.json": "GL copy conversion table:src/libANGLE/es3_copy_conversion_formats.json":
"54608f6f7d9aa7c59a8458ccf3ab9935", "54608f6f7d9aa7c59a8458ccf3ab9935",
"GL copy conversion table:src/libANGLE/es3_copy_conversion_table_autogen.cpp": "GL copy conversion table:src/libANGLE/es3_copy_conversion_table_autogen.cpp":
"b20d198cf5e292c43170d4873b381b34", "b20d198cf5e292c43170d4873b381b34",
"GL copy conversion table:src/libANGLE/gen_copy_conversion_table.py": "GL copy conversion table:src/libANGLE/gen_copy_conversion_table.py":
"92428cef9d97d33ee7063cfa387ccf56", "827a4a27cea1e11bef18fed9dce6dceb",
"GL format map:src/libANGLE/es3_format_type_combinations.json": "GL format map:src/libANGLE/es3_format_type_combinations.json":
"a232823cd6430f14e28793ccabb968ee", "a232823cd6430f14e28793ccabb968ee",
"GL format map:src/libANGLE/format_map_autogen.cpp": "GL format map:src/libANGLE/format_map_autogen.cpp":
...@@ -86,7 +86,7 @@ ...@@ -86,7 +86,7 @@
"GL format map:src/libANGLE/format_map_data.json": "GL format map:src/libANGLE/format_map_data.json":
"779798d4879e5f73a5a108e3e3fd3095", "779798d4879e5f73a5a108e3e3fd3095",
"GL format map:src/libANGLE/gen_format_map.py": "GL format map:src/libANGLE/gen_format_map.py":
"0fd8c00e8b5afb28a5f8b40d9628b9a4", "dbc855d50826670a9e1a4ff2747e7583",
"GL/EGL entry points:scripts/egl.xml": "GL/EGL entry points:scripts/egl.xml":
"842e24514c4cfe09fba703c17a0fd292", "842e24514c4cfe09fba703c17a0fd292",
"GL/EGL entry points:scripts/egl_angle_ext.xml": "GL/EGL entry points:scripts/egl_angle_ext.xml":
...@@ -94,13 +94,13 @@ ...@@ -94,13 +94,13 @@
"GL/EGL entry points:scripts/entry_point_packed_gl_enums.json": "GL/EGL entry points:scripts/entry_point_packed_gl_enums.json":
"28238b0f52826c3794eaa1aa940238bf", "28238b0f52826c3794eaa1aa940238bf",
"GL/EGL entry points:scripts/generate_entry_points.py": "GL/EGL entry points:scripts/generate_entry_points.py":
"83064b09d168c807431cac137b845b5f", "e7ab486465bf7873d8f06ddd9b204539",
"GL/EGL entry points:scripts/gl.xml": "GL/EGL entry points:scripts/gl.xml":
"b470cb06b06cbbe7adb2c8129ec85708", "b470cb06b06cbbe7adb2c8129ec85708",
"GL/EGL entry points:scripts/gl_angle_ext.xml": "GL/EGL entry points:scripts/gl_angle_ext.xml":
"11e1eb2cbe51ae6e7b8705d3506846d5", "11e1eb2cbe51ae6e7b8705d3506846d5",
"GL/EGL entry points:scripts/registry_xml.py": "GL/EGL entry points:scripts/registry_xml.py":
"3b9a36e0be051dc5b4e5162d54749e49", "169e89c63aad5bde60012b64cccced27",
"GL/EGL entry points:src/libANGLE/Context_gles_1_0_autogen.h": "GL/EGL entry points:src/libANGLE/Context_gles_1_0_autogen.h":
"fad4ec629b41e9d97ff57a132ad946cb", "fad4ec629b41e9d97ff57a132ad946cb",
"GL/EGL entry points:src/libANGLE/validationES1_autogen.h": "GL/EGL entry points:src/libANGLE/validationES1_autogen.h":
...@@ -144,9 +144,9 @@ ...@@ -144,9 +144,9 @@
"GL/EGL/WGL loader:scripts/egl_angle_ext.xml": "GL/EGL/WGL loader:scripts/egl_angle_ext.xml":
"745534010f31fbe8e1a1fcddce15ed2d", "745534010f31fbe8e1a1fcddce15ed2d",
"GL/EGL/WGL loader:scripts/generate_loader.py": "GL/EGL/WGL loader:scripts/generate_loader.py":
"475030714c1644b6dfb1f6f08572039d", "b8c0dc876c8122bdc2447de982bcfad6",
"GL/EGL/WGL loader:scripts/registry_xml.py": "GL/EGL/WGL loader:scripts/registry_xml.py":
"3b9a36e0be051dc5b4e5162d54749e49", "169e89c63aad5bde60012b64cccced27",
"GL/EGL/WGL loader:scripts/wgl.xml": "GL/EGL/WGL loader:scripts/wgl.xml":
"aa96419c582af2f6673430e2847693f4", "aa96419c582af2f6673430e2847693f4",
"GL/EGL/WGL loader:src/libEGL/egl_loader_autogen.cpp": "GL/EGL/WGL loader:src/libEGL/egl_loader_autogen.cpp":
...@@ -168,13 +168,13 @@ ...@@ -168,13 +168,13 @@
"OpenGL dispatch table:scripts/gl.xml": "OpenGL dispatch table:scripts/gl.xml":
"b470cb06b06cbbe7adb2c8129ec85708", "b470cb06b06cbbe7adb2c8129ec85708",
"OpenGL dispatch table:src/libANGLE/renderer/angle_format.py": "OpenGL dispatch table:src/libANGLE/renderer/angle_format.py":
"b18ca0fe4835114a4a2f54977b19e798", "7ce0869650454e1eebc93658d4d96844",
"OpenGL dispatch table:src/libANGLE/renderer/gl/DispatchTableGL_autogen.cpp": "OpenGL dispatch table:src/libANGLE/renderer/gl/DispatchTableGL_autogen.cpp":
"96d06b3acf7826aee1ec813a8fa3a867", "96d06b3acf7826aee1ec813a8fa3a867",
"OpenGL dispatch table:src/libANGLE/renderer/gl/DispatchTableGL_autogen.h": "OpenGL dispatch table:src/libANGLE/renderer/gl/DispatchTableGL_autogen.h":
"ea5eded625b5db7d7b2b7f689c72f14b", "ea5eded625b5db7d7b2b7f689c72f14b",
"OpenGL dispatch table:src/libANGLE/renderer/gl/generate_gl_dispatch_table.py": "OpenGL dispatch table:src/libANGLE/renderer/gl/generate_gl_dispatch_table.py":
"7571edb9e610891ed0c95dc496120cff", "f21314d401e650b4182c4b7d66ac5c9c",
"OpenGL dispatch table:src/libANGLE/renderer/gl/gl_bindings_data.json": "OpenGL dispatch table:src/libANGLE/renderer/gl/gl_bindings_data.json":
"1afca09d29ed7788c76cbc9bcfb4de0a", "1afca09d29ed7788c76cbc9bcfb4de0a",
"OpenGL dispatch table:src/libANGLE/renderer/gl/null_functions.cpp": "OpenGL dispatch table:src/libANGLE/renderer/gl/null_functions.cpp":
...@@ -182,17 +182,17 @@ ...@@ -182,17 +182,17 @@
"OpenGL dispatch table:src/libANGLE/renderer/gl/null_functions.h": "OpenGL dispatch table:src/libANGLE/renderer/gl/null_functions.h":
"7906751710cab691f9e7365e59b7beed", "7906751710cab691f9e7365e59b7beed",
"Vulkan format:src/libANGLE/renderer/angle_format.py": "Vulkan format:src/libANGLE/renderer/angle_format.py":
"b18ca0fe4835114a4a2f54977b19e798", "7ce0869650454e1eebc93658d4d96844",
"Vulkan format:src/libANGLE/renderer/angle_format_map.json": "Vulkan format:src/libANGLE/renderer/angle_format_map.json":
"be9f9bdbdf785dda05920146e8c55dbb", "be9f9bdbdf785dda05920146e8c55dbb",
"Vulkan format:src/libANGLE/renderer/vulkan/gen_vk_format_table.py": "Vulkan format:src/libANGLE/renderer/vulkan/gen_vk_format_table.py":
"c1f153d67fa50e5f6683170c83b610d4", "c50c9c66b89df7179a688cda42eb85f2",
"Vulkan format:src/libANGLE/renderer/vulkan/vk_format_map.json": "Vulkan format:src/libANGLE/renderer/vulkan/vk_format_map.json":
"a6522dc0af17eebfee8b3d6d4723594f", "a6522dc0af17eebfee8b3d6d4723594f",
"Vulkan format:src/libANGLE/renderer/vulkan/vk_format_table_autogen.cpp": "Vulkan format:src/libANGLE/renderer/vulkan/vk_format_table_autogen.cpp":
"34dcf4f106f94b03f74c9fd08b22f6ed", "34dcf4f106f94b03f74c9fd08b22f6ed",
"Vulkan internal shader programs:src/libANGLE/renderer/vulkan/gen_vk_internal_shaders.py": "Vulkan internal shader programs:src/libANGLE/renderer/vulkan/gen_vk_internal_shaders.py":
"1262e5e903c7dad214ded83625f9d3c4", "4cc82aa02df5371fc2e3d7448a241fc1",
"Vulkan internal shader programs:src/libANGLE/renderer/vulkan/shaders/gen/BufferUtils.comp.00000000.inc": "Vulkan internal shader programs:src/libANGLE/renderer/vulkan/shaders/gen/BufferUtils.comp.00000000.inc":
"caa03e84d757844a099d0e408a162c7e", "caa03e84d757844a099d0e408a162c7e",
"Vulkan internal shader programs:src/libANGLE/renderer/vulkan/shaders/gen/BufferUtils.comp.00000001.inc": "Vulkan internal shader programs:src/libANGLE/renderer/vulkan/shaders/gen/BufferUtils.comp.00000001.inc":
...@@ -354,9 +354,9 @@ ...@@ -354,9 +354,9 @@
"Vulkan internal shader programs:tools/glslang/glslang_validator.sha1": "Vulkan internal shader programs:tools/glslang/glslang_validator.sha1":
"ea685e0867a4b3a07ad7e4246ac84e10", "ea685e0867a4b3a07ad7e4246ac84e10",
"Vulkan mandatory format support table:src/libANGLE/renderer/angle_format.py": "Vulkan mandatory format support table:src/libANGLE/renderer/angle_format.py":
"b18ca0fe4835114a4a2f54977b19e798", "7ce0869650454e1eebc93658d4d96844",
"Vulkan mandatory format support table:src/libANGLE/renderer/vulkan/gen_vk_mandatory_format_support_table.py": "Vulkan mandatory format support table:src/libANGLE/renderer/vulkan/gen_vk_mandatory_format_support_table.py":
"417772416d3082400ce05acc2f209c9f", "dab4614bbee0c3fbc5b3ccaaa11ba9d3",
"Vulkan mandatory format support table:src/libANGLE/renderer/vulkan/vk_mandatory_format_support_data.json": "Vulkan mandatory format support table:src/libANGLE/renderer/vulkan/vk_mandatory_format_support_data.json":
"fa2bd54c1bb0ab2cf1d386061a4bc5c5", "fa2bd54c1bb0ab2cf1d386061a4bc5c5",
"Vulkan mandatory format support table:src/libANGLE/renderer/vulkan/vk_mandatory_format_support_table_autogen.cpp": "Vulkan mandatory format support table:src/libANGLE/renderer/vulkan/vk_mandatory_format_support_table_autogen.cpp":
...@@ -372,19 +372,19 @@ ...@@ -372,19 +372,19 @@
"packed enum:src/common/PackedGLEnums_autogen.h": "packed enum:src/common/PackedGLEnums_autogen.h":
"0766f2bb7874b2b6b4aaed4a6d0ef49e", "0766f2bb7874b2b6b4aaed4a6d0ef49e",
"packed enum:src/common/gen_packed_gl_enums.py": "packed enum:src/common/gen_packed_gl_enums.py":
"0cd1a1cb6d5fde8cbac2994db24eb901", "cc463afc5e37b0f73e119fec59a39420",
"packed enum:src/common/packed_egl_enums.json": "packed enum:src/common/packed_egl_enums.json":
"5f591d220ee53b6e54a27d1523a3ab79", "5f591d220ee53b6e54a27d1523a3ab79",
"packed enum:src/common/packed_gl_enums.json": "packed enum:src/common/packed_gl_enums.json":
"cd2c00958dd8cc546b816dedaf4769d3", "cd2c00958dd8cc546b816dedaf4769d3",
"proc table:src/libGLESv2/gen_proc_table.py": "proc table:src/libGLESv2/gen_proc_table.py":
"20ebe54894d613de42b0b15ca34078d9", "3be3e8ed7fad58e8cc6fcf348da7b17d",
"proc table:src/libGLESv2/proc_table_autogen.cpp": "proc table:src/libGLESv2/proc_table_autogen.cpp":
"1e89c264adbe7120edb636013383598b", "1e89c264adbe7120edb636013383598b",
"proc table:src/libGLESv2/proc_table_data.json": "proc table:src/libGLESv2/proc_table_data.json":
"04123621b8fd5e6d18f9f3c95c190693", "04123621b8fd5e6d18f9f3c95c190693",
"uniform type:src/common/gen_uniform_type_table.py": "uniform type:src/common/gen_uniform_type_table.py":
"fa40444d496ac07cd9dc0cd239e4a499", "9dd389f2b5793ba635169d61cef2dde9",
"uniform type:src/common/uniform_type_info_autogen.cpp": "uniform type:src/common/uniform_type_info_autogen.cpp":
"b31d181bc49ad1c3540401a5c874e692" "b31d181bc49ad1c3540401a5c874e692"
} }
\ No newline at end of file
...@@ -39,10 +39,9 @@ def main(): ...@@ -39,10 +39,9 @@ def main():
isolated_file = os.path.join(out_file_path, '%s.isolated' % args.test) isolated_file = os.path.join(out_file_path, '%s.isolated' % args.test)
isolate_args = [ isolate_args = [
'python', isolate_script_path, 'archive', 'python', isolate_script_path, 'archive', '-I', 'https://isolateserver.appspot.com', '-i',
'-I', 'https://isolateserver.appspot.com', isolate_file, '-s', isolated_file
'-i', isolate_file, ]
'-s', isolated_file]
stdout = subprocess.check_output(isolate_args) stdout = subprocess.check_output(isolate_args)
sha = stdout[:40] sha = stdout[:40]
...@@ -50,14 +49,11 @@ def main(): ...@@ -50,14 +49,11 @@ def main():
swarming_script_path = os.path.join('tools', 'swarming_client', 'swarming.py') swarming_script_path = os.path.join('tools', 'swarming_client', 'swarming.py')
swarmings_args = [ swarmings_args = [
'python', swarming_script_path, 'trigger', 'python', swarming_script_path, 'trigger', '-S', 'chromium-swarm.appspot.com', '-I',
'-S', 'chromium-swarm.appspot.com', 'isolateserver.appspot.com', '-d', 'os', args.os_dim, '-d', 'pool', args.pool, '-d', 'gpu',
'-I', 'isolateserver.appspot.com', args.gpu_dim,
'-d', 'os', args.os_dim, '--shards=%d' % args.shards, '-s', sha
'-d', 'pool', args.pool, ]
'-d', 'gpu', args.gpu_dim,
'--shards=%d' % args.shards,
'-s', sha]
if args.extra_args: if args.extra_args:
swarmings_args += ['--'] + args.extra_args swarmings_args += ['--'] + args.extra_args
......
...@@ -39,12 +39,15 @@ if newest_folder is None: ...@@ -39,12 +39,15 @@ if newest_folder is None:
source_folder = newest_folder source_folder = newest_folder
# Is a folder a chrome binary directory? # Is a folder a chrome binary directory?
def is_chrome_bin(str): def is_chrome_bin(str):
chrome_file = os.path.join(chrome_folder, str) chrome_file = os.path.join(chrome_folder, str)
return os.path.isdir(chrome_file) and all([char.isdigit() or char == '.' for char in str]) return os.path.isdir(chrome_file) and all([char.isdigit() or char == '.' for char in str])
sorted_chrome_bins = sorted([folder for folder in os.listdir(chrome_folder) if is_chrome_bin(folder)], reverse=True)
sorted_chrome_bins = sorted(
[folder for folder in os.listdir(chrome_folder) if is_chrome_bin(folder)], reverse=True)
dest_folder = os.path.join(chrome_folder, sorted_chrome_bins[0]) dest_folder = os.path.join(chrome_folder, sorted_chrome_bins[0])
......
...@@ -14,9 +14,11 @@ usage = """\ ...@@ -14,9 +14,11 @@ usage = """\
Usage: commit_id.py check <angle_dir> - check if git is present Usage: commit_id.py check <angle_dir> - check if git is present
commit_id.py gen <angle_dir> <file_to_write> - generate commit.h""" commit_id.py gen <angle_dir> <file_to_write> - generate commit.h"""
def grab_output(command, cwd): def grab_output(command, cwd):
return sp.Popen(command, stdout=sp.PIPE, shell=True, cwd=cwd).communicate()[0].strip() return sp.Popen(command, stdout=sp.PIPE, shell=True, cwd=cwd).communicate()[0].strip()
if len(sys.argv) < 3: if len(sys.argv) < 3:
sys.exit(usage) sys.exit(usage)
......
...@@ -9,6 +9,7 @@ ...@@ -9,6 +9,7 @@
#include "common/mathutil.h" #include "common/mathutil.h"
def convertMantissa(i): def convertMantissa(i):
if i == 0: if i == 0:
return 0 return 0
...@@ -24,6 +25,7 @@ def convertMantissa(i): ...@@ -24,6 +25,7 @@ def convertMantissa(i):
else: else:
return 0x38000000 + ((i - 1024) << 13) return 0x38000000 + ((i - 1024) << 13)
def convertExponent(i): def convertExponent(i):
if i == 0: if i == 0:
return 0 return 0
...@@ -38,12 +40,14 @@ def convertExponent(i): ...@@ -38,12 +40,14 @@ def convertExponent(i):
else: else:
return 0xC7800000 return 0xC7800000
def convertOffset(i): def convertOffset(i):
if i == 0 or i == 32: if i == 0 or i == 32:
return 0 return 0
else: else:
return 1024 return 1024
print """// print """//
// Copyright (c) 2012 The ANGLE Project Authors. All rights reserved. // Copyright (c) 2012 The ANGLE Project Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be // Use of this source code is governed by a BSD-style license that can be
......
...@@ -28,6 +28,7 @@ Generators = [ ...@@ -28,6 +28,7 @@ Generators = [
}, },
] ]
def load_enums(path): def load_enums(path):
with open(path) as map_file: with open(path) as map_file:
enums_dict = json.loads(map_file.read(), object_pairs_hook=OrderedDict) enums_dict = json.loads(map_file.read(), object_pairs_hook=OrderedDict)
...@@ -42,18 +43,21 @@ def load_enums(path): ...@@ -42,18 +43,21 @@ def load_enums(path):
values.append(EnumValue(value_name, value_gl_name, i)) values.append(EnumValue(value_name, value_gl_name, i))
i += 1 i += 1
assert(i < 255) # This makes sure enums fit in the uint8_t assert (i < 255) # This makes sure enums fit in the uint8_t
enums.append(Enum(enum_name, values, i)) enums.append(Enum(enum_name, values, i))
enums.sort(key=lambda enum: enum.name) enums.sort(key=lambda enum: enum.name)
return enums return enums
def generate_include_guard(path): def generate_include_guard(path):
return path.replace(".", "_").upper() return path.replace(".", "_").upper()
def header_name_from_cpp_name(path): def header_name_from_cpp_name(path):
return path.replace(".cpp", ".h") return path.replace(".cpp", ".h")
header_template = """// GENERATED FILE - DO NOT EDIT. header_template = """// GENERATED FILE - DO NOT EDIT.
// Generated by {script_name} using data from {data_source_name}. // Generated by {script_name} using data from {data_source_name}.
// //
...@@ -99,6 +103,7 @@ template <> ...@@ -99,6 +103,7 @@ template <>
{api_enum_name} To{api_enum_name}({enum_name} from); {api_enum_name} To{api_enum_name}({enum_name} from);
""" """
def write_header(enums, path_prefix, file_name, data_source_name, namespace, api_enum_name): def write_header(enums, path_prefix, file_name, data_source_name, namespace, api_enum_name):
content = [''] content = ['']
...@@ -107,27 +112,27 @@ def write_header(enums, path_prefix, file_name, data_source_name, namespace, api ...@@ -107,27 +112,27 @@ def write_header(enums, path_prefix, file_name, data_source_name, namespace, api
for value in enum.values: for value in enum.values:
value_declarations.append(' ' + value.name + ' = ' + str(value.value) + ',') value_declarations.append(' ' + value.name + ' = ' + str(value.value) + ',')
content.append(enum_declaration_template.format( content.append(
enum_name = enum.name, enum_declaration_template.format(
max_value = str(enum.max_value), enum_name=enum.name,
value_declarations = '\n'.join(value_declarations), max_value=str(enum.max_value),
api_enum_name = api_enum_name value_declarations='\n'.join(value_declarations),
)) api_enum_name=api_enum_name))
header = header_template.format( header = header_template.format(
content = ''.join(content), content=''.join(content),
copyright_year = datetime.date.today().year, copyright_year=datetime.date.today().year,
data_source_name = data_source_name, data_source_name=data_source_name,
script_name = sys.argv[0], script_name=sys.argv[0],
file_name = file_name, file_name=file_name,
include_guard = generate_include_guard(file_name), include_guard=generate_include_guard(file_name),
namespace = namespace, namespace=namespace,
api_enum_name = api_enum_name api_enum_name=api_enum_name)
)
with (open(path_prefix + file_name, 'wt')) as f: with (open(path_prefix + file_name, 'wt')) as f:
f.write(header) f.write(header)
cpp_template = """// GENERATED FILE - DO NOT EDIT. cpp_template = """// GENERATED FILE - DO NOT EDIT.
// Generated by {script_name} using data from {data_source_name}. // Generated by {script_name} using data from {data_source_name}.
// //
...@@ -172,6 +177,7 @@ template <> ...@@ -172,6 +177,7 @@ template <>
}} }}
""" """
def write_cpp(enums, path_prefix, file_name, data_source_name, namespace, api_enum_name): def write_cpp(enums, path_prefix, file_name, data_source_name, namespace, api_enum_name):
content = [''] content = ['']
...@@ -180,27 +186,28 @@ def write_cpp(enums, path_prefix, file_name, data_source_name, namespace, api_en ...@@ -180,27 +186,28 @@ def write_cpp(enums, path_prefix, file_name, data_source_name, namespace, api_en
to_glenum_cases = [] to_glenum_cases = []
for value in enum.values: for value in enum.values:
qualified_name = enum.name + '::' + value.name qualified_name = enum.name + '::' + value.name
from_glenum_cases.append(' case ' + value.gl_name + ':\n return ' + qualified_name + ';') from_glenum_cases.append(' case ' + value.gl_name + ':\n return ' +
to_glenum_cases.append(' case ' + qualified_name + ':\n return ' + value.gl_name + ';') qualified_name + ';')
to_glenum_cases.append(' case ' + qualified_name + ':\n return ' +
content.append(enum_implementation_template.format( value.gl_name + ';')
enum_name = enum.name,
from_glenum_cases = '\n'.join(from_glenum_cases), content.append(
max_value = str(enum.max_value), enum_implementation_template.format(
to_glenum_cases = '\n'.join(to_glenum_cases), enum_name=enum.name,
api_enum_name = api_enum_name from_glenum_cases='\n'.join(from_glenum_cases),
)) max_value=str(enum.max_value),
to_glenum_cases='\n'.join(to_glenum_cases),
api_enum_name=api_enum_name))
cpp = cpp_template.format( cpp = cpp_template.format(
content = ''.join(content), content=''.join(content),
copyright_year = datetime.date.today().year, copyright_year=datetime.date.today().year,
data_source_name = data_source_name, data_source_name=data_source_name,
script_name = sys.argv[0], script_name=sys.argv[0],
file_name = file_name, file_name=file_name,
header_name = header_name_from_cpp_name(file_name), header_name=header_name_from_cpp_name(file_name),
namespace = namespace, namespace=namespace,
api_enum_name = api_enum_name api_enum_name=api_enum_name)
)
with (open(path_prefix + file_name, 'wt')) as f: with (open(path_prefix + file_name, 'wt')) as f:
f.write(cpp) f.write(cpp)
...@@ -236,8 +243,10 @@ def main(): ...@@ -236,8 +243,10 @@ def main():
namespace = generator['namespace'] namespace = generator['namespace']
enum_type = generator['enum_type'] enum_type = generator['enum_type']
enums = load_enums(path_prefix + json_file) enums = load_enums(path_prefix + json_file)
write_header(enums, path_prefix, output_file + '_autogen.h', json_file, namespace, enum_type) write_header(enums, path_prefix, output_file + '_autogen.h', json_file, namespace,
write_cpp(enums, path_prefix, output_file + '_autogen.cpp', json_file, namespace, enum_type) enum_type)
write_cpp(enums, path_prefix, output_file + '_autogen.cpp', json_file, namespace,
enum_type)
return 0 return 0
......
...@@ -12,68 +12,23 @@ from datetime import date ...@@ -12,68 +12,23 @@ from datetime import date
import sys import sys
all_uniform_types = [ all_uniform_types = [
"GL_NONE", "GL_NONE", "GL_BOOL", "GL_BOOL_VEC2", "GL_BOOL_VEC3", "GL_BOOL_VEC4", "GL_FLOAT",
"GL_BOOL", "GL_FLOAT_MAT2", "GL_FLOAT_MAT2x3", "GL_FLOAT_MAT2x4", "GL_FLOAT_MAT3", "GL_FLOAT_MAT3x2",
"GL_BOOL_VEC2", "GL_FLOAT_MAT3x4", "GL_FLOAT_MAT4", "GL_FLOAT_MAT4x2", "GL_FLOAT_MAT4x3", "GL_FLOAT_VEC2",
"GL_BOOL_VEC3", "GL_FLOAT_VEC3", "GL_FLOAT_VEC4", "GL_IMAGE_2D", "GL_IMAGE_2D_ARRAY", "GL_IMAGE_3D",
"GL_BOOL_VEC4", "GL_IMAGE_CUBE", "GL_INT", "GL_INT_IMAGE_2D", "GL_INT_IMAGE_2D_ARRAY", "GL_INT_IMAGE_3D",
"GL_FLOAT", "GL_INT_IMAGE_CUBE", "GL_INT_SAMPLER_2D", "GL_INT_SAMPLER_2D_ARRAY",
"GL_FLOAT_MAT2", "GL_INT_SAMPLER_2D_MULTISAMPLE", "GL_INT_SAMPLER_2D_MULTISAMPLE_ARRAY", "GL_INT_SAMPLER_3D",
"GL_FLOAT_MAT2x3", "GL_INT_SAMPLER_CUBE", "GL_INT_VEC2", "GL_INT_VEC3", "GL_INT_VEC4", "GL_SAMPLER_2D",
"GL_FLOAT_MAT2x4", "GL_SAMPLER_2D_ARRAY", "GL_SAMPLER_2D_ARRAY_SHADOW", "GL_SAMPLER_2D_MULTISAMPLE",
"GL_FLOAT_MAT3", "GL_SAMPLER_2D_MULTISAMPLE_ARRAY", "GL_SAMPLER_2D_RECT_ANGLE", "GL_SAMPLER_2D_SHADOW",
"GL_FLOAT_MAT3x2", "GL_SAMPLER_3D", "GL_SAMPLER_CUBE", "GL_SAMPLER_CUBE_SHADOW", "GL_SAMPLER_EXTERNAL_OES",
"GL_FLOAT_MAT3x4", "GL_UNSIGNED_INT", "GL_UNSIGNED_INT_ATOMIC_COUNTER", "GL_UNSIGNED_INT_IMAGE_2D",
"GL_FLOAT_MAT4", "GL_UNSIGNED_INT_IMAGE_2D_ARRAY", "GL_UNSIGNED_INT_IMAGE_3D", "GL_UNSIGNED_INT_IMAGE_CUBE",
"GL_FLOAT_MAT4x2", "GL_UNSIGNED_INT_SAMPLER_2D", "GL_UNSIGNED_INT_SAMPLER_2D_ARRAY",
"GL_FLOAT_MAT4x3", "GL_UNSIGNED_INT_SAMPLER_2D_MULTISAMPLE", "GL_UNSIGNED_INT_SAMPLER_2D_MULTISAMPLE_ARRAY",
"GL_FLOAT_VEC2", "GL_UNSIGNED_INT_SAMPLER_3D", "GL_UNSIGNED_INT_SAMPLER_CUBE", "GL_UNSIGNED_INT_VEC2",
"GL_FLOAT_VEC3", "GL_UNSIGNED_INT_VEC3", "GL_UNSIGNED_INT_VEC4"
"GL_FLOAT_VEC4",
"GL_IMAGE_2D",
"GL_IMAGE_2D_ARRAY",
"GL_IMAGE_3D",
"GL_IMAGE_CUBE",
"GL_INT",
"GL_INT_IMAGE_2D",
"GL_INT_IMAGE_2D_ARRAY",
"GL_INT_IMAGE_3D",
"GL_INT_IMAGE_CUBE",
"GL_INT_SAMPLER_2D",
"GL_INT_SAMPLER_2D_ARRAY",
"GL_INT_SAMPLER_2D_MULTISAMPLE",
"GL_INT_SAMPLER_2D_MULTISAMPLE_ARRAY",
"GL_INT_SAMPLER_3D",
"GL_INT_SAMPLER_CUBE",
"GL_INT_VEC2",
"GL_INT_VEC3",
"GL_INT_VEC4",
"GL_SAMPLER_2D",
"GL_SAMPLER_2D_ARRAY",
"GL_SAMPLER_2D_ARRAY_SHADOW",
"GL_SAMPLER_2D_MULTISAMPLE",
"GL_SAMPLER_2D_MULTISAMPLE_ARRAY",
"GL_SAMPLER_2D_RECT_ANGLE",
"GL_SAMPLER_2D_SHADOW",
"GL_SAMPLER_3D",
"GL_SAMPLER_CUBE",
"GL_SAMPLER_CUBE_SHADOW",
"GL_SAMPLER_EXTERNAL_OES",
"GL_UNSIGNED_INT",
"GL_UNSIGNED_INT_ATOMIC_COUNTER",
"GL_UNSIGNED_INT_IMAGE_2D",
"GL_UNSIGNED_INT_IMAGE_2D_ARRAY",
"GL_UNSIGNED_INT_IMAGE_3D",
"GL_UNSIGNED_INT_IMAGE_CUBE",
"GL_UNSIGNED_INT_SAMPLER_2D",
"GL_UNSIGNED_INT_SAMPLER_2D_ARRAY",
"GL_UNSIGNED_INT_SAMPLER_2D_MULTISAMPLE",
"GL_UNSIGNED_INT_SAMPLER_2D_MULTISAMPLE_ARRAY",
"GL_UNSIGNED_INT_SAMPLER_3D",
"GL_UNSIGNED_INT_SAMPLER_CUBE",
"GL_UNSIGNED_INT_VEC2",
"GL_UNSIGNED_INT_VEC3",
"GL_UNSIGNED_INT_VEC4"
] ]
# Uniform texture types. Be wary of substrings finding the wrong types. # Uniform texture types. Be wary of substrings finding the wrong types.
...@@ -142,9 +97,11 @@ const UniformTypeInfo &GetUniformTypeInfo(GLenum uniformType) ...@@ -142,9 +97,11 @@ const UniformTypeInfo &GetUniformTypeInfo(GLenum uniformType)
type_info_data_template = """{{{type}, {component_type}, {texture_type}, {transposed_type}, {bool_type}, {sampler_format}, {rows}, {columns}, {components}, {component_size}, {internal_size}, {external_size}, {is_sampler}, {is_matrix}, {is_image} }}""" type_info_data_template = """{{{type}, {component_type}, {texture_type}, {transposed_type}, {bool_type}, {sampler_format}, {rows}, {columns}, {components}, {component_size}, {internal_size}, {external_size}, {is_sampler}, {is_matrix}, {is_image} }}"""
type_index_case_template = """case {enum_value}: return {index_value};""" type_index_case_template = """case {enum_value}: return {index_value};"""
def cpp_bool(value): def cpp_bool(value):
return "true" if value else "false" return "true" if value else "false"
def get_component_type(uniform_type): def get_component_type(uniform_type):
if uniform_type.find("GL_BOOL") == 0: if uniform_type.find("GL_BOOL") == 0:
return "GL_BOOL" return "GL_BOOL"
...@@ -159,21 +116,25 @@ def get_component_type(uniform_type): ...@@ -159,21 +116,25 @@ def get_component_type(uniform_type):
else: else:
return "GL_INT" return "GL_INT"
def get_texture_type(uniform_type): def get_texture_type(uniform_type):
for sampler_type, tex_type in texture_types.items(): for sampler_type, tex_type in texture_types.items():
if uniform_type.endswith(sampler_type): if uniform_type.endswith(sampler_type):
return "GL_TEXTURE_" + tex_type return "GL_TEXTURE_" + tex_type
return "GL_NONE" return "GL_NONE"
def get_transposed_type(uniform_type): def get_transposed_type(uniform_type):
if "_MAT" in uniform_type: if "_MAT" in uniform_type:
if "x" in uniform_type: if "x" in uniform_type:
return "GL_FLOAT_MAT" + uniform_type[-1] + "x" + uniform_type[uniform_type.find("_MAT")+4] return "GL_FLOAT_MAT" + uniform_type[-1] + "x" + uniform_type[uniform_type.find("_MAT")
+ 4]
else: else:
return uniform_type return uniform_type
else: else:
return "GL_NONE" return "GL_NONE"
def get_bool_type(uniform_type): def get_bool_type(uniform_type):
if uniform_type == "GL_INT" or uniform_type == "GL_UNSIGNED_INT" or uniform_type == "GL_FLOAT": if uniform_type == "GL_INT" or uniform_type == "GL_UNSIGNED_INT" or uniform_type == "GL_FLOAT":
return "GL_BOOL" return "GL_BOOL"
...@@ -182,6 +143,7 @@ def get_bool_type(uniform_type): ...@@ -182,6 +143,7 @@ def get_bool_type(uniform_type):
else: else:
return "GL_NONE" return "GL_NONE"
def get_sampler_format(uniform_type): def get_sampler_format(uniform_type):
if not "_SAMPLER_" in uniform_type: if not "_SAMPLER_" in uniform_type:
return "SamplerFormat::InvalidEnum" return "SamplerFormat::InvalidEnum"
...@@ -194,6 +156,7 @@ def get_sampler_format(uniform_type): ...@@ -194,6 +156,7 @@ def get_sampler_format(uniform_type):
else: else:
return "SamplerFormat::Float" return "SamplerFormat::Float"
def get_rows(uniform_type): def get_rows(uniform_type):
if uniform_type == "GL_NONE": if uniform_type == "GL_NONE":
return "0" return "0"
...@@ -202,6 +165,7 @@ def get_rows(uniform_type): ...@@ -202,6 +165,7 @@ def get_rows(uniform_type):
else: else:
return "1" return "1"
def get_columns(uniform_type): def get_columns(uniform_type):
if uniform_type == "GL_NONE": if uniform_type == "GL_NONE":
return "0" return "0"
...@@ -212,9 +176,11 @@ def get_columns(uniform_type): ...@@ -212,9 +176,11 @@ def get_columns(uniform_type):
else: else:
return "1" return "1"
def get_components(uniform_type): def get_components(uniform_type):
return str(int(get_rows(uniform_type)) * int(get_columns(uniform_type))) return str(int(get_rows(uniform_type)) * int(get_columns(uniform_type)))
def get_component_size(uniform_type): def get_component_size(uniform_type):
component_type = get_component_type(uniform_type) component_type = get_component_type(uniform_type)
if (component_type) == "GL_BOOL": if (component_type) == "GL_BOOL":
...@@ -230,38 +196,45 @@ def get_component_size(uniform_type): ...@@ -230,38 +196,45 @@ def get_component_size(uniform_type):
else: else:
raise "Invalid component type: " + component_type raise "Invalid component type: " + component_type
def get_internal_size(uniform_type): def get_internal_size(uniform_type):
return get_component_size(uniform_type) + " * " + str(int(get_rows(uniform_type)) * 4) return get_component_size(uniform_type) + " * " + str(int(get_rows(uniform_type)) * 4)
def get_external_size(uniform_type): def get_external_size(uniform_type):
return get_component_size(uniform_type) + " * " + get_components(uniform_type) return get_component_size(uniform_type) + " * " + get_components(uniform_type)
def get_is_sampler(uniform_type): def get_is_sampler(uniform_type):
return cpp_bool("_SAMPLER_" in uniform_type) return cpp_bool("_SAMPLER_" in uniform_type)
def get_is_matrix(uniform_type): def get_is_matrix(uniform_type):
return cpp_bool("_MAT" in uniform_type) return cpp_bool("_MAT" in uniform_type)
def get_is_image(uniform_type): def get_is_image(uniform_type):
return cpp_bool("_IMAGE_" in uniform_type) return cpp_bool("_IMAGE_" in uniform_type)
def gen_type_info(uniform_type): def gen_type_info(uniform_type):
return type_info_data_template.format( return type_info_data_template.format(
type = uniform_type, type=uniform_type,
component_type = get_component_type(uniform_type), component_type=get_component_type(uniform_type),
texture_type = get_texture_type(uniform_type), texture_type=get_texture_type(uniform_type),
transposed_type = get_transposed_type(uniform_type), transposed_type=get_transposed_type(uniform_type),
bool_type = get_bool_type(uniform_type), bool_type=get_bool_type(uniform_type),
sampler_format = get_sampler_format(uniform_type), sampler_format=get_sampler_format(uniform_type),
rows = get_rows(uniform_type), rows=get_rows(uniform_type),
columns = get_columns(uniform_type), columns=get_columns(uniform_type),
components = get_components(uniform_type), components=get_components(uniform_type),
component_size = get_component_size(uniform_type), component_size=get_component_size(uniform_type),
internal_size = get_internal_size(uniform_type), internal_size=get_internal_size(uniform_type),
external_size = get_external_size(uniform_type), external_size=get_external_size(uniform_type),
is_sampler = get_is_sampler(uniform_type), is_sampler=get_is_sampler(uniform_type),
is_matrix = get_is_matrix(uniform_type), is_matrix=get_is_matrix(uniform_type),
is_image = get_is_image(uniform_type)) is_image=get_is_image(uniform_type))
def gen_type_index_case(index, uniform_type): def gen_type_index_case(index, uniform_type):
return "case " + uniform_type + ": return " + str(index) + ";" return "case " + uniform_type + ": return " + str(index) + ";"
...@@ -283,16 +256,20 @@ def main(): ...@@ -283,16 +256,20 @@ def main():
return 1 return 1
return 0 return 0
uniform_type_info_data = ",\n".join([gen_type_info(uniform_type) for uniform_type in all_uniform_types]) uniform_type_info_data = ",\n".join(
uniform_type_index_cases = "\n".join([gen_type_index_case(index, uniform_type) for index, uniform_type in enumerate(all_uniform_types)]) [gen_type_info(uniform_type) for uniform_type in all_uniform_types])
uniform_type_index_cases = "\n".join([
gen_type_index_case(index, uniform_type)
for index, uniform_type in enumerate(all_uniform_types)
])
with open('uniform_type_info_autogen.cpp', 'wt') as out_file: with open('uniform_type_info_autogen.cpp', 'wt') as out_file:
output_cpp = template_cpp.format( output_cpp = template_cpp.format(
script_name = sys.argv[0], script_name=sys.argv[0],
copyright_year = date.today().year, copyright_year=date.today().year,
total_count = len(all_uniform_types), total_count=len(all_uniform_types),
uniform_type_info_data = uniform_type_info_data, uniform_type_info_data=uniform_type_info_data,
uniform_type_index_cases = uniform_type_index_cases) uniform_type_index_cases=uniform_type_index_cases)
out_file.write(output_cpp) out_file.write(output_cpp)
out_file.close() out_file.close()
return 0 return 0
......
0af87b7f37d8a5260c859e9169a91f6a defc05f112e255400323d95b3610cfeb
\ No newline at end of file \ No newline at end of file
...@@ -255,65 +255,34 @@ namespace BuiltInGroup ...@@ -255,65 +255,34 @@ namespace BuiltInGroup
parsed_variables = None parsed_variables = None
basic_types_enumeration = [ basic_types_enumeration = [
'Void', 'Void', 'Float', 'Int', 'UInt', 'Bool', 'AtomicCounter', 'YuvCscStandardEXT', 'Sampler2D',
'Float', 'Sampler3D', 'SamplerCube', 'Sampler2DArray', 'SamplerExternalOES', 'SamplerExternal2DY2YEXT',
'Int', 'Sampler2DRect', 'Sampler2DMS', 'Sampler2DMSArray', 'ISampler2D', 'ISampler3D', 'ISamplerCube',
'UInt', 'ISampler2DArray', 'ISampler2DMS', 'ISampler2DMSArray', 'USampler2D', 'USampler3D',
'Bool', 'USamplerCube', 'USampler2DArray', 'USampler2DMS', 'USampler2DMSArray', 'Sampler2DShadow',
'AtomicCounter', 'SamplerCubeShadow', 'Sampler2DArrayShadow', 'Image2D', 'IImage2D', 'UImage2D', 'Image3D',
'YuvCscStandardEXT', 'IImage3D', 'UImage3D', 'Image2DArray', 'IImage2DArray', 'UImage2DArray', 'ImageCube',
'Sampler2D', 'IImageCube', 'UImageCube'
'Sampler3D',
'SamplerCube',
'Sampler2DArray',
'SamplerExternalOES',
'SamplerExternal2DY2YEXT',
'Sampler2DRect',
'Sampler2DMS',
'Sampler2DMSArray',
'ISampler2D',
'ISampler3D',
'ISamplerCube',
'ISampler2DArray',
'ISampler2DMS',
'ISampler2DMSArray',
'USampler2D',
'USampler3D',
'USamplerCube',
'USampler2DArray',
'USampler2DMS',
'USampler2DMSArray',
'Sampler2DShadow',
'SamplerCubeShadow',
'Sampler2DArrayShadow',
'Image2D',
'IImage2D',
'UImage2D',
'Image3D',
'IImage3D',
'UImage3D',
'Image2DArray',
'IImage2DArray',
'UImage2DArray',
'ImageCube',
'IImageCube',
'UImageCube'
] ]
id_counter = 0 id_counter = 0
def set_working_dir(): def set_working_dir():
script_dir = os.path.dirname(os.path.abspath(__file__)) script_dir = os.path.dirname(os.path.abspath(__file__))
os.chdir(script_dir) os.chdir(script_dir)
def get_basic_mangled_name(basic): def get_basic_mangled_name(basic):
index = basic_types_enumeration.index(basic) index = basic_types_enumeration.index(basic)
if index < 26: if index < 26:
return chr(ord('A') + index) return chr(ord('A') + index)
return chr(ord('a') + index - 26) return chr(ord('a') + index - 26)
levels = ['ESSL3_1_BUILTINS', 'ESSL3_BUILTINS', 'ESSL1_BUILTINS', 'COMMON_BUILTINS'] levels = ['ESSL3_1_BUILTINS', 'ESSL3_BUILTINS', 'ESSL1_BUILTINS', 'COMMON_BUILTINS']
def get_shader_version_condition_for_level(level): def get_shader_version_condition_for_level(level):
if level == 'ESSL3_1_BUILTINS': if level == 'ESSL3_1_BUILTINS':
return 'shaderVersion >= 310' return 'shaderVersion >= 310'
...@@ -326,8 +295,10 @@ def get_shader_version_condition_for_level(level): ...@@ -326,8 +295,10 @@ def get_shader_version_condition_for_level(level):
else: else:
raise Exception('Unsupported symbol table level') raise Exception('Unsupported symbol table level')
class GroupedList: class GroupedList:
""""Class for storing a list of objects grouped by symbol table level and condition.""" """"Class for storing a list of objects grouped by symbol table level and condition."""
def __init__(self): def __init__(self):
self.objs = OrderedDict() self.objs = OrderedDict()
self.max_name_length = 0 self.max_name_length = 0
...@@ -366,12 +337,12 @@ class GroupedList: ...@@ -366,12 +337,12 @@ class GroupedList:
continue continue
level_condition = get_shader_version_condition_for_level(level) level_condition = get_shader_version_condition_for_level(level)
if level_condition != '': if level_condition != '':
code.append('if ({condition})\n {{'.format(condition = level_condition)) code.append('if ({condition})\n {{'.format(condition=level_condition))
for condition, objs in self.objs[level].iteritems(): for condition, objs in self.objs[level].iteritems():
if len(objs) > 0: if len(objs) > 0:
if condition != 'NO_CONDITION': if condition != 'NO_CONDITION':
condition_header = ' if ({condition})\n {{'.format(condition = condition) condition_header = ' if ({condition})\n {{'.format(condition=condition)
code.append(condition_header.replace('shaderType', 'mShaderType')) code.append(condition_header.replace('shaderType', 'mShaderType'))
switch = {} switch = {}
...@@ -396,7 +367,9 @@ class GroupedList: ...@@ -396,7 +367,9 @@ class GroupedList:
code.append('return nullptr;') code.append('return nullptr;')
return '\n'.join(code) return '\n'.join(code)
class TType: class TType:
def __init__(self, glsl_header_type): def __init__(self, glsl_header_type):
if isinstance(glsl_header_type, basestring): if isinstance(glsl_header_type, basestring):
self.data = self.parse_type(glsl_header_type) self.data = self.parse_type(glsl_header_type)
...@@ -409,7 +382,8 @@ class TType: ...@@ -409,7 +382,8 @@ class TType:
# are overridden when the specific types are generated. # are overridden when the specific types are generated.
if 'primarySize' not in self.data: if 'primarySize' not in self.data:
if ('secondarySize' in self.data): if ('secondarySize' in self.data):
raise Exception('Unexpected secondarySize on type that does not have primarySize set') raise Exception(
'Unexpected secondarySize on type that does not have primarySize set')
self.data['primarySize'] = 1 self.data['primarySize'] = 1
if 'secondarySize' not in self.data: if 'secondarySize' not in self.data:
self.data['secondarySize'] = 1 self.data['secondarySize'] = 1
...@@ -553,6 +527,7 @@ class TType: ...@@ -553,6 +527,7 @@ class TType:
raise Exception('Unrecognized type: ' + str(glsl_header_type)) raise Exception('Unrecognized type: ' + str(glsl_header_type))
def get_parsed_functions(functions_txt_filename): def get_parsed_functions(functions_txt_filename):
def parse_function_parameters(parameters): def parse_function_parameters(parameters):
...@@ -567,7 +542,9 @@ def get_parsed_functions(functions_txt_filename): ...@@ -567,7 +542,9 @@ def get_parsed_functions(functions_txt_filename):
lines = [] lines = []
with open(functions_txt_filename) as f: with open(functions_txt_filename) as f:
lines = f.readlines() lines = f.readlines()
lines = [line.strip() for line in lines if line.strip() != '' and not line.strip().startswith('//')] lines = [
line.strip() for line in lines if line.strip() != '' and not line.strip().startswith('//')
]
fun_re = re.compile(r'^(\w+) (\w+)\((.*)\);$') fun_re = re.compile(r'^(\w+) (\w+)\((.*)\);$')
...@@ -580,11 +557,7 @@ def get_parsed_functions(functions_txt_filename): ...@@ -580,11 +557,7 @@ def get_parsed_functions(functions_txt_filename):
if line.startswith('GROUP BEGIN '): if line.startswith('GROUP BEGIN '):
group_rest = line[12:].strip() group_rest = line[12:].strip()
group_parts = group_rest.split(' ', 1) group_parts = group_rest.split(' ', 1)
current_group = { current_group = {'functions': [], 'name': group_parts[0], 'subgroups': {}}
'functions': [],
'name': group_parts[0],
'subgroups': {}
}
if len(group_parts) > 1: if len(group_parts) > 1:
group_metadata = json.loads(group_parts[1]) group_metadata = json.loads(group_parts[1])
current_group.update(group_metadata) current_group.update(group_metadata)
...@@ -593,7 +566,8 @@ def get_parsed_functions(functions_txt_filename): ...@@ -593,7 +566,8 @@ def get_parsed_functions(functions_txt_filename):
group_end_name = line[10:].strip() group_end_name = line[10:].strip()
current_group = group_stack[-1] current_group = group_stack[-1]
if current_group['name'] != group_end_name: if current_group['name'] != group_end_name:
raise Exception('GROUP END: Unexpected function group name "' + group_end_name + '" was expecting "' + current_group['name'] + '"') raise Exception('GROUP END: Unexpected function group name "' + group_end_name +
'" was expecting "' + current_group['name'] + '"')
group_stack.pop() group_stack.pop()
is_top_level_group = (len(group_stack) == 0) is_top_level_group = (len(group_stack) == 0)
if is_top_level_group: if is_top_level_group:
...@@ -621,7 +595,10 @@ def get_parsed_functions(functions_txt_filename): ...@@ -621,7 +595,10 @@ def get_parsed_functions(functions_txt_filename):
return parsed_functions return parsed_functions
fnvPrime = 16777619 fnvPrime = 16777619
def hash32(str): def hash32(str):
fnvOffsetBasis = 0x811c9dc5 fnvOffsetBasis = 0x811c9dc5
hash = fnvOffsetBasis hash = fnvOffsetBasis
...@@ -630,7 +607,8 @@ def hash32(str): ...@@ -630,7 +607,8 @@ def hash32(str):
hash = (hash * fnvPrime) & 0xffffffff hash = (hash * fnvPrime) & 0xffffffff
return hash return hash
def mangledNameHash(str, script_generated_hash_tests, save_test = True):
def mangledNameHash(str, script_generated_hash_tests, save_test=True):
hash = hash32(str) hash = hash32(str)
index = 0 index = 0
max_six_bit_value = (1 << 6) - 1 max_six_bit_value = (1 << 6) - 1
...@@ -642,22 +620,27 @@ def mangledNameHash(str, script_generated_hash_tests, save_test = True): ...@@ -642,22 +620,27 @@ def mangledNameHash(str, script_generated_hash_tests, save_test = True):
elif c == '{' or c == '[': elif c == '{' or c == '[':
has_array_or_block_param_bit = 1 has_array_or_block_param_bit = 1
index += 1 index += 1
hash = ((hash >> 13) ^ (hash & 0x1fff)) | (index << 19) | (paren_location << 25) | (has_array_or_block_param_bit << 31) hash = ((hash >> 13) ^ (hash & 0x1fff)) | (index << 19) | (paren_location << 25) | (
has_array_or_block_param_bit << 31)
if save_test: if save_test:
sanity_check = ' ASSERT_EQ(0x{hash}u, ImmutableString("{str}").mangledNameHash());'.format(hash = ('%08x' % hash), str = str) sanity_check = ' ASSERT_EQ(0x{hash}u, ImmutableString("{str}").mangledNameHash());'.format(
hash=('%08x' % hash), str=str)
script_generated_hash_tests.update({sanity_check: None}) script_generated_hash_tests.update({sanity_check: None})
return hash return hash
def get_suffix(props): def get_suffix(props):
if 'suffix' in props: if 'suffix' in props:
return props['suffix'] return props['suffix']
return '' return ''
def get_extension(props): def get_extension(props):
if 'extension' in props: if 'extension' in props:
return props['extension'] return props['extension']
return 'UNDEFINED' return 'UNDEFINED'
def get_op(name, function_props): def get_op(name, function_props):
if 'op' not in function_props: if 'op' not in function_props:
raise Exception('function op not defined') raise Exception('function op not defined')
...@@ -665,34 +648,40 @@ def get_op(name, function_props): ...@@ -665,34 +648,40 @@ def get_op(name, function_props):
return name[0].upper() + name[1:] return name[0].upper() + name[1:]
return function_props['op'] return function_props['op']
def get_known_to_not_have_side_effects(function_props): def get_known_to_not_have_side_effects(function_props):
if 'op' in function_props and function_props['op'] != 'CallBuiltInFunction': if 'op' in function_props and function_props['op'] != 'CallBuiltInFunction':
if 'hasSideEffects' in function_props: if 'hasSideEffects' in function_props:
return 'false' return 'false'
else: else:
for param in get_parameters(function_props): for param in get_parameters(function_props):
if 'qualifier' in param.data and (param.data['qualifier'] == 'Out' or param.data['qualifier'] == 'InOut'): if 'qualifier' in param.data and (param.data['qualifier'] == 'Out' or
param.data['qualifier'] == 'InOut'):
return 'false' return 'false'
return 'true' return 'true'
return 'false' return 'false'
def get_parameters(function_props): def get_parameters(function_props):
if 'parameters' in function_props: if 'parameters' in function_props:
return function_props['parameters'] return function_props['parameters']
return [] return []
def get_function_mangled_name(function_name, parameters): def get_function_mangled_name(function_name, parameters):
mangled_name = function_name + '(' mangled_name = function_name + '('
for param in parameters: for param in parameters:
mangled_name += param.get_mangled_name() mangled_name += param.get_mangled_name()
return mangled_name return mangled_name
def get_function_human_readable_name(function_name, parameters): def get_function_human_readable_name(function_name, parameters):
name = function_name name = function_name
for param in parameters: for param in parameters:
name += '_' + param.get_human_readable_name() name += '_' + param.get_human_readable_name()
return name return name
def gen_parameters_variant_ids(str_len, ttype_mangled_name_variants): def gen_parameters_variant_ids(str_len, ttype_mangled_name_variants):
# Note that this doesn't generate variants with array parameters or struct / interface block parameters. They are assumed to have been filtered out separately. # Note that this doesn't generate variants with array parameters or struct / interface block parameters. They are assumed to have been filtered out separately.
if str_len % 2 != 0: if str_len % 2 != 0:
...@@ -700,7 +689,9 @@ def gen_parameters_variant_ids(str_len, ttype_mangled_name_variants): ...@@ -700,7 +689,9 @@ def gen_parameters_variant_ids(str_len, ttype_mangled_name_variants):
num_variants = pow(len(ttype_mangled_name_variants), str_len / 2) num_variants = pow(len(ttype_mangled_name_variants), str_len / 2)
return xrange(num_variants) return xrange(num_variants)
def get_parameters_mangled_name_variant(variant_id, paren_location, total_length, ttype_mangled_name_variants):
def get_parameters_mangled_name_variant(variant_id, paren_location, total_length,
ttype_mangled_name_variants):
str_len = total_length - paren_location - 1 str_len = total_length - paren_location - 1
if str_len % 2 != 0: if str_len % 2 != 0:
raise Exception('Expecting parameters mangled name length to be divisible by two') raise Exception('Expecting parameters mangled name length to be divisible by two')
...@@ -714,6 +705,7 @@ def get_parameters_mangled_name_variant(variant_id, paren_location, total_length ...@@ -714,6 +705,7 @@ def get_parameters_mangled_name_variant(variant_id, paren_location, total_length
variant += ttype_mangled_name_variants[parameter_variant_index] variant += ttype_mangled_name_variants[parameter_variant_index]
return variant return variant
# Calculate the mangled name hash of a common prefix string that's been pre-hashed with hash32() # Calculate the mangled name hash of a common prefix string that's been pre-hashed with hash32()
# plus a variant of the parameters. This is faster than constructing the whole string and then # plus a variant of the parameters. This is faster than constructing the whole string and then
# calculating the hash for that. # calculating the hash for that.
...@@ -732,8 +724,10 @@ def get_mangled_name_variant_hash(prefix_hash32, variant_id, paren_location, tot ...@@ -732,8 +724,10 @@ def get_mangled_name_variant_hash(prefix_hash32, variant_id, paren_location, tot
parameter_variant_id_base = parameter_variant_id_base / num_type_variants parameter_variant_id_base = parameter_variant_id_base / num_type_variants
return ((hash >> 13) ^ (hash & 0x1fff)) | (total_length << 19) | (paren_location << 25) return ((hash >> 13) ^ (hash & 0x1fff)) | (total_length << 19) | (paren_location << 25)
def mangled_name_hash_can_collide_with_different_parameters(function_variant_props, num_type_variants,
ttype_mangled_name_variants, script_generated_hash_tests): def mangled_name_hash_can_collide_with_different_parameters(
function_variant_props, num_type_variants, ttype_mangled_name_variants,
script_generated_hash_tests):
# We exhaustively search through all possible lists of parameters and see if any other mangled # We exhaustively search through all possible lists of parameters and see if any other mangled
# name has the same hash. # name has the same hash.
mangled_name = function_variant_props['mangled_name'] mangled_name = function_variant_props['mangled_name']
...@@ -747,21 +741,25 @@ def mangled_name_hash_can_collide_with_different_parameters(function_variant_pro ...@@ -747,21 +741,25 @@ def mangled_name_hash_can_collide_with_different_parameters(function_variant_pro
if (parameters_mangled_name_len > 6): if (parameters_mangled_name_len > 6):
# This increases the complexity of searching for hash collisions considerably, so rather than doing it we just conservatively assume that a hash collision may be possible. # This increases the complexity of searching for hash collisions considerably, so rather than doing it we just conservatively assume that a hash collision may be possible.
return True return True
for variant_id in gen_parameters_variant_ids(parameters_mangled_name_len, ttype_mangled_name_variants): for variant_id in gen_parameters_variant_ids(parameters_mangled_name_len,
variant_hash = get_mangled_name_variant_hash(prefix_hash32, variant_id, paren_location, mangled_name_len, ttype_mangled_name_variants):
num_type_variants, ttype_mangled_name_variants) variant_hash = get_mangled_name_variant_hash(prefix_hash32, variant_id, paren_location,
manged_name_variant = get_parameters_mangled_name_variant(variant_id, paren_location, mangled_name_len, mangled_name_len, num_type_variants,
ttype_mangled_name_variants) ttype_mangled_name_variants)
manged_name_variant = get_parameters_mangled_name_variant(
variant_id, paren_location, mangled_name_len, ttype_mangled_name_variants)
if variant_hash == hash and manged_name_variant != parameters_mangled_name: if variant_hash == hash and manged_name_variant != parameters_mangled_name:
return True return True
return False return False
def get_unique_identifier_name(function_name, parameters): def get_unique_identifier_name(function_name, parameters):
unique_name = function_name + '_' unique_name = function_name + '_'
for param in parameters: for param in parameters:
unique_name += param.get_mangled_name() unique_name += param.get_mangled_name()
return unique_name return unique_name
def get_variable_name_to_store_parameter(param): def get_variable_name_to_store_parameter(param):
unique_name = 'pt' unique_name = 'pt'
if 'qualifier' in param.data: if 'qualifier' in param.data:
...@@ -772,6 +770,7 @@ def get_variable_name_to_store_parameter(param): ...@@ -772,6 +770,7 @@ def get_variable_name_to_store_parameter(param):
unique_name += param.get_mangled_name() unique_name += param.get_mangled_name()
return unique_name return unique_name
def get_variable_name_to_store_parameters(parameters): def get_variable_name_to_store_parameters(parameters):
if len(parameters) == 0: if len(parameters) == 0:
return 'empty' return 'empty'
...@@ -785,10 +784,12 @@ def get_variable_name_to_store_parameters(parameters): ...@@ -785,10 +784,12 @@ def get_variable_name_to_store_parameters(parameters):
unique_name += param.get_mangled_name() unique_name += param.get_mangled_name()
return unique_name return unique_name
def define_constexpr_variable(template_args, variable_declarations): def define_constexpr_variable(template_args, variable_declarations):
template_variable_declaration = 'constexpr const TVariable kVar_{name_with_suffix}(BuiltInId::{name_with_suffix}, BuiltInName::{name}, SymbolType::BuiltIn, TExtension::{extension}, {type});' template_variable_declaration = 'constexpr const TVariable kVar_{name_with_suffix}(BuiltInId::{name_with_suffix}, BuiltInName::{name}, SymbolType::BuiltIn, TExtension::{extension}, {type});'
variable_declarations.append(template_variable_declaration.format(**template_args)) variable_declarations.append(template_variable_declaration.format(**template_args))
def gen_function_variants(function_name, function_props): def gen_function_variants(function_name, function_props):
function_variants = [] function_variants = []
parameters = get_parameters(function_props) parameters = get_parameters(function_props)
...@@ -797,10 +798,12 @@ def gen_function_variants(function_name, function_props): ...@@ -797,10 +798,12 @@ def gen_function_variants(function_name, function_props):
for param in parameters: for param in parameters:
if 'genType' in param.data: if 'genType' in param.data:
if param.data['genType'] not in ['sampler_or_image', 'vec', 'yes']: if param.data['genType'] not in ['sampler_or_image', 'vec', 'yes']:
raise Exception('Unexpected value of genType "' + str(param.data['genType']) + '" should be "sampler_or_image", "vec", or "yes"') raise Exception('Unexpected value of genType "' + str(param.data['genType']) +
'" should be "sampler_or_image", "vec", or "yes"')
gen_type.add(param.data['genType']) gen_type.add(param.data['genType'])
if len(gen_type) > 1: if len(gen_type) > 1:
raise Exception('Unexpected multiple values of genType set on the same function: ' + str(list(gen_type))) raise Exception('Unexpected multiple values of genType set on the same function: '
+ str(list(gen_type)))
if len(gen_type) == 0: if len(gen_type) == 0:
function_variants.append(function_props) function_variants.append(function_props)
return function_variants return function_variants
...@@ -815,7 +818,8 @@ def gen_function_variants(function_name, function_props): ...@@ -815,7 +818,8 @@ def gen_function_variants(function_name, function_props):
for param in parameters: for param in parameters:
variant_parameters.append(param.specific_sampler_or_image_type(type)) variant_parameters.append(param.specific_sampler_or_image_type(type))
variant_props['parameters'] = variant_parameters variant_props['parameters'] = variant_parameters
variant_props['returnType'] = function_props['returnType'].specific_sampler_or_image_type(type) variant_props['returnType'] = function_props[
'returnType'].specific_sampler_or_image_type(type)
function_variants.append(variant_props) function_variants.append(variant_props)
return function_variants return function_variants
...@@ -833,10 +837,13 @@ def gen_function_variants(function_name, function_props): ...@@ -833,10 +837,13 @@ def gen_function_variants(function_name, function_props):
function_variants.append(variant_props) function_variants.append(variant_props)
return function_variants return function_variants
def process_single_function_group(condition, group_name, group, num_type_variants, parameter_declarations, ttype_mangled_name_variants,
name_declarations, unmangled_function_if_statements, unmangled_builtin_declarations, defined_function_variants, def process_single_function_group(
builtin_id_declarations, builtin_id_definitions, defined_parameter_names, variable_declarations, function_declarations, condition, group_name, group, num_type_variants, parameter_declarations,
script_generated_hash_tests, get_builtin_if_statements): ttype_mangled_name_variants, name_declarations, unmangled_function_if_statements,
unmangled_builtin_declarations, defined_function_variants, builtin_id_declarations,
builtin_id_definitions, defined_parameter_names, variable_declarations,
function_declarations, script_generated_hash_tests, get_builtin_if_statements):
global id_counter global id_counter
if 'functions' not in group: if 'functions' not in group:
...@@ -867,21 +874,30 @@ def process_single_function_group(condition, group_name, group, num_type_variant ...@@ -867,21 +874,30 @@ def process_single_function_group(condition, group_name, group, num_type_variant
return &UnmangledBuiltIns::{extension}; return &UnmangledBuiltIns::{extension};
}}""" }}"""
unmangled_if = template_unmangled_if.format(**template_args) unmangled_if = template_unmangled_if.format(**template_args)
unmangled_builtin_no_condition = unmangled_function_if_statements.get(level, 'NO_CONDITION', function_name) unmangled_builtin_no_condition = unmangled_function_if_statements.get(
if unmangled_builtin_no_condition != None and unmangled_builtin_no_condition['extension'] == 'UNDEFINED': level, 'NO_CONDITION', function_name)
if unmangled_builtin_no_condition != None and unmangled_builtin_no_condition[
'extension'] == 'UNDEFINED':
# We already have this unmangled name without a condition nor extension on the same level. No need to add a duplicate with a condition. # We already have this unmangled name without a condition nor extension on the same level. No need to add a duplicate with a condition.
pass pass
elif (not unmangled_function_if_statements.has_key(level, condition, function_name)) or extension == 'UNDEFINED': elif (not unmangled_function_if_statements.has_key(
level, condition, function_name)) or extension == 'UNDEFINED':
# We don't have this unmangled builtin recorded yet or we might replace an unmangled builtin from an extension with one from core. # We don't have this unmangled builtin recorded yet or we might replace an unmangled builtin from an extension with one from core.
unmangled_function_if_statements.add_obj(level, condition, function_name, {'hash_matched_code': unmangled_if, 'extension': extension}) unmangled_function_if_statements.add_obj(level, condition, function_name, {
unmangled_builtin_declarations.add('constexpr const UnmangledBuiltIn {extension}(TExtension::{extension});'.format(**template_args)) 'hash_matched_code': unmangled_if,
'extension': extension
})
unmangled_builtin_declarations.add(
'constexpr const UnmangledBuiltIn {extension}(TExtension::{extension});'.format(
**template_args))
for function_props in function_variants: for function_props in function_variants:
template_args['id'] = id_counter template_args['id'] = id_counter
parameters = get_parameters(function_props) parameters = get_parameters(function_props)
template_args['unique_name'] = get_unique_identifier_name(template_args['name_with_suffix'], parameters) template_args['unique_name'] = get_unique_identifier_name(
template_args['name_with_suffix'], parameters)
if template_args['unique_name'] in defined_function_variants: if template_args['unique_name'] in defined_function_variants:
continue continue
...@@ -890,7 +906,8 @@ def process_single_function_group(condition, group_name, group, num_type_variant ...@@ -890,7 +906,8 @@ def process_single_function_group(condition, group_name, group, num_type_variant
template_args['param_count'] = len(parameters) template_args['param_count'] = len(parameters)
template_args['return_type'] = function_props['returnType'].get_statictype_string() template_args['return_type'] = function_props['returnType'].get_statictype_string()
template_args['mangled_name'] = get_function_mangled_name(function_name, parameters) template_args['mangled_name'] = get_function_mangled_name(function_name, parameters)
template_args['human_readable_name'] = get_function_human_readable_name(template_args['name_with_suffix'], parameters) template_args['human_readable_name'] = get_function_human_readable_name(
template_args['name_with_suffix'], parameters)
template_args['mangled_name_length'] = len(template_args['mangled_name']) template_args['mangled_name_length'] = len(template_args['mangled_name'])
template_builtin_id_declaration = ' static constexpr const TSymbolUniqueId {human_readable_name} = TSymbolUniqueId({id});' template_builtin_id_declaration = ' static constexpr const TSymbolUniqueId {human_readable_name} = TSymbolUniqueId({id});'
...@@ -911,19 +928,26 @@ def process_single_function_group(condition, group_name, group, num_type_variant ...@@ -911,19 +928,26 @@ def process_single_function_group(condition, group_name, group, num_type_variant
id_counter += 1 id_counter += 1
param_template_args['id'] = id_counter param_template_args['id'] = id_counter
template_builtin_id_declaration = ' static constexpr const TSymbolUniqueId {name_with_suffix} = TSymbolUniqueId({id});' template_builtin_id_declaration = ' static constexpr const TSymbolUniqueId {name_with_suffix} = TSymbolUniqueId({id});'
builtin_id_declarations.append(template_builtin_id_declaration.format(**param_template_args)) builtin_id_declarations.append(
template_builtin_id_declaration.format(**param_template_args))
define_constexpr_variable(param_template_args, variable_declarations) define_constexpr_variable(param_template_args, variable_declarations)
defined_parameter_names.add(unique_param_name) defined_parameter_names.add(unique_param_name)
parameters_list.append('&BuiltInVariable::kVar_{name_with_suffix}'.format(**param_template_args)); parameters_list.append('&BuiltInVariable::kVar_{name_with_suffix}'.format(
**param_template_args))
template_args['parameters_var_name'] = get_variable_name_to_store_parameters(parameters) template_args['parameters_var_name'] = get_variable_name_to_store_parameters(
parameters)
if len(parameters) > 0: if len(parameters) > 0:
template_args['parameters_list'] = ', '.join(parameters_list) template_args['parameters_list'] = ', '.join(parameters_list)
template_parameter_list_declaration = 'constexpr const TVariable *{parameters_var_name}[{param_count}] = {{ {parameters_list} }};' template_parameter_list_declaration = 'constexpr const TVariable *{parameters_var_name}[{param_count}] = {{ {parameters_list} }};'
parameter_declarations[template_args['parameters_var_name']] = template_parameter_list_declaration.format(**template_args) parameter_declarations[template_args[
'parameters_var_name']] = template_parameter_list_declaration.format(
**template_args)
else: else:
template_parameter_list_declaration = 'constexpr const TVariable **{parameters_var_name} = nullptr;' template_parameter_list_declaration = 'constexpr const TVariable **{parameters_var_name} = nullptr;'
parameter_declarations[template_args['parameters_var_name']] = template_parameter_list_declaration.format(**template_args) parameter_declarations[template_args[
'parameters_var_name']] = template_parameter_list_declaration.format(
**template_args)
template_function_declaration = 'constexpr const TFunction kFunction_{unique_name}(BuiltInId::{human_readable_name}, BuiltInName::{name_with_suffix}, TExtension::{extension}, BuiltInParameters::{parameters_var_name}, {param_count}, {return_type}, EOp{op}, {known_to_not_have_side_effects});' template_function_declaration = 'constexpr const TFunction kFunction_{unique_name}(BuiltInId::{human_readable_name}, BuiltInName::{name_with_suffix}, TExtension::{extension}, BuiltInParameters::{parameters_var_name}, {param_count}, {return_type}, EOp{op}, {known_to_not_have_side_effects});'
function_declarations.append(template_function_declaration.format(**template_args)) function_declarations.append(template_function_declaration.format(**template_args))
...@@ -932,8 +956,9 @@ def process_single_function_group(condition, group_name, group, num_type_variant ...@@ -932,8 +956,9 @@ def process_single_function_group(condition, group_name, group, num_type_variant
# name and hash, then we can only check the mangled name length and the function name # name and hash, then we can only check the mangled name length and the function name
# instead of checking the whole mangled name. # instead of checking the whole mangled name.
template_mangled_if = '' template_mangled_if = ''
if mangled_name_hash_can_collide_with_different_parameters(template_args, num_type_variants, if mangled_name_hash_can_collide_with_different_parameters(
ttype_mangled_name_variants, script_generated_hash_tests): template_args, num_type_variants, ttype_mangled_name_variants,
script_generated_hash_tests):
template_mangled_name_declaration = 'constexpr const ImmutableString {unique_name}("{mangled_name}");' template_mangled_name_declaration = 'constexpr const ImmutableString {unique_name}("{mangled_name}");'
name_declarations.add(template_mangled_name_declaration.format(**template_args)) name_declarations.add(template_mangled_name_declaration.format(**template_args))
template_mangled_if = """if (name == BuiltInName::{unique_name}) template_mangled_if = """if (name == BuiltInName::{unique_name})
...@@ -947,15 +972,18 @@ def process_single_function_group(condition, group_name, group, num_type_variant ...@@ -947,15 +972,18 @@ def process_single_function_group(condition, group_name, group, num_type_variant
return &BuiltInFunction::kFunction_{unique_name}; return &BuiltInFunction::kFunction_{unique_name};
}}""" }}"""
mangled_if = template_mangled_if.format(**template_args) mangled_if = template_mangled_if.format(**template_args)
get_builtin_if_statements.add_obj(level, condition, template_args['mangled_name'], {'hash_matched_code': mangled_if}) get_builtin_if_statements.add_obj(level, condition, template_args['mangled_name'],
{'hash_matched_code': mangled_if})
id_counter += 1 id_counter += 1
def process_function_group(group_name, group, num_type_variants, parameter_declarations, ttype_mangled_name_variants,
def process_function_group(
group_name, group, num_type_variants, parameter_declarations, ttype_mangled_name_variants,
name_declarations, unmangled_function_if_statements, unmangled_builtin_declarations, name_declarations, unmangled_function_if_statements, unmangled_builtin_declarations,
defined_function_variants, builtin_id_declarations, builtin_id_definitions, defined_parameter_names, defined_function_variants, builtin_id_declarations, builtin_id_definitions,
variable_declarations, function_declarations, script_generated_hash_tests, get_builtin_if_statements, defined_parameter_names, variable_declarations, function_declarations,
is_in_group_definitions): script_generated_hash_tests, get_builtin_if_statements, is_in_group_definitions):
global id_counter global id_counter
first_id = id_counter first_id = id_counter
...@@ -963,25 +991,25 @@ def process_function_group(group_name, group, num_type_variants, parameter_decla ...@@ -963,25 +991,25 @@ def process_function_group(group_name, group, num_type_variants, parameter_decla
if 'condition' in group: if 'condition' in group:
condition = group['condition'] condition = group['condition']
process_single_function_group(condition, group_name, group, num_type_variants, parameter_declarations, process_single_function_group(
ttype_mangled_name_variants, name_declarations, unmangled_function_if_statements, unmangled_builtin_declarations, condition, group_name, group, num_type_variants, parameter_declarations,
defined_function_variants, builtin_id_declarations, builtin_id_definitions, defined_parameter_names, ttype_mangled_name_variants, name_declarations, unmangled_function_if_statements,
variable_declarations, function_declarations, script_generated_hash_tests, get_builtin_if_statements) unmangled_builtin_declarations, defined_function_variants, builtin_id_declarations,
builtin_id_definitions, defined_parameter_names, variable_declarations,
function_declarations, script_generated_hash_tests, get_builtin_if_statements)
if 'subgroups' in group: if 'subgroups' in group:
for subgroup_name, subgroup in group['subgroups'].iteritems(): for subgroup_name, subgroup in group['subgroups'].iteritems():
process_function_group(group_name + subgroup_name, subgroup, num_type_variants, parameter_declarations, process_function_group(
group_name + subgroup_name, subgroup, num_type_variants, parameter_declarations,
ttype_mangled_name_variants, name_declarations, unmangled_function_if_statements, ttype_mangled_name_variants, name_declarations, unmangled_function_if_statements,
unmangled_builtin_declarations, defined_function_variants, builtin_id_declarations, unmangled_builtin_declarations, defined_function_variants, builtin_id_declarations,
builtin_id_definitions, defined_parameter_names, variable_declarations, function_declarations, builtin_id_definitions, defined_parameter_names, variable_declarations,
script_generated_hash_tests, get_builtin_if_statements, is_in_group_definitions) function_declarations, script_generated_hash_tests, get_builtin_if_statements,
is_in_group_definitions)
if 'queryFunction' in group: if 'queryFunction' in group:
template_args = { template_args = {'first_id': first_id, 'last_id': id_counter - 1, 'group_name': group_name}
'first_id': first_id,
'last_id': id_counter - 1,
'group_name': group_name
}
template_is_in_group_definition = """bool is{group_name}(const TFunction *func) template_is_in_group_definition = """bool is{group_name}(const TFunction *func)
{{ {{
int id = func->uniqueId().get(); int id = func->uniqueId().get();
...@@ -989,11 +1017,13 @@ def process_function_group(group_name, group, num_type_variants, parameter_decla ...@@ -989,11 +1017,13 @@ def process_function_group(group_name, group, num_type_variants, parameter_decla
}}""" }}"""
is_in_group_definitions.append(template_is_in_group_definition.format(**template_args)) is_in_group_definitions.append(template_is_in_group_definition.format(**template_args))
def prune_parameters_arrays(parameter_declarations, function_declarations): def prune_parameters_arrays(parameter_declarations, function_declarations):
# We can share parameters arrays between functions in case one array is a subarray of another. # We can share parameters arrays between functions in case one array is a subarray of another.
parameter_variable_name_replacements = {} parameter_variable_name_replacements = {}
used_param_variable_names = set() used_param_variable_names = set()
for param_variable_name, param_declaration in sorted(parameter_declarations.iteritems(), key=lambda item: -len(item[0])): for param_variable_name, param_declaration in sorted(
parameter_declarations.iteritems(), key=lambda item: -len(item[0])):
replaced = False replaced = False
for used in used_param_variable_names: for used in used_param_variable_names:
if used.startswith(param_variable_name): if used.startswith(param_variable_name):
...@@ -1005,12 +1035,19 @@ def prune_parameters_arrays(parameter_declarations, function_declarations): ...@@ -1005,12 +1035,19 @@ def prune_parameters_arrays(parameter_declarations, function_declarations):
for i in xrange(len(function_declarations)): for i in xrange(len(function_declarations)):
for replaced, replacement in parameter_variable_name_replacements.iteritems(): for replaced, replacement in parameter_variable_name_replacements.iteritems():
function_declarations[i] = function_declarations[i].replace('BuiltInParameters::' + replaced + ',', 'BuiltInParameters::' + replacement + ',') function_declarations[i] = function_declarations[i].replace(
'BuiltInParameters::' + replaced + ',', 'BuiltInParameters::' + replacement + ',')
return [
value for key, value in parameter_declarations.iteritems()
if key in used_param_variable_names
]
return [value for key, value in parameter_declarations.iteritems() if key in used_param_variable_names]
def process_single_variable_group(condition, group_name, group, builtin_id_declarations, builtin_id_definitions, name_declarations, def process_single_variable_group(condition, group_name, group, builtin_id_declarations,
init_member_variables, get_variable_declarations, get_builtin_if_statements, declare_member_variables, variable_declarations, builtin_id_definitions, name_declarations, init_member_variables,
get_variable_declarations, get_builtin_if_statements,
declare_member_variables, variable_declarations,
get_variable_definitions, variable_name_count): get_variable_definitions, variable_name_count):
global id_counter global id_counter
if 'variables' not in group: if 'variables' not in group:
...@@ -1046,7 +1083,8 @@ def process_single_variable_group(condition, group_name, group, builtin_id_decla ...@@ -1046,7 +1083,8 @@ def process_single_variable_group(condition, group_name, group, builtin_id_decla
# Handle struct and interface block definitions. # Handle struct and interface block definitions.
template_args['class'] = props['class'] template_args['class'] = props['class']
template_args['fields'] = 'fields_{name_with_suffix}'.format(**template_args) template_args['fields'] = 'fields_{name_with_suffix}'.format(**template_args)
init_member_variables.append(' TFieldList *{fields} = new TFieldList();'.format(**template_args)) init_member_variables.append(' TFieldList *{fields} = new TFieldList();'.format(
**template_args))
for field_name, field_type in props['fields'].iteritems(): for field_name, field_type in props['fields'].iteritems():
template_args['field_name'] = field_name template_args['field_name'] = field_name
template_args['field_type'] = TType(field_type).get_dynamic_type_string() template_args['field_type'] = TType(field_type).get_dynamic_type_string()
...@@ -1074,7 +1112,8 @@ def process_single_variable_group(condition, group_name, group, builtin_id_decla ...@@ -1074,7 +1112,8 @@ def process_single_variable_group(condition, group_name, group, builtin_id_decla
elif 'value' in props: elif 'value' in props:
# Handle variables with constant value, such as gl_MaxDrawBuffers. # Handle variables with constant value, such as gl_MaxDrawBuffers.
if props['value'] != 'resources': if props['value'] != 'resources':
raise Exception('Unrecognized value source in variable properties: ' + str(props['value'])) raise Exception('Unrecognized value source in variable properties: ' +
str(props['value']))
resources_key = variable_name[3:] resources_key = variable_name[3:]
if 'valueKey' in props: if 'valueKey' in props:
resources_key = props['valueKey'] resources_key = props['valueKey']
...@@ -1104,14 +1143,16 @@ def process_single_variable_group(condition, group_name, group, builtin_id_decla ...@@ -1104,14 +1143,16 @@ def process_single_variable_group(condition, group_name, group, builtin_id_decla
is_member = False is_member = False
template_get_variable_declaration = 'const TVariable *{name_with_suffix}();' template_get_variable_declaration = 'const TVariable *{name_with_suffix}();'
get_variable_declarations.append(template_get_variable_declaration.format(**template_args)) get_variable_declarations.append(
template_get_variable_declaration.format(**template_args))
template_get_variable_definition = """const TVariable *{name_with_suffix}() template_get_variable_definition = """const TVariable *{name_with_suffix}()
{{ {{
return &kVar_{name_with_suffix}; return &kVar_{name_with_suffix};
}} }}
""" """
get_variable_definitions.append(template_get_variable_definition.format(**template_args)) get_variable_definitions.append(
template_get_variable_definition.format(**template_args))
if level != 'GLSL_BUILTINS': if level != 'GLSL_BUILTINS':
template_name_if = """if (name == BuiltInName::{name}) template_name_if = """if (name == BuiltInName::{name})
...@@ -1119,15 +1160,20 @@ def process_single_variable_group(condition, group_name, group, builtin_id_decla ...@@ -1119,15 +1160,20 @@ def process_single_variable_group(condition, group_name, group, builtin_id_decla
return &BuiltInVariable::kVar_{name_with_suffix}; return &BuiltInVariable::kVar_{name_with_suffix};
}}""" }}"""
name_if = template_name_if.format(**template_args) name_if = template_name_if.format(**template_args)
get_builtin_if_statements.add_obj(level, condition, template_args['name'], {'hash_matched_code': name_if}) get_builtin_if_statements.add_obj(level, condition, template_args['name'],
{'hash_matched_code': name_if})
if is_member: if is_member:
get_condition = condition get_condition = condition
init_conditionally = (condition != 'NO_CONDITION' and variable_name_count[variable_name] == 1) init_conditionally = (
condition != 'NO_CONDITION' and variable_name_count[variable_name] == 1)
if init_conditionally: if init_conditionally:
# Instead of having the condition if statement at lookup, it's cheaper to have it at initialization time. # Instead of having the condition if statement at lookup, it's cheaper to have it at initialization time.
init_member_variables.append(' if ({condition})\n {{'.format(condition = condition)) init_member_variables.append(
template_args['condition_comment'] = '\n // Only initialized if {condition}'.format(condition = condition) ' if ({condition})\n {{'.format(condition=condition))
template_args[
'condition_comment'] = '\n // Only initialized if {condition}'.format(
condition=condition)
get_condition = 'NO_CONDITION' get_condition = 'NO_CONDITION'
else: else:
template_args['condition_comment'] = '' template_args['condition_comment'] = ''
...@@ -1136,7 +1182,8 @@ def process_single_variable_group(condition, group_name, group, builtin_id_decla ...@@ -1136,7 +1182,8 @@ def process_single_variable_group(condition, group_name, group, builtin_id_decla
init_member_variables.append(' }') init_member_variables.append(' }')
template_declare_member_variable = '{class} *mVar_{name_with_suffix} = nullptr;' template_declare_member_variable = '{class} *mVar_{name_with_suffix} = nullptr;'
declare_member_variables.append(template_declare_member_variable.format(**template_args)) declare_member_variables.append(
template_declare_member_variable.format(**template_args))
if level != 'GLSL_BUILTINS': if level != 'GLSL_BUILTINS':
template_name_if = """if (name == BuiltInName::{name}) template_name_if = """if (name == BuiltInName::{name})
...@@ -1144,10 +1191,12 @@ def process_single_variable_group(condition, group_name, group, builtin_id_decla ...@@ -1144,10 +1191,12 @@ def process_single_variable_group(condition, group_name, group, builtin_id_decla
return mVar_{name_with_suffix}; return mVar_{name_with_suffix};
}}""" }}"""
name_if = template_name_if.format(**template_args) name_if = template_name_if.format(**template_args)
get_builtin_if_statements.add_obj(level, get_condition, variable_name, {'hash_matched_code': name_if}) get_builtin_if_statements.add_obj(level, get_condition, variable_name,
{'hash_matched_code': name_if})
id_counter += 1 id_counter += 1
def count_variable_names(group, variable_name_count): def count_variable_names(group, variable_name_count):
if 'variables' in group: if 'variables' in group:
for name in group['variables'].iterkeys(): for name in group['variables'].iterkeys():
...@@ -1159,8 +1208,11 @@ def count_variable_names(group, variable_name_count): ...@@ -1159,8 +1208,11 @@ def count_variable_names(group, variable_name_count):
for subgroup_name, subgroup in group['subgroups'].iteritems(): for subgroup_name, subgroup in group['subgroups'].iteritems():
count_variable_names(subgroup, variable_name_count) count_variable_names(subgroup, variable_name_count)
def process_variable_group(parent_condition, group_name, group, builtin_id_declarations, builtin_id_definitions, name_declarations,
init_member_variables, get_variable_declarations, get_builtin_if_statements, declare_member_variables, variable_declarations, def process_variable_group(parent_condition, group_name, group, builtin_id_declarations,
builtin_id_definitions, name_declarations, init_member_variables,
get_variable_declarations, get_builtin_if_statements,
declare_member_variables, variable_declarations,
get_variable_definitions, variable_name_count): get_variable_definitions, variable_name_count):
global id_counter global id_counter
condition = 'NO_CONDITION' condition = 'NO_CONDITION'
...@@ -1171,17 +1223,21 @@ def process_variable_group(parent_condition, group_name, group, builtin_id_decla ...@@ -1171,17 +1223,21 @@ def process_variable_group(parent_condition, group_name, group, builtin_id_decla
if condition == 'NO_CONDITION': if condition == 'NO_CONDITION':
condition = parent_condition condition = parent_condition
else: else:
condition = '({cond1}) && ({cond2})'.format(cond1 = parent_condition, cond2 = condition) condition = '({cond1}) && ({cond2})'.format(cond1=parent_condition, cond2=condition)
process_single_variable_group(condition, group_name, group, builtin_id_declarations, builtin_id_definitions, name_declarations, process_single_variable_group(condition, group_name, group, builtin_id_declarations,
init_member_variables, get_variable_declarations, get_builtin_if_statements, declare_member_variables, variable_declarations, builtin_id_definitions, name_declarations, init_member_variables,
get_variable_declarations, get_builtin_if_statements,
declare_member_variables, variable_declarations,
get_variable_definitions, variable_name_count) get_variable_definitions, variable_name_count)
if 'subgroups' in group: if 'subgroups' in group:
for subgroup_name, subgroup in group['subgroups'].iteritems(): for subgroup_name, subgroup in group['subgroups'].iteritems():
process_variable_group(condition, subgroup_name, subgroup, builtin_id_declarations, builtin_id_definitions, name_declarations, process_variable_group(
init_member_variables, get_variable_declarations, get_builtin_if_statements, declare_member_variables, variable_declarations, condition, subgroup_name, subgroup, builtin_id_declarations,
get_variable_definitions, variable_name_count) builtin_id_definitions, name_declarations, init_member_variables,
get_variable_declarations, get_builtin_if_statements, declare_member_variables,
variable_declarations, get_variable_definitions, variable_name_count)
def main(): def main():
...@@ -1189,7 +1245,10 @@ def main(): ...@@ -1189,7 +1245,10 @@ def main():
set_working_dir() set_working_dir()
parser = argparse.ArgumentParser() parser = argparse.ArgumentParser()
parser.add_argument('--dump-intermediate-json', help='Dump parsed function data as a JSON file builtin_functions.json', action="store_true") parser.add_argument(
'--dump-intermediate-json',
help='Dump parsed function data as a JSON file builtin_functions.json',
action="store_true")
parser.add_argument('auto_script_command', nargs='?', default='') parser.add_argument('auto_script_command', nargs='?', default='')
args = parser.parse_args() args = parser.parse_args()
...@@ -1222,7 +1281,6 @@ def main(): ...@@ -1222,7 +1281,6 @@ def main():
return 1 return 1
return 0 return 0
all_inputs = [os.path.abspath(__file__), variables_json_filename, functions_txt_filename] all_inputs = [os.path.abspath(__file__), variables_json_filename, functions_txt_filename]
# This script takes a while to run since it searches for hash collisions of mangled names. To avoid # This script takes a while to run since it searches for hash collisions of mangled names. To avoid
# running it unnecessarily, we first check if we've already ran it with the same inputs. # running it unnecessarily, we first check if we've already ran it with the same inputs.
...@@ -1287,17 +1345,19 @@ def main(): ...@@ -1287,17 +1345,19 @@ def main():
defined_function_variants = set() defined_function_variants = set()
defined_parameter_names = set() defined_parameter_names = set()
parsed_functions = get_parsed_functions(functions_txt_filename) parsed_functions = get_parsed_functions(functions_txt_filename)
if args.dump_intermediate_json: if args.dump_intermediate_json:
with open('builtin_functions.json', 'w') as outfile: with open('builtin_functions.json', 'w') as outfile:
def serialize_obj(obj): def serialize_obj(obj):
if isinstance(obj, TType): if isinstance(obj, TType):
return obj.data return obj.data
else: else:
raise "Cannot serialize to JSON: " + str(obj) raise "Cannot serialize to JSON: " + str(obj)
json.dump(parsed_functions, outfile, indent=4, separators=(',', ': '), default=serialize_obj)
json.dump(
parsed_functions, outfile, indent=4, separators=(',', ': '), default=serialize_obj)
with open(variables_json_filename) as f: with open(variables_json_filename) as f:
parsed_variables = json.load(f, object_pairs_hook=OrderedDict) parsed_variables = json.load(f, object_pairs_hook=OrderedDict)
...@@ -1311,24 +1371,31 @@ def main(): ...@@ -1311,24 +1371,31 @@ def main():
secondary_sizes = [1, 2, 3, 4] secondary_sizes = [1, 2, 3, 4]
for primary_size in primary_sizes: for primary_size in primary_sizes:
for secondary_size in secondary_sizes: for secondary_size in secondary_sizes:
type = TType({'basic': basic_type, 'primarySize': primary_size, 'secondarySize': secondary_size}) type = TType({
'basic': basic_type,
'primarySize': primary_size,
'secondarySize': secondary_size
})
ttype_mangled_name_variants.append(type.get_mangled_name()) ttype_mangled_name_variants.append(type.get_mangled_name())
num_type_variants = len(ttype_mangled_name_variants) num_type_variants = len(ttype_mangled_name_variants)
# Sanity check for get_mangled_name_variant_hash: # Sanity check for get_mangled_name_variant_hash:
variant_hash = get_mangled_name_variant_hash(hash32("atan("), 3, 4, len("atan(0123"), num_type_variants, variant_hash = get_mangled_name_variant_hash(
ttype_mangled_name_variants) hash32("atan("), 3, 4, len("atan(0123"), num_type_variants, ttype_mangled_name_variants)
mangled_name_hash = mangledNameHash("atan(" + get_parameters_mangled_name_variant(3, 4, len("atan(0123"), mangled_name_hash = mangledNameHash(
ttype_mangled_name_variants), script_generated_hash_tests) "atan(" + get_parameters_mangled_name_variant(
3, 4, len("atan(0123"), ttype_mangled_name_variants), script_generated_hash_tests)
if variant_hash != mangled_name_hash: if variant_hash != mangled_name_hash:
raise Exception("get_mangled_name_variant_hash sanity check failed") raise Exception("get_mangled_name_variant_hash sanity check failed")
for group_name, group in parsed_functions.iteritems(): for group_name, group in parsed_functions.iteritems():
process_function_group(group_name, group, num_type_variants, parameter_declarations, ttype_mangled_name_variants, process_function_group(
name_declarations, unmangled_function_if_statements, unmangled_builtin_declarations, group_name, group, num_type_variants, parameter_declarations,
defined_function_variants, builtin_id_declarations, builtin_id_definitions, defined_parameter_names, ttype_mangled_name_variants, name_declarations, unmangled_function_if_statements,
variable_declarations, function_declarations, script_generated_hash_tests, get_builtin_if_statements, unmangled_builtin_declarations, defined_function_variants, builtin_id_declarations,
builtin_id_definitions, defined_parameter_names, variable_declarations,
function_declarations, script_generated_hash_tests, get_builtin_if_statements,
is_in_group_definitions) is_in_group_definitions)
parameter_declarations = prune_parameters_arrays(parameter_declarations, function_declarations) parameter_declarations = prune_parameters_arrays(parameter_declarations, function_declarations)
...@@ -1337,40 +1404,57 @@ def main(): ...@@ -1337,40 +1404,57 @@ def main():
count_variable_names(group, variable_name_count) count_variable_names(group, variable_name_count)
for group_name, group in parsed_variables.iteritems(): for group_name, group in parsed_variables.iteritems():
process_variable_group('NO_CONDITION', group_name, group, builtin_id_declarations, builtin_id_definitions, name_declarations, process_variable_group('NO_CONDITION', group_name, group, builtin_id_declarations,
init_member_variables, get_variable_declarations, get_builtin_if_statements, declare_member_variables, variable_declarations, builtin_id_definitions, name_declarations, init_member_variables,
get_variable_declarations, get_builtin_if_statements,
declare_member_variables, variable_declarations,
get_variable_definitions, variable_name_count) get_variable_definitions, variable_name_count)
output_strings = { output_strings = {
'script_name': os.path.basename(__file__), 'script_name':
'copyright_year': date.today().year, os.path.basename(__file__),
'copyright_year':
'builtin_id_declarations': '\n'.join(builtin_id_declarations), date.today().year,
'builtin_id_definitions': '\n'.join(builtin_id_definitions), 'builtin_id_declarations':
'last_builtin_id': id_counter - 1, '\n'.join(builtin_id_declarations),
'name_declarations': '\n'.join(sorted(list(name_declarations))), 'builtin_id_definitions':
'\n'.join(builtin_id_definitions),
'function_data_source_name': functions_txt_filename, 'last_builtin_id':
'function_declarations': '\n'.join(function_declarations), id_counter - 1,
'parameter_declarations': '\n'.join(sorted(parameter_declarations)), 'name_declarations':
'\n'.join(sorted(list(name_declarations))),
'is_in_group_definitions': '\n'.join(is_in_group_definitions), 'function_data_source_name':
functions_txt_filename,
'variable_data_source_name': variables_json_filename, 'function_declarations':
'variable_declarations': '\n'.join(sorted(variable_declarations)), '\n'.join(function_declarations),
'get_variable_declarations': '\n'.join(sorted(get_variable_declarations)), 'parameter_declarations':
'get_variable_definitions': '\n'.join(sorted(get_variable_definitions)), '\n'.join(sorted(parameter_declarations)),
'unmangled_builtin_declarations': '\n'.join(sorted(unmangled_builtin_declarations)), 'is_in_group_definitions':
'\n'.join(is_in_group_definitions),
'declare_member_variables': '\n'.join(declare_member_variables), 'variable_data_source_name':
'init_member_variables': '\n'.join(init_member_variables), variables_json_filename,
'variable_declarations':
'get_unmangled_builtin': unmangled_function_if_statements.get_switch_code(script_generated_hash_tests), '\n'.join(sorted(variable_declarations)),
'get_builtin': get_builtin_if_statements.get_switch_code(script_generated_hash_tests), 'get_variable_declarations':
'max_unmangled_name_length': unmangled_function_if_statements.get_max_name_length(), '\n'.join(sorted(get_variable_declarations)),
'max_mangled_name_length': get_builtin_if_statements.get_max_name_length(), 'get_variable_definitions':
'\n'.join(sorted(get_variable_definitions)),
'script_generated_hash_tests': '\n'.join(script_generated_hash_tests.iterkeys()) 'unmangled_builtin_declarations':
'\n'.join(sorted(unmangled_builtin_declarations)),
'declare_member_variables':
'\n'.join(declare_member_variables),
'init_member_variables':
'\n'.join(init_member_variables),
'get_unmangled_builtin':
unmangled_function_if_statements.get_switch_code(script_generated_hash_tests),
'get_builtin':
get_builtin_if_statements.get_switch_code(script_generated_hash_tests),
'max_unmangled_name_length':
unmangled_function_if_statements.get_max_name_length(),
'max_mangled_name_length':
get_builtin_if_statements.get_max_name_length(),
'script_generated_hash_tests':
'\n'.join(script_generated_hash_tests.iterkeys())
} }
with open(test_filename, 'wt') as outfile_cpp: with open(test_filename, 'wt') as outfile_cpp:
......
...@@ -60,6 +60,7 @@ const char *FindHLSLFunction(int uniqueId) ...@@ -60,6 +60,7 @@ const char *FindHLSLFunction(int uniqueId)
}} // namespace sh }} // namespace sh
""" """
def reject_duplicate_keys(pairs): def reject_duplicate_keys(pairs):
found_keys = {} found_keys = {}
for key, value in pairs: for key, value in pairs:
...@@ -69,12 +70,14 @@ def reject_duplicate_keys(pairs): ...@@ -69,12 +70,14 @@ def reject_duplicate_keys(pairs):
found_keys[key] = value found_keys[key] = value
return found_keys return found_keys
def load_json(path): def load_json(path):
with open(path) as map_file: with open(path) as map_file:
file_data = map_file.read() file_data = map_file.read()
map_file.close() map_file.close()
return json.loads(file_data, object_pairs_hook=reject_duplicate_keys) return json.loads(file_data, object_pairs_hook=reject_duplicate_keys)
def enum_type(arg): def enum_type(arg):
# handle 'argtype argname' and 'out argtype argname' # handle 'argtype argname' and 'out argtype argname'
chunks = arg.split(' ') chunks = arg.split(' ')
...@@ -89,22 +92,24 @@ def enum_type(arg): ...@@ -89,22 +92,24 @@ def enum_type(arg):
return 'UI' + arg_type[2:] + suffix return 'UI' + arg_type[2:] + suffix
return arg_type.capitalize() + suffix return arg_type.capitalize() + suffix
def gen_emulated_function(data): def gen_emulated_function(data):
func = "" func = ""
if 'comment' in data: if 'comment' in data:
func += "".join([ "// " + line + "\n" for line in data['comment'] ]) func += "".join(["// " + line + "\n" for line in data['comment']])
sig = data['return_type'] + ' ' + data['op'] + '_emu(' + ', '.join(data['args']) + ')' sig = data['return_type'] + ' ' + data['op'] + '_emu(' + ', '.join(data['args']) + ')'
body = [ sig, '{' ] + [' ' + line for line in data['body']] + ['}'] body = [sig, '{'] + [' ' + line for line in data['body']] + ['}']
func += "{\n" func += "{\n"
func += "BuiltInId::" + data['op'] + "_" + "_".join([enum_type(arg) for arg in data['args']]) + ",\n" func += "BuiltInId::" + data['op'] + "_" + "_".join([enum_type(arg) for arg in data['args']
]) + ",\n"
if 'helper' in data: if 'helper' in data:
func += '"' + '\\n"\n"'.join(data['helper']) + '\\n"\n' func += '"' + '\\n"\n"'.join(data['helper']) + '\\n"\n'
func += '"' + '\\n"\n"'.join(body) + '\\n"\n' func += '"' + '\\n"\n"'.join(body) + '\\n"\n'
func += "},\n" func += "},\n"
return [ func ] return [func]
def main(): def main():
...@@ -133,10 +138,10 @@ def main(): ...@@ -133,10 +138,10 @@ def main():
emulated_functions += gen_emulated_function(item) emulated_functions += gen_emulated_function(item)
hlsl_gen = template_emulated_builtin_functions_hlsl.format( hlsl_gen = template_emulated_builtin_functions_hlsl.format(
script_name = sys.argv[0], script_name=sys.argv[0],
data_source_name = input_script, data_source_name=input_script,
copyright_year = date.today().year, copyright_year=date.today().year,
emulated_functions = "".join(emulated_functions)) emulated_functions="".join(emulated_functions))
with open(hlsl_fname, 'wt') as f: with open(hlsl_fname, 'wt') as f:
f.write(hlsl_gen) f.write(hlsl_gen)
......
...@@ -58,12 +58,13 @@ template_format_case = """ case {texture_format}: ...@@ -58,12 +58,13 @@ template_format_case = """ case {texture_format}:
template_simple_case = """ case {key}: template_simple_case = """ case {key}:
""" """
def parse_texture_format_case(texture_format, framebuffer_formats): def parse_texture_format_case(texture_format, framebuffer_formats):
framebuffer_format_cases = "" framebuffer_format_cases = ""
for framebuffer_format in sorted(framebuffer_formats): for framebuffer_format in sorted(framebuffer_formats):
framebuffer_format_cases += template_simple_case.format(key = framebuffer_format) framebuffer_format_cases += template_simple_case.format(key=framebuffer_format)
return template_format_case.format( return template_format_case.format(
texture_format = texture_format, framebuffer_format_cases = framebuffer_format_cases) texture_format=texture_format, framebuffer_format_cases=framebuffer_format_cases)
def main(): def main():
...@@ -93,7 +94,7 @@ def main(): ...@@ -93,7 +94,7 @@ def main():
for texture_format, framebuffer_format in data: for texture_format, framebuffer_format in data:
if texture_format not in format_map: if texture_format not in format_map:
format_map[texture_format] = [] format_map[texture_format] = []
format_map[texture_format] += [ framebuffer_format ] format_map[texture_format] += [framebuffer_format]
texture_format_cases = "" texture_format_cases = ""
...@@ -102,10 +103,10 @@ def main(): ...@@ -102,10 +103,10 @@ def main():
with open(out_file_name, 'wt') as out_file: with open(out_file_name, 'wt') as out_file:
output_cpp = template_cpp.format( output_cpp = template_cpp.format(
script_name = sys.argv[0], script_name=sys.argv[0],
data_source_name = data_source_name, data_source_name=data_source_name,
copyright_year = date.today().year, copyright_year=date.today().year,
texture_format_cases = texture_format_cases) texture_format_cases=texture_format_cases)
out_file.write(output_cpp) out_file.write(output_cpp)
out_file.close() out_file.close()
return 0 return 0
......
...@@ -113,16 +113,14 @@ template_es3_combo_type_case = """ case {type}: ...@@ -113,16 +113,14 @@ template_es3_combo_type_case = """ case {type}:
def parse_type_case(type, result): def parse_type_case(type, result):
return template_simple_case.format( return template_simple_case.format(key=type, result=result)
key = type, result = result)
def parse_format_case(format, type_map): def parse_format_case(format, type_map):
type_cases = "" type_cases = ""
for type, internal_format in sorted(type_map.iteritems()): for type, internal_format in sorted(type_map.iteritems()):
type_cases += parse_type_case(type, internal_format) type_cases += parse_type_case(type, internal_format)
return template_format_case.format( return template_format_case.format(format=format, type_cases=type_cases)
format = format, type_cases = type_cases)
def main(): def main():
...@@ -188,21 +186,20 @@ def main(): ...@@ -188,21 +186,20 @@ def main():
internal_format_cases += " case " + internal_format + ":\n" internal_format_cases += " case " + internal_format + ":\n"
this_type_cases += template_es3_combo_type_case.format( this_type_cases += template_es3_combo_type_case.format(
type = type, internal_format_cases = internal_format_cases) type=type, internal_format_cases=internal_format_cases)
es3_combo_cases += template_format_case.format( es3_combo_cases += template_format_case.format(format=format, type_cases=this_type_cases)
format = format, type_cases = this_type_cases)
with open('format_map_autogen.cpp', 'wt') as out_file: with open('format_map_autogen.cpp', 'wt') as out_file:
output_cpp = template_cpp.format( output_cpp = template_cpp.format(
script_name = sys.argv[0], script_name=sys.argv[0],
data_source_name = input_script, data_source_name=input_script,
es3_data_source_name = combo_data_file, es3_data_source_name=combo_data_file,
copyright_year = date.today().year, copyright_year=date.today().year,
format_cases = format_cases, format_cases=format_cases,
es3_format_cases = es3_format_cases, es3_format_cases=es3_format_cases,
es3_type_cases = es3_type_cases, es3_type_cases=es3_type_cases,
es3_combo_cases = es3_combo_cases) es3_combo_cases=es3_combo_cases)
out_file.write(output_cpp) out_file.write(output_cpp)
return 0 return 0
......
...@@ -12,9 +12,11 @@ import re ...@@ -12,9 +12,11 @@ import re
kChannels = "ABDGLRSX" kChannels = "ABDGLRSX"
def get_angle_format_map_abs_path(): def get_angle_format_map_abs_path():
return os.path.join(os.path.dirname(os.path.realpath(__file__)), 'angle_format_map.json') return os.path.join(os.path.dirname(os.path.realpath(__file__)), 'angle_format_map.json')
def reject_duplicate_keys(pairs): def reject_duplicate_keys(pairs):
found_keys = {} found_keys = {}
for key, value in pairs: for key, value in pairs:
...@@ -24,24 +26,29 @@ def reject_duplicate_keys(pairs): ...@@ -24,24 +26,29 @@ def reject_duplicate_keys(pairs):
found_keys[key] = value found_keys[key] = value
return found_keys return found_keys
def load_json(path): def load_json(path):
with open(path) as map_file: with open(path) as map_file:
return json.loads(map_file.read(), object_pairs_hook=reject_duplicate_keys) return json.loads(map_file.read(), object_pairs_hook=reject_duplicate_keys)
def load_forward_table(path): def load_forward_table(path):
pairs = load_json(path) pairs = load_json(path)
reject_duplicate_keys(pairs) reject_duplicate_keys(pairs)
return { gl: angle for gl, angle in pairs } return {gl: angle for gl, angle in pairs}
def load_inverse_table(path): def load_inverse_table(path):
pairs = load_json(path) pairs = load_json(path)
reject_duplicate_keys(pairs) reject_duplicate_keys(pairs)
return { angle: gl for gl, angle in pairs } return {angle: gl for gl, angle in pairs}
def load_without_override(): def load_without_override():
map_path = get_angle_format_map_abs_path() map_path = get_angle_format_map_abs_path()
return load_forward_table(map_path) return load_forward_table(map_path)
def load_with_override(override_path): def load_with_override(override_path):
results = load_without_override() results = load_without_override()
overrides = load_json(override_path) overrides = load_json(override_path)
...@@ -51,10 +58,12 @@ def load_with_override(override_path): ...@@ -51,10 +58,12 @@ def load_with_override(override_path):
return results return results
def get_all_angle_formats(): def get_all_angle_formats():
map_path = get_angle_format_map_abs_path() map_path = get_angle_format_map_abs_path()
return load_inverse_table(map_path).keys() return load_inverse_table(map_path).keys()
def get_component_type(format_id): def get_component_type(format_id):
if "SNORM" in format_id: if "SNORM" in format_id:
return "snorm" return "snorm"
...@@ -83,10 +92,12 @@ def get_component_type(format_id): ...@@ -83,10 +92,12 @@ def get_component_type(format_id):
else: else:
raise ValueError("Unknown component type for " + format_id) raise ValueError("Unknown component type for " + format_id)
def get_channel_tokens(format_id): def get_channel_tokens(format_id):
r = re.compile(r'([' + kChannels + '][\d]+)') r = re.compile(r'([' + kChannels + '][\d]+)')
return filter(r.match, r.split(format_id)) return filter(r.match, r.split(format_id))
def get_channels(format_id): def get_channels(format_id):
channels = '' channels = ''
tokens = get_channel_tokens(format_id) tokens = get_channel_tokens(format_id)
...@@ -97,6 +108,7 @@ def get_channels(format_id): ...@@ -97,6 +108,7 @@ def get_channels(format_id):
return channels return channels
def get_bits(format_id): def get_bits(format_id):
bits = {} bits = {}
tokens = get_channel_tokens(format_id) tokens = get_channel_tokens(format_id)
...@@ -106,9 +118,11 @@ def get_bits(format_id): ...@@ -106,9 +118,11 @@ def get_bits(format_id):
bits[token[0]] = int(token[1:]) bits[token[0]] = int(token[1:])
return bits return bits
def get_format_info(format_id): def get_format_info(format_id):
return get_component_type(format_id), get_bits(format_id), get_channels(format_id) return get_component_type(format_id), get_bits(format_id), get_channels(format_id)
# TODO(oetuaho): Expand this code so that it could generate the gl format info tables as well. # TODO(oetuaho): Expand this code so that it could generate the gl format info tables as well.
def gl_format_channels(internal_format): def gl_format_channels(internal_format):
if internal_format == 'GL_BGR5_A1_ANGLEX': if internal_format == 'GL_BGR5_A1_ANGLEX':
...@@ -142,6 +156,7 @@ def gl_format_channels(internal_format): ...@@ -142,6 +156,7 @@ def gl_format_channels(internal_format):
return 's' return 's'
return channels_string.lower() return channels_string.lower()
def get_internal_format_initializer(internal_format, format_id): def get_internal_format_initializer(internal_format, format_id):
gl_channels = gl_format_channels(internal_format) gl_channels = gl_format_channels(internal_format)
gl_format_no_alpha = gl_channels == 'rgb' or gl_channels == 'l' gl_format_no_alpha = gl_channels == 'rgb' or gl_channels == 'l'
...@@ -182,19 +197,22 @@ def get_internal_format_initializer(internal_format, format_id): ...@@ -182,19 +197,22 @@ def get_internal_format_initializer(internal_format, format_id):
elif component_type == 'uint' and bits['R'] == 32: elif component_type == 'uint' and bits['R'] == 32:
return 'Initialize4ComponentData<GLuint, 0x00000000, 0x00000000, 0x00000000, 0x00000001>' return 'Initialize4ComponentData<GLuint, 0x00000000, 0x00000000, 0x00000000, 0x00000001>'
else: else:
raise ValueError('warning: internal format initializer could not be generated and may be needed for ' + internal_format) raise ValueError(
'warning: internal format initializer could not be generated and may be needed for ' +
internal_format)
def get_vertex_copy_function(src_format, dst_format): def get_vertex_copy_function(src_format, dst_format):
if dst_format == "NONE": if dst_format == "NONE":
return "nullptr"; return "nullptr"
num_channel = len(get_channel_tokens(src_format)) num_channel = len(get_channel_tokens(src_format))
if num_channel < 1 or num_channel > 4: if num_channel < 1 or num_channel > 4:
return "nullptr"; return "nullptr"
if 'FIXED' in src_format: if 'FIXED' in src_format:
assert 'FLOAT' in dst_format, ('get_vertex_copy_function: can only convert fixed to float,' assert 'FLOAT' in dst_format, (
+ ' not to ' + dst_format) 'get_vertex_copy_function: can only convert fixed to float,' + ' not to ' + dst_format)
return 'Copy32FixedTo32FVertexData<%d, %d>' % (num_channel, num_channel) return 'Copy32FixedTo32FVertexData<%d, %d>' % (num_channel, num_channel)
sign = '' sign = ''
...@@ -215,14 +233,14 @@ def get_vertex_copy_function(src_format, dst_format): ...@@ -215,14 +233,14 @@ def get_vertex_copy_function(src_format, dst_format):
sign = 'u' sign = 'u'
if base_type is None: if base_type is None:
return "nullptr"; return "nullptr"
gl_type = 'GL' + sign + base_type gl_type = 'GL' + sign + base_type
if src_format == dst_format: if src_format == dst_format:
return 'CopyNativeVertexData<%s, %d, %d, 0>' % (gl_type, num_channel, num_channel) return 'CopyNativeVertexData<%s, %d, %d, 0>' % (gl_type, num_channel, num_channel)
assert 'FLOAT' in dst_format, ('get_vertex_copy_function: can only convert to float,' assert 'FLOAT' in dst_format, (
+ ' not to ' + dst_format) 'get_vertex_copy_function: can only convert to float,' + ' not to ' + dst_format)
normalized = 'true' if 'NORM' in src_format else 'false' normalized = 'true' if 'NORM' in src_format else 'false'
return "CopyTo32FVertexData<%s, %d, %d, %s>" % (gl_type, num_channel, num_channel, normalized) return "CopyTo32FVertexData<%s, %d, %d, %s>" % (gl_type, num_channel, num_channel, normalized)
...@@ -103,8 +103,7 @@ supported_dimensions = ["2D", "3D", "2DArray"] ...@@ -103,8 +103,7 @@ supported_dimensions = ["2D", "3D", "2DArray"]
# field 2: Name of compiled shader # field 2: Name of compiled shader
# field 3: Filename of compiled shader # field 3: Filename of compiled shader
blitshader_data = [ blitshader_data = [
("RGBAF", "PassthroughRGBA*", "passthroughrgba*11ps.h"), ("RGBAF", "PassthroughRGBA*", "passthroughrgba*11ps.h"), ("BGRAF", "PassthroughRGBA*"),
("BGRAF", "PassthroughRGBA*"),
("RGBF", "PassthroughRGB*", "passthroughrgb*11ps.h"), ("RGBF", "PassthroughRGB*", "passthroughrgb*11ps.h"),
("RGF", "PassthroughRG*", "passthroughrg*11ps.h"), ("RGF", "PassthroughRG*", "passthroughrg*11ps.h"),
("RF", "PassthroughR*", "passthroughr*11ps.h"), ("RF", "PassthroughR*", "passthroughr*11ps.h"),
...@@ -119,53 +118,35 @@ blitshader_data = [ ...@@ -119,53 +118,35 @@ blitshader_data = [
("RGI", "PassthroughRG*I", "passthroughrg*i11ps.h"), ("RGI", "PassthroughRG*I", "passthroughrg*i11ps.h"),
("RUI", "PassthroughR*UI", "passthroughr*ui11ps.h"), ("RUI", "PassthroughR*UI", "passthroughr*ui11ps.h"),
("RI", "PassthroughR*I", "passthroughr*i11ps.h"), ("RI", "PassthroughR*I", "passthroughr*i11ps.h"),
("RGBAF_PREMULTIPLY", "FtoF_PM_RGBA_*", ("RGBAF_PREMULTIPLY", "FtoF_PM_RGBA_*", "multiplyalpha_ftof_pm_rgba_*_ps.h"),
"multiplyalpha_ftof_pm_rgba_*_ps.h"),
("RGBAF_UNMULTIPLY", "FtoF_UM_RGBA_*", "multiplyalpha_ftof_um_rgba_*_ps.h"), ("RGBAF_UNMULTIPLY", "FtoF_UM_RGBA_*", "multiplyalpha_ftof_um_rgba_*_ps.h"),
("RGBF_PREMULTIPLY", "FtoF_PM_RGB_*", "multiplyalpha_ftof_pm_rgb_*_ps.h"), ("RGBF_PREMULTIPLY", "FtoF_PM_RGB_*", "multiplyalpha_ftof_pm_rgb_*_ps.h"),
("RGBF_UNMULTIPLY", "FtoF_UM_RGB_*", "multiplyalpha_ftof_um_rgb_*_ps.h"), ("RGBF_UNMULTIPLY", "FtoF_UM_RGB_*", "multiplyalpha_ftof_um_rgb_*_ps.h"),
("RGBAF_TOUI", "FtoU_PT_RGBA_*", "multiplyalpha_ftou_pt_rgba_*_ps.h"), ("RGBAF_TOUI", "FtoU_PT_RGBA_*", "multiplyalpha_ftou_pt_rgba_*_ps.h"),
("RGBAF_TOUI_PREMULTIPLY", "FtoU_PM_RGBA_*", ("RGBAF_TOUI_PREMULTIPLY", "FtoU_PM_RGBA_*", "multiplyalpha_ftou_pm_rgba_*_ps.h"),
"multiplyalpha_ftou_pm_rgba_*_ps.h"), ("RGBAF_TOUI_UNMULTIPLY", "FtoU_UM_RGBA_*", "multiplyalpha_ftou_um_rgba_*_ps.h"),
("RGBAF_TOUI_UNMULTIPLY", "FtoU_UM_RGBA_*",
"multiplyalpha_ftou_um_rgba_*_ps.h"),
("RGBF_TOUI", "FtoU_PT_RGB_*", "multiplyalpha_ftou_pt_rgb_*_ps.h"), ("RGBF_TOUI", "FtoU_PT_RGB_*", "multiplyalpha_ftou_pt_rgb_*_ps.h"),
("RGBF_TOUI_PREMULTIPLY", "FtoU_PM_RGB_*", ("RGBF_TOUI_PREMULTIPLY", "FtoU_PM_RGB_*", "multiplyalpha_ftou_pm_rgb_*_ps.h"),
"multiplyalpha_ftou_pm_rgb_*_ps.h"), ("RGBF_TOUI_UNMULTIPLY", "FtoU_UM_RGB_*", "multiplyalpha_ftou_um_rgb_*_ps.h"),
("RGBF_TOUI_UNMULTIPLY", "FtoU_UM_RGB_*",
"multiplyalpha_ftou_um_rgb_*_ps.h"),
("RGBAF_TOI", "FtoI_PT_RGBA_*", "multiplyalpha_ftoi_pt_rgba_*_ps.h"), ("RGBAF_TOI", "FtoI_PT_RGBA_*", "multiplyalpha_ftoi_pt_rgba_*_ps.h"),
("RGBAF_TOI_PREMULTIPLY", "FtoI_PM_RGBA_*", ("RGBAF_TOI_PREMULTIPLY", "FtoI_PM_RGBA_*", "multiplyalpha_ftoi_pm_rgba_*_ps.h"),
"multiplyalpha_ftoi_pm_rgba_*_ps.h"), ("RGBAF_TOI_UNMULTIPLY", "FtoI_UM_RGBA_*", "multiplyalpha_ftoi_um_rgba_*_ps.h"),
("RGBAF_TOI_UNMULTIPLY", "FtoI_UM_RGBA_*",
"multiplyalpha_ftoi_um_rgba_*_ps.h"),
("RGBF_TOI", "FtoI_PT_RGB_*", "multiplyalpha_ftoi_pt_rgb_*_ps.h"), ("RGBF_TOI", "FtoI_PT_RGB_*", "multiplyalpha_ftoi_pt_rgb_*_ps.h"),
("RGBF_TOI_PREMULTIPLY", "FtoI_PM_RGB_*", ("RGBF_TOI_PREMULTIPLY", "FtoI_PM_RGB_*", "multiplyalpha_ftoi_pm_rgb_*_ps.h"),
"multiplyalpha_ftoi_pm_rgb_*_ps.h"), ("RGBF_TOI_UNMULTIPLY", "FtoI_UM_RGB_*", "multiplyalpha_ftoi_um_rgb_*_ps.h"),
("RGBF_TOI_UNMULTIPLY", "FtoI_UM_RGB_*", ("LUMAF_PREMULTIPLY", "FtoF_PM_LUMA_*", "multiplyalpha_ftof_pm_luma_*_ps.h"),
"multiplyalpha_ftoi_um_rgb_*_ps.h"),
("LUMAF_PREMULTIPLY", "FtoF_PM_LUMA_*",
"multiplyalpha_ftof_pm_luma_*_ps.h"),
("LUMAF_UNMULTIPLY", "FtoF_UM_LUMA_*", "multiplyalpha_ftof_um_luma_*_ps.h"), ("LUMAF_UNMULTIPLY", "FtoF_UM_LUMA_*", "multiplyalpha_ftof_um_luma_*_ps.h"),
("LUMAALPHAF_PREMULTIPLY", "FtoF_PM_LUMAALPHA_*", ("LUMAALPHAF_PREMULTIPLY", "FtoF_PM_LUMAALPHA_*", "multiplyalpha_ftof_pm_lumaalpha_*_ps.h"),
"multiplyalpha_ftof_pm_lumaalpha_*_ps.h"), ("LUMAALPHAF_UNMULTIPLY", "FtoF_UM_LUMAALPHA_*", "multiplyalpha_ftof_um_lumaalpha_*_ps.h"),
("LUMAALPHAF_UNMULTIPLY", "FtoF_UM_LUMAALPHA_*",
"multiplyalpha_ftof_um_lumaalpha_*_ps.h"),
("RGBAF_4444", "PassthroughRGBA*_4444", "passthroughrgba*_4444_11ps.h"), ("RGBAF_4444", "PassthroughRGBA*_4444", "passthroughrgba*_4444_11ps.h"),
("RGBAF_4444_PREMULTIPLY", "FtoF_PM_RGBA_4444_*", ("RGBAF_4444_PREMULTIPLY", "FtoF_PM_RGBA_4444_*", "multiplyalpha_ftof_pm_rgba_4444_*_ps.h"),
"multiplyalpha_ftof_pm_rgba_4444_*_ps.h"), ("RGBAF_4444_UNMULTIPLY", "FtoF_UM_RGBA_4444_*", "multiplyalpha_ftof_um_rgba_4444_*_ps.h"),
("RGBAF_4444_UNMULTIPLY", "FtoF_UM_RGBA_4444_*",
"multiplyalpha_ftof_um_rgba_4444_*_ps.h"),
("RGBF_565", "PassthroughRGB*_565", "passthroughrgb*_565_11ps.h"), ("RGBF_565", "PassthroughRGB*_565", "passthroughrgb*_565_11ps.h"),
("RGBF_565_PREMULTIPLY", "FtoF_PM_RGB_565_*", ("RGBF_565_PREMULTIPLY", "FtoF_PM_RGB_565_*", "multiplyalpha_ftof_pm_rgb_565_*_ps.h"),
"multiplyalpha_ftof_pm_rgb_565_*_ps.h"), ("RGBF_565_UNMULTIPLY", "FtoF_UM_RGB_565_*", "multiplyalpha_ftof_um_rgb_565_*_ps.h"),
("RGBF_565_UNMULTIPLY", "FtoF_UM_RGB_565_*",
"multiplyalpha_ftof_um_rgb_565_*_ps.h"),
("RGBAF_5551", "PassthroughRGBA*_5551", "passthroughrgba*_5551_11ps.h"), ("RGBAF_5551", "PassthroughRGBA*_5551", "passthroughrgba*_5551_11ps.h"),
("RGBAF_5551_PREMULTIPLY", "FtoF_PM_RGBA_5551_*", ("RGBAF_5551_PREMULTIPLY", "FtoF_PM_RGBA_5551_*", "multiplyalpha_ftof_pm_rgba_5551_*_ps.h"),
"multiplyalpha_ftof_pm_rgba_5551_*_ps.h"), ("RGBAF_5551_UNMULTIPLY", "FtoF_UM_RGBA_5551_*", "multiplyalpha_ftof_um_rgba_5551_*_ps.h")
("RGBAF_5551_UNMULTIPLY", "FtoF_UM_RGBA_5551_*",
"multiplyalpha_ftof_um_rgba_5551_*_ps.h")
] ]
...@@ -177,8 +158,7 @@ def format_shader_include(dimension, blitshader): ...@@ -177,8 +158,7 @@ def format_shader_include(dimension, blitshader):
def format_get_blitshader_case(operation): def format_get_blitshader_case(operation):
dimension_cases = [] dimension_cases = []
for dimension in supported_dimensions: for dimension in supported_dimensions:
dimension_cases.append( dimension_cases.append(format_get_blitshader_case_dimension(operation, dimension))
format_get_blitshader_case_dimension(operation, dimension))
return template_get_blitshader_case.format( return template_get_blitshader_case.format(
get_blitshader_dimension_cases="\n".join([c for c in dimension_cases]), get_blitshader_dimension_cases="\n".join([c for c in dimension_cases]),
...@@ -219,8 +199,7 @@ def format_map_blitshader_case(dimension, blitshader): ...@@ -219,8 +199,7 @@ def format_map_blitshader_case(dimension, blitshader):
def format_shader_filename(dimension, blitshader): def format_shader_filename(dimension, blitshader):
return "shaders/compiled/" + blitshader[2].replace("*", return "shaders/compiled/" + blitshader[2].replace("*", dimension.lower()) + ","
dimension.lower()) + ","
def get_shader_includes(): def get_shader_includes():
...@@ -279,8 +258,7 @@ def get_blitshadertype_enums(): ...@@ -279,8 +258,7 @@ def get_blitshadertype_enums():
# 2D float to int shaders have not been implemented # 2D float to int shaders have not been implemented
if dimension == "2D" and blitshader[0].find("TOI") != -1: if dimension == "2D" and blitshader[0].find("TOI") != -1:
continue continue
blitshaders.append(" BLITSHADER_" + dimension.upper() + "_" + blitshaders.append(" BLITSHADER_" + dimension.upper() + "_" + blitshader[0] + ",")
blitshader[0] + ",")
blitshaders.append(" BLITSHADER_INVALID") blitshaders.append(" BLITSHADER_INVALID")
return blitshaders return blitshaders
...@@ -299,14 +277,14 @@ def get_shader_filenames(): ...@@ -299,14 +277,14 @@ def get_shader_filenames():
continue continue
if len(blitshader) == 3: if len(blitshader) == 3:
filenames.append( filenames.append(
(" \"src/libANGLE/renderer/d3d/d3d11/shaders/compiled/{0}\"," (" \"src/libANGLE/renderer/d3d/d3d11/shaders/compiled/{0}\",").format(
).format(blitshader[2].replace("*", dimension.lower()))) blitshader[2].replace("*", dimension.lower())))
return filenames return filenames
def write_inc_file(get_blitshaders_case_list, add_blitshader_case_list, def write_inc_file(get_blitshaders_case_list, add_blitshader_case_list, shader_includes,
shader_includes, blitshaderop_enums, blitshadertype_enums): blitshaderop_enums, blitshadertype_enums):
content = template_blitshader_source.format( content = template_blitshader_source.format(
script_name=os.path.basename(sys.argv[0]), script_name=os.path.basename(sys.argv[0]),
year=date.today().year, year=date.today().year,
...@@ -367,11 +345,11 @@ def main(): ...@@ -367,11 +345,11 @@ def main():
shader_filenames = get_shader_filenames() shader_filenames = get_shader_filenames()
write_inc_file("\n".join([d for d in blitshadertype_cases]), "\n".join( write_inc_file("\n".join([d for d in blitshadertype_cases]), "\n".join(
[c for c in map_blitshader_cases]), "\n".join([i for i in shader_includes]), [c for c in map_blitshader_cases]), "\n".join([i for i in shader_includes]), "\n".join(
"\n".join([e for e in blitshaderop_enums]), "\n".join( [e for e in blitshaderop_enums]), "\n".join([e for e in blitshadertype_enums]))
[e for e in blitshadertype_enums]))
write_gni_file("\n".join([s for s in shader_filenames])) write_gni_file("\n".join([s for s in shader_filenames]))
return 0 return 0
if __name__ == '__main__': if __name__ == '__main__':
sys.exit(main()) sys.exit(main())
...@@ -75,13 +75,13 @@ template_undefined_case = """ case DXGI_FORMAT_{dxgi_format}: ...@@ -75,13 +75,13 @@ template_undefined_case = """ case DXGI_FORMAT_{dxgi_format}:
break; break;
""" """
def format_case(dxgi_format, result): def format_case(dxgi_format, result):
return template_format_case.format( return template_format_case.format(dxgi_format=dxgi_format, result=result)
dxgi_format = dxgi_format,
result = result)
def undefined_case(dxgi_format): def undefined_case(dxgi_format):
return template_undefined_case.format(dxgi_format = dxgi_format) return template_undefined_case.format(dxgi_format=dxgi_format)
def main(): def main():
...@@ -146,11 +146,11 @@ def main(): ...@@ -146,11 +146,11 @@ def main():
with open('dxgi_format_map_autogen.cpp', 'wt') as out_file: with open('dxgi_format_map_autogen.cpp', 'wt') as out_file:
output_cpp = template_cpp.format( output_cpp = template_cpp.format(
script_name = sys.argv[0], script_name=sys.argv[0],
data_source_name = input_data, data_source_name=input_data,
copyright_year = date.today().year, copyright_year=date.today().year,
component_type_cases = component_cases, component_type_cases=component_cases,
format_cases = format_cases) format_cases=format_cases)
out_file.write(output_cpp) out_file.write(output_cpp)
out_file.close() out_file.close()
return 0 return 0
......
...@@ -168,6 +168,7 @@ const DXGISupport &GetDXGISupport(DXGI_FORMAT dxgiFormat, D3D_FEATURE_LEVEL feat ...@@ -168,6 +168,7 @@ const DXGISupport &GetDXGISupport(DXGI_FORMAT dxgiFormat, D3D_FEATURE_LEVEL feat
}} // namespace rx }} // namespace rx
""" """
def do_format(format_data): def do_format(format_data):
table_data = {'9_3': '', '10_0': '', '10_1': '', '11_0': '', '11_1': ''} table_data = {'9_3': '', '10_0': '', '10_1': '', '11_0': '', '11_1': ''}
...@@ -270,24 +271,31 @@ def do_format(format_data): ...@@ -270,24 +271,31 @@ def do_format(format_data):
never = ' | '.join(sorted(never_supported)) never = ' | '.join(sorted(never_supported))
optional = ' | '.join(sorted(optional_for_fl)) optional = ' | '.join(sorted(optional_for_fl))
if not always: always = '0' if not always:
if not never: never = '0' always = '0'
if not optional: optional = '0' if not never:
never = '0'
if not optional:
optional = '0'
table_data[feature_level] += ' case ' + format_name + ':\n' table_data[feature_level] += ' case ' + format_name + ':\n'
table_data[feature_level] += ' {\n' table_data[feature_level] += ' {\n'
table_data[feature_level] += ' static const DXGISupport info(' + always + ', ' + never + ', ' + optional + ');\n' table_data[
feature_level] += ' static const DXGISupport info(' + always + ', ' + never + ', ' + optional + ');\n'
table_data[feature_level] += ' return info;\n' table_data[feature_level] += ' return info;\n'
table_data[feature_level] += ' }\n' table_data[feature_level] += ' }\n'
return table_data return table_data
def join_table_data(table_data_1, table_data_2): def join_table_data(table_data_1, table_data_2):
return {'9_3': table_data_1['9_3'] + table_data_2['9_3'], return {
'9_3': table_data_1['9_3'] + table_data_2['9_3'],
'10_0': table_data_1['10_0'] + table_data_2['10_0'], '10_0': table_data_1['10_0'] + table_data_2['10_0'],
'10_1': table_data_1['10_1'] + table_data_2['10_1'], '10_1': table_data_1['10_1'] + table_data_2['10_1'],
'11_0': table_data_1['11_0'] + table_data_2['11_0'], '11_0': table_data_1['11_0'] + table_data_2['11_0'],
'11_1': table_data_1['11_1'] + table_data_2['11_1']} '11_1': table_data_1['11_1'] + table_data_2['11_1']
}
def main(): def main():
...@@ -316,7 +324,8 @@ def main(): ...@@ -316,7 +324,8 @@ def main():
for format_data in json_data: for format_data in json_data:
table_data = join_table_data(table_data, do_format(format_data)) table_data = join_table_data(table_data, do_format(format_data))
out_data = template.format(prefix=macro_prefix, out_data = template.format(
prefix=macro_prefix,
table_data_9_3=table_data['9_3'], table_data_9_3=table_data['9_3'],
table_data_10_0=table_data['10_0'], table_data_10_0=table_data['10_0'],
table_data_10_1=table_data['10_1'], table_data_10_1=table_data['10_1'],
......
...@@ -69,6 +69,7 @@ const Format &Format::Get(GLenum internalFormat, const Renderer11DeviceCaps &dev ...@@ -69,6 +69,7 @@ const Format &Format::Get(GLenum internalFormat, const Renderer11DeviceCaps &dev
}} // namespace rx }} // namespace rx
""" """
def get_swizzle_format_id(internal_format, angle_format): def get_swizzle_format_id(internal_format, angle_format):
angle_format_id = angle_format["formatName"] angle_format_id = angle_format["formatName"]
if (internal_format == 'GL_NONE') or (angle_format_id == 'NONE'): if (internal_format == 'GL_NONE') or (angle_format_id == 'NONE'):
...@@ -80,16 +81,20 @@ def get_swizzle_format_id(internal_format, angle_format): ...@@ -80,16 +81,20 @@ def get_swizzle_format_id(internal_format, angle_format):
return angle_format['swizzleFormat'] return angle_format['swizzleFormat']
if 'bits' not in angle_format: if 'bits' not in angle_format:
raise ValueError('no bits information for determining swizzleformat for format: ' + internal_format) raise ValueError('no bits information for determining swizzleformat for format: ' +
internal_format)
bits = angle_format['bits'] bits = angle_format['bits']
max_component_bits = max(bits.itervalues()) max_component_bits = max(bits.itervalues())
channels_different = not all([component_bits == bits.itervalues().next() for component_bits in bits.itervalues()]) channels_different = not all(
[component_bits == bits.itervalues().next() for component_bits in bits.itervalues()])
# The format itself can be used for swizzles if it can be accessed as a render target and # The format itself can be used for swizzles if it can be accessed as a render target and
# sampled and the bit count for all 4 channels is the same. # sampled and the bit count for all 4 channels is the same.
if "rtvFormat" in angle_format and "srvFormat" in angle_format and "uavFormat" in angle_format and not channels_different and len(angle_format['channels']) == 4: if "rtvFormat" in angle_format and "srvFormat" in angle_format and "uavFormat" in angle_format and not channels_different and len(
return angle_format["glInternalFormat"] if "glInternalFormat" in angle_format else internal_format angle_format['channels']) == 4:
return angle_format[
"glInternalFormat"] if "glInternalFormat" in angle_format else internal_format
b = int(math.ceil(float(max_component_bits) / 8) * 8) b = int(math.ceil(float(max_component_bits) / 8) * 8)
...@@ -102,10 +107,12 @@ def get_swizzle_format_id(internal_format, angle_format): ...@@ -102,10 +107,12 @@ def get_swizzle_format_id(internal_format, angle_format):
return 'GL_RGBA16_EXT' return 'GL_RGBA16_EXT'
if b == 24: if b == 24:
raise ValueError('unexpected 24-bit format when determining swizzleformat for format: ' + internal_format) raise ValueError('unexpected 24-bit format when determining swizzleformat for format: ' +
internal_format)
if 'componentType' not in angle_format: if 'componentType' not in angle_format:
raise ValueError('no component type information for determining swizzleformat for format: ' + internal_format) raise ValueError('no component type information for determining swizzleformat for format: '
+ internal_format)
component_type = angle_format['componentType'] component_type = angle_format['componentType']
...@@ -127,10 +134,12 @@ def get_swizzle_format_id(internal_format, angle_format): ...@@ -127,10 +134,12 @@ def get_swizzle_format_id(internal_format, angle_format):
if (b == 16): if (b == 16):
swizzle += "_EXT" swizzle += "_EXT"
else: else:
raise ValueError('could not determine swizzleformat based on componentType for format: ' + internal_format) raise ValueError('could not determine swizzleformat based on componentType for format: ' +
internal_format)
return swizzle return swizzle
def get_blit_srv_format(angle_format): def get_blit_srv_format(angle_format):
if 'channels' not in angle_format: if 'channels' not in angle_format:
return 'DXGI_FORMAT_UNKNOWN' return 'DXGI_FORMAT_UNKNOWN'
...@@ -171,6 +180,7 @@ split_format_entry_template = """{space} {condition} ...@@ -171,6 +180,7 @@ split_format_entry_template = """{space} {condition}
{space} }} {space} }}
""" """
def json_to_table_data(internal_format, format_name, prefix, json): def json_to_table_data(internal_format, format_name, prefix, json):
table_data = "" table_data = ""
...@@ -201,6 +211,7 @@ def json_to_table_data(internal_format, format_name, prefix, json): ...@@ -201,6 +211,7 @@ def json_to_table_data(internal_format, format_name, prefix, json):
else: else:
return format_entry_template.format(**parsed) return format_entry_template.format(**parsed)
def parse_json_angle_format_case(format_name, angle_format, json_data): def parse_json_angle_format_case(format_name, angle_format, json_data):
supported_case = {} supported_case = {}
unsupported_case = {} unsupported_case = {}
...@@ -227,8 +238,8 @@ def parse_json_angle_format_case(format_name, angle_format, json_data): ...@@ -227,8 +238,8 @@ def parse_json_angle_format_case(format_name, angle_format, json_data):
unsupported_case[k] = v unsupported_case[k] = v
if fallback != None: if fallback != None:
unsupported_case, _, _ = parse_json_angle_format_case( unsupported_case, _, _ = parse_json_angle_format_case(fallback, json_data[fallback],
fallback, json_data[fallback], json_data) json_data)
unsupported_case["formatName"] = fallback unsupported_case["formatName"] = fallback
if support_test != None: if support_test != None:
...@@ -236,6 +247,7 @@ def parse_json_angle_format_case(format_name, angle_format, json_data): ...@@ -236,6 +247,7 @@ def parse_json_angle_format_case(format_name, angle_format, json_data):
else: else:
return supported_case, None, None return supported_case, None, None
def parse_json_into_switch_angle_format_string(json_map, json_data): def parse_json_into_switch_angle_format_string(json_map, json_data):
table_data = '' table_data = ''
...@@ -259,8 +271,10 @@ def parse_json_into_switch_angle_format_string(json_map, json_data): ...@@ -259,8 +271,10 @@ def parse_json_into_switch_angle_format_string(json_map, json_data):
if support_test != None: if support_test != None:
table_data += " {\n" table_data += " {\n"
table_data += json_to_table_data(internal_format, format_name, "if (" + support_test + ")", supported_case) table_data += json_to_table_data(internal_format, format_name,
table_data += json_to_table_data(internal_format, format_name, "else", unsupported_case) "if (" + support_test + ")", supported_case)
table_data += json_to_table_data(internal_format, format_name, "else",
unsupported_case)
table_data += " }\n" table_data += " }\n"
else: else:
table_data += json_to_table_data(internal_format, format_name, "", supported_case) table_data += json_to_table_data(internal_format, format_name, "", supported_case)
...@@ -290,10 +304,10 @@ def main(): ...@@ -290,10 +304,10 @@ def main():
angle_format_cases = parse_json_into_switch_angle_format_string(json_map, json_data) angle_format_cases = parse_json_into_switch_angle_format_string(json_map, json_data)
output_cpp = template_texture_format_table_autogen_cpp.format( output_cpp = template_texture_format_table_autogen_cpp.format(
script_name = sys.argv[0], script_name=sys.argv[0],
copyright_year = date.today().year, copyright_year=date.today().year,
angle_format_info_cases = angle_format_cases, angle_format_info_cases=angle_format_cases,
data_source_name = data_source_name) data_source_name=data_source_name)
with open('texture_format_table_autogen.cpp', 'wt') as out_file: with open('texture_format_table_autogen.cpp', 'wt') as out_file:
out_file.write(output_cpp) out_file.write(output_cpp)
out_file.close() out_file.close()
......
...@@ -168,7 +168,7 @@ def get_color_read_function(angle_format): ...@@ -168,7 +168,7 @@ def get_color_read_function(angle_format):
return 'ReadDepthStencil<' + channel_struct + '>' return 'ReadDepthStencil<' + channel_struct + '>'
read_component_type = get_color_read_write_component_type(angle_format) read_component_type = get_color_read_write_component_type(angle_format)
return 'ReadColor<' + channel_struct + ', '+ read_component_type + '>' return 'ReadColor<' + channel_struct + ', ' + read_component_type + '>'
def get_color_write_function(angle_format): def get_color_write_function(angle_format):
...@@ -180,12 +180,13 @@ def get_color_write_function(angle_format): ...@@ -180,12 +180,13 @@ def get_color_write_function(angle_format):
return 'WriteDepthStencil<' + channel_struct + '>' return 'WriteDepthStencil<' + channel_struct + '>'
write_component_type = get_color_read_write_component_type(angle_format) write_component_type = get_color_read_write_component_type(angle_format)
return 'WriteColor<' + channel_struct + ', '+ write_component_type + '>' return 'WriteColor<' + channel_struct + ', ' + write_component_type + '>'
format_entry_template = """ {{ FormatID::{id}, {glInternalFormat}, {fboImplementationInternalFormat}, {mipGenerationFunction}, {fastCopyFunctions}, {colorReadFunction}, {colorWriteFunction}, {namedComponentType}, {R}, {G}, {B}, {A}, {L}, {D}, {S}, {pixelBytes}, {componentAlignmentMask}, {isBlock}, {isFixed} }}, format_entry_template = """ {{ FormatID::{id}, {glInternalFormat}, {fboImplementationInternalFormat}, {mipGenerationFunction}, {fastCopyFunctions}, {colorReadFunction}, {colorWriteFunction}, {namedComponentType}, {R}, {G}, {B}, {A}, {L}, {D}, {S}, {pixelBytes}, {componentAlignmentMask}, {isBlock}, {isFixed} }},
""" """
def get_named_component_type(component_type): def get_named_component_type(component_type):
if component_type == "snorm": if component_type == "snorm":
return "GL_SIGNED_NORMALIZED" return "GL_SIGNED_NORMALIZED"
...@@ -283,8 +284,8 @@ def json_to_table_data(format_id, json, angle_to_gl): ...@@ -283,8 +284,8 @@ def json_to_table_data(format_id, json, angle_to_gl):
sum_of_bits += int(parsed[channel]) sum_of_bits += int(parsed[channel])
pixel_bytes = sum_of_bits / 8 pixel_bytes = sum_of_bits / 8
parsed["pixelBytes"] = pixel_bytes parsed["pixelBytes"] = pixel_bytes
parsed["componentAlignmentMask"] = get_component_alignment_mask( parsed["componentAlignmentMask"] = get_component_alignment_mask(parsed["channels"],
parsed["channels"], parsed["bits"]) parsed["bits"])
parsed["isBlock"] = "true" if is_block else "false" parsed["isBlock"] = "true" if is_block else "false"
parsed["isFixed"] = "true" if "FIXED" in format_id else "false" parsed["isFixed"] = "true" if "FIXED" in format_id else "false"
...@@ -309,21 +310,20 @@ def gen_enum_string(all_angle): ...@@ -309,21 +310,20 @@ def gen_enum_string(all_angle):
enum_data += ',\n ' + format_id enum_data += ',\n ' + format_id
return enum_data return enum_data
case_template = """ case {gl_format}: case_template = """ case {gl_format}:
return FormatID::{angle_format}; return FormatID::{angle_format};
""" """
def gen_map_switch_string(gl_to_angle): def gen_map_switch_string(gl_to_angle):
switch_data = ''; switch_data = ''
for gl_format in sorted(gl_to_angle.keys()): for gl_format in sorted(gl_to_angle.keys()):
angle_format = gl_to_angle[gl_format] angle_format = gl_to_angle[gl_format]
switch_data += case_template.format( switch_data += case_template.format(gl_format=gl_format, angle_format=angle_format)
gl_format=gl_format,
angle_format=angle_format)
switch_data += " default:\n" switch_data += " default:\n"
switch_data += " return FormatID::NONE;" switch_data += " return FormatID::NONE;"
return switch_data; return switch_data
def main(): def main():
...@@ -348,15 +348,14 @@ def main(): ...@@ -348,15 +348,14 @@ def main():
json_data = angle_format.load_json(data_source_name) json_data = angle_format.load_json(data_source_name)
all_angle = angle_to_gl.keys() all_angle = angle_to_gl.keys()
angle_format_cases = parse_angle_format_table( angle_format_cases = parse_angle_format_table(all_angle, json_data, angle_to_gl)
all_angle, json_data, angle_to_gl)
switch_data = gen_map_switch_string(gl_to_angle) switch_data = gen_map_switch_string(gl_to_angle)
output_cpp = template_autogen_inl.format( output_cpp = template_autogen_inl.format(
script_name = sys.argv[0], script_name=sys.argv[0],
copyright_year = date.today().year, copyright_year=date.today().year,
angle_format_info_cases = angle_format_cases, angle_format_info_cases=angle_format_cases,
angle_format_switch = switch_data, angle_format_switch=switch_data,
data_source_name = data_source_name) data_source_name=data_source_name)
with open('Format_table_autogen.cpp', 'wt') as out_file: with open('Format_table_autogen.cpp', 'wt') as out_file:
out_file.write(output_cpp) out_file.write(output_cpp)
out_file.close() out_file.close()
...@@ -364,11 +363,11 @@ def main(): ...@@ -364,11 +363,11 @@ def main():
enum_data = gen_enum_string(all_angle) enum_data = gen_enum_string(all_angle)
num_angle_formats = len(all_angle) num_angle_formats = len(all_angle)
output_h = template_autogen_h.format( output_h = template_autogen_h.format(
script_name = sys.argv[0], script_name=sys.argv[0],
copyright_year = date.today().year, copyright_year=date.today().year,
angle_format_enum = enum_data, angle_format_enum=enum_data,
data_source_name = data_source_name, data_source_name=data_source_name,
num_angle_formats = num_angle_formats) num_angle_formats=num_angle_formats)
with open('FormatID_autogen.h', 'wt') as out_file: with open('FormatID_autogen.h', 'wt') as out_file:
out_file.write(output_h) out_file.write(output_h)
out_file.close() out_file.close()
......
...@@ -99,12 +99,15 @@ internal_format_param = 'internalFormat' ...@@ -99,12 +99,15 @@ internal_format_param = 'internalFormat'
angle_format_param = 'angleFormat' angle_format_param = 'angleFormat'
angle_format_unknown = 'NONE' angle_format_unknown = 'NONE'
def load_functions_name(internal_format, angle_format): def load_functions_name(internal_format, angle_format):
return internal_format[3:] + "_to_" + angle_format return internal_format[3:] + "_to_" + angle_format
def unknown_func_name(internal_format): def unknown_func_name(internal_format):
return load_functions_name(internal_format, "default") return load_functions_name(internal_format, "default")
def get_load_func(func_name, type_functions): def get_load_func(func_name, type_functions):
snippet = "LoadImageFunctionInfo " + func_name + "(GLenum type)\n" snippet = "LoadImageFunctionInfo " + func_name + "(GLenum type)\n"
snippet += "{\n" snippet += "{\n"
...@@ -123,9 +126,12 @@ def get_load_func(func_name, type_functions): ...@@ -123,9 +126,12 @@ def get_load_func(func_name, type_functions):
return snippet return snippet
def get_unknown_load_func(angle_to_type_map, internal_format): def get_unknown_load_func(angle_to_type_map, internal_format):
assert angle_format_unknown in angle_to_type_map assert angle_format_unknown in angle_to_type_map
return get_load_func(unknown_func_name(internal_format), angle_to_type_map[angle_format_unknown]) return get_load_func(
unknown_func_name(internal_format), angle_to_type_map[angle_format_unknown])
def parse_json(json_data): def parse_json(json_data):
table_data = '' table_data = ''
...@@ -136,7 +142,8 @@ def parse_json(json_data): ...@@ -136,7 +142,8 @@ def parse_json(json_data):
table_data += s + 'case ' + internal_format + ':\n' table_data += s + 'case ' + internal_format + ':\n'
do_switch = len(angle_to_type_map) > 1 or angle_to_type_map.keys()[0] != angle_format_unknown do_switch = len(
angle_to_type_map) > 1 or angle_to_type_map.keys()[0] != angle_format_unknown
if do_switch: if do_switch:
table_data += s + '{\n' table_data += s + '{\n'
...@@ -186,6 +193,7 @@ def parse_json(json_data): ...@@ -186,6 +193,7 @@ def parse_json(json_data):
return table_data, load_functions_data return table_data, load_functions_data
def main(): def main():
# auto_script parameters. # auto_script parameters.
...@@ -205,16 +213,18 @@ def main(): ...@@ -205,16 +213,18 @@ def main():
json_data = angle_format.load_json('load_functions_data.json') json_data = angle_format.load_json('load_functions_data.json')
switch_data, load_functions_data = parse_json(json_data) switch_data, load_functions_data = parse_json(json_data)
output = template.format(internal_format = internal_format_param, output = template.format(
angle_format = angle_format_param, internal_format=internal_format_param,
switch_data = switch_data, angle_format=angle_format_param,
load_functions_data = load_functions_data, switch_data=switch_data,
copyright_year = date.today().year) load_functions_data=load_functions_data,
copyright_year=date.today().year)
with open('load_functions_table_autogen.cpp', 'wt') as out_file: with open('load_functions_table_autogen.cpp', 'wt') as out_file:
out_file.write(output) out_file.write(output)
out_file.close() out_file.close()
return 0 return 0
if __name__ == '__main__': if __name__ == '__main__':
sys.exit(main()) sys.exit(main())
...@@ -19,11 +19,13 @@ os.chdir(os.path.dirname(os.path.abspath(sys.argv[0]))) ...@@ -19,11 +19,13 @@ os.chdir(os.path.dirname(os.path.abspath(sys.argv[0])))
sys.path.append('..') sys.path.append('..')
import angle_format import angle_format
def safe_append(the_dict, key, element): def safe_append(the_dict, key, element):
if key not in the_dict: if key not in the_dict:
the_dict[key] = [] the_dict[key] = []
the_dict[key].append(element) the_dict[key].append(element)
# Template for the header declaration of the dispatch table. # Template for the header declaration of the dispatch table.
dispatch_table_header_template = """// GENERATED FILE - DO NOT EDIT. dispatch_table_header_template = """// GENERATED FILE - DO NOT EDIT.
// Generated by {script_name} using data from {data_source_name} and gl.xml. // Generated by {script_name} using data from {data_source_name} and gl.xml.
...@@ -79,12 +81,15 @@ class DispatchTableGL : angle::NonCopyable ...@@ -79,12 +81,15 @@ class DispatchTableGL : angle::NonCopyable
#endif // LIBGLESV2_RENDERER_GL_DISPATCH_TABLE_GL_AUTOGEN_H_ #endif // LIBGLESV2_RENDERER_GL_DISPATCH_TABLE_GL_AUTOGEN_H_
""" """
def first_lower(str): def first_lower(str):
return str[:1].lower() + str[1:] return str[:1].lower() + str[1:]
def format_ep_decl(entry_point): def format_ep_decl(entry_point):
return " PFNGL" + entry_point.upper() + "PROC " + first_lower(entry_point) + " = nullptr;" return " PFNGL" + entry_point.upper() + "PROC " + first_lower(entry_point) + " = nullptr;"
# Template for the initialization file of the dispatch table. # Template for the initialization file of the dispatch table.
dispatch_table_source_template = """// GENERATED FILE - DO NOT EDIT. dispatch_table_source_template = """// GENERATED FILE - DO NOT EDIT.
// Generated by {script_name} using data from {data_source_name} and gl.xml. // Generated by {script_name} using data from {data_source_name} and gl.xml.
...@@ -155,9 +160,11 @@ void DispatchTableGL::initProcsSharedExtensionsNULL(const std::set<std::string> ...@@ -155,9 +160,11 @@ void DispatchTableGL::initProcsSharedExtensionsNULL(const std::set<std::string>
}} // namespace rx }} // namespace rx
""" """
def format_assign_ep(entry_point, ep): def format_assign_ep(entry_point, ep):
return ' ASSIGN("' + ep + '", ' + first_lower(entry_point[2:]) + ');' return ' ASSIGN("' + ep + '", ' + first_lower(entry_point[2:]) + ');'
def format_requirements_lines(required, entry_points): def format_requirements_lines(required, entry_points):
major, minor = required major, minor = required
lines = [' if (version >= gl::Version(' + major + ', ' + minor + '))', ' {'] lines = [' if (version >= gl::Version(' + major + ', ' + minor + '))', ' {']
...@@ -165,12 +172,14 @@ def format_requirements_lines(required, entry_points): ...@@ -165,12 +172,14 @@ def format_requirements_lines(required, entry_points):
lines += [' }'] lines += [' }']
return '\n'.join(lines) return '\n'.join(lines)
def format_extension_requirements_lines(extension, entry_points, api): def format_extension_requirements_lines(extension, entry_points, api):
lines = [' if (extensions.count("' + extension + '") != 0)', ' {'] lines = [' if (extensions.count("' + extension + '") != 0)', ' {']
lines += [format_assign_ep(entry_point, ep) for entry_point, ep in sorted(entry_points)] lines += [format_assign_ep(entry_point, ep) for entry_point, ep in sorted(entry_points)]
lines += [' }'] lines += [' }']
return '\n'.join(lines) return '\n'.join(lines)
def assign_null_line(line): def assign_null_line(line):
m = re.match(r' ASSIGN\("gl.*", (.+)\);', line) m = re.match(r' ASSIGN\("gl.*", (.+)\);', line)
if m: if m:
...@@ -179,15 +188,19 @@ def assign_null_line(line): ...@@ -179,15 +188,19 @@ def assign_null_line(line):
else: else:
return line return line
def assign_null(entry): def assign_null(entry):
return '\n'.join([assign_null_line(line) for line in entry.split('\n')]) return '\n'.join([assign_null_line(line) for line in entry.split('\n')])
def nullify(data): def nullify(data):
return [assign_null(entry) for entry in data] return [assign_null(entry) for entry in data]
def format_param(param): def format_param(param):
return "".join(param.itertext()) return "".join(param.itertext())
null_functions_header_template = """// GENERATED FILE - DO NOT EDIT. null_functions_header_template = """// GENERATED FILE - DO NOT EDIT.
// Generated by {script_name} using data from {data_source_name} and gl.xml. // Generated by {script_name} using data from {data_source_name} and gl.xml.
// //
...@@ -229,6 +242,7 @@ namespace rx ...@@ -229,6 +242,7 @@ namespace rx
}} // namespace rx }} // namespace rx
""" """
def main(): def main():
# auto_script parameters. # auto_script parameters.
...@@ -269,7 +283,7 @@ def main(): ...@@ -269,7 +283,7 @@ def main():
core_removed_eps = [] core_removed_eps = []
for core_removed_ep in xml_root.findall('feature/remove'): for core_removed_ep in xml_root.findall('feature/remove'):
assert(core_removed_ep.attrib['profile'] == 'core') assert (core_removed_ep.attrib['profile'] == 'core')
for command in core_removed_ep.findall('./command'): for command in core_removed_ep.findall('./command'):
core_removed_eps.append(command.attrib['name']) core_removed_eps.append(command.attrib['name'])
...@@ -333,15 +347,18 @@ def main(): ...@@ -333,15 +347,18 @@ def main():
if not gl_required: if not gl_required:
gl_required = reqs gl_required = reqs
elif entry_point in core_removed_eps: elif entry_point in core_removed_eps:
print('Upgrade ' + entry_point + ' to ' + str(reqs) + ' instead of ' + str(gl_required)) print('Upgrade ' + entry_point + ' to ' + str(reqs) + ' instead of ' +
str(gl_required))
gl_required = reqs gl_required = reqs
else: else:
print('Keep ' + entry_point + ' at ' + str(gl_required) + ' instead of ' + str(reqs)) print('Keep ' + entry_point + ' at ' + str(gl_required) +
' instead of ' + str(reqs))
elif api == 'gles2': elif api == 'gles2':
if not gles2_required: if not gles2_required:
gles2_required = reqs gles2_required = reqs
else: else:
print("Duplicate for " + entry_point + ": " + str(reqs) + " and " + str(gles2_required)) print("Duplicate for " + entry_point + ": " + str(reqs) + " and " +
str(gles2_required))
else: else:
raise Exception('Bad api type: ' + api) raise Exception('Bad api type: ' + api)
...@@ -373,7 +390,8 @@ def main(): ...@@ -373,7 +390,8 @@ def main():
full_ep = ep full_ep = ep
if '_KHR_' in extension: if '_KHR_' in extension:
full_ep += 'KHR' full_ep += 'KHR'
safe_append(gles2_extension_requirements, extension, (entry_point, full_ep)) safe_append(gles2_extension_requirements, extension,
(entry_point, full_ep))
if not (gl_required or gles2_required or extension): if not (gl_required or gles2_required or extension):
raise Exception('Entry point ' + entry_point + ' not found in the xml.') raise Exception('Entry point ' + entry_point + ' not found in the xml.')
...@@ -386,11 +404,11 @@ def main(): ...@@ -386,11 +404,11 @@ def main():
table_data.append("\n".join(formatted)) table_data.append("\n".join(formatted))
dispatch_table_header = dispatch_table_header_template.format( dispatch_table_header = dispatch_table_header_template.format(
script_name = os.path.basename(sys.argv[0]), script_name=os.path.basename(sys.argv[0]),
data_source_name = data_source_name, data_source_name=data_source_name,
year = date.today().year, year=date.today().year,
file_name = dispatch_header_path, file_name=dispatch_header_path,
table_data = "\n\n".join(table_data)) table_data="\n\n".join(table_data))
with open(dispatch_header_path, "w") as out: with open(dispatch_header_path, "w") as out:
out.write(dispatch_table_header) out.write(dispatch_table_header)
...@@ -401,7 +419,8 @@ def main(): ...@@ -401,7 +419,8 @@ def main():
gl_extensions_data = [] gl_extensions_data = []
for extension, entry_points in sorted(gl_extension_requirements.iteritems()): for extension, entry_points in sorted(gl_extension_requirements.iteritems()):
gl_extensions_data.append(format_extension_requirements_lines(extension, entry_points, "gl")) gl_extensions_data.append(
format_extension_requirements_lines(extension, entry_points, "gl"))
gles2_data = [] gles2_data = []
for gles2_required, entry_points in sorted(gles2_requirements.iteritems()): for gles2_required, entry_points in sorted(gles2_requirements.iteritems()):
...@@ -409,27 +428,29 @@ def main(): ...@@ -409,27 +428,29 @@ def main():
gles2_extensions_data = [] gles2_extensions_data = []
for extension, entry_points in sorted(gles2_extension_requirements.iteritems()): for extension, entry_points in sorted(gles2_extension_requirements.iteritems()):
gles2_extensions_data.append(format_extension_requirements_lines(extension, entry_points, "gles2")) gles2_extensions_data.append(
format_extension_requirements_lines(extension, entry_points, "gles2"))
both_extensions_data = [] both_extensions_data = []
for extension, entry_points in sorted(both_extension_requirements.iteritems()): for extension, entry_points in sorted(both_extension_requirements.iteritems()):
both_extensions_data.append(format_extension_requirements_lines(extension, entry_points, "gles2|gl")) both_extensions_data.append(
format_extension_requirements_lines(extension, entry_points, "gles2|gl"))
dispatch_table_source = dispatch_table_source_template.format( dispatch_table_source = dispatch_table_source_template.format(
script_name = os.path.basename(sys.argv[0]), script_name=os.path.basename(sys.argv[0]),
data_source_name = data_source_name, data_source_name=data_source_name,
year = date.today().year, year=date.today().year,
file_name = dispatch_source_path, file_name=dispatch_source_path,
gl_data = "\n\n".join(gl_data), gl_data="\n\n".join(gl_data),
gl_extensions_data = "\n\n".join(gl_extensions_data), gl_extensions_data="\n\n".join(gl_extensions_data),
gles2_data = "\n\n".join(gles2_data), gles2_data="\n\n".join(gles2_data),
gles2_extensions_data = "\n\n".join(gles2_extensions_data), gles2_extensions_data="\n\n".join(gles2_extensions_data),
both_extensions_data = "\n\n".join(both_extensions_data), both_extensions_data="\n\n".join(both_extensions_data),
gl_null_data = "\n\n".join(nullify(gl_data)), gl_null_data="\n\n".join(nullify(gl_data)),
gl_null_extensions_data = "\n\n".join(nullify(gl_extensions_data)), gl_null_extensions_data="\n\n".join(nullify(gl_extensions_data)),
gles2_null_data = "\n\n".join(nullify(gles2_data)), gles2_null_data="\n\n".join(nullify(gles2_data)),
gles2_null_extensions_data = "\n\n".join(nullify(gles2_extensions_data)), gles2_null_extensions_data="\n\n".join(nullify(gles2_extensions_data)),
both_null_extensions_data = "\n\n".join(nullify(both_extensions_data))) both_null_extensions_data="\n\n".join(nullify(both_extensions_data)))
with open(dispatch_source_path, "w") as out: with open(dispatch_source_path, "w") as out:
out.write(dispatch_table_source) out.write(dispatch_table_source)
...@@ -463,21 +484,21 @@ def main(): ...@@ -463,21 +484,21 @@ def main():
null_stubs = [command_defs[entry_point] for entry_point in sorted(all_entry_points)] null_stubs = [command_defs[entry_point] for entry_point in sorted(all_entry_points)]
null_functions_header = null_functions_header_template.format( null_functions_header = null_functions_header_template.format(
script_name = os.path.basename(sys.argv[0]), script_name=os.path.basename(sys.argv[0]),
data_source_name = data_source_name, data_source_name=data_source_name,
year = date.today().year, year=date.today().year,
file_name = null_functions_header_path, file_name=null_functions_header_path,
table_data = "\n".join(null_decls)) table_data="\n".join(null_decls))
with open(null_functions_header_path, "w") as out: with open(null_functions_header_path, "w") as out:
out.write(null_functions_header) out.write(null_functions_header)
null_functions_source = null_functions_source_template.format( null_functions_source = null_functions_source_template.format(
script_name = os.path.basename(sys.argv[0]), script_name=os.path.basename(sys.argv[0]),
data_source_name = data_source_name, data_source_name=data_source_name,
year = date.today().year, year=date.today().year,
file_name = null_functions_source_path, file_name=null_functions_source_path,
table_data = "\n\n".join(null_stubs)) table_data="\n\n".join(null_stubs))
with open(null_functions_source_path, "w") as out: with open(null_functions_source_path, "w") as out:
out.write(null_functions_source) out.write(null_functions_source)
......
...@@ -75,7 +75,7 @@ image_basic_template = """imageFormatID = {image}; ...@@ -75,7 +75,7 @@ image_basic_template = """imageFormatID = {image};
vkImageFormat = {vk_image_format}; vkImageFormat = {vk_image_format};
imageInitializerFunction = {image_initializer};""" imageInitializerFunction = {image_initializer};"""
image_struct_template="{{{image}, {vk_image_format}, {image_initializer}}}" image_struct_template = "{{{image}, {vk_image_format}, {image_initializer}}}"
image_fallback_template = """{{ image_fallback_template = """{{
static constexpr ImageFormatInitInfo kInfo[] = {{{image_list}}}; static constexpr ImageFormatInitInfo kInfo[] = {{{image_list}}};
...@@ -88,7 +88,7 @@ vkBufferFormatIsPacked = {vk_buffer_format_is_packed}; ...@@ -88,7 +88,7 @@ vkBufferFormatIsPacked = {vk_buffer_format_is_packed};
vertexLoadFunction = {vertex_load_function}; vertexLoadFunction = {vertex_load_function};
vertexLoadRequiresConversion = {vertex_load_converts};""" vertexLoadRequiresConversion = {vertex_load_converts};"""
buffer_struct_template="""{{{buffer}, {vk_buffer_format}, {vk_buffer_format_is_packed}, buffer_struct_template = """{{{buffer}, {vk_buffer_format}, {vk_buffer_format_is_packed},
{vertex_load_function}, {vertex_load_converts}}}""" {vertex_load_function}, {vertex_load_converts}}}"""
buffer_fallback_template = """{{ buffer_fallback_template = """{{
...@@ -106,10 +106,7 @@ def gen_format_case(angle, internal_format, vk_json_data): ...@@ -106,10 +106,7 @@ def gen_format_case(angle, internal_format, vk_json_data):
vk_overrides = vk_json_data["overrides"] vk_overrides = vk_json_data["overrides"]
vk_fallbacks = vk_json_data["fallbacks"] vk_fallbacks = vk_json_data["fallbacks"]
args = dict( args = dict(
format_id=angle, format_id=angle, internal_format=internal_format, image_template="", buffer_template="")
internal_format=internal_format,
image_template="",
buffer_template="")
if ((angle not in vk_map) and (angle not in vk_overrides) and if ((angle not in vk_map) and (angle not in vk_overrides) and
(angle not in vk_fallbacks)) or angle == 'NONE': (angle not in vk_fallbacks)) or angle == 'NONE':
...@@ -136,8 +133,7 @@ def gen_format_case(angle, internal_format, vk_json_data): ...@@ -136,8 +133,7 @@ def gen_format_case(angle, internal_format, vk_json_data):
buffer="angle::FormatID::" + format, buffer="angle::FormatID::" + format,
vk_buffer_format=vk_map[format], vk_buffer_format=vk_map[format],
vk_buffer_format_is_packed=is_packed(vk_map[format]), vk_buffer_format_is_packed=is_packed(vk_map[format]),
vertex_load_function=angle_format.get_vertex_copy_function( vertex_load_function=angle_format.get_vertex_copy_function(angle, format),
angle, format),
vertex_load_converts='false' if angle == format else 'true', vertex_load_converts='false' if angle == format else 'true',
) )
...@@ -148,9 +144,7 @@ def gen_format_case(angle, internal_format, vk_json_data): ...@@ -148,9 +144,7 @@ def gen_format_case(angle, internal_format, vk_json_data):
elif len(images) > 1: elif len(images) > 1:
args.update( args.update(
image_template=image_fallback_template, image_template=image_fallback_template,
image_list=", ".join( image_list=", ".join(image_struct_template.format(**image_args(i)) for i in images))
image_struct_template.format(**image_args(i))
for i in images))
buffers = get_formats(angle, "buffer") buffers = get_formats(angle, "buffer")
if len(buffers) == 1: if len(buffers) == 1:
...@@ -172,11 +166,7 @@ def main(): ...@@ -172,11 +166,7 @@ def main():
# auto_script parameters. # auto_script parameters.
if len(sys.argv) > 1: if len(sys.argv) > 1:
inputs = [ inputs = ['../angle_format.py', '../angle_format_map.json', input_file_name]
'../angle_format.py',
'../angle_format_map.json',
input_file_name
]
outputs = [out_file_name] outputs = [out_file_name]
if sys.argv[1] == 'inputs': if sys.argv[1] == 'inputs':
...@@ -190,15 +180,16 @@ def main(): ...@@ -190,15 +180,16 @@ def main():
angle_to_gl = angle_format.load_inverse_table(os.path.join('..', 'angle_format_map.json')) angle_to_gl = angle_format.load_inverse_table(os.path.join('..', 'angle_format_map.json'))
vk_json_data = angle_format.load_json(input_file_name) vk_json_data = angle_format.load_json(input_file_name)
vk_cases = [gen_format_case(angle, gl, vk_json_data) vk_cases = [
for angle, gl in sorted(angle_to_gl.iteritems())] gen_format_case(angle, gl, vk_json_data) for angle, gl in sorted(angle_to_gl.iteritems())
]
output_cpp = template_table_autogen_cpp.format( output_cpp = template_table_autogen_cpp.format(
copyright_year = date.today().year, copyright_year=date.today().year,
format_case_data = "\n".join(vk_cases), format_case_data="\n".join(vk_cases),
script_name = __file__, script_name=__file__,
out_file_name = out_file_name, out_file_name=out_file_name,
input_file_name = input_file_name) input_file_name=input_file_name)
with open(out_file_name, 'wt') as out_file: with open(out_file_name, 'wt') as out_file:
out_file.write(output_cpp) out_file.write(output_cpp)
......
...@@ -157,34 +157,42 @@ angle_vulkan_internal_shaders = [ ...@@ -157,34 +157,42 @@ angle_vulkan_internal_shaders = [
] ]
""" """
# Gets the constant variable name for a generated shader. # Gets the constant variable name for a generated shader.
def get_var_name(output, prefix='k'): def get_var_name(output, prefix='k'):
return prefix + output.replace(".", "_") return prefix + output.replace(".", "_")
# Gets the namespace name given to constants generated from shader_file # Gets the namespace name given to constants generated from shader_file
def get_namespace_name(shader_file): def get_namespace_name(shader_file):
return get_var_name(os.path.basename(shader_file), '') return get_var_name(os.path.basename(shader_file), '')
# Gets the namespace name given to constants generated from shader_file # Gets the namespace name given to constants generated from shader_file
def get_variation_table_name(shader_file, prefix='k'): def get_variation_table_name(shader_file, prefix='k'):
return get_var_name(os.path.basename(shader_file), prefix) + '_shaders' return get_var_name(os.path.basename(shader_file), prefix) + '_shaders'
# Gets the internal ID string for a particular shader. # Gets the internal ID string for a particular shader.
def get_shader_id(shader): def get_shader_id(shader):
file = os.path.splitext(os.path.basename(shader))[0] file = os.path.splitext(os.path.basename(shader))[0]
return file.replace(".", "_") return file.replace(".", "_")
def get_output_path(name):
    """Return the path of the generated SPIR-V include file for *name*."""
    return os.path.join('shaders', 'gen', '%s.inc' % name)
def get_linux_glslang_exe_path():
    """Return the relative path to the Linux glslang validator binary."""
    # Relative to this script's directory, four levels up into tools/.
    return '../../../../tools/glslang/glslang_validator'
def get_win_glslang_exe_path():
    """Return the Windows glslang binary path (the Linux path plus '.exe')."""
    return '%s.exe' % get_linux_glslang_exe_path()
def get_glslang_exe_path(): def get_glslang_exe_path():
glslang_exe = get_win_glslang_exe_path() if is_windows else get_linux_glslang_exe_path() glslang_exe = get_win_glslang_exe_path() if is_windows else get_linux_glslang_exe_path()
if not os.path.isfile(glslang_exe): if not os.path.isfile(glslang_exe):
...@@ -197,12 +205,15 @@ def gen_shader_blob_entry(shader): ...@@ -197,12 +205,15 @@ def gen_shader_blob_entry(shader):
var_name = get_var_name(os.path.basename(shader))[0:-4] var_name = get_var_name(os.path.basename(shader))[0:-4]
return "{%s, %s}" % (var_name, "sizeof(%s)" % var_name) return "{%s, %s}" % (var_name, "sizeof(%s)" % var_name)
def slash(s):
    """Normalize backslashes in *s* to forward slashes."""
    return '/'.join(s.split('\\'))
def gen_shader_include(shader):
    """Return the C++ #include line for a generated shader file."""
    normalized = slash(shader)
    return '#include "libANGLE/renderer/vulkan/%s"' % normalized
def get_shader_variations(shader): def get_shader_variations(shader):
variation_file = shader + '.json' variation_file = shader + '.json'
if not os.path.exists(variation_file): if not os.path.exists(variation_file):
...@@ -227,11 +238,13 @@ def get_shader_variations(shader): ...@@ -227,11 +238,13 @@ def get_shader_variations(shader):
return (flags, enums) return (flags, enums)
def get_variation_bits(flags, enums):
    """Return (flag bit count, per-enum bit counts) for a shader's variations.

    Each flag occupies one bit; each enum occupies exactly enough bits to
    encode the index of its last value.
    """
    enum_bits = []
    for enum in enums:
        # enum is (name, values); bits needed for the largest value index.
        enum_bits.append((len(enum[1]) - 1).bit_length())
    return (len(flags), enum_bits)
def next_enum_variation(enums, enum_indices): def next_enum_variation(enums, enum_indices):
"""Loop through indices from [0, 0, ...] to [L0-1, L1-1, ...] """Loop through indices from [0, 0, ...] to [L0-1, L1-1, ...]
where Li is len(enums[i]). The list can be thought of as a number with many where Li is len(enums[i]). The list can be thought of as a number with many
...@@ -242,24 +255,29 @@ def next_enum_variation(enums, enum_indices): ...@@ -242,24 +255,29 @@ def next_enum_variation(enums, enum_indices):
# if current digit has room, increment it. # if current digit has room, increment it.
if current + 1 < len(enums[i][1]): if current + 1 < len(enums[i][1]):
enum_indices[i] = current + 1 enum_indices[i] = current + 1
return True; return True
# otherwise reset it to 0 and carry to the next digit. # otherwise reset it to 0 and carry to the next digit.
enum_indices[i] = 0 enum_indices[i] = 0
# if this is reached, the number has overflowed and the loop is finished. # if this is reached, the number has overflowed and the loop is finished.
return False return False
# Matches a run of blank (or whitespace-only) lines so it can be collapsed.
compact_newlines_regex = re.compile(r"\n\s*\n", re.MULTILINE)


def cleanup_preprocessed_shader(shader_text):
    """Strip leading/trailing whitespace and collapse blank-line runs to one."""
    stripped = shader_text.strip()
    return compact_newlines_regex.sub('\n\n', stripped)
class CompileQueue: class CompileQueue:
class AppendPreprocessorOutput: class AppendPreprocessorOutput:
def __init__(self, shader_file, preprocessor_args, output_path): def __init__(self, shader_file, preprocessor_args, output_path):
# Asynchronously launch the preprocessor job. # Asynchronously launch the preprocessor job.
self.process = subprocess.Popen(preprocessor_args, self.process = subprocess.Popen(
stdout=subprocess.PIPE, preprocessor_args, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
stderr=subprocess.PIPE)
# Store the file name for output to be appended to. # Store the file name for output to be appended to.
self.output_path = output_path self.output_path = output_path
# Store info for error description. # Store info for error description.
...@@ -278,12 +296,12 @@ class CompileQueue: ...@@ -278,12 +296,12 @@ class CompileQueue:
"Error running preprocessor on " + self.shader_file) "Error running preprocessor on " + self.shader_file)
class CompileToSPIRV: class CompileToSPIRV:
def __init__(self, shader_file, shader_basename, variation_string, output_path, def __init__(self, shader_file, shader_basename, variation_string, output_path,
compile_args, preprocessor_args): compile_args, preprocessor_args):
# Asynchronously launch the compile job. # Asynchronously launch the compile job.
self.process = subprocess.Popen(compile_args, self.process = subprocess.Popen(
stdout=subprocess.PIPE, compile_args, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
stderr=subprocess.PIPE)
# Store info for launching the preprocessor. # Store info for launching the preprocessor.
self.preprocessor_args = preprocessor_args self.preprocessor_args = preprocessor_args
self.output_path = output_path self.output_path = output_path
...@@ -296,8 +314,8 @@ class CompileQueue: ...@@ -296,8 +314,8 @@ class CompileQueue:
(out, err) = self.process.communicate() (out, err) = self.process.communicate()
if self.process.returncode == 0: if self.process.returncode == 0:
# Insert the preprocessor job in the queue. # Insert the preprocessor job in the queue.
queue.append(CompileQueue.AppendPreprocessorOutput(self.shader_file, queue.append(
self.preprocessor_args, CompileQueue.AppendPreprocessorOutput(self.shader_file, self.preprocessor_args,
self.output_path)) self.output_path))
# If all the output says is the source file name, don't bother printing it. # If all the output says is the source file name, don't bother printing it.
if out.strip() == self.shader_file: if out.strip() == self.shader_file:
...@@ -341,8 +359,8 @@ class CompileQueue: ...@@ -341,8 +359,8 @@ class CompileQueue:
return exception_description return exception_description
def add_job(self, shader_file, shader_basename, variation_string, output_path, def add_job(self, shader_file, shader_basename, variation_string, output_path, compile_args,
compile_args, preprocessor_args): preprocessor_args):
# If the queue is full, wait until there is at least one slot available. # If the queue is full, wait until there is at least one slot available.
while len(self.queue) >= self.thread_count: while len(self.queue) >= self.thread_count:
exception = self._wait_first(False) exception = self._wait_first(False)
...@@ -352,9 +370,9 @@ class CompileQueue: ...@@ -352,9 +370,9 @@ class CompileQueue:
raise Exception(exception) raise Exception(exception)
# Add a compile job # Add a compile job
self.queue.append(CompileQueue.CompileToSPIRV(shader_file, shader_basename, self.queue.append(
variation_string, output_path, CompileQueue.CompileToSPIRV(shader_file, shader_basename, variation_string,
compile_args, preprocessor_args)) output_path, compile_args, preprocessor_args))
def finish(self): def finish(self):
exception = self._wait_all(False) exception = self._wait_all(False)
...@@ -362,6 +380,7 @@ class CompileQueue: ...@@ -362,6 +380,7 @@ class CompileQueue:
if exception is not None: if exception is not None:
raise Exception(exception) raise Exception(exception)
def compile_variation(glslang_path, compile_queue, shader_file, shader_basename, flags, enums, def compile_variation(glslang_path, compile_queue, shader_file, shader_basename, flags, enums,
flags_active, enum_indices, flags_bits, enum_bits, output_shaders): flags_active, enum_indices, flags_bits, enum_bits, output_shaders):
...@@ -408,7 +427,9 @@ def compile_variation(glslang_path, compile_queue, shader_file, shader_basename, ...@@ -408,7 +427,9 @@ def compile_variation(glslang_path, compile_queue, shader_file, shader_basename,
compile_queue.add_job(shader_file, shader_basename, variation_string, output_path, compile_queue.add_job(shader_file, shader_basename, variation_string, output_path,
glslang_args, glslang_preprocessor_output_args) glslang_args, glslang_preprocessor_output_args)
class ShaderAndVariations: class ShaderAndVariations:
def __init__(self, shader_file): def __init__(self, shader_file):
self.shader_file = shader_file self.shader_file = shader_file
(self.flags, self.enums) = get_shader_variations(shader_file) (self.flags, self.enums) = get_shader_variations(shader_file)
...@@ -438,15 +459,18 @@ def get_variation_definition(shader_and_variation): ...@@ -438,15 +459,18 @@ def get_variation_definition(shader_and_variation):
enum = enums[e] enum = enums[e]
enum_name = enum[0] enum_name = enum[0]
definition += 'enum %s\n{\n' % enum_name definition += 'enum %s\n{\n' % enum_name
definition += ''.join(['k%s = 0x%08X,\n' % definition += ''.join([
(enum[1][v], v << current_bit_start) for v in range(len(enum[1]))]) 'k%s = 0x%08X,\n' % (enum[1][v], v << current_bit_start) for v in range(len(enum[1]))
definition += 'k%sMask = 0x%08X,\n' % (enum_name, ((1 << enum_bits[e]) - 1) << current_bit_start) ])
definition += 'k%sMask = 0x%08X,\n' % (enum_name,
((1 << enum_bits[e]) - 1) << current_bit_start)
definition += '};\n' definition += '};\n'
current_bit_start += enum_bits[e] current_bit_start += enum_bits[e]
definition += '} // namespace %s\n' % namespace_name definition += '} // namespace %s\n' % namespace_name
return definition return definition
def get_shader_table_h(shader_and_variation): def get_shader_table_h(shader_and_variation):
shader_file = shader_and_variation.shader_file shader_file = shader_and_variation.shader_file
flags = shader_and_variation.flags flags = shader_and_variation.flags
...@@ -477,6 +501,7 @@ def get_shader_table_h(shader_and_variation): ...@@ -477,6 +501,7 @@ def get_shader_table_h(shader_and_variation):
table += '];' table += '];'
return table return table
def get_shader_table_cpp(shader_and_variation): def get_shader_table_cpp(shader_and_variation):
shader_file = shader_and_variation.shader_file shader_file = shader_and_variation.shader_file
enums = shader_and_variation.enums enums = shader_and_variation.enums
...@@ -500,7 +525,7 @@ def get_shader_table_cpp(shader_and_variation): ...@@ -500,7 +525,7 @@ def get_shader_table_cpp(shader_and_variation):
table = 'constexpr ShaderBlob %s[] = {\n' % table_name table = 'constexpr ShaderBlob %s[] = {\n' % table_name
# The last possible variation is every flag enabled and every enum at max # The last possible variation is every flag enabled and every enum at max
last_variation = ((1 << flags_bits) - 1) | reduce(lambda x, y: x|y, enum_maxes, 0) last_variation = ((1 << flags_bits) - 1) | reduce(lambda x, y: x | y, enum_maxes, 0)
for variation in range(last_variation + 1): for variation in range(last_variation + 1):
# if any variation is invalid, output an empty entry # if any variation is invalid, output an empty entry
...@@ -513,6 +538,7 @@ def get_shader_table_cpp(shader_and_variation): ...@@ -513,6 +538,7 @@ def get_shader_table_cpp(shader_and_variation):
table += '};' table += '};'
return table return table
def get_get_function_h(shader_and_variation): def get_get_function_h(shader_and_variation):
shader_file = shader_and_variation.shader_file shader_file = shader_and_variation.shader_file
...@@ -523,6 +549,7 @@ def get_get_function_h(shader_and_variation): ...@@ -523,6 +549,7 @@ def get_get_function_h(shader_and_variation):
return definition return definition
def get_get_function_cpp(shader_and_variation): def get_get_function_cpp(shader_and_variation):
shader_file = shader_and_variation.shader_file shader_file = shader_and_variation.shader_file
enums = shader_and_variation.enums enums = shader_and_variation.enums
...@@ -539,6 +566,7 @@ def get_get_function_cpp(shader_and_variation): ...@@ -539,6 +566,7 @@ def get_get_function_cpp(shader_and_variation):
return definition return definition
def get_destroy_call(shader_and_variation): def get_destroy_call(shader_and_variation):
shader_file = shader_and_variation.shader_file shader_file = shader_and_variation.shader_file
...@@ -564,9 +592,11 @@ def main(): ...@@ -564,9 +592,11 @@ def main():
shader_files_to_compile = [f for f in shader_files_to_compile if f.find(sys.argv[1]) != -1] shader_files_to_compile = [f for f in shader_files_to_compile if f.find(sys.argv[1]) != -1]
valid_extensions = ['.vert', '.frag', '.comp'] valid_extensions = ['.vert', '.frag', '.comp']
input_shaders = sorted([os.path.join(shaders_dir, shader) input_shaders = sorted([
os.path.join(shaders_dir, shader)
for shader in os.listdir(shaders_dir) for shader in os.listdir(shaders_dir)
if any([os.path.splitext(shader)[1] == ext for ext in valid_extensions])]) if any([os.path.splitext(shader)[1] == ext for ext in valid_extensions])
])
if print_inputs: if print_inputs:
glslang_binaries = [get_linux_glslang_exe_path(), get_win_glslang_exe_path()] glslang_binaries = [get_linux_glslang_exe_path(), get_win_glslang_exe_path()]
glslang_binary_hashes = [path + '.sha1' for path in glslang_binaries] glslang_binary_hashes = [path + '.sha1' for path in glslang_binaries]
...@@ -582,7 +612,9 @@ def main(): ...@@ -582,7 +612,9 @@ def main():
output_shaders = [] output_shaders = []
input_shaders_and_variations = [ShaderAndVariations(shader_file) for shader_file in input_shaders] input_shaders_and_variations = [
ShaderAndVariations(shader_file) for shader_file in input_shaders
]
compile_queue = CompileQueue() compile_queue = CompileQueue()
...@@ -605,7 +637,8 @@ def main(): ...@@ -605,7 +637,8 @@ def main():
# with values in [0, 2^len(flags)) # with values in [0, 2^len(flags))
for flags_active in range(1 << len(flags)): for flags_active in range(1 << len(flags)):
compile_variation(glslang_path, compile_queue, shader_file, output_name, flags, compile_variation(glslang_path, compile_queue, shader_file, output_name, flags,
enums, flags_active, enum_indices, flags_bits, enum_bits, output_shaders) enums, flags_active, enum_indices, flags_bits, enum_bits,
output_shaders)
if not next_enum_variation(enums, enum_indices): if not next_enum_variation(enums, enum_indices):
break break
...@@ -622,51 +655,50 @@ def main(): ...@@ -622,51 +655,50 @@ def main():
# STEP 2: Consolidate the .inc files into an auto-generated cpp/h library. # STEP 2: Consolidate the .inc files into an auto-generated cpp/h library.
with open(out_file_cpp, 'w') as outfile: with open(out_file_cpp, 'w') as outfile:
includes = "\n".join([gen_shader_include(shader) for shader in output_shaders]) includes = "\n".join([gen_shader_include(shader) for shader in output_shaders])
shader_tables_cpp = '\n'.join([get_shader_table_cpp(s) shader_tables_cpp = '\n'.join(
for s in input_shaders_and_variations]) [get_shader_table_cpp(s) for s in input_shaders_and_variations])
shader_destroy_calls = '\n'.join([get_destroy_call(s) shader_destroy_calls = '\n'.join(
for s in input_shaders_and_variations]) [get_destroy_call(s) for s in input_shaders_and_variations])
shader_get_functions_cpp = '\n'.join([get_get_function_cpp(s) shader_get_functions_cpp = '\n'.join(
for s in input_shaders_and_variations]) [get_get_function_cpp(s) for s in input_shaders_and_variations])
outcode = template_shader_library_cpp.format( outcode = template_shader_library_cpp.format(
script_name = __file__, script_name=__file__,
copyright_year = date.today().year, copyright_year=date.today().year,
out_file_name = out_file_cpp, out_file_name=out_file_cpp,
input_file_name = 'shaders/src/*', input_file_name='shaders/src/*',
internal_shader_includes = includes, internal_shader_includes=includes,
shader_tables_cpp = shader_tables_cpp, shader_tables_cpp=shader_tables_cpp,
shader_destroy_calls = shader_destroy_calls, shader_destroy_calls=shader_destroy_calls,
shader_get_functions_cpp = shader_get_functions_cpp) shader_get_functions_cpp=shader_get_functions_cpp)
outfile.write(outcode) outfile.write(outcode)
outfile.close() outfile.close()
with open(out_file_h, 'w') as outfile: with open(out_file_h, 'w') as outfile:
shader_variation_definitions = '\n'.join([get_variation_definition(s) shader_variation_definitions = '\n'.join(
for s in input_shaders_and_variations]) [get_variation_definition(s) for s in input_shaders_and_variations])
shader_get_functions_h = '\n'.join([get_get_function_h(s) shader_get_functions_h = '\n'.join(
for s in input_shaders_and_variations]) [get_get_function_h(s) for s in input_shaders_and_variations])
shader_tables_h = '\n'.join([get_shader_table_h(s) shader_tables_h = '\n'.join([get_shader_table_h(s) for s in input_shaders_and_variations])
for s in input_shaders_and_variations])
outcode = template_shader_library_h.format( outcode = template_shader_library_h.format(
script_name = __file__, script_name=__file__,
copyright_year = date.today().year, copyright_year=date.today().year,
out_file_name = out_file_h, out_file_name=out_file_h,
input_file_name = 'shaders/src/*', input_file_name='shaders/src/*',
shader_variation_definitions = shader_variation_definitions, shader_variation_definitions=shader_variation_definitions,
shader_get_functions_h = shader_get_functions_h, shader_get_functions_h=shader_get_functions_h,
shader_tables_h = shader_tables_h) shader_tables_h=shader_tables_h)
outfile.write(outcode) outfile.write(outcode)
outfile.close() outfile.close()
# STEP 3: Create a gni file with the generated files. # STEP 3: Create a gni file with the generated files.
with io.open(out_file_gni, 'w', newline='\n') as outfile: with io.open(out_file_gni, 'w', newline='\n') as outfile:
outcode = template_shader_includes_gni.format( outcode = template_shader_includes_gni.format(
script_name = __file__, script_name=__file__,
copyright_year = date.today().year, copyright_year=date.today().year,
out_file_name = out_file_gni, out_file_name=out_file_gni,
input_file_name = 'shaders/src/*', input_file_name='shaders/src/*',
shaders_list = ',\n'.join([' "' + slash(shader) + '"' for shader in output_shaders])) shaders_list=',\n'.join([' "' + slash(shader) + '"' for shader in output_shaders]))
outfile.write(outcode) outfile.write(outcode)
outfile.close() outfile.close()
......
...@@ -15,7 +15,6 @@ import angle_format ...@@ -15,7 +15,6 @@ import angle_format
import xml.etree.ElementTree as etree import xml.etree.ElementTree as etree
import sys, os import sys, os
template_table_autogen_cpp = """// GENERATED FILE - DO NOT EDIT. template_table_autogen_cpp = """// GENERATED FILE - DO NOT EDIT.
// Generated by {script_name} using data from {input_file_name} and // Generated by {script_name} using data from {input_file_name} and
// the vk.xml file situated at // the vk.xml file situated at
...@@ -81,9 +80,9 @@ def gen_format_case(index, vk_to_index_to_format_map, vk_map): ...@@ -81,9 +80,9 @@ def gen_format_case(index, vk_to_index_to_format_map, vk_map):
buffer_features_str = "0" buffer_features_str = "0"
return template_format_property.format( return template_format_property.format(
vk_format = vk_format, vk_format=vk_format,
optimal_features = optimal_features_str, optimal_features=optimal_features_str,
buffer_features = buffer_features_str) buffer_features=buffer_features_str)
def main(): def main():
...@@ -121,15 +120,18 @@ def main(): ...@@ -121,15 +120,18 @@ def main():
vk_format_name_to_index_map[index] = vk_format vk_format_name_to_index_map[index] = vk_format
vk_map = angle_format.load_json(input_file_name) vk_map = angle_format.load_json(input_file_name)
vk_cases = [gen_format_case(index, vk_format_name_to_index_map, vk_map) for index in vk_format_name_to_index_map] vk_cases = [
gen_format_case(index, vk_format_name_to_index_map, vk_map)
for index in vk_format_name_to_index_map
]
output_cpp = template_table_autogen_cpp.format( output_cpp = template_table_autogen_cpp.format(
copyright_year = date.today().year, copyright_year=date.today().year,
num_formats = num_formats, num_formats=num_formats,
format_case_data = "\n,".join(vk_cases), format_case_data="\n,".join(vk_cases),
script_name = __file__, script_name=__file__,
out_file_name = out_file_name, out_file_name=out_file_name,
input_file_name = input_file_name) input_file_name=input_file_name)
with open(out_file_name, 'wt') as out_file: with open(out_file_name, 'wt') as out_file:
out_file.write(output_cpp) out_file.write(output_cpp)
......
...@@ -56,6 +56,7 @@ size_t g_numProcs = {num_procs}; ...@@ -56,6 +56,7 @@ size_t g_numProcs = {num_procs};
sys.path.append('../libANGLE/renderer') sys.path.append('../libANGLE/renderer')
import angle_format import angle_format
def main(): def main():
# auto_script parameters. # auto_script parameters.
...@@ -82,21 +83,23 @@ def main(): ...@@ -82,21 +83,23 @@ def main():
all_functions[function] = "gl::" + function[2:] all_functions[function] = "gl::" + function[2:]
# Special handling for EGL_ANGLE_explicit_context extension # Special handling for EGL_ANGLE_explicit_context extension
if support_egl_ANGLE_explicit_context: if support_egl_ANGLE_explicit_context:
all_functions[function + "ContextANGLE"] = "gl::" + function[2:] + "ContextANGLE" all_functions[function +
"ContextANGLE"] = "gl::" + function[2:] + "ContextANGLE"
elif function.startswith("egl"): elif function.startswith("egl"):
all_functions[function] = "EGL_" + function[3:] all_functions[function] = "EGL_" + function[3:]
else: else:
all_functions[function] = function all_functions[function] = function
proc_data = [(' {"%s", P(%s)}' % (func, angle_func)) for func, angle_func in sorted(all_functions.iteritems())] proc_data = [(' {"%s", P(%s)}' % (func, angle_func))
for func, angle_func in sorted(all_functions.iteritems())]
with open(out_file_name, 'w') as out_file: with open(out_file_name, 'w') as out_file:
output_cpp = template_cpp.format( output_cpp = template_cpp.format(
script_name = sys.argv[0], script_name=sys.argv[0],
data_source_name = data_source_name, data_source_name=data_source_name,
copyright_year = date.today().year, copyright_year=date.today().year,
proc_data = ",\n".join(proc_data), proc_data=",\n".join(proc_data),
num_procs = len(proc_data)) num_procs=len(proc_data))
out_file.write(output_cpp) out_file.write(output_cpp)
out_file.close() out_file.close()
return 0 return 0
......
...@@ -2,6 +2,7 @@ import os ...@@ -2,6 +2,7 @@ import os
import re import re
import sys import sys
def ReadFileAsLines(filename): def ReadFileAsLines(filename):
"""Reads a file, removing blank lines and lines that start with #""" """Reads a file, removing blank lines and lines that start with #"""
file = open(filename, "r") file = open(filename, "r")
...@@ -14,17 +15,20 @@ def ReadFileAsLines(filename): ...@@ -14,17 +15,20 @@ def ReadFileAsLines(filename):
lines.append(line) lines.append(line)
return lines return lines
def GetSuiteName(testName):
    """Return the suite portion of a test name (text before the first '/')."""
    slash_pos = testName.find("/")
    return testName[:slash_pos]
def GetTestName(testName):
    """Build an identifier-friendly test name from the last two path parts.

    The '.test' extension is dropped and remaining dots become underscores.
    """
    parts = testName.split("/")
    name = "%s_%s" % (parts[-2], parts[-1])
    # Order matters: strip the '.test' suffix before mapping '.' to '_'.
    name = name.replace(".test", "")
    return name.replace(".", "_")
def GenerateTests(outFile, testNames): def GenerateTests(outFile, testNames):
# Remove duplicate tests # Remove duplicate tests
testNames = list(set(testNames)) testNames = list(set(testNames))
...@@ -43,8 +47,9 @@ def GenerateTests(outFile, testNames): ...@@ -43,8 +47,9 @@ def GenerateTests(outFile, testNames):
outFile.write(" run(\"" + test + "\");\n") outFile.write(" run(\"" + test + "\");\n")
outFile.write("}\n\n") outFile.write("}\n\n")
def GenerateTestList(sourceFile, rootDir): def GenerateTestList(sourceFile, rootDir):
tests = [ ] tests = []
fileName, fileExtension = os.path.splitext(sourceFile) fileName, fileExtension = os.path.splitext(sourceFile)
if fileExtension == ".run": if fileExtension == ".run":
lines = ReadFileAsLines(sourceFile) lines = ReadFileAsLines(sourceFile)
...@@ -52,7 +57,8 @@ def GenerateTestList(sourceFile, rootDir): ...@@ -52,7 +57,8 @@ def GenerateTestList(sourceFile, rootDir):
tests += GenerateTestList(os.path.join(os.path.dirname(sourceFile), line), rootDir) tests += GenerateTestList(os.path.join(os.path.dirname(sourceFile), line), rootDir)
elif fileExtension == ".test": elif fileExtension == ".test":
tests.append(os.path.relpath(os.path.realpath(sourceFile), rootDir).replace("\\", "/")) tests.append(os.path.relpath(os.path.realpath(sourceFile), rootDir).replace("\\", "/"))
return tests; return tests
def main(argv): def main(argv):
tests = GenerateTestList(argv[0], argv[1]) tests = GenerateTestList(argv[0], argv[1])
...@@ -64,5 +70,6 @@ def main(argv): ...@@ -64,5 +70,6 @@ def main(argv):
return 0 return 0
if __name__ == '__main__': if __name__ == '__main__':
sys.exit(main(sys.argv[1:])) sys.exit(main(sys.argv[1:]))
...@@ -18,12 +18,10 @@ import shutil ...@@ -18,12 +18,10 @@ import shutil
import subprocess import subprocess
import sys import sys
# GN build arguments used to configure the glslang build.
gn_args = """is_clang = true
is_debug = false
angle_enable_vulkan = true"""

# Host-platform flags, used to pick the matching glslang binary.
is_windows = platform.system() == 'Windows'
is_linux = platform.system() == 'Linux'
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment