Commit d7d42395 by Geoff Lang Committed by Commit Bot

Format all of ANGLE's python code.

BUG=angleproject:3421 Change-Id: I1d7282ac513c046de5d8ed87f7789290780d30a6 Reviewed-on: https://chromium-review.googlesource.com/c/angle/angle/+/1595440Reviewed-by: 's avatarJamie Madill <jmadill@chromium.org> Commit-Queue: Geoff Lang <geofflang@chromium.org>
parent 8ba78da0
[style]
based_on_style = chromium
column_limit = 99
indent_width = 4
# Copyright 2019 The ANGLE Project Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Top-level presubmit script for code generation.
See http://dev.chromium.org/developers/how-tos/depottools/presubmit-scripts
......@@ -10,11 +9,9 @@ for more details on the presubmit API built into depot_tools.
from subprocess import call
# Fragment of a regular expression that matches C++ and Objective-C++ implementation files.
_IMPLEMENTATION_EXTENSIONS = r'\.(cc|cpp|cxx|mm)$'
# Fragment of a regular expression that matches C++ and Objective-C++ header files.
_HEADER_EXTENSIONS = r'\.(h|hpp|hxx)$'
......@@ -23,8 +20,10 @@ def _CheckCodeGeneration(input_api, output_api):
class Msg(output_api.PresubmitError):
"""Specialized error message"""
def __init__(self, message):
super(output_api.PresubmitError, self).__init__(message,
super(output_api.PresubmitError, self).__init__(
message,
long_text='Please ensure your ANGLE repositiory is synced to tip-of-tree\n'
'and you have an up-to-date checkout of all ANGLE dependencies.\n'
'If you are using ANGLE inside Chromium you may need to bootstrap ANGLE \n'
......@@ -34,11 +33,7 @@ def _CheckCodeGeneration(input_api, output_api):
'scripts/run_code_generation.py')
cmd_name = 'run_code_generation'
cmd = [input_api.python_executable, code_gen_path, '--verify-no-dirty']
test_cmd = input_api.Command(
name=cmd_name,
cmd=cmd,
kwargs={},
message=Msg)
test_cmd = input_api.Command(name=cmd_name, cmd=cmd, kwargs={}, message=Msg)
if input_api.verbose:
print('Running ' + cmd_name)
return input_api.RunTests([test_cmd])
......@@ -52,8 +47,7 @@ def _CheckNewHeaderWithoutGnChange(input_api, output_api):
"""
def headers(f):
return input_api.FilterSourceFile(
f, white_list=(r'.+%s' % _HEADER_EXTENSIONS, ))
return input_api.FilterSourceFile(f, white_list=(r'.+%s' % _HEADER_EXTENSIONS,))
new_headers = []
for f in input_api.AffectedSourceFiles(headers):
......@@ -62,7 +56,7 @@ def _CheckNewHeaderWithoutGnChange(input_api, output_api):
new_headers.append(f.LocalPath())
def gn_files(f):
return input_api.FilterSourceFile(f, white_list=(r'.+\.gn', ))
return input_api.FilterSourceFile(f, white_list=(r'.+\.gn',))
all_gn_changed_contents = ''
for f in input_api.AffectedSourceFiles(gn_files):
......@@ -76,35 +70,32 @@ def _CheckNewHeaderWithoutGnChange(input_api, output_api):
problems.append(header)
if problems:
return [output_api.PresubmitPromptWarning(
'Missing GN changes for new header files', items=sorted(problems),
return [
output_api.PresubmitPromptWarning(
'Missing GN changes for new header files',
items=sorted(problems),
long_text='Please double check whether newly added header files need '
'corresponding changes in gn or gni files.\nThis checking is only a '
'heuristic. Run build/check_gn_headers.py to be precise.\n'
'Read https://crbug.com/661774 for more info.')]
'Read https://crbug.com/661774 for more info.')
]
return []
def CheckChangeOnUpload(input_api, output_api):
results = []
results.extend(_CheckCodeGeneration(input_api, output_api))
results.extend(input_api.canned_checks.CheckChangeHasBugField(
input_api, output_api))
results.extend(input_api.canned_checks.CheckChangeHasDescription(
input_api, output_api))
results.extend(input_api.canned_checks.CheckChangeHasBugField(input_api, output_api))
results.extend(input_api.canned_checks.CheckChangeHasDescription(input_api, output_api))
results.extend(_CheckNewHeaderWithoutGnChange(input_api, output_api))
results.extend(
input_api.canned_checks.CheckPatchFormatted(input_api, output_api))
results.extend(input_api.canned_checks.CheckPatchFormatted(input_api, output_api))
return results
def CheckChangeOnCommit(input_api, output_api):
results = []
results.extend(_CheckCodeGeneration(input_api, output_api))
results.extend(
input_api.canned_checks.CheckPatchFormatted(input_api, output_api))
results.extend(input_api.canned_checks.CheckChangeHasBugField(
input_api, output_api))
results.extend(input_api.canned_checks.CheckChangeHasDescription(
input_api, output_api))
results.extend(input_api.canned_checks.CheckPatchFormatted(input_api, output_api))
results.extend(input_api.canned_checks.CheckChangeHasBugField(input_api, output_api))
results.extend(input_api.canned_checks.CheckChangeHasDescription(input_api, output_api))
return results
......@@ -16,22 +16,15 @@ import sys
def main():
parser = argparse.ArgumentParser(description=__doc__)
parser.add_argument(
'--objcopy',
required=True,
help='The objcopy binary to run',
metavar='PATH')
parser.add_argument(
'--nm', required=True, help='The nm binary to run', metavar='PATH')
'--objcopy', required=True, help='The objcopy binary to run', metavar='PATH')
parser.add_argument('--nm', required=True, help='The nm binary to run', metavar='PATH')
parser.add_argument(
'--sofile',
required=True,
help='Shared object file produced by linking command',
metavar='FILE')
parser.add_argument(
'--output',
required=True,
help='Final output shared object file',
metavar='FILE')
'--output', required=True, help='Final output shared object file', metavar='FILE')
parser.add_argument(
'--unstrippedsofile',
required=True,
......@@ -48,20 +41,16 @@ def main():
objcopy_cmd.append(args.output + '.debug')
result = subprocess.call(objcopy_cmd)
nm_cmd = subprocess.Popen(
[args.nm, args.unstrippedsofile, '--format=posix', '--defined-only'],
nm_cmd = subprocess.Popen([args.nm, args.unstrippedsofile, '--format=posix', '--defined-only'],
stdout=subprocess.PIPE)
awk_cmd = subprocess.Popen(['awk', '{ print $1}'],
stdin=nm_cmd.stdout,
stdout=subprocess.PIPE)
awk_cmd = subprocess.Popen(['awk', '{ print $1}'], stdin=nm_cmd.stdout, stdout=subprocess.PIPE)
dynsym_out = open(args.output + '.dynsyms', 'w')
sort_cmd = subprocess.Popen(['sort'], stdin=awk_cmd.stdout, stdout=dynsym_out)
dynsym_out.close()
nm_cmd = subprocess.Popen(
[args.nm, args.unstrippedsofile, '--format=posix', '--defined-only'],
nm_cmd = subprocess.Popen([args.nm, args.unstrippedsofile, '--format=posix', '--defined-only'],
stdout=subprocess.PIPE)
awk_cmd = subprocess.Popen(
......@@ -70,9 +59,7 @@ def main():
stdout=subprocess.PIPE)
funcsyms_out = open(args.output + '.funcsyms', 'w')
sort_cmd = subprocess.Popen(['sort'],
stdin=awk_cmd.stdout,
stdout=funcsyms_out)
sort_cmd = subprocess.Popen(['sort'], stdin=awk_cmd.stdout, stdout=funcsyms_out)
funcsyms_out.close()
keep_symbols = open(args.output + '.keep_symbols', 'w')
......@@ -86,15 +73,14 @@ def main():
keep_symbols.close()
objcopy_cmd = [
args.objcopy, '--rename-section', '.debug_frame=saved_debug_frame',
args.output + '.debug', args.output + ".mini_debuginfo"
args.objcopy, '--rename-section', '.debug_frame=saved_debug_frame', args.output + '.debug',
args.output + ".mini_debuginfo"
]
subprocess.check_call(objcopy_cmd)
objcopy_cmd = [
args.objcopy, '-S', '--remove-section', '.gdb_index', '--remove-section',
'.comment', '--keep-symbols=' + args.output + '.keep_symbols',
args.output + '.mini_debuginfo'
args.objcopy, '-S', '--remove-section', '.gdb_index', '--remove-section', '.comment',
'--keep-symbols=' + args.output + '.keep_symbols', args.output + '.mini_debuginfo'
]
subprocess.check_call(objcopy_cmd)
......@@ -108,16 +94,16 @@ def main():
subprocess.check_call(xz_cmd)
objcopy_cmd = [
args.objcopy, '--add-section',
'.gnu_debugdata=' + args.output + '.mini_debuginfo.xz', args.output
args.objcopy, '--add-section', '.gnu_debugdata=' + args.output + '.mini_debuginfo.xz',
args.output
]
subprocess.check_call(objcopy_cmd)
# Clean out scratch files
rm_cmd = [
'rm', '-f', args.output + '.dynsyms', args.output + '.funcsyms',
args.output + '.keep_symbols', args.output + '.debug',
args.output + '.mini_debuginfo', args.output + '.mini_debuginfo.xz'
args.output + '.keep_symbols', args.output + '.debug', args.output + '.mini_debuginfo',
args.output + '.mini_debuginfo.xz'
]
result = subprocess.call(rm_cmd)
......
......@@ -49,12 +49,12 @@ bmp_file.close()
# convert to YUV 4:4:4
converted_pixels = bytearray(pixels)
for i in range(0, width * height):
R, = struct.unpack("B", pixels[i*3+2])
G, = struct.unpack("B", pixels[i*3+1])
B, = struct.unpack("B", pixels[i*3])
converted_pixels[i*3] = ((66*R + 129*G + 25*B + 128) >> 8) + 16
converted_pixels[i*3+1] = ((-38*R - 74*G + 112*B + 128) >> 8) + 128
converted_pixels[i*3+2] = ((112*R - 94*G - 18*B + 128) >> 8) + 128
R, = struct.unpack("B", pixels[i * 3 + 2])
G, = struct.unpack("B", pixels[i * 3 + 1])
B, = struct.unpack("B", pixels[i * 3])
converted_pixels[i * 3] = ((66 * R + 129 * G + 25 * B + 128) >> 8) + 16
converted_pixels[i * 3 + 1] = ((-38 * R - 74 * G + 112 * B + 128) >> 8) + 128
converted_pixels[i * 3 + 2] = ((112 * R - 94 * G - 18 * B + 128) >> 8) + 128
# downsample to packed UV buffer
uv_buffer = bytearray(width * height / 2)
......
......@@ -3,7 +3,6 @@
# Copyright 2015 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Generate .gclient file for Angle.
Because gclient won't accept "--name ." use a different name then edit.
......@@ -34,5 +33,6 @@ def main():
print 'created .gclient'
if __name__ == '__main__':
main()
......@@ -9,6 +9,7 @@ from __future__ import print_function
import os, shutil, sys
def main():
if len(sys.argv) != 2:
print("Usage: %s <path>" % sys.argv[0])
......@@ -20,5 +21,6 @@ def main():
print("false")
sys.exit(0)
if __name__ == '__main__':
main()
......@@ -31,8 +31,7 @@ import sys
def get_json_description(gn_out, target_name):
try:
text_desc = subprocess.check_output(
['gn', 'desc', '--format=json', gn_out, target_name])
text_desc = subprocess.check_output(['gn', 'desc', '--format=json', gn_out, target_name])
except subprocess.CalledProcessError as e:
logging.error("e.retcode = %s" % e.returncode)
logging.error("e.cmd = %s" % e.cmd)
......@@ -40,12 +39,12 @@ def get_json_description(gn_out, target_name):
try:
json_out = json.loads(text_desc)
except ValueError:
raise ValueError("Unable to decode JSON\ncmd: %s\noutput:\n%s" %
(subprocess.list2cmdline(['gn', 'desc', '--format=json',
gn_out, target_name]), text_desc))
raise ValueError("Unable to decode JSON\ncmd: %s\noutput:\n%s" % (subprocess.list2cmdline(
['gn', 'desc', '--format=json', gn_out, target_name]), text_desc))
return json_out
def load_json_deps(desc, gn_out, target_name, all_desc, indent=" "):
"""Extracts dependencies from the given target json description
and recursively extracts json descriptions.
......@@ -60,12 +59,13 @@ def load_json_deps(desc, gn_out, target_name, all_desc, indent=" "):
text_descriptions = []
for dep in target.get('deps', []):
if dep not in all_desc:
logging.debug("dep: %s%s" % (indent,dep))
logging.debug("dep: %s%s" % (indent, dep))
new_desc = get_json_description(gn_out, dep)
all_desc[dep] = new_desc[dep]
load_json_deps(new_desc, gn_out, dep, all_desc, indent+" ")
load_json_deps(new_desc, gn_out, dep, all_desc, indent + " ")
else:
logging.debug("dup: %s%s" % (indent,dep))
logging.debug("dup: %s%s" % (indent, dep))
def create_build_description(gn_out, targets):
"""Creates the JSON build description by running GN."""
......@@ -89,8 +89,7 @@ def main():
description='Generate json build information from a GN description.')
parser.add_argument(
'--gn_out',
help=
'GN output config to use (e.g., out/Default or out/Debug.)',
help='GN output config to use (e.g., out/Default or out/Debug.)',
default='out/Default',
)
parser.add_argument(
......@@ -105,7 +104,7 @@ def main():
args = parser.parse_args()
desc = create_build_description(args.gn_out, args.targets)
fh = open(args.output,"w")
fh = open(args.output, "w")
fh.write(json.dumps(desc, indent=4, sort_keys=True))
fh.close()
......
......@@ -263,39 +263,47 @@ EXPORTS
{exports}
"""
def script_relative(path):
return os.path.join(os.path.dirname(sys.argv[0]), path)
with open(script_relative('entry_point_packed_gl_enums.json')) as f:
cmd_packed_gl_enums = json.loads(f.read())
def format_entry_point_decl(cmd_name, proto, params, is_explicit_context):
comma_if_needed = ", " if len(params) > 0 else ""
return template_entry_point_decl.format(
name = cmd_name[2:],
return_type = proto[:-len(cmd_name)],
params = ", ".join(params),
comma_if_needed = comma_if_needed,
explicit_context_suffix = "ContextANGLE" if is_explicit_context else "",
explicit_context_param = "GLeglContext ctx" if is_explicit_context else "",
explicit_context_comma = ", " if is_explicit_context and len(params) > 0 else "")
name=cmd_name[2:],
return_type=proto[:-len(cmd_name)],
params=", ".join(params),
comma_if_needed=comma_if_needed,
explicit_context_suffix="ContextANGLE" if is_explicit_context else "",
explicit_context_param="GLeglContext ctx" if is_explicit_context else "",
explicit_context_comma=", " if is_explicit_context and len(params) > 0 else "")
def type_name_sep_index(param):
space = param.rfind(" ")
pointer = param.rfind("*")
return max(space, pointer)
def just_the_type(param):
if "*" in param:
return param[:type_name_sep_index(param) + 1]
return param[:type_name_sep_index(param)]
def just_the_name(param):
return param[type_name_sep_index(param)+1:]
return param[type_name_sep_index(param) + 1:]
def make_param(param_type, param_name):
return param_type + " " + param_name
def just_the_type_packed(param, entry):
name = just_the_name(param)
if entry.has_key(name):
......@@ -303,6 +311,7 @@ def just_the_type_packed(param, entry):
else:
return just_the_type(param)
def just_the_name_packed(param, reserved_set):
name = just_the_name(param)
if name in reserved_set:
......@@ -310,6 +319,7 @@ def just_the_name_packed(param, reserved_set):
else:
return name
def param_print_argument(param):
name_only = just_the_name(param)
type_only = just_the_type(param)
......@@ -325,6 +335,7 @@ def param_print_argument(param):
return name_only
def param_format_string(param):
if "*" in param:
return param + " = 0x%016\" PRIxPTR \""
......@@ -335,11 +346,13 @@ def param_format_string(param):
return param + " = " + format_dict[type_only]
def default_return_value(cmd_name, return_type):
if return_type == "void":
return ""
return "GetDefaultReturnValue<EntryPoint::" + cmd_name[2:] + ", " + return_type + ">()"
def get_context_getter_function(cmd_name, is_explicit_context):
if is_explicit_context:
return "static_cast<gl::Context *>(ctx)"
......@@ -357,6 +370,7 @@ def get_context_getter_function(cmd_name, is_explicit_context):
return "GetGlobalContext()"
return "GetValidGlobalContext()"
def format_entry_point_def(cmd_name, proto, params, is_explicit_context):
packed_gl_enums = cmd_packed_gl_enums.get(cmd_name, {})
internal_params = [just_the_name_packed(param, packed_gl_enums) for param in params]
......@@ -366,8 +380,10 @@ def format_entry_point_def(cmd_name, proto, params, is_explicit_context):
if name in packed_gl_enums:
internal_name = name + "Packed"
internal_type = packed_gl_enums[name]
packed_gl_enum_conversions += ["\n " + internal_type + " " + internal_name +" = FromGLenum<" +
internal_type + ">(" + name + ");"]
packed_gl_enum_conversions += [
"\n " + internal_type + " " + internal_name + " = FromGLenum<" +
internal_type + ">(" + name + ");"
]
pass_params = [param_print_argument(param) for param in params]
format_params = [param_format_string(param) for param in params]
......@@ -381,30 +397,36 @@ def format_entry_point_def(cmd_name, proto, params, is_explicit_context):
name_lower_no_suffix = name_lower_no_suffix[0:-len(suffix)]
return template_entry_point_def.format(
name = cmd_name[2:],
name_lower_no_suffix = name_lower_no_suffix,
return_type = return_type,
params = ", ".join(params),
internal_params = ", ".join(internal_params),
packed_gl_enum_conversions = "".join(packed_gl_enum_conversions),
pass_params = ", ".join(pass_params),
comma_if_needed = ", " if len(params) > 0 else "",
validate_params = ", ".join(["context"] + internal_params),
format_params = ", ".join(format_params),
return_if_needed = "" if default_return == "" else "return ",
default_return_if_needed = "" if default_return == "" else "\n return " + default_return + ";\n",
context_getter = get_context_getter_function(cmd_name, is_explicit_context),
event_comment = event_comment,
explicit_context_suffix = "ContextANGLE" if is_explicit_context else "",
explicit_context_param = "GLeglContext ctx" if is_explicit_context else "",
explicit_context_comma = ", " if is_explicit_context and len(params) > 0 else "",
assert_explicit_context = "\nASSERT(context == GetValidGlobalContext());"
name=cmd_name[2:],
name_lower_no_suffix=name_lower_no_suffix,
return_type=return_type,
params=", ".join(params),
internal_params=", ".join(internal_params),
packed_gl_enum_conversions="".join(packed_gl_enum_conversions),
pass_params=", ".join(pass_params),
comma_if_needed=", " if len(params) > 0 else "",
validate_params=", ".join(["context"] + internal_params),
format_params=", ".join(format_params),
return_if_needed="" if default_return == "" else "return ",
default_return_if_needed=""
if default_return == "" else "\n return " + default_return + ";\n",
context_getter=get_context_getter_function(cmd_name, is_explicit_context),
event_comment=event_comment,
explicit_context_suffix="ContextANGLE" if is_explicit_context else "",
explicit_context_param="GLeglContext ctx" if is_explicit_context else "",
explicit_context_comma=", " if is_explicit_context and len(params) > 0 else "",
assert_explicit_context="\nASSERT(context == GetValidGlobalContext());"
if is_explicit_context else "")
def get_internal_params(cmd_name, params):
packed_gl_enums = cmd_packed_gl_enums.get(cmd_name, {})
return ", ".join([make_param(just_the_type_packed(param, packed_gl_enums),
just_the_name_packed(param, packed_gl_enums)) for param in params])
return ", ".join([
make_param(
just_the_type_packed(param, packed_gl_enums),
just_the_name_packed(param, packed_gl_enums)) for param in params
])
def format_context_gles_decl(cmd_name, proto, params):
internal_params = get_internal_params(cmd_name, params)
......@@ -417,31 +439,35 @@ def format_context_gles_decl(cmd_name, proto, params):
name_lower_no_suffix = name_lower_no_suffix[0:-len(suffix)]
return context_gles_decl.format(
return_type = return_type,
name_lower_no_suffix = name_lower_no_suffix,
internal_params = internal_params)
return_type=return_type,
name_lower_no_suffix=name_lower_no_suffix,
internal_params=internal_params)
def format_libgles_entry_point_def(cmd_name, proto, params, is_explicit_context):
internal_params = [just_the_name(param) for param in params]
return_type = proto[:-len(cmd_name)]
return libgles_entry_point_def.format(
name = cmd_name[2:],
return_type = return_type,
params = ", ".join(params),
internal_params = ", ".join(internal_params),
explicit_context_suffix = "ContextANGLE" if is_explicit_context else "",
explicit_context_param = "GLeglContext ctx" if is_explicit_context else "",
explicit_context_comma = ", " if is_explicit_context and len(params) > 0 else "",
explicit_context_internal_param = "ctx" if is_explicit_context else "")
name=cmd_name[2:],
return_type=return_type,
params=", ".join(params),
internal_params=", ".join(internal_params),
explicit_context_suffix="ContextANGLE" if is_explicit_context else "",
explicit_context_param="GLeglContext ctx" if is_explicit_context else "",
explicit_context_comma=", " if is_explicit_context and len(params) > 0 else "",
explicit_context_internal_param="ctx" if is_explicit_context else "")
def format_validation_proto(cmd_name, params):
internal_params = get_internal_params(cmd_name, ["Context *context"] + params)
return template_validation_proto % (cmd_name[2:], internal_params)
def path_to(folder, file):
return os.path.join(script_relative(".."), "src", folder, file)
def get_entry_points(all_commands, gles_commands, is_explicit_context):
decls = []
defs = []
......@@ -457,17 +483,18 @@ def get_entry_points(all_commands, gles_commands, is_explicit_context):
param_text = ["".join(param.itertext()) for param in command.findall('param')]
proto_text = "".join(proto.itertext())
decls.append(format_entry_point_decl(cmd_name, proto_text, param_text,
is_explicit_context))
decls.append(
format_entry_point_decl(cmd_name, proto_text, param_text, is_explicit_context))
defs.append(format_entry_point_def(cmd_name, proto_text, param_text, is_explicit_context))
export_defs.append(format_libgles_entry_point_def(cmd_name, proto_text, param_text,
is_explicit_context))
export_defs.append(
format_libgles_entry_point_def(cmd_name, proto_text, param_text, is_explicit_context))
validation_protos.append(format_validation_proto(cmd_name, param_text))
return decls, defs, export_defs, validation_protos
def get_gles1_decls(all_commands, gles_commands):
decls = []
for command in all_commands:
......@@ -486,12 +513,13 @@ def get_gles1_decls(all_commands, gles_commands):
return decls
def get_glext_decls(all_commands, gles_commands, version, is_explicit_context):
glext_ptrs = []
glext_protos = []
is_gles1 = False
if(version == ""):
if (version == ""):
is_gles1 = True
for command in all_commands:
......@@ -516,25 +544,25 @@ def get_glext_decls(all_commands, gles_commands, version, is_explicit_context):
"explicit_context_comma": ", " if is_explicit_context and len(params) > 0 else "",
"explicit_context_suffix": "ContextANGLE" if is_explicit_context else "",
"explicit_context_suffix_upper": "CONTEXTANGLE" if is_explicit_context else "",
"explicit_context_param": "GLeglContext ctx" if is_explicit_context else ""}
"explicit_context_param": "GLeglContext ctx" if is_explicit_context else ""
}
glext_ptrs.append(template_glext_function_pointer.format(
**format_params))
glext_protos.append(template_glext_function_prototype.format(
**format_params))
glext_ptrs.append(template_glext_function_pointer.format(**format_params))
glext_protos.append(template_glext_function_prototype.format(**format_params))
return glext_ptrs, glext_protos
def write_file(annotation, comment, template, entry_points, suffix, includes, file):
content = template.format(
script_name = os.path.basename(sys.argv[0]),
data_source_name = file,
year = date.today().year,
annotation_lower = annotation.lower(),
annotation_upper = annotation.upper(),
comment = comment,
includes = includes,
entry_points = entry_points)
script_name=os.path.basename(sys.argv[0]),
data_source_name=file,
year=date.today().year,
annotation_lower=annotation.lower(),
annotation_upper=annotation.upper(),
comment=comment,
includes=includes,
entry_points=entry_points)
path = path_to("libGLESv2", "entry_points_gles_{}_autogen.{}".format(
annotation.lower(), suffix))
......@@ -543,13 +571,14 @@ def write_file(annotation, comment, template, entry_points, suffix, includes, fi
out.write(content)
out.close()
def write_export_files(entry_points, includes):
content = template_libgles_entry_point_source.format(
script_name = os.path.basename(sys.argv[0]),
data_source_name = "gl.xml and gl_angle_ext.xml",
year = date.today().year,
includes = includes,
entry_points = entry_points)
script_name=os.path.basename(sys.argv[0]),
data_source_name="gl.xml and gl_angle_ext.xml",
year=date.today().year,
includes=includes,
entry_points=entry_points)
path = path_to("libGLESv2", "libGLESv2_autogen.cpp")
......@@ -557,6 +586,7 @@ def write_export_files(entry_points, includes):
out.write(content)
out.close()
def write_context_api_decls(annotation, template, decls):
interface_lines = []
......@@ -568,12 +598,12 @@ def write_context_api_decls(annotation, template, decls):
interface_lines.extend(decls['exts'][extname])
content = template.format(
annotation_lower = annotation.lower(),
annotation_upper = annotation.upper(),
script_name = os.path.basename(sys.argv[0]),
data_source_name = "gl.xml",
year = date.today().year,
interface = "\n".join(interface_lines))
annotation_lower=annotation.lower(),
annotation_upper=annotation.upper(),
script_name=os.path.basename(sys.argv[0]),
data_source_name="gl.xml",
year=date.today().year,
interface="\n".join(interface_lines))
path = path_to("libANGLE", "Context_gles_%s_autogen.h" % annotation.lower())
......@@ -581,32 +611,35 @@ def write_context_api_decls(annotation, template, decls):
out.write(content)
out.close()
def write_glext_explicit_context_inc(version, ptrs, protos):
folder_version = version if version != "31" else "3"
content = template_glext_explicit_context_inc.format(
script_name = os.path.basename(sys.argv[0]),
data_source_name = "gl.xml and gl_angle_ext.xml",
year = date.today().year,
version = version,
function_pointers = ptrs,
function_prototypes = protos)
path = os.path.join(script_relative(".."), "include", "GLES{}".format(folder_version),
script_name=os.path.basename(sys.argv[0]),
data_source_name="gl.xml and gl_angle_ext.xml",
year=date.today().year,
version=version,
function_pointers=ptrs,
function_prototypes=protos)
path = os.path.join(
script_relative(".."), "include", "GLES{}".format(folder_version),
"gl{}ext_explicit_context_autogen.inc".format(version))
with open(path, "w") as out:
out.write(content)
out.close()
def write_validation_header(annotation, comment, protos):
content = template_validation_header.format(
script_name = os.path.basename(sys.argv[0]),
data_source_name = "gl.xml and gl_angle_ext.xml",
year = date.today().year,
annotation = annotation,
comment = comment,
prototypes = "\n".join(protos))
script_name=os.path.basename(sys.argv[0]),
data_source_name="gl.xml and gl_angle_ext.xml",
year=date.today().year,
annotation=annotation,
comment=comment,
prototypes="\n".join(protos))
path = path_to("libANGLE", "validationES%s_autogen.h" % annotation)
......@@ -614,14 +647,15 @@ def write_validation_header(annotation, comment, protos):
out.write(content)
out.close()
def write_windows_def_file(data_source_name, lib, exports):
content = template_windows_def_file.format(
script_name = os.path.basename(sys.argv[0]),
data_source_name = data_source_name,
exports = "\n".join(exports),
year = date.today().year,
lib = lib)
script_name=os.path.basename(sys.argv[0]),
data_source_name=data_source_name,
exports="\n".join(exports),
year=date.today().year,
lib=lib)
path = path_to(lib, "%s_autogen.def" % lib)
......@@ -629,12 +663,14 @@ def write_windows_def_file(data_source_name, lib, exports):
out.write(content)
out.close()
def get_exports(commands, fmt = None):
def get_exports(commands, fmt=None):
if fmt:
return [" %s" % fmt(cmd) for cmd in sorted(commands)]
else:
return [" %s" % cmd for cmd in sorted(commands)]
# Get EGL exports
def get_egl_exports():
......@@ -671,6 +707,7 @@ def get_egl_exports():
return exports
def main():
# auto_script parameters.
......@@ -762,20 +799,19 @@ def main():
if major_version == 3 and minor_version == 1:
header_includes += "\n#include \"common/platform.h\"\n"
source_includes = template_sources_includes.format(
annotation.lower(), major_version, minor_if_not_zero)
source_includes = template_sources_includes.format(annotation.lower(), major_version,
minor_if_not_zero)
write_file(annotation, comment, template_entry_point_header,
"\n".join(decls), "h", header_includes, "gl.xml")
write_file(annotation, comment, template_entry_point_source,
"\n".join(defs), "cpp", source_includes, "gl.xml")
write_file(annotation, comment, template_entry_point_header, "\n".join(decls), "h",
header_includes, "gl.xml")
write_file(annotation, comment, template_entry_point_source, "\n".join(defs), "cpp",
source_includes, "gl.xml")
if is_gles1:
gles1decls['core'] = get_gles1_decls(all_commands, gles_commands)
validation_annotation = "%s%s" % (major_version, minor_if_not_zero)
write_validation_header(validation_annotation, comment, validation_protos)
# After we finish with the main entry points, we process the extensions.
extension_defs = []
extension_decls = []
......@@ -848,27 +884,27 @@ def main():
version = "{}{}".format(major_if_not_one, minor_if_not_zero)
glext_ptrs, glext_protos = get_glext_decls(all_commands,
xml.all_cmd_names.get_commands(annotation), version, True)
xml.all_cmd_names.get_commands(annotation),
version, True)
glext_ext_ptrs = []
glext_ext_protos = []
# Append extensions for 1.0 and 2.0
if(annotation == "1_0"):
glext_ext_ptrs, glext_ext_protos = get_glext_decls(all_commands,
xml.all_cmd_names.get_commands("glext"), version, True)
elif(annotation == "2_0"):
glext_ext_ptrs, glext_ext_protos = get_glext_decls(all_commands,
xml.all_cmd_names.get_commands("gl2ext"), version, True)
if (annotation == "1_0"):
glext_ext_ptrs, glext_ext_protos = get_glext_decls(
all_commands, xml.all_cmd_names.get_commands("glext"), version, True)
elif (annotation == "2_0"):
glext_ext_ptrs, glext_ext_protos = get_glext_decls(
all_commands, xml.all_cmd_names.get_commands("gl2ext"), version, True)
glext_ptrs += glext_ext_ptrs
glext_protos += glext_ext_protos
write_glext_explicit_context_inc(version, "\n".join(glext_ptrs), "\n".join(glext_protos))
write_glext_explicit_context_inc(version, "\n".join(glext_ptrs),
"\n".join(glext_protos))
header_includes = template_header_includes.format(
major="", minor="")
header_includes = template_header_includes.format(major="", minor="")
header_includes += """
#include <GLES/glext.h>
#include <GLES2/gl2.h>
......@@ -883,24 +919,23 @@ def main():
#include "libANGLE/validationES31.h"
"""
write_file("ext", "extension", template_entry_point_header,
"\n".join([item for item in extension_decls]), "h", header_includes,
"gl.xml and gl_angle_ext.xml")
write_file("ext", "extension", template_entry_point_source,
"\n".join([item for item in extension_defs]), "cpp", source_includes,
"gl.xml and gl_angle_ext.xml")
write_file("ext", "extension", template_entry_point_header, "\n".join(
[item for item in extension_decls]), "h", header_includes, "gl.xml and gl_angle_ext.xml")
write_file("ext", "extension", template_entry_point_source, "\n".join(
[item for item in extension_defs]), "cpp", source_includes, "gl.xml and gl_angle_ext.xml")
write_validation_header("EXT", "extension", ext_validation_protos)
write_context_api_decls("1_0", context_gles_header, gles1decls)
sorted_cmd_names = ["Invalid"] + [cmd[2:] for cmd in sorted(xml.all_cmd_names.get_all_commands())]
sorted_cmd_names = ["Invalid"
] + [cmd[2:] for cmd in sorted(xml.all_cmd_names.get_all_commands())]
entry_points_enum = template_entry_points_enum_header.format(
script_name = os.path.basename(sys.argv[0]),
data_source_name = "gl.xml and gl_angle_ext.xml",
year = date.today().year,
entry_points_list = ",\n".join([" " + cmd for cmd in sorted_cmd_names]))
script_name=os.path.basename(sys.argv[0]),
data_source_name="gl.xml and gl_angle_ext.xml",
year=date.today().year,
entry_points_list=",\n".join([" " + cmd for cmd in sorted_cmd_names]))
entry_points_enum_header_path = path_to("libGLESv2", "entry_points_enum_autogen.h")
with open(entry_points_enum_header_path, "w") as out:
......@@ -926,5 +961,6 @@ def main():
everything = "Khronos and ANGLE XML files"
write_windows_def_file(everything, "libGLESv2", libgles_ep_exports)
if __name__ == '__main__':
sys.exit(main())
......@@ -12,32 +12,47 @@ import sys, os, pprint, json
from datetime import date
import registry_xml
def write_header(data_source_name, all_cmds, api, preamble, path, lib, ns = "", prefix = None, export = ""):
def write_header(data_source_name,
all_cmds,
api,
preamble,
path,
lib,
ns="",
prefix=None,
export=""):
file_name = "%s_loader_autogen.h" % api
header_path = registry_xml.path_to(path, file_name)
def pre(cmd):
if prefix == None:
return cmd
return prefix + cmd[len(api):]
with open(header_path, "w") as out:
var_protos = ["%sextern PFN%sPROC %s%s;" % (export, cmd.upper(), ns, pre(cmd)) for cmd in all_cmds]
var_protos = [
"%sextern PFN%sPROC %s%s;" % (export, cmd.upper(), ns, pre(cmd)) for cmd in all_cmds
]
loader_header = template_loader_h.format(
script_name = os.path.basename(sys.argv[0]),
data_source_name = data_source_name,
year = date.today().year,
function_pointers = "\n".join(var_protos),
api_upper = api.upper(),
api_lower = api,
preamble = preamble,
export = export,
lib = lib.upper())
script_name=os.path.basename(sys.argv[0]),
data_source_name=data_source_name,
year=date.today().year,
function_pointers="\n".join(var_protos),
api_upper=api.upper(),
api_lower=api,
preamble=preamble,
export=export,
lib=lib.upper())
out.write(loader_header)
out.close()
def write_source(data_source_name, all_cmds, api, path, ns = "", prefix = None, export = ""):
def write_source(data_source_name, all_cmds, api, path, ns="", prefix=None, export=""):
file_name = "%s_loader_autogen.cpp" % api
source_path = registry_xml.path_to(path, file_name)
def pre(cmd):
if prefix == None:
return cmd
......@@ -50,17 +65,18 @@ def write_source(data_source_name, all_cmds, api, path, ns = "", prefix = None,
setters = [setter % (ns, pre(cmd), cmd.upper(), pre(cmd)) for cmd in all_cmds]
loader_source = template_loader_cpp.format(
script_name = os.path.basename(sys.argv[0]),
data_source_name = data_source_name,
year = date.today().year,
function_pointers = "\n".join(var_defs),
set_pointers = "\n".join(setters),
api_upper = api.upper(),
api_lower = api)
script_name=os.path.basename(sys.argv[0]),
data_source_name=data_source_name,
year=date.today().year,
function_pointers="\n".join(var_defs),
set_pointers="\n".join(setters),
api_upper=api.upper(),
api_lower=api)
out.write(loader_source)
out.close()
def gen_libegl_loader():
data_source_name = "egl.xml and egl_angle_ext.xml"
......@@ -82,6 +98,7 @@ def gen_libegl_loader():
write_header(data_source_name, all_cmds, "egl", libegl_preamble, path, "LIBEGL", "", "EGL_")
write_source(data_source_name, all_cmds, "egl", path, "", "EGL_")
def gen_gl_loader():
data_source_name = "gl.xml and gl_angle_ext.xml"
......@@ -113,6 +130,7 @@ def gen_gl_loader():
write_header(data_source_name, all_cmds, "gles", util_gles_preamble, path, "UTIL", export=ex)
write_source(data_source_name, all_cmds, "gles", path, export=ex)
def gen_egl_loader():
data_source_name = "egl.xml and egl_angle_ext.xml"
......@@ -135,6 +153,7 @@ def gen_egl_loader():
write_header(data_source_name, all_cmds, "egl", util_egl_preamble, path, "UTIL", export=ex)
write_source(data_source_name, all_cmds, "egl", path, export=ex)
def gen_wgl_loader():
supported_wgl_extensions = [
......@@ -162,6 +181,7 @@ def gen_wgl_loader():
write_header(source, all_cmds, "wgl", util_wgl_preamble, path, "UTIL_WINDOWS", "_")
write_source(source, all_cmds, "wgl", path, "_")
def main():
# Handle inputs/outputs for run_code_generation.py's auto_script
......
......@@ -109,11 +109,13 @@ $ImplMethodDefinitions
} // namespace rx
"""
def generate_impl_declaration(impl_stub):
# ensure the wrapped lines are aligned vertically
temp = re.sub(r'\n ', '\n', impl_stub)
return temp + ' override;\n'
def generate_impl_definition(impl_stub, typed_impl):
function_signature = impl_stub.strip()
......@@ -150,15 +152,17 @@ def generate_impl_definition(impl_stub, typed_impl):
else:
return_statement = ' return ' + return_type + '();\n'
body = '{\n' + ' UNIMPLEMENTED();\n' + return_statement +'}\n'
body = '{\n' + ' UNIMPLEMENTED();\n' + return_statement + '}\n'
return '\n' + function_signature + body
def get_constructor_args(constructor):
params = re.search(r'\((.*)\)', constructor).group(1)
args = ', '.join(re.findall(r'[^\w]?(\w+)(?:\,|$)', params))
return params, args
def parse_impl_header(base_impl):
impl_h_file_path = base_impl + '.h'
impl_h_file = open(impl_h_file_path, 'r')
......@@ -172,7 +176,7 @@ def parse_impl_header(base_impl):
for line in impl_h_file:
clean_line = line.strip()
match = re.search(r'^(?:explicit )?(' + base_impl + r'\([^\)]*\))', clean_line);
match = re.search(r'^(?:explicit )?(' + base_impl + r'\([^\)]*\))', clean_line)
if match:
constructor = match.group(1)
......@@ -200,6 +204,7 @@ def parse_impl_header(base_impl):
return impl_stubs, private_impl_stubs, constructor
def get_base_class(base_impl):
impl_h_file_path = base_impl + '.h'
with open(impl_h_file_path, 'r') as impl_h_file:
......@@ -209,6 +214,7 @@ def get_base_class(base_impl):
return match.group(1)
return False
for impl_class in impl_classes:
base_impl = impl_class + 'Impl'
......
......@@ -3,7 +3,6 @@
# Copyright 2016 The ANGLE Project Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Generate copies of the Vulkan layers JSON files, with no paths, forcing
Vulkan to use the default search path to look for layers."""
......@@ -60,8 +59,7 @@ def main():
# Update the path.
if not data_key in data:
raise Exception(
"Could not find '%s' key in %s" % (data_key, json_fname))
raise Exception("Could not find '%s' key in %s" % (data_key, json_fname))
# The standard validation layer has no library path.
if 'library_path' in data[data_key]:
......@@ -93,8 +91,7 @@ def main():
# For each *.json.in template files in source dir generate actual json file
# in target dir
if (set(glob_slash(os.path.join(source_dir, '*.json.in'))) !=
set(json_in_files)):
if (set(glob_slash(os.path.join(source_dir, '*.json.in'))) != set(json_in_files)):
print('.json.in list in gn file is out-of-date', file=sys.stderr)
return 1
for json_in_name in json_in_files:
......@@ -112,5 +109,6 @@ def main():
line = line.replace('@VK_VERSION@', '1.1.' + vk_version)
json_out_file.write(line)
if __name__ == '__main__':
sys.exit(main())
......@@ -22,12 +22,14 @@ os.chdir(os.path.join(script_dir, '..'))
out_dir = 'out'
# Generate the VS solutions for any valid directory.
def generate_projects(dirname):
args = ['gn.bat', 'gen', dirname, '--ide=' + target_ide, '--sln=' + solution_name]
print('Running "' + ' '.join(args) + '"')
subprocess.call(args)
for potential_dir in os.listdir(out_dir):
path = os.path.join(out_dir, potential_dir)
build_ninja_d = os.path.join(path, 'build.ninja.d')
......
......@@ -29,19 +29,22 @@ if sys.platform == 'win32':
scores = []
# Danke to http://stackoverflow.com/a/27758326
def mean(data):
"""Return the sample arithmetic mean of data."""
n = len(data)
if n < 1:
raise ValueError('mean requires at least one data point')
return float(sum(data))/float(n) # in Python 2 use sum(data)/float(n)
return float(sum(data)) / float(n) # in Python 2 use sum(data)/float(n)
def sum_of_square_deviations(data, c):
"""Return sum of square deviations of sequence data."""
ss = sum((float(x)-c)**2 for x in data)
ss = sum((float(x) - c)**2 for x in data)
return ss
def coefficient_of_variation(data):
"""Calculates the population coefficient of variation."""
n = len(data)
......@@ -49,24 +52,28 @@ def coefficient_of_variation(data):
raise ValueError('variance requires at least two data points')
c = mean(data)
ss = sum_of_square_deviations(data, c)
pvar = ss/n # the population variance
pvar = ss / n # the population variance
stddev = (pvar**0.5) # population standard deviation
return stddev / c
def truncated_list(data, n):
"""Compute a truncated list, n is truncation size"""
if len(data) < n * 2:
raise ValueError('list not large enough to truncate')
return sorted(data)[n:-n]
def truncated_mean(data, n):
"""Compute a truncated mean, n is truncation size"""
return mean(truncated_list(data, n))
def truncated_cov(data, n):
"""Compute a truncated coefficient of variation, n is truncation size"""
return coefficient_of_variation(truncated_list(data, n))
# Find most recent binary
newest_binary = None
newest_mtime = None
......@@ -96,8 +103,12 @@ if len(sys.argv) >= 2:
print('Using test executable: ' + perftests_path)
print('Test name: ' + test_name)
def get_results(metric, extra_args=[]):
process = subprocess.Popen([perftests_path, '--gtest_filter=' + test_name] + extra_args, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
process = subprocess.Popen(
[perftests_path, '--gtest_filter=' + test_name] + extra_args,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
output, err = process.communicate()
m = re.search(r'Running (\d+) tests', output)
......@@ -115,6 +126,7 @@ def get_results(metric, extra_args=[]):
return [float(value) for value in m]
# Calibrate the number of steps
steps = get_results("steps", ["--calibration"])[0]
print("running with %d steps." % steps)
......
......@@ -108,13 +108,17 @@ strip_suffixes = ["ANGLE", "EXT", "KHR", "OES", "CHROMIUM"]
# Toggle generation here.
support_EGL_ANGLE_explicit_context = True
def script_relative(path):
return os.path.join(os.path.dirname(sys.argv[0]), path)
def path_to(folder, file):
return os.path.join(script_relative(".."), "src", folder, file)
class GLCommandNames:
def __init__(self):
self.command_names = {}
......@@ -136,8 +140,10 @@ class GLCommandNames:
# Add the commands that aren't duplicates
self.command_names[version] += commands
class RegistryXML:
def __init__(self, xml_file, ext_file = None):
def __init__(self, xml_file, ext_file=None):
tree = etree.parse(script_relative(xml_file))
self.root = tree.getroot()
if (ext_file):
......
......@@ -18,25 +18,30 @@ root_dir = os.path.abspath(os.path.join(script_dir, '..'))
# auto_script is a standard way for scripts to return their inputs and outputs.
def get_child_script_dirname(script):
# All script names are relative to ANGLE's root
return os.path.dirname(os.path.abspath(os.path.join(root_dir, script)))
# Replace all backslashes with forward slashes to be platform independent
def clean_path_slashes(path):
return path.replace("\\", "/")
# Takes a script file name which is relative to the code generation script's directory and
# changes it to be relative to the angle root directory
def rebase_script_path(script_path, relative_path):
return os.path.relpath(os.path.join(os.path.dirname(script_path), relative_path), root_dir)
def grab_from_script(script, param):
res = subprocess.check_output(['python', script, param]).strip()
if res == '':
return []
return [clean_path_slashes(rebase_script_path(script, name)) for name in res.split(',')]
def auto_script(script):
# Set the CWD to the script directory.
os.chdir(get_child_script_dirname(script))
......@@ -49,6 +54,7 @@ def auto_script(script):
os.chdir(root_dir)
return info
hash_fname = "run_code_generation_hashes.json"
generators = {
......@@ -183,7 +189,11 @@ def main():
update_output_hashes(name, info['outputs'], new_hashes)
os.chdir(script_dir)
json.dump(new_hashes, open(hash_fname, "w"), indent=2, sort_keys=True,
json.dump(
new_hashes,
open(hash_fname, "w"),
indent=2,
sort_keys=True,
separators=(',', ':\n '))
......
......@@ -4,15 +4,15 @@
"ANGLE format:src/libANGLE/renderer/Format_table_autogen.cpp":
"a4cf00b75621bc058c4a1b341bdf6989",
"ANGLE format:src/libANGLE/renderer/angle_format.py":
"b18ca0fe4835114a4a2f54977b19e798",
"7ce0869650454e1eebc93658d4d96844",
"ANGLE format:src/libANGLE/renderer/angle_format_data.json":
"288d2f350948f8b1928c249234a44b25",
"ANGLE format:src/libANGLE/renderer/angle_format_map.json":
"be9f9bdbdf785dda05920146e8c55dbb",
"ANGLE format:src/libANGLE/renderer/gen_angle_format_table.py":
"3d9f679b65f39ccf19bd7bdf5498f837",
"1443d23f2dc1e9d7dc86ae0d512e6814",
"ANGLE load functions table:src/libANGLE/renderer/gen_load_functions_table.py":
"2dcc3aa0cd700165b588cf53441e243b",
"e65c50e84fc38ad34d0eb0bebb84aab6",
"ANGLE load functions table:src/libANGLE/renderer/load_functions_data.json":
"816be111bf4d1995589350dceb367315",
"ANGLE load functions table:src/libANGLE/renderer/load_functions_table_autogen.cpp":
......@@ -22,11 +22,11 @@
"D3D11 blit shader selection:src/libANGLE/renderer/d3d/d3d11/d3d11_blit_shaders_autogen.gni":
"329dbafc64b0cb578348819198abcfea",
"D3D11 blit shader selection:src/libANGLE/renderer/d3d/d3d11/gen_blit11helper.py":
"38bff72bc17ac25c6b42c98d40c76e20",
"704a82846928d3e21fc0794dff3a08f8",
"D3D11 format:src/libANGLE/renderer/angle_format.py":
"b18ca0fe4835114a4a2f54977b19e798",
"7ce0869650454e1eebc93658d4d96844",
"D3D11 format:src/libANGLE/renderer/d3d/d3d11/gen_texture_format_table.py":
"d3260e0390ad2cd8b07420b7426fad43",
"bf11e3404d4622059b6e9c4e96abf95e",
"D3D11 format:src/libANGLE/renderer/d3d/d3d11/texture_format_data.json":
"d7483ece817e819588f4ca157716dc7b",
"D3D11 format:src/libANGLE/renderer/d3d/d3d11/texture_format_map.json":
......@@ -38,9 +38,9 @@
"DXGI format support:src/libANGLE/renderer/d3d/d3d11/dxgi_support_table_autogen.cpp":
"7ec32ce0ad41450be7493c1db1130e25",
"DXGI format support:src/libANGLE/renderer/d3d/d3d11/gen_dxgi_support_tables.py":
"389a6358534ebad5e232a44944b6123b",
"b464f153f15d60df1c6536adbfafb072",
"DXGI format:src/libANGLE/renderer/angle_format.py":
"b18ca0fe4835114a4a2f54977b19e798",
"7ce0869650454e1eebc93658d4d96844",
"DXGI format:src/libANGLE/renderer/angle_format_map.json":
"be9f9bdbdf785dda05920146e8c55dbb",
"DXGI format:src/libANGLE/renderer/d3d/d3d11/dxgi_format_data.json":
......@@ -48,7 +48,7 @@
"DXGI format:src/libANGLE/renderer/d3d/d3d11/dxgi_format_map_autogen.cpp":
"32b9860e3fd8e87a89ff9a09e848e516",
"DXGI format:src/libANGLE/renderer/d3d/d3d11/gen_dxgi_format_table.py":
"bed2688ca828fc9fd1904408d33ba007",
"411e6064b916d570fa76949820d34a45",
"ESSL static builtins:src/compiler/translator/ParseContext_autogen.h":
"6be7f97ce68aa5ba5ecf30b835bc344d",
"ESSL static builtins:src/compiler/translator/SymbolTable_autogen.cpp":
......@@ -58,11 +58,11 @@
"ESSL static builtins:src/compiler/translator/builtin_function_declarations.txt":
"e5e567406476306ea06984d885be028d",
"ESSL static builtins:src/compiler/translator/builtin_symbols_hash_autogen.txt":
"e60e2185718a035adfd19ab91536fdb7",
"05cd84d02529a1e83c88caa9097dc0ef",
"ESSL static builtins:src/compiler/translator/builtin_variables.json":
"a8f3d76c3c395e8f6a35dd22eb2e8416",
"ESSL static builtins:src/compiler/translator/gen_builtin_symbols.py":
"f056dba2fdeac5a5dbad9d8f7b17f55f",
"5d5467e17ca5ed5bf9938df9a3391e6f",
"ESSL static builtins:src/compiler/translator/tree_util/BuiltIn_autogen.h":
"6df5ab6576da4f364763b581da839b77",
"ESSL static builtins:src/tests/compiler_tests/ImmutableString_test_autogen.cpp":
......@@ -72,13 +72,13 @@
"Emulated HLSL functions:src/compiler/translator/emulated_builtin_functions_hlsl_autogen.cpp":
"1c759ffdd27a86fd8f2d590b2f3dcb56",
"Emulated HLSL functions:src/compiler/translator/gen_emulated_builtin_function_tables.py":
"c24de0c9ce5f201985c852d2b4b12b98",
"5991de4f43758f59d9d042581ae04eab",
"GL copy conversion table:src/libANGLE/es3_copy_conversion_formats.json":
"54608f6f7d9aa7c59a8458ccf3ab9935",
"GL copy conversion table:src/libANGLE/es3_copy_conversion_table_autogen.cpp":
"b20d198cf5e292c43170d4873b381b34",
"GL copy conversion table:src/libANGLE/gen_copy_conversion_table.py":
"92428cef9d97d33ee7063cfa387ccf56",
"827a4a27cea1e11bef18fed9dce6dceb",
"GL format map:src/libANGLE/es3_format_type_combinations.json":
"a232823cd6430f14e28793ccabb968ee",
"GL format map:src/libANGLE/format_map_autogen.cpp":
......@@ -86,7 +86,7 @@
"GL format map:src/libANGLE/format_map_data.json":
"779798d4879e5f73a5a108e3e3fd3095",
"GL format map:src/libANGLE/gen_format_map.py":
"0fd8c00e8b5afb28a5f8b40d9628b9a4",
"dbc855d50826670a9e1a4ff2747e7583",
"GL/EGL entry points:scripts/egl.xml":
"842e24514c4cfe09fba703c17a0fd292",
"GL/EGL entry points:scripts/egl_angle_ext.xml":
......@@ -94,13 +94,13 @@
"GL/EGL entry points:scripts/entry_point_packed_gl_enums.json":
"28238b0f52826c3794eaa1aa940238bf",
"GL/EGL entry points:scripts/generate_entry_points.py":
"83064b09d168c807431cac137b845b5f",
"e7ab486465bf7873d8f06ddd9b204539",
"GL/EGL entry points:scripts/gl.xml":
"b470cb06b06cbbe7adb2c8129ec85708",
"GL/EGL entry points:scripts/gl_angle_ext.xml":
"11e1eb2cbe51ae6e7b8705d3506846d5",
"GL/EGL entry points:scripts/registry_xml.py":
"3b9a36e0be051dc5b4e5162d54749e49",
"169e89c63aad5bde60012b64cccced27",
"GL/EGL entry points:src/libANGLE/Context_gles_1_0_autogen.h":
"fad4ec629b41e9d97ff57a132ad946cb",
"GL/EGL entry points:src/libANGLE/validationES1_autogen.h":
......@@ -144,9 +144,9 @@
"GL/EGL/WGL loader:scripts/egl_angle_ext.xml":
"745534010f31fbe8e1a1fcddce15ed2d",
"GL/EGL/WGL loader:scripts/generate_loader.py":
"475030714c1644b6dfb1f6f08572039d",
"b8c0dc876c8122bdc2447de982bcfad6",
"GL/EGL/WGL loader:scripts/registry_xml.py":
"3b9a36e0be051dc5b4e5162d54749e49",
"169e89c63aad5bde60012b64cccced27",
"GL/EGL/WGL loader:scripts/wgl.xml":
"aa96419c582af2f6673430e2847693f4",
"GL/EGL/WGL loader:src/libEGL/egl_loader_autogen.cpp":
......@@ -168,13 +168,13 @@
"OpenGL dispatch table:scripts/gl.xml":
"b470cb06b06cbbe7adb2c8129ec85708",
"OpenGL dispatch table:src/libANGLE/renderer/angle_format.py":
"b18ca0fe4835114a4a2f54977b19e798",
"7ce0869650454e1eebc93658d4d96844",
"OpenGL dispatch table:src/libANGLE/renderer/gl/DispatchTableGL_autogen.cpp":
"96d06b3acf7826aee1ec813a8fa3a867",
"OpenGL dispatch table:src/libANGLE/renderer/gl/DispatchTableGL_autogen.h":
"ea5eded625b5db7d7b2b7f689c72f14b",
"OpenGL dispatch table:src/libANGLE/renderer/gl/generate_gl_dispatch_table.py":
"7571edb9e610891ed0c95dc496120cff",
"f21314d401e650b4182c4b7d66ac5c9c",
"OpenGL dispatch table:src/libANGLE/renderer/gl/gl_bindings_data.json":
"1afca09d29ed7788c76cbc9bcfb4de0a",
"OpenGL dispatch table:src/libANGLE/renderer/gl/null_functions.cpp":
......@@ -182,17 +182,17 @@
"OpenGL dispatch table:src/libANGLE/renderer/gl/null_functions.h":
"7906751710cab691f9e7365e59b7beed",
"Vulkan format:src/libANGLE/renderer/angle_format.py":
"b18ca0fe4835114a4a2f54977b19e798",
"7ce0869650454e1eebc93658d4d96844",
"Vulkan format:src/libANGLE/renderer/angle_format_map.json":
"be9f9bdbdf785dda05920146e8c55dbb",
"Vulkan format:src/libANGLE/renderer/vulkan/gen_vk_format_table.py":
"c1f153d67fa50e5f6683170c83b610d4",
"c50c9c66b89df7179a688cda42eb85f2",
"Vulkan format:src/libANGLE/renderer/vulkan/vk_format_map.json":
"a6522dc0af17eebfee8b3d6d4723594f",
"Vulkan format:src/libANGLE/renderer/vulkan/vk_format_table_autogen.cpp":
"34dcf4f106f94b03f74c9fd08b22f6ed",
"Vulkan internal shader programs:src/libANGLE/renderer/vulkan/gen_vk_internal_shaders.py":
"1262e5e903c7dad214ded83625f9d3c4",
"4cc82aa02df5371fc2e3d7448a241fc1",
"Vulkan internal shader programs:src/libANGLE/renderer/vulkan/shaders/gen/BufferUtils.comp.00000000.inc":
"caa03e84d757844a099d0e408a162c7e",
"Vulkan internal shader programs:src/libANGLE/renderer/vulkan/shaders/gen/BufferUtils.comp.00000001.inc":
......@@ -354,9 +354,9 @@
"Vulkan internal shader programs:tools/glslang/glslang_validator.sha1":
"ea685e0867a4b3a07ad7e4246ac84e10",
"Vulkan mandatory format support table:src/libANGLE/renderer/angle_format.py":
"b18ca0fe4835114a4a2f54977b19e798",
"7ce0869650454e1eebc93658d4d96844",
"Vulkan mandatory format support table:src/libANGLE/renderer/vulkan/gen_vk_mandatory_format_support_table.py":
"417772416d3082400ce05acc2f209c9f",
"dab4614bbee0c3fbc5b3ccaaa11ba9d3",
"Vulkan mandatory format support table:src/libANGLE/renderer/vulkan/vk_mandatory_format_support_data.json":
"fa2bd54c1bb0ab2cf1d386061a4bc5c5",
"Vulkan mandatory format support table:src/libANGLE/renderer/vulkan/vk_mandatory_format_support_table_autogen.cpp":
......@@ -372,19 +372,19 @@
"packed enum:src/common/PackedGLEnums_autogen.h":
"0766f2bb7874b2b6b4aaed4a6d0ef49e",
"packed enum:src/common/gen_packed_gl_enums.py":
"0cd1a1cb6d5fde8cbac2994db24eb901",
"cc463afc5e37b0f73e119fec59a39420",
"packed enum:src/common/packed_egl_enums.json":
"5f591d220ee53b6e54a27d1523a3ab79",
"packed enum:src/common/packed_gl_enums.json":
"cd2c00958dd8cc546b816dedaf4769d3",
"proc table:src/libGLESv2/gen_proc_table.py":
"20ebe54894d613de42b0b15ca34078d9",
"3be3e8ed7fad58e8cc6fcf348da7b17d",
"proc table:src/libGLESv2/proc_table_autogen.cpp":
"1e89c264adbe7120edb636013383598b",
"proc table:src/libGLESv2/proc_table_data.json":
"04123621b8fd5e6d18f9f3c95c190693",
"uniform type:src/common/gen_uniform_type_table.py":
"fa40444d496ac07cd9dc0cd239e4a499",
"9dd389f2b5793ba635169d61cef2dde9",
"uniform type:src/common/uniform_type_info_autogen.cpp":
"b31d181bc49ad1c3540401a5c874e692"
}
\ No newline at end of file
......@@ -39,10 +39,9 @@ def main():
isolated_file = os.path.join(out_file_path, '%s.isolated' % args.test)
isolate_args = [
'python', isolate_script_path, 'archive',
'-I', 'https://isolateserver.appspot.com',
'-i', isolate_file,
'-s', isolated_file]
'python', isolate_script_path, 'archive', '-I', 'https://isolateserver.appspot.com', '-i',
isolate_file, '-s', isolated_file
]
stdout = subprocess.check_output(isolate_args)
sha = stdout[:40]
......@@ -50,14 +49,11 @@ def main():
swarming_script_path = os.path.join('tools', 'swarming_client', 'swarming.py')
swarmings_args = [
'python', swarming_script_path, 'trigger',
'-S', 'chromium-swarm.appspot.com',
'-I', 'isolateserver.appspot.com',
'-d', 'os', args.os_dim,
'-d', 'pool', args.pool,
'-d', 'gpu', args.gpu_dim,
'--shards=%d' % args.shards,
'-s', sha]
'python', swarming_script_path, 'trigger', '-S', 'chromium-swarm.appspot.com', '-I',
'isolateserver.appspot.com', '-d', 'os', args.os_dim, '-d', 'pool', args.pool, '-d', 'gpu',
args.gpu_dim,
'--shards=%d' % args.shards, '-s', sha
]
if args.extra_args:
swarmings_args += ['--'] + args.extra_args
......
......@@ -39,12 +39,15 @@ if newest_folder is None:
source_folder = newest_folder
# Is a folder a chrome binary directory?
def is_chrome_bin(str):
chrome_file = os.path.join(chrome_folder, str)
return os.path.isdir(chrome_file) and all([char.isdigit() or char == '.' for char in str])
sorted_chrome_bins = sorted([folder for folder in os.listdir(chrome_folder) if is_chrome_bin(folder)], reverse=True)
sorted_chrome_bins = sorted(
[folder for folder in os.listdir(chrome_folder) if is_chrome_bin(folder)], reverse=True)
dest_folder = os.path.join(chrome_folder, sorted_chrome_bins[0])
......
......@@ -14,9 +14,11 @@ usage = """\
Usage: commit_id.py check <angle_dir> - check if git is present
commit_id.py gen <angle_dir> <file_to_write> - generate commit.h"""
def grab_output(command, cwd):
return sp.Popen(command, stdout=sp.PIPE, shell=True, cwd=cwd).communicate()[0].strip()
if len(sys.argv) < 3:
sys.exit(usage)
......
......@@ -9,6 +9,7 @@
#include "common/mathutil.h"
def convertMantissa(i):
if i == 0:
return 0
......@@ -24,6 +25,7 @@ def convertMantissa(i):
else:
return 0x38000000 + ((i - 1024) << 13)
def convertExponent(i):
if i == 0:
return 0
......@@ -38,12 +40,14 @@ def convertExponent(i):
else:
return 0xC7800000
def convertOffset(i):
if i == 0 or i == 32:
return 0
else:
return 1024
print """//
// Copyright (c) 2012 The ANGLE Project Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
......
......@@ -28,6 +28,7 @@ Generators = [
},
]
def load_enums(path):
with open(path) as map_file:
enums_dict = json.loads(map_file.read(), object_pairs_hook=OrderedDict)
......@@ -42,18 +43,21 @@ def load_enums(path):
values.append(EnumValue(value_name, value_gl_name, i))
i += 1
assert(i < 255) # This makes sure enums fit in the uint8_t
assert (i < 255) # This makes sure enums fit in the uint8_t
enums.append(Enum(enum_name, values, i))
enums.sort(key=lambda enum: enum.name)
return enums
def generate_include_guard(path):
return path.replace(".", "_").upper()
def header_name_from_cpp_name(path):
return path.replace(".cpp", ".h")
header_template = """// GENERATED FILE - DO NOT EDIT.
// Generated by {script_name} using data from {data_source_name}.
//
......@@ -99,6 +103,7 @@ template <>
{api_enum_name} To{api_enum_name}({enum_name} from);
"""
def write_header(enums, path_prefix, file_name, data_source_name, namespace, api_enum_name):
content = ['']
......@@ -107,27 +112,27 @@ def write_header(enums, path_prefix, file_name, data_source_name, namespace, api
for value in enum.values:
value_declarations.append(' ' + value.name + ' = ' + str(value.value) + ',')
content.append(enum_declaration_template.format(
enum_name = enum.name,
max_value = str(enum.max_value),
value_declarations = '\n'.join(value_declarations),
api_enum_name = api_enum_name
))
content.append(
enum_declaration_template.format(
enum_name=enum.name,
max_value=str(enum.max_value),
value_declarations='\n'.join(value_declarations),
api_enum_name=api_enum_name))
header = header_template.format(
content = ''.join(content),
copyright_year = datetime.date.today().year,
data_source_name = data_source_name,
script_name = sys.argv[0],
file_name = file_name,
include_guard = generate_include_guard(file_name),
namespace = namespace,
api_enum_name = api_enum_name
)
content=''.join(content),
copyright_year=datetime.date.today().year,
data_source_name=data_source_name,
script_name=sys.argv[0],
file_name=file_name,
include_guard=generate_include_guard(file_name),
namespace=namespace,
api_enum_name=api_enum_name)
with (open(path_prefix + file_name, 'wt')) as f:
f.write(header)
cpp_template = """// GENERATED FILE - DO NOT EDIT.
// Generated by {script_name} using data from {data_source_name}.
//
......@@ -172,6 +177,7 @@ template <>
}}
"""
def write_cpp(enums, path_prefix, file_name, data_source_name, namespace, api_enum_name):
content = ['']
......@@ -180,27 +186,28 @@ def write_cpp(enums, path_prefix, file_name, data_source_name, namespace, api_en
to_glenum_cases = []
for value in enum.values:
qualified_name = enum.name + '::' + value.name
from_glenum_cases.append(' case ' + value.gl_name + ':\n return ' + qualified_name + ';')
to_glenum_cases.append(' case ' + qualified_name + ':\n return ' + value.gl_name + ';')
content.append(enum_implementation_template.format(
enum_name = enum.name,
from_glenum_cases = '\n'.join(from_glenum_cases),
max_value = str(enum.max_value),
to_glenum_cases = '\n'.join(to_glenum_cases),
api_enum_name = api_enum_name
))
from_glenum_cases.append(' case ' + value.gl_name + ':\n return ' +
qualified_name + ';')
to_glenum_cases.append(' case ' + qualified_name + ':\n return ' +
value.gl_name + ';')
content.append(
enum_implementation_template.format(
enum_name=enum.name,
from_glenum_cases='\n'.join(from_glenum_cases),
max_value=str(enum.max_value),
to_glenum_cases='\n'.join(to_glenum_cases),
api_enum_name=api_enum_name))
cpp = cpp_template.format(
content = ''.join(content),
copyright_year = datetime.date.today().year,
data_source_name = data_source_name,
script_name = sys.argv[0],
file_name = file_name,
header_name = header_name_from_cpp_name(file_name),
namespace = namespace,
api_enum_name = api_enum_name
)
content=''.join(content),
copyright_year=datetime.date.today().year,
data_source_name=data_source_name,
script_name=sys.argv[0],
file_name=file_name,
header_name=header_name_from_cpp_name(file_name),
namespace=namespace,
api_enum_name=api_enum_name)
with (open(path_prefix + file_name, 'wt')) as f:
f.write(cpp)
......@@ -236,8 +243,10 @@ def main():
namespace = generator['namespace']
enum_type = generator['enum_type']
enums = load_enums(path_prefix + json_file)
write_header(enums, path_prefix, output_file + '_autogen.h', json_file, namespace, enum_type)
write_cpp(enums, path_prefix, output_file + '_autogen.cpp', json_file, namespace, enum_type)
write_header(enums, path_prefix, output_file + '_autogen.h', json_file, namespace,
enum_type)
write_cpp(enums, path_prefix, output_file + '_autogen.cpp', json_file, namespace,
enum_type)
return 0
......
......@@ -12,68 +12,23 @@ from datetime import date
import sys
all_uniform_types = [
"GL_NONE",
"GL_BOOL",
"GL_BOOL_VEC2",
"GL_BOOL_VEC3",
"GL_BOOL_VEC4",
"GL_FLOAT",
"GL_FLOAT_MAT2",
"GL_FLOAT_MAT2x3",
"GL_FLOAT_MAT2x4",
"GL_FLOAT_MAT3",
"GL_FLOAT_MAT3x2",
"GL_FLOAT_MAT3x4",
"GL_FLOAT_MAT4",
"GL_FLOAT_MAT4x2",
"GL_FLOAT_MAT4x3",
"GL_FLOAT_VEC2",
"GL_FLOAT_VEC3",
"GL_FLOAT_VEC4",
"GL_IMAGE_2D",
"GL_IMAGE_2D_ARRAY",
"GL_IMAGE_3D",
"GL_IMAGE_CUBE",
"GL_INT",
"GL_INT_IMAGE_2D",
"GL_INT_IMAGE_2D_ARRAY",
"GL_INT_IMAGE_3D",
"GL_INT_IMAGE_CUBE",
"GL_INT_SAMPLER_2D",
"GL_INT_SAMPLER_2D_ARRAY",
"GL_INT_SAMPLER_2D_MULTISAMPLE",
"GL_INT_SAMPLER_2D_MULTISAMPLE_ARRAY",
"GL_INT_SAMPLER_3D",
"GL_INT_SAMPLER_CUBE",
"GL_INT_VEC2",
"GL_INT_VEC3",
"GL_INT_VEC4",
"GL_SAMPLER_2D",
"GL_SAMPLER_2D_ARRAY",
"GL_SAMPLER_2D_ARRAY_SHADOW",
"GL_SAMPLER_2D_MULTISAMPLE",
"GL_SAMPLER_2D_MULTISAMPLE_ARRAY",
"GL_SAMPLER_2D_RECT_ANGLE",
"GL_SAMPLER_2D_SHADOW",
"GL_SAMPLER_3D",
"GL_SAMPLER_CUBE",
"GL_SAMPLER_CUBE_SHADOW",
"GL_SAMPLER_EXTERNAL_OES",
"GL_UNSIGNED_INT",
"GL_UNSIGNED_INT_ATOMIC_COUNTER",
"GL_UNSIGNED_INT_IMAGE_2D",
"GL_UNSIGNED_INT_IMAGE_2D_ARRAY",
"GL_UNSIGNED_INT_IMAGE_3D",
"GL_UNSIGNED_INT_IMAGE_CUBE",
"GL_UNSIGNED_INT_SAMPLER_2D",
"GL_UNSIGNED_INT_SAMPLER_2D_ARRAY",
"GL_UNSIGNED_INT_SAMPLER_2D_MULTISAMPLE",
"GL_UNSIGNED_INT_SAMPLER_2D_MULTISAMPLE_ARRAY",
"GL_UNSIGNED_INT_SAMPLER_3D",
"GL_UNSIGNED_INT_SAMPLER_CUBE",
"GL_UNSIGNED_INT_VEC2",
"GL_UNSIGNED_INT_VEC3",
"GL_UNSIGNED_INT_VEC4"
"GL_NONE", "GL_BOOL", "GL_BOOL_VEC2", "GL_BOOL_VEC3", "GL_BOOL_VEC4", "GL_FLOAT",
"GL_FLOAT_MAT2", "GL_FLOAT_MAT2x3", "GL_FLOAT_MAT2x4", "GL_FLOAT_MAT3", "GL_FLOAT_MAT3x2",
"GL_FLOAT_MAT3x4", "GL_FLOAT_MAT4", "GL_FLOAT_MAT4x2", "GL_FLOAT_MAT4x3", "GL_FLOAT_VEC2",
"GL_FLOAT_VEC3", "GL_FLOAT_VEC4", "GL_IMAGE_2D", "GL_IMAGE_2D_ARRAY", "GL_IMAGE_3D",
"GL_IMAGE_CUBE", "GL_INT", "GL_INT_IMAGE_2D", "GL_INT_IMAGE_2D_ARRAY", "GL_INT_IMAGE_3D",
"GL_INT_IMAGE_CUBE", "GL_INT_SAMPLER_2D", "GL_INT_SAMPLER_2D_ARRAY",
"GL_INT_SAMPLER_2D_MULTISAMPLE", "GL_INT_SAMPLER_2D_MULTISAMPLE_ARRAY", "GL_INT_SAMPLER_3D",
"GL_INT_SAMPLER_CUBE", "GL_INT_VEC2", "GL_INT_VEC3", "GL_INT_VEC4", "GL_SAMPLER_2D",
"GL_SAMPLER_2D_ARRAY", "GL_SAMPLER_2D_ARRAY_SHADOW", "GL_SAMPLER_2D_MULTISAMPLE",
"GL_SAMPLER_2D_MULTISAMPLE_ARRAY", "GL_SAMPLER_2D_RECT_ANGLE", "GL_SAMPLER_2D_SHADOW",
"GL_SAMPLER_3D", "GL_SAMPLER_CUBE", "GL_SAMPLER_CUBE_SHADOW", "GL_SAMPLER_EXTERNAL_OES",
"GL_UNSIGNED_INT", "GL_UNSIGNED_INT_ATOMIC_COUNTER", "GL_UNSIGNED_INT_IMAGE_2D",
"GL_UNSIGNED_INT_IMAGE_2D_ARRAY", "GL_UNSIGNED_INT_IMAGE_3D", "GL_UNSIGNED_INT_IMAGE_CUBE",
"GL_UNSIGNED_INT_SAMPLER_2D", "GL_UNSIGNED_INT_SAMPLER_2D_ARRAY",
"GL_UNSIGNED_INT_SAMPLER_2D_MULTISAMPLE", "GL_UNSIGNED_INT_SAMPLER_2D_MULTISAMPLE_ARRAY",
"GL_UNSIGNED_INT_SAMPLER_3D", "GL_UNSIGNED_INT_SAMPLER_CUBE", "GL_UNSIGNED_INT_VEC2",
"GL_UNSIGNED_INT_VEC3", "GL_UNSIGNED_INT_VEC4"
]
# Uniform texture types. Be wary of substrings finding the wrong types.
......@@ -142,9 +97,11 @@ const UniformTypeInfo &GetUniformTypeInfo(GLenum uniformType)
type_info_data_template = """{{{type}, {component_type}, {texture_type}, {transposed_type}, {bool_type}, {sampler_format}, {rows}, {columns}, {components}, {component_size}, {internal_size}, {external_size}, {is_sampler}, {is_matrix}, {is_image} }}"""
type_index_case_template = """case {enum_value}: return {index_value};"""
def cpp_bool(value):
return "true" if value else "false"
def get_component_type(uniform_type):
if uniform_type.find("GL_BOOL") == 0:
return "GL_BOOL"
......@@ -159,21 +116,25 @@ def get_component_type(uniform_type):
else:
return "GL_INT"
def get_texture_type(uniform_type):
for sampler_type, tex_type in texture_types.items():
if uniform_type.endswith(sampler_type):
return "GL_TEXTURE_" + tex_type
return "GL_NONE"
def get_transposed_type(uniform_type):
if "_MAT" in uniform_type:
if "x" in uniform_type:
return "GL_FLOAT_MAT" + uniform_type[-1] + "x" + uniform_type[uniform_type.find("_MAT")+4]
return "GL_FLOAT_MAT" + uniform_type[-1] + "x" + uniform_type[uniform_type.find("_MAT")
+ 4]
else:
return uniform_type
else:
return "GL_NONE"
def get_bool_type(uniform_type):
if uniform_type == "GL_INT" or uniform_type == "GL_UNSIGNED_INT" or uniform_type == "GL_FLOAT":
return "GL_BOOL"
......@@ -182,6 +143,7 @@ def get_bool_type(uniform_type):
else:
return "GL_NONE"
def get_sampler_format(uniform_type):
if not "_SAMPLER_" in uniform_type:
return "SamplerFormat::InvalidEnum"
......@@ -194,6 +156,7 @@ def get_sampler_format(uniform_type):
else:
return "SamplerFormat::Float"
def get_rows(uniform_type):
if uniform_type == "GL_NONE":
return "0"
......@@ -202,6 +165,7 @@ def get_rows(uniform_type):
else:
return "1"
def get_columns(uniform_type):
if uniform_type == "GL_NONE":
return "0"
......@@ -212,9 +176,11 @@ def get_columns(uniform_type):
else:
return "1"
def get_components(uniform_type):
return str(int(get_rows(uniform_type)) * int(get_columns(uniform_type)))
def get_component_size(uniform_type):
component_type = get_component_type(uniform_type)
if (component_type) == "GL_BOOL":
......@@ -230,38 +196,45 @@ def get_component_size(uniform_type):
else:
raise "Invalid component type: " + component_type
def get_internal_size(uniform_type):
    """C++ size expression for internal storage: each row padded out to four
    components (hence rows * 4), times the per-component size."""
    padded_component_count = int(get_rows(uniform_type)) * 4
    return get_component_size(uniform_type) + " * " + str(padded_component_count)
def get_external_size(uniform_type):
    """C++ size expression for tightly packed storage: per-component size
    times the actual component count."""
    size_expr = get_component_size(uniform_type)
    return size_expr + " * " + get_components(uniform_type)
def get_is_sampler(uniform_type):
    """C++ bool literal: whether uniform_type is a sampler type."""
    # cpp_bool inlined: "_SAMPLER_" substring marks all sampler enums.
    return "true" if "_SAMPLER_" in uniform_type else "false"
def get_is_matrix(uniform_type):
    """C++ bool literal: whether uniform_type is a matrix type."""
    # cpp_bool inlined: "_MAT" substring marks all matrix enums.
    return "true" if "_MAT" in uniform_type else "false"
def get_is_image(uniform_type):
    """C++ bool literal: whether uniform_type is an image type."""
    # cpp_bool inlined: "_IMAGE_" substring marks all image enums.
    return "true" if "_IMAGE_" in uniform_type else "false"
def gen_type_info(uniform_type):
    """Fill type_info_data_template with every derived property of one
    uniform type, producing a single C++ table-entry initializer.

    Note: this span previously contained two complete keyword-argument lists
    (the pre- and post-format version of the same call, diff residue), which
    is invalid Python; this is the single reconstructed call.
    """
    return type_info_data_template.format(
        type=uniform_type,
        component_type=get_component_type(uniform_type),
        texture_type=get_texture_type(uniform_type),
        transposed_type=get_transposed_type(uniform_type),
        bool_type=get_bool_type(uniform_type),
        sampler_format=get_sampler_format(uniform_type),
        rows=get_rows(uniform_type),
        columns=get_columns(uniform_type),
        components=get_components(uniform_type),
        component_size=get_component_size(uniform_type),
        internal_size=get_internal_size(uniform_type),
        external_size=get_external_size(uniform_type),
        is_sampler=get_is_sampler(uniform_type),
        is_matrix=get_is_matrix(uniform_type),
        is_image=get_is_image(uniform_type))
def gen_type_index_case(index, uniform_type):
    """Emit one C++ switch case mapping a GL uniform enum to its table index."""
    return "case {}: return {};".format(uniform_type, index)
......@@ -283,16 +256,20 @@ def main():
return 1
return 0
uniform_type_info_data = ",\n".join([gen_type_info(uniform_type) for uniform_type in all_uniform_types])
uniform_type_index_cases = "\n".join([gen_type_index_case(index, uniform_type) for index, uniform_type in enumerate(all_uniform_types)])
uniform_type_info_data = ",\n".join(
[gen_type_info(uniform_type) for uniform_type in all_uniform_types])
uniform_type_index_cases = "\n".join([
gen_type_index_case(index, uniform_type)
for index, uniform_type in enumerate(all_uniform_types)
])
with open('uniform_type_info_autogen.cpp', 'wt') as out_file:
output_cpp = template_cpp.format(
script_name = sys.argv[0],
copyright_year = date.today().year,
total_count = len(all_uniform_types),
uniform_type_info_data = uniform_type_info_data,
uniform_type_index_cases = uniform_type_index_cases)
script_name=sys.argv[0],
copyright_year=date.today().year,
total_count=len(all_uniform_types),
uniform_type_info_data=uniform_type_info_data,
uniform_type_index_cases=uniform_type_index_cases)
out_file.write(output_cpp)
out_file.close()
return 0
......
0af87b7f37d8a5260c859e9169a91f6a
\ No newline at end of file
defc05f112e255400323d95b3610cfeb
\ No newline at end of file
......@@ -255,65 +255,34 @@ namespace BuiltInGroup
parsed_variables = None
# All basic types known to the symbol table. The index of a type in this
# list defines its single-character mangled name (see get_basic_mangled_name),
# so the order must not change.
#
# Note: this span previously contained the same 43 entries twice (the pre-
# and post-format version of the list, diff residue) with a missing comma
# between the halves that silently concatenated 'UImageCube' and 'Void';
# this is the single reconstructed list.
basic_types_enumeration = [
    'Void', 'Float', 'Int', 'UInt', 'Bool', 'AtomicCounter', 'YuvCscStandardEXT', 'Sampler2D',
    'Sampler3D', 'SamplerCube', 'Sampler2DArray', 'SamplerExternalOES', 'SamplerExternal2DY2YEXT',
    'Sampler2DRect', 'Sampler2DMS', 'Sampler2DMSArray', 'ISampler2D', 'ISampler3D', 'ISamplerCube',
    'ISampler2DArray', 'ISampler2DMS', 'ISampler2DMSArray', 'USampler2D', 'USampler3D',
    'USamplerCube', 'USampler2DArray', 'USampler2DMS', 'USampler2DMSArray', 'Sampler2DShadow',
    'SamplerCubeShadow', 'Sampler2DArrayShadow', 'Image2D', 'IImage2D', 'UImage2D', 'Image3D',
    'IImage3D', 'UImage3D', 'Image2DArray', 'IImage2DArray', 'UImage2DArray', 'ImageCube',
    'IImageCube', 'UImageCube'
]
id_counter = 0
def set_working_dir():
    """Change the process working directory to this script's own directory."""
    os.chdir(os.path.dirname(os.path.abspath(__file__)))
def get_basic_mangled_name(basic):
    """Single-character mangled name for a basic type.

    Indices 0-25 in basic_types_enumeration map to 'A'-'Z', later indices
    continue with 'a'-'z'.
    """
    position = basic_types_enumeration.index(basic)
    if position >= 26:
        return chr(ord('a') + position - 26)
    return chr(ord('A') + position)
levels = ['ESSL3_1_BUILTINS', 'ESSL3_BUILTINS', 'ESSL1_BUILTINS', 'COMMON_BUILTINS']
def get_shader_version_condition_for_level(level):
if level == 'ESSL3_1_BUILTINS':
return 'shaderVersion >= 310'
......@@ -326,8 +295,10 @@ def get_shader_version_condition_for_level(level):
else:
raise Exception('Unsupported symbol table level')
class GroupedList:
""""Class for storing a list of objects grouped by symbol table level and condition."""
def __init__(self):
self.objs = OrderedDict()
self.max_name_length = 0
......@@ -366,12 +337,12 @@ class GroupedList:
continue
level_condition = get_shader_version_condition_for_level(level)
if level_condition != '':
code.append('if ({condition})\n {{'.format(condition = level_condition))
code.append('if ({condition})\n {{'.format(condition=level_condition))
for condition, objs in self.objs[level].iteritems():
if len(objs) > 0:
if condition != 'NO_CONDITION':
condition_header = ' if ({condition})\n {{'.format(condition = condition)
condition_header = ' if ({condition})\n {{'.format(condition=condition)
code.append(condition_header.replace('shaderType', 'mShaderType'))
switch = {}
......@@ -396,7 +367,9 @@ class GroupedList:
code.append('return nullptr;')
return '\n'.join(code)
class TType:
def __init__(self, glsl_header_type):
if isinstance(glsl_header_type, basestring):
self.data = self.parse_type(glsl_header_type)
......@@ -409,7 +382,8 @@ class TType:
# are overridden when the specific types are generated.
if 'primarySize' not in self.data:
if ('secondarySize' in self.data):
raise Exception('Unexpected secondarySize on type that does not have primarySize set')
raise Exception(
'Unexpected secondarySize on type that does not have primarySize set')
self.data['primarySize'] = 1
if 'secondarySize' not in self.data:
self.data['secondarySize'] = 1
......@@ -553,6 +527,7 @@ class TType:
raise Exception('Unrecognized type: ' + str(glsl_header_type))
def get_parsed_functions(functions_txt_filename):
def parse_function_parameters(parameters):
......@@ -567,7 +542,9 @@ def get_parsed_functions(functions_txt_filename):
lines = []
with open(functions_txt_filename) as f:
lines = f.readlines()
lines = [line.strip() for line in lines if line.strip() != '' and not line.strip().startswith('//')]
lines = [
line.strip() for line in lines if line.strip() != '' and not line.strip().startswith('//')
]
fun_re = re.compile(r'^(\w+) (\w+)\((.*)\);$')
......@@ -580,11 +557,7 @@ def get_parsed_functions(functions_txt_filename):
if line.startswith('GROUP BEGIN '):
group_rest = line[12:].strip()
group_parts = group_rest.split(' ', 1)
current_group = {
'functions': [],
'name': group_parts[0],
'subgroups': {}
}
current_group = {'functions': [], 'name': group_parts[0], 'subgroups': {}}
if len(group_parts) > 1:
group_metadata = json.loads(group_parts[1])
current_group.update(group_metadata)
......@@ -593,7 +566,8 @@ def get_parsed_functions(functions_txt_filename):
group_end_name = line[10:].strip()
current_group = group_stack[-1]
if current_group['name'] != group_end_name:
raise Exception('GROUP END: Unexpected function group name "' + group_end_name + '" was expecting "' + current_group['name'] + '"')
raise Exception('GROUP END: Unexpected function group name "' + group_end_name +
'" was expecting "' + current_group['name'] + '"')
group_stack.pop()
is_top_level_group = (len(group_stack) == 0)
if is_top_level_group:
......@@ -621,7 +595,10 @@ def get_parsed_functions(functions_txt_filename):
return parsed_functions
fnvPrime = 16777619
def hash32(str):
fnvOffsetBasis = 0x811c9dc5
hash = fnvOffsetBasis
......@@ -630,7 +607,8 @@ def hash32(str):
hash = (hash * fnvPrime) & 0xffffffff
return hash
def mangledNameHash(str, script_generated_hash_tests, save_test = True):
def mangledNameHash(str, script_generated_hash_tests, save_test=True):
hash = hash32(str)
index = 0
max_six_bit_value = (1 << 6) - 1
......@@ -642,22 +620,27 @@ def mangledNameHash(str, script_generated_hash_tests, save_test = True):
elif c == '{' or c == '[':
has_array_or_block_param_bit = 1
index += 1
hash = ((hash >> 13) ^ (hash & 0x1fff)) | (index << 19) | (paren_location << 25) | (has_array_or_block_param_bit << 31)
hash = ((hash >> 13) ^ (hash & 0x1fff)) | (index << 19) | (paren_location << 25) | (
has_array_or_block_param_bit << 31)
if save_test:
sanity_check = ' ASSERT_EQ(0x{hash}u, ImmutableString("{str}").mangledNameHash());'.format(hash = ('%08x' % hash), str = str)
sanity_check = ' ASSERT_EQ(0x{hash}u, ImmutableString("{str}").mangledNameHash());'.format(
hash=('%08x' % hash), str=str)
script_generated_hash_tests.update({sanity_check: None})
return hash
def get_suffix(props):
    """Return the optional name suffix from a symbol's properties, '' if absent."""
    return props.get('suffix', '')
def get_extension(props):
    """Return the extension a symbol requires, 'UNDEFINED' when it needs none."""
    return props.get('extension', 'UNDEFINED')
def get_op(name, function_props):
if 'op' not in function_props:
raise Exception('function op not defined')
......@@ -665,34 +648,40 @@ def get_op(name, function_props):
return name[0].upper() + name[1:]
return function_props['op']
def get_known_to_not_have_side_effects(function_props):
    """C++ bool literal: 'true' only when the built-in is known side-effect free.

    A function lowered to a dedicated op (anything but CallBuiltInFunction)
    is side-effect free unless it is explicitly flagged with 'hasSideEffects'
    or has an out/inout parameter. Everything else conservatively gets 'false'.

    Note: this span previously contained the qualifier check twice (the pre-
    and post-format version of the same condition, diff residue); this is the
    single reconstructed implementation.
    """
    if 'op' in function_props and function_props['op'] != 'CallBuiltInFunction':
        if 'hasSideEffects' in function_props:
            return 'false'
        for param in get_parameters(function_props):
            # Out/inout parameters are writes observable by the caller.
            if 'qualifier' in param.data and (param.data['qualifier'] == 'Out' or
                                              param.data['qualifier'] == 'InOut'):
                return 'false'
        return 'true'
    return 'false'
def get_parameters(function_props):
    """Return the function's parameter list, or [] when it declares none."""
    return function_props.get('parameters', [])
def get_function_mangled_name(function_name, parameters):
    """Mangled name: function name, '(', then each parameter's mangled code
    (no closing parenthesis, matching the symbol table's convention)."""
    return function_name + '(' + ''.join(
        param.get_mangled_name() for param in parameters)
def get_function_human_readable_name(function_name, parameters):
    """Readable unique name: function name with '_<param readable name>'
    appended for each parameter."""
    return function_name + ''.join(
        '_' + param.get_human_readable_name() for param in parameters)
def gen_parameters_variant_ids(str_len, ttype_mangled_name_variants):
# Note that this doesn't generate variants with array parameters or struct / interface block parameters. They are assumed to have been filtered out separately.
if str_len % 2 != 0:
......@@ -700,7 +689,9 @@ def gen_parameters_variant_ids(str_len, ttype_mangled_name_variants):
num_variants = pow(len(ttype_mangled_name_variants), str_len / 2)
return xrange(num_variants)
def get_parameters_mangled_name_variant(variant_id, paren_location, total_length, ttype_mangled_name_variants):
def get_parameters_mangled_name_variant(variant_id, paren_location, total_length,
ttype_mangled_name_variants):
str_len = total_length - paren_location - 1
if str_len % 2 != 0:
raise Exception('Expecting parameters mangled name length to be divisible by two')
......@@ -714,6 +705,7 @@ def get_parameters_mangled_name_variant(variant_id, paren_location, total_length
variant += ttype_mangled_name_variants[parameter_variant_index]
return variant
# Calculate the mangled name hash of a common prefix string that's been pre-hashed with hash32()
# plus a variant of the parameters. This is faster than constructing the whole string and then
# calculating the hash for that.
......@@ -732,8 +724,10 @@ def get_mangled_name_variant_hash(prefix_hash32, variant_id, paren_location, tot
parameter_variant_id_base = parameter_variant_id_base / num_type_variants
return ((hash >> 13) ^ (hash & 0x1fff)) | (total_length << 19) | (paren_location << 25)
def mangled_name_hash_can_collide_with_different_parameters(function_variant_props, num_type_variants,
ttype_mangled_name_variants, script_generated_hash_tests):
def mangled_name_hash_can_collide_with_different_parameters(
function_variant_props, num_type_variants, ttype_mangled_name_variants,
script_generated_hash_tests):
# We exhaustively search through all possible lists of parameters and see if any other mangled
# name has the same hash.
mangled_name = function_variant_props['mangled_name']
......@@ -747,21 +741,25 @@ def mangled_name_hash_can_collide_with_different_parameters(function_variant_pro
if (parameters_mangled_name_len > 6):
# This increases the complexity of searching for hash collisions considerably, so rather than doing it we just conservatively assume that a hash collision may be possible.
return True
for variant_id in gen_parameters_variant_ids(parameters_mangled_name_len, ttype_mangled_name_variants):
variant_hash = get_mangled_name_variant_hash(prefix_hash32, variant_id, paren_location, mangled_name_len,
num_type_variants, ttype_mangled_name_variants)
manged_name_variant = get_parameters_mangled_name_variant(variant_id, paren_location, mangled_name_len,
for variant_id in gen_parameters_variant_ids(parameters_mangled_name_len,
ttype_mangled_name_variants):
variant_hash = get_mangled_name_variant_hash(prefix_hash32, variant_id, paren_location,
mangled_name_len, num_type_variants,
ttype_mangled_name_variants)
manged_name_variant = get_parameters_mangled_name_variant(
variant_id, paren_location, mangled_name_len, ttype_mangled_name_variants)
if variant_hash == hash and manged_name_variant != parameters_mangled_name:
return True
return False
def get_unique_identifier_name(function_name, parameters):
    """Identifier-safe unique name: function name, '_', then the concatenated
    mangled codes of its parameters."""
    return function_name + '_' + ''.join(
        param.get_mangled_name() for param in parameters)
def get_variable_name_to_store_parameter(param):
unique_name = 'pt'
if 'qualifier' in param.data:
......@@ -772,6 +770,7 @@ def get_variable_name_to_store_parameter(param):
unique_name += param.get_mangled_name()
return unique_name
def get_variable_name_to_store_parameters(parameters):
if len(parameters) == 0:
return 'empty'
......@@ -785,10 +784,12 @@ def get_variable_name_to_store_parameters(parameters):
unique_name += param.get_mangled_name()
return unique_name
def define_constexpr_variable(template_args, variable_declarations):
    """Append a constexpr TVariable declaration, filled from template_args,
    to the variable_declarations list (mutates the list in place)."""
    declaration_template = ('constexpr const TVariable kVar_{name_with_suffix}('
                            'BuiltInId::{name_with_suffix}, BuiltInName::{name}, '
                            'SymbolType::BuiltIn, TExtension::{extension}, {type});')
    variable_declarations.append(declaration_template.format(**template_args))
def gen_function_variants(function_name, function_props):
function_variants = []
parameters = get_parameters(function_props)
......@@ -797,10 +798,12 @@ def gen_function_variants(function_name, function_props):
for param in parameters:
if 'genType' in param.data:
if param.data['genType'] not in ['sampler_or_image', 'vec', 'yes']:
raise Exception('Unexpected value of genType "' + str(param.data['genType']) + '" should be "sampler_or_image", "vec", or "yes"')
raise Exception('Unexpected value of genType "' + str(param.data['genType']) +
'" should be "sampler_or_image", "vec", or "yes"')
gen_type.add(param.data['genType'])
if len(gen_type) > 1:
raise Exception('Unexpected multiple values of genType set on the same function: ' + str(list(gen_type)))
raise Exception('Unexpected multiple values of genType set on the same function: '
+ str(list(gen_type)))
if len(gen_type) == 0:
function_variants.append(function_props)
return function_variants
......@@ -815,7 +818,8 @@ def gen_function_variants(function_name, function_props):
for param in parameters:
variant_parameters.append(param.specific_sampler_or_image_type(type))
variant_props['parameters'] = variant_parameters
variant_props['returnType'] = function_props['returnType'].specific_sampler_or_image_type(type)
variant_props['returnType'] = function_props[
'returnType'].specific_sampler_or_image_type(type)
function_variants.append(variant_props)
return function_variants
......@@ -833,10 +837,13 @@ def gen_function_variants(function_name, function_props):
function_variants.append(variant_props)
return function_variants
def process_single_function_group(condition, group_name, group, num_type_variants, parameter_declarations, ttype_mangled_name_variants,
name_declarations, unmangled_function_if_statements, unmangled_builtin_declarations, defined_function_variants,
builtin_id_declarations, builtin_id_definitions, defined_parameter_names, variable_declarations, function_declarations,
script_generated_hash_tests, get_builtin_if_statements):
def process_single_function_group(
condition, group_name, group, num_type_variants, parameter_declarations,
ttype_mangled_name_variants, name_declarations, unmangled_function_if_statements,
unmangled_builtin_declarations, defined_function_variants, builtin_id_declarations,
builtin_id_definitions, defined_parameter_names, variable_declarations,
function_declarations, script_generated_hash_tests, get_builtin_if_statements):
global id_counter
if 'functions' not in group:
......@@ -867,21 +874,30 @@ def process_single_function_group(condition, group_name, group, num_type_variant
return &UnmangledBuiltIns::{extension};
}}"""
unmangled_if = template_unmangled_if.format(**template_args)
unmangled_builtin_no_condition = unmangled_function_if_statements.get(level, 'NO_CONDITION', function_name)
if unmangled_builtin_no_condition != None and unmangled_builtin_no_condition['extension'] == 'UNDEFINED':
unmangled_builtin_no_condition = unmangled_function_if_statements.get(
level, 'NO_CONDITION', function_name)
if unmangled_builtin_no_condition != None and unmangled_builtin_no_condition[
'extension'] == 'UNDEFINED':
# We already have this unmangled name without a condition nor extension on the same level. No need to add a duplicate with a condition.
pass
elif (not unmangled_function_if_statements.has_key(level, condition, function_name)) or extension == 'UNDEFINED':
elif (not unmangled_function_if_statements.has_key(
level, condition, function_name)) or extension == 'UNDEFINED':
# We don't have this unmangled builtin recorded yet or we might replace an unmangled builtin from an extension with one from core.
unmangled_function_if_statements.add_obj(level, condition, function_name, {'hash_matched_code': unmangled_if, 'extension': extension})
unmangled_builtin_declarations.add('constexpr const UnmangledBuiltIn {extension}(TExtension::{extension});'.format(**template_args))
unmangled_function_if_statements.add_obj(level, condition, function_name, {
'hash_matched_code': unmangled_if,
'extension': extension
})
unmangled_builtin_declarations.add(
'constexpr const UnmangledBuiltIn {extension}(TExtension::{extension});'.format(
**template_args))
for function_props in function_variants:
template_args['id'] = id_counter
parameters = get_parameters(function_props)
template_args['unique_name'] = get_unique_identifier_name(template_args['name_with_suffix'], parameters)
template_args['unique_name'] = get_unique_identifier_name(
template_args['name_with_suffix'], parameters)
if template_args['unique_name'] in defined_function_variants:
continue
......@@ -890,7 +906,8 @@ def process_single_function_group(condition, group_name, group, num_type_variant
template_args['param_count'] = len(parameters)
template_args['return_type'] = function_props['returnType'].get_statictype_string()
template_args['mangled_name'] = get_function_mangled_name(function_name, parameters)
template_args['human_readable_name'] = get_function_human_readable_name(template_args['name_with_suffix'], parameters)
template_args['human_readable_name'] = get_function_human_readable_name(
template_args['name_with_suffix'], parameters)
template_args['mangled_name_length'] = len(template_args['mangled_name'])
template_builtin_id_declaration = ' static constexpr const TSymbolUniqueId {human_readable_name} = TSymbolUniqueId({id});'
......@@ -911,19 +928,26 @@ def process_single_function_group(condition, group_name, group, num_type_variant
id_counter += 1
param_template_args['id'] = id_counter
template_builtin_id_declaration = ' static constexpr const TSymbolUniqueId {name_with_suffix} = TSymbolUniqueId({id});'
builtin_id_declarations.append(template_builtin_id_declaration.format(**param_template_args))
builtin_id_declarations.append(
template_builtin_id_declaration.format(**param_template_args))
define_constexpr_variable(param_template_args, variable_declarations)
defined_parameter_names.add(unique_param_name)
parameters_list.append('&BuiltInVariable::kVar_{name_with_suffix}'.format(**param_template_args));
parameters_list.append('&BuiltInVariable::kVar_{name_with_suffix}'.format(
**param_template_args))
template_args['parameters_var_name'] = get_variable_name_to_store_parameters(parameters)
template_args['parameters_var_name'] = get_variable_name_to_store_parameters(
parameters)
if len(parameters) > 0:
template_args['parameters_list'] = ', '.join(parameters_list)
template_parameter_list_declaration = 'constexpr const TVariable *{parameters_var_name}[{param_count}] = {{ {parameters_list} }};'
parameter_declarations[template_args['parameters_var_name']] = template_parameter_list_declaration.format(**template_args)
parameter_declarations[template_args[
'parameters_var_name']] = template_parameter_list_declaration.format(
**template_args)
else:
template_parameter_list_declaration = 'constexpr const TVariable **{parameters_var_name} = nullptr;'
parameter_declarations[template_args['parameters_var_name']] = template_parameter_list_declaration.format(**template_args)
parameter_declarations[template_args[
'parameters_var_name']] = template_parameter_list_declaration.format(
**template_args)
template_function_declaration = 'constexpr const TFunction kFunction_{unique_name}(BuiltInId::{human_readable_name}, BuiltInName::{name_with_suffix}, TExtension::{extension}, BuiltInParameters::{parameters_var_name}, {param_count}, {return_type}, EOp{op}, {known_to_not_have_side_effects});'
function_declarations.append(template_function_declaration.format(**template_args))
......@@ -932,8 +956,9 @@ def process_single_function_group(condition, group_name, group, num_type_variant
# name and hash, then we can only check the mangled name length and the function name
# instead of checking the whole mangled name.
template_mangled_if = ''
if mangled_name_hash_can_collide_with_different_parameters(template_args, num_type_variants,
ttype_mangled_name_variants, script_generated_hash_tests):
if mangled_name_hash_can_collide_with_different_parameters(
template_args, num_type_variants, ttype_mangled_name_variants,
script_generated_hash_tests):
template_mangled_name_declaration = 'constexpr const ImmutableString {unique_name}("{mangled_name}");'
name_declarations.add(template_mangled_name_declaration.format(**template_args))
template_mangled_if = """if (name == BuiltInName::{unique_name})
......@@ -947,15 +972,18 @@ def process_single_function_group(condition, group_name, group, num_type_variant
return &BuiltInFunction::kFunction_{unique_name};
}}"""
mangled_if = template_mangled_if.format(**template_args)
get_builtin_if_statements.add_obj(level, condition, template_args['mangled_name'], {'hash_matched_code': mangled_if})
get_builtin_if_statements.add_obj(level, condition, template_args['mangled_name'],
{'hash_matched_code': mangled_if})
id_counter += 1
def process_function_group(group_name, group, num_type_variants, parameter_declarations, ttype_mangled_name_variants,
def process_function_group(
group_name, group, num_type_variants, parameter_declarations, ttype_mangled_name_variants,
name_declarations, unmangled_function_if_statements, unmangled_builtin_declarations,
defined_function_variants, builtin_id_declarations, builtin_id_definitions, defined_parameter_names,
variable_declarations, function_declarations, script_generated_hash_tests, get_builtin_if_statements,
is_in_group_definitions):
defined_function_variants, builtin_id_declarations, builtin_id_definitions,
defined_parameter_names, variable_declarations, function_declarations,
script_generated_hash_tests, get_builtin_if_statements, is_in_group_definitions):
global id_counter
first_id = id_counter
......@@ -963,25 +991,25 @@ def process_function_group(group_name, group, num_type_variants, parameter_decla
if 'condition' in group:
condition = group['condition']
process_single_function_group(condition, group_name, group, num_type_variants, parameter_declarations,
ttype_mangled_name_variants, name_declarations, unmangled_function_if_statements, unmangled_builtin_declarations,
defined_function_variants, builtin_id_declarations, builtin_id_definitions, defined_parameter_names,
variable_declarations, function_declarations, script_generated_hash_tests, get_builtin_if_statements)
process_single_function_group(
condition, group_name, group, num_type_variants, parameter_declarations,
ttype_mangled_name_variants, name_declarations, unmangled_function_if_statements,
unmangled_builtin_declarations, defined_function_variants, builtin_id_declarations,
builtin_id_definitions, defined_parameter_names, variable_declarations,
function_declarations, script_generated_hash_tests, get_builtin_if_statements)
if 'subgroups' in group:
for subgroup_name, subgroup in group['subgroups'].iteritems():
process_function_group(group_name + subgroup_name, subgroup, num_type_variants, parameter_declarations,
process_function_group(
group_name + subgroup_name, subgroup, num_type_variants, parameter_declarations,
ttype_mangled_name_variants, name_declarations, unmangled_function_if_statements,
unmangled_builtin_declarations, defined_function_variants, builtin_id_declarations,
builtin_id_definitions, defined_parameter_names, variable_declarations, function_declarations,
script_generated_hash_tests, get_builtin_if_statements, is_in_group_definitions)
builtin_id_definitions, defined_parameter_names, variable_declarations,
function_declarations, script_generated_hash_tests, get_builtin_if_statements,
is_in_group_definitions)
if 'queryFunction' in group:
template_args = {
'first_id': first_id,
'last_id': id_counter - 1,
'group_name': group_name
}
template_args = {'first_id': first_id, 'last_id': id_counter - 1, 'group_name': group_name}
template_is_in_group_definition = """bool is{group_name}(const TFunction *func)
{{
int id = func->uniqueId().get();
......@@ -989,11 +1017,13 @@ def process_function_group(group_name, group, num_type_variants, parameter_decla
}}"""
is_in_group_definitions.append(template_is_in_group_definition.format(**template_args))
def prune_parameters_arrays(parameter_declarations, function_declarations):
# We can share parameters arrays between functions in case one array is a subarray of another.
parameter_variable_name_replacements = {}
used_param_variable_names = set()
for param_variable_name, param_declaration in sorted(parameter_declarations.iteritems(), key=lambda item: -len(item[0])):
for param_variable_name, param_declaration in sorted(
parameter_declarations.iteritems(), key=lambda item: -len(item[0])):
replaced = False
for used in used_param_variable_names:
if used.startswith(param_variable_name):
......@@ -1005,12 +1035,19 @@ def prune_parameters_arrays(parameter_declarations, function_declarations):
for i in xrange(len(function_declarations)):
for replaced, replacement in parameter_variable_name_replacements.iteritems():
function_declarations[i] = function_declarations[i].replace('BuiltInParameters::' + replaced + ',', 'BuiltInParameters::' + replacement + ',')
function_declarations[i] = function_declarations[i].replace(
'BuiltInParameters::' + replaced + ',', 'BuiltInParameters::' + replacement + ',')
return [
value for key, value in parameter_declarations.iteritems()
if key in used_param_variable_names
]
return [value for key, value in parameter_declarations.iteritems() if key in used_param_variable_names]
def process_single_variable_group(condition, group_name, group, builtin_id_declarations, builtin_id_definitions, name_declarations,
init_member_variables, get_variable_declarations, get_builtin_if_statements, declare_member_variables, variable_declarations,
def process_single_variable_group(condition, group_name, group, builtin_id_declarations,
builtin_id_definitions, name_declarations, init_member_variables,
get_variable_declarations, get_builtin_if_statements,
declare_member_variables, variable_declarations,
get_variable_definitions, variable_name_count):
global id_counter
if 'variables' not in group:
......@@ -1046,7 +1083,8 @@ def process_single_variable_group(condition, group_name, group, builtin_id_decla
# Handle struct and interface block definitions.
template_args['class'] = props['class']
template_args['fields'] = 'fields_{name_with_suffix}'.format(**template_args)
init_member_variables.append(' TFieldList *{fields} = new TFieldList();'.format(**template_args))
init_member_variables.append(' TFieldList *{fields} = new TFieldList();'.format(
**template_args))
for field_name, field_type in props['fields'].iteritems():
template_args['field_name'] = field_name
template_args['field_type'] = TType(field_type).get_dynamic_type_string()
......@@ -1074,7 +1112,8 @@ def process_single_variable_group(condition, group_name, group, builtin_id_decla
elif 'value' in props:
# Handle variables with constant value, such as gl_MaxDrawBuffers.
if props['value'] != 'resources':
raise Exception('Unrecognized value source in variable properties: ' + str(props['value']))
raise Exception('Unrecognized value source in variable properties: ' +
str(props['value']))
resources_key = variable_name[3:]
if 'valueKey' in props:
resources_key = props['valueKey']
......@@ -1104,14 +1143,16 @@ def process_single_variable_group(condition, group_name, group, builtin_id_decla
is_member = False
template_get_variable_declaration = 'const TVariable *{name_with_suffix}();'
get_variable_declarations.append(template_get_variable_declaration.format(**template_args))
get_variable_declarations.append(
template_get_variable_declaration.format(**template_args))
template_get_variable_definition = """const TVariable *{name_with_suffix}()
{{
return &kVar_{name_with_suffix};
}}
"""
get_variable_definitions.append(template_get_variable_definition.format(**template_args))
get_variable_definitions.append(
template_get_variable_definition.format(**template_args))
if level != 'GLSL_BUILTINS':
template_name_if = """if (name == BuiltInName::{name})
......@@ -1119,15 +1160,20 @@ def process_single_variable_group(condition, group_name, group, builtin_id_decla
return &BuiltInVariable::kVar_{name_with_suffix};
}}"""
name_if = template_name_if.format(**template_args)
get_builtin_if_statements.add_obj(level, condition, template_args['name'], {'hash_matched_code': name_if})
get_builtin_if_statements.add_obj(level, condition, template_args['name'],
{'hash_matched_code': name_if})
if is_member:
get_condition = condition
init_conditionally = (condition != 'NO_CONDITION' and variable_name_count[variable_name] == 1)
init_conditionally = (
condition != 'NO_CONDITION' and variable_name_count[variable_name] == 1)
if init_conditionally:
# Instead of having the condition if statement at lookup, it's cheaper to have it at initialization time.
init_member_variables.append(' if ({condition})\n {{'.format(condition = condition))
template_args['condition_comment'] = '\n // Only initialized if {condition}'.format(condition = condition)
init_member_variables.append(
' if ({condition})\n {{'.format(condition=condition))
template_args[
'condition_comment'] = '\n // Only initialized if {condition}'.format(
condition=condition)
get_condition = 'NO_CONDITION'
else:
template_args['condition_comment'] = ''
......@@ -1136,7 +1182,8 @@ def process_single_variable_group(condition, group_name, group, builtin_id_decla
init_member_variables.append(' }')
template_declare_member_variable = '{class} *mVar_{name_with_suffix} = nullptr;'
declare_member_variables.append(template_declare_member_variable.format(**template_args))
declare_member_variables.append(
template_declare_member_variable.format(**template_args))
if level != 'GLSL_BUILTINS':
template_name_if = """if (name == BuiltInName::{name})
......@@ -1144,10 +1191,12 @@ def process_single_variable_group(condition, group_name, group, builtin_id_decla
return mVar_{name_with_suffix};
}}"""
name_if = template_name_if.format(**template_args)
get_builtin_if_statements.add_obj(level, get_condition, variable_name, {'hash_matched_code': name_if})
get_builtin_if_statements.add_obj(level, get_condition, variable_name,
{'hash_matched_code': name_if})
id_counter += 1
def count_variable_names(group, variable_name_count):
if 'variables' in group:
for name in group['variables'].iterkeys():
......@@ -1159,8 +1208,11 @@ def count_variable_names(group, variable_name_count):
for subgroup_name, subgroup in group['subgroups'].iteritems():
count_variable_names(subgroup, variable_name_count)
def process_variable_group(parent_condition, group_name, group, builtin_id_declarations, builtin_id_definitions, name_declarations,
init_member_variables, get_variable_declarations, get_builtin_if_statements, declare_member_variables, variable_declarations,
def process_variable_group(parent_condition, group_name, group, builtin_id_declarations,
builtin_id_definitions, name_declarations, init_member_variables,
get_variable_declarations, get_builtin_if_statements,
declare_member_variables, variable_declarations,
get_variable_definitions, variable_name_count):
global id_counter
condition = 'NO_CONDITION'
......@@ -1171,17 +1223,21 @@ def process_variable_group(parent_condition, group_name, group, builtin_id_decla
if condition == 'NO_CONDITION':
condition = parent_condition
else:
condition = '({cond1}) && ({cond2})'.format(cond1 = parent_condition, cond2 = condition)
condition = '({cond1}) && ({cond2})'.format(cond1=parent_condition, cond2=condition)
process_single_variable_group(condition, group_name, group, builtin_id_declarations, builtin_id_definitions, name_declarations,
init_member_variables, get_variable_declarations, get_builtin_if_statements, declare_member_variables, variable_declarations,
process_single_variable_group(condition, group_name, group, builtin_id_declarations,
builtin_id_definitions, name_declarations, init_member_variables,
get_variable_declarations, get_builtin_if_statements,
declare_member_variables, variable_declarations,
get_variable_definitions, variable_name_count)
if 'subgroups' in group:
for subgroup_name, subgroup in group['subgroups'].iteritems():
process_variable_group(condition, subgroup_name, subgroup, builtin_id_declarations, builtin_id_definitions, name_declarations,
init_member_variables, get_variable_declarations, get_builtin_if_statements, declare_member_variables, variable_declarations,
get_variable_definitions, variable_name_count)
process_variable_group(
condition, subgroup_name, subgroup, builtin_id_declarations,
builtin_id_definitions, name_declarations, init_member_variables,
get_variable_declarations, get_builtin_if_statements, declare_member_variables,
variable_declarations, get_variable_definitions, variable_name_count)
def main():
......@@ -1189,7 +1245,10 @@ def main():
set_working_dir()
parser = argparse.ArgumentParser()
parser.add_argument('--dump-intermediate-json', help='Dump parsed function data as a JSON file builtin_functions.json', action="store_true")
parser.add_argument(
'--dump-intermediate-json',
help='Dump parsed function data as a JSON file builtin_functions.json',
action="store_true")
parser.add_argument('auto_script_command', nargs='?', default='')
args = parser.parse_args()
......@@ -1222,7 +1281,6 @@ def main():
return 1
return 0
all_inputs = [os.path.abspath(__file__), variables_json_filename, functions_txt_filename]
# This script takes a while to run since it searches for hash collisions of mangled names. To avoid
# running it unnecessarily, we first check if we've already ran it with the same inputs.
......@@ -1287,17 +1345,19 @@ def main():
defined_function_variants = set()
defined_parameter_names = set()
parsed_functions = get_parsed_functions(functions_txt_filename)
if args.dump_intermediate_json:
with open('builtin_functions.json', 'w') as outfile:
def serialize_obj(obj):
if isinstance(obj, TType):
return obj.data
else:
raise "Cannot serialize to JSON: " + str(obj)
json.dump(parsed_functions, outfile, indent=4, separators=(',', ': '), default=serialize_obj)
json.dump(
parsed_functions, outfile, indent=4, separators=(',', ': '), default=serialize_obj)
with open(variables_json_filename) as f:
parsed_variables = json.load(f, object_pairs_hook=OrderedDict)
......@@ -1311,24 +1371,31 @@ def main():
secondary_sizes = [1, 2, 3, 4]
for primary_size in primary_sizes:
for secondary_size in secondary_sizes:
type = TType({'basic': basic_type, 'primarySize': primary_size, 'secondarySize': secondary_size})
type = TType({
'basic': basic_type,
'primarySize': primary_size,
'secondarySize': secondary_size
})
ttype_mangled_name_variants.append(type.get_mangled_name())
num_type_variants = len(ttype_mangled_name_variants)
# Sanity check for get_mangled_name_variant_hash:
variant_hash = get_mangled_name_variant_hash(hash32("atan("), 3, 4, len("atan(0123"), num_type_variants,
ttype_mangled_name_variants)
mangled_name_hash = mangledNameHash("atan(" + get_parameters_mangled_name_variant(3, 4, len("atan(0123"),
ttype_mangled_name_variants), script_generated_hash_tests)
variant_hash = get_mangled_name_variant_hash(
hash32("atan("), 3, 4, len("atan(0123"), num_type_variants, ttype_mangled_name_variants)
mangled_name_hash = mangledNameHash(
"atan(" + get_parameters_mangled_name_variant(
3, 4, len("atan(0123"), ttype_mangled_name_variants), script_generated_hash_tests)
if variant_hash != mangled_name_hash:
raise Exception("get_mangled_name_variant_hash sanity check failed")
for group_name, group in parsed_functions.iteritems():
process_function_group(group_name, group, num_type_variants, parameter_declarations, ttype_mangled_name_variants,
name_declarations, unmangled_function_if_statements, unmangled_builtin_declarations,
defined_function_variants, builtin_id_declarations, builtin_id_definitions, defined_parameter_names,
variable_declarations, function_declarations, script_generated_hash_tests, get_builtin_if_statements,
process_function_group(
group_name, group, num_type_variants, parameter_declarations,
ttype_mangled_name_variants, name_declarations, unmangled_function_if_statements,
unmangled_builtin_declarations, defined_function_variants, builtin_id_declarations,
builtin_id_definitions, defined_parameter_names, variable_declarations,
function_declarations, script_generated_hash_tests, get_builtin_if_statements,
is_in_group_definitions)
parameter_declarations = prune_parameters_arrays(parameter_declarations, function_declarations)
......@@ -1337,40 +1404,57 @@ def main():
count_variable_names(group, variable_name_count)
for group_name, group in parsed_variables.iteritems():
process_variable_group('NO_CONDITION', group_name, group, builtin_id_declarations, builtin_id_definitions, name_declarations,
init_member_variables, get_variable_declarations, get_builtin_if_statements, declare_member_variables, variable_declarations,
process_variable_group('NO_CONDITION', group_name, group, builtin_id_declarations,
builtin_id_definitions, name_declarations, init_member_variables,
get_variable_declarations, get_builtin_if_statements,
declare_member_variables, variable_declarations,
get_variable_definitions, variable_name_count)
output_strings = {
'script_name': os.path.basename(__file__),
'copyright_year': date.today().year,
'builtin_id_declarations': '\n'.join(builtin_id_declarations),
'builtin_id_definitions': '\n'.join(builtin_id_definitions),
'last_builtin_id': id_counter - 1,
'name_declarations': '\n'.join(sorted(list(name_declarations))),
'function_data_source_name': functions_txt_filename,
'function_declarations': '\n'.join(function_declarations),
'parameter_declarations': '\n'.join(sorted(parameter_declarations)),
'is_in_group_definitions': '\n'.join(is_in_group_definitions),
'variable_data_source_name': variables_json_filename,
'variable_declarations': '\n'.join(sorted(variable_declarations)),
'get_variable_declarations': '\n'.join(sorted(get_variable_declarations)),
'get_variable_definitions': '\n'.join(sorted(get_variable_definitions)),
'unmangled_builtin_declarations': '\n'.join(sorted(unmangled_builtin_declarations)),
'declare_member_variables': '\n'.join(declare_member_variables),
'init_member_variables': '\n'.join(init_member_variables),
'get_unmangled_builtin': unmangled_function_if_statements.get_switch_code(script_generated_hash_tests),
'get_builtin': get_builtin_if_statements.get_switch_code(script_generated_hash_tests),
'max_unmangled_name_length': unmangled_function_if_statements.get_max_name_length(),
'max_mangled_name_length': get_builtin_if_statements.get_max_name_length(),
'script_generated_hash_tests': '\n'.join(script_generated_hash_tests.iterkeys())
'script_name':
os.path.basename(__file__),
'copyright_year':
date.today().year,
'builtin_id_declarations':
'\n'.join(builtin_id_declarations),
'builtin_id_definitions':
'\n'.join(builtin_id_definitions),
'last_builtin_id':
id_counter - 1,
'name_declarations':
'\n'.join(sorted(list(name_declarations))),
'function_data_source_name':
functions_txt_filename,
'function_declarations':
'\n'.join(function_declarations),
'parameter_declarations':
'\n'.join(sorted(parameter_declarations)),
'is_in_group_definitions':
'\n'.join(is_in_group_definitions),
'variable_data_source_name':
variables_json_filename,
'variable_declarations':
'\n'.join(sorted(variable_declarations)),
'get_variable_declarations':
'\n'.join(sorted(get_variable_declarations)),
'get_variable_definitions':
'\n'.join(sorted(get_variable_definitions)),
'unmangled_builtin_declarations':
'\n'.join(sorted(unmangled_builtin_declarations)),
'declare_member_variables':
'\n'.join(declare_member_variables),
'init_member_variables':
'\n'.join(init_member_variables),
'get_unmangled_builtin':
unmangled_function_if_statements.get_switch_code(script_generated_hash_tests),
'get_builtin':
get_builtin_if_statements.get_switch_code(script_generated_hash_tests),
'max_unmangled_name_length':
unmangled_function_if_statements.get_max_name_length(),
'max_mangled_name_length':
get_builtin_if_statements.get_max_name_length(),
'script_generated_hash_tests':
'\n'.join(script_generated_hash_tests.iterkeys())
}
with open(test_filename, 'wt') as outfile_cpp:
......
......@@ -60,6 +60,7 @@ const char *FindHLSLFunction(int uniqueId)
}} // namespace sh
"""
def reject_duplicate_keys(pairs):
found_keys = {}
for key, value in pairs:
......@@ -69,12 +70,14 @@ def reject_duplicate_keys(pairs):
found_keys[key] = value
return found_keys
def load_json(path):
with open(path) as map_file:
file_data = map_file.read()
map_file.close()
return json.loads(file_data, object_pairs_hook=reject_duplicate_keys)
def enum_type(arg):
# handle 'argtype argname' and 'out argtype argname'
chunks = arg.split(' ')
......@@ -89,22 +92,24 @@ def enum_type(arg):
return 'UI' + arg_type[2:] + suffix
return arg_type.capitalize() + suffix
def gen_emulated_function(data):
func = ""
if 'comment' in data:
func += "".join([ "// " + line + "\n" for line in data['comment'] ])
func += "".join(["// " + line + "\n" for line in data['comment']])
sig = data['return_type'] + ' ' + data['op'] + '_emu(' + ', '.join(data['args']) + ')'
body = [ sig, '{' ] + [' ' + line for line in data['body']] + ['}']
body = [sig, '{'] + [' ' + line for line in data['body']] + ['}']
func += "{\n"
func += "BuiltInId::" + data['op'] + "_" + "_".join([enum_type(arg) for arg in data['args']]) + ",\n"
func += "BuiltInId::" + data['op'] + "_" + "_".join([enum_type(arg) for arg in data['args']
]) + ",\n"
if 'helper' in data:
func += '"' + '\\n"\n"'.join(data['helper']) + '\\n"\n'
func += '"' + '\\n"\n"'.join(body) + '\\n"\n'
func += "},\n"
return [ func ]
return [func]
def main():
......@@ -133,10 +138,10 @@ def main():
emulated_functions += gen_emulated_function(item)
hlsl_gen = template_emulated_builtin_functions_hlsl.format(
script_name = sys.argv[0],
data_source_name = input_script,
copyright_year = date.today().year,
emulated_functions = "".join(emulated_functions))
script_name=sys.argv[0],
data_source_name=input_script,
copyright_year=date.today().year,
emulated_functions="".join(emulated_functions))
with open(hlsl_fname, 'wt') as f:
f.write(hlsl_gen)
......
......@@ -58,12 +58,13 @@ template_format_case = """ case {texture_format}:
template_simple_case = """ case {key}:
"""
def parse_texture_format_case(texture_format, framebuffer_formats):
framebuffer_format_cases = ""
for framebuffer_format in sorted(framebuffer_formats):
framebuffer_format_cases += template_simple_case.format(key = framebuffer_format)
framebuffer_format_cases += template_simple_case.format(key=framebuffer_format)
return template_format_case.format(
texture_format = texture_format, framebuffer_format_cases = framebuffer_format_cases)
texture_format=texture_format, framebuffer_format_cases=framebuffer_format_cases)
def main():
......@@ -93,7 +94,7 @@ def main():
for texture_format, framebuffer_format in data:
if texture_format not in format_map:
format_map[texture_format] = []
format_map[texture_format] += [ framebuffer_format ]
format_map[texture_format] += [framebuffer_format]
texture_format_cases = ""
......@@ -102,10 +103,10 @@ def main():
with open(out_file_name, 'wt') as out_file:
output_cpp = template_cpp.format(
script_name = sys.argv[0],
data_source_name = data_source_name,
copyright_year = date.today().year,
texture_format_cases = texture_format_cases)
script_name=sys.argv[0],
data_source_name=data_source_name,
copyright_year=date.today().year,
texture_format_cases=texture_format_cases)
out_file.write(output_cpp)
out_file.close()
return 0
......
......@@ -113,16 +113,14 @@ template_es3_combo_type_case = """ case {type}:
def parse_type_case(type, result):
return template_simple_case.format(
key = type, result = result)
return template_simple_case.format(key=type, result=result)
def parse_format_case(format, type_map):
type_cases = ""
for type, internal_format in sorted(type_map.iteritems()):
type_cases += parse_type_case(type, internal_format)
return template_format_case.format(
format = format, type_cases = type_cases)
return template_format_case.format(format=format, type_cases=type_cases)
def main():
......@@ -188,21 +186,20 @@ def main():
internal_format_cases += " case " + internal_format + ":\n"
this_type_cases += template_es3_combo_type_case.format(
type = type, internal_format_cases = internal_format_cases)
type=type, internal_format_cases=internal_format_cases)
es3_combo_cases += template_format_case.format(
format = format, type_cases = this_type_cases)
es3_combo_cases += template_format_case.format(format=format, type_cases=this_type_cases)
with open('format_map_autogen.cpp', 'wt') as out_file:
output_cpp = template_cpp.format(
script_name = sys.argv[0],
data_source_name = input_script,
es3_data_source_name = combo_data_file,
copyright_year = date.today().year,
format_cases = format_cases,
es3_format_cases = es3_format_cases,
es3_type_cases = es3_type_cases,
es3_combo_cases = es3_combo_cases)
script_name=sys.argv[0],
data_source_name=input_script,
es3_data_source_name=combo_data_file,
copyright_year=date.today().year,
format_cases=format_cases,
es3_format_cases=es3_format_cases,
es3_type_cases=es3_type_cases,
es3_combo_cases=es3_combo_cases)
out_file.write(output_cpp)
return 0
......
......@@ -12,9 +12,11 @@ import re
kChannels = "ABDGLRSX"
def get_angle_format_map_abs_path():
return os.path.join(os.path.dirname(os.path.realpath(__file__)), 'angle_format_map.json')
def reject_duplicate_keys(pairs):
found_keys = {}
for key, value in pairs:
......@@ -24,24 +26,29 @@ def reject_duplicate_keys(pairs):
found_keys[key] = value
return found_keys
def load_json(path):
with open(path) as map_file:
return json.loads(map_file.read(), object_pairs_hook=reject_duplicate_keys)
def load_forward_table(path):
pairs = load_json(path)
reject_duplicate_keys(pairs)
return { gl: angle for gl, angle in pairs }
return {gl: angle for gl, angle in pairs}
def load_inverse_table(path):
pairs = load_json(path)
reject_duplicate_keys(pairs)
return { angle: gl for gl, angle in pairs }
return {angle: gl for gl, angle in pairs}
def load_without_override():
map_path = get_angle_format_map_abs_path()
return load_forward_table(map_path)
def load_with_override(override_path):
results = load_without_override()
overrides = load_json(override_path)
......@@ -51,10 +58,12 @@ def load_with_override(override_path):
return results
def get_all_angle_formats():
map_path = get_angle_format_map_abs_path()
return load_inverse_table(map_path).keys()
def get_component_type(format_id):
if "SNORM" in format_id:
return "snorm"
......@@ -83,10 +92,12 @@ def get_component_type(format_id):
else:
raise ValueError("Unknown component type for " + format_id)
def get_channel_tokens(format_id):
r = re.compile(r'([' + kChannels + '][\d]+)')
return filter(r.match, r.split(format_id))
def get_channels(format_id):
channels = ''
tokens = get_channel_tokens(format_id)
......@@ -97,6 +108,7 @@ def get_channels(format_id):
return channels
def get_bits(format_id):
bits = {}
tokens = get_channel_tokens(format_id)
......@@ -106,9 +118,11 @@ def get_bits(format_id):
bits[token[0]] = int(token[1:])
return bits
def get_format_info(format_id):
return get_component_type(format_id), get_bits(format_id), get_channels(format_id)
# TODO(oetuaho): Expand this code so that it could generate the gl format info tables as well.
def gl_format_channels(internal_format):
if internal_format == 'GL_BGR5_A1_ANGLEX':
......@@ -142,6 +156,7 @@ def gl_format_channels(internal_format):
return 's'
return channels_string.lower()
def get_internal_format_initializer(internal_format, format_id):
gl_channels = gl_format_channels(internal_format)
gl_format_no_alpha = gl_channels == 'rgb' or gl_channels == 'l'
......@@ -182,19 +197,22 @@ def get_internal_format_initializer(internal_format, format_id):
elif component_type == 'uint' and bits['R'] == 32:
return 'Initialize4ComponentData<GLuint, 0x00000000, 0x00000000, 0x00000000, 0x00000001>'
else:
raise ValueError('warning: internal format initializer could not be generated and may be needed for ' + internal_format)
raise ValueError(
'warning: internal format initializer could not be generated and may be needed for ' +
internal_format)
def get_vertex_copy_function(src_format, dst_format):
if dst_format == "NONE":
return "nullptr";
return "nullptr"
num_channel = len(get_channel_tokens(src_format))
if num_channel < 1 or num_channel > 4:
return "nullptr";
return "nullptr"
if 'FIXED' in src_format:
assert 'FLOAT' in dst_format, ('get_vertex_copy_function: can only convert fixed to float,'
+ ' not to ' + dst_format)
assert 'FLOAT' in dst_format, (
'get_vertex_copy_function: can only convert fixed to float,' + ' not to ' + dst_format)
return 'Copy32FixedTo32FVertexData<%d, %d>' % (num_channel, num_channel)
sign = ''
......@@ -215,14 +233,14 @@ def get_vertex_copy_function(src_format, dst_format):
sign = 'u'
if base_type is None:
return "nullptr";
return "nullptr"
gl_type = 'GL' + sign + base_type
if src_format == dst_format:
return 'CopyNativeVertexData<%s, %d, %d, 0>' % (gl_type, num_channel, num_channel)
assert 'FLOAT' in dst_format, ('get_vertex_copy_function: can only convert to float,'
+ ' not to ' + dst_format)
assert 'FLOAT' in dst_format, (
'get_vertex_copy_function: can only convert to float,' + ' not to ' + dst_format)
normalized = 'true' if 'NORM' in src_format else 'false'
return "CopyTo32FVertexData<%s, %d, %d, %s>" % (gl_type, num_channel, num_channel, normalized)
......@@ -103,8 +103,7 @@ supported_dimensions = ["2D", "3D", "2DArray"]
# field 2: Name of compiled shader
# field 3: Filename of compiled shader
blitshader_data = [
("RGBAF", "PassthroughRGBA*", "passthroughrgba*11ps.h"),
("BGRAF", "PassthroughRGBA*"),
("RGBAF", "PassthroughRGBA*", "passthroughrgba*11ps.h"), ("BGRAF", "PassthroughRGBA*"),
("RGBF", "PassthroughRGB*", "passthroughrgb*11ps.h"),
("RGF", "PassthroughRG*", "passthroughrg*11ps.h"),
("RF", "PassthroughR*", "passthroughr*11ps.h"),
......@@ -119,53 +118,35 @@ blitshader_data = [
("RGI", "PassthroughRG*I", "passthroughrg*i11ps.h"),
("RUI", "PassthroughR*UI", "passthroughr*ui11ps.h"),
("RI", "PassthroughR*I", "passthroughr*i11ps.h"),
("RGBAF_PREMULTIPLY", "FtoF_PM_RGBA_*",
"multiplyalpha_ftof_pm_rgba_*_ps.h"),
("RGBAF_PREMULTIPLY", "FtoF_PM_RGBA_*", "multiplyalpha_ftof_pm_rgba_*_ps.h"),
("RGBAF_UNMULTIPLY", "FtoF_UM_RGBA_*", "multiplyalpha_ftof_um_rgba_*_ps.h"),
("RGBF_PREMULTIPLY", "FtoF_PM_RGB_*", "multiplyalpha_ftof_pm_rgb_*_ps.h"),
("RGBF_UNMULTIPLY", "FtoF_UM_RGB_*", "multiplyalpha_ftof_um_rgb_*_ps.h"),
("RGBAF_TOUI", "FtoU_PT_RGBA_*", "multiplyalpha_ftou_pt_rgba_*_ps.h"),
("RGBAF_TOUI_PREMULTIPLY", "FtoU_PM_RGBA_*",
"multiplyalpha_ftou_pm_rgba_*_ps.h"),
("RGBAF_TOUI_UNMULTIPLY", "FtoU_UM_RGBA_*",
"multiplyalpha_ftou_um_rgba_*_ps.h"),
("RGBAF_TOUI_PREMULTIPLY", "FtoU_PM_RGBA_*", "multiplyalpha_ftou_pm_rgba_*_ps.h"),
("RGBAF_TOUI_UNMULTIPLY", "FtoU_UM_RGBA_*", "multiplyalpha_ftou_um_rgba_*_ps.h"),
("RGBF_TOUI", "FtoU_PT_RGB_*", "multiplyalpha_ftou_pt_rgb_*_ps.h"),
("RGBF_TOUI_PREMULTIPLY", "FtoU_PM_RGB_*",
"multiplyalpha_ftou_pm_rgb_*_ps.h"),
("RGBF_TOUI_UNMULTIPLY", "FtoU_UM_RGB_*",
"multiplyalpha_ftou_um_rgb_*_ps.h"),
("RGBF_TOUI_PREMULTIPLY", "FtoU_PM_RGB_*", "multiplyalpha_ftou_pm_rgb_*_ps.h"),
("RGBF_TOUI_UNMULTIPLY", "FtoU_UM_RGB_*", "multiplyalpha_ftou_um_rgb_*_ps.h"),
("RGBAF_TOI", "FtoI_PT_RGBA_*", "multiplyalpha_ftoi_pt_rgba_*_ps.h"),
("RGBAF_TOI_PREMULTIPLY", "FtoI_PM_RGBA_*",
"multiplyalpha_ftoi_pm_rgba_*_ps.h"),
("RGBAF_TOI_UNMULTIPLY", "FtoI_UM_RGBA_*",
"multiplyalpha_ftoi_um_rgba_*_ps.h"),
("RGBAF_TOI_PREMULTIPLY", "FtoI_PM_RGBA_*", "multiplyalpha_ftoi_pm_rgba_*_ps.h"),
("RGBAF_TOI_UNMULTIPLY", "FtoI_UM_RGBA_*", "multiplyalpha_ftoi_um_rgba_*_ps.h"),
("RGBF_TOI", "FtoI_PT_RGB_*", "multiplyalpha_ftoi_pt_rgb_*_ps.h"),
("RGBF_TOI_PREMULTIPLY", "FtoI_PM_RGB_*",
"multiplyalpha_ftoi_pm_rgb_*_ps.h"),
("RGBF_TOI_UNMULTIPLY", "FtoI_UM_RGB_*",
"multiplyalpha_ftoi_um_rgb_*_ps.h"),
("LUMAF_PREMULTIPLY", "FtoF_PM_LUMA_*",
"multiplyalpha_ftof_pm_luma_*_ps.h"),
("RGBF_TOI_PREMULTIPLY", "FtoI_PM_RGB_*", "multiplyalpha_ftoi_pm_rgb_*_ps.h"),
("RGBF_TOI_UNMULTIPLY", "FtoI_UM_RGB_*", "multiplyalpha_ftoi_um_rgb_*_ps.h"),
("LUMAF_PREMULTIPLY", "FtoF_PM_LUMA_*", "multiplyalpha_ftof_pm_luma_*_ps.h"),
("LUMAF_UNMULTIPLY", "FtoF_UM_LUMA_*", "multiplyalpha_ftof_um_luma_*_ps.h"),
("LUMAALPHAF_PREMULTIPLY", "FtoF_PM_LUMAALPHA_*",
"multiplyalpha_ftof_pm_lumaalpha_*_ps.h"),
("LUMAALPHAF_UNMULTIPLY", "FtoF_UM_LUMAALPHA_*",
"multiplyalpha_ftof_um_lumaalpha_*_ps.h"),
("LUMAALPHAF_PREMULTIPLY", "FtoF_PM_LUMAALPHA_*", "multiplyalpha_ftof_pm_lumaalpha_*_ps.h"),
("LUMAALPHAF_UNMULTIPLY", "FtoF_UM_LUMAALPHA_*", "multiplyalpha_ftof_um_lumaalpha_*_ps.h"),
("RGBAF_4444", "PassthroughRGBA*_4444", "passthroughrgba*_4444_11ps.h"),
("RGBAF_4444_PREMULTIPLY", "FtoF_PM_RGBA_4444_*",
"multiplyalpha_ftof_pm_rgba_4444_*_ps.h"),
("RGBAF_4444_UNMULTIPLY", "FtoF_UM_RGBA_4444_*",
"multiplyalpha_ftof_um_rgba_4444_*_ps.h"),
("RGBAF_4444_PREMULTIPLY", "FtoF_PM_RGBA_4444_*", "multiplyalpha_ftof_pm_rgba_4444_*_ps.h"),
("RGBAF_4444_UNMULTIPLY", "FtoF_UM_RGBA_4444_*", "multiplyalpha_ftof_um_rgba_4444_*_ps.h"),
("RGBF_565", "PassthroughRGB*_565", "passthroughrgb*_565_11ps.h"),
("RGBF_565_PREMULTIPLY", "FtoF_PM_RGB_565_*",
"multiplyalpha_ftof_pm_rgb_565_*_ps.h"),
("RGBF_565_UNMULTIPLY", "FtoF_UM_RGB_565_*",
"multiplyalpha_ftof_um_rgb_565_*_ps.h"),
("RGBF_565_PREMULTIPLY", "FtoF_PM_RGB_565_*", "multiplyalpha_ftof_pm_rgb_565_*_ps.h"),
("RGBF_565_UNMULTIPLY", "FtoF_UM_RGB_565_*", "multiplyalpha_ftof_um_rgb_565_*_ps.h"),
("RGBAF_5551", "PassthroughRGBA*_5551", "passthroughrgba*_5551_11ps.h"),
("RGBAF_5551_PREMULTIPLY", "FtoF_PM_RGBA_5551_*",
"multiplyalpha_ftof_pm_rgba_5551_*_ps.h"),
("RGBAF_5551_UNMULTIPLY", "FtoF_UM_RGBA_5551_*",
"multiplyalpha_ftof_um_rgba_5551_*_ps.h")
("RGBAF_5551_PREMULTIPLY", "FtoF_PM_RGBA_5551_*", "multiplyalpha_ftof_pm_rgba_5551_*_ps.h"),
("RGBAF_5551_UNMULTIPLY", "FtoF_UM_RGBA_5551_*", "multiplyalpha_ftof_um_rgba_5551_*_ps.h")
]
......@@ -177,8 +158,7 @@ def format_shader_include(dimension, blitshader):
def format_get_blitshader_case(operation):
dimension_cases = []
for dimension in supported_dimensions:
dimension_cases.append(
format_get_blitshader_case_dimension(operation, dimension))
dimension_cases.append(format_get_blitshader_case_dimension(operation, dimension))
return template_get_blitshader_case.format(
get_blitshader_dimension_cases="\n".join([c for c in dimension_cases]),
......@@ -219,8 +199,7 @@ def format_map_blitshader_case(dimension, blitshader):
def format_shader_filename(dimension, blitshader):
return "shaders/compiled/" + blitshader[2].replace("*",
dimension.lower()) + ","
return "shaders/compiled/" + blitshader[2].replace("*", dimension.lower()) + ","
def get_shader_includes():
......@@ -279,8 +258,7 @@ def get_blitshadertype_enums():
# 2D float to int shaders have not been implemented
if dimension == "2D" and blitshader[0].find("TOI") != -1:
continue
blitshaders.append(" BLITSHADER_" + dimension.upper() + "_" +
blitshader[0] + ",")
blitshaders.append(" BLITSHADER_" + dimension.upper() + "_" + blitshader[0] + ",")
blitshaders.append(" BLITSHADER_INVALID")
return blitshaders
......@@ -299,14 +277,14 @@ def get_shader_filenames():
continue
if len(blitshader) == 3:
filenames.append(
(" \"src/libANGLE/renderer/d3d/d3d11/shaders/compiled/{0}\","
).format(blitshader[2].replace("*", dimension.lower())))
(" \"src/libANGLE/renderer/d3d/d3d11/shaders/compiled/{0}\",").format(
blitshader[2].replace("*", dimension.lower())))
return filenames
def write_inc_file(get_blitshaders_case_list, add_blitshader_case_list,
shader_includes, blitshaderop_enums, blitshadertype_enums):
def write_inc_file(get_blitshaders_case_list, add_blitshader_case_list, shader_includes,
blitshaderop_enums, blitshadertype_enums):
content = template_blitshader_source.format(
script_name=os.path.basename(sys.argv[0]),
year=date.today().year,
......@@ -367,11 +345,11 @@ def main():
shader_filenames = get_shader_filenames()
write_inc_file("\n".join([d for d in blitshadertype_cases]), "\n".join(
[c for c in map_blitshader_cases]), "\n".join([i for i in shader_includes]),
"\n".join([e for e in blitshaderop_enums]), "\n".join(
[e for e in blitshadertype_enums]))
[c for c in map_blitshader_cases]), "\n".join([i for i in shader_includes]), "\n".join(
[e for e in blitshaderop_enums]), "\n".join([e for e in blitshadertype_enums]))
write_gni_file("\n".join([s for s in shader_filenames]))
return 0
if __name__ == '__main__':
sys.exit(main())
......@@ -75,13 +75,13 @@ template_undefined_case = """ case DXGI_FORMAT_{dxgi_format}:
break;
"""
def format_case(dxgi_format, result):
return template_format_case.format(
dxgi_format = dxgi_format,
result = result)
return template_format_case.format(dxgi_format=dxgi_format, result=result)
def undefined_case(dxgi_format):
return template_undefined_case.format(dxgi_format = dxgi_format)
return template_undefined_case.format(dxgi_format=dxgi_format)
def main():
......@@ -146,11 +146,11 @@ def main():
with open('dxgi_format_map_autogen.cpp', 'wt') as out_file:
output_cpp = template_cpp.format(
script_name = sys.argv[0],
data_source_name = input_data,
copyright_year = date.today().year,
component_type_cases = component_cases,
format_cases = format_cases)
script_name=sys.argv[0],
data_source_name=input_data,
copyright_year=date.today().year,
component_type_cases=component_cases,
format_cases=format_cases)
out_file.write(output_cpp)
out_file.close()
return 0
......
......@@ -168,6 +168,7 @@ const DXGISupport &GetDXGISupport(DXGI_FORMAT dxgiFormat, D3D_FEATURE_LEVEL feat
}} // namespace rx
"""
def do_format(format_data):
table_data = {'9_3': '', '10_0': '', '10_1': '', '11_0': '', '11_1': ''}
......@@ -270,24 +271,31 @@ def do_format(format_data):
never = ' | '.join(sorted(never_supported))
optional = ' | '.join(sorted(optional_for_fl))
if not always: always = '0'
if not never: never = '0'
if not optional: optional = '0'
if not always:
always = '0'
if not never:
never = '0'
if not optional:
optional = '0'
table_data[feature_level] += ' case ' + format_name + ':\n'
table_data[feature_level] += ' {\n'
table_data[feature_level] += ' static const DXGISupport info(' + always + ', ' + never + ', ' + optional + ');\n'
table_data[
feature_level] += ' static const DXGISupport info(' + always + ', ' + never + ', ' + optional + ');\n'
table_data[feature_level] += ' return info;\n'
table_data[feature_level] += ' }\n'
return table_data
def join_table_data(table_data_1, table_data_2):
return {'9_3': table_data_1['9_3'] + table_data_2['9_3'],
return {
'9_3': table_data_1['9_3'] + table_data_2['9_3'],
'10_0': table_data_1['10_0'] + table_data_2['10_0'],
'10_1': table_data_1['10_1'] + table_data_2['10_1'],
'11_0': table_data_1['11_0'] + table_data_2['11_0'],
'11_1': table_data_1['11_1'] + table_data_2['11_1']}
'11_1': table_data_1['11_1'] + table_data_2['11_1']
}
def main():
......@@ -316,7 +324,8 @@ def main():
for format_data in json_data:
table_data = join_table_data(table_data, do_format(format_data))
out_data = template.format(prefix=macro_prefix,
out_data = template.format(
prefix=macro_prefix,
table_data_9_3=table_data['9_3'],
table_data_10_0=table_data['10_0'],
table_data_10_1=table_data['10_1'],
......
......@@ -69,6 +69,7 @@ const Format &Format::Get(GLenum internalFormat, const Renderer11DeviceCaps &dev
}} // namespace rx
"""
def get_swizzle_format_id(internal_format, angle_format):
angle_format_id = angle_format["formatName"]
if (internal_format == 'GL_NONE') or (angle_format_id == 'NONE'):
......@@ -80,16 +81,20 @@ def get_swizzle_format_id(internal_format, angle_format):
return angle_format['swizzleFormat']
if 'bits' not in angle_format:
raise ValueError('no bits information for determining swizzleformat for format: ' + internal_format)
raise ValueError('no bits information for determining swizzleformat for format: ' +
internal_format)
bits = angle_format['bits']
max_component_bits = max(bits.itervalues())
channels_different = not all([component_bits == bits.itervalues().next() for component_bits in bits.itervalues()])
channels_different = not all(
[component_bits == bits.itervalues().next() for component_bits in bits.itervalues()])
# The format itself can be used for swizzles if it can be accessed as a render target and
# sampled and the bit count for all 4 channels is the same.
if "rtvFormat" in angle_format and "srvFormat" in angle_format and "uavFormat" in angle_format and not channels_different and len(angle_format['channels']) == 4:
return angle_format["glInternalFormat"] if "glInternalFormat" in angle_format else internal_format
if "rtvFormat" in angle_format and "srvFormat" in angle_format and "uavFormat" in angle_format and not channels_different and len(
angle_format['channels']) == 4:
return angle_format[
"glInternalFormat"] if "glInternalFormat" in angle_format else internal_format
b = int(math.ceil(float(max_component_bits) / 8) * 8)
......@@ -102,10 +107,12 @@ def get_swizzle_format_id(internal_format, angle_format):
return 'GL_RGBA16_EXT'
if b == 24:
raise ValueError('unexpected 24-bit format when determining swizzleformat for format: ' + internal_format)
raise ValueError('unexpected 24-bit format when determining swizzleformat for format: ' +
internal_format)
if 'componentType' not in angle_format:
raise ValueError('no component type information for determining swizzleformat for format: ' + internal_format)
raise ValueError('no component type information for determining swizzleformat for format: '
+ internal_format)
component_type = angle_format['componentType']
......@@ -127,10 +134,12 @@ def get_swizzle_format_id(internal_format, angle_format):
if (b == 16):
swizzle += "_EXT"
else:
raise ValueError('could not determine swizzleformat based on componentType for format: ' + internal_format)
raise ValueError('could not determine swizzleformat based on componentType for format: ' +
internal_format)
return swizzle
def get_blit_srv_format(angle_format):
if 'channels' not in angle_format:
return 'DXGI_FORMAT_UNKNOWN'
......@@ -171,6 +180,7 @@ split_format_entry_template = """{space} {condition}
{space} }}
"""
def json_to_table_data(internal_format, format_name, prefix, json):
table_data = ""
......@@ -201,6 +211,7 @@ def json_to_table_data(internal_format, format_name, prefix, json):
else:
return format_entry_template.format(**parsed)
def parse_json_angle_format_case(format_name, angle_format, json_data):
supported_case = {}
unsupported_case = {}
......@@ -227,8 +238,8 @@ def parse_json_angle_format_case(format_name, angle_format, json_data):
unsupported_case[k] = v
if fallback != None:
unsupported_case, _, _ = parse_json_angle_format_case(
fallback, json_data[fallback], json_data)
unsupported_case, _, _ = parse_json_angle_format_case(fallback, json_data[fallback],
json_data)
unsupported_case["formatName"] = fallback
if support_test != None:
......@@ -236,6 +247,7 @@ def parse_json_angle_format_case(format_name, angle_format, json_data):
else:
return supported_case, None, None
def parse_json_into_switch_angle_format_string(json_map, json_data):
table_data = ''
......@@ -259,8 +271,10 @@ def parse_json_into_switch_angle_format_string(json_map, json_data):
if support_test != None:
table_data += " {\n"
table_data += json_to_table_data(internal_format, format_name, "if (" + support_test + ")", supported_case)
table_data += json_to_table_data(internal_format, format_name, "else", unsupported_case)
table_data += json_to_table_data(internal_format, format_name,
"if (" + support_test + ")", supported_case)
table_data += json_to_table_data(internal_format, format_name, "else",
unsupported_case)
table_data += " }\n"
else:
table_data += json_to_table_data(internal_format, format_name, "", supported_case)
......@@ -290,10 +304,10 @@ def main():
angle_format_cases = parse_json_into_switch_angle_format_string(json_map, json_data)
output_cpp = template_texture_format_table_autogen_cpp.format(
script_name = sys.argv[0],
copyright_year = date.today().year,
angle_format_info_cases = angle_format_cases,
data_source_name = data_source_name)
script_name=sys.argv[0],
copyright_year=date.today().year,
angle_format_info_cases=angle_format_cases,
data_source_name=data_source_name)
with open('texture_format_table_autogen.cpp', 'wt') as out_file:
out_file.write(output_cpp)
out_file.close()
......
......@@ -168,7 +168,7 @@ def get_color_read_function(angle_format):
return 'ReadDepthStencil<' + channel_struct + '>'
read_component_type = get_color_read_write_component_type(angle_format)
return 'ReadColor<' + channel_struct + ', '+ read_component_type + '>'
return 'ReadColor<' + channel_struct + ', ' + read_component_type + '>'
def get_color_write_function(angle_format):
......@@ -180,12 +180,13 @@ def get_color_write_function(angle_format):
return 'WriteDepthStencil<' + channel_struct + '>'
write_component_type = get_color_read_write_component_type(angle_format)
return 'WriteColor<' + channel_struct + ', '+ write_component_type + '>'
return 'WriteColor<' + channel_struct + ', ' + write_component_type + '>'
format_entry_template = """ {{ FormatID::{id}, {glInternalFormat}, {fboImplementationInternalFormat}, {mipGenerationFunction}, {fastCopyFunctions}, {colorReadFunction}, {colorWriteFunction}, {namedComponentType}, {R}, {G}, {B}, {A}, {L}, {D}, {S}, {pixelBytes}, {componentAlignmentMask}, {isBlock}, {isFixed} }},
"""
def get_named_component_type(component_type):
if component_type == "snorm":
return "GL_SIGNED_NORMALIZED"
......@@ -283,8 +284,8 @@ def json_to_table_data(format_id, json, angle_to_gl):
sum_of_bits += int(parsed[channel])
pixel_bytes = sum_of_bits / 8
parsed["pixelBytes"] = pixel_bytes
parsed["componentAlignmentMask"] = get_component_alignment_mask(
parsed["channels"], parsed["bits"])
parsed["componentAlignmentMask"] = get_component_alignment_mask(parsed["channels"],
parsed["bits"])
parsed["isBlock"] = "true" if is_block else "false"
parsed["isFixed"] = "true" if "FIXED" in format_id else "false"
......@@ -309,21 +310,20 @@ def gen_enum_string(all_angle):
enum_data += ',\n ' + format_id
return enum_data
case_template = """ case {gl_format}:
return FormatID::{angle_format};
"""
def gen_map_switch_string(gl_to_angle):
switch_data = '';
switch_data = ''
for gl_format in sorted(gl_to_angle.keys()):
angle_format = gl_to_angle[gl_format]
switch_data += case_template.format(
gl_format=gl_format,
angle_format=angle_format)
switch_data += case_template.format(gl_format=gl_format, angle_format=angle_format)
switch_data += " default:\n"
switch_data += " return FormatID::NONE;"
return switch_data;
return switch_data
def main():
......@@ -348,15 +348,14 @@ def main():
json_data = angle_format.load_json(data_source_name)
all_angle = angle_to_gl.keys()
angle_format_cases = parse_angle_format_table(
all_angle, json_data, angle_to_gl)
angle_format_cases = parse_angle_format_table(all_angle, json_data, angle_to_gl)
switch_data = gen_map_switch_string(gl_to_angle)
output_cpp = template_autogen_inl.format(
script_name = sys.argv[0],
copyright_year = date.today().year,
angle_format_info_cases = angle_format_cases,
angle_format_switch = switch_data,
data_source_name = data_source_name)
script_name=sys.argv[0],
copyright_year=date.today().year,
angle_format_info_cases=angle_format_cases,
angle_format_switch=switch_data,
data_source_name=data_source_name)
with open('Format_table_autogen.cpp', 'wt') as out_file:
out_file.write(output_cpp)
out_file.close()
......@@ -364,11 +363,11 @@ def main():
enum_data = gen_enum_string(all_angle)
num_angle_formats = len(all_angle)
output_h = template_autogen_h.format(
script_name = sys.argv[0],
copyright_year = date.today().year,
angle_format_enum = enum_data,
data_source_name = data_source_name,
num_angle_formats = num_angle_formats)
script_name=sys.argv[0],
copyright_year=date.today().year,
angle_format_enum=enum_data,
data_source_name=data_source_name,
num_angle_formats=num_angle_formats)
with open('FormatID_autogen.h', 'wt') as out_file:
out_file.write(output_h)
out_file.close()
......
......@@ -99,12 +99,15 @@ internal_format_param = 'internalFormat'
angle_format_param = 'angleFormat'
angle_format_unknown = 'NONE'
def load_functions_name(internal_format, angle_format):
    """Build the load-function name for a GL/ANGLE format pair.

    The 'GL_' prefix is dropped from internal_format, e.g.
    ('GL_RGBA8', 'R8G8B8A8_UNORM') -> 'RGBA8_to_R8G8B8A8_UNORM'.
    """
    stripped = internal_format[3:]
    return "{0}_to_{1}".format(stripped, angle_format)
def unknown_func_name(internal_format):
    """Name of the fallback load function used when no specific ANGLE format matches."""
    fallback_format = "default"
    return load_functions_name(internal_format, fallback_format)
def get_load_func(func_name, type_functions):
snippet = "LoadImageFunctionInfo " + func_name + "(GLenum type)\n"
snippet += "{\n"
......@@ -123,9 +126,12 @@ def get_load_func(func_name, type_functions):
return snippet
def get_unknown_load_func(angle_to_type_map, internal_format):
assert angle_format_unknown in angle_to_type_map
return get_load_func(unknown_func_name(internal_format), angle_to_type_map[angle_format_unknown])
return get_load_func(
unknown_func_name(internal_format), angle_to_type_map[angle_format_unknown])
def parse_json(json_data):
table_data = ''
......@@ -136,7 +142,8 @@ def parse_json(json_data):
table_data += s + 'case ' + internal_format + ':\n'
do_switch = len(angle_to_type_map) > 1 or angle_to_type_map.keys()[0] != angle_format_unknown
do_switch = len(
angle_to_type_map) > 1 or angle_to_type_map.keys()[0] != angle_format_unknown
if do_switch:
table_data += s + '{\n'
......@@ -186,6 +193,7 @@ def parse_json(json_data):
return table_data, load_functions_data
def main():
# auto_script parameters.
......@@ -205,16 +213,18 @@ def main():
json_data = angle_format.load_json('load_functions_data.json')
switch_data, load_functions_data = parse_json(json_data)
output = template.format(internal_format = internal_format_param,
angle_format = angle_format_param,
switch_data = switch_data,
load_functions_data = load_functions_data,
copyright_year = date.today().year)
output = template.format(
internal_format=internal_format_param,
angle_format=angle_format_param,
switch_data=switch_data,
load_functions_data=load_functions_data,
copyright_year=date.today().year)
with open('load_functions_table_autogen.cpp', 'wt') as out_file:
out_file.write(output)
out_file.close()
return 0
if __name__ == '__main__':
sys.exit(main())
......@@ -19,11 +19,13 @@ os.chdir(os.path.dirname(os.path.abspath(sys.argv[0])))
sys.path.append('..')
import angle_format
def safe_append(the_dict, key, element):
    """Append element to the list stored at the_dict[key], creating the list on first use."""
    the_dict.setdefault(key, []).append(element)
# Template for the header declaration of the dispatch table.
dispatch_table_header_template = """// GENERATED FILE - DO NOT EDIT.
// Generated by {script_name} using data from {data_source_name} and gl.xml.
......@@ -79,12 +81,15 @@ class DispatchTableGL : angle::NonCopyable
#endif // LIBGLESV2_RENDERER_GL_DISPATCH_TABLE_GL_AUTOGEN_H_
"""
def first_lower(text):
    """Return text with its first character lower-cased.

    An empty string passes through unchanged ("" slices are empty, so no IndexError).
    """
    # Parameter renamed from `str` to stop shadowing the builtin; every call site
    # in these scripts passes the argument positionally, so this is compatible.
    return text[:1].lower() + text[1:]
def format_ep_decl(entry_point):
    """Emit the member declaration for one GL entry point in the dispatch-table header."""
    proc_type = " PFNGL" + entry_point.upper() + "PROC "
    member_name = first_lower(entry_point)
    return proc_type + member_name + " = nullptr;"
# Template for the initialization file of the dispatch table.
dispatch_table_source_template = """// GENERATED FILE - DO NOT EDIT.
// Generated by {script_name} using data from {data_source_name} and gl.xml.
......@@ -155,9 +160,11 @@ void DispatchTableGL::initProcsSharedExtensionsNULL(const std::set<std::string>
}} // namespace rx
"""
def format_assign_ep(entry_point, ep):
    """Emit one ASSIGN(...) line binding loader symbol `ep` to the table member."""
    member_name = first_lower(entry_point[2:])
    return ' ASSIGN("{0}", {1});'.format(ep, member_name)
def format_requirements_lines(required, entry_points):
major, minor = required
lines = [' if (version >= gl::Version(' + major + ', ' + minor + '))', ' {']
......@@ -165,12 +172,14 @@ def format_requirements_lines(required, entry_points):
lines += [' }']
return '\n'.join(lines)
def format_extension_requirements_lines(extension, entry_points, api):
    """Emit a guarded block that assigns entry points only when `extension` is present.

    `api` is unused in the body; it is kept for signature parity with callers that
    pass "gl" / "gles2" / "gles2|gl".
    """
    header = ' if (extensions.count("' + extension + '") != 0)'
    assigns = []
    for entry_point, ep in sorted(entry_points):
        assigns.append(format_assign_ep(entry_point, ep))
    return '\n'.join([header, ' {'] + assigns + [' }'])
def assign_null_line(line):
m = re.match(r' ASSIGN\("gl.*", (.+)\);', line)
if m:
......@@ -179,15 +188,19 @@ def assign_null_line(line):
else:
return line
def assign_null(entry):
    """Rewrite every ASSIGN line in a formatted entry to its null-stub form."""
    rewritten = []
    for line in entry.split('\n'):
        rewritten.append(assign_null_line(line))
    return '\n'.join(rewritten)
def nullify(data):
    """Map assign_null over every formatted entry, preserving order."""
    result = []
    for entry in data:
        result.append(assign_null(entry))
    return result
def format_param(param):
    """Flatten an XML <param> element into plain text (tags stripped, text preserved)."""
    text_pieces = param.itertext()
    return "".join(text_pieces)
null_functions_header_template = """// GENERATED FILE - DO NOT EDIT.
// Generated by {script_name} using data from {data_source_name} and gl.xml.
//
......@@ -229,6 +242,7 @@ namespace rx
}} // namespace rx
"""
def main():
# auto_script parameters.
......@@ -269,7 +283,7 @@ def main():
core_removed_eps = []
for core_removed_ep in xml_root.findall('feature/remove'):
assert(core_removed_ep.attrib['profile'] == 'core')
assert (core_removed_ep.attrib['profile'] == 'core')
for command in core_removed_ep.findall('./command'):
core_removed_eps.append(command.attrib['name'])
......@@ -333,15 +347,18 @@ def main():
if not gl_required:
gl_required = reqs
elif entry_point in core_removed_eps:
print('Upgrade ' + entry_point + ' to ' + str(reqs) + ' instead of ' + str(gl_required))
print('Upgrade ' + entry_point + ' to ' + str(reqs) + ' instead of ' +
str(gl_required))
gl_required = reqs
else:
print('Keep ' + entry_point + ' at ' + str(gl_required) + ' instead of ' + str(reqs))
print('Keep ' + entry_point + ' at ' + str(gl_required) +
' instead of ' + str(reqs))
elif api == 'gles2':
if not gles2_required:
gles2_required = reqs
else:
print("Duplicate for " + entry_point + ": " + str(reqs) + " and " + str(gles2_required))
print("Duplicate for " + entry_point + ": " + str(reqs) + " and " +
str(gles2_required))
else:
raise Exception('Bad api type: ' + api)
......@@ -373,7 +390,8 @@ def main():
full_ep = ep
if '_KHR_' in extension:
full_ep += 'KHR'
safe_append(gles2_extension_requirements, extension, (entry_point, full_ep))
safe_append(gles2_extension_requirements, extension,
(entry_point, full_ep))
if not (gl_required or gles2_required or extension):
raise Exception('Entry point ' + entry_point + ' not found in the xml.')
......@@ -386,11 +404,11 @@ def main():
table_data.append("\n".join(formatted))
dispatch_table_header = dispatch_table_header_template.format(
script_name = os.path.basename(sys.argv[0]),
data_source_name = data_source_name,
year = date.today().year,
file_name = dispatch_header_path,
table_data = "\n\n".join(table_data))
script_name=os.path.basename(sys.argv[0]),
data_source_name=data_source_name,
year=date.today().year,
file_name=dispatch_header_path,
table_data="\n\n".join(table_data))
with open(dispatch_header_path, "w") as out:
out.write(dispatch_table_header)
......@@ -401,7 +419,8 @@ def main():
gl_extensions_data = []
for extension, entry_points in sorted(gl_extension_requirements.iteritems()):
gl_extensions_data.append(format_extension_requirements_lines(extension, entry_points, "gl"))
gl_extensions_data.append(
format_extension_requirements_lines(extension, entry_points, "gl"))
gles2_data = []
for gles2_required, entry_points in sorted(gles2_requirements.iteritems()):
......@@ -409,27 +428,29 @@ def main():
gles2_extensions_data = []
for extension, entry_points in sorted(gles2_extension_requirements.iteritems()):
gles2_extensions_data.append(format_extension_requirements_lines(extension, entry_points, "gles2"))
gles2_extensions_data.append(
format_extension_requirements_lines(extension, entry_points, "gles2"))
both_extensions_data = []
for extension, entry_points in sorted(both_extension_requirements.iteritems()):
both_extensions_data.append(format_extension_requirements_lines(extension, entry_points, "gles2|gl"))
both_extensions_data.append(
format_extension_requirements_lines(extension, entry_points, "gles2|gl"))
dispatch_table_source = dispatch_table_source_template.format(
script_name = os.path.basename(sys.argv[0]),
data_source_name = data_source_name,
year = date.today().year,
file_name = dispatch_source_path,
gl_data = "\n\n".join(gl_data),
gl_extensions_data = "\n\n".join(gl_extensions_data),
gles2_data = "\n\n".join(gles2_data),
gles2_extensions_data = "\n\n".join(gles2_extensions_data),
both_extensions_data = "\n\n".join(both_extensions_data),
gl_null_data = "\n\n".join(nullify(gl_data)),
gl_null_extensions_data = "\n\n".join(nullify(gl_extensions_data)),
gles2_null_data = "\n\n".join(nullify(gles2_data)),
gles2_null_extensions_data = "\n\n".join(nullify(gles2_extensions_data)),
both_null_extensions_data = "\n\n".join(nullify(both_extensions_data)))
script_name=os.path.basename(sys.argv[0]),
data_source_name=data_source_name,
year=date.today().year,
file_name=dispatch_source_path,
gl_data="\n\n".join(gl_data),
gl_extensions_data="\n\n".join(gl_extensions_data),
gles2_data="\n\n".join(gles2_data),
gles2_extensions_data="\n\n".join(gles2_extensions_data),
both_extensions_data="\n\n".join(both_extensions_data),
gl_null_data="\n\n".join(nullify(gl_data)),
gl_null_extensions_data="\n\n".join(nullify(gl_extensions_data)),
gles2_null_data="\n\n".join(nullify(gles2_data)),
gles2_null_extensions_data="\n\n".join(nullify(gles2_extensions_data)),
both_null_extensions_data="\n\n".join(nullify(both_extensions_data)))
with open(dispatch_source_path, "w") as out:
out.write(dispatch_table_source)
......@@ -463,21 +484,21 @@ def main():
null_stubs = [command_defs[entry_point] for entry_point in sorted(all_entry_points)]
null_functions_header = null_functions_header_template.format(
script_name = os.path.basename(sys.argv[0]),
data_source_name = data_source_name,
year = date.today().year,
file_name = null_functions_header_path,
table_data = "\n".join(null_decls))
script_name=os.path.basename(sys.argv[0]),
data_source_name=data_source_name,
year=date.today().year,
file_name=null_functions_header_path,
table_data="\n".join(null_decls))
with open(null_functions_header_path, "w") as out:
out.write(null_functions_header)
null_functions_source = null_functions_source_template.format(
script_name = os.path.basename(sys.argv[0]),
data_source_name = data_source_name,
year = date.today().year,
file_name = null_functions_source_path,
table_data = "\n\n".join(null_stubs))
script_name=os.path.basename(sys.argv[0]),
data_source_name=data_source_name,
year=date.today().year,
file_name=null_functions_source_path,
table_data="\n\n".join(null_stubs))
with open(null_functions_source_path, "w") as out:
out.write(null_functions_source)
......
......@@ -75,7 +75,7 @@ image_basic_template = """imageFormatID = {image};
vkImageFormat = {vk_image_format};
imageInitializerFunction = {image_initializer};"""
image_struct_template="{{{image}, {vk_image_format}, {image_initializer}}}"
image_struct_template = "{{{image}, {vk_image_format}, {image_initializer}}}"
image_fallback_template = """{{
static constexpr ImageFormatInitInfo kInfo[] = {{{image_list}}};
......@@ -88,7 +88,7 @@ vkBufferFormatIsPacked = {vk_buffer_format_is_packed};
vertexLoadFunction = {vertex_load_function};
vertexLoadRequiresConversion = {vertex_load_converts};"""
buffer_struct_template="""{{{buffer}, {vk_buffer_format}, {vk_buffer_format_is_packed},
buffer_struct_template = """{{{buffer}, {vk_buffer_format}, {vk_buffer_format_is_packed},
{vertex_load_function}, {vertex_load_converts}}}"""
buffer_fallback_template = """{{
......@@ -106,10 +106,7 @@ def gen_format_case(angle, internal_format, vk_json_data):
vk_overrides = vk_json_data["overrides"]
vk_fallbacks = vk_json_data["fallbacks"]
args = dict(
format_id=angle,
internal_format=internal_format,
image_template="",
buffer_template="")
format_id=angle, internal_format=internal_format, image_template="", buffer_template="")
if ((angle not in vk_map) and (angle not in vk_overrides) and
(angle not in vk_fallbacks)) or angle == 'NONE':
......@@ -136,8 +133,7 @@ def gen_format_case(angle, internal_format, vk_json_data):
buffer="angle::FormatID::" + format,
vk_buffer_format=vk_map[format],
vk_buffer_format_is_packed=is_packed(vk_map[format]),
vertex_load_function=angle_format.get_vertex_copy_function(
angle, format),
vertex_load_function=angle_format.get_vertex_copy_function(angle, format),
vertex_load_converts='false' if angle == format else 'true',
)
......@@ -148,9 +144,7 @@ def gen_format_case(angle, internal_format, vk_json_data):
elif len(images) > 1:
args.update(
image_template=image_fallback_template,
image_list=", ".join(
image_struct_template.format(**image_args(i))
for i in images))
image_list=", ".join(image_struct_template.format(**image_args(i)) for i in images))
buffers = get_formats(angle, "buffer")
if len(buffers) == 1:
......@@ -172,11 +166,7 @@ def main():
# auto_script parameters.
if len(sys.argv) > 1:
inputs = [
'../angle_format.py',
'../angle_format_map.json',
input_file_name
]
inputs = ['../angle_format.py', '../angle_format_map.json', input_file_name]
outputs = [out_file_name]
if sys.argv[1] == 'inputs':
......@@ -190,15 +180,16 @@ def main():
angle_to_gl = angle_format.load_inverse_table(os.path.join('..', 'angle_format_map.json'))
vk_json_data = angle_format.load_json(input_file_name)
vk_cases = [gen_format_case(angle, gl, vk_json_data)
for angle, gl in sorted(angle_to_gl.iteritems())]
vk_cases = [
gen_format_case(angle, gl, vk_json_data) for angle, gl in sorted(angle_to_gl.iteritems())
]
output_cpp = template_table_autogen_cpp.format(
copyright_year = date.today().year,
format_case_data = "\n".join(vk_cases),
script_name = __file__,
out_file_name = out_file_name,
input_file_name = input_file_name)
copyright_year=date.today().year,
format_case_data="\n".join(vk_cases),
script_name=__file__,
out_file_name=out_file_name,
input_file_name=input_file_name)
with open(out_file_name, 'wt') as out_file:
out_file.write(output_cpp)
......
......@@ -157,34 +157,42 @@ angle_vulkan_internal_shaders = [
]
"""
# Gets the constant variable name for a generated shader.
def get_var_name(output, prefix='k'):
    """Turn a shader output file name into a C++ constant name (dots -> underscores)."""
    sanitized = output.replace(".", "_")
    return prefix + sanitized
# Gets the namespace name given to constants generated from shader_file
def get_namespace_name(shader_file):
    """Namespace for a shader's generated constants: the basename with no prefix."""
    base_name = os.path.basename(shader_file)
    return get_var_name(base_name, '')
# Gets the name of the per-shader variation table generated from shader_file
def get_variation_table_name(shader_file, prefix='k'):
    """Name of the table that holds one entry per variation of this shader."""
    base_name = os.path.basename(shader_file)
    return get_var_name(base_name, prefix) + '_shaders'
# Gets the internal ID string for a particular shader.
def get_shader_id(shader):
file = os.path.splitext(os.path.basename(shader))[0]
return file.replace(".", "_")
# Returns the name of the generated SPIR-V file for a shader.
def get_output_path(name):
    """Path of the generated .inc blob under shaders/gen/."""
    inc_file = name + ".inc"
    return os.path.join('shaders', 'gen', inc_file)
# Path of the glslang_validator binary fetched into the tools directory.
def get_linux_glslang_exe_path():
    """Relative path to the Linux glslang_validator executable."""
    return '../../../../tools/glslang/glslang_validator'
def get_win_glslang_exe_path():
    """Windows variant of the glslang_validator path (same location, .exe suffix)."""
    linux_path = get_linux_glslang_exe_path()
    return linux_path + '.exe'
def get_glslang_exe_path():
glslang_exe = get_win_glslang_exe_path() if is_windows else get_linux_glslang_exe_path()
if not os.path.isfile(glslang_exe):
......@@ -197,12 +205,15 @@ def gen_shader_blob_entry(shader):
var_name = get_var_name(os.path.basename(shader))[0:-4]
return "{%s, %s}" % (var_name, "sizeof(%s)" % var_name)
def slash(s):
    """Normalize backslashes to forward slashes (for includes / gni paths)."""
    return '/'.join(s.split('\\'))
def gen_shader_include(shader):
    """Emit the #include line for one generated shader blob."""
    normalized = slash(shader)
    return '#include "libANGLE/renderer/vulkan/%s"' % normalized
def get_shader_variations(shader):
variation_file = shader + '.json'
if not os.path.exists(variation_file):
......@@ -227,11 +238,13 @@ def get_shader_variations(shader):
return (flags, enums)
def get_variation_bits(flags, enums):
    """Bit budget of a variation key: one bit per flag, ceil(log2(count)) per enum.

    Each entry of `enums` is a (name, values) pair; only the value count matters.
    """
    flag_count = len(flags)
    enum_bit_counts = []
    for enum in enums:
        value_count = len(enum[1])
        enum_bit_counts.append((value_count - 1).bit_length())
    return (flag_count, enum_bit_counts)
def next_enum_variation(enums, enum_indices):
"""Loop through indices from [0, 0, ...] to [L0-1, L1-1, ...]
where Li is len(enums[i]). The list can be thought of as a number with many
......@@ -242,24 +255,29 @@ def next_enum_variation(enums, enum_indices):
# if current digit has room, increment it.
if current + 1 < len(enums[i][1]):
enum_indices[i] = current + 1
return True;
return True
# otherwise reset it to 0 and carry to the next digit.
enum_indices[i] = 0
# if this is reached, the number has overflowed and the loop is finished.
return False
# Matches any run of blank (whitespace-only) lines between two newlines.
compact_newlines_regex = re.compile(r"\n\s*\n", re.MULTILINE)


def cleanup_preprocessed_shader(shader_text):
    """Trim preprocessor output and collapse runs of blank lines to a single one."""
    trimmed = shader_text.strip()
    return compact_newlines_regex.sub('\n\n', trimmed)
class CompileQueue:
class AppendPreprocessorOutput:
def __init__(self, shader_file, preprocessor_args, output_path):
# Asynchronously launch the preprocessor job.
self.process = subprocess.Popen(preprocessor_args,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
self.process = subprocess.Popen(
preprocessor_args, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
# Store the file name for output to be appended to.
self.output_path = output_path
# Store info for error description.
......@@ -278,12 +296,12 @@ class CompileQueue:
"Error running preprocessor on " + self.shader_file)
class CompileToSPIRV:
def __init__(self, shader_file, shader_basename, variation_string, output_path,
compile_args, preprocessor_args):
# Asynchronously launch the compile job.
self.process = subprocess.Popen(compile_args,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
self.process = subprocess.Popen(
compile_args, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
# Store info for launching the preprocessor.
self.preprocessor_args = preprocessor_args
self.output_path = output_path
......@@ -296,8 +314,8 @@ class CompileQueue:
(out, err) = self.process.communicate()
if self.process.returncode == 0:
# Insert the preprocessor job in the queue.
queue.append(CompileQueue.AppendPreprocessorOutput(self.shader_file,
self.preprocessor_args,
queue.append(
CompileQueue.AppendPreprocessorOutput(self.shader_file, self.preprocessor_args,
self.output_path))
# If all the output says is the source file name, don't bother printing it.
if out.strip() == self.shader_file:
......@@ -341,8 +359,8 @@ class CompileQueue:
return exception_description
def add_job(self, shader_file, shader_basename, variation_string, output_path,
compile_args, preprocessor_args):
def add_job(self, shader_file, shader_basename, variation_string, output_path, compile_args,
preprocessor_args):
# If the queue is full, wait until there is at least one slot available.
while len(self.queue) >= self.thread_count:
exception = self._wait_first(False)
......@@ -352,9 +370,9 @@ class CompileQueue:
raise Exception(exception)
# Add a compile job
self.queue.append(CompileQueue.CompileToSPIRV(shader_file, shader_basename,
variation_string, output_path,
compile_args, preprocessor_args))
self.queue.append(
CompileQueue.CompileToSPIRV(shader_file, shader_basename, variation_string,
output_path, compile_args, preprocessor_args))
def finish(self):
exception = self._wait_all(False)
......@@ -362,6 +380,7 @@ class CompileQueue:
if exception is not None:
raise Exception(exception)
def compile_variation(glslang_path, compile_queue, shader_file, shader_basename, flags, enums,
flags_active, enum_indices, flags_bits, enum_bits, output_shaders):
......@@ -408,7 +427,9 @@ def compile_variation(glslang_path, compile_queue, shader_file, shader_basename,
compile_queue.add_job(shader_file, shader_basename, variation_string, output_path,
glslang_args, glslang_preprocessor_output_args)
class ShaderAndVariations:
def __init__(self, shader_file):
self.shader_file = shader_file
(self.flags, self.enums) = get_shader_variations(shader_file)
......@@ -438,15 +459,18 @@ def get_variation_definition(shader_and_variation):
enum = enums[e]
enum_name = enum[0]
definition += 'enum %s\n{\n' % enum_name
definition += ''.join(['k%s = 0x%08X,\n' %
(enum[1][v], v << current_bit_start) for v in range(len(enum[1]))])
definition += 'k%sMask = 0x%08X,\n' % (enum_name, ((1 << enum_bits[e]) - 1) << current_bit_start)
definition += ''.join([
'k%s = 0x%08X,\n' % (enum[1][v], v << current_bit_start) for v in range(len(enum[1]))
])
definition += 'k%sMask = 0x%08X,\n' % (enum_name,
((1 << enum_bits[e]) - 1) << current_bit_start)
definition += '};\n'
current_bit_start += enum_bits[e]
definition += '} // namespace %s\n' % namespace_name
return definition
def get_shader_table_h(shader_and_variation):
shader_file = shader_and_variation.shader_file
flags = shader_and_variation.flags
......@@ -477,6 +501,7 @@ def get_shader_table_h(shader_and_variation):
table += '];'
return table
def get_shader_table_cpp(shader_and_variation):
shader_file = shader_and_variation.shader_file
enums = shader_and_variation.enums
......@@ -500,7 +525,7 @@ def get_shader_table_cpp(shader_and_variation):
table = 'constexpr ShaderBlob %s[] = {\n' % table_name
# The last possible variation is every flag enabled and every enum at max
last_variation = ((1 << flags_bits) - 1) | reduce(lambda x, y: x|y, enum_maxes, 0)
last_variation = ((1 << flags_bits) - 1) | reduce(lambda x, y: x | y, enum_maxes, 0)
for variation in range(last_variation + 1):
# if any variation is invalid, output an empty entry
......@@ -513,6 +538,7 @@ def get_shader_table_cpp(shader_and_variation):
table += '};'
return table
def get_get_function_h(shader_and_variation):
shader_file = shader_and_variation.shader_file
......@@ -523,6 +549,7 @@ def get_get_function_h(shader_and_variation):
return definition
def get_get_function_cpp(shader_and_variation):
shader_file = shader_and_variation.shader_file
enums = shader_and_variation.enums
......@@ -539,6 +566,7 @@ def get_get_function_cpp(shader_and_variation):
return definition
def get_destroy_call(shader_and_variation):
shader_file = shader_and_variation.shader_file
......@@ -564,9 +592,11 @@ def main():
shader_files_to_compile = [f for f in shader_files_to_compile if f.find(sys.argv[1]) != -1]
valid_extensions = ['.vert', '.frag', '.comp']
input_shaders = sorted([os.path.join(shaders_dir, shader)
input_shaders = sorted([
os.path.join(shaders_dir, shader)
for shader in os.listdir(shaders_dir)
if any([os.path.splitext(shader)[1] == ext for ext in valid_extensions])])
if any([os.path.splitext(shader)[1] == ext for ext in valid_extensions])
])
if print_inputs:
glslang_binaries = [get_linux_glslang_exe_path(), get_win_glslang_exe_path()]
glslang_binary_hashes = [path + '.sha1' for path in glslang_binaries]
......@@ -582,7 +612,9 @@ def main():
output_shaders = []
input_shaders_and_variations = [ShaderAndVariations(shader_file) for shader_file in input_shaders]
input_shaders_and_variations = [
ShaderAndVariations(shader_file) for shader_file in input_shaders
]
compile_queue = CompileQueue()
......@@ -605,7 +637,8 @@ def main():
# with values in [0, 2^len(flags))
for flags_active in range(1 << len(flags)):
compile_variation(glslang_path, compile_queue, shader_file, output_name, flags,
enums, flags_active, enum_indices, flags_bits, enum_bits, output_shaders)
enums, flags_active, enum_indices, flags_bits, enum_bits,
output_shaders)
if not next_enum_variation(enums, enum_indices):
break
......@@ -622,51 +655,50 @@ def main():
# STEP 2: Consolidate the .inc files into an auto-generated cpp/h library.
with open(out_file_cpp, 'w') as outfile:
includes = "\n".join([gen_shader_include(shader) for shader in output_shaders])
shader_tables_cpp = '\n'.join([get_shader_table_cpp(s)
for s in input_shaders_and_variations])
shader_destroy_calls = '\n'.join([get_destroy_call(s)
for s in input_shaders_and_variations])
shader_get_functions_cpp = '\n'.join([get_get_function_cpp(s)
for s in input_shaders_and_variations])
shader_tables_cpp = '\n'.join(
[get_shader_table_cpp(s) for s in input_shaders_and_variations])
shader_destroy_calls = '\n'.join(
[get_destroy_call(s) for s in input_shaders_and_variations])
shader_get_functions_cpp = '\n'.join(
[get_get_function_cpp(s) for s in input_shaders_and_variations])
outcode = template_shader_library_cpp.format(
script_name = __file__,
copyright_year = date.today().year,
out_file_name = out_file_cpp,
input_file_name = 'shaders/src/*',
internal_shader_includes = includes,
shader_tables_cpp = shader_tables_cpp,
shader_destroy_calls = shader_destroy_calls,
shader_get_functions_cpp = shader_get_functions_cpp)
script_name=__file__,
copyright_year=date.today().year,
out_file_name=out_file_cpp,
input_file_name='shaders/src/*',
internal_shader_includes=includes,
shader_tables_cpp=shader_tables_cpp,
shader_destroy_calls=shader_destroy_calls,
shader_get_functions_cpp=shader_get_functions_cpp)
outfile.write(outcode)
outfile.close()
with open(out_file_h, 'w') as outfile:
shader_variation_definitions = '\n'.join([get_variation_definition(s)
for s in input_shaders_and_variations])
shader_get_functions_h = '\n'.join([get_get_function_h(s)
for s in input_shaders_and_variations])
shader_tables_h = '\n'.join([get_shader_table_h(s)
for s in input_shaders_and_variations])
shader_variation_definitions = '\n'.join(
[get_variation_definition(s) for s in input_shaders_and_variations])
shader_get_functions_h = '\n'.join(
[get_get_function_h(s) for s in input_shaders_and_variations])
shader_tables_h = '\n'.join([get_shader_table_h(s) for s in input_shaders_and_variations])
outcode = template_shader_library_h.format(
script_name = __file__,
copyright_year = date.today().year,
out_file_name = out_file_h,
input_file_name = 'shaders/src/*',
shader_variation_definitions = shader_variation_definitions,
shader_get_functions_h = shader_get_functions_h,
shader_tables_h = shader_tables_h)
script_name=__file__,
copyright_year=date.today().year,
out_file_name=out_file_h,
input_file_name='shaders/src/*',
shader_variation_definitions=shader_variation_definitions,
shader_get_functions_h=shader_get_functions_h,
shader_tables_h=shader_tables_h)
outfile.write(outcode)
outfile.close()
# STEP 3: Create a gni file with the generated files.
with io.open(out_file_gni, 'w', newline='\n') as outfile:
outcode = template_shader_includes_gni.format(
script_name = __file__,
copyright_year = date.today().year,
out_file_name = out_file_gni,
input_file_name = 'shaders/src/*',
shaders_list = ',\n'.join([' "' + slash(shader) + '"' for shader in output_shaders]))
script_name=__file__,
copyright_year=date.today().year,
out_file_name=out_file_gni,
input_file_name='shaders/src/*',
shaders_list=',\n'.join([' "' + slash(shader) + '"' for shader in output_shaders]))
outfile.write(outcode)
outfile.close()
......
......@@ -15,7 +15,6 @@ import angle_format
import xml.etree.ElementTree as etree
import sys, os
template_table_autogen_cpp = """// GENERATED FILE - DO NOT EDIT.
// Generated by {script_name} using data from {input_file_name} and
// the vk.xml file situated at
......@@ -81,9 +80,9 @@ def gen_format_case(index, vk_to_index_to_format_map, vk_map):
buffer_features_str = "0"
return template_format_property.format(
vk_format = vk_format,
optimal_features = optimal_features_str,
buffer_features = buffer_features_str)
vk_format=vk_format,
optimal_features=optimal_features_str,
buffer_features=buffer_features_str)
def main():
......@@ -121,15 +120,18 @@ def main():
vk_format_name_to_index_map[index] = vk_format
vk_map = angle_format.load_json(input_file_name)
vk_cases = [gen_format_case(index, vk_format_name_to_index_map, vk_map) for index in vk_format_name_to_index_map]
vk_cases = [
gen_format_case(index, vk_format_name_to_index_map, vk_map)
for index in vk_format_name_to_index_map
]
output_cpp = template_table_autogen_cpp.format(
copyright_year = date.today().year,
num_formats = num_formats,
format_case_data = "\n,".join(vk_cases),
script_name = __file__,
out_file_name = out_file_name,
input_file_name = input_file_name)
copyright_year=date.today().year,
num_formats=num_formats,
format_case_data="\n,".join(vk_cases),
script_name=__file__,
out_file_name=out_file_name,
input_file_name=input_file_name)
with open(out_file_name, 'wt') as out_file:
out_file.write(output_cpp)
......
......@@ -56,6 +56,7 @@ size_t g_numProcs = {num_procs};
sys.path.append('../libANGLE/renderer')
import angle_format
def main():
# auto_script parameters.
......@@ -82,21 +83,23 @@ def main():
all_functions[function] = "gl::" + function[2:]
# Special handling for EGL_ANGLE_explicit_context extension
if support_egl_ANGLE_explicit_context:
all_functions[function + "ContextANGLE"] = "gl::" + function[2:] + "ContextANGLE"
all_functions[function +
"ContextANGLE"] = "gl::" + function[2:] + "ContextANGLE"
elif function.startswith("egl"):
all_functions[function] = "EGL_" + function[3:]
else:
all_functions[function] = function
proc_data = [(' {"%s", P(%s)}' % (func, angle_func)) for func, angle_func in sorted(all_functions.iteritems())]
proc_data = [(' {"%s", P(%s)}' % (func, angle_func))
for func, angle_func in sorted(all_functions.iteritems())]
with open(out_file_name, 'w') as out_file:
output_cpp = template_cpp.format(
script_name = sys.argv[0],
data_source_name = data_source_name,
copyright_year = date.today().year,
proc_data = ",\n".join(proc_data),
num_procs = len(proc_data))
script_name=sys.argv[0],
data_source_name=data_source_name,
copyright_year=date.today().year,
proc_data=",\n".join(proc_data),
num_procs=len(proc_data))
out_file.write(output_cpp)
out_file.close()
return 0
......
......@@ -2,6 +2,7 @@ import os
import re
import sys
def ReadFileAsLines(filename):
"""Reads a file, removing blank lines and lines that start with #"""
file = open(filename, "r")
......@@ -14,17 +15,20 @@ def ReadFileAsLines(filename):
lines.append(line)
return lines
def GetSuiteName(testName):
    """Return the dEQP suite name, i.e. the part of testName before the first '/'.

    Fixes a latent bug: str.find returns -1 when '/' is absent, so the old
    slice testName[:-1] silently dropped the last character of a
    separator-less name. partition returns the whole string in that case.
    """
    suite, _, _ = testName.partition("/")
    return suite
def GetTestName(testName):
    """Build an identifier-friendly test name from a '/'-separated test path.

    Joins the last two path components with '_', strips the ".test"
    extension, and rewrites any remaining '.' as '_'.
    """
    # Replacement order matters: ".test" must be removed before "." is
    # rewritten, otherwise the extension would turn into "_test". Dicts
    # preserve insertion order, so iterating items() keeps that guarantee.
    # (The duplicated assignment left over from a merge has been removed.)
    replacements = {".test": "", ".": "_"}
    splitTestName = testName.split("/")
    cleanName = splitTestName[-2] + "_" + splitTestName[-1]
    for old, new in replacements.items():
        cleanName = cleanName.replace(old, new)
    return cleanName
def GenerateTests(outFile, testNames):
# Remove duplicate tests
testNames = list(set(testNames))
......@@ -43,8 +47,9 @@ def GenerateTests(outFile, testNames):
outFile.write(" run(\"" + test + "\");\n")
outFile.write("}\n\n")
def GenerateTestList(sourceFile, rootDir):
tests = [ ]
tests = []
fileName, fileExtension = os.path.splitext(sourceFile)
if fileExtension == ".run":
lines = ReadFileAsLines(sourceFile)
......@@ -52,7 +57,8 @@ def GenerateTestList(sourceFile, rootDir):
tests += GenerateTestList(os.path.join(os.path.dirname(sourceFile), line), rootDir)
elif fileExtension == ".test":
tests.append(os.path.relpath(os.path.realpath(sourceFile), rootDir).replace("\\", "/"))
return tests;
return tests
def main(argv):
tests = GenerateTestList(argv[0], argv[1])
......@@ -64,5 +70,6 @@ def main(argv):
return 0
if __name__ == '__main__':
sys.exit(main(sys.argv[1:]))
......@@ -18,12 +18,10 @@ import shutil
import subprocess
import sys
gn_args = """is_clang = true
is_debug = false
angle_enable_vulkan = true"""
is_windows = platform.system() == 'Windows'
is_linux = platform.system() == 'Linux'
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment