Commit d7d42395 by Geoff Lang Committed by Commit Bot

Format all of ANGLE's python code.

BUG=angleproject:3421
Change-Id: I1d7282ac513c046de5d8ed87f7789290780d30a6
Reviewed-on: https://chromium-review.googlesource.com/c/angle/angle/+/1595440
Reviewed-by: Jamie Madill <jmadill@chromium.org>
Commit-Queue: Geoff Lang <geofflang@chromium.org>
parent 8ba78da0
[style]
based_on_style = chromium
column_limit = 99
indent_width = 4
# Copyright 2019 The ANGLE Project Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Top-level presubmit script for code generation.
See http://dev.chromium.org/developers/how-tos/depottools/presubmit-scripts
......@@ -10,11 +9,9 @@ for more details on the presubmit API built into depot_tools.
from subprocess import call
# Fragment of a regular expression that matches C++ and Objective-C++ implementation files.
_IMPLEMENTATION_EXTENSIONS = r'\.(cc|cpp|cxx|mm)$'
# Fragment of a regular expression that matches C++ and Objective-C++ header files.
_HEADER_EXTENSIONS = r'\.(h|hpp|hxx)$'
......@@ -23,8 +20,10 @@ def _CheckCodeGeneration(input_api, output_api):
class Msg(output_api.PresubmitError):
"""Specialized error message"""
def __init__(self, message):
super(output_api.PresubmitError, self).__init__(message,
super(output_api.PresubmitError, self).__init__(
message,
long_text='Please ensure your ANGLE repositiory is synced to tip-of-tree\n'
'and you have an up-to-date checkout of all ANGLE dependencies.\n'
'If you are using ANGLE inside Chromium you may need to bootstrap ANGLE \n'
......@@ -34,11 +33,7 @@ def _CheckCodeGeneration(input_api, output_api):
'scripts/run_code_generation.py')
cmd_name = 'run_code_generation'
cmd = [input_api.python_executable, code_gen_path, '--verify-no-dirty']
test_cmd = input_api.Command(
name=cmd_name,
cmd=cmd,
kwargs={},
message=Msg)
test_cmd = input_api.Command(name=cmd_name, cmd=cmd, kwargs={}, message=Msg)
if input_api.verbose:
print('Running ' + cmd_name)
return input_api.RunTests([test_cmd])
......@@ -52,8 +47,7 @@ def _CheckNewHeaderWithoutGnChange(input_api, output_api):
"""
def headers(f):
return input_api.FilterSourceFile(
f, white_list=(r'.+%s' % _HEADER_EXTENSIONS, ))
return input_api.FilterSourceFile(f, white_list=(r'.+%s' % _HEADER_EXTENSIONS,))
new_headers = []
for f in input_api.AffectedSourceFiles(headers):
......@@ -62,7 +56,7 @@ def _CheckNewHeaderWithoutGnChange(input_api, output_api):
new_headers.append(f.LocalPath())
def gn_files(f):
return input_api.FilterSourceFile(f, white_list=(r'.+\.gn', ))
return input_api.FilterSourceFile(f, white_list=(r'.+\.gn',))
all_gn_changed_contents = ''
for f in input_api.AffectedSourceFiles(gn_files):
......@@ -76,35 +70,32 @@ def _CheckNewHeaderWithoutGnChange(input_api, output_api):
problems.append(header)
if problems:
return [output_api.PresubmitPromptWarning(
'Missing GN changes for new header files', items=sorted(problems),
return [
output_api.PresubmitPromptWarning(
'Missing GN changes for new header files',
items=sorted(problems),
long_text='Please double check whether newly added header files need '
'corresponding changes in gn or gni files.\nThis checking is only a '
'heuristic. Run build/check_gn_headers.py to be precise.\n'
'Read https://crbug.com/661774 for more info.')]
'Read https://crbug.com/661774 for more info.')
]
return []
def CheckChangeOnUpload(input_api, output_api):
    """Presubmit checks run when a change is uploaded for review.

    Fix: this span contained both the pre- and post-format version of the
    same canned-check calls (diff residue), which is not valid Python;
    deduplicated to a single copy of each check.
    """
    results = []
    results.extend(_CheckCodeGeneration(input_api, output_api))
    results.extend(input_api.canned_checks.CheckChangeHasBugField(input_api, output_api))
    results.extend(input_api.canned_checks.CheckChangeHasDescription(input_api, output_api))
    results.extend(_CheckNewHeaderWithoutGnChange(input_api, output_api))
    results.extend(input_api.canned_checks.CheckPatchFormatted(input_api, output_api))
    return results
def CheckChangeOnCommit(input_api, output_api):
    """Presubmit checks run when a change is committed.

    Fix: deduplicated the old/new formatted versions of the same calls
    (diff residue) so the function is valid Python again.
    """
    results = []
    results.extend(_CheckCodeGeneration(input_api, output_api))
    results.extend(input_api.canned_checks.CheckPatchFormatted(input_api, output_api))
    results.extend(input_api.canned_checks.CheckChangeHasBugField(input_api, output_api))
    results.extend(input_api.canned_checks.CheckChangeHasDescription(input_api, output_api))
    return results
......@@ -16,22 +16,15 @@ import sys
def main():
parser = argparse.ArgumentParser(description=__doc__)
parser.add_argument(
'--objcopy',
required=True,
help='The objcopy binary to run',
metavar='PATH')
parser.add_argument(
'--nm', required=True, help='The nm binary to run', metavar='PATH')
'--objcopy', required=True, help='The objcopy binary to run', metavar='PATH')
parser.add_argument('--nm', required=True, help='The nm binary to run', metavar='PATH')
parser.add_argument(
'--sofile',
required=True,
help='Shared object file produced by linking command',
metavar='FILE')
parser.add_argument(
'--output',
required=True,
help='Final output shared object file',
metavar='FILE')
'--output', required=True, help='Final output shared object file', metavar='FILE')
parser.add_argument(
'--unstrippedsofile',
required=True,
......@@ -48,20 +41,16 @@ def main():
objcopy_cmd.append(args.output + '.debug')
result = subprocess.call(objcopy_cmd)
nm_cmd = subprocess.Popen(
[args.nm, args.unstrippedsofile, '--format=posix', '--defined-only'],
nm_cmd = subprocess.Popen([args.nm, args.unstrippedsofile, '--format=posix', '--defined-only'],
stdout=subprocess.PIPE)
awk_cmd = subprocess.Popen(['awk', '{ print $1}'],
stdin=nm_cmd.stdout,
stdout=subprocess.PIPE)
awk_cmd = subprocess.Popen(['awk', '{ print $1}'], stdin=nm_cmd.stdout, stdout=subprocess.PIPE)
dynsym_out = open(args.output + '.dynsyms', 'w')
sort_cmd = subprocess.Popen(['sort'], stdin=awk_cmd.stdout, stdout=dynsym_out)
dynsym_out.close()
nm_cmd = subprocess.Popen(
[args.nm, args.unstrippedsofile, '--format=posix', '--defined-only'],
nm_cmd = subprocess.Popen([args.nm, args.unstrippedsofile, '--format=posix', '--defined-only'],
stdout=subprocess.PIPE)
awk_cmd = subprocess.Popen(
......@@ -70,9 +59,7 @@ def main():
stdout=subprocess.PIPE)
funcsyms_out = open(args.output + '.funcsyms', 'w')
sort_cmd = subprocess.Popen(['sort'],
stdin=awk_cmd.stdout,
stdout=funcsyms_out)
sort_cmd = subprocess.Popen(['sort'], stdin=awk_cmd.stdout, stdout=funcsyms_out)
funcsyms_out.close()
keep_symbols = open(args.output + '.keep_symbols', 'w')
......@@ -86,15 +73,14 @@ def main():
keep_symbols.close()
objcopy_cmd = [
args.objcopy, '--rename-section', '.debug_frame=saved_debug_frame',
args.output + '.debug', args.output + ".mini_debuginfo"
args.objcopy, '--rename-section', '.debug_frame=saved_debug_frame', args.output + '.debug',
args.output + ".mini_debuginfo"
]
subprocess.check_call(objcopy_cmd)
objcopy_cmd = [
args.objcopy, '-S', '--remove-section', '.gdb_index', '--remove-section',
'.comment', '--keep-symbols=' + args.output + '.keep_symbols',
args.output + '.mini_debuginfo'
args.objcopy, '-S', '--remove-section', '.gdb_index', '--remove-section', '.comment',
'--keep-symbols=' + args.output + '.keep_symbols', args.output + '.mini_debuginfo'
]
subprocess.check_call(objcopy_cmd)
......@@ -108,16 +94,16 @@ def main():
subprocess.check_call(xz_cmd)
objcopy_cmd = [
args.objcopy, '--add-section',
'.gnu_debugdata=' + args.output + '.mini_debuginfo.xz', args.output
args.objcopy, '--add-section', '.gnu_debugdata=' + args.output + '.mini_debuginfo.xz',
args.output
]
subprocess.check_call(objcopy_cmd)
# Clean out scratch files
rm_cmd = [
'rm', '-f', args.output + '.dynsyms', args.output + '.funcsyms',
args.output + '.keep_symbols', args.output + '.debug',
args.output + '.mini_debuginfo', args.output + '.mini_debuginfo.xz'
args.output + '.keep_symbols', args.output + '.debug', args.output + '.mini_debuginfo',
args.output + '.mini_debuginfo.xz'
]
result = subprocess.call(rm_cmd)
......
......@@ -49,12 +49,12 @@ bmp_file.close()
# convert to YUV 4:4:4
converted_pixels = bytearray(pixels)
for i in range(0, width * height):
R, = struct.unpack("B", pixels[i*3+2])
G, = struct.unpack("B", pixels[i*3+1])
B, = struct.unpack("B", pixels[i*3])
converted_pixels[i*3] = ((66*R + 129*G + 25*B + 128) >> 8) + 16
converted_pixels[i*3+1] = ((-38*R - 74*G + 112*B + 128) >> 8) + 128
converted_pixels[i*3+2] = ((112*R - 94*G - 18*B + 128) >> 8) + 128
R, = struct.unpack("B", pixels[i * 3 + 2])
G, = struct.unpack("B", pixels[i * 3 + 1])
B, = struct.unpack("B", pixels[i * 3])
converted_pixels[i * 3] = ((66 * R + 129 * G + 25 * B + 128) >> 8) + 16
converted_pixels[i * 3 + 1] = ((-38 * R - 74 * G + 112 * B + 128) >> 8) + 128
converted_pixels[i * 3 + 2] = ((112 * R - 94 * G - 18 * B + 128) >> 8) + 128
# downsample to packed UV buffer
uv_buffer = bytearray(width * height / 2)
......
......@@ -3,7 +3,6 @@
# Copyright 2015 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Generate .gclient file for Angle.
Because gclient won't accept "--name ." use a different name then edit.
......@@ -34,5 +33,6 @@ def main():
print 'created .gclient'
if __name__ == '__main__':
main()
......@@ -9,6 +9,7 @@ from __future__ import print_function
import os, shutil, sys
def main():
if len(sys.argv) != 2:
print("Usage: %s <path>" % sys.argv[0])
......@@ -20,5 +21,6 @@ def main():
print("false")
sys.exit(0)
if __name__ == '__main__':
main()
......@@ -31,8 +31,7 @@ import sys
def get_json_description(gn_out, target_name):
try:
text_desc = subprocess.check_output(
['gn', 'desc', '--format=json', gn_out, target_name])
text_desc = subprocess.check_output(['gn', 'desc', '--format=json', gn_out, target_name])
except subprocess.CalledProcessError as e:
logging.error("e.retcode = %s" % e.returncode)
logging.error("e.cmd = %s" % e.cmd)
......@@ -40,12 +39,12 @@ def get_json_description(gn_out, target_name):
try:
json_out = json.loads(text_desc)
except ValueError:
raise ValueError("Unable to decode JSON\ncmd: %s\noutput:\n%s" %
(subprocess.list2cmdline(['gn', 'desc', '--format=json',
gn_out, target_name]), text_desc))
raise ValueError("Unable to decode JSON\ncmd: %s\noutput:\n%s" % (subprocess.list2cmdline(
['gn', 'desc', '--format=json', gn_out, target_name]), text_desc))
return json_out
def load_json_deps(desc, gn_out, target_name, all_desc, indent=" "):
"""Extracts dependencies from the given target json description
and recursively extracts json descriptions.
......@@ -60,12 +59,13 @@ def load_json_deps(desc, gn_out, target_name, all_desc, indent=" "):
text_descriptions = []
for dep in target.get('deps', []):
if dep not in all_desc:
logging.debug("dep: %s%s" % (indent,dep))
logging.debug("dep: %s%s" % (indent, dep))
new_desc = get_json_description(gn_out, dep)
all_desc[dep] = new_desc[dep]
load_json_deps(new_desc, gn_out, dep, all_desc, indent+" ")
load_json_deps(new_desc, gn_out, dep, all_desc, indent + " ")
else:
logging.debug("dup: %s%s" % (indent,dep))
logging.debug("dup: %s%s" % (indent, dep))
def create_build_description(gn_out, targets):
"""Creates the JSON build description by running GN."""
......@@ -89,8 +89,7 @@ def main():
description='Generate json build information from a GN description.')
parser.add_argument(
'--gn_out',
help=
'GN output config to use (e.g., out/Default or out/Debug.)',
help='GN output config to use (e.g., out/Default or out/Debug.)',
default='out/Default',
)
parser.add_argument(
......@@ -105,7 +104,7 @@ def main():
args = parser.parse_args()
desc = create_build_description(args.gn_out, args.targets)
fh = open(args.output,"w")
fh = open(args.output, "w")
fh.write(json.dumps(desc, indent=4, sort_keys=True))
fh.close()
......
......@@ -12,32 +12,47 @@ import sys, os, pprint, json
from datetime import date
import registry_xml
def write_header(data_source_name,
                 all_cmds,
                 api,
                 preamble,
                 path,
                 lib,
                 ns="",
                 prefix=None,
                 export=""):
    """Generate the <api>_loader_autogen.h header declaring one function
    pointer per command in all_cmds.

    Fixes: deduplicated the interleaved old/new formatted lines (diff
    residue, invalid syntax), replaced `prefix == None` with the idiomatic
    `prefix is None`, and dropped the redundant out.close() — the `with`
    block already closes the file.
    """
    file_name = "%s_loader_autogen.h" % api
    header_path = registry_xml.path_to(path, file_name)

    def pre(cmd):
        # Optionally swap the API prefix of a command name (e.g. egl -> EGL_).
        if prefix is None:
            return cmd
        return prefix + cmd[len(api):]

    with open(header_path, "w") as out:
        var_protos = [
            "%sextern PFN%sPROC %s%s;" % (export, cmd.upper(), ns, pre(cmd)) for cmd in all_cmds
        ]
        loader_header = template_loader_h.format(
            script_name=os.path.basename(sys.argv[0]),
            data_source_name=data_source_name,
            year=date.today().year,
            function_pointers="\n".join(var_protos),
            api_upper=api.upper(),
            api_lower=api,
            preamble=preamble,
            export=export,
            lib=lib.upper())
        out.write(loader_header)
def write_source(data_source_name, all_cmds, api, path, ns = "", prefix = None, export = ""):
def write_source(data_source_name, all_cmds, api, path, ns="", prefix=None, export=""):
file_name = "%s_loader_autogen.cpp" % api
source_path = registry_xml.path_to(path, file_name)
def pre(cmd):
if prefix == None:
return cmd
......@@ -50,17 +65,18 @@ def write_source(data_source_name, all_cmds, api, path, ns = "", prefix = None,
setters = [setter % (ns, pre(cmd), cmd.upper(), pre(cmd)) for cmd in all_cmds]
loader_source = template_loader_cpp.format(
script_name = os.path.basename(sys.argv[0]),
data_source_name = data_source_name,
year = date.today().year,
function_pointers = "\n".join(var_defs),
set_pointers = "\n".join(setters),
api_upper = api.upper(),
api_lower = api)
script_name=os.path.basename(sys.argv[0]),
data_source_name=data_source_name,
year=date.today().year,
function_pointers="\n".join(var_defs),
set_pointers="\n".join(setters),
api_upper=api.upper(),
api_lower=api)
out.write(loader_source)
out.close()
def gen_libegl_loader():
data_source_name = "egl.xml and egl_angle_ext.xml"
......@@ -82,6 +98,7 @@ def gen_libegl_loader():
write_header(data_source_name, all_cmds, "egl", libegl_preamble, path, "LIBEGL", "", "EGL_")
write_source(data_source_name, all_cmds, "egl", path, "", "EGL_")
def gen_gl_loader():
data_source_name = "gl.xml and gl_angle_ext.xml"
......@@ -113,6 +130,7 @@ def gen_gl_loader():
write_header(data_source_name, all_cmds, "gles", util_gles_preamble, path, "UTIL", export=ex)
write_source(data_source_name, all_cmds, "gles", path, export=ex)
def gen_egl_loader():
data_source_name = "egl.xml and egl_angle_ext.xml"
......@@ -135,6 +153,7 @@ def gen_egl_loader():
write_header(data_source_name, all_cmds, "egl", util_egl_preamble, path, "UTIL", export=ex)
write_source(data_source_name, all_cmds, "egl", path, export=ex)
def gen_wgl_loader():
supported_wgl_extensions = [
......@@ -162,6 +181,7 @@ def gen_wgl_loader():
write_header(source, all_cmds, "wgl", util_wgl_preamble, path, "UTIL_WINDOWS", "_")
write_source(source, all_cmds, "wgl", path, "_")
def main():
# Handle inputs/outputs for run_code_generation.py's auto_script
......
......@@ -109,11 +109,13 @@ $ImplMethodDefinitions
} // namespace rx
"""
def generate_impl_declaration(impl_stub):
    """Turn a parsed Impl method stub into an 'override' declaration line.

    NOTE(review): the re.sub pattern strips continuation-line indentation
    so wrapped lines stay aligned after pasting; the exact amount of
    whitespace inside the raw string may have been collapsed by the page
    this was extracted from — confirm against the original file.
    """
    # ensure the wrapped lines are aligned vertically
    temp = re.sub(r'\n ', '\n', impl_stub)
    return temp + ' override;\n'
def generate_impl_definition(impl_stub, typed_impl):
function_signature = impl_stub.strip()
......@@ -150,15 +152,17 @@ def generate_impl_definition(impl_stub, typed_impl):
else:
return_statement = ' return ' + return_type + '();\n'
body = '{\n' + ' UNIMPLEMENTED();\n' + return_statement +'}\n'
body = '{\n' + ' UNIMPLEMENTED();\n' + return_statement + '}\n'
return '\n' + function_signature + body
def get_constructor_args(constructor):
    """Extract the parameter list and the bare argument names from a C++
    constructor declaration such as 'Foo(int a, bool b)'.

    Returns a (params, args) pair, e.g. ('int a, bool b', 'a, b').
    """
    match = re.search(r'\((.*)\)', constructor)
    params = match.group(1)
    arg_names = re.findall(r'[^\w]?(\w+)(?:\,|$)', params)
    return params, ', '.join(arg_names)
def parse_impl_header(base_impl):
impl_h_file_path = base_impl + '.h'
impl_h_file = open(impl_h_file_path, 'r')
......@@ -172,7 +176,7 @@ def parse_impl_header(base_impl):
for line in impl_h_file:
clean_line = line.strip()
match = re.search(r'^(?:explicit )?(' + base_impl + r'\([^\)]*\))', clean_line);
match = re.search(r'^(?:explicit )?(' + base_impl + r'\([^\)]*\))', clean_line)
if match:
constructor = match.group(1)
......@@ -200,6 +204,7 @@ def parse_impl_header(base_impl):
return impl_stubs, private_impl_stubs, constructor
def get_base_class(base_impl):
impl_h_file_path = base_impl + '.h'
with open(impl_h_file_path, 'r') as impl_h_file:
......@@ -209,6 +214,7 @@ def get_base_class(base_impl):
return match.group(1)
return False
for impl_class in impl_classes:
base_impl = impl_class + 'Impl'
......
......@@ -3,7 +3,6 @@
# Copyright 2016 The ANGLE Project Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Generate copies of the Vulkan layers JSON files, with no paths, forcing
Vulkan to use the default search path to look for layers."""
......@@ -60,8 +59,7 @@ def main():
# Update the path.
if not data_key in data:
raise Exception(
"Could not find '%s' key in %s" % (data_key, json_fname))
raise Exception("Could not find '%s' key in %s" % (data_key, json_fname))
# The standard validation layer has no library path.
if 'library_path' in data[data_key]:
......@@ -93,8 +91,7 @@ def main():
# For each *.json.in template files in source dir generate actual json file
# in target dir
if (set(glob_slash(os.path.join(source_dir, '*.json.in'))) !=
set(json_in_files)):
if (set(glob_slash(os.path.join(source_dir, '*.json.in'))) != set(json_in_files)):
print('.json.in list in gn file is out-of-date', file=sys.stderr)
return 1
for json_in_name in json_in_files:
......@@ -112,5 +109,6 @@ def main():
line = line.replace('@VK_VERSION@', '1.1.' + vk_version)
json_out_file.write(line)
if __name__ == '__main__':
sys.exit(main())
......@@ -22,12 +22,14 @@ os.chdir(os.path.join(script_dir, '..'))
out_dir = 'out'
# Generate the VS solutions for any valid directory.
def generate_projects(dirname):
    """Run GN to generate Visual Studio project files for output dir dirname.

    NOTE(review): target_ide and solution_name are module-level globals
    defined earlier in this script (outside this view) — confirm there.
    """
    args = ['gn.bat', 'gen', dirname, '--ide=' + target_ide, '--sln=' + solution_name]
    # Echo the exact command line before invoking it.
    print('Running "' + ' '.join(args) + '"')
    subprocess.call(args)
for potential_dir in os.listdir(out_dir):
path = os.path.join(out_dir, potential_dir)
build_ninja_d = os.path.join(path, 'build.ninja.d')
......
......@@ -29,19 +29,22 @@ if sys.platform == 'win32':
scores = []
# Danke to http://stackoverflow.com/a/27758326
def mean(data):
    """Return the sample arithmetic mean of data.

    Raises ValueError when data is empty.

    Fix: this span contained both the pre- and post-format version of the
    return statement (diff residue); deduplicated to one.
    """
    n = len(data)
    if n < 1:
        raise ValueError('mean requires at least one data point')
    # float() keeps the division exact under Python 2's integer '/'.
    return float(sum(data)) / float(n)  # in Python 2 use sum(data)/float(n)
def sum_of_square_deviations(data, c):
    """Return the sum of squared deviations of sequence data about c.

    Fix: this span contained both the pre- and post-format version of the
    same assignment (diff residue); deduplicated to one.
    """
    ss = sum((float(x) - c)**2 for x in data)
    return ss
def coefficient_of_variation(data):
"""Calculates the population coefficient of variation."""
n = len(data)
......@@ -49,24 +52,28 @@ def coefficient_of_variation(data):
raise ValueError('variance requires at least two data points')
c = mean(data)
ss = sum_of_square_deviations(data, c)
pvar = ss/n # the population variance
pvar = ss / n # the population variance
stddev = (pvar**0.5) # population standard deviation
return stddev / c
def truncated_list(data, n):
    """Compute a truncated list, dropping the n smallest and n largest
    values; n is the truncation size."""
    if n * 2 > len(data):
        raise ValueError('list not large enough to truncate')
    return sorted(data)[n:-n]
def truncated_mean(data, n):
    """Compute a truncated mean, n is truncation size."""
    trimmed = truncated_list(data, n)
    return mean(trimmed)
def truncated_cov(data, n):
    """Compute a truncated coefficient of variation, n is truncation size."""
    trimmed = truncated_list(data, n)
    return coefficient_of_variation(trimmed)
# Find most recent binary
newest_binary = None
newest_mtime = None
......@@ -96,8 +103,12 @@ if len(sys.argv) >= 2:
print('Using test executable: ' + perftests_path)
print('Test name: ' + test_name)
def get_results(metric, extra_args=[]):
process = subprocess.Popen([perftests_path, '--gtest_filter=' + test_name] + extra_args, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
process = subprocess.Popen(
[perftests_path, '--gtest_filter=' + test_name] + extra_args,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
output, err = process.communicate()
m = re.search(r'Running (\d+) tests', output)
......@@ -115,6 +126,7 @@ def get_results(metric, extra_args=[]):
return [float(value) for value in m]
# Calibrate the number of steps
steps = get_results("steps", ["--calibration"])[0]
print("running with %d steps." % steps)
......
......@@ -108,13 +108,17 @@ strip_suffixes = ["ANGLE", "EXT", "KHR", "OES", "CHROMIUM"]
# Toggle generation here.
support_EGL_ANGLE_explicit_context = True
def script_relative(path):
    """Resolve path relative to the directory containing this script."""
    script_dir = os.path.dirname(sys.argv[0])
    return os.path.join(script_dir, path)
def path_to(folder, file):
    """Build a path to file under src/<folder> relative to the repo root."""
    root = script_relative("..")
    return os.path.join(root, "src", folder, file)
class GLCommandNames:
def __init__(self):
self.command_names = {}
......@@ -136,8 +140,10 @@ class GLCommandNames:
# Add the commands that aren't duplicates
self.command_names[version] += commands
class RegistryXML:
def __init__(self, xml_file, ext_file = None):
def __init__(self, xml_file, ext_file=None):
tree = etree.parse(script_relative(xml_file))
self.root = tree.getroot()
if (ext_file):
......
......@@ -18,25 +18,30 @@ root_dir = os.path.abspath(os.path.join(script_dir, '..'))
# auto_script is a standard way for scripts to return their inputs and outputs.
def get_child_script_dirname(script):
    """Return the absolute directory containing the given script."""
    # All script names are relative to ANGLE's root
    absolute = os.path.abspath(os.path.join(root_dir, script))
    return os.path.dirname(absolute)
# Replace all backslashes with forward slashes to be platform independent
def clean_path_slashes(path):
    """Replace every backslash with a forward slash so the path is
    platform independent."""
    return "/".join(path.split("\\"))
# Takes a script file name which is relative to the code generation script's directory and
# changes it to be relative to the angle root directory
def rebase_script_path(script_path, relative_path):
    """Rebase relative_path (given relative to script_path's directory) so
    that it is relative to the angle root directory."""
    base = os.path.dirname(script_path)
    return os.path.relpath(os.path.join(base, relative_path), root_dir)
def grab_from_script(script, param):
    """Run `python <script> <param>` and parse its stdout as a
    comma-separated list of paths, rebased to the ANGLE root with
    forward slashes. Returns [] when the script prints nothing.

    NOTE(review): assumes Python 2 semantics — check_output returns str
    here, so the == '' comparison works; under Python 3 it would be
    bytes. Confirm before porting.
    """
    res = subprocess.check_output(['python', script, param]).strip()
    if res == '':
        return []
    return [clean_path_slashes(rebase_script_path(script, name)) for name in res.split(',')]
def auto_script(script):
# Set the CWD to the script directory.
os.chdir(get_child_script_dirname(script))
......@@ -49,6 +54,7 @@ def auto_script(script):
os.chdir(root_dir)
return info
hash_fname = "run_code_generation_hashes.json"
generators = {
......@@ -183,7 +189,11 @@ def main():
update_output_hashes(name, info['outputs'], new_hashes)
os.chdir(script_dir)
json.dump(new_hashes, open(hash_fname, "w"), indent=2, sort_keys=True,
json.dump(
new_hashes,
open(hash_fname, "w"),
indent=2,
sort_keys=True,
separators=(',', ':\n '))
......
......@@ -4,15 +4,15 @@
"ANGLE format:src/libANGLE/renderer/Format_table_autogen.cpp":
"a4cf00b75621bc058c4a1b341bdf6989",
"ANGLE format:src/libANGLE/renderer/angle_format.py":
"b18ca0fe4835114a4a2f54977b19e798",
"7ce0869650454e1eebc93658d4d96844",
"ANGLE format:src/libANGLE/renderer/angle_format_data.json":
"288d2f350948f8b1928c249234a44b25",
"ANGLE format:src/libANGLE/renderer/angle_format_map.json":
"be9f9bdbdf785dda05920146e8c55dbb",
"ANGLE format:src/libANGLE/renderer/gen_angle_format_table.py":
"3d9f679b65f39ccf19bd7bdf5498f837",
"1443d23f2dc1e9d7dc86ae0d512e6814",
"ANGLE load functions table:src/libANGLE/renderer/gen_load_functions_table.py":
"2dcc3aa0cd700165b588cf53441e243b",
"e65c50e84fc38ad34d0eb0bebb84aab6",
"ANGLE load functions table:src/libANGLE/renderer/load_functions_data.json":
"816be111bf4d1995589350dceb367315",
"ANGLE load functions table:src/libANGLE/renderer/load_functions_table_autogen.cpp":
......@@ -22,11 +22,11 @@
"D3D11 blit shader selection:src/libANGLE/renderer/d3d/d3d11/d3d11_blit_shaders_autogen.gni":
"329dbafc64b0cb578348819198abcfea",
"D3D11 blit shader selection:src/libANGLE/renderer/d3d/d3d11/gen_blit11helper.py":
"38bff72bc17ac25c6b42c98d40c76e20",
"704a82846928d3e21fc0794dff3a08f8",
"D3D11 format:src/libANGLE/renderer/angle_format.py":
"b18ca0fe4835114a4a2f54977b19e798",
"7ce0869650454e1eebc93658d4d96844",
"D3D11 format:src/libANGLE/renderer/d3d/d3d11/gen_texture_format_table.py":
"d3260e0390ad2cd8b07420b7426fad43",
"bf11e3404d4622059b6e9c4e96abf95e",
"D3D11 format:src/libANGLE/renderer/d3d/d3d11/texture_format_data.json":
"d7483ece817e819588f4ca157716dc7b",
"D3D11 format:src/libANGLE/renderer/d3d/d3d11/texture_format_map.json":
......@@ -38,9 +38,9 @@
"DXGI format support:src/libANGLE/renderer/d3d/d3d11/dxgi_support_table_autogen.cpp":
"7ec32ce0ad41450be7493c1db1130e25",
"DXGI format support:src/libANGLE/renderer/d3d/d3d11/gen_dxgi_support_tables.py":
"389a6358534ebad5e232a44944b6123b",
"b464f153f15d60df1c6536adbfafb072",
"DXGI format:src/libANGLE/renderer/angle_format.py":
"b18ca0fe4835114a4a2f54977b19e798",
"7ce0869650454e1eebc93658d4d96844",
"DXGI format:src/libANGLE/renderer/angle_format_map.json":
"be9f9bdbdf785dda05920146e8c55dbb",
"DXGI format:src/libANGLE/renderer/d3d/d3d11/dxgi_format_data.json":
......@@ -48,7 +48,7 @@
"DXGI format:src/libANGLE/renderer/d3d/d3d11/dxgi_format_map_autogen.cpp":
"32b9860e3fd8e87a89ff9a09e848e516",
"DXGI format:src/libANGLE/renderer/d3d/d3d11/gen_dxgi_format_table.py":
"bed2688ca828fc9fd1904408d33ba007",
"411e6064b916d570fa76949820d34a45",
"ESSL static builtins:src/compiler/translator/ParseContext_autogen.h":
"6be7f97ce68aa5ba5ecf30b835bc344d",
"ESSL static builtins:src/compiler/translator/SymbolTable_autogen.cpp":
......@@ -58,11 +58,11 @@
"ESSL static builtins:src/compiler/translator/builtin_function_declarations.txt":
"e5e567406476306ea06984d885be028d",
"ESSL static builtins:src/compiler/translator/builtin_symbols_hash_autogen.txt":
"e60e2185718a035adfd19ab91536fdb7",
"05cd84d02529a1e83c88caa9097dc0ef",
"ESSL static builtins:src/compiler/translator/builtin_variables.json":
"a8f3d76c3c395e8f6a35dd22eb2e8416",
"ESSL static builtins:src/compiler/translator/gen_builtin_symbols.py":
"f056dba2fdeac5a5dbad9d8f7b17f55f",
"5d5467e17ca5ed5bf9938df9a3391e6f",
"ESSL static builtins:src/compiler/translator/tree_util/BuiltIn_autogen.h":
"6df5ab6576da4f364763b581da839b77",
"ESSL static builtins:src/tests/compiler_tests/ImmutableString_test_autogen.cpp":
......@@ -72,13 +72,13 @@
"Emulated HLSL functions:src/compiler/translator/emulated_builtin_functions_hlsl_autogen.cpp":
"1c759ffdd27a86fd8f2d590b2f3dcb56",
"Emulated HLSL functions:src/compiler/translator/gen_emulated_builtin_function_tables.py":
"c24de0c9ce5f201985c852d2b4b12b98",
"5991de4f43758f59d9d042581ae04eab",
"GL copy conversion table:src/libANGLE/es3_copy_conversion_formats.json":
"54608f6f7d9aa7c59a8458ccf3ab9935",
"GL copy conversion table:src/libANGLE/es3_copy_conversion_table_autogen.cpp":
"b20d198cf5e292c43170d4873b381b34",
"GL copy conversion table:src/libANGLE/gen_copy_conversion_table.py":
"92428cef9d97d33ee7063cfa387ccf56",
"827a4a27cea1e11bef18fed9dce6dceb",
"GL format map:src/libANGLE/es3_format_type_combinations.json":
"a232823cd6430f14e28793ccabb968ee",
"GL format map:src/libANGLE/format_map_autogen.cpp":
......@@ -86,7 +86,7 @@
"GL format map:src/libANGLE/format_map_data.json":
"779798d4879e5f73a5a108e3e3fd3095",
"GL format map:src/libANGLE/gen_format_map.py":
"0fd8c00e8b5afb28a5f8b40d9628b9a4",
"dbc855d50826670a9e1a4ff2747e7583",
"GL/EGL entry points:scripts/egl.xml":
"842e24514c4cfe09fba703c17a0fd292",
"GL/EGL entry points:scripts/egl_angle_ext.xml":
......@@ -94,13 +94,13 @@
"GL/EGL entry points:scripts/entry_point_packed_gl_enums.json":
"28238b0f52826c3794eaa1aa940238bf",
"GL/EGL entry points:scripts/generate_entry_points.py":
"83064b09d168c807431cac137b845b5f",
"e7ab486465bf7873d8f06ddd9b204539",
"GL/EGL entry points:scripts/gl.xml":
"b470cb06b06cbbe7adb2c8129ec85708",
"GL/EGL entry points:scripts/gl_angle_ext.xml":
"11e1eb2cbe51ae6e7b8705d3506846d5",
"GL/EGL entry points:scripts/registry_xml.py":
"3b9a36e0be051dc5b4e5162d54749e49",
"169e89c63aad5bde60012b64cccced27",
"GL/EGL entry points:src/libANGLE/Context_gles_1_0_autogen.h":
"fad4ec629b41e9d97ff57a132ad946cb",
"GL/EGL entry points:src/libANGLE/validationES1_autogen.h":
......@@ -144,9 +144,9 @@
"GL/EGL/WGL loader:scripts/egl_angle_ext.xml":
"745534010f31fbe8e1a1fcddce15ed2d",
"GL/EGL/WGL loader:scripts/generate_loader.py":
"475030714c1644b6dfb1f6f08572039d",
"b8c0dc876c8122bdc2447de982bcfad6",
"GL/EGL/WGL loader:scripts/registry_xml.py":
"3b9a36e0be051dc5b4e5162d54749e49",
"169e89c63aad5bde60012b64cccced27",
"GL/EGL/WGL loader:scripts/wgl.xml":
"aa96419c582af2f6673430e2847693f4",
"GL/EGL/WGL loader:src/libEGL/egl_loader_autogen.cpp":
......@@ -168,13 +168,13 @@
"OpenGL dispatch table:scripts/gl.xml":
"b470cb06b06cbbe7adb2c8129ec85708",
"OpenGL dispatch table:src/libANGLE/renderer/angle_format.py":
"b18ca0fe4835114a4a2f54977b19e798",
"7ce0869650454e1eebc93658d4d96844",
"OpenGL dispatch table:src/libANGLE/renderer/gl/DispatchTableGL_autogen.cpp":
"96d06b3acf7826aee1ec813a8fa3a867",
"OpenGL dispatch table:src/libANGLE/renderer/gl/DispatchTableGL_autogen.h":
"ea5eded625b5db7d7b2b7f689c72f14b",
"OpenGL dispatch table:src/libANGLE/renderer/gl/generate_gl_dispatch_table.py":
"7571edb9e610891ed0c95dc496120cff",
"f21314d401e650b4182c4b7d66ac5c9c",
"OpenGL dispatch table:src/libANGLE/renderer/gl/gl_bindings_data.json":
"1afca09d29ed7788c76cbc9bcfb4de0a",
"OpenGL dispatch table:src/libANGLE/renderer/gl/null_functions.cpp":
......@@ -182,17 +182,17 @@
"OpenGL dispatch table:src/libANGLE/renderer/gl/null_functions.h":
"7906751710cab691f9e7365e59b7beed",
"Vulkan format:src/libANGLE/renderer/angle_format.py":
"b18ca0fe4835114a4a2f54977b19e798",
"7ce0869650454e1eebc93658d4d96844",
"Vulkan format:src/libANGLE/renderer/angle_format_map.json":
"be9f9bdbdf785dda05920146e8c55dbb",
"Vulkan format:src/libANGLE/renderer/vulkan/gen_vk_format_table.py":
"c1f153d67fa50e5f6683170c83b610d4",
"c50c9c66b89df7179a688cda42eb85f2",
"Vulkan format:src/libANGLE/renderer/vulkan/vk_format_map.json":
"a6522dc0af17eebfee8b3d6d4723594f",
"Vulkan format:src/libANGLE/renderer/vulkan/vk_format_table_autogen.cpp":
"34dcf4f106f94b03f74c9fd08b22f6ed",
"Vulkan internal shader programs:src/libANGLE/renderer/vulkan/gen_vk_internal_shaders.py":
"1262e5e903c7dad214ded83625f9d3c4",
"4cc82aa02df5371fc2e3d7448a241fc1",
"Vulkan internal shader programs:src/libANGLE/renderer/vulkan/shaders/gen/BufferUtils.comp.00000000.inc":
"caa03e84d757844a099d0e408a162c7e",
"Vulkan internal shader programs:src/libANGLE/renderer/vulkan/shaders/gen/BufferUtils.comp.00000001.inc":
......@@ -354,9 +354,9 @@
"Vulkan internal shader programs:tools/glslang/glslang_validator.sha1":
"ea685e0867a4b3a07ad7e4246ac84e10",
"Vulkan mandatory format support table:src/libANGLE/renderer/angle_format.py":
"b18ca0fe4835114a4a2f54977b19e798",
"7ce0869650454e1eebc93658d4d96844",
"Vulkan mandatory format support table:src/libANGLE/renderer/vulkan/gen_vk_mandatory_format_support_table.py":
"417772416d3082400ce05acc2f209c9f",
"dab4614bbee0c3fbc5b3ccaaa11ba9d3",
"Vulkan mandatory format support table:src/libANGLE/renderer/vulkan/vk_mandatory_format_support_data.json":
"fa2bd54c1bb0ab2cf1d386061a4bc5c5",
"Vulkan mandatory format support table:src/libANGLE/renderer/vulkan/vk_mandatory_format_support_table_autogen.cpp":
......@@ -372,19 +372,19 @@
"packed enum:src/common/PackedGLEnums_autogen.h":
"0766f2bb7874b2b6b4aaed4a6d0ef49e",
"packed enum:src/common/gen_packed_gl_enums.py":
"0cd1a1cb6d5fde8cbac2994db24eb901",
"cc463afc5e37b0f73e119fec59a39420",
"packed enum:src/common/packed_egl_enums.json":
"5f591d220ee53b6e54a27d1523a3ab79",
"packed enum:src/common/packed_gl_enums.json":
"cd2c00958dd8cc546b816dedaf4769d3",
"proc table:src/libGLESv2/gen_proc_table.py":
"20ebe54894d613de42b0b15ca34078d9",
"3be3e8ed7fad58e8cc6fcf348da7b17d",
"proc table:src/libGLESv2/proc_table_autogen.cpp":
"1e89c264adbe7120edb636013383598b",
"proc table:src/libGLESv2/proc_table_data.json":
"04123621b8fd5e6d18f9f3c95c190693",
"uniform type:src/common/gen_uniform_type_table.py":
"fa40444d496ac07cd9dc0cd239e4a499",
"9dd389f2b5793ba635169d61cef2dde9",
"uniform type:src/common/uniform_type_info_autogen.cpp":
"b31d181bc49ad1c3540401a5c874e692"
}
\ No newline at end of file
......@@ -39,10 +39,9 @@ def main():
isolated_file = os.path.join(out_file_path, '%s.isolated' % args.test)
isolate_args = [
'python', isolate_script_path, 'archive',
'-I', 'https://isolateserver.appspot.com',
'-i', isolate_file,
'-s', isolated_file]
'python', isolate_script_path, 'archive', '-I', 'https://isolateserver.appspot.com', '-i',
isolate_file, '-s', isolated_file
]
stdout = subprocess.check_output(isolate_args)
sha = stdout[:40]
......@@ -50,14 +49,11 @@ def main():
swarming_script_path = os.path.join('tools', 'swarming_client', 'swarming.py')
swarmings_args = [
'python', swarming_script_path, 'trigger',
'-S', 'chromium-swarm.appspot.com',
'-I', 'isolateserver.appspot.com',
'-d', 'os', args.os_dim,
'-d', 'pool', args.pool,
'-d', 'gpu', args.gpu_dim,
'--shards=%d' % args.shards,
'-s', sha]
'python', swarming_script_path, 'trigger', '-S', 'chromium-swarm.appspot.com', '-I',
'isolateserver.appspot.com', '-d', 'os', args.os_dim, '-d', 'pool', args.pool, '-d', 'gpu',
args.gpu_dim,
'--shards=%d' % args.shards, '-s', sha
]
if args.extra_args:
swarmings_args += ['--'] + args.extra_args
......
......@@ -39,12 +39,15 @@ if newest_folder is None:
source_folder = newest_folder
# Is a folder a chrome binary directory?
def is_chrome_bin(str):
chrome_file = os.path.join(chrome_folder, str)
return os.path.isdir(chrome_file) and all([char.isdigit() or char == '.' for char in str])
sorted_chrome_bins = sorted([folder for folder in os.listdir(chrome_folder) if is_chrome_bin(folder)], reverse=True)
sorted_chrome_bins = sorted(
[folder for folder in os.listdir(chrome_folder) if is_chrome_bin(folder)], reverse=True)
dest_folder = os.path.join(chrome_folder, sorted_chrome_bins[0])
......
......@@ -14,9 +14,11 @@ usage = """\
Usage: commit_id.py check <angle_dir> - check if git is present
commit_id.py gen <angle_dir> <file_to_write> - generate commit.h"""
def grab_output(command, cwd):
return sp.Popen(command, stdout=sp.PIPE, shell=True, cwd=cwd).communicate()[0].strip()
if len(sys.argv) < 3:
sys.exit(usage)
......
......@@ -9,6 +9,7 @@
#include "common/mathutil.h"
def convertMantissa(i):
if i == 0:
return 0
......@@ -24,6 +25,7 @@ def convertMantissa(i):
else:
return 0x38000000 + ((i - 1024) << 13)
def convertExponent(i):
if i == 0:
return 0
......@@ -38,12 +40,14 @@ def convertExponent(i):
else:
return 0xC7800000
def convertOffset(i):
if i == 0 or i == 32:
return 0
else:
return 1024
print """//
// Copyright (c) 2012 The ANGLE Project Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
......
......@@ -28,6 +28,7 @@ Generators = [
},
]
def load_enums(path):
with open(path) as map_file:
enums_dict = json.loads(map_file.read(), object_pairs_hook=OrderedDict)
......@@ -42,18 +43,21 @@ def load_enums(path):
values.append(EnumValue(value_name, value_gl_name, i))
i += 1
assert(i < 255) # This makes sure enums fit in the uint8_t
assert (i < 255) # This makes sure enums fit in the uint8_t
enums.append(Enum(enum_name, values, i))
enums.sort(key=lambda enum: enum.name)
return enums
def generate_include_guard(path):
return path.replace(".", "_").upper()
def header_name_from_cpp_name(path):
return path.replace(".cpp", ".h")
header_template = """// GENERATED FILE - DO NOT EDIT.
// Generated by {script_name} using data from {data_source_name}.
//
......@@ -99,6 +103,7 @@ template <>
{api_enum_name} To{api_enum_name}({enum_name} from);
"""
def write_header(enums, path_prefix, file_name, data_source_name, namespace, api_enum_name):
content = ['']
......@@ -107,27 +112,27 @@ def write_header(enums, path_prefix, file_name, data_source_name, namespace, api
for value in enum.values:
value_declarations.append(' ' + value.name + ' = ' + str(value.value) + ',')
content.append(enum_declaration_template.format(
enum_name = enum.name,
max_value = str(enum.max_value),
value_declarations = '\n'.join(value_declarations),
api_enum_name = api_enum_name
))
content.append(
enum_declaration_template.format(
enum_name=enum.name,
max_value=str(enum.max_value),
value_declarations='\n'.join(value_declarations),
api_enum_name=api_enum_name))
header = header_template.format(
content = ''.join(content),
copyright_year = datetime.date.today().year,
data_source_name = data_source_name,
script_name = sys.argv[0],
file_name = file_name,
include_guard = generate_include_guard(file_name),
namespace = namespace,
api_enum_name = api_enum_name
)
content=''.join(content),
copyright_year=datetime.date.today().year,
data_source_name=data_source_name,
script_name=sys.argv[0],
file_name=file_name,
include_guard=generate_include_guard(file_name),
namespace=namespace,
api_enum_name=api_enum_name)
with (open(path_prefix + file_name, 'wt')) as f:
f.write(header)
cpp_template = """// GENERATED FILE - DO NOT EDIT.
// Generated by {script_name} using data from {data_source_name}.
//
......@@ -172,6 +177,7 @@ template <>
}}
"""
def write_cpp(enums, path_prefix, file_name, data_source_name, namespace, api_enum_name):
content = ['']
......@@ -180,27 +186,28 @@ def write_cpp(enums, path_prefix, file_name, data_source_name, namespace, api_en
to_glenum_cases = []
for value in enum.values:
qualified_name = enum.name + '::' + value.name
from_glenum_cases.append(' case ' + value.gl_name + ':\n return ' + qualified_name + ';')
to_glenum_cases.append(' case ' + qualified_name + ':\n return ' + value.gl_name + ';')
content.append(enum_implementation_template.format(
enum_name = enum.name,
from_glenum_cases = '\n'.join(from_glenum_cases),
max_value = str(enum.max_value),
to_glenum_cases = '\n'.join(to_glenum_cases),
api_enum_name = api_enum_name
))
from_glenum_cases.append(' case ' + value.gl_name + ':\n return ' +
qualified_name + ';')
to_glenum_cases.append(' case ' + qualified_name + ':\n return ' +
value.gl_name + ';')
content.append(
enum_implementation_template.format(
enum_name=enum.name,
from_glenum_cases='\n'.join(from_glenum_cases),
max_value=str(enum.max_value),
to_glenum_cases='\n'.join(to_glenum_cases),
api_enum_name=api_enum_name))
cpp = cpp_template.format(
content = ''.join(content),
copyright_year = datetime.date.today().year,
data_source_name = data_source_name,
script_name = sys.argv[0],
file_name = file_name,
header_name = header_name_from_cpp_name(file_name),
namespace = namespace,
api_enum_name = api_enum_name
)
content=''.join(content),
copyright_year=datetime.date.today().year,
data_source_name=data_source_name,
script_name=sys.argv[0],
file_name=file_name,
header_name=header_name_from_cpp_name(file_name),
namespace=namespace,
api_enum_name=api_enum_name)
with (open(path_prefix + file_name, 'wt')) as f:
f.write(cpp)
......@@ -236,8 +243,10 @@ def main():
namespace = generator['namespace']
enum_type = generator['enum_type']
enums = load_enums(path_prefix + json_file)
write_header(enums, path_prefix, output_file + '_autogen.h', json_file, namespace, enum_type)
write_cpp(enums, path_prefix, output_file + '_autogen.cpp', json_file, namespace, enum_type)
write_header(enums, path_prefix, output_file + '_autogen.h', json_file, namespace,
enum_type)
write_cpp(enums, path_prefix, output_file + '_autogen.cpp', json_file, namespace,
enum_type)
return 0
......
......@@ -12,68 +12,23 @@ from datetime import date
import sys
all_uniform_types = [
"GL_NONE",
"GL_BOOL",
"GL_BOOL_VEC2",
"GL_BOOL_VEC3",
"GL_BOOL_VEC4",
"GL_FLOAT",
"GL_FLOAT_MAT2",
"GL_FLOAT_MAT2x3",
"GL_FLOAT_MAT2x4",
"GL_FLOAT_MAT3",
"GL_FLOAT_MAT3x2",
"GL_FLOAT_MAT3x4",
"GL_FLOAT_MAT4",
"GL_FLOAT_MAT4x2",
"GL_FLOAT_MAT4x3",
"GL_FLOAT_VEC2",
"GL_FLOAT_VEC3",
"GL_FLOAT_VEC4",
"GL_IMAGE_2D",
"GL_IMAGE_2D_ARRAY",
"GL_IMAGE_3D",
"GL_IMAGE_CUBE",
"GL_INT",
"GL_INT_IMAGE_2D",
"GL_INT_IMAGE_2D_ARRAY",
"GL_INT_IMAGE_3D",
"GL_INT_IMAGE_CUBE",
"GL_INT_SAMPLER_2D",
"GL_INT_SAMPLER_2D_ARRAY",
"GL_INT_SAMPLER_2D_MULTISAMPLE",
"GL_INT_SAMPLER_2D_MULTISAMPLE_ARRAY",
"GL_INT_SAMPLER_3D",
"GL_INT_SAMPLER_CUBE",
"GL_INT_VEC2",
"GL_INT_VEC3",
"GL_INT_VEC4",
"GL_SAMPLER_2D",
"GL_SAMPLER_2D_ARRAY",
"GL_SAMPLER_2D_ARRAY_SHADOW",
"GL_SAMPLER_2D_MULTISAMPLE",
"GL_SAMPLER_2D_MULTISAMPLE_ARRAY",
"GL_SAMPLER_2D_RECT_ANGLE",
"GL_SAMPLER_2D_SHADOW",
"GL_SAMPLER_3D",
"GL_SAMPLER_CUBE",
"GL_SAMPLER_CUBE_SHADOW",
"GL_SAMPLER_EXTERNAL_OES",
"GL_UNSIGNED_INT",
"GL_UNSIGNED_INT_ATOMIC_COUNTER",
"GL_UNSIGNED_INT_IMAGE_2D",
"GL_UNSIGNED_INT_IMAGE_2D_ARRAY",
"GL_UNSIGNED_INT_IMAGE_3D",
"GL_UNSIGNED_INT_IMAGE_CUBE",
"GL_UNSIGNED_INT_SAMPLER_2D",
"GL_UNSIGNED_INT_SAMPLER_2D_ARRAY",
"GL_UNSIGNED_INT_SAMPLER_2D_MULTISAMPLE",
"GL_UNSIGNED_INT_SAMPLER_2D_MULTISAMPLE_ARRAY",
"GL_UNSIGNED_INT_SAMPLER_3D",
"GL_UNSIGNED_INT_SAMPLER_CUBE",
"GL_UNSIGNED_INT_VEC2",
"GL_UNSIGNED_INT_VEC3",
"GL_UNSIGNED_INT_VEC4"
"GL_NONE", "GL_BOOL", "GL_BOOL_VEC2", "GL_BOOL_VEC3", "GL_BOOL_VEC4", "GL_FLOAT",
"GL_FLOAT_MAT2", "GL_FLOAT_MAT2x3", "GL_FLOAT_MAT2x4", "GL_FLOAT_MAT3", "GL_FLOAT_MAT3x2",
"GL_FLOAT_MAT3x4", "GL_FLOAT_MAT4", "GL_FLOAT_MAT4x2", "GL_FLOAT_MAT4x3", "GL_FLOAT_VEC2",
"GL_FLOAT_VEC3", "GL_FLOAT_VEC4", "GL_IMAGE_2D", "GL_IMAGE_2D_ARRAY", "GL_IMAGE_3D",
"GL_IMAGE_CUBE", "GL_INT", "GL_INT_IMAGE_2D", "GL_INT_IMAGE_2D_ARRAY", "GL_INT_IMAGE_3D",
"GL_INT_IMAGE_CUBE", "GL_INT_SAMPLER_2D", "GL_INT_SAMPLER_2D_ARRAY",
"GL_INT_SAMPLER_2D_MULTISAMPLE", "GL_INT_SAMPLER_2D_MULTISAMPLE_ARRAY", "GL_INT_SAMPLER_3D",
"GL_INT_SAMPLER_CUBE", "GL_INT_VEC2", "GL_INT_VEC3", "GL_INT_VEC4", "GL_SAMPLER_2D",
"GL_SAMPLER_2D_ARRAY", "GL_SAMPLER_2D_ARRAY_SHADOW", "GL_SAMPLER_2D_MULTISAMPLE",
"GL_SAMPLER_2D_MULTISAMPLE_ARRAY", "GL_SAMPLER_2D_RECT_ANGLE", "GL_SAMPLER_2D_SHADOW",
"GL_SAMPLER_3D", "GL_SAMPLER_CUBE", "GL_SAMPLER_CUBE_SHADOW", "GL_SAMPLER_EXTERNAL_OES",
"GL_UNSIGNED_INT", "GL_UNSIGNED_INT_ATOMIC_COUNTER", "GL_UNSIGNED_INT_IMAGE_2D",
"GL_UNSIGNED_INT_IMAGE_2D_ARRAY", "GL_UNSIGNED_INT_IMAGE_3D", "GL_UNSIGNED_INT_IMAGE_CUBE",
"GL_UNSIGNED_INT_SAMPLER_2D", "GL_UNSIGNED_INT_SAMPLER_2D_ARRAY",
"GL_UNSIGNED_INT_SAMPLER_2D_MULTISAMPLE", "GL_UNSIGNED_INT_SAMPLER_2D_MULTISAMPLE_ARRAY",
"GL_UNSIGNED_INT_SAMPLER_3D", "GL_UNSIGNED_INT_SAMPLER_CUBE", "GL_UNSIGNED_INT_VEC2",
"GL_UNSIGNED_INT_VEC3", "GL_UNSIGNED_INT_VEC4"
]
# Uniform texture types. Be wary of substrings finding the wrong types.
......@@ -142,9 +97,11 @@ const UniformTypeInfo &GetUniformTypeInfo(GLenum uniformType)
type_info_data_template = """{{{type}, {component_type}, {texture_type}, {transposed_type}, {bool_type}, {sampler_format}, {rows}, {columns}, {components}, {component_size}, {internal_size}, {external_size}, {is_sampler}, {is_matrix}, {is_image} }}"""
type_index_case_template = """case {enum_value}: return {index_value};"""
def cpp_bool(value):
return "true" if value else "false"
def get_component_type(uniform_type):
if uniform_type.find("GL_BOOL") == 0:
return "GL_BOOL"
......@@ -159,21 +116,25 @@ def get_component_type(uniform_type):
else:
return "GL_INT"
def get_texture_type(uniform_type):
for sampler_type, tex_type in texture_types.items():
if uniform_type.endswith(sampler_type):
return "GL_TEXTURE_" + tex_type
return "GL_NONE"
def get_transposed_type(uniform_type):
if "_MAT" in uniform_type:
if "x" in uniform_type:
return "GL_FLOAT_MAT" + uniform_type[-1] + "x" + uniform_type[uniform_type.find("_MAT")+4]
return "GL_FLOAT_MAT" + uniform_type[-1] + "x" + uniform_type[uniform_type.find("_MAT")
+ 4]
else:
return uniform_type
else:
return "GL_NONE"
def get_bool_type(uniform_type):
if uniform_type == "GL_INT" or uniform_type == "GL_UNSIGNED_INT" or uniform_type == "GL_FLOAT":
return "GL_BOOL"
......@@ -182,6 +143,7 @@ def get_bool_type(uniform_type):
else:
return "GL_NONE"
def get_sampler_format(uniform_type):
if not "_SAMPLER_" in uniform_type:
return "SamplerFormat::InvalidEnum"
......@@ -194,6 +156,7 @@ def get_sampler_format(uniform_type):
else:
return "SamplerFormat::Float"
def get_rows(uniform_type):
if uniform_type == "GL_NONE":
return "0"
......@@ -202,6 +165,7 @@ def get_rows(uniform_type):
else:
return "1"
def get_columns(uniform_type):
if uniform_type == "GL_NONE":
return "0"
......@@ -212,9 +176,11 @@ def get_columns(uniform_type):
else:
return "1"
def get_components(uniform_type):
return str(int(get_rows(uniform_type)) * int(get_columns(uniform_type)))
def get_component_size(uniform_type):
component_type = get_component_type(uniform_type)
if (component_type) == "GL_BOOL":
......@@ -230,38 +196,45 @@ def get_component_size(uniform_type):
else:
raise "Invalid component type: " + component_type
def get_internal_size(uniform_type):
return get_component_size(uniform_type) + " * " + str(int(get_rows(uniform_type)) * 4)
def get_external_size(uniform_type):
return get_component_size(uniform_type) + " * " + get_components(uniform_type)
def get_is_sampler(uniform_type):
return cpp_bool("_SAMPLER_" in uniform_type)
def get_is_matrix(uniform_type):
return cpp_bool("_MAT" in uniform_type)
def get_is_image(uniform_type):
return cpp_bool("_IMAGE_" in uniform_type)
def gen_type_info(uniform_type):
return type_info_data_template.format(
type = uniform_type,
component_type = get_component_type(uniform_type),
texture_type = get_texture_type(uniform_type),
transposed_type = get_transposed_type(uniform_type),
bool_type = get_bool_type(uniform_type),
sampler_format = get_sampler_format(uniform_type),
rows = get_rows(uniform_type),
columns = get_columns(uniform_type),
components = get_components(uniform_type),
component_size = get_component_size(uniform_type),
internal_size = get_internal_size(uniform_type),
external_size = get_external_size(uniform_type),
is_sampler = get_is_sampler(uniform_type),
is_matrix = get_is_matrix(uniform_type),
is_image = get_is_image(uniform_type))
type=uniform_type,
component_type=get_component_type(uniform_type),
texture_type=get_texture_type(uniform_type),
transposed_type=get_transposed_type(uniform_type),
bool_type=get_bool_type(uniform_type),
sampler_format=get_sampler_format(uniform_type),
rows=get_rows(uniform_type),
columns=get_columns(uniform_type),
components=get_components(uniform_type),
component_size=get_component_size(uniform_type),
internal_size=get_internal_size(uniform_type),
external_size=get_external_size(uniform_type),
is_sampler=get_is_sampler(uniform_type),
is_matrix=get_is_matrix(uniform_type),
is_image=get_is_image(uniform_type))
def gen_type_index_case(index, uniform_type):
return "case " + uniform_type + ": return " + str(index) + ";"
......@@ -283,16 +256,20 @@ def main():
return 1
return 0
uniform_type_info_data = ",\n".join([gen_type_info(uniform_type) for uniform_type in all_uniform_types])
uniform_type_index_cases = "\n".join([gen_type_index_case(index, uniform_type) for index, uniform_type in enumerate(all_uniform_types)])
uniform_type_info_data = ",\n".join(
[gen_type_info(uniform_type) for uniform_type in all_uniform_types])
uniform_type_index_cases = "\n".join([
gen_type_index_case(index, uniform_type)
for index, uniform_type in enumerate(all_uniform_types)
])
with open('uniform_type_info_autogen.cpp', 'wt') as out_file:
output_cpp = template_cpp.format(
script_name = sys.argv[0],
copyright_year = date.today().year,
total_count = len(all_uniform_types),
uniform_type_info_data = uniform_type_info_data,
uniform_type_index_cases = uniform_type_index_cases)
script_name=sys.argv[0],
copyright_year=date.today().year,
total_count=len(all_uniform_types),
uniform_type_info_data=uniform_type_info_data,
uniform_type_index_cases=uniform_type_index_cases)
out_file.write(output_cpp)
out_file.close()
return 0
......
0af87b7f37d8a5260c859e9169a91f6a
\ No newline at end of file
defc05f112e255400323d95b3610cfeb
\ No newline at end of file
......@@ -60,6 +60,7 @@ const char *FindHLSLFunction(int uniqueId)
}} // namespace sh
"""
def reject_duplicate_keys(pairs):
found_keys = {}
for key, value in pairs:
......@@ -69,12 +70,14 @@ def reject_duplicate_keys(pairs):
found_keys[key] = value
return found_keys
def load_json(path):
with open(path) as map_file:
file_data = map_file.read()
map_file.close()
return json.loads(file_data, object_pairs_hook=reject_duplicate_keys)
def enum_type(arg):
# handle 'argtype argname' and 'out argtype argname'
chunks = arg.split(' ')
......@@ -89,22 +92,24 @@ def enum_type(arg):
return 'UI' + arg_type[2:] + suffix
return arg_type.capitalize() + suffix
def gen_emulated_function(data):
func = ""
if 'comment' in data:
func += "".join([ "// " + line + "\n" for line in data['comment'] ])
func += "".join(["// " + line + "\n" for line in data['comment']])
sig = data['return_type'] + ' ' + data['op'] + '_emu(' + ', '.join(data['args']) + ')'
body = [ sig, '{' ] + [' ' + line for line in data['body']] + ['}']
body = [sig, '{'] + [' ' + line for line in data['body']] + ['}']
func += "{\n"
func += "BuiltInId::" + data['op'] + "_" + "_".join([enum_type(arg) for arg in data['args']]) + ",\n"
func += "BuiltInId::" + data['op'] + "_" + "_".join([enum_type(arg) for arg in data['args']
]) + ",\n"
if 'helper' in data:
func += '"' + '\\n"\n"'.join(data['helper']) + '\\n"\n'
func += '"' + '\\n"\n"'.join(body) + '\\n"\n'
func += "},\n"
return [ func ]
return [func]
def main():
......@@ -133,10 +138,10 @@ def main():
emulated_functions += gen_emulated_function(item)
hlsl_gen = template_emulated_builtin_functions_hlsl.format(
script_name = sys.argv[0],
data_source_name = input_script,
copyright_year = date.today().year,
emulated_functions = "".join(emulated_functions))
script_name=sys.argv[0],
data_source_name=input_script,
copyright_year=date.today().year,
emulated_functions="".join(emulated_functions))
with open(hlsl_fname, 'wt') as f:
f.write(hlsl_gen)
......
......@@ -58,12 +58,13 @@ template_format_case = """ case {texture_format}:
template_simple_case = """ case {key}:
"""
def parse_texture_format_case(texture_format, framebuffer_formats):
framebuffer_format_cases = ""
for framebuffer_format in sorted(framebuffer_formats):
framebuffer_format_cases += template_simple_case.format(key = framebuffer_format)
framebuffer_format_cases += template_simple_case.format(key=framebuffer_format)
return template_format_case.format(
texture_format = texture_format, framebuffer_format_cases = framebuffer_format_cases)
texture_format=texture_format, framebuffer_format_cases=framebuffer_format_cases)
def main():
......@@ -93,7 +94,7 @@ def main():
for texture_format, framebuffer_format in data:
if texture_format not in format_map:
format_map[texture_format] = []
format_map[texture_format] += [ framebuffer_format ]
format_map[texture_format] += [framebuffer_format]
texture_format_cases = ""
......@@ -102,10 +103,10 @@ def main():
with open(out_file_name, 'wt') as out_file:
output_cpp = template_cpp.format(
script_name = sys.argv[0],
data_source_name = data_source_name,
copyright_year = date.today().year,
texture_format_cases = texture_format_cases)
script_name=sys.argv[0],
data_source_name=data_source_name,
copyright_year=date.today().year,
texture_format_cases=texture_format_cases)
out_file.write(output_cpp)
out_file.close()
return 0
......
......@@ -113,16 +113,14 @@ template_es3_combo_type_case = """ case {type}:
def parse_type_case(type, result):
return template_simple_case.format(
key = type, result = result)
return template_simple_case.format(key=type, result=result)
def parse_format_case(format, type_map):
type_cases = ""
for type, internal_format in sorted(type_map.iteritems()):
type_cases += parse_type_case(type, internal_format)
return template_format_case.format(
format = format, type_cases = type_cases)
return template_format_case.format(format=format, type_cases=type_cases)
def main():
......@@ -188,21 +186,20 @@ def main():
internal_format_cases += " case " + internal_format + ":\n"
this_type_cases += template_es3_combo_type_case.format(
type = type, internal_format_cases = internal_format_cases)
type=type, internal_format_cases=internal_format_cases)
es3_combo_cases += template_format_case.format(
format = format, type_cases = this_type_cases)
es3_combo_cases += template_format_case.format(format=format, type_cases=this_type_cases)
with open('format_map_autogen.cpp', 'wt') as out_file:
output_cpp = template_cpp.format(
script_name = sys.argv[0],
data_source_name = input_script,
es3_data_source_name = combo_data_file,
copyright_year = date.today().year,
format_cases = format_cases,
es3_format_cases = es3_format_cases,
es3_type_cases = es3_type_cases,
es3_combo_cases = es3_combo_cases)
script_name=sys.argv[0],
data_source_name=input_script,
es3_data_source_name=combo_data_file,
copyright_year=date.today().year,
format_cases=format_cases,
es3_format_cases=es3_format_cases,
es3_type_cases=es3_type_cases,
es3_combo_cases=es3_combo_cases)
out_file.write(output_cpp)
return 0
......
......@@ -12,9 +12,11 @@ import re
kChannels = "ABDGLRSX"
def get_angle_format_map_abs_path():
return os.path.join(os.path.dirname(os.path.realpath(__file__)), 'angle_format_map.json')
def reject_duplicate_keys(pairs):
found_keys = {}
for key, value in pairs:
......@@ -24,24 +26,29 @@ def reject_duplicate_keys(pairs):
found_keys[key] = value
return found_keys
def load_json(path):
with open(path) as map_file:
return json.loads(map_file.read(), object_pairs_hook=reject_duplicate_keys)
def load_forward_table(path):
pairs = load_json(path)
reject_duplicate_keys(pairs)
return { gl: angle for gl, angle in pairs }
return {gl: angle for gl, angle in pairs}
def load_inverse_table(path):
pairs = load_json(path)
reject_duplicate_keys(pairs)
return { angle: gl for gl, angle in pairs }
return {angle: gl for gl, angle in pairs}
def load_without_override():
map_path = get_angle_format_map_abs_path()
return load_forward_table(map_path)
def load_with_override(override_path):
results = load_without_override()
overrides = load_json(override_path)
......@@ -51,10 +58,12 @@ def load_with_override(override_path):
return results
def get_all_angle_formats():
map_path = get_angle_format_map_abs_path()
return load_inverse_table(map_path).keys()
def get_component_type(format_id):
if "SNORM" in format_id:
return "snorm"
......@@ -83,10 +92,12 @@ def get_component_type(format_id):
else:
raise ValueError("Unknown component type for " + format_id)
def get_channel_tokens(format_id):
r = re.compile(r'([' + kChannels + '][\d]+)')
return filter(r.match, r.split(format_id))
def get_channels(format_id):
channels = ''
tokens = get_channel_tokens(format_id)
......@@ -97,6 +108,7 @@ def get_channels(format_id):
return channels
def get_bits(format_id):
bits = {}
tokens = get_channel_tokens(format_id)
......@@ -106,9 +118,11 @@ def get_bits(format_id):
bits[token[0]] = int(token[1:])
return bits
def get_format_info(format_id):
return get_component_type(format_id), get_bits(format_id), get_channels(format_id)
# TODO(oetuaho): Expand this code so that it could generate the gl format info tables as well.
def gl_format_channels(internal_format):
if internal_format == 'GL_BGR5_A1_ANGLEX':
......@@ -142,6 +156,7 @@ def gl_format_channels(internal_format):
return 's'
return channels_string.lower()
def get_internal_format_initializer(internal_format, format_id):
gl_channels = gl_format_channels(internal_format)
gl_format_no_alpha = gl_channels == 'rgb' or gl_channels == 'l'
......@@ -182,19 +197,22 @@ def get_internal_format_initializer(internal_format, format_id):
elif component_type == 'uint' and bits['R'] == 32:
return 'Initialize4ComponentData<GLuint, 0x00000000, 0x00000000, 0x00000000, 0x00000001>'
else:
raise ValueError('warning: internal format initializer could not be generated and may be needed for ' + internal_format)
raise ValueError(
'warning: internal format initializer could not be generated and may be needed for ' +
internal_format)
def get_vertex_copy_function(src_format, dst_format):
if dst_format == "NONE":
return "nullptr";
return "nullptr"
num_channel = len(get_channel_tokens(src_format))
if num_channel < 1 or num_channel > 4:
return "nullptr";
return "nullptr"
if 'FIXED' in src_format:
assert 'FLOAT' in dst_format, ('get_vertex_copy_function: can only convert fixed to float,'
+ ' not to ' + dst_format)
assert 'FLOAT' in dst_format, (
'get_vertex_copy_function: can only convert fixed to float,' + ' not to ' + dst_format)
return 'Copy32FixedTo32FVertexData<%d, %d>' % (num_channel, num_channel)
sign = ''
......@@ -215,14 +233,14 @@ def get_vertex_copy_function(src_format, dst_format):
sign = 'u'
if base_type is None:
return "nullptr";
return "nullptr"
gl_type = 'GL' + sign + base_type
if src_format == dst_format:
return 'CopyNativeVertexData<%s, %d, %d, 0>' % (gl_type, num_channel, num_channel)
assert 'FLOAT' in dst_format, ('get_vertex_copy_function: can only convert to float,'
+ ' not to ' + dst_format)
assert 'FLOAT' in dst_format, (
'get_vertex_copy_function: can only convert to float,' + ' not to ' + dst_format)
normalized = 'true' if 'NORM' in src_format else 'false'
return "CopyTo32FVertexData<%s, %d, %d, %s>" % (gl_type, num_channel, num_channel, normalized)
......@@ -103,8 +103,7 @@ supported_dimensions = ["2D", "3D", "2DArray"]
# field 2: Name of compiled shader
# field 3: Filename of compiled shader
blitshader_data = [
("RGBAF", "PassthroughRGBA*", "passthroughrgba*11ps.h"),
("BGRAF", "PassthroughRGBA*"),
("RGBAF", "PassthroughRGBA*", "passthroughrgba*11ps.h"), ("BGRAF", "PassthroughRGBA*"),
("RGBF", "PassthroughRGB*", "passthroughrgb*11ps.h"),
("RGF", "PassthroughRG*", "passthroughrg*11ps.h"),
("RF", "PassthroughR*", "passthroughr*11ps.h"),
......@@ -119,53 +118,35 @@ blitshader_data = [
("RGI", "PassthroughRG*I", "passthroughrg*i11ps.h"),
("RUI", "PassthroughR*UI", "passthroughr*ui11ps.h"),
("RI", "PassthroughR*I", "passthroughr*i11ps.h"),
("RGBAF_PREMULTIPLY", "FtoF_PM_RGBA_*",
"multiplyalpha_ftof_pm_rgba_*_ps.h"),
("RGBAF_PREMULTIPLY", "FtoF_PM_RGBA_*", "multiplyalpha_ftof_pm_rgba_*_ps.h"),
("RGBAF_UNMULTIPLY", "FtoF_UM_RGBA_*", "multiplyalpha_ftof_um_rgba_*_ps.h"),
("RGBF_PREMULTIPLY", "FtoF_PM_RGB_*", "multiplyalpha_ftof_pm_rgb_*_ps.h"),
("RGBF_UNMULTIPLY", "FtoF_UM_RGB_*", "multiplyalpha_ftof_um_rgb_*_ps.h"),
("RGBAF_TOUI", "FtoU_PT_RGBA_*", "multiplyalpha_ftou_pt_rgba_*_ps.h"),
("RGBAF_TOUI_PREMULTIPLY", "FtoU_PM_RGBA_*",
"multiplyalpha_ftou_pm_rgba_*_ps.h"),
("RGBAF_TOUI_UNMULTIPLY", "FtoU_UM_RGBA_*",
"multiplyalpha_ftou_um_rgba_*_ps.h"),
("RGBAF_TOUI_PREMULTIPLY", "FtoU_PM_RGBA_*", "multiplyalpha_ftou_pm_rgba_*_ps.h"),
("RGBAF_TOUI_UNMULTIPLY", "FtoU_UM_RGBA_*", "multiplyalpha_ftou_um_rgba_*_ps.h"),
("RGBF_TOUI", "FtoU_PT_RGB_*", "multiplyalpha_ftou_pt_rgb_*_ps.h"),
("RGBF_TOUI_PREMULTIPLY", "FtoU_PM_RGB_*",
"multiplyalpha_ftou_pm_rgb_*_ps.h"),
("RGBF_TOUI_UNMULTIPLY", "FtoU_UM_RGB_*",
"multiplyalpha_ftou_um_rgb_*_ps.h"),
("RGBF_TOUI_PREMULTIPLY", "FtoU_PM_RGB_*", "multiplyalpha_ftou_pm_rgb_*_ps.h"),
("RGBF_TOUI_UNMULTIPLY", "FtoU_UM_RGB_*", "multiplyalpha_ftou_um_rgb_*_ps.h"),
("RGBAF_TOI", "FtoI_PT_RGBA_*", "multiplyalpha_ftoi_pt_rgba_*_ps.h"),
("RGBAF_TOI_PREMULTIPLY", "FtoI_PM_RGBA_*",
"multiplyalpha_ftoi_pm_rgba_*_ps.h"),
("RGBAF_TOI_UNMULTIPLY", "FtoI_UM_RGBA_*",
"multiplyalpha_ftoi_um_rgba_*_ps.h"),
("RGBAF_TOI_PREMULTIPLY", "FtoI_PM_RGBA_*", "multiplyalpha_ftoi_pm_rgba_*_ps.h"),
("RGBAF_TOI_UNMULTIPLY", "FtoI_UM_RGBA_*", "multiplyalpha_ftoi_um_rgba_*_ps.h"),
("RGBF_TOI", "FtoI_PT_RGB_*", "multiplyalpha_ftoi_pt_rgb_*_ps.h"),
("RGBF_TOI_PREMULTIPLY", "FtoI_PM_RGB_*",
"multiplyalpha_ftoi_pm_rgb_*_ps.h"),
("RGBF_TOI_UNMULTIPLY", "FtoI_UM_RGB_*",
"multiplyalpha_ftoi_um_rgb_*_ps.h"),
("LUMAF_PREMULTIPLY", "FtoF_PM_LUMA_*",
"multiplyalpha_ftof_pm_luma_*_ps.h"),
("RGBF_TOI_PREMULTIPLY", "FtoI_PM_RGB_*", "multiplyalpha_ftoi_pm_rgb_*_ps.h"),
("RGBF_TOI_UNMULTIPLY", "FtoI_UM_RGB_*", "multiplyalpha_ftoi_um_rgb_*_ps.h"),
("LUMAF_PREMULTIPLY", "FtoF_PM_LUMA_*", "multiplyalpha_ftof_pm_luma_*_ps.h"),
("LUMAF_UNMULTIPLY", "FtoF_UM_LUMA_*", "multiplyalpha_ftof_um_luma_*_ps.h"),
("LUMAALPHAF_PREMULTIPLY", "FtoF_PM_LUMAALPHA_*",
"multiplyalpha_ftof_pm_lumaalpha_*_ps.h"),
("LUMAALPHAF_UNMULTIPLY", "FtoF_UM_LUMAALPHA_*",
"multiplyalpha_ftof_um_lumaalpha_*_ps.h"),
("LUMAALPHAF_PREMULTIPLY", "FtoF_PM_LUMAALPHA_*", "multiplyalpha_ftof_pm_lumaalpha_*_ps.h"),
("LUMAALPHAF_UNMULTIPLY", "FtoF_UM_LUMAALPHA_*", "multiplyalpha_ftof_um_lumaalpha_*_ps.h"),
("RGBAF_4444", "PassthroughRGBA*_4444", "passthroughrgba*_4444_11ps.h"),
("RGBAF_4444_PREMULTIPLY", "FtoF_PM_RGBA_4444_*",
"multiplyalpha_ftof_pm_rgba_4444_*_ps.h"),
("RGBAF_4444_UNMULTIPLY", "FtoF_UM_RGBA_4444_*",
"multiplyalpha_ftof_um_rgba_4444_*_ps.h"),
("RGBAF_4444_PREMULTIPLY", "FtoF_PM_RGBA_4444_*", "multiplyalpha_ftof_pm_rgba_4444_*_ps.h"),
("RGBAF_4444_UNMULTIPLY", "FtoF_UM_RGBA_4444_*", "multiplyalpha_ftof_um_rgba_4444_*_ps.h"),
("RGBF_565", "PassthroughRGB*_565", "passthroughrgb*_565_11ps.h"),
("RGBF_565_PREMULTIPLY", "FtoF_PM_RGB_565_*",
"multiplyalpha_ftof_pm_rgb_565_*_ps.h"),
("RGBF_565_UNMULTIPLY", "FtoF_UM_RGB_565_*",
"multiplyalpha_ftof_um_rgb_565_*_ps.h"),
("RGBF_565_PREMULTIPLY", "FtoF_PM_RGB_565_*", "multiplyalpha_ftof_pm_rgb_565_*_ps.h"),
("RGBF_565_UNMULTIPLY", "FtoF_UM_RGB_565_*", "multiplyalpha_ftof_um_rgb_565_*_ps.h"),
("RGBAF_5551", "PassthroughRGBA*_5551", "passthroughrgba*_5551_11ps.h"),
("RGBAF_5551_PREMULTIPLY", "FtoF_PM_RGBA_5551_*",
"multiplyalpha_ftof_pm_rgba_5551_*_ps.h"),
("RGBAF_5551_UNMULTIPLY", "FtoF_UM_RGBA_5551_*",
"multiplyalpha_ftof_um_rgba_5551_*_ps.h")
("RGBAF_5551_PREMULTIPLY", "FtoF_PM_RGBA_5551_*", "multiplyalpha_ftof_pm_rgba_5551_*_ps.h"),
("RGBAF_5551_UNMULTIPLY", "FtoF_UM_RGBA_5551_*", "multiplyalpha_ftof_um_rgba_5551_*_ps.h")
]
......@@ -177,8 +158,7 @@ def format_shader_include(dimension, blitshader):
def format_get_blitshader_case(operation):
dimension_cases = []
for dimension in supported_dimensions:
dimension_cases.append(
format_get_blitshader_case_dimension(operation, dimension))
dimension_cases.append(format_get_blitshader_case_dimension(operation, dimension))
return template_get_blitshader_case.format(
get_blitshader_dimension_cases="\n".join([c for c in dimension_cases]),
......@@ -219,8 +199,7 @@ def format_map_blitshader_case(dimension, blitshader):
def format_shader_filename(dimension, blitshader):
return "shaders/compiled/" + blitshader[2].replace("*",
dimension.lower()) + ","
return "shaders/compiled/" + blitshader[2].replace("*", dimension.lower()) + ","
def get_shader_includes():
......@@ -279,8 +258,7 @@ def get_blitshadertype_enums():
# 2D float to int shaders have not been implemented
if dimension == "2D" and blitshader[0].find("TOI") != -1:
continue
blitshaders.append(" BLITSHADER_" + dimension.upper() + "_" +
blitshader[0] + ",")
blitshaders.append(" BLITSHADER_" + dimension.upper() + "_" + blitshader[0] + ",")
blitshaders.append(" BLITSHADER_INVALID")
return blitshaders
......@@ -299,14 +277,14 @@ def get_shader_filenames():
continue
if len(blitshader) == 3:
filenames.append(
(" \"src/libANGLE/renderer/d3d/d3d11/shaders/compiled/{0}\","
).format(blitshader[2].replace("*", dimension.lower())))
(" \"src/libANGLE/renderer/d3d/d3d11/shaders/compiled/{0}\",").format(
blitshader[2].replace("*", dimension.lower())))
return filenames
def write_inc_file(get_blitshaders_case_list, add_blitshader_case_list,
shader_includes, blitshaderop_enums, blitshadertype_enums):
def write_inc_file(get_blitshaders_case_list, add_blitshader_case_list, shader_includes,
blitshaderop_enums, blitshadertype_enums):
content = template_blitshader_source.format(
script_name=os.path.basename(sys.argv[0]),
year=date.today().year,
......@@ -367,11 +345,11 @@ def main():
shader_filenames = get_shader_filenames()
write_inc_file("\n".join([d for d in blitshadertype_cases]), "\n".join(
[c for c in map_blitshader_cases]), "\n".join([i for i in shader_includes]),
"\n".join([e for e in blitshaderop_enums]), "\n".join(
[e for e in blitshadertype_enums]))
[c for c in map_blitshader_cases]), "\n".join([i for i in shader_includes]), "\n".join(
[e for e in blitshaderop_enums]), "\n".join([e for e in blitshadertype_enums]))
write_gni_file("\n".join([s for s in shader_filenames]))
return 0
if __name__ == '__main__':
sys.exit(main())
......@@ -75,13 +75,13 @@ template_undefined_case = """ case DXGI_FORMAT_{dxgi_format}:
break;
"""
def format_case(dxgi_format, result):
return template_format_case.format(
dxgi_format = dxgi_format,
result = result)
return template_format_case.format(dxgi_format=dxgi_format, result=result)
def undefined_case(dxgi_format):
return template_undefined_case.format(dxgi_format = dxgi_format)
return template_undefined_case.format(dxgi_format=dxgi_format)
def main():
......@@ -146,11 +146,11 @@ def main():
with open('dxgi_format_map_autogen.cpp', 'wt') as out_file:
output_cpp = template_cpp.format(
script_name = sys.argv[0],
data_source_name = input_data,
copyright_year = date.today().year,
component_type_cases = component_cases,
format_cases = format_cases)
script_name=sys.argv[0],
data_source_name=input_data,
copyright_year=date.today().year,
component_type_cases=component_cases,
format_cases=format_cases)
out_file.write(output_cpp)
out_file.close()
return 0
......
......@@ -168,6 +168,7 @@ const DXGISupport &GetDXGISupport(DXGI_FORMAT dxgiFormat, D3D_FEATURE_LEVEL feat
}} // namespace rx
"""
def do_format(format_data):
table_data = {'9_3': '', '10_0': '', '10_1': '', '11_0': '', '11_1': ''}
......@@ -270,24 +271,31 @@ def do_format(format_data):
never = ' | '.join(sorted(never_supported))
optional = ' | '.join(sorted(optional_for_fl))
if not always: always = '0'
if not never: never = '0'
if not optional: optional = '0'
if not always:
always = '0'
if not never:
never = '0'
if not optional:
optional = '0'
table_data[feature_level] += ' case ' + format_name + ':\n'
table_data[feature_level] += ' {\n'
table_data[feature_level] += ' static const DXGISupport info(' + always + ', ' + never + ', ' + optional + ');\n'
table_data[
feature_level] += ' static const DXGISupport info(' + always + ', ' + never + ', ' + optional + ');\n'
table_data[feature_level] += ' return info;\n'
table_data[feature_level] += ' }\n'
return table_data
def join_table_data(table_data_1, table_data_2):
    """Concatenate two per-feature-level table-data dicts key by key.

    Both inputs map the D3D feature-level keys ('9_3', '10_0', '10_1',
    '11_0', '11_1') to strings of generated case statements; the result
    joins the two strings for each key.
    """
    return {
        '9_3': table_data_1['9_3'] + table_data_2['9_3'],
        '10_0': table_data_1['10_0'] + table_data_2['10_0'],
        '10_1': table_data_1['10_1'] + table_data_2['10_1'],
        '11_0': table_data_1['11_0'] + table_data_2['11_0'],
        '11_1': table_data_1['11_1'] + table_data_2['11_1']
    }
def main():
......@@ -316,7 +324,8 @@ def main():
for format_data in json_data:
table_data = join_table_data(table_data, do_format(format_data))
out_data = template.format(prefix=macro_prefix,
out_data = template.format(
prefix=macro_prefix,
table_data_9_3=table_data['9_3'],
table_data_10_0=table_data['10_0'],
table_data_10_1=table_data['10_1'],
......
......@@ -69,6 +69,7 @@ const Format &Format::Get(GLenum internalFormat, const Renderer11DeviceCaps &dev
}} // namespace rx
"""
def get_swizzle_format_id(internal_format, angle_format):
angle_format_id = angle_format["formatName"]
if (internal_format == 'GL_NONE') or (angle_format_id == 'NONE'):
......@@ -80,16 +81,20 @@ def get_swizzle_format_id(internal_format, angle_format):
return angle_format['swizzleFormat']
if 'bits' not in angle_format:
raise ValueError('no bits information for determining swizzleformat for format: ' + internal_format)
raise ValueError('no bits information for determining swizzleformat for format: ' +
internal_format)
bits = angle_format['bits']
max_component_bits = max(bits.itervalues())
channels_different = not all([component_bits == bits.itervalues().next() for component_bits in bits.itervalues()])
channels_different = not all(
[component_bits == bits.itervalues().next() for component_bits in bits.itervalues()])
# The format itself can be used for swizzles if it can be accessed as a render target and
# sampled and the bit count for all 4 channels is the same.
if "rtvFormat" in angle_format and "srvFormat" in angle_format and "uavFormat" in angle_format and not channels_different and len(angle_format['channels']) == 4:
return angle_format["glInternalFormat"] if "glInternalFormat" in angle_format else internal_format
if "rtvFormat" in angle_format and "srvFormat" in angle_format and "uavFormat" in angle_format and not channels_different and len(
angle_format['channels']) == 4:
return angle_format[
"glInternalFormat"] if "glInternalFormat" in angle_format else internal_format
b = int(math.ceil(float(max_component_bits) / 8) * 8)
......@@ -102,10 +107,12 @@ def get_swizzle_format_id(internal_format, angle_format):
return 'GL_RGBA16_EXT'
if b == 24:
raise ValueError('unexpected 24-bit format when determining swizzleformat for format: ' + internal_format)
raise ValueError('unexpected 24-bit format when determining swizzleformat for format: ' +
internal_format)
if 'componentType' not in angle_format:
raise ValueError('no component type information for determining swizzleformat for format: ' + internal_format)
raise ValueError('no component type information for determining swizzleformat for format: '
+ internal_format)
component_type = angle_format['componentType']
......@@ -127,10 +134,12 @@ def get_swizzle_format_id(internal_format, angle_format):
if (b == 16):
swizzle += "_EXT"
else:
raise ValueError('could not determine swizzleformat based on componentType for format: ' + internal_format)
raise ValueError('could not determine swizzleformat based on componentType for format: ' +
internal_format)
return swizzle
def get_blit_srv_format(angle_format):
if 'channels' not in angle_format:
return 'DXGI_FORMAT_UNKNOWN'
......@@ -171,6 +180,7 @@ split_format_entry_template = """{space} {condition}
{space} }}
"""
def json_to_table_data(internal_format, format_name, prefix, json):
table_data = ""
......@@ -201,6 +211,7 @@ def json_to_table_data(internal_format, format_name, prefix, json):
else:
return format_entry_template.format(**parsed)
def parse_json_angle_format_case(format_name, angle_format, json_data):
supported_case = {}
unsupported_case = {}
......@@ -227,8 +238,8 @@ def parse_json_angle_format_case(format_name, angle_format, json_data):
unsupported_case[k] = v
if fallback != None:
unsupported_case, _, _ = parse_json_angle_format_case(
fallback, json_data[fallback], json_data)
unsupported_case, _, _ = parse_json_angle_format_case(fallback, json_data[fallback],
json_data)
unsupported_case["formatName"] = fallback
if support_test != None:
......@@ -236,6 +247,7 @@ def parse_json_angle_format_case(format_name, angle_format, json_data):
else:
return supported_case, None, None
def parse_json_into_switch_angle_format_string(json_map, json_data):
table_data = ''
......@@ -259,8 +271,10 @@ def parse_json_into_switch_angle_format_string(json_map, json_data):
if support_test != None:
table_data += " {\n"
table_data += json_to_table_data(internal_format, format_name, "if (" + support_test + ")", supported_case)
table_data += json_to_table_data(internal_format, format_name, "else", unsupported_case)
table_data += json_to_table_data(internal_format, format_name,
"if (" + support_test + ")", supported_case)
table_data += json_to_table_data(internal_format, format_name, "else",
unsupported_case)
table_data += " }\n"
else:
table_data += json_to_table_data(internal_format, format_name, "", supported_case)
......@@ -290,10 +304,10 @@ def main():
angle_format_cases = parse_json_into_switch_angle_format_string(json_map, json_data)
output_cpp = template_texture_format_table_autogen_cpp.format(
script_name = sys.argv[0],
copyright_year = date.today().year,
angle_format_info_cases = angle_format_cases,
data_source_name = data_source_name)
script_name=sys.argv[0],
copyright_year=date.today().year,
angle_format_info_cases=angle_format_cases,
data_source_name=data_source_name)
with open('texture_format_table_autogen.cpp', 'wt') as out_file:
out_file.write(output_cpp)
out_file.close()
......
......@@ -168,7 +168,7 @@ def get_color_read_function(angle_format):
return 'ReadDepthStencil<' + channel_struct + '>'
read_component_type = get_color_read_write_component_type(angle_format)
return 'ReadColor<' + channel_struct + ', '+ read_component_type + '>'
return 'ReadColor<' + channel_struct + ', ' + read_component_type + '>'
def get_color_write_function(angle_format):
......@@ -180,12 +180,13 @@ def get_color_write_function(angle_format):
return 'WriteDepthStencil<' + channel_struct + '>'
write_component_type = get_color_read_write_component_type(angle_format)
return 'WriteColor<' + channel_struct + ', '+ write_component_type + '>'
return 'WriteColor<' + channel_struct + ', ' + write_component_type + '>'
format_entry_template = """ {{ FormatID::{id}, {glInternalFormat}, {fboImplementationInternalFormat}, {mipGenerationFunction}, {fastCopyFunctions}, {colorReadFunction}, {colorWriteFunction}, {namedComponentType}, {R}, {G}, {B}, {A}, {L}, {D}, {S}, {pixelBytes}, {componentAlignmentMask}, {isBlock}, {isFixed} }},
"""
def get_named_component_type(component_type):
if component_type == "snorm":
return "GL_SIGNED_NORMALIZED"
......@@ -283,8 +284,8 @@ def json_to_table_data(format_id, json, angle_to_gl):
sum_of_bits += int(parsed[channel])
pixel_bytes = sum_of_bits / 8
parsed["pixelBytes"] = pixel_bytes
parsed["componentAlignmentMask"] = get_component_alignment_mask(
parsed["channels"], parsed["bits"])
parsed["componentAlignmentMask"] = get_component_alignment_mask(parsed["channels"],
parsed["bits"])
parsed["isBlock"] = "true" if is_block else "false"
parsed["isFixed"] = "true" if "FIXED" in format_id else "false"
......@@ -309,21 +310,20 @@ def gen_enum_string(all_angle):
enum_data += ',\n ' + format_id
return enum_data
case_template = """ case {gl_format}:
 return FormatID::{angle_format};
"""


def gen_map_switch_string(gl_to_angle):
    """Generate the body of a GLenum -> angle::FormatID switch statement.

    gl_to_angle maps GL internal-format enum strings to ANGLE format names.
    One case is emitted per entry, in sorted key order, followed by a
    default case returning FormatID::NONE.
    """
    switch_data = ''
    for gl_format in sorted(gl_to_angle.keys()):
        angle_format = gl_to_angle[gl_format]
        switch_data += case_template.format(gl_format=gl_format, angle_format=angle_format)
    switch_data += " default:\n"
    switch_data += " return FormatID::NONE;"
    return switch_data
def main():
......@@ -348,15 +348,14 @@ def main():
json_data = angle_format.load_json(data_source_name)
all_angle = angle_to_gl.keys()
angle_format_cases = parse_angle_format_table(
all_angle, json_data, angle_to_gl)
angle_format_cases = parse_angle_format_table(all_angle, json_data, angle_to_gl)
switch_data = gen_map_switch_string(gl_to_angle)
output_cpp = template_autogen_inl.format(
script_name = sys.argv[0],
copyright_year = date.today().year,
angle_format_info_cases = angle_format_cases,
angle_format_switch = switch_data,
data_source_name = data_source_name)
script_name=sys.argv[0],
copyright_year=date.today().year,
angle_format_info_cases=angle_format_cases,
angle_format_switch=switch_data,
data_source_name=data_source_name)
with open('Format_table_autogen.cpp', 'wt') as out_file:
out_file.write(output_cpp)
out_file.close()
......@@ -364,11 +363,11 @@ def main():
enum_data = gen_enum_string(all_angle)
num_angle_formats = len(all_angle)
output_h = template_autogen_h.format(
script_name = sys.argv[0],
copyright_year = date.today().year,
angle_format_enum = enum_data,
data_source_name = data_source_name,
num_angle_formats = num_angle_formats)
script_name=sys.argv[0],
copyright_year=date.today().year,
angle_format_enum=enum_data,
data_source_name=data_source_name,
num_angle_formats=num_angle_formats)
with open('FormatID_autogen.h', 'wt') as out_file:
out_file.write(output_h)
out_file.close()
......
......@@ -99,12 +99,15 @@ internal_format_param = 'internalFormat'
angle_format_param = 'angleFormat'
angle_format_unknown = 'NONE'
def load_functions_name(internal_format, angle_format):
    """Build the load-function name for a GL internal-format/ANGLE-format pair."""
    # Strip the leading "GL_" prefix from the internal format enum name.
    stripped = internal_format[3:]
    return "{0}_to_{1}".format(stripped, angle_format)
def unknown_func_name(internal_format):
    """Return the name of the fallback ("default") load function for a format."""
    fallback_suffix = "default"
    return load_functions_name(internal_format, fallback_suffix)
def get_load_func(func_name, type_functions):
snippet = "LoadImageFunctionInfo " + func_name + "(GLenum type)\n"
snippet += "{\n"
......@@ -123,9 +126,12 @@ def get_load_func(func_name, type_functions):
return snippet
def get_unknown_load_func(angle_to_type_map, internal_format):
    """Return the fallback load-function snippet for an internal format.

    angle_to_type_map must contain an entry for the unknown (NONE) ANGLE
    format; that entry supplies the type-to-function mapping.
    """
    assert angle_format_unknown in angle_to_type_map
    return get_load_func(
        unknown_func_name(internal_format), angle_to_type_map[angle_format_unknown])
def parse_json(json_data):
table_data = ''
......@@ -136,7 +142,8 @@ def parse_json(json_data):
table_data += s + 'case ' + internal_format + ':\n'
do_switch = len(angle_to_type_map) > 1 or angle_to_type_map.keys()[0] != angle_format_unknown
do_switch = len(
angle_to_type_map) > 1 or angle_to_type_map.keys()[0] != angle_format_unknown
if do_switch:
table_data += s + '{\n'
......@@ -186,6 +193,7 @@ def parse_json(json_data):
return table_data, load_functions_data
def main():
# auto_script parameters.
......@@ -205,16 +213,18 @@ def main():
json_data = angle_format.load_json('load_functions_data.json')
switch_data, load_functions_data = parse_json(json_data)
output = template.format(internal_format = internal_format_param,
angle_format = angle_format_param,
switch_data = switch_data,
load_functions_data = load_functions_data,
copyright_year = date.today().year)
output = template.format(
internal_format=internal_format_param,
angle_format=angle_format_param,
switch_data=switch_data,
load_functions_data=load_functions_data,
copyright_year=date.today().year)
with open('load_functions_table_autogen.cpp', 'wt') as out_file:
out_file.write(output)
out_file.close()
return 0
if __name__ == '__main__':
sys.exit(main())
......@@ -19,11 +19,13 @@ os.chdir(os.path.dirname(os.path.abspath(sys.argv[0])))
sys.path.append('..')
import angle_format
def safe_append(the_dict, key, element):
    """Append element to the list stored at the_dict[key], creating it if absent."""
    the_dict.setdefault(key, []).append(element)
# Template for the header declaration of the dispatch table.
dispatch_table_header_template = """// GENERATED FILE - DO NOT EDIT.
// Generated by {script_name} using data from {data_source_name} and gl.xml.
......@@ -79,12 +81,15 @@ class DispatchTableGL : angle::NonCopyable
#endif // LIBGLESV2_RENDERER_GL_DISPATCH_TABLE_GL_AUTOGEN_H_
"""
def first_lower(str):
    """Return str with its first character lower-cased; empty strings pass through."""
    if not str:
        return str
    return str[0].lower() + str[1:]
def format_ep_decl(entry_point):
    """Declare a dispatch-table member for a GL entry point, initialized to nullptr."""
    proc_type = "PFNGL" + entry_point.upper() + "PROC"
    member = first_lower(entry_point)
    return " {0} {1} = nullptr;".format(proc_type, member)
# Template for the initialization file of the dispatch table.
dispatch_table_source_template = """// GENERATED FILE - DO NOT EDIT.
// Generated by {script_name} using data from {data_source_name} and gl.xml.
......@@ -155,9 +160,11 @@ void DispatchTableGL::initProcsSharedExtensionsNULL(const std::set<std::string>
}} // namespace rx
"""
def format_assign_ep(entry_point, ep):
    """Emit an ASSIGN() line binding loader name ep to the table member for entry_point."""
    # Drop the "gl" prefix before deriving the member name.
    member = first_lower(entry_point[2:])
    return ' ASSIGN("{0}", {1});'.format(ep, member)
def format_requirements_lines(required, entry_points):
major, minor = required
lines = [' if (version >= gl::Version(' + major + ', ' + minor + '))', ' {']
......@@ -165,12 +172,14 @@ def format_requirements_lines(required, entry_points):
lines += [' }']
return '\n'.join(lines)
def format_extension_requirements_lines(extension, entry_points, api):
    """Emit the C++ block that assigns entry points guarded by an extension check.

    entry_points is an iterable of (entry_point, loader_name) pairs, assigned
    in sorted order. The api argument is not used by this function; it is
    kept for a uniform call signature across the generators.
    """
    header = [' if (extensions.count("' + extension + '") != 0)', ' {']
    assigns = [format_assign_ep(entry_point, ep) for entry_point, ep in sorted(entry_points)]
    footer = [' }']
    return '\n'.join(header + assigns + footer)
def assign_null_line(line):
m = re.match(r' ASSIGN\("gl.*", (.+)\);', line)
if m:
......@@ -179,15 +188,19 @@ def assign_null_line(line):
else:
return line
def assign_null(entry):
    """Run every line of a generated entry through assign_null_line and rejoin."""
    nulled_lines = [assign_null_line(line) for line in entry.split('\n')]
    return '\n'.join(nulled_lines)
def nullify(data):
    """Apply assign_null to every generated entry in data, returning a new list."""
    return list(map(assign_null, data))
def format_param(param):
    """Concatenate all text content of an XML <param> element into one string."""
    pieces = list(param.itertext())
    return "".join(pieces)
null_functions_header_template = """// GENERATED FILE - DO NOT EDIT.
// Generated by {script_name} using data from {data_source_name} and gl.xml.
//
......@@ -229,6 +242,7 @@ namespace rx
}} // namespace rx
"""
def main():
# auto_script parameters.
......@@ -269,7 +283,7 @@ def main():
core_removed_eps = []
for core_removed_ep in xml_root.findall('feature/remove'):
assert(core_removed_ep.attrib['profile'] == 'core')
assert (core_removed_ep.attrib['profile'] == 'core')
for command in core_removed_ep.findall('./command'):
core_removed_eps.append(command.attrib['name'])
......@@ -333,15 +347,18 @@ def main():
if not gl_required:
gl_required = reqs
elif entry_point in core_removed_eps:
print('Upgrade ' + entry_point + ' to ' + str(reqs) + ' instead of ' + str(gl_required))
print('Upgrade ' + entry_point + ' to ' + str(reqs) + ' instead of ' +
str(gl_required))
gl_required = reqs
else:
print('Keep ' + entry_point + ' at ' + str(gl_required) + ' instead of ' + str(reqs))
print('Keep ' + entry_point + ' at ' + str(gl_required) +
' instead of ' + str(reqs))
elif api == 'gles2':
if not gles2_required:
gles2_required = reqs
else:
print("Duplicate for " + entry_point + ": " + str(reqs) + " and " + str(gles2_required))
print("Duplicate for " + entry_point + ": " + str(reqs) + " and " +
str(gles2_required))
else:
raise Exception('Bad api type: ' + api)
......@@ -373,7 +390,8 @@ def main():
full_ep = ep
if '_KHR_' in extension:
full_ep += 'KHR'
safe_append(gles2_extension_requirements, extension, (entry_point, full_ep))
safe_append(gles2_extension_requirements, extension,
(entry_point, full_ep))
if not (gl_required or gles2_required or extension):
raise Exception('Entry point ' + entry_point + ' not found in the xml.')
......@@ -386,11 +404,11 @@ def main():
table_data.append("\n".join(formatted))
dispatch_table_header = dispatch_table_header_template.format(
script_name = os.path.basename(sys.argv[0]),
data_source_name = data_source_name,
year = date.today().year,
file_name = dispatch_header_path,
table_data = "\n\n".join(table_data))
script_name=os.path.basename(sys.argv[0]),
data_source_name=data_source_name,
year=date.today().year,
file_name=dispatch_header_path,
table_data="\n\n".join(table_data))
with open(dispatch_header_path, "w") as out:
out.write(dispatch_table_header)
......@@ -401,7 +419,8 @@ def main():
gl_extensions_data = []
for extension, entry_points in sorted(gl_extension_requirements.iteritems()):
gl_extensions_data.append(format_extension_requirements_lines(extension, entry_points, "gl"))
gl_extensions_data.append(
format_extension_requirements_lines(extension, entry_points, "gl"))
gles2_data = []
for gles2_required, entry_points in sorted(gles2_requirements.iteritems()):
......@@ -409,27 +428,29 @@ def main():
gles2_extensions_data = []
for extension, entry_points in sorted(gles2_extension_requirements.iteritems()):
gles2_extensions_data.append(format_extension_requirements_lines(extension, entry_points, "gles2"))
gles2_extensions_data.append(
format_extension_requirements_lines(extension, entry_points, "gles2"))
both_extensions_data = []
for extension, entry_points in sorted(both_extension_requirements.iteritems()):
both_extensions_data.append(format_extension_requirements_lines(extension, entry_points, "gles2|gl"))
both_extensions_data.append(
format_extension_requirements_lines(extension, entry_points, "gles2|gl"))
dispatch_table_source = dispatch_table_source_template.format(
script_name = os.path.basename(sys.argv[0]),
data_source_name = data_source_name,
year = date.today().year,
file_name = dispatch_source_path,
gl_data = "\n\n".join(gl_data),
gl_extensions_data = "\n\n".join(gl_extensions_data),
gles2_data = "\n\n".join(gles2_data),
gles2_extensions_data = "\n\n".join(gles2_extensions_data),
both_extensions_data = "\n\n".join(both_extensions_data),
gl_null_data = "\n\n".join(nullify(gl_data)),
gl_null_extensions_data = "\n\n".join(nullify(gl_extensions_data)),
gles2_null_data = "\n\n".join(nullify(gles2_data)),
gles2_null_extensions_data = "\n\n".join(nullify(gles2_extensions_data)),
both_null_extensions_data = "\n\n".join(nullify(both_extensions_data)))
script_name=os.path.basename(sys.argv[0]),
data_source_name=data_source_name,
year=date.today().year,
file_name=dispatch_source_path,
gl_data="\n\n".join(gl_data),
gl_extensions_data="\n\n".join(gl_extensions_data),
gles2_data="\n\n".join(gles2_data),
gles2_extensions_data="\n\n".join(gles2_extensions_data),
both_extensions_data="\n\n".join(both_extensions_data),
gl_null_data="\n\n".join(nullify(gl_data)),
gl_null_extensions_data="\n\n".join(nullify(gl_extensions_data)),
gles2_null_data="\n\n".join(nullify(gles2_data)),
gles2_null_extensions_data="\n\n".join(nullify(gles2_extensions_data)),
both_null_extensions_data="\n\n".join(nullify(both_extensions_data)))
with open(dispatch_source_path, "w") as out:
out.write(dispatch_table_source)
......@@ -463,21 +484,21 @@ def main():
null_stubs = [command_defs[entry_point] for entry_point in sorted(all_entry_points)]
null_functions_header = null_functions_header_template.format(
script_name = os.path.basename(sys.argv[0]),
data_source_name = data_source_name,
year = date.today().year,
file_name = null_functions_header_path,
table_data = "\n".join(null_decls))
script_name=os.path.basename(sys.argv[0]),
data_source_name=data_source_name,
year=date.today().year,
file_name=null_functions_header_path,
table_data="\n".join(null_decls))
with open(null_functions_header_path, "w") as out:
out.write(null_functions_header)
null_functions_source = null_functions_source_template.format(
script_name = os.path.basename(sys.argv[0]),
data_source_name = data_source_name,
year = date.today().year,
file_name = null_functions_source_path,
table_data = "\n\n".join(null_stubs))
script_name=os.path.basename(sys.argv[0]),
data_source_name=data_source_name,
year=date.today().year,
file_name=null_functions_source_path,
table_data="\n\n".join(null_stubs))
with open(null_functions_source_path, "w") as out:
out.write(null_functions_source)
......
......@@ -75,7 +75,7 @@ image_basic_template = """imageFormatID = {image};
vkImageFormat = {vk_image_format};
imageInitializerFunction = {image_initializer};"""
image_struct_template="{{{image}, {vk_image_format}, {image_initializer}}}"
image_struct_template = "{{{image}, {vk_image_format}, {image_initializer}}}"
image_fallback_template = """{{
static constexpr ImageFormatInitInfo kInfo[] = {{{image_list}}};
......@@ -88,7 +88,7 @@ vkBufferFormatIsPacked = {vk_buffer_format_is_packed};
vertexLoadFunction = {vertex_load_function};
vertexLoadRequiresConversion = {vertex_load_converts};"""
buffer_struct_template="""{{{buffer}, {vk_buffer_format}, {vk_buffer_format_is_packed},
buffer_struct_template = """{{{buffer}, {vk_buffer_format}, {vk_buffer_format_is_packed},
{vertex_load_function}, {vertex_load_converts}}}"""
buffer_fallback_template = """{{
......@@ -106,10 +106,7 @@ def gen_format_case(angle, internal_format, vk_json_data):
vk_overrides = vk_json_data["overrides"]
vk_fallbacks = vk_json_data["fallbacks"]
args = dict(
format_id=angle,
internal_format=internal_format,
image_template="",
buffer_template="")
format_id=angle, internal_format=internal_format, image_template="", buffer_template="")
if ((angle not in vk_map) and (angle not in vk_overrides) and
(angle not in vk_fallbacks)) or angle == 'NONE':
......@@ -136,8 +133,7 @@ def gen_format_case(angle, internal_format, vk_json_data):
buffer="angle::FormatID::" + format,
vk_buffer_format=vk_map[format],
vk_buffer_format_is_packed=is_packed(vk_map[format]),
vertex_load_function=angle_format.get_vertex_copy_function(
angle, format),
vertex_load_function=angle_format.get_vertex_copy_function(angle, format),
vertex_load_converts='false' if angle == format else 'true',
)
......@@ -148,9 +144,7 @@ def gen_format_case(angle, internal_format, vk_json_data):
elif len(images) > 1:
args.update(
image_template=image_fallback_template,
image_list=", ".join(
image_struct_template.format(**image_args(i))
for i in images))
image_list=", ".join(image_struct_template.format(**image_args(i)) for i in images))
buffers = get_formats(angle, "buffer")
if len(buffers) == 1:
......@@ -172,11 +166,7 @@ def main():
# auto_script parameters.
if len(sys.argv) > 1:
inputs = [
'../angle_format.py',
'../angle_format_map.json',
input_file_name
]
inputs = ['../angle_format.py', '../angle_format_map.json', input_file_name]
outputs = [out_file_name]
if sys.argv[1] == 'inputs':
......@@ -190,15 +180,16 @@ def main():
angle_to_gl = angle_format.load_inverse_table(os.path.join('..', 'angle_format_map.json'))
vk_json_data = angle_format.load_json(input_file_name)
vk_cases = [gen_format_case(angle, gl, vk_json_data)
for angle, gl in sorted(angle_to_gl.iteritems())]
vk_cases = [
gen_format_case(angle, gl, vk_json_data) for angle, gl in sorted(angle_to_gl.iteritems())
]
output_cpp = template_table_autogen_cpp.format(
copyright_year = date.today().year,
format_case_data = "\n".join(vk_cases),
script_name = __file__,
out_file_name = out_file_name,
input_file_name = input_file_name)
copyright_year=date.today().year,
format_case_data="\n".join(vk_cases),
script_name=__file__,
out_file_name=out_file_name,
input_file_name=input_file_name)
with open(out_file_name, 'wt') as out_file:
out_file.write(output_cpp)
......
......@@ -15,7 +15,6 @@ import angle_format
import xml.etree.ElementTree as etree
import sys, os
template_table_autogen_cpp = """// GENERATED FILE - DO NOT EDIT.
// Generated by {script_name} using data from {input_file_name} and
// the vk.xml file situated at
......@@ -81,9 +80,9 @@ def gen_format_case(index, vk_to_index_to_format_map, vk_map):
buffer_features_str = "0"
return template_format_property.format(
vk_format = vk_format,
optimal_features = optimal_features_str,
buffer_features = buffer_features_str)
vk_format=vk_format,
optimal_features=optimal_features_str,
buffer_features=buffer_features_str)
def main():
......@@ -121,15 +120,18 @@ def main():
vk_format_name_to_index_map[index] = vk_format
vk_map = angle_format.load_json(input_file_name)
vk_cases = [gen_format_case(index, vk_format_name_to_index_map, vk_map) for index in vk_format_name_to_index_map]
vk_cases = [
gen_format_case(index, vk_format_name_to_index_map, vk_map)
for index in vk_format_name_to_index_map
]
output_cpp = template_table_autogen_cpp.format(
copyright_year = date.today().year,
num_formats = num_formats,
format_case_data = "\n,".join(vk_cases),
script_name = __file__,
out_file_name = out_file_name,
input_file_name = input_file_name)
copyright_year=date.today().year,
num_formats=num_formats,
format_case_data="\n,".join(vk_cases),
script_name=__file__,
out_file_name=out_file_name,
input_file_name=input_file_name)
with open(out_file_name, 'wt') as out_file:
out_file.write(output_cpp)
......
......@@ -56,6 +56,7 @@ size_t g_numProcs = {num_procs};
sys.path.append('../libANGLE/renderer')
import angle_format
def main():
# auto_script parameters.
......@@ -82,21 +83,23 @@ def main():
all_functions[function] = "gl::" + function[2:]
# Special handling for EGL_ANGLE_explicit_context extension
if support_egl_ANGLE_explicit_context:
all_functions[function + "ContextANGLE"] = "gl::" + function[2:] + "ContextANGLE"
all_functions[function +
"ContextANGLE"] = "gl::" + function[2:] + "ContextANGLE"
elif function.startswith("egl"):
all_functions[function] = "EGL_" + function[3:]
else:
all_functions[function] = function
proc_data = [(' {"%s", P(%s)}' % (func, angle_func)) for func, angle_func in sorted(all_functions.iteritems())]
proc_data = [(' {"%s", P(%s)}' % (func, angle_func))
for func, angle_func in sorted(all_functions.iteritems())]
with open(out_file_name, 'w') as out_file:
output_cpp = template_cpp.format(
script_name = sys.argv[0],
data_source_name = data_source_name,
copyright_year = date.today().year,
proc_data = ",\n".join(proc_data),
num_procs = len(proc_data))
script_name=sys.argv[0],
data_source_name=data_source_name,
copyright_year=date.today().year,
proc_data=",\n".join(proc_data),
num_procs=len(proc_data))
out_file.write(output_cpp)
out_file.close()
return 0
......
......@@ -2,6 +2,7 @@ import os
import re
import sys
def ReadFileAsLines(filename):
"""Reads a file, removing blank lines and lines that start with #"""
file = open(filename, "r")
......@@ -14,17 +15,20 @@ def ReadFileAsLines(filename):
lines.append(line)
return lines
def GetSuiteName(testName):
    """Return the suite prefix of a test path: everything before the first '/'."""
    # NOTE(review): when no '/' is present, find() returns -1 and the last
    # character is dropped — preserved here as existing behavior.
    slashIndex = testName.find("/")
    return testName[:slashIndex]
def GetTestName(testName):
    """Derive a clean test name from the last two components of a test path.

    The ".test" extension is stripped and any remaining dots are replaced
    with underscores so the result is usable as a C++ identifier fragment.
    """
    replacements = {".test": "", ".": "_"}
    splitTestName = testName.split("/")
    cleanName = splitTestName[-2] + "_" + splitTestName[-1]
    for replaceKey in replacements:
        cleanName = cleanName.replace(replaceKey, replacements[replaceKey])
    return cleanName
def GenerateTests(outFile, testNames):
# Remove duplicate tests
testNames = list(set(testNames))
......@@ -43,8 +47,9 @@ def GenerateTests(outFile, testNames):
outFile.write(" run(\"" + test + "\");\n")
outFile.write("}\n\n")
def GenerateTestList(sourceFile, rootDir):
tests = [ ]
tests = []
fileName, fileExtension = os.path.splitext(sourceFile)
if fileExtension == ".run":
lines = ReadFileAsLines(sourceFile)
......@@ -52,7 +57,8 @@ def GenerateTestList(sourceFile, rootDir):
tests += GenerateTestList(os.path.join(os.path.dirname(sourceFile), line), rootDir)
elif fileExtension == ".test":
tests.append(os.path.relpath(os.path.realpath(sourceFile), rootDir).replace("\\", "/"))
return tests;
return tests
def main(argv):
tests = GenerateTestList(argv[0], argv[1])
......@@ -64,5 +70,6 @@ def main(argv):
return 0
if __name__ == '__main__':
sys.exit(main(sys.argv[1:]))
......@@ -18,12 +18,10 @@ import shutil
import subprocess
import sys
# GN build arguments: a clang, non-debug build with the Vulkan backend enabled.
gn_args = """is_clang = true
is_debug = false
angle_enable_vulkan = true"""
# Host-OS detection flags derived from platform.system().
is_windows = platform.system() == 'Windows'
is_linux = platform.system() == 'Linux'
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment