Tests: add CMake option to run Cycles regression tests on GPU devices
CYCLES_TEST_DEVICES is a list of devices to run the tests on (CPU, CUDA, OPTIX, OPENCL); it is set to CPU only by default.

Test output is now written to build/tests/cycles/<device>, and the HTML report has separate report pages for the different devices, with an option to compare between CPU and GPU renders.

Various GPU tests are still failing due to CPU/GPU differences; these are still to be fixed or blacklisted.

Ref T82193
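As a usage illustration, here is a minimal, hypothetical driver sketch built only on the signatures visible in the diff below (Report(title, output_dir, idiff, device=None), set_compare_engine(), run()); the module import path, the idiff binary location, the arguments callback, and the directory layout are assumptions, not part of this commit:

    # Hypothetical sketch of driving the reworked Report once per device.
    # Only the Report(...), set_compare_engine(...) and run(...) shapes come
    # from this diff; everything else here is a placeholder.
    from modules.render_report import Report  # assumed module location

    def arguments_cb(filepath, output_filepath):
        # Assumed callback: build the Blender CLI arguments for one test file.
        return [filepath, '-o', output_filepath, '-f', '1']

    for device in ('CPU', 'CUDA'):  # e.g. the CYCLES_TEST_DEVICES list
        report = Report('cycles', 'build/tests/cycles', '/usr/bin/idiff', device=device)
        if device != 'CPU':
            # Offer a comparison page against the CPU renders of the same engine.
            report.set_compare_engine('cycles', 'CPU')
        ok = report.run('tests/render', 'blender', arguments_cb, batch=True)

With a device set, the constructor rewrites the title to e.g. "Cycles CUDA" and the output directory to build/tests/cycles/cuda, which is where the per-device report pages come from.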
@@ -102,6 +102,7 @@ class Report:
     __slots__ = (
         'title',
         'output_dir',
+        'global_dir',
         'reference_dir',
         'idiff',
         'pixelated',
@@ -112,17 +113,24 @@ class Report:
         'failed_tests',
         'passed_tests',
         'compare_tests',
-        'compare_engines'
+        'compare_engine',
+        'device'
     )
 
-    def __init__(self, title, output_dir, idiff):
+    def __init__(self, title, output_dir, idiff, device=None):
         self.title = title
         self.output_dir = output_dir
+        self.global_dir = os.path.dirname(output_dir)
         self.reference_dir = 'reference_renders'
         self.idiff = idiff
-        self.compare_engines = None
+        self.compare_engine = None
         self.fail_threshold = 0.016
         self.fail_percent = 1
+        self.device = device
+
+        if device:
+            self.title = self._engine_title(title, device)
+            self.output_dir = self._engine_path(self.output_dir, device.lower())
 
         self.pixelated = False
         self.verbose = os.environ.get("BLENDER_VERBOSE") is not None
@@ -147,8 +155,8 @@ class Report:
     def set_reference_dir(self, reference_dir):
         self.reference_dir = reference_dir
 
-    def set_compare_engines(self, engine, other_engine):
-        self.compare_engines = (engine, other_engine)
+    def set_compare_engine(self, other_engine, other_device=None):
+        self.compare_engine = (other_engine, other_device)
 
     def run(self, dirpath, blender, arguments_cb, batch=False):
         # Run tests and output report.
@@ -156,7 +164,7 @@ class Report:
         ok = self._run_all_tests(dirname, dirpath, blender, arguments_cb, batch)
         self._write_data(dirname)
         self._write_html()
-        if self.compare_engines:
+        if self.compare_engine:
             self._write_html(comparison=True)
         return ok
 
@@ -171,7 +179,7 @@ class Report:
         filepath = os.path.join(outdir, "passed.data")
         pathlib.Path(filepath).write_text(self.passed_tests)
 
-        if self.compare_engines:
+        if self.compare_engine:
             filepath = os.path.join(outdir, "compare.data")
             pathlib.Path(filepath).write_text(self.compare_tests)
 
@@ -181,12 +189,26 @@ class Report:
         else:
             return """<li class="breadcrumb-item"><a href="%s">%s</a></li>""" % (href, title)
 
+    def _engine_title(self, engine, device):
+        if device:
+            return engine.title() + ' ' + device
+        else:
+            return engine.title()
+
+    def _engine_path(self, path, device):
+        if device:
+            return os.path.join(path, device.lower())
+        else:
+            return path
+
     def _navigation_html(self, comparison):
         html = """<nav aria-label="breadcrumb"><ol class="breadcrumb">"""
-        html += self._navigation_item("Test Reports", "../report.html", False)
+        base_path = os.path.relpath(self.global_dir, self.output_dir)
+        global_report_path = os.path.join(base_path, "report.html")
+        html += self._navigation_item("Test Reports", global_report_path, False)
         html += self._navigation_item(self.title, "report.html", not comparison)
-        if self.compare_engines:
-            compare_title = "Compare with %s" % self.compare_engines[1].capitalize()
+        if self.compare_engine:
+            compare_title = "Compare with %s" % self._engine_title(*self.compare_engine)
             html += self._navigation_item(compare_title, "compare.html", comparison)
         html += """</ol></nav>"""
 
@@ -233,8 +255,8 @@ class Report:
 
         if comparison:
             title = self.title + " Test Compare"
-            engine_self = self.compare_engines[0].capitalize()
-            engine_other = self.compare_engines[1].capitalize()
+            engine_self = self.title
+            engine_other = self._engine_title(*self.compare_engine)
             columns_html = "<tr><th>Name</th><th>%s</th><th>%s</th>" % (engine_self, engine_other)
         else:
             title = self.title + " Test Report"
@@ -300,9 +322,8 @@ class Report:
 
         # Update global report
         if not comparison:
-            global_output_dir = os.path.dirname(self.output_dir)
             global_failed = failed if not comparison else None
-            global_report.add(global_output_dir, "Render", self.title, filepath, global_failed)
+            global_report.add(self.global_dir, "Render", self.title, filepath, global_failed)
 
     def _relative_url(self, filepath):
         relpath = os.path.relpath(filepath, self.output_dir)
@@ -340,8 +361,9 @@ class Report:
         else:
             self.passed_tests += test_html
 
-        if self.compare_engines:
-            ref_url = os.path.join("..", self.compare_engines[1], new_url)
+        if self.compare_engine:
+            base_path = os.path.relpath(self.global_dir, self.output_dir)
+            ref_url = os.path.join(base_path, self._engine_path(*self.compare_engine), new_url)
 
         test_html = """
             <tr{tr_style}>
@@ -445,6 +467,9 @@ class Report:
             if not batch:
                 break
 
+        if self.device:
+            command.extend(['--', '--cycles-device', self.device])
+
         # Run process
         crash = False
         output = None
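For reference, a small standalone sketch of the naming scheme the two new helpers implement; the helper bodies mirror _engine_title and _engine_path from the diff above, while the example inputs and the build/tests/cycles path are assumptions based on the commit message:

    # Sketch of the per-device naming the new helpers produce; the concrete
    # paths shown are assumptions, not taken from an actual test run.
    import os

    def engine_title(engine, device):
        # Mirrors Report._engine_title from the diff above.
        return engine.title() + ' ' + device if device else engine.title()

    def engine_path(path, device):
        # Mirrors Report._engine_path from the diff above.
        return os.path.join(path, device.lower()) if device else path

    print(engine_title('cycles', 'CUDA'))            # -> Cycles CUDA
    print(engine_path('build/tests/cycles', 'cuda')) # -> build/tests/cycles/cuda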