Buildbot: Support running benchmarks for PRs #9

Open
Lukas Stockner wants to merge 1 commit from LukasStockner/blender-devops:support-patch-benchmark into main

4 changed files with 113 additions and 29 deletions
Showing only changes of commit 31bfb2f10f

View File

@@ -50,6 +50,8 @@ code_pipeline_patch_step_names = [
     "compile-code",
     "compile-gpu",
     "compile-install",
+    "benchmark-code",
+    "deliver-benchmark-results",
     "test-code",
     "sign-code-binaries",
     "package-code-binaries",
@@ -72,6 +74,12 @@ code_pipeline_lint_step_names = [
     "lint-code",
 ]
 
+# Steps for benchmarking.
+code_pipeline_benchmark_step_names = [
+    "benchmark-code",
+    "deliver-benchmark-results",
+]
+
 # Steps for testing.
 code_pipeline_test_step_names = [
     "test-code",
@@ -213,6 +221,13 @@ scheduler_properties_patch = [
     buildbot.plugins.util.StringParameter(
         name="pull_revision", label="Pull Revision:", required=False, hide=True, size=80, default=""
     ),
+    buildbot.plugins.util.BooleanParameter(
+        name="needs_benchmark",
+        label="Benchmark -> run benchmark to compare performance",
+        required=True,
+        strict=True,
+        default=False,
+    ),
 ]
 
 scheduler_properties_patch += scheduler_properties_common
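For context: needs_benchmark follows the usual Buildbot pattern where a BooleanParameter attached to a scheduler surfaces as a build property that later code reads via getProperty. A minimal sketch of that flow, with a hypothetical scheduler and builder name that are not taken from this repository:

import buildbot.plugins.util as util

# Hypothetical wiring; the real scheduler setup lives elsewhere in this config.
force_patch = util.ForceScheduler(
    name="force-patch-build",
    builderNames=["example-patch-builder"],
    properties=scheduler_properties_patch,  # includes the needs_benchmark BooleanParameter above
)

# Any step (or doStepIf callback) of the resulting build can then read:
#   step.getProperty("needs_benchmark")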
@@ -300,6 +315,7 @@ def needs_do_code_pipeline_step(step):
     )
 
     needs_package_delivery = step.getProperty("needs_package_delivery")
     needs_gpu_binaries = step.getProperty("needs_gpu_binaries")
+    needs_benchmark = step.getProperty("needs_benchmark")
     needs_skip_tests = step.getProperty("needs_skip_tests")
     python_module = step.getProperty("python_module")
@@ -310,6 +326,8 @@ def needs_do_code_pipeline_step(step):
         needs_do_it = not needs_skip_tests
     elif step.name == "compile-gpu":
         needs_do_it = needs_package_delivery or needs_gpu_binaries
+    elif step.name in code_pipeline_benchmark_step_names:
+        needs_do_it = needs_benchmark
     elif is_package_delivery_step:
         needs_do_it = needs_package_delivery
 
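Judging by its name and signature, needs_do_code_pipeline_step acts as the doStepIf predicate for pipeline steps, so returning needs_benchmark here is enough to skip both benchmark steps unless the checkbox was set. A rough sketch of the assumed consumption (illustrative only; the real steps are built in populate() further down, and the command is a placeholder):

import buildbot.plugins.steps

benchmark_step = buildbot.plugins.steps.ShellCommand(
    name="benchmark-code",
    command=["echo", "run benchmarks"],  # placeholder command
    doStepIf=needs_do_code_pipeline_step,
)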
@@ -357,6 +375,31 @@ def create_deliver_code_binaries_step(worker_config, track_id, pipeline_type):
     )
 
 
+def create_deliver_benchmark_results_step(worker_config, track_id, pipeline_type):
+    file_size_in_mb = 500 * 1024 * 1024
+    worker_source_path = pathlib.Path(f"../../../../git/blender-{track_id}/build_package")
+    master_dest_path = pathlib.Path(
+        f"{worker_config.buildbot_download_folder}/{pipeline_type}"
+    ).expanduser()
+
+    benchmark_worker_source_path = worker_source_path / "benchmark"
+    benchmark_master_dest_path = master_dest_path / "benchmarks"
+
+    return LinkMultipleFileUpload(
+        name="deliver-benchmark-results",
+        maxsize=file_size_in_mb,
+        workdir=f"{benchmark_worker_source_path}",
+        glob=True,
+        workersrcs=["*"],
+        masterdest=f"{benchmark_master_dest_path}",
+        mode=0o644,
+        url=f"../download/{pipeline_type}/benchmarks",
+        description="running",
+        descriptionDone="completed",
+        alwaysRun=True,
+    )
+
+
 def create_deliver_test_results_step(worker_config, track_id, pipeline_type):
     file_size_in_mb = 500 * 1024 * 1024
     worker_source_path = pathlib.Path(f"../../../../git/blender-{track_id}/build_package")
@@ -391,8 +434,12 @@ def create_deliver_test_results_step(worker_config, track_id, pipeline_type):
 def next_worker_code(worker_names_gpu, builder, workers, request):
     # Use a GPU worker if needed and supported for this platform.
     # NVIDIA worker is currently reserved for GPU builds only.
+    # Benchmark also needs a GPU worker so that results are comparable
+    # to the daily benchmark run.
     compatible_workers = []
-    if request.properties.getProperty("needs_gpu_tests", False) and worker_names_gpu:
+    needs_gpu_tests = request.properties.getProperty("needs_gpu_tests", False)
+    needs_benchmark = request.properties.getProperty("needs_benchmark", False)
+    if (needs_gpu_tests or needs_benchmark) and worker_names_gpu:
         for worker in workers:
             if worker.worker.workername in worker_names_gpu:
                 compatible_workers.append(worker)
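next_worker_code has the shape of a Buildbot nextWorker callback (builder, workers, buildrequest) with the GPU worker list bound up front, presumably via functools.partial where the builders are configured. A hedged sketch of that assumed wiring, with placeholder builder and worker names:

import functools
import buildbot.plugins.util as util

# Illustrative only; the real BuilderConfig lives elsewhere in this config.
builder_config = util.BuilderConfig(
    name="example-patch-linux",
    workernames=["worker-cpu-1", "worker-gpu-1"],
    factory=util.BuildFactory(),
    nextWorker=functools.partial(next_worker_code, ["worker-gpu-1"]),
)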
@@ -459,6 +506,8 @@ def populate(devops_env_id):
                step = create_deliver_code_binaries_step(worker_config, track_id, pipeline_type)
            elif step_name == "deliver-test-results":
                step = create_deliver_test_results_step(worker_config, track_id, pipeline_type)
+            elif step_name == "deliver-benchmark-results":
+                step = create_deliver_benchmark_results_step(worker_config, track_id, pipeline_type)
            else:
                needs_halt_on_failure = True
                if step_name in code_pipeline_test_step_names:
@@ -618,6 +667,7 @@ def populate(devops_env_id):
            ),
        }
        if pipeline_type == "patch":
+            trigger_properties["needs_benchmark"] = buildbot.plugins.util.Property("needs_benchmark")
            trigger_properties["patch_id"] = buildbot.plugins.util.Property("patch_id")
            trigger_properties["revision"] = buildbot.plugins.util.Property("revision")
            trigger_properties["build_configuration"] = buildbot.plugins.util.Property(

View File

@@ -36,7 +36,7 @@ def create_deliver_step(devops_env_id):
     return LinkMultipleFileUpload(
         name="deliver",
         maxsize=file_size_in_mb,
-        workdir=f"{worker_source_path}",
+        workdir=f"{worker_source_path}/benchmark",
         glob=True,
         workersrcs=["main-*"],
         masterdest=f"{master_dest_path}",

View File

@@ -16,18 +16,7 @@ import worker.utils
 import urllib.request
 
 
-def create_upload(
-    builder: worker.blender.CodeBuilder, benchmark_path: pathlib.Path, revision: str
-) -> None:
-    # Create package directory.
-    branch = builder.branch_id.replace("blender-", "").replace("-release", "")
-    name = f"{branch}-{builder.platform}-{builder.architecture}"
-    package_dir = builder.package_dir / name
-    worker.utils.remove_dir(package_dir)
-    os.makedirs(package_dir, exist_ok=True)
-
-    # Fetch existing summary
+def fetch_previous(builder: worker.blender.CodeBuilder, json_url: str, json_path: pathlib.Path):
     worker_config = conf.worker.get_config(builder.service_env_id)
     base_urls = {
         "LOCAL": str(worker_config.buildbot_download_folder),
@@ -35,37 +24,80 @@
         "PROD": "https://builder.blender.org/download",
     }
     base_url = base_urls[builder.service_env_id]
+    json_url = f"{base_url}/{json_url}"
 
-    summary_json_url = f"{base_url}/daily/benchmarks/{name}/summary.json"
-    summary_json_path = package_dir / "summary.json"
     try:
         if builder.service_env_id == "LOCAL":
-            worker.utils.copy_file(pathlib.Path(summary_json_url), summary_json_path)
+            worker.utils.copy_file(pathlib.Path(json_url), json_path)
         else:
-            urllib.request.urlretrieve(summary_json_url, summary_json_path)
+            urllib.request.urlretrieve(json_url, json_path)
+        return None
     except Exception as e:
-        error_msg = str(e)
-        worker.utils.warning(f"Could not retrieve benchmark summary.json: {error_msg}")
+        return str(e)
+
+
+def merge_results(old: pathlib.Path, new: pathlib.Path, out: pathlib.Path):
+    combined = []
+    if old.exists():
+        combined = json.loads(old.read_text())
+    combined += json.loads(new.read_text())
+    out.write_text(json.dumps(combined, indent=2))
+
+
+def create_upload(
+    builder: worker.blender.CodeBuilder, benchmark_path: pathlib.Path, revision: str
+) -> None:
+    # Create package directory.
+    branch = builder.branch_id.replace("blender-", "").replace("-release", "")
+    baseline_name = f"{branch}-{builder.platform}-{builder.architecture}"
+    if builder.patch_id:
+        name = f"PR{builder.patch_id}-{builder.platform}-{builder.architecture}"
+    else:
+        name = baseline_name
+
+    package_benchmark_dir = builder.package_dir / "benchmark" / name
+    worker.utils.remove_dir(package_benchmark_dir)
+    os.makedirs(package_benchmark_dir, exist_ok=True)
+
+    # Fetch baseline summary
+    baseline_json_url = f"daily/benchmarks/{baseline_name}/summary.json"
+    baseline_json_path = package_benchmark_dir / "baseline.json"
+    if error_msg := fetch_previous(builder, baseline_json_url, baseline_json_path):
+        worker.utils.warning(f"Could not retrieve previous benchmark data: {error_msg}")
 
     # Create json files in package directory.
     results_json_path = benchmark_path / "results.json"
-    revision_json_path = package_dir / f"{revision}.json"
+    revision_json_path = package_benchmark_dir / f"{revision}.json"
     worker.utils.copy_file(results_json_path, revision_json_path)
 
-    summary_json = []
-    if summary_json_path.exists():
-        summary_json = json.loads(summary_json_path.read_text())
-    summary_json += json.loads(results_json_path.read_text())
-    summary_json_path.write_text(json.dumps(summary_json, indent=2))
+    # Build new summary
+    summary_json_path = package_benchmark_dir / "summary.json"
+    if builder.patch_id:
+        # Fetch patch summary
+        patch_json_url = f"patch/benchmarks/{name}/summary.json"
+        patch_json_path = package_benchmark_dir / "patch.json"
+        # No need to warn on errors here, it's normal that there might not be a result yet
+        fetch_previous(builder, patch_json_url, patch_json_path)
+
+        # Add new result to patch summary
+        merge_results(patch_json_path, results_json_path, summary_json_path)
+
+        # Generate diff between baseline result and the patch results
+        cmd = ["diff", baseline_json_path, summary_json_path]
+    else:
+        # Add new result to baseline summary
+        merge_results(baseline_json_path, results_json_path, summary_json_path)
+
+        # Generate graph showing results over time
+        cmd = ["graph", summary_json_path]
 
     # Create html file in package directory.
-    report_html_path = package_dir / "report.html"
+    report_html_path = package_benchmark_dir / "report.html"
     cmd = [
         sys.executable,
         builder.code_path / "tests" / "performance" / "benchmark.py",
-        "graph",
-        summary_json_path,
+    ] + cmd + [
         "-o",
         report_html_path,
     ]
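Net effect of the cmd handling above: the report generator is invoked in one of two modes, roughly as below (paths shortened for illustration). The diff mode assumes tests/performance/benchmark.py accepts a diff subcommand alongside the existing graph one.

# Daily/baseline build: plot the accumulated summary over time.
#   python tests/performance/benchmark.py graph summary.json -o report.html
#
# PR build: compare the merged per-PR results against the daily baseline.
#   python tests/performance/benchmark.py diff baseline.json summary.json -o report.html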

View File

@@ -17,6 +17,7 @@ import worker.utils
 import worker.blender.update
 import worker.blender.lint
 import worker.blender.compile
+import worker.blender.benchmark
 import worker.blender.test
 import worker.blender.sign
 import worker.blender.pack
@@ -30,6 +31,7 @@ if __name__ == "__main__":
     steps["compile-code"] = worker.blender.compile.compile_code
     steps["compile-gpu"] = worker.blender.compile.compile_gpu
     steps["compile-install"] = worker.blender.compile.compile_install
+    steps["benchmark-code"] = worker.blender.benchmark.benchmark
     steps["test-code"] = worker.blender.test.test
     steps["sign-code-binaries"] = worker.blender.sign.sign
     steps["package-code-binaries"] = worker.blender.pack.pack