From c7261ae5aafda63b4041e45e4e76a3500d3eb545 Mon Sep 17 00:00:00 2001
From: Mrityunjay Raj
Date: Wed, 18 Feb 2026 00:45:36 +0530
Subject: [PATCH] benchmark crud: suppress compact warnings during benchmark
 runs, fixes #9365

---
 src/borg/archiver/benchmark_cmd.py | 125 ++++++++++++++++-------------
 1 file changed, 68 insertions(+), 57 deletions(-)

diff --git a/src/borg/archiver/benchmark_cmd.py b/src/borg/archiver/benchmark_cmd.py
index 028ef11f6..d3ecdc890 100644
--- a/src/borg/archiver/benchmark_cmd.py
+++ b/src/borg/archiver/benchmark_cmd.py
@@ -2,6 +2,7 @@ import argparse
 from contextlib import contextmanager
 import functools
 import json
+import logging
 import os
 import tempfile
 import time
@@ -29,67 +30,77 @@ class BenchmarkMixIn:
             return self.parse_args(cmd)
 
         def measurement_run(repo, path):
-            compression = "--compression=none"
-            # measure create perf (without files cache to always have it chunking)
-            t_start = time.monotonic()
-            rc = get_reset_ec(
-                self.do_create(
-                    parse_args(
-                        args,
-                        [
-                            f"--repo={repo}",
-                            "create",
-                            compression,
-                            "--files-cache=disabled",
-                            "borg-benchmark-crud1",
-                            path,
-                        ],
+            # Suppress "Done. Run borg compact..." warnings from internal do_delete() calls;
+            # they clutter benchmark output and are irrelevant here (repo is temporary).
+            archiver_logger = logging.getLogger("borg.archiver")
+            original_level = archiver_logger.level
+            archiver_logger.setLevel(logging.ERROR)
+            try:
+                compression = "--compression=none"
+                # measure create perf (without files cache to always have it chunking)
+                t_start = time.monotonic()
+                rc = get_reset_ec(
+                    self.do_create(
+                        parse_args(
+                            args,
+                            [
+                                f"--repo={repo}",
+                                "create",
+                                compression,
+                                "--files-cache=disabled",
+                                "borg-benchmark-crud1",
+                                path,
+                            ],
+                        )
                     )
                 )
-            )
-            t_end = time.monotonic()
-            dt_create = t_end - t_start
-            assert rc == 0
-            # now build files cache
-            rc1 = get_reset_ec(
-                self.do_create(
-                    parse_args(args, [f"--repo={repo}", "create", compression, "borg-benchmark-crud2", path])
+                t_end = time.monotonic()
+                dt_create = t_end - t_start
+                assert rc == 0
+                # now build files cache
+                rc1 = get_reset_ec(
+                    self.do_create(
+                        parse_args(args, [f"--repo={repo}", "create", compression, "borg-benchmark-crud2", path])
+                    )
                 )
-            )
-            rc2 = get_reset_ec(
-                self.do_delete(parse_args(args, [f"--repo={repo}", "delete", "-a", "borg-benchmark-crud2"]))
-            )
-            assert rc1 == rc2 == 0
-            # measure a no-change update (archive1 is still present)
-            t_start = time.monotonic()
-            rc1 = get_reset_ec(
-                self.do_create(
-                    parse_args(args, [f"--repo={repo}", "create", compression, "borg-benchmark-crud3", path])
+                rc2 = get_reset_ec(
+                    self.do_delete(parse_args(args, [f"--repo={repo}", "delete", "-a", "borg-benchmark-crud2"]))
+                )
+                assert rc1 == rc2 == 0
+                # measure a no-change update (archive1 is still present)
+                t_start = time.monotonic()
+                rc1 = get_reset_ec(
+                    self.do_create(
+                        parse_args(args, [f"--repo={repo}", "create", compression, "borg-benchmark-crud3", path])
+                    )
                 )
-            )
-            t_end = time.monotonic()
-            dt_update = t_end - t_start
-            rc2 = get_reset_ec(
-                self.do_delete(parse_args(args, [f"--repo={repo}", "delete", "-a", "borg-benchmark-crud3"]))
-            )
-            assert rc1 == rc2 == 0
-            # measure extraction (dry-run: without writing result to disk)
-            t_start = time.monotonic()
-            rc = get_reset_ec(
-                self.do_extract(parse_args(args, [f"--repo={repo}", "extract", "borg-benchmark-crud1", "--dry-run"]))
-            )
-            t_end = time.monotonic()
-            dt_extract = t_end - t_start
-            assert rc == 0
-            # measure archive deletion (of LAST present archive with the data)
-            t_start = time.monotonic()
-            rc = get_reset_ec(
-                self.do_delete(parse_args(args, [f"--repo={repo}", "delete", "-a", "borg-benchmark-crud1"]))
-            )
-            t_end = time.monotonic()
-            dt_delete = t_end - t_start
-            assert rc == 0
-            return dt_create, dt_update, dt_extract, dt_delete
+                t_end = time.monotonic()
+                dt_update = t_end - t_start
+                rc2 = get_reset_ec(
+                    self.do_delete(parse_args(args, [f"--repo={repo}", "delete", "-a", "borg-benchmark-crud3"]))
+                )
+                assert rc1 == rc2 == 0
+                # measure extraction (dry-run: without writing result to disk)
+                t_start = time.monotonic()
+                rc = get_reset_ec(
+                    self.do_extract(
+                        parse_args(args, [f"--repo={repo}", "extract", "borg-benchmark-crud1", "--dry-run"])
+                    )
+                )
+                t_end = time.monotonic()
+                dt_extract = t_end - t_start
+                assert rc == 0
+                # measure archive deletion (of LAST present archive with the data)
+                t_start = time.monotonic()
+                rc = get_reset_ec(
+                    self.do_delete(parse_args(args, [f"--repo={repo}", "delete", "-a", "borg-benchmark-crud1"]))
+                )
+                t_end = time.monotonic()
+                dt_delete = t_end - t_start
+                assert rc == 0
+                return dt_create, dt_update, dt_extract, dt_delete
+            finally:
+                archiver_logger.setLevel(original_level)
 
         @contextmanager
         def test_files(path, count, size, random):
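
Note on the approach: the patch does the save/set/restore dance on the archiver
logger inline, which forces a re-indent of the whole measurement_run body (hence
the large diff). The same effect can be packaged as a context manager so the
restore cannot be forgotten. A minimal sketch, using only the stdlib logging
module; quiet_logger is a hypothetical helper name, not part of this patch or of
borg's codebase:

    # quiet_logger is a hypothetical helper, sketched for illustration only.
    import logging
    from contextlib import contextmanager

    @contextmanager
    def quiet_logger(name, level=logging.ERROR):
        """Temporarily raise the named logger's threshold, restoring it on exit."""
        logger = logging.getLogger(name)
        original_level = logger.level
        logger.setLevel(level)
        try:
            yield logger
        finally:
            logger.setLevel(original_level)  # restore even if the wrapped code raises

    # usage sketch:
    # with quiet_logger("borg.archiver"):
    #     ...run the benchmark steps...

The stdlib also offers logging.disable(), but that raises the threshold
process-wide for every logger, while setLevel() on "borg.archiver" only silences
that logger subtree.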