benchmark crud: suppress compact warnings during benchmark runs, fixes #9365

Mrityunjay Raj 2026-02-18 00:45:36 +05:30
parent 1c0bf36275
commit c7261ae5aa


@@ -2,6 +2,7 @@ import argparse
 from contextlib import contextmanager
 import functools
 import json
+import logging
 import os
 import tempfile
 import time
@@ -29,67 +30,77 @@ class BenchmarkMixIn:
             return self.parse_args(cmd)
 
         def measurement_run(repo, path):
-            compression = "--compression=none"
-            # measure create perf (without files cache to always have it chunking)
-            t_start = time.monotonic()
-            rc = get_reset_ec(
-                self.do_create(
-                    parse_args(
-                        args,
-                        [
-                            f"--repo={repo}",
-                            "create",
-                            compression,
-                            "--files-cache=disabled",
-                            "borg-benchmark-crud1",
-                            path,
-                        ],
-                    )
-                )
-            )
-            t_end = time.monotonic()
-            dt_create = t_end - t_start
-            assert rc == 0
-            # now build files cache
-            rc1 = get_reset_ec(
-                self.do_create(
-                    parse_args(args, [f"--repo={repo}", "create", compression, "borg-benchmark-crud2", path])
-                )
-            )
-            rc2 = get_reset_ec(
-                self.do_delete(parse_args(args, [f"--repo={repo}", "delete", "-a", "borg-benchmark-crud2"]))
-            )
-            assert rc1 == rc2 == 0
-            # measure a no-change update (archive1 is still present)
-            t_start = time.monotonic()
-            rc1 = get_reset_ec(
-                self.do_create(
-                    parse_args(args, [f"--repo={repo}", "create", compression, "borg-benchmark-crud3", path])
-                )
-            )
-            t_end = time.monotonic()
-            dt_update = t_end - t_start
-            rc2 = get_reset_ec(
-                self.do_delete(parse_args(args, [f"--repo={repo}", "delete", "-a", "borg-benchmark-crud3"]))
-            )
-            assert rc1 == rc2 == 0
-            # measure extraction (dry-run: without writing result to disk)
-            t_start = time.monotonic()
-            rc = get_reset_ec(
-                self.do_extract(parse_args(args, [f"--repo={repo}", "extract", "borg-benchmark-crud1", "--dry-run"]))
-            )
-            t_end = time.monotonic()
-            dt_extract = t_end - t_start
-            assert rc == 0
-            # measure archive deletion (of LAST present archive with the data)
-            t_start = time.monotonic()
-            rc = get_reset_ec(
-                self.do_delete(parse_args(args, [f"--repo={repo}", "delete", "-a", "borg-benchmark-crud1"]))
-            )
-            t_end = time.monotonic()
-            dt_delete = t_end - t_start
-            assert rc == 0
-            return dt_create, dt_update, dt_extract, dt_delete
+            # Suppress "Done. Run borg compact..." warnings from internal do_delete() calls —
+            # they clutter benchmark output and are irrelevant here (repo is temporary).
+            archiver_logger = logging.getLogger("borg.archiver")
+            original_level = archiver_logger.level
+            archiver_logger.setLevel(logging.ERROR)
+            try:
+                compression = "--compression=none"
+                # measure create perf (without files cache to always have it chunking)
+                t_start = time.monotonic()
+                rc = get_reset_ec(
+                    self.do_create(
+                        parse_args(
+                            args,
+                            [
+                                f"--repo={repo}",
+                                "create",
+                                compression,
+                                "--files-cache=disabled",
+                                "borg-benchmark-crud1",
+                                path,
+                            ],
+                        )
+                    )
+                )
+                t_end = time.monotonic()
+                dt_create = t_end - t_start
+                assert rc == 0
+                # now build files cache
+                rc1 = get_reset_ec(
+                    self.do_create(
+                        parse_args(args, [f"--repo={repo}", "create", compression, "borg-benchmark-crud2", path])
+                    )
+                )
+                rc2 = get_reset_ec(
+                    self.do_delete(parse_args(args, [f"--repo={repo}", "delete", "-a", "borg-benchmark-crud2"]))
+                )
+                assert rc1 == rc2 == 0
+                # measure a no-change update (archive1 is still present)
+                t_start = time.monotonic()
+                rc1 = get_reset_ec(
+                    self.do_create(
+                        parse_args(args, [f"--repo={repo}", "create", compression, "borg-benchmark-crud3", path])
+                    )
+                )
+                t_end = time.monotonic()
+                dt_update = t_end - t_start
+                rc2 = get_reset_ec(
+                    self.do_delete(parse_args(args, [f"--repo={repo}", "delete", "-a", "borg-benchmark-crud3"]))
+                )
+                assert rc1 == rc2 == 0
+                # measure extraction (dry-run: without writing result to disk)
+                t_start = time.monotonic()
+                rc = get_reset_ec(
+                    self.do_extract(
+                        parse_args(args, [f"--repo={repo}", "extract", "borg-benchmark-crud1", "--dry-run"])
+                    )
+                )
+                t_end = time.monotonic()
+                dt_extract = t_end - t_start
+                assert rc == 0
+                # measure archive deletion (of LAST present archive with the data)
+                t_start = time.monotonic()
+                rc = get_reset_ec(
+                    self.do_delete(parse_args(args, [f"--repo={repo}", "delete", "-a", "borg-benchmark-crud1"]))
+                )
+                t_end = time.monotonic()
+                dt_delete = t_end - t_start
+                assert rc == 0
+                return dt_create, dt_update, dt_extract, dt_delete
+            finally:
+                archiver_logger.setLevel(original_level)
 
         @contextmanager
         def test_files(path, count, size, random):
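
The change above is the stdlib logging save/restore pattern: look up the logger, remember its current level, raise the level so lower-severity records are dropped, and restore the old level in a finally block so a failing assert cannot leave the logger muted for the rest of the process. Below is a minimal standalone sketch of the same pattern; the logger name "borg.archiver" and the quoted warning text are taken from the diff, while the basicConfig call is only there so the demo prints something when run on its own.

    import logging

    logging.basicConfig(level=logging.INFO)  # demo-only: give the root logger a handler
    logger = logging.getLogger("borg.archiver")

    saved_level = logger.level           # usually NOTSET (0) unless set explicitly
    logger.setLevel(logging.ERROR)       # records below ERROR are now filtered out
    try:
        logger.warning("Done. Run borg compact ...")   # suppressed
        logger.error("real errors still get through")  # printed
    finally:
        logger.setLevel(saved_level)     # restore even if the guarded code raises

    logger.warning("visible again after restore")      # printed

Since the file already imports contextmanager, the same save/restore could also be packaged as a small reusable context manager; the inline try/finally keeps the fix self-contained within measurement_run.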