Merge pull request #9366 from mr-raj12/fix-benchmark-compact-warnings

benchmark crud: suppress compact warnings during benchmark runs, fixes #9365
commit 3f6282c3bc
Author: TW
Date:   2026-02-17 20:55:16 +01:00 (committed via GitHub)


@@ -2,6 +2,7 @@ import argparse
 from contextlib import contextmanager
 import functools
 import json
+import logging
 import os
 import tempfile
 import time
@@ -29,67 +30,77 @@ class BenchmarkMixIn:
             return self.parse_args(cmd)
 
         def measurement_run(repo, path):
-            compression = "--compression=none"
-            # measure create perf (without files cache to always have it chunking)
-            t_start = time.monotonic()
-            rc = get_reset_ec(
-                self.do_create(
-                    parse_args(
-                        args,
-                        [
-                            f"--repo={repo}",
-                            "create",
-                            compression,
-                            "--files-cache=disabled",
-                            "borg-benchmark-crud1",
-                            path,
-                        ],
-                    )
-                )
-            )
-            t_end = time.monotonic()
-            dt_create = t_end - t_start
-            assert rc == 0
-            # now build files cache
-            rc1 = get_reset_ec(
-                self.do_create(
-                    parse_args(args, [f"--repo={repo}", "create", compression, "borg-benchmark-crud2", path])
-                )
-            )
-            rc2 = get_reset_ec(
-                self.do_delete(parse_args(args, [f"--repo={repo}", "delete", "-a", "borg-benchmark-crud2"]))
-            )
-            assert rc1 == rc2 == 0
-            # measure a no-change update (archive1 is still present)
-            t_start = time.monotonic()
-            rc1 = get_reset_ec(
-                self.do_create(
-                    parse_args(args, [f"--repo={repo}", "create", compression, "borg-benchmark-crud3", path])
-                )
-            )
-            t_end = time.monotonic()
-            dt_update = t_end - t_start
-            rc2 = get_reset_ec(
-                self.do_delete(parse_args(args, [f"--repo={repo}", "delete", "-a", "borg-benchmark-crud3"]))
-            )
-            assert rc1 == rc2 == 0
-            # measure extraction (dry-run: without writing result to disk)
-            t_start = time.monotonic()
-            rc = get_reset_ec(
-                self.do_extract(parse_args(args, [f"--repo={repo}", "extract", "borg-benchmark-crud1", "--dry-run"]))
-            )
-            t_end = time.monotonic()
-            dt_extract = t_end - t_start
-            assert rc == 0
-            # measure archive deletion (of LAST present archive with the data)
-            t_start = time.monotonic()
-            rc = get_reset_ec(
-                self.do_delete(parse_args(args, [f"--repo={repo}", "delete", "-a", "borg-benchmark-crud1"]))
-            )
-            t_end = time.monotonic()
-            dt_delete = t_end - t_start
-            assert rc == 0
-            return dt_create, dt_update, dt_extract, dt_delete
+            # Suppress "Done. Run borg compact..." warnings from internal do_delete() calls;
+            # they clutter benchmark output and are irrelevant here (the repo is temporary).
+            archiver_logger = logging.getLogger("borg.archiver")
+            original_level = archiver_logger.level
+            archiver_logger.setLevel(logging.ERROR)
+            try:
+                compression = "--compression=none"
+                # measure create perf (without files cache to always have it chunking)
+                t_start = time.monotonic()
+                rc = get_reset_ec(
+                    self.do_create(
+                        parse_args(
+                            args,
+                            [
+                                f"--repo={repo}",
+                                "create",
+                                compression,
+                                "--files-cache=disabled",
+                                "borg-benchmark-crud1",
+                                path,
+                            ],
+                        )
+                    )
+                )
+                t_end = time.monotonic()
+                dt_create = t_end - t_start
+                assert rc == 0
+                # now build files cache
+                rc1 = get_reset_ec(
+                    self.do_create(
+                        parse_args(args, [f"--repo={repo}", "create", compression, "borg-benchmark-crud2", path])
+                    )
+                )
+                rc2 = get_reset_ec(
+                    self.do_delete(parse_args(args, [f"--repo={repo}", "delete", "-a", "borg-benchmark-crud2"]))
+                )
+                assert rc1 == rc2 == 0
+                # measure a no-change update (archive1 is still present)
+                t_start = time.monotonic()
+                rc1 = get_reset_ec(
+                    self.do_create(
+                        parse_args(args, [f"--repo={repo}", "create", compression, "borg-benchmark-crud3", path])
+                    )
+                )
+                t_end = time.monotonic()
+                dt_update = t_end - t_start
+                rc2 = get_reset_ec(
+                    self.do_delete(parse_args(args, [f"--repo={repo}", "delete", "-a", "borg-benchmark-crud3"]))
+                )
+                assert rc1 == rc2 == 0
+                # measure extraction (dry-run: without writing result to disk)
+                t_start = time.monotonic()
+                rc = get_reset_ec(
+                    self.do_extract(
+                        parse_args(args, [f"--repo={repo}", "extract", "borg-benchmark-crud1", "--dry-run"])
+                    )
+                )
+                t_end = time.monotonic()
+                dt_extract = t_end - t_start
+                assert rc == 0
+                # measure archive deletion (of LAST present archive with the data)
+                t_start = time.monotonic()
+                rc = get_reset_ec(
+                    self.do_delete(parse_args(args, [f"--repo={repo}", "delete", "-a", "borg-benchmark-crud1"]))
+                )
+                t_end = time.monotonic()
+                dt_delete = t_end - t_start
+                assert rc == 0
+                return dt_create, dt_update, dt_extract, dt_delete
+            finally:
+                archiver_logger.setLevel(original_level)
 
     @contextmanager
     def test_files(path, count, size, random):
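
The pattern the commit applies (save a logger's level, raise it, restore it in a finally block) generalizes to a small context manager. The sketch below is illustrative and not part of the commit: the logger name "borg.archiver" is taken from the diff, while the suppress_logger helper and the demo logging calls are hypothetical.

    import logging
    from contextlib import contextmanager

    @contextmanager
    def suppress_logger(name, level=logging.ERROR):
        """Temporarily raise the threshold of the named logger
        (hypothetical helper mirroring the save/setLevel/restore
        sequence used in the commit)."""
        logger = logging.getLogger(name)
        original_level = logger.level  # may be NOTSET (delegate to parent)
        logger.setLevel(level)
        try:
            yield logger
        finally:
            # always restore, even if the wrapped code raises
            logger.setLevel(original_level)

    # Usage sketch: records below ERROR on "borg.archiver" are dropped inside the block.
    with suppress_logger("borg.archiver"):
        logging.getLogger("borg.archiver").warning("suppressed")
    logging.getLogger("borg.archiver").warning("visible again, if handlers are configured")

Restoring the exact prior level rather than hardcoding one matters: a saved level of NOTSET means the logger delegates to its parent, and overwriting it with a fixed value would change logging behavior after the benchmark finishes.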