Mirror of https://github.com/borgbackup/borg.git
benchmark crud: suppress compact warnings during benchmark runs, fixes #9365
parent: 1c0bf36275
commit: c7261ae5aa
1 changed file with 68 additions and 57 deletions
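The change adds an import of logging and wraps the body of measurement_run() in a try/finally block: the borg.archiver logger is temporarily raised to the ERROR level while the benchmark steps run, and the previous level is restored afterwards, so the "Done. Run borg compact..." warnings emitted by the internal do_delete() calls no longer clutter the benchmark output.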
@@ -2,6 +2,7 @@ import argparse
 from contextlib import contextmanager
 import functools
 import json
+import logging
 import os
 import tempfile
 import time
@@ -29,67 +30,77 @@ class BenchmarkMixIn:
         return self.parse_args(cmd)

     def measurement_run(repo, path):
-        compression = "--compression=none"
-        # measure create perf (without files cache to always have it chunking)
-        t_start = time.monotonic()
-        rc = get_reset_ec(
-            self.do_create(
-                parse_args(
-                    args,
-                    [
-                        f"--repo={repo}",
-                        "create",
-                        compression,
-                        "--files-cache=disabled",
-                        "borg-benchmark-crud1",
-                        path,
-                    ],
-                )
-            )
-        )
-        t_end = time.monotonic()
-        dt_create = t_end - t_start
-        assert rc == 0
-        # now build files cache
-        rc1 = get_reset_ec(
-            self.do_create(
-                parse_args(args, [f"--repo={repo}", "create", compression, "borg-benchmark-crud2", path])
-            )
-        )
-        rc2 = get_reset_ec(
-            self.do_delete(parse_args(args, [f"--repo={repo}", "delete", "-a", "borg-benchmark-crud2"]))
-        )
-        assert rc1 == rc2 == 0
-        # measure a no-change update (archive1 is still present)
-        t_start = time.monotonic()
-        rc1 = get_reset_ec(
-            self.do_create(
-                parse_args(args, [f"--repo={repo}", "create", compression, "borg-benchmark-crud3", path])
-            )
-        )
-        t_end = time.monotonic()
-        dt_update = t_end - t_start
-        rc2 = get_reset_ec(
-            self.do_delete(parse_args(args, [f"--repo={repo}", "delete", "-a", "borg-benchmark-crud3"]))
-        )
-        assert rc1 == rc2 == 0
-        # measure extraction (dry-run: without writing result to disk)
-        t_start = time.monotonic()
-        rc = get_reset_ec(
-            self.do_extract(parse_args(args, [f"--repo={repo}", "extract", "borg-benchmark-crud1", "--dry-run"]))
-        )
-        t_end = time.monotonic()
-        dt_extract = t_end - t_start
-        assert rc == 0
-        # measure archive deletion (of LAST present archive with the data)
-        t_start = time.monotonic()
-        rc = get_reset_ec(
-            self.do_delete(parse_args(args, [f"--repo={repo}", "delete", "-a", "borg-benchmark-crud1"]))
-        )
-        t_end = time.monotonic()
-        dt_delete = t_end - t_start
-        assert rc == 0
-        return dt_create, dt_update, dt_extract, dt_delete
+        # Suppress "Done. Run borg compact..." warnings from internal do_delete() calls —
+        # they clutter benchmark output and are irrelevant here (repo is temporary).
+        archiver_logger = logging.getLogger("borg.archiver")
+        original_level = archiver_logger.level
+        archiver_logger.setLevel(logging.ERROR)
+        try:
+            compression = "--compression=none"
+            # measure create perf (without files cache to always have it chunking)
+            t_start = time.monotonic()
+            rc = get_reset_ec(
+                self.do_create(
+                    parse_args(
+                        args,
+                        [
+                            f"--repo={repo}",
+                            "create",
+                            compression,
+                            "--files-cache=disabled",
+                            "borg-benchmark-crud1",
+                            path,
+                        ],
+                    )
+                )
+            )
+            t_end = time.monotonic()
+            dt_create = t_end - t_start
+            assert rc == 0
+            # now build files cache
+            rc1 = get_reset_ec(
+                self.do_create(
+                    parse_args(args, [f"--repo={repo}", "create", compression, "borg-benchmark-crud2", path])
+                )
+            )
+            rc2 = get_reset_ec(
+                self.do_delete(parse_args(args, [f"--repo={repo}", "delete", "-a", "borg-benchmark-crud2"]))
+            )
+            assert rc1 == rc2 == 0
+            # measure a no-change update (archive1 is still present)
+            t_start = time.monotonic()
+            rc1 = get_reset_ec(
+                self.do_create(
+                    parse_args(args, [f"--repo={repo}", "create", compression, "borg-benchmark-crud3", path])
+                )
+            )
+            t_end = time.monotonic()
+            dt_update = t_end - t_start
+            rc2 = get_reset_ec(
+                self.do_delete(parse_args(args, [f"--repo={repo}", "delete", "-a", "borg-benchmark-crud3"]))
+            )
+            assert rc1 == rc2 == 0
+            # measure extraction (dry-run: without writing result to disk)
+            t_start = time.monotonic()
+            rc = get_reset_ec(
+                self.do_extract(
+                    parse_args(args, [f"--repo={repo}", "extract", "borg-benchmark-crud1", "--dry-run"])
+                )
+            )
+            t_end = time.monotonic()
+            dt_extract = t_end - t_start
+            assert rc == 0
+            # measure archive deletion (of LAST present archive with the data)
+            t_start = time.monotonic()
+            rc = get_reset_ec(
+                self.do_delete(parse_args(args, [f"--repo={repo}", "delete", "-a", "borg-benchmark-crud1"]))
+            )
+            t_end = time.monotonic()
+            dt_delete = t_end - t_start
+            assert rc == 0
+            return dt_create, dt_update, dt_extract, dt_delete
+        finally:
+            archiver_logger.setLevel(original_level)

     @contextmanager
     def test_files(path, count, size, random):
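The same save/restore pattern can be packaged as a reusable context manager (the file already imports contextmanager from contextlib). A minimal sketch, not part of the commit; quiet_logger and the usage shown are hypothetical:

    import logging
    from contextlib import contextmanager

    @contextmanager
    def quiet_logger(name, level=logging.ERROR):
        # Temporarily raise the threshold of the named logger; restore it on exit.
        # Same save/restore idea as the try/finally in the commit above.
        logger = logging.getLogger(name)
        original_level = logger.level
        logger.setLevel(level)
        try:
            yield logger
        finally:
            logger.setLevel(original_level)

Hypothetical usage, mirroring the commit:

    with quiet_logger("borg.archiver"):
        dt_create, dt_update, dt_extract, dt_delete = measurement_run(repo, path)

As in the commit, logger.level records the logger's own level (which may be NOTSET for loggers that inherit from a parent), and restoring it inside finally guarantees cleanup even if a benchmark step raises.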