src/borg/archiver/benchmark_cmd.py: 125 changes (68 additions, 57 deletions)
@@ -2,6 +2,7 @@
 from contextlib import contextmanager
 import functools
 import json
+import logging
 import os
 import tempfile
 import time
@@ -29,67 +30,77 @@ def parse_args(args, cmd):
             return self.parse_args(cmd)

         def measurement_run(repo, path):
-            compression = "--compression=none"
-            # measure create perf (without files cache to always have it chunking)
-            t_start = time.monotonic()
-            rc = get_reset_ec(
-                self.do_create(
-                    parse_args(
-                        args,
-                        [
-                            f"--repo={repo}",
-                            "create",
-                            compression,
-                            "--files-cache=disabled",
-                            "borg-benchmark-crud1",
-                            path,
-                        ],
-                    )
-                )
-            )
-            t_end = time.monotonic()
-            dt_create = t_end - t_start
-            assert rc == 0
-            # now build files cache
-            rc1 = get_reset_ec(
-                self.do_create(
-                    parse_args(args, [f"--repo={repo}", "create", compression, "borg-benchmark-crud2", path])
-                )
-            )
-            rc2 = get_reset_ec(
-                self.do_delete(parse_args(args, [f"--repo={repo}", "delete", "-a", "borg-benchmark-crud2"]))
-            )
-            assert rc1 == rc2 == 0
-            # measure a no-change update (archive1 is still present)
-            t_start = time.monotonic()
-            rc1 = get_reset_ec(
-                self.do_create(
-                    parse_args(args, [f"--repo={repo}", "create", compression, "borg-benchmark-crud3", path])
-                )
-            )
-            t_end = time.monotonic()
-            dt_update = t_end - t_start
-            rc2 = get_reset_ec(
-                self.do_delete(parse_args(args, [f"--repo={repo}", "delete", "-a", "borg-benchmark-crud3"]))
-            )
-            assert rc1 == rc2 == 0
-            # measure extraction (dry-run: without writing result to disk)
-            t_start = time.monotonic()
-            rc = get_reset_ec(
-                self.do_extract(parse_args(args, [f"--repo={repo}", "extract", "borg-benchmark-crud1", "--dry-run"]))
-            )
-            t_end = time.monotonic()
-            dt_extract = t_end - t_start
-            assert rc == 0
-            # measure archive deletion (of LAST present archive with the data)
-            t_start = time.monotonic()
-            rc = get_reset_ec(
-                self.do_delete(parse_args(args, [f"--repo={repo}", "delete", "-a", "borg-benchmark-crud1"]))
-            )
-            t_end = time.monotonic()
-            dt_delete = t_end - t_start
-            assert rc == 0
-            return dt_create, dt_update, dt_extract, dt_delete
+            # Suppress "Done. Run borg compact..." warnings from internal do_delete() calls;
+            # they clutter benchmark output and are irrelevant here (repo is temporary).
+            archiver_logger = logging.getLogger("borg.archiver")
+            original_level = archiver_logger.level
+            archiver_logger.setLevel(logging.ERROR)
+            try:
+                compression = "--compression=none"
+                # measure create perf (without files cache to always have it chunking)
+                t_start = time.monotonic()
+                rc = get_reset_ec(
+                    self.do_create(
+                        parse_args(
+                            args,
+                            [
+                                f"--repo={repo}",
+                                "create",
+                                compression,
+                                "--files-cache=disabled",
+                                "borg-benchmark-crud1",
+                                path,
+                            ],
+                        )
+                    )
+                )
+                t_end = time.monotonic()
+                dt_create = t_end - t_start
+                assert rc == 0
+                # now build files cache
+                rc1 = get_reset_ec(
+                    self.do_create(
+                        parse_args(args, [f"--repo={repo}", "create", compression, "borg-benchmark-crud2", path])
+                    )
+                )
+                rc2 = get_reset_ec(
+                    self.do_delete(parse_args(args, [f"--repo={repo}", "delete", "-a", "borg-benchmark-crud2"]))
+                )
+                assert rc1 == rc2 == 0
+                # measure a no-change update (archive1 is still present)
+                t_start = time.monotonic()
+                rc1 = get_reset_ec(
+                    self.do_create(
+                        parse_args(args, [f"--repo={repo}", "create", compression, "borg-benchmark-crud3", path])
+                    )
+                )
+                t_end = time.monotonic()
+                dt_update = t_end - t_start
+                rc2 = get_reset_ec(
+                    self.do_delete(parse_args(args, [f"--repo={repo}", "delete", "-a", "borg-benchmark-crud3"]))
+                )
+                assert rc1 == rc2 == 0
+                # measure extraction (dry-run: without writing result to disk)
+                t_start = time.monotonic()
+                rc = get_reset_ec(
+                    self.do_extract(
+                        parse_args(args, [f"--repo={repo}", "extract", "borg-benchmark-crud1", "--dry-run"])
+                    )
+                )
+                t_end = time.monotonic()
+                dt_extract = t_end - t_start
+                assert rc == 0
+                # measure archive deletion (of LAST present archive with the data)
+                t_start = time.monotonic()
+                rc = get_reset_ec(
+                    self.do_delete(parse_args(args, [f"--repo={repo}", "delete", "-a", "borg-benchmark-crud1"]))
+                )
+                t_end = time.monotonic()
+                dt_delete = t_end - t_start
+                assert rc == 0
+                return dt_create, dt_update, dt_extract, dt_delete
+            finally:
+                archiver_logger.setLevel(original_level)

         @contextmanager
         def test_files(path, count, size, random):
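Not part of the PR itself, just for illustration: the suppression added above is the plain stdlib pattern of temporarily raising a logger's threshold and restoring it in a finally block. Since the module already imports contextmanager, the same idea can be expressed as a small reusable context manager; the helper name quiet_logger below is hypothetical and not something borg provides.

    import logging
    from contextlib import contextmanager

    @contextmanager
    def quiet_logger(name, level=logging.ERROR):
        """Temporarily raise the threshold of the named logger; restore it on exit."""
        logger = logging.getLogger(name)
        original_level = logger.level  # 0 (NOTSET) if no explicit level was configured
        logger.setLevel(level)
        try:
            yield logger
        finally:
            # Runs even if the body raises, so the previous level is always restored.
            logger.setLevel(original_level)

    # Hypothetical usage mirroring the change above:
    # with quiet_logger("borg.archiver"):
    #     ...  # run the create/update/extract/delete measurement phases

Using try/finally directly, as the PR does, has the same effect; a context manager would only pay off if the pattern were needed in more than one place.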