Skip to content

Commit

Permalink
Add initial stats output in the logs
Browse files Browse the repository at this point in the history
This is the basis of what we could then send over MQTT
  • Loading branch information
d0ugal committed Oct 7, 2018
1 parent ad5db79 commit 4d5a5ed
Show file tree
Hide file tree
Showing 3 changed files with 35 additions and 10 deletions.
4 changes: 3 additions & 1 deletion dropbox-upload/dropbox_upload/__main__.py
Original file line number Diff line number Diff line change
Expand Up @@ -29,8 +29,10 @@ def main(config_file, sleeper=time.sleep, DropboxAPI=dropbox.Dropbox):
LOG.info("Starting Snapshot backup")
snapshots = hassio.list_snapshots()

stats = backup.backup(dbx, cfg, snapshots)
LOG.info("Uploads complete")
# Bug fix: the original string was missing the `f` prefix, so the literal
# text "{stats['size_human']}" was logged instead of the value. Actually
# interpolating `stats` also makes the old "make pyflakes think stats is
# used" no-op line unnecessary, so it is removed.
LOG.info(f"Total size: {stats['size_human']}")

limit.limit_snapshots(dbx, cfg, snapshots)
LOG.info("Snapshot cleanup complete")
Expand Down
21 changes: 15 additions & 6 deletions dropbox-upload/dropbox_upload/backup.py
Original file line number Diff line number Diff line change
Expand Up @@ -33,33 +33,42 @@ def backup(dbx, config, snapshots):
LOG.info(f"Only backing up the first {config['keep']} snapshots")
snapshots = snapshots[: config["keep"]]

total_size = 0

for i, snapshot in enumerate(snapshots, start=1):
LOG.info(f"Snapshot: {snapshot['name']} ({i}/{len(snapshots)})")
try:
process_snapshot(config, dbx, snapshot)
stats = process_snapshot(config, dbx, snapshot)
if not stats:
continue
total_size += stats["size_bytes"]
except Exception:
LOG.exception(
"Snapshot backup failed. If this happens after the addon is "
"restarted, please open a bug."
)

return {"size_bytes": total_size, "size_human": util.bytes_to_human(total_size)}


def process_snapshot(config, dbx, snapshot):
    """Upload a single snapshot archive to Dropbox.

    Returns a stats dict with ``size_bytes`` (int) and ``size_human`` (str)
    when the snapshot was uploaded, or ``None`` when there was nothing to
    upload (local file missing, identical file already in Dropbox) or the
    upload failed — callers skip ``None`` results when totalling sizes.
    """
    path = local_path(snapshot)
    created = arrow.get(snapshot["date"])
    if not os.path.isfile(path):
        # The snapshot can be deleted between listing and upload.
        LOG.warning("The snapshot no longer exists")
        return None
    # Stat the file once; keep both the raw byte count (for totals) and the
    # human-readable form (for logs and the returned stats).
    bytes_ = os.path.getsize(path)
    size = util.bytes_to_human(bytes_)
    target = str(dropbox_path(config, snapshot))
    LOG.info(f"Slug: {snapshot['slug']} Size: {size}")
    LOG.info(f"Created: {created}")
    LOG.info(f"Uploading to: {target}")
    try:
        if dropbox.file_exists(dbx, path, target):
            # Same content hash already in Dropbox — nothing uploaded, so
            # do not count this snapshot towards the totals.
            LOG.info("Already found in Dropbox with the same hash")
            return None
        dropbox.upload_file(dbx, path, target)
    except Exception:
        LOG.exception("Upload failed")
        # Bug fix: previously this fell through and returned the stats dict,
        # counting the snapshot's size even though nothing was uploaded.
        return None

    return {"size_bytes": bytes_, "size_human": size}
20 changes: 17 additions & 3 deletions tests/test_backup.py
Original file line number Diff line number Diff line change
Expand Up @@ -35,17 +35,31 @@ def test_snapshot_deleted(cfg, snapshot, caplog):
) in caplog.record_tuples


def test_backup_keep_limit(cfg, dropbox_fake, snapshots, caplog):
def test_snapshot_stats(cfg, snapshot, caplog, tmpdir, dropbox_fake):
    """process_snapshot reports the uploaded file's size, raw and human-readable."""
    snapshot_file = tmpdir.mkdir("sub").join("hello.txt")
    snapshot_file.write("testing content 24 bytes" * 1000)  # 24 * 1000 bytes
    with mock.patch("dropbox_upload.backup.local_path") as local_path:
        local_path.return_value = str(snapshot_file)
        stats = backup.process_snapshot(cfg, dropbox_fake(), snapshot)
    assert stats["size_bytes"] == 24000
    assert stats["size_human"] == "23.44 KB"


def test_backup_keep_limit(cfg, dropbox_fake, snapshots, caplog, tmpdir):
    """With cfg['keep'] set, only that many snapshots are backed up and totalled."""
    caplog.set_level(logging.DEBUG)
    cfg["keep"] = 2
    file_ = tmpdir.mkdir("sub").join("hello.txt")
    file_.write("testing content 24 bytes" * 1000)  # 24 * 1000 bytes
    with mock.patch("dropbox_upload.backup.local_path") as local_path:
        # Every snapshot resolves to the same fixture file on disk.
        local_path.return_value = str(file_)
        result = backup.backup(dropbox_fake(), cfg, snapshots)
    assert (
        "dropbox_upload.backup",
        logging.INFO,
        "Only backing up the first 2 snapshots",
    ) in caplog.record_tuples
    # Only the first two snapshots are processed, so the total is 2x the file.
    assert result["size_bytes"] == 24000 * 2
    assert result["size_human"] == "46.88 KB"


def test_backup_file_exists(cfg, dropbox_fake, snapshot, caplog):
Expand Down

0 comments on commit 4d5a5ed

Please sign in to comment.