Merge pull request #21 from d0ugal/dev
Version 1.0
d0ugal authored Oct 5, 2018
2 parents 763e1af + dda10fc commit ed1e069
Showing 18 changed files with 557 additions and 308 deletions.
35 changes: 28 additions & 7 deletions .travis.yml
@@ -4,16 +4,37 @@ services:
language: python
python:
- "3.6"
env:
- TOXENV=py36
- TOXENV=lint
- TOXENV=docker-build ARCH=aarch64
- TOXENV=docker-build ARCH=i386
- TOXENV=docker-build ARCH=amd64
- TOXENV=docker-build ARCH=armhf
install:
- pip install -U pip
- pip install tox
- pip freeze
script:
- tox

stages:
- lint
- test
- docker-build
- name: release
if: tag IS present

jobs:
include:
- env: TOXENV=py36

- stage: lint
env: TOXENV=lint

- stage: docker-build
env: TOXENV=docker-build ARCH=aarch64
- stage: docker-build
env: TOXENV=docker-build ARCH=i386
- stage: docker-build
env: TOXENV=docker-build ARCH=amd64
- stage: docker-build
env: TOXENV=docker-build ARCH=armhf

- stage: release
script:
- echo "$DOCKER_PASSWORD" | docker login -u ${DOCKER_USERNAME} --password-stdin
- bash scripts/docker-build.sh
5 changes: 2 additions & 3 deletions README.md
@@ -25,6 +25,8 @@ a directory name.
{
"access_token": "ACCESS TOKEN",
"dropbox_dir": "/hass-snapshots/"
"keep": 10,
"mins_between_backups": 30
}
```

@@ -41,9 +43,6 @@ Here is the automation I use to create a snapshot and upload it to Dropbox.
- service: hassio.snapshot_full
data_template:
name: Automated Backup {{ now().strftime('%Y-%m-%d') }}
- service: hassio.addon_start
data:
addon: 8aef3602_dropbox_upload
```


Expand Down
2 changes: 1 addition & 1 deletion dropbox-upload/Dockerfile
@@ -8,4 +8,4 @@ ADD . /app
WORKDIR /app
RUN pip3 install -U pip
RUN pip3 install -r requirements.txt
CMD python3 upload.py
CMD python3 -m dropbox_upload
9 changes: 7 additions & 2 deletions dropbox-upload/config.json
@@ -1,6 +1,6 @@
{
"name": "Dropbox Upload",
"version": "1",
"version": "1.0",
"slug": "dropbox_upload",
"description": "Upload snapshots to Dropbox!",
"startup": "application",
@@ -14,11 +14,16 @@
],
"options": {
"access_token": "<YOUR_ACCESS_TOKEN>",
"dropbox_dir": "<DROPBOX_UPLOAD_LOCATION>"
"dropbox_dir": "/snapshots",
"keep": 10,
"mins_between_backups": 10,
"debug": false
},
"schema": {
"access_token": "str",
"dropbox_dir": "str",
"keep": "int?",
"mins_between_backups": "int?",
"debug": "bool?"
}
}
Empty file.
47 changes: 47 additions & 0 deletions dropbox-upload/dropbox_upload/__main__.py
@@ -0,0 +1,47 @@
import logging
import time

import dropbox
from dropbox import exceptions

from . import backup, config, hassio, limit

LOG = logging.getLogger(__name__)


def main(config_file, sleeper=time.sleep, DropboxAPI=dropbox.Dropbox):

cfg = config.load_config(config_file)
copy = cfg.copy()
copy["access_token"] = "HIDDEN"
LOG.debug(copy)
config.setup_logging(cfg)

try:
dbx = DropboxAPI(cfg["access_token"])
dbx.users_get_current_account()
except exceptions.AuthError:
LOG.error("Invalid access token")
return

while True:
try:
LOG.info("Starting Snapshot backup")
snapshots = hassio.list_snapshots()

backup.backup(dbx, cfg, snapshots)
LOG.info("Uploads complete")

limit.limit_snapshots(dbx, cfg, snapshots)
LOG.info("Snapshot cleanup complete")
except Exception:
LOG.exception("Unhandled error")

sleep = cfg.get("mins_between_backups", 10)
LOG.info("Sleeping for {sleep} minutes")
if sleeper(sleep * 60):
return


if __name__ == "__main__":
main(config.DEFAULT_CONFIG)
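
`main()` takes `sleeper` and `DropboxAPI` as keyword arguments so tests can inject fakes instead of sleeping for real or calling Dropbox; a truthy return value from the sleeper ends the loop. A minimal test sketch using `unittest.mock` (the test name, fixture usage and fake objects here are illustrative, not part of this commit):

```python
from unittest import mock

from dropbox_upload import __main__ as entry


def test_main_runs_one_cycle(tmp_path):
    cfg_file = tmp_path / "options.json"
    cfg_file.write_text('{"access_token": "token", "dropbox_dir": "/snapshots"}')

    # A sleeper returning a truthy value makes main() exit after one loop.
    sleeper = mock.Mock(return_value=True)
    fake_api = mock.Mock()

    with mock.patch("dropbox_upload.hassio.list_snapshots", return_value=[]):
        entry.main(str(cfg_file), sleeper=sleeper, DropboxAPI=fake_api)

    fake_api.assert_called_once_with("token")
    sleeper.assert_called_once_with(10 * 60)  # default mins_between_backups
```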
65 changes: 65 additions & 0 deletions dropbox-upload/dropbox_upload/backup.py
@@ -0,0 +1,65 @@
import logging
import os
import pathlib

import arrow

from . import dropbox, util

LOG = logging.getLogger(__name__)
BACKUP_DIR = pathlib.Path("/backup/")


def local_path(snapshot):
return BACKUP_DIR / f"{snapshot['slug']}.tar"


def dropbox_path(config, snapshot):
dropbox_dir = pathlib.Path(config["dropbox_dir"])
name = snapshot["slug"]
return dropbox_dir / f"{name}.tar"


def backup(dbx, config, snapshots):

LOG.info(f"Found {len(snapshots)} snapshots")
LOG.info(f"Backing up to Dropbox directory: {config['dropbox_dir']}")

if not snapshots:
LOG.warning("No snapshots found to backup")
return

if config.get("keep") and len(snapshots) > config.get("keep"):
LOG.info(f"Only backing up the first {config['keep']} snapshots")
snapshots = snapshots[: config["keep"]]

for i, snapshot in enumerate(snapshots, start=1):
LOG.info(f"Snapshot: {snapshot['name']} ({i}/{len(snapshots)})")
try:
process_snapshot(config, dbx, snapshot)
except Exception:
LOG.exception(
"Snapshot backup failed. If this happens after the addon is "
"restarted, please open a bug."
)


def process_snapshot(config, dbx, snapshot):
path = local_path(snapshot)
created = arrow.get(snapshot["date"])
if not os.path.isfile(path):
LOG.warning("The snapshot no longer exists")
return
size = util.bytes_to_human(os.path.getsize(path))
target = str(dropbox_path(config, snapshot))
LOG.info(f"Slug: {snapshot['slug']}")
LOG.info(f"Created: {created}")
LOG.info(f"Size: {size}")
LOG.info(f"Uploading to: {target}")
try:
if dropbox.file_exists(dbx, path, target):
LOG.info("Already found in Dropbox with the same hash")
return
dropbox.upload_file(dbx, path, target)
except Exception:
LOG.exception("Upload failed")
24 changes: 24 additions & 0 deletions dropbox-upload/dropbox_upload/config.py
@@ -0,0 +1,24 @@
import json
import logging
import sys

DEFAULT_CONFIG = "/data/options.json"


def load_config(path=DEFAULT_CONFIG):
with open(path) as f:
return json.load(f)


def setup_logging(config):
log = logging.getLogger("dropbox_upload")
log.setLevel(logging.DEBUG if config.get("debug") else logging.INFO)

ch = logging.StreamHandler(sys.stdout)
ch.setLevel(logging.DEBUG)
formatter = logging.Formatter("%(asctime)s - %(levelname)s - %(message)s")
ch.setFormatter(formatter)
# Remove existing handlers. Otherwise they accumulate when setup_logging
# is called repeatedly (e.g. in unit tests).
log.handlers = []
log.addHandler(ch)
return log
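
A short usage sketch for these helpers; the local `options.json` path is a stand-in for testing, since inside the add-on the file lives at `/data/options.json`:

```python
from dropbox_upload import config

cfg = config.load_config("options.json")  # config.DEFAULT_CONFIG inside the add-on
log = config.setup_logging(cfg)

log.info("Uploading to %s", cfg["dropbox_dir"])
log.debug("Only visible when the 'debug' option is true")
```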
80 changes: 80 additions & 0 deletions dropbox-upload/dropbox_upload/dropbox.py
@@ -0,0 +1,80 @@
import hashlib
import logging
import os

import dropbox
import retrace

LOG = logging.getLogger(__name__)
CHUNK_SIZE = 4 * 1024 * 1024


@retrace.retry(limit=4)
def upload_file(dbx, file_path, dest_path):

f = open(file_path, "rb")
file_size = os.path.getsize(file_path)
if file_size <= CHUNK_SIZE:
return dbx.files_upload(f, dest_path)

upload_session_start_result = dbx.files_upload_session_start(f.read(CHUNK_SIZE))
cursor = dropbox.files.UploadSessionCursor(
session_id=upload_session_start_result.session_id, offset=f.tell()
)
commit = dropbox.files.CommitInfo(path=dest_path)
prev = None
while f.tell() < file_size:
percentage = round((f.tell() / file_size) * 100)

if not prev or percentage > prev + 5:
LOG.info(f"{percentage:3} %")
prev = percentage

if (file_size - f.tell()) <= CHUNK_SIZE:
dbx.files_upload_session_finish(f.read(CHUNK_SIZE), cursor, commit)
else:
dbx.files_upload_session_append(
f.read(CHUNK_SIZE), cursor.session_id, cursor.offset
)
cursor.offset = f.tell()
LOG.info("100 %")


def compute_dropbox_hash(filename):

with open(filename, "rb") as f:
block_hashes = b""
while True:
chunk = f.read(CHUNK_SIZE)
if not chunk:
break
block_hashes += hashlib.sha256(chunk).digest()
return hashlib.sha256(block_hashes).hexdigest()


def file_exists(dbx, file_path, dest_path):
try:
metadata = dbx.files_get_metadata(dest_path)
except Exception:
LOG.info("No existing snapshot in dropox with this name")
return False

dropbox_hash = metadata.content_hash
local_hash = compute_dropbox_hash(file_path)
LOG.debug(f"Dropbox hash: {dropbox_hash}")
LOG.debug(f"Local hash: {local_hash}")
if local_hash == dropbox_hash:
return True

# If the hash doesn't match, delete the file so we can re-upload it.
# We might want to make this optional; a safer mode might be to
# add a suffix instead.
LOG.warning(
"The snapshot conflicts with a file of the same name in Dropbox, "
"but the contents are different. The Dropbox file will be deleted "
"and replaced. "
"Local hash: %s, Dropbox hash: %s",
local_hash,
dropbox_hash,
)
dbx.files_delete(dest_path)
return False
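
`compute_dropbox_hash()` follows Dropbox's documented content-hash scheme: SHA-256 each 4 MiB block, concatenate the block digests, and SHA-256 the concatenation; the API reports the same value as `content_hash`. A minimal sketch of verifying an upload with it, where the token and paths are placeholders:

```python
import dropbox

from dropbox_upload import dropbox as dropbox_util

dbx = dropbox.Dropbox("ACCESS_TOKEN")   # placeholder token
local = "/backup/abcd1234.tar"          # placeholder local snapshot path
remote = "/snapshots/abcd1234.tar"      # placeholder Dropbox path

local_hash = dropbox_util.compute_dropbox_hash(local)
remote_hash = dbx.files_get_metadata(remote).content_hash
assert local_hash == remote_hash, "uploaded file does not match the local snapshot"
```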
36 changes: 36 additions & 0 deletions dropbox-upload/dropbox_upload/hassio.py
@@ -0,0 +1,36 @@
import logging
import os

import requests

import arrow

LOG = logging.getLogger(__name__)


def hassio_req(method, path):
auth_headers = {"X-HASSIO-KEY": os.environ.get("HASSIO_TOKEN")}
LOG.debug(f"Auth headers: {auth_headers}")
r = method(f"http://hassio/{path}", headers=auth_headers)
LOG.debug(r)
r.raise_for_status()
j = r.json()
LOG.debug(j)
return j["data"]


def hassio_get(path):
return hassio_req(requests.get, path)


def hassio_post(path):
return hassio_req(requests.post, path)


def list_snapshots():
snapshots = hassio_get("snapshots")["snapshots"]
# Sort them by creation date and reverse:
# we want to back up the most recent first.
snapshots.sort(key=lambda x: arrow.get(x["date"]))
snapshots.reverse()
return snapshots
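
These helpers talk to the Hass.io supervisor API using the `HASSIO_TOKEN` environment variable the supervisor provides, so the sketch below only works when run inside the add-on container:

```python
from dropbox_upload import hassio

# Newest snapshot first, matching the upload order used by backup().
for snapshot in hassio.list_snapshots():
    print(snapshot["slug"], snapshot["date"], snapshot["name"])
```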
35 changes: 35 additions & 0 deletions dropbox-upload/dropbox_upload/limit.py
@@ -0,0 +1,35 @@
import logging

import arrow

from . import backup, hassio

LOG = logging.getLogger(__name__)


def limit_snapshots(dbx, config, snapshots):

keep = config.get("keep")

if not keep:
LOG.warning("keep not set. We wont remove old snapshots")
return

if len(snapshots) <= keep:
LOG.info("Not reached the maximum number of snapshots")
return

LOG.info(f"Limiting snapshots to the {keep} most recent")

snapshots.sort(key=lambda x: arrow.get(x["date"]))
snapshots.reverse()

expired_snapshots = snapshots[keep:]

LOG.info(f"Deleting {len(expired_snapshots)} snapshots")

for snapshot in expired_snapshots:
LOG.info(f"Deleting {snapshot['name']} (slug: {snapshot['slug']}")
hassio.hassio_post(f"snapshots/{snapshot['slug']}/remove")
path = str(backup.dropbox_path(config, snapshot))
dbx.files_delete(path)
8 changes: 8 additions & 0 deletions dropbox-upload/dropbox_upload/util.py
@@ -0,0 +1,8 @@
def bytes_to_human(nbytes):
suffixes = ["B", "KB", "MB", "GB", "TB", "PB"]
i = 0
while nbytes >= 1024 and i < len(suffixes) - 1:
nbytes /= 1024.0
i += 1
f = ("%.2f" % nbytes).rstrip("0").rstrip(".")
return "%s %s" % (f, suffixes[i])