Skip to content

Commit

Permalink
Merge pull request #6 from Oxalide/fix/backup_root_files
Browse files Browse the repository at this point in the history
fix backup files in args.dir
  • Loading branch information
rekcah78 authored Jun 20, 2017
2 parents 00ba4df + cd327d0 commit 8724d35
Showing 1 changed file with 46 additions and 12 deletions.
58 changes: 46 additions & 12 deletions controller.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
#!/usr/bin/python

import os, argparse, subprocess, logging, boto3
import os, argparse, subprocess, logging, boto3, hashlib

parser = argparse.ArgumentParser(description='Multi runner backup to S3.')
parser.add_argument('--dir', '-D', required=True, help='root backup (ex: /home/)')
Expand Down Expand Up @@ -35,16 +35,50 @@ def add_msg(args, job):
except:
logging.error('Adding message '+directory+' in queue is failed')

def runner_files(args, directories):
excludes = ' --exclude '.join(directories)
try:
rcode = subprocess.call(["/usr/local/bin/rclone", "sync", "--exclude", excludes, args.dir, args.rclone+":"+args.bucket, "--quiet"])
if rcode != 0:
logging.error('Backup of files in %s : Failed', args.dir)
else:
logging.info('Backup of files in %s : OK', args.dir)
except:
logging.error('rclone not found')
def md5(fname):
    """Return the hexadecimal MD5 digest of the file at *fname*.

    The file is read in 4 KiB chunks so arbitrarily large files can be
    hashed without loading them fully into memory.
    """
    digest = hashlib.md5()
    with open(fname, "rb") as handle:
        chunk = handle.read(4096)
        while chunk:
            digest.update(chunk)
            chunk = handle.read(4096)
    return digest.hexdigest()

def runner_files(args):
    """Sync the regular files directly under args.dir with the S3 bucket.

    Lists the bucket's top-level keys (Delimiter='/'), then:
      - deletes S3 objects whose local counterpart no longer exists,
      - re-uploads (via rclone) local files whose MD5 differs from the
        object's ETag, plus any local file not yet present in the bucket.

    NOTE(review): the ETag equals the MD5 only for single-part uploads —
    multipart-uploaded objects would always look "changed"; confirm uploads
    stay below the multipart threshold.
    """
    s3 = boto3.client('s3')
    updatelist = []
    deletelist = []
    # Set: membership is tested once per local file below, O(1) vs O(n) list scan.
    unchanged = set()
    paginator = s3.get_paginator('list_objects')
    for result in paginator.paginate(Bucket=args.bucket, Delimiter='/', Prefix=''):
        if result.get('Contents') is not None:
            for obj in result.get('Contents'):
                key = obj['Key']
                filename = os.path.join(args.dir, key)
                if os.path.isfile(filename):
                    # ETag comes back quoted, e.g. '"abc123"' — strip the quotes.
                    etag = str(obj['ETag']).replace('"', '')
                    if str(md5(filename)) != etag:
                        updatelist.append(filename)
                    else:
                        unchanged.add(filename)
                else:
                    # Object has no local counterpart: remove it from the bucket.
                    deletelist.append(key)
    # Local files not seen (or seen as changed) above must be uploaded.
    for fname in os.listdir(args.dir):
        path = os.path.join(args.dir, fname)
        if not os.path.isdir(path) and path not in unchanged:
            updatelist.append(path)
    for key in deletelist:
        s3.delete_object(Bucket=args.bucket, Key=key)
    for filepath in updatelist:
        try:
            rcode = subprocess.call(["/usr/local/bin/rclone", "copy", filepath, args.rclone+":"+args.bucket, "--quiet"])
        except OSError:
            # Only a failed exec means the binary is missing/not runnable;
            # the original bare `except:` mislabeled every error this way.
            logging.error('rclone not found')
            continue
        if rcode != 0:
            logging.error('Backup of file %s : Failed', filepath)
        else:
            logging.info('Backup of file %s : OK', filepath)



directories = list()
for fname in os.listdir(args.dir):
Expand All @@ -58,4 +92,4 @@ def runner_files(args, directories):
exit(1)

add_msg(args, directories)
runner_files(args, directories)
runner_files(args)

0 comments on commit 8724d35

Please sign in to comment.