
Commit f8d846d

Minor fixes, comments, etc
sabeechen committed Nov 13, 2023
1 parent dc85eb4 commit f8d846d
Showing 10 changed files with 20 additions and 61 deletions.
6 changes: 0 additions & 6 deletions hassio-google-drive-backup/backup/config/settings.py
@@ -152,8 +152,6 @@ class Setting(Enum):
DEPRECTAED_ENABLE_BACKUP_STATE_SENSOR = "enable_snapshot_state_sensor"

UPLOAD_LIMIT_BYTES_PER_SECOND = "upload_limit_bytes_per_second"
UPLOAD_ALLOWED_START = "upload_allowed_start"
UPLOAD_ALLOWED_END = "upload_allowed_end"

def default(self):
if "staging" in VERSION and self in _STAGING_DEFAULTS:
@@ -301,8 +299,6 @@ def key(self):
Setting.MAX_BACKOFF_SECONDS: 60 * 60 * 2, # 2 hours

Setting.UPLOAD_LIMIT_BYTES_PER_SECOND: 0,
Setting.UPLOAD_ALLOWED_START: "",
Setting.UPLOAD_ALLOWED_END: ""
}

_STAGING_DEFAULTS = {
@@ -445,8 +441,6 @@ def key(self):
Setting.MAX_BACKOFF_SECONDS: "int(3600,)?",

Setting.UPLOAD_LIMIT_BYTES_PER_SECOND: "float(0,)?",
Setting.UPLOAD_ALLOWED_START: "match(^[0-2]\\d:[0-5]\\d$)?",
Setting.UPLOAD_ALLOWED_END: "match(^[0-2]\\d:[0-5]\\d$)?",
}

PRIVATE = [
6 changes: 4 additions & 2 deletions hassio-google-drive-backup/backup/drive/driverequests.py
@@ -20,6 +20,7 @@
from ..logger import getLogger
from backup.creds import Creds, Exchanger, DriveRequester
from datetime import timezone
from ..config.byteformatter import ByteFormatter

logger = getLogger(__name__)

@@ -74,7 +75,7 @@
@singleton
class DriveRequests():
@inject
def __init__(self, config: Config, time: Time, drive: DriveRequester, session: ClientSession, exchanger: Exchanger):
def __init__(self, config: Config, time: Time, drive: DriveRequester, session: ClientSession, exchanger: Exchanger, byte_formatter: ByteFormatter):
self.session = session
self.config = config
self.time = time
@@ -87,6 +88,7 @@ def __init__(self, config: Config, time: Time, drive: DriveRequester, session: C
self.last_attempt_location = None
self.last_attempt_count = 0
self.last_attempt_start_time = None
self.bytes_formatter = byte_formatter
self.tryLoadCredentials()

async def _getHeaders(self):
@@ -309,7 +311,7 @@ async def create(self, stream, metadata, mime_type):
"Content-Range": "bytes {0}-{1}/{2}".format(start, start + chunk_size - 1, total_size)
}
startTime = self.time.now()
logger.debug("Sending {0} bytes to Google Drive".format(chunk_size))
logger.debug("Sending {0} to Google Drive".format(self.bytes_formatter.format(chunk_size)))
try:
async with await self.retryRequest("PUT", location, headers=headers, data=data, patch_url=False) as partial:
# Base the next chunk size on how long it took to send the last chunk.
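
For context, the new ByteFormatter dependency is only exercised here through its format() call on the chunk size. A minimal sketch of what a human-readable byte formatter like this might do (the addon's real implementation in backup/config/byteformatter.py is not part of this diff, so the class below is illustrative only):

# Illustrative sketch only; the addon's real ByteFormatter may behave differently.
class SketchByteFormatter:
    UNITS = ["B", "KB", "MB", "GB", "TB"]

    def format(self, size):
        # Divide by 1024 until the value fits the current unit, then print one decimal place.
        size = float(size)
        for unit in self.UNITS:
            if abs(size) < 1024.0 or unit == self.UNITS[-1]:
                return "{0:.1f} {1}".format(size, unit)
            size /= 1024.0

# Example: SketchByteFormatter().format(5 * 1024 * 1024) -> "5.0 MB"
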
1 change: 0 additions & 1 deletion hassio-google-drive-backup/backup/model/coordinator.py
@@ -116,7 +116,6 @@ def nextSyncAttempt(self):
else:
scheduled += timedelta(seconds=self.nextSyncCheckOffset())
next_backup = self.nextBackupTime()
# TODO: This should check for when drive is allowed to backup
if next_backup is None:
return scheduled
else:
@@ -548,42 +548,6 @@
second.</span>
</div>
</div>
<div class="col s11 offset-s1 row">
<div class="input-field col m6 s12">
<i class="material-icons prefix">alarm_on</i>
<input type="text" id="upload_allowed_start" name="upload_allowed_start" pattern="^[0-2]\d:[0-5]\d$"
class="validate" />
<label for="upload_allowed_start">Upload Start Time</label>
</div>
<div class="input-field col m6 s12">
<i class="material-icons prefix">alarm_off</i>
<input type="text" id="upload_allowed_end" name="upload_allowed_end" pattern="^[0-2]\d:[0-5]\d$"
class="validate" />
<label for="upload_allowed_end">Upload End Time</label>
</div>
<div class="col s11 offset-s1">
<span class="helper-text">
Specify times of day in 24 hour local time when uploads to Google Drive are permitted (eg "16:30" for 4:30
PM). Backups can still be created outside this time, but uploads to Google Drive wait to start. If
start time is after end time, then its assumed the desired time span crosses midnight. If you have large
backups or slow internet, please ensure you give the addon enough time for the upload to complete.</span>
</div>
</div>
<div class="col s11 offset-s1 row">
<div class="input-field col s12 m12 s12">
<i class="material-icons prefix">refresh</i>
<input type="text" id="maximum_upload_chunk_bytes" name="maximum_upload_chunk_bytes"
pattern="^[ ]*([0-9,]*\.?[0-9]*)[ ]*(b|B|k|K|m|M|g|G|t|T|p|P|e|E|z|Z|y|Y)[a-zA-Z ]*[ ]*$"
class="validate" />
<label for="maximum_upload_chunk_bytes">Maximum Drive Upload Chunk Size</label>
<span class="helper-text">
Sets the maximum "chunk" size allowed for uploads to Google Drive. Larger sizes will upload faster but
delay reports
of progress and may cause interruptions on some network hardware. Between 1MB and 20MB is recommended,
minimum is 256 kB.
The value can be provided in any binary-prefix format, e.g. '256 Kb', '10.5 Mb', '3000Kb', etc.</span>
</div>
</div>
<div class="col s11 offset-s1 row">
<div class="input-field col m6 s12">
<i class="material-icons prefix">timelapse</i>
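
The removed helper text above describes an upload window that may wrap past midnight when the start time is later than the end time. A hypothetical check expressing that rule (this setting is being removed by this commit, and the function below is illustrative, not taken from the addon):

from datetime import time

def in_upload_window(now, start, end):
    # Window within a single day, e.g. 09:00-17:00.
    if start <= end:
        return start <= now <= end
    # Window crossing midnight, e.g. 22:00-06:00.
    return now >= start or now <= end

# Example: in_upload_window(time(23, 30), time(22, 0), time(6, 0)) -> True
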
2 changes: 1 addition & 1 deletion hassio-google-drive-backup/backup/time.py
@@ -14,7 +14,7 @@
from dateutil.parser import parse


# this hack if for dateutil, it imports Callable from the wrong place
# this hack is for dateutil, it imports Callable from the wrong place
collections.Callable = collections.abc.Callable


1 change: 0 additions & 1 deletion hassio-google-drive-backup/backup/util/__init__.py
@@ -7,4 +7,3 @@
from .rangelookup import RangeLookup
from .data_cache import DataCache, KEY_CREATED, KEY_I_MADE_THIS, KEY_PENDING, KEY_NOTE, KEY_IGNORE, KEY_LAST_SEEN, KEY_NAME, CACHE_EXPIRATION_DAYS, UpgradeFlags
from .token_bucket import TokenBucket
from .wrapper import Wrapper
14 changes: 14 additions & 0 deletions hassio-google-drive-backup/backup/util/token_bucket.py
@@ -4,6 +4,9 @@

@singleton
class TokenBucket:
"""
Implements a "leaky bucket" token algorithm, used to limit upload speed to Google Drive.
"""
@inject
def __init__(self, time: Time, capacity, fill_rate, initial_tokens=None):
self.capacity = float(capacity)
@@ -16,13 +19,24 @@ def __init__(self, time: Time, capacity, fill_rate, initial_tokens=None):
self.timestamp = self._time.monotonic()

def consume(self, tokens):
"""
Attempts to consume the given number of tokens, returning True if there were enough tokens available and
False otherwise.
"""
self._refill()
if self.tokens >= tokens:
self.tokens -= tokens
return True
return False

async def consumeWithWait(self, min_tokens: int, max_tokens: int):
"""
Consumes a number of tokens between min_tokens and max_tokens
- If at least max_tokens are available, consumes that many and returns immediately
- If fewer than min_tokens are available, waits until min_tokens are available and consumes them
- Else consumes as many tokens as are available
Always returns the positive number of tokens consumed.
"""
self._refill()
if self.tokens >= max_tokens:
self.consume(max_tokens)
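
A minimal sketch of how a token bucket like this throttles uploads, with bytes as tokens and a refill rate equal to the configured bytes-per-second limit (standalone and illustrative; the names below are not from this commit):

import asyncio
import time

class SketchTokenBucket:
    def __init__(self, capacity, fill_rate):
        self.capacity = float(capacity)
        self.fill_rate = float(fill_rate)  # tokens (bytes) added per second
        self.tokens = float(capacity)
        self.timestamp = time.monotonic()

    def _refill(self):
        # Add tokens for the time elapsed since the last check, capped at capacity.
        now = time.monotonic()
        self.tokens = min(self.capacity, self.tokens + (now - self.timestamp) * self.fill_rate)
        self.timestamp = now

    def consume(self, tokens):
        self._refill()
        if self.tokens >= tokens:
            self.tokens -= tokens
            return True
        return False

async def send_chunks(chunks, limit_bytes_per_second):
    # Allow bursts of up to one second's worth of bandwidth.
    bucket = SketchTokenBucket(limit_bytes_per_second, limit_bytes_per_second)
    for chunk in chunks:
        # Wait until enough tokens have accumulated to "pay" for this chunk.
        while not bucket.consume(len(chunk)):
            await asyncio.sleep(0.1)
        # ... upload the chunk to Google Drive here ...
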
10 changes: 0 additions & 10 deletions hassio-google-drive-backup/backup/util/wrapper.py

This file was deleted.

4 changes: 1 addition & 3 deletions hassio-google-drive-backup/config.json
@@ -97,9 +97,7 @@
"maximum_upload_chunk_bytes": "float(262144,)?",
"ha_reporting_interval_seconds": "int(1,)?",

"upload_limit_bytes_per_second": "float(0,)?",
"upload_allowed_start": "match(^[0-2]\\d:[0-5]\\d$)?",
"upload_allowed_end": "match(^[0-2]\\d:[0-5]\\d$)?"
"upload_limit_bytes_per_second": "float(0,)?"
},
"ports": {
"1627/tcp": 1627
1 change: 0 additions & 1 deletion hassio-google-drive-backup/dev/data/dev_options.json
@@ -23,6 +23,5 @@
"console_log_level": "TRACE",
"ingress_port": 56152,
"port": 56151,
"max_sync_interval_seconds": 600,
"cache_warmup_max_seconds": 300
}
