diff --git a/apprise/persistent_store.py b/apprise/persistent_store.py index cb9bb88fa2..f1d0b8355b 100644 --- a/apprise/persistent_store.py +++ b/apprise/persistent_store.py @@ -25,6 +25,7 @@ import os import re import gzip +import zlib import base64 import glob import tempfile @@ -114,7 +115,7 @@ def sha1(self): str(self).encode('utf-8'), usedforsecurity=False).hexdigest() except TypeError: - # Python <= v3.7 - usedforsecurity flag does not work + # Python <= v3.8 - usedforsecurity flag does not work return hashlib.sha1(str(self).encode('utf-8')).hexdigest() def json(self): @@ -299,7 +300,7 @@ class PersistentStore: __cache_key = 'cache' # Our Temporary working directory - temp_dir = '.tmp' + temp_dir = 'tmp' # The directory our persistent store content gets placed in data_dir = 'var' @@ -367,8 +368,8 @@ def __init__(self, path=None, namespace='default', mode=None): self.__dirty = False # A caching value to track persistent storage disk size - self.__size = None - self.__files = {} + self.__cache_size = None + self.__cache_files = {} # Internal Cache self._cache = None @@ -433,7 +434,8 @@ def read(self, key=None, compress=True): # No problem pass - except (OSError, UnicodeDecodeError, IOError) as e: + except (OSError, zlib.error, EOFError, UnicodeDecodeError, + IOError) as e: # We can't access the file or it does not exist logger.warning('Could not read with persistent key: %s', key) logger.debug('Persistent Storage Exception: %s' % str(e)) @@ -547,7 +549,7 @@ def write(self, data, key=None, compress=True): logger.trace('Removed temporary file: %s', ntf.name) except FileNotFoundError: - # no worries + # no worries; we were removing it anyway pass except (OSError, IOError) as e: @@ -577,7 +579,7 @@ def write(self, data, key=None, compress=True): 'Removed temporary file: %s', ntf.name) except FileNotFoundError: - # Not a problem + # no worries; we were removing it anyway pass except (OSError, IOError) as e: @@ -586,14 +588,14 @@ def write(self, data, key=None, 
compress=True): logger.debug('Persistent Storage Exception: %s' % str(e)) return False - # Reset our reference variables - self.__size = None - self.__files.clear() + # Reset our reference variables + self.__cache_size = None + self.__cache_files.clear() # Content installed return True - def __move(self, src, dst, keep_backup=True): + def __move(self, src, dst): """ Moves the new file in place and handles the old if it exists already If the transaction fails in any way, the old file is swapped back. @@ -616,7 +618,7 @@ def __move(self, src, dst, keep_backup=True): 'Removed previous persistent backup file: %s', dst_backup) except FileNotFoundError: - # Not a problem + # no worries; we were removing it anyway pass except (OSError, IOError) as e: @@ -668,36 +670,13 @@ def __move(self, src, dst, keep_backup=True): # Not a problem pass - except OSError as e: + except (OSError, IOError) as e: logger.warning( 'Failed to restore original persistent file: %s', dst) logger.debug('Persistent Storage Exception: %s' % str(e)) return False - if not keep_backup: - # - # Remove old backup file we do not wish to keep around - # - try: - # make sure the file isn't already present; if it is; remove it - os.unlink(dst_backup) - logger.trace( - 'Removed persistent backup file: %s', dst_backup) - - except FileNotFoundError: - # Very strange; likely a racing condition somewhere else, but - we should not fail as a result due to our expectations - having been met. We will create a log entry though.. 
- logger.debug( - 'A persistent storage disk/io write race ' - 'condition was detected') - - except (OSError, IOError) as e: - logger.warning( - 'Failed to remove persistent backup file: %s', dst_backup) - logger.debug('Persistent Storage Exception: %s' % str(e)) - return True def open(self, key=None, mode='r', buffering=-1, encoding=None, @@ -721,9 +700,6 @@ def open(self, key=None, mode='r', buffering=-1, encoding=None, # Nothing further can be done raise FileNotFoundError() - if key is None: - key = self.base_key - io_file = os.path.join(self.__data_path, f"{key}{self.__extension}") return open( io_file, mode=mode, buffering=buffering, encoding=encoding, @@ -769,11 +745,11 @@ def set(self, key, value, expires=None, persistent=True, lazy=True): if self.__dirty and self.__mode == PersistentStoreMode.FLUSH: # Flush changes to disk - return self.flush(sync=True) + return self.flush() return True - def clear(self, *args, all=False): + def clear(self, *args): """ Remove one or more cache entry by it's key @@ -781,12 +757,12 @@ def clear(self, *args, all=False): clear('key1', 'key2', key-12') Or clear everything: - clear(all=True) + clear() """ if self._cache is None and not self.__load_cache(): return False - if not all: + if args: for arg in args: try: @@ -809,7 +785,7 @@ def clear(self, *args, all=False): if self.__dirty and self.__mode == PersistentStoreMode.FLUSH: # Flush changes to disk - return self.flush(sync=True) + return self.flush() def prune(self): """ @@ -820,8 +796,8 @@ def prune(self): change = False for key in list(self._cache.keys()): - if not self._cache: - + if key not in self: + # It's identified as being expired if not change and self._cache[key].persistent: # track change only if content was persistent change = True @@ -833,7 +809,7 @@ def prune(self): if self.__dirty and self.__mode == PersistentStoreMode.FLUSH: # Flush changes to disk - return self.flush(sync=True) + return self.flush() return change @@ -854,7 +830,7 @@ def __load_cache(self): 
cache_file = self.cache_file try: with gzip.open(cache_file, 'rb') as f: - # Read our content from disk + # Read our ontent from disk self._cache = {} for k, v in json.loads(f.read().decode(self.encoding)).items(): co = CacheObject.instantiate(v) @@ -866,12 +842,14 @@ def __load_cache(self): # Track changes from our loadset self.__dirty = True - except (UnicodeDecodeError, json.decoder.JSONDecodeError): + except (UnicodeDecodeError, json.decoder.JSONDecodeError, zlib.error, + EOFError): # Let users known there was a problem self._cache = {} logger.warning( - 'Corrupted access persistent cache content' - f' {cache_file}') + 'Corrupted access persistent cache content: %s', + cache_file) + return False except FileNotFoundError: # No problem; no cache to load @@ -888,9 +866,9 @@ def __load_cache(self): # Ensure our dirty flag is set to False return True - def flush(self, sync=None, force=False): + def flush(self, force=False): """ - Save's our cache + Save's our cache to disk """ if self._cache is None or self.__mode == PersistentStoreMode.MEMORY: @@ -902,13 +880,9 @@ def flush(self, sync=None, force=False): logger.trace('Persistent cache is consistent with memory map') return True - if sync is None: - # Default based on Store Mode - sync = False if self.__mode == PersistentStoreMode.AUTO else True - # Unset our size lazy setting - self.__size = None - self.__files.clear() + self.__cache_size = None + self.__cache_files.clear() # Prepare our cache file cache_file = self.cache_file @@ -926,7 +900,7 @@ def flush(self, sync=None, force=False): backup_file) except FileNotFoundError: - # Not a problem + # no worries; we were removing it anyway pass except (OSError, IOError) as e: @@ -934,6 +908,7 @@ def flush(self, sync=None, force=False): 'Could not remove persistent cache backup: %s', backup_file) logger.debug('Persistent Storage Exception: %s' % str(e)) + return False try: os.rename(cache_file, backup_file) @@ -942,7 +917,7 @@ def flush(self, sync=None, force=False): 
backup_file) except FileNotFoundError: - # Not a problem + # Not a problem; do not create a log entry pass except (OSError, IOError) as e: @@ -985,7 +960,11 @@ def flush(self, sync=None, force=False): logger.trace( 'Persistent temporary file removed: %s', ntf.name) - except OSError as e: + except FileNotFoundError: + # no worries; we were removing it anyway + pass + + except (OSError, IOError) as e: logger.error( 'Persistent temporary file removal failed: %s', ntf.name) @@ -1010,7 +989,7 @@ def flush(self, sync=None, force=False): logger.trace('Removed temporary file: %s', ntf.name) except FileNotFoundError: - # Not a problem + # no worries; we were removing it anyway pass except (OSError, IOError) as e: @@ -1022,10 +1001,6 @@ def flush(self, sync=None, force=False): # Ensure our dirty flag is set to False self.__dirty = False - if sync: - # Flush our content to disk - os.sync() - return True def files(self, exclude=True, lazy=True): @@ -1033,16 +1008,16 @@ def files(self, exclude=True, lazy=True): Returns the total files """ - if lazy and exclude in self.__files: + if lazy and exclude in self.__cache_files: # Take an early exit with our cached results - return self.__files[exclude] + return self.__cache_files[exclude] elif self.__mode == PersistentStoreMode.MEMORY: # Take an early exit # exclude is our cache switch and can be either True or False. 
# For the below, we just set both cases and set them up as an # empty record - self.__files.update({True: [], False: []}) + self.__cache_files.update({True: [], False: []}) return [] if not lazy or self.__exclude_list is None: @@ -1064,48 +1039,49 @@ def files(self, exclude=True, lazy=True): try: if exclude: - self.__files[exclude] = \ + self.__cache_files[exclude] = \ [path for path in filter(os.path.isfile, glob.glob( self.__base_path + '/**/*', recursive=True)) if next((False for p in self.__exclude_list if p.match(path)), True)] else: # No exclusion list applied - self.__files[exclude] = \ + self.__cache_files[exclude] = \ [path for path in filter(os.path.isfile, glob.glob( self.__base_path + '/**/*', recursive=True))] except (OSError, IOError): # We can't access the directory or it does not exist + self.__cache_files[exclude] = [] pass - return self.__files[exclude] + return self.__cache_files[exclude] def size(self, exclude=True, lazy=True): """ Returns the total size of the persistent storage in bytes """ - if lazy and self.__size is not None: + if lazy and self.__cache_size is not None: # Take an early exit - return self.__size + return self.__cache_size elif self.__mode == PersistentStoreMode.MEMORY: # Take an early exit - self.__size = 0 - return self.__size + self.__cache_size = 0 + return self.__cache_size # Get a list of files (file paths) in the given directory try: - self.__size = sum( + self.__cache_size = sum( [os.stat(path).st_size for path in self.files(exclude=exclude, lazy=lazy)]) except (OSError, IOError): # We can't access the directory or it does not exist - pass + self.__cache_size = 0 - return self.__size + return self.__cache_size def __del__(self): """ @@ -1137,7 +1113,7 @@ def __delitem__(self, key): if self.__dirty and self.__mode == PersistentStoreMode.FLUSH: # Flush changes to disk - self.flush(sync=True) + self.flush() return @@ -1158,7 +1134,7 @@ def __setitem__(self, key, value): """ if self._cache is None and not 
self.__load_cache(): - return False + raise OSError("Could not set cache") if key not in self._cache and not self.set(key, value): raise OSError("Could not set cache") @@ -1173,7 +1149,7 @@ def __setitem__(self, key, value): if self.__dirty and self.__mode == PersistentStoreMode.FLUSH: # Flush changes to disk - self.flush(sync=True) + self.flush() return @@ -1222,6 +1198,12 @@ def delete(self, *args, all=None, temp=None, cache=None, validate=True): if cache is None: cache = True if all else False + if cache and self._cache: + # Reset our object + self._cache.clear() + # Reset dirty flag + self.__dirty = False + for path in self.files(exclude=False): # Some information we use to validate the actions of our clean() @@ -1239,13 +1221,7 @@ def delete(self, *args, all=None, temp=None, cache=None, validate=True): result['key'] if self.__valid_key.match(result['key']) else None) - if validate and key is None: - # we're set to validate and a non-valid file was found - logger.debug( - 'Persistent File cleanup ignoring file: %s', path) - continue - - elif key != self.__cache_key: + if validate and key != self.__cache_key: # We're not dealing with a cache key logger.debug( 'Persistent File cleanup ignoring file: %s', path) @@ -1295,7 +1271,7 @@ def delete(self, *args, all=None, temp=None, cache=None, validate=True): logger.info('Removed persistent file: %s', ppath) except FileNotFoundError: - # no worries + # no worries; we were removing it anyway pass except (OSError, IOError) as e: @@ -1305,8 +1281,8 @@ def delete(self, *args, all=None, temp=None, cache=None, validate=True): logger.debug('Persistent Storage Exception: %s' % str(e)) # Reset our reference variables - self.__size = None - self.__files.clear() + self.__cache_size = None + self.__cache_files.clear() return not has_error diff --git a/apprise/url.py b/apprise/url.py index 6f5726b4cd..9499deb350 100644 --- a/apprise/url.py +++ b/apprise/url.py @@ -235,13 +235,9 @@ def __init__(self, asset=None, **kwargs): # Secure 
Mode self.secure = kwargs.get('secure', None) - try: - if not isinstance(self.secure, bool): - # Attempt to detect - self.secure = self.schema[-1] == 's' - - except (TypeError, IndexError): - self.secure = False + if not isinstance(self.secure, bool): + # Attempt to detect + self.secure = self.schema[-1:] == 's' self.host = URLBase.unquote(kwargs.get('host')) self.port = kwargs.get('port') @@ -293,10 +289,10 @@ def __init__(self, asset=None, **kwargs): self.url_identifier_salt = \ kwargs.get('salt').encode(self.asset.encoding) - except (TypeError, ValueError): + except (UnicodeEncodeError, TypeError, ValueError): self.logger.warning( 'Invalid Unique URL Identifier Salt value (salt) was ' - 'specified {}'.format(kwargs.get('cto'))) + 'specified {}'.format(kwargs.get('salt'))) if 'tag' in kwargs: # We want to associate some tags with our notification service. @@ -378,7 +374,7 @@ def url(self, privacy=False, *args, **kwargs): default_port = 443 if self.secure else 80 - return '{schema}://{auth}{hostname}{port}{fullpath}?{params}'.format( + return '{schema}://{auth}{hostname}{port}{fullpath}{params}'.format( schema='https' if self.secure else 'http', auth=auth, # never encode hostname since we're expecting it to be a valid one @@ -387,7 +383,7 @@ def url(self, privacy=False, *args, **kwargs): else ':{}'.format(self.port), fullpath=URLBase.quote(self.fullpath, safe='/') if self.fullpath else '/', - params=URLBase.urlencode(params), + params=('?' 
+ URLBase.urlencode(params) if params else ''), ) def url_id(self, lazy=True, hash_engine=hashlib.sha1): @@ -430,9 +426,9 @@ def url_id(self, lazy=True, hash_engine=hashlib.sha1): if lazy and self.__url_identifier is not False: return self.__url_identifier - # Python v3.8 introduces usedforsecurity argument + # Python v3.9 introduces usedforsecurity argument kwargs = {'usedforsecurity': False} \ - if sys.version_info >= (3, 8) else {} + if sys.version_info >= (3, 9) else {} if self.url_identifier is False: # Disabled @@ -812,14 +808,32 @@ def url_parameters(self, *args, **kwargs): this class. """ - return { - # The socket read timeout - 'rto': str(self.socket_read_timeout), - # The request/socket connect timeout - 'cto': str(self.socket_connect_timeout), - # Certificate verification - 'verify': 'yes' if self.verify_certificate else 'no', - } + # parameters are only provided on demand to keep the URL short + params = {} + + # The socket read timeout + if self.socket_read_timeout != URLBase.socket_read_timeout: + params['rto'] = str(self.socket_read_timeout) + + # The request/socket connect timeout + if self.socket_connect_timeout != URLBase.socket_connect_timeout: + params['cto'] = str(self.socket_connect_timeout) + + # Certificate verification + if self.verify_certificate != URLBase.verify_certificate: + params['verify'] = 'yes' if self.verify_certificate else 'no' + + # Persistent Data Salt + if self.url_identifier_salt != URLBase.url_identifier_salt: + try: + params['salt'] = \ + self.url_identifier_salt.decode(self.asset.encoding) + + except UnicodeDecodeError: + # Bad data; don't pass it along + pass + + return params @staticmethod def post_process_parse_url_results(results): diff --git a/test/test_api.py b/test/test_api.py index 3808c771f8..75b57bb392 100644 --- a/test/test_api.py +++ b/test/test_api.py @@ -234,10 +234,6 @@ def __init__(self, **kwargs): # We fail whenever we're initialized raise TypeError() - def url(self, **kwargs): - # Support URL - return 
'' - @staticmethod def parse_url(url, *args, **kwargs): # always parseable @@ -248,10 +244,6 @@ def __init__(self, **kwargs): super().__init__( notify_format=NotifyFormat.HTML, **kwargs) - def url(self, **kwargs): - # Support URL - return '' - def send(self, **kwargs): # Pretend everything is okay return True @@ -347,10 +339,6 @@ async def async_notify(self, **kwargs): # Pretend everything is okay (async) raise TypeError() - def url(self, **kwargs): - # Support URL - return '' - class RuntimeNotification(NotifyBase): def notify(self, **kwargs): # Pretend everything is okay @@ -360,10 +348,6 @@ async def async_notify(self, **kwargs): # Pretend everything is okay (async) raise TypeError() - def url(self, **kwargs): - # Support URL - return '' - class FailNotification(NotifyBase): def notify(self, **kwargs): @@ -374,10 +358,6 @@ async def async_notify(self, **kwargs): # Pretend everything is okay (async) raise TypeError() - def url(self, **kwargs): - # Support URL - return '' - # Store our bad notification in our schema map N_MGR['throw'] = ThrowNotification @@ -409,10 +389,6 @@ def __init__(self, **kwargs): # Pretend everything is okay raise TypeError() - def url(self, **kwargs): - # Support URL - return '' - N_MGR.unload_modules() N_MGR['throw'] = ThrowInstantiateNotification @@ -440,6 +416,30 @@ def url(self, **kwargs): a.clear() assert len(a) == 0 + # Test our salt used for our persistent storage + plugin = a.instantiate('good://localhost?salt=abc123') + assert isinstance(plugin, NotifyBase) + assert plugin.url_identifier_salt == b'abc123' + assert plugin.url_id(lazy=False) + + asset = AppriseAsset(encoding='ascii') + plugin = a.instantiate('good://localhost?salt=ボールト"', asset=asset) + assert isinstance(plugin, NotifyBase) + # Encoding error makes our information ignored; we'll log it and + # keep our original value + assert plugin.url_identifier_salt == URLBase.url_identifier_salt + assert plugin.url_id(lazy=False) + + # Support those who may set a value 
afterwards as a string + assert isinstance(plugin, NotifyBase) + plugin = a.instantiate('good://localhost', asset=asset) + # should be a byte object, but we accommodate string for ease.. + plugin.url_identifier_salt = \ + (chr(40960) + u'abcd' + chr(1972)).encode('utf-8') + assert plugin.url_id(lazy=False) + # We could not generate the salt due to the encoding + assert 'salt' not in plugin.url() + # Instantiate a bad object plugin = a.instantiate(object, tag="bad_object") assert plugin is None @@ -830,6 +830,19 @@ def test_apprise_urlbase_object(): assert base.request_url == 'http://127.0.0.1/path/' assert base.url().startswith('http://user@127.0.0.1/path/') + # Generic initialization + base = URLBase(**{'schema': ''}) + assert base.request_timeout == (4.0, 4.0) + assert base.request_auth is None + assert base.request_url == 'http:///' + assert base.url().startswith('http:///') + + base = URLBase() + assert base.request_timeout == (4.0, 4.0) + assert base.request_auth is None + assert base.request_url == 'http:///' + assert base.url().startswith('http:///') + def test_apprise_unique_id(): """ @@ -908,6 +921,10 @@ def test_apprise_unique_id(): obj.url_identifier = b'test' assert obj.url_id() is not None + obj = Apprise.instantiate(url) + obj.url_identifier = 'test' + assert obj.url_id() is not None + # Testing Garbage for x in (31, object, 43.1): obj = Apprise.instantiate(url) @@ -939,12 +956,7 @@ def notify(self, **kwargs): # Pretend everything is okay return True - def url(self, **kwargs): - # Support URL - return '' - class HtmlNotification(NotifyBase): - # set our default notification format notify_format = NotifyFormat.HTML @@ -955,12 +967,7 @@ def notify(self, **kwargs): # Pretend everything is okay return True - def url(self, **kwargs): - # Support URL - return '' - class MarkDownNotification(NotifyBase): - # set our default notification format notify_format = NotifyFormat.MARKDOWN @@ -971,10 +978,6 @@ def notify(self, **kwargs): # Pretend everything is okay 
return True - def url(self, **kwargs): - # Support URL - return '' - # Store our notifications into our schema map N_MGR['text'] = TextNotification N_MGR['html'] = HtmlNotification @@ -1183,10 +1186,6 @@ class TestDisabled01Notification(NotifyBase): # in the next part of the testing service_name = 'na01' - def url(self, **kwargs): - # Support URL - return '' - def notify(self, **kwargs): # Pretend everything is okay (so we don't break other tests) return True @@ -1208,10 +1207,6 @@ def __init__(self, *args, **kwargs): # enable state changes **AFTER** we initialize self.enabled = False - def url(self, **kwargs): - # Support URL - return '' - def notify(self, **kwargs): # Pretend everything is okay (so we don't break other tests) return True @@ -1266,10 +1261,6 @@ class TesEnabled01Notification(NotifyBase): # in the next part of the testing service_name = 'good' - def url(self, **kwargs): - # Support URL - return '' - def send(self, **kwargs): # Pretend everything is okay (so we don't break other tests) return True @@ -1402,10 +1393,6 @@ class TestDetailNotification(NotifyBase): } }) - def url(self, **kwargs): - # Support URL - return '' - def send(self, **kwargs): # Pretend everything is okay (so we don't break other tests) return True @@ -1428,10 +1415,6 @@ class TestReq01Notification(NotifyBase): 'packages_recommended': 'django', } - def url(self, **kwargs): - # Support URL - return '' - def send(self, **kwargs): # Pretend everything is okay (so we don't break other tests) return True @@ -1458,10 +1441,6 @@ class TestReq02Notification(NotifyBase): ] } - def url(self, **kwargs): - # Support URL - return '' - def send(self, **kwargs): # Pretend everything is okay (so we don't break other tests) return True @@ -1484,10 +1463,6 @@ class TestReq03Notification(NotifyBase): 'packages_recommended': 'cryptography <= 3.4' } - def url(self, **kwargs): - # Support URL - return '' - def send(self, **kwargs): # Pretend everything is okay (so we don't break other tests) return 
True @@ -1504,10 +1479,6 @@ class TestReq04Notification(NotifyBase): # This is the same as saying there are no requirements requirements = None - def url(self, **kwargs): - # Support URL - return '' - def send(self, **kwargs): # Pretend everything is okay (so we don't break other tests) return True @@ -1526,10 +1497,6 @@ class TestReq05Notification(NotifyBase): 'packages_recommended': 'cryptography <= 3.4' } - def url(self, **kwargs): - # Support URL - return '' - def send(self, **kwargs): # Pretend everything is okay (so we don't break other tests) return True diff --git a/test/test_attach_http.py b/test/test_attach_http.py index ad58ed9117..36ecbad58f 100644 --- a/test/test_attach_http.py +++ b/test/test_attach_http.py @@ -86,15 +86,21 @@ def test_attach_http_query_string_dictionary(): """ - # no qsd specified - results = AttachHTTP.parse_url('http://localhost') + # Set verify off + results = AttachHTTP.parse_url('http://localhost?verify=no&rto=9&cto=8') assert isinstance(results, dict) # Create our object obj = AttachHTTP(**results) assert isinstance(obj, AttachHTTP) - assert re.search(r'[?&]verify=yes', obj.url()) + # verify is disabled and therefore set + assert re.search(r'[?&]verify=no', obj.url()) + + # Our connect timeout flag is set since it differs from the default + assert re.search(r'[?&]cto=8', obj.url()) + # Our read timeout flag is set since it differs from the default + assert re.search(r'[?&]rto=9', obj.url()) # Now lets create a URL with a custom Query String entry @@ -106,7 +112,8 @@ def test_attach_http_query_string_dictionary(): obj = AttachHTTP(**results) assert isinstance(obj, AttachHTTP) - assert re.search(r'[?&]verify=yes', obj.url()) + # verify is not in the URL as it is implied (default) + assert not re.search(r'[?&]verify=yes', obj.url()) # But now test that our custom arguments have also been set assert re.search(r'[?&]dl=1', obj.url()) diff --git a/test/test_persistent_store.py b/test/test_persistent_store.py index 
3f523023df..feaf9a7f5f 100644 --- a/test/test_persistent_store.py +++ b/test/test_persistent_store.py @@ -26,6 +26,7 @@ # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. +import time import os import pytest import json @@ -78,6 +79,32 @@ def test_disabled_persistent_storage(tmpdir): assert pc.set('key', 'value') assert pc.get('key') == 'value' + assert pc.set('key2', 'value') + pc.clear('key', 'key-not-previously-set') + assert pc.get('key2') == 'value' + assert pc.get('key') is None + + # Set it again + assert pc.set('key', 'another-value') + # Clears all + pc.clear() + assert pc.get('key2') is None + assert pc.get('key') is None + # A second call to clear on an already empty cache set + pc.clear() + + # No dirty flag is set as there is nothing to write to disk + pc.set('not-persistent', 'value', persistent=False) + del pc['not-persistent'] + with pytest.raises(KeyError): + # Can't delete it twice + del pc['not-persistent'] + + # A Persistent key + pc.set('persistent', 'value') + # Removes it and sets/clears the dirty flag + del pc['persistent'] + # After all of the above, nothing was done to the directory assert len(os.listdir(str(tmpdir))) == 0 @@ -126,6 +153,16 @@ def test_persistent_storage_general(tmpdir): # Default mode when a path is not provided assert pc.mode == PersistentStoreMode.MEMORY + assert pc.size() == 0 + assert pc.files() == [] + assert pc.files(exclude=True, lazy=False) == [] + assert pc.files(exclude=False, lazy=False) == [] + pc.set('key', 'value') + # There is no disk size utilized + assert pc.size() == 0 + assert pc.files(exclude=True, lazy=False) == [] + assert pc.files(exclude=False, lazy=False) == [] + # Create ourselves an attachment object pc = PersistentStore( namespace=namespace, path=str(tmpdir)) @@ -159,6 +196,47 @@ def test_persistent_storage_general(tmpdir): pc['unassigned_key'] +def test_persistent_storage_auto_mode(tmpdir): + """ + Persistent Storage Auto Write Testing + + 
""" + namespace = 'abc' + # Create ourselves an attachment object + pc = PersistentStore( + namespace=namespace, path=str(tmpdir), + mode=PersistentStoreMode.AUTO) + + pc.write(b'test') + with mock.patch('os.unlink', side_effect=FileNotFoundError()): + assert pc.delete(all=True) is True + + # Create a temporary file we can delete + with open(os.path.join(pc.path, pc.temp_dir, 'test.file'), 'wb') as fd: + fd.write(b'data') + + # Delete just the temporary files + assert pc.delete(temp=True) is True + + # Delete just the temporary files + # Create a cache entry and delete it + assert pc.set('key', 'value') is True + pc.write(b'test') + assert pc.delete(cache=True) is True + # Verify our data entry wasn't removed + assert pc.read() == b'test' + # But our cache was + assert pc.get('key') is None + + # A reverse of the above... create a cache an data variable and + # Clear the data; make sure our cache is still there + assert pc.set('key', 'value') is True + pc.write(b'test', key='iokey') is True + assert pc.delete('iokey') is True + assert pc.get('key') == 'value' + assert pc.read('iokey') is None + + def test_persistent_storage_flush_mode(tmpdir): """ Persistent Storage Forced Write Testing @@ -198,7 +276,7 @@ def test_persistent_storage_flush_mode(tmpdir): assert pc.set('key', 'value') path_content = os.listdir(path) - # var, cache.psdata, and .tmp + # var, cache.psdata, and tmp assert len(path_content) == 3 # Assignments (causes another disk write) @@ -244,11 +322,11 @@ def test_persistent_storage_flush_mode(tmpdir): assert len(path_content) == 4 # Our temporary directory used for all file handling in this namespace - assert '.tmp' in path_content + assert pc.temp_dir in path_content # Our cache file assert os.path.basename(pc.cache_file) in path_content - path = os.path.join(pc.path, '.tmp') + path = os.path.join(pc.path, pc.temp_dir) path_content = os.listdir(path) # We always do our best to clean any temporary files up @@ -348,6 +426,104 @@ def read(*args, 
**kwargs): # Restore setting pc.max_file_size = _prev_max_file_size + # Reset + pc.delete() + + assert pc.write('data') + # Corrupt our data + data = pc.read(compress=False)[:20] + pc.read(compress=False)[:10] + pc.write(data, compress=False) + + # Now we'll get an exception reading back the corrupted data + assert pc.read() is None + + # Keep in mind though the data is still there; operator should write + # and read the way they expect to and things will work out fine + # This test just proves that Apprise Peresistent storage still + # gracefully handles bad data + assert pc.read(compress=False) == data + + # No key exists also returns None + assert pc.read('no-key-exists') is None + + pc.write(b'test') + pc['key'] = 'value' + with mock.patch('os.unlink', side_effect=FileNotFoundError()): + assert pc.delete(all=True) is True + with mock.patch('os.unlink', side_effect=OSError()): + assert pc.delete(all=True) is False + + # Create a temporary file we can delete + tmp_file = os.path.join(pc.path, pc.temp_dir, 'test.file') + with open(tmp_file, 'wb') as fd: + fd.write(b'data') + + assert pc.set('key', 'value') is True + pc.write(b'test', key='iokey') is True + # Delete just the temporary files + assert pc.delete(temp=True) is True + assert os.path.exists(tmp_file) is False + # our other entries are untouched + assert pc.get('key') == 'value' + assert pc.read('iokey') == b'test' + + # Delete just the temporary files + # Create a cache entry and delete it + assert pc.set('key', 'value') is True + pc.write(b'test') + assert pc.delete(cache=True) is True + # Verify our data entry wasn't removed + assert pc.read() == b'test' + # But our cache was + assert pc.get('key') is None + + # A reverse of the above... 
create a cache an data variable and + # Clear the data; make sure our cache is still there + assert pc.set('key', 'value') is True + pc.write(b'test', key='iokey') is True + assert pc.delete('iokey') is True + assert pc.get('key') == 'value' + assert pc.read('iokey') is None + + # Create some custom files + cust1_file = os.path.join(pc.path, 'test.file') + cust2_file = os.path.join(pc.path, pc.data_dir, 'test.file') + with open(cust1_file, 'wb') as fd: + fd.write(b'data') + with open(cust2_file, 'wb') as fd: + fd.write(b'data') + + # Even after a full flush our files will exist + assert pc.delete() + assert os.path.exists(cust1_file) is True + assert os.path.exists(cust2_file) is True + + # However, if we turn off validate, we do a full sweep because these + # unknown files are lingering in our directory space + assert pc.delete(validate=False) + assert os.path.exists(cust1_file) is False + assert os.path.exists(cust2_file) is False + + pc['key'] = 'value' + pc['key2'] = 'value2' + assert 'key' in pc + assert 'key2' in pc + pc.clear('key') + assert 'key' not in pc + assert 'key2' in pc + + # Set expired content + pc.set( + 'expired', 'expired-content', + expires=datetime.now() - timedelta(days=1)) + + # It's actually there... 
but it's expired so our persistent + # storage is behaving as it should + assert 'expired' not in pc + assert pc.get('expired') is None + # Prune our content + pc.prune() + def test_persistent_storage_corruption_handling(tmpdir): """ @@ -405,6 +581,11 @@ def test_persistent_storage_corruption_handling(tmpdir): namespace=namespace, path=str(tmpdir), mode=PersistentStoreMode.FLUSH) + # Test our force flush + pc.flush(force=True) + # double call + pc.flush(force=True) + # File is corrupted assert 'mykey' not in pc pc['mykey'] = 42 @@ -436,26 +617,52 @@ def test_persistent_storage_corruption_handling(tmpdir): with mock.patch('tempfile.NamedTemporaryFile', side_effect=OSError()): assert not pc.flush(force=True) - # Temporary file cleanup failure - with mock.patch('tempfile._TemporaryFileWrapper.close', - side_effect=OSError()): - assert not pc.flush(force=True) + # Temporary file cleanup failure + with mock.patch('tempfile._TemporaryFileWrapper.close', + side_effect=OSError()): + assert not pc.flush(force=True) with mock.patch('tempfile._TemporaryFileWrapper.close', side_effect=(OSError(), None)): with mock.patch('os.unlink', side_effect=(OSError())): assert not pc.flush(force=True) + with mock.patch( + 'tempfile._TemporaryFileWrapper.close', side_effect=OSError()): + assert not pc.flush(force=True) + + with mock.patch( + 'tempfile._TemporaryFileWrapper.close', + side_effect=(OSError(), None)): + with mock.patch('os.unlink', side_effect=OSError()): + assert not pc.flush(force=True) + + with mock.patch( + 'tempfile._TemporaryFileWrapper.close', + side_effect=(OSError(), None)): + with mock.patch('os.unlink', side_effect=FileNotFoundError()): + assert not pc.flush(force=True) + del pc + # directory initialization okay + pc = PersistentStore( + namespace=namespace, path=str(tmpdir), + mode=PersistentStoreMode.FLUSH) -def test_persistent_storage_cache_io_errors(tmpdir): - """ - Test persistent storage when there is a variety of disk issues - """ + # Allows us to play with 
encoding errors + pc.encoding = 'ascii' - # Namespace - namespace = 'abc123' + # Handle write() calls + with mock.patch('os.stat', side_effect=OSError()): + # We fail to fetch the filesize of our old file causing us to fail + assert pc.write('abcd') is False + + # ボールト translates to vault (no bad word here) :) + data = "ボールト" + + # We'll have encoding issues + assert pc.write(data) is False with mock.patch('gzip.open', side_effect=OSError()): pc = PersistentStore(namespace=namespace, path=str(tmpdir)) @@ -463,9 +670,158 @@ def test_persistent_storage_cache_io_errors(tmpdir): # Falls to default assert pc.get('key') is None + pc = PersistentStore(namespace=namespace, path=str(tmpdir)) + with pytest.raises(OSError): + pc['key'] = 'value' + + pc = PersistentStore(namespace=namespace, path=str(tmpdir)) with pytest.raises(KeyError): pc['key'] + pc = PersistentStore(namespace=namespace, path=str(tmpdir)) + with pytest.raises(KeyError): + del pc['key'] + + pc = PersistentStore(namespace=namespace, path=str(tmpdir)) + # Fails to set key + assert pc.set('key', 'value') is False + + pc = PersistentStore(namespace=namespace, path=str(tmpdir)) + # Fails to clear + assert pc.clear() is False + + pc = PersistentStore(namespace=namespace, path=str(tmpdir)) + # Fails to prune + assert pc.prune() is False + + # Set some expired content + pc.set( + 'key', 'value', persistent=False, + expires=datetime.now() - timedelta(days=1)) + pc.set( + 'key2', 'value2', persistent=True, + expires=datetime.now() - timedelta(days=1)) + + # Set some un-expired content + pc.set('key3', 'value3', persistent=True) + pc.set('key4', 'value4', persistent=False) + assert pc.prune() is True + + # Second call has no change made + assert pc.prune() is False + + # Reset + pc.delete() + + # directory initialization okay + pc = PersistentStore( + namespace=namespace, path=str(tmpdir), + mode=PersistentStoreMode.FLUSH) + + # Write some content that expires almost immediately + pc.set( + 'key1', 'value', 
persistent=True, + expires=datetime.now() + timedelta(seconds=1)) + pc.set( + 'key2', 'value', persistent=True, + expires=datetime.now() + timedelta(seconds=1)) + pc.set( + 'key3', 'value', persistent=True, + expires=datetime.now() + timedelta(seconds=1)) + pc.flush() + + # Wait out our expiry + time.sleep(1.3) + + # now initialize our storage again + pc = PersistentStore( + namespace=namespace, path=str(tmpdir), + mode=PersistentStoreMode.FLUSH) + + # This triggers our __load_cache() which reads in a value + # determined to have already been expired + assert 'key1' not in pc + assert 'key2' not in pc + assert 'key3' not in pc + + # Sweep + pc.delete() + pc.set('key', 'value') + pc.set('key2', 'value2') + pc.write('more-content') + # Flush our content to disk + pc.flush() + + # Ideally we'd use os.stat below, but it is called inside a list + # comprehension block and mock doesn't appear to throw the exception + # there. So this is a bit of a cheat, but it works + with mock.patch('builtins.sum', side_effect=OSError()): + assert pc.size(exclude=True, lazy=False) == 0 + assert pc.size(exclude=False, lazy=False) == 0 + + pc = PersistentStore(namespace=namespace, path=str(tmpdir)) + with mock.patch('glob.glob', side_effect=OSError()): + assert pc.files(exclude=True, lazy=False) == [] + assert pc.files(exclude=False, lazy=False) == [] + + pc = PersistentStore( + namespace=namespace, path=str(tmpdir), + mode=PersistentStoreMode.FLUSH) + + # Causes an initialization + pc['abc'] = 1 + with mock.patch('os.unlink', side_effect=OSError()): + # Now we can't set data + with pytest.raises(OSError): + pc['new-key'] = 'value' + # However keys that already exist don't get caught in check + # and therefore won't throw + pc['abc'] = 'value' + + # + # Handles flush() when the queue is empty + # + pc.clear() + with mock.patch('os.unlink', side_effect=OSError()): + # We can't remove backup cache file + assert pc.flush(force=True) is False + + with mock.patch('os.unlink', 
side_effect=FileNotFoundError()): + # FileNotFound is not an issue + assert pc.flush(force=True) is True + + with mock.patch('os.rename', side_effect=OSError()): + # We can't create a backup + assert pc.flush(force=True) is False + + with mock.patch('os.rename', side_effect=FileNotFoundError()): + # FileNotFound is not an issue + assert pc.flush(force=True) is True + + # Flush any previous cache and data + pc.delete() + + # + # Handles flush() cases where is data to write + # + + # Create a key + pc.set('abc', 'a-test-value') + with mock.patch( + 'os.unlink', side_effect=(OSError(), None)): + # We failed to move our content in place + assert pc.flush(force=True) is False + + with mock.patch( + 'os.unlink', side_effect=(OSError(), FileNotFoundError())): + # We failed to move our content in place + assert pc.flush(force=True) is False + + with mock.patch( + 'os.unlink', side_effect=(OSError(), OSError())): + # We failed to move our content in place + assert pc.flush(force=True) is False + def test_persistent_custom_io(tmpdir): """ @@ -473,7 +829,7 @@ def test_persistent_custom_io(tmpdir): """ # Initialize it for memory only - pc = PersistentStore() + pc = PersistentStore(path=str(tmpdir)) with pytest.raises(AttributeError): pc.open('!invalid#-Key') @@ -504,6 +860,73 @@ def test_persistent_custom_io(tmpdir): fd.write(b'test') fd.close() + with pytest.raises(AttributeError): + pc.write(b'data', key='!invalid#-Key') + + pc.delete() + with mock.patch('os.unlink', side_effect=OSError()): + # Write our data and the __move() will fail under the hood + assert pc.write(b'test') is False + + pc.delete() + with mock.patch('os.rename', side_effect=OSError()): + # Write our data and the __move() will fail under the hood + assert pc.write(b'test') is False + + pc.delete() + with mock.patch('os.unlink', side_effect=(OSError(), FileNotFoundError())): + # Write our data and the __move() will fail under the hood + assert pc.write(b'test') is False + + pc.delete() + with 
mock.patch('os.unlink', side_effect=(OSError(), None)): + # Write our data and the __move() will fail under the hood + assert pc.write(b'test') is False + + pc.delete() + with mock.patch('os.unlink', side_effect=(OSError(), OSError())): + # Write our data and the __move() will fail under the hood + assert pc.write(b'test') is False + + pc.delete() + with mock.patch('os.rename', side_effect=(None, OSError(), None)): + assert pc.write(b'test') is False + + with mock.patch('os.rename', side_effect=(None, OSError(), OSError())): + assert pc.write(b'test') is False + + with mock.patch('os.rename', side_effect=( + None, OSError(), FileNotFoundError())): + assert pc.write(b'test') is False + + pc.delete() + with mock.patch('os.rename', side_effect=(None, None, None, OSError())): + # not enough reason to fail + assert pc.write(b'test') is True + + with mock.patch('os.stat', side_effect=OSError()): + with mock.patch('os.close', side_effect=(None, OSError())): + assert pc.write(b'test') is False + + pc.delete() + with mock.patch( + 'tempfile._TemporaryFileWrapper.close', side_effect=OSError()): + assert pc.write(b'test') is False + + pc.delete() + with mock.patch( + 'tempfile._TemporaryFileWrapper.close', + side_effect=(OSError(), None)): + with mock.patch('os.unlink', side_effect=OSError()): + assert pc.write(b'test') is False + + pc.delete() + with mock.patch( + 'tempfile._TemporaryFileWrapper.close', + side_effect=(OSError(), None)): + with mock.patch('os.unlink', side_effect=FileNotFoundError()): + assert pc.write(b'test') is False + def test_persistent_storage_cache_object(tmpdir): """ diff --git a/test/test_plugin_sfr.py b/test/test_plugin_sfr.py index 7f052ecc7c..82430bdfc0 100644 --- a/test/test_plugin_sfr.py +++ b/test/test_plugin_sfr.py @@ -113,8 +113,7 @@ 'privacy_url': ( 'sfr://service_id:****@0...0/0000000000?' 
'from=MyApp&timeout=30&voice=claire08s&' - 'lang=fr_FR&media=SMSUnicode&format=text' - '&overflow=upstream&rto=4.0&cto=4.0&verify=yes'), + 'lang=fr_FR&media=SMSUnicode'), # Our response expected server response 'requests_response_text': SFR_GOOD_RESPONSE, }), @@ -126,8 +125,7 @@ 'privacy_url': ( 'sfr://service_id:****@0...0/0000000000?' 'from=&timeout=2880&voice=laura8k&' - 'lang=en_US&media=SMSUnicode&format=text' - '&overflow=upstream&rto=4.0&cto=4.0&verify=yes'), + 'lang=en_US&media=SMSUnicode'), # Our response expected server response 'requests_response_text': SFR_GOOD_RESPONSE, }), @@ -139,8 +137,7 @@ 'privacy_url': ( 'sfr://service_id:****@0...0/0000000000?' 'from=&timeout=2880&voice=claire08s&' - 'lang=fr_FR&media=SMS&format=text' - '&overflow=upstream&rto=4.0&cto=4.0&verify=yes'), + 'lang=fr_FR&media=SMS'), # Our response expected server response 'requests_response_text': SFR_GOOD_RESPONSE, }), @@ -152,8 +149,7 @@ 'privacy_url': ( 'sfr://service_id:****@0...0/0000000000?' 'from=&timeout=2880&voice=claire08s&' - 'lang=fr_FR&media=SMSUnicode&format=text' - '&overflow=upstream&rto=4.0&cto=4.0&verify=yes'), + 'lang=fr_FR&media=SMSUnicode'), # Our failed notification expected server response 'requests_response_text': SFR_BAD_RESPONSE, 'requests_response_code': requests.codes.ok,