From 985017c99f43e418fd83433c5402a0e84091ff9b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Marcel=20Kr=C3=B6ker?= Date: Thu, 27 Oct 2022 11:56:55 +0200 Subject: [PATCH] Raise coverage to 96% --- dictdatabase/models.py | 6 ++-- dictdatabase/session.py | 7 +++-- pyproject.toml | 2 +- tests/test_delete.py | 13 ++++++++ tests/test_excepts.py | 5 +++ tests/test_partial.py | 2 ++ tests/test_read.py | 21 +++++++++++++ tests/test_threaded_sessions.py | 56 +++++++++++++++++++++++++++++++++ tests/test_write.py | 19 +++++++++++ 9 files changed, 124 insertions(+), 7 deletions(-) create mode 100644 tests/test_delete.py create mode 100644 tests/test_threaded_sessions.py diff --git a/dictdatabase/models.py b/dictdatabase/models.py index df5ba51..353877c 100644 --- a/dictdatabase/models.py +++ b/dictdatabase/models.py @@ -31,7 +31,7 @@ def exists(self, key=None) -> bool: return True # Key is passed and occurs is True try: - io_safe.subread(self.path, key=key) + io_safe.partial_read(self.path, key=key) return True except KeyError: return False @@ -73,7 +73,7 @@ def read(self, key: str = None, as_type: T = None) -> dict | T: - `as_type`: If provided, return the value as the given type. Eg. as=str will return str(value). 
""" if key is not None: - if "*" in key: + if "*" in self.path: raise ValueError("A key cannot be specified with a wildcard.") # Subread _, json_exists, _, ddb_exists = utils.db_paths(self.path) @@ -84,7 +84,7 @@ def read(self, key: str = None, as_type: T = None) -> dict | T: elif "*" in self.path: # Multiread pattern_paths = utils.expand_find_path_pattern(self.path) - data = {db_name: io_safe.read(db_name) for db_name in pattern_paths} + data = {n.split("/")[-1]: io_safe.read(n) for n in pattern_paths} else: # Normal read data = io_safe.read(self.path) diff --git a/dictdatabase/session.py b/dictdatabase/session.py index 4e8dbd1..4e78ce8 100644 --- a/dictdatabase/session.py +++ b/dictdatabase/session.py @@ -30,7 +30,7 @@ def __init__(self, db_name: str, key: str = None, as_type: T = None): self.db_name = db_name self.as_type = as_type if key is not None: - if "*" in key: + if "*" in db_name: raise ValueError("A key cannot be specified with a wildcard.") self.key = key self.session_type = SessionType.SUB @@ -64,7 +64,8 @@ def __enter__(self) -> Tuple["DDBSession", JSONSerializable | T]: dh = self.partial_handle.key_value self.data_handle = dh elif self.session_type == SessionType.MULTI: - self.data_handle = {n: io_unsafe.read(n) for n in self.db_name} + dh = {n.split("/")[-1]: io_unsafe.read(n) for n in self.db_name} + self.data_handle = dh return self, self.as_type(dh) if self.as_type is not None else dh except BaseException as e: if self.session_type == SessionType.MULTI: @@ -93,4 +94,4 @@ def write(self): io_unsafe.partial_write(self.partial_handle) elif self.session_type == SessionType.MULTI: for name in self.db_name: - io_unsafe.write(name, self.data_handle[name]) + io_unsafe.write(name, self.data_handle[name.split("/")[-1]]) diff --git a/pyproject.toml b/pyproject.toml index 4896059..5e7bb1a 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "dictdatabase" -version = "2.0.2" +version = "2.0.3" repository = 
"https://github.com/mkrd/DictDataBase" description = "Easy-to-use database using dicts" authors = ["Marcel Kröker "] diff --git a/tests/test_delete.py b/tests/test_delete.py new file mode 100644 index 0000000..4a5510e --- /dev/null +++ b/tests/test_delete.py @@ -0,0 +1,13 @@ +import dictdatabase as DDB + + +def test_delete(env, use_compression): + DDB.at("test_delete").create({"a": 1}, force_overwrite=True) + assert DDB.at("test_delete").read() == {"a": 1} + DDB.at("test_delete").delete() + assert DDB.at("test_delete").read() is None + + + +def test_delete_nonexistent(): + DDB.at("test_delete_nonexistent").delete() diff --git a/tests/test_excepts.py b/tests/test_excepts.py index 069087b..bfa2ebe 100644 --- a/tests/test_excepts.py +++ b/tests/test_excepts.py @@ -31,3 +31,8 @@ def test_except_on_session_in_session(env, use_compression, use_orjson, sort_key with DDB.at(name).session(as_type=pd) as (session, test): with DDB.at(name).session(as_type=pd) as (session2, test2): pass + + +def test_wildcard_and_subkey_except(): + with pytest.raises(ValueError): + DDB.at("test_wildcard_and_subkey_except/*").read(key="key") diff --git a/tests/test_partial.py b/tests/test_partial.py index c943ec3..ab055e3 100644 --- a/tests/test_partial.py +++ b/tests/test_partial.py @@ -26,6 +26,8 @@ def test_subread(env, use_compression, use_orjson, sort_keys, indent): DDB.at("test_subread2").create(j2, force_overwrite=True) assert DDB.at("test_subread2").read("b") == {"d": "e"} + assert DDB.at("none").read("none") is None + def test_subwrite(env, use_compression, use_orjson, sort_keys, indent): name = "test_subwrite" diff --git a/tests/test_read.py b/tests/test_read.py index 95462e6..7269774 100644 --- a/tests/test_read.py +++ b/tests/test_read.py @@ -10,6 +10,27 @@ def test_non_existent(env, use_compression, use_orjson, sort_keys, indent): assert d is None +def test_file_exists_error(env): + with open(f"{DDB.config.storage_directory}/test_file_exists_error.json", "w") as f: + f.write("") + 
with open(f"{DDB.config.storage_directory}/test_file_exists_error.ddb", "w") as f: + f.write("") + with pytest.raises(FileExistsError): + DDB.at("test_file_exists_error").read() + + +def test_exists(env): + DDB.at("test_exists").create({"a": 1}, force_overwrite=True) + assert DDB.at("test_exists").exists() + assert not DDB.at("test_exists/nonexistent").exists() + assert DDB.at("test_exists").exists("a") + assert not DDB.at("test_exists").exists("b") + + + + + + def test_read_integrity(): cases = [ r'{"a": "\\", "b": 2}', diff --git a/tests/test_threaded_sessions.py b/tests/test_threaded_sessions.py new file mode 100644 index 0000000..c57fd0a --- /dev/null +++ b/tests/test_threaded_sessions.py @@ -0,0 +1,56 @@ +import dictdatabase as DDB +from path_dict import pd +from concurrent.futures import ThreadPoolExecutor, wait + + +def increment_counters(n, tables): + for _ in range(n): + for t in range(tables): + # Perform a useless read operation + d = DDB.at(f"test_stress_threaded{t}").read() + # Perform a counter increment + with DDB.at(f"test_stress_threaded{t}").session(as_type=pd) as (session, d): + d["counter"] = lambda x: (x or 0) + 1 + session.write() + return True + + + +def run_threaded(fns_args: list, max_threads=None): + """ + Run a list of tasks concurrently, and return their results as + a list in the same order. A task is a 2-tuple of the function and an + n-tuple of the function's n arguments. + Remember: A 1-tuple needs a trailing comma, eg. (x,) + Return: A list of results, in the order of the input tasks. 
+ """ + if max_threads is None: + max_threads = len(fns_args) + results = [] + with ThreadPoolExecutor(max_threads) as pool: + for fn, args in fns_args: + future = pool.submit(fn, *args) + results.append(future) + wait(results) + return [r.result() for r in results] + + +def test_stress_threaded(env): + per_thread = 8 + tables = 2 + threads = 4 + # Create tables + for t in range(tables): + DDB.at(f"test_stress_threaded{t}").create({}, force_overwrite=True) + + # Create tasks for concurrent execution + tasks = [(increment_counters, (per_thread, tables)) for _ in range(threads)] + + # Run tasks concurrently + results = run_threaded(tasks, max_threads=threads) + + # Check correctness of results + assert results == [True] * threads + for t in range(tables): + db = DDB.at(f"test_stress_threaded{t}").read() + assert db["counter"] == threads * per_thread diff --git a/tests/test_write.py b/tests/test_write.py index 073caf2..c56932a 100644 --- a/tests/test_write.py +++ b/tests/test_write.py @@ -39,3 +39,22 @@ def test_write_compression_switching(env, use_orjson, sort_keys, indent): assert d == dd session.write() assert DDB.at(name).read() == d + + +def test_multi_session(env): + a = {"a": 1} + b = {"b": 2} + + DDB.at("test_multi_session/d1").create(a, force_overwrite=True) + DDB.at("test_multi_session/d2").create(b, force_overwrite=True) + + with DDB.at("test_multi_session/*").session() as (session, d): + assert d == {"d1": a, "d2": b} + session.write() + assert DDB.at("test_multi_session/*").read() == {"d1": a, "d2": b} + + +def test_write_wildcard_key_except(env): + with pytest.raises(ValueError): + with DDB.at("test/*").session(key="any") as (session, d): + pass