Commit

Raise coverage to 96%
mkrd committed Oct 27, 2022
1 parent fb842ea commit 985017c
Showing 9 changed files with 124 additions and 7 deletions.
6 changes: 3 additions & 3 deletions dictdatabase/models.py
@@ -31,7 +31,7 @@ def exists(self, key=None) -> bool:
return True
# Key is passed and occurs is True
try:
- io_safe.subread(self.path, key=key)
+ io_safe.partial_read(self.path, key=key)
return True
except KeyError:
return False
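
For context, exists() with a key now delegates to io_safe.partial_read and converts a KeyError into False. A minimal usage sketch of that behavior (the database name is illustrative), mirroring the new test_exists test below:

    import dictdatabase as DDB

    DDB.at("users").create({"a": 1}, force_overwrite=True)
    assert DDB.at("users").exists()         # the file exists
    assert DDB.at("users").exists("a")      # the key "a" exists inside the file
    assert not DDB.at("users").exists("b")  # missing key: KeyError is caught, False is returned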
@@ -73,7 +73,7 @@ def read(self, key: str = None, as_type: T = None) -> dict | T:
- `as_type`: If provided, return the value as the given type. Eg. as=str will return str(value).
"""
if key is not None:
if "*" in key:
if "*" in self.path:
raise ValueError("A key cannot be specified with a wildcard.")
# Subread
_, json_exists, _, ddb_exists = utils.db_paths(self.path)
@@ -84,7 +84,7 @@ def read(self, key: str = None, as_type: T = None) -> dict | T:
elif "*" in self.path:
# Multiread
pattern_paths = utils.expand_find_path_pattern(self.path)
- data = {db_name: io_safe.read(db_name) for db_name in pattern_paths}
+ data = {n.split("/")[-1]: io_safe.read(n) for n in pattern_paths}
else:
# Normal read
data = io_safe.read(self.path)
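The multiread change above keys the result dict by each file's basename rather than by the full pattern path. A minimal sketch of the resulting behavior (paths are illustrative), in line with the new test_multi_session test below:

    import dictdatabase as DDB

    DDB.at("users/d1").create({"a": 1}, force_overwrite=True)
    DDB.at("users/d2").create({"b": 2}, force_overwrite=True)

    # Keyed by basename ("d1", "d2"), not by the full paths "users/d1" and "users/d2"
    assert DDB.at("users/*").read() == {"d1": {"a": 1}, "d2": {"b": 2}}
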
7 changes: 4 additions & 3 deletions dictdatabase/session.py
@@ -30,7 +30,7 @@ def __init__(self, db_name: str, key: str = None, as_type: T = None):
self.db_name = db_name
self.as_type = as_type
if key is not None:
if "*" in key:
if "*" in db_name:
raise ValueError("A key cannot be specified with a wildcard.")
self.key = key
self.session_type = SessionType.SUB
@@ -64,7 +64,8 @@ def __enter__(self) -> Tuple["DDBSession", JSONSerializable | T]:
dh = self.partial_handle.key_value
self.data_handle = dh
elif self.session_type == SessionType.MULTI:
- self.data_handle = {n: io_unsafe.read(n) for n in self.db_name}
+ dh = {n.split("/")[-1]: io_unsafe.read(n) for n in self.db_name}
+ self.data_handle = dh
return self, self.as_type(dh) if self.as_type is not None else dh
except BaseException as e:
if self.session_type == SessionType.MULTI:
@@ -93,4 +94,4 @@ def write(self):
io_unsafe.partial_write(self.partial_handle)
elif self.session_type == SessionType.MULTI:
for name in self.db_name:
- io_unsafe.write(name, self.data_handle[name])
+ io_unsafe.write(name, self.data_handle[name.split("/")[-1]])
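
Correspondingly, a multi-file session handle is now keyed by basename, and write() re-derives the basename when persisting each file. A minimal sketch (paths are illustrative):

    import dictdatabase as DDB

    DDB.at("users/d1").create({"a": 1}, force_overwrite=True)
    DDB.at("users/d2").create({"b": 2}, force_overwrite=True)

    with DDB.at("users/*").session() as (session, d):
        d["d1"]["a"] = 42  # the handle is keyed by basename
        session.write()

    assert DDB.at("users/d1").read() == {"a": 42}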
2 changes: 1 addition & 1 deletion pyproject.toml
@@ -1,6 +1,6 @@
[tool.poetry]
name = "dictdatabase"
version = "2.0.2"
version = "2.0.3"
repository = "https://github.com/mkrd/DictDataBase"
description = "Easy-to-use database using dicts"
authors = ["Marcel Kröker <[email protected]>"]
13 changes: 13 additions & 0 deletions tests/test_delete.py
@@ -0,0 +1,13 @@
import dictdatabase as DDB


def test_delete(env, use_compression):
DDB.at("test_delete").create({"a": 1}, force_overwrite=True)
assert DDB.at("test_delete").read() == {"a": 1}
DDB.at("test_delete").delete()
assert DDB.at("test_delete").read() is None



def test_delete_nonexistent():
DDB.at("test_delete_nonexistent").delete()
5 changes: 5 additions & 0 deletions tests/test_excepts.py
@@ -31,3 +31,8 @@ def test_except_on_session_in_session(env, use_compression, use_orjson, sort_key
with DDB.at(name).session(as_type=pd) as (session, test):
with DDB.at(name).session(as_type=pd) as (session2, test2):
pass


def test_wildcard_and_subkey_except():
with pytest.raises(ValueError):
DDB.at("test_wildcard_and_subkey_except/*").read(key="key")
2 changes: 2 additions & 0 deletions tests/test_partial.py
@@ -26,6 +26,8 @@ def test_subread(env, use_compression, use_orjson, sort_keys, indent):
DDB.at("test_subread2").create(j2, force_overwrite=True)
assert DDB.at("test_subread2").read("b") == {"d": "e"}

assert DDB.at("none").read("none") is None


def test_subwrite(env, use_compression, use_orjson, sort_keys, indent):
name = "test_subwrite"
21 changes: 21 additions & 0 deletions tests/test_read.py
@@ -10,6 +10,27 @@ def test_non_existent(env, use_compression, use_orjson, sort_keys, indent):
assert d is None


def test_file_exists_error(env):
with open(f"{DDB.config.storage_directory}/test_file_exists_error.json", "w") as f:
f.write("")
with open(f"{DDB.config.storage_directory}/test_file_exists_error.ddb", "w") as f:
f.write("")
with pytest.raises(FileExistsError):
DDB.at("test_file_exists_error").read()


def test_exists(env):
DDB.at("test_exists").create({"a": 1}, force_overwrite=True)
assert DDB.at("test_exists").exists()
assert not DDB.at("test_exists/nonexistent").exists()
assert DDB.at("test_exists").exists("a")
assert not DDB.at("test_exists").exists("b")






def test_read_integrity():
cases = [
r'{"a": "\\", "b": 2}',
56 changes: 56 additions & 0 deletions tests/test_threaded_sessions.py
@@ -0,0 +1,56 @@
import dictdatabase as DDB
from path_dict import pd
from concurrent.futures import ThreadPoolExecutor, wait


def increment_counters(n, tables):
for _ in range(n):
for t in range(tables):
# Perform a useless read operation
d = DDB.at(f"test_stress_threaded{t}").read()
# Perform a counter increment
with DDB.at(f"test_stress_threaded{t}").session(as_type=pd) as (session, d):
d["counter"] = lambda x: (x or 0) + 1
session.write()
return True



def run_threaded(fns_args: list, max_threads=None):
"""
Run a list of tasks concurrently, and return their results as
a list in the same order. A task is a 2-tuple of the function and an
n-tuple of the function's n arguments.
Remember: A 1-tuple needs a trailing comma, eg. (x,)
Return: A list of results, in the order of the input tasks.
"""
if max_threads is None:
max_threads = len(fns_args)
results = []
with ThreadPoolExecutor(max_threads) as pool:
for fn, args in fns_args:
future = pool.submit(fn, *args)
results.append(future)
wait(results)
return [r.result() for r in results]


def test_stress_threaded(env):
per_thread = 8
tables = 2
threads = 4
# Create tables
for t in range(tables):
DDB.at(f"test_stress_threaded{t}").create({}, force_overwrite=True)

# Create tasks for concurrent execution
tasks = [(increment_counters, (per_thread, tables)) for _ in range(threads)]

# Run tasks concurrently
results = run_threaded(tasks, max_threads=threads)

# Check correctness of results
assert results == [True] * threads
for t in range(tables):
db = DDB.at(f"test_stress_threaded{t}").read()
assert db["counter"] == threads * per_thread
19 changes: 19 additions & 0 deletions tests/test_write.py
@@ -39,3 +39,22 @@ def test_write_compression_switching(env, use_orjson, sort_keys, indent):
assert d == dd
session.write()
assert DDB.at(name).read() == d


def test_multi_session(env):
a = {"a": 1}
b = {"b": 2}

DDB.at("test_multi_session/d1").create(a, force_overwrite=True)
DDB.at("test_multi_session/d2").create(b, force_overwrite=True)

with DDB.at("test_multi_session/*").session() as (session, d):
assert d == {"d1": a, "d2": b}
session.write()
assert DDB.at("test_multi_session/*").read() == {"d1": a, "d2": b}


def test_write_wildcard_key_except(env):
with pytest.raises(ValueError):
with DDB.at("test/*").session(key="any") as (session, d):
pass
