Merge pull request #14 from mkrd/benchmark-scenarios
Add automatic indexing
Showing 9 changed files with 163 additions and 53 deletions.
@@ -1,6 +1,7 @@
 .venv/
 .ddb_storage_testing/
 .ddb_pytest_storage
+.ddb/
 ddb_storage
 test_db/
 *.prof
@@ -0,0 +1 @@
+TEST_DIR = ".ddb_pytest_storage"
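The new `TEST_DIR` constant centralizes where the pytest suite stores its databases. A minimal sketch of how a suite might wire it into DictDataBase with a session-scoped fixture — the `conftest.py` placement and fixture name are assumptions, not part of this diff:

```python
# conftest.py (hypothetical) — route all test storage into TEST_DIR
import shutil

import pytest
import dictdatabase as DDB
from tests import TEST_DIR


@pytest.fixture(scope="session", autouse=True)
def use_test_storage():
    # Point DDB at the dedicated test directory for the whole session
    DDB.config.storage_directory = TEST_DIR
    yield
    # Remove everything the tests wrote
    shutil.rmtree(TEST_DIR, ignore_errors=True)
```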
@@ -1,52 +1,79 @@
-from calendar import c
+import json
 import dictdatabase as DDB
 from multiprocessing import Pool
 import shutil
 import time
 import os

-from utils import incrementor, print_and_assert_results
+from utils import print_and_assert_results, db_job, make_table


-def proc_job(i, n, tables, sd, uc, uo, id, sk):
+def proc_job(mode, n, tables, sd, uc, uo, id, sk):
 	DDB.config.storage_directory = sd
 	DDB.config.use_compression = uc
 	DDB.config.use_orjson = uo
 	DDB.config.indent = id
 	DDB.config.sort_keys = sk
-	DDB.locking.SLEEP_TIMEOUT = 0.0 if i % 4 < 2 else 0.001
-	incrementor(i, n, tables)
+	DDB.locking.SLEEP_TIMEOUT = 0.001
+	db_job(mode, tables, n)


-def parallel_stress(tables=2, proc_count=8, per_process=512):
+def parallel_stressor(file_count, readers, writers, operations_per_process, big_file, compression):
 	# Create Tables
-	for t in range(tables):
-		DDB.at(f"incr{t}").create({"counter": 0}, force_overwrite=True)
+	for t in range(file_count):
+		if big_file:
+			with open(os.path.join(os.getcwd(), "test_db/production_database/tasks.json"), "r") as f:
+				db = json.loads(f.read())
+			db["counter"] = {"counter": 0}
+		else:
+			db = {"counter": {"counter": 0}}
+
+		DDB.at(f"incr{t}").create(db, force_overwrite=True)

 	# Execute process pool running incrementor as the target task
 	t1 = time.monotonic()
-	pool = Pool(processes=proc_count)
-	for i in range(proc_count):
-		# Each process will enter this file again, but not as __main__
-		# So only the outside context is executed, and then the incrementor function
-		# This means we need to pass the config since the process is "fresh"
-		pool.apply_async(proc_job, args=(i, per_process, tables,
+	pool = Pool(processes=readers + writers)
+	for mode in "w" * writers + "r" * readers:
+		pool.apply_async(proc_job, args=(mode, operations_per_process, file_count,
 			DDB.config.storage_directory,
-			DDB.config.use_compression,
+			compression,
 			DDB.config.use_orjson,
 			DDB.config.indent,
 			DDB.config.sort_keys,
 		))
 	pool.close()
 	pool.join()
 	t2 = time.monotonic()
-	print_and_assert_results(proc_count, per_process, tables, t1, t2)
+	print_and_assert_results(readers, writers, operations_per_process, file_count, big_file, compression, t1, t2)


 if __name__ == "__main__":
 	DDB.config.storage_directory = ".ddb_bench_parallel"
-	try:
-		shutil.rmtree(".ddb_bench_parallel", ignore_errors=True)
-		os.mkdir(".ddb_bench_parallel")
-		parallel_stress()
-	finally:
-		shutil.rmtree(".ddb_bench_parallel", ignore_errors=True)
+	operations_per_process = 4
+	for file_count, readers, writers in [(1, 4, 4), (1, 8, 1), (1, 1, 8), (4, 8, 8)]:
+		print("")
+		print(f"✨ Scenario: {file_count} files, {readers} readers, {writers} writers, {operations_per_process} operations per process")
+		for big_file, compression in [(False, False), (False, True), (True, False), (True, True)]:
+			try:
+				shutil.rmtree(".ddb_bench_parallel", ignore_errors=True)
+				os.mkdir(".ddb_bench_parallel")
+				parallel_stressor(file_count, readers, writers, operations_per_process, big_file, compression)
+			finally:
+				shutil.rmtree(".ddb_bench_parallel", ignore_errors=True)
+
+
+# Test Matrix Rows (Scenarios)
+# 1: 1 file, 4 reading 4 writing 200 times each
+# 2: 1 file, 8 reading 200 times each
+# 3: 1 file, 8 writing 200 times each
+# 4: 4 files, 8 reading 8 writing 200 times each
+
+# Test Matrix Columns (Configurations)
+# 1: Big File (50mb), compression
+# 2: Small File, compression
+# 3: Big File (50mb), no compression
+# 4: Small File, no compression
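The reworked `proc_job` delegates to `db_job`, which this diff imports from `utils` but does not show. A plausible sketch of such a helper under the `"r"`/`"w"` modes used above — the body is an assumption inferred from the old `incrementor` and the `{"counter": {"counter": 0}}` layout that `parallel_stressor` creates, not the committed implementation:

```python
# Hypothetical reconstruction of utils.db_job — not part of this diff.
import random

import dictdatabase as DDB


def db_job(mode, tables, n):
    # Each worker process performs n operations against randomly chosen files.
    for _ in range(n):
        t = random.randint(0, tables - 1)
        if mode == "r":
            # Read path: load the file under a read lock
            DDB.at(f"incr{t}").read()
        else:
            # Write path: increment the nested counter inside a session,
            # so the write is guarded by DDB's file locking
            with DDB.at(f"incr{t}").session() as (session, db):
                db["counter"]["counter"] += 1
                session.write()
```

Whatever its exact body, the key change is that each process now runs in a fixed read or write role instead of the old mixed `incrementor`, which is what lets the scenario loop pit configurable reader and writer counts against each other.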
@@ -0,0 +1,8 @@
+from dictdatabase.locking import path_str
+from tests import TEST_DIR
+
+
+def test_path_str():
+	# Testing the function path_str.
+	assert path_str("db", "1", 1, "1") == f"{TEST_DIR}/.ddb/db.1.1.1.lock"
+	assert path_str("db/nest", "1", 1, "1") == f"{TEST_DIR}/.ddb/db/nest.1.1.1.lock"
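These assertions pin down the lock-file naming scheme: lock files live in a `.ddb` directory inside the configured storage directory, and a nested database name like `db/nest` keeps its path separator. A minimal sketch of a `path_str` consistent with these tests — the parameter names are assumptions; the real signature in `dictdatabase.locking` may differ:

```python
# Hypothetical reconstruction of locking.path_str, inferred from the test above.
from dictdatabase import config


def path_str(db_name, lock_id, time_ns, stage):
    # Lock files sit in "<storage_directory>/.ddb/" and encode the lock's
    # identity as "<db_name>.<lock_id>.<time_ns>.<stage>.lock".
    # A nested name such as "db/nest" keeps its directory component, so the
    # lock lands next to the nested database file.
    return f"{config.storage_directory}/.ddb/{db_name}.{lock_id}.{time_ns}.{stage}.lock"
```

With `config.storage_directory` set to `TEST_DIR`, this reproduces both expected strings, e.g. `path_str("db", "1", 1, "1")` yields `".ddb_pytest_storage/.ddb/db.1.1.1.lock"`.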