-
Notifications
You must be signed in to change notification settings - Fork 11
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
Better locking #12
Labels
enhancement
New feature or request
Comments
Proof of concept with fcntl:

from multiprocessing.pool import Pool
import os
import json
import contextlib
import fcntl
import time
RUNS_PER_WORKER = 30_000  # iterations each worker performs (per writer and per reader)
WORKERS = 10  # number of writer processes; an equal number of readers is also spawned
def writer(runs=None, path="test.json"):
    """Increment the "counter" key of a JSON file under an exclusive lock.

    Each iteration acquires an exclusive ``fcntl`` lock on the open file,
    decodes the JSON document, increments ``counter``, rewrites the file
    from the start, and truncates any stale trailing bytes.

    Args:
        runs: number of increments to perform; defaults to the module-level
            RUNS_PER_WORKER, so existing callers are unchanged.
        path: JSON file holding a ``{"counter": int}`` document; the original
            hard-coded "test.json" remains the default.
    """
    if runs is None:
        runs = RUNS_PER_WORKER
    for _ in range(runs):
        with open(path, "r+b") as f:
            fcntl.lockf(f, fcntl.LOCK_EX)
            try:
                counter = json.loads(f.read())
                counter["counter"] += 1
                f.seek(0)
                f.write(json.dumps(counter).encode())
                # Drop leftover bytes in case the new payload is shorter.
                f.truncate()
            finally:
                # Closing the file would also release the lock, but unlocking
                # explicitly keeps the critical section tight even if the
                # JSON decode raises.
                fcntl.lockf(f, fcntl.LOCK_UN)
def reader(runs=None, path="test.json"):
    """Decode the JSON file *runs* times under a shared ``fcntl`` lock.

    Args:
        runs: number of reads to perform; defaults to the module-level
            RUNS_PER_WORKER, so existing callers are unchanged.
        path: JSON file to read; defaults to the original "test.json".

    Returns:
        The last decoded document, or None when ``runs`` is 0.  The original
        discarded the value, and pool callers ignore the return, so exposing
        it is backward compatible.
    """
    if runs is None:
        runs = RUNS_PER_WORKER
    last = None
    for _ in range(runs):
        with open(path, "r+") as f:
            fcntl.lockf(f, fcntl.LOCK_SH)
            try:
                last = json.loads(f.read())
            finally:
                fcntl.lockf(f, fcntl.LOCK_UN)
    return last
if __name__ == "__main__":
    t1 = time.time()
    # Create the counter file atomically (O_EXCL); keep it if it exists.
    with contextlib.suppress(FileExistsError):
        fd = os.open("test.json", os.O_CREAT | os.O_RDWR | os.O_EXCL)
        os.write(fd, json.dumps({"counter": 0}).encode())
        os.close(fd)
    pool = Pool(WORKERS)
    # Keep the AsyncResult handles: apply_async swallows worker exceptions
    # unless .get() is called, which previously made failures invisible and
    # the reported throughput meaningless on error.
    results = []
    for _ in range(WORKERS):
        results.append(pool.apply_async(writer))
        results.append(pool.apply_async(reader))
    pool.close()
    pool.join()
    for r in results:
        r.get()  # re-raise any exception that occurred in a worker
    td = time.time() - t1
    print(f"Time: {td:.2f} seconds, per second: {WORKERS * RUNS_PER_WORKER / td:.2f}")
    # Reported in the issue: ~19076 op/s with plain fcntl locking.
Current solution:

from multiprocessing.pool import Pool
import os
import json
import contextlib
import time
import dictdatabase as DDB
RUNS_PER_WORKER = 3_000  # fewer iterations than the fcntl benchmark: DDB is slower per op
WORKERS = 10  # number of writer processes; an equal number of readers is also spawned
def writer():
    """Increment the shared counter repeatedly via a DDB write session."""
    # Each pool worker is a fresh process, so the storage directory has to
    # be configured here rather than in the parent.
    DDB.config.storage_directory = "."
    for _ in range(RUNS_PER_WORKER):
        with DDB.at("test").session() as (session, doc):
            doc["counter"] += 1
            session.write()
def reader():
    """Read the "test" document from DDB repeatedly, discarding the result."""
    # Configure storage per process; pool workers do not inherit this.
    DDB.config.storage_directory = "."
    remaining = RUNS_PER_WORKER
    while remaining:
        DDB.at("test").read()
        remaining -= 1
if __name__ == "__main__":
    t1 = time.time()
    # Create the counter file atomically (O_EXCL); keep it if it exists.
    with contextlib.suppress(FileExistsError):
        fd = os.open("test.json", os.O_CREAT | os.O_RDWR | os.O_EXCL)
        os.write(fd, json.dumps({"counter": 0}).encode())
        os.close(fd)
    pool = Pool(WORKERS)
    # Keep the AsyncResult handles: apply_async swallows worker exceptions
    # unless .get() is called, which previously made failures invisible and
    # the reported throughput meaningless on error.
    results = []
    for _ in range(WORKERS):
        results.append(pool.apply_async(writer))
        results.append(pool.apply_async(reader))
    pool.close()
    pool.join()
    for r in results:
        r.get()  # re-raise any exception that occurred in a worker
    td = time.time() - t1
    print(f"Time: {td:.2f} seconds, per second: {WORKERS * RUNS_PER_WORKER / td:.2f}")
    # Reported in the issue: ~538 op/s with the current DictDataBase locking.
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment
Problems
Solution for 1
Solution for 1 and 2
Using a different locking mechanism could fix both problems at once
The text was updated successfully, but these errors were encountered: