Commit
added support for valkey (experimental)
amirreza8002 committed Oct 29, 2024
1 parent 3288a34 commit a06d157
Showing 1 changed file with 18 additions and 18 deletions.
36 changes: 18 additions & 18 deletions panther/caching.py
@@ -6,7 +6,7 @@
 import orjson as json
 
 from panther.configs import config
-from panther.db.connections import redis
+from panther.db.connections import redis, valkey
 from panther.request import Request
 from panther.response import Response, ResponseDataTypes
 from panther.throttling import throttling_storage
@@ -38,14 +38,14 @@ def throttling_cache_key(request: Request, duration: timedelta) -> str:
 
 async def get_response_from_cache(*, request: Request, cache_exp_time: timedelta) -> CachedResponse | None:
     """
-    If redis.is_connected:
-        Get Cached Data From Redis
+    If redis.is_connected or valkey.is_connected:
+        Get Cached Data From Redis or Valkey
     else:
         Get Cached Data From Memory
     """
-    if redis.is_connected:
+    if server := redis.is_connected or valkey.is_connected:
         key = api_cache_key(request=request)
-        data = (await redis.get(key) or b'{}').decode()
+        data = (await server.get(key) or b'{}').decode()
         if cached_value := json.loads(data):
             return CachedResponse(*cached_value)
 
@@ -57,15 +57,15 @@ async def get_response_from_cache(*, request: Request, cache_exp_time: timedelta
 
 async def set_response_in_cache(*, request: Request, response: Response, cache_exp_time: timedelta | int) -> None:
     """
-    If redis.is_connected:
-        Cache The Data In Redis
+    If redis.is_connected or valkey.is_connected:
+        Cache The Data In Redis or Valkey
     else:
         Cache The Data In Memory
     """
 
     cache_data: tuple[ResponseDataTypes, int] = (response.data, response.status_code)
 
-    if redis.is_connected:
+    if server := redis.is_connected or valkey.is_connected:
         key = api_cache_key(request=request)
 
         cache_exp_time = cache_exp_time or config.DEFAULT_CACHE_EXP
@@ -80,9 +80,9 @@ async def set_response_in_cache(*, request: Request, response: Response, cache_e
                 'your response are going to cache in redis forever '
                 '** set DEFAULT_CACHE_EXP in `configs` or set the `cache_exp_time` in `@API.get()` to prevent this **'
             )
-            await redis.set(key, cache_data)
+            await server.set(key, cache_data)
         else:
-            await redis.set(key, cache_data, ex=cache_exp_time)
+            await server.set(key, cache_data, ex=cache_exp_time)
 
     else:
         key = api_cache_key(request=request, cache_exp_time=cache_exp_time)
@@ -94,15 +94,15 @@ async def set_response_in_cache(*, request: Request, response: Response, cache_e
 
 async def get_throttling_from_cache(request: Request, duration: timedelta) -> int:
     """
-    If redis.is_connected:
-        Get Cached Data From Redis
+    If redis.is_connected or valkey.is_connected:
+        Get Cached Data From Redis or Valkey
     else:
         Get Cached Data From Memory
     """
     key = throttling_cache_key(request=request, duration=duration)
 
-    if redis.is_connected:
-        data = (await redis.get(key) or b'0').decode()
+    if server := redis.is_connected or valkey.is_connected:
+        data = (await server.get(key) or b'0').decode()
         return json.loads(data)
 
     else:
@@ -111,15 +111,15 @@ async def get_throttling_from_cache(request: Request, duration: timedelta) -> in
 
 async def increment_throttling_in_cache(request: Request, duration: timedelta) -> None:
     """
-    If redis.is_connected:
-        Increment The Data In Redis
+    If redis.is_connected or valkey.is_connected:
+        Increment The Data In Redis or Valkey
     else:
         Increment The Data In Memory
     """
     key = throttling_cache_key(request=request, duration=duration)
 
-    if redis.is_connected:
-        await redis.incrby(key, amount=1)
+    if server:= redis.is_connected or valkey.is_connected:
+        await server.incrby(key, amount=1)
 
     else:
         throttling_storage[key] += 1
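Note on the pattern used throughout this diff: `server := redis.is_connected or valkey.is_connected` binds whichever operand is truthy and then awaits `get`/`set`/`incrby` on it, so it only works if `is_connected` evaluates to something that behaves like the client itself. The stand-alone sketch below is a minimal illustration of that selection logic under that assumption; `_FakeBackend` and `main` are hypothetical stubs, not Panther's real connection classes.

import asyncio


class _FakeBackend:
    """Hypothetical stand-in for a Redis/Valkey connection wrapper."""

    def __init__(self, name: str, connected: bool):
        self.name = name
        self._connected = connected
        self._store: dict[str, bytes] = {}

    @property
    def is_connected(self):
        # Assumption: when connected, expose the backend itself (truthy),
        # so `server := redis.is_connected or valkey.is_connected`
        # yields a usable client rather than a bare bool.
        return self if self._connected else False

    async def get(self, key: str) -> bytes | None:
        return self._store.get(key)

    async def set(self, key: str, value: bytes, ex: int | None = None) -> None:
        self._store[key] = value


redis = _FakeBackend('redis', connected=False)
valkey = _FakeBackend('valkey', connected=True)


async def main() -> None:
    # Mirrors the diff: use whichever backend reports a live connection.
    if server := redis.is_connected or valkey.is_connected:
        await server.set('cache-key', b'cached-value')
        print(server.name, await server.get('cache-key'))  # valkey b'cached-value'


asyncio.run(main())

If `is_connected` is a plain bool in the real connection classes, the walrus form would bind `True` instead of a client; the sketch makes the alternative assumption explicit rather than asserting how Panther implements it.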
