Commit

add dividend cron job
MuslemRahimi committed Aug 5, 2024
1 parent f8b9280 commit 531d5f4
Showing 3 changed files with 57 additions and 1 deletion.
1 change: 1 addition & 0 deletions app/cron_congress_trading.py
@@ -99,6 +99,7 @@ def replace_representative(office):
        'Thune, John (Senator)': 'John Thune',
        'Rosen, Jacky (Senator)': 'Jacky Rosen',
        'Britt, Katie (Senator)': 'Katie Britt',
        'Britt, Katie': 'Katie Britt',
        'James Costa': 'Jim Costa',
        'Lummis, Cynthia (Senator)': 'Cynthia Lummis',
        'Coons, Chris (Senator)': 'Chris Coons',
40 changes: 39 additions & 1 deletion app/main.py
@@ -3319,20 +3319,58 @@ async def get_reddit_tracker(api_key: str = Security(get_api_key)):

@app.get("/dividend-kings")
async def get_dividend_kings():
    cache_key = f"dividend-kings"
    cached_result = redis_client.get(cache_key)
    if cached_result:
        return StreamingResponse(
            io.BytesIO(cached_result),
            media_type="application/json",
            headers={"Content-Encoding": "gzip"}
        )
    try:
        with open(f"json/stocks-list/dividend-kings.json", 'rb') as file:
            res = orjson.loads(file.read())
    except:
        res = []
        return res

    data = orjson.dumps(res)
    compressed_data = gzip.compress(data)

    redis_client.set(cache_key, compressed_data)
    redis_client.expire(cache_key, 60*20)

    return StreamingResponse(
        io.BytesIO(compressed_data),
        media_type="application/json",
        headers={"Content-Encoding": "gzip"}
    )

@app.get("/dividend-aristocrats")
async def get_dividend_aristocrats():
    cache_key = f"dividend-aristocrats"
    cached_result = redis_client.get(cache_key)
    if cached_result:
        return StreamingResponse(
            io.BytesIO(cached_result),
            media_type="application/json",
            headers={"Content-Encoding": "gzip"}
        )
    try:
        with open(f"json/stocks-list/dividend-aristocrats.json", 'rb') as file:
            res = orjson.loads(file.read())
    except:
        res = []
        return res

    data = orjson.dumps(res)
    compressed_data = gzip.compress(data)

    redis_client.set(cache_key, compressed_data)
    redis_client.expire(cache_key, 60*20)

    return StreamingResponse(
        io.BytesIO(compressed_data),
        media_type="application/json",
        headers={"Content-Encoding": "gzip"}
    )

@app.get("/newsletter")
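
Both endpoints above follow the same cache-then-stream pattern: the JSON file is read from disk, gzip-compressed, stored in Redis with a 20-minute expiry (60*20 seconds), and returned as a StreamingResponse with a Content-Encoding: gzip header. A minimal client-side sketch of consuming one of these routes, assuming the FastAPI app is served at http://localhost:8000 (a hypothetical host and port); requests transparently decompresses responses marked Content-Encoding: gzip:

import requests

# Hypothetical base URL; point this at wherever app/main.py is actually served.
BASE_URL = "http://localhost:8000"

resp = requests.get(f"{BASE_URL}/dividend-kings", timeout=10)
resp.raise_for_status()

# requests/urllib3 decode gzip automatically when the server sets
# Content-Encoding: gzip, so .json() yields the plain JSON payload.
dividend_kings = resp.json()
print(f"loaded {len(dividend_kings)} dividend kings")
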
17 changes: 17 additions & 0 deletions app/primary_cron_job.py
@@ -72,6 +72,22 @@ def run_congress_trading():
        ]
        run_command(command)

def run_dividend_list():
    week = datetime.today().weekday()
    current_time = datetime.now().time()
    start_time = datetime_time(15, 30)
    end_time = datetime_time(22, 30)

    if week <= 4 and start_time <= current_time < end_time:
        run_command(["python3", "cron_dividend_kings.py"])
        run_command(["python3", "cron_dividend_aristocrats.py"])
        command = [
            "sudo", "rsync", "-avz", "-e", "ssh",
            "/root/backend/app/json/stocks-list",
            f"root@{useast_ip_address}:/root/backend/app/json"
        ]
        run_command(command)
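
run_command and datetime_time are defined earlier in primary_cron_job.py and are not part of this hunk; datetime_time is presumably datetime.time imported under an alias, and the weekday-only (weekday() <= 4) 15:30–22:30 window presumably tracks regular US trading hours in the server's local time zone. A minimal sketch of what such a run_command helper typically looks like (an assumption, not the repository's actual implementation):

import subprocess

def run_command(command):
    # Run the given argv list (e.g. ["python3", "cron_dividend_kings.py"]),
    # raising if the process exits with a non-zero status.
    subprocess.run(command, check=True)
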

def run_cron_var():
    week = datetime.today().weekday()
    if week <= 5:
@@ -486,6 +502,7 @@ def run_threaded(job_func):
schedule.every(5).minutes.do(run_threaded, run_cron_market_movers).tag('market_movers_job')
schedule.every(2).minutes.do(run_threaded, run_dashboard).tag('dashboard_job')

schedule.every(30).minutes.do(run_threaded, run_dividend_list).tag('dividend_list_job')
schedule.every(15).minutes.do(run_threaded, run_cron_market_news).tag('market_news_job')
schedule.every(10).minutes.do(run_threaded, run_one_day_price).tag('one_day_price_job')
schedule.every(15).minutes.do(run_threaded, run_cron_heatmap).tag('heatmap_job')
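
The new dividend_list_job is registered to run every 30 minutes through run_threaded, which appears in the hunk header above but is not shown here. A minimal sketch of the usual pattern for the schedule library (an assumption about the helper and driver loop, not this repository's verbatim code):

import threading
import time
import schedule

def run_threaded(job_func):
    # Run each scheduled job on its own thread so a slow task
    # (e.g. the rsync step) does not block the scheduler loop.
    threading.Thread(target=job_func).start()

# Typical driver loop: check for due jobs once per second.
while True:
    schedule.run_pending()
    time.sleep(1)
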
