bugfixing etf provider
MuslemRahimi committed Jul 15, 2024
1 parent 6bf4195 commit 8dde297
Showing 3 changed files with 15 additions and 16 deletions.
app/cron_market_maker.py (9 changes: 3 additions & 6 deletions)
@@ -15,7 +15,7 @@
 api_secret = os.getenv('FINRA_API_SECRET')
 api_token = finra_api_queries.retrieve_api_token(finra_api_key_input=api_key, finra_api_secret_input=api_secret)

-start_date = datetime.today() - timedelta(365)
+start_date = datetime.today() - timedelta(180)
 end_date = datetime.today()
 start_date = start_date.strftime("%Y-%m-%d")
 end_date = end_date.strftime("%Y-%m-%d")
@@ -83,7 +83,6 @@ async def save_json(symbol, data):
     # Use async file writing to avoid blocking the event loop
     loop = asyncio.get_event_loop()
     path = f"json/market-maker/companies/{symbol}.json"
-    os.makedirs(os.path.dirname(path), exist_ok=True)
     await loop.run_in_executor(None, ujson.dump, data, open(path, 'w'))

 async def process_ticker(ticker):
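
An aside on the surviving write call: open(path, 'w') is handed to run_in_executor without ever being closed explicitly, and with the os.makedirs line gone the json/market-maker/companies/ directory must exist before the job runs. A sketch of an equivalent write that closes the handle deterministically (the write_json helper is illustrative, not part of this repository):

import ujson

def write_json(path, data):
    # A with-block closes the file even if ujson.dump raises.
    with open(path, 'w') as f:
        ujson.dump(data, f)

# Drop-in replacement for the one-liner inside save_json:
# await loop.run_in_executor(None, write_json, path, data)
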
@@ -111,12 +110,10 @@ async def run():
     total_symbols = stocks_symbols + etf_symbols

     async with aiohttp.ClientSession() as session:
-        tasks = []
-        for ticker in total_symbols:
-            tasks.append(process_ticker(ticker))
+        tasks = [process_ticker(ticker) for ticker in total_symbols]

         # Run tasks concurrently in batches to avoid too many open connections
-        batch_size = 1 # Adjust based on your system's capacity
+        batch_size = 10 # Adjust based on your system's capacity
         for i in tqdm(range(0, len(tasks), batch_size)):
             batch = tasks[i:i + batch_size]
             await asyncio.gather(*batch)
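For context, the batching idiom this file tunes can be run on its own; below is a minimal, self-contained sketch (the process_ticker body and the symbol list are placeholders, not the repository's actual fetch logic):

import asyncio

async def process_ticker(ticker):
    # Stand-in for the real per-symbol fetch-and-save work.
    await asyncio.sleep(0.01)

async def run(total_symbols, batch_size=10):
    # One coroutine per symbol, awaited in fixed-size batches so at
    # most batch_size requests are in flight at any moment.
    tasks = [process_ticker(ticker) for ticker in total_symbols]
    for i in range(0, len(tasks), batch_size):
        await asyncio.gather(*tasks[i:i + batch_size])

asyncio.run(run([f"SYM{n}" for n in range(25)]))

Raising batch_size from 1 to 10 turns what was effectively serial processing into ten-way concurrency per batch.
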
app/main.py (6 changes: 5 additions & 1 deletion)
@@ -2095,7 +2095,11 @@ async def etf_provider(data: ETFProviderData):
     cursor.close()

     # Extract only relevant data and sort it
-    res = [{'symbol': row[0], 'name': row[1], 'expenseRatio': row[2], 'totalAssets': row[3], 'numberOfHoldings': row[4]} for row in raw_data]
+    # Extract only relevant data and filter only integer totalAssets
+    res = [
+        {'symbol': row[0], 'name': row[1], 'expenseRatio': row[2], 'totalAssets': row[3], 'numberOfHoldings': row[4]}
+        for row in raw_data if isinstance(row[3], int)
+    ]
     sorted_res = sorted(res, key=lambda x: x['totalAssets'], reverse=True)
     redis_client.set(cache_key, orjson.dumps(sorted_res))
     redis_client.expire(cache_key, 3600 * 24)  # Set cache expiration time to 1 day
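This filter is the bugfix the commit title refers to: sorted() raises a TypeError as soon as its key values mix types, so a single row whose totalAssets is None or a string would break the whole endpoint. A minimal reproduction with made-up rows (not the repository's data):

rows = [
    ('SPY', 'SPDR S&P 500 ETF Trust', 0.09, 500_000_000_000, 503),
    ('BAD', 'Broken ETF', 0.50, None, 12),  # malformed totalAssets
]

# Sorting on r[3] directly raises TypeError ('<' not supported
# between mixed types); filtering first avoids it.
res = [{'symbol': r[0], 'totalAssets': r[3]} for r in rows if isinstance(r[3], int)]
print(sorted(res, key=lambda x: x['totalAssets'], reverse=True))
# [{'symbol': 'SPY', 'totalAssets': 500000000000}]
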
app/primary_cron_job.py (16 changes: 7 additions & 9 deletions)
@@ -347,15 +347,13 @@ def run_dark_pool_flow():


 def run_market_maker():
-    week = datetime.today().weekday()
-    if week <= 5:
-        run_command(["python3", "cron_market_maker.py"])
-        command = [
-            "sudo", "rsync", "-avz", "-e", "ssh",
-            "/root/backend/app/json/market-maker",
-            f"root@{useast_ip_address}:/root/backend/app/json"
-        ]
-        run_command(command)
+    run_command(["python3", "cron_market_maker.py"])
+    command = [
+        "sudo", "rsync", "-avz", "-e", "ssh",
+        "/root/backend/app/json/market-maker",
+        f"root@{useast_ip_address}:/root/backend/app/json"
+    ]
+    run_command(command)

 def run_ownership_stats():
     week = datetime.today().weekday()
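For reference on the guard that was removed: datetime.weekday() returns 0 for Monday through 6 for Sunday, so week <= 5 only ever skipped the job on Sundays; without it, run_market_maker now fires every day the cron triggers. A quick standalone check:

from datetime import datetime

week = datetime.today().weekday()  # 0 = Monday ... 6 = Sunday
print("old guard would have run the job today:", week <= 5)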
