From 8dde297a6b594145cc5b407612f6dbdda46742d3 Mon Sep 17 00:00:00 2001
From: MuslemRahimi
Date: Mon, 15 Jul 2024 14:19:16 +0200
Subject: [PATCH] bugfixing etf provider

---
 app/cron_market_maker.py |  9 +++------
 app/main.py              |  6 +++++-
 app/primary_cron_job.py  | 16 +++++++---------
 3 files changed, 15 insertions(+), 16 deletions(-)

diff --git a/app/cron_market_maker.py b/app/cron_market_maker.py
index 970af71..5f3e953 100644
--- a/app/cron_market_maker.py
+++ b/app/cron_market_maker.py
@@ -15,7 +15,7 @@ api_secret = os.getenv('FINRA_API_SECRET')
 
 api_token = finra_api_queries.retrieve_api_token(finra_api_key_input=api_key, finra_api_secret_input=api_secret)
 
-start_date = datetime.today() - timedelta(365)
+start_date = datetime.today() - timedelta(180)
 end_date = datetime.today()
 start_date = start_date.strftime("%Y-%m-%d")
 end_date = end_date.strftime("%Y-%m-%d")
@@ -83,7 +83,6 @@ async def save_json(symbol, data):
     # Use async file writing to avoid blocking the event loop
     loop = asyncio.get_event_loop()
     path = f"json/market-maker/companies/{symbol}.json"
-    os.makedirs(os.path.dirname(path), exist_ok=True)
     await loop.run_in_executor(None, ujson.dump, data, open(path, 'w'))
 
 async def process_ticker(ticker):
@@ -111,12 +110,10 @@ async def run():
     total_symbols = stocks_symbols + etf_symbols
 
     async with aiohttp.ClientSession() as session:
-        tasks = []
-        for ticker in total_symbols:
-            tasks.append(process_ticker(ticker))
+        tasks = [process_ticker(ticker) for ticker in total_symbols]
 
         # Run tasks concurrently in batches to avoid too many open connections
-        batch_size = 1 # Adjust based on your system's capacity
+        batch_size = 10 # Adjust based on your system's capacity
         for i in tqdm(range(0, len(tasks), batch_size)):
             batch = tasks[i:i + batch_size]
             await asyncio.gather(*batch)
diff --git a/app/main.py b/app/main.py
index a157a94..e2777a1 100755
--- a/app/main.py
+++ b/app/main.py
@@ -2095,7 +2095,11 @@ async def etf_provider(data: ETFProviderData):
     cursor.close()
 
     # Extract only relevant data and sort it
-    res = [{'symbol': row[0], 'name': row[1], 'expenseRatio': row[2], 'totalAssets': row[3], 'numberOfHoldings': row[4]} for row in raw_data]
+    # Extract only relevant data and filter only integer totalAssets
+    res = [
+        {'symbol': row[0], 'name': row[1], 'expenseRatio': row[2], 'totalAssets': row[3], 'numberOfHoldings': row[4]}
+        for row in raw_data if isinstance(row[3], int)
+    ]
     sorted_res = sorted(res, key=lambda x: x['totalAssets'], reverse=True)
     redis_client.set(cache_key, orjson.dumps(sorted_res))
     redis_client.expire(cache_key, 3600 * 24) # Set cache expiration time to 1 day
diff --git a/app/primary_cron_job.py b/app/primary_cron_job.py
index c60b49e..bbb3aa2 100755
--- a/app/primary_cron_job.py
+++ b/app/primary_cron_job.py
@@ -347,15 +347,13 @@ def run_dark_pool_flow():
 
 
 def run_market_maker():
-    week = datetime.today().weekday()
-    if week <= 5:
-        run_command(["python3", "cron_market_maker.py"])
-        command = [
-            "sudo", "rsync", "-avz", "-e", "ssh",
-            "/root/backend/app/json/market-maker",
-            f"root@{useast_ip_address}:/root/backend/app/json"
-        ]
-        run_command(command)
+    run_command(["python3", "cron_market_maker.py"])
+    command = [
+        "sudo", "rsync", "-avz", "-e", "ssh",
+        "/root/backend/app/json/market-maker",
+        f"root@{useast_ip_address}:/root/backend/app/json"
+    ]
+    run_command(command)
 
 def run_ownership_stats():
     week = datetime.today().weekday()
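
Reviewer note, not part of the patch: the main.py hunk filters out ETF provider rows whose totalAssets column is not an integer (for example None or a placeholder string) before sorting. Mixing such values with ints in sorted() raises TypeError in Python 3, which is the likely failure this change works around. The sketch below reproduces the idea in isolation; the sample rows, tickers, and values are invented for illustration, and only the row layout mirrors the patched code.

# Minimal, self-contained sketch (hypothetical data) of the isinstance(row[3], int) filter.
raw_data = [
    ('SPY',  'SPDR S&P 500 ETF Trust', 0.09, 500_000_000_000, 503),
    ('AAAX', 'Hypothetical ETF A',     0.25, None,            40),   # provider returned no AUM
    ('BBBX', 'Hypothetical ETF B',     0.30, 'N/A',           12),   # provider returned a string
]

# Keep only rows whose totalAssets is an integer, mirroring the patched list comprehension.
res = [
    {'symbol': row[0], 'name': row[1], 'expenseRatio': row[2],
     'totalAssets': row[3], 'numberOfHoldings': row[4]}
    for row in raw_data if isinstance(row[3], int)
]

# Without the filter, this sort would fail with e.g.
# "TypeError: '<' not supported between instances of 'NoneType' and 'int'";
# with it, only rows carrying a usable totalAssets value remain.
sorted_res = sorted(res, key=lambda x: x['totalAssets'], reverse=True)
print(sorted_res)  # -> [{'symbol': 'SPY', ...}]

If the provider can also report assets as floats, a broader check such as isinstance(row[3], (int, float)) might be worth considering, but that goes beyond what this patch does.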