Skip to content

Commit

Permalink
update cron job
Browse files Browse the repository at this point in the history
  • Loading branch information
MuslemRahimi committed Jan 8, 2025
1 parent bc7e43c commit 7e43b3b
Show file tree
Hide file tree
Showing 3 changed files with 79 additions and 16 deletions.
83 changes: 72 additions & 11 deletions app/cron_options_gex_dex.py
Original file line number Diff line number Diff line change
Expand Up @@ -49,14 +49,10 @@ def get_tickers_from_directory(directory: str):
print(f"An error occurred: {e}")
return []

directory_path = "json/gex-dex"
total_symbols = get_tickers_from_directory(directory_path)

if len(total_symbols) < 100:
total_symbols = stocks_symbols+etf_symbols


def save_json(data, symbol):
def save_json(data, symbol, directory_path):
os.makedirs(directory_path, exist_ok=True) # Ensure the directory exists
with open(f"{directory_path}/{symbol}.json", 'wb') as file: # Use binary mode for orjson
file.write(orjson.dumps(data))
Expand All @@ -69,7 +65,7 @@ def safe_round(value, decimals=2):
return value


def prepare_data(data, symbol):
def prepare_data(data, symbol, directory_path, sort_by = "date"):
data = [{k: v for k, v in item.items() if "charm" not in k and "vanna" not in k} for item in data]
res_list = []
for item in data:
Expand All @@ -84,11 +80,16 @@ def prepare_data(data, symbol):
pass

if res_list:
res_list = sorted(res_list, key=lambda x: x['date'], reverse=True)
save_json(res_list, symbol)
res_list = sorted(res_list, key=lambda x: x[sort_by], reverse=True)
save_json(res_list, symbol, directory_path)


def get_data():
def get_overview_data():
directory_path = "json/gex-dex/overview"
total_symbols = get_tickers_from_directory(directory_path)
if len(total_symbols) < 100:
total_symbols = stocks_symbols+etf_symbols

counter = 0
total_symbols = ['GME']
for symbol in tqdm(total_symbols):
Expand All @@ -98,7 +99,7 @@ def get_data():
response = requests.get(url, headers=headers)
if response.status_code == 200:
data = response.json()['data']
prepare_data(data, symbol)
prepare_data(data, symbol, directory_path)

counter +=1

Expand All @@ -113,5 +114,65 @@ def get_data():



def get_strike_data():
    """Fetch per-strike greek exposure (GEX/DEX) from the Unusual Whales API
    for each tracked symbol and persist it under json/gex-dex/strike/.

    Symbols come from the existing files in the target directory; if fewer
    than 100 are found the full stocks+ETF universe is used instead.
    Results are saved via prepare_data(), sorted by the 'strike' field.
    """
    directory_path = "json/gex-dex/strike"
    total_symbols = get_tickers_from_directory(directory_path)
    if len(total_symbols) < 100:
        # Directory is (nearly) empty — fall back to the full symbol universe.
        total_symbols = stocks_symbols+etf_symbols

    counter = 0
    # NOTE(review): debug leftover? This overrides the symbol list built above
    # and restricts the job to GME only — confirm whether intentional.
    total_symbols = ['GME']
    for symbol in tqdm(total_symbols):
        try:
            url = f"https://api.unusualwhales.com/api/stock/{symbol}/greek-exposure/strike"

            response = requests.get(url, headers=headers)
            if response.status_code == 200:
                data = response.json()['data']
                # Per-strike data is sorted by 'strike' rather than the default 'date'.
                prepare_data(data, symbol, directory_path, sort_by = 'strike')

            counter +=1

            # Rate limiting: after 260 requests, pause for 60 seconds.
            # (An earlier comment said 50; the actual threshold checked is 260.)
            if counter == 260:
                print("Sleeping...")
                time.sleep(60)
                counter = 0

        except Exception as e:
            # Best-effort batch job: log the failure and continue with the next symbol.
            print(f"Error for {symbol}:{e}")

def get_expiry_data():
    """Fetch per-expiry greek exposure (GEX/DEX) from the Unusual Whales API
    for each tracked symbol and persist it under json/gex-dex/expiry/.

    Symbols come from the existing files in the target directory; if fewer
    than 100 are found the full stocks+ETF universe is used instead.
    Results are saved via prepare_data() with its default 'date' sort.
    """
    directory_path = "json/gex-dex/expiry"
    total_symbols = get_tickers_from_directory(directory_path)
    if len(total_symbols) < 100:
        # Directory is (nearly) empty — fall back to the full symbol universe.
        total_symbols = stocks_symbols+etf_symbols

    counter = 0
    # NOTE(review): debug leftover? This overrides the symbol list built above
    # and restricts the job to GME only — confirm whether intentional.
    total_symbols = ['GME']
    for symbol in tqdm(total_symbols):
        try:
            url = f"https://api.unusualwhales.com/api/stock/{symbol}/greek-exposure/expiry"

            response = requests.get(url, headers=headers)
            if response.status_code == 200:
                data = response.json()['data']
                prepare_data(data, symbol, directory_path)

            counter +=1

            # Rate limiting: after 260 requests, pause for 60 seconds.
            # (An earlier comment said 50; the actual threshold checked is 260.)
            if counter == 260:
                print("Sleeping...")
                time.sleep(60)
                counter = 0

        except Exception as e:
            # Best-effort batch job: log the failure and continue with the next symbol.
            print(f"Error for {symbol}:{e}")


if __name__ == '__main__':
    # Run all three gex/dex cron passes: overview, per-strike, per-expiry.
    # Bug fix: this commit renames get_data() to get_overview_data(), so the
    # old call would raise NameError at runtime; the correct call was left
    # commented out. Call the renamed function instead.
    get_overview_data()
    get_strike_data()
    get_expiry_data()

10 changes: 6 additions & 4 deletions app/main.py
Original file line number Diff line number Diff line change
Expand Up @@ -2659,9 +2659,11 @@ async def get_data(data:GeneralData, api_key: str = Security(get_api_key)):
)

@app.post("/options-gex-dex")
async def get_data(data:TickerData, api_key: str = Security(get_api_key)):
ticker = data.ticker.upper()
cache_key = f"options-gex-dex-{ticker}"
async def get_data(data:ParamsData, api_key: str = Security(get_api_key)):
ticker = data.params.upper()
category = data.category.lower()

cache_key = f"options-gex-dex-{ticker}-{category}"
cached_result = redis_client.get(cache_key)
if cached_result:
return StreamingResponse(
Expand All @@ -2670,7 +2672,7 @@ async def get_data(data:TickerData, api_key: str = Security(get_api_key)):
headers={"Content-Encoding": "gzip"})

try:
with open(f"json/gex-dex/{ticker}.json", 'rb') as file:
with open(f"json/gex-dex/{category}/{ticker}.json", 'rb') as file:
res = orjson.loads(file.read())
except:
res = []
Expand Down
2 changes: 1 addition & 1 deletion app/primary_cron_job.py
Original file line number Diff line number Diff line change
Expand Up @@ -402,7 +402,7 @@ def run_threaded(job_func):
schedule.every(1).hours.do(run_threaded, run_fda_calendar).tag('fda_calendar_job')

schedule.every(5).minutes.do(run_threaded, run_market_flow).tag('market_flow_job')
schedule.every(5).minutes.do(run_threaded, run_dark_pool_level).tag('dark_pool_level_job')
schedule.every(30).minutes.do(run_threaded, run_dark_pool_level).tag('dark_pool_level_job')
schedule.every(10).seconds.do(run_threaded, run_dark_pool_flow).tag('dark_pool_flow_job')

schedule.every(2).minutes.do(run_threaded, run_dashboard).tag('dashboard_job')
Expand Down

0 comments on commit 7e43b3b

Please sign in to comment.