Commit: update cronjob
MuslemRahimi committed Jan 4, 2025
1 parent 738a0d2 commit 402af97
Showing 2 changed files with 159 additions and 99 deletions.
92 changes: 47 additions & 45 deletions app/cron_financial_statements.py
@@ -11,14 +11,37 @@
 
 # Configurations
 include_current_quarter = False
-max_concurrent_requests = 100 # Limit concurrent requests
-
-async def fetch_data(session, url, symbol, attempt=0):
+max_concurrent_requests = 100
+
+class RateLimiter:
+    def __init__(self, max_requests, time_window):
+        self.max_requests = max_requests
+        self.time_window = time_window
+        self.requests = 0
+        self.last_reset = asyncio.get_event_loop().time()
+
+    async def acquire(self):
+        current_time = asyncio.get_event_loop().time()
+        if current_time - self.last_reset >= self.time_window:
+            self.requests = 0
+            self.last_reset = current_time
+
+        if self.requests >= self.max_requests:
+            wait_time = self.time_window - (current_time - self.last_reset)
+            if wait_time > 0:
+                print(f"\nRate limit reached. Waiting {wait_time:.2f} seconds...")
+                await asyncio.sleep(wait_time)
+            self.requests = 0
+            self.last_reset = asyncio.get_event_loop().time()
+
+        self.requests += 1
+
+async def fetch_data(session, url, symbol, rate_limiter):
+    await rate_limiter.acquire()
     try:
         async with session.get(url) as response:
             if response.status == 200:
-                data = await response.json()
-                return data
+                return await response.json()
             else:
                 print(f"Error fetching data for {symbol}: HTTP {response.status}")
                 return None
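A note on the new RateLimiter: it is a fixed-window counter, so acquire() zeroes the count once a full time_window has elapsed, and otherwise sleeps out the remainder of the window after max_requests acquisitions. A minimal sketch of that behavior, assuming the class above is in scope (the demo parameters are illustrative, not part of the commit):

import asyncio

async def demo():
    # With max_requests=3 and time_window=1.0, acquisitions 0-2 return
    # immediately; acquisition 3 prints the wait message and sleeps ~1 s
    # until the window resets.
    limiter = RateLimiter(max_requests=3, time_window=1.0)
    start = asyncio.get_event_loop().time()
    for i in range(7):
        await limiter.acquire()
        print(f"acquired {i} at t={asyncio.get_event_loop().time() - start:.2f}s")

asyncio.run(demo())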
@@ -31,7 +54,6 @@ async def save_json(symbol, period, data_type, data):
     with open(f"json/financial-statements/{data_type}/{period}/{symbol}.json", 'w') as file:
         ujson.dump(data, file)
 
-
 async def calculate_margins(symbol):
     for period in ['annual', 'quarter']:
         try:
@@ -49,39 +71,35 @@ async def calculate_margins(symbol):
             ratios_path = f"json/financial-statements/ratios/{period}/{symbol}.json"
             with open(ratios_path, "r") as file:
                 ratio_data = ujson.load(file)
-            # Ensure all datasets are available and iterate through the items
 
             if income_data and cash_flow_data and ratio_data:
                 for ratio_item, income_item, cash_flow_item in zip(ratio_data, income_data, cash_flow_data):
-                    # Extract required data
                     revenue = income_item.get('revenue', 0)
-                    ebitda = income_item.get('ebitda',0)
+                    ebitda = income_item.get('ebitda', 0)
                     free_cash_flow = cash_flow_item.get('freeCashFlow', 0)
 
-                    # Calculate freeCashFlowMargin if data is valid
-                    if revenue != 0: # Avoid division by zero
+                    if revenue != 0:
                         ratio_item['freeCashFlowMargin'] = round((free_cash_flow / revenue) * 100, 2)
-                        ratio_item['ebitdaMargin'] = round((ebitda / revenue) * 100,2)
-                        ratio_item['grossProfitMargin'] = round(ratio_item['grossProfitMargin']*100,2)
-                        ratio_item['operatingProfitMargin'] = round(ratio_item['operatingProfitMargin']*100,2)
-                        ratio_item['pretaxProfitMargin'] = round(ratio_item['pretaxProfitMargin']*100,2)
-                        ratio_item['netProfitMargin'] = round(ratio_item['netProfitMargin']*100,2)
+                        ratio_item['ebitdaMargin'] = round((ebitda / revenue) * 100, 2)
+                        ratio_item['grossProfitMargin'] = round(ratio_item['grossProfitMargin'] * 100, 2)
+                        ratio_item['operatingProfitMargin'] = round(ratio_item['operatingProfitMargin'] * 100, 2)
+                        ratio_item['pretaxProfitMargin'] = round(ratio_item['pretaxProfitMargin'] * 100, 2)
+                        ratio_item['netProfitMargin'] = round(ratio_item['netProfitMargin'] * 100, 2)
                     else:
-                        ratio_item['freeCashFlowMargin'] = None # Handle missing or zero revenue
+                        ratio_item['freeCashFlowMargin'] = None
                         ratio_item['ebitdaMargin'] = None
                         ratio_item['grossProfitMargin'] = None
                         ratio_item['operatingProfitMargin'] = None
                         ratio_item['pretaxProfitMargin'] = None
                         ratio_item['netProfitMargin'] = None
 
-            # Save the updated ratios data back to the JSON file
-
             with open(ratios_path, "w") as file:
-                ujson.dump(ratio_data,file)
+                ujson.dump(ratio_data, file)
 
         except Exception as e:
-            print(e)
+            print(f"Error calculating margins for {symbol}: {e}")
 
-async def get_financial_statements(session, symbol, semaphore, request_counter):
+async def get_financial_statements(session, symbol, semaphore, rate_limiter):
     base_url = "https://financialmodelingprep.com/api/v3"
     periods = ['quarter', 'annual']
     financial_data_types = ['key-metrics', 'income-statement', 'balance-sheet-statement', 'cash-flow-statement', 'ratios']
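A quick check of the margin math in calculate_margins above: the stored ratio fields arrive as fractions (hence the * 100 rescaling), so revenue of 100M with free cash flow of 25M yields freeCashFlowMargin = 25.0 rather than 0.25.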
@@ -92,46 +110,30 @@ async def get_financial_statements(session, symbol, semaphore, request_counter):
             # Fetch regular financial statements
             for data_type in financial_data_types:
                 url = f"{base_url}/{data_type}/{symbol}?period={period}&apikey={api_key}"
-                data = await fetch_data(session, url, symbol)
+                data = await fetch_data(session, url, symbol, rate_limiter)
                 if data:
                     await save_json(symbol, period, data_type, data)
 
-                request_counter[0] += 1 # Increment the request counter
-                if request_counter[0] >= 500:
-                    await asyncio.sleep(60) # Pause for 60 seconds
-                    request_counter[0] = 0 # Reset the request counter after the pause
-
             # Fetch financial statement growth data
             for growth_type in growth_data_types:
                 growth_url = f"{base_url}/{growth_type}/{symbol}?period={period}&apikey={api_key}"
-                growth_data = await fetch_data(session, growth_url, symbol)
+                growth_data = await fetch_data(session, growth_url, symbol, rate_limiter)
                 if growth_data:
                     await save_json(symbol, period, growth_type, growth_data)
 
-                request_counter[0] += 1 # Increment the request counter
-                if request_counter[0] >= 500:
-                    await asyncio.sleep(60) # Pause for 60 seconds
-                    request_counter[0] = 0 # Reset the request counter after the pause
-
         # Fetch TTM metrics
         url = f"https://financialmodelingprep.com/api/v3/key-metrics-ttm/{symbol}?apikey={api_key}"
-        data = await fetch_data(session, url, symbol)
+        data = await fetch_data(session, url, symbol, rate_limiter)
         if data:
            await save_json(symbol, 'ttm', 'key-metrics', data)
 
         # Fetch owner earnings data
         owner_earnings_url = f"https://financialmodelingprep.com/api/v4/owner_earnings?symbol={symbol}&apikey={api_key}"
-        owner_earnings_data = await fetch_data(session, owner_earnings_url, symbol)
+        owner_earnings_data = await fetch_data(session, owner_earnings_url, symbol, rate_limiter)
         if owner_earnings_data:
             await save_json(symbol, 'quarter', 'owner-earnings', owner_earnings_data)
 
         await calculate_margins(symbol)
-        request_counter[0] += 1 # Increment the request counter
-        if request_counter[0] >= 500:
-            await asyncio.sleep(60) # Pause for 60 seconds
-            request_counter[0] = 0 # Reset the request counter after the pause
 
 
 async def run():
     con = sqlite3.connect('stocks.db')
@@ -141,13 +143,13 @@ async def run():
     symbols = [row[0] for row in cursor.fetchall()]
     con.close()
 
+    rate_limiter = RateLimiter(max_requests=500, time_window=60)
     semaphore = asyncio.Semaphore(max_concurrent_requests)
-    request_counter = [0] # Using a list to keep a mutable counter across async tasks
 
     async with aiohttp.ClientSession() as session:
         tasks = []
         for symbol in tqdm(symbols):
-            task = asyncio.create_task(get_financial_statements(session, symbol, semaphore, request_counter))
+            task = asyncio.create_task(get_financial_statements(session, symbol, semaphore, rate_limiter))
             tasks.append(task)
 
         await asyncio.gather(*tasks)
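Taken together, the commit swaps the mutable request_counter list that was threaded through get_financial_statements for one shared limiter: throughput is capped at 500 requests per 60 s window (roughly 8 requests/s sustained), while the Semaphore(100) passed alongside it can bound how many requests are in flight at once. A sketch of one way the two primitives compose, assuming the RateLimiter above is in scope (fetch, main, and the URL list are illustrative, not the repo's code):

import asyncio
import aiohttp

async def fetch(session, url, semaphore, limiter):
    async with semaphore:        # bound concurrent in-flight requests
        await limiter.acquire()  # bound requests per time window
        async with session.get(url) as resp:
            if resp.status == 200:
                return await resp.json()
            return None

async def main(urls):
    limiter = RateLimiter(max_requests=500, time_window=60)
    semaphore = asyncio.Semaphore(100)
    async with aiohttp.ClientSession() as session:
        return await asyncio.gather(*(fetch(session, u, semaphore, limiter) for u in urls))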
166 changes: 112 additions & 54 deletions app/cron_options_hottest_contracts.py
@@ -11,6 +11,7 @@
 load_dotenv()
 
 api_key = os.getenv('UNUSUAL_WHALES_API_KEY')
+headers = {"Accept": "application/json, text/plain", "Authorization": api_key}
 
 # Connect to the databases
 con = sqlite3.connect('stocks.db')
@@ -29,20 +30,9 @@
 
 con.close()
 etf_con.close()
-# Combine the lists of stock and ETF symbols
-total_symbols = stocks_symbols + etf_symbols
-
 
 def get_tickers_from_directory(directory: str):
-    """
-    Retrieves all tickers from JSON filenames in the specified directory.
-    Args:
-        directory (str): Path to the directory containing JSON files.
-    Returns:
-        list: A list of tickers extracted from filenames.
-    """
     try:
         # Ensure the directory exists
         if not os.path.exists(directory):
@@ -58,6 +48,8 @@ def get_tickers_from_directory(directory: str):
 directory_path = "json/hottest-contracts/companies"
 total_symbols = get_tickers_from_directory(directory_path)
 
+if len(total_symbols) < 100:
+    total_symbols = stocks_symbols + etf_symbols
 
 def save_json(data, symbol,directory="json/hottest-contracts/companies"):
     os.makedirs(directory, exist_ok=True) # Ensure the directory exists
@@ -92,53 +84,119 @@ def prepare_data(data, symbol):
 
     res_list = []
     for item in data:
-        if float(item['volume']) > 0:
-            # Parse option_symbol
-            date_expiration, option_type, strike_price = parse_option_symbol(item['option_symbol'])
-
-            # Round numerical and numerical-string values
-            new_item = {
-                key: safe_round(value) if isinstance(value, (int, float, str)) else value
-                for key, value in item.items()
-            }
+        try:
+            if float(item['volume']) > 0:
+                # Parse option_symbol
+                date_expiration, option_type, strike_price = parse_option_symbol(item['option_symbol'])
+
+                # Round numerical and numerical-string values
+                new_item = {
+                    key: safe_round(value) if isinstance(value, (int, float, str)) else value
+                    for key, value in item.items()
+                }
+
+                # Add parsed fields
+                new_item['date_expiration'] = date_expiration
+                new_item['option_type'] = option_type
+                new_item['strike_price'] = strike_price
+
+                # Calculate open_interest_change
+                new_item['open_interest_change'] = safe_round(
+                    new_item.get('open_interest', 0) - new_item.get('prev_oi', 0)
+                )
+
+                res_list.append(new_item)
+        except:
+            pass
 
-            # Add parsed fields
-            new_item['date_expiration'] = date_expiration
-            new_item['option_type'] = option_type
-            new_item['strike_price'] = strike_price
+    if res_list:
+        highest_volume = sorted(res_list, key=lambda x: x['volume'], reverse=True)[:10]
+        highest_open_interest = sorted(res_list, key=lambda x: x['open_interest'], reverse=True)[:10]
+        res_dict = {'volume': highest_volume, 'openInterest': highest_open_interest}
+        save_json(res_dict, symbol, "json/hottest-contracts/companies")
 
+
+def get_hottest_contracts():
+    counter = 0
+    for symbol in tqdm(total_symbols):
+        try:
+            url = f"https://api.unusualwhales.com/api/stock/{symbol}/option-contracts"
+            response = requests.get(url, headers=headers)
+            if response.status_code == 200:
+                data = response.json()['data']
+                prepare_data(data, symbol)
+                counter += 1
+                # Pause after every 100 symbols to stay under the API rate limit
+                if counter == 100:
+                    print("Sleeping...")
+                    time.sleep(30)
+                    counter = 0
+        except Exception as e:
+            print(f"Error for {symbol}: {e}")
+
+
+def get_single_contract_historical_data(contract_id):
+    keys_to_remove = {'high_price', 'low_price', 'iv_low', 'iv_high', 'last_tape_time'}
+
+    url = f"https://api.unusualwhales.com/api/option-contract/{contract_id}/historic"
+    response = requests.get(url, headers=headers)
+    data = response.json()['chains']
+    data = sorted(data, key=lambda x: datetime.strptime(x.get('date', ''), '%Y-%m-%d'))
+    res_list = []
+    for i, item in enumerate(data):
+        new_item = {
+            key: safe_round(value) if isinstance(value, (int, float, str)) else value
+            for key, value in item.items()
+        }
+
+        # Compute open interest change and percent if not the first item
+        if i > 0:
+            previous_open_interest = safe_round(data[i-1].get('open_interest', 0))
+            open_interest = safe_round(item.get('open_interest', 0))
-            # Calculate open_interest_change
-            new_item['open_interest_change'] = safe_round(
-                new_item.get('open_interest', 0) - new_item.get('prev_oi', 0)
-            )
+            if previous_open_interest > 0:
+                new_item['open_interest_change'] = safe_round(open_interest - previous_open_interest)
+                new_item['open_interest_change_percent'] = safe_round((open_interest / previous_open_interest - 1) * 100)
+            else:
+                new_item['open_interest_change'] = 0
+                new_item['open_interest_change_percent'] = 0
 
-            res_list.append(new_item)
+        res_list.append(new_item)
 
-    if res_list:
-        save_json(res_list, symbol,"json/hottest-contracts/companies")
+    res_list = [{key: value for key, value in item.items() if key not in keys_to_remove} for item in res_list]
+    res_list = sorted(res_list, key=lambda x: datetime.strptime(x.get('date', ''), '%Y-%m-%d'), reverse=True)
 
+    save_json(res_list, contract_id, "json/hottest-contracts/contracts")
+
-counter = 0
-for symbol in tqdm(total_symbols):
-    try:
-        url = f"https://api.unusualwhales.com/api/stock/{symbol}/option-contracts"
-
-        headers = {
-            "Accept": "application/json, text/plain",
-            "Authorization": api_key
-        }
-
-        response = requests.get(url, headers=headers)
-        if response.status_code == 200:
-            data = response.json()['data']
-            prepare_data(data, symbol)
-            counter +=1
-            # If 50 chunks have been processed, sleep for 60 seconds
-            if counter == 100:
-                print("Sleeping...")
-                time.sleep(30) # Sleep for 60 seconds
-                counter = 0
-
-    except Exception as e:
-        print(f"Error for {symbol}:{e}")
+
+if __name__ == '__main__':
+    get_hottest_contracts()
+
+'''
+total_symbols = get_tickers_from_directory(directory_path)
+contract_id_set = set()  # Use a set to ensure uniqueness
+for symbol in total_symbols:
+    try:
+        with open(f"json/hottest-contracts/companies/{symbol}.json", "r") as file:
+            data = orjson.loads(file.read())
+        for item in data:
+            try:
+                contract_id_set.add(item['option_symbol'])  # Add to the set
+            except KeyError:
+                pass  # Handle missing 'option_symbol' keys gracefully
+    except FileNotFoundError:
+        pass  # Handle missing files gracefully
+
+# Convert the set to a list if needed
+contract_id_list = list(contract_id_set)
+print(len(contract_id_list))
+print(contract_id_list[0])
+get_single_contract_historical_data('GME250117C00125000')
+'''
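A quick check of the open-interest arithmetic in get_single_contract_historical_data: if a contract's open interest rises from 2,000 to 2,500 between two sessions, open_interest_change is 500 and open_interest_change_percent is (2500 / 2000 - 1) * 100 = 25.0.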

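parse_option_symbol itself is not part of this diff. For readers following along, option_symbol values such as GME250117C00125000 follow standard OCC symbology (root, YYMMDD expiry, C/P flag, strike times 1000 zero-padded to eight digits), so a hypothetical stand-in could look like this (the name and regex are illustrative, not the repo's implementation):

import re
from datetime import datetime

# Hypothetical stand-in for parse_option_symbol, assuming standard OCC
# symbology: ROOT + YYMMDD + C/P + strike * 1000, zero-padded to 8 digits.
def parse_occ_symbol(option_symbol):
    match = re.match(r'^([A-Z]{1,6})(\d{6})([CP])(\d{8})$', option_symbol)
    if not match:
        raise ValueError(f"Unrecognized option symbol: {option_symbol}")
    root, yymmdd, cp, strike = match.groups()
    date_expiration = datetime.strptime(yymmdd, '%y%m%d').strftime('%Y-%m-%d')
    option_type = 'Call' if cp == 'C' else 'Put'
    strike_price = int(strike) / 1000
    return date_expiration, option_type, strike_price

print(parse_occ_symbol('GME250117C00125000'))  # ('2025-01-17', 'Call', 125.0)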