slice historical price data for faster loading time
MuslemRahimi committed Jun 13, 2024
1 parent 17861cc commit 9010c1c
Showing 2 changed files with 34 additions and 34 deletions.
54 changes: 23 additions & 31 deletions app/cron_historical_price.py
@@ -13,19 +13,6 @@
load_dotenv()
api_key = os.getenv('FMP_API_KEY')

# Define a function to remove duplicates based on a key
def remove_duplicates(data, key):
    seen = set()
    new_data = []
    for item in data:
        if item[key] not in seen:
            seen.add(item[key])
            new_data.append(item)
    return new_data

async def save_price_data(symbol, data):
    async with aiofiles.open(f"json/historical-price/{symbol}.json", 'w') as file:
        await file.write(ujson.dumps(data))

async def fetch_and_save_symbols_data(symbols, etf_symbols, crypto_symbols, session):
    tasks = []
@@ -39,10 +26,9 @@ async def fetch_and_save_symbols_data(symbols, etf_symbols, crypto_symbols, session):

        task = asyncio.create_task(get_historical_data(symbol, query_con, session))
        tasks.append(task)
    responses = await asyncio.gather(*tasks)

    for symbol, response in zip(symbols, responses):
        await save_price_data(symbol, response)
    await asyncio.gather(*tasks)


async def get_historical_data(ticker, query_con, session):
    try:
@@ -70,23 +56,29 @@ async def get_historical_data(ticker, query_con, session):
        df_1y = pd.read_sql_query(query, query_con, params=(start_date_1y, end_date)).round(2).rename(columns={"date": "time"})
        df_max = pd.read_sql_query(query, query_con, params=(start_date_max, end_date)).round(2).rename(columns={"date": "time"})

        res = {
            '1W': ujson.loads(data[0]) if data else [],
            '1M': ujson.loads(data[1]) if len(data) > 1 else [],
            '6M': ujson.loads(df_6m.to_json(orient="records")),
            '1Y': ujson.loads(df_1y.to_json(orient="records")),
            'MAX': ujson.loads(df_max.to_json(orient="records"))
        }

async with aiofiles.open(f"json/historical-price/one-week/{ticker}.json", 'w') as file:
res = ujson.loads(data[0]) if data else []
await file.write(ujson.dumps(res))

async with aiofiles.open(f"json/historical-price/one-month/{ticker}.json", 'w') as file:
res = ujson.loads(data[1]) if len(data) > 1 else []
await file.write(ujson.dumps(res))

async with aiofiles.open(f"json/historical-price/six-months/{ticker}.json", 'w') as file:
res = ujson.loads(df_6m.to_json(orient="records"))
await file.write(ujson.dumps(res))

async with aiofiles.open(f"json/historical-price/one-year/{ticker}.json", 'w') as file:
res = ujson.loads(df_1y.to_json(orient="records"))
await file.write(ujson.dumps(res))

async with aiofiles.open(f"json/historical-price/max/{ticker}.json", 'w') as file:
res = ujson.loads(df_max.to_json(orient="records"))
await file.write(ujson.dumps(res))

    except Exception as e:
        print(f"Failed to fetch data for {ticker}: {e}")
        res = {
            '1W': [],
            '1M': [],
            '6M': [],
            '1Y': [],
            'MAX': []
        }
    return res

async def run():
    total_symbols = []
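The cron job above now writes each time window to its own file under json/historical-price/<period>/<ticker>.json instead of one combined JSON per ticker, so the API only has to load the window a client actually asks for. A minimal sketch of that slicing pattern using the same aiofiles/ujson stack; the write_price_slices helper and the slices argument are illustrative names, not code from this repository:

import aiofiles
import ujson

async def write_price_slices(ticker, slices):
    # Write each pre-sliced window to its own file, e.g.
    # json/historical-price/one-week/AAPL.json, mirroring the directories used in the diff.
    for period, rows in slices.items():
        async with aiofiles.open(f"json/historical-price/{period}/{ticker}.json", 'w') as file:
            await file.write(ujson.dumps(rows))

# Example usage (hypothetical data), keyed by the directory names from the diff:
# await write_price_slices("AAPL", {
#     "one-week": week_rows, "one-month": month_rows, "six-months": six_month_rows,
#     "one-year": year_rows, "max": max_rows,
# })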
14 changes: 11 additions & 3 deletions app/main.py
@@ -157,6 +157,11 @@ async def openapi(username: str = Depends(get_current_username)):
class TickerData(BaseModel):
    ticker: str


class HistoricalPrice(BaseModel):
    ticker: str
    timePeriod: str

class AnalystId(BaseModel):
    analystId: str

@@ -320,10 +325,11 @@ async def rating_stock(data: TickerData):
    return res

@app.post("/historical-price")
async def get_stock(data: TickerData):
async def get_stock(data: HistoricalPrice):
    ticker = data.ticker.upper()
    time_period = data.timePeriod

    cache_key = f"historical-price-{ticker}"
    cache_key = f"historical-price-{ticker}-{time_period}"
    cached_result = redis_client.get(cache_key)
    if cached_result:
        return StreamingResponse(
@@ -333,7 +339,7 @@ async def get_stock(data: TickerData):
        )

    try:
        with open(f"json/historical-price/{ticker}.json", 'r') as file:
        with open(f"json/historical-price/{time_period}/{ticker}.json", 'r') as file:
            res = ujson.load(file)
    except:
        res = []
@@ -349,6 +355,8 @@ async def get_stock(data: TickerData):
        headers={"Content-Encoding": "gzip"}
    )



@app.post("/one-day-price")
async def get_stock(data: TickerData):
    data = data.dict()
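With the updated /historical-price route, a client requests a single pre-sliced window instead of downloading the full history. A rough usage sketch, assuming the FastAPI app runs locally on port 8000 and that this route needs no extra auth (neither detail appears in the diff); the timePeriod values mirror the cron job's directory names (one-week, one-month, six-months, one-year, max):

import requests

BASE_URL = "http://localhost:8000"  # hypothetical; point at wherever app/main.py is served

payload = {"ticker": "AAPL", "timePeriod": "one-year"}
resp = requests.post(f"{BASE_URL}/historical-price", json=payload)
resp.raise_for_status()

# The endpoint streams gzip-encoded JSON; requests decompresses it transparently.
rows = resp.json()
print(f"{len(rows)} price rows for {payload['ticker']}")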
