diff --git a/app/cron_list.py b/app/cron_list.py
index 303d581..833891c 100644
--- a/app/cron_list.py
+++ b/app/cron_list.py
@@ -96,9 +96,7 @@ async def get_etf_holding(etf_symbols, etf_con):
                 'weightPercentage': item.get('weightPercentage', None),
                 'sharesNumber': item.get('marketValue', None) if not item.get('asset') and item.get('sharesNumber') == 0 else item.get('sharesNumber', None)
             }
-            for item in data
-            if item.get('marketValue', 0) >= 0 and item.get('weightPercentage', 0) > 0  # Exclude items with negative marketValue or non-positive weightPercentage
-        ]
+            for item in data if item.get('marketValue', 0) >= 0 and item.get('weightPercentage', 0) > 0]
 
         for item in res:
             try:
@@ -211,6 +209,7 @@ async def get_magnificent_seven():
     with open(f"json/stocks-list/list/magnificent-seven.json", 'wb') as file:
         file.write(orjson.dumps(res_list))
 
+    print(res_list)
 
 
 async def get_faang():
diff --git a/app/main.py b/app/main.py
index ef81a08..f9cbbb3 100755
--- a/app/main.py
+++ b/app/main.py
@@ -1215,6 +1215,7 @@ async def get_indicator(data: IndicatorListData, api_key: str = Security(get_api
     ticker_list = [t.upper() for t in data.tickerList if t is not None]
     combined_results = []
 
+    # Load quote data in parallel
    quote_data = await asyncio.gather(*[load_json_async(f"json/quote/{ticker}.json") for ticker in ticker_list])
     quote_dict = {ticker: data for ticker, data in zip(ticker_list, quote_data) if data}
 
@@ -1228,23 +1229,25 @@ async def get_indicator(data: IndicatorListData, api_key: str = Security(get_api
                 'stock'
             )
 
-            # Filter the quote based on keys in rule_of_list
+            # Filter the quote based on keys in rule_of_list (use data only from quote.json for these)
             filtered_quote = {key: quote.get(key) for key in rule_of_list if key in quote}
             filtered_quote['type'] = ticker_type
 
             # Add the result to combined_results
             combined_results.append(filtered_quote)
-
-    # Fetch and merge data from stock_screener_data
+    # Fetch and merge data from stock_screener_data, but exclude price, volume, and changesPercentage
     screener_keys = [key for key in rule_of_list if key not in ['volume', 'marketCap', 'changesPercentage', 'price', 'symbol', 'name']]
     if screener_keys:
         screener_dict = {item['symbol']: {k: v for k, v in item.items() if k in screener_keys} for item in stock_screener_data}
         for result in combined_results:
             symbol = result.get('symbol')
             if symbol in screener_dict:
+                # Only merge screener data for keys that are not price, volume, or changesPercentage
                 result.update(screener_dict[symbol])
+
+    # Serialize and compress the response
     res = orjson.dumps(combined_results)
     compressed_data = gzip.compress(res)
 
@@ -1261,9 +1264,9 @@ async def process_watchlist_ticker(ticker, rule_of_list, quote_keys_to_include,
     """Process a single ticker concurrently."""
     ticker = ticker.upper()
     ticker_type = 'stocks'
-    if ticker in etf_symbols:
+    if ticker in etf_set:
         ticker_type = 'etf'
-    elif ticker in crypto_symbols:
+    elif ticker in crypto_set:
         ticker_type = 'crypto'
 
     # Concurrent loading of quote, news, and earnings data
@@ -1283,11 +1286,20 @@ async def process_watchlist_ticker(ticker, rule_of_list, quote_keys_to_include,
         key: quote_dict.get(key)
         for key in rule_of_list
         if key in quote_dict or key in quote_keys_to_include
     }
+
+    # Ensure price, volume, and changesPercentage are taken from quote_dict
+    for key in ['price', 'volume', 'changesPercentage']:
+        if key in quote_dict:
+            filtered_quote[key] = quote_dict[key]
+
     filtered_quote['type'] = ticker_type
 
-    # Merge with screener data
+    # Merge with screener data, but only for fields not in quote_dict
     symbol = filtered_quote.get('symbol')
     if symbol and symbol in screener_dict:
+        # Exclude price, volume, and changesPercentage from screener_dict update
+        for key in ['price', 'volume', 'changesPercentage']:
+            screener_dict[symbol].pop(key, None)
         filtered_quote.update(screener_dict[symbol])
     result = filtered_quote
@@ -1308,7 +1320,6 @@ async def process_watchlist_ticker(ticker, rule_of_list, quote_keys_to_include,
 
 
 
-
 @app.post("/get-watchlist")
 async def get_watchlist(data: GetWatchList, api_key: str = Security(get_api_key)):
     """Optimized watchlist endpoint with concurrent processing and earnings data."""
@@ -1328,8 +1339,12 @@ async def get_watchlist(data: GetWatchList, api_key: str = Security(get_api_key)
 
     # Normalize rule_of_list
     if not rule_of_list or not isinstance(rule_of_list, list):
-        rule_of_list = quote_keys_to_include
+        rule_of_list = []
 
+    # Remove 'price', 'volume', and 'changesPercentage' from rule_of_list
+    rule_of_list = [rule for rule in rule_of_list if rule not in ['price', 'volume', 'changesPercentage']]
+
+    # Ensure 'symbol' and 'name' are included in rule_of_list
     rule_of_list = list(set(rule_of_list + ['symbol', 'name']))
 
     # Prepare screener dictionary for fast lookup
@@ -1379,7 +1394,6 @@ async def get_watchlist(data: GetWatchList, api_key: str = Security(get_api_key)
 
 
 
-
 @app.post("/get-price-alert")
 async def get_price_alert(data: dict, api_key: str = Security(get_api_key)):
     user_id = data.get('userId')
@@ -4055,6 +4069,7 @@ async def get_statistics(data: FilterStockList, api_key: str = Security(get_api_
     except:
         res = []
     data = orjson.dumps(res)
+    print(res)
     compressed_data = gzip.compress(data)
     redis_client.set(cache_key, compressed_data)
 
diff --git a/fastify/app.js b/fastify/app.js
index f82211c..01b953c 100755
--- a/fastify/app.js
+++ b/fastify/app.js
@@ -317,10 +317,10 @@ const sendData = async () => {
           }
         }
       } else {
-        console.error("File not found for ticker:", symbol);
+        //console.error("File not found for ticker:", symbol);
       }
     } catch (err) {
-      console.error("Error processing data for ticker:", symbol, err);
+      //console.error("Error processing data for ticker:", symbol, err);
     }
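
The recurring rule across the app/main.py hunks is that quote.json stays authoritative for price, volume, and changesPercentage, while screener data only fills the other requested fields. Below is a minimal standalone sketch of that precedence rule; the helper name and sample values are hypothetical and not part of the diff, and it filters the screener side instead of mutating it in place the way the pop(..., None) calls in process_watchlist_ticker do:

```python
# Hypothetical illustration of the quote-over-screener precedence rule; not code from this PR.
QUOTE_ONLY_KEYS = ('price', 'volume', 'changesPercentage')

def merge_quote_and_screener(filtered_quote: dict, screener_row: dict) -> dict:
    """Merge screener fields into a filtered quote without overwriting quote-only keys."""
    # Keep only screener fields that are not quote-only; same effect as the
    # screener_dict[symbol].pop(key, None) calls in the diff, but without
    # mutating the shared screener dictionary.
    screener_fields = {k: v for k, v in screener_row.items() if k not in QUOTE_ONLY_KEYS}
    return {**filtered_quote, **screener_fields}

# Example with made-up numbers: price/volume/changesPercentage come from the quote,
# marketCap is filled in from the screener.
quote = {'symbol': 'AAPL', 'price': 230.1, 'volume': 1_000_000, 'changesPercentage': 0.8}
screener = {'price': 229.0, 'volume': 999, 'changesPercentage': 0.1, 'marketCap': 3_500_000_000_000}
print(merge_quote_and_screener(quote, screener))
```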