diff --git a/app/cron_insider_tracker.py b/app/cron_insider_tracker.py
index f8dac9b..f113553 100644
--- a/app/cron_insider_tracker.py
+++ b/app/cron_insider_tracker.py
@@ -61,7 +61,7 @@ def format_name(name):
 
     # Join the parts to form the final name
     return " ".join(formatted_parts)
 
-    
+
 
 def aggregate_transactions(transactions, min_value=100_000):
@@ -111,7 +111,7 @@ def aggregate_transactions(transactions, min_value=100_000):
 async def get_data(session, symbols):
     res_list = []
-    for page in range(0, 20):  # Adjust the number of pages as needed
+    for page in range(0, 100):  # Adjust the number of pages as needed
         url = f"https://financialmodelingprep.com/stable/insider-trading/latest?page={page}&apikey={api_key}"
         async with session.get(url) as response:
             try:
diff --git a/app/primary_cron_job.py b/app/primary_cron_job.py
index 9c848e4..4304ef7 100755
--- a/app/primary_cron_job.py
+++ b/app/primary_cron_job.py
@@ -157,7 +157,7 @@ def run_similar_stocks():
 
 def run_historical_price():
     week = datetime.today().weekday()
-    if week <= 4:
+    if week <= 5:
         run_command(["python3", "cron_historical_price.py"])
 
 def run_one_day_price():
diff --git a/app/test.py b/app/test.py
index 86a805c..11dd5b9 100644
--- a/app/test.py
+++ b/app/test.py
@@ -1,5 +1,8 @@
 import requests
+from bs4 import BeautifulSoup
 
-url = "https://api.stocktwits.com/api/2/streams/symbol/AAPL.json?filter=top"
+url = "https://twitter.com/search?q=%24AAPL&src=typed_query"
 response = requests.get(url)
-print(response)
\ No newline at end of file
+soup = BeautifulSoup(response.content, 'html.parser')
+
+print(soup)
\ No newline at end of file