diff --git a/app/cron_analyst_insight.py b/app/cron_analyst_insight.py
new file mode 100644
index 0000000..1e4bdce
--- /dev/null
+++ b/app/cron_analyst_insight.py
@@ -0,0 +1,113 @@
+from openai import OpenAI
+import time
+import ujson
+import sqlite3
+import requests
+import os
+from dotenv import load_dotenv
+from tqdm import tqdm
+from datetime import datetime
+
+# Load environment variables
+load_dotenv()
+
+benzinga_api_key = os.getenv('BENZINGA_API_KEY')
+
+# Initialize the OpenAI client
+openai_api_key = os.getenv('OPENAI_API_KEY')
+org_id = os.getenv('OPENAI_ORG')
+client = OpenAI(
+    api_key=openai_api_key,
+    organization=org_id,
+)
+
+headers = {"accept": "application/json"}
+url = "https://api.benzinga.com/api/v1/analyst/insights"
+
+
+def save_json(symbol, data):
+    with open(f"json/analyst/insight/{symbol}.json", 'w') as file:
+        ujson.dump(data, file)
+
+
+def get_analyst_insight(ticker):
+    res_dict = {}
+
+    try:
+        querystring = {"token": benzinga_api_key, "symbols": ticker}
+        response = requests.request("GET", url, params=querystring)
+        output = ujson.loads(response.text)['analyst-insights'][0]  # keep the latest insight only
+        # Extract the required fields
+        res_dict = {
+            'insight': output['analyst_insights'],
+            'id': output['id'],
+            'date': datetime.strptime(output['date'], "%Y-%m-%d").strftime("%b %d, %Y")
+        }
+    except Exception:
+        pass
+
+    return res_dict
+
+
+# Summarize the insight text with GPT-3.5-turbo
+def get_summary(data):
+    # Format the insight as the user prompt
+    data_string = (
+        f"Insights: {data['insight']}"
+    )
+
+    response = client.chat.completions.create(
+        model="gpt-3.5-turbo-0125",
+        messages=[
+            {"role": "system", "content": "Summarize analyst insights clearly and concisely in under 400 characters. Ensure the summary is professional and easy to understand. Conclude with whether the report is bullish or bearish."},
+            {"role": "user", "content": data_string}
+        ],
+        max_tokens=150,
+        temperature=0.7
+    )
+
+    summary = response.choices[0].message.content
+    data = {
+        'insight': summary,
+        'id': data['id'],
+        'date': data['date']
+    }
+
+    return data
+
+
+try:
+    stock_con = sqlite3.connect('stocks.db')
+    stock_cursor = stock_con.cursor()
+    stock_cursor.execute("SELECT DISTINCT symbol FROM stocks WHERE marketCap >= 100E6 AND symbol NOT LIKE '%.%'")
+    stock_symbols = [row[0] for row in stock_cursor.fetchall()]
+
+    stock_con.close()
+
+    for symbol in tqdm(stock_symbols):
+        try:
+            data = get_analyst_insight(symbol)
+            new_report_id = data.get('id', '')
+            try:
+                with open(f"json/analyst/insight/{symbol}.json", 'r') as file:
+                    old_report_id = ujson.load(file).get('id', '')
+            except Exception:
+                old_report_id = ''
+            # Only summarize when the report id is new, so duplicate reports never trigger a paid OpenAI call
+            if new_report_id != old_report_id and len(data.get('insight', '')) > 0:
+                res = get_summary(data)
+                save_json(symbol, res)
+            else:
+                print('skipped')
+        except Exception:
+            pass
+
+except Exception as e:
+    print(e)
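For reference, a sketch of the per-symbol payload this job writes to json/analyst/insight/<SYMBOL>.json. The keys follow get_summary() and save_json() above; the values here are invented for illustration only.

# Illustrative only: keys match what get_summary() returns, values are made up.
example_payload = {
    "insight": "Analysts cite improving margins and raised guidance; overall tone is bullish.",  # GPT summary, under 400 characters
    "id": "38741052",          # Benzinga report id, used to detect new reports
    "date": "May 01, 2024"     # formatted with "%b %d, %Y"
}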
diff --git a/app/main.py b/app/main.py
index d693813..64810af 100755
--- a/app/main.py
+++ b/app/main.py
@@ -3020,6 +3020,25 @@ async def get_borrowed_share(data:TickerData):
     redis_client.expire(cache_key, 3600*3600) # Set cache expiration time to 1 day
     return res
 
+
+@app.post("/analyst-insight")
+async def get_analyst_insight(data: TickerData):
+    ticker = data.ticker.upper()
+    cache_key = f"analyst-insight-{ticker}"
+    cached_result = redis_client.get(cache_key)
+    if cached_result:
+        return ujson.loads(cached_result)
+    try:
+        with open(f"json/analyst/insight/{ticker}.json", 'r') as file:
+            res = ujson.load(file)
+    except Exception:
+        res = {}
+
+    redis_client.set(cache_key, ujson.dumps(res))
+    redis_client.expire(cache_key, 3600*24)  # cache for 1 day
+    return res
+
+
 @app.post("/implied-volatility")
 async def get_clinical_trial(data:TickerData):
     ticker = data.ticker.upper()
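A quick way to exercise the new route once the cron job has produced some files. The localhost URL is an assumption for a local dev run; the response is {} for tickers without a stored insight.

# Hedged example: assumes the API server is running locally on port 8000.
import requests

resp = requests.post(
    "http://localhost:8000/analyst-insight",
    json={"ticker": "gme"},   # ticker is upper-cased server-side
    timeout=10,
)
print(resp.json())            # {} until json/analyst/insight/GME.json exists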
diff --git a/app/primary_cron_job.py b/app/primary_cron_job.py
index 26fef79..75b0a2d 100755
--- a/app/primary_cron_job.py
+++ b/app/primary_cron_job.py
@@ -270,6 +270,7 @@ def run_analyst_rating():
     if week <= 5:
         subprocess.run(["python3", "cron_analyst_db.py"])
         subprocess.run(["python3", "cron_analyst_ticker.py"])
+        subprocess.run(["python3", "cron_analyst_insight.py"])
 
         command = ["sudo", "rsync", "-avz", "-e", "ssh", "/root/backend/app/json/analyst", f"root@{useast_ip_address}:/root/backend/app/json"]
         subprocess.run(command)
diff --git a/app/quant-analysis/daily_return.png b/app/quant-analysis/daily_return.png
new file mode 100644
index 0000000..8661585
Binary files /dev/null and b/app/quant-analysis/daily_return.png differ
diff --git a/app/quant-analysis/histogram.png b/app/quant-analysis/histogram.png
new file mode 100644
index 0000000..ee73df5
Binary files /dev/null and b/app/quant-analysis/histogram.png differ
diff --git a/app/quant-analysis/mc-simulation.py b/app/quant-analysis/mc-simulation.py
new file mode 100644
index 0000000..601c0d4
--- /dev/null
+++ b/app/quant-analysis/mc-simulation.py
@@ -0,0 +1,105 @@
+import matplotlib.pyplot as plt
+import seaborn as sns
+import pandas as pd
+import numpy as np
+from datetime import datetime
+import yfinance as yf
+from tqdm import tqdm
+
+#correlated_stocks = ['AQN', 'PACB', 'ZI', 'IPG', 'EW']
+ticker = 'GME'
+start_date = datetime(2024, 5, 1)
+end_date = datetime.today()
+df = yf.download(ticker, start=start_date, end=end_date, interval="1d").reset_index()
+#df = df.rename(columns={'Adj Close': 'close', 'Date': 'date'})
+df['daily_return'] = df['Adj Close'].pct_change()
+df = df.dropna()
+
+
+# Plot the daily returns
+fig, ax = plt.subplots(figsize=(14, 5))
+
+ax.plot(df['Date'], df['daily_return']*100, linestyle='--', marker='o', color='blue', label='Daily Returns')
+
+legend = ax.legend(loc="best", shadow=True, fontsize=15)
+plt.xlabel("Date", fontsize=14)
+plt.ylabel("Percentage %", fontsize=15)
+plt.grid(True)
+plt.savefig('daily_return.png')
+
+
+# Monte Carlo simulation of the price path
+fig, ax = plt.subplots(figsize=(14, 5))
+
+days = 365
+
+# delta t
+dt = 1/365
+
+mu = df['daily_return'].mean()
+
+sigma = df['daily_return'].std()
+
+# Takes the start price, number of days to simulate, and the mean and standard deviation of daily returns
+def stock_monte_carlo(start_price, days, mu, sigma):
+
+    price = np.zeros(days)
+    price[0] = start_price
+
+    shock = np.zeros(days)
+    drift = np.zeros(days)
+
+    for x in range(1, days):
+
+        # Drift and shock terms of the Monte Carlo price update
+        shock[x] = np.random.normal(loc=mu*dt, scale=sigma*np.sqrt(dt))
+
+        drift[x] = mu * dt
+
+        # New price = old price + old price*(drift + shock)
+        price[x] = price[x-1] + (price[x-1] * (drift[x] + shock[x]))
+
+    return price
+
+start_price = df['Adj Close'].iloc[-1]  # most recent adjusted close as the starting price
+
+
+for run in tqdm(range(200)):
+    ax.plot(stock_monte_carlo(start_price, days, mu, sigma))
+
+plt.xlabel('Days')
+plt.ylabel('Price')
+plt.title('Monte Carlo Analysis for GME')
+plt.savefig('simulation.png')
+
+
+# Distribution of final prices over many runs
+fig, ax = plt.subplots(figsize=(14, 5))
+runs = 10000
+
+simulations = np.zeros(runs)
+for run in tqdm(range(runs)):
+    simulations[run] = stock_monte_carlo(start_price, days, mu, sigma)[days-1]
+
+q = np.percentile(simulations, 1)
+
+plt.hist(simulations, bins=200)
+
+plt.figtext(0.6, 0.8, s="Start price: $%.2f" % start_price)
+
+plt.figtext(0.6, 0.7, "Mean final price: $%.2f" % simulations.mean())
+
+plt.figtext(0.6, 0.6, "VaR(0.99): $%.2f" % (start_price - q,))
+
+plt.figtext(0.15, 0.6, "q(0.99): $%.2f" % q)
+
+plt.axvline(x=q, linewidth=4, color='r')
+
+plt.title("Final price distribution for GameStop stock after %s days" % days, weight='bold')
+plt.savefig('histogram.png')
diff --git a/app/quant-analysis/simulation.png b/app/quant-analysis/simulation.png
new file mode 100644
index 0000000..346105a
Binary files /dev/null and b/app/quant-analysis/simulation.png differ
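If the 10,000-run histogram pass becomes slow, the same drift-plus-shock recursion can be vectorised across runs with NumPy. This is a sketch only, reusing start_price, mu, sigma, and dt from mc-simulation.py; monte_carlo_matrix is a hypothetical helper name, not part of the diff.

# Sketch: vectorised equivalent of calling stock_monte_carlo() once per run.
# Same update as the loop above: price[x] = price[x-1] * (1 + drift + shock),
# with drift = mu*dt and shock ~ Normal(mu*dt, sigma*sqrt(dt)).
import numpy as np

def monte_carlo_matrix(start_price, days, mu, sigma, runs, dt=1/365, seed=None):
    rng = np.random.default_rng(seed)
    # one row per run, one column per simulated step
    shock = rng.normal(loc=mu * dt, scale=sigma * np.sqrt(dt), size=(runs, days - 1))
    drift = mu * dt
    # cumulative product of the per-step growth factors, applied to the start price
    growth = np.cumprod(1 + drift + shock, axis=1)
    prices = np.empty((runs, days))
    prices[:, 0] = start_price
    prices[:, 1:] = start_price * growth
    return prices

# Usage: final-day prices for the histogram section
# final_prices = monte_carlo_matrix(start_price, 365, mu, sigma, runs=10_000)[:, -1]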