add past earnings volatility

parent 488db802dc
commit 19a17ae343
@@ -1,8 +1,10 @@
 import aiohttp
 import aiofiles
 import ujson
+import orjson
 import sqlite3
 import asyncio
+import pandas as pd
 import os
 from dotenv import load_dotenv
 from datetime import datetime, timedelta
@@ -16,8 +18,15 @@ api_key = os.getenv('BENZINGA_API_KEY')
 
 ny_tz = pytz.timezone('America/New_York')
 today = datetime.now(ny_tz).replace(hour=0, minute=0, second=0, microsecond=0)
+min_date = ny_tz.localize(datetime.strptime("2015-01-01", "%Y-%m-%d"))
 N_days_ago = today - timedelta(days=10)
 
+query_template = """
+SELECT date, open, high, low, close
+FROM "{ticker}"
+WHERE date >= ?
+"""
 
 def check_existing_file(ticker, folder_name):
     file_path = f"json/earnings/{folder_name}/{ticker}.json"
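The hunks above never show where query_template is consumed, but with sqlite3 and pandas both imported it would plausibly be filled in per ticker and executed with a bound date parameter. A minimal sketch, assuming a helper along these lines exists elsewhere in the file (the helper name is hypothetical):

import sqlite3
import pandas as pd

def load_recent_prices(con: sqlite3.Connection, ticker: str, cutoff: str) -> pd.DataFrame:
    # The table name cannot be bound as a SQL parameter, hence str.format();
    # the date cutoff goes through normal qmark-style binding.
    query = query_template.format(ticker=ticker)
    return pd.read_sql_query(query, con, params=[cutoff])

# e.g. load_recent_prices(con, 'AAPL', N_days_ago.strftime('%Y-%m-%d'))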
@@ -49,11 +58,87 @@ async def save_json(data, symbol, dir_path):
     async with aiofiles.open(file_path, 'w') as file:
         await file.write(ujson.dumps(data))
 
-async def get_data(session, ticker):
+async def get_past_data(data, ticker, con):
+    # Filter data based on date constraints
+    filtered_data = []
+    for item in data:
+        try:
+            item_date = ny_tz.localize(datetime.strptime(item["date"], "%Y-%m-%d"))
+            if min_date <= item_date <= today:
+                filtered_data.append(
+                    {
+                        'revenue': float(item['revenue']),
+                        'revenueEst': float(item['revenue_est']),
+                        'revenueSurprisePercent': round(float(item['revenue_surprise_percent'])*100, 2),
+                        'eps': round(float(item['eps']), 2),
+                        'epsEst': round(float(item['eps_est']), 2),
+                        'epsSurprisePercent': round(float(item['eps_surprise_percent'])*100, 2),
+                        'year': item['period_year'],
+                        'quarter': item['period'],
+                        'date': item['date']
+                    }
+                )
+        except:
+            pass
+
+    # Sort the filtered data by date
+    if len(filtered_data) > 0:
+        filtered_data.sort(key=lambda x: x['date'], reverse=True)
+
+        try:
+            # Load the price history data
+            with open(f"json/historical-price/max/{ticker}.json") as file:
+                price_history = orjson.loads(file.read())
+
+            # Convert price_history dates to datetime objects for easy comparison
+            price_history_dict = {
+                datetime.strptime(item['time'], "%Y-%m-%d"): item for item in price_history
+            }
+
+            # Calculate volatility for each earnings release
+            for entry in filtered_data:
+                earnings_date = datetime.strptime(entry['date'], "%Y-%m-%d")
+                volatility_prices = []
+
+                # Collect prices from (X-2) to (X+1)
+                for i in range(-2, 2):
+                    current_date = earnings_date + timedelta(days=i)
+                    if current_date in price_history_dict:
+                        volatility_prices.append(price_history_dict[current_date])
+
+                # Calculate volatility if we have at least one price entry
+                if volatility_prices:
+                    high_prices = [day['high'] for day in volatility_prices]
+                    low_prices = [day['low'] for day in volatility_prices]
+                    close_prices = [day['close'] for day in volatility_prices]
+
+                    max_high = max(high_prices)
+                    min_low = min(low_prices)
+                    avg_close = sum(close_prices) / len(close_prices)
+
+                    # Volatility percentage calculation
+                    volatility = round(((max_high - min_low) / avg_close) * 100, 2)
+                else:
+                    volatility = None  # No data available for volatility calculation
+
+                # Add the volatility to the entry
+                entry['volatility'] = volatility
+
+            # Save the updated filtered_data
+            await save_json(filtered_data, ticker, 'json/earnings/past')
+
+        except:
+            pass
+
+
+async def get_data(session, ticker, con):
     querystring = {"token": api_key, "parameters[tickers]": ticker}
     try:
         async with session.get(url, params=querystring, headers=headers) as response:
             data = ujson.loads(await response.text())['earnings']
+
+            await get_past_data(data, ticker, con)
+
             # Filter for future earnings
             future_dates = [item for item in data if ny_tz.localize(datetime.strptime(item["date"], "%Y-%m-%d")) >= today]
             if future_dates:
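The volatility figure added in this hunk is a simple range measure: the widest high-to-low spread across the sessions from two days before the report to one day after, scaled by the average close over those same sessions. Restated as a self-contained sketch (the helper name and toy data are illustrative, not from the commit):

from datetime import datetime, timedelta

def earnings_window_volatility(price_history_dict, earnings_date, window=range(-2, 2)):
    """Range-based volatility in percent over days X-2 .. X+1 around earnings."""
    days = [price_history_dict[d]
            for d in (earnings_date + timedelta(days=i) for i in window)
            if d in price_history_dict]
    if not days:
        return None  # mirrors the diff: no price data -> volatility is None
    max_high = max(day['high'] for day in days)
    min_low = min(day['low'] for day in days)
    avg_close = sum(day['close'] for day in days) / len(days)
    return round((max_high - min_low) / avg_close * 100, 2)

# Toy check: a 10-point swing around an average close of 100 -> 10.0%
d = datetime(2024, 1, 10)
history = {d: {'high': 105, 'low': 95, 'close': 100}}
assert earnings_window_volatility(history, d) == 10.0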
@@ -113,9 +198,9 @@ async def get_data(session, ticker):
         print(e)
         #pass
 
-async def run(stock_symbols):
+async def run(stock_symbols, con):
     async with aiohttp.ClientSession() as session:
-        tasks = [get_data(session, symbol) for symbol in stock_symbols]
+        tasks = [get_data(session, symbol, con) for symbol in stock_symbols]
         for f in tqdm(asyncio.as_completed(tasks), total=len(stock_symbols)):
             await f
 
@@ -126,8 +211,11 @@ try:
     cursor.execute("PRAGMA journal_mode = wal")
     cursor.execute("SELECT DISTINCT symbol FROM stocks WHERE symbol NOT LIKE '%.%' AND symbol NOT LIKE '%-%'")
    stock_symbols = [row[0] for row in cursor.fetchall()]
-    con.close()
-    asyncio.run(run(stock_symbols))
     #stock_symbols = ['TSLA']
+
+    asyncio.run(run(stock_symbols, con))
+
 except Exception as e:
     print(e)
+finally:
+    con.close()
app/main.py
@@ -3847,17 +3847,35 @@ async def get_next_earnings(data:TickerData, api_key: str = Security(get_api_key
     cache_key = f"next-earnings-{ticker}"
     cached_result = redis_client.get(cache_key)
     if cached_result:
-        return orjson.loads(cached_result)
+        return StreamingResponse(
+            io.BytesIO(cached_result),
+            media_type="application/json",
+            headers={"Content-Encoding": "gzip"}
+        )
     try:
         with open(f"json/earnings/next/{ticker}.json", 'rb') as file:
             res = orjson.loads(file.read())
     except:
         res = {}
-
-    redis_client.set(cache_key, orjson.dumps(res))
-    redis_client.expire(cache_key,5*60)
+    try:
+        with open(f"json/earnings/past/{ticker}.json", 'rb') as file:
+            past_earnings = orjson.loads(file.read())
+    except:
+        past_earnings = []
 
-    return res
+    final_res = {'next': res, 'past': past_earnings}
+    data = orjson.dumps(final_res)
+    compressed_data = gzip.compress(data)
+
+    redis_client.set(cache_key, compressed_data)
+    redis_client.expire(cache_key,15*60)
+
+    return StreamingResponse(
+        io.BytesIO(compressed_data),
+        media_type="application/json",
+        headers={"Content-Encoding": "gzip"}
+    )
 
 @app.post("/earnings-surprise")
 async def get_surprise_earnings(data:TickerData, api_key: str = Security(get_api_key)):
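With this change the endpoint returns gzip-compressed bytes instead of a plain JSON object, so callers need nothing special: any HTTP client that honors Content-Encoding decompresses transparently. A hypothetical client call (the URL and API-key header name are assumptions, not from the repo):

import requests

r = requests.post(
    "http://localhost:8000/next-earnings",   # assumed local dev URL
    json={"ticker": "AAPL"},
    headers={"api-key": "..."},              # header name is an assumption
)
payload = r.json()  # requests decodes Content-Encoding: gzip automatically
print(payload["next"], len(payload["past"]))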
@@ -3865,35 +3883,28 @@ async def get_surprise_earnings(data:TickerData, api_key: str = Security(get_api
     cache_key = f"earnings-surprise-{ticker}"
     cached_result = redis_client.get(cache_key)
     if cached_result:
-        return orjson.loads(cached_result)
+        return StreamingResponse(
+            io.BytesIO(cached_result),
+            media_type="application/json",
+            headers={"Content-Encoding": "gzip"}
+        )
     try:
         with open(f"json/earnings/surprise/{ticker}.json", 'rb') as file:
             res = orjson.loads(file.read())
     except:
         res = {}
 
-    redis_client.set(cache_key, orjson.dumps(res))
-    redis_client.expire(cache_key,5*60)
-
-    return res
-
-@app.post("/dividend-announcement")
-async def get_dividend_announcement(data:TickerData, api_key: str = Security(get_api_key)):
-    ticker = data.ticker.upper()
-    cache_key = f"dividend-announcement-{ticker}"
-    cached_result = redis_client.get(cache_key)
-    if cached_result:
-        return orjson.loads(cached_result)
-    try:
-        with open(f"json/dividends/announcement/{ticker}.json", 'rb') as file:
-            res = orjson.loads(file.read())
-    except:
-        res = {}
-
-    redis_client.set(cache_key, orjson.dumps(res))
-    redis_client.expire(cache_key,3600*3600)
-
-    return res
+    data = orjson.dumps(res)
+    compressed_data = gzip.compress(data)
+
+    redis_client.set(cache_key, compressed_data)
+    redis_client.expire(cache_key,15*60)
+
+    return StreamingResponse(
+        io.BytesIO(compressed_data),
+        media_type="application/json",
+        headers={"Content-Encoding": "gzip"}
+    )
 
 @app.post("/info-text")
 async def get_info_text(data:InfoText, api_key: str = Security(get_api_key)):
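Both rewritten endpoints follow the same pattern: serve cached gzip bytes on a hit, otherwise build the payload, compress once, cache the compressed bytes, and stream them back. Condensed into one illustrative helper (a sketch, not part of the commit):

import gzip, io
import orjson
from fastapi.responses import StreamingResponse

def gzip_json_response(redis_client, cache_key, payload, ttl=15 * 60):
    # Compress once, cache the compressed bytes, and let the client
    # decompress via the Content-Encoding: gzip header.
    compressed = gzip.compress(orjson.dumps(payload))
    redis_client.set(cache_key, compressed)
    redis_client.expire(cache_key, ttl)
    return StreamingResponse(
        io.BytesIO(compressed),
        media_type="application/json",
        headers={"Content-Encoding": "gzip"},
    )

On a cache hit the stored bytes can be streamed back directly with the same headers, exactly as the if cached_result: branches above do.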