add options chain
Commit 23d5605c2c (parent e1c2c9c9fc)
@@ -13,8 +13,8 @@ load_dotenv()
 api_key = os.getenv('BENZINGA_API_KEY')
 fin = financial_data.Benzinga(api_key)
 
-def save_json(symbol, data):
-    with open(f'json/options-gex/companies/{symbol}.json', 'w') as file:
+def save_json(symbol, data, file_path):
+    with open(f'{file_path}/{symbol}.json', 'w') as file:
         ujson.dump(data, file)
 
 def calculate_volatility(prices_df):
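A minimal usage sketch of the generalized save_json helper (the records value is a made-up placeholder; 'json/options-chain/companies' is the directory this commit writes to and is assumed to already exist):

```python
# Hypothetical call: write a list of per-date records for one symbol
records = [{"date": "2024-01-02", "total_volume": 1250}]  # placeholder data
save_json("AAPL", records, "json/options-chain/companies")
# -> json/options-chain/companies/AAPL.json
```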
@@ -83,6 +83,82 @@ def compute_daily_gex(option_data_list, volatility):
     daily_gex['date'] = daily_gex['date'].astype(str)
     return daily_gex
 
+def summarize_option_chain(option_data_list):
+    summary_data = []
+
+    for option_data in option_data_list:
+        try:
+            date = datetime.strptime(option_data['date'], "%Y-%m-%d").date()
+            open_interest = int(option_data.get('open_interest', 0))
+            volume = int(option_data.get('volume', 0))
+            price = float(option_data.get('price', 0))
+            strike_price = float(option_data.get('strike_price', 0))
+            put_call = option_data.get('put_call', 'CALL')
+            sentiment = option_data.get('sentiment', 'NEUTRAL')
+
+            # Safely convert premium to float, default to 0 if missing or invalid
+            try:
+                premium = float(option_data.get('cost_basis', 0))
+            except (TypeError, ValueError):
+                premium = 0
+
+            # Calculate Bull/Bear/Neutral premiums based on sentiment
+            if sentiment == 'BULLISH':
+                bull_premium = premium
+                bear_premium = 0
+                neutral_premium = 0
+            elif sentiment == 'BEARISH':
+                bull_premium = 0
+                bear_premium = premium
+                neutral_premium = 0
+            else:
+                bull_premium = 0
+                bear_premium = 0
+                neutral_premium = premium
+
+            summary_data.append({
+                'date': date,
+                'open_interest': open_interest,
+                'c_vol': volume if put_call == 'CALL' else 0,
+                'p_vol': volume if put_call == 'PUT' else 0,
+                'bull_premium': bull_premium,
+                'bear_premium': bear_premium,
+                'neutral_premium': neutral_premium
+            })
+        except:
+            pass
+
+    # Summarize by date
+    df_summary = pd.DataFrame(summary_data)
+    daily_summary = df_summary.groupby('date').agg(
+        total_oi=('open_interest', 'sum'),
+        total_bull_prem=('bull_premium', 'sum'),
+        total_bear_prem=('bear_premium', 'sum'),
+        total_neutral_prem=('neutral_premium', 'sum'),
+        c_vol=('c_vol', 'sum'),
+        p_vol=('p_vol', 'sum')
+    ).reset_index()
+
+    # Calculate Bull/Bear ratio
+
+    try:
+        daily_summary['bear_ratio'] = round(daily_summary['total_bear_prem'] / (daily_summary['total_bull_prem'] + daily_summary['total_bear_prem'] + daily_summary['total_neutral_prem']) * 100, 2)
+        daily_summary['bull_ratio'] = round(daily_summary['total_bull_prem'] / (daily_summary['total_bull_prem'] + daily_summary['total_bear_prem'] + daily_summary['total_neutral_prem']) * 100, 2)
+        daily_summary['neutral_ratio'] = round(daily_summary['total_neutral_prem'] / (daily_summary['total_bull_prem'] + daily_summary['total_bear_prem'] + daily_summary['total_neutral_prem']) * 100, 2)
+    except:
+        daily_summary['bear_ratio'] = None
+        daily_summary['bull_ratio'] = None
+        daily_summary['neutral_ratio'] = None
+
+
+    daily_summary['total_volume'] = round(daily_summary['c_vol'] + daily_summary['p_vol'], 2)
+    daily_summary['total_neutral_prem'] = round(daily_summary['total_neutral_prem'], 2)
+    daily_summary['date'] = daily_summary['date'].astype(str)
+    daily_summary = daily_summary.sort_values(by='date', ascending=False)
+    # Return the summarized dataframe
+    return daily_summary
+
+
 def get_data(ticker):
     res_list = []
     page = 0
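A rough sketch of what summarize_option_chain consumes and returns, using two hypothetical contract rows (the field names mirror the .get() calls above; all numbers are made up):

```python
# Hypothetical rows shaped like the option-activity records consumed above
sample = [
    {"date": "2024-05-01", "open_interest": "1200", "volume": "300",
     "price": "2.5", "strike_price": "190", "put_call": "CALL",
     "sentiment": "BULLISH", "cost_basis": "75000"},
    {"date": "2024-05-01", "open_interest": "800", "volume": "150",
     "price": "1.1", "strike_price": "180", "put_call": "PUT",
     "sentiment": "BEARISH", "cost_basis": "25000"},
]
df = summarize_option_chain(sample)
# Expected single row for 2024-05-01: total_oi=2000, c_vol=300, p_vol=150,
# total_volume=450, bull_ratio=75.0, bear_ratio=25.0, neutral_ratio=0.0
```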
@@ -121,7 +197,7 @@ etf_symbols = [row[0] for row in etf_cursor.fetchall()]
 total_symbols = stock_symbols + etf_symbols
 
 query_template = """
-    SELECT date, close
+    SELECT date, close, change_percent
     FROM "{ticker}"
    WHERE date BETWEEN ? AND ?
 """
@@ -131,13 +207,20 @@ for ticker in total_symbols:
     try:
         query = query_template.format(ticker=ticker)
         df_price = pd.read_sql_query(query, stock_con if ticker in stock_symbols else etf_con, params=(start_date_str, end_date_str)).round(2)
+        df_price = df_price.rename(columns={"change_percent": "changesPercentage"})
 
         volatility = calculate_volatility(df_price)
 
         ticker_data = get_data(ticker)
+        daily_option_chain = summarize_option_chain(ticker_data)
+        daily_option_chain = daily_option_chain.merge(df_price[['date', 'changesPercentage']], on='date', how='inner')
+        if not daily_option_chain.empty:
+            save_json(ticker, daily_option_chain.to_dict('records'), 'json/options-chain/companies')
+
         daily_gex = compute_daily_gex(ticker_data, volatility)
-        daily_gex = daily_gex.merge(df_price, on='date', how='inner')
+        daily_gex = daily_gex.merge(df_price[['date', 'close']], on='date', how='inner')
         if not daily_gex.empty:
-            save_json(ticker, daily_gex.to_dict('records'))
+            save_json(ticker, daily_gex.to_dict('records'), 'json/options-gex/companies')
 
     except Exception as e:
         print(e)
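For orientation, each record that ends up in json/options-chain/companies/<ticker>.json should look roughly like this (the keys come from the aggregation above plus the merged changesPercentage column; the values are hypothetical):

```python
# Hypothetical single entry of daily_option_chain.to_dict('records')
record = {
    "date": "2024-05-01",
    "total_oi": 2000,
    "total_bull_prem": 75000.0,
    "total_bear_prem": 25000.0,
    "total_neutral_prem": 0.0,
    "c_vol": 300,
    "p_vol": 150,
    "bear_ratio": 25.0,
    "bull_ratio": 75.0,
    "neutral_ratio": 0.0,
    "total_volume": 450,
    "changesPercentage": 1.23,
}
```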
app/main.py (+29)
@@ -2563,6 +2563,35 @@ async def get_options_flow_ticker(data: TickerData, api_key: str = Security(get_api_key)):
         headers={"Content-Encoding": "gzip"}
     )
 
+@app.post("/options-chain-ticker")
+async def get_options_chain(data: TickerData, api_key: str = Security(get_api_key)):
+    ticker = data.ticker.upper()
+    cache_key = f"options-chain-{ticker}"
+
+    cached_result = redis_client.get(cache_key)
+    if cached_result:
+        return StreamingResponse(
+            io.BytesIO(cached_result),
+            media_type="application/json",
+            headers={"Content-Encoding": "gzip"})
+    try:
+        with open(f"json/options-chain/companies/{ticker}.json", 'rb') as file:
+            res_list = orjson.loads(file.read())
+    except:
+        res_list = []
+
+    data = orjson.dumps(res_list)
+    compressed_data = gzip.compress(data)
+    redis_client.set(cache_key, compressed_data)
+    redis_client.expire(cache_key, 3600*3600)  # Set cache expiration time
+
+    return StreamingResponse(
+        io.BytesIO(compressed_data),
+        media_type="application/json",
+        headers={"Content-Encoding": "gzip"}
+    )
+
+
 '''
 @app.post("/options-flow-feed")
 async def get_options_flow_feed(data: LastOptionId, api_key: str = Security(get_api_key)):
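A small client-side sketch for the new /options-chain-ticker endpoint (the base URL and the API-key header name are assumptions; whatever get_api_key actually checks is not shown in this diff):

```python
import requests

# Hypothetical base URL and API-key header; adjust to the real deployment
resp = requests.post(
    "http://localhost:8000/options-chain-ticker",
    json={"ticker": "aapl"},                # TickerData payload; uppercased server-side
    headers={"X-API-KEY": "your-api-key"},  # assumed header name checked by get_api_key
)
data = resp.json()  # requests decodes the gzip-encoded response body automatically
```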
@@ -464,6 +464,13 @@ def run_options_gex():
     ]
     run_command(command)
 
+    command = [
+        "sudo", "rsync", "-avz", "-e", "ssh",
+        "/root/backend/app/json/options-chain",
+        f"root@{useast_ip_address}:/root/backend/app/json"
+    ]
+    run_command(command)
+
 def run_government_contract():
     run_command(["python3", "cron_government_contract.py"])
     command = [