add options rules
This commit is contained in:
parent
84a8db1f26
commit
a069f1f985
@ -602,8 +602,37 @@ data = {
|
||||
},
|
||||
"underlying_type": {
|
||||
"text": "The underlying type refers to the asset upon which the option is based. It can be an 'ETF' (Exchange-Traded Fund), which tracks a basket of assets, or a 'Stock,' which represents shares of a single company."
|
||||
}
|
||||
|
||||
},
|
||||
"callVolume": {
|
||||
"text": "Call volume refers to the total number of call option contracts traded during a given period. It indicates the level of interest or activity in call options, which grant the holder the right to buy the underlying asset at a specified price before expiration."
|
||||
},
|
||||
"putVolume": {
|
||||
"text": "Put volume refers to the total number of put option contracts traded during a given period. It indicates the level of interest or activity in put options, which grant the holder the right to sell the underlying asset at a specified price before expiration."
|
||||
},
|
||||
"gexRatio": {
|
||||
"text": "The GEX ratio or Gamma Exposure ratio, measures the sensitivity of the options market to changes in the price of the underlying asset. It is calculated by comparing the net gamma exposure of call and put options, providing insight into potential price stability or volatility."
|
||||
},
|
||||
"ivRank": {
|
||||
"text": "Implied Volatility (IV) Rank, measures the current level of implied volatility relative to its range over a specific period. It is expressed as a percentage, helping traders assess whether options are relatively expensive or cheap compared to their historical levels."
|
||||
},
|
||||
"iv30d": {
|
||||
"text": "IV30d refers to the Implied Volatility over the past 30 days. It represents the market's expectations of the underlying asset's volatility over the next 30 days, as implied by the pricing of options, and is often used to gauge short-term market sentiment."
|
||||
},
|
||||
"totalOI": {
|
||||
"text": "Total Open Interest (OI), represents the total number of outstanding options contracts (both calls and puts) that have not been settled or exercised. It provides insight into the overall activity and liquidity in the options market for a particular asset."
|
||||
},
|
||||
"changeOI": {
|
||||
"text": "Change in Open Interest (Change OI) refers to the difference in the number of outstanding options contracts from one trading session to the next. A positive change indicates new positions are being opened, while a negative change suggests positions are being closed."
|
||||
},
|
||||
"netCallPrem": {
|
||||
"text": "Net Call Premium (Net Call Prem) represents the net amount of premium paid for call options, calculated by subtracting the premium received from the premium paid. It provides insight into market sentiment and the demand for call options on the underlying asset."
|
||||
},
|
||||
"netPutPrem": {
|
||||
"text": "Net Put Premium (Net Put Prem) represents the net amount of premium paid for put options, calculated by subtracting the premium received from the premium paid. It indicates the demand for put options and can signal bearish market sentiment for the underlying asset."
|
||||
},
|
||||
"pcRatio": {
|
||||
"text": "The Put/Call Ratio (P/C Ratio) measures the volume of put options traded relative to call options. A higher ratio suggests more bearish sentiment, while a lower ratio indicates more bullish sentiment, helping traders gauge market outlook and investor sentiment."
|
||||
},
|
||||
|
||||
}
|
||||
|
||||
|
||||
125
app/cron_options_stats.py
Normal file
125
app/cron_options_stats.py
Normal file
@ -0,0 +1,125 @@
|
||||
import requests
|
||||
import orjson
|
||||
from dotenv import load_dotenv
|
||||
import os
|
||||
import sqlite3
|
||||
import time
|
||||
load_dotenv()

# API key for the Unusual Whales screener endpoint; must be set in .env.
api_key = os.getenv('UNUSUAL_WHALES_API_KEY')

# Connect to the databases
con = sqlite3.connect('stocks.db')
etf_con = sqlite3.connect('etf.db')
cursor = con.cursor()
# WAL mode lets concurrent readers proceed while another process writes.
cursor.execute("PRAGMA journal_mode = wal")
# Exclude symbols containing '.' (e.g. class shares like BRK.A) from the universe.
cursor.execute("SELECT DISTINCT symbol FROM stocks WHERE symbol NOT LIKE '%.%'")
stocks_symbols = [row[0] for row in cursor.fetchall()]

etf_cursor = etf_con.cursor()
etf_cursor.execute("PRAGMA journal_mode = wal")
etf_cursor.execute("SELECT DISTINCT symbol FROM etfs")
etf_symbols = [row[0] for row in etf_cursor.fetchall()]

# Symbols are fully materialized above, so the connections can be closed early.
con.close()
etf_con.close()

# Combine the lists of stock and ETF symbols
total_symbols = stocks_symbols + etf_symbols
|
||||
|
||||
|
||||
|
||||
def save_json(data, symbol):
    """Serialize *data* with orjson and write it to json/options-stats/companies/<symbol>.json."""
    target_dir = "json/options-stats/companies"
    os.makedirs(target_dir, exist_ok=True)  # create the folder tree on first run
    # orjson.dumps returns bytes, hence the binary file mode.
    with open(f"{target_dir}/{symbol}.json", "wb") as out:
        out.write(orjson.dumps(data))
|
||||
|
||||
def safe_round(value):
    """Round *value* to 2 decimals when it is numeric; otherwise return it unchanged."""
    try:
        numeric = float(value)
    except (ValueError, TypeError):
        # Non-numeric input (None, ticker strings, ...) passes through untouched.
        return value
    return round(numeric, 2)
|
||||
|
||||
def calculate_neutral_premium(data_item):
    """Return the neutral premium for one screener row, rounded to 2 decimals.

    Neutral premium = observed premium (calls + puts) minus directional
    premium (bearish + bullish); the remainder is flow classified as neither.
    """
    observed = float(data_item['call_premium']) + float(data_item['put_premium'])
    directional = float(data_item['bearish_premium']) + float(data_item['bullish_premium'])
    neutral = observed - directional
    # Inlined safe_round: the float subtraction above always yields a float,
    # so the fallback branch is defensive only.
    try:
        return round(float(neutral), 2)
    except (ValueError, TypeError):
        return neutral
|
||||
|
||||
|
||||
def prepare_data(data):
    """Post-process screener rows and persist one JSON file per ticker.

    For each row: round every numeric field, attach a
    [bearish, neutral, bullish] ``premium_ratio`` list, derive the
    open-interest change versus the previous session, and write the result
    to json/options-stats/companies/<ticker>.json via save_json().
    """
    for item in data:
        symbol = item['ticker']
        bearish_premium = float(item['bearish_premium'])
        bullish_premium = float(item['bullish_premium'])
        neutral_premium = calculate_neutral_premium(item)

        # Copy every field (rounded) except the bulky 'in_out_flow' payload.
        new_item = {
            key: safe_round(value)
            for key, value in item.items()
            if key != 'in_out_flow'
        }

        new_item['premium_ratio'] = [
            safe_round(bearish_premium),
            neutral_premium,
            safe_round(bullish_premium),
        ]

        # Change in OI = today's total minus yesterday's call+put OI.
        # Narrowed from a bare `except:` — only a missing key or a
        # non-numeric field should null the value; anything else is a bug
        # that must surface instead of being silently swallowed.
        try:
            prev_oi = new_item.get('prev_call_oi', 0) + new_item.get('prev_put_oi', 0)
            new_item['open_interest_change'] = new_item['total_open_interest'] - prev_oi
        except (KeyError, TypeError):
            new_item['open_interest_change'] = None

        if len(new_item) > 0:  # always truthy here ('premium_ratio' was just added); kept as a cheap guard
            save_json(new_item, symbol)
|
||||
|
||||
def chunk_symbols(symbols, chunk_size=50):
    """Yield successive slices of *symbols*, each holding at most *chunk_size* entries."""
    start = 0
    while start < len(symbols):
        yield symbols[start:start + chunk_size]
        start += chunk_size
|
||||
|
||||
|
||||
# Fetch screener stats in batches of 50 tickers per request, pausing every
# 50 requests — presumably to stay under the API's rate limit (TODO confirm).
chunks = chunk_symbols(total_symbols)
chunk_counter = 0  # To keep track of how many chunks have been processed

for chunk in chunks:
    try:
        chunk_str = ",".join(chunk)
        print(chunk_str)

        url = "https://api.unusualwhales.com/api/screener/stocks"
        querystring = {"ticker": chunk_str}

        headers = {
            "Accept": "application/json, text/plain",
            "Authorization": api_key
        }

        response = requests.get(url, headers=headers, params=querystring)
        if response.status_code == 200:
            data = response.json()['data']
            prepare_data(data)
            print(f"Chunk processed. Number of results: {len(data)}")
        else:
            print(f"Error fetching data for chunk {chunk_str}: {response.status_code}")

        # Increment the chunk counter
        chunk_counter += 1

        # If 50 chunks have been processed, sleep for 60 seconds
        if chunk_counter == 50:
            print("Processed 50 chunks. Sleeping for 60 seconds...")
            time.sleep(60)  # Sleep for 60 seconds
            chunk_counter = 0  # Reset the chunk counter after sleep

    # NOTE(review): if ",".join(chunk) itself raised, chunk_str would be
    # unbound here (or stale from the previous iteration) — confirm acceptable.
    except Exception as e:
        print(f"Error processing chunk {chunk_str}: {e}")
|
||||
@ -901,8 +901,6 @@ async def get_stock_screener(con):
|
||||
item['payoutRatio'] = None
|
||||
item['dividendGrowth'] = None
|
||||
|
||||
|
||||
|
||||
try:
|
||||
with open(f"json/share-statistics/{symbol}.json", 'r') as file:
|
||||
res = orjson.loads(file.read())
|
||||
@ -916,6 +914,31 @@ async def get_stock_screener(con):
|
||||
item['shortOutStandingPercent'] = None
|
||||
item['shortFloatPercent'] = None
|
||||
|
||||
try:
|
||||
with open(f"json/options-stats/companies/{symbol}.json", "r") as file:
|
||||
res = orjson.loads(file.read())
|
||||
item['gexRatio'] = res['gex_ratio']
|
||||
item['ivRank'] = res['iv_rank']
|
||||
item['iv30d'] = res['iv30d']
|
||||
item['totalOI'] = res['total_open_interest']
|
||||
item['changeOI'] = res['open_interest_change']
|
||||
item['netCallPrem'] = res['net_call_premium']
|
||||
item['netPutPrem'] = res['net_put_premium']
|
||||
item['callVolume'] = res['call_volume']
|
||||
item['putVolume'] = res['put_volume']
|
||||
item['pcRatio'] = res['put_call_ratio']
|
||||
except:
|
||||
item['gexRatio'] = None
|
||||
item['ivRank'] = None
|
||||
item['iv30d'] = None
|
||||
item['totalOI'] = None
|
||||
item['changeOI'] = None
|
||||
item['netCallPrem'] = None
|
||||
item['netPutPrem'] = None
|
||||
item['callVolume'] = None
|
||||
item['putVolume'] = None
|
||||
item['pcRatio'] = None
|
||||
|
||||
|
||||
try:
|
||||
with open(f"json/analyst-estimate/{symbol}.json", 'r') as file:
|
||||
@ -952,6 +975,8 @@ async def get_stock_screener(con):
|
||||
item['netIncomeGrowthYears'] = None
|
||||
item['grossProfitGrowthYears'] = None
|
||||
|
||||
|
||||
|
||||
for item in stock_screener_data:
|
||||
for key, value in item.items():
|
||||
if isinstance(value, float):
|
||||
|
||||
119
app/test.py
119
app/test.py
@ -1,14 +1,16 @@
|
||||
import requests
|
||||
import orjson
|
||||
from dotenv import load_dotenv
|
||||
import os
|
||||
import sqlite3
|
||||
|
||||
import time
|
||||
load_dotenv()

# API key for the Unusual Whales screener endpoint; must be set in .env.
api_key = os.getenv('UNUSUAL_WHALES_API_KEY')

# Connect to the databases
con = sqlite3.connect('stocks.db')
etf_con = sqlite3.connect('etf.db')

cursor = con.cursor()
# WAL mode lets concurrent readers proceed while another process writes.
cursor.execute("PRAGMA journal_mode = wal")
# Exclude symbols containing '.' (e.g. class shares like BRK.A).
cursor.execute("SELECT DISTINCT symbol FROM stocks WHERE symbol NOT LIKE '%.%'")

con.close()
etf_con.close()

# One-shot experiment: probe how many tickers fit in a single request.
total_symbols = stocks_symbols[:1000] #+ etf_symbols
total_symbols = ",".join(total_symbols)
print(total_symbols)
url = "https://api.unusualwhales.com/api/screener/stocks"

querystring = {"ticker": total_symbols}

headers = {
    "Accept": "application/json, text/plain",
    "Authorization": api_key
}

response = requests.get(url, headers=headers, params=querystring)

# NOTE(review): no status-code check here — a non-200 response makes
# .json()['data'] raise. Fine for a scratch script; confirm before reuse.
data = response.json()['data']
# Combine the lists of stock and ETF symbols
# NOTE(review): total_symbols is rebound here from the joined string above
# to the full list used by the chunked loop below — intentional but confusing.
total_symbols = stocks_symbols + etf_symbols


print(len(data))
print(data[-1]['ticker'])
|
||||
|
||||
def save_json(data, symbol):
    """Serialize *data* with orjson and write it to json/options-stats/companies/<symbol>.json."""
    target_dir = "json/options-stats/companies"
    os.makedirs(target_dir, exist_ok=True)  # create the folder tree on first run
    # orjson.dumps returns bytes, hence the binary file mode.
    with open(f"{target_dir}/{symbol}.json", "wb") as out:
        out.write(orjson.dumps(data))
|
||||
|
||||
def safe_round(value):
    """Round *value* to 2 decimals when it is numeric; otherwise return it unchanged."""
    try:
        numeric = float(value)
    except (ValueError, TypeError):
        # Non-numeric input (None, ticker strings, ...) passes through untouched.
        return value
    return round(numeric, 2)
|
||||
|
||||
def calculate_neutral_premium(data_item):
    """Return the neutral premium for one screener row, rounded to 2 decimals.

    Neutral premium = observed premium (calls + puts) minus directional
    premium (bearish + bullish).
    """
    observed = float(data_item['call_premium']) + float(data_item['put_premium'])
    directional = float(data_item['bearish_premium']) + float(data_item['bullish_premium'])
    neutral = observed - directional
    # Inlined safe_round: the float subtraction always yields a float,
    # so the fallback branch is defensive only.
    try:
        return round(float(neutral), 2)
    except (ValueError, TypeError):
        return neutral
|
||||
|
||||
|
||||
def prepare_data(data):
    """Post-process screener rows and persist one JSON file per ticker.

    For each row: round every numeric field, attach a
    [bearish, neutral, bullish] ``premium_ratio`` list, derive the
    open-interest change versus the previous session, and write the result
    to json/options-stats/companies/<ticker>.json via save_json().
    """
    for item in data:
        symbol = item['ticker']
        bearish_premium = float(item['bearish_premium'])
        bullish_premium = float(item['bullish_premium'])
        neutral_premium = calculate_neutral_premium(item)

        # Copy every field (rounded) except the bulky 'in_out_flow' payload.
        new_item = {
            key: safe_round(value)
            for key, value in item.items()
            if key != 'in_out_flow'
        }

        new_item['premium_ratio'] = [
            safe_round(bearish_premium),
            neutral_premium,
            safe_round(bullish_premium),
        ]

        # Change in OI = today's total minus yesterday's call+put OI.
        # Narrowed from a bare `except:` — only a missing key or a
        # non-numeric field should null the value; anything else is a bug
        # that must surface instead of being silently swallowed.
        try:
            prev_oi = new_item.get('prev_call_oi', 0) + new_item.get('prev_put_oi', 0)
            new_item['open_interest_change'] = new_item['total_open_interest'] - prev_oi
        except (KeyError, TypeError):
            new_item['open_interest_change'] = None

        if len(new_item) > 0:  # always truthy here ('premium_ratio' was just added); kept as a cheap guard
            save_json(new_item, symbol)
|
||||
|
||||
def chunk_symbols(symbols, chunk_size=50):
    """Yield successive slices of *symbols*, each holding at most *chunk_size* entries."""
    start = 0
    while start < len(symbols):
        yield symbols[start:start + chunk_size]
        start += chunk_size
|
||||
|
||||
|
||||
# Fetch screener stats in batches of 50 tickers per request, pausing every
# 50 requests — presumably to stay under the API's rate limit (TODO confirm).
chunks = chunk_symbols(total_symbols)
chunk_counter = 0  # To keep track of how many chunks have been processed

for chunk in chunks:
    try:
        chunk_str = ",".join(chunk)
        print(chunk_str)

        url = "https://api.unusualwhales.com/api/screener/stocks"
        querystring = {"ticker": chunk_str}

        headers = {
            "Accept": "application/json, text/plain",
            "Authorization": api_key
        }

        response = requests.get(url, headers=headers, params=querystring)
        if response.status_code == 200:
            data = response.json()['data']
            prepare_data(data)
            print(f"Chunk processed. Number of results: {len(data)}")
        else:
            print(f"Error fetching data for chunk {chunk_str}: {response.status_code}")

        # Increment the chunk counter
        chunk_counter += 1

        # If 50 chunks have been processed, sleep for 60 seconds
        if chunk_counter == 50:
            print("Processed 50 chunks. Sleeping for 60 seconds...")
            time.sleep(60)  # Sleep for 60 seconds
            chunk_counter = 0  # Reset the chunk counter after sleep

    # NOTE(review): if ",".join(chunk) itself raised, chunk_str would be
    # unbound here (or stale from the previous iteration) — confirm acceptable.
    except Exception as e:
        print(f"Error processing chunk {chunk_str}: {e}")
|
||||
|
||||
Loading…
x
Reference in New Issue
Block a user