add oi cronjob

This commit is contained in:
MuslemRahimi 2025-01-10 00:36:50 +01:00
parent 55311d6bbf
commit 3ec5ca581c
3 changed files with 178 additions and 0 deletions

app/cron_options_oi.py Normal file
@@ -0,0 +1,147 @@
import requests
import orjson
import re
from datetime import datetime
from dotenv import load_dotenv
import os
import sqlite3
import time
from tqdm import tqdm

load_dotenv()
api_key = os.getenv('UNUSUAL_WHALES_API_KEY')

headers = {"Accept": "application/json, text/plain", "Authorization": api_key}

# Connect to the databases
con = sqlite3.connect('stocks.db')
etf_con = sqlite3.connect('etf.db')

cursor = con.cursor()
cursor.execute("PRAGMA journal_mode = wal")
#cursor.execute("SELECT DISTINCT symbol FROM stocks WHERE symbol NOT LIKE '%.%' AND marketCap > 1E9")
cursor.execute("SELECT DISTINCT symbol FROM stocks WHERE symbol NOT LIKE '%.%'")
stocks_symbols = [row[0] for row in cursor.fetchall()]

etf_cursor = etf_con.cursor()
etf_cursor.execute("PRAGMA journal_mode = wal")
#etf_cursor.execute("SELECT DISTINCT symbol FROM etfs WHERE marketCap > 1E9")
etf_cursor.execute("SELECT DISTINCT symbol FROM etfs")
etf_symbols = [row[0] for row in etf_cursor.fetchall()]

con.close()
etf_con.close()


def get_tickers_from_directory(directory: str):
    try:
        # Ensure the directory exists
        if not os.path.exists(directory):
            raise FileNotFoundError(f"The directory '{directory}' does not exist.")
        # Derive tickers from the JSON filenames
        return [file.replace(".json", "") for file in os.listdir(directory) if file.endswith(".json")]
    except Exception as e:
        print(f"An error occurred: {e}")
        return []


def save_json(data, symbol, directory_path):
    os.makedirs(directory_path, exist_ok=True)  # Ensure the directory exists
    with open(f"{directory_path}/{symbol}.json", 'wb') as file:  # Use binary mode for orjson
        file.write(orjson.dumps(data))


def safe_round(value, decimals=2):
    try:
        return round(float(value), decimals)
    except (ValueError, TypeError):
        return value


def prepare_data(data, symbol, directory_path, sort_by="date"):
    # Drop charm/vanna fields; only the open-interest columns are kept
    data = [{k: v for k, v in item.items() if "charm" not in k and "vanna" not in k} for item in data]
    res_list = []
    for item in data:
        try:
            new_item = {
                key: safe_round(value) if isinstance(value, (int, float, str)) else value
                for key, value in item.items()
            }
            res_list.append(new_item)
        except Exception:
            pass

    if res_list:
        res_list = sorted(res_list, key=lambda x: x[sort_by], reverse=True)
        save_json(res_list, symbol, directory_path)


def get_strike_data():
    print("Starting to download strike data...")
    directory_path = "json/oi/strike"
    total_symbols = get_tickers_from_directory(directory_path)
    if len(total_symbols) < 100:
        total_symbols = stocks_symbols + etf_symbols

    counter = 0
    #Test mode
    #total_symbols = ['GME','SPY']
    for symbol in tqdm(total_symbols):
        try:
            url = f"https://api.unusualwhales.com/api/stock/{symbol}/oi-per-strike"
            response = requests.get(url, headers=headers)
            if response.status_code == 200:
                data = response.json()['data']
                prepare_data(data, symbol, directory_path, sort_by='strike')

            counter += 1
            # After 260 requests, sleep for 60 seconds to respect the API rate limit
            if counter == 260:
                print("Sleeping...")
                time.sleep(60)
                counter = 0
        except Exception as e:
            print(f"Error for {symbol}: {e}")


def get_expiry_data():
    print("Starting to download expiry data...")
    directory_path = "json/oi/expiry"
    total_symbols = get_tickers_from_directory(directory_path)
    if len(total_symbols) < 100:
        total_symbols = stocks_symbols + etf_symbols

    counter = 0
    for symbol in tqdm(total_symbols):
        try:
            url = f"https://api.unusualwhales.com/api/stock/{symbol}/oi-per-expiry"
            response = requests.get(url, headers=headers)
            if response.status_code == 200:
                data = response.json()['data']
                prepare_data(data, symbol, directory_path)

            counter += 1
            # After 260 requests, sleep for 60 seconds to respect the API rate limit
            if counter == 260:
                print("Sleeping...")
                time.sleep(60)
                counter = 0
        except Exception as e:
            print(f"Error for {symbol}: {e}")


if __name__ == '__main__':
    get_strike_data()
    #time.sleep(60)
    get_expiry_data()
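
For reference, a minimal sketch of how downstream code might read back one of the snapshots this job writes. The path and the sort order come from the script above; the per-row field names other than 'strike' depend on the Unusual Whales payload and are assumptions here.

import orjson

# Load the per-strike open-interest snapshot written by cron_options_oi.py.
with open("json/oi/strike/SPY.json", "rb") as f:
    rows = orjson.loads(f.read())

# prepare_data() sorted the rows descending by 'strike'; the other field
# names (e.g. 'call_oi', 'put_oi') are assumptions about the upstream payload.
for row in rows[:5]:
    print(row.get("strike"), row.get("call_oi"), row.get("put_oi"))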


@@ -2702,6 +2702,35 @@ async def get_data(data: ParamsData, api_key: str = Security(get_api_key)):
        headers={"Content-Encoding": "gzip"}
    )

@app.post("/options-oi")
async def get_data(data: ParamsData, api_key: str = Security(get_api_key)):
    ticker = data.params.upper()
    category = data.category.lower()
    cache_key = f"options-oi-{ticker}-{category}"
    cached_result = redis_client.get(cache_key)
    if cached_result:
        return StreamingResponse(
            io.BytesIO(cached_result),
            media_type="application/json",
            headers={"Content-Encoding": "gzip"})
    try:
        with open(f"json/oi/{category}/{ticker}.json", 'rb') as file:
            data = orjson.loads(file.read())
    except Exception:
        data = []

    data = orjson.dumps(data)
    compressed_data = gzip.compress(data)
    redis_client.set(cache_key, compressed_data)
    redis_client.expire(cache_key, 3600*60)  # 3600*60 s = 60 hours
    return StreamingResponse(
        io.BytesIO(compressed_data),
        media_type="application/json",
        headers={"Content-Encoding": "gzip"}
    )

@app.post("/options-stats-ticker")
async def get_options_stats_ticker(data: TickerData, api_key: str = Security(get_api_key)):
    ticker = data.ticker.upper()
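
For illustration, a hedged client call against the new /options-oi route. The route and the JSON body fields (params, category) come from the handler above; the base URL and the API-key header name are assumptions, since the auth scheme behind get_api_key is not shown in this diff.

import requests

resp = requests.post(
    "http://localhost:8000/options-oi",            # assumed base URL
    json={"params": "SPY", "category": "strike"},  # ParamsData fields read by the handler
    headers={"X-API-KEY": "your-key"},             # header name is an assumption
)
resp.raise_for_status()
rows = resp.json()  # requests decompresses the gzipped body transparently
print(f"{len(rows)} strike rows")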


@@ -93,6 +93,8 @@ def run_options_jobs():
    if week <= 5:
        run_command(["python3", "cron_options_gex_dex.py"])
        time.sleep(60)
        run_command(["python3", "cron_options_oi.py"])
        time.sleep(60)
        run_command(["python3", "cron_options_stats.py"])
        time.sleep(60)
        run_command(["python3", "cron_options_hottest_contracts.py"])