update cron job

MuslemRahimi 2024-12-31 19:13:31 +01:00
parent ee69d1564b
commit ada99daa4f
3 changed files with 74 additions and 107 deletions

View File

@@ -3488,7 +3488,7 @@ async def get_fail_to_deliver(data:TickerData, api_key: str = Security(get_api_k
media_type="application/json",
headers={"Content-Encoding": "gzip"}
)
try:
with open(f"json/fail-to-deliver/companies/{ticker}.json", 'rb') as file:
res = orjson.loads(file.read())
@@ -3509,23 +3509,6 @@ async def get_fail_to_deliver(data:TickerData, api_key: str = Security(get_api_k
headers={"Content-Encoding": "gzip"}
)
@app.post("/borrowed-share")
async def get_borrowed_share(data:TickerData, api_key: str = Security(get_api_key)):
ticker = data.ticker.upper()
cache_key = f"borrowed-share-{ticker}"
cached_result = redis_client.get(cache_key)
if cached_result:
return orjson.loads(cached_result)
try:
with open(f"json/borrowed-share/companies/{ticker}.json", 'rb') as file:
res = orjson.loads(file.read())
except:
res = []
redis_client.set(cache_key, orjson.dumps(res))
redis_client.expire(cache_key, 3600*3600) # cache for 3600*3600 seconds (about 150 days)
return res
@app.post("/analyst-insight")
async def get_analyst_insight(data:TickerData, api_key: str = Security(get_api_key)):
@@ -3575,10 +3558,10 @@ async def get_clinical_trial(data:TickerData, api_key: str = Security(get_api_ke
headers={"Content-Encoding": "gzip"}
)
@app.post("/swap-ticker")
async def get_swap_data(data:TickerData, api_key: str = Security(get_api_key)):
@app.post("/hottest-contracts")
async def get_data(data:TickerData, api_key: str = Security(get_api_key)):
ticker = data.ticker.upper()
cache_key = f"swap-{ticker}"
cache_key = f"hottest-contracts-{ticker}"
cached_result = redis_client.get(cache_key)
if cached_result:
return StreamingResponse(
@@ -3588,7 +3571,7 @@ async def get_swap_data(data:TickerData, api_key: str = Security(get_api_key)):
)
try:
with open(f"json/swap/companies/{ticker}.json", 'rb') as file:
with open(f"json/hottest-contracts/companies/{ticker}.json", 'rb') as file:
res = orjson.loads(file.read())
except:
res = []
@@ -3597,7 +3580,7 @@ async def get_swap_data(data:TickerData, api_key: str = Security(get_api_key)):
compressed_data = gzip.compress(data)
redis_client.set(cache_key, compressed_data)
redis_client.expire(cache_key, 3600*3600)
redis_client.expire(cache_key, 60*10) # cache for 10 minutes
return StreamingResponse(
io.BytesIO(compressed_data),
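For context, a minimal client-side sketch of calling the renamed endpoint. The base URL, the API-key header name, and everything beyond the ticker field are assumptions; only the /hottest-contracts route and the gzip-compressed JSON response come from the diff above.

import gzip
import orjson
import requests

BASE_URL = "http://localhost:8000"  # assumed; the deployment URL is not part of this commit

resp = requests.post(
    f"{BASE_URL}/hottest-contracts",
    json={"ticker": "AAPL"},            # TickerData payload
    headers={"X-API-KEY": "your-key"},  # header name is a guess; the server only shows Security(get_api_key)
)
# The endpoint streams gzip-compressed JSON with Content-Encoding: gzip;
# requests usually decompresses transparently, so fall back to manual decoding only if needed.
try:
    contracts = resp.json()
except ValueError:
    contracts = orjson.loads(gzip.decompress(resp.content))
print(contracts[:3] if isinstance(contracts, list) else contracts)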

View File

@@ -231,10 +231,8 @@ def run_analyst_rating():
def run_market_moods():
week = datetime.today().weekday()
if week <= 4:
run_command(["python3", "cron_bull_bear_say.py"])
run_command(["python3", "cron_wiim.py"])
def run_db_schedule_job():
#update db daily
week = datetime.today().weekday()
@@ -248,12 +246,6 @@ def run_ownership_stats():
run_command(["python3", "cron_ownership_stats.py"])
def run_options_net_flow():
week = datetime.today().weekday()
if week <= 5:
run_command(["python3", "cron_options_net_flow.py"])
def run_options_gex():
week = datetime.today().weekday()
if week <= 5:
@@ -404,7 +396,6 @@ schedule.every(20).minutes.do(run_threaded, run_tracker).tag('tracker_job')
schedule.every(15).minutes.do(run_threaded, run_market_moods).tag('market_moods_job')
schedule.every(10).minutes.do(run_threaded, run_earnings).tag('earnings_job')
schedule.every(3).hours.do(run_threaded, run_options_net_flow).tag('options_net_flow_job')
#schedule.every(4).hours.do(run_threaded, run_share_statistics).tag('share_statistics_job')
schedule.every(2).hours.do(run_threaded, run_analyst_rating).tag('analyst_job')
@@ -420,8 +411,6 @@ schedule.every(10).seconds.do(run_threaded, run_dark_pool_flow).tag('dark_pool_f
schedule.every(2).minutes.do(run_threaded, run_dashboard).tag('dashboard_job')
schedule.every(10).seconds.do(run_threaded, run_if_not_running(run_cron_options_flow, 'options_flow_job')).tag('options_flow_job')
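The scheduler entries above rely on the repo's run_threaded and run_if_not_running helpers, whose definitions are outside this diff. A plausible minimal sketch of such helpers, assuming the common pattern of one thread per job plus a tag-based guard against overlapping runs (the real implementations may differ):

import threading

_running_tags = set()
_running_lock = threading.Lock()

def run_threaded(job_func):
    # Launch the scheduled job in its own daemon thread so the scheduler loop is not blocked.
    threading.Thread(target=job_func, daemon=True).start()

def run_if_not_running(job_func, tag):
    # Wrap a job so a new run is skipped while a run with the same tag is still active.
    def wrapper():
        with _running_lock:
            if tag in _running_tags:
                return
            _running_tags.add(tag)
        try:
            job_func()
        finally:
            with _running_lock:
                _running_tags.discard(tag)
    return wrapper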

View File

@@ -1,9 +1,13 @@
import requests
import orjson
import re
from datetime import datetime
from dotenv import load_dotenv
import os
import sqlite3
import time
from tqdm import tqdm
load_dotenv()
api_key = os.getenv('UNUSUAL_WHALES_API_KEY')
@@ -13,12 +17,12 @@ con = sqlite3.connect('stocks.db')
etf_con = sqlite3.connect('etf.db')
cursor = con.cursor()
cursor.execute("PRAGMA journal_mode = wal")
cursor.execute("SELECT DISTINCT symbol FROM stocks WHERE symbol NOT LIKE '%.%'")
cursor.execute("SELECT DISTINCT symbol FROM stocks WHERE symbol NOT LIKE '%.%' AND marketCap > 1E9")
stocks_symbols = [row[0] for row in cursor.fetchall()]
etf_cursor = etf_con.cursor()
etf_cursor.execute("PRAGMA journal_mode = wal")
etf_cursor.execute("SELECT DISTINCT symbol FROM etfs")
etf_cursor.execute("SELECT DISTINCT symbol FROM etfs WHERE marketCap > 1E9")
etf_symbols = [row[0] for row in etf_cursor.fetchall()]
con.close()
@@ -27,99 +31,90 @@ etf_con.close()
# Combine the lists of stock and ETF symbols
total_symbols = stocks_symbols + etf_symbols
print(len(total_symbols))
def save_json(data, symbol):
directory = "json/options-stats/companies"
directory = "json/hottest-contracts/companies"
os.makedirs(directory, exist_ok=True) # Ensure the directory exists
with open(f"{directory}/{symbol}.json", 'wb') as file: # Use binary mode for orjson
file.write(orjson.dumps(data))
def safe_round(value):
"""Attempt to convert a value to float and round it. Return the original value if not possible."""
def parse_option_symbol(option_symbol):
# Define regex pattern to match the symbol structure
match = re.match(r"([A-Z]+)(\d{6})([CP])(\d+)", option_symbol)
if not match:
raise ValueError(f"Invalid option_symbol format: {option_symbol}")
ticker, expiration, option_type, strike_price = match.groups()
# Convert the YYMMDD expiration string to a date
date_expiration = datetime.strptime(expiration, "%y%m%d").date()
# Convert strike price to float
strike_price = int(strike_price) / 1000
return date_expiration, option_type, strike_price
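A quick worked example of the parser above; the contract symbol is made up for illustration:

# "AAPL241220C00230000" -> ticker AAPL, expiry 241220, call, strike 00230000 / 1000
date_expiration, option_type, strike_price = parse_option_symbol("AAPL241220C00230000")
print(date_expiration, option_type, strike_price)  # 2024-12-20 C 230.0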
def safe_round(value, decimals=2):
try:
return round(float(value), 2)
return round(float(value), decimals)
except (ValueError, TypeError):
return value
def calculate_neutral_premium(data_item):
"""Calculate the neutral premium for a data item."""
call_premium = float(data_item['call_premium'])
put_premium = float(data_item['put_premium'])
bearish_premium = float(data_item['bearish_premium'])
bullish_premium = float(data_item['bullish_premium'])
total_premiums = bearish_premium + bullish_premium
observed_premiums = call_premium + put_premium
neutral_premium = observed_premiums - total_premiums
return safe_round(neutral_premium)
def prepare_data(data, symbol):
highest_volume = sorted(data, key=lambda x: x['volume'], reverse=True)[:20]
res_list = []
for item in highest_volume:
if float(item['volume']) > 0:
# Parse option_symbol
date_expiration, option_type, strike_price = parse_option_symbol(item['option_symbol'])
# Round numerical and numerical-string values
new_item = {
key: safe_round(value) if isinstance(value, (int, float, str)) else value
for key, value in item.items()
}
# Add parsed fields
new_item['date_expiration'] = date_expiration
new_item['option_type'] = option_type
new_item['strike_price'] = strike_price
# Calculate open_interest_change
new_item['open_interest_change'] = safe_round(
new_item.get('open_interest', 0) - new_item.get('prev_oi', 0)
)
res_list.append(new_item)
if res_list:
save_json(res_list, symbol)
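A small self-contained sketch of the input the new prepare_data expects; the values are invented for illustration, and only the field names used by the code above (option_symbol, volume, open_interest, prev_oi) are taken from it:

sample = [{
    "option_symbol": "TSLA250117P00200000",
    "volume": 1500,
    "open_interest": 3200,
    "prev_oi": 2900,
    "avg_price": "4.3567",   # hypothetical extra field; safe_round turns it into 4.36
}]
prepare_data(sample, "TSLA")
# Writes json/hottest-contracts/companies/TSLA.json with one entry:
# option_type "P", strike_price 200.0, date_expiration 2025-01-17, open_interest_change 300.0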
def prepare_data(data):
for item in data:
symbol = item['ticker']
bearish_premium = float(item['bearish_premium'])
bullish_premium = float(item['bullish_premium'])
neutral_premium = calculate_neutral_premium(item)
new_item = {
key: safe_round(value)
for key, value in item.items()
if key != 'in_out_flow'
}
new_item['premium_ratio'] = [
safe_round(bearish_premium),
neutral_premium,
safe_round(bullish_premium)
]
try:
new_item['open_interest_change'] = new_item['total_open_interest'] - (new_item.get('prev_call_oi',0) + new_item.get('prev_put_oi',0))
except:
new_item['open_interest_change'] = None
if len(new_item) > 0:
save_json(new_item, symbol)
def chunk_symbols(symbols, chunk_size=50):
for i in range(0, len(symbols), chunk_size):
yield symbols[i:i + chunk_size]
chunks = chunk_symbols(total_symbols)
chunk_counter = 0 # To keep track of how many chunks have been processed
for chunk in chunks:
counter = 0
for symbol in tqdm(total_symbols):
try:
chunk_str = ",".join(chunk)
print(chunk_str)
url = "https://api.unusualwhales.com/api/screener/stocks"
querystring = {"ticker": chunk_str}
url = f"https://api.unusualwhales.com/api/stock/{symbol}/option-contracts"
headers = {
"Accept": "application/json, text/plain",
"Authorization": api_key
}
response = requests.get(url, headers=headers, params=querystring)
response = requests.get(url, headers=headers)
if response.status_code == 200:
data = response.json()['data']
prepare_data(data)
print(f"Chunk processed. Number of results: {len(data)}")
else:
print(f"Error fetching data for chunk {chunk_str}: {response.status_code}")
# Increment the chunk counter
chunk_counter += 1
prepare_data(data, symbol)
counter += 1
# If 50 chunks have been processed, sleep for 60 seconds
if chunk_counter == 50:
print("Processed 50 chunks. Sleeping for 60 seconds...")
time.sleep(60) # Sleep for 60 seconds
chunk_counter = 0 # Reset the chunk counter after sleep
if counter == 200:
print("Sleeping...")
time.sleep(45) # Sleep for 45 seconds
counter = 0
except Exception as e:
print(f"Error processing chunk {chunk_str}: {e}")
print(f"Error for {symbol}:{e}")