update cron list
This commit is contained in:
parent
24a02a98ca
commit
e609d05159
@ -681,6 +681,51 @@ async def get_most_shorted_stocks():
|
||||
file.write(orjson.dumps(res_list))
|
||||
|
||||
|
||||
async def get_hottest_contracts():
    """Build json/stocks-list/list/hottest-contracts.json.

    Selects all plain stock symbols (no '.' or '-' share-class tickers),
    keeps those whose open interest increased (changeOI > 0) per the
    screener data, enriches them with live quote data, ranks the top 100
    by changeOI descending, and writes the result with orjson.
    """
    with sqlite3.connect('stocks.db') as con:
        cursor = con.cursor()
        cursor.execute("PRAGMA journal_mode = wal")
        # Exclude share-class / preferred tickers such as BRK.A or BF-B.
        cursor.execute("SELECT DISTINCT symbol FROM stocks WHERE symbol NOT LIKE '%.%' AND symbol NOT LIKE '%-%'")
        symbols = [row[0] for row in cursor.fetchall()]

    res_list = []
    for symbol in symbols:
        try:
            change_oi = stock_screener_data_dict[symbol].get('changeOI', None)
            # Only symbols whose open interest went up are candidates;
            # a missing changeOI (None) is skipped explicitly instead of
            # raising a TypeError on the comparison.
            if change_oi is None or change_oi <= 0:
                continue

            quote_data = await get_quote_data(symbol)
            if not quote_data:
                continue

            price = quote_data.get('price', None)
            changes_percentage = quote_data.get('changesPercentage', None)
            # Guard against missing quote fields: round(None, 2) raises.
            if price is None or changes_percentage is None:
                continue

            price = round(price, 2)
            changes_percentage = round(changes_percentage, 2)
            name = quote_data.get('name')

            # Skip symbols with no intraday movement (likely stale quotes).
            if changes_percentage != 0:
                res_list.append({
                    'symbol': symbol,
                    'name': name,
                    'price': price,
                    'changesPercentage': changes_percentage,
                    'changeOI': change_oi,
                })
        except (KeyError, TypeError, ValueError):
            # Best-effort: a symbol absent from the screener dict or with
            # malformed quote data is dropped; don't abort the whole run.
            pass

    if res_list:
        # Rank by open-interest change (descending) and keep the top 100.
        # NOTE: the previous comment claimed market-cap ordering; the sort
        # key has always been changeOI.
        res_list = sorted(res_list, key=lambda x: x['changeOI'], reverse=True)[:100]

        # Assign a 1-based rank to each entry.
        for rank, item in enumerate(res_list, start=1):
            item['rank'] = rank

        # Persist the ranked list for the frontend.
        with open("json/stocks-list/list/hottest-contracts.json", 'wb') as file:
            file.write(orjson.dumps(res_list))
|
||||
|
||||
async def etf_bitcoin_list():
|
||||
try:
|
||||
with sqlite3.connect('etf.db') as etf_con:
|
||||
@ -915,6 +960,7 @@ async def run():
|
||||
get_most_employees(),
|
||||
get_most_ftd_shares(),
|
||||
get_most_shorted_stocks(),
|
||||
get_hottest_contracts(),
|
||||
)
|
||||
|
||||
|
||||
|
||||
120
app/cron_options_hottest_contracts.py
Normal file
120
app/cron_options_hottest_contracts.py
Normal file
@ -0,0 +1,120 @@
|
||||
import requests
|
||||
import orjson
|
||||
import re
|
||||
from datetime import datetime
|
||||
from dotenv import load_dotenv
|
||||
import os
|
||||
import sqlite3
|
||||
import time
|
||||
from tqdm import tqdm
|
||||
|
||||
# Load environment variables from the local .env file (API credentials).
load_dotenv()

# Unusual Whales API key, supplied via the environment.
api_key = os.getenv('UNUSUAL_WHALES_API_KEY')

# Connect to the databases
con = sqlite3.connect('stocks.db')
etf_con = sqlite3.connect('etf.db')
cursor = con.cursor()
cursor.execute("PRAGMA journal_mode = wal")
#cursor.execute("SELECT DISTINCT symbol FROM stocks WHERE symbol NOT LIKE '%.%' AND marketCap > 1E9")
# Exclude symbols containing '.' (share-class tickers such as BRK.A).
cursor.execute("SELECT DISTINCT symbol FROM stocks WHERE symbol NOT LIKE '%.%'")
stocks_symbols = [row[0] for row in cursor.fetchall()]

etf_cursor = etf_con.cursor()
etf_cursor.execute("PRAGMA journal_mode = wal")
#etf_cursor.execute("SELECT DISTINCT symbol FROM etfs WHERE marketCap > 1E9")
etf_cursor.execute("SELECT DISTINCT symbol FROM etfs")
etf_symbols = [row[0] for row in etf_cursor.fetchall()]

# Symbol lists are materialized above, so the connections can be closed early.
con.close()
etf_con.close()

# Combine the lists of stock and ETF symbols
total_symbols = stocks_symbols + etf_symbols

# Progress hint: how many symbols the fetch loop below will process.
print(len(total_symbols))
|
||||
|
||||
def save_json(data, symbol, directory="json/hottest-contracts/companies"):
    """Serialize *data* with orjson and write it to <directory>/<symbol>.json."""
    # Make sure the target directory tree exists before writing.
    os.makedirs(directory, exist_ok=True)
    target_path = f"{directory}/{symbol}.json"
    payload = orjson.dumps(data)
    # Binary mode: orjson.dumps returns bytes, not str.
    with open(target_path, 'wb') as out:
        out.write(payload)
|
||||
|
||||
|
||||
def parse_option_symbol(option_symbol):
    """Decode an OCC-style option symbol into its contract components.

    A symbol like ``AAPL240119C00150000`` is split into the expiration
    date (YYMMDD), the option type ('C' or 'P'), and the strike price
    (stored in thousandths of a dollar).

    Returns:
        tuple: (date_expiration, option_type, strike_price)

    Raises:
        ValueError: if *option_symbol* does not match the expected format.
    """
    pattern = r"([A-Z]+)(\d{6})([CP])(\d+)"
    parsed = re.match(pattern, option_symbol)
    if parsed is None:
        raise ValueError(f"Invalid option_symbol format: {option_symbol}")

    _ticker, raw_expiration, option_type, raw_strike = parsed.groups()

    # Expiration is encoded as two-digit year, month, day.
    date_expiration = datetime.strptime(raw_expiration, "%y%m%d").date()

    # Strike is encoded as an integer number of thousandths of a dollar.
    strike_price = int(raw_strike) / 1000

    return date_expiration, option_type, strike_price
|
||||
|
||||
def safe_round(value, decimals=2):
    """Round *value* to *decimals* places if it is numeric (or a numeric
    string); otherwise return it unchanged (e.g. None, 'N/A')."""
    try:
        numeric = float(value)
    except (ValueError, TypeError):
        # Not interpretable as a number — pass the original through.
        return value
    return round(numeric, decimals)
|
||||
|
||||
|
||||
def prepare_data(data, symbol):
    """Normalize raw option-contract rows for *symbol* and persist them.

    Keeps only contracts that traded (volume > 0), rounds scalar fields,
    attaches the expiration/type/strike parsed from the option symbol,
    computes the session-over-session open-interest change, and writes
    the surviving rows via save_json. Writes nothing when no row qualifies.
    """
    res_list = []
    for item in data:
        # Drop contracts with no trading volume.
        if not float(item['volume']) > 0:
            continue

        # Decode the contract identity from its option symbol.
        date_expiration, option_type, strike_price = parse_option_symbol(item['option_symbol'])

        # Round every scalar value; safe_round leaves non-numeric
        # values (including non-numeric strings) untouched.
        new_item = {}
        for key, value in item.items():
            if isinstance(value, (int, float, str)):
                new_item[key] = safe_round(value)
            else:
                new_item[key] = value

        # Attach the parsed contract fields.
        new_item['date_expiration'] = date_expiration
        new_item['option_type'] = option_type
        new_item['strike_price'] = strike_price

        # Net change in open interest versus the previous session.
        current_oi = new_item.get('open_interest', 0)
        previous_oi = new_item.get('prev_oi', 0)
        new_item['open_interest_change'] = safe_round(current_oi - previous_oi)

        res_list.append(new_item)

    if res_list:
        save_json(res_list, symbol, "json/hottest-contracts/companies")
|
||||
|
||||
|
||||
# Fetch the option-contract feed for every symbol, pausing periodically
# to stay under the Unusual Whales API rate limit.
counter = 0
for symbol in tqdm(total_symbols):
    try:

        url = f"https://api.unusualwhales.com/api/stock/{symbol}/option-contracts"

        headers = {
            "Accept": "application/json, text/plain",
            "Authorization": api_key
        }

        response = requests.get(url, headers=headers)
        if response.status_code == 200:
            data = response.json()['data']
            prepare_data(data, symbol)
            counter +=1
            # After every 100 successful requests, sleep for 30 seconds
            # to avoid hitting the API rate limit.
            if counter == 100:
                print("Sleeping...")
                time.sleep(30)  # Sleep for 30 seconds
                counter = 0

    except Exception as e:
        # Best-effort batch job: log the failure and continue with the
        # next symbol rather than aborting the whole run.
        print(f"Error for {symbol}:{e}")
|
||||
@ -4033,7 +4033,7 @@ async def get_statistics(data: FilterStockList, api_key: str = Security(get_api_
|
||||
category_type = 'sector'
|
||||
elif filter_list == 'reits':
|
||||
category_type = 'industry'
|
||||
elif filter_list in ['most-shorted-stocks','most-ftd-shares','highest-income-tax','most-employees','highest-revenue','top-rated-dividend-stocks','penny-stocks','overbought-stocks','oversold-stocks','faang','magnificent-seven','ca','cn','de','gb','il','in','jp','nyse','nasdaq','amex','dowjones','sp500','nasdaq100','all-stock-tickers']:
|
||||
elif filter_list in ['hottest-contracts','most-shorted-stocks','most-ftd-shares','highest-income-tax','most-employees','highest-revenue','top-rated-dividend-stocks','penny-stocks','overbought-stocks','oversold-stocks','faang','magnificent-seven','ca','cn','de','gb','il','in','jp','nyse','nasdaq','amex','dowjones','sp500','nasdaq100','all-stock-tickers']:
|
||||
category_type = 'stocks-list'
|
||||
elif filter_list in ['dividend-kings','dividend-aristocrats']:
|
||||
category_type = 'dividends'
|
||||
|
||||
@ -90,6 +90,7 @@ def run_options_stats():
|
||||
week = now.weekday()
|
||||
if week <= 5:
|
||||
run_command(["python3", "cron_options_stats.py"])
|
||||
run_command(["python3", "cron_options_historical_volume.py"])
|
||||
|
||||
def run_fda_calendar():
|
||||
now = datetime.now(ny_tz)
|
||||
@ -346,7 +347,7 @@ schedule.every().day.at("02:00").do(run_threaded, run_db_schedule_job)
|
||||
#schedule.every().day.at("05:00").do(run_threaded, run_options_gex).tag('options_gex_job')
|
||||
schedule.every().day.at("05:00").do(run_threaded, run_export_price).tag('export_price_job')
|
||||
|
||||
schedule.every().day.at("05:30").do(run_threaded, run_options_stats).tag('options_stats_job')
|
||||
schedule.every().day.at("03:30").do(run_threaded, run_options_stats).tag('options_stats_job')
|
||||
|
||||
|
||||
schedule.every().day.at("06:00").do(run_threaded, run_historical_price).tag('historical_job')
|
||||
|
||||
Loading…
x
Reference in New Issue
Block a user