update implied volatility cron job

MuslemRahimi 2024-07-17 23:41:55 +02:00
parent 3857597791
commit 384cbe9375
2 changed files with 18 additions and 9 deletions

View File

@@ -34,10 +34,11 @@ def filter_past_six_months(data):
     sorted_data = sorted(filtered_data, key=lambda x: datetime.strptime(x['date'], '%Y-%m-%d'))
     return sorted_data
-async def get_data(ticker_list):
-    ticker_str = ','.join(ticker_list)
+async def get_data(ticker):
+    #ticker_str = ','.join(ticker_list)
     async with aiohttp.ClientSession() as session:
-        url = url = f"https://data.nasdaq.com/api/v3/datatables/ORATS/OPT?date={date_str}&ticker={ticker_str}&api_key={api_key}"
+        url = url = f"https://data.nasdaq.com/api/v3/datatables/ORATS/OPT?date={date_str}&ticker={ticker}&api_key={api_key}"
         async with session.get(url) as response:
             if response.status == 200:
                 res = await response.json()
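
The hunk ends before get_data returns anything, but run() below unpacks data, columns = await get_data(ticker) and later reads columns[i]["name"]. A plausible completion, assuming the usual Nasdaq Data Link datatables response layout ({"datatable": {"data": [...], "columns": [...]}}) rather than anything shown in this commit, would be:

async def get_data(ticker):
    # Sketch only; the datatable/data/columns keys are assumed, not taken from the diff.
    async with aiohttp.ClientSession() as session:
        url = f"https://data.nasdaq.com/api/v3/datatables/ORATS/OPT?date={date_str}&ticker={ticker}&api_key={api_key}"
        async with session.get(url) as response:
            if response.status == 200:
                res = await response.json()
                table = res.get('datatable', {})
                return table.get('data', []), table.get('columns', [])
            # Fall back to empty results so the caller's loop can keep going.
            return [], []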
@@ -54,7 +55,7 @@ async def run():
     cursor = con.cursor()
     cursor.execute("PRAGMA journal_mode = wal")
-    cursor.execute("SELECT DISTINCT symbol FROM stocks")
+    cursor.execute("SELECT DISTINCT symbol FROM stocks WHERE marketcap >=10E6 AND symbol NOT LIKE '%.%'")
     stocks_symbols = [row[0] for row in cursor.fetchall()]
     etf_cursor = etf_con.cursor()
@@ -69,14 +70,21 @@ async def run():
-    chunk_size = len(total_symbols) // 70 # Divide the list into N chunks
-    chunks = [total_symbols[i:i + chunk_size] for i in range(0, len(total_symbols), chunk_size)]
-    for chunk in tqdm(chunks):
-        data, columns = await get_data(chunk)
-        transformed_data = []
+    for ticker in tqdm(total_symbols):
+        data, columns = await get_data(ticker)
+        filtered_data = []
         for element in tqdm(data):
             # Assuming the number of columns matches the length of each element in `data`
-            transformed_data.append({columns[i]["name"]: element[i] for i in range(len(columns))})
+            filtered_data.append({columns[i]["name"]: element[i] for i in range(len(columns))})
+        try:
+            sorted_data = sorted(filtered_data, key=lambda x: datetime.strptime(x['date'], '%Y-%m-%d'))
+            if len(sorted_data) > 0:
+                await save_json(ticker, sorted_data)
+        except Exception as e:
+            print(e)
+        '''
         for symbol in chunk:
             try:
                 filtered_data = [item for item in transformed_data if symbol == item['ticker']]
@@ -85,6 +93,7 @@ async def run():
                 await save_json(symbol, sorted_data)
             except Exception as e:
                 print(e)
+        '''
     con.close()
     etf_con.close()
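
Read as a whole, run() after this commit fetches and stores one ticker at a time instead of batching symbols into chunks. The sketch below consolidates the added lines; save_json, total_symbols, date_str and api_key are assumed to be defined elsewhere in the file:

# Consolidated view of the new per-ticker loop (sketch, not a verbatim excerpt).
for ticker in tqdm(total_symbols):
    data, columns = await get_data(ticker)
    filtered_data = []
    for element in tqdm(data):
        # Turn each row into a dict keyed by column name.
        filtered_data.append({columns[i]["name"]: element[i] for i in range(len(columns))})
    try:
        # Sort chronologically and only persist tickers that actually returned rows.
        sorted_data = sorted(filtered_data, key=lambda x: datetime.strptime(x['date'], '%Y-%m-%d'))
        if len(sorted_data) > 0:
            await save_json(ticker, sorted_data)
    except Exception as e:
        print(e)

One request per symbol is slower than the old chunked query, but it keeps memory use flat and lets a single bad symbol fail on its own instead of taking the whole chunk with it.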

View File

@@ -447,7 +447,7 @@ schedule.every().day.at("10:30").do(run_threaded, run_sec_filings).tag('sec_fili
 schedule.every().day.at("11:00").do(run_threaded, run_executive).tag('executive_job')
 schedule.every().day.at("03:00").do(run_threaded, run_retail_volume).tag('retail_volume_job')
 schedule.every().day.at("11:45").do(run_threaded, run_clinical_trial).tag('clinical_trial_job')
-schedule.every().day.at("02:00").do(run_threaded, run_implied_volatility).tag('implied_volatility_job')
+schedule.every().day.at("05:00").do(run_threaded, run_implied_volatility).tag('implied_volatility_job')
 schedule.every().day.at("13:30").do(run_threaded, run_stockdeck).tag('stockdeck_job')
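
For context, the calls above follow the standard schedule-library pattern where run_threaded pushes each job onto its own thread so one long-running job does not delay the others. A minimal, self-contained sketch of that pattern, with run_threaded and the run_implied_volatility body assumed rather than copied from this repo:

import threading
import time
import schedule

def run_threaded(job_func):
    # Run the job in a background thread so the scheduler loop stays responsive.
    threading.Thread(target=job_func).start()

def run_implied_volatility():
    pass  # placeholder; the actual cron job is defined elsewhere in this repo

# After this commit the implied-volatility job fires daily at 05:00 instead of 02:00.
schedule.every().day.at("05:00").do(run_threaded, run_implied_volatility).tag('implied_volatility_job')

while True:
    schedule.run_pending()
    time.sleep(1)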