update
parent 00dfe24cdb
commit 1682e32c48
@@ -31,11 +31,6 @@ async def save_json(data, symbol, dir_path):
        await file.write(ujson.dumps(data))


from datetime import datetime, timedelta
import pytz

ny_tz = pytz.timezone("America/New_York")


async def compute_rsi(price_history, time_period=14):
    df_price = pd.DataFrame(price_history)
    df_price['rsi'] = rsi(df_price['close'], window=time_period)
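For reference, the rsi(...) helper called in compute_rsi is not defined in this hunk; it presumably comes from a technical-analysis library. A minimal pandas-only sketch of Wilder's RSI, assuming that is the variant intended:

import pandas as pd

def wilder_rsi(close: pd.Series, window: int = 14) -> pd.Series:
    # Wilder's RSI: smoothed average gain over smoothed average loss,
    # using an exponential moving average with alpha = 1/window.
    delta = close.diff()
    gain = delta.clip(lower=0)
    loss = -delta.clip(upper=0)
    avg_gain = gain.ewm(alpha=1 / window, min_periods=window, adjust=False).mean()
    avg_loss = loss.ewm(alpha=1 / window, min_periods=window, adjust=False).mean()
    rs = avg_gain / avg_loss
    return 100 - (100 / (1 + rs))

Used the same way as above: df_price['rsi'] = wilder_rsi(df_price['close'], window=time_period).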
@@ -83,11 +83,10 @@ def prepare_data(data, symbol, directory_path, sort_by = "date"):

def get_overview_data():
    print("Starting to download overview data...")
    directory_path = "json/gex-dex/overview"
    total_symbols = get_tickers_from_directory(directory_path)
    if len(total_symbols) < 100:
        total_symbols = stocks_symbols+etf_symbols
-   total_symbols = stocks_symbols+etf_symbols

    counter = 0

    #Test mode
    #total_symbols = ['GME','SPY']
    for symbol in tqdm(total_symbols):
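get_tickers_from_directory is only called in this diff, never defined. One plausible implementation, purely an assumption based on the directory layout above, derives the symbol list from the cached JSON filenames; the < 100 check then falls back to the full stock/ETF universe when the cache is empty or thin:

import os

def get_tickers_from_directory(directory_path: str) -> list[str]:
    # Hypothetical helper: "json/gex-dex/overview/AAPL.json" -> "AAPL".
    try:
        return [name[:-5] for name in os.listdir(directory_path) if name.endswith(".json")]
    except FileNotFoundError:
        return []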
@@ -111,13 +110,10 @@ def get_overview_data():
            print(f"Error for {symbol}:{e}")


def get_strike_data():
    print("Starting to download strike data...")
    directory_path = "json/gex-dex/strike"
    total_symbols = get_tickers_from_directory(directory_path)
    if len(total_symbols) < 100:
        total_symbols = stocks_symbols+etf_symbols
-   total_symbols = stocks_symbols+etf_symbols

    counter = 0
    #Test mode
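The error print above closes a per-symbol try/except, so one failing ticker does not abort the whole run. A self-contained sketch of that loop shape; the fetcher argument and the asyncio.run call are assumptions, since only the except branch is visible here and save_json in this file is async:

import asyncio
from tqdm import tqdm

def download_symbols(total_symbols, fetch_fn, save_coro, directory_path):
    # fetch_fn(symbol) returns the payload; save_coro is an async saver like save_json.
    for symbol in tqdm(total_symbols):
        try:
            data = fetch_fn(symbol)
            asyncio.run(save_coro(data, symbol, directory_path))
        except Exception as e:
            print(f"Error for {symbol}:{e}")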
@@ -145,9 +141,7 @@ def get_strike_data():

def get_expiry_data():
    print("Starting to download expiry data...")
    directory_path = "json/gex-dex/expiry"
    total_symbols = get_tickers_from_directory(directory_path)
    if len(total_symbols) < 100:
        total_symbols = stocks_symbols+etf_symbols
-   total_symbols = stocks_symbols+etf_symbols

    counter = 0
    #total_symbols = ['GME','SPY']
@@ -175,8 +169,9 @@ def get_expiry_data():

if __name__ == '__main__':
    get_overview_data()
    '''
    time.sleep(60)
    get_strike_data()
    time.sleep(60)
    get_expiry_data()

    '''
@@ -44,6 +44,10 @@ def aggregate_data_by_date(symbol):
        "put_open_interest": 0,
        "call_premium": 0,
        "put_premium": 0,
+       "call_gex": 0,
+       "put_gex": 0,
+       "call_dex": 0,
+       "put_dex": 0,
        "iv": 0.0,  # Sum of implied volatilities
        "iv_count": 0,  # Count of entries for IV
    })
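The zeroed fields and the closing }) suggest a per-date accumulator. A sketch of that structure as a defaultdict; the defaultdict wrapper and the call_open_interest field are assumptions, while the rest mirrors the hunk, including the four gex/dex buckets this commit adds:

from collections import defaultdict

data_by_date = defaultdict(lambda: {
    "call_open_interest": 0,   # assumed counterpart of put_open_interest
    "put_open_interest": 0,
    "call_premium": 0,
    "put_premium": 0,
    "call_gex": 0,
    "put_gex": 0,
    "call_dex": 0,
    "put_dex": 0,
    "iv": 0.0,      # running sum of implied volatilities
    "iv_count": 0,  # number of IV observations, so a mean can be taken later
})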
@@ -77,7 +81,10 @@ def aggregate_data_by_date(symbol):
        open_interest = entry.get('open_interest', 0) or 0
        total_premium = entry.get('total_premium', 0) or 0
        implied_volatility = entry.get('implied_volatility', 0) or 0

+       gamma = entry.get('gamma',0) or 0
+       delta = entry.get('delta',0) or 0
+       gex = 100 * open_interest *

        daily_data = data_by_date[date]
        daily_data["date"] = date
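The gex assignment above is truncated in this view. A common convention for gamma and delta exposure, offered only as an assumption rather than the exact formula in this commit, scales the per-contract greeks by the 100-share multiplier and open interest, with GEX also scaled by spot for a 1% move:

def gamma_exposure(open_interest: float, gamma: float, spot: float) -> float:
    # Dollar gamma per 1% move: multiplier * OI * gamma * spot^2 * 0.01.
    return 100 * open_interest * gamma * spot ** 2 * 0.01

def delta_exposure(open_interest: float, delta: float, spot: float) -> float:
    # Dollar delta: multiplier * OI * delta * spot.
    return 100 * open_interest * delta * spot

Put deltas are negative, and many GEX models also negate the put-side gamma term, which is what would separate the put_* buckets from the call_* ones added above.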
@@ -2763,7 +2763,7 @@ async def get_options_stats_ticker(data:TickerData, api_key: str = Security(get_
        data = orjson.dumps(res)
        compressed_data = gzip.compress(data)
        redis_client.set(cache_key, compressed_data)
-       redis_client.expire(cache_key, 60*5)
+       redis_client.expire(cache_key, 60*1)
        return StreamingResponse(
            io.BytesIO(compressed_data),
            media_type="application/json",
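The cache write above sets the value and then the TTL in two calls; redis-py can do both at once via the ex argument. A sketch of the same serialize, compress, cache, and stream pattern; the Content-Encoding header is an assumption, not shown in this hunk:

import gzip
import io

import orjson
from fastapi.responses import StreamingResponse

def cache_and_stream(redis_client, cache_key: str, res, ttl_seconds: int = 60 * 1):
    # Serialize, gzip, cache with a TTL, then stream the compressed bytes back.
    compressed_data = gzip.compress(orjson.dumps(res))
    redis_client.set(cache_key, compressed_data, ex=ttl_seconds)  # set + expire in one call
    return StreamingResponse(
        io.BytesIO(compressed_data),
        media_type="application/json",
        headers={"Content-Encoding": "gzip"},
    )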
@@ -71,8 +71,8 @@ def run_market_flow():
    current_time = now.time()
    hour = now.hour
    if week <= 4 and 8 <= hour < 17:
        run_command(["python3", "cron_market_flow.py"])
        run_command(["python3", "cron_option_stats.py"])
        run_command(["python3", "cron_market_flow.py"])
        run_command(["python3", "cron_unusual_activity.py"])
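run_command and the week variable come from earlier in this file and are not shown. A self-contained sketch of the same weekday/market-hours gate; the subprocess wrapper and the New York timezone are assumptions based on the rest of the diff:

from datetime import datetime
import subprocess
import pytz

ny_tz = pytz.timezone("America/New_York")

def run_command(cmd):
    # Assumed helper: run one cron script and wait for it to finish.
    subprocess.run(cmd, check=False)

def run_market_flow():
    now = datetime.now(ny_tz)
    week = now.weekday()        # 0 = Monday ... 6 = Sunday
    hour = now.hour
    if week <= 4 and 8 <= hour < 17:   # weekdays, roughly US market hours (ET)
        run_command(["python3", "cron_market_flow.py"])
        run_command(["python3", "cron_option_stats.py"])
        run_command(["python3", "cron_unusual_activity.py"])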
@@ -358,7 +358,7 @@ def run_threaded(job_func):

# Schedule the job to run

schedule.every().day.at("01:00").do(run_threaded, run_db_schedule_job)
-schedule.every().day.at("01:00").do(run_threaded, run_options_jobs).tag('options_job')
+schedule.every().day.at("22:30").do(run_threaded, run_options_jobs).tag('options_job')
schedule.every().day.at("05:00").do(run_threaded, run_options_historical_flow).tag('options_historical_flow_job')
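run_threaded pairs the schedule library with plain threads so a long-running job does not block the scheduler loop. A minimal sketch of that wiring; the thread-launch body and the placeholder job are assumptions, since only the function name and the .do(...) calls appear in the diff:

import threading
import time

import schedule

def run_threaded(job_func):
    # Launch each job in its own daemon thread so schedule.run_pending() returns immediately.
    threading.Thread(target=job_func, daemon=True).start()

def run_options_jobs():
    print("running options jobs")  # placeholder body

schedule.every().day.at("22:30").do(run_threaded, run_options_jobs).tag('options_job')

while True:
    schedule.run_pending()
    time.sleep(1)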