bugfixing cron job

parent 0a8bbcb8d2
commit 52420a61cf
@@ -82,7 +82,7 @@ def get_data():
     sources = ['utp_delayed', 'cta_a_delayed', 'cta_b_delayed']
     page_size = 50000
     min_size = 2000
-    threshold = 1E6 # Define threshold
+    threshold = 1E5 # Define threshold

     for source in tqdm(sources):
         try:
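For orientation: 1E6 and 1E5 are ordinary float literals, so this change lowers the cutoff by a factor of ten (1,000,000 to 100,000). The hunk only shows the constant's definition; the sketch below is a hypothetical illustration of the kind of premium filter such a threshold typically feeds, not the code that actually consumes it.

# Hypothetical usage sketch; only the threshold value itself comes from the diff.
threshold = 1E5  # 100,000 after this commit (previously 1E6 = 1,000,000)

trades = [
    {"ticker": "AAPL", "premium": "250000.0"},  # made-up sample prints
    {"ticker": "MSFT", "premium": "40000.0"},
]

kept = [t for t in trades if float(t["premium"]) >= threshold]
print(kept)  # only the 250,000 print clears the lowered cutoff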
@@ -103,18 +103,11 @@ def run():
     etf_con.close()

     total_symbols = stocks_symbols+ etf_symbols
-    data = []
-    weekdays = get_last_N_weekdays()
-    for date in weekdays:
-        try:
-            with open(f"json/dark-pool/historical-flow/{date}.json", "r") as file:
-                raw_data = orjson.loads(file.read())
-                data +=raw_data
-        except:
-            pass
+    with open(f"json/dark-pool/feed/data.json", "r") as file:
+        raw_data = orjson.loads(file.read())
     for symbol in tqdm(total_symbols):
         try:
-            res_list = [item for item in data if isinstance(item, dict) and item['ticker'] == symbol]
+            res_list = [item for item in raw_data if isinstance(item, dict) and item['ticker'] == symbol]
             dark_pool_levels = analyze_dark_pool_levels(
                 trades=res_list,
                 size_threshold=0.8, # Look for levels with volume in top 20%
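The rewritten run() reads a single feed file instead of stitching together per-day historical-flow files, then pulls each symbol's trades out of that one list. Below is a self-contained sketch of that read-once, filter-per-symbol flow, assuming the feed file holds a JSON array of trade dicts with a 'ticker' key; the grouping step is an equivalent alternative to repeating the list comprehension for every symbol, not what the diff itself does.

# Sketch of the new data flow, with hypothetical file contents.
import orjson
from collections import defaultdict

with open("json/dark-pool/feed/data.json", "r") as file:
    raw_data = orjson.loads(file.read())

# Equivalent alternative to one list comprehension per symbol:
# index the feed by ticker once, then each symbol lookup is O(1).
by_ticker = defaultdict(list)
for item in raw_data:
    if isinstance(item, dict) and "ticker" in item:
        by_ticker[item["ticker"]].append(item)

res_list = by_ticker.get("AAPL", [])  # same trades the comprehension would return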
@@ -3022,6 +3022,8 @@ async def get_dark_pool_feed(api_key: str = Security(get_api_key)):
         with open(f"json/dark-pool/feed/data.json", "r") as file:
             res_list = orjson.loads(file.read())

+        res_list = [item for item in res_list if float(item['premium']) >= 1E6]
+
     except:
         res_list = []

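The added line keeps only prints whose premium is at least 1E6 before the endpoint returns the feed. A defensive variant of that filter is sketched below; the .get() fallback and per-item error handling are illustrative additions, not part of the commit, which relies on the surrounding try/except instead.

# Illustration only: a per-item guard around the premium filter added in this hunk.
def filter_large_prints(items, min_premium=1E6):
    kept = []
    for item in items:
        try:
            if float(item.get("premium", 0)) >= min_premium:
                kept.append(item)
        except (TypeError, ValueError):
            continue  # a malformed 'premium' drops just this item, not the whole response
    return kept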
@@ -81,7 +81,7 @@ def run_dark_pool_level():
     now = datetime.now(ny_tz)
     week = now.weekday()
     hour = now.hour
-    if week <= 4 and 8 <= hour < 20:
+    if week <= 4 and 8 <= hour <= 22:
         run_command(["python3", "cron_dark_pool_level.py"])

 def run_dark_pool_ticker():
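This widens the window in which the dark-pool-level cron is launched from 8:00-19:59 to 8:00-22:59 New York time, Monday through Friday. A standalone sketch of the gate follows; ZoneInfo("America/New_York") is an assumption about how ny_tz is defined elsewhere in the script, and print() stands in for run_command().

from datetime import datetime
from zoneinfo import ZoneInfo

ny_tz = ZoneInfo("America/New_York")  # assumed definition of ny_tz
now = datetime.now(ny_tz)

# weekday(): Monday == 0 ... Sunday == 6, so "<= 4" means Monday through Friday.
if now.weekday() <= 4 and 8 <= now.hour <= 22:
    print("would run: python3 cron_dark_pool_level.py")  # stands in for run_command()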
@@ -421,8 +421,8 @@ schedule.every(5).minutes.do(run_threaded, run_list).tag('stock_list_job')



-#schedule.every(30).minutes.do(run_threaded, run_dark_pool_level).tag('dark_pool_level_job')
-schedule.every(5).minutes.do(run_threaded, run_dark_pool_flow).tag('dark_pool_flow_job')
+schedule.every(30).minutes.do(run_threaded, run_dark_pool_level).tag('dark_pool_level_job')
+schedule.every(10).minutes.do(run_threaded, run_dark_pool_flow).tag('dark_pool_flow_job')

 schedule.every(2).minutes.do(run_threaded, run_dashboard).tag('dashboard_job')

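The commit re-enables the 30-minute dark-pool-level job and slows the dark-pool-flow job from every 5 to every 10 minutes. These entries use the schedule package; run_threaded is defined elsewhere in the script, and the sketch below assumes it follows the package's usual recipe of starting each job in its own thread so a slow job cannot stall the scheduler loop.

import threading
import time
import schedule

def run_threaded(job_func):
    # assumed to match the standard threaded-job recipe used with schedule
    threading.Thread(target=job_func, daemon=True).start()

def run_dark_pool_level():
    print("dark pool level job")  # placeholder body for illustration

def run_dark_pool_flow():
    print("dark pool flow job")  # placeholder body for illustration

schedule.every(30).minutes.do(run_threaded, run_dark_pool_level).tag('dark_pool_level_job')
schedule.every(10).minutes.do(run_threaded, run_dark_pool_flow).tag('dark_pool_flow_job')

while True:
    schedule.run_pending()
    time.sleep(1)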