add dividend cron job
This commit is contained in:
parent
f8b9280509
commit
531d5f4f62
@ -99,6 +99,7 @@ def replace_representative(office):
|
|||||||
'Thune, John (Senator)': 'John Thune',
|
'Thune, John (Senator)': 'John Thune',
|
||||||
'Rosen, Jacky (Senator)': 'Jacky Rosen',
|
'Rosen, Jacky (Senator)': 'Jacky Rosen',
|
||||||
'Britt, Katie (Senator)': 'Katie Britt',
|
'Britt, Katie (Senator)': 'Katie Britt',
|
||||||
|
'Britt, Katie': 'Katie Britt',
|
||||||
'James Costa': 'Jim Costa',
|
'James Costa': 'Jim Costa',
|
||||||
'Lummis, Cynthia (Senator)': 'Cynthia Lummis',
|
'Lummis, Cynthia (Senator)': 'Cynthia Lummis',
|
||||||
'Coons, Chris (Senator)': 'Chris Coons',
|
'Coons, Chris (Senator)': 'Chris Coons',
|
||||||
|
|||||||
40
app/main.py
40
app/main.py
@ -3319,20 +3319,58 @@ async def get_reddit_tracker(api_key: str = Security(get_api_key)):
|
|||||||
|
|
||||||
@app.get("/dividend-kings")
async def get_dividend_kings():
    """Serve the dividend-kings stock list as a gzip-compressed JSON stream.

    Returns the Redis-cached gzip payload when present; otherwise loads the
    pre-generated JSON file, caches the compressed bytes for 20 minutes, and
    streams them. Missing/corrupt files degrade to an empty list rather than
    a 500.
    """
    cache_key = "dividend-kings"

    # Fast path: serve the already-compressed payload straight from Redis.
    cached_result = redis_client.get(cache_key)
    if cached_result:
        return StreamingResponse(
            io.BytesIO(cached_result),
            media_type="application/json",
            headers={"Content-Encoding": "gzip"}
        )

    try:
        with open("json/stocks-list/dividend-kings.json", 'rb') as file:
            res = orjson.loads(file.read())
    except Exception:
        # Best-effort: the cron job may not have produced the file yet.
        res = []

    # NOTE: no early return here — the result must be compressed and cached
    # before responding, otherwise the Redis cache is never populated.
    data = orjson.dumps(res)
    compressed_data = gzip.compress(data)

    redis_client.set(cache_key, compressed_data)
    redis_client.expire(cache_key, 60*20)  # refresh at most every 20 minutes

    return StreamingResponse(
        io.BytesIO(compressed_data),
        media_type="application/json",
        headers={"Content-Encoding": "gzip"}
    )
||||||
@app.get("/dividend-aristocrats")
async def get_dividend_aristocrats():
    """Serve the dividend-aristocrats stock list as gzip-compressed JSON.

    Mirrors the /dividend-kings endpoint: Redis cache first, then the
    pre-generated JSON file, cached compressed for 20 minutes.

    Fixes: the handler was named ``get_dividend_kings``, silently rebinding
    the previous endpoint's module-level name; the dead ``return res`` after
    the StreamingResponse return is removed.
    """
    cache_key = "dividend-aristocrats"

    # Fast path: serve the already-compressed payload straight from Redis.
    cached_result = redis_client.get(cache_key)
    if cached_result:
        return StreamingResponse(
            io.BytesIO(cached_result),
            media_type="application/json",
            headers={"Content-Encoding": "gzip"}
        )

    try:
        with open("json/stocks-list/dividend-aristocrats.json", 'rb') as file:
            res = orjson.loads(file.read())
    except Exception:
        # Best-effort: the cron job may not have produced the file yet.
        res = []

    data = orjson.dumps(res)
    compressed_data = gzip.compress(data)

    redis_client.set(cache_key, compressed_data)
    redis_client.expire(cache_key, 60*20)  # refresh at most every 20 minutes

    return StreamingResponse(
        io.BytesIO(compressed_data),
        media_type="application/json",
        headers={"Content-Encoding": "gzip"}
    )
||||||
@app.get("/newsletter")
|
@app.get("/newsletter")
|
||||||
|
|||||||
@ -72,6 +72,22 @@ def run_congress_trading():
|
|||||||
]
|
]
|
||||||
run_command(command)
|
run_command(command)
|
||||||
|
|
||||||
|
def run_dividend_list():
    """Regenerate the dividend kings/aristocrats JSON lists and rsync them.

    Runs only Monday-Friday between 15:30 and 22:30 server-local time
    (assumed to track US market hours — TODO confirm server timezone).
    Afterwards the json/stocks-list directory is pushed to the us-east host.
    """
    weekday = datetime.today().weekday()
    now = datetime.now().time()
    window_open = datetime_time(15, 30)
    window_close = datetime_time(22, 30)

    # Guard clause: skip weekends and anything outside the active window.
    if weekday > 4 or not (window_open <= now < window_close):
        return

    for script in ("cron_dividend_kings.py", "cron_dividend_aristocrats.py"):
        run_command(["python3", script])

    # Mirror the freshly generated lists to the us-east app server.
    run_command([
        "sudo", "rsync", "-avz", "-e", "ssh",
        "/root/backend/app/json/stocks-list",
        f"root@{useast_ip_address}:/root/backend/app/json",
    ])
||||||
def run_cron_var():
|
def run_cron_var():
|
||||||
week = datetime.today().weekday()
|
week = datetime.today().weekday()
|
||||||
if week <= 5:
|
if week <= 5:
|
||||||
@ -486,6 +502,7 @@ schedule.every(1).minutes.do(run_threaded, run_cron_portfolio).tag('portfolio_jo
|
|||||||
schedule.every(5).minutes.do(run_threaded, run_cron_market_movers).tag('market_movers_job')
|
schedule.every(5).minutes.do(run_threaded, run_cron_market_movers).tag('market_movers_job')
|
||||||
schedule.every(2).minutes.do(run_threaded, run_dashboard).tag('dashboard_job')
|
schedule.every(2).minutes.do(run_threaded, run_dashboard).tag('dashboard_job')
|
||||||
|
|
||||||
|
schedule.every(30).minutes.do(run_threaded, run_dividend_list).tag('dividend_list_job')
|
||||||
schedule.every(15).minutes.do(run_threaded, run_cron_market_news).tag('market_news_job')
|
schedule.every(15).minutes.do(run_threaded, run_cron_market_news).tag('market_news_job')
|
||||||
schedule.every(10).minutes.do(run_threaded, run_one_day_price).tag('one_day_price_job')
|
schedule.every(10).minutes.do(run_threaded, run_one_day_price).tag('one_day_price_job')
|
||||||
schedule.every(15).minutes.do(run_threaded, run_cron_heatmap).tag('heatmap_job')
|
schedule.every(15).minutes.do(run_threaded, run_cron_heatmap).tag('heatmap_job')
|
||||||
|
|||||||
Loading…
x
Reference in New Issue
Block a user