add endpoint && cron job ownership stats
parent c31c627503
commit 055d6b61fb
58  app/cron_ownership_stats.py  (new file)
@@ -0,0 +1,58 @@
+from datetime import datetime, timedelta
+import ujson
+import time
+import sqlite3
+import pandas as pd
+import numpy as np
+from collections import defaultdict
+import time
+import asyncio
+import aiohttp
+from faker import Faker
+from tqdm import tqdm
+
+from dotenv import load_dotenv
+import os
+load_dotenv()
+api_key = os.getenv('FMP_API_KEY')
+
+
+async def get_data(session, symbol):
+    url = f"https://financialmodelingprep.com/api/v4/institutional-ownership/symbol-ownership?symbol={symbol}&includeCurrentQuarter=true&apikey={api_key}"
+    async with session.get(url) as response:
+        data = await response.json()
+        if len(data) > 0:
+            await save_json(symbol, data[0])  # return only the latest ownership stats
+
+
+async def save_json(symbol, data):
+    with open(f"json/ownership-stats/{symbol}.json", 'w') as file:
+        ujson.dump(data, file)
+
+async def run():
+
+    con = sqlite3.connect('stocks.db')
+    cursor = con.cursor()
+    cursor.execute("PRAGMA journal_mode = wal")
+    cursor.execute("SELECT DISTINCT symbol FROM stocks WHERE symbol NOT LIKE '%.%'")
+    symbols = [row[0] for row in cursor.fetchall()]
+    con.close()
+
+
+    async with aiohttp.ClientSession() as session:
+        tasks = []
+        i = 0
+        for symbol in tqdm(symbols):
+            tasks.append(get_data(session, symbol))
+            i += 1
+            if i % 400 == 0:
+                await asyncio.gather(*tasks)
+                tasks = []
+                print('sleeping mode: ', i)
+                await asyncio.sleep(60)  # Pause for 60 seconds
+
+        if tasks:
+            await asyncio.gather(*tasks)
+
+loop = asyncio.get_event_loop()
+loop.run_until_complete(run())
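The cron script throttles itself by firing requests in batches of 400 and pausing for 60 seconds between batches. A minimal standalone sketch of that batch-and-pause pattern, with a hypothetical fetch coroutine and run_in_batches helper standing in for get_data and the tqdm loop above:

import asyncio

async def fetch(item):
    # placeholder for a real request coroutine such as get_data(session, symbol)
    await asyncio.sleep(0)
    return item

async def run_in_batches(items, batch_size=400, pause=60):
    tasks = []
    for i, item in enumerate(items, start=1):
        tasks.append(fetch(item))
        if i % batch_size == 0:
            await asyncio.gather(*tasks)   # flush the current batch
            tasks = []
            await asyncio.sleep(pause)     # back off to stay under the API rate limit
    if tasks:
        await asyncio.gather(*tasks)       # flush the remainder

# asyncio.run(run_in_batches(range(1000)))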
15  app/main.py
@@ -1349,9 +1349,20 @@ async def get_fair_price(data: TickerData):
 
     try:
         with open(f"json/shareholders/{ticker}.json", 'r') as file:
-            res = ujson.load(file)
+            shareholder_list = ujson.load(file)
     except:
-        res = []
+        shareholder_list = []
+
+    try:
+        with open(f"json/ownership-stats/{ticker}.json", 'r') as file:
+            stats = ujson.load(file)
+    except:
+        stats = {}
+
+    try:
+        res = {**stats, 'shareholders': shareholder_list}
+    except:
+        res = {}
 
     redis_client.set(cache_key, ujson.dumps(res))
     redis_client.expire(cache_key, 3600 * 24)  # Set cache expiration time to 1 day
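The endpoint now merges the latest ownership stats with the shareholder list and caches the combined dict in Redis for 24 hours. A minimal sketch of the read side of that cache, assuming a redis.Redis client named redis_client and a hypothetical cache_key value; both live in the surrounding endpoint code, outside this hunk:

import ujson
import redis

redis_client = redis.Redis(host='localhost', port=6379)  # assumed connection details
cache_key = "ownership-stats-AAPL"                        # hypothetical key naming

cached = redis_client.get(cache_key)
if cached is not None:
    res = ujson.loads(cached)                  # ownership stats fields plus a 'shareholders' list
    shareholders = res.get('shareholders', [])
else:
    res = None                                 # fall through to rebuild and re-cache as in the diff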
@@ -320,6 +320,17 @@ def run_market_maker():
         ]
         subprocess.run(command)
+
+def run_ownership_stats():
+    week = datetime.today().weekday()
+    if week <= 5:
+        subprocess.run(["python3", "cron_ownership_stats.py"])
+        command = [
+            "sudo", "rsync", "-avz", "-e", "ssh",
+            "/root/backend/app/json/ownership-stats",
+            f"root@{useast_ip_address}:/root/backend/app/json"
+        ]
+        subprocess.run(command)
 
 # Create functions to run each schedule in a separate thread
 def run_threaded(job_func):
     job_thread = threading.Thread(target=job_func)
@@ -352,6 +363,8 @@ schedule.every().day.at("14:00").do(run_threaded, run_cron_var).tag('var_job')
 schedule.every().day.at("15:45").do(run_threaded, run_restart_cache)
 
 schedule.every().saturday.at("01:00").do(run_threaded, run_market_maker).tag('markt_maker_job')
+schedule.every().saturday.at("05:00").do(run_threaded, run_ownership_stats).tag('ownership_stats_job')
+
 
 schedule.every(1).minutes.do(run_threaded, run_cron_portfolio).tag('portfolio_job')
 schedule.every(5).minutes.do(run_threaded, run_cron_market_movers).tag('market_movers_job')
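The new job is registered with the schedule library for Saturdays at 05:00 and dispatched through run_threaded so it cannot block the other jobs. The driver loop itself is not part of this diff; a minimal sketch of the usual pattern, with a stand-in job body:

import time
import threading
import schedule

def run_threaded(job_func):
    # run each scheduled job in its own thread so a long job does not delay the scheduler
    job_thread = threading.Thread(target=job_func)
    job_thread.start()

def run_ownership_stats():
    print("ownership stats job")  # stand-in for the real cron_ownership_stats.py call

schedule.every().saturday.at("05:00").do(run_threaded, run_ownership_stats).tag('ownership_stats_job')

while True:
    schedule.run_pending()  # execute any jobs whose scheduled time has arrived
    time.sleep(1)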