Bugfix: ETF provider — filter non-integer totalAssets, raise market-maker batch size, and run rsync unconditionally

This commit is contained in:
MuslemRahimi 2024-07-15 14:19:16 +02:00
parent 6bf4195b3b
commit 8dde297a6b
3 changed files with 15 additions and 16 deletions

View File

@ -15,7 +15,7 @@ api_key = os.getenv('FINRA_API_KEY')
api_secret = os.getenv('FINRA_API_SECRET') api_secret = os.getenv('FINRA_API_SECRET')
api_token = finra_api_queries.retrieve_api_token(finra_api_key_input=api_key, finra_api_secret_input=api_secret) api_token = finra_api_queries.retrieve_api_token(finra_api_key_input=api_key, finra_api_secret_input=api_secret)
start_date = datetime.today() - timedelta(365) start_date = datetime.today() - timedelta(180)
end_date = datetime.today() end_date = datetime.today()
start_date = start_date.strftime("%Y-%m-%d") start_date = start_date.strftime("%Y-%m-%d")
end_date = end_date.strftime("%Y-%m-%d") end_date = end_date.strftime("%Y-%m-%d")
@ -83,7 +83,6 @@ async def save_json(symbol, data):
# Use async file writing to avoid blocking the event loop # Use async file writing to avoid blocking the event loop
loop = asyncio.get_event_loop() loop = asyncio.get_event_loop()
path = f"json/market-maker/companies/{symbol}.json" path = f"json/market-maker/companies/{symbol}.json"
os.makedirs(os.path.dirname(path), exist_ok=True)
await loop.run_in_executor(None, ujson.dump, data, open(path, 'w')) await loop.run_in_executor(None, ujson.dump, data, open(path, 'w'))
async def process_ticker(ticker): async def process_ticker(ticker):
@ -111,12 +110,10 @@ async def run():
total_symbols = stocks_symbols + etf_symbols total_symbols = stocks_symbols + etf_symbols
async with aiohttp.ClientSession() as session: async with aiohttp.ClientSession() as session:
tasks = [] tasks = [process_ticker(ticker) for ticker in total_symbols]
for ticker in total_symbols:
tasks.append(process_ticker(ticker))
# Run tasks concurrently in batches to avoid too many open connections # Run tasks concurrently in batches to avoid too many open connections
batch_size = 1 # Adjust based on your system's capacity batch_size = 10 # Adjust based on your system's capacity
for i in tqdm(range(0, len(tasks), batch_size)): for i in tqdm(range(0, len(tasks), batch_size)):
batch = tasks[i:i + batch_size] batch = tasks[i:i + batch_size]
await asyncio.gather(*batch) await asyncio.gather(*batch)

View File

@ -2095,7 +2095,11 @@ async def etf_provider(data: ETFProviderData):
cursor.close() cursor.close()
# Extract only relevant data and sort it # Extract only relevant data and sort it
res = [{'symbol': row[0], 'name': row[1], 'expenseRatio': row[2], 'totalAssets': row[3], 'numberOfHoldings': row[4]} for row in raw_data] # Extract only relevant data and filter only integer totalAssets
res = [
{'symbol': row[0], 'name': row[1], 'expenseRatio': row[2], 'totalAssets': row[3], 'numberOfHoldings': row[4]}
for row in raw_data if isinstance(row[3], int)
]
sorted_res = sorted(res, key=lambda x: x['totalAssets'], reverse=True) sorted_res = sorted(res, key=lambda x: x['totalAssets'], reverse=True)
redis_client.set(cache_key, orjson.dumps(sorted_res)) redis_client.set(cache_key, orjson.dumps(sorted_res))
redis_client.expire(cache_key, 3600 * 24) # Set cache expiration time to 1 day redis_client.expire(cache_key, 3600 * 24) # Set cache expiration time to 1 day

View File

@ -347,15 +347,13 @@ def run_dark_pool_flow():
def run_market_maker(): def run_market_maker():
week = datetime.today().weekday() run_command(["python3", "cron_market_maker.py"])
if week <= 5: command = [
run_command(["python3", "cron_market_maker.py"]) "sudo", "rsync", "-avz", "-e", "ssh",
command = [ "/root/backend/app/json/market-maker",
"sudo", "rsync", "-avz", "-e", "ssh", f"root@{useast_ip_address}:/root/backend/app/json"
"/root/backend/app/json/market-maker", ]
f"root@{useast_ip_address}:/root/backend/app/json" run_command(command)
]
run_command(command)
def run_ownership_stats(): def run_ownership_stats():
week = datetime.today().weekday() week = datetime.today().weekday()