bugfixing etf provider
This commit is contained in:
parent 6bf4195b3b
commit 8dde297a6b
@@ -15,7 +15,7 @@ api_key = os.getenv('FINRA_API_KEY')
 api_secret = os.getenv('FINRA_API_SECRET')
 api_token = finra_api_queries.retrieve_api_token(finra_api_key_input=api_key, finra_api_secret_input=api_secret)
 
-start_date = datetime.today() - timedelta(365)
+start_date = datetime.today() - timedelta(180)
 end_date = datetime.today()
 start_date = start_date.strftime("%Y-%m-%d")
 end_date = end_date.strftime("%Y-%m-%d")
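The only change in this hunk is the lookback window: 180 days instead of 365. As a minimal sketch, the resulting request window built with the same stdlib calls looks like this:

from datetime import datetime, timedelta

# Roughly the last six months, both endpoints formatted as "YYYY-MM-DD"
start_date = (datetime.today() - timedelta(180)).strftime("%Y-%m-%d")
end_date = datetime.today().strftime("%Y-%m-%d")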
@@ -83,7 +83,6 @@ async def save_json(symbol, data):
    # Use async file writing to avoid blocking the event loop
    loop = asyncio.get_event_loop()
    path = f"json/market-maker/companies/{symbol}.json"
    os.makedirs(os.path.dirname(path), exist_ok=True)
    await loop.run_in_executor(None, ujson.dump, data, open(path, 'w'))

async def process_ticker(ticker):
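One thing to note about the run_in_executor line above: the handle from open(path, 'w') is opened on the event-loop thread and never explicitly closed. A small sketch of an alternative that keeps the write in a worker thread but closes the file; the helper name write_json is illustrative, not from the repo:

import asyncio
import os
import ujson

def write_json(path, data):
    # Runs in a worker thread; the with-block closes the file handle
    with open(path, 'w') as f:
        ujson.dump(data, f)

async def save_json(symbol, data):
    path = f"json/market-maker/companies/{symbol}.json"
    os.makedirs(os.path.dirname(path), exist_ok=True)
    await asyncio.get_event_loop().run_in_executor(None, write_json, path, data)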
@@ -111,12 +110,10 @@ async def run():
     total_symbols = stocks_symbols + etf_symbols
 
     async with aiohttp.ClientSession() as session:
-        tasks = []
-        for ticker in total_symbols:
-            tasks.append(process_ticker(ticker))
+        tasks = [process_ticker(ticker) for ticker in total_symbols]
 
         # Run tasks concurrently in batches to avoid too many open connections
-        batch_size = 1 # Adjust based on your system's capacity
+        batch_size = 10 # Adjust based on your system's capacity
         for i in tqdm(range(0, len(tasks), batch_size)):
             batch = tasks[i:i + batch_size]
             await asyncio.gather(*batch)
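For reference, the batching logic above can be read as a generic "gather in chunks" pattern; a sketch, assuming each entry in tasks is an un-awaited coroutine (the helper name gather_in_batches is illustrative):

import asyncio

async def gather_in_batches(tasks, batch_size=10):
    # Await coroutines in fixed-size chunks so at most batch_size
    # requests (and their connections) are in flight at once
    results = []
    for i in range(0, len(tasks), batch_size):
        results.extend(await asyncio.gather(*tasks[i:i + batch_size]))
    return results

An asyncio.Semaphore around each request is a common alternative that keeps the pipeline full instead of waiting for each batch to drain.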
@@ -2095,7 +2095,11 @@ async def etf_provider(data: ETFProviderData):
     cursor.close()
 
-    # Extract only relevant data and sort it
-    res = [{'symbol': row[0], 'name': row[1], 'expenseRatio': row[2], 'totalAssets': row[3], 'numberOfHoldings': row[4]} for row in raw_data]
+    # Extract only relevant data and filter only integer totalAssets
+    res = [
+        {'symbol': row[0], 'name': row[1], 'expenseRatio': row[2], 'totalAssets': row[3], 'numberOfHoldings': row[4]}
+        for row in raw_data if isinstance(row[3], int)
+    ]
     sorted_res = sorted(res, key=lambda x: x['totalAssets'], reverse=True)
     redis_client.set(cache_key, orjson.dumps(sorted_res))
     redis_client.expire(cache_key, 3600 * 24) # Set cache expiration time to 1 day
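The likely failure this hunk guards against: sorted(res, key=lambda x: x['totalAssets']) raises a TypeError as soon as one row carries a non-integer totalAssets (for example None next to ints), so such rows are now filtered out before sorting. A sketch with hypothetical rows:

# Hypothetical rows: (symbol, name, expenseRatio, totalAssets, numberOfHoldings)
raw_data = [
    ('AAA', 'Fund A', 0.03, 1_000_000, 50),
    ('BBB', 'Fund B', 0.05, None, 20),   # totalAssets missing
]

res = [{'symbol': r[0], 'totalAssets': r[3]} for r in raw_data if isinstance(r[3], int)]
sorted_res = sorted(res, key=lambda x: x['totalAssets'], reverse=True)
# Without the isinstance filter, the None value would make the sort raise TypeError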
@@ -347,8 +347,6 @@ def run_dark_pool_flow():


def run_market_maker():
    week = datetime.today().weekday()
    if week <= 5:
        run_command(["python3", "cron_market_maker.py"])
        command = [
            "sudo", "rsync", "-avz", "-e", "ssh",
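A note on the weekday gate above: datetime.today().weekday() returns 0 for Monday through 6 for Sunday, so week <= 5 runs the job every day except Sunday. A one-line check, for illustration only:

from datetime import datetime

# weekday(): Monday=0 ... Sunday=6, so <= 5 covers every day except Sunday
if datetime.today().weekday() <= 5:
    print("would run cron_market_maker.py")  # stand-in for the repo's run_command(...)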