refactor etf provider
parent d0b5ac80de
commit b8ec4e9d2d
@@ -13,7 +13,7 @@ stock_screener_data_dict = {item['symbol']: item for item in stock_screener_data
 
 
 query_etf_holding = f"SELECT holding from etfs WHERE symbol = ?"
-
+quote_cache = {}
 
 async def save_json(category, data, category_type='market-cap'):
     with open(f"json/{category_type}/list/{category}.json", 'wb') as file:
@@ -21,11 +21,16 @@ async def save_json(category, data, category_type='market-cap'):
 
 async def get_quote_data(symbol):
     """Get quote data for a symbol from JSON file"""
-    try:
-        with open(f"json/quote/{symbol}.json", 'r') as file:
-            return orjson.loads(file.read())
-    except FileNotFoundError:
-        return None
+    if symbol in quote_cache:
+        return quote_cache[symbol]
+    else:
+        try:
+            with open(f"json/quote/{symbol}.json") as file:
+                quote_data = orjson.loads(file.read())
+                quote_cache[symbol] = quote_data  # Cache the loaded data
+                return quote_data
+        except:
+            return None
 
 async def process_category(cursor, category, condition, category_type='market-cap'):
     """
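Note on the get_quote_data change: each quote file is now read from disk at most once per symbol and served from the module-level quote_cache on repeated lookups. A minimal usage sketch, assuming the refactored module is in scope; "AAPL" and the json/quote path are only illustrative, not values confirmed by this commit:

    import asyncio

    async def demo():
        first = await get_quote_data("AAPL")   # disk read (or None if json/quote/AAPL.json is missing)
        second = await get_quote_data("AAPL")  # served from quote_cache on a hit
        if first is not None:
            assert first is second  # the same cached dict object is returned

    asyncio.run(demo())

Because the cached dict itself is returned, all callers share (and can mutate) the same object.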
@@ -80,7 +85,6 @@ async def process_category(cursor, category, condition, category_type='market-cap'):
     return sorted_result
 
 def get_etf_holding(etf_symbols, etf_con):
-    quote_cache = {}
 
     for ticker in tqdm(etf_symbols):
         res = []
@@ -120,6 +124,58 @@ def get_etf_holding(etf_symbols, etf_con):
         with open(f"json/etf/holding/{ticker}.json", 'wb') as file:
             file.write(orjson.dumps(res))
 
+def get_etf_provider(etf_symbols, etf_con):
+
+
+    cursor = etf_con.cursor()
+    cursor.execute("SELECT DISTINCT etfProvider FROM etfs")
+    etf_provider = [row[0] for row in cursor.fetchall()]
+    print(etf_provider)
+    query = "SELECT symbol, name, expenseRatio, totalAssets, numberOfHoldings FROM etfs WHERE etfProvider = ?"
+
+    for provider in etf_provider:
+        try:
+            cursor.execute(query, (provider,))
+            raw_data = cursor.fetchall()
+            # Extract only relevant data and sort it
+            # Extract only relevant data and filter only integer totalAssets
+            res = [
+                {'symbol': row[0], 'name': row[1], 'expenseRatio': row[2], 'totalAssets': row[3], 'numberOfHoldings': row[4]}
+                for row in raw_data if isinstance(row[3], float) or isinstance(row[3], int)
+            ]
+            for item in res:
+                try:
+                    symbol = item['symbol']
+                    if symbol in quote_cache:
+                        quote_data = quote_cache[symbol]
+                    else:
+                        # Load the quote data from file if not in cache
+                        try:
+                            with open(f"json/quote/{symbol}.json") as file:
+                                quote_data = orjson.loads(file.read())
+                                quote_cache[symbol] = quote_data  # Cache the loaded data
+                        except:
+                            quote_data = None
+
+                    # Assign price and changesPercentage if available, otherwise set to None
+                    item['price'] = round(quote_data.get('price'), 2) if quote_data else None
+                    item['changesPercentage'] = round(quote_data.get('changesPercentage'), 2) if quote_data else None
+                    item['name'] = quote_data.get('name') if quote_data else None
+                except:
+                    pass
+
+            sorted_res = sorted(res, key=lambda x: x['totalAssets'], reverse=True)
+
+
+            # Save results to a file if there's data to write
+            if sorted_res:
+                with open(f"json/etf/provider/{provider}.json", 'wb') as file:
+                    file.write(orjson.dumps(sorted_res))
+        except:
+            pass
+    cursor.close()
+
+
 async def run():
     """Main function to run the analysis for all categories"""
     market_cap_conditions = {
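Note on get_etf_provider: it writes one JSON array per provider to json/etf/provider/<provider>.json, sorted by totalAssets in descending order, with price, changesPercentage and name merged in from the cached quote files. A short read-back sketch; the file name "vanguard" is only an illustrative provider value, real names come from the etfProvider column:

    import orjson

    with open("json/etf/provider/vanguard.json", "rb") as f:
        etfs = orjson.loads(f.read())

    # Largest funds first, as produced by the reverse totalAssets sort above.
    for etf in etfs[:5]:
        print(etf["symbol"], etf["totalAssets"], etf["expenseRatio"], etf["price"])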
@@ -157,7 +213,7 @@ async def run():
         etf_symbols = [row[0] for row in etf_cursor.fetchall()]
 
         # Process market cap categories
-
+        '''
         for category, condition in market_cap_conditions.items():
             await process_category(cursor, category, condition, 'market-cap')
             await asyncio.sleep(1)  # Small delay between categories
@@ -169,6 +225,8 @@ async def run():
 
 
         get_etf_holding(etf_symbols, etf_con)
+        '''
+        get_etf_provider(etf_symbols, etf_con)
 
 
     except Exception as e:
app/main.py (48 changes)
@@ -2318,33 +2318,37 @@ async def get_all_etf_providers(api_key: str = Security(get_api_key)):
     return res
 
 
-@app.post("/etf-provider")
-async def etf_provider(data: ETFProviderData, api_key: str = Security(get_api_key)):
-    data = data.dict()
-    etf_provider = data['etfProvider'].lower()
-
+@app.post("/etf-provider")
+async def etf_holdings(data: ETFProviderData, api_key: str = Security(get_api_key)):
+    etf_provider = data.etfProvider.lower()
     cache_key = f"etf-provider-{etf_provider}"
     cached_result = redis_client.get(cache_key)
 
     if cached_result:
-        return orjson.loads(cached_result)
+        return StreamingResponse(
+            io.BytesIO(cached_result),
+            media_type="application/json",
+            headers={"Content-Encoding": "gzip"}
+        )
+
+    try:
+        with open(f"json/etf/provider/{etf_provider}.json", 'rb') as file:
+            res = orjson.loads(file.read())
+    except:
+        res = []
+
+    data = orjson.dumps(res)
+    compressed_data = gzip.compress(data)
+
+    redis_client.set(cache_key, compressed_data)
+    redis_client.expire(cache_key, 60*10)
+
+    return StreamingResponse(
+        io.BytesIO(compressed_data),
+        media_type="application/json",
+        headers={"Content-Encoding": "gzip"}
+    )
 
-    # Check if data is cached; if not, fetch and cache it
-    cursor = etf_con.cursor()
-    query = "SELECT symbol, name, expenseRatio, totalAssets, numberOfHoldings FROM etfs WHERE etfProvider = ?"
-    cursor.execute(query, (etf_provider,))
-    raw_data = cursor.fetchall()
-    cursor.close()
-    # Extract only relevant data and sort it
-    # Extract only relevant data and filter only integer totalAssets
-    res = [
-        {'symbol': row[0], 'name': row[1], 'expenseRatio': row[2], 'totalAssets': row[3], 'numberOfHoldings': row[4]}
-        for row in raw_data if isinstance(row[3], float) or isinstance(row[3], int)
-    ]
-    sorted_res = sorted(res, key=lambda x: x['totalAssets'], reverse=True)
-    redis_client.set(cache_key, orjson.dumps(sorted_res))
-    redis_client.expire(cache_key, 3600 * 24)  # Set cache expiration time to 1 day
-    return sorted_res
 
 @app.get("/etf-new-launches")
 async def etf_provider(api_key: str = Security(get_api_key)):
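Note on the /etf-provider response change: the endpoint now caches and returns a gzip-compressed body and advertises it via the Content-Encoding: gzip header, so conforming HTTP clients decompress it transparently. A hedged client sketch; the base URL, the header used to carry the API key, and the provider value are placeholders and not confirmed by this repo (the actual auth scheme depends on get_api_key):

    import requests

    resp = requests.post(
        "http://localhost:8000/etf-provider",
        json={"etfProvider": "vanguard"},
        headers={"X-API-KEY": "<your-api-key>"},  # placeholder auth header
    )
    resp.raise_for_status()
    etfs = resp.json()  # requests undoes the gzip Content-Encoding automatically
    print(len(etfs), "ETFs for this provider")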