diff --git a/app/cron_list.py b/app/cron_list.py
index 488a0e1..1424b0c 100644
--- a/app/cron_list.py
+++ b/app/cron_list.py
@@ -2,6 +2,7 @@ import orjson
 import sqlite3
 import asyncio
 import aiohttp
+import pandas as pd
 from tqdm import tqdm
 
 
@@ -11,6 +12,9 @@ with open(f"json/stock-screener/data.json", 'rb') as file:
 stock_screener_data_dict = {item['symbol']: item for item in stock_screener_data}
 
 
+query_etf_holding = "SELECT holding FROM etfs WHERE symbol = ?"
+
+
 async def save_json(category, data, category_type='market-cap'):
     with open(f"json/{category_type}/list/{category}.json", 'wb') as file:
         file.write(orjson.dumps(data))
@@ -75,6 +79,46 @@ async def process_category(cursor, category, condition, category_type='market-ca
     print(f"Processed and saved {len(sorted_result)} stocks for {category}")
     return sorted_result
 
+def get_etf_holding(etf_symbols, etf_con):
+    quote_cache = {}
+
+    for ticker in tqdm(etf_symbols):
+        res = []
+        df = pd.read_sql_query(query_etf_holding, etf_con, params=(ticker,))
+
+        try:
+            # Load holdings data from the SQL query result
+            data = orjson.loads(df['holding'].iloc[0])
+            res = [{key: item[key] for key in ('asset', 'weightPercentage', 'sharesNumber')} for item in data]
+
+            for item in res:
+                asset = item['asset']
+
+                # Check if the asset data is already in the cache
+                if asset in quote_cache:
+                    quote_data = quote_cache[asset]
+                else:
+                    # Load the quote data from file if not in cache
+                    try:
+                        with open(f"json/quote/{asset}.json") as file:
+                            quote_data = orjson.loads(file.read())
+                            quote_cache[asset] = quote_data  # Cache the loaded data
+                    except (FileNotFoundError, orjson.JSONDecodeError):
+                        quote_data = None
+
+                # Guard each field: a present quote file may still lack price/changesPercentage
+                item['price'] = round(quote_data['price'], 2) if quote_data and quote_data.get('price') is not None else None
+                item['changesPercentage'] = round(quote_data['changesPercentage'], 2) if quote_data and quote_data.get('changesPercentage') is not None else None
+                item['name'] = quote_data.get('name') if quote_data else None
+
+        except Exception as e:
+            print(e)
+            res = []
+
+        # Save results to a file if there's data to write
+        if res:
+            with open(f"json/etf/holding/{ticker}.json", 'wb') as file:
+                file.write(orjson.dumps(res))
 
 async def run():
     """Main function to run the analysis for all categories"""
@@ -106,7 +150,14 @@ async def run():
         cursor = con.cursor()
         cursor.execute("PRAGMA journal_mode = wal")
 
+        etf_con = sqlite3.connect('etf.db')
+        etf_cursor = etf_con.cursor()
+        etf_cursor.execute("PRAGMA journal_mode = wal")
+        etf_cursor.execute("SELECT DISTINCT symbol FROM etfs")
+        etf_symbols = [row[0] for row in etf_cursor.fetchall()]
+
         # Process market cap categories
+
         for category, condition in market_cap_conditions.items():
             await process_category(cursor, category, condition, 'market-cap')
             await asyncio.sleep(1)  # Small delay between categories
@@ -115,12 +166,17 @@ async def run():
         for category, condition in sector_conditions.items():
             await process_category(cursor, category, condition, 'sector')
             await asyncio.sleep(1)  # Small delay between categories
+
+
+        get_etf_holding(etf_symbols, etf_con)
+
 
     except Exception as e:
         print(e)
         raise
     finally:
         con.close()
+        etf_con.close()
 
 
 if __name__ == "__main__":
diff --git a/app/main.py b/app/main.py
index c1fe400..0edcec1 100755
--- a/app/main.py
+++ b/app/main.py
@@ -1765,26 +1765,37 @@ async def get_fair_price(data: TickerData, api_key: str = Security(get_api_key))
     return profile_list
 
+
 @app.post("/etf-holdings")
 async def etf_holdings(data: TickerData, api_key: str = Security(get_api_key)):
     ticker = data.ticker.upper()
     cache_key = f"etf-holdings-{ticker}"
-
     cached_result = redis_client.get(cache_key)
     if cached_result:
-        return orjson.loads(cached_result)
-
-
-    query_template = f"SELECT holding from etfs WHERE symbol = ?"
-    df = pd.read_sql_query(query_template, etf_con, params=(ticker,))
+        return StreamingResponse(
+            io.BytesIO(cached_result),
+            media_type="application/json",
+            headers={"Content-Encoding": "gzip"}
+        )
 
     try:
-        res = orjson.loads(df['holding'].iloc[0])
-    except:
-        res = []
+        with open(f"json/etf/holding/{ticker}.json", 'rb') as file:
+            res = orjson.loads(file.read())
+    except (FileNotFoundError, orjson.JSONDecodeError):
+        res = []
 
-    redis_client.set(cache_key, orjson.dumps(res), 3600*3600) # Set cache expiration time to 1 hour
-    return res
+    data = orjson.dumps(res)
+    compressed_data = gzip.compress(data)
+
+    # Cache gzipped payload with a 10-minute TTL (value and expiry set atomically)
+    redis_client.set(cache_key, compressed_data, ex=60*10)
+
+    return StreamingResponse(
+        io.BytesIO(compressed_data),
+        media_type="application/json",
+        headers={"Content-Encoding": "gzip"}
+    )
+
 
 @app.post("/etf-country-weighting")
 async def etf_holdings(data: TickerData, api_key: str = Security(get_api_key)):