refactor all stock list pages
This commit is contained in:
parent
49d8fa25bc
commit
a36aa83337
@ -363,9 +363,64 @@ async def get_index_list():
|
||||
with open(f"json/stocks-list/list/{index_list+extension}.json", 'wb') as file:
|
||||
file.write(orjson.dumps(res_list))
|
||||
|
||||
async def get_all_stock_tickers():
    """Build and persist the all-stock-tickers list.

    Reads the distinct symbols from stocks.db (skipping symbols that contain
    a dot), enriches each one with its cached quote data and, when present,
    the screener revenue, then writes the symbol-sorted result to
    json/stocks-list/list/all-stock-tickers.json.

    Errors per symbol are reported and skipped; database-level errors are
    reported and abort the run. Nothing is returned.
    """
    try:
        '''
        with sqlite3.connect('etf.db') as etf_con:
            etf_cursor = etf_con.cursor()
            etf_cursor.execute("PRAGMA journal_mode = wal")
            etf_cursor.execute("SELECT DISTINCT symbol FROM etfs")
            etf_symbols = [row[0] for row in etf_cursor.fetchall()]
        '''
        with sqlite3.connect('stocks.db') as con:
            cursor = con.cursor()
            cursor.execute("PRAGMA journal_mode = wal")
            cursor.execute("SELECT DISTINCT symbol FROM stocks WHERE symbol NOT LIKE '%.%'")
            stock_symbols = [row[0] for row in cursor.fetchall()]

        res_list = []
        for symbol in stock_symbols:
            try:
                # Quote cache is best-effort: a missing or corrupt file just
                # means this symbol is skipped below.
                try:
                    with open(f"json/quote/{symbol}.json", "rb") as file:
                        quote_data = orjson.loads(file.read())
                except (FileNotFoundError, orjson.JSONDecodeError):
                    quote_data = None

                if quote_data:
                    item = {
                        'symbol': symbol,
                        'name': quote_data.get('name', None),
                        'price': round(quote_data.get('price'), 2) if quote_data.get('price') is not None else None,
                        'changesPercentage': round(quote_data.get('changesPercentage'), 2) if quote_data.get('changesPercentage') is not None else None,
                        'marketCap': quote_data.get('marketCap', None),
                        'revenue': None,
                    }

                    # Add screener data if available
                    if symbol in stock_screener_data_dict:
                        item['revenue'] = stock_screener_data_dict[symbol].get('revenue')

                    # Guard against a missing/None marketCap before comparing:
                    # the original compared None > 0, which raised TypeError
                    # and was only masked by the broad except below.
                    if item['marketCap'] is not None and item['marketCap'] > 0:
                        res_list.append(item)

            except Exception as e:
                print(f"Error processing symbol {symbol}: {e}")

        if res_list:
            res_list = sorted(res_list, key=lambda x: x['symbol'], reverse=False)

            with open("json/stocks-list/list/all-stock-tickers.json", 'wb') as file:
                file.write(orjson.dumps(res_list))

    except Exception as e:
        print(f"Database error: {e}")
|
||||
|
||||
async def run():
|
||||
await asyncio.gather(
|
||||
get_all_stock_tickers(),
|
||||
get_index_list(),
|
||||
etf_bitcoin_list(),
|
||||
get_magnificent_seven()
|
||||
|
||||
32
app/main.py
32
app/main.py
@ -1796,36 +1796,6 @@ async def etf_holdings(data: TickerData, api_key: str = Security(get_api_key)):
|
||||
|
||||
|
||||
|
||||
@app.get("/all-stock-tickers")
async def get_all_stock_tickers(api_key: str = Security(get_api_key)):
    """Serve the cached list of all stock tickers as gzip-compressed JSON.

    Serves straight from Redis when the key is warm; otherwise reads the
    pre-generated json/all-symbols/stocks.json file (falling back to an
    empty list), compresses it, caches it for one day, and streams it.
    """
    cache_key = "all_stock_tickers"  # no interpolation needed; was a bare f-string
    cached_result = redis_client.get(cache_key)
    if cached_result:
        return StreamingResponse(
            io.BytesIO(cached_result),
            media_type="application/json",
            headers={"Content-Encoding": "gzip"}
        )
    try:
        with open("json/all-symbols/stocks.json", 'rb') as file:
            res = orjson.loads(file.read())
    # Narrowed from a bare except: only a missing or malformed file should
    # degrade to an empty payload; anything else should surface.
    except (FileNotFoundError, orjson.JSONDecodeError):
        res = []

    # Compress the JSON data
    data = orjson.dumps(res)
    compressed_data = gzip.compress(data)

    redis_client.set(cache_key, compressed_data)
    redis_client.expire(cache_key, 3600 * 24)  # Set cache expiration time to 1 day

    return StreamingResponse(
        io.BytesIO(compressed_data),
        media_type="application/json",
        headers={"Content-Encoding": "gzip"}
    )
|
||||
|
||||
|
||||
@app.get("/all-etf-tickers")
|
||||
async def get_all_etf_tickers(api_key: str = Security(get_api_key)):
|
||||
cache_key = f"all-etf-tickers"
|
||||
@ -3884,7 +3854,7 @@ async def get_statistics(data: FilterStockList, api_key: str = Security(get_api_
|
||||
category_type = 'sector'
|
||||
elif filter_list == 'reits':
|
||||
category_type = 'industry'
|
||||
elif filter_list in ['ca','cn','de','gb','il','in','jp','nyse','nasdaq','amex','dowjones','sp500','nasdaq100']:
|
||||
elif filter_list in ['ca','cn','de','gb','il','in','jp','nyse','nasdaq','amex','dowjones','sp500','nasdaq100','all-stock-tickers']:
|
||||
category_type = 'stocks-list'
|
||||
elif filter_list in ['dividend-kings','dividend-aristocrats']:
|
||||
category_type = 'dividends'
|
||||
|
||||
@ -1664,15 +1664,6 @@ async def ticker_mentioning(con):
|
||||
return sorted_symbol_list
|
||||
|
||||
|
||||
async def get_all_stock_tickers(con):
    """Fetch every stock with a known market cap from the database.

    Excludes the literal '%5EGSPC' index symbol, and returns a list of
    dicts with keys symbol, name, marketCap and sector, ordered by the
    project-wide custom_symbol_sort key.
    """
    db_cursor = con.cursor()
    db_cursor.execute("SELECT symbol, name, marketCap, sector FROM stocks WHERE symbol != ? AND marketCap IS NOT NULL", ('%5EGSPC',))

    # Unpack each fetched tuple into a record dict, then apply the shared
    # symbol ordering in place.
    records = [
        {'symbol': sym, 'name': name, 'marketCap': cap, 'sector': sector}
        for sym, name, cap, sector in db_cursor.fetchall()
    ]
    records.sort(key=custom_symbol_sort)
    return records
|
||||
|
||||
async def get_all_etf_tickers(etf_con):
|
||||
cursor = etf_con.cursor()
|
||||
cursor.execute("SELECT symbol, name, totalAssets, numberOfHoldings FROM etfs WHERE totalAssets IS NOT NULL")
|
||||
@ -1961,10 +1952,6 @@ async def save_json_files():
|
||||
with open(f"json/ipo-calendar/data.json", 'w') as file:
|
||||
ujson.dump(data, file)
|
||||
|
||||
data = await get_all_stock_tickers(con)
|
||||
with open(f"json/all-symbols/stocks.json", 'w') as file:
|
||||
ujson.dump(data, file)
|
||||
|
||||
data = await get_all_etf_tickers(etf_con)
|
||||
with open(f"json/all-symbols/etfs.json", 'w') as file:
|
||||
ujson.dump(data, file)
|
||||
|
||||
Loading…
x
Reference in New Issue
Block a user