Bugfix: market movers

This commit is contained in:
MuslemRahimi 2024-11-20 12:09:15 +01:00
parent 9af776be5b
commit a6be85adf0
2 changed files with 34 additions and 40 deletions

View File

@ -289,13 +289,13 @@ async def get_gainer_loser_active_stocks(symbols):
#Get the latest quote of all unique symbol and map it back to the original data list to update all values
'''
latest_quote = await get_quote_of_stocks(unique_symbols_list)
# Updating values in the data list based on matching symbols from the quote list
for category in data.keys():
# Only proceed if the time period is "1D"
for time_period in data[category].keys():
if time_period != "1D":
if time_period == "1D":
for stock_data in data[category][time_period]:
symbol = stock_data["symbol"]
quote_stock = next((item for item in latest_quote if item["symbol"] == symbol), None)
@ -304,7 +304,7 @@ async def get_gainer_loser_active_stocks(symbols):
stock_data['changesPercentage'] = quote_stock['changesPercentage']
stock_data['marketCap'] = quote_stock['marketCap']
stock_data['volume'] = quote_stock['volume']
'''
return data

View File

@ -1152,58 +1152,52 @@ async def get_analyst_ticke_history(data: TickerData, api_key: str = Security(ge
@app.post("/indicator-data")
async def get_indicator(data: IndicatorListData, api_key: str = Security(get_api_key)):
# Define default fields if none are provided
rule_of_list = data.ruleOfList or ['volume', 'marketCap', 'changesPercentage', 'price', 'symbol', 'name']
# Ensure 'symbol' and 'name' are always included in rule_of_list
rule_of_list_set = set(rule_of_list).union({'symbol', 'name'})
# Convert ticker list to uppercase and filter existing symbols in stock_screener_data
ticker_list = set(map(str.upper, data.tickerList))
cache_key = f"indicator-data-{','.join(sorted(ticker_list))}-{','.join(sorted(rule_of_list_set))}"
# Check if the result is cached
cached_result = redis_client.get(cache_key)
if cached_result:
return StreamingResponse(
io.BytesIO(cached_result),
media_type="application/json",
headers={"Content-Encoding": "gzip"}
)
# Prepare the result list and determine type
stock_screener_dict = {item['symbol']: item for item in stock_screener_data if item['symbol'] in ticker_list}
# Ensure 'symbol' and 'name' are always included in the rule_of_list
if 'symbol' not in rule_of_list:
rule_of_list.append('symbol')
if 'name' not in rule_of_list:
rule_of_list.append('name')
ticker_list = [t.upper() for t in data.tickerList]
combined_results = []
# Process each ticker in the filtered stock_screener_dict
for ticker, ticker_data in stock_screener_dict.items():
# Determine the ticker type
ticker_type = (
'etf' if ticker in etf_set else
'crypto' if ticker in crypto_set else
'stock'
)
# Filter data according to rule_of_list and add the ticker type
filtered_data = {key: ticker_data.get(key) for key in rule_of_list_set}
filtered_data['type'] = ticker_type
combined_results.append(filtered_data)
# Load quote data in parallel
quote_data = await asyncio.gather(*[load_json_async(f"json/quote/{ticker}.json") for ticker in ticker_list])
quote_dict = {ticker: data for ticker, data in zip(ticker_list, quote_data) if data}
# Categorize tickers and extract data
for ticker, quote in quote_dict.items():
ticker_type = 'stock'
if ticker in etf_symbols:
ticker_type = 'etf'
elif ticker in crypto_symbols:
ticker_type = 'crypto'
filtered_quote = {key: quote.get(key) for key in rule_of_list if key in quote}
filtered_quote['type'] = ticker_type
combined_results.append(filtered_quote)
# Fetch and merge data from stock_screener_data
screener_keys = [key for key in rule_of_list if key not in ['volume', 'marketCap', 'changesPercentage', 'price', 'symbol', 'name']]
if screener_keys:
screener_dict = {item['symbol']: {k: v for k, v in item.items() if k in screener_keys} for item in stock_screener_data}
for result in combined_results:
symbol = result.get('symbol')
if symbol in screener_dict:
result.update(screener_dict[symbol])
# Serialize and compress the response
res = orjson.dumps(combined_results)
compressed_data = gzip.compress(res)
# Cache the result
redis_client.set(cache_key, compressed_data)
redis_client.expire(cache_key, 60) # Cache expires after 1 minute
return StreamingResponse(
io.BytesIO(compressed_data),
media_type="application/json",
headers={"Content-Encoding": "gzip"}
)
@app.post("/get-watchlist")
async def get_watchlist(data: GetWatchList, api_key: str = Security(get_api_key)):
data = data.dict()