optimize indicator endpoint

This commit is contained in:
MuslemRahimi 2024-11-13 19:21:30 +01:00
parent dbfb568b6a
commit 5b9ee7d7e7

View File

@ -17,6 +17,7 @@ from dotenv import load_dotenv
from pydantic import BaseModel, Field from pydantic import BaseModel, Field
import requests import requests
from pathlib import Path from pathlib import Path
import asyncio
# Database related imports # Database related imports
import sqlite3 import sqlite3
@ -330,6 +331,28 @@ def replace_nan_inf_with_none(obj):
else: else:
return obj return obj
def load_json(file_path):
    """Read and deserialize the JSON file at *file_path*.

    Returns the decoded object, or None when the file does not exist.
    Parse errors are deliberately not caught and will propagate.
    """
    try:
        with open(file_path, 'rb') as fh:
            raw = fh.read()
    except FileNotFoundError:
        return None
    return orjson.loads(raw)
async def load_json_async(file_path):
    """Load a JSON file asynchronously, with a Redis read-through cache.

    The Redis cache is keyed by the file path. On a cache miss the file is
    read in a worker thread (so the event loop is not blocked by disk I/O),
    parsed, and cached for 10 minutes.

    Returns the parsed object, or None when the file cannot be read/parsed.
    """
    # Serve from the cache when possible. NOTE(review): redis_client appears
    # to be the synchronous client, so these calls briefly block the event
    # loop — consider an async Redis client if this becomes a hot path.
    cached_data = redis_client.get(file_path)
    if cached_data:
        return orjson.loads(cached_data)

    def _read_bytes():
        # Blocking disk read, executed off the event loop via to_thread.
        # 'rb' instead of 'r': orjson.loads accepts bytes, and this skips
        # an unnecessary text decode.
        with open(file_path, 'rb') as f:
            return f.read()

    try:
        data = orjson.loads(await asyncio.to_thread(_read_bytes))
    except Exception:
        # Best-effort loader: missing or malformed file yields None.
        return None

    # Cache for 10 minutes. A cache-write failure must not discard data that
    # was already loaded successfully, so it is handled separately.
    try:
        redis_client.set(file_path, orjson.dumps(data), ex=600)
    except Exception:
        pass
    return data
@app.get("/") @app.get("/")
async def hello_world(api_key: str = Security(get_api_key)): async def hello_world(api_key: str = Security(get_api_key)):
@ -1125,66 +1148,44 @@ async def get_analyst_ticke_history(data: TickerData, api_key: str = Security(ge
@app.post("/indicator-data") @app.post("/indicator-data")
async def get_indicator_data(data: IndicatorListData, api_key: str = Security(get_api_key)): async def get_indicator(data: IndicatorListData, api_key: str = Security(get_api_key)):
rule_of_list = data.ruleOfList rule_of_list = data.ruleOfList or ['volume', 'marketCap', 'changesPercentage', 'price', 'symbol', 'name']
ticker_list = data.tickerList
combined_results = [] # List to store the combined results
# Keys that should be read from the quote files if they are in rule_of_list # Ensure 'symbol' and 'name' are always included in the rule_of_list
quote_keys_to_include = ['volume', 'marketCap', 'changesPercentage', 'price', 'symbol', 'name']
def load_json(file_path):
try:
with open(file_path, 'rb') as file:
return orjson.loads(file.read())
except FileNotFoundError:
return None
# Ensure rule_of_list contains valid keys (fall back to defaults if necessary)
if not rule_of_list or not isinstance(rule_of_list, list):
rule_of_list = quote_keys_to_include # Default keys
# Make sure 'symbol' and 'name' are always included in the rule_of_list
if 'symbol' not in rule_of_list: if 'symbol' not in rule_of_list:
rule_of_list.append('symbol') rule_of_list.append('symbol')
if 'name' not in rule_of_list: if 'name' not in rule_of_list:
rule_of_list.append('name') rule_of_list.append('name')
# Categorize tickers and fetch data ticker_list = [t.upper() for t in data.tickerList]
for ticker in map(str.upper, ticker_list): combined_results = []
# Load quote data in parallel
quote_data = await asyncio.gather(*[load_json_async(f"json/quote/{ticker}.json") for ticker in ticker_list])
quote_dict = {ticker: data for ticker, data in zip(ticker_list, quote_data) if data}
# Categorize tickers and extract data
for ticker, quote in quote_dict.items():
ticker_type = 'stock' ticker_type = 'stock'
if ticker in etf_symbols: if ticker in etf_symbols:
ticker_type = 'etf' ticker_type = 'etf'
elif ticker in crypto_symbols: elif ticker in crypto_symbols:
ticker_type = 'crypto' ticker_type = 'crypto'
# Load quote data and filter to include only selected keys from rule_of_list filtered_quote = {key: quote.get(key) for key in rule_of_list if key in quote}
quote_dict = load_json(f"json/quote/{ticker}.json") filtered_quote['type'] = ticker_type
if quote_dict:
filtered_quote = {key: quote_dict.get(key) for key in rule_of_list if key in quote_dict}
filtered_quote['type'] = ticker_type # Include ticker type
combined_results.append(filtered_quote) combined_results.append(filtered_quote)
# Fetch and merge data from stock_screener_data
try: screener_keys = [key for key in rule_of_list if key not in ['volume', 'marketCap', 'changesPercentage', 'price', 'symbol', 'name']]
# Filter out the keys that need to be fetched from the screener if screener_keys:
screener_keys = [key for key in rule_of_list if key not in quote_keys_to_include] screener_dict = {item['symbol']: {k: v for k, v in item.items() if k in screener_keys} for item in stock_screener_data}
# Create a mapping of stock_screener_data based on symbol for fast lookup
screener_dict = {
item['symbol']: {key: item.get(key) for key in screener_keys if key in item}
for item in stock_screener_data
}
# Merge the filtered stock_screener_data into combined_results for non-quote keys
for result in combined_results: for result in combined_results:
symbol = result.get('symbol') symbol = result.get('symbol')
if symbol in screener_dict: if symbol in screener_dict:
result.update(screener_dict[symbol]) result.update(screener_dict[symbol])
except Exception as e: # Serialize and compress the response
print(f"An error occurred while merging data: {e}")
res = orjson.dumps(combined_results) res = orjson.dumps(combined_results)
compressed_data = gzip.compress(res) compressed_data = gzip.compress(res)
@ -1193,8 +1194,6 @@ async def get_indicator_data(data: IndicatorListData, api_key: str = Security(ge
media_type="application/json", media_type="application/json",
headers={"Content-Encoding": "gzip"} headers={"Content-Encoding": "gzip"}
) )
@app.post("/get-watchlist") @app.post("/get-watchlist")
async def get_watchlist(data: GetWatchList, api_key: str = Security(get_api_key)): async def get_watchlist(data: GetWatchList, api_key: str = Security(get_api_key)):
data = data.dict() data = data.dict()
@ -1208,13 +1207,6 @@ async def get_watchlist(data: GetWatchList, api_key: str = Security(get_api_key)
# Keys that should be read from the quote files if they are in rule_of_list # Keys that should be read from the quote files if they are in rule_of_list
quote_keys_to_include = ['volume', 'marketCap', 'changesPercentage', 'price', 'symbol', 'name'] quote_keys_to_include = ['volume', 'marketCap', 'changesPercentage', 'price', 'symbol', 'name']
def load_json(file_path):
try:
with open(file_path, 'rb') as file:
return orjson.loads(file.read())
except FileNotFoundError:
return None
# Ensure rule_of_list contains valid keys (fall back to defaults if necessary) # Ensure rule_of_list contains valid keys (fall back to defaults if necessary)
if not rule_of_list or not isinstance(rule_of_list, list): if not rule_of_list or not isinstance(rule_of_list, list):
rule_of_list = quote_keys_to_include # Default keys rule_of_list = quote_keys_to_include # Default keys