slice historical price data for faster loading time
parent 17861ccda5
commit 9010c1c231
@@ -13,19 +13,6 @@ import os
 load_dotenv()
 api_key = os.getenv('FMP_API_KEY')
 
-# Define a function to remove duplicates based on a key
-def remove_duplicates(data, key):
-    seen = set()
-    new_data = []
-    for item in data:
-        if item[key] not in seen:
-            seen.add(item[key])
-            new_data.append(item)
-    return new_data
-
-async def save_price_data(symbol, data):
-    async with aiofiles.open(f"json/historical-price/{symbol}.json", 'w') as file:
-        await file.write(ujson.dumps(data))
 
 async def fetch_and_save_symbols_data(symbols, etf_symbols, crypto_symbols, session):
     tasks = []
@@ -39,10 +26,9 @@ async def fetch_and_save_symbols_data(symbols, etf_symbols, crypto_symbols, sess
 
             task = asyncio.create_task(get_historical_data(symbol, query_con, session))
             tasks.append(task)
-    responses = await asyncio.gather(*tasks)
-
-    for symbol, response in zip(symbols, responses):
-        await save_price_data(symbol, response)
+    await asyncio.gather(*tasks)
 
 
 async def get_historical_data(ticker, query_con, session):
     try:
@@ -70,23 +56,29 @@ async def get_historical_data(ticker, query_con, session):
         df_1y = pd.read_sql_query(query, query_con, params=(start_date_1y, end_date)).round(2).rename(columns={"date": "time"})
         df_max = pd.read_sql_query(query, query_con, params=(start_date_max, end_date)).round(2).rename(columns={"date": "time"})
 
-        res = {
-            '1W': ujson.loads(data[0]) if data else [],
-            '1M': ujson.loads(data[1]) if len(data) > 1 else [],
-            '6M': ujson.loads(df_6m.to_json(orient="records")),
-            '1Y': ujson.loads(df_1y.to_json(orient="records")),
-            'MAX': ujson.loads(df_max.to_json(orient="records"))
-        }
+        async with aiofiles.open(f"json/historical-price/one-week/{ticker}.json", 'w') as file:
+            res = ujson.loads(data[0]) if data else []
+            await file.write(ujson.dumps(res))
+
+        async with aiofiles.open(f"json/historical-price/one-month/{ticker}.json", 'w') as file:
+            res = ujson.loads(data[1]) if len(data) > 1 else []
+            await file.write(ujson.dumps(res))
+
+        async with aiofiles.open(f"json/historical-price/six-months/{ticker}.json", 'w') as file:
+            res = ujson.loads(df_6m.to_json(orient="records"))
+            await file.write(ujson.dumps(res))
+
+        async with aiofiles.open(f"json/historical-price/one-year/{ticker}.json", 'w') as file:
+            res = ujson.loads(df_1y.to_json(orient="records"))
+            await file.write(ujson.dumps(res))
+
+        async with aiofiles.open(f"json/historical-price/max/{ticker}.json", 'w') as file:
+            res = ujson.loads(df_max.to_json(orient="records"))
+            await file.write(ujson.dumps(res))
 
     except Exception as e:
         print(f"Failed to fetch data for {ticker}: {e}")
-        res = {
-            '1W': [],
-            '1M': [],
-            '6M': [],
-            '1Y': [],
-            'MAX': []
-        }
-    return res
 
 async def run():
     total_symbols = []
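
Note: the five write blocks added above are intentionally repetitive, one per time slice. As a minimal sketch only (not part of this commit), the same idea can be expressed as a loop over a period-to-directory map; the helper name write_price_slices and the slices argument are hypothetical, and the record lists are assumed to have been prepared as in get_historical_data:

import asyncio
import aiofiles
import ujson

# Maps each time slice to the directory layout this commit writes to.
PERIOD_DIRS = {
    '1W': 'one-week',
    '1M': 'one-month',
    '6M': 'six-months',
    '1Y': 'one-year',
    'MAX': 'max',
}

async def write_price_slices(ticker, slices):
    # `slices` is assumed to be a dict like {'1W': [...], '1M': [...], ...}
    # holding the already-serialisable record lists for one ticker.
    for period, records in slices.items():
        path = f"json/historical-price/{PERIOD_DIRS[period]}/{ticker}.json"
        async with aiofiles.open(path, 'w') as file:
            await file.write(ujson.dumps(records))

# Example usage with placeholder data:
# asyncio.run(write_price_slices("AAPL", {'1W': [], '1M': []}))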

app/main.py (14 changes)
@@ -157,6 +157,11 @@ async def openapi(username: str = Depends(get_current_username)):
 class TickerData(BaseModel):
     ticker: str
 
 
+class HistoricalPrice(BaseModel):
+    ticker: str
+    timePeriod: str
+
+
 class AnalystId(BaseModel):
     analystId: str
 
@@ -320,10 +325,11 @@ async def rating_stock(data: TickerData):
     return res
 
 @app.post("/historical-price")
-async def get_stock(data: TickerData):
+async def get_stock(data: HistoricalPrice):
     ticker = data.ticker.upper()
+    time_period = data.timePeriod
 
-    cache_key = f"historical-price-{ticker}"
+    cache_key = f"historical-price-{ticker}-{time_period}"
     cached_result = redis_client.get(cache_key)
     if cached_result:
         return StreamingResponse(
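
With the new timePeriod field, the Redis cache is keyed per ticker and per slice instead of per ticker, so requesting a different period cannot return a stale full-history payload. Illustrative values only; "AAPL" and the period string are assumptions, chosen to match the directory names the fetch script writes:

# Illustrative example of the key change above (values are assumed).
ticker, time_period = "AAPL", "one-year"

old_key = f"historical-price-{ticker}"                 # before: one entry per ticker
new_key = f"historical-price-{ticker}-{time_period}"   # after: one entry per ticker and period
print(old_key, new_key)  # historical-price-AAPL historical-price-AAPL-one-year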
@@ -333,7 +339,7 @@ async def get_stock(data: TickerData):
         )
 
     try:
-        with open(f"json/historical-price/{ticker}.json", 'r') as file:
+        with open(f"json/historical-price/{time_period}/{ticker}.json", 'r') as file:
             res = ujson.load(file)
     except:
         res = []
@@ -349,6 +355,8 @@ async def get_stock(data: TickerData):
         headers={"Content-Encoding": "gzip"}
     )
 
+
+
 @app.post("/one-day-price")
 async def get_stock(data: TickerData):
     data = data.dict()
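
A minimal sketch of calling the reworked endpoint after this change. The base URL is hypothetical, and the accepted timePeriod values are an assumption drawn from the directory names the fetch script now writes (one-week, one-month, six-months, one-year, max); the route streams gzip-encoded JSON, which requests decompresses transparently:

import requests

BASE_URL = "http://localhost:8000"  # hypothetical host; not part of this diff

# timePeriod is assumed to match one of the slice directories on disk.
payload = {"ticker": "AAPL", "timePeriod": "one-year"}
resp = requests.post(f"{BASE_URL}/historical-price", json=payload)

# requests honours the Content-Encoding header, so .json() yields the
# decoded record list for the requested ticker and period.
prices = resp.json()
print(f"{len(prices)} rows for AAPL over one year")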