optimize backend

This commit is contained in:
MuslemRahimi 2025-02-23 11:50:33 +01:00
parent 70532a4c86
commit 7742eae2be
2 changed files with 21 additions and 10 deletions

View File

@ -62,6 +62,8 @@ PRIORITY_STRATEGIES = {
'name_contains': 5
}
client = httpx.AsyncClient(http2=True, timeout=10.0)
def calculate_score(item: Dict, search_query: str) -> int:
name_lower = item['name'].lower()
symbol_lower = item['symbol'].lower()
@ -4269,8 +4271,13 @@ async def get_data(data:TickerData, api_key: str = Security(get_api_key)):
async def fetch_data(client, endpoint, ticker):
    """POST the ticker to one backend endpoint and return {endpoint: payload}.

    Args:
        client: an httpx.AsyncClient used for the request.
        endpoint: path suffix appended to API_URL.
        ticker: symbol sent as the JSON body.

    Errors never propagate: any failure (network, non-2xx status, bad JSON)
    is returned in-band as {endpoint: {"error": str(e)}} so that an
    asyncio.gather over many endpoints is not aborted by one bad response.
    """
    url = f"{API_URL}{endpoint}"
    try:
        # Single request per call — the diff residue had the old one-line
        # client.post() immediately followed by the new multiline call,
        # which would have POSTed twice and discarded the first response.
        response = await client.post(
            url,
            json={"ticker": ticker},
            headers={"X-API-KEY": STOCKNEAR_API_KEY},
        )
        response.raise_for_status()
        # Parse the JSON response
        return {endpoint: response.json()}
    except Exception as e:
        return {endpoint: {"error": str(e)}}
@ -4280,13 +4287,13 @@ async def get_stock_data(data:BulkList, api_key: str = Security(get_api_key)):
endpoints = data.endpoints
ticker = data.ticker.upper()
async with httpx.AsyncClient() as client:
# Create tasks for each endpoint concurrently.
tasks = [fetch_data(client, endpoint, ticker) for endpoint in endpoints]
results = await asyncio.gather(*tasks)
# Combine results
data = {k: v for result in results for k, v in result.items()}
return data
# Combine the results into a single dictionary.
combined_data = {k: v for result in results for k, v in result.items()}
return combined_data
@app.get("/newsletter")
@ -4298,3 +4305,6 @@ async def get_newsletter():
res = []
return res
@app.on_event("shutdown")
async def shutdown_event():
await client.aclose()

View File

@ -21,6 +21,7 @@ redis[hiredis]
asyncio
aiohttp
httpx
httpx[http2]
prophet
schedule
pocketbase