bugfixing
This commit is contained in:
parent e44a160b3e
commit 7c2a442903
@@ -1071,8 +1071,6 @@ async def stock_cash_flow(data: TickerData, api_key: str = Security(get_api_key)
     )


-
-
 @app.get("/economic-calendar")
 async def economic_calendar(api_key: str = Security(get_api_key)):

@@ -1095,7 +1093,7 @@ async def economic_calendar(api_key: str = Security(get_api_key)):
     compressed_data = gzip.compress(res)

     redis_client.set(cache_key, compressed_data)
-    redis_client.expire(cache_key, 3600 * 24)  # Set cache expiration time to 1 day
+    redis_client.expire(cache_key, 60 * 15)  # Set cache expiration time to 15 minutes

     return StreamingResponse(
         io.BytesIO(compressed_data),
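For reference, a minimal sketch of the compress-then-cache flow this hunk tunes, with the TTL dropped from one day to 15 minutes. The redis_client construction, the helper name cache_and_stream, and the response media type are assumptions for illustration, not part of the diff.

# Sketch only: gzip the serialized payload, cache it in Redis for 15 minutes,
# and stream the compressed bytes back to the client.
import gzip
import io

import redis
from fastapi.responses import StreamingResponse

redis_client = redis.Redis(host="localhost", port=6379, db=0)  # assumed configuration

def cache_and_stream(cache_key: str, payload: bytes) -> StreamingResponse:
    compressed_data = gzip.compress(payload)
    redis_client.set(cache_key, compressed_data)
    redis_client.expire(cache_key, 60 * 15)  # expire after 15 minutes
    return StreamingResponse(
        io.BytesIO(compressed_data),
        media_type="application/json",
        headers={"Content-Encoding": "gzip"},
    )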
@@ -1250,41 +1250,26 @@ async def get_stock_splits_calendar(con,symbols)
     return filtered_data



 async def get_economic_calendar():
     ny_tz = pytz.timezone('America/New_York')
     today = datetime.now(ny_tz)

-    start_date = today - timedelta(weeks=3)
-    start_date = start_date - timedelta(days=(start_date.weekday() - 0) % 7)  # Align to Monday
-
-    end_date = today + timedelta(weeks=3)
-    end_date = end_date + timedelta(days=(4 - end_date.weekday()) % 7)  # Align to Friday
-
-    all_data = []
-    current_date = start_date
+    start_date = (today - timedelta(weeks=4)).strftime("%Y-%m-%d")
+    end_date = (today + timedelta(weeks=4)).strftime("%Y-%m-%d")

     async with aiohttp.ClientSession() as session:
-        while current_date <= end_date:
-            date_str = current_date.strftime("%Y-%m-%d")  # Convert date to string for API request
-
-            url = f"https://financialmodelingprep.com/api/v3/economic_calendar?from={date_str}&to={date_str}&limit=2000&apikey={api_key}"
-
-            try:
-                async with session.get(url) as response:
-                    data = await response.json()
-                    if data:  # Check if data is not empty
-                        all_data.extend(data)
-                        print(f"Fetched data for {date_str}: {len(data)} events")
-            except Exception as e:
-                print(f"Error fetching data for {date_str}: {e}")
-
-            current_date += timedelta(days=1)  # Move to next day
+        url = f"https://financialmodelingprep.com/api/v3/economic_calendar?from={start_date}&to={end_date}&apikey={api_key}"
+        async with session.get(url) as response:
+            data = await response.json()
+            print(f"Fetched data: {len(data)} events")

     filtered_data = []
-    for item in all_data:
+    # Iterate over the fetched data directly
+    for item in data:
         try:
             matching_country = next((c['short'] for c in country_list if c['long'] == item['country']), None)
+            print(matching_country)
             # Special case for USA
             if item['country'] == 'USA':
                 country_code = 'us'
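For reference, a self-contained sketch of the new single-request fetch over the 4-weeks-back / 4-weeks-ahead window that replaces the day-by-day loop. The FMP_API_KEY environment variable and the standalone function name are assumptions for illustration; the endpoint URL and date formatting are taken from the hunk above.

# Sketch only: one request for the whole window instead of one request per day.
import os
from datetime import datetime, timedelta

import aiohttp
import pytz

api_key = os.environ.get("FMP_API_KEY", "")  # assumed source of the key

async def fetch_economic_calendar() -> list:
    ny_tz = pytz.timezone('America/New_York')
    today = datetime.now(ny_tz)
    start_date = (today - timedelta(weeks=4)).strftime("%Y-%m-%d")
    end_date = (today + timedelta(weeks=4)).strftime("%Y-%m-%d")
    url = (
        "https://financialmodelingprep.com/api/v3/economic_calendar"
        f"?from={start_date}&to={end_date}&apikey={api_key}"
    )
    async with aiohttp.ClientSession() as session:
        async with session.get(url) as response:
            data = await response.json()
    return data or []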
@@ -1294,7 +1279,6 @@ async def get_economic_calendar():
                 continue

             impact = item.get('impact', None)
-            importance = 1
             if impact == 'High':
                 importance = 3
             elif impact == 'Medium':
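The dropped importance = 1 default is safe as long as every branch of the chain assigns a value, which holds given the else branch in the next hunk. A compact equivalent, assuming the Medium branch (outside the hunks shown) maps to 2:

# Sketch only: equivalent impact -> importance mapping with a default of 1.
# The value for 'Medium' is an assumption; that branch is not visible in this diff.
IMPACT_TO_IMPORTANCE = {'High': 3, 'Medium': 2, 'Low': 1}

def importance_from_impact(impact):
    return IMPACT_TO_IMPORTANCE.get(impact, 1)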
@@ -1302,12 +1286,12 @@ async def get_economic_calendar():
             else:
                 importance = 1

-            dt = datetime.strptime(item['date'], "%Y-%m-%d %H:%M:%S")  # Convert to datetime object
+            dt = datetime.strptime(item['date'], "%Y-%m-%d %H:%M:%S")
             filtered_data.append({
                 'countryCode': country_code,
                 'country': item['country'],
-                'time': dt.strftime("%H:%M"),  # Extract hour and minute
-                'date': dt.strftime("%Y-%m-%d"),  # Extract year, month, day
+                'time': dt.strftime("%H:%M"),
+                'date': dt.strftime("%Y-%m-%d"),
                 'prior': item['previous'],
                 'consensus': item['estimate'],
                 'actual': item['actual'],
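The changes above only drop inline comments; the timestamp handling itself is unchanged. It splits the event's "%Y-%m-%d %H:%M:%S" string into the separate 'date' and 'time' fields stored in filtered_data, roughly as in this sketch (helper name and the sample value in the comment are illustrative):

# Sketch only: split an event timestamp into the 'date' and 'time' strings.
from datetime import datetime

def split_event_timestamp(raw: str) -> tuple[str, str]:
    dt = datetime.strptime(raw, "%Y-%m-%d %H:%M:%S")  # e.g. "2024-05-01 08:30:00"
    return dt.strftime("%Y-%m-%d"), dt.strftime("%H:%M")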
@@ -1315,14 +1299,11 @@ async def get_economic_calendar():
                 'event': item['event'],
             })

-
-
         except Exception as e:
             print(f"Error processing item: {e}")

     return filtered_data

-
def replace_representative(office):
    replacements = {
        'Moody, Ashley B. (Senator)': 'Ashley Moody',
@@ -1676,7 +1657,12 @@ async def save_json_files():
     etf_symbols = [row[0] for row in etf_cursor.fetchall()]


+    economic_list = await get_economic_calendar()
+    if len(economic_list) > 0:
+        with open(f"json/economic-calendar/calendar.json", 'w') as file:
+            ujson.dump(economic_list, file)

+    '''
     stock_screener_data = await get_stock_screener(con)
     with open(f"json/stock-screener/data.json", 'w') as file:
         ujson.dump(stock_screener_data, file)
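The economic-calendar dump now runs earlier in save_json_files (the old copy is removed in the next hunk). A standalone sketch of that write; the makedirs guard and function name are assumptions added on top of the diff's fixed relative path:

# Sketch only: persist the fetched calendar only when the fetch returned events.
import os

import ujson

def save_economic_calendar(economic_list: list,
                           path: str = "json/economic-calendar/calendar.json") -> None:
    if len(economic_list) > 0:
        os.makedirs(os.path.dirname(path), exist_ok=True)  # assumed guard, not in the diff
        with open(path, 'w') as file:
            ujson.dump(economic_list, file)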
@@ -1686,11 +1672,6 @@ async def save_json_files():
     with open(f"json/congress-trading/rss-feed/data.json", 'w') as file:
         ujson.dump(data, file)

-    economic_list = await get_economic_calendar()
-    if len(economic_list) > 0:
-        with open(f"json/economic-calendar/calendar.json", 'w') as file:
-            ujson.dump(economic_list, file)
-
     data = await get_ipo_calendar(con, symbols)
     with open(f"json/ipo-calendar/data.json", 'w') as file:
@@ -1708,7 +1689,7 @@ async def save_json_files():
     data = await etf_providers(etf_con, etf_symbols)
     with open(f"json/all-etf-providers/data.json", 'w') as file:
         ujson.dump(data, file)
-
+    '''


