commit d3efac141d
parent 4388739a17

    update
@@ -255,12 +255,14 @@ class ETFDatabase:
                     for key in fundamental_data
                 }
 
+                '''
                 if len(json.loads(fundamental_data['holding'])) == 0:
                     self.cursor.execute("DELETE FROM etfs WHERE symbol = ?", (symbol,))
                     #self.cursor.execute("DELETE FROM symbol WHERE symbol = ?", (symbol,))
                     self.conn.commit()
                     print(f"Delete {symbol}")
                     return
+                '''
 
                 for column, (column_type, value) in column_definitions.items():
                     if column not in columns and column_type:
@@ -392,5 +394,10 @@ async def fetch_tickers():
 db = ETFDatabase('backup_db/etf.db')
 loop = asyncio.get_event_loop()
 all_tickers = loop.run_until_complete(fetch_tickers())
+'''
+for item in all_tickers:
+    if item['symbol'] == 'GLD':
+        print(item)
+'''
 loop.run_until_complete(db.save_etfs(all_tickers))
 db.close_connection()
@@ -27,7 +27,6 @@ async def get_data(ticker, con):
     analyst_estimates = ujson.loads(data['analyst_estimates'].iloc[0])
     income = ujson.loads(data['income'].iloc[0])
     combined_data = defaultdict(dict)
-
     #Sum up quarter results
     eps_sums = {}
     revenue_sums = {}
@@ -5,8 +5,14 @@ import asyncio
 import pandas as pd
 from tqdm import tqdm
 import orjson
+from GetStartEndDate import GetStartEndDate
 from collections import defaultdict
 
+import os
+from dotenv import load_dotenv
+load_dotenv()
+api_key = os.getenv('FMP_API_KEY')
+
 with open(f"json/stock-screener/data.json", 'rb') as file:
     stock_screener_data = orjson.loads(file.read())
 
@@ -14,20 +20,30 @@ with open(f"json/stock-screener/data.json", 'rb') as file:
 stock_screener_data_dict = {item['symbol']: item for item in stock_screener_data}
 
 
-def save_as_json(data):
-    with open(f"json/industry/overview.json", 'w') as file:
+date, _ = GetStartEndDate().run()
+date = date.strftime('%Y-%m-%d')
+
+def save_as_json(data, filename):
+    with open(f"json/industry/{filename}.json", 'w') as file:
         ujson.dump(data, file)
 
 
-#async def get_data():
-def run():
+# Function to fetch data from the API
+async def get_data(session, class_type='sector'):
+    if class_type == 'sector':
+        url = f"https://financialmodelingprep.com/api/v4/sector_price_earning_ratio?date={date}&exchange=NYSE&apikey={api_key}"
+    else:
+        url = f"https://financialmodelingprep.com/api/v4/industry_price_earning_ratio?date={date}&exchange=NYSE&apikey={api_key}"
+    async with session.get(url) as response:
+        data = await response.json()
+        return data
+
+
+async def run():
     # Initialize a dictionary to store stock count, market cap, and other totals for each industry
     sector_industry_data = defaultdict(lambda: defaultdict(lambda: {
         'numStocks': 0,
         'totalMarketCap': 0.0,
-        'totalPE': 0.0,
         'totalDividendYield': 0.0,
         'totalNetIncome': 0.0,
         'totalRevenue': 0.0,
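For context, the new get_data() helper can be exercised on its own. The following is a minimal sketch, not part of the diff: it assumes FMP_API_KEY is set in the environment and uses a made-up date instead of the GetStartEndDate() result the script derives.

# Standalone sketch of the new FMP sector/industry P/E fetcher (illustration only).
# The URLs mirror the ones in the diff above; the date value is a hypothetical example.
import asyncio
import os

import aiohttp

api_key = os.getenv('FMP_API_KEY')
date = '2024-01-02'  # hypothetical trading day; the real script derives it from GetStartEndDate()

async def get_data(session, class_type='sector'):
    if class_type == 'sector':
        url = f"https://financialmodelingprep.com/api/v4/sector_price_earning_ratio?date={date}&exchange=NYSE&apikey={api_key}"
    else:
        url = f"https://financialmodelingprep.com/api/v4/industry_price_earning_ratio?date={date}&exchange=NYSE&apikey={api_key}"
    async with session.get(url) as response:
        return await response.json()

async def main():
    async with aiohttp.ClientSession() as session:
        pe_sector = await get_data(session, class_type='sector')
        pe_industry = await get_data(session, class_type='industry')
    # Each response is expected to be a list of records carrying a 'sector' or
    # 'industry' key plus a 'pe' value, which is what the matching code below relies on.
    print(len(pe_sector), len(pe_industry))

asyncio.run(main())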
@@ -44,7 +60,6 @@ def run():
         sector = stock.get('sector')
         industry = stock.get('industry')
         market_cap = stock.get('marketCap')
-        pe = stock.get('pe')
         dividend_yield = stock.get('dividendYield')
         net_income = stock.get('netIncome')
         revenue = stock.get('revenue')
@@ -57,11 +72,6 @@ def run():
             sector_industry_data[sector][industry]['numStocks'] += 1
             sector_industry_data[sector][industry]['totalMarketCap'] += float(market_cap)
 
-            # Accumulate PE ratio if available
-            if pe is not None:
-                sector_industry_data[sector][industry]['totalPE'] += float(pe)
-                sector_industry_data[sector][industry]['peCount'] += 1
-
             # Accumulate dividend yield if available
             if dividend_yield is not None:
                 sector_industry_data[sector][industry]['totalDividendYield'] += float(dividend_yield)
@@ -83,19 +93,18 @@ def run():
                     sector_industry_data[sector][industry]['change1YCount'] += 1
 
     # Prepare the final data in the requested format
-    result = {}
+    overview = {}
 
     for sector, industries in sector_industry_data.items():
         # Sort industries by stock count in descending order
         sorted_industries = sorted(industries.items(), key=lambda x: x[1]['numStocks'], reverse=True)
 
-        # Add sorted industries with averages to the result for each sector
-        result[sector] = [
+        # Add sorted industries with averages to the overview for each sector
+        overview[sector] = [
             {
                 'industry': industry,
                 'numStocks': data['numStocks'],
                 'totalMarketCap': data['totalMarketCap'],
-                'pe': round((data['totalMarketCap'] / data['totalNetIncome']),2) if data['totalNetIncome'] > 0 else None,
                 'avgDividendYield': round((data['totalDividendYield'] / data['dividendCount']),2) if data['dividendCount'] > 0 else None,
                 'profitMargin': round((data['totalNetIncome'] / data['totalRevenue'])*100,2) if data['totalRevenue'] > 0 else None,
                 'avgChange1M': round((data['totalChange1M'] / data['change1MCount']),2) if data['change1MCount'] > 0 else None,
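Note that profitMargin (and the pe field being removed above) is a ratio of sums rather than a mean of per-stock ratios: with two stocks reporting net income of 8 and 1 against revenue of 40 and 10, profitMargin = round((8 + 1) / (40 + 10) * 100, 2) = 18.0, not the 15.0 that averaging the two per-stock margins (20% and 10%) would give.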
@@ -103,9 +112,87 @@ def run():
             } for industry, data in sorted_industries
         ]
 
-    print(result)
-
-    save_as_json(result)
-
-
-run()
+    # Assign the P/E values from pe_industry to the overview
+    async with aiohttp.ClientSession() as session:
+        pe_industry = await get_data(session, class_type='industry')
+        for sector, industries in overview.items():
+            for industry_data in industries:
+                industry_name = industry_data['industry']
+
+                # Look for a matching industry in pe_industry to assign the P/E ratio
+                matching_pe = next((item['pe'] for item in pe_industry if item['industry'] == industry_name), None)
+
+                if matching_pe is not None:
+                    industry_data['pe'] = round(float(matching_pe), 2)
+
+    save_as_json(overview, filename='overview')
+
+    industry_overview = []
+
+    for key in overview:
+        industry_overview.extend(overview[key])
+
+    industry_overview = sorted(industry_overview, key=lambda x: x['numStocks'], reverse=True)
+
+    save_as_json(industry_overview, filename='industry-overview')
+
+
+    sector_overview = []
+
+    for sector, industries in sector_industry_data.items():
+        total_market_cap = 0
+        total_stocks = 0
+        total_dividend_yield = 0
+        total_net_income = 0
+        total_revenue = 0
+        total_change_1m = 0
+        total_change_1y = 0
+
+        dividend_count = 0
+        change_1m_count = 0
+        change_1y_count = 0
+
+        for industry, data in industries.items():
+            # Sum up values across industries for the sector summary
+            total_market_cap += data['totalMarketCap']
+            total_stocks += data['numStocks']
+            total_net_income += data['totalNetIncome']
+            total_revenue += data['totalRevenue']
+            total_change_1m += data['totalChange1M']
+            total_change_1y += data['totalChange1Y']
+
+            dividend_count += data['dividendCount']
+            change_1m_count += data['change1MCount']
+            change_1y_count += data['change1YCount']
+            total_dividend_yield += data['totalDividendYield']
+
+        # Calculate averages and profit margin for the sector
+        sector_overview.append({
+            'sector': sector,
+            'numStocks': total_stocks,
+            'totalMarketCap': total_market_cap,
+            'avgDividendYield': round((total_dividend_yield / dividend_count), 2) if dividend_count > 0 else None,
+            'profitMargin': round((total_net_income / total_revenue) * 100, 2) if total_revenue > 0 else None,
+            'avgChange1M': round((total_change_1m / change_1m_count), 2) if change_1m_count > 0 else None,
+            'avgChange1Y': round((total_change_1y / change_1y_count), 2) if change_1y_count > 0 else None
+        })
+
+
+    # Assign the P/E values from pe_sector to the sector overview
+    async with aiohttp.ClientSession() as session:
+        pe_sector = await get_data(session, class_type='sector')
+        # Loop through sector_overview to update P/E ratios from pe_sector
+        for sector_data in sector_overview:
+            sector_name = sector_data['sector']
+
+            # Find the matching sector in pe_sector and assign the P/E ratio
+            matching_pe = next((item['pe'] for item in pe_sector if item['sector'] == sector_name), None)
+
+            if matching_pe is not None:
+                sector_data['pe'] = round(float(matching_pe), 2)
+
+    sector_overview = sorted(sector_overview, key=lambda x: x['numStocks'], reverse=True)
+
+    save_as_json(sector_overview, filename='sector-overview')
+
+
+loop = asyncio.get_event_loop()
+sector_results = loop.run_until_complete(run())
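After these changes run() writes three files under json/industry/: overview.json (industry records grouped by sector), industry-overview.json (the same records flattened and sorted by numStocks), and sector-overview.json (one roll-up record per sector). A sector-overview.json record should look roughly like the sketch below; the field names come from the diff, the values are invented for illustration, and pe is only present when the FMP sector response contains a matching sector.

# Illustrative record only: field names from the diff above, values made up.
sample_sector_record = {
    'sector': 'Technology',
    'numStocks': 612,
    'totalMarketCap': 18_400_000_000_000.0,
    'avgDividendYield': 1.42,
    'profitMargin': 21.7,
    'avgChange1M': 2.3,
    'avgChange1Y': 28.9,
    'pe': 34.12,
}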
@@ -82,11 +82,13 @@ async def get_data(ticker, con):
 async def run():
 
     con = sqlite3.connect('stocks.db')
+    etf_con = sqlite3.connect('etf.db')
+
     cursor = con.cursor()
     cursor.execute("PRAGMA journal_mode = wal")
     cursor.execute("SELECT DISTINCT symbol FROM stocks")
     stock_symbols = [row[0] for row in cursor.fetchall()]
 
 
     counter = 0
 
app/main.py: 60 lines changed
@@ -3607,9 +3607,9 @@ async def get_economic_indicator(api_key: str = Security(get_api_key)):
             headers={"Content-Encoding": "gzip"}
         )
 
-@app.get("/industry-overview")
+@app.get("/sector-industry-overview")
 async def get_industry_overview(api_key: str = Security(get_api_key)):
-    cache_key = f"industry_overview"
+    cache_key = f"sector-industry-overview"
     cached_result = redis_client.get(cache_key)
     if cached_result:
         return StreamingResponse(
@@ -3635,6 +3635,62 @@ async def get_industry_overview(api_key: str = Security(get_api_key)):
             headers={"Content-Encoding": "gzip"}
         )
 
+@app.get("/sector-overview")
+async def get_sector_overview(api_key: str = Security(get_api_key)):
+    cache_key = f"sector-overview"
+    cached_result = redis_client.get(cache_key)
+    if cached_result:
+        return StreamingResponse(
+            io.BytesIO(cached_result),
+            media_type="application/json",
+            headers={"Content-Encoding": "gzip"}
+        )
+    try:
+        with open(f"json/industry/sector-overview.json", 'rb') as file:
+            res = orjson.loads(file.read())
+    except:
+        res = []
+
+    data = orjson.dumps(res)
+    compressed_data = gzip.compress(data)
+
+    redis_client.set(cache_key, compressed_data)
+    redis_client.expire(cache_key, 3600*3600)
+
+    return StreamingResponse(
+        io.BytesIO(compressed_data),
+        media_type="application/json",
+        headers={"Content-Encoding": "gzip"}
+    )
+
+@app.get("/industry-overview")
+async def get_industry_overview(api_key: str = Security(get_api_key)):
+    cache_key = f"industry-overview"
+    cached_result = redis_client.get(cache_key)
+    if cached_result:
+        return StreamingResponse(
+            io.BytesIO(cached_result),
+            media_type="application/json",
+            headers={"Content-Encoding": "gzip"}
+        )
+    try:
+        with open(f"json/industry/industry-overview.json", 'rb') as file:
+            res = orjson.loads(file.read())
+    except:
+        res = []
+
+    data = orjson.dumps(res)
+    compressed_data = gzip.compress(data)
+
+    redis_client.set(cache_key, compressed_data)
+    redis_client.expire(cache_key, 3600*3600)
+
+    return StreamingResponse(
+        io.BytesIO(compressed_data),
+        media_type="application/json",
+        headers={"Content-Encoding": "gzip"}
+    )
+
 @app.post("/next-earnings")
 async def get_next_earnings(data:TickerData, api_key: str = Security(get_api_key)):
     ticker = data.ticker
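For reference, a minimal client-side sketch of the new and renamed endpoints, not part of the diff. It assumes the API runs locally on port 8000 and that get_api_key (defined elsewhere in app/main.py) reads the key from an X-API-KEY header; the real base URL and header name may differ.

# Hypothetical client sketch: BASE_URL, the port, and the X-API-KEY header name are
# assumptions; only the endpoint paths come from the diff above.
import requests

BASE_URL = "http://localhost:8000"
headers = {"X-API-KEY": "your-api-key"}

for path in ("/sector-overview", "/industry-overview", "/sector-industry-overview"):
    response = requests.get(f"{BASE_URL}{path}", headers=headers)
    # The endpoints stream gzip-compressed JSON with Content-Encoding: gzip;
    # requests decompresses this transparently, so .json() can be used directly.
    print(path, response.status_code, len(response.json()))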