Bugfix: market movers

This commit is contained in:
MuslemRahimi 2024-11-20 11:30:20 +01:00
parent cb0f763d28
commit 9af776be5b
3 changed files with 74 additions and 51 deletions

View File

@ -1,5 +1,6 @@
from datetime import date, datetime, timedelta, time from datetime import date, datetime, timedelta, time
import ujson import ujson
import orjson
import sqlite3 import sqlite3
import pandas as pd import pandas as pd
import asyncio import asyncio
@ -99,7 +100,7 @@ async def get_todays_data(ticker):
df_1d = pd.concat([df_1d, remaining_df[1:: ]]) df_1d = pd.concat([df_1d, remaining_df[1:: ]])
#To-do FutureWarning: The behavior of DataFrame concatenation with empty or all-NA entries is deprecated. In a future version, this will no longer exclude empty or all-NA columns when determining the result dtypes. To retain the old behavior, exclude the relevant entries before the concat operation. #To-do FutureWarning: The behavior of DataFrame concatenation with empty or all-NA entries is deprecated. In a future version, this will no longer exclude empty or all-NA columns when determining the result dtypes. To retain the old behavior, exclude the relevant entries before the concat operation.
df_1d = ujson.loads(df_1d.to_json(orient="records")) df_1d = orjson.loads(df_1d.to_json(orient="records"))
except: except:
df_1d = [] df_1d = []
return df_1d return df_1d
@ -110,12 +111,23 @@ async def get_jsonparsed_data(session, url):
return data return data
async def get_quote_of_stocks(ticker_list): async def get_quote_of_stocks(ticker_list):
'''
ticker_str = ','.join(ticker_list) ticker_str = ','.join(ticker_list)
async with aiohttp.ClientSession() as session: async with aiohttp.ClientSession() as session:
url = f"https://financialmodelingprep.com/api/v3/quote/{ticker_str}?apikey={api_key}" url = f"https://financialmodelingprep.com/api/v3/quote/{ticker_str}?apikey={api_key}"
async with session.get(url) as response: async with session.get(url) as response:
df = await response.json() df = await response.json()
return df '''
res_list = []
for symbol in ticker_list:
try:
with open(f"json/quote/{symbol}.json") as file:
data = orjson.loads(file.read())
res_list.append(data)
except:
pass
return res_list
def add_rank(data): def add_rank(data):
for key in data: for key in data:
@ -264,10 +276,10 @@ async def get_gainer_loser_active_stocks(symbols):
unique_symbols = set() unique_symbols = set()
# Iterate through time periods, categories, and symbols # Iterate through time periods, categories, and symbols
for time_period in data.keys(): for category in data.keys():
for category in data[time_period].keys(): for time_period in data[category].keys():
# Add rank and process symbols # Add rank and process symbols
for index, stock_data in enumerate(data[time_period][category], start=1): for index, stock_data in enumerate(data[category][time_period], start=1):
stock_data['rank'] = index # Add rank field stock_data['rank'] = index # Add rank field
symbol = stock_data["symbol"] symbol = stock_data["symbol"]
unique_symbols.add(symbol) unique_symbols.add(symbol)
@ -277,13 +289,14 @@ async def get_gainer_loser_active_stocks(symbols):
#Get the latest quote of all unique symbol and map it back to the original data list to update all values #Get the latest quote of all unique symbol and map it back to the original data list to update all values
'''
latest_quote = await get_quote_of_stocks(unique_symbols_list) latest_quote = await get_quote_of_stocks(unique_symbols_list)
# Updating values in the data list based on matching symbols from the quote list # Updating values in the data list based on matching symbols from the quote list
for time_period in data.keys(): for category in data.keys():
# Only proceed if the time period is "1D" # Only proceed if the time period is "1D"
if time_period == "1D": for time_period in data[category].keys():
for category in data[time_period].keys(): if time_period != "1D":
for stock_data in data[time_period][category]: for stock_data in data[category][time_period]:
symbol = stock_data["symbol"] symbol = stock_data["symbol"]
quote_stock = next((item for item in latest_quote if item["symbol"] == symbol), None) quote_stock = next((item for item in latest_quote if item["symbol"] == symbol), None)
if quote_stock: if quote_stock:
@ -291,7 +304,8 @@ async def get_gainer_loser_active_stocks(symbols):
stock_data['changesPercentage'] = quote_stock['changesPercentage'] stock_data['changesPercentage'] = quote_stock['changesPercentage']
stock_data['marketCap'] = quote_stock['marketCap'] stock_data['marketCap'] = quote_stock['marketCap']
stock_data['volume'] = quote_stock['volume'] stock_data['volume'] = quote_stock['volume']
'''
return data return data
@ -316,18 +330,18 @@ async def get_pre_after_market_movers(symbols):
try: try:
# Load the main quote JSON file # Load the main quote JSON file
with open(f"json/quote/{symbol}.json", "r") as file: with open(f"json/quote/{symbol}.json", "r") as file:
data = ujson.load(file) data = orjson.loads(file.read())
market_cap = int(data.get('marketCap', 0)) market_cap = int(data.get('marketCap', 0))
name = data.get('name',None) name = data.get('name',None)
if market_cap >= market_cap_threshold: if market_cap >= market_cap_threshold:
try: try:
with open(f"json/pre-post-quote/{symbol}.json", "r") as file: with open(f"json/pre-post-quote/{symbol}.json", "r") as file:
pre_post_data = ujson.load(file) pre_post_data = orjson.loads(file.read())
price = pre_post_data.get("price", None) price = pre_post_data.get("price", None)
changes_percentage = pre_post_data.get("changesPercentage", None) changes_percentage = pre_post_data.get("changesPercentage", None)
with open(f"json/one-day-price/{symbol}.json", 'rb') as file: with open(f"json/one-day-price/{symbol}.json", 'rb') as file:
one_day_price = ujson.load(file) one_day_price = orjson.loads(file.read())
# Filter out entries where 'close' is None # Filter out entries where 'close' is None
filtered_prices = [price for price in one_day_price if price['close'] is not None] filtered_prices = [price for price in one_day_price if price['close'] is not None]
@ -394,23 +408,21 @@ try:
#symbols = [symbol for symbol in symbols if symbol != "STEC"] #symbols = [symbol for symbol in symbols if symbol != "STEC"]
data = asyncio.run(get_gainer_loser_active_stocks(symbols)) data = asyncio.run(get_gainer_loser_active_stocks(symbols))
with open(f"json/market-movers/data.json", 'w') as file: with open(f"json/market-movers/data.json", 'w') as file:
ujson.dump(data, file) file.write(orjson.dumps(data).decode("utf-8"))
data = asyncio.run(get_historical_data()) data = asyncio.run(get_historical_data())
with open(f"json/mini-plots-index/data.json", 'w') as file: with open(f"json/mini-plots-index/data.json", 'w') as file:
ujson.dump(data, file) file.write(orjson.dumps(data).decode("utf-8"))
data = asyncio.run(get_pre_after_market_movers(symbols)) data = asyncio.run(get_pre_after_market_movers(symbols))
if market_status == 1: if market_status == 1:
with open(f"json/market-movers/premarket.json", 'w') as file: with open(f"json/market-movers/premarket.json", 'w') as file:
ujson.dump(data, file) file.write(orjson.dumps(data).decode("utf-8"))
elif market_status == 2: elif market_status == 2:
with open(f"json/market-movers/afterhours.json", 'w') as file: with open(f"json/market-movers/afterhours.json", 'w') as file:
ujson.dump(data, file) file.write(orjson.dumps(data).decode("utf-8"))
con.close() con.close()
except Exception as e: except Exception as e:

View File

@ -125,6 +125,9 @@ stock_screener_data_dict = {item['symbol']: item for item in stock_screener_data
#------End Stock Screener--------# #------End Stock Screener--------#
etf_set, crypto_set = set(etf_symbols), set(crypto_symbols)
### TECH DEBT ### ### TECH DEBT ###
con = sqlite3.connect('stocks.db') con = sqlite3.connect('stocks.db')
etf_con = sqlite3.connect('etf.db') etf_con = sqlite3.connect('etf.db')
@ -1149,51 +1152,58 @@ async def get_analyst_ticke_history(data: TickerData, api_key: str = Security(ge
@app.post("/indicator-data") @app.post("/indicator-data")
async def get_indicator(data: IndicatorListData, api_key: str = Security(get_api_key)): async def get_indicator(data: IndicatorListData, api_key: str = Security(get_api_key)):
# Define default fields if none are provided
rule_of_list = data.ruleOfList or ['volume', 'marketCap', 'changesPercentage', 'price', 'symbol', 'name'] rule_of_list = data.ruleOfList or ['volume', 'marketCap', 'changesPercentage', 'price', 'symbol', 'name']
# Ensure 'symbol' and 'name' are always included in the rule_of_list # Ensure 'symbol' and 'name' are always included in rule_of_list
if 'symbol' not in rule_of_list: rule_of_list_set = set(rule_of_list).union({'symbol', 'name'})
rule_of_list.append('symbol')
if 'name' not in rule_of_list: # Convert ticker list to uppercase and filter existing symbols in stock_screener_data
rule_of_list.append('name') ticker_list = set(map(str.upper, data.tickerList))
cache_key = f"indicator-data-{','.join(sorted(ticker_list))}-{','.join(sorted(rule_of_list_set))}"
ticker_list = [t.upper() for t in data.tickerList]
# Check if the result is cached
cached_result = redis_client.get(cache_key)
if cached_result:
return StreamingResponse(
io.BytesIO(cached_result),
media_type="application/json",
headers={"Content-Encoding": "gzip"}
)
# Prepare the result list and determine type
stock_screener_dict = {item['symbol']: item for item in stock_screener_data if item['symbol'] in ticker_list}
combined_results = [] combined_results = []
# Process each ticker in the filtered stock_screener_dict
for ticker, ticker_data in stock_screener_dict.items():
# Determine the ticker type
ticker_type = (
'etf' if ticker in etf_set else
'crypto' if ticker in crypto_set else
'stock'
)
# Load quote data in parallel # Filter data according to rule_of_list and add the ticker type
quote_data = await asyncio.gather(*[load_json_async(f"json/quote/{ticker}.json") for ticker in ticker_list]) filtered_data = {key: ticker_data.get(key) for key in rule_of_list_set}
quote_dict = {ticker: data for ticker, data in zip(ticker_list, quote_data) if data} filtered_data['type'] = ticker_type
combined_results.append(filtered_data)
# Categorize tickers and extract data
for ticker, quote in quote_dict.items():
ticker_type = 'stock'
if ticker in etf_symbols:
ticker_type = 'etf'
elif ticker in crypto_symbols:
ticker_type = 'crypto'
filtered_quote = {key: quote.get(key) for key in rule_of_list if key in quote}
filtered_quote['type'] = ticker_type
combined_results.append(filtered_quote)
# Fetch and merge data from stock_screener_data
screener_keys = [key for key in rule_of_list if key not in ['volume', 'marketCap', 'changesPercentage', 'price', 'symbol', 'name']]
if screener_keys:
screener_dict = {item['symbol']: {k: v for k, v in item.items() if k in screener_keys} for item in stock_screener_data}
for result in combined_results:
symbol = result.get('symbol')
if symbol in screener_dict:
result.update(screener_dict[symbol])
# Serialize and compress the response # Serialize and compress the response
res = orjson.dumps(combined_results) res = orjson.dumps(combined_results)
compressed_data = gzip.compress(res) compressed_data = gzip.compress(res)
# Cache the result
redis_client.set(cache_key, compressed_data)
redis_client.expire(cache_key, 60) # Cache expires after 1 minute
return StreamingResponse( return StreamingResponse(
io.BytesIO(compressed_data), io.BytesIO(compressed_data),
media_type="application/json", media_type="application/json",
headers={"Content-Encoding": "gzip"} headers={"Content-Encoding": "gzip"}
) )
@app.post("/get-watchlist") @app.post("/get-watchlist")
async def get_watchlist(data: GetWatchList, api_key: str = Security(get_api_key)): async def get_watchlist(data: GetWatchList, api_key: str = Security(get_api_key)):
data = data.dict() data = data.dict()

View File

@ -107,7 +107,8 @@ class Past_Market_Movers:
for symbol, name, price, changes_percentage, volume, market_cap in high_volume: for symbol, name, price, changes_percentage, volume, market_cap in high_volume:
active_data.append({'symbol': symbol, 'name': name, 'price': price, 'changesPercentage': changes_percentage, 'volume': volume, 'marketCap': market_cap}) active_data.append({'symbol': symbol, 'name': name, 'price': price, 'changesPercentage': changes_percentage, 'volume': volume, 'marketCap': market_cap})
loser_data.sort(key=lambda x: x['changesPercentage'], reverse=False)
if time_period == 7: if time_period == 7:
gainer_json['1W'] = gainer_data gainer_json['1W'] = gainer_data
loser_json['1W'] = loser_data loser_json['1W'] = loser_data