Update market movers
This commit is contained in:
parent
59ab55c8cf
commit
15da925dc4
@ -82,7 +82,12 @@ async def get_quote_of_stocks(ticker_list):
|
||||
df = await response.json()
|
||||
return df
|
||||
|
||||
|
||||
def add_rank(data):
    """Annotate every entry in each category list with its 1-based position.

    Mutates the entry dicts in place (adds a ``rank`` key) and returns the
    same mapping so calls can be chained.
    """
    for category, entries in data.items():
        position = 1
        for entry in entries:
            entry['rank'] = position
            position += 1
    return data
|
||||
|
||||
async def get_gainer_loser_active_stocks():
|
||||
|
||||
#Database read 1y and 3y data
|
||||
@ -218,7 +223,9 @@ async def get_gainer_loser_active_stocks():
|
||||
# Iterate through time periods, categories, and symbols
|
||||
for time_period in data.keys():
|
||||
for category in data[time_period].keys():
|
||||
for stock_data in data[time_period][category]:
|
||||
# Add rank and process symbols
|
||||
for index, stock_data in enumerate(data[time_period][category], start=1):
|
||||
stock_data['rank'] = index # Add rank field
|
||||
symbol = stock_data["symbol"]
|
||||
unique_symbols.add(symbol)
|
||||
|
||||
@ -230,16 +237,17 @@ async def get_gainer_loser_active_stocks():
|
||||
latest_quote = await get_quote_of_stocks(unique_symbols_list)
|
||||
# Updating values in the data list based on matching symbols from the quote list
|
||||
for time_period in data.keys():
|
||||
for category in data[time_period].keys():
|
||||
for stock_data in data[time_period][category]:
|
||||
symbol = stock_data["symbol"]
|
||||
quote_stock = next((item for item in latest_quote if item["symbol"] == symbol), None)
|
||||
if quote_stock:
|
||||
stock_data['price'] = quote_stock['price']
|
||||
stock_data['changesPercentage'] = quote_stock['changesPercentage']
|
||||
stock_data['marketCap'] = quote_stock['marketCap']
|
||||
stock_data['volume'] = quote_stock['volume']
|
||||
|
||||
# Only proceed if the time period is "1D"
|
||||
if time_period == "1D":
|
||||
for category in data[time_period].keys():
|
||||
for stock_data in data[time_period][category]:
|
||||
symbol = stock_data["symbol"]
|
||||
quote_stock = next((item for item in latest_quote if item["symbol"] == symbol), None)
|
||||
if quote_stock:
|
||||
stock_data['price'] = quote_stock['price']
|
||||
stock_data['changesPercentage'] = quote_stock['changesPercentage']
|
||||
stock_data['marketCap'] = quote_stock['marketCap']
|
||||
stock_data['volume'] = quote_stock['volume']
|
||||
|
||||
return data
|
||||
|
||||
@ -310,17 +318,20 @@ try:
|
||||
#Filter out tickers
|
||||
symbols = [symbol for symbol in symbols if symbol != "STEC"]
|
||||
|
||||
data = asyncio.run(get_historical_data())
|
||||
with open(f"json/mini-plots-index/data.json", 'w') as file:
|
||||
ujson.dump(data, file)
|
||||
|
||||
|
||||
data = asyncio.run(get_gainer_loser_active_stocks())
|
||||
with open(f"json/market-movers/data.json", 'w') as file:
|
||||
ujson.dump(data, file)
|
||||
'''
|
||||
data = asyncio.run(get_historical_data())
|
||||
with open(f"json/mini-plots-index/data.json", 'w') as file:
|
||||
ujson.dump(data, file)
|
||||
|
||||
data = asyncio.run(get_pre_post_market_movers(symbols))
|
||||
with open(f"json/market-movers/pre-post-data.json", 'w') as file:
|
||||
ujson.dump(data, file)
|
||||
'''
|
||||
|
||||
con.close()
|
||||
except Exception as e:
|
||||
|
||||
309
app/market_movers.py
Executable file → Normal file
309
app/market_movers.py
Executable file → Normal file
@ -1,152 +1,157 @@
|
||||
import sqlite3
|
||||
from datetime import datetime, timedelta
|
||||
import pandas as pd
|
||||
import json
|
||||
import time
|
||||
|
||||
class Past_Market_Movers:
    """Compute top gainer/loser/most-active stocks over 1W/1M/3M/6M windows
    from the local SQLite backup and persist them as JSON columns."""

    # Maps a look-back length in days to its display label.
    _PERIOD_LABELS = {7: '1W', 30: '1M', 90: '3M', 180: '6M'}

    def __init__(self):
        # WAL mode lets concurrent readers proceed while this job writes.
        self.con = sqlite3.connect('backup_db/stocks.db')
        self.cursor = self.con.cursor()
        self.cursor.execute("PRAGMA journal_mode = wal")
        self.symbols = self.get_stock_symbols()

    def get_stock_symbols(self):
        """Return every distinct stock symbol except the S&P 500 index row."""
        self.cursor.execute("SELECT DISTINCT symbol FROM stocks WHERE symbol != ?", ('%5EGSPC',))
        return [row[0] for row in self.cursor.fetchall()]

    @staticmethod
    def check_if_holiday():
        """Return the name of today's market holiday, or None.

        NOTE(review): the holiday table is hard-coded for 2023 and must be
        refreshed each year.
        """
        holidays = {
            datetime(2023, 5, 29): 'memorial_day',
            datetime(2023, 6, 19): 'independence_day',
            datetime(2023, 6, 20): 'independence_day+1',
            datetime(2023, 9, 4): 'labor_day',
        }
        # Bug fix: compare calendar dates. The original compared
        # datetime.today() (which carries the current time) against midnight
        # datetimes with ==, so it effectively never matched.
        today = datetime.today().date()
        for holiday_date, holiday_name in holidays.items():
            if today == holiday_date.date():
                return holiday_name
        return None

    def correct_weekday_interval(self, prev_day):
        """Return the start date (YYYY-MM-DD) ``prev_day`` days back, shifted
        onto the previous trading day when it lands on a weekend or holiday."""
        holiday = self.check_if_holiday()
        if holiday:
            if holiday == 'memorial_day':
                start_date = datetime(2023, 5, 26)
            elif holiday in ('independence_day', 'independence_day+1'):
                start_date = datetime(2023, 6, 16)
            else:
                start_date = datetime(2023, 9, 1)
        else:
            current_date = datetime.today() - timedelta(days=prev_day)
            current_weekday = current_date.weekday()
            if current_weekday in (5, 6):  # Saturday or Sunday -> back up to Friday
                start_date = current_date - timedelta(days=current_weekday % 5 + 1)
            else:
                start_date = current_date
        return start_date.strftime("%Y-%m-%d")

    def run(self, time_periods=(7, 30, 90, 180)):
        """Build {period_label: [top-20 records]} dicts for gainers, losers
        and most-active stocks; returns the three dicts in that order.

        Default changed from a mutable list to an equivalent tuple (the
        argument is only iterated, so behavior is unchanged).
        """
        query_template = """
            SELECT date, close, volume FROM "{ticker}" WHERE date >= ?
        """
        query_fundamental_template = """
            SELECT marketCap, name FROM stocks WHERE symbol = ?
        """
        gainer_json = {}
        loser_json = {}
        active_json = {}

        for time_period in time_periods:
            performance_data = []
            start_date = self.correct_weekday_interval(time_period)
            for ticker in self.symbols:
                try:
                    # Ticker names come from our own DB, but format() here is
                    # still string-built SQL -- keep the symbol source trusted.
                    query = query_template.format(ticker=ticker)
                    df = pd.read_sql_query(query, self.con, params=(start_date,))
                    if df.empty:
                        continue
                    fundamental_data = pd.read_sql_query(query_fundamental_template, self.con, params=(ticker,))
                    avg_volume = df['volume'].mean()
                    # Liquidity and penny-stock filters.
                    if avg_volume > 1E6 and df['close'].mean() > 1:
                        changes_percentage = ((df['close'].iloc[-1] - df['close'].iloc[0]) / df['close'].iloc[0]) * 100
                        performance_data.append((ticker, fundamental_data['name'].iloc[0], df['close'].iloc[-1], changes_percentage, avg_volume, int(fundamental_data['marketCap'].iloc[0])))
                except Exception:
                    # Missing per-ticker table or malformed fundamentals: skip
                    # this symbol (was a bare except; narrowed to Exception).
                    continue

            # Best-to-worst by percentage change; separately by avg volume.
            performance_data.sort(key=lambda x: x[3], reverse=True)
            high_volume = sorted(performance_data, key=lambda x: x[4], reverse=True)

            label = self._PERIOD_LABELS.get(time_period)
            if label is not None:
                gainer_json[label] = self._to_records(performance_data[:20])
                loser_json[label] = self._to_records(performance_data[-20:])
                active_json[label] = self._to_records(high_volume[:20])

        return gainer_json, loser_json, active_json

    @staticmethod
    def _to_records(rows):
        """Convert performance tuples into the JSON record shape."""
        return [
            {'symbol': s, 'name': n, 'price': p, 'changesPercentage': c, 'volume': v, 'marketCap': m}
            for s, n, p, c, v, m in rows
        ]

    def create_table(self):
        """Drop and recreate the 'market_movers' snapshot table with
        'gainer', 'loser', and 'most_active' TEXT columns."""
        self.con.execute("DROP TABLE IF EXISTS market_movers")
        query_create = """
            CREATE TABLE IF NOT EXISTS market_movers (
                gainer TEXT,
                loser TEXT,
                most_active TEXT
            )
        """
        self.con.execute(query_create)
        self.con.commit()

    def update_database(self, gainer_json, loser_json, active_json):
        """Insert one row holding the three JSON payloads."""
        query = "INSERT INTO market_movers (gainer, loser, most_active) VALUES (?, ?, ?)"
        self.con.execute(query, (json.dumps(gainer_json), json.dumps(loser_json), json.dumps(active_json)))
        self.con.commit()

    def close_database_connection(self):
        """Close the underlying SQLite connection."""
        self.con.close()
|
||||
|
||||
if __name__ == "__main__":
    # Rebuild the market_movers snapshot table and store fresh JSON payloads.
    analyzer = Past_Market_Movers()
    try:
        analyzer.create_table()  # recreate table with gainer/loser/most_active columns
        gainer_json, loser_json, active_json = analyzer.run()
        analyzer.update_database(gainer_json, loser_json, active_json)
    finally:
        # Always release the SQLite handle, even if the refresh fails.
        analyzer.close_database_connection()
|
||||
import sqlite3
|
||||
from datetime import datetime, timedelta
|
||||
import pandas as pd
|
||||
import json
|
||||
import time
|
||||
|
||||
class Past_Market_Movers:
    """Compute top gainer/loser/most-active stocks over 1W/1M/1Y/3Y/5Y
    look-back windows from the local SQLite backup and persist them as JSON
    columns."""

    # Maps a look-back length (rows of daily data) to its display label.
    _PERIOD_LABELS = {7: '1W', 20: '1M', 252: '1Y', 756: '3Y', 1260: '5Y'}

    def __init__(self):
        # WAL mode lets concurrent readers proceed while this job writes.
        self.con = sqlite3.connect('backup_db/stocks.db')
        self.cursor = self.con.cursor()
        self.cursor.execute("PRAGMA journal_mode = wal")
        self.symbols = self.get_stock_symbols()

    def get_stock_symbols(self):
        """Return every distinct stock symbol except the S&P 500 index row."""
        self.cursor.execute("SELECT DISTINCT symbol FROM stocks WHERE symbol != ?", ('%5EGSPC',))
        return [row[0] for row in self.cursor.fetchall()]

    @staticmethod
    def check_if_holiday():
        """Return the name of today's market holiday, or None.

        NOTE(review): the holiday table is hard-coded for 2023 and must be
        refreshed each year.
        """
        holidays = {
            datetime(2023, 5, 29): 'memorial_day',
            datetime(2023, 6, 19): 'independence_day',
            datetime(2023, 6, 20): 'independence_day+1',
            datetime(2023, 9, 4): 'labor_day',
        }
        # Bug fix: compare calendar dates. The original compared
        # datetime.today() (which carries the current time) against midnight
        # datetimes with ==, so it effectively never matched.
        today = datetime.today().date()
        for holiday_date, holiday_name in holidays.items():
            if today == holiday_date.date():
                return holiday_name
        return None

    def correct_weekday_interval(self, prev_day):
        """Return the start date (YYYY-MM-DD) ``prev_day`` days back, shifted
        onto the previous trading day when it lands on a weekend or holiday."""
        holiday = self.check_if_holiday()
        if holiday:
            if holiday == 'memorial_day':
                start_date = datetime(2023, 5, 26)
            elif holiday in ('independence_day', 'independence_day+1'):
                start_date = datetime(2023, 6, 16)
            else:
                start_date = datetime(2023, 9, 1)
        else:
            current_date = datetime.today() - timedelta(days=prev_day)
            current_weekday = current_date.weekday()
            if current_weekday in (5, 6):  # Saturday or Sunday -> back up to Friday
                start_date = current_date - timedelta(days=current_weekday % 5 + 1)
            else:
                start_date = current_date
        return start_date.strftime("%Y-%m-%d")

    def run(self, time_periods=(7, 20, 252, 756, 1260)):
        """Build {period_label: [top-20 records]} dicts for gainers, losers
        and most-active stocks; returns the three dicts in that order.

        Default changed from a mutable list to an equivalent tuple (the
        argument is only iterated, so behavior is unchanged).
        """
        query_template = """
            SELECT date, close, volume FROM "{ticker}" WHERE date >= ?
        """
        query_fundamental_template = """
            SELECT marketCap, name FROM stocks WHERE symbol = ?
        """
        gainer_json = {}
        loser_json = {}
        active_json = {}

        for time_period in time_periods:
            performance_data = []
            start_date = self.correct_weekday_interval(time_period)
            for ticker in self.symbols:
                try:
                    # Ticker names come from our own DB, but format() here is
                    # still string-built SQL -- keep the symbol source trusted.
                    query = query_template.format(ticker=ticker)
                    df = pd.read_sql_query(query, self.con, params=(start_date,))
                    if df.empty:
                        continue
                    fundamental_data = pd.read_sql_query(query_fundamental_template, self.con, params=(ticker,))
                    avg_volume = df['volume'].mean()
                    market_cap = int(fundamental_data['marketCap'].iloc[0])
                    # Liquidity, penny-stock, and micro-cap filters.
                    if avg_volume > 1E6 and df['close'].mean() > 1 and market_cap >= 50E6:
                        changes_percentage = ((df['close'].iloc[-1] - df['close'].iloc[0]) / df['close'].iloc[0]) * 100
                        performance_data.append((ticker, fundamental_data['name'].iloc[0], df['close'].iloc[-1], changes_percentage, avg_volume, market_cap))
                except Exception:
                    # Missing per-ticker table or malformed fundamentals: skip
                    # this symbol (was a bare except; narrowed to Exception).
                    continue

            # Best-to-worst by percentage change; separately by avg volume.
            performance_data.sort(key=lambda x: x[3], reverse=True)
            high_volume = sorted(performance_data, key=lambda x: x[4], reverse=True)

            label = self._PERIOD_LABELS.get(time_period)
            if label is not None:
                gainer_json[label] = self._to_records(performance_data[:20])
                loser_json[label] = self._to_records(performance_data[-20:])
                active_json[label] = self._to_records(high_volume[:20])

        return gainer_json, loser_json, active_json

    @staticmethod
    def _to_records(rows):
        """Convert performance tuples into the JSON record shape."""
        return [
            {'symbol': s, 'name': n, 'price': p, 'changesPercentage': c, 'volume': v, 'marketCap': m}
            for s, n, p, c, v, m in rows
        ]

    def create_table(self):
        """Drop and recreate the 'market_movers' snapshot table with
        'gainer', 'loser', and 'most_active' TEXT columns."""
        self.con.execute("DROP TABLE IF EXISTS market_movers")
        query_create = """
            CREATE TABLE IF NOT EXISTS market_movers (
                gainer TEXT,
                loser TEXT,
                most_active TEXT
            )
        """
        self.con.execute(query_create)
        self.con.commit()

    def update_database(self, gainer_json, loser_json, active_json):
        """Insert one row holding the three JSON payloads."""
        query = "INSERT INTO market_movers (gainer, loser, most_active) VALUES (?, ?, ?)"
        self.con.execute(query, (json.dumps(gainer_json), json.dumps(loser_json), json.dumps(active_json)))
        self.con.commit()

    def close_database_connection(self):
        """Close the underlying SQLite connection."""
        self.con.close()
|
||||
|
||||
if __name__ == "__main__":
    # Rebuild the market_movers snapshot table and store fresh JSON payloads.
    analyzer = Past_Market_Movers()
    try:
        analyzer.create_table()  # recreate table with gainer/loser/most_active columns
        gainer_json, loser_json, active_json = analyzer.run()
        analyzer.update_database(gainer_json, loser_json, active_json)
    finally:
        # Always release the SQLite handle, even if the refresh fails.
        analyzer.close_database_connection()
|
||||
Loading…
x
Reference in New Issue
Block a user