update job
parent d48f92f786
commit 8b7b481535
@@ -5,96 +5,104 @@ import pandas as pd
 from tqdm import tqdm
 from datetime import datetime
 import yfinance as yf
+import time
 
-# Constants
-JSON_DIR = "json/"
-QUARTERLY_FREQ = 'QE'
-
-# SQL Query
-QUERY_TEMPLATE = """
-SELECT historicalShares
-FROM stocks
-WHERE symbol = ?
+async def save_as_json(symbol, forward_pe_dict, short_dict):
+    with open(f"json/share-statistics/{symbol}.json", 'w') as file:
+        ujson.dump(short_dict, file)
+    with open(f"json/forward-pe/{symbol}.json", 'w') as file:
+        ujson.dump(forward_pe_dict, file)
+
+
+query_template = f"""
+    SELECT
+        historicalShares
+    FROM
+        stocks
+    WHERE
+        symbol = ?
 """
 
-def filter_quarterly_data(data):
-    """Filter data to keep only quarter-end dates."""
-    quarter_ends = pd.date_range(start=data[0]['date'], end=datetime.now(), freq=QUARTERLY_FREQ).strftime('%Y-%m-%d').tolist()
-    return [entry for entry in data if entry['date'] in quarter_ends]
+def filter_data_quarterly(data):
+    # Generate a range of quarter-end dates from the start to the end date
+    start_date = data[0]['date']
+    end_date = datetime.today().strftime('%Y-%m-%d')
+    quarter_ends = pd.date_range(start=start_date, end=end_date, freq='QE').strftime('%Y-%m-%d').tolist()
+
+    # Filter data to keep only entries with dates matching quarter-end dates
+    filtered_data = [entry for entry in data if entry['date'] in quarter_ends]
+
+    return filtered_data
 
-def get_yahoo_finance_data(ticker, shares):
-    """Fetch and process Yahoo Finance data."""
+def get_yahoo_data(ticker, outstanding_shares, float_shares):
     try:
-        info = yf.Ticker(ticker).info
-        return {
-            'forwardPE': round(info.get('forwardPE', 0), 2),
-            'short': {
-                'shares': info.get('sharesShort', 0),
-                'ratio': info.get('shortRatio', 0),
-                'priorMonth': info.get('sharesShortPriorMonth', 0),
-                'outstandingPercent': round((info.get('sharesShort', 0) / shares['outstandingShares']) * 100, 2),
-                'floatPercent': round((info.get('sharesShort', 0) / shares['floatShares']) * 100, 2)
-            }
-        }
-    except Exception as e:
-        #print(ticker)
-        #print(e)
-        #print("============")
-        return {'forwardPE': 0, 'short': {k: 0 for k in ['shares', 'ratio', 'priorMonth', 'outstandingPercent', 'floatPercent']}}
+        data_dict = yf.Ticker(ticker).info
+        forward_pe = round(data_dict['forwardPE'],2)
+        short_outstanding_percent = round((data_dict['sharesShort']/outstanding_shares)*100,2)
+        short_float_percent = round((data_dict['sharesShort']/float_shares)*100,2)
+        return {'forwardPE': forward_pe}, {'sharesShort': data_dict['sharesShort'], 'shortRatio': data_dict['shortRatio'], 'sharesShortPriorMonth': data_dict['sharesShortPriorMonth'], 'shortOutStandingPercent': short_outstanding_percent, 'shortFloatPercent': short_float_percent}
+    except:
+        return {'forwardPE': 0}, {'sharesShort': 0, 'shortRatio': 0, 'sharesShortPriorMonth': 0, 'shortOutStandingPercent': 0, 'shortFloatPercent': 0}
 
-async def save_json(symbol, data):
-    """Save data to JSON files."""
-    for key, path in [("forwardPE", f"{JSON_DIR}forward-pe/{symbol}.json"), ("short", f"{JSON_DIR}share-statistics/{symbol}.json")]:
-        with open(path, 'w') as file:
-            ujson.dump(data.get(key, {}), file)
-
-async def process_ticker(ticker, con):
-    """Process a single ticker."""
+
+async def get_data(ticker, con):
     try:
-        df = pd.read_sql_query(QUERY_TEMPLATE, con, params=(ticker,))
-        stats = ujson.loads(df.to_dict()['historicalShares'][0])
-        # Filter and convert data
-        filtered_stats = [
-            {k: int(v) if k in ["floatShares", "outstandingShares"] else v
-             for k, v in d.items() if k in ["date", "floatShares", "outstandingShares"]}
-            for d in sorted(stats, key=lambda x: datetime.strptime(x['date'], '%Y-%m-%d'))
+        df = pd.read_sql_query(query_template, con, params=(ticker,))
+        shareholder_statistics = ujson.loads(df.to_dict()['historicalShares'][0])
+        # Keys to keep
+        keys_to_keep = ["date","floatShares", "outstandingShares"]
+
+        # Create new list with only the specified keys and convert floatShares and outstandingShares to integers
+        shareholder_statistics = [
+            {key: int(d[key]) if key in ["floatShares", "outstandingShares"] else d[key]
+             for key in keys_to_keep}
+            for d in shareholder_statistics
         ]
 
-        latest_shares = filtered_stats[-1]
-        quarterly_stats = filter_quarterly_data(filtered_stats)
-        data = get_yahoo_finance_data(ticker, latest_shares)
-        data['short'].update({
-            'latestOutstandingShares': latest_shares['outstandingShares'],
-            'latestFloatShares': latest_shares['floatShares'],
-            'historicalShares': quarterly_stats
-        })
-
-        await save_json(ticker, data)
-        return True
+        shareholder_statistics = sorted(shareholder_statistics, key=lambda x: datetime.strptime(x['date'], '%Y-%m-%d'), reverse=False)
+
+        latest_outstanding_shares = shareholder_statistics[-1]['outstandingShares']
+        latest_float_shares = shareholder_statistics[-1]['floatShares']
+        # Filter out only quarter-end dates
+        historical_shares = filter_data_quarterly(shareholder_statistics)
+
+        forward_pe_data, short_data = get_yahoo_data(ticker, latest_outstanding_shares, latest_float_shares)
+        short_data = {**short_data, 'latestOutstandingShares': latest_outstanding_shares, 'latestFloatShares': latest_float_shares,'historicalShares': historical_shares}
     except Exception as e:
-        print(f"Error processing {ticker}: {e}")
-        return False
+        print(e)
+        short_data = {}
+        forward_pe_data = {}
+
+    return forward_pe_data, short_data
 
 
 async def run():
-    """Main function to process all tickers."""
     con = sqlite3.connect('stocks.db')
-    con.execute("PRAGMA journal_mode = wal")
-
-    with con:
-        stock_symbols = [row[0] for row in con.execute("SELECT DISTINCT symbol FROM stocks WHERE symbol NOT LIKE '%.%'")]
-
-    processed = 0
+
+    cursor = con.cursor()
+    cursor.execute("PRAGMA journal_mode = wal")
+    cursor.execute("SELECT DISTINCT symbol FROM stocks")
+    stock_symbols = [row[0] for row in cursor.fetchall()]
+
+    counter = 0
 
     for ticker in tqdm(stock_symbols):
-        if await process_ticker(ticker, con):
-            processed += 1
-            if processed % 50 == 0:
-                print(f"Processed {processed} tickers, waiting for 60 seconds...")
-                await asyncio.sleep(60)
+        forward_pe_dict, short_dict = await get_data(ticker, con)
+        if forward_pe_dict.keys() and short_dict.keys():
+            await save_as_json(ticker, forward_pe_dict, short_dict)
+
+        counter += 1
+        if counter % 100 == 0:
+            print(f"Processed {counter} tickers, waiting for 10 seconds...")
+            await asyncio.sleep(30)
 
     con.close()
 
-if __name__ == "__main__":
+try:
     asyncio.run(run())
+except Exception as e:
+    print(e)
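Aside (not part of the commit): the quarter-end filtering that the new filter_data_quarterly performs can be illustrated with a small self-contained sketch. It assumes pandas 2.2 or later, where 'QE' is the quarter-end frequency alias (older releases used 'Q'), and uses made-up share records plus a fixed end date so the output is deterministic; the actual job uses today's date as the end of the range.

    import pandas as pd

    # Hypothetical records in the same shape the job reads from the database.
    records = [
        {"date": "2023-12-31", "outstandingShares": 1000},  # quarter end -> kept
        {"date": "2024-01-16", "outstandingShares": 1005},  # mid-quarter -> dropped
        {"date": "2024-03-31", "outstandingShares": 1010},  # quarter end -> kept
    ]

    # Same idea as filter_data_quarterly: build the list of quarter-end dates
    # from the first record's date up to an end date, then keep matching rows.
    quarter_ends = pd.date_range(start=records[0]["date"], end="2024-06-30",
                                 freq="QE").strftime("%Y-%m-%d").tolist()
    filtered = [entry for entry in records if entry["date"] in quarter_ends]

    print(quarter_ends)  # ['2023-12-31', '2024-03-31', '2024-06-30']
    print(filtered)      # only the 2023-12-31 and 2024-03-31 rows remain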