add revenue cron job && bugfixing options stats
parent 57d295910f
commit 32a7793b33
app/cron_revenue.py (new file, +94 lines)
@@ -0,0 +1,94 @@
from datetime import datetime
import asyncio
import os
import sqlite3

import orjson
import ujson
from tqdm import tqdm

current_year = datetime.now().year
cutoff_year = current_year - 5

# Load the stock screener snapshot once and index it by symbol.
with open("json/stock-screener/data.json", "rb") as file:
    stock_screener_data = orjson.loads(file.read())
stock_screener_data_dict = {item['symbol']: item for item in stock_screener_data}


async def save_json(symbol, data):
    path = f"json/revenue/companies/{symbol}.json"
    # Ensure the target directory exists before writing.
    os.makedirs(os.path.dirname(path), exist_ok=True)
    with open(path, 'w') as file:
        ujson.dump(data, file)


async def get_statistics(symbol):
    """Extract the revenue-related screener columns for a given symbol."""
    columns = ['revenue', 'growthRevenue', 'priceToSalesRatio', 'revenuePerEmployee', 'employees']
    if symbol in stock_screener_data_dict:
        item = stock_screener_data_dict[symbol]
        return {column: item.get(column) for column in columns}
    return {}


async def get_data(symbol):
    with open(f"json/financial-statements/income-statement/annual/{symbol}.json", "r") as file:
        annual_data = orjson.loads(file.read())

    with open(f"json/financial-statements/income-statement/quarter/{symbol}.json", "r") as file:
        quarter_data = orjson.loads(file.read())

    # Keep only the last 5 years of annual statements.
    annual_data = [
        {"date": item["date"], "revenue": item["revenue"]}
        for item in annual_data
        if int(item["date"][:4]) >= cutoff_year  # year prefix of the ISO date
    ]

    # Same 5-year window for the quarterly statements.
    quarter_data = [
        {"date": item["date"], "revenue": item["revenue"]}
        for item in quarter_data
        if int(item["date"][:4]) >= cutoff_year
    ]

    stats = await get_statistics(symbol)
    res_dict = {**stats, 'annual': annual_data, 'quarter': quarter_data}

    if annual_data and quarter_data:
        await save_json(symbol, res_dict)


async def run():
    con = sqlite3.connect('stocks.db')
    cursor = con.cursor()
    cursor.execute("PRAGMA journal_mode = wal")
    cursor.execute("SELECT DISTINCT symbol FROM stocks WHERE symbol NOT LIKE '%.%'")
    total_symbols = [row[0] for row in cursor.fetchall()]
    con.close()
    # total_symbols = ['PLTR']  # leftover debug override; re-enable to test a single symbol
    for symbol in tqdm(total_symbols):
        try:
            await get_data(symbol)
        except Exception as e:
            print(e)


if __name__ == "__main__":
    asyncio.run(run())
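For reference, each run writes one JSON file per symbol that merges the screener stats from get_statistics with the trimmed income statements. A minimal sketch of the resulting shape; every figure below is a placeholder, not real data:

# Illustrative shape of json/revenue/companies/<symbol>.json written by save_json;
# all values here are made up for the example.
example_payload = {
    "revenue": 1_000_000_000,        # screener columns from get_statistics
    "growthRevenue": 0.15,
    "priceToSalesRatio": 10.2,
    "revenuePerEmployee": 500_000,
    "employees": 2_000,
    "annual": [                      # annual statements within the 5-year window
        {"date": "2024-12-31", "revenue": 1_000_000_000},
    ],
    "quarter": [                     # quarterly statements over the same window
        {"date": "2024-12-31", "revenue": 260_000_000},
    ],
}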
@@ -26,7 +26,7 @@ query_template = """

 async def get_data(symbol):
     """Extract specified columns data for a given symbol."""
-    columns = ['dividendYield', 'employees', 'marketCap', 'relativeFTD', 'name']
+    columns = ['dividendYield', 'employees', 'marketCap', 'relativeFTD', 'name', 'revenue']

     if symbol in stock_screener_data_dict:
         result = {}
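The practical effect of the one-line change above is that the per-symbol stats dict now carries revenue alongside the existing columns. A toy illustration, with an invented screener entry:

# Toy illustration of the column extraction after 'revenue' is added;
# the screener entry is made up for the example.
screener_entry = {"dividendYield": 1.2, "employees": 5000, "marketCap": 2.5e10,
                  "relativeFTD": 0.4, "name": "Example Corp", "revenue": 3.1e9}
columns = ['dividendYield', 'employees', 'marketCap', 'relativeFTD', 'name', 'revenue']
result = {c: screener_entry.get(c) for c in columns}
# result now includes {'revenue': 3100000000.0, ...}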
app/main.py (+30 lines)
@@ -3553,6 +3553,36 @@ async def get_historical_market_cap(data: TickerData, api_key: str = Security(get_api_key)):
         headers={"Content-Encoding": "gzip"}
     )

+@app.post("/historical-revenue")
+async def get_data(data: TickerData, api_key: str = Security(get_api_key)):
+    ticker = data.ticker.upper()
+    cache_key = f"historical-revenue-{ticker}"
+    cached_result = redis_client.get(cache_key)
+    if cached_result:
+        return StreamingResponse(
+            io.BytesIO(cached_result),
+            media_type="application/json",
+            headers={"Content-Encoding": "gzip"}
+        )
+    try:
+        with open(f"json/revenue/companies/{ticker}.json", 'rb') as file:
+            res = orjson.loads(file.read())
+    except Exception:
+        res = {}
+
+    data = orjson.dumps(res)
+    compressed_data = gzip.compress(data)
+
+    redis_client.set(cache_key, compressed_data)
+    redis_client.expire(cache_key, 3600 * 3600)  # 12,960,000 s (~150 days); 3600 alone would be one hour
+
+    return StreamingResponse(
+        io.BytesIO(compressed_data),
+        media_type="application/json",
+        headers={"Content-Encoding": "gzip"}
+    )
+
+
 @app.get("/economic-indicator")
 async def get_economic_indicator(api_key: str = Security(get_api_key)):
     cache_key = f"economic-indicator"
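A request like the following exercises the new endpoint once the server is up. The base URL and the API-key header name are assumptions here (they depend on how get_api_key is wired); requests transparently decompresses the gzip-encoded response:

import requests

# Assumed base URL and header name; adjust both to match the deployment
# and the security scheme behind get_api_key.
resp = requests.post(
    "http://localhost:8000/historical-revenue",
    json={"ticker": "pltr"},          # TickerData body; the endpoint upper-cases it
    headers={"X-API-KEY": "your-key"},
)
resp.raise_for_status()
print(resp.json().get("annual", []))  # gzip is decoded automatically by requests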
@@ -72,7 +72,7 @@ def run_market_flow():
     current_time = now.time()
     hour = now.hour
     if week <= 4 and 8 <= hour < 17:
-        run_command(["python3", "cron_option_stats.py"])
+        run_command(["python3", "cron_options_stats.py"])
         run_command(["python3", "cron_market_flow.py"])
         run_command(["python3", "cron_unusual_activity.py"])
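The guard above runs these jobs Monday through Friday between 08:00 and 17:00, with week presumably taken from now.weekday(). run_command itself is defined elsewhere in this scheduler file; assuming it wraps subprocess, a minimal stand-in consistent with the calls above might look like this:

import subprocess

# Hypothetical stand-in for run_command; the real definition lives elsewhere
# in this module. It runs one cron script and reports failures without
# stopping the scheduler loop.
def run_command(cmd):
    try:
        subprocess.run(cmd, check=True)
    except subprocess.CalledProcessError as e:
        print(f"{' '.join(cmd)} exited with code {e.returncode}")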