Commit fe3ab4899e — "add insider tracker" (parent: 50b9bc72a9)
Adds new file: app/cron_insider_tracker.py (90 lines)
@ -0,0 +1,90 @@
|
|||||||
|
import ujson
|
||||||
|
import asyncio
|
||||||
|
import aiohttp
|
||||||
|
import sqlite3
|
||||||
|
from datetime import datetime
|
||||||
|
from aiofiles import open as async_open
|
||||||
|
from tqdm import tqdm
|
||||||
|
from dotenv import load_dotenv
|
||||||
|
import os
|
||||||
|
|
||||||
|
load_dotenv()
|
||||||
|
api_key = os.getenv('FMP_API_KEY')
|
||||||
|
|
||||||
|
|
||||||
|
async def save_json(data):
    """Serialize *data* to the insider-tracker JSON output file.

    Args:
        data: Any ujson-serializable object (here, a list of trade dicts
            produced by get_data).
    """
    # Ensure the target directory exists so the write cannot fail with
    # FileNotFoundError on a fresh checkout (original assumed it existed).
    os.makedirs("json/tracker/insider", exist_ok=True)
    async with async_open("json/tracker/insider/data.json", 'w') as file:
        await file.write(ujson.dumps(data))
|
||||||
|
|
||||||
|
async def get_data(session, symbols):
    """Fetch recent insider trades from FMP and enrich them with local quote data.

    Args:
        session: An open aiohttp.ClientSession used for the API requests.
        symbols: Unused here; kept for interface compatibility with the caller.

    Returns:
        A list of dicts, one per Buy/Sell insider transaction, each enriched
        with marketCap / price / changesPercentage read from
        json/quote/<symbol>.json.  Rows whose quote file is missing or
        malformed are silently dropped (best effort, as before).
    """
    res_list = []
    for page in range(3):  # first 3 pages; adjust the page count as needed
        url = (
            "https://financialmodelingprep.com/api/v4/insider-trading"
            f"?page={page}&apikey={api_key}"
        )
        async with session.get(url) as response:
            try:
                if response.status == 200:
                    data = await response.json()
                    # Keep only acquisitions ("A" -> Buy) and dispositions
                    # ("D" -> Sell) with a positive price and share count.
                    # NOTE: "acquistionOrDisposition" is the field name as
                    # (mis)spelled by the FMP API — do not "fix" it.
                    # Guard against None price/share-count with `or 0`: the
                    # original compared None > 0, raising TypeError, which
                    # the except below turned into a break that aborted all
                    # remaining pages.
                    filtered_data = [
                        {
                            "reportingName": item.get("reportingName"),
                            "symbol": item.get("symbol"),
                            "filingDate": item.get("filingDate"),
                            "value": round(item.get("securitiesTransacted") * item.get("price"), 2),
                            # Only "A"/"D" survive the filter, so the original
                            # trailing `else None` branch was unreachable.
                            "transactionType": "Buy" if item.get("acquistionOrDisposition") == "A" else "Sell",
                        }
                        for item in data
                        if item.get("acquistionOrDisposition") in ("A", "D")
                        and (item.get("price") or 0) > 0
                        and (item.get("securitiesTransacted") or 0) > 0
                    ]
                    res_list += filtered_data
                else:
                    print(f"Failed to fetch data. Status code: {response.status}")
            except Exception as e:
                # Abort pagination on the first hard failure (original behavior).
                print(f"Error while fetching data: {e}")
                break

    # Enrich each trade with current quote data from the local JSON cache.
    new_data = []
    for item in res_list:
        try:
            symbol = item['symbol']
            with open(f"json/quote/{symbol}.json") as file:
                stock_data = ujson.load(file)
            item['marketCap'] = stock_data['marketCap']
            item['price'] = round(stock_data['price'], 2)
            item['changesPercentage'] = round(stock_data['changesPercentage'], 2)
            new_data.append({**item})
        except (OSError, KeyError, TypeError, ValueError):
            # Quote file absent, unparsable, or missing/non-numeric fields:
            # drop this row rather than fail the whole run.
            pass

    return new_data
|
|
||||||
|
async def run():
    """Cron entry point: read symbols from SQLite, fetch insider trades, persist JSON."""
    # Fetch distinct stock symbols, excluding dotted share classes (e.g. BRK.A).
    con = sqlite3.connect('stocks.db')
    try:
        cursor = con.cursor()
        cursor.execute("PRAGMA journal_mode = wal")
        cursor.execute("SELECT DISTINCT symbol FROM stocks WHERE symbol NOT LIKE '%.%'")
        stock_symbols = [row[0] for row in cursor.fetchall()]
    finally:
        # Close even if the query raises (original leaked the connection on error).
        con.close()

    # Fetch data asynchronously using aiohttp.
    async with aiohttp.ClientSession() as session:
        data = await get_data(session, stock_symbols)
        if data:
            # Dropped the original `print(data)` full-payload debug dump;
            # the record count below is enough for cron logs.
            print(f"Fetched {len(data)} records.")
            await save_json(data)
|
|
||||||
|
if __name__ == '__main__':
    # Guarded entry point so importing this module never triggers the fetch.
    try:
        asyncio.run(run())
    except Exception as e:
        # Log and exit cleanly; the scheduler will retry on the next run.
        print(f"Error: {e}")
@ -13,7 +13,7 @@ load_dotenv()
|
|||||||
api_key = os.getenv('FMP_API_KEY')
|
api_key = os.getenv('FMP_API_KEY')
|
||||||
|
|
||||||
async def save_json(data):
|
async def save_json(data):
|
||||||
with open(f"json/sentiment-tracker/data.json", 'wb') as file:
|
with open(f"json/tracker/sentiment/data.json", 'wb') as file:
|
||||||
file.write(orjson.dumps(data))
|
file.write(orjson.dumps(data))
|
||||||
|
|
||||||
|
|
||||||
@ -60,6 +60,11 @@ async def get_data(session, total_symbols):
|
|||||||
|
|
||||||
# Convert the result_data dictionary to a list of items
|
# Convert the result_data dictionary to a list of items
|
||||||
final_result = list(result_data.values())
|
final_result = list(result_data.values())
|
||||||
|
final_result = sorted(final_result, key=lambda x: x['sentiment'], reverse=True)
|
||||||
|
|
||||||
|
for index, stock in enumerate(final_result, start=1):
|
||||||
|
stock['rank'] = index
|
||||||
|
|
||||||
|
|
||||||
# Save the combined result as a single JSON file
|
# Save the combined result as a single JSON file
|
||||||
if final_result:
|
if final_result:
|
||||||
|
|||||||
32
app/main.py
32
app/main.py
@ -4037,7 +4037,7 @@ async def get_fomc_impact(api_key: str = Security(get_api_key)):
|
|||||||
headers={"Content-Encoding": "gzip"}
|
headers={"Content-Encoding": "gzip"}
|
||||||
)
|
)
|
||||||
try:
|
try:
|
||||||
with open(f"json/sentiment-tracker/data.json", 'rb') as file:
|
with open(f"json/tracker/sentiment/data.json", 'rb') as file:
|
||||||
res = orjson.loads(file.read())
|
res = orjson.loads(file.read())
|
||||||
except:
|
except:
|
||||||
res = []
|
res = []
|
||||||
@ -4083,6 +4083,36 @@ async def get_fomc_impact(data: TickerData, api_key: str = Security(get_api_key)
|
|||||||
headers={"Content-Encoding": "gzip"}
|
headers={"Content-Encoding": "gzip"}
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
|
@app.get("/insider-tracker")
async def get_insider_tracker(api_key: str = Security(get_api_key)):
    """Serve the insider-tracker dataset as gzip-compressed JSON, Redis-cached.

    Returns the cached payload when present; otherwise reads the cron job's
    output file, compresses it, caches it, and streams it.
    """
    cache_key = "insider-tracker"  # no placeholders: plain string, not f-string
    cached_result = redis_client.get(cache_key)
    if cached_result:
        return StreamingResponse(
            io.BytesIO(cached_result),
            media_type="application/json",
            headers={"Content-Encoding": "gzip"}
        )
    try:
        with open("json/tracker/insider/data.json", 'rb') as file:
            res = orjson.loads(file.read())
    except Exception:
        # Best effort: serve an empty list when the cron output is missing
        # or corrupt (narrowed from a bare `except:`).
        res = []

    data = orjson.dumps(res)
    compressed_data = gzip.compress(data)

    redis_client.set(cache_key, compressed_data)
    # NOTE(review): 3600*3600 seconds is ~150 days — sibling endpoints use
    # hour-scale TTLs; confirm this was not meant to be plain 3600 (1 hour).
    redis_client.expire(cache_key, 3600*3600)

    return StreamingResponse(
        io.BytesIO(compressed_data),
        media_type="application/json",
        headers={"Content-Encoding": "gzip"}
    )
|
|
||||||
|
|
||||||
@app.get("/newsletter")
|
@app.get("/newsletter")
|
||||||
async def get_newsletter():
|
async def get_newsletter():
|
||||||
try:
|
try:
|
||||||
|
|||||||
@ -79,7 +79,12 @@ def get_data(ticker):
|
|||||||
|
|
||||||
|
|
||||||
ticker_data = get_data(ticker)
|
ticker_data = get_data(ticker)
|
||||||
ticker_data = [item for item in ticker_data if datetime.strptime(item['date_expiration'], '%Y-%m-%d') >= datetime.now()]
|
ticker_data = [
|
||||||
|
item for item in ticker_data
|
||||||
|
if datetime.strptime(item['date_expiration'], '%Y-%m-%d') >= datetime.now() and
|
||||||
|
datetime.strptime(item['date_expiration'], '%Y-%m-%d') <= datetime.now() + timedelta(days=5)
|
||||||
|
]
|
||||||
|
|
||||||
print(len(ticker_data))
|
print(len(ticker_data))
|
||||||
|
|
||||||
def calculate_option_greeks(S, K, T, r, sigma, option_type='CALL'):
|
def calculate_option_greeks(S, K, T, r, sigma, option_type='CALL'):
|
||||||
|
|||||||
Loading…
x
Reference in New Issue
Block a user