update insider cron job

MuslemRahimi 2024-10-27 10:09:26 +01:00
parent 3d02d2ba12
commit 3c29c992e5


@@ -3,10 +3,13 @@ import asyncio
 import aiohttp
 import sqlite3
 from datetime import datetime
+from itertools import groupby
+from operator import itemgetter
 from aiofiles import open as async_open
 from tqdm import tqdm
 from dotenv import load_dotenv
 import os
+import time

 load_dotenv()
 api_key = os.getenv('FMP_API_KEY')
@@ -17,9 +20,81 @@ async def save_json(data):
         await file.write(ujson.dumps(data))

+def format_name(name):
+    """
+    Formats a name from "LASTNAME MIDDLE FIRSTNAME" format to "Firstname Middle Lastname"
+
+    Args:
+        name (str): Name in uppercase format (e.g., "SINGLETON J MATTHEW")
+
+    Returns:
+        str: Formatted name (e.g., "Matthew J Singleton")
+    """
+    # Split the name into parts
+    parts = name.strip().split()
+
+    # Handle empty string or single word
+    if not parts:
+        return ""
+    if len(parts) == 1:
+        return parts[0].capitalize()
+
+    # The first part is the last name
+    lastname = parts[0].capitalize()
+
+    # The remaining parts are in reverse order
+    other_parts = parts[1:]
+    other_parts.reverse()
+
+    # Capitalize each part
+    other_parts = [part.capitalize() for part in other_parts]
+
+    # Join all parts
+    return " ".join(other_parts + [lastname])
+
+def aggregate_transactions(transactions, min_value=100_000):
+    # Sort transactions by the keys we want to group by
+    sorted_transactions = sorted(
+        transactions,
+        key=lambda x: (x['reportingName'], x['symbol'], x['transactionType'])
+    )
+
+    # Group by reportingName, symbol, and transactionType
+    result = []
+    for key, group in groupby(
+        sorted_transactions,
+        key=lambda x: (x['reportingName'], x['symbol'], x['transactionType'])
+    ):
+        group_list = list(group)
+
+        # Calculate average value
+        avg_value = sum(t['value'] for t in group_list) / len(group_list)
+
+        # Only include transactions with average value >= min_value
+        if avg_value >= min_value:
+            # Find latest filing date
+            latest_date = max(
+                datetime.strptime(t['filingDate'], '%Y-%m-%d %H:%M:%S')
+                for t in group_list
+            ).strftime('%Y-%m-%d %H:%M:%S')
+
+            # Create aggregated transaction with formatted name
+            result.append({
+                'reportingName': format_name(key[0]),
+                'symbol': key[1],
+                'transactionType': key[2],
+                'filingDate': latest_date,
+                'avgValue': avg_value
+            })
+
+    # Sort the final result by filingDate
+    return sorted(result, key=lambda x: x['filingDate'], reverse=True)
+
 async def get_data(session, symbols):
     res_list = []
-    for page in range(0, 3): # Adjust the number of pages as needed
+    for page in range(0, 20): # Adjust the number of pages as needed
         url = f"https://financialmodelingprep.com/api/v4/insider-trading?page={page}&apikey={api_key}"
         async with session.get(url) as response:
             try:
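
The format_name helper added in this hunk reorders FMP's uppercase "LASTNAME MIDDLE FIRSTNAME" strings for display. A quick sanity check of the intended behavior (expected outputs derived from the code above, not captured from the live job):

    >>> format_name("SINGLETON J MATTHEW")
    'Matthew J Singleton'
    >>> format_name("DOE JANE")
    'Jane Doe'
    >>> format_name("MUSK")
    'Musk'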
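
aggregate_transactions collapses repeat filings by the same insider, symbol, and transaction type into a single record, keeping the most recent filingDate, averaging the values, and dropping any group whose average falls below min_value (100,000 by default). A minimal sketch with invented transactions (the dict keys match those the code expects; the figures are illustrative only):

    transactions = [
        {'reportingName': 'SINGLETON J MATTHEW', 'symbol': 'AAPL',
         'transactionType': 'P-Purchase', 'value': 150_000,
         'filingDate': '2024-10-25 09:30:00'},
        {'reportingName': 'SINGLETON J MATTHEW', 'symbol': 'AAPL',
         'transactionType': 'P-Purchase', 'value': 250_000,
         'filingDate': '2024-10-26 16:00:00'},
        {'reportingName': 'DOE JANE', 'symbol': 'MSFT',
         'transactionType': 'S-Sale', 'value': 5_000,
         'filingDate': '2024-10-24 10:00:00'},
    ]

    print(aggregate_transactions(transactions))
    # [{'reportingName': 'Matthew J Singleton', 'symbol': 'AAPL',
    #   'transactionType': 'P-Purchase', 'filingDate': '2024-10-26 16:00:00',
    #   'avgValue': 200000.0}]
    # The DOE JANE group is dropped: its 5,000 average is below min_value.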
@@ -48,6 +123,9 @@ async def get_data(session, symbols):
                 print(f"Error while fetching data: {e}")
                 break

+    res_list = aggregate_transactions(res_list)
+
     new_data = []
     for item in res_list:
         try:
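
Only fragments of get_data appear in this diff. For orientation, a rough self-contained sketch of the paging pattern it follows, now walking 20 pages instead of 3 (fetch_insider_trades is a hypothetical name for illustration; the real get_data shares a session, filters results against stock_symbols, and post-processes them into new_data):

    import asyncio
    import os
    import aiohttp

    api_key = os.getenv('FMP_API_KEY')

    async def fetch_insider_trades(pages=20):
        res_list = []
        async with aiohttp.ClientSession() as session:
            for page in range(0, pages):
                url = (f"https://financialmodelingprep.com/api/v4/insider-trading"
                       f"?page={page}&apikey={api_key}")
                async with session.get(url) as response:
                    try:
                        data = await response.json()
                        if not data:  # stop early once the API runs out of pages
                            break
                        res_list.extend(data)
                    except Exception as e:
                        print(f"Error while fetching data: {e}")
                        break
        return res_list

    # Usage: asyncio.run(fetch_insider_trades())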
@@ -79,7 +157,6 @@ async def run():
     async with aiohttp.ClientSession() as session:
         data = await get_data(session, stock_symbols)
         if len(data) > 0:
-            print(data)
             print(f"Fetched {len(data)} records.")
             await save_json(data)