update cron job

MuslemRahimi 2024-07-26 21:45:48 +02:00
parent c4889bc081
commit 266016e46d
2 changed files with 77 additions and 10 deletions

View File

@@ -2,27 +2,56 @@ from datetime import datetime, timedelta
 import ujson
 import time
 import sqlite3
+import pandas as pd
+import numpy as np
+from collections import defaultdict
 import time
 import asyncio
 import aiohttp
-from faker import Faker
+import random
 from tqdm import tqdm
 from dotenv import load_dotenv
 import os
 load_dotenv()
 api_key = os.getenv('FMP_API_KEY')
-async def get_data(session, symbol):
-    url = f"https://financialmodelingprep.com/api/v4/institutional-ownership/symbol-ownership?symbol={symbol}&includeCurrentQuarter=true&apikey={api_key}"
-    async with session.get(url) as response:
-        data = await response.json()
-        if len(data) > 0:
-            await save_json(symbol, data[0]) #return only the latest ownership stats
+include_current_quarter = False
+async def get_data(session, symbol, max_retries=3, initial_delay=1):
+    url = f"https://financialmodelingprep.com/api/v4/institutional-ownership/symbol-ownership?symbol={symbol}&includeCurrentQuarter={include_current_quarter}&apikey={api_key}"
+    for attempt in range(max_retries):
+        try:
+            async with session.get(url) as response:
+                if response.status == 200:
+                    content_type = response.headers.get('Content-Type', '')
+                    if 'application/json' in content_type:
+                        data = await response.json()
+                        if len(data) > 0:
+                            await save_json(symbol, data[0])
+                        return
+                    else:
+                        print(f"Unexpected content type for {symbol}: {content_type}")
+                elif response.status == 504:
+                    if attempt < max_retries - 1:
+                        delay = initial_delay * (2 ** attempt) + random.uniform(0, 1)
+                        print(f"Gateway Timeout for {symbol}. Retrying in {delay:.2f} seconds...")
+                        await asyncio.sleep(delay)
+                    else:
+                        print(f"Max retries reached for {symbol} after Gateway Timeout")
+                else:
+                    print(f"Error fetching data for {symbol}: HTTP {response.status}")
+                    return
+        except Exception as e:
+            print(f"Error processing {symbol}: {str(e)}")
+            if attempt < max_retries - 1:
+                delay = initial_delay * (2 ** attempt) + random.uniform(0, 1)
+                print(f"Retrying in {delay:.2f} seconds...")
+                await asyncio.sleep(delay)
+            else:
+                print(f"Max retries reached for {symbol}")
 async def save_json(symbol, data):
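The retry loop added above is a standard exponential-backoff-with-jitter pattern: attempt n sleeps initial_delay * 2**n seconds plus up to one second of random jitter, so with the defaults the cron job waits roughly 1-2 s after the first failure and 2-3 s after the second before giving up. A minimal standalone sketch of the same pattern, assuming a generic JSON endpoint (fetch_json and fetch_all are illustrative names, not part of this commit):

import asyncio
import random

import aiohttp


async def fetch_json(session, url, max_retries=3, initial_delay=1):
    # Retry transient failures (e.g. 504 Gateway Timeout) with
    # exponential backoff plus random jitter.
    for attempt in range(max_retries):
        try:
            async with session.get(url) as response:
                if response.status == 200:
                    return await response.json()
                if response.status != 504:
                    return None  # non-retryable status: give up immediately
        except aiohttp.ClientError:
            pass  # treat network errors like a retryable failure
        if attempt < max_retries - 1:
            delay = initial_delay * (2 ** attempt) + random.uniform(0, 1)
            await asyncio.sleep(delay)  # ~1-2s, then ~2-3s with the defaults
    return None


async def fetch_all(urls):
    # Hypothetical driver: one shared session, requests fanned out concurrently.
    async with aiohttp.ClientSession() as session:
        return await asyncio.gather(*(fetch_json(session, u) for u in urls))

The jitter term spreads the retries out so that many symbols failing at once do not hit the API again in lockstep.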

View File

@@ -2569,6 +2569,44 @@ async def get_options_flow_ticker(data:TickerData, api_key: str = Security(get_api_key)):
         headers={"Content-Encoding": "gzip"}
     )
+'''
+@app.post("/options-flow-feed")
+async def get_options_flow_feed(data: LastOptionId, api_key: str = Security(get_api_key)):
+    last_option_id = data.lastId
+    try:
+        with open(f"json/options-flow/feed/data.json", 'rb') as file:
+            all_data = orjson.loads(file.read())
+        if len(last_option_id) == 0:
+            res_list = all_data[0:100]
+        else:
+            # Find the index of the element with the last known ID
+            start_index = next((i for i, item in enumerate(all_data) if item["id"] == last_option_id), -1)
+            if start_index == -1:
+                raise ValueError("Last known ID not found in data")
+            # Get the next 100 elements
+            res_list = all_data[start_index + 1:start_index + 101]
+        # Compress the data
+        compressed_data = gzip.compress(orjson.dumps(res_list))
+        return StreamingResponse(
+            io.BytesIO(compressed_data),
+            media_type="application/json",
+            headers={"Content-Encoding": "gzip"}
+        )
+    except Exception as e:
+        # Log the error for debugging
+        print(f"Error: {str(e)}")
+        return StreamingResponse(
+            io.BytesIO(gzip.compress(orjson.dumps([]))),
+            media_type="application/json",
+            headers={"Content-Encoding": "gzip"}
+        )
+'''
 @app.get("/options-flow-feed")
 async def get_options_flow_feed(api_key: str = Security(get_api_key)):
     try:
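The POST variant added here is wrapped in a ''' string literal, i.e. committed but disabled, while the existing GET endpoint stays active. It implements cursor-based pagination: the client posts the id of the last row it has seen, the server locates that id in the feed and returns the next 100 rows, gzip-compressed (an empty id returns the first page). A rough sketch of the same cursor logic in isolation (next_page and gzip_json are illustrative helpers, not part of this commit):

import gzip
import io

import orjson


def next_page(all_data, last_option_id, page_size=100):
    # Return the page_size items that follow the row whose "id" matches the cursor.
    if not last_option_id:
        return all_data[:page_size]  # no cursor yet: first page
    start_index = next((i for i, item in enumerate(all_data)
                        if item["id"] == last_option_id), -1)
    if start_index == -1:
        raise ValueError("Last known ID not found in data")
    return all_data[start_index + 1:start_index + 1 + page_size]


def gzip_json(payload):
    # Serialize with orjson and gzip-compress, mirroring what the endpoint
    # streams back with Content-Encoding: gzip.
    return io.BytesIO(gzip.compress(orjson.dumps(payload)))

Note that the linear scan makes each page O(n) in the feed length; if the feed grows large, a precomputed id-to-index map would make the cursor lookup O(1).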