add transaction api

parent da3f7a8b52
commit 7f31d2541f
@@ -14,9 +14,13 @@ load_dotenv()
 api_key = os.getenv('BENZINGA_API_KEY')
 fin = financial_data.Benzinga(api_key)

-def save_json(symbol, data, file_path):
-    with open(f'{file_path}/{symbol}.json', 'w') as file:
-        ujson.dump(data, file)
+def save_json(symbol, data, file_path, filename=None):
+    if filename == None:
+        with open(f'{file_path}/{symbol}.json', 'w') as file:
+            ujson.dump(data, file)
+    else:
+        with open(f'{file_path}/{filename}.json', 'w') as file:
+            ujson.dump(data, file)


 # Define the keys to keep
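Not part of the commit: a minimal sketch of how the extended save_json can be called, once with the default symbol-based filename and once with an explicit per-date filename. The symbol, date, and record below are illustrative, and the target folders are assumed to already exist.

import ujson

def save_json(symbol, data, file_path, filename=None):
    # Write {symbol}.json unless an explicit filename is given
    name = symbol if filename is None else filename
    with open(f'{file_path}/{name}.json', 'w') as file:
        ujson.dump(data, file)

summary = [{'date': '2024-09-06', 'premium': 125000}]  # illustrative record
save_json('AAPL', summary, 'json/options-historical-data/companies')                            # -> AAPL.json
save_json('AAPL', summary, 'json/options-historical-data/history', filename='AAPL-2024-09-06')  # -> AAPL-2024-09-06.json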
@@ -344,14 +348,30 @@ for ticker in total_symbols:
         # Group ticker_data by 'date' and collect all items for each date
         grouped_history = defaultdict(list)
         for item in ticker_data:
-            filtered_item = filter_data(item)
-            grouped_history[filtered_item['date']].append(filtered_item)
+            try:
+                filtered_item = filter_data(item)
+                grouped_history[filtered_item['date']].append(filtered_item)
+            # Save each date's transactions separately
+            except:
+                pass
+        # Save the single transactions of each daily date separately for faster performance on the end-user side; one combined file would be too big.
+        for date, data in grouped_history.items():
+            try:
+                # Create a filename based on ticker and date, e.g. "AAPL-2024-09-07.json"
+                filename = f"{ticker}-{date}"
+
+                # Save the JSON to the specified folder for historical data
+                save_json(ticker, data, 'json/options-historical-data/history', filename)
+            except:
+                pass
+
+
         daily_historical_option_data = get_historical_option_data(ticker_data, df_price)
         daily_historical_option_data = daily_historical_option_data.merge(df_price[['date', 'changesPercentage']], on='date', how='inner')


         # Add "history" column containing all filtered items with the same date
-        daily_historical_option_data['history'] = daily_historical_option_data['date'].apply(lambda x: grouped_history.get(x, []))
+        #daily_historical_option_data['history'] = daily_historical_option_data['date'].apply(lambda x: grouped_history.get(x, []))

         if not daily_historical_option_data.empty:
             save_json(ticker, daily_historical_option_data.to_dict('records'), 'json/options-historical-data/companies')
@@ -367,7 +387,7 @@ for ticker in total_symbols:
         daily_gex = daily_gex.merge(df_price[['date', 'close']], on='date', how='inner')
         if not daily_gex.empty:
             save_json(ticker, daily_gex.to_dict('records'), 'json/options-gex/companies')


     except Exception as e:
         print(e)
         pass

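The job writes one JSON file per ticker and date under json/options-historical-data/history. For context, a sketch (not from the diff) of reading such a file back, the same way the new API endpoint does; the ticker and date are illustrative:

import orjson

transaction_id = "AAPL-2024-09-07"  # illustrative {ticker}-{date} id
with open(f"json/options-historical-data/history/{transaction_id}.json", "rb") as file:
    transactions = orjson.loads(file.read())
# Each entry is one filtered option transaction for that ticker on that date
print(len(transactions))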
app/main.py (32 additions)
@@ -292,6 +292,8 @@ class HeatMapData(BaseModel):
 class StockScreenerData(BaseModel):
     ruleOfList: List[str]

+class TransactionId(BaseModel):
+    transactionId: str


 # Replace NaN values with None in the resulting JSON object
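The new TransactionId model is what FastAPI uses to parse the POST body of the transactions endpoint. A minimal sketch of the validation it performs (the example id is illustrative):

from pydantic import BaseModel

class TransactionId(BaseModel):
    transactionId: str

body = TransactionId(transactionId="AAPL-2024-09-07")  # parsed from {"transactionId": "..."}
print(body.transactionId)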
@@ -2619,6 +2621,36 @@ async def get_options_chain(data:TickerData, api_key: str = Security(get_api_key
             headers={"Content-Encoding": "gzip"}
         )

+@app.post("/options-daily-transactions")
+async def get_options_chain(data: TransactionId, api_key: str = Security(get_api_key)):
+    transactionId = data.transactionId
+    print(transactionId)
+    cache_key = f"options-daily-transactions-{transactionId}"
+
+    cached_result = redis_client.get(cache_key)
+    if cached_result:
+        return StreamingResponse(
+            io.BytesIO(cached_result),
+            media_type="application/json",
+            headers={"Content-Encoding": "gzip"})
+    try:
+        with open(f"json/options-historical-data/history/{transactionId}.json", 'rb') as file:
+            res_list = orjson.loads(file.read())
+    except Exception as e:
+        print(e)
+        res_list = []
+
+    data = orjson.dumps(res_list)
+    compressed_data = gzip.compress(data)
+    redis_client.set(cache_key, compressed_data)
+    redis_client.expire(cache_key, 3600*3600)  # Set cache expiration time (3600*3600 seconds)
+
+    return StreamingResponse(
+        io.BytesIO(compressed_data),
+        media_type="application/json",
+        headers={"Content-Encoding": "gzip"}
+    )
+
 '''
 @app.post("/options-flow-feed")
 async def get_options_flow_feed(data: LastOptionId, api_key: str = Security(get_api_key)):
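A sketch of how a client might call the new endpoint. The base URL, port, and API-key header name are assumptions (they depend on the deployment and on how get_api_key reads the key), and the transaction id is illustrative:

import requests

url = "http://localhost:8000/options-daily-transactions"   # assumed local dev server
headers = {"X-API-KEY": "<your-api-key>"}                   # assumed header name used by get_api_key
payload = {"transactionId": "AAPL-2024-09-07"}              # matches the f"{ticker}-{date}" filenames

response = requests.post(url, json=payload, headers=headers)
response.raise_for_status()
transactions = response.json()  # gzip content encoding is decompressed transparently by requests
print(len(transactions), "transactions")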