clean code

parent 2e098f0763
commit cd794ad2f1
@@ -14,13 +14,13 @@ load_dotenv()
api_key = os.getenv('FMP_API_KEY')


-async def fetch_and_save_symbols_data(symbols, etf_symbols, crypto_symbols, session):
+async def fetch_and_save_symbols_data(symbols, etf_symbols, index_symbols, session):
    tasks = []
    for symbol in symbols:
        if symbol in etf_symbols:
            query_con = etf_con
-        elif symbol in crypto_symbols:
-            query_con = crypto_con
+        elif symbol in index_symbols:
+            query_con = index_con
        else:
            query_con = con

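The hunk above swaps the crypto branch for index routing: each symbol in the chunk is matched against the ETF and index lists and written through the matching SQLite connection, falling back to the stock database. For reference, a minimal sketch of the same routing as a standalone helper (hypothetical name, assuming the three connections already exist):

def pick_connection(symbol, etf_symbols, index_symbols, con, etf_con, index_con):
    # Hypothetical helper, not part of this commit: ETFs and indices get their own
    # databases; everything else falls back to stocks.db.
    if symbol in etf_symbols:
        return etf_con
    if symbol in index_symbols:
        return index_con
    return con

# usage inside the loop: query_con = pick_connection(symbol, etf_symbols, index_symbols, con, etf_con, index_con)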
@@ -104,12 +104,8 @@ async def run():
        etf_cursor.execute("SELECT DISTINCT symbol FROM etfs")
        etf_symbols = [row[0] for row in etf_cursor.fetchall()]

-        crypto_cursor = crypto_con.cursor()
-        crypto_cursor.execute("PRAGMA journal_mode = wal")
-        crypto_cursor.execute("SELECT DISTINCT symbol FROM cryptos")
-        crypto_symbols = [row[0] for row in crypto_cursor.fetchall()]
-
-        total_symbols = stock_symbols + etf_symbols + crypto_symbols
+        index_symbols = ["^SPX","^VIX"]
+        total_symbols = stock_symbols + etf_symbols + index_symbols
    except Exception as e:
        print(f"Failed to fetch symbols: {e}")
        return
@@ -119,7 +115,7 @@ async def run():
        async with aiohttp.ClientSession(connector=connector) as session:
            for i in range(0, len(total_symbols), chunk_size):
                symbols_chunk = total_symbols[i:i + chunk_size]
-                await fetch_and_save_symbols_data(symbols_chunk, etf_symbols, crypto_symbols, session)
+                await fetch_and_save_symbols_data(symbols_chunk, etf_symbols, index_symbols, session)
                print('sleeping for 30 sec')
                await asyncio.sleep(30)  # Wait for 30 seconds between chunks
    except Exception as e:
@@ -128,7 +124,7 @@ async def run():
try:
    con = sqlite3.connect('stocks.db')
    etf_con = sqlite3.connect('etf.db')
-    crypto_con = sqlite3.connect('crypto.db')
+    index_con = sqlite3.connect('index.db')

    berlin_tz = pytz.timezone('Europe/Berlin')
    end_date = datetime.now(berlin_tz)
@@ -143,7 +139,7 @@ try:
    asyncio.run(run())
    con.close()
    etf_con.close()
-    crypto_con.close()
+    index_con.close()
except Exception as e:
    print(e)

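The same pattern repeats across the scripts touched by this commit: stock and ETF symbols are read from their SQLite databases, the crypto lookup is dropped, and the two indices ^SPX and ^VIX are appended as a hard-coded list. A minimal sketch of that pattern, assuming the stocks/etfs tables used elsewhere in these scripts:

import sqlite3

def load_total_symbols():
    # Sketch only: stocks and ETFs come from SQLite, the two indices are a fixed list.
    con = sqlite3.connect('stocks.db')
    etf_con = sqlite3.connect('etf.db')

    cursor = con.cursor()
    cursor.execute("SELECT DISTINCT symbol FROM stocks")
    stock_symbols = [row[0] for row in cursor.fetchall()]

    etf_cursor = etf_con.cursor()
    etf_cursor.execute("SELECT DISTINCT symbol FROM etfs")
    etf_symbols = [row[0] for row in etf_cursor.fetchall()]

    con.close()
    etf_con.close()

    index_symbols = ["^SPX", "^VIX"]
    return stock_symbols + etf_symbols + index_symbols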
@@ -1,13 +1,8 @@
-import requests
import orjson
-import ujson
-import re
-from datetime import datetime
import os
import sqlite3
import time
from tqdm import tqdm
-import pandas as pd
import numpy as np


@@ -25,6 +20,7 @@ etf_cursor.execute("PRAGMA journal_mode = wal")
etf_cursor.execute("SELECT DISTINCT symbol FROM etfs")
etf_symbols = [row[0] for row in etf_cursor.fetchall()]

+index_symbols = ["^SPX", "^VIX"]
con.close()
etf_con.close()

@@ -145,7 +141,7 @@ def compute_realized_volatility(data, window_size=20):

if __name__ == '__main__':
    directory_path = "json/implied-volatility"
-    total_symbols = stocks_symbols + etf_symbols
+    total_symbols = stocks_symbols + etf_symbols + index_symbols

    for symbol in tqdm(total_symbols):
        try:
@@ -106,7 +106,9 @@ async def run():
    con.close()
    etf_con.close()

-    total_symbols = stocks_symbols + etf_symbols
+    index_symbols = ['^SPX','^VIX']
+
+    total_symbols = stocks_symbols + etf_symbols + index_symbols
    total_symbols = sorted(total_symbols, key=lambda x: '.' in x)

    chunk_size = 1000
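The unchanged line total_symbols = sorted(total_symbols, key=lambda x: '.' in x) keeps plain tickers ahead of dotted ones, because False sorts before True and Python's sort is stable. A tiny illustration:

symbols = ["BRK.A", "AAPL", "^SPX", "RDS.B", "MSFT"]
# '.' in x is False for plain tickers and True for dotted ones,
# so dotted symbols move to the end while relative order is preserved.
print(sorted(symbols, key=lambda x: '.' in x))
# ['AAPL', '^SPX', 'MSFT', 'BRK.A', 'RDS.B']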
@@ -29,6 +29,9 @@ etf_cursor.execute("PRAGMA journal_mode = wal")
etf_cursor.execute("SELECT DISTINCT symbol FROM etfs")
etf_symbols = [row[0] for row in etf_cursor.fetchall()]

+index_symbols = ["^SPX", "^VIX"]
+
+
con.close()
etf_con.close()

@@ -225,9 +228,8 @@ def aggregate_data_by_expiration(symbol):

def get_overview_data():
    directory_path = "json/gex-dex/overview"
-    total_symbols = stocks_symbols+etf_symbols
+    total_symbols = stocks_symbols+etf_symbols+index_symbols

-    counter = 0

    #Test mode
    #total_symbols = ['TSLA']
@@ -309,7 +309,10 @@ etf_cursor.execute("PRAGMA journal_mode = wal")
#etf_cursor.execute("SELECT DISTINCT symbol FROM etfs WHERE marketCap > 1E9")
etf_cursor.execute("SELECT DISTINCT symbol FROM etfs")
etf_symbols = [row[0] for row in etf_cursor.fetchall()]
-total_symbols = stocks_symbols + etf_symbols
+index_symbols = ["^SPX", "^VIX"]
+
+total_symbols = stocks_symbols + etf_symbols + index_symbols


for symbol in tqdm(total_symbols):
@@ -16,21 +16,7 @@ today = datetime.today().date()

load_dotenv()

-# Connect to the databases
-con = sqlite3.connect('stocks.db')
-etf_con = sqlite3.connect('etf.db')
-cursor = con.cursor()
-cursor.execute("PRAGMA journal_mode = wal")
-cursor.execute("SELECT DISTINCT symbol FROM stocks WHERE symbol NOT LIKE '%.%'")
-stocks_symbols = [row[0] for row in cursor.fetchall()]
-
-etf_cursor = etf_con.cursor()
-etf_cursor.execute("PRAGMA journal_mode = wal")
-etf_cursor.execute("SELECT DISTINCT symbol FROM etfs")
-etf_symbols = [row[0] for row in etf_cursor.fetchall()]
-
-con.close()
-etf_con.close()

def get_tickers_from_directory(directory: str):
    try:
@@ -1,9 +1,7 @@
from __future__ import print_function
import asyncio
-import time
import intrinio_sdk as intrinio
from intrinio_sdk.rest import ApiException
-from datetime import datetime, timedelta
import orjson
from tqdm import tqdm
import os
@@ -22,6 +22,11 @@ directory_path = "json/all-options-contracts"
current_date = datetime.now().date()

async def save_json(data, symbol, contract_id):
+    if symbol == 'SPX':
+        symbol = '^SPX'
+    elif symbol == 'VIX':
+        symbol = '^VIX'
+
    directory_path = f"json/all-options-contracts/{symbol}"
    os.makedirs(directory_path, exist_ok=True)  # Ensure the directory exists
    with open(f"{directory_path}/{contract_id}.json", 'wb') as file:  # Use binary mode for orjson
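save_json now restores the caret prefix that the index symbols carry everywhere else in the project before building the output path. A minimal equivalent using a lookup table (a sketch, not part of the commit):

# Intrinio returns the bare index tickers, while the JSON directories use the ^ prefix.
INDEX_SYMBOL_MAP = {'SPX': '^SPX', 'VIX': '^VIX'}

def normalize_index_symbol(symbol):
    # Non-index symbols pass through unchanged.
    return INDEX_SYMBOL_MAP.get(symbol, symbol)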
@@ -250,7 +255,9 @@ def get_total_symbols():
    etf_cursor.execute("SELECT DISTINCT symbol FROM etfs")
    etf_symbols = [row[0] for row in etf_cursor.fetchall()]

-    return stocks_symbols + etf_symbols
+    # Important: don't add the ^ prefix, since Intrinio doesn't include it in the symbol
+    index_symbols = ["SPX", "VIX"]
+    return stocks_symbols + etf_symbols + index_symbols


def get_expiration_date(option_symbol):
@@ -25,7 +25,8 @@ def get_total_symbols():
    etf_cursor.execute("SELECT DISTINCT symbol FROM etfs")
    etf_symbols = [row[0] for row in etf_cursor.fetchall()]

-    return stocks_symbols + etf_symbols
+    index_symbols = ["^SPX", "^VIX"]
+    return stocks_symbols + etf_symbols + index_symbols


def save_json(data, symbol):
@@ -1,18 +1,10 @@
import pytz
from datetime import datetime, timedelta
from urllib.request import urlopen
-import certifi
import json
-import ujson
-import schedule
-import time
-import subprocess
from pocketbase import PocketBase  # Client also works the same
import asyncio
-import aiohttp
import pytz
-import pandas as pd
-import numpy as np
import requests
import hashlib
import orjson
@@ -56,6 +48,8 @@ with sqlite3.connect('etf.db') as etf_con:
    etf_cursor.execute("SELECT DISTINCT symbol FROM etfs")
    etf_symbols = [row[0] for row in etf_cursor.fetchall()]

+index_symbols = ["^SPX", "^VIX"]
+

def generate_unique_id(date, text):
    # Concatenate the title and date to form a string
@@ -127,13 +121,20 @@ async def push_wiim(user_id):
        if exist == False:
            #check if user is subscribed to pushSubscription to receive push notifications

+            if symbol in stocks_symbols:
+                asset_type = 'stock'
+            elif symbol in etf_symbols:
+                asset_type = 'etf'
+            else:
+                asset_type = 'index'
+
            newNotification = {
                'opUser': user_id,
                'user': '9ncz4wunmhk0k52', #stocknear bot id
                'notifyType': 'wiim',
                'sent': True,
                'pushHash': unique_id,
-                'liveResults': {'symbol': symbol, 'assetType': 'stocks' if symbol in stocks_symbols else 'etf'},
+                'liveResults': {'symbol': symbol, 'assetType': asset_type},
            }

            notify_item = pb.collection('notifications').create(newNotification)
@@ -191,13 +192,20 @@ async def push_earnings_release(user_id):
        if exist == False:
            #check if user is subscribed to pushSubscription to receive push notifications

+            if symbol in stocks_symbols:
+                asset_type = 'stock'
+            elif symbol in etf_symbols:
+                asset_type = 'etf'
+            else:
+                asset_type = 'index'
+
            newNotification = {
                'opUser': user_id,
                'user': '9ncz4wunmhk0k52', #stocknear bot id
                'notifyType': 'earningsSurprise',
                'sent': True,
                'pushHash': unique_id,
-                'liveResults': {'symbol': symbol, 'assetType': 'stocks' if symbol in stocks_symbols else 'etf'},
+                'liveResults': {'symbol': symbol, 'assetType': asset_type},
            }

            notify_item = pb.collection('notifications').create(newNotification)
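The same stock/etf/index classification is added in both push_wiim and push_earnings_release before the notification payload is built. A minimal sketch of how it could be shared (hypothetical helper, assuming the module-level symbol lists):

def classify_asset_type(symbol, stocks_symbols, etf_symbols):
    # Anything that is neither a stock nor an ETF is treated as an index (^SPX, ^VIX).
    if symbol in stocks_symbols:
        return 'stock'
    if symbol in etf_symbols:
        return 'etf'
    return 'index'

# usage in both push functions: asset_type = classify_asset_type(symbol, stocks_symbols, etf_symbols)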
@@ -97,7 +97,6 @@ async def save_bid_ask_as_json(symbol, data):
async def run():
    con = sqlite3.connect('stocks.db')
    etf_con = sqlite3.connect('etf.db')
-    crypto_con = sqlite3.connect('crypto.db')

    cursor = con.cursor()
    cursor.execute("PRAGMA journal_mode = wal")
@@ -109,14 +108,11 @@ async def run():
    etf_cursor.execute("SELECT DISTINCT symbol FROM etfs")
    etf_symbols = [row[0] for row in etf_cursor.fetchall()]

-    crypto_cursor = crypto_con.cursor()
-    crypto_cursor.execute("PRAGMA journal_mode = wal")
-    crypto_cursor.execute("SELECT DISTINCT symbol FROM cryptos")
-    crypto_symbols = [row[0] for row in crypto_cursor.fetchall()]
+    index_symbols = ['^SPX','^VIX']

    con.close()
    etf_con.close()
-    crypto_con.close()

    new_york_tz = pytz.timezone('America/New_York')
    current_time_new_york = datetime.now(new_york_tz)
@@ -126,15 +122,16 @@ async def run():


    #Crypto Quotes
+    '''
    latest_quote = await get_quote_of_stocks(crypto_symbols)
    for item in latest_quote:
        symbol = item['symbol']

        await save_quote_as_json(symbol, item)
+    '''
    # Stock and ETF Quotes

-    total_symbols = stocks_symbols+etf_symbols
+    total_symbols = stocks_symbols+etf_symbols+index_symbols

    chunk_size = len(total_symbols) // 20  # Divide the list into N chunks
    chunks = [total_symbols[i:i + chunk_size] for i in range(0, len(total_symbols), chunk_size)]
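The quote loop splits total_symbols into roughly 20 chunks via integer division. Note that len(total_symbols) // 20 is 0 when fewer than 20 symbols are loaded, and a zero step makes range() raise a ValueError; a guarded variant (a sketch, not part of the commit):

def split_into_chunks(symbols, target_chunks=20):
    # Guard against a zero chunk size when the symbol list is small.
    chunk_size = max(1, len(symbols) // target_chunks)
    return [symbols[i:i + chunk_size] for i in range(0, len(symbols), chunk_size)]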
@@ -854,7 +854,7 @@ async def stock_dividend(data: TickerData, api_key: str = Security(get_api_key)):
@app.post("/stock-quote")
async def stock_dividend(data: TickerData, api_key: str = Security(get_api_key)):
    ticker = data.ticker.upper()
-
+    print(ticker)
    cache_key = f"stock-quote-{ticker}"
    cached_result = redis_client.get(cache_key)
    if cached_result:
@@ -866,6 +866,7 @@ async def stock_dividend(data: TickerData, api_key: str = Security(get_api_key)):
    except:
        res = {}

+    print(ticker)
    redis_client.set(cache_key, orjson.dumps(res))
    redis_client.expire(cache_key, 60)
    return res
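The endpoints touched here follow the same cache-aside flow around Redis: serve the cached bytes when present, otherwise build the response, store it with orjson, and expire it after 60 seconds. A condensed sketch of that flow (hypothetical fetch callable; redis_client and orjson as used above):

def get_or_cache(cache_key, fetch, ttl_seconds=60):
    # Cache-aside sketch: return the cached payload if present, otherwise compute and store it.
    cached_result = redis_client.get(cache_key)
    if cached_result:
        return orjson.loads(cached_result)
    try:
        res = fetch()
    except Exception:
        res = {}
    redis_client.set(cache_key, orjson.dumps(res))
    redis_client.expire(cache_key, ttl_seconds)
    return res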
@@ -3130,6 +3131,7 @@ async def get_all_analysts(data:AnalystId, api_key: str = Security(get_api_key)):
async def get_wiim(data:TickerData, api_key: str = Security(get_api_key)):
    ticker = data.ticker.upper()
    cache_key = f"wiim-{ticker}"
+    print(ticker)
    cached_result = redis_client.get(cache_key)
    if cached_result:
        return StreamingResponse(