update cron job
This commit is contained in:
Parent: b79624d1f9
Commit: 77d6afd9b3
@@ -12,16 +12,40 @@ from tqdm import tqdm
 import concurrent.futures
 import re
 from itertools import combinations
+from dotenv import load_dotenv
 import os
 import gc
 from utils.feature_engineering import *

 #Enable automatic garbage collection
 gc.enable()

+load_dotenv()
+api_key = os.getenv('FMP_API_KEY')
+

 async def save_json(symbol, data):
     with open(f"json/ai-score/companies/{symbol}.json", 'wb') as file:
         file.write(orjson.dumps(data))

+async def fetch_historical_price(ticker):
+    url = f"https://financialmodelingprep.com/api/v3/historical-price-full/{ticker}?from=1995-10-10&apikey={api_key}"
+    async with aiohttp.ClientSession() as session:
+        async with session.get(url) as response:
+            # Check if the request was successful
+            if response.status == 200:
+                data = await response.json()
+                # Extract historical price data
+                historical_data = data.get('historical', [])
+                # Convert to DataFrame
+                df = pd.DataFrame(historical_data).reset_index(drop=True)
+                return df
+            else:
+                raise Exception(f"Error fetching data: {response.status} {response.reason}")
+
 def top_uncorrelated_features(df, target_col='Target', top_n=10, threshold=0.75):
     # Drop the columns to exclude from the DataFrame
     df_filtered = df.drop(columns=['date','price'])
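Note: the new fetch_historical_price coroutine above replaces the yfinance download further down. A minimal smoke test, given only as an assumed sketch (not part of the commit; 'AAPL' is just an example ticker, and FMP_API_KEY must be present in the .env file), could look like:

import asyncio

async def _smoke_test():
    # Hypothetical check of the new FMP-backed fetcher.
    df = await fetch_historical_price('AAPL')
    # FMP's 'historical' payload is typically returned newest-first;
    # sort ascending before any time-ordered feature engineering.
    df = df.sort_values('date').reset_index(drop=True)
    print(df[['date', 'open', 'high', 'low', 'close', 'volume']].tail())

if __name__ == '__main__':
    asyncio.run(_smoke_test())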
@@ -139,10 +163,7 @@ async def download_data(ticker, con, start_date, end_date):
     combined_data = list(combined_data.values())

     # Download historical stock data using yfinance
-    df = yf.download(ticker, start=start_date, end=end_date, interval="1d").reset_index()
-    df = df.rename(columns={'Adj Close': 'close', 'Date': 'date', 'Open': 'open', 'High': 'high', 'Low': 'low', 'Volume': 'volume'})
-    df['date'] = df['date'].dt.strftime('%Y-%m-%d')
+    df = await fetch_historical_price(ticker)

     # Get the list of columns in df
     df_columns = df.columns
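The three removed lines were yfinance-specific: the FMP 'historical' records already use lowercase keys such as date, open, high, low, close and volume, and the date field arrives as a plain 'YYYY-MM-DD' string, so the rename and strftime steps are no longer needed. If downstream code should only ever see the OHLCV columns, a small normalization step (an assumption, not part of this commit) could follow the fetch:

# Hypothetical trimming of the FMP response to the columns the pipeline expects.
ohlcv_cols = ['date', 'open', 'high', 'low', 'close', 'volume']
df = df[ohlcv_cols].sort_values('date').reset_index(drop=True)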
@@ -305,7 +326,7 @@ async def warm_start_training(tickers, con):
     df_test = pd.DataFrame()
     test_size = 0.2

-    dfs = await chunked_gather(tickers, con, start_date, end_date, chunk_size=1)
+    dfs = await chunked_gather(tickers, con, start_date, end_date, chunk_size=10)

     train_list = []
     test_list = []
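chunked_gather itself is not part of this diff; raising chunk_size from 1 to 10 only means more download_data coroutines run concurrently per batch. A sketch of what such a helper commonly looks like (the body below is assumed, not the repository's implementation):

import asyncio

async def chunked_gather(tickers, con, start_date, end_date, chunk_size=10):
    results = []
    for i in range(0, len(tickers), chunk_size):
        chunk = tickers[i:i + chunk_size]
        # Gather one batch of downloads concurrently, then move on to the next batch.
        results.extend(await asyncio.gather(
            *(download_data(t, con, start_date, end_date) for t in chunk)
        ))
    return results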
@@ -386,7 +407,7 @@ async def run():
     else:
         # Fine-tuning and evaluation for all stocks
         cursor.execute("SELECT DISTINCT symbol FROM stocks WHERE marketCap >= 1E9 AND symbol NOT LIKE '%.%'")
-        stock_symbols = ['AWR'] #[row[0] for row in cursor.fetchall()]
+        stock_symbols = [row[0] for row in cursor.fetchall()]

        print(f"Total tickers for fine-tuning: {len(stock_symbols)}")
        start_date = datetime(1995, 1, 1).strftime("%Y-%m-%d")
@@ -558,14 +558,6 @@ def run_economy_indicator():
     ]
     run_command(command)

-def run_trend_analysis():
-    run_command(["python3", "cron_trend_analysis.py"])
-    command = [
-        "sudo", "rsync", "-avz", "-e", "ssh",
-        "/root/backend/app/json/trend-analysis",
-        f"root@{useast_ip_address}:/root/backend/app/json"
-    ]
-    run_command(command)

 def run_sentiment_analysis():
     run_command(["python3", "cron_sentiment_analysis.py"])
@@ -586,15 +578,6 @@ def run_price_analysis():
     run_command(command)


-def run_fundamental_predictor():
-    run_command(["python3", "cron_fundamental_predictor.py"])
-    command = [
-        "sudo", "rsync", "-avz", "-e", "ssh",
-        "/root/backend/app/json/fundamental-predictor-analysis",
-        f"root@{useast_ip_address}:/root/backend/app/json"
-    ]
-    run_command(command)
-
 # Create functions to run each schedule in a separate thread
 def run_threaded(job_func):
     job_thread = threading.Thread(target=job_func)
@@ -636,10 +619,8 @@ schedule.every().day.at("14:00").do(run_threaded, run_cron_sector).tag('sector_j
 schedule.every(2).days.at("01:00").do(run_threaded, run_market_maker).tag('markt_maker_job')
 schedule.every(2).days.at("08:30").do(run_threaded, run_financial_score).tag('financial_score_job')
 schedule.every().saturday.at("05:00").do(run_threaded, run_ownership_stats).tag('ownership_stats_job')
-schedule.every().saturday.at("06:00").do(run_threaded, run_trend_analysis).tag('trend_analysis_job')
-schedule.every().saturday.at("08:00").do(run_threaded, run_sentiment_analysis).tag('sentiment_analysis_job')
+schedule.every().saturday.at("06:00").do(run_threaded, run_sentiment_analysis).tag('sentiment_analysis_job')
 schedule.every().saturday.at("10:00").do(run_threaded, run_price_analysis).tag('price_analysis_job')
-schedule.every().saturday.at("12:00").do(run_threaded, run_fundamental_predictor).tag('fundamental_predictor_job')


 schedule.every(5).minutes.do(run_threaded, run_cron_market_movers).tag('market_movers_job')
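These jobs are dispatched through the schedule library via run_threaded (kept in the context above). The polling loop that actually fires them sits outside this diff; the usual pattern, shown here only as an assumed sketch, is:

import time

while True:
    schedule.run_pending()  # run any job whose scheduled time has passed
    time.sleep(1)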