diff --git a/app/cron_cramer_tracker.py b/app/cron_cramer_tracker.py
new file mode 100644
index 0000000..68bd4ce
--- /dev/null
+++ b/app/cron_cramer_tracker.py
@@ -0,0 +1,87 @@
+import os
+from io import StringIO
+import pandas as pd
+import ujson
+from selenium import webdriver
+from selenium.webdriver.common.by import By
+from selenium.webdriver.chrome.service import Service
+from selenium.webdriver.chrome.options import Options
+from webdriver_manager.chrome import ChromeDriverManager
+from dotenv import load_dotenv
+import sqlite3
+
+def save_json(data, file_path):
+    """Serialize *data* as JSON to *file_path*."""
+    with open(file_path, 'w') as file:
+        ujson.dump(data, file)
+
+
+query_template = """
+    SELECT
+        name, sector
+    FROM
+        stocks
+    WHERE
+        symbol = ?
+"""
+
+
+def main():
+    """Scrape the Cramer picks table and enrich each row with name/sector from stocks.db."""
+    # Load environment variables
+    con = sqlite3.connect('stocks.db')
+    load_dotenv()
+    url = os.getenv('CRAMER_WEBSITE')
+
+    # Set up the WebDriver options
+    options = Options()
+    options.add_argument('--headless')  # Run in headless mode (options.headless was removed in Selenium >= 4.10)
+
+    # Initialize the WebDriver
+    driver = webdriver.Chrome(service=Service(ChromeDriverManager().install()), options=options)
+
+    try:
+        # Fetch the website
+        driver.get(url)
+
+        # Find the table element
+        table = driver.find_element(By.TAG_NAME, 'table')
+
+        # Extract the table HTML
+        table_html = table.get_attribute('outerHTML')
+
+        # Use pandas to read the HTML table; StringIO wrapper because passing
+        # literal HTML to read_html is deprecated since pandas 2.1
+        df = pd.read_html(StringIO(table_html))[0]
+
+        # Rename the columns
+        df = df.rename(columns={
+            'Ticker': 'ticker',
+            'Direction': 'sentiment',
+            'Date': 'date',
+            'Return Since': 'returnSince'
+        })
+
+        # Convert the DataFrame to JSON
+        data = ujson.loads(df.to_json(orient='records'))
+
+        res = []
+        for item in data:
+            symbol = item['ticker']
+            try:
+                db_data = pd.read_sql_query(query_template, con, params=(symbol,))
+                res.append({**item, 'name': db_data['name'].iloc[0], 'sector': db_data['sector'].iloc[0]})
+            except Exception as e:
+                # Best-effort: keep skipping rows without DB metadata, but surface why.
+                print(f"skipping {symbol}: {e}")
+
+        # Save the JSON data
+        save_json(res, 'json/cramer-tracker/data.json')
+
+    finally:
+        # Ensure the WebDriver and DB connection are closed
+        driver.quit()
+        con.close()
+
+if __name__ == '__main__':
+    main()
diff --git a/app/main.py b/app/main.py
index cb6481e..f272a73 100755
--- a/app/main.py
+++ b/app/main.py
@@ -3181,7 +3181,7 @@ async def get_clinical_trial(data:TickerData, api_key: str = Security(get_api_ke
 )
 
 @app.post("/swap-ticker")
-async def get_clinical_trial(data:TickerData, api_key: str = Security(get_api_key)):
+async def get_swap_data(data:TickerData, api_key: str = Security(get_api_key)):
     ticker = data.ticker.upper()
     cache_key = f"swap-{ticker}"
     cached_result = redis_client.get(cache_key)
@@ -3204,6 +3204,35 @@ async def get_clinical_trial(data:TickerData, api_key: str = Security(get_api_ke
     redis_client.set(cache_key, compressed_data)
     redis_client.expire(cache_key, 3600*3600)
 
+    return StreamingResponse(
+        io.BytesIO(compressed_data),
+        media_type="application/json",
+        headers={"Content-Encoding": "gzip"}
+    )
+
+@app.get("/cramer-tracker")
+async def get_cramer_tracker(api_key: str = Security(get_api_key)):
+    cache_key = "cramer-tracker"
+    cached_result = redis_client.get(cache_key)
+    if cached_result:
+        return StreamingResponse(
+            io.BytesIO(cached_result),
+            media_type="application/json",
+            headers={"Content-Encoding": "gzip"}
+        )
+
+    try:
+        with open("json/cramer-tracker/data.json", 'rb') as file:
+            res = orjson.loads(file.read())
+    except (FileNotFoundError, orjson.JSONDecodeError):
+        res = []
+
+    data = orjson.dumps(res)
+    compressed_data = gzip.compress(data)
+
+    redis_client.set(cache_key, compressed_data)
+    redis_client.expire(cache_key, 3600*3600)  # NOTE(review): 3600*3600 s ~ 150 days; mirrors sibling endpoints — confirm intended TTL
+
     return StreamingResponse(
         io.BytesIO(compressed_data),
         media_type="application/json",