update reddit statistics
parent 606d25294c
commit f44edfd594
@@ -2,18 +2,34 @@ import json
 import re
 import requests
 import praw
-from datetime import datetime
+from datetime import datetime, timedelta
 from collections import defaultdict

 import os
 from dotenv import load_dotenv
+import sqlite3
+
+con = sqlite3.connect('stocks.db')
+cursor = con.cursor()
+cursor.execute("PRAGMA journal_mode = wal")
+cursor.execute("SELECT DISTINCT symbol FROM stocks")
+stock_symbols = [row[0] for row in cursor.fetchall()]
+
+etf_con = sqlite3.connect('etf.db')
+etf_cursor = etf_con.cursor()
+etf_cursor.execute("PRAGMA journal_mode = wal")
+etf_cursor.execute("SELECT DISTINCT symbol FROM etfs")
+etf_symbols = [row[0] for row in etf_cursor.fetchall()]
+
+con.close()
+etf_con.close()

 load_dotenv()
 client_key = os.getenv('REDDIT_API_KEY')
 client_secret = os.getenv('REDDIT_API_SECRET')
 user_agent = os.getenv('REDDIT_USER_AGENT')


 # Initialize Reddit instance
 reddit = praw.Reddit(
     client_id=client_key,
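Review note: the new module-level block reads the distinct symbols out of stocks.db and etf.db once at startup and closes both connections before any Reddit work begins. A minimal alternative sketch (the load_symbols helper and the use of sets are suggestions, not part of the commit) that scopes each connection and gives O(1) membership checks for the assetType tagging later in this diff:

import sqlite3
from contextlib import closing

def load_symbols(db_path, table):
    # closing() releases the connection even if the query raises;
    # sqlite3's own context manager only manages transactions, not closing.
    with closing(sqlite3.connect(db_path)) as conn:
        conn.execute("PRAGMA journal_mode = wal")
        rows = conn.execute(f"SELECT DISTINCT symbol FROM {table}").fetchall()
    return {row[0] for row in rows}

stock_symbols = load_symbols('stocks.db', 'stocks')
etf_symbols = load_symbols('etf.db', 'etfs')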
@@ -22,11 +38,10 @@ reddit = praw.Reddit(
 )

 # Function to save data
-def save_data(data):
-    with open('json/reddit-tracker/wallstreetbets/stats.json', 'w', encoding='utf-8') as f:
+def save_data(data, filename):
+    with open(f'json/reddit-tracker/wallstreetbets/{filename}', 'w', encoding='utf-8') as f:
         json.dump(data, f, ensure_ascii=False, indent=2)
-

 def compute_daily_statistics(file_path):
     # Load the data from the JSON file
     with open(file_path, 'r', encoding='utf-8') as f:
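Review note: save_data now takes the target filename, so stats.json and trending.json share one writer. If the output directory might not exist on a fresh checkout, a hedged variant (the makedirs guard is an addition, not in the commit) would be:

import json
import os

def save_data(data, filename):
    path = f'json/reddit-tracker/wallstreetbets/{filename}'
    os.makedirs(os.path.dirname(path), exist_ok=True)  # no-op when the directory exists
    with open(path, 'w', encoding='utf-8') as f:
        json.dump(data, f, ensure_ascii=False, indent=2)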
@@ -72,9 +87,39 @@ def compute_daily_statistics(file_path):
         'tickerMentions': dict(stats['ticker_mentions'])  # Optional: include detailed ticker mentions
     })

-    return formatted_stats
+    return formatted_stats, daily_stats
+
+# Function to compute trending tickers
+def compute_trending_tickers(daily_stats):
+    today = datetime.now().date()
+    seven_days_ago = today - timedelta(days=14)
+
+    trending = defaultdict(int)
+
+    for date, stats in daily_stats.items():
+        if seven_days_ago <= date <= today:
+            for ticker, count in stats['ticker_mentions'].items():
+                trending[ticker] += count
+
+    trending_list = [{'symbol': symbol, 'count': count} for symbol, count in trending.items()]
+    trending_list.sort(key=lambda x: x['count'], reverse=True)
+
+    for item in trending_list:
+        symbol = item['symbol']
+        if symbol in stock_symbols:
+            item['assetType'] = 'stocks'
+        elif symbol in etf_symbols:
+            item['assetType'] = 'etf'
+        else:
+            item['assetType'] = ''
+
+    return trending_list

 # Usage
 file_path = 'json/reddit-tracker/wallstreetbets/data.json'
-daily_statistics = compute_daily_statistics(file_path)
-save_data(daily_statistics)
+daily_statistics, daily_stats_dict = compute_daily_statistics(file_path)
+save_data(daily_statistics, 'stats.json')
+
+# Compute and save trending tickers
+trending_tickers = compute_trending_tickers(daily_stats_dict)
+save_data(trending_tickers, 'trending.json')
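Review note: seven_days_ago is assigned today - timedelta(days=14), so the trending window is actually 14 days despite the name; either the identifier or the value should change. The comparison seven_days_ago <= date <= today also assumes the keys of daily_stats are datetime.date objects; if compute_daily_statistics keys them by ISO string, the comparison raises TypeError. A sketch of the window logic that handles both cases (the window_days parameter and the string parsing are assumptions, not committed code; assetType tagging omitted for brevity):

from collections import defaultdict
from datetime import datetime, timedelta

def compute_trending_tickers(daily_stats, window_days=14):
    today = datetime.now().date()
    window_start = today - timedelta(days=window_days)  # name matches the value
    trending = defaultdict(int)
    for date_key, stats in daily_stats.items():
        # Accept either datetime.date keys or 'YYYY-MM-DD' string keys.
        day = (datetime.strptime(date_key, '%Y-%m-%d').date()
               if isinstance(date_key, str) else date_key)
        if window_start <= day <= today:
            for ticker, count in stats['ticker_mentions'].items():
                trending[ticker] += count
    return sorted(
        ({'symbol': s, 'count': c} for s, c in trending.items()),
        key=lambda x: x['count'],
        reverse=True,
    )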
@@ -3295,7 +3295,13 @@ async def get_reddit_tracker(api_key: str = Security(get_api_key)):
     except:
         stats = []

-    res = {'posts': latest_post, 'stats': stats}
+    try:
+        with open(f"json/reddit-tracker/wallstreetbets/trending.json", 'rb') as file:
+            trending = orjson.loads(file.read())[0:10]
+    except:
+        trending = []
+
+    res = {'posts': latest_post, 'stats': stats, 'trending': trending}

     data = orjson.dumps(res)
     compressed_data = gzip.compress(data)
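Review note: the endpoint degrades gracefully, returning an empty trending list when trending.json is missing or unreadable, and the [0:10] slice keeps only the ten most-mentioned symbols since the file is saved pre-sorted. The bare except: also swallows KeyboardInterrupt and SystemExit; a narrower guard (a sketch, not the committed code) would be:

try:
    with open("json/reddit-tracker/wallstreetbets/trending.json", "rb") as file:
        trending = orjson.loads(file.read())[:10]  # top 10, file is already sorted by count
except (FileNotFoundError, orjson.JSONDecodeError):
    trending = []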