add most shorted stocks

MuslemRahimi 2024-06-10 14:03:38 +02:00
parent ca4322feba
commit abd2ac252a
3 changed files with 95 additions and 3 deletions


@@ -41,7 +41,7 @@ def get_short_data(ticker, outstanding_shares, float_shares):
         short_float_percent = round((data_dict['sharesShort']/float_shares)*100,2)
         return {'sharesShort': data_dict['sharesShort'], 'shortRatio': data_dict['shortRatio'], 'sharesShortPriorMonth': data_dict['sharesShortPriorMonth'], 'shortOutStandingPercent': short_outstanding_percent, 'shortFloatPercent': short_float_percent}
     except:
-        return {'sharesShort': '-', 'shortRatio': '-', 'sharesShortPriorMonth': '-', 'shortOutStandingPercent': '-', 'shortFloatPercent': '-'}
+        return {'sharesShort': 0, 'shortRatio': 0, 'sharesShortPriorMonth': 0, 'shortOutStandingPercent': 0, 'shortFloatPercent': 0}

 async def get_data(ticker, con):


@@ -2797,4 +2797,21 @@ async def get_all_politician():
         io.BytesIO(compressed_data),
         media_type="application/json",
         headers={"Content-Encoding": "gzip"}
     )
+
+@app.get("/most-shorted-stocks")
+async def get_most_shorted_stocks():
+    cache_key = f"most-shorted-stocks"
+    cached_result = redis_client.get(cache_key)
+    if cached_result:
+        return ujson.loads(cached_result)
+    try:
+        with open(f"json/most-shorted-stocks/data.json", 'r') as file:
+            res = ujson.load(file)
+    except:
+        res = []
+
+    redis_client.set(cache_key, ujson.dumps(res))
+    redis_client.expire(cache_key, 3600*24)  # Set cache expiration time to 1 day
+    return res
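For context, a minimal sketch of how a client could consume the new endpoint once the server is running. The base URL (http://localhost:8000) and the use of the requests library are assumptions for illustration, not part of this commit:

    import requests

    # Fetch the cached most-shorted-stocks list from the new endpoint (hypothetical base URL)
    resp = requests.get("http://localhost:8000/most-shorted-stocks")
    resp.raise_for_status()
    for row in resp.json()[:5]:
        print(row["symbol"], row["shortOutStandingPercent"])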


@@ -9,6 +9,7 @@ import time
 import subprocess
 import asyncio
 import aiohttp
+import aiofiles
 import pytz
 import sqlite3
 import pandas as pd
@@ -17,6 +18,7 @@ from pocketbase import PocketBase
 from collections import Counter
 import re
 import hashlib
+import glob
 from dotenv import load_dotenv
 import os
@@ -1052,6 +1054,75 @@ async def get_ipo_calendar(con, symbols):
     return res_sorted

+async def get_most_shorted_stocks(con):
+    directory_path = 'json/share-statistics/*.json'
+
+    def filename_has_no_dot(file_path):
+        filename = os.path.basename(file_path)
+        if filename.endswith('.json'):
+            base_name = filename[:-5]  # Remove the .json part
+            # Return True only if there is no dot in the base name
+            if '.' not in base_name:
+                return True
+        return False
+
+    async def read_json_files(directory_path):
+        for file_path in glob.glob(directory_path):
+            if filename_has_no_dot(file_path):
+                try:
+                    async with aiofiles.open(file_path, 'r') as file:
+                        data = await file.read()
+                        json_data = json.loads(data)
+                        yield file_path, json_data
+                except (json.JSONDecodeError, IOError) as e:
+                    print(f"Error reading {file_path}: {e}")
+
+    def extract_elements(file_path, data):
+        symbol = os.path.basename(file_path).rsplit('.', 1)[0]
+        return {
+            "symbol": symbol,
+            "sharesShort": data.get("sharesShort"),
+            "shortRatio": data.get("shortRatio"),
+            "sharesShortPriorMonth": data.get("sharesShortPriorMonth"),
+            "shortOutStandingPercent": data.get("shortOutStandingPercent"),
+            "shortFloatPercent": data.get("shortFloatPercent"),
+            "latestOutstandingShares": data.get("latestOutstandingShares"),
+            "latestFloatShares": data.get("latestFloatShares")
+        }
+
+    # Initialize a list to hold the extracted data
+    extracted_data = []
+
+    # Read and process JSON files
+    async for file_path, json_data in read_json_files(directory_path):
+        element = extract_elements(file_path, json_data)
+        short_outstanding_percent = element.get("shortOutStandingPercent")
+        # Keep stocks where at least 20% but less than 100% of outstanding shares are sold short
+        if short_outstanding_percent is not None and float(short_outstanding_percent) >= 20 and float(short_outstanding_percent) < 100:
+            extracted_data.append(element)
+
+    sorted_list = sorted(extracted_data, key=lambda x: x['shortOutStandingPercent'], reverse=True)
+
+    query_template = """
+        SELECT
+            name, sector
+        FROM
+            stocks
+        WHERE
+            symbol = ?
+    """
+
+    for item in sorted_list:
+        try:
+            symbol = item['symbol']
+            data = pd.read_sql_query(query_template, con, params=(symbol,))
+            item['name'] = data['name'].iloc[0]
+            item['sector'] = data['sector'].iloc[0]
+        except Exception as e:
+            print(e)
+
+    return sorted_list
+
 async def save_json_files():
     week = datetime.today().weekday()
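As an illustration of the selection rule used above (keep stocks where at least 20% but less than 100% of outstanding shares are shorted, sorted from most to least shorted), here is a self-contained sketch with made-up sample values:

    # Hypothetical sample records, same shape as the extracted elements
    sample = [
        {"symbol": "AAA", "shortOutStandingPercent": 35.2},
        {"symbol": "BBB", "shortOutStandingPercent": 12.7},   # below 20, dropped
        {"symbol": "CCC", "shortOutStandingPercent": 140.0},  # 100 or above, dropped
        {"symbol": "DDD", "shortOutStandingPercent": 22.4},
    ]
    filtered = [x for x in sample if 20 <= float(x["shortOutStandingPercent"]) < 100]
    most_shorted = sorted(filtered, key=lambda x: x["shortOutStandingPercent"], reverse=True)
    print([x["symbol"] for x in most_shorted])  # ['AAA', 'DDD']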
@@ -1075,7 +1146,11 @@ async def save_json_files():
     crypto_cursor.execute("SELECT DISTINCT symbol FROM cryptos")
     crypto_symbols = [row[0] for row in crypto_cursor.fetchall()]

+    data = await get_most_shorted_stocks(con)
+    with open(f"json/most-shorted-stocks/data.json", 'w') as file:
+        ujson.dump(data, file)
+
     data = await get_congress_rss_feed(symbols, etf_symbols, crypto_symbols)
     with open(f"json/congress-trading/rss-feed/data.json", 'w') as file:
         ujson.dump(data, file)
@@ -1152,7 +1227,7 @@ async def save_json_files():
     with open(f"json/stock-screener/data.json", 'w') as file:
         ujson.dump(stock_screener_data, file)

     con.close()
     etf_con.close()