add new list
parent 60748bf1e3 · commit 50372c9f9b
@@ -592,6 +592,55 @@ async def get_most_employees():
             file.write(orjson.dumps(res_list))
 
 
+async def get_most_ftd_shares():
+    with sqlite3.connect('stocks.db') as con:
+        cursor = con.cursor()
+        cursor.execute("PRAGMA journal_mode = wal")
+        cursor.execute("SELECT DISTINCT symbol FROM stocks WHERE symbol NOT LIKE '%.%' AND symbol NOT LIKE '%-%'")
+        symbols = [row[0] for row in cursor.fetchall()]
+
+    res_list = []
+    for symbol in symbols:
+        try:
+            # Look up the screener metrics for this symbol
+            relative_ftd = stock_screener_data_dict[symbol].get('relativeFTD', None)
+            ftd_shares = stock_screener_data_dict[symbol].get('failToDeliver', None)
+            country = stock_screener_data_dict[symbol].get('country', None)
+
+            if relative_ftd > 10 and ftd_shares > 10_000 and country == 'United States':
+                quote_data = await get_quote_data(symbol)
+
+                # Pull price, daily change and name from the quote data
+                if quote_data:
+                    price = round(quote_data.get('price', None), 2)
+                    changesPercentage = round(quote_data.get('changesPercentage'), 2)
+                    name = quote_data.get('name')
+
+                    # Append the stock to res_list if it has a non-zero daily change
+                    if changesPercentage != 0:
+                        res_list.append({
+                            'symbol': symbol,
+                            'name': name,
+                            'price': price,
+                            'changesPercentage': changesPercentage,
+                            'relativeFTD': relative_ftd,
+                            'failToDeliver': ftd_shares
+                        })
+        except Exception:
+            pass
+
+    if res_list:
+        # Sort by relative FTD in descending order and keep the top 100
+        res_list = sorted(res_list, key=lambda x: x['relativeFTD'], reverse=True)[:100]
+
+        # Assign a rank to each stock
+        for rank, item in enumerate(res_list, start=1):
+            item['rank'] = rank
+
+        # Write the filtered and ranked FTD list to a JSON file
+        with open("json/stocks-list/list/most-ftd-shares.json", 'wb') as file:
+            file.write(orjson.dumps(res_list))
+
+
 async def etf_bitcoin_list():
     try:
         with sqlite3.connect('etf.db') as etf_con:
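The new function reads from a module-level stock_screener_data_dict that this hunk does not define. A minimal sketch of how such a dict could be built from a screener dump, assuming an orjson-readable file keyed by 'symbol' (the path and key name are assumptions, not part of this commit):

import orjson

# Hypothetical loader (path and 'symbol' key are assumptions): build a
# symbol-keyed dict so get_most_ftd_shares() can look up screener metrics.
def load_stock_screener_data(path="json/stock-screener/data.json"):
    with open(path, "rb") as f:
        rows = orjson.loads(f.read())
    return {row['symbol']: row for row in rows}

stock_screener_data_dict = load_stock_screener_data()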
@@ -824,6 +873,7 @@ async def run():
         get_highest_revenue(),
         get_highest_income_tax(),
         get_most_employees(),
+        get_most_ftd_shares(),
     )
 
 
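For context, a minimal sketch of how run() presumably awaits the list builders concurrently with asyncio.gather; the surrounding code is not shown in this hunk, so the exact structure is an assumption:

import asyncio

# Sketch only: gather the independent list builders, including the new one.
async def run():
    await asyncio.gather(
        get_highest_revenue(),
        get_highest_income_tax(),
        get_most_employees(),
        get_most_ftd_shares(),
    )

if __name__ == "__main__":
    asyncio.run(run())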
@@ -3968,7 +3968,7 @@ async def get_statistics(data: FilterStockList, api_key: str = Security(get_api_
         category_type = 'sector'
     elif filter_list == 'reits':
         category_type = 'industry'
-    elif filter_list in ['highest-income-tax','most-employees','highest-revenue','top-rated-dividend-stocks','penny-stocks','overbought-stocks','oversold-stocks','faang','magnificent-seven','ca','cn','de','gb','il','in','jp','nyse','nasdaq','amex','dowjones','sp500','nasdaq100','all-stock-tickers']:
+    elif filter_list in ['most-ftd-shares','highest-income-tax','most-employees','highest-revenue','top-rated-dividend-stocks','penny-stocks','overbought-stocks','oversold-stocks','faang','magnificent-seven','ca','cn','de','gb','il','in','jp','nyse','nasdaq','amex','dowjones','sp500','nasdaq100','all-stock-tickers']:
         category_type = 'stocks-list'
     elif filter_list in ['dividend-kings','dividend-aristocrats']:
         category_type = 'dividends'
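Adding 'most-ftd-shares' to this branch routes the new list through the 'stocks-list' category. A hedged sketch of how that category could map back to the file written in the first hunk; the path template is inferred from "json/stocks-list/list/most-ftd-shares.json" and is an assumption about get_statistics' internals:

import orjson

# Assumed path template, inferred from the file written by get_most_ftd_shares().
def load_list(filter_list: str, category_type: str) -> list:
    path = f"json/{category_type}/list/{filter_list}.json"
    with open(path, "rb") as f:
        return orjson.loads(f.read())

# e.g. load_list('most-ftd-shares', 'stocks-list')
# reads json/stocks-list/list/most-ftd-shares.json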