bugfixing

parent be2fbd03a2
commit 21ba8b8a5b
@@ -380,15 +380,15 @@ def create_dataset():
     con = sqlite3.connect('stocks.db')
     cursor = con.cursor()
     cursor.execute("PRAGMA journal_mode = wal")
-    cursor.execute("SELECT DISTINCT symbol, name FROM stocks WHERE marketCap >= 10E9 AND symbol NOT LIKE '%.%' AND symbol NOT LIKE '%-%'")
+    cursor.execute("SELECT DISTINCT symbol, name FROM stocks WHERE symbol NOT LIKE '%.%' AND symbol NOT LIKE '%-%'")
     stock_data = [{'symbol': row[0], 'name': row[1]} for row in cursor.fetchall()]
     print(f"Total stocks: {len(stock_data)}")
     con.close()

-    batch_size = 3
+    batch_size = 5
     stock_batches = [stock_data[i:i+batch_size] for i in range(0, len(stock_data), batch_size)]

-    with concurrent.futures.ProcessPoolExecutor(max_workers=4) as executor:
+    with concurrent.futures.ProcessPoolExecutor(max_workers=5) as executor:
         futures = [executor.submit(process_stocks_batch, batch, csv_files, reports_folder, threshold) for batch in stock_batches]

         for future in concurrent.futures.as_completed(futures):
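Note on the hunk above: the market-cap filter is dropped from the SQL query, and the batch size and process-pool worker count go from 3 and 4 to 5. A minimal, self-contained sketch of the same batch-and-submit pattern; process_stocks_batch below is a placeholder stub (the real one in this repo also takes csv_files, reports_folder and threshold):

    # Sketch only: process_stocks_batch is a stand-in for the repo's real worker function.
    import concurrent.futures

    def process_stocks_batch(batch):
        # placeholder work: the real function processes each symbol in the batch
        return [s['symbol'] for s in batch]

    def run_batched(stock_data, batch_size=5, max_workers=5):
        batches = [stock_data[i:i+batch_size] for i in range(0, len(stock_data), batch_size)]
        results = []
        with concurrent.futures.ProcessPoolExecutor(max_workers=max_workers) as executor:
            futures = [executor.submit(process_stocks_batch, b) for b in batches]
            for future in concurrent.futures.as_completed(futures):
                results.extend(future.result())
        return results

    if __name__ == '__main__':
        print(run_batched([{'symbol': 'AAA'}, {'symbol': 'BBB'}, {'symbol': 'CCC'}]))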
@@ -8,7 +8,7 @@ import pytz
 import time
 import os
 from dotenv import load_dotenv
-from datetime import datetime, timedelta
+from datetime import datetime, timedelta, date
 import sqlite3

@@ -34,6 +34,20 @@ async def save_json(data):
         ujson.dump(data, file)


+def parse_time(time_str):
+    try:
+        # Try parsing as full datetime
+        return datetime.strptime(time_str, '%Y-%m-%d %H:%M:%S')
+    except ValueError:
+        try:
+            # Try parsing as time only
+            time_obj = datetime.strptime(time_str, '%H:%M:%S').time()
+            # Combine with today's date
+            return datetime.combine(date.today(), time_obj)
+        except ValueError:
+            # If all else fails, return a default datetime
+            return datetime.min
+
 def remove_duplicates(elements):
     seen = set()
     unique_elements = []
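Note on the new parse_time helper above: it accepts either a full 'YYYY-MM-DD HH:MM:SS' timestamp or a bare 'HH:MM:SS' time (anchored to today's date), and falls back to datetime.min for anything else. A small self-contained sketch of the two strptime branches it relies on (the example strings are made up):

    from datetime import datetime, date

    full = datetime.strptime('2024-05-01 16:30:00', '%Y-%m-%d %H:%M:%S')  # full datetime parses directly
    t = datetime.strptime('08:30:00', '%H:%M:%S').time()                  # time-only string -> time object
    today_at_t = datetime.combine(date.today(), t)                        # anchored to today's date
    print(full, today_at_t)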
@@ -151,7 +165,8 @@ async def get_recent_earnings(session):
         except Exception as e:
             pass
     res_list = remove_duplicates(res_list)
-    res_list.sort(key=lambda x: x['marketCap'], reverse=True)
+    #res_list.sort(key=lambda x: x['marketCap'], reverse=True)
+    res_list.sort(key=lambda x: (-parse_time(x['time']).timestamp(), -x['marketCap']))
     res_list = [{k: v for k, v in d.items() if k != 'marketCap'} for d in res_list]
     return res_list[0:5]

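Note on the sort change above: instead of ranking by market cap alone, entries are now ordered by most recent parsed report time first and, within the same time, by larger market cap. A self-contained sketch of that ordering; the dicts and fake_parse_time below are illustrative stand-ins, not the repo's data or helper:

    from datetime import datetime

    def fake_parse_time(s):
        # stand-in for the parse_time helper added in this commit
        return datetime.strptime(s, '%Y-%m-%d %H:%M:%S')

    res_list = [
        {'symbol': 'AAA', 'time': '2024-05-01 08:30:00', 'marketCap': 5e9},
        {'symbol': 'BBB', 'time': '2024-05-01 16:30:00', 'marketCap': 1e9},
        {'symbol': 'CCC', 'time': '2024-05-01 16:30:00', 'marketCap': 9e9},
    ]
    res_list.sort(key=lambda x: (-fake_parse_time(x['time']).timestamp(), -x['marketCap']))
    print([d['symbol'] for d in res_list])  # ['CCC', 'BBB', 'AAA']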