add fda calendar
parent 24d0848f6c
commit c9ff6b8e8a
@@ -48,6 +48,7 @@ async def get_data(company_name):
        df_sorted['Interventions'] = df_sorted['Interventions'].apply(extract_drug)
        data = df_sorted.to_dict('records')
+       print(data)
        return data


    except Exception as e:
@@ -70,11 +71,11 @@ async def run():
    cursor = con.cursor()
    cursor.execute("PRAGMA journal_mode = wal")
-   cursor.execute("SELECT DISTINCT symbol, name FROM stocks WHERE (industry = 'Biotechnology' OR industry LIKE '%Drug%') AND symbol NOT LIKE '%.%'")
+   cursor.execute("SELECT DISTINCT symbol, name FROM stocks WHERE sector = 'Healthcare' AND symbol NOT LIKE '%.%'")
    company_data = [{'symbol': row[0], 'name': row[1]} for row in cursor.fetchall()]
    con.close()
    #test mode
-   #company_data = [{'symbol': 'MRK', 'name': 'Merck & Co. Inc.'}]
+   company_data = [{'symbol': 'NEOG', 'name': 'Neogen Corporation Inc.'}]
    print(len(company_data))
    async with aiohttp.ClientSession() as session:
        tasks = []
app/cron_fda_calendar.py (new file, 86 lines)
@@ -0,0 +1,86 @@
import ujson
import asyncio
import aiohttp
import os
from dotenv import load_dotenv
import requests

# Load environment variables
load_dotenv()
benzinga_api_key = os.getenv('BENZINGA_API_KEY_EXTRA')
fmp_api_key = os.getenv('FMP_API_KEY')


url = "https://api.benzinga.com/api/v2.1/calendar/fda"
querystring = {"token": benzinga_api_key}
headers = {"accept": "application/json"}


async def save_json(data):
    with open(f"json/fda-calendar/data.json", 'w') as file:
        ujson.dump(data, file)


async def get_quote_of_stocks(ticker):
    async with aiohttp.ClientSession() as session:
        url = f"https://financialmodelingprep.com/api/v3/quote/{ticker}?apikey={fmp_api_key}"
        async with session.get(url) as response:
            if response.status == 200:
                return await response.json()
            else:
                return {}


async def get_data():
    try:
        response = requests.request("GET", url, headers=headers, params=querystring)
        data = ujson.loads(response.text)['fda']
        # New list to store the extracted information
        extracted_data = []

        # Iterate over the original data to extract required fields
        for entry in data:
            try:
                symbol = entry['companies'][0]['securities'][0]['symbol']
                name = entry['companies'][0]['name']
                drug_name = entry['drug']['name'].capitalize()
                indication = entry['drug']['indication_symptom']
                outcome = entry['outcome']
                source_type = entry['source_type']
                status = entry['status']
                target_date = entry['target_date']

                changes_percentage = round((await get_quote_of_stocks(symbol))[0]['changesPercentage'], 2)

                # Create a new dictionary with the extracted information
                new_entry = {
                    'symbol': symbol,
                    'name': name,
                    'drugName': drug_name,
                    'indication': indication,
                    'outcome': outcome,
                    'sourceType': source_type,
                    'status': status,
                    'targetDate': target_date,
                    'changesPercentage': changes_percentage
                }

                # Append the new dictionary to the new list
                extracted_data.append(new_entry)
            except:
                pass

        # Output the new list
        return extracted_data
    except Exception as e:
        print(f"Error fetching data: {e}")
        return []


async def run():
    data = await get_data()
    await save_json(data)


if __name__ == "__main__":
    try:
        asyncio.run(run())
    except Exception as e:
        print(f"An error occurred: {e}")
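Note that get_data() awaits get_quote_of_stocks() once per FDA entry, so every calendar row costs its own FMP round trip. Below is a minimal sketch of a batched lookup, assuming (not verified against this codebase) that FMP's /api/v3/quote route accepts a comma-separated symbol list; the helper name get_quotes_bulk is hypothetical and not part of this commit.

# Hypothetical batched variant (not in this commit); reuses the aiohttp import
# and fmp_api_key defined above. Assumes /api/v3/quote accepts comma-separated
# symbols, e.g. .../quote/MRK,NEOG?apikey=...
async def get_quotes_bulk(symbols):
    async with aiohttp.ClientSession() as session:
        url = f"https://financialmodelingprep.com/api/v3/quote/{','.join(symbols)}?apikey={fmp_api_key}"
        async with session.get(url) as response:
            if response.status != 200:
                return {}
            quotes = await response.json()
            # Map symbol -> rounded percentage change so the extraction loop
            # can do one dict lookup per entry instead of one HTTP request.
            return {q['symbol']: round(q.get('changesPercentage') or 0, 2) for q in quotes}

The extraction loop would then call this once with all symbols up front and read changes_percentage from the returned dict.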
app/main.py (19 lines changed)
@@ -2931,4 +2931,21 @@ async def get_clinical_trial(data:TickerData):
        io.BytesIO(compressed_data),
        media_type="application/json",
        headers={"Content-Encoding": "gzip"}
    )


+@app.get("/fda-calendar")
+async def get_fda_calendar():
+    cache_key = f"fda-calendar"
+    cached_result = redis_client.get(cache_key)
+    if cached_result:
+        return ujson.loads(cached_result)
+    try:
+        with open(f"json/fda-calendar/data.json", 'r') as file:
+            res = ujson.load(file)
+    except:
+        res = []
+
+    redis_client.set(cache_key, ujson.dumps(res))
+    redis_client.expire(cache_key, 3600*24)  # Set cache expiration time to 1 day
+    return res
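With cron_fda_calendar.py writing json/fda-calendar/data.json and the route above serving it, the endpoint can be sanity-checked directly. A quick sketch follows, assuming the FastAPI app is reachable at http://localhost:8000 (host and port are assumptions, not stated in this commit).

# Quick check of the new route; http://localhost:8000 is an assumed dev address.
import requests

r = requests.get("http://localhost:8000/fda-calendar")
print(r.status_code)
for entry in r.json()[:3]:
    # Fields mirror the new_entry dict built in cron_fda_calendar.py
    print(entry['symbol'], entry['drugName'], entry['status'], entry['targetDate'])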
@@ -342,6 +342,17 @@ def run_clinical_trial():
    ]
    subprocess.run(command)

+def run_fda_calendar():
+    week = datetime.today().weekday()
+    if week <= 5:
+        subprocess.run(["python3", "cron_fda_calendar.py"])
+        command = [
+            "sudo", "rsync", "-avz", "-e", "ssh",
+            "/root/backend/app/json/fda-calendar",
+            f"root@{useast_ip_address}:/root/backend/app/json"
+        ]
+        subprocess.run(command)
+
# Create functions to run each schedule in a separate thread
def run_threaded(job_func):
    job_thread = threading.Thread(target=job_func)
@@ -388,6 +399,7 @@ schedule.every(5).minutes.do(run_threaded, run_cron_heatmap).tag('heatmap_job')
schedule.every(1).minutes.do(run_threaded, run_cron_quote).tag('quote_job')
schedule.every(1).minutes.do(run_threaded, run_cron_price_alert).tag('price_alert_job')
schedule.every(15).minutes.do(run_threaded, run_market_moods).tag('market_moods_job')
+schedule.every(2).hours.do(run_threaded, run_fda_calendar).tag('fda_calendar_job')
schedule.every(3).hours.do(run_threaded, run_json_job).tag('json_job')
schedule.every(6).hours.do(run_threaded, run_analyst_rating).tag('analyst_job')
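The new fda_calendar_job only fires while the scheduler's polling loop is running; with the schedule library that is conventionally a run_pending() loop like the sketch below. The loop itself is outside the hunks shown here, so its exact shape in this repository is an assumption.

# Conventional driver loop for the schedule library; not part of this diff.
import time
import schedule

while True:
    schedule.run_pending()  # runs any due job, e.g. fda_calendar_job every 2 hours
    time.sleep(3)           # the polling interval here is an assumed value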