remove endpoints in fastify && update model
parent 60cd644afa
commit 1ee1e10e72
@@ -243,7 +243,7 @@ async def download_data(ticker, con, start_date, end_date, skip_downloading):
         if not df_copy.empty:
             with open(file_path, 'wb') as file:
                 file.write(orjson.dumps(df_copy.to_dict(orient='records')))
-            print(df_copy)
+
         return df_copy
 
     except Exception as e:
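The hunk above only drops a debug print from the cache-write path in download_data; the write itself is unchanged. A minimal standalone sketch of that pattern, assuming df_copy is a pandas DataFrame and file_path points at a per-ticker JSON cache file (the helper name cache_records is hypothetical, not part of the diff):

import orjson
import pandas as pd

def cache_records(df_copy: pd.DataFrame, file_path: str) -> None:
    # orjson.dumps returns bytes, hence the binary-mode file handle.
    if not df_copy.empty:
        with open(file_path, 'wb') as file:
            file.write(orjson.dumps(df_copy.to_dict(orient='records')))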
@@ -266,6 +266,7 @@ async def chunked_gather(tickers, con, skip_downloading, chunk_size):
 
     for chunk in tqdm(chunks(tickers, chunk_size)):
         # Create tasks for each chunk
+        print(f"chunk size: {len(chunk)}")
         tasks = [download_data(ticker, con, start_date, end_date, skip_downloading) for ticker in chunk]
         # Await the results for the current chunk
         chunk_results = await asyncio.gather(*tasks)
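The added print logs how many tickers each batch contains. The chunks helper is not part of this diff; a minimal sketch of the chunked-gather pattern under that assumption, with a generator-based chunks and a stubbed coroutine standing in for the real download_data(ticker, con, start_date, end_date, skip_downloading):

import asyncio
from tqdm import tqdm

def chunks(items, chunk_size):
    # Assumed helper (not shown in the diff): yield slices of at most chunk_size items.
    for i in range(0, len(items), chunk_size):
        yield items[i:i + chunk_size]

async def download_data(ticker):
    # Stand-in for the real downloader; simplified signature for illustration.
    await asyncio.sleep(0)
    return ticker

async def chunked_gather(tickers, chunk_size=10):
    results = []
    for chunk in tqdm(chunks(tickers, chunk_size)):
        print(f"chunk size: {len(chunk)}")
        # One task per ticker in the chunk; gather waits for all of them before
        # the next chunk starts, capping concurrency at chunk_size downloads.
        tasks = [download_data(ticker) for ticker in chunk]
        results.extend(await asyncio.gather(*tasks))
    return results

# Example: asyncio.run(chunked_gather(['AAPL', 'PEP', 'MSFT'], chunk_size=2))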
@@ -357,7 +358,7 @@ async def run():
     AND symbol NOT LIKE '%.%'
     AND symbol NOT LIKE '%-%'
     """)
-    warm_start_symbols = ['AAPL'] #[row[0] for row in cursor.fetchall()]
+    warm_start_symbols = ['PEP'] #[row[0] for row in cursor.fetchall()]
 
     print(f'Warm Start Training: Total Tickers {len(warm_start_symbols)}')
     await warm_start_training(warm_start_symbols, con, skip_downloading)
Binary file not shown.
@@ -23,16 +23,11 @@ class ScorePredictor:
         self.pca = PCA(n_components=0.95) # Retain components explaining 95% variance
         self.warm_start_model_path = 'ml_models/weights/ai-score/warm_start_weights.pkl'
         self.model = lgb.LGBMClassifier(
-            n_estimators=200, # Number of boosting iterations - good balance between performance and training time
-            learning_rate=0.005, # Smaller learning rate for better generalization
-            max_depth=5, # Controlled depth to prevent overfitting
-            num_leaves=2**5-1, # 2^max_depth, prevents overfitting while maintaining model complexity
-            colsample_bytree=0.8, # Use 80% of features per tree to reduce overfitting
-            subsample=0.8, # Use 80% of data per tree to reduce overfitting
-            min_child_samples=5, # Minimum samples per leaf to ensure reliable splits
-            random_state=42, # For reproducibility
-            reg_alpha=0.1, # L1 regularization
-            reg_lambda=0.1, # L2 regularization
+            n_estimators=20_000, # Number of boosting iterations - good balance between performance and training time
+            learning_rate=0.001, # Smaller learning rate for better generalization
+            max_depth=6, # Controlled depth to prevent overfitting
+            num_leaves=2**6-1, # 2^max_depth, prevents overfitting while maintaining model complexity
+            colsample_bytree=0.1,
             n_jobs=10, # Use N CPU cores
             verbose=0, # Reduce output noise
         )
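Net effect of the hunk above: far more boosting rounds, a lower learning rate, slightly deeper trees, a much smaller per-tree feature fraction, and the subsample, min_child_samples, random_state, reg_alpha, and reg_lambda arguments dropped so LightGBM's defaults apply. A sketch of the resulting constructor call in isolation, assuming the lgb alias comes from import lightgbm as lgb:

import lightgbm as lgb

# Configuration as it reads after this commit; removed keyword arguments
# (subsample, min_child_samples, random_state, reg_alpha, reg_lambda)
# fall back to LightGBM's built-in defaults.
model = lgb.LGBMClassifier(
    n_estimators=20_000,   # up from 200 boosting iterations
    learning_rate=0.001,   # down from 0.005
    max_depth=6,           # was 5
    num_leaves=2**6 - 1,   # kept at 2^max_depth - 1
    colsample_bytree=0.1,  # 10% of features per tree, down from 80%
    n_jobs=10,             # use N CPU cores
    verbose=0,             # reduce output noise
)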
@@ -61,10 +61,6 @@ const corsMiddleware = (request, reply, done) => {
 fastify.addHook("onRequest", corsMiddleware);
 
 //fastify.register(require('./mixpanel/server'), { mixpanel, UAParser });
-fastify.register(require("./get-user-stats/server"), { pb });
-fastify.register(require("./get-community-stats/server"), { pb });
-fastify.register(require("./get-moderators/server"), { pb });
-fastify.register(require("./get-user-data/server"), { pb });
 fastify.register(require("./get-all-comments/server"), { pb });
 fastify.register(require("./get-post/server"), { pb });
 fastify.register(require("./get-one-post/server"), { pb });
@@ -1,27 +0,0 @@
-module.exports = function (fastify, opts, done) {
-  const pb = opts.pb;
-
-  fastify.get('/get-community-stats', async (request, reply) => {
-    let output;
-    let totalUsers = 0;
-    let totalPosts = 0;
-    let totalComments = 0;
-
-    try {
-      totalUsers = (await pb.collection("users").getList(1, 1))?.totalItems;
-      totalPosts = (await pb.collection("posts").getList(1, 1))?.totalItems;
-      totalComments = (await pb.collection("comments").getList(1, 1))?.totalItems;
-
-
-      output = { totalUsers, totalPosts, totalComments };
-
-    } catch (e) {
-      console.error(e);
-      output = { totalUsers, totalPosts, totalComments };
-    }
-
-    reply.send({ items: output });
-  });
-
-  done();
-};
@@ -1,22 +0,0 @@
-// Declare a route
-module.exports = function (fastify, opts, done) {
-
-  const pb = opts.pb;
-
-  fastify.get('/get-moderators', async (request, reply) => {
-    let output;
-
-    try {
-      output = await pb.collection("moderators").getFullList({
-        expand: 'user'
-      })
-    }
-    catch(e) {
-      output = [];
-    }
-
-    reply.send({ items: output })
-  });
-
-  done();
-};
@@ -1,22 +0,0 @@
-// Declare a route
-module.exports = function (fastify, opts, done) {
-
-  const pb = opts.pb;
-
-  fastify.post('/get-user-data', async (request, reply) => {
-    const data = request.body;
-    const userId = data?.userId
-    let output;
-
-    try {
-      output = await pb.collection("users").getOne(userId)
-    }
-    catch(e) {
-      output = {};
-    }
-
-    reply.send({ items: output })
-  });
-
-  done();
-};
@@ -1,36 +0,0 @@
-// Declare a route
-module.exports = function (fastify, opts, done) {
-
-  const pb = opts.pb;
-
-  fastify.post('/get-user-stats', async (request, reply) => {
-    const data = request.body;
-    const userId = data?.userId;
-
-    let output;
-
-    try {
-      const getNumberOfPosts = await pb.collection("posts").getList(1,1, {
-        filter: `user="${userId}"`,
-      });
-      const numberOfPosts = getNumberOfPosts?.totalItems
-
-
-      const getNumberOfComments = await pb.collection("comments").getList(1,1, {
-        filter: `user="${userId}"`,
-      });
-      const numberOfComments = getNumberOfComments?.totalItems
-
-      output = {numberOfPosts, numberOfComments}
-      console.log(output)
-
-    }
-    catch(e) {
-      output = {};
-    }
-
-    reply.send({ items: output })
-  });
-
-  done();
-};