Refactor: extract CEO handler operations into per-action modules and replace the mongodb-client/postgres-client singleton packages with locally managed client registries

This commit is contained in:
Boki 2025-06-22 16:57:08 -04:00
parent 5318158e59
commit d858222af7
33 changed files with 505 additions and 367 deletions

View file

@ -0,0 +1,3 @@
export { updateCeoChannels } from './update-ceo-channels.action';
export { updateUniqueSymbols } from './update-unique-symbols.action';
export { processIndividualSymbol } from './process-individual-symbol.action';

View file

@ -0,0 +1,111 @@
import { getRandomUserAgent } from '@stock-bot/utils';
import type { CeoHandler } from '../ceo.handler';
/**
 * Fetch the latest spiels (posts) for one CEO.ca channel, persist them to
 * MongoDB, refresh the channel's short-position data, and schedule a
 * follow-up job to page further through the channel's history.
 *
 * @param payload - `{ ceoId, symbol, timestamp? }`; when `timestamp` is set it
 *                  is passed as the `until` query param to page the feed.
 * @param _context - unused job context.
 * @returns `{ ceoId, spielCount, timestamp }` on success, `null` when the
 *          channel has no spiels, or `undefined` when no proxy is available.
 * @throws rethrows any fetch/persistence error after logging it.
 */
export async function processIndividualSymbol(this: CeoHandler, payload: any, _context: any): Promise<unknown> {
    const { ceoId, symbol, timestamp } = payload;
    const proxy = this.proxy?.getProxy();
    if (!proxy) {
        this.logger.warn('No proxy available for processing individual CEO symbol');
        return;
    }
    this.logger.debug('Processing individual CEO symbol', {
        ceoId,
        timestamp,
    });
    try {
        // Fetch detailed information for the individual symbol.
        const response = await this.http.get(`https://api.ceo.ca/api/get_spiels?channel=${ceoId}&load_more=top`
            + (timestamp ? `&until=${timestamp}` : ''),
            {
                proxy: proxy,
                headers: {
                    'User-Agent': getRandomUserAgent()
                }
            });
        if (!response.ok) {
            throw new Error(`Failed to fetch details for ceoId ${ceoId}: ${response.statusText}`);
        }
        const data = await response.json();
        const spielCount = data.spiels.length;
        if (spielCount === 0) {
            this.logger.warn(`No spiels found for ceoId ${ceoId}`);
            return null; // No data to process
        }
        // NOTE(review): assumes spiels[0] carries the paging cursor for the
        // next `until` request — confirm the API's ordering guarantee.
        const latestSpielTime = data.spiels[0]?.timestamp;
        // Map the API's snake_case spiel fields onto camelCase post documents.
        const posts = data.spiels.map((spiel: any) => ({
            ceoId,
            spiel: spiel.spiel,
            spielReplyToId: spiel.spiel_reply_to_id,
            spielReplyTo: spiel.spiel_reply_to,
            spielReplyToName: spiel.spiel_reply_to_name,
            spielReplyToEdited: spiel.spiel_reply_to_edited,
            userId: spiel.user_id,
            name: spiel.name,
            timestamp: spiel.timestamp,
            spielId: spiel.spiel_id,
            color: spiel.color,
            parentId: spiel.parent_id,
            publicId: spiel.public_id,
            parentChannel: spiel.parent_channel,
            parentTimestamp: spiel.parent_timestamp,
            votes: spiel.votes,
            editable: spiel.editable,
            edited: spiel.edited,
            featured: spiel.featured,
            verified: spiel.verified,
            fake: spiel.fake,
            bot: spiel.bot,
            voted: spiel.voted,
            flagged: spiel.flagged,
            ownSpiel: spiel.own_spiel,
            score: spiel.score,
            savedId: spiel.saved_id,
            savedTimestamp: spiel.saved_timestamp,
            poll: spiel.poll,
            votedInPoll: spiel.voted_in_poll
        }));
        await this.mongodb.batchUpsert('ceoPosts', posts, ['spielId']);
        this.logger.info(`Fetched ${spielCount} spiels for ceoId ${ceoId}`);
        // Update Shorts. A failure here is logged but must not block paging.
        const shortRes = await this.http.get(`https://api.ceo.ca/api/short_positions/one?symbol=${symbol}`,
            {
                proxy: proxy,
                headers: {
                    'User-Agent': getRandomUserAgent()
                }
            });
        if (shortRes.ok) {
            const shortData = await shortRes.json();
            if (shortData && shortData.positions) {
                await this.mongodb.batchUpsert('ceoShorts', shortData.positions, ['id']);
            }
        } else {
            this.logger.warn(`Failed to fetch short positions for symbol ${symbol}: ${shortRes.statusText}`);
        }
        // Page onward regardless of the shorts result. `symbol` must be carried
        // forward: the follow-up job destructures it for the shorts request
        // (omitting it produced `symbol=undefined` URLs).
        await this.scheduleOperation('process-individual-symbol', {
            ceoId: ceoId,
            symbol: symbol,
            timestamp: latestSpielTime
        });
        this.logger.info(`Successfully processed channel ${ceoId} and added channel ${ceoId} at timestamp ${latestSpielTime}`);
        return { ceoId, spielCount, timestamp };
    } catch (error) {
        this.logger.error('Failed to process individual symbol', {
            error,
            ceoId,
            timestamp
        });
        throw error;
    }
}

View file

@ -0,0 +1,67 @@
import { getRandomUserAgent } from '@stock-bot/utils';
import type { CeoHandler } from '../ceo.handler';
/**
 * Fetch one page of the CEO.ca channel directory, upsert its symbols and
 * exchanges, and — when handling page 1 — fan out one job per remaining page.
 *
 * @param payload - page number to fetch; defaults to 1 when undefined.
 * @returns `{ page, totalPages }`, or `undefined` when no proxy is available.
 * @throws when the directory request returns a non-OK status.
 */
export async function updateCeoChannels(this: CeoHandler, payload: number | undefined): Promise<unknown> {
    const proxy = this.proxy?.getProxy();
    if (!proxy) {
        this.logger.warn('No proxy available for CEO channels update');
        return;
    }
    // `??` (not `||`) so only an absent payload falls back to page 1.
    const page = payload ?? 1;
    this.logger.info(`Fetching CEO channels for page ${page} with proxy ${proxy}`);
    const response = await this.http.get(`https://api.ceo.ca/api/home?exchange=all&sort_by=symbol&sector=All&tab=companies&page=${page}`, {
        proxy: proxy,
        headers: {
            'User-Agent': getRandomUserAgent()
        }
    });
    if (!response.ok) {
        throw new Error(`Failed to fetch CEO channels page ${page}: ${response.statusText}`);
    }
    const results = await response.json();
    const channels = results.channel_categories[0].channels;
    const totalChannels = results.channel_categories[0].total_channels;
    if (!channels || channels.length === 0) {
        // Guard the page-count division below: an empty page would otherwise
        // yield totalPages = NaN/Infinity and poison the fan-out loop.
        this.logger.warn(`No channels returned for CEO channels page ${page}`);
        return { page, totalPages: 0 };
    }
    const totalPages = Math.ceil(totalChannels / channels.length);
    // Collect each exchange once, keyed by name. countryCode is hard-coded to
    // 'CA' — NOTE(review): assumes CEO.ca lists only Canadian exchanges; confirm.
    const exchangesByName = new Map<string, { exchange: string, countryCode: string }>();
    const symbols = channels.map((channel: any) => {
        if (!exchangesByName.has(channel.exchange)) {
            exchangesByName.set(channel.exchange, {
                exchange: channel.exchange,
                countryCode: 'CA'
            });
        }
        const details = channel.company_details || {};
        return {
            symbol: channel.symbol,
            exchange: channel.exchange,
            name: channel.title,
            type: channel.type,
            ceoId: channel.channel,
            marketCap: details.market_cap,
            volumeRatio: details.volume_ratio,
            avgVolume: details.avg_volume,
            stockType: details.stock_type,
            issueType: details.issue_type,
            sharesOutstanding: details.shares_outstanding,
            float: details.float,
        };
    });
    await this.mongodb.batchUpsert('ceoSymbols', symbols, ['symbol', 'exchange']);
    await this.mongodb.batchUpsert('ceoExchanges', [...exchangesByName.values()], ['exchange']);
    // Only the page-1 job fans out, so the schedule happens exactly once.
    if (page === 1) {
        for (let i = 2; i <= totalPages; i++) {
            this.logger.info(`Scheduling page ${i} of ${totalPages} for CEO channels`);
            await this.scheduleOperation('update-ceo-channels', i);
        }
    }
    this.logger.info(`Fetched CEO channels for page ${page}/${totalPages}`);
    return { page, totalPages };
}

View file

@ -0,0 +1,63 @@
import type { CeoHandler } from '../ceo.handler';
export async function updateUniqueSymbols(this: CeoHandler, _payload: unknown, _context: any): Promise<unknown> {
this.logger.info('Starting update to get unique CEO symbols by ceoId');
try {
// Get unique ceoId values from the ceoSymbols collection
const uniqueCeoIds = await this.mongodb.collection('ceoSymbols').distinct('ceoId');
this.logger.info(`Found ${uniqueCeoIds.length} unique CEO IDs`);
// Get detailed records for each unique ceoId (latest/first record)
const uniqueSymbols = [];
for (const ceoId of uniqueCeoIds) {
const symbol = await this.mongodb.collection('ceoSymbols')
.findOne({ ceoId }, { sort: { _id: -1 } }); // Get latest record
if (symbol) {
uniqueSymbols.push(symbol);
}
}
this.logger.info(`Retrieved ${uniqueSymbols.length} unique symbol records`);
// Schedule individual jobs for each unique symbol
let scheduledJobs = 0;
for (const symbol of uniqueSymbols) {
// Schedule a job to process this individual symbol
await this.scheduleOperation('process-individual-symbol', {
ceoId: symbol.ceoId,
symbol: symbol.symbol,
});
scheduledJobs++;
// Add small delay to avoid overwhelming the queue
if (scheduledJobs % 10 === 0) {
this.logger.debug(`Scheduled ${scheduledJobs} jobs so far`);
}
}
this.logger.info(`Successfully scheduled ${scheduledJobs} individual symbol update jobs`);
// Cache the results for monitoring
await this.cacheSet('unique-symbols-last-run', {
timestamp: new Date().toISOString(),
totalUniqueIds: uniqueCeoIds.length,
totalRecords: uniqueSymbols.length,
scheduledJobs
}, 1800); // Cache for 30 minutes
return {
success: true,
uniqueCeoIds: uniqueCeoIds.length,
uniqueRecords: uniqueSymbols.length,
scheduledJobs,
timestamp: new Date().toISOString()
};
} catch (error) {
this.logger.error('Failed to update unique CEO symbols', { error });
throw error;
}
}

View file

@ -1,15 +1,18 @@
import {
BaseHandler,
Disabled,
Handler,
Operation,
ScheduledOperation,
type IServiceContainer
} from '@stock-bot/handlers';
import { getRandomUserAgent } from '@stock-bot/utils';
import {
processIndividualSymbol,
updateCeoChannels,
updateUniqueSymbols
} from './actions';
@Handler('ceo')
@Disabled()
// @Disabled()
export class CeoHandler extends BaseHandler {
constructor(services: IServiceContainer) {
super(services); // Handler name read from @Handler decorator
@ -20,246 +23,16 @@ export class CeoHandler extends BaseHandler {
immediately: false,
description: 'Get all CEO symbols and exchanges'
})
async updateCeoChannels(payload: number | undefined): Promise<unknown> {
const proxy = this.proxy?.getProxy();
if(!proxy) {
this.logger.warn('No proxy available for CEO channels update');
return;
}
let page;
if(payload === undefined) {
page = 1
}else{
page = payload;
}
this.logger.info(`Fetching CEO channels for page ${page} with proxy ${proxy}`);
const response = await this.http.get('https://api.ceo.ca/api/home?exchange=all&sort_by=symbol&sector=All&tab=companies&page='+page, {
proxy: proxy,
headers: {
'User-Agent': getRandomUserAgent()
}
})
const results = await response.json();
const channels = results.channel_categories[0].channels;
const totalChannels = results.channel_categories[0].total_channels;
const totalPages = Math.ceil(totalChannels / channels.length);
const exchanges: {exchange: string, countryCode: string}[] = []
const symbols = channels.map((channel: any) =>{
// check if exchange is in the exchanges array object
if(!exchanges.find((e: any) => e.exchange === channel.exchange)) {
exchanges.push({
exchange: channel.exchange,
countryCode: 'CA'
});
}
const details = channel.company_details || {};
return {
symbol: channel.symbol,
exchange: channel.exchange,
name: channel.title,
type: channel.type,
ceoId: channel.channel,
marketCap: details.market_cap,
volumeRatio: details.volume_ratio,
avgVolume: details.avg_volume,
stockType: details.stock_type,
issueType: details.issue_type,
sharesOutstanding: details.shares_outstanding,
float: details.float,
}
})
await this.mongodb.batchUpsert('ceoSymbols', symbols, ['symbol', 'exchange']);
await this.mongodb.batchUpsert('ceoExchanges', exchanges, ['exchange']);
if(page === 1) {
for( let i = 2; i <= totalPages; i++) {
this.logger.info(`Scheduling page ${i} of ${totalPages} for CEO channels`);
await this.scheduleOperation('update-ceo-channels', i)
}
}
this.logger.info(`Fetched CEO channels for page ${page}/${totalPages}`);
return { page, totalPages };
}
updateCeoChannels = updateCeoChannels;
@Operation('update-unique-symbols')
@ScheduledOperation('process-unique-symbols', '0 0 1 * *', {//'0 */30 * * *', {
@ScheduledOperation('process-unique-symbols', '0 0 1 * *', {
priority: 5,
immediately: false,
description: 'Process unique CEO symbols and schedule individual jobs'
})
async updateUniqueSymbols(_payload: unknown, _context: any): Promise<unknown> {
this.logger.info('Starting update to get unique CEO symbols by ceoId');
try {
// Get unique ceoId values from the ceoSymbols collection
const uniqueCeoIds = await this.mongodb.collection('ceoSymbols').distinct('ceoId');
this.logger.info(`Found ${uniqueCeoIds.length} unique CEO IDs`);
// Get detailed records for each unique ceoId (latest/first record)
const uniqueSymbols = [];
for (const ceoId of uniqueCeoIds) {
const symbol = await this.mongodb.collection('ceoSymbols')
.findOne({ ceoId }, { sort: { _id: -1 } }); // Get latest record
if (symbol) {
uniqueSymbols.push(symbol);
}
}
this.logger.info(`Retrieved ${uniqueSymbols.length} unique symbol records`);
// Schedule individual jobs for each unique symbol
let scheduledJobs = 0;
for (const symbol of uniqueSymbols) {
// Schedule a job to process this individual symbol
await this.scheduleOperation('process-individual-symbol', {
ceoId: symbol.ceoId,
symbol: symbol.symbol,
});
scheduledJobs++;
// Add small delay to avoid overwhelming the queue
if (scheduledJobs % 10 === 0) {
this.logger.debug(`Scheduled ${scheduledJobs} jobs so far`);
}
}
this.logger.info(`Successfully scheduled ${scheduledJobs} individual symbol update jobs`);
// Cache the results for monitoring
await this.cacheSet('unique-symbols-last-run', {
timestamp: new Date().toISOString(),
totalUniqueIds: uniqueCeoIds.length,
totalRecords: uniqueSymbols.length,
scheduledJobs
}, 1800); // Cache for 30 minutes
return {
success: true,
uniqueCeoIds: uniqueCeoIds.length,
uniqueRecords: uniqueSymbols.length,
scheduledJobs,
timestamp: new Date().toISOString()
};
} catch (error) {
this.logger.error('Failed to update unique CEO symbols', { error });
throw error;
}
}
updateUniqueSymbols = updateUniqueSymbols;
@Operation('process-individual-symbol')
async processIndividualSymbol(payload: any, _context: any): Promise<unknown> {
const { ceoId, symbol, timestamp } = payload;
const proxy = this.proxy?.getProxy();
if(!proxy) {
this.logger.warn('No proxy available for processing individual CEO symbol');
return;
}
this.logger.debug('Processing individual CEO symbol', {
ceoId,
timestamp,
});
try {
// Fetch detailed information for the individual symbol
const response = await this.http.get(`https://api.ceo.ca/api/get_spiels?channel=${ceoId}&load_more=top`
+ (timestamp ? `&until=${timestamp}` : ''),
{
proxy: proxy,
headers: {
'User-Agent': getRandomUserAgent()
}
});
if (!response.ok) {
throw new Error(`Failed to fetch details for ceoId ${ceoId}: ${response.statusText}`);
}
const data = await response.json();
const spielCount = data.spiels.length;
if(spielCount === 0) {
this.logger.warn(`No spiels found for ceoId ${ceoId}`);
return null; // No data to process
}
const latestSpielTime = data.spiels[0]?.timestamp;
const posts = data.spiels.map((spiel: any) => ({
ceoId,
spiel: spiel.spiel,
spielReplyToId: spiel.spiel_reply_to_id,
spielReplyTo: spiel.spiel_reply_to,
spielReplyToName: spiel.spiel_reply_to_name,
spielReplyToEdited: spiel.spiel_reply_to_edited,
userId: spiel.user_id,
name: spiel.name,
timestamp: spiel.timestamp,
spielId: spiel.spiel_id,
color: spiel.color,
parentId: spiel.parent_id,
publicId: spiel.public_id,
parentChannel: spiel.parent_channel,
parentTimestamp: spiel.parent_timestamp,
votes: spiel.votes,
editable: spiel.editable,
edited: spiel.edited,
featured: spiel.featured,
verified: spiel.verified,
fake: spiel.fake,
bot: spiel.bot,
voted: spiel.voted,
flagged: spiel.flagged,
ownSpiel: spiel.own_spiel,
score: spiel.score,
savedId: spiel.saved_id,
savedTimestamp: spiel.saved_timestamp,
poll: spiel.poll,
votedInPoll: spiel.voted_in_poll
}));
await this.mongodb.batchUpsert('ceoPosts', posts, ['spielId']);
this.logger.info(`Fetched ${spielCount} spiels for ceoId ${ceoId}`);
// Update Shorts
const shortRes = await this.http.get(`https://api.ceo.ca/api/short_positions/one?symbol=${symbol}`,
{
proxy: proxy,
headers: {
'User-Agent': getRandomUserAgent()
}
});
if (shortRes.ok) {
const shortData = await shortRes.json();
if(shortData && shortData.positions) {
await this.mongodb.batchUpsert('ceoShorts', shortData.positions, ['id']);
}
await this.scheduleOperation('process-individual-symbol', {
ceoId: ceoId,
timestamp: latestSpielTime
});
}
this.logger.info(`Successfully processed channel ${ceoId} and added channel ${ceoId} at timestamp ${latestSpielTime}`);
return { ceoId, spielCount, timestamp };
} catch (error) {
this.logger.error('Failed to process individual symbol', {
error,
ceoId,
timestamp
});
throw error;
}
}
}
processIndividualSymbol = processIndividualSymbol;
}

View file

@ -63,7 +63,7 @@ async function testCeoOperations() {
if (count > 0) {
// Test 2: Run process-unique-symbols operation
logger.info('Testing process-unique-symbols operation...');
const result = await ceoHandler.processUniqueSymbols(undefined, {});
const result = await ceoHandler.updateUniqueSymbols(undefined, {});
logger.info('Process unique symbols result:', result);
// Test 3: Test individual symbol processing

View file

@ -0,0 +1,27 @@
import { PostgreSQLClient } from '@stock-bot/postgres';
import { MongoDBClient } from '@stock-bot/mongodb';
// Module-level singletons for the shared database clients. Call each setter
// exactly once during service start-up; the matching getter throws until then.
let postgresClient: PostgreSQLClient | null = null;
let mongodbClient: MongoDBClient | null = null;

// Narrow a possibly-unset client, throwing the given message when missing.
function requireClient<T>(client: T | null, message: string): T {
    if (client === null) {
        throw new Error(message);
    }
    return client;
}

export function setPostgreSQLClient(client: PostgreSQLClient): void {
    postgresClient = client;
}

export function getPostgreSQLClient(): PostgreSQLClient {
    return requireClient(postgresClient, 'PostgreSQL client not initialized. Call setPostgreSQLClient first.');
}

export function setMongoDBClient(client: MongoDBClient): void {
    mongodbClient = client;
}

export function getMongoDBClient(): MongoDBClient {
    return requireClient(mongodbClient, 'MongoDB client not initialized. Call setMongoDBClient first.');
}

View file

@ -1,5 +1,5 @@
import { getLogger } from '@stock-bot/logger';
import { getPostgreSQLClient } from '@stock-bot/postgres-client';
import { getPostgreSQLClient } from '../../../clients';
import type { JobPayload } from '../../../types/job-payloads';
const logger = getLogger('enhanced-sync-clear-postgresql-data');

View file

@ -1,5 +1,5 @@
import { getLogger } from '@stock-bot/logger';
import { getPostgreSQLClient } from '@stock-bot/postgres-client';
import { getPostgreSQLClient } from '../../../clients';
import type { JobPayload, SyncStatus } from '../../../types/job-payloads';
const logger = getLogger('enhanced-sync-status');

View file

@ -1,5 +1,5 @@
import { getLogger } from '@stock-bot/logger';
import { getPostgreSQLClient } from '@stock-bot/postgres-client';
import { getPostgreSQLClient } from '../../../clients';
import type { JobPayload } from '../../../types/job-payloads';
const logger = getLogger('enhanced-sync-exchange-stats');

View file

@ -1,5 +1,5 @@
import { getLogger } from '@stock-bot/logger';
import { getPostgreSQLClient } from '@stock-bot/postgres-client';
import { getPostgreSQLClient } from '../../../clients';
import type { JobPayload } from '../../../types/job-payloads';
const logger = getLogger('enhanced-sync-provider-mapping-stats');

View file

@ -1,6 +1,5 @@
import { getLogger } from '@stock-bot/logger';
import { getMongoDBClient } from '@stock-bot/mongodb-client';
import { getPostgreSQLClient } from '@stock-bot/postgres-client';
import { getMongoDBClient, getPostgreSQLClient } from '../../../clients';
import type { JobPayload } from '../../../types/job-payloads';
const logger = getLogger('sync-qm-exchanges');

View file

@ -1,6 +1,5 @@
import { getLogger } from '@stock-bot/logger';
import { getMongoDBClient } from "@stock-bot/mongodb-client";
import { getPostgreSQLClient } from '@stock-bot/postgres-client';
import { getMongoDBClient, getPostgreSQLClient } from '../../../clients';
import type { JobPayload, SyncResult } from '../../../types/job-payloads';
const logger = getLogger('enhanced-sync-all-exchanges');

View file

@ -1,7 +1,7 @@
import { getLogger } from '@stock-bot/logger';
import { getMongoDBClient } from '@stock-bot/mongodb-client';
import { getMongoDBClient } from '../../../clients';
import type { JobPayload } from '../../../types/job-payloads';
import type { MasterExchange } from '@stock-bot/mongodb-client';
import type { MasterExchange } from '@stock-bot/mongodb';
const logger = getLogger('sync-ib-exchanges');

View file

@ -1,6 +1,5 @@
import { getLogger } from '@stock-bot/logger';
import { getMongoDBClient } from "@stock-bot/mongodb-client";
import { getPostgreSQLClient } from '@stock-bot/postgres-client';
import { getMongoDBClient, getPostgreSQLClient } from '../../../clients';
import type { JobPayload, SyncResult } from '../../../types/job-payloads';
const logger = getLogger('enhanced-sync-qm-provider-mappings');

View file

@ -1,6 +1,5 @@
import { getLogger } from '@stock-bot/logger';
import { getMongoDBClient } from '@stock-bot/mongodb-client';
import { getPostgreSQLClient } from '@stock-bot/postgres-client';
import { getMongoDBClient, getPostgreSQLClient } from '../../../clients';
import type { JobPayload } from '../../../types/job-payloads';
const logger = getLogger('sync-qm-symbols');

View file

@ -1,5 +1,5 @@
import { getLogger } from '@stock-bot/logger';
import { getPostgreSQLClient } from '@stock-bot/postgres-client';
import { getPostgreSQLClient } from '../../../clients';
import type { JobPayload } from '../../../types/job-payloads';
const logger = getLogger('sync-status');

View file

@ -1,6 +1,5 @@
import { getLogger } from '@stock-bot/logger';
import { getMongoDBClient } from "@stock-bot/mongodb-client";
import { getPostgreSQLClient } from '@stock-bot/postgres-client';
import { getMongoDBClient, getPostgreSQLClient } from '../../../clients';
import type { JobPayload, SyncResult } from '../../../types/job-payloads';
const logger = getLogger('enhanced-sync-symbols-from-provider');

View file

@ -4,12 +4,13 @@ import { Hono } from 'hono';
import { cors } from 'hono/cors';
// Library imports
import { getLogger, setLoggerConfig, shutdownLoggers } from '@stock-bot/logger';
import { connectMongoDB } from '@stock-bot/mongodb-client';
import { connectPostgreSQL } from '@stock-bot/postgres-client';
import { MongoDBClient } from '@stock-bot/mongodb';
import { PostgreSQLClient } from '@stock-bot/postgres';
import { QueueManager, type QueueManagerConfig } from '@stock-bot/queue';
import { Shutdown } from '@stock-bot/shutdown';
// Local imports
import { enhancedSyncRoutes, healthRoutes, statsRoutes, syncRoutes } from './routes';
import { setMongoDBClient, setPostgreSQLClient } from './clients';
const config = initializeServiceConfig();
console.log('Data Sync Service Configuration:', JSON.stringify(config, null, 2));
@ -44,7 +45,8 @@ app.use(
);
const PORT = serviceConfig.port;
let server: ReturnType<typeof Bun.serve> | null = null;
// Singleton clients are managed in libraries
let mongoClient: MongoDBClient | null = null;
let postgresClient: PostgreSQLClient | null = null;
let queueManager: QueueManager | null = null;
// Initialize shutdown manager
@ -61,10 +63,10 @@ async function initializeServices() {
logger.info('Initializing data sync service...');
try {
// Initialize MongoDB client singleton
// Initialize MongoDB client
logger.debug('Connecting to MongoDB...');
const mongoConfig = databaseConfig.mongodb;
await connectMongoDB({
mongoClient = new MongoDBClient({
uri: mongoConfig.uri,
database: mongoConfig.database,
host: mongoConfig.host || 'localhost',
@ -74,13 +76,15 @@ async function initializeServices() {
socketTimeout: 30000,
serverSelectionTimeout: 5000,
},
});
}, logger);
await mongoClient.connect();
setMongoDBClient(mongoClient);
logger.info('MongoDB connected');
// Initialize PostgreSQL client singleton
// Initialize PostgreSQL client
logger.debug('Connecting to PostgreSQL...');
const pgConfig = databaseConfig.postgres;
await connectPostgreSQL({
postgresClient = new PostgreSQLClient({
host: pgConfig.host,
port: pgConfig.port,
database: pgConfig.database,
@ -91,7 +95,9 @@ async function initializeServices() {
max: pgConfig.poolSize || 10,
idleTimeoutMillis: pgConfig.idleTimeout || 30000,
},
});
}, logger);
await postgresClient.connect();
setPostgreSQLClient(postgresClient);
logger.info('PostgreSQL connected');
// Initialize queue system (with delayed worker start)
@ -236,11 +242,12 @@ shutdown.onShutdownHigh(async () => {
shutdown.onShutdownMedium(async () => {
logger.info('Disconnecting from databases...');
try {
const { disconnectMongoDB } = await import('@stock-bot/mongodb-client');
const { disconnectPostgreSQL } = await import('@stock-bot/postgres-client');
await disconnectMongoDB();
await disconnectPostgreSQL();
if (mongoClient) {
await mongoClient.disconnect();
}
if (postgresClient) {
await postgresClient.disconnect();
}
logger.info('Database connections closed');
} catch (error) {
logger.error('Error closing database connections', { error });

View file

@ -1,5 +1,5 @@
import { PostgreSQLClient } from '@stock-bot/postgres-client';
import { MongoDBClient } from '@stock-bot/mongodb-client';
import { PostgreSQLClient } from '@stock-bot/postgres';
import { MongoDBClient } from '@stock-bot/mongodb';
let postgresClient: PostgreSQLClient | null = null;
let mongodbClient: MongoDBClient | null = null;

View file

@ -5,8 +5,8 @@ import { Hono } from 'hono';
import { cors } from 'hono/cors';
import { initializeServiceConfig } from '@stock-bot/config';
import { getLogger, setLoggerConfig, shutdownLoggers } from '@stock-bot/logger';
import { createAndConnectMongoDBClient, MongoDBClient } from '@stock-bot/mongodb-client';
import { createAndConnectPostgreSQLClient, PostgreSQLClient } from '@stock-bot/postgres-client';
import { MongoDBClient } from '@stock-bot/mongodb';
import { PostgreSQLClient } from '@stock-bot/postgres';
import { Shutdown } from '@stock-bot/shutdown';
import { exchangeRoutes } from './routes/exchange.routes';
import { healthRoutes } from './routes/health.routes';
@ -77,7 +77,7 @@ async function initializeServices() {
// Initialize MongoDB client
logger.debug('Connecting to MongoDB...');
const mongoConfig = databaseConfig.mongodb;
mongoClient = await createAndConnectMongoDBClient({
mongoClient = new MongoDBClient({
uri: mongoConfig.uri,
database: mongoConfig.database,
host: mongoConfig.host,
@ -87,14 +87,15 @@ async function initializeServices() {
socketTimeout: 30000,
serverSelectionTimeout: 5000,
},
});
}, logger);
await mongoClient.connect();
setMongoDBClient(mongoClient);
logger.info('MongoDB connected');
// Initialize PostgreSQL client
logger.debug('Connecting to PostgreSQL...');
const pgConfig = databaseConfig.postgres;
postgresClient = await createAndConnectPostgreSQLClient({
postgresClient = new PostgreSQLClient({
host: pgConfig.host,
port: pgConfig.port,
database: pgConfig.database,
@ -105,7 +106,8 @@ async function initializeServices() {
max: pgConfig.poolSize || 10,
idleTimeoutMillis: pgConfig.idleTimeout || 30000,
},
});
}, logger);
await postgresClient.connect();
setPostgreSQLClient(postgresClient);
logger.info('PostgreSQL connected');