refactoring to remove a lot of junk

Boki 2025-06-22 16:57:08 -04:00
parent 5318158e59
commit d858222af7
33 changed files with 505 additions and 367 deletions

View file

@ -0,0 +1,3 @@
export { updateCeoChannels } from './update-ceo-channels.action';
export { updateUniqueSymbols } from './update-unique-symbols.action';
export { processIndividualSymbol } from './process-individual-symbol.action';
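Each action module follows the same shape: the operation logic moves into a standalone function with an explicit `this: CeoHandler` parameter, and the handler re-attaches it as a class field (see the CeoHandler diff further down). A minimal sketch of that pattern, with hypothetical names:

```typescript
// example.action.ts -- hypothetical action module
import type { CeoHandler } from '../ceo.handler';

// The explicit `this` parameter gives the free function typed access to the
// handler's services (logger, http, mongodb, proxy, ...).
export async function exampleAction(this: CeoHandler, payload: unknown): Promise<unknown> {
  this.logger.info('Running example action', { payload });
  return payload;
}

// ceo.handler.ts -- the handler assigns the imported function to a field of
// the same name, so `this` is bound to the handler instance at call time:
//
//   export class CeoHandler extends BaseHandler {
//     exampleAction = exampleAction;
//   }
```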

View file

@ -0,0 +1,111 @@
import { getRandomUserAgent } from '@stock-bot/utils';
import type { CeoHandler } from '../ceo.handler';
export async function processIndividualSymbol(this: CeoHandler, payload: any, _context: any): Promise<unknown> {
const { ceoId, symbol, timestamp } = payload;
const proxy = this.proxy?.getProxy();
if(!proxy) {
this.logger.warn('No proxy available for processing individual CEO symbol');
return;
}
this.logger.debug('Processing individual CEO symbol', {
ceoId,
timestamp,
});
try {
// Fetch detailed information for the individual symbol
const response = await this.http.get(`https://api.ceo.ca/api/get_spiels?channel=${ceoId}&load_more=top`
+ (timestamp ? `&until=${timestamp}` : ''),
{
proxy: proxy,
headers: {
'User-Agent': getRandomUserAgent()
}
});
if (!response.ok) {
throw new Error(`Failed to fetch details for ceoId ${ceoId}: ${response.statusText}`);
}
const data = await response.json();
const spielCount = data.spiels.length;
if(spielCount === 0) {
this.logger.warn(`No spiels found for ceoId ${ceoId}`);
return null; // No data to process
}
const latestSpielTime = data.spiels[0]?.timestamp;
const posts = data.spiels.map((spiel: any) => ({
ceoId,
spiel: spiel.spiel,
spielReplyToId: spiel.spiel_reply_to_id,
spielReplyTo: spiel.spiel_reply_to,
spielReplyToName: spiel.spiel_reply_to_name,
spielReplyToEdited: spiel.spiel_reply_to_edited,
userId: spiel.user_id,
name: spiel.name,
timestamp: spiel.timestamp,
spielId: spiel.spiel_id,
color: spiel.color,
parentId: spiel.parent_id,
publicId: spiel.public_id,
parentChannel: spiel.parent_channel,
parentTimestamp: spiel.parent_timestamp,
votes: spiel.votes,
editable: spiel.editable,
edited: spiel.edited,
featured: spiel.featured,
verified: spiel.verified,
fake: spiel.fake,
bot: spiel.bot,
voted: spiel.voted,
flagged: spiel.flagged,
ownSpiel: spiel.own_spiel,
score: spiel.score,
savedId: spiel.saved_id,
savedTimestamp: spiel.saved_timestamp,
poll: spiel.poll,
votedInPoll: spiel.voted_in_poll
}));
await this.mongodb.batchUpsert('ceoPosts', posts, ['spielId']);
this.logger.info(`Fetched ${spielCount} spiels for ceoId ${ceoId}`);
// Update Shorts
const shortRes = await this.http.get(`https://api.ceo.ca/api/short_positions/one?symbol=${symbol}`,
{
proxy: proxy,
headers: {
'User-Agent': getRandomUserAgent()
}
});
if (shortRes.ok) {
const shortData = await shortRes.json();
if(shortData && shortData.positions) {
await this.mongodb.batchUpsert('ceoShorts', shortData.positions, ['id']);
}
await this.scheduleOperation('process-individual-symbol', {
ceoId,
symbol,
timestamp: latestSpielTime
});
}
this.logger.info(`Successfully processed channel ${ceoId}; latest spiel timestamp ${latestSpielTime}`);
return { ceoId, spielCount, timestamp };
} catch (error) {
this.logger.error('Failed to process individual symbol', {
error,
ceoId,
timestamp
});
throw error;
}
}

View file

@ -0,0 +1,67 @@
import { getRandomUserAgent } from '@stock-bot/utils';
import type { CeoHandler } from '../ceo.handler';
export async function updateCeoChannels(this: CeoHandler, payload: number | undefined): Promise<unknown> {
const proxy = this.proxy?.getProxy();
if(!proxy) {
this.logger.warn('No proxy available for CEO channels update');
return;
}
const page = payload ?? 1;
this.logger.info(`Fetching CEO channels for page ${page} with proxy ${proxy}`);
const response = await this.http.get('https://api.ceo.ca/api/home?exchange=all&sort_by=symbol&sector=All&tab=companies&page='+page, {
proxy: proxy,
headers: {
'User-Agent': getRandomUserAgent()
}
})
const results = await response.json();
const channels = results.channel_categories[0].channels;
const totalChannels = results.channel_categories[0].total_channels;
const totalPages = Math.ceil(totalChannels / channels.length);
const exchanges: {exchange: string, countryCode: string}[] = []
const symbols = channels.map((channel: any) =>{
// check if exchange is in the exchanges array object
if(!exchanges.find((e: any) => e.exchange === channel.exchange)) {
exchanges.push({
exchange: channel.exchange,
countryCode: 'CA'
});
}
const details = channel.company_details || {};
return {
symbol: channel.symbol,
exchange: channel.exchange,
name: channel.title,
type: channel.type,
ceoId: channel.channel,
marketCap: details.market_cap,
volumeRatio: details.volume_ratio,
avgVolume: details.avg_volume,
stockType: details.stock_type,
issueType: details.issue_type,
sharesOutstanding: details.shares_outstanding,
float: details.float,
}
})
await this.mongodb.batchUpsert('ceoSymbols', symbols, ['symbol', 'exchange']);
await this.mongodb.batchUpsert('ceoExchanges', exchanges, ['exchange']);
if(page === 1) {
for( let i = 2; i <= totalPages; i++) {
this.logger.info(`Scheduling page ${i} of ${totalPages} for CEO channels`);
await this.scheduleOperation('update-ceo-channels', i)
}
}
this.logger.info(`Fetched CEO channels for page ${page}/${totalPages}`);
return { page, totalPages };
}

View file

@ -0,0 +1,63 @@
import type { CeoHandler } from '../ceo.handler';
export async function updateUniqueSymbols(this: CeoHandler, _payload: unknown, _context: any): Promise<unknown> {
this.logger.info('Starting update to get unique CEO symbols by ceoId');
try {
// Get unique ceoId values from the ceoSymbols collection
const uniqueCeoIds = await this.mongodb.collection('ceoSymbols').distinct('ceoId');
this.logger.info(`Found ${uniqueCeoIds.length} unique CEO IDs`);
// Get detailed records for each unique ceoId (latest/first record)
const uniqueSymbols = [];
for (const ceoId of uniqueCeoIds) {
const symbol = await this.mongodb.collection('ceoSymbols')
.findOne({ ceoId }, { sort: { _id: -1 } }); // Get latest record
if (symbol) {
uniqueSymbols.push(symbol);
}
}
this.logger.info(`Retrieved ${uniqueSymbols.length} unique symbol records`);
// Schedule individual jobs for each unique symbol
let scheduledJobs = 0;
for (const symbol of uniqueSymbols) {
// Schedule a job to process this individual symbol
await this.scheduleOperation('process-individual-symbol', {
ceoId: symbol.ceoId,
symbol: symbol.symbol,
});
scheduledJobs++;
// Add small delay to avoid overwhelming the queue
if (scheduledJobs % 10 === 0) {
this.logger.debug(`Scheduled ${scheduledJobs} jobs so far`);
}
}
this.logger.info(`Successfully scheduled ${scheduledJobs} individual symbol update jobs`);
// Cache the results for monitoring
await this.cacheSet('unique-symbols-last-run', {
timestamp: new Date().toISOString(),
totalUniqueIds: uniqueCeoIds.length,
totalRecords: uniqueSymbols.length,
scheduledJobs
}, 1800); // Cache for 30 minutes
return {
success: true,
uniqueCeoIds: uniqueCeoIds.length,
uniqueRecords: uniqueSymbols.length,
scheduledJobs,
timestamp: new Date().toISOString()
};
} catch (error) {
this.logger.error('Failed to update unique CEO symbols', { error });
throw error;
}
}

View file

@ -1,15 +1,18 @@
import {
BaseHandler,
Disabled,
Handler,
Operation,
ScheduledOperation,
type IServiceContainer
} from '@stock-bot/handlers';
import { getRandomUserAgent } from '@stock-bot/utils';
import {
processIndividualSymbol,
updateCeoChannels,
updateUniqueSymbols
} from './actions';
@Handler('ceo')
@Disabled()
// @Disabled()
export class CeoHandler extends BaseHandler {
constructor(services: IServiceContainer) {
super(services); // Handler name read from @Handler decorator
@ -20,246 +23,16 @@ export class CeoHandler extends BaseHandler {
immediately: false,
description: 'Get all CEO symbols and exchanges'
})
async updateCeoChannels(payload: number | undefined): Promise<unknown> {
const proxy = this.proxy?.getProxy();
if(!proxy) {
this.logger.warn('No proxy available for CEO channels update');
return;
}
let page;
if(payload === undefined) {
page = 1
}else{
page = payload;
}
this.logger.info(`Fetching CEO channels for page ${page} with proxy ${proxy}`);
const response = await this.http.get('https://api.ceo.ca/api/home?exchange=all&sort_by=symbol&sector=All&tab=companies&page='+page, {
proxy: proxy,
headers: {
'User-Agent': getRandomUserAgent()
}
})
const results = await response.json();
const channels = results.channel_categories[0].channels;
const totalChannels = results.channel_categories[0].total_channels;
const totalPages = Math.ceil(totalChannels / channels.length);
const exchanges: {exchange: string, countryCode: string}[] = []
const symbols = channels.map((channel: any) =>{
// check if exchange is in the exchanges array object
if(!exchanges.find((e: any) => e.exchange === channel.exchange)) {
exchanges.push({
exchange: channel.exchange,
countryCode: 'CA'
});
}
const details = channel.company_details || {};
return {
symbol: channel.symbol,
exchange: channel.exchange,
name: channel.title,
type: channel.type,
ceoId: channel.channel,
marketCap: details.market_cap,
volumeRatio: details.volume_ratio,
avgVolume: details.avg_volume,
stockType: details.stock_type,
issueType: details.issue_type,
sharesOutstanding: details.shares_outstanding,
float: details.float,
}
})
await this.mongodb.batchUpsert('ceoSymbols', symbols, ['symbol', 'exchange']);
await this.mongodb.batchUpsert('ceoExchanges', exchanges, ['exchange']);
if(page === 1) {
for( let i = 2; i <= totalPages; i++) {
this.logger.info(`Scheduling page ${i} of ${totalPages} for CEO channels`);
await this.scheduleOperation('update-ceo-channels', i)
}
}
this.logger.info(`Fetched CEO channels for page ${page}/${totalPages}`);
return { page, totalPages };
}
updateCeoChannels = updateCeoChannels;
@Operation('update-unique-symbols')
@ScheduledOperation('process-unique-symbols', '0 0 1 * *', {//'0 */30 * * *', {
@ScheduledOperation('process-unique-symbols', '0 0 1 * *', {
priority: 5,
immediately: false,
description: 'Process unique CEO symbols and schedule individual jobs'
})
async updateUniqueSymbols(_payload: unknown, _context: any): Promise<unknown> {
this.logger.info('Starting update to get unique CEO symbols by ceoId');
try {
// Get unique ceoId values from the ceoSymbols collection
const uniqueCeoIds = await this.mongodb.collection('ceoSymbols').distinct('ceoId');
this.logger.info(`Found ${uniqueCeoIds.length} unique CEO IDs`);
// Get detailed records for each unique ceoId (latest/first record)
const uniqueSymbols = [];
for (const ceoId of uniqueCeoIds) {
const symbol = await this.mongodb.collection('ceoSymbols')
.findOne({ ceoId }, { sort: { _id: -1 } }); // Get latest record
if (symbol) {
uniqueSymbols.push(symbol);
}
}
this.logger.info(`Retrieved ${uniqueSymbols.length} unique symbol records`);
// Schedule individual jobs for each unique symbol
let scheduledJobs = 0;
for (const symbol of uniqueSymbols) {
// Schedule a job to process this individual symbol
await this.scheduleOperation('process-individual-symbol', {
ceoId: symbol.ceoId,
symbol: symbol.symbol,
});
scheduledJobs++;
// Add small delay to avoid overwhelming the queue
if (scheduledJobs % 10 === 0) {
this.logger.debug(`Scheduled ${scheduledJobs} jobs so far`);
}
}
this.logger.info(`Successfully scheduled ${scheduledJobs} individual symbol update jobs`);
// Cache the results for monitoring
await this.cacheSet('unique-symbols-last-run', {
timestamp: new Date().toISOString(),
totalUniqueIds: uniqueCeoIds.length,
totalRecords: uniqueSymbols.length,
scheduledJobs
}, 1800); // Cache for 30 minutes
return {
success: true,
uniqueCeoIds: uniqueCeoIds.length,
uniqueRecords: uniqueSymbols.length,
scheduledJobs,
timestamp: new Date().toISOString()
};
} catch (error) {
this.logger.error('Failed to update unique CEO symbols', { error });
throw error;
}
}
updateUniqueSymbols = updateUniqueSymbols;
@Operation('process-individual-symbol')
async processIndividualSymbol(payload: any, _context: any): Promise<unknown> {
const { ceoId, symbol, timestamp } = payload;
const proxy = this.proxy?.getProxy();
if(!proxy) {
this.logger.warn('No proxy available for processing individual CEO symbol');
return;
}
this.logger.debug('Processing individual CEO symbol', {
ceoId,
timestamp,
});
try {
// Fetch detailed information for the individual symbol
const response = await this.http.get(`https://api.ceo.ca/api/get_spiels?channel=${ceoId}&load_more=top`
+ (timestamp ? `&until=${timestamp}` : ''),
{
proxy: proxy,
headers: {
'User-Agent': getRandomUserAgent()
}
});
if (!response.ok) {
throw new Error(`Failed to fetch details for ceoId ${ceoId}: ${response.statusText}`);
}
const data = await response.json();
const spielCount = data.spiels.length;
if(spielCount === 0) {
this.logger.warn(`No spiels found for ceoId ${ceoId}`);
return null; // No data to process
}
const latestSpielTime = data.spiels[0]?.timestamp;
const posts = data.spiels.map((spiel: any) => ({
ceoId,
spiel: spiel.spiel,
spielReplyToId: spiel.spiel_reply_to_id,
spielReplyTo: spiel.spiel_reply_to,
spielReplyToName: spiel.spiel_reply_to_name,
spielReplyToEdited: spiel.spiel_reply_to_edited,
userId: spiel.user_id,
name: spiel.name,
timestamp: spiel.timestamp,
spielId: spiel.spiel_id,
color: spiel.color,
parentId: spiel.parent_id,
publicId: spiel.public_id,
parentChannel: spiel.parent_channel,
parentTimestamp: spiel.parent_timestamp,
votes: spiel.votes,
editable: spiel.editable,
edited: spiel.edited,
featured: spiel.featured,
verified: spiel.verified,
fake: spiel.fake,
bot: spiel.bot,
voted: spiel.voted,
flagged: spiel.flagged,
ownSpiel: spiel.own_spiel,
score: spiel.score,
savedId: spiel.saved_id,
savedTimestamp: spiel.saved_timestamp,
poll: spiel.poll,
votedInPoll: spiel.voted_in_poll
}));
await this.mongodb.batchUpsert('ceoPosts', posts, ['spielId']);
this.logger.info(`Fetched ${spielCount} spiels for ceoId ${ceoId}`);
// Update Shorts
const shortRes = await this.http.get(`https://api.ceo.ca/api/short_positions/one?symbol=${symbol}`,
{
proxy: proxy,
headers: {
'User-Agent': getRandomUserAgent()
}
});
if (shortRes.ok) {
const shortData = await shortRes.json();
if(shortData && shortData.positions) {
await this.mongodb.batchUpsert('ceoShorts', shortData.positions, ['id']);
}
await this.scheduleOperation('process-individual-symbol', {
ceoId: ceoId,
timestamp: latestSpielTime
});
}
this.logger.info(`Successfully processed channel ${ceoId} and added channel ${ceoId} at timestamp ${latestSpielTime}`);
return { ceoId, spielCount, timestamp };
} catch (error) {
this.logger.error('Failed to process individual symbol', {
error,
ceoId,
timestamp
});
throw error;
}
}
}
processIndividualSymbol = processIndividualSymbol;
}

View file

@ -63,7 +63,7 @@ async function testCeoOperations() {
if (count > 0) {
// Test 2: Run process-unique-symbols operation
logger.info('Testing process-unique-symbols operation...');
const result = await ceoHandler.processUniqueSymbols(undefined, {});
const result = await ceoHandler.updateUniqueSymbols(undefined, {});
logger.info('Process unique symbols result:', result);
// Test 3: Test individual symbol processing

View file

@ -0,0 +1,27 @@
import { PostgreSQLClient } from '@stock-bot/postgres';
import { MongoDBClient } from '@stock-bot/mongodb';
let postgresClient: PostgreSQLClient | null = null;
let mongodbClient: MongoDBClient | null = null;
export function setPostgreSQLClient(client: PostgreSQLClient): void {
postgresClient = client;
}
export function getPostgreSQLClient(): PostgreSQLClient {
if (!postgresClient) {
throw new Error('PostgreSQL client not initialized. Call setPostgreSQLClient first.');
}
return postgresClient;
}
export function setMongoDBClient(client: MongoDBClient): void {
mongodbClient = client;
}
export function getMongoDBClient(): MongoDBClient {
if (!mongodbClient) {
throw new Error('MongoDB client not initialized. Call setMongoDBClient first.');
}
return mongodbClient;
}
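This holder module replaces the `@stock-bot/postgres-client` and `@stock-bot/mongodb-client` singletons that the job modules below used to import. A minimal usage sketch, assuming a Hono route file (route paths and collection name are illustrative):

```typescript
import { Hono } from 'hono';
import { getMongoDBClient, getPostgreSQLClient } from './clients';

// Hypothetical route module: the shared clients are resolved lazily, after
// index.ts has called setMongoDBClient/setPostgreSQLClient during startup.
export const statusRoutes = new Hono();

statusRoutes.get('/last-sync', async (c) => {
  const mongo = getMongoDBClient();
  // collection()/findOne() mirror the calls the CEO handler makes above;
  // the collection name here is illustrative only.
  const lastRun = await mongo.collection('syncRuns').findOne({}, { sort: { _id: -1 } });
  return c.json({ lastRun });
});

statusRoutes.get('/pg-ready', (c) => {
  // Throws with a descriptive error if setPostgreSQLClient was never called.
  getPostgreSQLClient();
  return c.json({ ready: true });
});
```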

View file

@ -1,5 +1,5 @@
import { getLogger } from '@stock-bot/logger';
import { getPostgreSQLClient } from '@stock-bot/postgres-client';
import { getPostgreSQLClient } from '../../../clients';
import type { JobPayload } from '../../../types/job-payloads';
const logger = getLogger('enhanced-sync-clear-postgresql-data');

View file

@ -1,5 +1,5 @@
import { getLogger } from '@stock-bot/logger';
import { getPostgreSQLClient } from '@stock-bot/postgres-client';
import { getPostgreSQLClient } from '../../../clients';
import type { JobPayload, SyncStatus } from '../../../types/job-payloads';
const logger = getLogger('enhanced-sync-status');

View file

@ -1,5 +1,5 @@
import { getLogger } from '@stock-bot/logger';
import { getPostgreSQLClient } from '@stock-bot/postgres-client';
import { getPostgreSQLClient } from '../../../clients';
import type { JobPayload } from '../../../types/job-payloads';
const logger = getLogger('enhanced-sync-exchange-stats');

View file

@ -1,5 +1,5 @@
import { getLogger } from '@stock-bot/logger';
import { getPostgreSQLClient } from '@stock-bot/postgres-client';
import { getPostgreSQLClient } from '../../../clients';
import type { JobPayload } from '../../../types/job-payloads';
const logger = getLogger('enhanced-sync-provider-mapping-stats');

View file

@ -1,6 +1,5 @@
import { getLogger } from '@stock-bot/logger';
import { getMongoDBClient } from '@stock-bot/mongodb-client';
import { getPostgreSQLClient } from '@stock-bot/postgres-client';
import { getMongoDBClient, getPostgreSQLClient } from '../../../clients';
import type { JobPayload } from '../../../types/job-payloads';
const logger = getLogger('sync-qm-exchanges');

View file

@ -1,6 +1,5 @@
import { getLogger } from '@stock-bot/logger';
import { getMongoDBClient } from "@stock-bot/mongodb-client";
import { getPostgreSQLClient } from '@stock-bot/postgres-client';
import { getMongoDBClient, getPostgreSQLClient } from '../../../clients';
import type { JobPayload, SyncResult } from '../../../types/job-payloads';
const logger = getLogger('enhanced-sync-all-exchanges');

View file

@ -1,7 +1,7 @@
import { getLogger } from '@stock-bot/logger';
import { getMongoDBClient } from '@stock-bot/mongodb-client';
import { getMongoDBClient } from '../../../clients';
import type { JobPayload } from '../../../types/job-payloads';
import type { MasterExchange } from '@stock-bot/mongodb-client';
import type { MasterExchange } from '@stock-bot/mongodb';
const logger = getLogger('sync-ib-exchanges');

View file

@ -1,6 +1,5 @@
import { getLogger } from '@stock-bot/logger';
import { getMongoDBClient } from "@stock-bot/mongodb-client";
import { getPostgreSQLClient } from '@stock-bot/postgres-client';
import { getMongoDBClient, getPostgreSQLClient } from '../../../clients';
import type { JobPayload, SyncResult } from '../../../types/job-payloads';
const logger = getLogger('enhanced-sync-qm-provider-mappings');

View file

@ -1,6 +1,5 @@
import { getLogger } from '@stock-bot/logger';
import { getMongoDBClient } from '@stock-bot/mongodb-client';
import { getPostgreSQLClient } from '@stock-bot/postgres-client';
import { getMongoDBClient, getPostgreSQLClient } from '../../../clients';
import type { JobPayload } from '../../../types/job-payloads';
const logger = getLogger('sync-qm-symbols');

View file

@ -1,5 +1,5 @@
import { getLogger } from '@stock-bot/logger';
import { getPostgreSQLClient } from '@stock-bot/postgres-client';
import { getPostgreSQLClient } from '../../../clients';
import type { JobPayload } from '../../../types/job-payloads';
const logger = getLogger('sync-status');

View file

@ -1,6 +1,5 @@
import { getLogger } from '@stock-bot/logger';
import { getMongoDBClient } from "@stock-bot/mongodb-client";
import { getPostgreSQLClient } from '@stock-bot/postgres-client';
import { getMongoDBClient, getPostgreSQLClient } from '../../../clients';
import type { JobPayload, SyncResult } from '../../../types/job-payloads';
const logger = getLogger('enhanced-sync-symbols-from-provider');

View file

@ -4,12 +4,13 @@ import { Hono } from 'hono';
import { cors } from 'hono/cors';
// Library imports
import { getLogger, setLoggerConfig, shutdownLoggers } from '@stock-bot/logger';
import { connectMongoDB } from '@stock-bot/mongodb-client';
import { connectPostgreSQL } from '@stock-bot/postgres-client';
import { MongoDBClient } from '@stock-bot/mongodb';
import { PostgreSQLClient } from '@stock-bot/postgres';
import { QueueManager, type QueueManagerConfig } from '@stock-bot/queue';
import { Shutdown } from '@stock-bot/shutdown';
// Local imports
import { enhancedSyncRoutes, healthRoutes, statsRoutes, syncRoutes } from './routes';
import { setMongoDBClient, setPostgreSQLClient } from './clients';
const config = initializeServiceConfig();
console.log('Data Sync Service Configuration:', JSON.stringify(config, null, 2));
@ -44,7 +45,8 @@ app.use(
);
const PORT = serviceConfig.port;
let server: ReturnType<typeof Bun.serve> | null = null;
// Singleton clients are managed in libraries
let mongoClient: MongoDBClient | null = null;
let postgresClient: PostgreSQLClient | null = null;
let queueManager: QueueManager | null = null;
// Initialize shutdown manager
@ -61,10 +63,10 @@ async function initializeServices() {
logger.info('Initializing data sync service...');
try {
// Initialize MongoDB client singleton
// Initialize MongoDB client
logger.debug('Connecting to MongoDB...');
const mongoConfig = databaseConfig.mongodb;
await connectMongoDB({
mongoClient = new MongoDBClient({
uri: mongoConfig.uri,
database: mongoConfig.database,
host: mongoConfig.host || 'localhost',
@ -74,13 +76,15 @@ async function initializeServices() {
socketTimeout: 30000,
serverSelectionTimeout: 5000,
},
});
}, logger);
await mongoClient.connect();
setMongoDBClient(mongoClient);
logger.info('MongoDB connected');
// Initialize PostgreSQL client singleton
// Initialize PostgreSQL client
logger.debug('Connecting to PostgreSQL...');
const pgConfig = databaseConfig.postgres;
await connectPostgreSQL({
postgresClient = new PostgreSQLClient({
host: pgConfig.host,
port: pgConfig.port,
database: pgConfig.database,
@ -91,7 +95,9 @@ async function initializeServices() {
max: pgConfig.poolSize || 10,
idleTimeoutMillis: pgConfig.idleTimeout || 30000,
},
});
}, logger);
await postgresClient.connect();
setPostgreSQLClient(postgresClient);
logger.info('PostgreSQL connected');
// Initialize queue system (with delayed worker start)
@ -236,11 +242,12 @@ shutdown.onShutdownHigh(async () => {
shutdown.onShutdownMedium(async () => {
logger.info('Disconnecting from databases...');
try {
const { disconnectMongoDB } = await import('@stock-bot/mongodb-client');
const { disconnectPostgreSQL } = await import('@stock-bot/postgres-client');
await disconnectMongoDB();
await disconnectPostgreSQL();
if (mongoClient) {
await mongoClient.disconnect();
}
if (postgresClient) {
await postgresClient.disconnect();
}
logger.info('Database connections closed');
} catch (error) {
logger.error('Error closing database connections', { error });

View file

@ -1,5 +1,5 @@
import { PostgreSQLClient } from '@stock-bot/postgres-client';
import { MongoDBClient } from '@stock-bot/mongodb-client';
import { PostgreSQLClient } from '@stock-bot/postgres';
import { MongoDBClient } from '@stock-bot/mongodb';
let postgresClient: PostgreSQLClient | null = null;
let mongodbClient: MongoDBClient | null = null;

View file

@ -5,8 +5,8 @@ import { Hono } from 'hono';
import { cors } from 'hono/cors';
import { initializeServiceConfig } from '@stock-bot/config';
import { getLogger, setLoggerConfig, shutdownLoggers } from '@stock-bot/logger';
import { createAndConnectMongoDBClient, MongoDBClient } from '@stock-bot/mongodb-client';
import { createAndConnectPostgreSQLClient, PostgreSQLClient } from '@stock-bot/postgres-client';
import { MongoDBClient } from '@stock-bot/mongodb';
import { PostgreSQLClient } from '@stock-bot/postgres';
import { Shutdown } from '@stock-bot/shutdown';
import { exchangeRoutes } from './routes/exchange.routes';
import { healthRoutes } from './routes/health.routes';
@ -77,7 +77,7 @@ async function initializeServices() {
// Initialize MongoDB client
logger.debug('Connecting to MongoDB...');
const mongoConfig = databaseConfig.mongodb;
mongoClient = await createAndConnectMongoDBClient({
mongoClient = new MongoDBClient({
uri: mongoConfig.uri,
database: mongoConfig.database,
host: mongoConfig.host,
@ -87,14 +87,15 @@ async function initializeServices() {
socketTimeout: 30000,
serverSelectionTimeout: 5000,
},
});
}, logger);
await mongoClient.connect();
setMongoDBClient(mongoClient);
logger.info('MongoDB connected');
// Initialize PostgreSQL client
logger.debug('Connecting to PostgreSQL...');
const pgConfig = databaseConfig.postgres;
postgresClient = await createAndConnectPostgreSQLClient({
postgresClient = new PostgreSQLClient({
host: pgConfig.host,
port: pgConfig.port,
database: pgConfig.database,
@ -105,7 +106,8 @@ async function initializeServices() {
max: pgConfig.poolSize || 10,
idleTimeoutMillis: pgConfig.idleTimeout || 30000,
},
});
}, logger);
await postgresClient.connect();
setPostgreSQLClient(postgresClient);
logger.info('PostgreSQL connected');

libs/LIBRARY_STANDARDS.md (new file, 157 lines)
View file

@ -0,0 +1,157 @@
# Library Standards and Patterns
This document defines the standardized patterns for all libraries in the @stock-bot ecosystem.
## Export Patterns
### Standard: Named Exports Only
All libraries should use **named exports only**. Default exports have been removed for consistency and better tree-shaking.
**Example:**
```typescript
// ✅ Good - Named exports
export { createCache } from './cache';
export type { CacheOptions } from './types';
// ❌ Bad - Default export
export default createCache;
```
## Initialization Patterns
Libraries follow different initialization patterns based on their purpose:
### 1. Singleton with Global State
**Use for:** Global services that should have only one instance (config, logger)
**Example:** Config library
```typescript
let configInstance: ConfigManager | null = null;
export function initializeConfig(): AppConfig {
if (!configInstance) {
configInstance = new ConfigManager();
}
return configInstance.initialize();
}
export function getConfig(): AppConfig {
if (!configInstance) {
throw new Error('Config not initialized');
}
return configInstance.get();
}
```
### 2. Factory with Registry
**Use for:** Services that need instance reuse based on configuration (cache, logger instances)
**Example:** Cache library
```typescript
const cacheInstances = new Map<string, CacheProvider>();
export function createCache(options: CacheOptions): CacheProvider {
if (options.shared) {
const key = generateKey(options);
if (cacheInstances.has(key)) {
return cacheInstances.get(key)!;
}
const cache = new RedisCache(options);
cacheInstances.set(key, cache);
return cache;
}
return new RedisCache(options);
}
```
### 3. Pure Factory Functions
**Use for:** Services that need creation logic beyond simple instantiation
**Example:** Event bus with configuration processing
```typescript
export function createEventBus(config: EventBusConfig): EventBus {
// Process config, set defaults, etc.
const processedConfig = { ...defaultConfig, ...config };
return new EventBus(processedConfig);
}
```
**Note:** Simple instantiation doesn't need factories - use direct class instantiation or DI container.
### 4. Direct Class Exports
**Use for:** Simple utilities or services managed by DI container
**Example:** MongoDB library
```typescript
export { MongoDBClient } from './client';
// No factory function - let DI container handle instantiation
```
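When the container owns instantiation, the registration lives in the DI setup rather than in the library. A sketch of the matching Awilix registration (mirroring the container changes later in this commit; `mongoConfig` and `logger` are assumed to be registered elsewhere):
```typescript
import { asFunction, createContainer, InjectionMode } from 'awilix';
import { MongoDBClient } from '@stock-bot/mongodb';

const container = createContainer({ injectionMode: InjectionMode.PROXY });

container.register({
  // The library only exports the class; the container decides how it is built
  // and keeps a single shared instance per process.
  mongoClient: asFunction(({ mongoConfig, logger }) => {
    return new MongoDBClient(mongoConfig, logger);
  }).singleton(),
});
```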
### 5. Singleton Classes
**Use for:** Manager classes that coordinate multiple instances
**Example:** QueueManager
```typescript
export class QueueManager {
private static instance: QueueManager | null = null;
static initialize(config: QueueConfig): QueueManager {
if (!QueueManager.instance) {
QueueManager.instance = new QueueManager(config);
}
return QueueManager.instance;
}
static getInstance(): QueueManager {
if (!QueueManager.instance) {
throw new Error('QueueManager not initialized');
}
return QueueManager.instance;
}
}
```
## Pattern Selection Guide
Choose the initialization pattern based on these criteria:
| Pattern | When to Use | Examples |
|---------|-------------|----------|
| **Singleton with Global State** | - One instance per process<br>- Stateful configuration<br>- Process-wide settings | config, logger setup |
| **Factory with Registry** | - Multiple instances with same config should share<br>- Connection pooling<br>- Resource optimization | cache, logger instances |
| **Pure Factory** | - Complex initialization logic<br>- Configuration processing needed<br>- Defaults to apply | event bus (if needed) |
| **Direct Class Export** | - DI container manages lifecycle<br>- Simple initialization<br>- No special setup needed | database clients (MongoDB, PostgreSQL, QuestDB), utilities |
| **Singleton Class** | - Coordinates multiple resources<br>- Central management point<br>- Graceful shutdown needed | QueueManager, ConnectionManager |
## Additional Standards
### Error Handling
- All libraries should throw descriptive errors
- Consider creating custom error classes for domain-specific errors
- Always include context in error messages
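For example, a domain-specific error might look like:
```typescript
export class ClientNotInitializedError extends Error {
  constructor(clientName: string) {
    // Include enough context for the caller to act on the failure.
    super(`${clientName} client not initialized. Call the corresponding set*Client() function first.`);
    this.name = 'ClientNotInitializedError';
  }
}
```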
### Configuration
- Accept configuration through constructor/factory parameters
- Validate configuration using Zod schemas
- Provide sensible defaults where appropriate
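A minimal Zod sketch of validate-with-defaults (field names illustrative):
```typescript
import { z } from 'zod';

const cacheConfigSchema = z.object({
  host: z.string().default('localhost'),
  port: z.number().int().positive().default(6379),
  ttlSeconds: z.number().int().positive().default(300),
});

export type CacheConfig = z.infer<typeof cacheConfigSchema>;

export function parseCacheConfig(raw: unknown): CacheConfig {
  // Throws a descriptive ZodError when the supplied configuration is invalid.
  return cacheConfigSchema.parse(raw);
}
```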
### Testing
- All libraries must have unit tests
- Use consistent test file naming: `*.test.ts`
- Mock external dependencies
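For example, a `*.test.ts` file using Bun's test runner (the same runner the QuestDB tests below use); `generateKey` is the hypothetical helper referenced in the registry example above:
```typescript
// generate-key.test.ts
import { describe, expect, it } from 'bun:test';
import { generateKey } from './generate-key';

describe('generateKey', () => {
  it('produces stable keys for identical options', () => {
    expect(generateKey({ host: 'localhost', port: 6379 }))
      .toBe(generateKey({ host: 'localhost', port: 6379 }));
  });
});
```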
### Documentation
- Every library must have a README.md
- Include usage examples
- Document all public APIs with JSDoc
### TypeScript
- Export all public types
- Use strict TypeScript settings
- Avoid `any` types
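For instance, export the public type instead of leaking `any` (illustrative):
```typescript
// ❌ Bad - hides the shape and disables checking
// export function upsert(docs: any): Promise<any> { ... }

// ✅ Good - the public type is exported and reused by callers
export interface SymbolRecord {
  symbol: string;
  exchange: string;
  ceoId: string;
}

export async function upsertSymbols(docs: SymbolRecord[]): Promise<number> {
  // Illustrative body: report how many records were accepted.
  return docs.length;
}
```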
### Dependencies
- Minimize external dependencies
- Use exact versions for critical dependencies
- Document peer dependencies clearly

View file

@ -8,9 +8,9 @@ import { createCache, type CacheProvider } from '@stock-bot/cache';
import type { IServiceContainer } from '@stock-bot/handlers';
import { getLogger, type Logger } from '@stock-bot/logger';
import { MongoDBClient } from '@stock-bot/mongodb';
import { createPostgreSQLClient, type PostgreSQLClient } from '@stock-bot/postgres';
import { PostgreSQLClient } from '@stock-bot/postgres';
import { ProxyManager } from '@stock-bot/proxy';
import { createQuestDBClient, type QuestDBClient } from '@stock-bot/questdb';
import { QuestDBClient } from '@stock-bot/questdb';
import { type QueueManager } from '@stock-bot/queue';
import { asFunction, asValue, createContainer, InjectionMode, type AwilixContainer } from 'awilix';
import { z } from 'zod';
@ -146,7 +146,7 @@ export function createServiceContainer(rawConfig: unknown): AwilixContainer<Serv
// Conditionally register PostgreSQL client
if (config.postgres?.enabled !== false) {
registrations.postgresClient = asFunction(({ postgresConfig, logger }) => {
return createPostgreSQLClient(
return new PostgreSQLClient(
{
host: postgresConfig.host,
port: postgresConfig.port,
@ -165,7 +165,7 @@ export function createServiceContainer(rawConfig: unknown): AwilixContainer<Serv
if (config.questdb?.enabled !== false) {
registrations.questdbClient = asFunction(({ questdbConfig, logger }) => {
console.log('Creating QuestDB client with config:', questdbConfig);
return createQuestDBClient(
return new QuestDBClient(
{
host: questdbConfig.host,
httpPort: questdbConfig.httpPort,

View file

@ -1,9 +1,9 @@
import { getLogger } from '@stock-bot/logger';
import { createJobHandler, handlerRegistry, type HandlerConfigWithSchedule } from '@stock-bot/types';
import { fetch } from '@stock-bot/utils';
import type { Collection } from 'mongodb';
import type { IServiceContainer } from '../types/service-container';
import type { ExecutionContext, IHandler } from '../types/types';
import type { Collection } from 'mongodb';
/**
* Abstract base class for all handlers with improved DI
@ -196,7 +196,7 @@ export abstract class BaseHandler implements IHandler {
*/
protected hasService(name: keyof IServiceContainer): boolean {
const service = this[name as keyof this];
return service != null;
return service !== null;
}
/**

View file

@ -9,6 +9,3 @@ export { Logger, getLogger, shutdownLoggers, setLoggerConfig } from './logger';
// Type definitions
export type { LogLevel, LogContext, LogMetadata, LoggerConfig } from './types';
// Default export
export { getLogger as default } from './logger';

View file

@ -45,6 +45,3 @@ export type {
export { RedisConnectionManager } from './connection-manager';
export { CacheKeyGenerator } from './key-generator';
export { RedisCache } from './redis-cache';
// Default export for convenience
export default createCache;

View file

@ -1,5 +0,0 @@
// This factory is no longer needed when using Awilix DI
// The MongoDBClient is now registered directly in the DI container
// See: libs/core/di/src/awilix-container.ts
export { MongoDBClient } from './client';

View file

@ -1,29 +0,0 @@
import { PostgreSQLClient } from './client';
import type { PostgreSQLClientConfig, PostgreSQLConnectionOptions, ConnectionEvents } from './types';
/**
* Factory function to create a PostgreSQL client instance
*/
export function createPostgreSQLClient(
config: PostgreSQLClientConfig,
logger?: any,
options?: PostgreSQLConnectionOptions,
events?: ConnectionEvents
): PostgreSQLClient {
return new PostgreSQLClient(config, logger, options, events);
}
/**
* Create and connect a PostgreSQL client
*/
export async function createAndConnectPostgreSQLClient(
config: PostgreSQLClientConfig,
logger?: any,
options?: PostgreSQLConnectionOptions,
events?: ConnectionEvents
): Promise<PostgreSQLClient> {
const client = createPostgreSQLClient(config, logger, options, events);
await client.connect();
return client;
}

View file

@ -33,10 +33,5 @@ export type {
DynamicPoolConfig,
} from './types';
// Factory functions
export {
createPostgreSQLClient,
createAndConnectPostgreSQLClient,
} from './factory';
// Singleton pattern removed - use factory functions instead
// Note: Factory functions removed - instantiate directly with new PostgreSQLClient()
// or use the Awilix DI container (recommended)

View file

@ -1,26 +0,0 @@
import { QuestDBClient } from './client';
import type { QuestDBClientConfig, QuestDBConnectionOptions } from './types';
/**
* Factory function to create a QuestDB client instance
*/
export function createQuestDBClient(
config: QuestDBClientConfig,
logger?: any,
options?: QuestDBConnectionOptions
): QuestDBClient {
return new QuestDBClient(config, logger, options);
}
/**
* Create and connect a QuestDB client
*/
export async function createAndConnectQuestDBClient(
config: QuestDBClientConfig,
logger?: any,
options?: QuestDBConnectionOptions
): Promise<QuestDBClient> {
const client = createQuestDBClient(config, logger, options);
await client.connect();
return client;
}

View file

@ -28,5 +28,5 @@ export type {
InsertResult,
} from './types';
// Utils
export { createQuestDBClient, createAndConnectQuestDBClient } from './factory';
// Note: Factory functions removed - instantiate directly with new QuestDBClient()
// or use the Awilix DI container (recommended)

View file

@ -7,7 +7,6 @@
import { afterEach, describe, expect, it } from 'bun:test';
import {
createQuestDBClient,
QuestDBClient,
QuestDBHealthMonitor,
QuestDBInfluxWriter,
@ -40,9 +39,17 @@ describe('QuestDB Client Integration', () => {
});
describe('Client Initialization', () => {
it('should create client with factory function', () => {
const factoryClient = createQuestDBClient();
expect(factoryClient).toBeInstanceOf(QuestDBClient);
it('should create client with constructor', () => {
const newClient = new QuestDBClient({
host: 'localhost',
httpPort: 9000,
pgPort: 8812,
influxPort: 9009,
database: 'questdb',
user: 'admin',
password: 'quest',
});
expect(newClient).toBeInstanceOf(QuestDBClient);
});
it('should initialize all supporting classes', () => {

View file

@ -10,7 +10,4 @@ export function createEventBus(config: EventBusConfig): EventBus {
// Re-export everything
export { EventBus } from './event-bus';
export * from './types';
// Default export
export default createEventBus;
export * from './types';