From d858222af7f4539991615192f4f7ee35236cda28 Mon Sep 17 00:00:00 2001 From: Boki Date: Sun, 22 Jun 2025 16:57:08 -0400 Subject: [PATCH] refactoring to remove a lot of junk --- .../src/handlers/ceo/actions/index.ts | 3 + .../process-individual-symbol.action.ts | 111 ++++++++ .../ceo/actions/update-ceo-channels.action.ts | 67 +++++ .../actions/update-unique-symbols.action.ts | 63 +++++ .../src/handlers/ceo/ceo.handler.ts | 249 +----------------- apps/data-ingestion/test-ceo-operations.ts | 2 +- apps/data-pipeline/src/clients.ts | 27 ++ .../clear-postgresql-data.operations.ts | 2 +- .../enhanced-sync-status.operations.ts | 2 +- .../operations/exchange-stats.operations.ts | 2 +- .../provider-mapping-stats.operations.ts | 2 +- .../operations/qm-exchanges.operations.ts | 3 +- .../sync-all-exchanges.operations.ts | 3 +- .../sync-ib-exchanges.operations.ts | 4 +- .../sync-qm-provider-mappings.operations.ts | 3 +- .../operations/qm-symbols.operations.ts | 3 +- .../operations/sync-status.operations.ts | 2 +- .../sync-symbols-from-provider.operations.ts | 3 +- apps/data-pipeline/src/index.ts | 35 ++- apps/web-api/src/clients.ts | 4 +- apps/web-api/src/index.ts | 14 +- libs/LIBRARY_STANDARDS.md | 157 +++++++++++ libs/core/di/src/awilix-container.ts | 8 +- libs/core/handlers/src/base/BaseHandler.ts | 4 +- libs/core/logger/src/index.ts | 3 - libs/data/cache/src/index.ts | 3 - libs/data/mongodb/src/factory.ts | 5 - libs/data/postgres/src/factory.ts | 29 -- libs/data/postgres/src/index.ts | 9 +- libs/data/questdb/src/factory.ts | 26 -- libs/data/questdb/src/index.ts | 4 +- libs/data/questdb/test/integration.test.ts | 15 +- libs/services/event-bus/src/index.ts | 5 +- 33 files changed, 505 insertions(+), 367 deletions(-) create mode 100644 apps/data-ingestion/src/handlers/ceo/actions/index.ts create mode 100644 apps/data-ingestion/src/handlers/ceo/actions/process-individual-symbol.action.ts create mode 100644 
apps/data-ingestion/src/handlers/ceo/actions/update-ceo-channels.action.ts create mode 100644 apps/data-ingestion/src/handlers/ceo/actions/update-unique-symbols.action.ts create mode 100644 apps/data-pipeline/src/clients.ts create mode 100644 libs/LIBRARY_STANDARDS.md delete mode 100644 libs/data/mongodb/src/factory.ts delete mode 100644 libs/data/postgres/src/factory.ts delete mode 100644 libs/data/questdb/src/factory.ts diff --git a/apps/data-ingestion/src/handlers/ceo/actions/index.ts b/apps/data-ingestion/src/handlers/ceo/actions/index.ts new file mode 100644 index 0000000..c5ac377 --- /dev/null +++ b/apps/data-ingestion/src/handlers/ceo/actions/index.ts @@ -0,0 +1,3 @@ +export { updateCeoChannels } from './update-ceo-channels.action'; +export { updateUniqueSymbols } from './update-unique-symbols.action'; +export { processIndividualSymbol } from './process-individual-symbol.action'; \ No newline at end of file diff --git a/apps/data-ingestion/src/handlers/ceo/actions/process-individual-symbol.action.ts b/apps/data-ingestion/src/handlers/ceo/actions/process-individual-symbol.action.ts new file mode 100644 index 0000000..b7a7daa --- /dev/null +++ b/apps/data-ingestion/src/handlers/ceo/actions/process-individual-symbol.action.ts @@ -0,0 +1,111 @@ +import { getRandomUserAgent } from '@stock-bot/utils'; +import type { CeoHandler } from '../ceo.handler'; + +export async function processIndividualSymbol(this: CeoHandler, payload: any, _context: any): Promise { + const { ceoId, symbol, timestamp } = payload; + const proxy = this.proxy?.getProxy(); + if(!proxy) { + this.logger.warn('No proxy available for processing individual CEO symbol'); + return; + } + + this.logger.debug('Processing individual CEO symbol', { + ceoId, + timestamp, + }); + try { + // Fetch detailed information for the individual symbol + const response = await this.http.get(`https://api.ceo.ca/api/get_spiels?channel=${ceoId}&load_more=top` + + (timestamp ? 
`&until=${timestamp}` : ''), + { + proxy: proxy, + headers: { + + 'User-Agent': getRandomUserAgent() + } + }); + + if (!response.ok) { + throw new Error(`Failed to fetch details for ceoId ${ceoId}: ${response.statusText}`); + } + + const data = await response.json(); + + const spielCount = data.spiels.length; + if(spielCount === 0) { + this.logger.warn(`No spiels found for ceoId ${ceoId}`); + return null; // No data to process + } + const latestSpielTime = data.spiels[0]?.timestamp; + const posts = data.spiels.map((spiel: any) => ({ + ceoId, + spiel: spiel.spiel, + spielReplyToId: spiel.spiel_reply_to_id, + spielReplyTo: spiel.spiel_reply_to, + spielReplyToName: spiel.spiel_reply_to_name, + spielReplyToEdited: spiel.spiel_reply_to_edited, + userId: spiel.user_id, + name: spiel.name, + timestamp: spiel.timestamp, + spielId: spiel.spiel_id, + color: spiel.color, + parentId: spiel.parent_id, + publicId: spiel.public_id, + parentChannel: spiel.parent_channel, + parentTimestamp: spiel.parent_timestamp, + votes: spiel.votes, + editable: spiel.editable, + edited: spiel.edited, + featured: spiel.featured, + verified: spiel.verified, + fake: spiel.fake, + bot: spiel.bot, + voted: spiel.voted, + flagged: spiel.flagged, + ownSpiel: spiel.own_spiel, + score: spiel.score, + savedId: spiel.saved_id, + savedTimestamp: spiel.saved_timestamp, + poll: spiel.poll, + votedInPoll: spiel.voted_in_poll + })); + + await this.mongodb.batchUpsert('ceoPosts', posts, ['spielId']); + this.logger.info(`Fetched ${spielCount} spiels for ceoId ${ceoId}`); + + // Update Shorts + const shortRes = await this.http.get(`https://api.ceo.ca/api/short_positions/one?symbol=${symbol}`, + { + proxy: proxy, + headers: { + 'User-Agent': getRandomUserAgent() + } + }); + + if (shortRes.ok) { + const shortData = await shortRes.json(); + if(shortData && shortData.positions) { + await this.mongodb.batchUpsert('ceoShorts', shortData.positions, ['id']); + } + + await 
this.scheduleOperation('process-individual-symbol', { + ceoId: ceoId, + timestamp: latestSpielTime + }); + } + + + + this.logger.info(`Successfully processed channel ${ceoId} and added channel ${ceoId} at timestamp ${latestSpielTime}`); + + return { ceoId, spielCount, timestamp }; + + } catch (error) { + this.logger.error('Failed to process individual symbol', { + error, + ceoId, + timestamp + }); + throw error; + } +} \ No newline at end of file diff --git a/apps/data-ingestion/src/handlers/ceo/actions/update-ceo-channels.action.ts b/apps/data-ingestion/src/handlers/ceo/actions/update-ceo-channels.action.ts new file mode 100644 index 0000000..92c9bb5 --- /dev/null +++ b/apps/data-ingestion/src/handlers/ceo/actions/update-ceo-channels.action.ts @@ -0,0 +1,67 @@ +import { getRandomUserAgent } from '@stock-bot/utils'; +import type { CeoHandler } from '../ceo.handler'; + +export async function updateCeoChannels(this: CeoHandler, payload: number | undefined): Promise { + const proxy = this.proxy?.getProxy(); + if(!proxy) { + this.logger.warn('No proxy available for CEO channels update'); + return; + } + let page; + if(payload === undefined) { + page = 1 + }else{ + page = payload; + } + + + this.logger.info(`Fetching CEO channels for page ${page} with proxy ${proxy}`); + const response = await this.http.get('https://api.ceo.ca/api/home?exchange=all&sort_by=symbol§or=All&tab=companies&page='+page, { + proxy: proxy, + headers: { + 'User-Agent': getRandomUserAgent() + } + }) + const results = await response.json(); + const channels = results.channel_categories[0].channels; + const totalChannels = results.channel_categories[0].total_channels; + const totalPages = Math.ceil(totalChannels / channels.length); + const exchanges: {exchange: string, countryCode: string}[] = [] + const symbols = channels.map((channel: any) =>{ + // check if exchange is in the exchanges array object + if(!exchanges.find((e: any) => e.exchange === channel.exchange)) { + exchanges.push({ + exchange: 
channel.exchange, + countryCode: 'CA' + }); + } + const details = channel.company_details || {}; + return { + symbol: channel.symbol, + exchange: channel.exchange, + name: channel.title, + type: channel.type, + ceoId: channel.channel, + marketCap: details.market_cap, + volumeRatio: details.volume_ratio, + avgVolume: details.avg_volume, + stockType: details.stock_type, + issueType: details.issue_type, + sharesOutstanding: details.shares_outstanding, + float: details.float, + } + }) + + await this.mongodb.batchUpsert('ceoSymbols', symbols, ['symbol', 'exchange']); + await this.mongodb.batchUpsert('ceoExchanges', exchanges, ['exchange']); + + if(page === 1) { + for( let i = 2; i <= totalPages; i++) { + this.logger.info(`Scheduling page ${i} of ${totalPages} for CEO channels`); + await this.scheduleOperation('update-ceo-channels', i) + } + } + + this.logger.info(`Fetched CEO channels for page ${page}/${totalPages}`); + return { page, totalPages }; +} \ No newline at end of file diff --git a/apps/data-ingestion/src/handlers/ceo/actions/update-unique-symbols.action.ts b/apps/data-ingestion/src/handlers/ceo/actions/update-unique-symbols.action.ts new file mode 100644 index 0000000..655dbe0 --- /dev/null +++ b/apps/data-ingestion/src/handlers/ceo/actions/update-unique-symbols.action.ts @@ -0,0 +1,63 @@ +import type { CeoHandler } from '../ceo.handler'; + +export async function updateUniqueSymbols(this: CeoHandler, _payload: unknown, _context: any): Promise { + this.logger.info('Starting update to get unique CEO symbols by ceoId'); + + try { + // Get unique ceoId values from the ceoSymbols collection + const uniqueCeoIds = await this.mongodb.collection('ceoSymbols').distinct('ceoId'); + + this.logger.info(`Found ${uniqueCeoIds.length} unique CEO IDs`); + + // Get detailed records for each unique ceoId (latest/first record) + const uniqueSymbols = []; + for (const ceoId of uniqueCeoIds) { + const symbol = await this.mongodb.collection('ceoSymbols') + .findOne({ ceoId }, { 
sort: { _id: -1 } }); // Get latest record + + if (symbol) { + uniqueSymbols.push(symbol); + } + } + + this.logger.info(`Retrieved ${uniqueSymbols.length} unique symbol records`); + + // Schedule individual jobs for each unique symbol + let scheduledJobs = 0; + for (const symbol of uniqueSymbols) { + // Schedule a job to process this individual symbol + await this.scheduleOperation('process-individual-symbol', { + ceoId: symbol.ceoId, + symbol: symbol.symbol, + }); + scheduledJobs++; + + // Add small delay to avoid overwhelming the queue + if (scheduledJobs % 10 === 0) { + this.logger.debug(`Scheduled ${scheduledJobs} jobs so far`); + } + } + + this.logger.info(`Successfully scheduled ${scheduledJobs} individual symbol update jobs`); + + // Cache the results for monitoring + await this.cacheSet('unique-symbols-last-run', { + timestamp: new Date().toISOString(), + totalUniqueIds: uniqueCeoIds.length, + totalRecords: uniqueSymbols.length, + scheduledJobs + }, 1800); // Cache for 30 minutes + + return { + success: true, + uniqueCeoIds: uniqueCeoIds.length, + uniqueRecords: uniqueSymbols.length, + scheduledJobs, + timestamp: new Date().toISOString() + }; + + } catch (error) { + this.logger.error('Failed to update unique CEO symbols', { error }); + throw error; + } +} \ No newline at end of file diff --git a/apps/data-ingestion/src/handlers/ceo/ceo.handler.ts b/apps/data-ingestion/src/handlers/ceo/ceo.handler.ts index 0d27587..22c5a94 100644 --- a/apps/data-ingestion/src/handlers/ceo/ceo.handler.ts +++ b/apps/data-ingestion/src/handlers/ceo/ceo.handler.ts @@ -1,15 +1,18 @@ import { BaseHandler, - Disabled, Handler, Operation, ScheduledOperation, type IServiceContainer } from '@stock-bot/handlers'; -import { getRandomUserAgent } from '@stock-bot/utils'; +import { + processIndividualSymbol, + updateCeoChannels, + updateUniqueSymbols +} from './actions'; @Handler('ceo') -@Disabled() +// @Disabled() export class CeoHandler extends BaseHandler { constructor(services: 
IServiceContainer) { super(services); // Handler name read from @Handler decorator @@ -20,246 +23,16 @@ export class CeoHandler extends BaseHandler { immediately: false, description: 'Get all CEO symbols and exchanges' }) - async updateCeoChannels(payload: number | undefined): Promise { - const proxy = this.proxy?.getProxy(); - if(!proxy) { - this.logger.warn('No proxy available for CEO channels update'); - return; - } - let page; - if(payload === undefined) { - page = 1 - }else{ - page = payload; - } - - - this.logger.info(`Fetching CEO channels for page ${page} with proxy ${proxy}`); - const response = await this.http.get('https://api.ceo.ca/api/home?exchange=all&sort_by=symbol§or=All&tab=companies&page='+page, { - proxy: proxy, - headers: { - 'User-Agent': getRandomUserAgent() - } - }) - const results = await response.json(); - const channels = results.channel_categories[0].channels; - const totalChannels = results.channel_categories[0].total_channels; - const totalPages = Math.ceil(totalChannels / channels.length); - const exchanges: {exchange: string, countryCode: string}[] = [] - const symbols = channels.map((channel: any) =>{ - // check if exchange is in the exchanges array object - if(!exchanges.find((e: any) => e.exchange === channel.exchange)) { - exchanges.push({ - exchange: channel.exchange, - countryCode: 'CA' - }); - } - const details = channel.company_details || {}; - return { - symbol: channel.symbol, - exchange: channel.exchange, - name: channel.title, - type: channel.type, - ceoId: channel.channel, - marketCap: details.market_cap, - volumeRatio: details.volume_ratio, - avgVolume: details.avg_volume, - stockType: details.stock_type, - issueType: details.issue_type, - sharesOutstanding: details.shares_outstanding, - float: details.float, - } - }) - - await this.mongodb.batchUpsert('ceoSymbols', symbols, ['symbol', 'exchange']); - await this.mongodb.batchUpsert('ceoExchanges', exchanges, ['exchange']); - - if(page === 1) { - for( let i = 2; i <= 
totalPages; i++) { - this.logger.info(`Scheduling page ${i} of ${totalPages} for CEO channels`); - await this.scheduleOperation('update-ceo-channels', i) - } - } - - this.logger.info(`Fetched CEO channels for page ${page}/${totalPages}`); - return { page, totalPages }; - } + updateCeoChannels = updateCeoChannels; @Operation('update-unique-symbols') - @ScheduledOperation('process-unique-symbols', '0 0 1 * *', {//'0 */30 * * *', { + @ScheduledOperation('process-unique-symbols', '0 0 1 * *', { priority: 5, immediately: false, description: 'Process unique CEO symbols and schedule individual jobs' }) - async updateUniqueSymbols(_payload: unknown, _context: any): Promise { - this.logger.info('Starting update to get unique CEO symbols by ceoId'); - - try { - // Get unique ceoId values from the ceoSymbols collection - const uniqueCeoIds = await this.mongodb.collection('ceoSymbols').distinct('ceoId'); - - this.logger.info(`Found ${uniqueCeoIds.length} unique CEO IDs`); - - // Get detailed records for each unique ceoId (latest/first record) - const uniqueSymbols = []; - for (const ceoId of uniqueCeoIds) { - const symbol = await this.mongodb.collection('ceoSymbols') - .findOne({ ceoId }, { sort: { _id: -1 } }); // Get latest record - - if (symbol) { - uniqueSymbols.push(symbol); - } - } - - this.logger.info(`Retrieved ${uniqueSymbols.length} unique symbol records`); - - // Schedule individual jobs for each unique symbol - let scheduledJobs = 0; - for (const symbol of uniqueSymbols) { - // Schedule a job to process this individual symbol - await this.scheduleOperation('process-individual-symbol', { - ceoId: symbol.ceoId, - symbol: symbol.symbol, - }); - scheduledJobs++; - - // Add small delay to avoid overwhelming the queue - if (scheduledJobs % 10 === 0) { - this.logger.debug(`Scheduled ${scheduledJobs} jobs so far`); - } - } - - this.logger.info(`Successfully scheduled ${scheduledJobs} individual symbol update jobs`); - - // Cache the results for monitoring - await 
this.cacheSet('unique-symbols-last-run', { - timestamp: new Date().toISOString(), - totalUniqueIds: uniqueCeoIds.length, - totalRecords: uniqueSymbols.length, - scheduledJobs - }, 1800); // Cache for 30 minutes - - return { - success: true, - uniqueCeoIds: uniqueCeoIds.length, - uniqueRecords: uniqueSymbols.length, - scheduledJobs, - timestamp: new Date().toISOString() - }; - - } catch (error) { - this.logger.error('Failed to update unique CEO symbols', { error }); - throw error; - } - } + updateUniqueSymbols = updateUniqueSymbols; @Operation('process-individual-symbol') - async processIndividualSymbol(payload: any, _context: any): Promise { - const { ceoId, symbol, timestamp } = payload; - const proxy = this.proxy?.getProxy(); - if(!proxy) { - this.logger.warn('No proxy available for processing individual CEO symbol'); - return; - } - - this.logger.debug('Processing individual CEO symbol', { - ceoId, - timestamp, - }); - try { - // Fetch detailed information for the individual symbol - const response = await this.http.get(`https://api.ceo.ca/api/get_spiels?channel=${ceoId}&load_more=top` - + (timestamp ? 
`&until=${timestamp}` : ''), - { - proxy: proxy, - headers: { - - 'User-Agent': getRandomUserAgent() - } - }); - - if (!response.ok) { - throw new Error(`Failed to fetch details for ceoId ${ceoId}: ${response.statusText}`); - } - - const data = await response.json(); - - const spielCount = data.spiels.length; - if(spielCount === 0) { - this.logger.warn(`No spiels found for ceoId ${ceoId}`); - return null; // No data to process - } - const latestSpielTime = data.spiels[0]?.timestamp; - const posts = data.spiels.map((spiel: any) => ({ - ceoId, - spiel: spiel.spiel, - spielReplyToId: spiel.spiel_reply_to_id, - spielReplyTo: spiel.spiel_reply_to, - spielReplyToName: spiel.spiel_reply_to_name, - spielReplyToEdited: spiel.spiel_reply_to_edited, - userId: spiel.user_id, - name: spiel.name, - timestamp: spiel.timestamp, - spielId: spiel.spiel_id, - color: spiel.color, - parentId: spiel.parent_id, - publicId: spiel.public_id, - parentChannel: spiel.parent_channel, - parentTimestamp: spiel.parent_timestamp, - votes: spiel.votes, - editable: spiel.editable, - edited: spiel.edited, - featured: spiel.featured, - verified: spiel.verified, - fake: spiel.fake, - bot: spiel.bot, - voted: spiel.voted, - flagged: spiel.flagged, - ownSpiel: spiel.own_spiel, - score: spiel.score, - savedId: spiel.saved_id, - savedTimestamp: spiel.saved_timestamp, - poll: spiel.poll, - votedInPoll: spiel.voted_in_poll - })); - - await this.mongodb.batchUpsert('ceoPosts', posts, ['spielId']); - this.logger.info(`Fetched ${spielCount} spiels for ceoId ${ceoId}`); - - // Update Shorts - const shortRes = await this.http.get(`https://api.ceo.ca/api/short_positions/one?symbol=${symbol}`, - { - proxy: proxy, - headers: { - 'User-Agent': getRandomUserAgent() - } - }); - - if (shortRes.ok) { - const shortData = await shortRes.json(); - if(shortData && shortData.positions) { - await this.mongodb.batchUpsert('ceoShorts', shortData.positions, ['id']); - } - - await 
this.scheduleOperation('process-individual-symbol', { - ceoId: ceoId, - timestamp: latestSpielTime - }); - } - - - - this.logger.info(`Successfully processed channel ${ceoId} and added channel ${ceoId} at timestamp ${latestSpielTime}`); - - return { ceoId, spielCount, timestamp }; - - } catch (error) { - this.logger.error('Failed to process individual symbol', { - error, - ceoId, - timestamp - }); - throw error; - } - } -} + processIndividualSymbol = processIndividualSymbol; +} \ No newline at end of file diff --git a/apps/data-ingestion/test-ceo-operations.ts b/apps/data-ingestion/test-ceo-operations.ts index dd63bf7..9250e72 100755 --- a/apps/data-ingestion/test-ceo-operations.ts +++ b/apps/data-ingestion/test-ceo-operations.ts @@ -63,7 +63,7 @@ async function testCeoOperations() { if (count > 0) { // Test 2: Run process-unique-symbols operation logger.info('Testing process-unique-symbols operation...'); - const result = await ceoHandler.processUniqueSymbols(undefined, {}); + const result = await ceoHandler.updateUniqueSymbols(undefined, {}); logger.info('Process unique symbols result:', result); // Test 3: Test individual symbol processing diff --git a/apps/data-pipeline/src/clients.ts b/apps/data-pipeline/src/clients.ts new file mode 100644 index 0000000..4bc8217 --- /dev/null +++ b/apps/data-pipeline/src/clients.ts @@ -0,0 +1,27 @@ +import { PostgreSQLClient } from '@stock-bot/postgres'; +import { MongoDBClient } from '@stock-bot/mongodb'; + +let postgresClient: PostgreSQLClient | null = null; +let mongodbClient: MongoDBClient | null = null; + +export function setPostgreSQLClient(client: PostgreSQLClient): void { + postgresClient = client; +} + +export function getPostgreSQLClient(): PostgreSQLClient { + if (!postgresClient) { + throw new Error('PostgreSQL client not initialized. 
Call setPostgreSQLClient first.'); + } + return postgresClient; +} + +export function setMongoDBClient(client: MongoDBClient): void { + mongodbClient = client; +} + +export function getMongoDBClient(): MongoDBClient { + if (!mongodbClient) { + throw new Error('MongoDB client not initialized. Call setMongoDBClient first.'); + } + return mongodbClient; +} \ No newline at end of file diff --git a/apps/data-pipeline/src/handlers/exchanges/operations/clear-postgresql-data.operations.ts b/apps/data-pipeline/src/handlers/exchanges/operations/clear-postgresql-data.operations.ts index 8880530..d7148ed 100644 --- a/apps/data-pipeline/src/handlers/exchanges/operations/clear-postgresql-data.operations.ts +++ b/apps/data-pipeline/src/handlers/exchanges/operations/clear-postgresql-data.operations.ts @@ -1,5 +1,5 @@ import { getLogger } from '@stock-bot/logger'; -import { getPostgreSQLClient } from '@stock-bot/postgres-client'; +import { getPostgreSQLClient } from '../../../clients'; import type { JobPayload } from '../../../types/job-payloads'; const logger = getLogger('enhanced-sync-clear-postgresql-data'); diff --git a/apps/data-pipeline/src/handlers/exchanges/operations/enhanced-sync-status.operations.ts b/apps/data-pipeline/src/handlers/exchanges/operations/enhanced-sync-status.operations.ts index 2a42451..275ac82 100644 --- a/apps/data-pipeline/src/handlers/exchanges/operations/enhanced-sync-status.operations.ts +++ b/apps/data-pipeline/src/handlers/exchanges/operations/enhanced-sync-status.operations.ts @@ -1,5 +1,5 @@ import { getLogger } from '@stock-bot/logger'; -import { getPostgreSQLClient } from '@stock-bot/postgres-client'; +import { getPostgreSQLClient } from '../../../clients'; import type { JobPayload, SyncStatus } from '../../../types/job-payloads'; const logger = getLogger('enhanced-sync-status'); diff --git a/apps/data-pipeline/src/handlers/exchanges/operations/exchange-stats.operations.ts 
b/apps/data-pipeline/src/handlers/exchanges/operations/exchange-stats.operations.ts index 74806d3..b67170e 100644 --- a/apps/data-pipeline/src/handlers/exchanges/operations/exchange-stats.operations.ts +++ b/apps/data-pipeline/src/handlers/exchanges/operations/exchange-stats.operations.ts @@ -1,5 +1,5 @@ import { getLogger } from '@stock-bot/logger'; -import { getPostgreSQLClient } from '@stock-bot/postgres-client'; +import { getPostgreSQLClient } from '../../../clients'; import type { JobPayload } from '../../../types/job-payloads'; const logger = getLogger('enhanced-sync-exchange-stats'); diff --git a/apps/data-pipeline/src/handlers/exchanges/operations/provider-mapping-stats.operations.ts b/apps/data-pipeline/src/handlers/exchanges/operations/provider-mapping-stats.operations.ts index 416f8dc..9e04eca 100644 --- a/apps/data-pipeline/src/handlers/exchanges/operations/provider-mapping-stats.operations.ts +++ b/apps/data-pipeline/src/handlers/exchanges/operations/provider-mapping-stats.operations.ts @@ -1,5 +1,5 @@ import { getLogger } from '@stock-bot/logger'; -import { getPostgreSQLClient } from '@stock-bot/postgres-client'; +import { getPostgreSQLClient } from '../../../clients'; import type { JobPayload } from '../../../types/job-payloads'; const logger = getLogger('enhanced-sync-provider-mapping-stats'); diff --git a/apps/data-pipeline/src/handlers/exchanges/operations/qm-exchanges.operations.ts b/apps/data-pipeline/src/handlers/exchanges/operations/qm-exchanges.operations.ts index 5646854..13ebe7e 100644 --- a/apps/data-pipeline/src/handlers/exchanges/operations/qm-exchanges.operations.ts +++ b/apps/data-pipeline/src/handlers/exchanges/operations/qm-exchanges.operations.ts @@ -1,6 +1,5 @@ import { getLogger } from '@stock-bot/logger'; -import { getMongoDBClient } from '@stock-bot/mongodb-client'; -import { getPostgreSQLClient } from '@stock-bot/postgres-client'; +import { getMongoDBClient, getPostgreSQLClient } from '../../../clients'; import type { 
JobPayload } from '../../../types/job-payloads'; const logger = getLogger('sync-qm-exchanges'); diff --git a/apps/data-pipeline/src/handlers/exchanges/operations/sync-all-exchanges.operations.ts b/apps/data-pipeline/src/handlers/exchanges/operations/sync-all-exchanges.operations.ts index 5c289dd..636347c 100644 --- a/apps/data-pipeline/src/handlers/exchanges/operations/sync-all-exchanges.operations.ts +++ b/apps/data-pipeline/src/handlers/exchanges/operations/sync-all-exchanges.operations.ts @@ -1,6 +1,5 @@ import { getLogger } from '@stock-bot/logger'; -import { getMongoDBClient } from "@stock-bot/mongodb-client"; -import { getPostgreSQLClient } from '@stock-bot/postgres-client'; +import { getMongoDBClient, getPostgreSQLClient } from '../../../clients'; import type { JobPayload, SyncResult } from '../../../types/job-payloads'; const logger = getLogger('enhanced-sync-all-exchanges'); diff --git a/apps/data-pipeline/src/handlers/exchanges/operations/sync-ib-exchanges.operations.ts b/apps/data-pipeline/src/handlers/exchanges/operations/sync-ib-exchanges.operations.ts index 3f924e6..d8ba87d 100644 --- a/apps/data-pipeline/src/handlers/exchanges/operations/sync-ib-exchanges.operations.ts +++ b/apps/data-pipeline/src/handlers/exchanges/operations/sync-ib-exchanges.operations.ts @@ -1,7 +1,7 @@ import { getLogger } from '@stock-bot/logger'; -import { getMongoDBClient } from '@stock-bot/mongodb-client'; +import { getMongoDBClient } from '../../../clients'; import type { JobPayload } from '../../../types/job-payloads'; -import type { MasterExchange } from '@stock-bot/mongodb-client'; +import type { MasterExchange } from '@stock-bot/mongodb'; const logger = getLogger('sync-ib-exchanges'); diff --git a/apps/data-pipeline/src/handlers/exchanges/operations/sync-qm-provider-mappings.operations.ts b/apps/data-pipeline/src/handlers/exchanges/operations/sync-qm-provider-mappings.operations.ts index 73a7107..15e2fa0 100644 --- 
a/apps/data-pipeline/src/handlers/exchanges/operations/sync-qm-provider-mappings.operations.ts +++ b/apps/data-pipeline/src/handlers/exchanges/operations/sync-qm-provider-mappings.operations.ts @@ -1,6 +1,5 @@ import { getLogger } from '@stock-bot/logger'; -import { getMongoDBClient } from "@stock-bot/mongodb-client"; -import { getPostgreSQLClient } from '@stock-bot/postgres-client'; +import { getMongoDBClient, getPostgreSQLClient } from '../../../clients'; import type { JobPayload, SyncResult } from '../../../types/job-payloads'; const logger = getLogger('enhanced-sync-qm-provider-mappings'); diff --git a/apps/data-pipeline/src/handlers/symbols/operations/qm-symbols.operations.ts b/apps/data-pipeline/src/handlers/symbols/operations/qm-symbols.operations.ts index 6f86841..0181983 100644 --- a/apps/data-pipeline/src/handlers/symbols/operations/qm-symbols.operations.ts +++ b/apps/data-pipeline/src/handlers/symbols/operations/qm-symbols.operations.ts @@ -1,6 +1,5 @@ import { getLogger } from '@stock-bot/logger'; -import { getMongoDBClient } from '@stock-bot/mongodb-client'; -import { getPostgreSQLClient } from '@stock-bot/postgres-client'; +import { getMongoDBClient, getPostgreSQLClient } from '../../../clients'; import type { JobPayload } from '../../../types/job-payloads'; const logger = getLogger('sync-qm-symbols'); diff --git a/apps/data-pipeline/src/handlers/symbols/operations/sync-status.operations.ts b/apps/data-pipeline/src/handlers/symbols/operations/sync-status.operations.ts index 768b199..ea83dbf 100644 --- a/apps/data-pipeline/src/handlers/symbols/operations/sync-status.operations.ts +++ b/apps/data-pipeline/src/handlers/symbols/operations/sync-status.operations.ts @@ -1,5 +1,5 @@ import { getLogger } from '@stock-bot/logger'; -import { getPostgreSQLClient } from '@stock-bot/postgres-client'; +import { getPostgreSQLClient } from '../../../clients'; import type { JobPayload } from '../../../types/job-payloads'; const logger = getLogger('sync-status'); diff 
--git a/apps/data-pipeline/src/handlers/symbols/operations/sync-symbols-from-provider.operations.ts b/apps/data-pipeline/src/handlers/symbols/operations/sync-symbols-from-provider.operations.ts index 7a8b9a6..2e3308f 100644 --- a/apps/data-pipeline/src/handlers/symbols/operations/sync-symbols-from-provider.operations.ts +++ b/apps/data-pipeline/src/handlers/symbols/operations/sync-symbols-from-provider.operations.ts @@ -1,6 +1,5 @@ import { getLogger } from '@stock-bot/logger'; -import { getMongoDBClient } from "@stock-bot/mongodb-client"; -import { getPostgreSQLClient } from '@stock-bot/postgres-client'; +import { getMongoDBClient, getPostgreSQLClient } from '../../../clients'; import type { JobPayload, SyncResult } from '../../../types/job-payloads'; const logger = getLogger('enhanced-sync-symbols-from-provider'); diff --git a/apps/data-pipeline/src/index.ts b/apps/data-pipeline/src/index.ts index 6357c01..4ad4f33 100644 --- a/apps/data-pipeline/src/index.ts +++ b/apps/data-pipeline/src/index.ts @@ -4,12 +4,13 @@ import { Hono } from 'hono'; import { cors } from 'hono/cors'; // Library imports import { getLogger, setLoggerConfig, shutdownLoggers } from '@stock-bot/logger'; -import { connectMongoDB } from '@stock-bot/mongodb-client'; -import { connectPostgreSQL } from '@stock-bot/postgres-client'; +import { MongoDBClient } from '@stock-bot/mongodb'; +import { PostgreSQLClient } from '@stock-bot/postgres'; import { QueueManager, type QueueManagerConfig } from '@stock-bot/queue'; import { Shutdown } from '@stock-bot/shutdown'; // Local imports import { enhancedSyncRoutes, healthRoutes, statsRoutes, syncRoutes } from './routes'; +import { setMongoDBClient, setPostgreSQLClient } from './clients'; const config = initializeServiceConfig(); console.log('Data Sync Service Configuration:', JSON.stringify(config, null, 2)); @@ -44,7 +45,8 @@ app.use( ); const PORT = serviceConfig.port; let server: ReturnType | null = null; -// Singleton clients are managed in libraries +let 
mongoClient: MongoDBClient | null = null; +let postgresClient: PostgreSQLClient | null = null; let queueManager: QueueManager | null = null; // Initialize shutdown manager @@ -61,10 +63,10 @@ async function initializeServices() { logger.info('Initializing data sync service...'); try { - // Initialize MongoDB client singleton + // Initialize MongoDB client logger.debug('Connecting to MongoDB...'); const mongoConfig = databaseConfig.mongodb; - await connectMongoDB({ + mongoClient = new MongoDBClient({ uri: mongoConfig.uri, database: mongoConfig.database, host: mongoConfig.host || 'localhost', @@ -74,13 +76,15 @@ async function initializeServices() { socketTimeout: 30000, serverSelectionTimeout: 5000, }, - }); + }, logger); + await mongoClient.connect(); + setMongoDBClient(mongoClient); logger.info('MongoDB connected'); - // Initialize PostgreSQL client singleton + // Initialize PostgreSQL client logger.debug('Connecting to PostgreSQL...'); const pgConfig = databaseConfig.postgres; - await connectPostgreSQL({ + postgresClient = new PostgreSQLClient({ host: pgConfig.host, port: pgConfig.port, database: pgConfig.database, @@ -91,7 +95,9 @@ async function initializeServices() { max: pgConfig.poolSize || 10, idleTimeoutMillis: pgConfig.idleTimeout || 30000, }, - }); + }, logger); + await postgresClient.connect(); + setPostgreSQLClient(postgresClient); logger.info('PostgreSQL connected'); // Initialize queue system (with delayed worker start) @@ -236,11 +242,12 @@ shutdown.onShutdownHigh(async () => { shutdown.onShutdownMedium(async () => { logger.info('Disconnecting from databases...'); try { - const { disconnectMongoDB } = await import('@stock-bot/mongodb-client'); - const { disconnectPostgreSQL } = await import('@stock-bot/postgres-client'); - - await disconnectMongoDB(); - await disconnectPostgreSQL(); + if (mongoClient) { + await mongoClient.disconnect(); + } + if (postgresClient) { + await postgresClient.disconnect(); + } logger.info('Database connections closed'); } 
catch (error) { logger.error('Error closing database connections', { error }); diff --git a/apps/web-api/src/clients.ts b/apps/web-api/src/clients.ts index 5dfe266..4bc8217 100644 --- a/apps/web-api/src/clients.ts +++ b/apps/web-api/src/clients.ts @@ -1,5 +1,5 @@ -import { PostgreSQLClient } from '@stock-bot/postgres-client'; -import { MongoDBClient } from '@stock-bot/mongodb-client'; +import { PostgreSQLClient } from '@stock-bot/postgres'; +import { MongoDBClient } from '@stock-bot/mongodb'; let postgresClient: PostgreSQLClient | null = null; let mongodbClient: MongoDBClient | null = null; diff --git a/apps/web-api/src/index.ts b/apps/web-api/src/index.ts index e4b5f3d..939b743 100644 --- a/apps/web-api/src/index.ts +++ b/apps/web-api/src/index.ts @@ -5,8 +5,8 @@ import { Hono } from 'hono'; import { cors } from 'hono/cors'; import { initializeServiceConfig } from '@stock-bot/config'; import { getLogger, setLoggerConfig, shutdownLoggers } from '@stock-bot/logger'; -import { createAndConnectMongoDBClient, MongoDBClient } from '@stock-bot/mongodb-client'; -import { createAndConnectPostgreSQLClient, PostgreSQLClient } from '@stock-bot/postgres-client'; +import { MongoDBClient } from '@stock-bot/mongodb'; +import { PostgreSQLClient } from '@stock-bot/postgres'; import { Shutdown } from '@stock-bot/shutdown'; import { exchangeRoutes } from './routes/exchange.routes'; import { healthRoutes } from './routes/health.routes'; @@ -77,7 +77,7 @@ async function initializeServices() { // Initialize MongoDB client logger.debug('Connecting to MongoDB...'); const mongoConfig = databaseConfig.mongodb; - mongoClient = await createAndConnectMongoDBClient({ + mongoClient = new MongoDBClient({ uri: mongoConfig.uri, database: mongoConfig.database, host: mongoConfig.host, @@ -87,14 +87,15 @@ async function initializeServices() { socketTimeout: 30000, serverSelectionTimeout: 5000, }, - }); + }, logger); + await mongoClient.connect(); setMongoDBClient(mongoClient); logger.info('MongoDB 
connected'); // Initialize PostgreSQL client logger.debug('Connecting to PostgreSQL...'); const pgConfig = databaseConfig.postgres; - postgresClient = await createAndConnectPostgreSQLClient({ + postgresClient = new PostgreSQLClient({ host: pgConfig.host, port: pgConfig.port, database: pgConfig.database, @@ -105,7 +106,8 @@ async function initializeServices() { max: pgConfig.poolSize || 10, idleTimeoutMillis: pgConfig.idleTimeout || 30000, }, - }); + }, logger); + await postgresClient.connect(); setPostgreSQLClient(postgresClient); logger.info('PostgreSQL connected'); diff --git a/libs/LIBRARY_STANDARDS.md b/libs/LIBRARY_STANDARDS.md new file mode 100644 index 0000000..5b78cbb --- /dev/null +++ b/libs/LIBRARY_STANDARDS.md @@ -0,0 +1,157 @@ +# Library Standards and Patterns + +This document defines the standardized patterns for all libraries in the @stock-bot ecosystem. + +## Export Patterns + +### Standard: Named Exports Only + +All libraries should use **named exports only**. Default exports have been removed for consistency and better tree-shaking. + +**Example:** +```typescript +// ✅ Good - Named exports +export { createCache } from './cache'; +export type { CacheOptions } from './types'; + +// ❌ Bad - Default export +export default createCache; +``` + +## Initialization Patterns + +Libraries follow different initialization patterns based on their purpose: + +### 1. Singleton with Global State +**Use for:** Global services that should have only one instance (config, logger) + +**Example:** Config library +```typescript +let configInstance: ConfigManager | null = null; + +export function initializeConfig(): AppConfig { + if (!configInstance) { + configInstance = new ConfigManager(); + } + return configInstance.initialize(); +} + +export function getConfig(): AppConfig { + if (!configInstance) { + throw new Error('Config not initialized'); + } + return configInstance.get(); +} +``` + +### 2. 
Factory with Registry +**Use for:** Services that need instance reuse based on configuration (cache, logger instances) + +**Example:** Cache library +```typescript +const cacheInstances = new Map(); + +export function createCache(options: CacheOptions): CacheProvider { + if (options.shared) { + const key = generateKey(options); + if (cacheInstances.has(key)) { + return cacheInstances.get(key)!; + } + const cache = new RedisCache(options); + cacheInstances.set(key, cache); + return cache; + } + return new RedisCache(options); +} +``` + +### 3. Pure Factory Functions +**Use for:** Services that need creation logic beyond simple instantiation + +**Example:** Event bus with configuration processing +```typescript +export function createEventBus(config: EventBusConfig): EventBus { + // Process config, set defaults, etc. + const processedConfig = { ...defaultConfig, ...config }; + return new EventBus(processedConfig); +} +``` + +**Note:** Simple instantiation doesn't need factories - use direct class instantiation or DI container. + +### 4. Direct Class Exports +**Use for:** Simple utilities or services managed by DI container + +**Example:** MongoDB library +```typescript +export { MongoDBClient } from './client'; +// No factory function - let DI container handle instantiation +``` + +### 5. 
Singleton Classes +**Use for:** Manager classes that coordinate multiple instances + +**Example:** QueueManager +```typescript +export class QueueManager { + private static instance: QueueManager | null = null; + + static initialize(config: QueueConfig): QueueManager { + if (!QueueManager.instance) { + QueueManager.instance = new QueueManager(config); + } + return QueueManager.instance; + } + + static getInstance(): QueueManager { + if (!QueueManager.instance) { + throw new Error('QueueManager not initialized'); + } + return QueueManager.instance; + } +} +``` + +## Pattern Selection Guide + +Choose the initialization pattern based on these criteria: + +| Pattern | When to Use | Examples | +|---------|-------------|----------| +| **Singleton with Global State** | - One instance per process
- Stateful configuration
- Process-wide settings | config, logger setup | +| **Factory with Registry** | - Multiple instances with same config should share
- Connection pooling
- Resource optimization | cache, logger instances | +| **Pure Factory** | - Complex initialization logic
- Configuration processing needed
- Defaults to apply | event bus (if needed) | +| **Direct Class Export** | - DI container manages lifecycle
- Simple initialization
- No special setup needed | database clients (MongoDB, PostgreSQL, QuestDB), utilities | +| **Singleton Class** | - Coordinates multiple resources
- Central management point
- Graceful shutdown needed | QueueManager, ConnectionManager | + +## Additional Standards + +### Error Handling +- All libraries should throw descriptive errors +- Consider creating custom error classes for domain-specific errors +- Always include context in error messages + +### Configuration +- Accept configuration through constructor/factory parameters +- Validate configuration using Zod schemas +- Provide sensible defaults where appropriate + +### Testing +- All libraries must have unit tests +- Use consistent test file naming: `*.test.ts` +- Mock external dependencies + +### Documentation +- Every library must have a README.md +- Include usage examples +- Document all public APIs with JSDoc + +### TypeScript +- Export all public types +- Use strict TypeScript settings +- Avoid `any` types + +### Dependencies +- Minimize external dependencies +- Use exact versions for critical dependencies +- Document peer dependencies clearly \ No newline at end of file diff --git a/libs/core/di/src/awilix-container.ts b/libs/core/di/src/awilix-container.ts index 3dd4f06..a482518 100644 --- a/libs/core/di/src/awilix-container.ts +++ b/libs/core/di/src/awilix-container.ts @@ -8,9 +8,9 @@ import { createCache, type CacheProvider } from '@stock-bot/cache'; import type { IServiceContainer } from '@stock-bot/handlers'; import { getLogger, type Logger } from '@stock-bot/logger'; import { MongoDBClient } from '@stock-bot/mongodb'; -import { createPostgreSQLClient, type PostgreSQLClient } from '@stock-bot/postgres'; +import { PostgreSQLClient } from '@stock-bot/postgres'; import { ProxyManager } from '@stock-bot/proxy'; -import { createQuestDBClient, type QuestDBClient } from '@stock-bot/questdb'; +import { QuestDBClient } from '@stock-bot/questdb'; import { type QueueManager } from '@stock-bot/queue'; import { asFunction, asValue, createContainer, InjectionMode, type AwilixContainer } from 'awilix'; import { z } from 'zod'; @@ -146,7 +146,7 @@ export function 
createServiceContainer(rawConfig: unknown): AwilixContainer { - return createPostgreSQLClient( + return new PostgreSQLClient( { host: postgresConfig.host, port: postgresConfig.port, @@ -165,7 +165,7 @@ export function createServiceContainer(rawConfig: unknown): AwilixContainer { console.log('Creating QuestDB client with config:', questdbConfig); - return createQuestDBClient( + return new QuestDBClient( { host: questdbConfig.host, httpPort: questdbConfig.httpPort, diff --git a/libs/core/handlers/src/base/BaseHandler.ts b/libs/core/handlers/src/base/BaseHandler.ts index 3efa521..05dfe6a 100644 --- a/libs/core/handlers/src/base/BaseHandler.ts +++ b/libs/core/handlers/src/base/BaseHandler.ts @@ -1,9 +1,9 @@ import { getLogger } from '@stock-bot/logger'; import { createJobHandler, handlerRegistry, type HandlerConfigWithSchedule } from '@stock-bot/types'; import { fetch } from '@stock-bot/utils'; +import type { Collection } from 'mongodb'; import type { IServiceContainer } from '../types/service-container'; import type { ExecutionContext, IHandler } from '../types/types'; -import type { Collection } from 'mongodb'; /** * Abstract base class for all handlers with improved DI @@ -196,7 +196,7 @@ export abstract class BaseHandler implements IHandler { */ protected hasService(name: keyof IServiceContainer): boolean { const service = this[name as keyof this]; - return service != null; + return service !== null && service !== undefined; } /** diff --git a/libs/core/logger/src/index.ts b/libs/core/logger/src/index.ts index db74377..81d9113 100644 --- a/libs/core/logger/src/index.ts +++ b/libs/core/logger/src/index.ts @@ -9,6 +9,3 @@ export { Logger, getLogger, shutdownLoggers, setLoggerConfig } from './logger'; // Type definitions export type { LogLevel, LogContext, LogMetadata, LoggerConfig } from './types'; - -// Default export -export { getLogger as default } from './logger'; diff --git a/libs/data/cache/src/index.ts b/libs/data/cache/src/index.ts index 2594101..b95d2e7 100644 ---
a/libs/data/cache/src/index.ts +++ b/libs/data/cache/src/index.ts @@ -45,6 +45,3 @@ export type { export { RedisConnectionManager } from './connection-manager'; export { CacheKeyGenerator } from './key-generator'; export { RedisCache } from './redis-cache'; - -// Default export for convenience -export default createCache; diff --git a/libs/data/mongodb/src/factory.ts b/libs/data/mongodb/src/factory.ts deleted file mode 100644 index 43ae795..0000000 --- a/libs/data/mongodb/src/factory.ts +++ /dev/null @@ -1,5 +0,0 @@ -// This factory is no longer needed when using Awilix DI -// The MongoDBClient is now registered directly in the DI container -// See: libs/core/di/src/awilix-container.ts - -export { MongoDBClient } from './client'; diff --git a/libs/data/postgres/src/factory.ts b/libs/data/postgres/src/factory.ts deleted file mode 100644 index 268f47a..0000000 --- a/libs/data/postgres/src/factory.ts +++ /dev/null @@ -1,29 +0,0 @@ -import { PostgreSQLClient } from './client'; -import type { PostgreSQLClientConfig, PostgreSQLConnectionOptions, ConnectionEvents } from './types'; - -/** - * Factory function to create a PostgreSQL client instance - */ -export function createPostgreSQLClient( - config: PostgreSQLClientConfig, - logger?: any, - options?: PostgreSQLConnectionOptions, - events?: ConnectionEvents -): PostgreSQLClient { - return new PostgreSQLClient(config, logger, options, events); -} - -/** - * Create and connect a PostgreSQL client - */ -export async function createAndConnectPostgreSQLClient( - config: PostgreSQLClientConfig, - logger?: any, - options?: PostgreSQLConnectionOptions, - events?: ConnectionEvents -): Promise { - const client = createPostgreSQLClient(config, logger, options, events); - await client.connect(); - return client; -} - diff --git a/libs/data/postgres/src/index.ts b/libs/data/postgres/src/index.ts index 9f34ed1..6218f41 100644 --- a/libs/data/postgres/src/index.ts +++ b/libs/data/postgres/src/index.ts @@ -33,10 +33,5 @@ export type { 
DynamicPoolConfig, } from './types'; -// Factory functions -export { - createPostgreSQLClient, - createAndConnectPostgreSQLClient, -} from './factory'; - -// Singleton pattern removed - use factory functions instead +// Note: Factory functions removed - instantiate directly with new PostgreSQLClient() +// or use the Awilix DI container (recommended) diff --git a/libs/data/questdb/src/factory.ts b/libs/data/questdb/src/factory.ts deleted file mode 100644 index df56ebc..0000000 --- a/libs/data/questdb/src/factory.ts +++ /dev/null @@ -1,26 +0,0 @@ -import { QuestDBClient } from './client'; -import type { QuestDBClientConfig, QuestDBConnectionOptions } from './types'; - -/** - * Factory function to create a QuestDB client instance - */ -export function createQuestDBClient( - config: QuestDBClientConfig, - logger?: any, - options?: QuestDBConnectionOptions -): QuestDBClient { - return new QuestDBClient(config, logger, options); -} - -/** - * Create and connect a QuestDB client - */ -export async function createAndConnectQuestDBClient( - config: QuestDBClientConfig, - logger?: any, - options?: QuestDBConnectionOptions -): Promise { - const client = createQuestDBClient(config, logger, options); - await client.connect(); - return client; -} \ No newline at end of file diff --git a/libs/data/questdb/src/index.ts b/libs/data/questdb/src/index.ts index 1add108..46ab368 100644 --- a/libs/data/questdb/src/index.ts +++ b/libs/data/questdb/src/index.ts @@ -28,5 +28,5 @@ export type { InsertResult, } from './types'; -// Utils -export { createQuestDBClient, createAndConnectQuestDBClient } from './factory'; +// Note: Factory functions removed - instantiate directly with new QuestDBClient() +// or use the Awilix DI container (recommended) diff --git a/libs/data/questdb/test/integration.test.ts b/libs/data/questdb/test/integration.test.ts index 7960577..49a02da 100644 --- a/libs/data/questdb/test/integration.test.ts +++ b/libs/data/questdb/test/integration.test.ts @@ -7,7 +7,6 @@ 
import { afterEach, describe, expect, it } from 'bun:test'; import { - createQuestDBClient, QuestDBClient, QuestDBHealthMonitor, QuestDBInfluxWriter, @@ -40,9 +39,17 @@ describe('QuestDB Client Integration', () => { }); describe('Client Initialization', () => { - it('should create client with factory function', () => { - const factoryClient = createQuestDBClient(); - expect(factoryClient).toBeInstanceOf(QuestDBClient); + it('should create client with constructor', () => { + const newClient = new QuestDBClient({ + host: 'localhost', + httpPort: 9000, + pgPort: 8812, + influxPort: 9009, + database: 'questdb', + user: 'admin', + password: 'quest', + }); + expect(newClient).toBeInstanceOf(QuestDBClient); }); it('should initialize all supporting classes', () => { diff --git a/libs/services/event-bus/src/index.ts b/libs/services/event-bus/src/index.ts index d92979e..2bc77db 100644 --- a/libs/services/event-bus/src/index.ts +++ b/libs/services/event-bus/src/index.ts @@ -10,7 +10,4 @@ export function createEventBus(config: EventBusConfig): EventBus { // Re-export everything export { EventBus } from './event-bus'; -export * from './types'; - -// Default export -export default createEventBus; \ No newline at end of file +export * from './types'; \ No newline at end of file