I think I'm pretty much done with extracting the queue and making it reusable; maybe just a few more changes to make the batch names a bit more specific

This commit is contained in:
Boki 2025-06-14 18:22:28 -04:00
parent ad5e353ec3
commit e8c90532d5
14 changed files with 117 additions and 93 deletions

View file

@ -6,10 +6,10 @@ import { Browser } from '@stock-bot/browser';
import { loadEnvVariables } from '@stock-bot/config';
import { getLogger, shutdownLoggers } from '@stock-bot/logger';
import { connectMongoDB, disconnectMongoDB } from '@stock-bot/mongodb-client';
import { processItems, QueueManager, type QueueConfig } from '@stock-bot/queue';
import { Shutdown } from '@stock-bot/shutdown';
import { initializeIBResources } from './providers/ib.tasks';
import { initializeProxyResources } from './providers/proxy.tasks';
import { initializeQueueManager, shutdownQueueManager } from './services/queue-manager.service';
import { healthRoutes, queueRoutes } from './routes';
// Load environment variables
@ -23,6 +23,41 @@ let server: ReturnType<typeof Bun.serve> | null = null;
// Initialize shutdown manager with 15 second timeout
const shutdown = Shutdown.getInstance({ timeout: 15000 });
/**
* Create and configure the enhanced queue manager for data service
*/
function createDataServiceQueueManager(): QueueManager {
// Assemble the queue configuration from environment variables, falling back
// to defaults suitable for local development.
const config: QueueConfig = {
queueName: 'data-service-queue',
// Worker pool size and per-worker concurrency — presumably tuned for this
// service's workload; TODO confirm the defaults match production sizing.
workers: parseInt(process.env.WORKER_COUNT || '5'),
concurrency: parseInt(process.env.WORKER_CONCURRENCY || '20'),
redis: {
// Dragonfly serves as the Redis-compatible backing store (default Redis port).
host: process.env.DRAGONFLY_HOST || 'localhost',
port: parseInt(process.env.DRAGONFLY_PORT || '6379'),
},
providers: [
// Import and initialize providers lazily so provider modules (and their
// transitive imports) are only loaded when the queue manager starts.
async () => {
const { initializeIBProvider } = await import('./providers/ib.provider');
return initializeIBProvider();
},
async () => {
const { initializeProxyProvider } = await import('./providers/proxy.provider');
return initializeProxyProvider();
},
],
// Enable cron-style scheduled jobs registered by the providers.
enableScheduledJobs: true,
};
return new QueueManager(config);
}
// Create singleton instance
export const queueManager = createDataServiceQueueManager();
// Export processItems for direct use
export { processItems };
// Register all routes
app.route('', healthRoutes);
app.route('', queueRoutes);
@ -54,7 +89,7 @@ async function initializeServices() {
// Initialize queue manager (includes batch cache initialization)
logger.info('Starting queue manager initialization...');
await initializeQueueManager();
await queueManager.initialize();
logger.info('Queue manager initialized');
logger.info('All services initialized successfully');
@ -92,7 +127,7 @@ shutdown.onShutdown(async () => {
shutdown.onShutdown(async () => {
logger.info('Shutting down queue manager...');
try {
await shutdownQueueManager();
await queueManager.shutdown();
logger.info('Queue manager shut down successfully');
} catch (error) {
logger.error('Error shutting down queue manager', { error });

View file

@ -14,7 +14,57 @@ export function initializeProxyProvider() {
const proxyProviderConfig: ProviderConfigWithSchedule = {
name: 'proxy',
operations: {
'fetch-from-sources': async _payload => {
// Fetch proxies from all configured sources
logger.info('Processing fetch proxies from sources request');
const { fetchProxiesFromSources } = await import('./proxy.tasks');
const { processItems, queueManager } = await import('../index');
// Fetch all proxies from sources
const proxies = await fetchProxiesFromSources();
logger.info('Fetched proxies from sources', { count: proxies.length });
if (proxies.length === 0) {
logger.warn('No proxies fetched from sources');
return { processed: 0, successful: 0 };
}
// Batch process the proxies through check-proxy operation
const batchResult = await processItems(
proxies,
queueManager,
{
provider: 'proxy',
operation: 'check-proxy',
totalDelayHours: 0.083, // 5 minutes (5/60 hours)
batchSize: 50, // Process 50 proxies per batch
priority: 3,
useBatching: true,
retries: 1,
ttl: 30000, // 30 second timeout per proxy check
removeOnComplete: 5,
removeOnFail: 3,
}
);
logger.info('Batch proxy validation completed', {
totalProxies: proxies.length,
jobsCreated: batchResult.jobsCreated,
mode: batchResult.mode,
batchesCreated: batchResult.batchesCreated,
duration: `${batchResult.duration}ms`
});
return {
processed: proxies.length,
jobsCreated: batchResult.jobsCreated,
batchesCreated: batchResult.batchesCreated,
mode: batchResult.mode
};
},
'check-proxy': async (payload: ProxyInfo) => {
// payload is now the raw proxy info object
logger.debug('Processing proxy check request', {
@ -23,12 +73,6 @@ export function initializeProxyProvider() {
const { checkProxy } = await import('./proxy.tasks');
return checkProxy(payload);
},
'fetch-from-sources': async _payload => {
// Fetch proxies from all configured sources
logger.info('Processing fetch proxies from sources request');
const { fetchProxiesFromSources } = await import('./proxy.tasks');
return fetchProxiesFromSources();
},
},
scheduledJobs: [
{
@ -38,7 +82,7 @@ export function initializeProxyProvider() {
cronPattern: '0 */2 * * *', // Every 2 hours
priority: 5,
description: 'Fetch and validate proxy list from sources',
// immediately: true, // Don't run immediately during startup to avoid conflicts
immediately: true, // Run once immediately on startup so the proxy list is populated right away
},
],
};

View file

@ -244,7 +244,7 @@ async function updateProxyInCache(proxy: ProxyInfo, isWorking: boolean): Promise
const cacheKey = `${PROXY_CONFIG.CACHE_KEY}:${proxy.protocol}://${proxy.host}:${proxy.port}`;
try {
const existing: any = await cache.get(cacheKey);
const existing: ProxyInfo | null = await cache.get(cacheKey);
// For failed proxies, only update if they already exist
if (!isWorking && !existing) {
@ -309,8 +309,8 @@ async function updateProxyInCache(proxy: ProxyInfo, isWorking: boolean): Promise
// Individual task functions
export async function queueProxyFetch(): Promise<string> {
const { queueManager } = await import('../services/queue.service');
const job = await queueManager.addJob({
const { queueManager } = await import('../index');
const job = await queueManager.add('proxy-fetch', {
type: 'proxy-fetch',
provider: 'proxy-service',
operation: 'fetch-and-check',
@ -324,8 +324,8 @@ export async function queueProxyFetch(): Promise<string> {
}
export async function queueProxyCheck(proxies: ProxyInfo[]): Promise<string> {
const { queueManager } = await import('../services/queue.service');
const job = await queueManager.addJob({
const { queueManager } = await import('../index');
const job = await queueManager.add('proxy-check', {
type: 'proxy-check',
provider: 'proxy-service',
operation: 'check-specific',

View file

@ -2,7 +2,7 @@
* Health check routes
*/
import { Hono } from 'hono';
import { queueManager } from '../services/queue-manager.service';
import { queueManager } from '../index';
export const healthRoutes = new Hono();

View file

@ -3,7 +3,7 @@
*/
import { Hono } from 'hono';
import { getLogger } from '@stock-bot/logger';
import { processItems, queueManager } from '../services/queue-manager.service';
import { processItems, queueManager } from '../index';
const logger = getLogger('market-data-routes');
@ -79,7 +79,7 @@ marketDataRoutes.post('/api/process-symbols', async c => {
provider = 'ib',
operation = 'fetch-session',
useBatching = true,
totalDelayMs = 30000,
totalDelayHours = 0.0083, // ~30 seconds (30/3600 hours)
batchSize = 10,
} = await c.req.json();
@ -95,7 +95,7 @@ marketDataRoutes.post('/api/process-symbols', async c => {
});
const result = await processItems(symbols, queueManager, {
totalDelayMs,
totalDelayHours,
useBatching,
batchSize,
priority: 2,

View file

@ -3,7 +3,7 @@
*/
import { Hono } from 'hono';
import { getLogger } from '@stock-bot/logger';
import { queueManager } from '../services/queue-manager.service';
import { queueManager } from '../index';
const logger = getLogger('queue-routes');

View file

@ -1,57 +0,0 @@
/**
* Data Service Queue Manager
* Uses the enhanced @stock-bot/queue library with provider registry
*/
import { getLogger } from '@stock-bot/logger';
import type { QueueConfig } from '@stock-bot/queue';
import { processItems, QueueManager } from '@stock-bot/queue';
const logger = getLogger('queue-manager-service');
/**
* Create and configure the enhanced queue manager for data service
*/
function createDataServiceQueueManager(): QueueManager {
// Assemble the queue configuration from environment variables, falling back
// to defaults suitable for local development.
const config: QueueConfig = {
queueName: 'data-service-queue',
// Worker pool size and per-worker concurrency — presumably tuned for this
// service's workload; TODO confirm the defaults match production sizing.
workers: parseInt(process.env.WORKER_COUNT || '5'),
concurrency: parseInt(process.env.WORKER_CONCURRENCY || '20'),
redis: {
// Dragonfly serves as the Redis-compatible backing store (default Redis port).
host: process.env.DRAGONFLY_HOST || 'localhost',
port: parseInt(process.env.DRAGONFLY_PORT || '6379'),
},
providers: [
// Import and initialize providers lazily so provider modules (and their
// transitive imports) are only loaded when the queue manager starts.
async () => {
const { initializeIBProvider } = await import('../providers/ib.provider');
return initializeIBProvider();
},
async () => {
const { initializeProxyProvider } = await import('../providers/proxy.provider');
return initializeProxyProvider();
},
],
// Enable cron-style scheduled jobs registered by the providers.
enableScheduledJobs: true,
};
return new QueueManager(config);
}
// Create singleton instance
export const queueManager = createDataServiceQueueManager();
// Export convenience functions that use the queue manager
// Initialize the singleton queue manager, logging before and after so startup
// progress is visible in the service logs.
export async function initializeQueueManager() {
logger.info('Initializing data service queue manager...');
await queueManager.initialize();
logger.info('Data service queue manager initialized successfully');
}
// Gracefully shut down the singleton queue manager; intended to be called from
// the service's shutdown hook.
export async function shutdownQueueManager() {
logger.info('Shutting down data service queue manager...');
await queueManager.shutdown();
logger.info('Data service queue manager shutdown complete');
}
// Export processItems for direct use
export { processItems };