linux fs fixes
This commit is contained in:
parent ac23b70146
commit 0b7846fe67
292 changed files with 41947 additions and 41947 deletions

@@ -1,258 +1,258 @@
/**
 * Data Service - Combined live and historical data ingestion with queue-based architecture
 */
import { getLogger } from '@stock-bot/logger';
import { loadEnvVariables } from '@stock-bot/config';
import { Hono } from 'hono';
import { serve } from '@hono/node-server';
import { queueManager } from './services/queue.service';

// Load environment variables
loadEnvVariables();

const app = new Hono();
const logger = getLogger('data-service');

const PORT = parseInt(process.env.DATA_SERVICE_PORT || '3002');

// Health check endpoint
app.get('/health', (c) => {
  return c.json({
    service: 'data-service',
    status: 'healthy',
    timestamp: new Date().toISOString(),
    queue: {
      status: 'running',
      workers: queueManager.getWorkerCount()
    }
  });
});

// Queue management endpoints
app.get('/api/queue/status', async (c) => {
  try {
    const status = await queueManager.getQueueStatus();
    return c.json({ status: 'success', data: status });
  } catch (error) {
    logger.error('Failed to get queue status', { error });
    return c.json({ status: 'error', message: 'Failed to get queue status' }, 500);
  }
});

app.post('/api/queue/job', async (c) => {
  try {
    const jobData = await c.req.json();
    const job = await queueManager.addJob(jobData);
    return c.json({ status: 'success', jobId: job.id });
  } catch (error) {
    logger.error('Failed to add job', { error });
    return c.json({ status: 'error', message: 'Failed to add job' }, 500);
  }
});

// Market data endpoints
app.get('/api/live/:symbol', async (c) => {
  const symbol = c.req.param('symbol');
  logger.info('Live data request', { symbol });

  try {
    // Queue job for live data using Yahoo provider
    const job = await queueManager.addJob({
      type: 'market-data-live',
      service: 'market-data',
      provider: 'yahoo-finance',
      operation: 'live-data',
      payload: { symbol }
    });
    return c.json({
      status: 'success',
      message: 'Live data job queued',
      jobId: job.id,
      symbol
    });
  } catch (error) {
    logger.error('Failed to queue live data job', { symbol, error });
    return c.json({ status: 'error', message: 'Failed to queue live data job' }, 500);
  }
});

app.get('/api/historical/:symbol', async (c) => {
  const symbol = c.req.param('symbol');
  const from = c.req.query('from');
  const to = c.req.query('to');

  logger.info('Historical data request', { symbol, from, to });

  try {
    const fromDate = from ? new Date(from) : new Date(Date.now() - 30 * 24 * 60 * 60 * 1000); // 30 days ago
    const toDate = to ? new Date(to) : new Date(); // Now
    // Queue job for historical data using Yahoo provider
    const job = await queueManager.addJob({
      type: 'market-data-historical',
      service: 'market-data',
      provider: 'yahoo-finance',
      operation: 'historical-data',
      payload: {
        symbol,
        from: fromDate.toISOString(),
        to: toDate.toISOString()
      }
    });
    return c.json({
      status: 'success',
      message: 'Historical data job queued',
      jobId: job.id,
      symbol,
      from: fromDate,
      to: toDate
    });
  } catch (error) {
    logger.error('Failed to queue historical data job', { symbol, from, to, error });
    return c.json({ status: 'error', message: 'Failed to queue historical data job' }, 500);
  }
});

// Proxy management endpoints
app.post('/api/proxy/fetch', async (c) => {
  try {
    const job = await queueManager.addJob({
      type: 'proxy-fetch',
      service: 'proxy',
      provider: 'proxy-service',
      operation: 'fetch-and-check',
      payload: {},
      priority: 5
    });

    return c.json({
      status: 'success',
      jobId: job.id,
      message: 'Proxy fetch job queued'
    });
  } catch (error) {
    logger.error('Failed to queue proxy fetch', { error });
    return c.json({ status: 'error', message: 'Failed to queue proxy fetch' }, 500);
  }
});

app.post('/api/proxy/check', async (c) => {
  try {
    const { proxies } = await c.req.json();
    const job = await queueManager.addJob({
      type: 'proxy-check',
      service: 'proxy',
      provider: 'proxy-service',
      operation: 'check-specific',
      payload: { proxies },
      priority: 8
    });

    return c.json({
      status: 'success',
      jobId: job.id,
      message: `Proxy check job queued for ${proxies.length} proxies`
    });
  } catch (error) {
    logger.error('Failed to queue proxy check', { error });
    return c.json({ status: 'error', message: 'Failed to queue proxy check' }, 500);
  }
});

// Get proxy stats via queue
app.get('/api/proxy/stats', async (c) => {
  try {
    const job = await queueManager.addJob({
      type: 'proxy-stats',
      service: 'proxy',
      provider: 'proxy-service',
      operation: 'get-stats',
      payload: {},
      priority: 3
    });

    return c.json({
      status: 'success',
      jobId: job.id,
      message: 'Proxy stats job queued'
    });
  } catch (error) {
    logger.error('Failed to queue proxy stats', { error });
    return c.json({ status: 'error', message: 'Failed to queue proxy stats' }, 500);
  }
});

// Provider registry endpoints
app.get('/api/providers', async (c) => {
  try {
    const providers = queueManager.getRegisteredProviders();
    return c.json({ status: 'success', providers });
  } catch (error) {
    logger.error('Failed to get providers', { error });
    return c.json({ status: 'error', message: 'Failed to get providers' }, 500);
  }
});

// Endpoint to inspect scheduled jobs
app.get('/api/scheduled-jobs', async (c) => {
  try {
    const jobs = queueManager.getScheduledJobsInfo();
    return c.json({
      status: 'success',
      count: jobs.length,
      jobs
    });
  } catch (error) {
    logger.error('Failed to get scheduled jobs info', { error });
    return c.json({ status: 'error', message: 'Failed to get scheduled jobs' }, 500);
  }
});

// Initialize services
async function initializeServices() {
  logger.info('Initializing data service...');

  try {
    // Initialize queue service
    await queueManager.initialize();
    logger.info('Queue service initialized');
    logger.info('All services initialized successfully');
  } catch (error) {
    logger.error('Failed to initialize services', { error });
    throw error;
  }
}

// Start server
async function startServer() {
  await initializeServices();

  serve({
    fetch: app.fetch,
    port: PORT,
  });

  logger.info(`Data Service started on port ${PORT}`);
  logger.info('Available endpoints:');
  logger.info(' GET /health - Health check');
  logger.info(' GET /api/queue/status - Queue status');
  logger.info(' POST /api/queue/job - Add job to queue');
  logger.info(' GET /api/live/:symbol - Live market data');
  logger.info(' GET /api/historical/:symbol - Historical market data');
  logger.info(' POST /api/proxy/fetch - Queue proxy fetch');
  logger.info(' POST /api/proxy/check - Queue proxy check');
  logger.info(' GET /api/proxy/stats - Queue proxy stats');
  logger.info(' GET /api/providers - List registered providers');
  logger.info(' GET /api/scheduled-jobs - List scheduled jobs');
}

// Graceful shutdown
process.on('SIGINT', async () => {
  logger.info('Received SIGINT, shutting down gracefully...');
  await queueManager.shutdown();
  process.exit(0);
});

process.on('SIGTERM', async () => {
  logger.info('Received SIGTERM, shutting down gracefully...');
  await queueManager.shutdown();
  process.exit(0);
});

startServer().catch(error => {
  logger.error('Failed to start server', { error });
  process.exit(1);
});
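
For quick manual checks, a minimal client sketch; it assumes the service is reachable at http://localhost:3002 (the default DATA_SERVICE_PORT above) and a Node 18+ global fetch:

// Minimal client sketch for the data service above.
// Assumes the service listens on http://localhost:3002 and that
// Node 18+ provides a global fetch.
const BASE = 'http://localhost:3002';

async function main() {
  // Queue a live-data job for AAPL
  const live = await fetch(`${BASE}/api/live/AAPL`);
  console.log(await live.json()); // { status: 'success', jobId: ..., symbol: 'AAPL', ... }

  // Queue a historical-data job for a one-week window
  const from = new Date(Date.now() - 7 * 24 * 60 * 60 * 1000).toISOString();
  const historical = await fetch(`${BASE}/api/historical/AAPL?from=${encodeURIComponent(from)}`);
  console.log(await historical.json());

  // Inspect queue status
  const status = await fetch(`${BASE}/api/queue/status`);
  console.log(await status.json());
}

main().catch(console.error);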

@@ -1,140 +1,140 @@
import { ProxyInfo } from 'libs/http/src/types';
import { ProviderConfig } from '../services/provider-registry.service';
import { getLogger } from '@stock-bot/logger';
import { BatchProcessor } from '../utils/batch-processor';

// Create logger for this provider
const logger = getLogger('proxy-provider');

// This will run each day at the same time of day the app started
const getEvery24HourCron = (): string => {
  const now = new Date();
  const hours = now.getHours();
  const minutes = now.getMinutes();
  return `${minutes} ${hours} * * *`; // Every day at startup time
};

export const proxyProvider: ProviderConfig = {
  name: 'proxy-service',
  service: 'proxy',
  operations: {
    'fetch-and-check': async (payload: { sources?: string[] }) => {
      const { proxyService } = await import('./proxy.tasks');
      const { queueManager } = await import('../services/queue.service');

      const proxies = await proxyService.fetchProxiesFromSources();

      if (proxies.length === 0) {
        return { proxiesFetched: 0, jobsCreated: 0 };
      }

      const batchProcessor = new BatchProcessor(queueManager);

      // Simplified configuration
      const result = await batchProcessor.processItems({
        items: proxies,
        batchSize: parseInt(process.env.PROXY_BATCH_SIZE || '200'),
        totalDelayMs: parseInt(process.env.PROXY_VALIDATION_HOURS || '4') * 60 * 60 * 1000,
        jobNamePrefix: 'proxy',
        operation: 'check-proxy',
        service: 'proxy',
        provider: 'proxy-service',
        priority: 2,
        useBatching: process.env.PROXY_DIRECT_MODE !== 'true', // Simple boolean flag
        createJobData: (proxy: ProxyInfo) => ({
          proxy,
          source: 'fetch-and-check'
        }),
        removeOnComplete: 5,
        removeOnFail: 3
      });

      return {
        proxiesFetched: result.totalItems,
        ...result
      };
    },

    'process-proxy-batch': async (payload: any) => {
      // Process a batch of proxies - the job name comes from the
      // fetch-and-check jobNamePrefix, i.e. process-proxy-batch
      const { queueManager } = await import('../services/queue.service');
      const batchProcessor = new BatchProcessor(queueManager);
      return await batchProcessor.processBatch(
        payload,
        (proxy: ProxyInfo) => ({
          proxy,
          source: payload.config?.source || 'batch-processing'
        })
      );
    },

    'check-proxy': async (payload: {
      proxy: ProxyInfo,
      source?: string,
      batchIndex?: number,
      itemIndex?: number,
      total?: number
    }) => {
      const { checkProxy } = await import('./proxy.tasks');

      try {
        const result = await checkProxy(payload.proxy);

        logger.debug('Proxy validated', {
          proxy: `${payload.proxy.host}:${payload.proxy.port}`,
          isWorking: result.isWorking,
          responseTime: result.responseTime,
          batchIndex: payload.batchIndex
        });

        return {
          result,
          proxy: payload.proxy,
          // Only include batch info if it exists (for batch mode)
          ...(payload.batchIndex !== undefined && {
            batchInfo: {
              batchIndex: payload.batchIndex,
              itemIndex: payload.itemIndex,
              total: payload.total,
              source: payload.source
            }
          })
        };
      } catch (error) {
        logger.warn('Proxy validation failed', {
          proxy: `${payload.proxy.host}:${payload.proxy.port}`,
          error: error instanceof Error ? error.message : String(error),
          batchIndex: payload.batchIndex
        });

        return {
          result: { isWorking: false, error: String(error) },
          proxy: payload.proxy,
          // Only include batch info if it exists (for batch mode)
          ...(payload.batchIndex !== undefined && {
            batchInfo: {
              batchIndex: payload.batchIndex,
              itemIndex: payload.itemIndex,
              total: payload.total,
              source: payload.source
            }
          })
        };
      }
    }
  },
  scheduledJobs: [
    {
      type: 'proxy-maintenance',
      operation: 'fetch-and-check',
      payload: {},
      // TODO: consider a fixed daily time instead of startup time, so app
      // restarts don't keep re-registering the same jobs
      cronPattern: getEvery24HourCron(),
      priority: 5,
      immediately: true,
      description: 'Fetch and validate proxy list from sources'
    }
  ]
};
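
For illustration, a standalone sketch of what getEvery24HourCron produces; the start times are hypothetical:

// Standalone sketch of the cron helper above, with example output.
// The timestamps are hypothetical, purely for illustration.
const cronAtStartup = (startedAt: Date): string =>
  `${startedAt.getMinutes()} ${startedAt.getHours()} * * *`;

// Every restart re-derives a different pattern from the current clock:
console.log(cronAtStartup(new Date('2024-01-02T09:37:00'))); // "37 9 * * *"
console.log(cronAtStartup(new Date('2024-01-02T14:05:00'))); // "5 14 * * *"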

@@ -1,264 +1,264 @@
import { getLogger } from '@stock-bot/logger';
import createCache, { type CacheProvider } from '@stock-bot/cache';
import { HttpClient, ProxyInfo } from '@stock-bot/http';
import pLimit from 'p-limit';

// Shared configuration and utilities
const PROXY_CONFIG = {
  CACHE_KEY: 'proxy',
  CACHE_TTL: 86400, // 24 hours
  CHECK_TIMEOUT: 7000,
  CHECK_IP: '99.246.102.205',
  CHECK_URL: 'https://proxy-detection.stare.gg/?api_key=bd406bf53ddc6abe1d9de5907830a955',
  CONCURRENCY_LIMIT: 100,
  PROXY_SOURCES: [
    { url: 'https://raw.githubusercontent.com/prxchk/proxy-list/main/http.txt', protocol: 'http' },
    { url: 'https://raw.githubusercontent.com/casals-ar/proxy-list/main/http', protocol: 'http' },
    { url: 'https://raw.githubusercontent.com/MuRongPIG/Proxy-Master/main/http.txt', protocol: 'http' },
    { url: 'https://raw.githubusercontent.com/vakhov/fresh-proxy-list/master/http.txt', protocol: 'http' },
    { url: 'https://raw.githubusercontent.com/sunny9577/proxy-scraper/master/proxies.txt', protocol: 'http' },
    { url: 'https://raw.githubusercontent.com/officialputuid/KangProxy/refs/heads/KangProxy/http/http.txt', protocol: 'http' },
    { url: 'https://raw.githubusercontent.com/gfpcom/free-proxy-list/refs/heads/main/list/http.txt', protocol: 'http' },
    { url: 'https://raw.githubusercontent.com/dpangestuw/Free-Proxy/refs/heads/main/http_proxies.txt', protocol: 'http' },
    { url: 'https://raw.githubusercontent.com/gitrecon1455/fresh-proxy-list/refs/heads/main/proxylist.txt', protocol: 'http' },
    { url: 'https://raw.githubusercontent.com/themiralay/Proxy-List-World/refs/heads/master/data.txt', protocol: 'http' },
    { url: 'https://raw.githubusercontent.com/vakhov/fresh-proxy-list/refs/heads/master/http.txt', protocol: 'http' },
    { url: 'https://raw.githubusercontent.com/casa-ls/proxy-list/refs/heads/main/http', protocol: 'http' },
    { url: 'https://raw.githubusercontent.com/databay-labs/free-proxy-list/refs/heads/master/http.txt', protocol: 'http' },
    { url: 'https://raw.githubusercontent.com/BreakingTechFr/Proxy_Free/refs/heads/main/proxies/http.txt', protocol: 'http' },
    { url: 'https://raw.githubusercontent.com/TheSpeedX/PROXY-List/master/http.txt', protocol: 'http' },
    { url: 'https://raw.githubusercontent.com/ErcinDedeoglu/proxies/main/proxies/http.txt', protocol: 'http' },
    { url: 'https://raw.githubusercontent.com/monosans/proxy-list/main/proxies/http.txt', protocol: 'http' },
    { url: 'https://raw.githubusercontent.com/TuanMinPay/live-proxy/master/http.txt', protocol: 'http' },

    // { url: 'https://raw.githubusercontent.com/r00tee/Proxy-List/refs/heads/main/Https.txt', protocol: 'https' },
    // { url: 'https://raw.githubusercontent.com/ErcinDedeoglu/proxies/main/proxies/https.txt', protocol: 'https' },
    // { url: 'https://raw.githubusercontent.com/vakhov/fresh-proxy-list/refs/heads/master/https.txt', protocol: 'https' },
    // { url: 'https://raw.githubusercontent.com/databay-labs/free-proxy-list/refs/heads/master/https.txt', protocol: 'https' },
    // { url: 'https://raw.githubusercontent.com/officialputuid/KangProxy/refs/heads/KangProxy/https/https.txt', protocol: 'https' },
    // { url: 'https://raw.githubusercontent.com/zloi-user/hideip.me/refs/heads/master/https.txt', protocol: 'https' },
    // { url: 'https://raw.githubusercontent.com/gfpcom/free-proxy-list/refs/heads/main/list/https.txt', protocol: 'https' },
  ]
};

// Shared instances (module-scoped, not global)
let logger: ReturnType<typeof getLogger>;
let cache: CacheProvider;
let httpClient: HttpClient;
let concurrencyLimit: ReturnType<typeof pLimit>;

// Initialize shared resources
function initializeSharedResources() {
  if (!logger) {
    logger = getLogger('proxy-tasks');
    cache = createCache('hybrid');
    httpClient = new HttpClient({ timeout: 10000 }, logger);
    concurrencyLimit = pLimit(PROXY_CONFIG.CONCURRENCY_LIMIT);
    logger.info('Proxy tasks initialized');
  }
}

// Individual task functions
export async function queueProxyFetch(): Promise<string> {
  initializeSharedResources();

  const { queueManager } = await import('../services/queue.service');
  const job = await queueManager.addJob({
    type: 'proxy-fetch',
    service: 'proxy',
    provider: 'proxy-service',
    operation: 'fetch-and-check',
    payload: {},
    priority: 5
  });

  const jobId = job.id || 'unknown';
  logger.info('Proxy fetch job queued', { jobId });
  return jobId;
}

export async function queueProxyCheck(proxies: ProxyInfo[]): Promise<string> {
  initializeSharedResources();

  const { queueManager } = await import('../services/queue.service');
  const job = await queueManager.addJob({
    type: 'proxy-check',
    service: 'proxy',
    provider: 'proxy-service',
    operation: 'check-specific',
    payload: { proxies },
    priority: 3
  });

  const jobId = job.id || 'unknown';
  logger.info('Proxy check job queued', { jobId, count: proxies.length });
  return jobId;
}

export async function fetchProxiesFromSources(): Promise<ProxyInfo[]> {
  initializeSharedResources();

  const sources = PROXY_CONFIG.PROXY_SOURCES.map(source =>
    concurrencyLimit(() => fetchProxiesFromSource(source))
  );
  const result = await Promise.all(sources);
  let allProxies: ProxyInfo[] = result.flat();
  allProxies = removeDuplicateProxies(allProxies);
  // await checkProxies(allProxies);
  return allProxies;
}

export async function fetchProxiesFromSource(source: { url: string; protocol: string }): Promise<ProxyInfo[]> {
  initializeSharedResources();

  const allProxies: ProxyInfo[] = [];

  try {
    logger.info(`Fetching proxies from ${source.url}`);

    const response = await httpClient.get(source.url, {
      timeout: 10000
    });

    if (response.status !== 200) {
      logger.warn(`Failed to fetch from ${source.url}: ${response.status}`);
      return [];
    }

    const text = response.data;
    const lines = text.split('\n').filter((line: string) => line.trim());

    for (const line of lines) {
      let trimmed = line.trim();
      trimmed = cleanProxyUrl(trimmed);
      if (!trimmed || trimmed.startsWith('#')) continue;

      // Parse formats like "host:port" or "host:port:user:pass"
      const parts = trimmed.split(':');
      if (parts.length >= 2) {
        const proxy: ProxyInfo = {
          protocol: source.protocol as 'http' | 'https' | 'socks4' | 'socks5',
          host: parts[0],
          port: parseInt(parts[1])
        };

        if (!isNaN(proxy.port) && proxy.host) {
          allProxies.push(proxy);
        }
      }
    }

    logger.info(`Parsed ${allProxies.length} proxies from ${source.url}`);

  } catch (error) {
    logger.error(`Error fetching proxies from ${source.url}`, error);
    return [];
  }

  return allProxies;
}

/**
 * Check if a proxy is working
 */
export async function checkProxy(proxy: ProxyInfo): Promise<ProxyInfo> {
  initializeSharedResources();

  let success = false;
  logger.debug(`Checking Proxy:`, {
    protocol: proxy.protocol,
    host: proxy.host,
    port: proxy.port,
  });

  try {
    // Test the proxy
    const response = await httpClient.get(PROXY_CONFIG.CHECK_URL, {
      proxy,
      timeout: PROXY_CONFIG.CHECK_TIMEOUT
    });

    const isWorking = response.status >= 200 && response.status < 300;

    const result: ProxyInfo = {
      ...proxy,
      isWorking,
      checkedAt: new Date(),
      responseTime: response.responseTime,
    };

    // Only keep the proxy if the response does not leak our real IP
    if (isWorking && !JSON.stringify(response.data).includes(PROXY_CONFIG.CHECK_IP)) {
      success = true;
      await cache.set(`${PROXY_CONFIG.CACHE_KEY}:${proxy.protocol}://${proxy.host}:${proxy.port}`, result, PROXY_CONFIG.CACHE_TTL);
    } else {
      await cache.del(`${PROXY_CONFIG.CACHE_KEY}:${proxy.protocol}://${proxy.host}:${proxy.port}`);
    }

    logger.debug('Proxy check completed', {
      host: proxy.host,
      port: proxy.port,
      isWorking,
    });

    return result;

  } catch (error) {
    const errorMessage = error instanceof Error ? error.message : String(error);

    const result: ProxyInfo = {
      ...proxy,
      isWorking: false,
      error: errorMessage,
      checkedAt: new Date()
    };

    // If the proxy check failed, remove it from cache. The success flag guards
    // this because the abort signal can sometimes fire after a successful check.
    if (!success) {
      await cache.del(`${PROXY_CONFIG.CACHE_KEY}:${proxy.protocol}://${proxy.host}:${proxy.port}`);
    }

    logger.debug('Proxy check failed', {
      host: proxy.host,
      port: proxy.port,
      error: errorMessage
    });

    return result;
  }
}

// Utility functions
function cleanProxyUrl(url: string): string {
  return url
    .replace(/^https?:\/\//, '')
    .replace(/^0+/, '')
    .replace(/:0+(\d)/g, ':$1');
}

function removeDuplicateProxies(proxies: ProxyInfo[]): ProxyInfo[] {
  const seen = new Set<string>();
  const unique: ProxyInfo[] = [];

  for (const proxy of proxies) {
    const key = `${proxy.protocol}://${proxy.host}:${proxy.port}`;
    if (!seen.has(key)) {
      seen.add(key);
      unique.push(proxy);
    }
  }

  return unique;
}

// Optional: Export a convenience object that groups related tasks
export const proxyTasks = {
  queueProxyFetch,
  queueProxyCheck,
  fetchProxiesFromSources,
  fetchProxiesFromSource,
  checkProxy,
};

// Export singleton instance for backward compatibility (optional)
// Remove this if you want to fully move to the task-based approach
export const proxyService = proxyTasks;
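
For illustration, a standalone sketch of the line-cleaning and host:port parsing used in fetchProxiesFromSource; the sample lines are hypothetical:

// Standalone sketch of cleanProxyUrl plus the host:port parsing above.
// Sample input lines are hypothetical.
const clean = (line: string): string =>
  line
    .replace(/^https?:\/\//, '')   // drop a leading scheme
    .replace(/^0+/, '')            // drop leading zeros on the host
    .replace(/:0+(\d)/g, ':$1');   // drop leading zeros on the port

for (const raw of ['http://1.2.3.4:08080', '5.6.7.8:3128:user:pass', '# comment']) {
  const line = clean(raw.trim());
  if (!line || line.startsWith('#')) continue;
  const [host, port] = line.split(':');
  if (host && !isNaN(parseInt(port))) {
    console.log({ host, port: parseInt(port) }); // e.g. { host: '1.2.3.4', port: 8080 }
  }
}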

@@ -1,175 +1,175 @@
import { ProviderConfig } from '../services/provider-registry.service';
import { getLogger } from '@stock-bot/logger';

const logger = getLogger('quotemedia-provider');

export const quotemediaProvider: ProviderConfig = {
  name: 'quotemedia',
  service: 'market-data',
  operations: {
    'live-data': async (payload: { symbol: string; fields?: string[] }) => {
      logger.info('Fetching live data from QuoteMedia', { symbol: payload.symbol });

      // Simulate QuoteMedia API call
      const mockData = {
        symbol: payload.symbol,
        price: Math.random() * 1000 + 100,
        volume: Math.floor(Math.random() * 1000000),
        change: (Math.random() - 0.5) * 20,
        changePercent: (Math.random() - 0.5) * 5,
        timestamp: new Date().toISOString(),
        source: 'quotemedia',
        fields: payload.fields || ['price', 'volume', 'change']
      };

      // Simulate network delay
      await new Promise(resolve => setTimeout(resolve, 100 + Math.random() * 200));

      return mockData;
    },

    'historical-data': async (payload: {
      symbol: string;
      from: Date;
      to: Date;
      interval?: string;
      fields?: string[];
    }) => {
      logger.info('Fetching historical data from QuoteMedia', {
        symbol: payload.symbol,
        from: payload.from,
        to: payload.to,
        interval: payload.interval || '1d'
      });

      // Generate mock historical data
      const days = Math.ceil((payload.to.getTime() - payload.from.getTime()) / (1000 * 60 * 60 * 24));
      const data = [];

      for (let i = 0; i < Math.min(days, 100); i++) {
        const date = new Date(payload.from.getTime() + i * 24 * 60 * 60 * 1000);
        data.push({
          date: date.toISOString().split('T')[0],
          open: Math.random() * 1000 + 100,
          high: Math.random() * 1000 + 100,
          low: Math.random() * 1000 + 100,
          close: Math.random() * 1000 + 100,
          volume: Math.floor(Math.random() * 1000000),
          source: 'quotemedia'
        });
      }

      // Simulate network delay
      await new Promise(resolve => setTimeout(resolve, 200 + Math.random() * 300));

      return {
        symbol: payload.symbol,
        interval: payload.interval || '1d',
        data,
        source: 'quotemedia',
        totalRecords: data.length
      };
    },

    'batch-quotes': async (payload: { symbols: string[]; fields?: string[] }) => {
      logger.info('Fetching batch quotes from QuoteMedia', {
        symbols: payload.symbols,
        count: payload.symbols.length
      });

      const quotes = payload.symbols.map(symbol => ({
        symbol,
        price: Math.random() * 1000 + 100,
        volume: Math.floor(Math.random() * 1000000),
        change: (Math.random() - 0.5) * 20,
        timestamp: new Date().toISOString(),
        source: 'quotemedia'
      }));

      // Simulate network delay
      await new Promise(resolve => setTimeout(resolve, 300 + Math.random() * 200));

      return {
        quotes,
        source: 'quotemedia',
        timestamp: new Date().toISOString(),
        totalSymbols: payload.symbols.length
      };
    },

    'company-profile': async (payload: { symbol: string }) => {
      logger.info('Fetching company profile from QuoteMedia', { symbol: payload.symbol });

      // Simulate company profile data
      const profile = {
        symbol: payload.symbol,
        companyName: `${payload.symbol} Corporation`,
        sector: 'Technology',
        industry: 'Software',
        description: `${payload.symbol} is a leading technology company.`,
        marketCap: Math.floor(Math.random() * 1000000000000),
        employees: Math.floor(Math.random() * 100000),
        website: `https://www.${payload.symbol.toLowerCase()}.com`,
        source: 'quotemedia'
      };

      await new Promise(resolve => setTimeout(resolve, 150 + Math.random() * 100));

      return profile;
    },

    'options-chain': async (payload: { symbol: string; expiration?: string }) => {
      logger.info('Fetching options chain from QuoteMedia', {
        symbol: payload.symbol,
        expiration: payload.expiration
      });

      // Generate mock options data
      const strikes = Array.from({ length: 20 }, (_, i) => 100 + i * 5);
      const calls = strikes.map(strike => ({
        strike,
        bid: Math.random() * 10,
        ask: Math.random() * 10 + 0.5,
        volume: Math.floor(Math.random() * 1000),
        openInterest: Math.floor(Math.random() * 5000)
      }));

      const puts = strikes.map(strike => ({
        strike,
        bid: Math.random() * 10,
        ask: Math.random() * 10 + 0.5,
        volume: Math.floor(Math.random() * 1000),
        openInterest: Math.floor(Math.random() * 5000)
      }));

      await new Promise(resolve => setTimeout(resolve, 400 + Math.random() * 300));
      return {
        symbol: payload.symbol,
        expiration: payload.expiration || new Date(Date.now() + 30 * 24 * 60 * 60 * 1000).toISOString().split('T')[0],
        calls,
        puts,
        source: 'quotemedia'
      };
    }
  },

  scheduledJobs: [
    // {
    //   type: 'quotemedia-premium-refresh',
    //   operation: 'batch-quotes',
    //   payload: { symbols: ['AAPL', 'GOOGL', 'MSFT'] },
    //   cronPattern: '*/2 * * * *', // Every 2 minutes
    //   priority: 7,
    //   description: 'Refresh premium quotes with detailed market data'
    // },
    // {
    //   type: 'quotemedia-options-update',
    //   operation: 'options-chain',
    //   payload: { symbol: 'SPY' },
    //   cronPattern: '*/10 * * * *', // Every 10 minutes
    //   priority: 5,
    //   description: 'Update options chain data for SPY ETF'
    // },
    // {
    //   type: 'quotemedia-profiles',
    //   operation: 'company-profile',
    //   payload: { symbol: 'AAPL' },
    //   cronPattern: '0 9 * * 1-5', // Weekdays at 9 AM
    //   priority: 3,
    //   description: 'Update company profile data'
    // }
  ]
};
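
Since each operation is a plain async function, a handler can be exercised directly in a smoke test without going through the queue; the relative import path here is an assumption about the repo layout:

// Direct-invocation sketch for testing a provider operation without the
// queue. The import path is an assumption, purely for illustration.
import { quotemediaProvider } from './quotemedia.provider';

async function smokeTest() {
  const quote = await quotemediaProvider.operations['live-data']({ symbol: 'AAPL' });
  console.log(quote); // mock quote with price, volume, change, ...

  const chain = await quotemediaProvider.operations['options-chain']({ symbol: 'SPY' });
  console.log(chain.calls.length, chain.puts.length); // 20 20
}

smokeTest().catch(console.error);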
@@ -1,249 +1,249 @@
import { ProviderConfig } from '../services/provider-registry.service';
import { getLogger } from '@stock-bot/logger';

const logger = getLogger('yahoo-provider');

export const yahooProvider: ProviderConfig = {
  name: 'yahoo-finance',
  service: 'market-data',
  operations: {
    'live-data': async (payload: { symbol: string; modules?: string[] }) => {
      logger.info('Fetching live data from Yahoo Finance', { symbol: payload.symbol });

      // Simulate Yahoo Finance API call
      const mockData = {
        symbol: payload.symbol,
        regularMarketPrice: Math.random() * 1000 + 100,
        regularMarketVolume: Math.floor(Math.random() * 1000000),
        regularMarketChange: (Math.random() - 0.5) * 20,
        regularMarketChangePercent: (Math.random() - 0.5) * 5,
        preMarketPrice: Math.random() * 1000 + 100,
        postMarketPrice: Math.random() * 1000 + 100,
        marketCap: Math.floor(Math.random() * 1000000000000),
        peRatio: Math.random() * 50 + 5,
        dividendYield: Math.random() * 0.1,
        fiftyTwoWeekHigh: Math.random() * 1200 + 100,
        fiftyTwoWeekLow: Math.random() * 800 + 50,
        timestamp: Date.now() / 1000,
        source: 'yahoo-finance',
        modules: payload.modules || ['price', 'summaryDetail']
      };

      // Simulate network delay
      await new Promise(resolve => setTimeout(resolve, 150 + Math.random() * 250));

      return mockData;
    },

    'historical-data': async (payload: {
      symbol: string;
      period1: number;
      period2: number;
      interval?: string;
      events?: string;
    }) => {
      logger.info('Fetching historical data from Yahoo Finance', {
        symbol: payload.symbol,
        period1: payload.period1,
        period2: payload.period2,
        interval: payload.interval || '1d'
      });

      // Generate mock historical data (OHLC values are independent random
      // numbers, so high/low are not internally consistent)
      const days = Math.ceil((payload.period2 - payload.period1) / (24 * 60 * 60));
      const data = [];

      for (let i = 0; i < Math.min(days, 100); i++) {
        const timestamp = payload.period1 + i * 24 * 60 * 60;
        data.push({
          timestamp,
          date: new Date(timestamp * 1000).toISOString().split('T')[0],
          open: Math.random() * 1000 + 100,
          high: Math.random() * 1000 + 100,
          low: Math.random() * 1000 + 100,
          close: Math.random() * 1000 + 100,
          adjClose: Math.random() * 1000 + 100,
          volume: Math.floor(Math.random() * 1000000),
          source: 'yahoo-finance'
        });
      }

      // Simulate network delay
      await new Promise(resolve => setTimeout(resolve, 250 + Math.random() * 350));

      return {
        symbol: payload.symbol,
        interval: payload.interval || '1d',
        timestamps: data.map(d => d.timestamp),
        indicators: {
          quote: [{
            open: data.map(d => d.open),
            high: data.map(d => d.high),
            low: data.map(d => d.low),
            close: data.map(d => d.close),
            volume: data.map(d => d.volume)
          }],
          adjclose: [{
            adjclose: data.map(d => d.adjClose)
          }]
        },
        source: 'yahoo-finance',
        totalRecords: data.length
      };
    },

    'search': async (payload: { query: string; quotesCount?: number; newsCount?: number }) => {
      logger.info('Searching Yahoo Finance', { query: payload.query });

      // Generate mock search results
      const quotes = Array.from({ length: payload.quotesCount || 5 }, (_, i) => ({
        symbol: `${payload.query.toUpperCase()}${i}`,
        shortname: `${payload.query} Company ${i}`,
        longname: `${payload.query} Corporation ${i}`,
        exchDisp: 'NASDAQ',
        typeDisp: 'Equity',
        source: 'yahoo-finance'
      }));

      const news = Array.from({ length: payload.newsCount || 3 }, (_, i) => ({
        uuid: `news-${i}-${Date.now()}`,
        title: `${payload.query} News Article ${i}`,
        publisher: 'Financial News',
        providerPublishTime: Date.now() - i * 3600000,
        type: 'STORY',
        source: 'yahoo-finance'
      }));

      await new Promise(resolve => setTimeout(resolve, 200 + Math.random() * 200));

      return {
        quotes,
        news,
        totalQuotes: quotes.length,
        totalNews: news.length,
        source: 'yahoo-finance'
      };
    },

    'financials': async (payload: { symbol: string; type?: 'income' | 'balance' | 'cash' }) => {
      logger.info('Fetching financials from Yahoo Finance', {
        symbol: payload.symbol,
        type: payload.type || 'income'
      });

      // Generate mock financial data
      const financials = {
        symbol: payload.symbol,
        type: payload.type || 'income',
        currency: 'USD',
        annual: Array.from({ length: 4 }, (_, i) => ({
          fiscalYear: 2024 - i,
          revenue: Math.floor(Math.random() * 100000000000),
          netIncome: Math.floor(Math.random() * 10000000000),
          totalAssets: Math.floor(Math.random() * 500000000000),
          totalDebt: Math.floor(Math.random() * 50000000000)
        })),
        quarterly: Array.from({ length: 4 }, (_, i) => ({
          fiscalQuarter: `Q${4 - i} 2024`,
          revenue: Math.floor(Math.random() * 25000000000),
          netIncome: Math.floor(Math.random() * 2500000000)
        })),
        source: 'yahoo-finance'
      };

      await new Promise(resolve => setTimeout(resolve, 300 + Math.random() * 200));

      return financials;
    },

    'earnings': async (payload: { symbol: string; period?: 'annual' | 'quarterly' }) => {
      logger.info('Fetching earnings from Yahoo Finance', {
        symbol: payload.symbol,
        period: payload.period || 'quarterly'
      });

      // Generate mock earnings data
      const earnings = {
        symbol: payload.symbol,
        period: payload.period || 'quarterly',
        earnings: Array.from({ length: 8 }, (_, i) => ({
          quarter: `Q${(i % 4) + 1} ${2024 - Math.floor(i / 4)}`,
          epsEstimate: Math.random() * 5,
          epsActual: Math.random() * 5,
          revenueEstimate: Math.floor(Math.random() * 50000000000),
          revenueActual: Math.floor(Math.random() * 50000000000),
          surprise: (Math.random() - 0.5) * 2
        })),
        source: 'yahoo-finance'
      };

      await new Promise(resolve => setTimeout(resolve, 250 + Math.random() * 150));

      return earnings;
    },

    'recommendations': async (payload: { symbol: string }) => {
      logger.info('Fetching recommendations from Yahoo Finance', { symbol: payload.symbol });

      // Generate mock recommendations
      const recommendations = {
        symbol: payload.symbol,
        current: {
          strongBuy: Math.floor(Math.random() * 10),
          buy: Math.floor(Math.random() * 15),
          hold: Math.floor(Math.random() * 20),
          sell: Math.floor(Math.random() * 5),
          strongSell: Math.floor(Math.random() * 3)
        },
        trend: Array.from({ length: 4 }, (_, i) => ({
          period: `${i}m`,
          strongBuy: Math.floor(Math.random() * 10),
          buy: Math.floor(Math.random() * 15),
          hold: Math.floor(Math.random() * 20),
          sell: Math.floor(Math.random() * 5),
          strongSell: Math.floor(Math.random() * 3)
        })),
        source: 'yahoo-finance'
      };

      await new Promise(resolve => setTimeout(resolve, 180 + Math.random() * 120));
      return recommendations;
    }
  },

  scheduledJobs: [
    // {
    //   type: 'yahoo-market-refresh',
    //   operation: 'live-data',
    //   payload: { symbol: 'AAPL' },
    //   cronPattern: '*/1 * * * *', // Every minute
    //   priority: 8,
    //   description: 'Refresh Apple stock price from Yahoo Finance'
    // },
    // {
    //   type: 'yahoo-sp500-update',
    //   operation: 'live-data',
    //   payload: { symbol: 'SPY' },
    //   cronPattern: '*/2 * * * *', // Every 2 minutes
    //   priority: 9,
    //   description: 'Update S&P 500 ETF price'
    // },
    // {
    //   type: 'yahoo-earnings-check',
    //   operation: 'earnings',
    //   payload: { symbol: 'AAPL' },
    //   cronPattern: '0 16 * * 1-5', // Weekdays at 4 PM (market close)
    //   priority: 6,
    //   description: 'Check earnings data for Apple'
    // }
  ]
};
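A minimal sketch of driving this provider through the queue instead of calling it directly. Note that the handler takes period1/period2 as Unix timestamps in seconds; the import path is an assumption about the repo layout, and it assumes the queue service was already initialized:

// Usage sketch (illustrative): queue a Yahoo historical-data job.
import { queueManager } from './services/queue.service';

async function queueHistory() {
  // assumes queueManager.initialize() has already run
  const nowSec = Math.floor(Date.now() / 1000);
  return queueManager.addJob({
    type: 'market-data-historical',
    service: 'market-data',
    provider: 'yahoo-finance',
    operation: 'historical-data',
    payload: {
      symbol: 'AAPL',
      period1: nowSec - 30 * 24 * 60 * 60, // 30 days ago, in seconds
      period2: nowSec,
      interval: '1d'
    }
  });
}

queueHistory().then(job => console.log('queued', job.id)).catch(console.error);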
@@ -1,24 +1,24 @@
import { proxyService } from './providers/proxy.tasks';
import { getLogger } from '@stock-bot/logger';

// Initialize logger for the demo
const logger = getLogger('proxy-demo');
console.log('🔧 Starting proxy demo...');

/**
 * Example: Custom proxy source with enhanced logging
 */
async function demonstrateCustomProxySource() {
  logger.info('🔧 Demonstrating custom proxy source...');

  try {
    await proxyService.fetchProxiesFromSources();
    console.log('🔧 Demonstrating custom proxy source is DONE!');
  } catch (error) {
    logger.error('❌ Custom source scraping failed', { error });
  }
}

demonstrateCustomProxySource();
@@ -1,115 +1,115 @@
import { getLogger } from '@stock-bot/logger';

export interface JobHandler {
  (payload: any): Promise<any>;
}

export interface ScheduledJob {
  type: string;
  operation: string;
  payload: any;
  cronPattern: string;
  priority?: number;
  description?: string;
  immediately?: boolean;
}

export interface ProviderConfig {
  name: string;
  service: string;
  operations: Record<string, JobHandler>;
  scheduledJobs?: ScheduledJob[];
}

export class ProviderRegistry {
  private logger = getLogger('provider-registry');
  private providers = new Map<string, ProviderConfig>();

  /**
   * Register a provider with its operations
   */
  registerProvider(config: ProviderConfig): void {
    const key = `${config.service}:${config.name}`;
    this.providers.set(key, config);
    this.logger.info(`Registered provider: ${key}`, {
      operations: Object.keys(config.operations),
      scheduledJobs: config.scheduledJobs?.length || 0
    });
  }

  /**
   * Get a job handler for a specific provider and operation
   */
  getHandler(service: string, provider: string, operation: string): JobHandler | null {
    const key = `${service}:${provider}`;
    const providerConfig = this.providers.get(key);

    if (!providerConfig) {
      this.logger.warn(`Provider not found: ${key}`);
      return null;
    }

    const handler = providerConfig.operations[operation];
    if (!handler) {
      this.logger.warn(`Operation not found: ${operation} in provider ${key}`);
      return null;
    }

    return handler;
  }

  /**
   * Get all scheduled jobs across registered providers
   */
  getAllScheduledJobs(): Array<{
    service: string;
    provider: string;
    job: ScheduledJob;
  }> {
    const allJobs: Array<{ service: string; provider: string; job: ScheduledJob }> = [];

    for (const [, config] of this.providers) {
      if (config.scheduledJobs) {
        for (const job of config.scheduledJobs) {
          allJobs.push({
            service: config.service,
            provider: config.name,
            job
          });
        }
      }
    }

    return allJobs;
  }

  /**
   * Get all registered providers
   */
  getProviders(): Array<{ key: string; config: ProviderConfig }> {
    return Array.from(this.providers.entries()).map(([key, config]) => ({
      key,
      config
    }));
  }

  /**
   * Check if a provider exists
   */
  hasProvider(service: string, provider: string): boolean {
    return this.providers.has(`${service}:${provider}`);
  }

  /**
   * Get providers by service type
   */
  getProvidersByService(service: string): ProviderConfig[] {
    return Array.from(this.providers.values()).filter(provider => provider.service === service);
  }

  /**
   * Clear all providers (useful for testing)
   */
  clear(): void {
    this.providers.clear();
    this.logger.info('All providers cleared');
  }
}

export const providerRegistry = new ProviderRegistry();
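A minimal sketch of the registry's contract: register a provider, then resolve one of its handlers by (service, provider, operation). The demo provider and the import path are illustrative, not part of the repo:

// Usage sketch (illustrative): register a provider and resolve a handler.
import { providerRegistry, ProviderConfig } from './provider-registry.service';

const demoProvider: ProviderConfig = {
  name: 'demo',
  service: 'market-data',
  operations: {
    'ping': async (payload: { symbol: string }) => ({ ok: true, symbol: payload.symbol })
  }
};

providerRegistry.registerProvider(demoProvider);

const handler = providerRegistry.getHandler('market-data', 'demo', 'ping');
if (handler) {
  handler({ symbol: 'AAPL' }).then(result => console.log(result)); // { ok: true, symbol: 'AAPL' }
}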
@@ -1,478 +1,478 @@
import { Queue, Worker, QueueEvents } from 'bullmq';
import { getLogger } from '@stock-bot/logger';
import { providerRegistry } from './provider-registry.service';

export interface JobData {
  type: string;
  service: string;
  provider: string;
  operation: string;
  payload: any;
  priority?: number;
  immediately?: boolean;
}

export class QueueService {
  private logger = getLogger('queue-service');
  private queue!: Queue;
  private workers: Worker[] = [];
  private queueEvents!: QueueEvents;
  private isInitialized = false;

  constructor() {
    // Don't initialize in the constructor to allow for proper async initialization
  }

  async initialize() {
    if (this.isInitialized) {
      this.logger.warn('Queue service already initialized');
      return;
    }

    this.logger.info('Initializing queue service...');

    // Register all providers first
    await this.registerProviders();

    const connection = {
      host: process.env.DRAGONFLY_HOST || 'localhost',
      port: parseInt(process.env.DRAGONFLY_PORT || '6379'),
      // Redis-specific options to work around the undeclared key issue
      maxRetriesPerRequest: null,
      retryDelayOnFailover: 100,
      enableReadyCheck: false,
      lazyConnect: true,
      // Disable the offline queue when using standalone Redis/Dragonfly
      enableOfflineQueue: false
    };

    // Worker configuration
    const workerCount = parseInt(process.env.WORKER_COUNT || '5');
    const concurrencyPerWorker = parseInt(process.env.WORKER_CONCURRENCY || '20');

    this.logger.info('Connecting to Redis/Dragonfly', connection);

    try {
      this.queue = new Queue('{data-service-queue}', {
        connection,
        defaultJobOptions: {
          removeOnComplete: 10,
          removeOnFail: 5,
          attempts: 3,
          backoff: {
            type: 'exponential',
            delay: 1000,
          }
        }
      });

      // Create multiple workers
      for (let i = 0; i < workerCount; i++) {
        const worker = new Worker(
          '{data-service-queue}',
          this.processJob.bind(this),
          {
            connection: { ...connection }, // Each worker gets its own connection
            concurrency: concurrencyPerWorker,
            maxStalledCount: 1,
            stalledInterval: 30000,
          }
        );

        // Add worker-specific logging
        worker.on('ready', () => {
          this.logger.info(`Worker ${i + 1} ready`, { workerId: i + 1 });
        });

        worker.on('error', (error) => {
          this.logger.error(`Worker ${i + 1} error`, { workerId: i + 1, error });
        });

        this.workers.push(worker);
      }

      this.queueEvents = new QueueEvents('{data-service-queue}', { connection });

      // Wait for the queue, all workers, and the event stream to be ready
      await this.queue.waitUntilReady();
      await Promise.all(this.workers.map(worker => worker.waitUntilReady()));
      await this.queueEvents.waitUntilReady();

      this.setupEventListeners();
      this.isInitialized = true;
      this.logger.info('Queue service initialized successfully');

      await this.setupScheduledTasks();

    } catch (error) {
      this.logger.error('Failed to initialize queue service', { error });
      throw error;
    }
  }

  getTotalConcurrency() {
    if (!this.isInitialized) {
      return 0;
    }
    return this.workers.reduce((total, worker) => {
      return total + (worker.opts.concurrency || 1);
    }, 0);
  }

  private async registerProviders() {
    this.logger.info('Registering providers...');

    try {
      // Import and register all providers
      const { proxyProvider } = await import('../providers/proxy.provider');
      const { quotemediaProvider } = await import('../providers/quotemedia.provider');
      const { yahooProvider } = await import('../providers/yahoo.provider');

      providerRegistry.registerProvider(proxyProvider);
      providerRegistry.registerProvider(quotemediaProvider);
      providerRegistry.registerProvider(yahooProvider);

      this.logger.info('All providers registered successfully');
    } catch (error) {
      this.logger.error('Failed to register providers', { error });
      throw error;
    }
  }

  private async processJob(job: any) {
    const { service, provider, operation, payload }: JobData = job.data;

    this.logger.info('Processing job', {
      id: job.id,
      service,
      provider,
      operation,
      payloadKeys: Object.keys(payload || {})
    });

    try {
      // Get handler from registry
      const handler = providerRegistry.getHandler(service, provider, operation);

      if (!handler) {
        throw new Error(`No handler found for ${service}:${provider}:${operation}`);
      }

      // Execute the handler
      const result = await handler(payload);

      this.logger.info('Job completed successfully', {
        id: job.id,
        service,
        provider,
        operation
      });

      return result;

    } catch (error) {
      const errorMessage = error instanceof Error ? error.message : String(error);
      this.logger.error('Job failed', {
        id: job.id,
        service,
        provider,
        operation,
        error: errorMessage
      });
      throw error;
    }
  }

  async addBulk(jobs: any[]): Promise<any[]> {
    if (!this.isInitialized) {
      throw new Error('Queue service not initialized. Call initialize() first.');
    }
    return await this.queue.addBulk(jobs);
  }

  private setupEventListeners() {
    this.queueEvents.on('completed', (job) => {
      this.logger.info('Job completed', { id: job.jobId });
    });

    this.queueEvents.on('failed', (job) => {
      this.logger.error('Job failed', { id: job.jobId, error: job.failedReason });
    });

    // Note: Worker-specific events are already set up during worker creation,
    // so no additional progress listeners are needed here
  }

  private async setupScheduledTasks() {
    try {
      this.logger.info('Setting up scheduled tasks from providers...');

      // Get all scheduled jobs from all providers
      const allScheduledJobs = providerRegistry.getAllScheduledJobs();

      if (allScheduledJobs.length === 0) {
        this.logger.warn('No scheduled jobs found in providers');
        return;
      }

      // Get existing repeatable jobs for comparison
      const existingJobs = await this.queue.getRepeatableJobs();
      this.logger.info(`Found ${existingJobs.length} existing repeatable jobs`);

      let successCount = 0;
      let failureCount = 0;
      let updatedCount = 0;
      let newCount = 0;

      // Process each scheduled job
      for (const { service, provider, job } of allScheduledJobs) {
        try {
          const jobKey = `${service}-${provider}-${job.operation}`;

          // Check if this job already exists
          const existingJob = existingJobs.find(existing =>
            existing.key?.includes(jobKey) || existing.name === job.type
          );

          if (existingJob) {
            // Check if the job needs updating (different cron pattern or config)
            const needsUpdate = existingJob.pattern !== job.cronPattern;

            if (needsUpdate) {
              this.logger.info('Job configuration changed, updating', {
                jobKey,
                oldPattern: existingJob.pattern,
                newPattern: job.cronPattern
              });
              updatedCount++;
            } else {
              this.logger.debug('Job unchanged, skipping', { jobKey });
              successCount++;
              continue;
            }
          } else {
            newCount++;
          }

          // Add a small delay between job registrations
          await new Promise(resolve => setTimeout(resolve, 100));

          await this.addRecurringJob({
            type: job.type,
            service,
            provider,
            operation: job.operation,
            payload: job.payload,
            priority: job.priority,
            immediately: job.immediately || false
          }, job.cronPattern);

          this.logger.info('Scheduled job registered', {
            type: job.type,
            service,
            provider,
            operation: job.operation,
            cronPattern: job.cronPattern,
            description: job.description,
            immediately: job.immediately || false
          });

          successCount++;

        } catch (error) {
          this.logger.error('Failed to register scheduled job', {
            type: job.type,
            service,
            provider,
            error: error instanceof Error ? error.message : String(error)
          });
          failureCount++;
        }
      }

      this.logger.info('Scheduled tasks setup complete', {
        total: allScheduledJobs.length,
        successful: successCount,
        failed: failureCount,
        updated: updatedCount,
        new: newCount
      });

    } catch (error) {
      this.logger.error('Failed to setup scheduled tasks', { error });
    }
  }

  async addJob(jobData: JobData, options?: any) {
    if (!this.isInitialized) {
      throw new Error('Queue service not initialized. Call initialize() first.');
    }
    return this.queue.add(jobData.type, jobData, {
      priority: jobData.priority || 0,
      removeOnComplete: 10,
      removeOnFail: 5,
      ...options
    });
  }

  async addRecurringJob(jobData: JobData, cronPattern: string, options?: any) {
    if (!this.isInitialized) {
      throw new Error('Queue service not initialized. Call initialize() first.');
    }

    try {
      // Create a unique job key for this specific job
      const jobKey = `${jobData.service}-${jobData.provider}-${jobData.operation}`;

      // Get all existing repeatable jobs
      const existingJobs = await this.queue.getRepeatableJobs();

      // Find and remove the existing job with the same key if it exists
      const existingJob = existingJobs.find(job => {
        // Check if this is the same job by comparing key components
        return job.key?.includes(jobKey) || job.name === jobData.type;
      });

      if (existingJob) {
        this.logger.info('Updating existing recurring job', {
          jobKey,
          existingPattern: existingJob.pattern,
          newPattern: cronPattern
        });

        // Remove the existing job
        await this.queue.removeRepeatableByKey(existingJob.key);

        // Small delay to ensure cleanup is complete
        await new Promise(resolve => setTimeout(resolve, 100));
      } else {
        this.logger.info('Creating new recurring job', { jobKey, cronPattern });
      }

      // Add the new/updated recurring job
      const job = await this.queue.add(jobData.type, jobData, {
        repeat: {
          pattern: cronPattern,
          tz: 'UTC',
          immediately: jobData.immediately || false,
        },
        // Use a consistent jobId for this specific recurring job
        jobId: `recurring-${jobKey}`,
        removeOnComplete: 1,
        removeOnFail: 1,
        attempts: 2,
        backoff: {
          type: 'fixed',
          delay: 5000
        },
        ...options
      });

      this.logger.info('Recurring job added/updated successfully', {
        jobKey,
        type: jobData.type,
        cronPattern,
        immediately: jobData.immediately || false
      });

      return job;

    } catch (error) {
      this.logger.error('Failed to add/update recurring job', {
        jobData,
        cronPattern,
        error: error instanceof Error ? error.message : String(error)
      });
      throw error;
    }
  }

  async getJobStats() {
    if (!this.isInitialized) {
      throw new Error('Queue service not initialized. Call initialize() first.');
    }
    const [waiting, active, completed, failed, delayed] = await Promise.all([
      this.queue.getWaiting(),
      this.queue.getActive(),
      this.queue.getCompleted(),
      this.queue.getFailed(),
      this.queue.getDelayed()
    ]);

    return {
      waiting: waiting.length,
      active: active.length,
      completed: completed.length,
      failed: failed.length,
      delayed: delayed.length
    };
  }

  async drainQueue() {
    if (!this.isInitialized) {
      throw new Error('Queue service not initialized. Call initialize() first.');
    }
    await this.queue.drain();
  }

  async getQueueStatus() {
    if (!this.isInitialized) {
      throw new Error('Queue service not initialized. Call initialize() first.');
    }
    const stats = await this.getJobStats();
    return {
      ...stats,
      workers: this.getWorkerCount(),
      totalConcurrency: this.getTotalConcurrency(),
      queue: this.queue.name,
      connection: {
        host: process.env.DRAGONFLY_HOST || 'localhost',
        port: parseInt(process.env.DRAGONFLY_PORT || '6379')
      }
    };
  }

  getWorkerCount() {
    if (!this.isInitialized) {
      return 0;
    }
    return this.workers.length;
  }

  getRegisteredProviders() {
    return providerRegistry.getProviders().map(({ key, config }) => ({
      key,
      name: config.name,
      service: config.service,
      operations: Object.keys(config.operations),
      scheduledJobs: config.scheduledJobs?.length || 0
    }));
  }

  getScheduledJobsInfo() {
    return providerRegistry.getAllScheduledJobs().map(({ service, provider, job }) => ({
      id: `${service}-${provider}-${job.type}`,
      service,
      provider,
      type: job.type,
      operation: job.operation,
      cronPattern: job.cronPattern,
      priority: job.priority,
      description: job.description,
      immediately: job.immediately || false
    }));
  }

  async shutdown() {
    if (!this.isInitialized) {
      this.logger.warn('Queue service not initialized, nothing to shutdown');
      return;
    }
    this.logger.info('Shutting down queue service');

    // Close all workers
    this.logger.info(`Closing ${this.workers.length} workers...`);
    await Promise.all(this.workers.map((worker, index) => {
      this.logger.debug(`Closing worker ${index + 1}`);
      return worker.close();
    }));

    await this.queue.close();
    await this.queueEvents.close();
    this.isInitialized = false;
    this.logger.info('Queue service shutdown complete');
  }
}

export const queueManager = new QueueService();
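A minimal lifecycle sketch for this service: initialize, enqueue one job, inspect status, shut down. It assumes a reachable Redis/Dragonfly instance and an illustrative import path:

// Usage sketch (illustrative): typical queue service lifecycle.
import { queueManager } from './queue.service';

async function run() {
  await queueManager.initialize(); // registers providers, spawns workers, schedules cron jobs

  const job = await queueManager.addJob({
    type: 'market-data-live',
    service: 'market-data',
    provider: 'yahoo-finance',
    operation: 'live-data',
    payload: { symbol: 'SPY' }
  });
  console.log('queued', job.id);

  console.log(await queueManager.getQueueStatus());
  await queueManager.shutdown(); // closes workers, queue, and event stream
}

run().catch(console.error);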
|
||||
import { Queue, Worker, QueueEvents } from 'bullmq';
|
||||
import { getLogger } from '@stock-bot/logger';
|
||||
import { providerRegistry } from './provider-registry.service';
|
||||
|
||||
export interface JobData {
|
||||
type: string;
|
||||
service: string;
|
||||
provider: string;
|
||||
operation: string;
|
||||
payload: any;
|
||||
priority?: number;
|
||||
immediately?: boolean;
|
||||
}
|
||||
|
||||
export class QueueService {
|
||||
private logger = getLogger('queue-service');
|
||||
private queue!: Queue;
|
||||
private workers: Worker[] = [];
|
||||
private queueEvents!: QueueEvents;
|
||||
private isInitialized = false;
|
||||
|
||||
constructor() {
|
||||
// Don't initialize in constructor to allow for proper async initialization
|
||||
}
|
||||
|
||||
async initialize() {
|
||||
if (this.isInitialized) {
|
||||
this.logger.warn('Queue service already initialized');
|
||||
return;
|
||||
}
|
||||
|
||||
this.logger.info('Initializing queue service...');
|
||||
|
||||
// Register all providers first
|
||||
await this.registerProviders();
|
||||
|
||||
const connection = {
|
||||
host: process.env.DRAGONFLY_HOST || 'localhost',
|
||||
port: parseInt(process.env.DRAGONFLY_PORT || '6379'),
|
||||
// Add these Redis-specific options to fix the undeclared key issue
|
||||
maxRetriesPerRequest: null,
|
||||
retryDelayOnFailover: 100,
|
||||
enableReadyCheck: false,
|
||||
lazyConnect: true,
|
||||
// Disable Redis Cluster mode if you're using standalone Redis/Dragonfly
|
||||
enableOfflineQueue: false
|
||||
};
|
||||
|
||||
// Worker configuration
|
||||
const workerCount = parseInt(process.env.WORKER_COUNT || '5');
|
||||
const concurrencyPerWorker = parseInt(process.env.WORKER_CONCURRENCY || '20');
|
||||
|
||||
this.logger.info('Connecting to Redis/Dragonfly', connection);
|
||||
|
||||
try {
|
||||
this.queue = new Queue('{data-service-queue}', {
|
||||
connection,
|
||||
defaultJobOptions: {
|
||||
removeOnComplete: 10,
|
||||
removeOnFail: 5,
|
||||
attempts: 3,
|
||||
backoff: {
|
||||
type: 'exponential',
|
||||
delay: 1000,
|
||||
}
|
||||
}
|
||||
});
|
||||
// Create multiple workers
|
||||
for (let i = 0; i < workerCount; i++) {
|
||||
const worker = new Worker(
|
||||
'{data-service-queue}',
|
||||
this.processJob.bind(this),
|
||||
{
|
||||
connection: { ...connection }, // Each worker gets its own connection
|
||||
concurrency: concurrencyPerWorker,
|
||||
maxStalledCount: 1,
|
||||
stalledInterval: 30000,
|
||||
}
|
||||
);
|
||||
// Add worker-specific logging
|
||||
worker.on('ready', () => {
|
||||
this.logger.info(`Worker ${i + 1} ready`, { workerId: i + 1 });
|
||||
});
|
||||
|
||||
worker.on('error', (error) => {
|
||||
this.logger.error(`Worker ${i + 1} error`, { workerId: i + 1, error });
|
||||
});
|
||||
|
||||
this.workers.push(worker);
|
||||
}
|
||||
this.queueEvents = new QueueEvents('{data-service-queue}', { connection }); // Test connection
|
||||
|
||||
// Wait for all workers to be ready
|
||||
await this.queue.waitUntilReady();
|
||||
await Promise.all(this.workers.map(worker => worker.waitUntilReady()));
|
||||
await this.queueEvents.waitUntilReady();
|
||||
|
||||
this.setupEventListeners();
|
||||
this.isInitialized = true;
|
||||
this.logger.info('Queue service initialized successfully');
|
||||
|
||||
await this.setupScheduledTasks();
|
||||
|
||||
} catch (error) {
|
||||
this.logger.error('Failed to initialize queue service', { error });
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
// Update getTotalConcurrency method
|
||||
getTotalConcurrency() {
|
||||
if (!this.isInitialized) {
|
||||
return 0;
|
||||
}
|
||||
return this.workers.reduce((total, worker) => {
|
||||
return total + (worker.opts.concurrency || 1);
|
||||
}, 0);
|
||||
}
|
||||
|
||||
private async registerProviders() {
|
||||
this.logger.info('Registering providers...');
|
||||
|
||||
try {
|
||||
// Import and register all providers
|
||||
const { proxyProvider } = await import('../providers/proxy.provider');
|
||||
const { quotemediaProvider } = await import('../providers/quotemedia.provider');
|
||||
const { yahooProvider } = await import('../providers/yahoo.provider');
|
||||
|
||||
providerRegistry.registerProvider(proxyProvider);
|
||||
providerRegistry.registerProvider(quotemediaProvider);
|
||||
providerRegistry.registerProvider(yahooProvider);
|
||||
|
||||
this.logger.info('All providers registered successfully');
|
||||
} catch (error) {
|
||||
this.logger.error('Failed to register providers', { error });
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
private async processJob(job: any) {
|
||||
const { service, provider, operation, payload }: JobData = job.data;
|
||||
|
||||
this.logger.info('Processing job', {
|
||||
id: job.id,
|
||||
service,
|
||||
provider,
|
||||
operation,
|
||||
payloadKeys: Object.keys(payload || {})
|
||||
});
|
||||
|
||||
try {
|
||||
// Get handler from registry
|
||||
const handler = providerRegistry.getHandler(service, provider, operation);
|
||||
|
||||
if (!handler) {
|
||||
throw new Error(`No handler found for ${service}:${provider}:${operation}`);
|
||||
}
|
||||
|
||||
// Execute the handler
|
||||
const result = await handler(payload);
|
||||
|
||||
this.logger.info('Job completed successfully', {
|
||||
id: job.id,
|
||||
service,
|
||||
provider,
|
||||
operation
|
||||
});
|
||||
|
||||
return result;
|
||||
|
||||
} catch (error) {
|
||||
const errorMessage = error instanceof Error ? error.message : String(error);
|
||||
this.logger.error('Job failed', {
|
||||
id: job.id,
|
||||
service,
|
||||
provider,
|
||||
operation,
|
||||
error: errorMessage
|
||||
});
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
async addBulk(jobs: any[]) : Promise<any[]> {
|
||||
return await this.queue.addBulk(jobs)
|
||||
}
|
||||
private setupEventListeners() {
|
||||
this.queueEvents.on('completed', (job) => {
|
||||
this.logger.info('Job completed', { id: job.jobId });
|
||||
});
|
||||
|
||||
this.queueEvents.on('failed', (job) => {
|
||||
this.logger.error('Job failed', { id: job.jobId, error: job.failedReason });
|
||||
});
|
||||
|
||||
// Note: Worker-specific events are already set up during worker creation
|
||||
// No need for additional progress events since we handle them per-worker
|
||||
}
|
||||
private async setupScheduledTasks() {
|
||||
try {
|
||||
this.logger.info('Setting up scheduled tasks from providers...');
|
||||
|
||||
// Get all scheduled jobs from all providers
|
||||
const allScheduledJobs = providerRegistry.getAllScheduledJobs();
|
||||
|
||||
if (allScheduledJobs.length === 0) {
|
||||
this.logger.warn('No scheduled jobs found in providers');
|
||||
return;
|
||||
}
|
||||
|
||||
// Get existing repeatable jobs for comparison
|
||||
const existingJobs = await this.queue.getRepeatableJobs();
|
||||
this.logger.info(`Found ${existingJobs.length} existing repeatable jobs`);
|
||||
|
||||
let successCount = 0;
|
||||
let failureCount = 0;
|
||||
let updatedCount = 0;
|
||||
let newCount = 0;
|
||||
|
||||
// Process each scheduled job
|
||||
for (const { service, provider, job } of allScheduledJobs) {
|
||||
try {
|
||||
const jobKey = `${service}-${provider}-${job.operation}`;
|
||||
|
||||
// Check if this job already exists
|
||||
const existingJob = existingJobs.find(existing =>
|
||||
existing.key?.includes(jobKey) || existing.name === job.type
|
||||
);
|
||||
|
||||
if (existingJob) {
|
||||
// Check if the job needs updating (different cron pattern or config)
|
||||
const needsUpdate = existingJob.pattern !== job.cronPattern;
|
||||
|
||||
if (needsUpdate) {
|
||||
this.logger.info('Job configuration changed, updating', {
|
||||
jobKey,
|
||||
oldPattern: existingJob.pattern,
|
||||
newPattern: job.cronPattern
|
||||
});
|
||||
updatedCount++;
|
||||
} else {
|
||||
this.logger.debug('Job unchanged, skipping', { jobKey });
|
||||
successCount++;
|
||||
continue;
|
||||
}
|
||||
} else {
|
||||
newCount++;
|
||||
}
|
||||
|
||||
// Add delay between job registrations
|
||||
await new Promise(resolve => setTimeout(resolve, 100));
|
||||
|
||||
await this.addRecurringJob({
|
||||
type: job.type,
|
||||
service: service,
|
||||
provider: provider,
|
||||
operation: job.operation,
|
||||
payload: job.payload,
|
||||
priority: job.priority,
|
||||
immediately: job.immediately || false
|
||||
}, job.cronPattern);
|
||||
|
||||
this.logger.info('Scheduled job registered', {
|
||||
type: job.type,
|
||||
service,
|
||||
provider,
|
||||
operation: job.operation,
|
||||
cronPattern: job.cronPattern,
|
||||
description: job.description,
|
||||
immediately: job.immediately || false
|
||||
});
|
||||
|
||||
successCount++;
|
||||
|
||||
} catch (error) {
|
||||
this.logger.error('Failed to register scheduled job', {
|
||||
type: job.type,
|
||||
service,
|
||||
provider,
|
||||
error: error instanceof Error ? error.message : String(error)
|
||||
});
|
||||
failureCount++;
|
||||
}
|
||||
}
|
||||
|
||||
this.logger.info(`Scheduled tasks setup complete`, {
|
||||
total: allScheduledJobs.length,
|
||||
successful: successCount,
|
||||
failed: failureCount,
|
||||
updated: updatedCount,
|
||||
new: newCount
|
||||
});
|
||||
|
||||
} catch (error) {
|
||||
this.logger.error('Failed to setup scheduled tasks', error);
|
||||
}
|
||||
}
|
||||
|
||||
async addJob(jobData: JobData, options?: any) {
|
||||
if (!this.isInitialized) {
|
||||
throw new Error('Queue service not initialized. Call initialize() first.');
|
||||
}
|
||||
return this.queue.add(jobData.type, jobData, {
|
||||
priority: jobData.priority || 0,
|
||||
removeOnComplete: 10,
|
||||
removeOnFail: 5,
|
||||
...options
|
||||
});
|
||||
}
|
||||
|
||||
async addRecurringJob(jobData: JobData, cronPattern: string, options?: any) {
|
||||
if (!this.isInitialized) {
|
||||
throw new Error('Queue service not initialized. Call initialize() first.');
|
||||
}
|
||||
|
||||
try {
|
||||
// Create a unique job key for this specific job
|
||||
const jobKey = `${jobData.service}-${jobData.provider}-${jobData.operation}`;
|
||||
|
||||
// Get all existing repeatable jobs
|
||||
const existingJobs = await this.queue.getRepeatableJobs();
|
||||
|
||||
// Find and remove the existing job with the same key if it exists
|
||||
const existingJob = existingJobs.find(job => {
|
||||
// Check if this is the same job by comparing key components
|
||||
return job.key?.includes(jobKey) || job.name === jobData.type;
|
||||
});
|
||||
|
||||
if (existingJob) {
|
||||
this.logger.info('Updating existing recurring job', {
|
||||
jobKey,
|
||||
existingPattern: existingJob.pattern,
|
||||
newPattern: cronPattern
|
||||
});
|
||||
|
||||
// Remove the existing job
|
||||
await this.queue.removeRepeatableByKey(existingJob.key);
|
||||
|
||||
// Small delay to ensure cleanup is complete
|
||||
await new Promise(resolve => setTimeout(resolve, 100));
|
||||
} else {
|
||||
this.logger.info('Creating new recurring job', { jobKey, cronPattern });
|
||||
}
|
||||
|
||||
// Add the new/updated recurring job
|
||||
const job = await this.queue.add(jobData.type, jobData, {
|
||||
repeat: {
|
||||
pattern: cronPattern,
|
||||
tz: 'UTC',
|
||||
immediately: jobData.immediately || false,
|
||||
},
|
||||
// Use a consistent jobId for this specific recurring job
|
||||
jobId: `recurring-${jobKey}`,
|
||||
removeOnComplete: 1,
|
||||
removeOnFail: 1,
|
||||
attempts: 2,
|
||||
backoff: {
|
||||
type: 'fixed',
|
||||
delay: 5000
|
||||
},
|
||||
...options
|
||||
});
|
||||
|
||||
this.logger.info('Recurring job added/updated successfully', {
|
||||
jobKey,
|
||||
type: jobData.type,
|
||||
cronPattern,
|
||||
immediately: jobData.immediately || false
|
||||
});
|
||||
|
||||
return job;
|
||||
|
||||
} catch (error) {
|
||||
this.logger.error('Failed to add/update recurring job', {
|
||||
jobData,
|
||||
cronPattern,
|
||||
error: error instanceof Error ? error.message : String(error)
|
||||
});
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
  async getJobStats() {
    if (!this.isInitialized) {
      throw new Error('Queue service not initialized. Call initialize() first.');
    }
    const [waiting, active, completed, failed, delayed] = await Promise.all([
      this.queue.getWaiting(),
      this.queue.getActive(),
      this.queue.getCompleted(),
      this.queue.getFailed(),
      this.queue.getDelayed()
    ]);

    return {
      waiting: waiting.length,
      active: active.length,
      completed: completed.length,
      failed: failed.length,
      delayed: delayed.length
    };
  }

  async drainQueue() {
    if (!this.isInitialized) {
      throw new Error('Queue service not initialized. Call initialize() first.');
    }
    // Remove all waiting and delayed jobs from the queue
    await this.queue.drain();
  }

  async getQueueStatus() {
    if (!this.isInitialized) {
      throw new Error('Queue service not initialized. Call initialize() first.');
    }
    const stats = await this.getJobStats();
    return {
      ...stats,
      workers: this.getWorkerCount(),
      totalConcurrency: this.getTotalConcurrency(),
      queue: this.queue.name,
      connection: {
        host: process.env.DRAGONFLY_HOST || 'localhost',
        port: parseInt(process.env.DRAGONFLY_PORT || '6379', 10)
      }
    };
  }

  getWorkerCount() {
    if (!this.isInitialized) {
      return 0;
    }
    return this.workers.length;
  }

  getRegisteredProviders() {
    return providerRegistry.getProviders().map(({ key, config }) => ({
      key,
      name: config.name,
      service: config.service,
      operations: Object.keys(config.operations),
      scheduledJobs: config.scheduledJobs?.length || 0
    }));
  }

  getScheduledJobsInfo() {
    return providerRegistry.getAllScheduledJobs().map(({ service, provider, job }) => ({
      id: `${service}-${provider}-${job.type}`,
      service,
      provider,
      type: job.type,
      operation: job.operation,
      cronPattern: job.cronPattern,
      priority: job.priority,
      description: job.description,
      immediately: job.immediately || false
    }));
  }

  async shutdown() {
    if (!this.isInitialized) {
      this.logger.warn('Queue service not initialized, nothing to shut down');
      return;
    }
    this.logger.info('Shutting down queue service');

    // Close all workers
    this.logger.info(`Closing ${this.workers.length} workers...`);
    await Promise.all(this.workers.map((worker, index) => {
      this.logger.debug(`Closing worker ${index + 1}`);
      return worker.close();
    }));

    await this.queue.close();
    await this.queueEvents.close();
    this.isInitialized = false;
    this.logger.info('Queue service shutdown complete');
  }
}

export const queueManager = new QueueService();
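
A minimal usage sketch, assuming the service has already been initialized (the guards above require initialize() to have been called first). The job fields mirror the JobData shape used throughout; the symbol and cron pattern are illustrative, not part of this module:

// Hypothetical usage — symbol and schedule are illustrative.
import { queueManager } from './services/queue.service';

async function wireUpJobs() {
  // One-off job
  await queueManager.addJob({
    type: 'market-data-live',
    service: 'market-data',
    provider: 'yahoo-finance',
    operation: 'live-data',
    payload: { symbol: 'AAPL' }
  });

  // Recurring job every 15 minutes (UTC); re-registering with a new
  // pattern replaces the old schedule rather than duplicating it
  await queueManager.addRecurringJob({
    type: 'market-data-historical',
    service: 'market-data',
    provider: 'yahoo-finance',
    operation: 'historical-data',
    payload: {}
  }, '*/15 * * * *');
}

// Graceful teardown
process.on('SIGTERM', () => { void queueManager.shutdown(); });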
@ -1,293 +1,293 @@
import { getLogger } from '@stock-bot/logger';

export interface BatchConfig<T> {
  items: T[];
  batchSize?: number; // Optional - only used for batch mode
  totalDelayMs: number;
  jobNamePrefix: string;
  operation: string;
  service: string;
  provider: string;
  priority?: number;
  createJobData: (item: T, index: number) => any;
  removeOnComplete?: number;
  removeOnFail?: number;
  useBatching?: boolean; // Simple flag to choose mode
}

const logger = getLogger('batch-processor');

export class BatchProcessor {
  constructor(private queueManager: any) {}

  /**
   * Unified method that handles both direct and batch approaches
   */
  async processItems<T>(config: BatchConfig<T>) {
    const { items, useBatching = false } = config;

    if (items.length === 0) {
      return { totalItems: 0, jobsCreated: 0 };
    }

    if (useBatching) {
      return await this.createBatchJobs(config);
    } else {
      return await this.createDirectJobs(config);
    }
  }

  private async createDirectJobs<T>(config: BatchConfig<T>) {
    const {
      items,
      totalDelayMs,
      jobNamePrefix,
      operation,
      service,
      provider,
      priority = 2,
      createJobData,
      removeOnComplete = 5,
      removeOnFail = 3
    } = config;

    const delayPerItem = Math.floor(totalDelayMs / items.length);
    const chunkSize = 100;
    let totalJobsCreated = 0;

    logger.info('Creating direct jobs', {
      totalItems: items.length,
      delayPerItem: `${(delayPerItem / 1000).toFixed(1)}s`,
      estimatedDuration: `${(totalDelayMs / 1000 / 60 / 60).toFixed(1)} hours`
    });

    // Process in chunks to avoid overwhelming Redis
    for (let i = 0; i < items.length; i += chunkSize) {
      const chunk = items.slice(i, i + chunkSize);

      const jobs = chunk.map((item, chunkIndex) => {
        const globalIndex = i + chunkIndex;
        return {
          name: `${jobNamePrefix}-processing`,
          data: {
            type: `${jobNamePrefix}-processing`,
            service,
            provider,
            operation,
            payload: createJobData(item, globalIndex),
            priority
          },
          opts: {
            delay: globalIndex * delayPerItem,
            jobId: `${jobNamePrefix}-${globalIndex}-${Date.now()}`,
            removeOnComplete,
            removeOnFail
          }
        };
      });

      try {
        const createdJobs = await this.queueManager.queue.addBulk(jobs);
        totalJobsCreated += createdJobs.length;

        // Log progress every 500 jobs and at the end
        if (totalJobsCreated % 500 === 0 || i + chunkSize >= items.length) {
          logger.info('Direct job creation progress', {
            created: totalJobsCreated,
            total: items.length,
            percentage: `${((totalJobsCreated / items.length) * 100).toFixed(1)}%`
          });
        }
      } catch (error) {
        logger.error('Failed to create job chunk', {
          startIndex: i,
          chunkSize: chunk.length,
          error: error instanceof Error ? error.message : String(error)
        });
      }
    }

    return {
      totalItems: items.length,
      jobsCreated: totalJobsCreated,
      mode: 'direct'
    };
  }

  private async createBatchJobs<T>(config: BatchConfig<T>) {
    const {
      items,
      batchSize = 200,
      totalDelayMs,
      jobNamePrefix,
      operation,
      service,
      provider,
      priority = 3
    } = config;

    const totalBatches = Math.ceil(items.length / batchSize);
    const delayPerBatch = Math.floor(totalDelayMs / totalBatches);
    const chunkSize = 50; // Create batch jobs in chunks
    let batchJobsCreated = 0;

    logger.info('Creating batch jobs', {
      totalItems: items.length,
      batchSize,
      totalBatches,
      delayPerBatch: `${(delayPerBatch / 1000 / 60).toFixed(2)} minutes`
    });

    // Create batch jobs in chunks
    for (let chunkStart = 0; chunkStart < totalBatches; chunkStart += chunkSize) {
      const chunkEnd = Math.min(chunkStart + chunkSize, totalBatches);
      const batchJobs = [];

      for (let batchIndex = chunkStart; batchIndex < chunkEnd; batchIndex++) {
        const startIndex = batchIndex * batchSize;
        const endIndex = Math.min(startIndex + batchSize, items.length);
        const batchItems = items.slice(startIndex, endIndex);

        batchJobs.push({
          name: `${jobNamePrefix}-batch-processing`,
          data: {
            type: `${jobNamePrefix}-batch-processing`,
            service,
            provider,
            operation: `process-${jobNamePrefix}-batch`,
            payload: {
              items: batchItems,
              batchIndex,
              total: totalBatches,
              config: { ...config, priority: priority - 1 }
            },
            priority
          },
          opts: {
            delay: batchIndex * delayPerBatch,
            jobId: `${jobNamePrefix}-batch-${batchIndex}-${Date.now()}`
          }
        });
      }

      try {
        const createdJobs = await this.queueManager.queue.addBulk(batchJobs);
        batchJobsCreated += createdJobs.length;

        logger.info('Batch chunk created', {
          chunkStart: chunkStart + 1,
          chunkEnd,
          created: createdJobs.length,
          totalCreated: batchJobsCreated,
          progress: `${((chunkEnd / totalBatches) * 100).toFixed(1)}%`
        });
      } catch (error) {
        logger.error('Failed to create batch chunk', {
          chunkStart,
          chunkEnd,
          error: error instanceof Error ? error.message : String(error)
        });
      }

      // Small delay between chunks
      if (chunkEnd < totalBatches) {
        await new Promise(resolve => setTimeout(resolve, 100));
      }
    }

    return {
      totalItems: items.length,
      batchJobsCreated,
      totalBatches,
      estimatedDurationHours: totalDelayMs / 1000 / 60 / 60,
      mode: 'batch'
    };
  }
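
  // Design note: the two-level fan-out above (delayed batch jobs that carry
  // their items in the payload and are later expanded into per-item jobs by
  // processBatch) keeps the number of jobs held in Redis small up front when
  // the item set is very large.
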
  /**
   * Process a batch (called by batch jobs)
   */
  async processBatch<T>(payload: {
    items: T[];
    batchIndex: number;
    total: number;
    config: BatchConfig<T>;
  }, createJobData?: (item: T, index: number) => any) {
    const { items, batchIndex, total, config } = payload;

    logger.info('Processing batch', {
      batchIndex,
      batchSize: items.length,
      total,
      progress: `${((batchIndex + 1) / total * 100).toFixed(2)}%`
    });

    const totalBatchDelayMs = config.totalDelayMs / total;
    const delayPerItem = Math.floor(totalBatchDelayMs / items.length);

    const jobs = items.map((item, itemIndex) => {
      // Use the provided createJobData function or fall back to the config's
      const jobDataFn = createJobData || config.createJobData;

      if (!jobDataFn) {
        throw new Error('createJobData function is required');
      }

      const userData = jobDataFn(item, itemIndex);

      return {
        name: `${config.jobNamePrefix}-processing`,
        data: {
          type: `${config.jobNamePrefix}-processing`,
          service: config.service,
          provider: config.provider,
          operation: config.operation,
          payload: {
            ...userData,
            batchIndex,
            itemIndex,
            total,
            source: userData.source || 'batch-processing'
          },
          priority: config.priority || 2
        },
        opts: {
          delay: itemIndex * delayPerItem,
          jobId: `${config.jobNamePrefix}-${batchIndex}-${itemIndex}-${Date.now()}`,
          removeOnComplete: config.removeOnComplete || 5,
          removeOnFail: config.removeOnFail || 3
        }
      };
    });

    try {
      const createdJobs = await this.queueManager.queue.addBulk(jobs);

      logger.info('Batch processing completed', {
        batchIndex,
        totalItems: items.length,
        jobsCreated: createdJobs.length,
        progress: `${((batchIndex + 1) / total * 100).toFixed(2)}%`
      });

      return {
        batchIndex,
        totalItems: items.length,
        jobsCreated: createdJobs.length,
        jobsFailed: 0
      };
    } catch (error) {
      logger.error('Failed to process batch', {
        batchIndex,
        error: error instanceof Error ? error.message : String(error)
      });

      return {
        batchIndex,
        totalItems: items.length,
        jobsCreated: 0,
        jobsFailed: items.length
      };
    }
  }
}
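
For orientation, a minimal usage sketch of the unified entry point. It is illustrative only: the import paths, symbols, and delay values are assumptions, and queueManager is simply expected to expose the BullMQ-style queue that addBulk is called on above.

// Hypothetical usage — paths, symbols, and timings are illustrative.
import { BatchProcessor } from './batch-processor';
import { queueManager } from './services/queue.service';

async function backfillSymbols() {
  const processor = new BatchProcessor(queueManager);

  const result = await processor.processItems({
    items: ['AAPL', 'MSFT', 'GOOG'],
    totalDelayMs: 60 * 60 * 1000, // spread the work over one hour
    jobNamePrefix: 'symbol',
    operation: 'historical-data',
    service: 'market-data',
    provider: 'yahoo-finance',
    useBatching: false, // direct mode: one delayed job per item
    createJobData: (symbol, index) => ({ symbol, index })
  });

  // e.g. { totalItems: 3, jobsCreated: 3, mode: 'direct' }
  return result;
}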