Running Prettier for cleanup.
This commit is contained in:
parent
fe7733aeb5
commit
d85cd58acd
151 changed files with 29158 additions and 27966 deletions
|
|
@ -1,106 +1,100 @@
|
|||
/**
|
||||
* Data Service - Combined live and historical data ingestion with queue-based architecture
|
||||
*/
|
||||
import { getLogger } from '@stock-bot/logger';
|
||||
import { loadEnvVariables } from '@stock-bot/config';
|
||||
import { Hono } from 'hono';
|
||||
import { Shutdown } from '@stock-bot/shutdown';
|
||||
import { queueManager } from './services/queue.service';
|
||||
import { initializeBatchCache } from './utils/batch-helpers';
|
||||
import { initializeProxyCache } from './providers/proxy.tasks';
|
||||
import {
|
||||
healthRoutes,
|
||||
queueRoutes,
|
||||
marketDataRoutes,
|
||||
proxyRoutes,
|
||||
testRoutes
|
||||
} from './routes';
|
||||
|
||||
// Load environment variables
|
||||
loadEnvVariables();
|
||||
|
||||
const app = new Hono();
|
||||
const logger = getLogger('data-service');
|
||||
const PORT = parseInt(process.env.DATA_SERVICE_PORT || '3002');
|
||||
let server: any = null;
|
||||
|
||||
// Initialize shutdown manager with 15 second timeout
|
||||
const shutdown = Shutdown.getInstance({ timeout: 15000 });
|
||||
|
||||
// Register all routes
|
||||
app.route('', healthRoutes);
|
||||
app.route('', queueRoutes);
|
||||
app.route('', marketDataRoutes);
|
||||
app.route('', proxyRoutes);
|
||||
app.route('', testRoutes);
|
||||
|
||||
// Initialize services
|
||||
async function initializeServices() {
|
||||
logger.info('Initializing data service...');
|
||||
|
||||
try {
|
||||
// Initialize batch cache FIRST - before queue service
|
||||
logger.info('Starting batch cache initialization...');
|
||||
await initializeBatchCache();
|
||||
logger.info('Batch cache initialized');
|
||||
|
||||
// Initialize proxy cache - before queue service
|
||||
logger.info('Starting proxy cache initialization...');
|
||||
await initializeProxyCache();
|
||||
logger.info('Proxy cache initialized');
|
||||
|
||||
// Initialize queue service (Redis connections should be ready now)
|
||||
logger.info('Starting queue service initialization...');
|
||||
await queueManager.initialize();
|
||||
logger.info('Queue service initialized');
|
||||
|
||||
logger.info('All services initialized successfully');
|
||||
} catch (error) {
|
||||
logger.error('Failed to initialize services', { error });
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
// Start server
|
||||
async function startServer() {
|
||||
await initializeServices();
|
||||
// Start the HTTP server using Bun's native serve
|
||||
server = Bun.serve({
|
||||
port: PORT,
|
||||
fetch: app.fetch,
|
||||
development: process.env.NODE_ENV === 'development',
|
||||
});
|
||||
logger.info(`Data Service started on port ${PORT}`);
|
||||
}
|
||||
|
||||
// Register shutdown handlers
|
||||
shutdown.onShutdown(async () => {
|
||||
if (server) {
|
||||
logger.info('Stopping HTTP server...');
|
||||
try {
|
||||
server.stop();
|
||||
logger.info('HTTP server stopped successfully');
|
||||
} catch (error) {
|
||||
logger.error('Error stopping HTTP server', { error });
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
shutdown.onShutdown(async () => {
|
||||
logger.info('Shutting down queue manager...');
|
||||
try {
|
||||
await queueManager.shutdown();
|
||||
logger.info('Queue manager shut down successfully');
|
||||
} catch (error) {
|
||||
logger.error('Error shutting down queue manager', { error });
|
||||
throw error; // Re-throw to mark shutdown as failed
|
||||
}
|
||||
});
|
||||
|
||||
// Start the application
|
||||
startServer().catch(error => {
|
||||
logger.error('Failed to start server', { error });
|
||||
process.exit(1);
|
||||
});
|
||||
|
||||
logger.info('Data service startup initiated with graceful shutdown handlers');
|
||||
/**
|
||||
* Data Service - Combined live and historical data ingestion with queue-based architecture
|
||||
*/
|
||||
import { Hono } from 'hono';
|
||||
import { loadEnvVariables } from '@stock-bot/config';
|
||||
import { getLogger } from '@stock-bot/logger';
|
||||
import { Shutdown } from '@stock-bot/shutdown';
|
||||
import { initializeProxyCache } from './providers/proxy.tasks';
|
||||
import { queueManager } from './services/queue.service';
|
||||
import { initializeBatchCache } from './utils/batch-helpers';
|
||||
import { healthRoutes, marketDataRoutes, proxyRoutes, queueRoutes, testRoutes } from './routes';
|
||||
|
||||
// Load environment variables
|
||||
loadEnvVariables();
|
||||
|
||||
const app = new Hono();
|
||||
const logger = getLogger('data-service');
|
||||
const PORT = parseInt(process.env.DATA_SERVICE_PORT || '3002');
|
||||
let server: any = null;
|
||||
|
||||
// Initialize shutdown manager with 15 second timeout
|
||||
const shutdown = Shutdown.getInstance({ timeout: 15000 });
|
||||
|
||||
// Register all routes
|
||||
app.route('', healthRoutes);
|
||||
app.route('', queueRoutes);
|
||||
app.route('', marketDataRoutes);
|
||||
app.route('', proxyRoutes);
|
||||
app.route('', testRoutes);
|
||||
|
||||
// Initialize services
|
||||
async function initializeServices() {
|
||||
logger.info('Initializing data service...');
|
||||
|
||||
try {
|
||||
// Initialize batch cache FIRST - before queue service
|
||||
logger.info('Starting batch cache initialization...');
|
||||
await initializeBatchCache();
|
||||
logger.info('Batch cache initialized');
|
||||
|
||||
// Initialize proxy cache - before queue service
|
||||
logger.info('Starting proxy cache initialization...');
|
||||
await initializeProxyCache();
|
||||
logger.info('Proxy cache initialized');
|
||||
|
||||
// Initialize queue service (Redis connections should be ready now)
|
||||
logger.info('Starting queue service initialization...');
|
||||
await queueManager.initialize();
|
||||
logger.info('Queue service initialized');
|
||||
|
||||
logger.info('All services initialized successfully');
|
||||
} catch (error) {
|
||||
logger.error('Failed to initialize services', { error });
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
// Start server
|
||||
async function startServer() {
|
||||
await initializeServices();
|
||||
// Start the HTTP server using Bun's native serve
|
||||
server = Bun.serve({
|
||||
port: PORT,
|
||||
fetch: app.fetch,
|
||||
development: process.env.NODE_ENV === 'development',
|
||||
});
|
||||
logger.info(`Data Service started on port ${PORT}`);
|
||||
}
|
||||
|
||||
// Register shutdown handlers
|
||||
shutdown.onShutdown(async () => {
|
||||
if (server) {
|
||||
logger.info('Stopping HTTP server...');
|
||||
try {
|
||||
server.stop();
|
||||
logger.info('HTTP server stopped successfully');
|
||||
} catch (error) {
|
||||
logger.error('Error stopping HTTP server', { error });
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
shutdown.onShutdown(async () => {
|
||||
logger.info('Shutting down queue manager...');
|
||||
try {
|
||||
await queueManager.shutdown();
|
||||
logger.info('Queue manager shut down successfully');
|
||||
} catch (error) {
|
||||
logger.error('Error shutting down queue manager', { error });
|
||||
throw error; // Re-throw to mark shutdown as failed
|
||||
}
|
||||
});
|
||||
|
||||
// Start the application
|
||||
startServer().catch(error => {
|
||||
logger.error('Failed to start server', { error });
|
||||
process.exit(1);
|
||||
});
|
||||
|
||||
logger.info('Data service startup initiated with graceful shutdown handlers');
|
||||
|
|
|
|||
|
|
@ -1,131 +1,131 @@
|
|||
import { ProxyInfo } from 'libs/http/src/types';
|
||||
import { ProviderConfig } from '../services/provider-registry.service';
|
||||
import { getLogger } from '@stock-bot/logger';
|
||||
|
||||
// Create logger for this provider
|
||||
const logger = getLogger('proxy-provider');
|
||||
|
||||
// This will run at the same time each day as when the app started
|
||||
const getEvery24HourCron = (): string => {
|
||||
const now = new Date();
|
||||
const hours = now.getHours();
|
||||
const minutes = now.getMinutes();
|
||||
return `${minutes} ${hours} * * *`; // Every day at startup time
|
||||
};
|
||||
|
||||
export const proxyProvider: ProviderConfig = {
|
||||
name: 'proxy-provider',
|
||||
operations: {'fetch-and-check': async (payload: { sources?: string[] }) => {
|
||||
const { proxyService } = await import('./proxy.tasks');
|
||||
const { queueManager } = await import('../services/queue.service');
|
||||
const { processItems } = await import('../utils/batch-helpers');
|
||||
|
||||
const proxies = await proxyService.fetchProxiesFromSources();
|
||||
|
||||
if (proxies.length === 0) {
|
||||
return { proxiesFetched: 0, jobsCreated: 0 };
|
||||
}
|
||||
|
||||
// Use generic function with routing parameters
|
||||
const result = await processItems(
|
||||
proxies,
|
||||
(proxy, index) => ({
|
||||
proxy,
|
||||
index,
|
||||
source: 'batch-processing'
|
||||
}),
|
||||
queueManager,
|
||||
{
|
||||
totalDelayHours: 4,//parseFloat(process.env.PROXY_VALIDATION_HOURS || '1'),
|
||||
batchSize: parseInt(process.env.PROXY_BATCH_SIZE || '200'),
|
||||
useBatching: process.env.PROXY_DIRECT_MODE !== 'true',
|
||||
priority: 2,
|
||||
provider: 'proxy-provider',
|
||||
operation: 'check-proxy'
|
||||
}
|
||||
);return {
|
||||
proxiesFetched: result.totalItems,
|
||||
jobsCreated: result.jobsCreated,
|
||||
mode: result.mode,
|
||||
batchesCreated: result.batchesCreated,
|
||||
processingTimeMs: result.duration
|
||||
};
|
||||
},
|
||||
'process-batch-items': async (payload: any) => {
|
||||
// Process a batch using the simplified batch helpers
|
||||
const { processBatchJob } = await import('../utils/batch-helpers');
|
||||
const { queueManager } = await import('../services/queue.service');
|
||||
|
||||
return await processBatchJob(payload, queueManager);
|
||||
},
|
||||
|
||||
'check-proxy': async (payload: {
|
||||
proxy: ProxyInfo,
|
||||
source?: string,
|
||||
batchIndex?: number,
|
||||
itemIndex?: number,
|
||||
total?: number
|
||||
}) => {
|
||||
const { checkProxy } = await import('./proxy.tasks');
|
||||
|
||||
try {
|
||||
const result = await checkProxy(payload.proxy);
|
||||
|
||||
logger.debug('Proxy validated', {
|
||||
proxy: `${payload.proxy.host}:${payload.proxy.port}`,
|
||||
isWorking: result.isWorking,
|
||||
responseTime: result.responseTime,
|
||||
batchIndex: payload.batchIndex
|
||||
});
|
||||
|
||||
return {
|
||||
result,
|
||||
proxy: payload.proxy,
|
||||
// Only include batch info if it exists (for batch mode)
|
||||
...(payload.batchIndex !== undefined && {
|
||||
batchInfo: {
|
||||
batchIndex: payload.batchIndex,
|
||||
itemIndex: payload.itemIndex,
|
||||
total: payload.total,
|
||||
source: payload.source
|
||||
}
|
||||
})
|
||||
};
|
||||
} catch (error) {
|
||||
logger.warn('Proxy validation failed', {
|
||||
proxy: `${payload.proxy.host}:${payload.proxy.port}`,
|
||||
error: error instanceof Error ? error.message : String(error),
|
||||
batchIndex: payload.batchIndex
|
||||
});
|
||||
|
||||
return {
|
||||
result: { isWorking: false, error: String(error) },
|
||||
proxy: payload.proxy,
|
||||
// Only include batch info if it exists (for batch mode)
|
||||
...(payload.batchIndex !== undefined && {
|
||||
batchInfo: {
|
||||
batchIndex: payload.batchIndex,
|
||||
itemIndex: payload.itemIndex,
|
||||
total: payload.total,
|
||||
source: payload.source
|
||||
}
|
||||
})
|
||||
};
|
||||
}
|
||||
}
|
||||
},
|
||||
scheduledJobs: [
|
||||
{
|
||||
type: 'proxy-maintenance',
|
||||
operation: 'fetch-and-check',
|
||||
payload: {},
|
||||
// should remove and just run at the same time so app restarts dont keeping adding same jobs
|
||||
cronPattern: getEvery24HourCron(),
|
||||
priority: 5,
|
||||
immediately: true, // Don't run immediately during startup to avoid conflicts
|
||||
description: 'Fetch and validate proxy list from sources'
|
||||
}
|
||||
]
|
||||
};
|
||||
|
||||
|
||||
import { ProxyInfo } from 'libs/http/src/types';
|
||||
import { getLogger } from '@stock-bot/logger';
|
||||
import { ProviderConfig } from '../services/provider-registry.service';
|
||||
|
||||
// Create logger for this provider
|
||||
const logger = getLogger('proxy-provider');
|
||||
|
||||
// This will run at the same time each day as when the app started
|
||||
const getEvery24HourCron = (): string => {
|
||||
const now = new Date();
|
||||
const hours = now.getHours();
|
||||
const minutes = now.getMinutes();
|
||||
return `${minutes} ${hours} * * *`; // Every day at startup time
|
||||
};
|
||||
|
||||
export const proxyProvider: ProviderConfig = {
|
||||
name: 'proxy-provider',
|
||||
operations: {
|
||||
'fetch-and-check': async (payload: { sources?: string[] }) => {
|
||||
const { proxyService } = await import('./proxy.tasks');
|
||||
const { queueManager } = await import('../services/queue.service');
|
||||
const { processItems } = await import('../utils/batch-helpers');
|
||||
|
||||
const proxies = await proxyService.fetchProxiesFromSources();
|
||||
|
||||
if (proxies.length === 0) {
|
||||
return { proxiesFetched: 0, jobsCreated: 0 };
|
||||
}
|
||||
|
||||
// Use generic function with routing parameters
|
||||
const result = await processItems(
|
||||
proxies,
|
||||
(proxy, index) => ({
|
||||
proxy,
|
||||
index,
|
||||
source: 'batch-processing',
|
||||
}),
|
||||
queueManager,
|
||||
{
|
||||
totalDelayHours: 4, //parseFloat(process.env.PROXY_VALIDATION_HOURS || '1'),
|
||||
batchSize: parseInt(process.env.PROXY_BATCH_SIZE || '200'),
|
||||
useBatching: process.env.PROXY_DIRECT_MODE !== 'true',
|
||||
priority: 2,
|
||||
provider: 'proxy-provider',
|
||||
operation: 'check-proxy',
|
||||
}
|
||||
);
|
||||
return {
|
||||
proxiesFetched: result.totalItems,
|
||||
jobsCreated: result.jobsCreated,
|
||||
mode: result.mode,
|
||||
batchesCreated: result.batchesCreated,
|
||||
processingTimeMs: result.duration,
|
||||
};
|
||||
},
|
||||
'process-batch-items': async (payload: any) => {
|
||||
// Process a batch using the simplified batch helpers
|
||||
const { processBatchJob } = await import('../utils/batch-helpers');
|
||||
const { queueManager } = await import('../services/queue.service');
|
||||
|
||||
return await processBatchJob(payload, queueManager);
|
||||
},
|
||||
|
||||
'check-proxy': async (payload: {
|
||||
proxy: ProxyInfo;
|
||||
source?: string;
|
||||
batchIndex?: number;
|
||||
itemIndex?: number;
|
||||
total?: number;
|
||||
}) => {
|
||||
const { checkProxy } = await import('./proxy.tasks');
|
||||
|
||||
try {
|
||||
const result = await checkProxy(payload.proxy);
|
||||
|
||||
logger.debug('Proxy validated', {
|
||||
proxy: `${payload.proxy.host}:${payload.proxy.port}`,
|
||||
isWorking: result.isWorking,
|
||||
responseTime: result.responseTime,
|
||||
batchIndex: payload.batchIndex,
|
||||
});
|
||||
|
||||
return {
|
||||
result,
|
||||
proxy: payload.proxy,
|
||||
// Only include batch info if it exists (for batch mode)
|
||||
...(payload.batchIndex !== undefined && {
|
||||
batchInfo: {
|
||||
batchIndex: payload.batchIndex,
|
||||
itemIndex: payload.itemIndex,
|
||||
total: payload.total,
|
||||
source: payload.source,
|
||||
},
|
||||
}),
|
||||
};
|
||||
} catch (error) {
|
||||
logger.warn('Proxy validation failed', {
|
||||
proxy: `${payload.proxy.host}:${payload.proxy.port}`,
|
||||
error: error instanceof Error ? error.message : String(error),
|
||||
batchIndex: payload.batchIndex,
|
||||
});
|
||||
|
||||
return {
|
||||
result: { isWorking: false, error: String(error) },
|
||||
proxy: payload.proxy,
|
||||
// Only include batch info if it exists (for batch mode)
|
||||
...(payload.batchIndex !== undefined && {
|
||||
batchInfo: {
|
||||
batchIndex: payload.batchIndex,
|
||||
itemIndex: payload.itemIndex,
|
||||
total: payload.total,
|
||||
source: payload.source,
|
||||
},
|
||||
}),
|
||||
};
|
||||
}
|
||||
},
|
||||
},
|
||||
scheduledJobs: [
|
||||
{
|
||||
type: 'proxy-maintenance',
|
||||
operation: 'fetch-and-check',
|
||||
payload: {},
|
||||
// should remove and just run at the same time so app restarts dont keeping adding same jobs
|
||||
cronPattern: getEvery24HourCron(),
|
||||
priority: 5,
|
||||
immediately: true, // Don't run immediately during startup to avoid conflicts
|
||||
description: 'Fetch and validate proxy list from sources',
|
||||
},
|
||||
],
|
||||
};
|
||||
|
|
|
|||
|
|
@ -1,436 +1,536 @@
|
|||
import { getLogger } from '@stock-bot/logger';
|
||||
import { createCache, type CacheProvider } from '@stock-bot/cache';
|
||||
import { HttpClient, ProxyInfo } from '@stock-bot/http';
|
||||
import pLimit from 'p-limit';
|
||||
|
||||
// Type definitions

// Describes one remote proxy-list source plus its rolling validation stats.
export interface ProxySource {
  id: string; // Stable identifier; also used as the cache-stats key suffix
  url: string; // Raw text list URL (one "host:port" per line)
  protocol: string; // Protocol assumed for every proxy from this list
  working?: number; // Optional, used for stats
  total?: number; // Optional, used for stats
  percentWorking?: number; // Optional, used for stats
  lastChecked?: Date; // Optional, used for stats
}
|
||||
|
||||
// Shared configuration and utilities
|
||||
const PROXY_CONFIG = {
|
||||
CACHE_KEY: 'active',
|
||||
CACHE_STATS_KEY: 'stats',
|
||||
CACHE_TTL: 86400, // 24 hours
|
||||
CHECK_TIMEOUT: 7000,
|
||||
CHECK_IP: '99.246.102.205',
|
||||
CHECK_URL: 'https://proxy-detection.stare.gg/?api_key=bd406bf53ddc6abe1d9de5907830a955',
|
||||
CONCURRENCY_LIMIT: 100,
|
||||
PROXY_SOURCES: [
|
||||
{id: 'prxchk', url: 'https://raw.githubusercontent.com/prxchk/proxy-list/main/http.txt', protocol: 'http'},
|
||||
{id: 'casals', url: 'https://raw.githubusercontent.com/casals-ar/proxy-list/main/http', protocol: 'http'},
|
||||
{id: 'sunny9577', url: 'https://raw.githubusercontent.com/sunny9577/proxy-scraper/master/proxies.txt', protocol: 'http'},
|
||||
{id: 'themiralay', url: 'https://raw.githubusercontent.com/themiralay/Proxy-List-World/refs/heads/master/data.txt', protocol: 'http'},
|
||||
{id: 'casa-ls', url: 'https://raw.githubusercontent.com/casa-ls/proxy-list/refs/heads/main/http', protocol: 'http'},
|
||||
{id: 'databay', url: 'https://raw.githubusercontent.com/databay-labs/free-proxy-list/refs/heads/master/http.txt', protocol: 'http'},
|
||||
{id: 'speedx', url: 'https://raw.githubusercontent.com/TheSpeedX/PROXY-List/master/http.txt', protocol: 'http'},
|
||||
{id: 'monosans', url: 'https://raw.githubusercontent.com/monosans/proxy-list/main/proxies/http.txt', protocol: 'http'},
|
||||
|
||||
{id: 'murong', url: 'https://raw.githubusercontent.com/MuRongPIG/Proxy-Master/main/http.txt', protocol: 'http'},
|
||||
{id: 'vakhov-fresh', url: 'https://raw.githubusercontent.com/vakhov/fresh-proxy-list/master/http.txt', protocol: 'http'},
|
||||
{id: 'kangproxy', url: 'https://raw.githubusercontent.com/officialputuid/KangProxy/refs/heads/KangProxy/http/http.txt', protocol: 'http'},
|
||||
{id: 'gfpcom', url: 'https://raw.githubusercontent.com/gfpcom/free-proxy-list/refs/heads/main/list/http.txt', protocol: 'http'},
|
||||
{id: 'dpangestuw', url: 'https://raw.githubusercontent.com/dpangestuw/Free-Proxy/refs/heads/main/http_proxies.txt', protocol: 'http'},
|
||||
{id: 'gitrecon', url: 'https://raw.githubusercontent.com/gitrecon1455/fresh-proxy-list/refs/heads/main/proxylist.txt', protocol: 'http'},
|
||||
{id: 'vakhov-master', url: 'https://raw.githubusercontent.com/vakhov/fresh-proxy-list/refs/heads/master/http.txt', protocol: 'http'},
|
||||
{id: 'breaking-tech', url: 'https://raw.githubusercontent.com/BreakingTechFr/Proxy_Free/refs/heads/main/proxies/http.txt', protocol: 'http'},
|
||||
{id: 'ercindedeoglu', url: 'https://raw.githubusercontent.com/ErcinDedeoglu/proxies/main/proxies/http.txt', protocol: 'http'},
|
||||
{id: 'tuanminpay', url: 'https://raw.githubusercontent.com/TuanMinPay/live-proxy/master/http.txt', protocol: 'http'},
|
||||
|
||||
{id: 'r00tee-https', url: 'https://raw.githubusercontent.com/r00tee/Proxy-List/refs/heads/main/Https.txt', protocol: 'https'},
|
||||
{id: 'ercindedeoglu-https', url: 'https://raw.githubusercontent.com/ErcinDedeoglu/proxies/main/proxies/https.txt', protocol: 'https'},
|
||||
{id: 'vakhov-fresh-https', url: 'https://raw.githubusercontent.com/vakhov/fresh-proxy-list/refs/heads/master/https.txt', protocol: 'https'},
|
||||
{id: 'databay-https', url: 'https://raw.githubusercontent.com/databay-labs/free-proxy-list/refs/heads/master/https.txt', protocol: 'https'},
|
||||
{id: 'kangproxy-https', url: 'https://raw.githubusercontent.com/officialputuid/KangProxy/refs/heads/KangProxy/https/https.txt', protocol: 'https'},
|
||||
{id: 'zloi-user-https', url: 'https://raw.githubusercontent.com/zloi-user/hideip.me/refs/heads/master/https.txt', protocol: 'https'},
|
||||
{id: 'gfpcom-https', url: 'https://raw.githubusercontent.com/gfpcom/free-proxy-list/refs/heads/main/list/https.txt', protocol: 'https'},
|
||||
]
|
||||
};
|
||||
|
||||
// Shared instances (module-scoped, not global). These are assigned by
// initializeProxyCache() at startup, or lazily by initializeSharedResources().
let logger: ReturnType<typeof getLogger>;
let cache: CacheProvider;
let httpClient: HttpClient;
let concurrencyLimit: ReturnType<typeof pLimit>;
// Per-source fetch/validation counters; reset at the start of each fetch cycle.
let proxyStats: ProxySource[] = PROXY_CONFIG.PROXY_SOURCES.map(source => ({
  id: source.id,
  total: 0,
  working: 0,
  lastChecked: new Date(),
  protocol: source.protocol,
  url: source.url,
}));
|
||||
|
||||
|
||||
// make a function that takes in source id and a boolean success and updates the proxyStats array
|
||||
async function updateProxyStats(sourceId: string, success: boolean) {
|
||||
const source = proxyStats.find(s => s.id === sourceId);
|
||||
if (source !== undefined) {
|
||||
if(typeof source.working !== 'number')
|
||||
source.working = 0;
|
||||
if(typeof source.total !== 'number')
|
||||
source.total = 0;
|
||||
source.total += 1;
|
||||
if (success) {
|
||||
source.working += 1;
|
||||
}
|
||||
source.percentWorking = source.working / source.total * 100;
|
||||
source.lastChecked = new Date();
|
||||
await cache.set(`${PROXY_CONFIG.CACHE_STATS_KEY}:${source.id}`, source, PROXY_CONFIG.CACHE_TTL);
|
||||
return source;
|
||||
} else {
|
||||
logger.warn(`Unknown proxy source: ${sourceId}`);
|
||||
}
|
||||
}
|
||||
|
||||
// make a function that resets proxyStats
|
||||
async function resetProxyStats(): Promise<void> {
|
||||
proxyStats = PROXY_CONFIG.PROXY_SOURCES.map(source => ({
|
||||
id: source.id,
|
||||
total: 0,
|
||||
working: 0,
|
||||
lastChecked: new Date(),
|
||||
protocol: source.protocol,
|
||||
url: source.url,
|
||||
}));
|
||||
for (const source of proxyStats) {
|
||||
await cache.set(`${PROXY_CONFIG.CACHE_STATS_KEY}:${source.id}`, source, PROXY_CONFIG.CACHE_TTL);
|
||||
}
|
||||
return Promise.resolve();
|
||||
}
|
||||
|
||||
/**
 * Update proxy data in cache with working/total stats and average response time.
 * Working proxies get a fresh TTL; failed proxies keep their existing TTL so
 * stale entries age out, and failed proxies that are not already cached are
 * skipped entirely. Cache errors are logged, never thrown.
 * @param proxy - The proxy to update
 * @param isWorking - Whether the proxy is currently working
 */
async function updateProxyInCache(proxy: ProxyInfo, isWorking: boolean): Promise<void> {
  const cacheKey = `${PROXY_CONFIG.CACHE_KEY}:${proxy.protocol}://${proxy.host}:${proxy.port}`;

  try {
    const existing: any = await cache.get(cacheKey);

    // For failed proxies, only update if they already exist
    if (!isWorking && !existing) {
      logger.debug('Proxy not in cache, skipping failed update', {
        proxy: `${proxy.host}:${proxy.port}`
      });
      return;
    }

    // Calculate new average response time if we have a response time
    let newAverageResponseTime = existing?.averageResponseTime;
    if (proxy.responseTime !== undefined) {
      const existingAvg = existing?.averageResponseTime || 0;
      const existingTotal = existing?.total || 0;

      // Calculate weighted average: (existing_avg * existing_count + new_response) / (existing_count + 1)
      newAverageResponseTime = existingTotal > 0
        ? ((existingAvg * existingTotal) + proxy.responseTime) / (existingTotal + 1)
        : proxy.responseTime;
    }

    // Build updated proxy data
    const updated = {
      ...existing,
      ...proxy, // Keep latest proxy info
      total: (existing?.total || 0) + 1,
      working: isWorking ? (existing?.working || 0) + 1 : (existing?.working || 0),
      isWorking,
      lastChecked: new Date(),
      // Add firstSeen only for new entries
      ...(existing ? {} : { firstSeen: new Date() }),
      // Update average response time if we calculated a new one
      ...(newAverageResponseTime !== undefined ? { averageResponseTime: newAverageResponseTime } : {})
    };

    // Calculate success rate
    updated.successRate = updated.total > 0 ? (updated.working / updated.total) * 100 : 0;

    // Save to cache: reset TTL for working proxies, keep existing TTL for failed ones
    const cacheOptions = isWorking ? PROXY_CONFIG.CACHE_TTL : undefined;
    await cache.set(cacheKey, updated, cacheOptions);

    logger.debug(`Updated ${isWorking ? 'working' : 'failed'} proxy in cache`, {
      proxy: `${proxy.host}:${proxy.port}`,
      working: updated.working,
      total: updated.total,
      successRate: updated.successRate.toFixed(1) + '%',
      avgResponseTime: updated.averageResponseTime ? `${updated.averageResponseTime.toFixed(0)}ms` : 'N/A'
    });

  } catch (error) {
    // Deliberately swallowed: proxy checking must continue even when the
    // cache backend is unavailable.
    logger.error('Failed to update proxy in cache', {
      proxy: `${proxy.host}:${proxy.port}`,
      error: error instanceof Error ? error.message : String(error)
    });
  }
}
|
||||
|
||||
/**
|
||||
* Initialize proxy cache for use during application startup
|
||||
* This should be called before any proxy operations
|
||||
*/
|
||||
export async function initializeProxyCache(): Promise<void> {
|
||||
logger = getLogger('proxy-tasks');
|
||||
cache = createCache({
|
||||
keyPrefix: 'proxy:',
|
||||
ttl: PROXY_CONFIG.CACHE_TTL,
|
||||
enableMetrics: true
|
||||
});
|
||||
|
||||
logger.info('Initializing proxy cache...');
|
||||
await cache.waitForReady(10000);
|
||||
logger.info('Proxy cache initialized successfully');
|
||||
|
||||
// Initialize other shared resources that don't require cache
|
||||
httpClient = new HttpClient({ timeout: 10000 }, logger);
|
||||
concurrencyLimit = pLimit(PROXY_CONFIG.CONCURRENCY_LIMIT);
|
||||
|
||||
logger.info('Proxy tasks initialized');
|
||||
}
|
||||
|
||||
async function initializeSharedResources() {
|
||||
if (!logger) {
|
||||
// If not initialized at startup, initialize with fallback mode
|
||||
logger = getLogger('proxy-tasks');
|
||||
cache = createCache({
|
||||
keyPrefix: 'proxy:',
|
||||
ttl: PROXY_CONFIG.CACHE_TTL,
|
||||
enableMetrics: true
|
||||
});
|
||||
|
||||
httpClient = new HttpClient({ timeout: 10000 }, logger);
|
||||
concurrencyLimit = pLimit(PROXY_CONFIG.CONCURRENCY_LIMIT);
|
||||
|
||||
logger.info('Proxy tasks initialized (fallback mode)');
|
||||
}
|
||||
}
|
||||
|
||||
// Individual task functions
|
||||
export async function queueProxyFetch(): Promise<string> {
|
||||
await initializeSharedResources();
|
||||
|
||||
const { queueManager } = await import('../services/queue.service');
|
||||
const job = await queueManager.addJob({
|
||||
type: 'proxy-fetch',
|
||||
provider: 'proxy-service',
|
||||
operation: 'fetch-and-check',
|
||||
payload: {},
|
||||
priority: 5
|
||||
});
|
||||
|
||||
const jobId = job.id || 'unknown';
|
||||
logger.info('Proxy fetch job queued', { jobId });
|
||||
return jobId;
|
||||
}
|
||||
|
||||
export async function queueProxyCheck(proxies: ProxyInfo[]): Promise<string> {
|
||||
await initializeSharedResources();
|
||||
|
||||
const { queueManager } = await import('../services/queue.service');
|
||||
const job = await queueManager.addJob({
|
||||
type: 'proxy-check',
|
||||
provider: 'proxy-service',
|
||||
operation: 'check-specific',
|
||||
payload: { proxies },
|
||||
priority: 3
|
||||
});
|
||||
|
||||
const jobId = job.id || 'unknown';
|
||||
logger.info('Proxy check job queued', { jobId, count: proxies.length });
|
||||
return jobId;
|
||||
}
|
||||
|
||||
export async function fetchProxiesFromSources(): Promise<ProxyInfo[]> {
|
||||
await initializeSharedResources();
|
||||
await resetProxyStats();
|
||||
|
||||
// Ensure concurrencyLimit is available before using it
|
||||
if (!concurrencyLimit) {
|
||||
logger.error('concurrencyLimit not initialized, using sequential processing');
|
||||
const result = [];
|
||||
for (const source of PROXY_CONFIG.PROXY_SOURCES) {
|
||||
const proxies = await fetchProxiesFromSource(source);
|
||||
result.push(...proxies);
|
||||
}
|
||||
let allProxies: ProxyInfo[] = result;
|
||||
allProxies = removeDuplicateProxies(allProxies);
|
||||
return allProxies;
|
||||
}
|
||||
|
||||
const sources = PROXY_CONFIG.PROXY_SOURCES.map(source =>
|
||||
concurrencyLimit(() => fetchProxiesFromSource(source))
|
||||
);
|
||||
const result = await Promise.all(sources);
|
||||
let allProxies: ProxyInfo[] = result.flat();
|
||||
allProxies = removeDuplicateProxies(allProxies);
|
||||
// await checkProxies(allProxies);
|
||||
return allProxies;
|
||||
}
|
||||
|
||||
/**
 * Download and parse one proxy source list.
 * Lines are expected as "host:port" (optionally "host:port:user:pass");
 * blank lines and '#' comments are skipped. Any fetch or parse failure
 * returns an empty array rather than throwing.
 * @param source - The source descriptor (url + assumed protocol)
 * @returns Parsed proxies; empty on non-200 response or exception.
 */
export async function fetchProxiesFromSource(source: ProxySource): Promise<ProxyInfo[]> {
  await initializeSharedResources();

  const allProxies: ProxyInfo[] = [];

  try {
    logger.info(`Fetching proxies from ${source.url}`);

    const response = await httpClient.get(source.url, {
      timeout: 10000
    });

    if (response.status !== 200) {
      logger.warn(`Failed to fetch from ${source.url}: ${response.status}`);
      return [];
    }

    const text = response.data;
    const lines = text.split('\n').filter((line: string) => line.trim());

    for (const line of lines) {
      let trimmed = line.trim();
      // Normalize scheme prefixes and zero-padded values before parsing.
      trimmed = cleanProxyUrl(trimmed);
      if (!trimmed || trimmed.startsWith('#')) continue;

      // Parse formats like "host:port" or "host:port:user:pass"
      const parts = trimmed.split(':');
      if (parts.length >= 2) {
        const proxy: ProxyInfo = {
          source: source.id,
          protocol: source.protocol as 'http' | 'https' | 'socks4' | 'socks5',
          host: parts[0],
          port: parseInt(parts[1])
        };

        // Discard lines whose port is not numeric or whose host is empty.
        if (!isNaN(proxy.port) && proxy.host) {
          allProxies.push(proxy);
        }
      }
    }

    logger.info(`Parsed ${allProxies.length} proxies from ${source.url}`);

  } catch (error) {
    logger.error(`Error fetching proxies from ${source.url}`, error);
    return [];
  }

  return allProxies;
}
|
||||
|
||||
/**
 * Check if a proxy is working.
 * Probes PROXY_CONFIG.CHECK_URL through the proxy. The check counts as a
 * success only when the HTTP status is 2xx AND the response body does not
 * contain PROXY_CONFIG.CHECK_IP (a leaked address means the proxy is
 * transparent). Updates the cache entry and per-source stats either way,
 * and never throws — failures come back as `isWorking: false` results.
 * @param proxy - The proxy to validate
 * @returns The proxy augmented with isWorking / lastChecked / responseTime
 *          (or error message on failure).
 */
export async function checkProxy(proxy: ProxyInfo): Promise<ProxyInfo> {
  await initializeSharedResources();

  let success = false;
  logger.debug(`Checking Proxy:`, {
    protocol: proxy.protocol,
    host: proxy.host,
    port: proxy.port,
  });

  try {
    // Test the proxy
    const response = await httpClient.get(PROXY_CONFIG.CHECK_URL, {
      proxy,
      timeout: PROXY_CONFIG.CHECK_TIMEOUT
    });

    const isWorking = response.status >= 200 && response.status < 300;
    const result: ProxyInfo = {
      ...proxy,
      isWorking,
      lastChecked: new Date(),
      responseTime: response.responseTime,
    };

    // A 2xx answer that still leaks CHECK_IP means the proxy did not
    // anonymize us; record it as a failure in cache and stats.
    if (isWorking && !JSON.stringify(response.data).includes(PROXY_CONFIG.CHECK_IP)) {
      success = true;
      await updateProxyInCache(result, true);
    } else {
      await updateProxyInCache(result, false);
    }

    if( proxy.source ){
      await updateProxyStats(proxy.source, success);
    }

    logger.debug('Proxy check completed', {
      host: proxy.host,
      port: proxy.port,
      isWorking,
    });

    return result;
  } catch (error) {
    const errorMessage = error instanceof Error ? error.message : String(error);
    const result: ProxyInfo = {
      ...proxy,
      isWorking: false,
      error: errorMessage,
      lastChecked: new Date()
    };

    // Update cache for failed proxy (increment total, don't update TTL)
    await updateProxyInCache(result, false);

    // `success` is still false on this path.
    if( proxy.source ){
      await updateProxyStats(proxy.source, success);
    }

    logger.debug('Proxy check failed', {
      host: proxy.host,
      port: proxy.port,
      error: errorMessage
    });

    return result;
  }
}
|
||||
|
||||
// Utility functions
|
||||
function cleanProxyUrl(url: string): string {
|
||||
return url
|
||||
.replace(/^https?:\/\//, '')
|
||||
.replace(/^0+/, '')
|
||||
.replace(/:0+(\d)/g, ':$1');
|
||||
}
|
||||
|
||||
function removeDuplicateProxies(proxies: ProxyInfo[]): ProxyInfo[] {
|
||||
const seen = new Set<string>();
|
||||
const unique: ProxyInfo[] = [];
|
||||
|
||||
for (const proxy of proxies) {
|
||||
const key = `${proxy.protocol}://${proxy.host}:${proxy.port}`;
|
||||
if (!seen.has(key)) {
|
||||
seen.add(key);
|
||||
unique.push(proxy);
|
||||
}
|
||||
}
|
||||
|
||||
return unique;
|
||||
}
|
||||
|
||||
// Optional: Export a convenience object that groups related tasks
|
||||
export const proxyTasks = {
|
||||
queueProxyFetch,
|
||||
queueProxyCheck,
|
||||
fetchProxiesFromSources,
|
||||
fetchProxiesFromSource,
|
||||
checkProxy,
|
||||
};
|
||||
|
||||
// Export singleton instance for backward compatibility (optional)
|
||||
// Remove this if you want to fully move to the task-based approach
|
||||
export const proxyService = proxyTasks;
|
||||
import pLimit from 'p-limit';
|
||||
import { createCache, type CacheProvider } from '@stock-bot/cache';
|
||||
import { HttpClient, ProxyInfo } from '@stock-bot/http';
|
||||
import { getLogger } from '@stock-bot/logger';
|
||||
|
||||
// Type definitions
|
||||
export interface ProxySource {
|
||||
id: string;
|
||||
url: string;
|
||||
protocol: string;
|
||||
working?: number; // Optional, used for stats
|
||||
total?: number; // Optional, used for stats
|
||||
percentWorking?: number; // Optional, used for stats
|
||||
lastChecked?: Date; // Optional, used for stats
|
||||
}
|
||||
|
||||
// Shared configuration and utilities
|
||||
const PROXY_CONFIG = {
|
||||
CACHE_KEY: 'active',
|
||||
CACHE_STATS_KEY: 'stats',
|
||||
CACHE_TTL: 86400, // 24 hours
|
||||
CHECK_TIMEOUT: 7000,
|
||||
CHECK_IP: '99.246.102.205',
|
||||
CHECK_URL: 'https://proxy-detection.stare.gg/?api_key=bd406bf53ddc6abe1d9de5907830a955',
|
||||
CONCURRENCY_LIMIT: 100,
|
||||
PROXY_SOURCES: [
|
||||
{
|
||||
id: 'prxchk',
|
||||
url: 'https://raw.githubusercontent.com/prxchk/proxy-list/main/http.txt',
|
||||
protocol: 'http',
|
||||
},
|
||||
{
|
||||
id: 'casals',
|
||||
url: 'https://raw.githubusercontent.com/casals-ar/proxy-list/main/http',
|
||||
protocol: 'http',
|
||||
},
|
||||
{
|
||||
id: 'sunny9577',
|
||||
url: 'https://raw.githubusercontent.com/sunny9577/proxy-scraper/master/proxies.txt',
|
||||
protocol: 'http',
|
||||
},
|
||||
{
|
||||
id: 'themiralay',
|
||||
url: 'https://raw.githubusercontent.com/themiralay/Proxy-List-World/refs/heads/master/data.txt',
|
||||
protocol: 'http',
|
||||
},
|
||||
{
|
||||
id: 'casa-ls',
|
||||
url: 'https://raw.githubusercontent.com/casa-ls/proxy-list/refs/heads/main/http',
|
||||
protocol: 'http',
|
||||
},
|
||||
{
|
||||
id: 'databay',
|
||||
url: 'https://raw.githubusercontent.com/databay-labs/free-proxy-list/refs/heads/master/http.txt',
|
||||
protocol: 'http',
|
||||
},
|
||||
{
|
||||
id: 'speedx',
|
||||
url: 'https://raw.githubusercontent.com/TheSpeedX/PROXY-List/master/http.txt',
|
||||
protocol: 'http',
|
||||
},
|
||||
{
|
||||
id: 'monosans',
|
||||
url: 'https://raw.githubusercontent.com/monosans/proxy-list/main/proxies/http.txt',
|
||||
protocol: 'http',
|
||||
},
|
||||
|
||||
{
|
||||
id: 'murong',
|
||||
url: 'https://raw.githubusercontent.com/MuRongPIG/Proxy-Master/main/http.txt',
|
||||
protocol: 'http',
|
||||
},
|
||||
{
|
||||
id: 'vakhov-fresh',
|
||||
url: 'https://raw.githubusercontent.com/vakhov/fresh-proxy-list/master/http.txt',
|
||||
protocol: 'http',
|
||||
},
|
||||
{
|
||||
id: 'kangproxy',
|
||||
url: 'https://raw.githubusercontent.com/officialputuid/KangProxy/refs/heads/KangProxy/http/http.txt',
|
||||
protocol: 'http',
|
||||
},
|
||||
{
|
||||
id: 'gfpcom',
|
||||
url: 'https://raw.githubusercontent.com/gfpcom/free-proxy-list/refs/heads/main/list/http.txt',
|
||||
protocol: 'http',
|
||||
},
|
||||
{
|
||||
id: 'dpangestuw',
|
||||
url: 'https://raw.githubusercontent.com/dpangestuw/Free-Proxy/refs/heads/main/http_proxies.txt',
|
||||
protocol: 'http',
|
||||
},
|
||||
{
|
||||
id: 'gitrecon',
|
||||
url: 'https://raw.githubusercontent.com/gitrecon1455/fresh-proxy-list/refs/heads/main/proxylist.txt',
|
||||
protocol: 'http',
|
||||
},
|
||||
{
|
||||
id: 'vakhov-master',
|
||||
url: 'https://raw.githubusercontent.com/vakhov/fresh-proxy-list/refs/heads/master/http.txt',
|
||||
protocol: 'http',
|
||||
},
|
||||
{
|
||||
id: 'breaking-tech',
|
||||
url: 'https://raw.githubusercontent.com/BreakingTechFr/Proxy_Free/refs/heads/main/proxies/http.txt',
|
||||
protocol: 'http',
|
||||
},
|
||||
{
|
||||
id: 'ercindedeoglu',
|
||||
url: 'https://raw.githubusercontent.com/ErcinDedeoglu/proxies/main/proxies/http.txt',
|
||||
protocol: 'http',
|
||||
},
|
||||
{
|
||||
id: 'tuanminpay',
|
||||
url: 'https://raw.githubusercontent.com/TuanMinPay/live-proxy/master/http.txt',
|
||||
protocol: 'http',
|
||||
},
|
||||
|
||||
{
|
||||
id: 'r00tee-https',
|
||||
url: 'https://raw.githubusercontent.com/r00tee/Proxy-List/refs/heads/main/Https.txt',
|
||||
protocol: 'https',
|
||||
},
|
||||
{
|
||||
id: 'ercindedeoglu-https',
|
||||
url: 'https://raw.githubusercontent.com/ErcinDedeoglu/proxies/main/proxies/https.txt',
|
||||
protocol: 'https',
|
||||
},
|
||||
{
|
||||
id: 'vakhov-fresh-https',
|
||||
url: 'https://raw.githubusercontent.com/vakhov/fresh-proxy-list/refs/heads/master/https.txt',
|
||||
protocol: 'https',
|
||||
},
|
||||
{
|
||||
id: 'databay-https',
|
||||
url: 'https://raw.githubusercontent.com/databay-labs/free-proxy-list/refs/heads/master/https.txt',
|
||||
protocol: 'https',
|
||||
},
|
||||
{
|
||||
id: 'kangproxy-https',
|
||||
url: 'https://raw.githubusercontent.com/officialputuid/KangProxy/refs/heads/KangProxy/https/https.txt',
|
||||
protocol: 'https',
|
||||
},
|
||||
{
|
||||
id: 'zloi-user-https',
|
||||
url: 'https://raw.githubusercontent.com/zloi-user/hideip.me/refs/heads/master/https.txt',
|
||||
protocol: 'https',
|
||||
},
|
||||
{
|
||||
id: 'gfpcom-https',
|
||||
url: 'https://raw.githubusercontent.com/gfpcom/free-proxy-list/refs/heads/main/list/https.txt',
|
||||
protocol: 'https',
|
||||
},
|
||||
],
|
||||
};
|
||||
|
||||
// Shared instances (module-scoped, not global)
|
||||
let logger: ReturnType<typeof getLogger>;
|
||||
let cache: CacheProvider;
|
||||
let httpClient: HttpClient;
|
||||
let concurrencyLimit: ReturnType<typeof pLimit>;
|
||||
let proxyStats: ProxySource[] = PROXY_CONFIG.PROXY_SOURCES.map(source => ({
|
||||
id: source.id,
|
||||
total: 0,
|
||||
working: 0,
|
||||
lastChecked: new Date(),
|
||||
protocol: source.protocol,
|
||||
url: source.url,
|
||||
}));
|
||||
|
||||
// make a function that takes in source id and a boolean success and updates the proxyStats array
|
||||
async function updateProxyStats(sourceId: string, success: boolean) {
|
||||
const source = proxyStats.find(s => s.id === sourceId);
|
||||
if (source !== undefined) {
|
||||
if (typeof source.working !== 'number') source.working = 0;
|
||||
if (typeof source.total !== 'number') source.total = 0;
|
||||
source.total += 1;
|
||||
if (success) {
|
||||
source.working += 1;
|
||||
}
|
||||
source.percentWorking = (source.working / source.total) * 100;
|
||||
source.lastChecked = new Date();
|
||||
await cache.set(`${PROXY_CONFIG.CACHE_STATS_KEY}:${source.id}`, source, PROXY_CONFIG.CACHE_TTL);
|
||||
return source;
|
||||
} else {
|
||||
logger.warn(`Unknown proxy source: ${sourceId}`);
|
||||
}
|
||||
}
|
||||
|
||||
// make a function that resets proxyStats
|
||||
async function resetProxyStats(): Promise<void> {
|
||||
proxyStats = PROXY_CONFIG.PROXY_SOURCES.map(source => ({
|
||||
id: source.id,
|
||||
total: 0,
|
||||
working: 0,
|
||||
lastChecked: new Date(),
|
||||
protocol: source.protocol,
|
||||
url: source.url,
|
||||
}));
|
||||
for (const source of proxyStats) {
|
||||
await cache.set(`${PROXY_CONFIG.CACHE_STATS_KEY}:${source.id}`, source, PROXY_CONFIG.CACHE_TTL);
|
||||
}
|
||||
return Promise.resolve();
|
||||
}
|
||||
|
||||
/**
|
||||
* Update proxy data in cache with working/total stats and average response time
|
||||
* @param proxy - The proxy to update
|
||||
* @param isWorking - Whether the proxy is currently working
|
||||
*/
|
||||
async function updateProxyInCache(proxy: ProxyInfo, isWorking: boolean): Promise<void> {
|
||||
const cacheKey = `${PROXY_CONFIG.CACHE_KEY}:${proxy.protocol}://${proxy.host}:${proxy.port}`;
|
||||
|
||||
try {
|
||||
const existing: any = await cache.get(cacheKey);
|
||||
|
||||
// For failed proxies, only update if they already exist
|
||||
if (!isWorking && !existing) {
|
||||
logger.debug('Proxy not in cache, skipping failed update', {
|
||||
proxy: `${proxy.host}:${proxy.port}`,
|
||||
});
|
||||
return;
|
||||
}
|
||||
|
||||
// Calculate new average response time if we have a response time
|
||||
let newAverageResponseTime = existing?.averageResponseTime;
|
||||
if (proxy.responseTime !== undefined) {
|
||||
const existingAvg = existing?.averageResponseTime || 0;
|
||||
const existingTotal = existing?.total || 0;
|
||||
|
||||
// Calculate weighted average: (existing_avg * existing_count + new_response) / (existing_count + 1)
|
||||
newAverageResponseTime =
|
||||
existingTotal > 0
|
||||
? (existingAvg * existingTotal + proxy.responseTime) / (existingTotal + 1)
|
||||
: proxy.responseTime;
|
||||
}
|
||||
|
||||
// Build updated proxy data
|
||||
const updated = {
|
||||
...existing,
|
||||
...proxy, // Keep latest proxy info
|
||||
total: (existing?.total || 0) + 1,
|
||||
working: isWorking ? (existing?.working || 0) + 1 : existing?.working || 0,
|
||||
isWorking,
|
||||
lastChecked: new Date(),
|
||||
// Add firstSeen only for new entries
|
||||
...(existing ? {} : { firstSeen: new Date() }),
|
||||
// Update average response time if we calculated a new one
|
||||
...(newAverageResponseTime !== undefined
|
||||
? { averageResponseTime: newAverageResponseTime }
|
||||
: {}),
|
||||
};
|
||||
|
||||
// Calculate success rate
|
||||
updated.successRate = updated.total > 0 ? (updated.working / updated.total) * 100 : 0;
|
||||
|
||||
// Save to cache: reset TTL for working proxies, keep existing TTL for failed ones
|
||||
const cacheOptions = isWorking ? PROXY_CONFIG.CACHE_TTL : undefined;
|
||||
await cache.set(cacheKey, updated, cacheOptions);
|
||||
|
||||
logger.debug(`Updated ${isWorking ? 'working' : 'failed'} proxy in cache`, {
|
||||
proxy: `${proxy.host}:${proxy.port}`,
|
||||
working: updated.working,
|
||||
total: updated.total,
|
||||
successRate: updated.successRate.toFixed(1) + '%',
|
||||
avgResponseTime: updated.averageResponseTime
|
||||
? `${updated.averageResponseTime.toFixed(0)}ms`
|
||||
: 'N/A',
|
||||
});
|
||||
} catch (error) {
|
||||
logger.error('Failed to update proxy in cache', {
|
||||
proxy: `${proxy.host}:${proxy.port}`,
|
||||
error: error instanceof Error ? error.message : String(error),
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Initialize proxy cache for use during application startup
|
||||
* This should be called before any proxy operations
|
||||
*/
|
||||
export async function initializeProxyCache(): Promise<void> {
|
||||
logger = getLogger('proxy-tasks');
|
||||
cache = createCache({
|
||||
keyPrefix: 'proxy:',
|
||||
ttl: PROXY_CONFIG.CACHE_TTL,
|
||||
enableMetrics: true,
|
||||
});
|
||||
|
||||
logger.info('Initializing proxy cache...');
|
||||
await cache.waitForReady(10000);
|
||||
logger.info('Proxy cache initialized successfully');
|
||||
|
||||
// Initialize other shared resources that don't require cache
|
||||
httpClient = new HttpClient({ timeout: 10000 }, logger);
|
||||
concurrencyLimit = pLimit(PROXY_CONFIG.CONCURRENCY_LIMIT);
|
||||
|
||||
logger.info('Proxy tasks initialized');
|
||||
}
|
||||
|
||||
async function initializeSharedResources() {
|
||||
if (!logger) {
|
||||
// If not initialized at startup, initialize with fallback mode
|
||||
logger = getLogger('proxy-tasks');
|
||||
cache = createCache({
|
||||
keyPrefix: 'proxy:',
|
||||
ttl: PROXY_CONFIG.CACHE_TTL,
|
||||
enableMetrics: true,
|
||||
});
|
||||
|
||||
httpClient = new HttpClient({ timeout: 10000 }, logger);
|
||||
concurrencyLimit = pLimit(PROXY_CONFIG.CONCURRENCY_LIMIT);
|
||||
|
||||
logger.info('Proxy tasks initialized (fallback mode)');
|
||||
}
|
||||
}
|
||||
|
||||
// Individual task functions
|
||||
export async function queueProxyFetch(): Promise<string> {
|
||||
await initializeSharedResources();
|
||||
|
||||
const { queueManager } = await import('../services/queue.service');
|
||||
const job = await queueManager.addJob({
|
||||
type: 'proxy-fetch',
|
||||
provider: 'proxy-service',
|
||||
operation: 'fetch-and-check',
|
||||
payload: {},
|
||||
priority: 5,
|
||||
});
|
||||
|
||||
const jobId = job.id || 'unknown';
|
||||
logger.info('Proxy fetch job queued', { jobId });
|
||||
return jobId;
|
||||
}
|
||||
|
||||
export async function queueProxyCheck(proxies: ProxyInfo[]): Promise<string> {
|
||||
await initializeSharedResources();
|
||||
|
||||
const { queueManager } = await import('../services/queue.service');
|
||||
const job = await queueManager.addJob({
|
||||
type: 'proxy-check',
|
||||
provider: 'proxy-service',
|
||||
operation: 'check-specific',
|
||||
payload: { proxies },
|
||||
priority: 3,
|
||||
});
|
||||
|
||||
const jobId = job.id || 'unknown';
|
||||
logger.info('Proxy check job queued', { jobId, count: proxies.length });
|
||||
return jobId;
|
||||
}
|
||||
|
||||
export async function fetchProxiesFromSources(): Promise<ProxyInfo[]> {
|
||||
await initializeSharedResources();
|
||||
await resetProxyStats();
|
||||
|
||||
// Ensure concurrencyLimit is available before using it
|
||||
if (!concurrencyLimit) {
|
||||
logger.error('concurrencyLimit not initialized, using sequential processing');
|
||||
const result = [];
|
||||
for (const source of PROXY_CONFIG.PROXY_SOURCES) {
|
||||
const proxies = await fetchProxiesFromSource(source);
|
||||
result.push(...proxies);
|
||||
}
|
||||
let allProxies: ProxyInfo[] = result;
|
||||
allProxies = removeDuplicateProxies(allProxies);
|
||||
return allProxies;
|
||||
}
|
||||
|
||||
const sources = PROXY_CONFIG.PROXY_SOURCES.map(source =>
|
||||
concurrencyLimit(() => fetchProxiesFromSource(source))
|
||||
);
|
||||
const result = await Promise.all(sources);
|
||||
let allProxies: ProxyInfo[] = result.flat();
|
||||
allProxies = removeDuplicateProxies(allProxies);
|
||||
// await checkProxies(allProxies);
|
||||
return allProxies;
|
||||
}
|
||||
|
||||
export async function fetchProxiesFromSource(source: ProxySource): Promise<ProxyInfo[]> {
|
||||
await initializeSharedResources();
|
||||
|
||||
const allProxies: ProxyInfo[] = [];
|
||||
|
||||
try {
|
||||
logger.info(`Fetching proxies from ${source.url}`);
|
||||
|
||||
const response = await httpClient.get(source.url, {
|
||||
timeout: 10000,
|
||||
});
|
||||
|
||||
if (response.status !== 200) {
|
||||
logger.warn(`Failed to fetch from ${source.url}: ${response.status}`);
|
||||
return [];
|
||||
}
|
||||
|
||||
const text = response.data;
|
||||
const lines = text.split('\n').filter((line: string) => line.trim());
|
||||
|
||||
for (const line of lines) {
|
||||
let trimmed = line.trim();
|
||||
trimmed = cleanProxyUrl(trimmed);
|
||||
if (!trimmed || trimmed.startsWith('#')) continue;
|
||||
|
||||
// Parse formats like "host:port" or "host:port:user:pass"
|
||||
const parts = trimmed.split(':');
|
||||
if (parts.length >= 2) {
|
||||
const proxy: ProxyInfo = {
|
||||
source: source.id,
|
||||
protocol: source.protocol as 'http' | 'https' | 'socks4' | 'socks5',
|
||||
host: parts[0],
|
||||
port: parseInt(parts[1]),
|
||||
};
|
||||
|
||||
if (!isNaN(proxy.port) && proxy.host) {
|
||||
allProxies.push(proxy);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
logger.info(`Parsed ${allProxies.length} proxies from ${source.url}`);
|
||||
} catch (error) {
|
||||
logger.error(`Error fetching proxies from ${source.url}`, error);
|
||||
return [];
|
||||
}
|
||||
|
||||
return allProxies;
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if a proxy is working
|
||||
*/
|
||||
export async function checkProxy(proxy: ProxyInfo): Promise<ProxyInfo> {
|
||||
await initializeSharedResources();
|
||||
|
||||
let success = false;
|
||||
logger.debug(`Checking Proxy:`, {
|
||||
protocol: proxy.protocol,
|
||||
host: proxy.host,
|
||||
port: proxy.port,
|
||||
});
|
||||
|
||||
try {
|
||||
// Test the proxy
|
||||
const response = await httpClient.get(PROXY_CONFIG.CHECK_URL, {
|
||||
proxy,
|
||||
timeout: PROXY_CONFIG.CHECK_TIMEOUT,
|
||||
});
|
||||
|
||||
const isWorking = response.status >= 200 && response.status < 300;
|
||||
const result: ProxyInfo = {
|
||||
...proxy,
|
||||
isWorking,
|
||||
lastChecked: new Date(),
|
||||
responseTime: response.responseTime,
|
||||
};
|
||||
|
||||
if (isWorking && !JSON.stringify(response.data).includes(PROXY_CONFIG.CHECK_IP)) {
|
||||
success = true;
|
||||
await updateProxyInCache(result, true);
|
||||
} else {
|
||||
await updateProxyInCache(result, false);
|
||||
}
|
||||
|
||||
if (proxy.source) {
|
||||
await updateProxyStats(proxy.source, success);
|
||||
}
|
||||
|
||||
logger.debug('Proxy check completed', {
|
||||
host: proxy.host,
|
||||
port: proxy.port,
|
||||
isWorking,
|
||||
});
|
||||
|
||||
return result;
|
||||
} catch (error) {
|
||||
const errorMessage = error instanceof Error ? error.message : String(error);
|
||||
const result: ProxyInfo = {
|
||||
...proxy,
|
||||
isWorking: false,
|
||||
error: errorMessage,
|
||||
lastChecked: new Date(),
|
||||
};
|
||||
|
||||
// Update cache for failed proxy (increment total, don't update TTL)
|
||||
await updateProxyInCache(result, false);
|
||||
|
||||
if (proxy.source) {
|
||||
await updateProxyStats(proxy.source, success);
|
||||
}
|
||||
|
||||
logger.debug('Proxy check failed', {
|
||||
host: proxy.host,
|
||||
port: proxy.port,
|
||||
error: errorMessage,
|
||||
});
|
||||
|
||||
return result;
|
||||
}
|
||||
}
|
||||
|
||||
// Utility functions
|
||||
function cleanProxyUrl(url: string): string {
|
||||
return url
|
||||
.replace(/^https?:\/\//, '')
|
||||
.replace(/^0+/, '')
|
||||
.replace(/:0+(\d)/g, ':$1');
|
||||
}
|
||||
|
||||
function removeDuplicateProxies(proxies: ProxyInfo[]): ProxyInfo[] {
|
||||
const seen = new Set<string>();
|
||||
const unique: ProxyInfo[] = [];
|
||||
|
||||
for (const proxy of proxies) {
|
||||
const key = `${proxy.protocol}://${proxy.host}:${proxy.port}`;
|
||||
if (!seen.has(key)) {
|
||||
seen.add(key);
|
||||
unique.push(proxy);
|
||||
}
|
||||
}
|
||||
|
||||
return unique;
|
||||
}
|
||||
|
||||
// Optional: Export a convenience object that groups related tasks
|
||||
export const proxyTasks = {
|
||||
queueProxyFetch,
|
||||
queueProxyCheck,
|
||||
fetchProxiesFromSources,
|
||||
fetchProxiesFromSource,
|
||||
checkProxy,
|
||||
};
|
||||
|
||||
// Export singleton instance for backward compatibility (optional)
|
||||
// Remove this if you want to fully move to the task-based approach
|
||||
export const proxyService = proxyTasks;
|
||||
|
|
|
|||
|
|
@ -1,174 +1,182 @@
|
|||
import { ProviderConfig } from '../services/provider-registry.service';
|
||||
import { getLogger } from '@stock-bot/logger';
|
||||
|
||||
const logger = getLogger('quotemedia-provider');
|
||||
|
||||
export const quotemediaProvider: ProviderConfig = {
|
||||
name: 'quotemedia',
|
||||
operations: { 'live-data': async (payload: { symbol: string; fields?: string[] }) => {
|
||||
logger.info('Fetching live data from QuoteMedia', { symbol: payload.symbol });
|
||||
|
||||
// Simulate QuoteMedia API call
|
||||
const mockData = {
|
||||
symbol: payload.symbol,
|
||||
price: Math.random() * 1000 + 100,
|
||||
volume: Math.floor(Math.random() * 1000000),
|
||||
change: (Math.random() - 0.5) * 20,
|
||||
changePercent: (Math.random() - 0.5) * 5,
|
||||
timestamp: new Date().toISOString(),
|
||||
source: 'quotemedia',
|
||||
fields: payload.fields || ['price', 'volume', 'change']
|
||||
};
|
||||
|
||||
// Simulate network delay
|
||||
await new Promise(resolve => setTimeout(resolve, 100 + Math.random() * 200));
|
||||
|
||||
return mockData;
|
||||
},
|
||||
|
||||
'historical-data': async (payload: {
|
||||
symbol: string;
|
||||
from: Date;
|
||||
to: Date;
|
||||
interval?: string;
|
||||
fields?: string[]; }) => {
|
||||
logger.info('Fetching historical data from QuoteMedia', {
|
||||
symbol: payload.symbol,
|
||||
from: payload.from,
|
||||
to: payload.to,
|
||||
interval: payload.interval || '1d'
|
||||
});
|
||||
|
||||
// Generate mock historical data
|
||||
const days = Math.ceil((payload.to.getTime() - payload.from.getTime()) / (1000 * 60 * 60 * 24));
|
||||
const data = [];
|
||||
|
||||
for (let i = 0; i < Math.min(days, 100); i++) {
|
||||
const date = new Date(payload.from.getTime() + i * 24 * 60 * 60 * 1000);
|
||||
data.push({
|
||||
date: date.toISOString().split('T')[0],
|
||||
open: Math.random() * 1000 + 100,
|
||||
high: Math.random() * 1000 + 100,
|
||||
low: Math.random() * 1000 + 100,
|
||||
close: Math.random() * 1000 + 100,
|
||||
volume: Math.floor(Math.random() * 1000000),
|
||||
source: 'quotemedia'
|
||||
});
|
||||
}
|
||||
|
||||
// Simulate network delay
|
||||
await new Promise(resolve => setTimeout(resolve, 200 + Math.random() * 300));
|
||||
|
||||
return {
|
||||
symbol: payload.symbol,
|
||||
interval: payload.interval || '1d',
|
||||
data,
|
||||
source: 'quotemedia',
|
||||
totalRecords: data.length
|
||||
};
|
||||
},
|
||||
'batch-quotes': async (payload: { symbols: string[]; fields?: string[] }) => {
|
||||
logger.info('Fetching batch quotes from QuoteMedia', {
|
||||
symbols: payload.symbols,
|
||||
count: payload.symbols.length
|
||||
});
|
||||
|
||||
const quotes = payload.symbols.map(symbol => ({
|
||||
symbol,
|
||||
price: Math.random() * 1000 + 100,
|
||||
volume: Math.floor(Math.random() * 1000000),
|
||||
change: (Math.random() - 0.5) * 20,
|
||||
timestamp: new Date().toISOString(),
|
||||
source: 'quotemedia'
|
||||
}));
|
||||
|
||||
// Simulate network delay
|
||||
await new Promise(resolve => setTimeout(resolve, 300 + Math.random() * 200));
|
||||
|
||||
return {
|
||||
quotes,
|
||||
source: 'quotemedia',
|
||||
timestamp: new Date().toISOString(),
|
||||
totalSymbols: payload.symbols.length
|
||||
};
|
||||
}, 'company-profile': async (payload: { symbol: string }) => {
|
||||
logger.info('Fetching company profile from QuoteMedia', { symbol: payload.symbol });
|
||||
|
||||
// Simulate company profile data
|
||||
const profile = {
|
||||
symbol: payload.symbol,
|
||||
companyName: `${payload.symbol} Corporation`,
|
||||
sector: 'Technology',
|
||||
industry: 'Software',
|
||||
description: `${payload.symbol} is a leading technology company.`,
|
||||
marketCap: Math.floor(Math.random() * 1000000000000),
|
||||
employees: Math.floor(Math.random() * 100000),
|
||||
website: `https://www.${payload.symbol.toLowerCase()}.com`,
|
||||
source: 'quotemedia'
|
||||
};
|
||||
|
||||
await new Promise(resolve => setTimeout(resolve, 150 + Math.random() * 100));
|
||||
|
||||
return profile;
|
||||
}, 'options-chain': async (payload: { symbol: string; expiration?: string }) => {
|
||||
logger.info('Fetching options chain from QuoteMedia', {
|
||||
symbol: payload.symbol,
|
||||
expiration: payload.expiration
|
||||
});
|
||||
|
||||
// Generate mock options data
|
||||
const strikes = Array.from({ length: 20 }, (_, i) => 100 + i * 5);
|
||||
const calls = strikes.map(strike => ({
|
||||
strike,
|
||||
bid: Math.random() * 10,
|
||||
ask: Math.random() * 10 + 0.5,
|
||||
volume: Math.floor(Math.random() * 1000),
|
||||
openInterest: Math.floor(Math.random() * 5000)
|
||||
}));
|
||||
|
||||
const puts = strikes.map(strike => ({
|
||||
strike,
|
||||
bid: Math.random() * 10,
|
||||
ask: Math.random() * 10 + 0.5,
|
||||
volume: Math.floor(Math.random() * 1000),
|
||||
openInterest: Math.floor(Math.random() * 5000)
|
||||
}));
|
||||
|
||||
await new Promise(resolve => setTimeout(resolve, 400 + Math.random() * 300));
|
||||
return {
|
||||
symbol: payload.symbol,
|
||||
expiration: payload.expiration || new Date(Date.now() + 30 * 24 * 60 * 60 * 1000).toISOString().split('T')[0],
|
||||
calls,
|
||||
puts,
|
||||
source: 'quotemedia'
|
||||
};
|
||||
}
|
||||
},
|
||||
|
||||
scheduledJobs: [
|
||||
// {
|
||||
// type: 'quotemedia-premium-refresh',
|
||||
// operation: 'batch-quotes',
|
||||
// payload: { symbols: ['AAPL', 'GOOGL', 'MSFT'] },
|
||||
// cronPattern: '*/2 * * * *', // Every 2 minutes
|
||||
// priority: 7,
|
||||
// description: 'Refresh premium quotes with detailed market data'
|
||||
// },
|
||||
// {
|
||||
// type: 'quotemedia-options-update',
|
||||
// operation: 'options-chain',
|
||||
// payload: { symbol: 'SPY' },
|
||||
// cronPattern: '*/10 * * * *', // Every 10 minutes
|
||||
// priority: 5,
|
||||
// description: 'Update options chain data for SPY ETF'
|
||||
// },
|
||||
// {
|
||||
// type: 'quotemedia-profiles',
|
||||
// operation: 'company-profile',
|
||||
// payload: { symbol: 'AAPL' },
|
||||
// cronPattern: '0 9 * * 1-5', // Weekdays at 9 AM
|
||||
// priority: 3,
|
||||
// description: 'Update company profile data'
|
||||
// }
|
||||
]
|
||||
};
|
||||
import { getLogger } from '@stock-bot/logger';
|
||||
import { ProviderConfig } from '../services/provider-registry.service';
|
||||
|
||||
const logger = getLogger('quotemedia-provider');
|
||||
|
||||
export const quotemediaProvider: ProviderConfig = {
|
||||
name: 'quotemedia',
|
||||
operations: {
|
||||
'live-data': async (payload: { symbol: string; fields?: string[] }) => {
|
||||
logger.info('Fetching live data from QuoteMedia', { symbol: payload.symbol });
|
||||
|
||||
// Simulate QuoteMedia API call
|
||||
const mockData = {
|
||||
symbol: payload.symbol,
|
||||
price: Math.random() * 1000 + 100,
|
||||
volume: Math.floor(Math.random() * 1000000),
|
||||
change: (Math.random() - 0.5) * 20,
|
||||
changePercent: (Math.random() - 0.5) * 5,
|
||||
timestamp: new Date().toISOString(),
|
||||
source: 'quotemedia',
|
||||
fields: payload.fields || ['price', 'volume', 'change'],
|
||||
};
|
||||
|
||||
// Simulate network delay
|
||||
await new Promise(resolve => setTimeout(resolve, 100 + Math.random() * 200));
|
||||
|
||||
return mockData;
|
||||
},
|
||||
|
||||
'historical-data': async (payload: {
|
||||
symbol: string;
|
||||
from: Date;
|
||||
to: Date;
|
||||
interval?: string;
|
||||
fields?: string[];
|
||||
}) => {
|
||||
logger.info('Fetching historical data from QuoteMedia', {
|
||||
symbol: payload.symbol,
|
||||
from: payload.from,
|
||||
to: payload.to,
|
||||
interval: payload.interval || '1d',
|
||||
});
|
||||
|
||||
// Generate mock historical data
|
||||
const days = Math.ceil(
|
||||
(payload.to.getTime() - payload.from.getTime()) / (1000 * 60 * 60 * 24)
|
||||
);
|
||||
const data = [];
|
||||
|
||||
for (let i = 0; i < Math.min(days, 100); i++) {
|
||||
const date = new Date(payload.from.getTime() + i * 24 * 60 * 60 * 1000);
|
||||
data.push({
|
||||
date: date.toISOString().split('T')[0],
|
||||
open: Math.random() * 1000 + 100,
|
||||
high: Math.random() * 1000 + 100,
|
||||
low: Math.random() * 1000 + 100,
|
||||
close: Math.random() * 1000 + 100,
|
||||
volume: Math.floor(Math.random() * 1000000),
|
||||
source: 'quotemedia',
|
||||
});
|
||||
}
|
||||
|
||||
// Simulate network delay
|
||||
await new Promise(resolve => setTimeout(resolve, 200 + Math.random() * 300));
|
||||
|
||||
return {
|
||||
symbol: payload.symbol,
|
||||
interval: payload.interval || '1d',
|
||||
data,
|
||||
source: 'quotemedia',
|
||||
totalRecords: data.length,
|
||||
};
|
||||
},
|
||||
'batch-quotes': async (payload: { symbols: string[]; fields?: string[] }) => {
|
||||
logger.info('Fetching batch quotes from QuoteMedia', {
|
||||
symbols: payload.symbols,
|
||||
count: payload.symbols.length,
|
||||
});
|
||||
|
||||
const quotes = payload.symbols.map(symbol => ({
|
||||
symbol,
|
||||
price: Math.random() * 1000 + 100,
|
||||
volume: Math.floor(Math.random() * 1000000),
|
||||
change: (Math.random() - 0.5) * 20,
|
||||
timestamp: new Date().toISOString(),
|
||||
source: 'quotemedia',
|
||||
}));
|
||||
|
||||
// Simulate network delay
|
||||
await new Promise(resolve => setTimeout(resolve, 300 + Math.random() * 200));
|
||||
|
||||
return {
|
||||
quotes,
|
||||
source: 'quotemedia',
|
||||
timestamp: new Date().toISOString(),
|
||||
totalSymbols: payload.symbols.length,
|
||||
};
|
||||
},
|
||||
'company-profile': async (payload: { symbol: string }) => {
|
||||
logger.info('Fetching company profile from QuoteMedia', { symbol: payload.symbol });
|
||||
|
||||
// Simulate company profile data
|
||||
const profile = {
|
||||
symbol: payload.symbol,
|
||||
companyName: `${payload.symbol} Corporation`,
|
||||
sector: 'Technology',
|
||||
industry: 'Software',
|
||||
description: `${payload.symbol} is a leading technology company.`,
|
||||
marketCap: Math.floor(Math.random() * 1000000000000),
|
||||
employees: Math.floor(Math.random() * 100000),
|
||||
website: `https://www.${payload.symbol.toLowerCase()}.com`,
|
||||
source: 'quotemedia',
|
||||
};
|
||||
|
||||
await new Promise(resolve => setTimeout(resolve, 150 + Math.random() * 100));
|
||||
|
||||
return profile;
|
||||
},
|
||||
'options-chain': async (payload: { symbol: string; expiration?: string }) => {
|
||||
logger.info('Fetching options chain from QuoteMedia', {
|
||||
symbol: payload.symbol,
|
||||
expiration: payload.expiration,
|
||||
});
|
||||
|
||||
// Generate mock options data
|
||||
const strikes = Array.from({ length: 20 }, (_, i) => 100 + i * 5);
|
||||
const calls = strikes.map(strike => ({
|
||||
strike,
|
||||
bid: Math.random() * 10,
|
||||
ask: Math.random() * 10 + 0.5,
|
||||
volume: Math.floor(Math.random() * 1000),
|
||||
openInterest: Math.floor(Math.random() * 5000),
|
||||
}));
|
||||
|
||||
const puts = strikes.map(strike => ({
|
||||
strike,
|
||||
bid: Math.random() * 10,
|
||||
ask: Math.random() * 10 + 0.5,
|
||||
volume: Math.floor(Math.random() * 1000),
|
||||
openInterest: Math.floor(Math.random() * 5000),
|
||||
}));
|
||||
|
||||
await new Promise(resolve => setTimeout(resolve, 400 + Math.random() * 300));
|
||||
return {
|
||||
symbol: payload.symbol,
|
||||
expiration:
|
||||
payload.expiration ||
|
||||
new Date(Date.now() + 30 * 24 * 60 * 60 * 1000).toISOString().split('T')[0],
|
||||
calls,
|
||||
puts,
|
||||
source: 'quotemedia',
|
||||
};
|
||||
},
|
||||
},
|
||||
|
||||
scheduledJobs: [
|
||||
// {
|
||||
// type: 'quotemedia-premium-refresh',
|
||||
// operation: 'batch-quotes',
|
||||
// payload: { symbols: ['AAPL', 'GOOGL', 'MSFT'] },
|
||||
// cronPattern: '*/2 * * * *', // Every 2 minutes
|
||||
// priority: 7,
|
||||
// description: 'Refresh premium quotes with detailed market data'
|
||||
// },
|
||||
// {
|
||||
// type: 'quotemedia-options-update',
|
||||
// operation: 'options-chain',
|
||||
// payload: { symbol: 'SPY' },
|
||||
// cronPattern: '*/10 * * * *', // Every 10 minutes
|
||||
// priority: 5,
|
||||
// description: 'Update options chain data for SPY ETF'
|
||||
// },
|
||||
// {
|
||||
// type: 'quotemedia-profiles',
|
||||
// operation: 'company-profile',
|
||||
// payload: { symbol: 'AAPL' },
|
||||
// cronPattern: '0 9 * * 1-5', // Weekdays at 9 AM
|
||||
// priority: 3,
|
||||
// description: 'Update company profile data'
|
||||
// }
|
||||
],
|
||||
};
|
||||
|
|
|
|||
|
|
@ -1,248 +1,254 @@
|
|||
import { ProviderConfig } from '../services/provider-registry.service';
|
||||
import { getLogger } from '@stock-bot/logger';
|
||||
|
||||
const logger = getLogger('yahoo-provider');
|
||||
|
||||
export const yahooProvider: ProviderConfig = {
|
||||
name: 'yahoo-finance',
|
||||
operations: {
|
||||
'live-data': async (payload: { symbol: string; modules?: string[] }) => {
|
||||
|
||||
|
||||
logger.info('Fetching live data from Yahoo Finance', { symbol: payload.symbol });
|
||||
|
||||
// Simulate Yahoo Finance API call
|
||||
const mockData = {
|
||||
symbol: payload.symbol,
|
||||
regularMarketPrice: Math.random() * 1000 + 100,
|
||||
regularMarketVolume: Math.floor(Math.random() * 1000000),
|
||||
regularMarketChange: (Math.random() - 0.5) * 20,
|
||||
regularMarketChangePercent: (Math.random() - 0.5) * 5,
|
||||
preMarketPrice: Math.random() * 1000 + 100,
|
||||
postMarketPrice: Math.random() * 1000 + 100,
|
||||
marketCap: Math.floor(Math.random() * 1000000000000),
|
||||
peRatio: Math.random() * 50 + 5,
|
||||
dividendYield: Math.random() * 0.1,
|
||||
fiftyTwoWeekHigh: Math.random() * 1200 + 100,
|
||||
fiftyTwoWeekLow: Math.random() * 800 + 50,
|
||||
timestamp: Date.now() / 1000,
|
||||
source: 'yahoo-finance',
|
||||
modules: payload.modules || ['price', 'summaryDetail']
|
||||
};
|
||||
|
||||
// Simulate network delay
|
||||
await new Promise(resolve => setTimeout(resolve, 150 + Math.random() * 250));
|
||||
|
||||
return mockData;
|
||||
},
|
||||
|
||||
'historical-data': async (payload: {
|
||||
symbol: string;
|
||||
period1: number;
|
||||
period2: number;
|
||||
interval?: string;
|
||||
events?: string; }) => {
|
||||
const { getLogger } = await import('@stock-bot/logger');
|
||||
const logger = getLogger('yahoo-provider');
|
||||
|
||||
logger.info('Fetching historical data from Yahoo Finance', {
|
||||
symbol: payload.symbol,
|
||||
period1: payload.period1,
|
||||
period2: payload.period2,
|
||||
interval: payload.interval || '1d'
|
||||
});
|
||||
|
||||
// Generate mock historical data
|
||||
const days = Math.ceil((payload.period2 - payload.period1) / (24 * 60 * 60));
|
||||
const data = [];
|
||||
|
||||
for (let i = 0; i < Math.min(days, 100); i++) {
|
||||
const timestamp = payload.period1 + i * 24 * 60 * 60;
|
||||
data.push({
|
||||
timestamp,
|
||||
date: new Date(timestamp * 1000).toISOString().split('T')[0],
|
||||
open: Math.random() * 1000 + 100,
|
||||
high: Math.random() * 1000 + 100,
|
||||
low: Math.random() * 1000 + 100,
|
||||
close: Math.random() * 1000 + 100,
|
||||
adjClose: Math.random() * 1000 + 100,
|
||||
volume: Math.floor(Math.random() * 1000000),
|
||||
source: 'yahoo-finance'
|
||||
});
|
||||
}
|
||||
|
||||
// Simulate network delay
|
||||
await new Promise(resolve => setTimeout(resolve, 250 + Math.random() * 350));
|
||||
|
||||
return {
|
||||
symbol: payload.symbol,
|
||||
interval: payload.interval || '1d',
|
||||
timestamps: data.map(d => d.timestamp),
|
||||
indicators: {
|
||||
quote: [{
|
||||
open: data.map(d => d.open),
|
||||
high: data.map(d => d.high),
|
||||
low: data.map(d => d.low),
|
||||
close: data.map(d => d.close),
|
||||
volume: data.map(d => d.volume)
|
||||
}],
|
||||
adjclose: [{
|
||||
adjclose: data.map(d => d.adjClose)
|
||||
}]
|
||||
},
|
||||
source: 'yahoo-finance',
|
||||
totalRecords: data.length
|
||||
};
|
||||
},
|
||||
'search': async (payload: { query: string; quotesCount?: number; newsCount?: number }) => {
|
||||
const { getLogger } = await import('@stock-bot/logger');
|
||||
const logger = getLogger('yahoo-provider');
|
||||
|
||||
logger.info('Searching Yahoo Finance', { query: payload.query });
|
||||
|
||||
// Generate mock search results
|
||||
const quotes = Array.from({ length: payload.quotesCount || 5 }, (_, i) => ({
|
||||
symbol: `${payload.query.toUpperCase()}${i}`,
|
||||
shortname: `${payload.query} Company ${i}`,
|
||||
longname: `${payload.query} Corporation ${i}`,
|
||||
exchDisp: 'NASDAQ',
|
||||
typeDisp: 'Equity',
|
||||
source: 'yahoo-finance'
|
||||
}));
|
||||
|
||||
const news = Array.from({ length: payload.newsCount || 3 }, (_, i) => ({
|
||||
uuid: `news-${i}-${Date.now()}`,
|
||||
title: `${payload.query} News Article ${i}`,
|
||||
publisher: 'Financial News',
|
||||
providerPublishTime: Date.now() - i * 3600000,
|
||||
type: 'STORY',
|
||||
source: 'yahoo-finance'
|
||||
}));
|
||||
|
||||
await new Promise(resolve => setTimeout(resolve, 200 + Math.random() * 200));
|
||||
|
||||
return {
|
||||
quotes,
|
||||
news,
|
||||
totalQuotes: quotes.length,
|
||||
totalNews: news.length,
|
||||
source: 'yahoo-finance'
|
||||
};
|
||||
}, 'financials': async (payload: { symbol: string; type?: 'income' | 'balance' | 'cash' }) => {
|
||||
const { getLogger } = await import('@stock-bot/logger');
|
||||
const logger = getLogger('yahoo-provider');
|
||||
|
||||
logger.info('Fetching financials from Yahoo Finance', {
|
||||
symbol: payload.symbol,
|
||||
type: payload.type || 'income'
|
||||
});
|
||||
|
||||
// Generate mock financial data
|
||||
const financials = {
|
||||
symbol: payload.symbol,
|
||||
type: payload.type || 'income',
|
||||
currency: 'USD',
|
||||
annual: Array.from({ length: 4 }, (_, i) => ({
|
||||
fiscalYear: 2024 - i,
|
||||
revenue: Math.floor(Math.random() * 100000000000),
|
||||
netIncome: Math.floor(Math.random() * 10000000000),
|
||||
totalAssets: Math.floor(Math.random() * 500000000000),
|
||||
totalDebt: Math.floor(Math.random() * 50000000000)
|
||||
})),
|
||||
quarterly: Array.from({ length: 4 }, (_, i) => ({
|
||||
fiscalQuarter: `Q${4-i} 2024`,
|
||||
revenue: Math.floor(Math.random() * 25000000000),
|
||||
netIncome: Math.floor(Math.random() * 2500000000)
|
||||
})),
|
||||
source: 'yahoo-finance'
|
||||
};
|
||||
|
||||
await new Promise(resolve => setTimeout(resolve, 300 + Math.random() * 200));
|
||||
|
||||
return financials;
|
||||
}, 'earnings': async (payload: { symbol: string; period?: 'annual' | 'quarterly' }) => {
|
||||
const { getLogger } = await import('@stock-bot/logger');
|
||||
const logger = getLogger('yahoo-provider');
|
||||
|
||||
logger.info('Fetching earnings from Yahoo Finance', {
|
||||
symbol: payload.symbol,
|
||||
period: payload.period || 'quarterly'
|
||||
});
|
||||
|
||||
// Generate mock earnings data
|
||||
const earnings = {
|
||||
symbol: payload.symbol,
|
||||
period: payload.period || 'quarterly',
|
||||
earnings: Array.from({ length: 8 }, (_, i) => ({
|
||||
quarter: `Q${(i % 4) + 1} ${2024 - Math.floor(i/4)}`,
|
||||
epsEstimate: Math.random() * 5,
|
||||
epsActual: Math.random() * 5,
|
||||
revenueEstimate: Math.floor(Math.random() * 50000000000),
|
||||
revenueActual: Math.floor(Math.random() * 50000000000),
|
||||
surprise: (Math.random() - 0.5) * 2
|
||||
})),
|
||||
source: 'yahoo-finance'
|
||||
};
|
||||
|
||||
await new Promise(resolve => setTimeout(resolve, 250 + Math.random() * 150));
|
||||
|
||||
return earnings;
|
||||
}, 'recommendations': async (payload: { symbol: string }) => {
|
||||
const { getLogger } = await import('@stock-bot/logger');
|
||||
const logger = getLogger('yahoo-provider');
|
||||
|
||||
logger.info('Fetching recommendations from Yahoo Finance', { symbol: payload.symbol });
|
||||
|
||||
// Generate mock recommendations
|
||||
const recommendations = {
|
||||
symbol: payload.symbol,
|
||||
current: {
|
||||
strongBuy: Math.floor(Math.random() * 10),
|
||||
buy: Math.floor(Math.random() * 15),
|
||||
hold: Math.floor(Math.random() * 20),
|
||||
sell: Math.floor(Math.random() * 5),
|
||||
strongSell: Math.floor(Math.random() * 3)
|
||||
},
|
||||
trend: Array.from({ length: 4 }, (_, i) => ({
|
||||
period: `${i}m`,
|
||||
strongBuy: Math.floor(Math.random() * 10),
|
||||
buy: Math.floor(Math.random() * 15),
|
||||
hold: Math.floor(Math.random() * 20),
|
||||
sell: Math.floor(Math.random() * 5),
|
||||
strongSell: Math.floor(Math.random() * 3)
|
||||
})),
|
||||
source: 'yahoo-finance'
|
||||
};
|
||||
|
||||
await new Promise(resolve => setTimeout(resolve, 180 + Math.random() * 120));
|
||||
return recommendations;
|
||||
}
|
||||
},
|
||||
|
||||
scheduledJobs: [
|
||||
// {
|
||||
// type: 'yahoo-market-refresh',
|
||||
// operation: 'live-data',
|
||||
// payload: { symbol: 'AAPL' },
|
||||
// cronPattern: '*/1 * * * *', // Every minute
|
||||
// priority: 8,
|
||||
// description: 'Refresh Apple stock price from Yahoo Finance'
|
||||
// },
|
||||
// {
|
||||
// type: 'yahoo-sp500-update',
|
||||
// operation: 'live-data',
|
||||
// payload: { symbol: 'SPY' },
|
||||
// cronPattern: '*/2 * * * *', // Every 2 minutes
|
||||
// priority: 9,
|
||||
// description: 'Update S&P 500 ETF price'
|
||||
// },
|
||||
// {
|
||||
// type: 'yahoo-earnings-check',
|
||||
// operation: 'earnings',
|
||||
// payload: { symbol: 'AAPL' },
|
||||
// cronPattern: '0 16 * * 1-5', // Weekdays at 4 PM (market close)
|
||||
// priority: 6,
|
||||
// description: 'Check earnings data for Apple'
|
||||
// }
|
||||
]
|
||||
};
|
||||
import { getLogger } from '@stock-bot/logger';
|
||||
import { ProviderConfig } from '../services/provider-registry.service';
|
||||
|
||||
const logger = getLogger('yahoo-provider');
|
||||
|
||||
export const yahooProvider: ProviderConfig = {
|
||||
name: 'yahoo-finance',
|
||||
operations: {
|
||||
'live-data': async (payload: { symbol: string; modules?: string[] }) => {
|
||||
logger.info('Fetching live data from Yahoo Finance', { symbol: payload.symbol });
|
||||
|
||||
// Simulate Yahoo Finance API call
|
||||
const mockData = {
|
||||
symbol: payload.symbol,
|
||||
regularMarketPrice: Math.random() * 1000 + 100,
|
||||
regularMarketVolume: Math.floor(Math.random() * 1000000),
|
||||
regularMarketChange: (Math.random() - 0.5) * 20,
|
||||
regularMarketChangePercent: (Math.random() - 0.5) * 5,
|
||||
preMarketPrice: Math.random() * 1000 + 100,
|
||||
postMarketPrice: Math.random() * 1000 + 100,
|
||||
marketCap: Math.floor(Math.random() * 1000000000000),
|
||||
peRatio: Math.random() * 50 + 5,
|
||||
dividendYield: Math.random() * 0.1,
|
||||
fiftyTwoWeekHigh: Math.random() * 1200 + 100,
|
||||
fiftyTwoWeekLow: Math.random() * 800 + 50,
|
||||
timestamp: Date.now() / 1000,
|
||||
source: 'yahoo-finance',
|
||||
modules: payload.modules || ['price', 'summaryDetail'],
|
||||
};
|
||||
|
||||
// Simulate network delay
|
||||
await new Promise(resolve => setTimeout(resolve, 150 + Math.random() * 250));
|
||||
|
||||
return mockData;
|
||||
},
|
||||
|
||||
'historical-data': async (payload: {
|
||||
symbol: string;
|
||||
period1: number;
|
||||
period2: number;
|
||||
interval?: string;
|
||||
events?: string;
|
||||
}) => {
|
||||
const { getLogger } = await import('@stock-bot/logger');
|
||||
const logger = getLogger('yahoo-provider');
|
||||
|
||||
logger.info('Fetching historical data from Yahoo Finance', {
|
||||
symbol: payload.symbol,
|
||||
period1: payload.period1,
|
||||
period2: payload.period2,
|
||||
interval: payload.interval || '1d',
|
||||
});
|
||||
|
||||
// Generate mock historical data
|
||||
const days = Math.ceil((payload.period2 - payload.period1) / (24 * 60 * 60));
|
||||
const data = [];
|
||||
|
||||
for (let i = 0; i < Math.min(days, 100); i++) {
|
||||
const timestamp = payload.period1 + i * 24 * 60 * 60;
|
||||
data.push({
|
||||
timestamp,
|
||||
date: new Date(timestamp * 1000).toISOString().split('T')[0],
|
||||
open: Math.random() * 1000 + 100,
|
||||
high: Math.random() * 1000 + 100,
|
||||
low: Math.random() * 1000 + 100,
|
||||
close: Math.random() * 1000 + 100,
|
||||
adjClose: Math.random() * 1000 + 100,
|
||||
volume: Math.floor(Math.random() * 1000000),
|
||||
source: 'yahoo-finance',
|
||||
});
|
||||
}
|
||||
|
||||
// Simulate network delay
|
||||
await new Promise(resolve => setTimeout(resolve, 250 + Math.random() * 350));
|
||||
|
||||
return {
|
||||
symbol: payload.symbol,
|
||||
interval: payload.interval || '1d',
|
||||
timestamps: data.map(d => d.timestamp),
|
||||
indicators: {
|
||||
quote: [
|
||||
{
|
||||
open: data.map(d => d.open),
|
||||
high: data.map(d => d.high),
|
||||
low: data.map(d => d.low),
|
||||
close: data.map(d => d.close),
|
||||
volume: data.map(d => d.volume),
|
||||
},
|
||||
],
|
||||
adjclose: [
|
||||
{
|
||||
adjclose: data.map(d => d.adjClose),
|
||||
},
|
||||
],
|
||||
},
|
||||
source: 'yahoo-finance',
|
||||
totalRecords: data.length,
|
||||
};
|
||||
},
|
||||
search: async (payload: { query: string; quotesCount?: number; newsCount?: number }) => {
|
||||
const { getLogger } = await import('@stock-bot/logger');
|
||||
const logger = getLogger('yahoo-provider');
|
||||
|
||||
logger.info('Searching Yahoo Finance', { query: payload.query });
|
||||
|
||||
// Generate mock search results
|
||||
const quotes = Array.from({ length: payload.quotesCount || 5 }, (_, i) => ({
|
||||
symbol: `${payload.query.toUpperCase()}${i}`,
|
||||
shortname: `${payload.query} Company ${i}`,
|
||||
longname: `${payload.query} Corporation ${i}`,
|
||||
exchDisp: 'NASDAQ',
|
||||
typeDisp: 'Equity',
|
||||
source: 'yahoo-finance',
|
||||
}));
|
||||
|
||||
const news = Array.from({ length: payload.newsCount || 3 }, (_, i) => ({
|
||||
uuid: `news-${i}-${Date.now()}`,
|
||||
title: `${payload.query} News Article ${i}`,
|
||||
publisher: 'Financial News',
|
||||
providerPublishTime: Date.now() - i * 3600000,
|
||||
type: 'STORY',
|
||||
source: 'yahoo-finance',
|
||||
}));
|
||||
|
||||
await new Promise(resolve => setTimeout(resolve, 200 + Math.random() * 200));
|
||||
|
||||
return {
|
||||
quotes,
|
||||
news,
|
||||
totalQuotes: quotes.length,
|
||||
totalNews: news.length,
|
||||
source: 'yahoo-finance',
|
||||
};
|
||||
},
|
||||
financials: async (payload: { symbol: string; type?: 'income' | 'balance' | 'cash' }) => {
|
||||
const { getLogger } = await import('@stock-bot/logger');
|
||||
const logger = getLogger('yahoo-provider');
|
||||
|
||||
logger.info('Fetching financials from Yahoo Finance', {
|
||||
symbol: payload.symbol,
|
||||
type: payload.type || 'income',
|
||||
});
|
||||
|
||||
// Generate mock financial data
|
||||
const financials = {
|
||||
symbol: payload.symbol,
|
||||
type: payload.type || 'income',
|
||||
currency: 'USD',
|
||||
annual: Array.from({ length: 4 }, (_, i) => ({
|
||||
fiscalYear: 2024 - i,
|
||||
revenue: Math.floor(Math.random() * 100000000000),
|
||||
netIncome: Math.floor(Math.random() * 10000000000),
|
||||
totalAssets: Math.floor(Math.random() * 500000000000),
|
||||
totalDebt: Math.floor(Math.random() * 50000000000),
|
||||
})),
|
||||
quarterly: Array.from({ length: 4 }, (_, i) => ({
|
||||
fiscalQuarter: `Q${4 - i} 2024`,
|
||||
revenue: Math.floor(Math.random() * 25000000000),
|
||||
netIncome: Math.floor(Math.random() * 2500000000),
|
||||
})),
|
||||
source: 'yahoo-finance',
|
||||
};
|
||||
|
||||
await new Promise(resolve => setTimeout(resolve, 300 + Math.random() * 200));
|
||||
|
||||
return financials;
|
||||
},
|
||||
earnings: async (payload: { symbol: string; period?: 'annual' | 'quarterly' }) => {
|
||||
const { getLogger } = await import('@stock-bot/logger');
|
||||
const logger = getLogger('yahoo-provider');
|
||||
|
||||
logger.info('Fetching earnings from Yahoo Finance', {
|
||||
symbol: payload.symbol,
|
||||
period: payload.period || 'quarterly',
|
||||
});
|
||||
|
||||
// Generate mock earnings data
|
||||
const earnings = {
|
||||
symbol: payload.symbol,
|
||||
period: payload.period || 'quarterly',
|
||||
earnings: Array.from({ length: 8 }, (_, i) => ({
|
||||
quarter: `Q${(i % 4) + 1} ${2024 - Math.floor(i / 4)}`,
|
||||
epsEstimate: Math.random() * 5,
|
||||
epsActual: Math.random() * 5,
|
||||
revenueEstimate: Math.floor(Math.random() * 50000000000),
|
||||
revenueActual: Math.floor(Math.random() * 50000000000),
|
||||
surprise: (Math.random() - 0.5) * 2,
|
||||
})),
|
||||
source: 'yahoo-finance',
|
||||
};
|
||||
|
||||
await new Promise(resolve => setTimeout(resolve, 250 + Math.random() * 150));
|
||||
|
||||
return earnings;
|
||||
},
|
||||
recommendations: async (payload: { symbol: string }) => {
|
||||
const { getLogger } = await import('@stock-bot/logger');
|
||||
const logger = getLogger('yahoo-provider');
|
||||
|
||||
logger.info('Fetching recommendations from Yahoo Finance', { symbol: payload.symbol });
|
||||
|
||||
// Generate mock recommendations
|
||||
const recommendations = {
|
||||
symbol: payload.symbol,
|
||||
current: {
|
||||
strongBuy: Math.floor(Math.random() * 10),
|
||||
buy: Math.floor(Math.random() * 15),
|
||||
hold: Math.floor(Math.random() * 20),
|
||||
sell: Math.floor(Math.random() * 5),
|
||||
strongSell: Math.floor(Math.random() * 3),
|
||||
},
|
||||
trend: Array.from({ length: 4 }, (_, i) => ({
|
||||
period: `${i}m`,
|
||||
strongBuy: Math.floor(Math.random() * 10),
|
||||
buy: Math.floor(Math.random() * 15),
|
||||
hold: Math.floor(Math.random() * 20),
|
||||
sell: Math.floor(Math.random() * 5),
|
||||
strongSell: Math.floor(Math.random() * 3),
|
||||
})),
|
||||
source: 'yahoo-finance',
|
||||
};
|
||||
|
||||
await new Promise(resolve => setTimeout(resolve, 180 + Math.random() * 120));
|
||||
return recommendations;
|
||||
},
|
||||
},
|
||||
|
||||
scheduledJobs: [
|
||||
// {
|
||||
// type: 'yahoo-market-refresh',
|
||||
// operation: 'live-data',
|
||||
// payload: { symbol: 'AAPL' },
|
||||
// cronPattern: '*/1 * * * *', // Every minute
|
||||
// priority: 8,
|
||||
// description: 'Refresh Apple stock price from Yahoo Finance'
|
||||
// },
|
||||
// {
|
||||
// type: 'yahoo-sp500-update',
|
||||
// operation: 'live-data',
|
||||
// payload: { symbol: 'SPY' },
|
||||
// cronPattern: '*/2 * * * *', // Every 2 minutes
|
||||
// priority: 9,
|
||||
// description: 'Update S&P 500 ETF price'
|
||||
// },
|
||||
// {
|
||||
// type: 'yahoo-earnings-check',
|
||||
// operation: 'earnings',
|
||||
// payload: { symbol: 'AAPL' },
|
||||
// cronPattern: '0 16 * * 1-5', // Weekdays at 4 PM (market close)
|
||||
// priority: 6,
|
||||
// description: 'Check earnings data for Apple'
|
||||
// }
|
||||
],
|
||||
};
|
||||
|
|
|
|||
|
|
@ -7,14 +7,14 @@ import { queueManager } from '../services/queue.service';
|
|||
export const healthRoutes = new Hono();
|
||||
|
||||
// Health check endpoint
|
||||
healthRoutes.get('/health', (c) => {
|
||||
return c.json({
|
||||
service: 'data-service',
|
||||
healthRoutes.get('/health', c => {
|
||||
return c.json({
|
||||
service: 'data-service',
|
||||
status: 'healthy',
|
||||
timestamp: new Date().toISOString(),
|
||||
queue: {
|
||||
status: 'running',
|
||||
workers: queueManager.getWorkerCount()
|
||||
}
|
||||
workers: queueManager.getWorkerCount(),
|
||||
},
|
||||
});
|
||||
});
|
||||
|
|
|
|||
|
|
@ -10,10 +10,10 @@ const logger = getLogger('market-data-routes');
|
|||
export const marketDataRoutes = new Hono();
|
||||
|
||||
// Market data endpoints
|
||||
marketDataRoutes.get('/api/live/:symbol', async (c) => {
|
||||
marketDataRoutes.get('/api/live/:symbol', async c => {
|
||||
const symbol = c.req.param('symbol');
|
||||
logger.info('Live data request', { symbol });
|
||||
|
||||
|
||||
try {
|
||||
// Queue job for live data using Yahoo provider
|
||||
const job = await queueManager.addJob({
|
||||
|
|
@ -21,13 +21,13 @@ marketDataRoutes.get('/api/live/:symbol', async (c) => {
|
|||
service: 'market-data',
|
||||
provider: 'yahoo-finance',
|
||||
operation: 'live-data',
|
||||
payload: { symbol }
|
||||
payload: { symbol },
|
||||
});
|
||||
return c.json({
|
||||
status: 'success',
|
||||
return c.json({
|
||||
status: 'success',
|
||||
message: 'Live data job queued',
|
||||
jobId: job.id,
|
||||
symbol
|
||||
symbol,
|
||||
});
|
||||
} catch (error) {
|
||||
logger.error('Failed to queue live data job', { symbol, error });
|
||||
|
|
@ -35,37 +35,37 @@ marketDataRoutes.get('/api/live/:symbol', async (c) => {
|
|||
}
|
||||
});
|
||||
|
||||
marketDataRoutes.get('/api/historical/:symbol', async (c) => {
|
||||
marketDataRoutes.get('/api/historical/:symbol', async c => {
|
||||
const symbol = c.req.param('symbol');
|
||||
const from = c.req.query('from');
|
||||
const to = c.req.query('to');
|
||||
|
||||
|
||||
logger.info('Historical data request', { symbol, from, to });
|
||||
|
||||
|
||||
try {
|
||||
const fromDate = from ? new Date(from) : new Date(Date.now() - 30 * 24 * 60 * 60 * 1000); // 30 days ago
|
||||
const toDate = to ? new Date(to) : new Date(); // Now
|
||||
|
||||
|
||||
// Queue job for historical data using Yahoo provider
|
||||
const job = await queueManager.addJob({
|
||||
type: 'market-data-historical',
|
||||
service: 'market-data',
|
||||
provider: 'yahoo-finance',
|
||||
operation: 'historical-data',
|
||||
payload: {
|
||||
symbol,
|
||||
from: fromDate.toISOString(),
|
||||
to: toDate.toISOString()
|
||||
}
|
||||
payload: {
|
||||
symbol,
|
||||
from: fromDate.toISOString(),
|
||||
to: toDate.toISOString(),
|
||||
},
|
||||
});
|
||||
|
||||
return c.json({
|
||||
status: 'success',
|
||||
|
||||
return c.json({
|
||||
status: 'success',
|
||||
message: 'Historical data job queued',
|
||||
jobId: job.id,
|
||||
symbol,
|
||||
from: fromDate,
|
||||
to: toDate
|
||||
symbol,
|
||||
from: fromDate,
|
||||
to: toDate,
|
||||
});
|
||||
} catch (error) {
|
||||
logger.error('Failed to queue historical data job', { symbol, from, to, error });
|
||||
|
|
|
|||
|
|
@ -10,20 +10,20 @@ const logger = getLogger('proxy-routes');
|
|||
export const proxyRoutes = new Hono();
|
||||
|
||||
// Proxy management endpoints
|
||||
proxyRoutes.post('/api/proxy/fetch', async (c) => {
|
||||
proxyRoutes.post('/api/proxy/fetch', async c => {
|
||||
try {
|
||||
const job = await queueManager.addJob({
|
||||
type: 'proxy-fetch',
|
||||
provider: 'proxy-provider',
|
||||
operation: 'fetch-and-check',
|
||||
payload: {},
|
||||
priority: 5
|
||||
priority: 5,
|
||||
});
|
||||
|
||||
return c.json({
|
||||
status: 'success',
|
||||
jobId: job.id,
|
||||
message: 'Proxy fetch job queued'
|
||||
|
||||
return c.json({
|
||||
status: 'success',
|
||||
jobId: job.id,
|
||||
message: 'Proxy fetch job queued',
|
||||
});
|
||||
} catch (error) {
|
||||
logger.error('Failed to queue proxy fetch', { error });
|
||||
|
|
@ -31,7 +31,7 @@ proxyRoutes.post('/api/proxy/fetch', async (c) => {
|
|||
}
|
||||
});
|
||||
|
||||
proxyRoutes.post('/api/proxy/check', async (c) => {
|
||||
proxyRoutes.post('/api/proxy/check', async c => {
|
||||
try {
|
||||
const { proxies } = await c.req.json();
|
||||
const job = await queueManager.addJob({
|
||||
|
|
@ -39,13 +39,13 @@ proxyRoutes.post('/api/proxy/check', async (c) => {
|
|||
provider: 'proxy-provider',
|
||||
operation: 'check-specific',
|
||||
payload: { proxies },
|
||||
priority: 8
|
||||
priority: 8,
|
||||
});
|
||||
|
||||
return c.json({
|
||||
status: 'success',
|
||||
jobId: job.id,
|
||||
message: `Proxy check job queued for ${proxies.length} proxies`
|
||||
|
||||
return c.json({
|
||||
status: 'success',
|
||||
jobId: job.id,
|
||||
message: `Proxy check job queued for ${proxies.length} proxies`,
|
||||
});
|
||||
} catch (error) {
|
||||
logger.error('Failed to queue proxy check', { error });
|
||||
|
|
@ -54,20 +54,20 @@ proxyRoutes.post('/api/proxy/check', async (c) => {
|
|||
});
|
||||
|
||||
// Get proxy stats via queue
|
||||
proxyRoutes.get('/api/proxy/stats', async (c) => {
|
||||
proxyRoutes.get('/api/proxy/stats', async c => {
|
||||
try {
|
||||
const job = await queueManager.addJob({
|
||||
type: 'proxy-stats',
|
||||
provider: 'proxy-provider',
|
||||
operation: 'get-stats',
|
||||
payload: {},
|
||||
priority: 3
|
||||
priority: 3,
|
||||
});
|
||||
|
||||
return c.json({
|
||||
status: 'success',
|
||||
jobId: job.id,
|
||||
message: 'Proxy stats job queued'
|
||||
|
||||
return c.json({
|
||||
status: 'success',
|
||||
jobId: job.id,
|
||||
message: 'Proxy stats job queued',
|
||||
});
|
||||
} catch (error) {
|
||||
logger.error('Failed to queue proxy stats', { error });
|
||||
|
|
|
|||
|
|
@ -10,7 +10,7 @@ const logger = getLogger('queue-routes');
|
|||
export const queueRoutes = new Hono();
|
||||
|
||||
// Queue management endpoints
|
||||
queueRoutes.get('/api/queue/status', async (c) => {
|
||||
queueRoutes.get('/api/queue/status', async c => {
|
||||
try {
|
||||
const status = await queueManager.getQueueStatus();
|
||||
return c.json({ status: 'success', data: status });
|
||||
|
|
@ -20,7 +20,7 @@ queueRoutes.get('/api/queue/status', async (c) => {
|
|||
}
|
||||
});
|
||||
|
||||
queueRoutes.post('/api/queue/job', async (c) => {
|
||||
queueRoutes.post('/api/queue/job', async c => {
|
||||
try {
|
||||
const jobData = await c.req.json();
|
||||
const job = await queueManager.addJob(jobData);
|
||||
|
|
@ -32,7 +32,7 @@ queueRoutes.post('/api/queue/job', async (c) => {
|
|||
});
|
||||
|
||||
// Provider registry endpoints
|
||||
queueRoutes.get('/api/providers', async (c) => {
|
||||
queueRoutes.get('/api/providers', async c => {
|
||||
try {
|
||||
const { providerRegistry } = await import('../services/provider-registry.service');
|
||||
const providers = providerRegistry.getProviders();
|
||||
|
|
@ -44,14 +44,14 @@ queueRoutes.get('/api/providers', async (c) => {
|
|||
});
|
||||
|
||||
// Add new endpoint to see scheduled jobs
|
||||
queueRoutes.get('/api/scheduled-jobs', async (c) => {
|
||||
queueRoutes.get('/api/scheduled-jobs', async c => {
|
||||
try {
|
||||
const { providerRegistry } = await import('../services/provider-registry.service');
|
||||
const jobs = providerRegistry.getAllScheduledJobs();
|
||||
return c.json({
|
||||
status: 'success',
|
||||
return c.json({
|
||||
status: 'success',
|
||||
count: jobs.length,
|
||||
jobs
|
||||
jobs,
|
||||
});
|
||||
} catch (error) {
|
||||
logger.error('Failed to get scheduled jobs info', { error });
|
||||
|
|
@ -59,7 +59,7 @@ queueRoutes.get('/api/scheduled-jobs', async (c) => {
|
|||
}
|
||||
});
|
||||
|
||||
queueRoutes.post('/api/queue/drain', async (c) => {
|
||||
queueRoutes.post('/api/queue/drain', async c => {
|
||||
try {
|
||||
await queueManager.drainQueue();
|
||||
const status = await queueManager.getQueueStatus();
|
||||
|
|
|
|||
|
|
@ -10,21 +10,21 @@ const logger = getLogger('test-routes');
|
|||
export const testRoutes = new Hono();
|
||||
|
||||
// Test endpoint for new functional batch processing
|
||||
testRoutes.post('/api/test/batch-symbols', async (c) => {
|
||||
testRoutes.post('/api/test/batch-symbols', async c => {
|
||||
try {
|
||||
const { symbols, useBatching = false, totalDelayHours = 1 } = await c.req.json();
|
||||
const { processItems } = await import('../utils/batch-helpers');
|
||||
|
||||
|
||||
if (!symbols || !Array.isArray(symbols)) {
|
||||
return c.json({ status: 'error', message: 'symbols array is required' }, 400);
|
||||
}
|
||||
|
||||
const result = await processItems(
|
||||
symbols,
|
||||
(symbol, index) => ({
|
||||
symbol,
|
||||
(symbol, index) => ({
|
||||
symbol,
|
||||
index,
|
||||
timestamp: new Date().toISOString()
|
||||
timestamp: new Date().toISOString(),
|
||||
}),
|
||||
queueManager,
|
||||
{
|
||||
|
|
@ -33,14 +33,14 @@ testRoutes.post('/api/test/batch-symbols', async (c) => {
|
|||
batchSize: 10,
|
||||
priority: 1,
|
||||
provider: 'test-provider',
|
||||
operation: 'live-data'
|
||||
operation: 'live-data',
|
||||
}
|
||||
);
|
||||
|
||||
return c.json({
|
||||
status: 'success',
|
||||
return c.json({
|
||||
status: 'success',
|
||||
message: 'Batch processing started',
|
||||
result
|
||||
result,
|
||||
});
|
||||
} catch (error) {
|
||||
logger.error('Failed to start batch symbol processing', { error });
|
||||
|
|
@ -48,21 +48,21 @@ testRoutes.post('/api/test/batch-symbols', async (c) => {
|
|||
}
|
||||
});
|
||||
|
||||
testRoutes.post('/api/test/batch-custom', async (c) => {
|
||||
testRoutes.post('/api/test/batch-custom', async c => {
|
||||
try {
|
||||
const { items, useBatching = false, totalDelayHours = 0.5 } = await c.req.json();
|
||||
const { processItems } = await import('../utils/batch-helpers');
|
||||
|
||||
|
||||
if (!items || !Array.isArray(items)) {
|
||||
return c.json({ status: 'error', message: 'items array is required' }, 400);
|
||||
}
|
||||
|
||||
const result = await processItems(
|
||||
items,
|
||||
(item, index) => ({
|
||||
originalItem: item,
|
||||
(item, index) => ({
|
||||
originalItem: item,
|
||||
processIndex: index,
|
||||
timestamp: new Date().toISOString()
|
||||
timestamp: new Date().toISOString(),
|
||||
}),
|
||||
queueManager,
|
||||
{
|
||||
|
|
@ -71,14 +71,14 @@ testRoutes.post('/api/test/batch-custom', async (c) => {
|
|||
batchSize: 5,
|
||||
priority: 1,
|
||||
provider: 'test-provider',
|
||||
operation: 'custom-test'
|
||||
operation: 'custom-test',
|
||||
}
|
||||
);
|
||||
|
||||
return c.json({
|
||||
status: 'success',
|
||||
return c.json({
|
||||
status: 'success',
|
||||
message: 'Custom batch processing started',
|
||||
result
|
||||
result,
|
||||
});
|
||||
} catch (error) {
|
||||
logger.error('Failed to start custom batch processing', { error });
|
||||
|
|
|
|||
|
|
@ -1,135 +1,135 @@
|
|||
import { getLogger } from '@stock-bot/logger';
|
||||
|
||||
export interface JobHandler {
|
||||
(payload: any): Promise<any>;
|
||||
}
|
||||
|
||||
export interface JobData {
|
||||
type?: string;
|
||||
provider: string;
|
||||
operation: string;
|
||||
payload: any;
|
||||
priority?: number;
|
||||
immediately?: boolean;
|
||||
}
|
||||
|
||||
export interface ScheduledJob {
|
||||
type: string;
|
||||
operation: string;
|
||||
payload: any;
|
||||
cronPattern: string;
|
||||
priority?: number;
|
||||
description?: string;
|
||||
immediately?: boolean;
|
||||
}
|
||||
|
||||
export interface ProviderConfig {
|
||||
name: string;
|
||||
operations: Record<string, JobHandler>;
|
||||
scheduledJobs?: ScheduledJob[];
|
||||
}
|
||||
|
||||
export interface ProviderRegistry {
|
||||
registerProvider: (config: ProviderConfig) => void;
|
||||
getHandler: (provider: string, operation: string) => JobHandler | null;
|
||||
getAllScheduledJobs: () => Array<{ provider: string; job: ScheduledJob }>;
|
||||
getProviders: () => Array<{ key: string; config: ProviderConfig }>;
|
||||
hasProvider: (provider: string) => boolean;
|
||||
clear: () => void;
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a new provider registry instance
|
||||
*/
|
||||
export function createProviderRegistry(): ProviderRegistry {
|
||||
const logger = getLogger('provider-registry');
|
||||
const providers = new Map<string, ProviderConfig>();
|
||||
|
||||
/**
|
||||
* Register a provider with its operations
|
||||
*/
|
||||
function registerProvider(config: ProviderConfig): void {
|
||||
providers.set(config.name, config);
|
||||
logger.info(`Registered provider: ${config.name}`, {
|
||||
operations: Object.keys(config.operations),
|
||||
scheduledJobs: config.scheduledJobs?.length || 0
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Get a job handler for a specific provider and operation
|
||||
*/
|
||||
function getHandler(provider: string, operation: string): JobHandler | null {
|
||||
const providerConfig = providers.get(provider);
|
||||
|
||||
if (!providerConfig) {
|
||||
logger.warn(`Provider not found: ${provider}`);
|
||||
return null;
|
||||
}
|
||||
|
||||
const handler = providerConfig.operations[operation];
|
||||
if (!handler) {
|
||||
logger.warn(`Operation not found: ${operation} in provider ${provider}`);
|
||||
return null;
|
||||
}
|
||||
|
||||
return handler;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get all scheduled jobs from all providers
|
||||
*/
|
||||
function getAllScheduledJobs(): Array<{ provider: string; job: ScheduledJob }> {
|
||||
const allJobs: Array<{ provider: string; job: ScheduledJob }> = [];
|
||||
|
||||
for (const [key, config] of providers) {
|
||||
if (config.scheduledJobs) {
|
||||
for (const job of config.scheduledJobs) {
|
||||
allJobs.push({
|
||||
provider: config.name,
|
||||
job
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return allJobs;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get all registered providers with their configurations
|
||||
*/
|
||||
function getProviders(): Array<{ key: string; config: ProviderConfig }> {
|
||||
return Array.from(providers.entries()).map(([key, config]) => ({
|
||||
key,
|
||||
config
|
||||
}));
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if a provider exists
|
||||
*/
|
||||
function hasProvider(provider: string): boolean {
|
||||
return providers.has(provider);
|
||||
}
|
||||
|
||||
/**
|
||||
* Clear all providers (useful for testing)
|
||||
*/
|
||||
function clear(): void {
|
||||
providers.clear();
|
||||
logger.info('All providers cleared');
|
||||
}
|
||||
|
||||
return {
|
||||
registerProvider,
|
||||
getHandler,
|
||||
getAllScheduledJobs,
|
||||
getProviders,
|
||||
hasProvider,
|
||||
clear
|
||||
};
|
||||
}
|
||||
|
||||
// Create the default shared registry instance
|
||||
export const providerRegistry = createProviderRegistry();
|
||||
import { getLogger } from '@stock-bot/logger';
|
||||
|
||||
export interface JobHandler {
|
||||
(payload: any): Promise<any>;
|
||||
}
|
||||
|
||||
export interface JobData {
|
||||
type?: string;
|
||||
provider: string;
|
||||
operation: string;
|
||||
payload: any;
|
||||
priority?: number;
|
||||
immediately?: boolean;
|
||||
}
|
||||
|
||||
export interface ScheduledJob {
|
||||
type: string;
|
||||
operation: string;
|
||||
payload: any;
|
||||
cronPattern: string;
|
||||
priority?: number;
|
||||
description?: string;
|
||||
immediately?: boolean;
|
||||
}
|
||||
|
||||
export interface ProviderConfig {
|
||||
name: string;
|
||||
operations: Record<string, JobHandler>;
|
||||
scheduledJobs?: ScheduledJob[];
|
||||
}
|
||||
|
||||
export interface ProviderRegistry {
|
||||
registerProvider: (config: ProviderConfig) => void;
|
||||
getHandler: (provider: string, operation: string) => JobHandler | null;
|
||||
getAllScheduledJobs: () => Array<{ provider: string; job: ScheduledJob }>;
|
||||
getProviders: () => Array<{ key: string; config: ProviderConfig }>;
|
||||
hasProvider: (provider: string) => boolean;
|
||||
clear: () => void;
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a new provider registry instance
|
||||
*/
|
||||
export function createProviderRegistry(): ProviderRegistry {
|
||||
const logger = getLogger('provider-registry');
|
||||
const providers = new Map<string, ProviderConfig>();
|
||||
|
||||
/**
|
||||
* Register a provider with its operations
|
||||
*/
|
||||
function registerProvider(config: ProviderConfig): void {
|
||||
providers.set(config.name, config);
|
||||
logger.info(`Registered provider: ${config.name}`, {
|
||||
operations: Object.keys(config.operations),
|
||||
scheduledJobs: config.scheduledJobs?.length || 0,
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Get a job handler for a specific provider and operation
|
||||
*/
|
||||
function getHandler(provider: string, operation: string): JobHandler | null {
|
||||
const providerConfig = providers.get(provider);
|
||||
|
||||
if (!providerConfig) {
|
||||
logger.warn(`Provider not found: ${provider}`);
|
||||
return null;
|
||||
}
|
||||
|
||||
const handler = providerConfig.operations[operation];
|
||||
if (!handler) {
|
||||
logger.warn(`Operation not found: ${operation} in provider ${provider}`);
|
||||
return null;
|
||||
}
|
||||
|
||||
return handler;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get all scheduled jobs from all providers
|
||||
*/
|
||||
function getAllScheduledJobs(): Array<{ provider: string; job: ScheduledJob }> {
|
||||
const allJobs: Array<{ provider: string; job: ScheduledJob }> = [];
|
||||
|
||||
for (const [key, config] of providers) {
|
||||
if (config.scheduledJobs) {
|
||||
for (const job of config.scheduledJobs) {
|
||||
allJobs.push({
|
||||
provider: config.name,
|
||||
job,
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return allJobs;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get all registered providers with their configurations
|
||||
*/
|
||||
function getProviders(): Array<{ key: string; config: ProviderConfig }> {
|
||||
return Array.from(providers.entries()).map(([key, config]) => ({
|
||||
key,
|
||||
config,
|
||||
}));
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if a provider exists
|
||||
*/
|
||||
function hasProvider(provider: string): boolean {
|
||||
return providers.has(provider);
|
||||
}
|
||||
|
||||
/**
|
||||
* Clear all providers (useful for testing)
|
||||
*/
|
||||
function clear(): void {
|
||||
providers.clear();
|
||||
logger.info('All providers cleared');
|
||||
}
|
||||
|
||||
return {
|
||||
registerProvider,
|
||||
getHandler,
|
||||
getAllScheduledJobs,
|
||||
getProviders,
|
||||
hasProvider,
|
||||
clear,
|
||||
};
|
||||
}
|
||||
|
||||
// Create the default shared registry instance
|
||||
export const providerRegistry = createProviderRegistry();
|
||||
|
|
|
|||
|
|
@ -1,380 +1,416 @@
|
|||
import { Queue, Worker, QueueEvents, type Job } from 'bullmq';
|
||||
import { getLogger } from '@stock-bot/logger';
|
||||
import { providerRegistry, type JobData } from './provider-registry.service';
|
||||
|
||||
export class QueueService {
|
||||
private logger = getLogger('queue-service');
|
||||
private queue!: Queue;
|
||||
private workers: Worker[] = [];
|
||||
private queueEvents!: QueueEvents;
|
||||
|
||||
private config = {
|
||||
workers: parseInt(process.env.WORKER_COUNT || '5'),
|
||||
concurrency: parseInt(process.env.WORKER_CONCURRENCY || '20'),
|
||||
redis: {
|
||||
host: process.env.DRAGONFLY_HOST || 'localhost',
|
||||
port: parseInt(process.env.DRAGONFLY_PORT || '6379')
|
||||
}
|
||||
};
|
||||
|
||||
private get isInitialized() {
|
||||
return !!this.queue;
|
||||
}
|
||||
|
||||
constructor() {
|
||||
// Don't initialize in constructor to allow for proper async initialization
|
||||
} async initialize() {
|
||||
if (this.isInitialized) {
|
||||
this.logger.warn('Queue service already initialized');
|
||||
return;
|
||||
}
|
||||
|
||||
this.logger.info('Initializing queue service...');
|
||||
|
||||
try {
|
||||
// Step 1: Register providers
|
||||
await this.registerProviders();
|
||||
|
||||
// Step 2: Setup queue and workers
|
||||
const connection = this.getConnection();
|
||||
const queueName = '{data-service-queue}';
|
||||
|
||||
this.queue = new Queue(queueName, {
|
||||
connection,
|
||||
defaultJobOptions: {
|
||||
removeOnComplete: 10,
|
||||
removeOnFail: 5,
|
||||
attempts: 3,
|
||||
backoff: { type: 'exponential', delay: 1000 }
|
||||
}
|
||||
});
|
||||
|
||||
this.queueEvents = new QueueEvents(queueName, { connection });
|
||||
|
||||
// Step 3: Create workers
|
||||
const { workerCount, totalConcurrency } = this.createWorkers(queueName, connection);
|
||||
|
||||
// Step 4: Wait for readiness (parallel)
|
||||
await Promise.all([
|
||||
this.queue.waitUntilReady(),
|
||||
this.queueEvents.waitUntilReady(),
|
||||
...this.workers.map(worker => worker.waitUntilReady())
|
||||
]);
|
||||
|
||||
// Step 5: Setup events and scheduled tasks
|
||||
this.setupQueueEvents();
|
||||
await this.setupScheduledTasks();
|
||||
|
||||
this.logger.info('Queue service initialized successfully', {
|
||||
workers: workerCount,
|
||||
totalConcurrency
|
||||
});
|
||||
|
||||
} catch (error) {
|
||||
this.logger.error('Failed to initialize queue service', { error });
|
||||
throw error;
|
||||
}
|
||||
} private getConnection() {
|
||||
return {
|
||||
...this.config.redis,
|
||||
maxRetriesPerRequest: null,
|
||||
retryDelayOnFailover: 100,
|
||||
lazyConnect: false
|
||||
};
|
||||
}
|
||||
|
||||
private createWorkers(queueName: string, connection: any) {
|
||||
for (let i = 0; i < this.config.workers; i++) {
|
||||
const worker = new Worker(queueName, this.processJob.bind(this), {
|
||||
connection: { ...connection },
|
||||
concurrency: this.config.concurrency,
|
||||
maxStalledCount: 1,
|
||||
stalledInterval: 30000,
|
||||
});
|
||||
|
||||
// Setup events inline
|
||||
worker.on('ready', () => this.logger.info(`Worker ${i + 1} ready`));
|
||||
worker.on('error', (error) => this.logger.error(`Worker ${i + 1} error`, { error }));
|
||||
|
||||
this.workers.push(worker);
|
||||
}
|
||||
|
||||
return {
|
||||
workerCount: this.config.workers,
|
||||
totalConcurrency: this.config.workers * this.config.concurrency
|
||||
};
|
||||
} private setupQueueEvents() {
|
||||
// Only log failures, not every completion
|
||||
this.queueEvents.on('failed', (job, error) => {
|
||||
this.logger.error('Job failed', {
|
||||
id: job.jobId,
|
||||
error: String(error)
|
||||
});
|
||||
});
|
||||
|
||||
// Only log completions in debug mode
|
||||
if (process.env.LOG_LEVEL === 'debug') {
|
||||
this.queueEvents.on('completed', (job) => {
|
||||
this.logger.debug('Job completed', { id: job.jobId });
|
||||
});
|
||||
}
|
||||
}private async registerProviders() {
|
||||
this.logger.info('Registering providers...');
|
||||
|
||||
try {
|
||||
// Define providers to register
|
||||
const providers = [
|
||||
{ module: '../providers/proxy.provider', export: 'proxyProvider' },
|
||||
{ module: '../providers/quotemedia.provider', export: 'quotemediaProvider' },
|
||||
{ module: '../providers/yahoo.provider', export: 'yahooProvider' }
|
||||
];
|
||||
|
||||
// Import and register all providers
|
||||
for (const { module, export: exportName } of providers) {
|
||||
const providerModule = await import(module);
|
||||
providerRegistry.registerProvider(providerModule[exportName]);
|
||||
}
|
||||
|
||||
this.logger.info('All providers registered successfully');
|
||||
} catch (error) {
|
||||
this.logger.error('Failed to register providers', { error });
|
||||
throw error;
|
||||
}
|
||||
}private async processJob(job: Job) {
|
||||
const { provider, operation, payload }: JobData = job.data;
|
||||
|
||||
this.logger.info('Processing job', {
|
||||
id: job.id,
|
||||
provider,
|
||||
operation,
|
||||
payloadKeys: Object.keys(payload || {})
|
||||
}); try {
|
||||
let result;
|
||||
|
||||
if (operation === 'process-batch-items') {
|
||||
// Special handling for batch processing - requires 2 parameters
|
||||
const { processBatchJob } = await import('../utils/batch-helpers');
|
||||
result = await processBatchJob(payload, this);
|
||||
} else {
|
||||
// Regular handler lookup - requires 1 parameter
|
||||
const handler = providerRegistry.getHandler(provider, operation);
|
||||
|
||||
if (!handler) {
|
||||
throw new Error(`No handler found for ${provider}:${operation}`);
|
||||
}
|
||||
|
||||
result = await handler(payload);
|
||||
}
|
||||
|
||||
this.logger.info('Job completed successfully', {
|
||||
id: job.id,
|
||||
provider,
|
||||
operation
|
||||
});
|
||||
|
||||
return result;
|
||||
|
||||
} catch (error) {
|
||||
const errorMessage = error instanceof Error ? error.message : String(error);
|
||||
this.logger.error('Job failed', {
|
||||
id: job.id,
|
||||
provider,
|
||||
operation,
|
||||
error: errorMessage
|
||||
});
|
||||
throw error;
|
||||
}
|
||||
} async addBulk(jobs: any[]): Promise<any[]> {
|
||||
return await this.queue.addBulk(jobs);
|
||||
}
|
||||
|
||||
private getTotalConcurrency() {
|
||||
return this.workers.reduce((total, worker) => total + (worker.opts.concurrency || 1), 0);
|
||||
}
|
||||
private async setupScheduledTasks() {
|
||||
const allScheduledJobs = providerRegistry.getAllScheduledJobs();
|
||||
|
||||
if (allScheduledJobs.length === 0) {
|
||||
this.logger.warn('No scheduled jobs found in providers');
|
||||
return;
|
||||
}
|
||||
|
||||
this.logger.info('Setting up scheduled tasks...', { count: allScheduledJobs.length });
|
||||
|
||||
// Use Promise.allSettled for parallel processing + better error handling
|
||||
const results = await Promise.allSettled(
|
||||
allScheduledJobs.map(async ({ provider, job }) => {
|
||||
await this.addRecurringJob({
|
||||
type: job.type,
|
||||
provider,
|
||||
operation: job.operation,
|
||||
payload: job.payload,
|
||||
priority: job.priority,
|
||||
immediately: job.immediately || false
|
||||
}, job.cronPattern);
|
||||
|
||||
return { provider, operation: job.operation };
|
||||
})
|
||||
);
|
||||
|
||||
// Log results
|
||||
const successful = results.filter(r => r.status === 'fulfilled');
|
||||
const failed = results.filter(r => r.status === 'rejected');
|
||||
|
||||
if (failed.length > 0) {
|
||||
failed.forEach((result, index) => {
|
||||
const { provider, job } = allScheduledJobs[index];
|
||||
this.logger.error('Failed to register scheduled job', {
|
||||
provider,
|
||||
operation: job.operation,
|
||||
error: result.reason
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
this.logger.info('Scheduled tasks setup complete', {
|
||||
successful: successful.length,
|
||||
failed: failed.length
|
||||
});
|
||||
} private async addJobInternal(jobData: JobData, options: any = {}) {
|
||||
if (!this.isInitialized) {
|
||||
throw new Error('Queue service not initialized');
|
||||
}
|
||||
|
||||
const jobType = jobData.type || `${jobData.provider}-${jobData.operation}`;
|
||||
return this.queue.add(jobType, jobData, {
|
||||
priority: jobData.priority || 0,
|
||||
removeOnComplete: 10,
|
||||
removeOnFail: 5,
|
||||
...options
|
||||
});
|
||||
}
|
||||
|
||||
async addJob(jobData: JobData, options?: any) {
|
||||
return this.addJobInternal(jobData, options);
|
||||
} async addRecurringJob(jobData: JobData, cronPattern: string, options?: any) {
|
||||
const jobKey = `recurring-${jobData.provider}-${jobData.operation}`;
|
||||
|
||||
return this.addJobInternal(jobData, {
|
||||
repeat: {
|
||||
pattern: cronPattern,
|
||||
tz: 'UTC',
|
||||
immediately: jobData.immediately || false,
|
||||
},
|
||||
jobId: jobKey,
|
||||
removeOnComplete: 1,
|
||||
removeOnFail: 1,
|
||||
attempts: 2,
|
||||
backoff: {
|
||||
type: 'fixed',
|
||||
delay: 5000
|
||||
},
|
||||
...options
|
||||
});
|
||||
}
|
||||
async getJobStats() {
|
||||
if (!this.isInitialized) {
|
||||
throw new Error('Queue service not initialized. Call initialize() first.');
|
||||
}
|
||||
const [waiting, active, completed, failed, delayed] = await Promise.all([
|
||||
this.queue.getWaiting(),
|
||||
this.queue.getActive(),
|
||||
this.queue.getCompleted(),
|
||||
this.queue.getFailed(),
|
||||
this.queue.getDelayed()
|
||||
]);
|
||||
|
||||
return {
|
||||
waiting: waiting.length,
|
||||
active: active.length,
|
||||
completed: completed.length,
|
||||
failed: failed.length,
|
||||
delayed: delayed.length
|
||||
};
|
||||
}
|
||||
async drainQueue() {
|
||||
if (this.isInitialized) {
|
||||
await this.queue.drain();
|
||||
}
|
||||
}
|
||||
async getQueueStatus() {
|
||||
if (!this.isInitialized) {
|
||||
throw new Error('Queue service not initialized');
|
||||
}
|
||||
|
||||
const stats = await this.getJobStats();
|
||||
return {
|
||||
...stats,
|
||||
workers: this.workers.length,
|
||||
concurrency: this.getTotalConcurrency()
|
||||
};
|
||||
}
|
||||
async shutdown() {
|
||||
if (!this.isInitialized) {
|
||||
this.logger.warn('Queue service not initialized, nothing to shutdown');
|
||||
return;
|
||||
}
|
||||
|
||||
this.logger.info('Shutting down queue service gracefully...');
|
||||
|
||||
try {
|
||||
// Step 1: Stop accepting new jobs and wait for current jobs to finish
|
||||
this.logger.debug('Closing workers gracefully...');
|
||||
const workerClosePromises = this.workers.map(async (worker, index) => {
|
||||
this.logger.debug(`Closing worker ${index + 1}/${this.workers.length}`);
|
||||
try {
|
||||
// Wait for current jobs to finish, then close
|
||||
await Promise.race([
|
||||
worker.close(),
|
||||
new Promise((_, reject) =>
|
||||
setTimeout(() => reject(new Error(`Worker ${index + 1} close timeout`)), 5000)
|
||||
)
|
||||
]);
|
||||
this.logger.debug(`Worker ${index + 1} closed successfully`);
|
||||
} catch (error) {
|
||||
this.logger.error(`Failed to close worker ${index + 1}`, { error });
|
||||
// Force close if graceful close fails
|
||||
await worker.close(true);
|
||||
}
|
||||
});
|
||||
|
||||
await Promise.allSettled(workerClosePromises);
|
||||
this.logger.debug('All workers closed');
|
||||
|
||||
// Step 2: Close queue and events with timeout protection
|
||||
this.logger.debug('Closing queue and events...');
|
||||
await Promise.allSettled([
|
||||
Promise.race([
|
||||
this.queue.close(),
|
||||
new Promise((_, reject) =>
|
||||
setTimeout(() => reject(new Error('Queue close timeout')), 3000)
|
||||
)
|
||||
]).catch(error => this.logger.error('Queue close error', { error })),
|
||||
|
||||
Promise.race([
|
||||
this.queueEvents.close(),
|
||||
new Promise((_, reject) =>
|
||||
setTimeout(() => reject(new Error('QueueEvents close timeout')), 3000)
|
||||
)
|
||||
]).catch(error => this.logger.error('QueueEvents close error', { error }))
|
||||
]);
|
||||
|
||||
this.logger.info('Queue service shutdown completed successfully');
|
||||
} catch (error) {
|
||||
this.logger.error('Error during queue service shutdown', { error });
|
||||
// Force close everything as last resort
|
||||
try {
|
||||
await Promise.allSettled([
|
||||
...this.workers.map(worker => worker.close(true)),
|
||||
this.queue.close(),
|
||||
this.queueEvents.close()
|
||||
]);
|
||||
} catch (forceCloseError) {
|
||||
this.logger.error('Force close also failed', { error: forceCloseError });
|
||||
}
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export const queueManager = new QueueService();
|
||||
import { Queue, QueueEvents, Worker, type Job } from 'bullmq';
|
||||
import { getLogger } from '@stock-bot/logger';
|
||||
import { providerRegistry, type JobData } from './provider-registry.service';
|
||||
|
||||
export class QueueService {
|
||||
private logger = getLogger('queue-service');
|
||||
private queue!: Queue;
|
||||
private workers: Worker[] = [];
|
||||
private queueEvents!: QueueEvents;
|
||||
|
||||
private config = {
|
||||
workers: parseInt(process.env.WORKER_COUNT || '5'),
|
||||
concurrency: parseInt(process.env.WORKER_CONCURRENCY || '20'),
|
||||
redis: {
|
||||
host: process.env.DRAGONFLY_HOST || 'localhost',
|
||||
port: parseInt(process.env.DRAGONFLY_PORT || '6379'),
|
||||
},
|
||||
};
|
||||
|
||||
private get isInitialized() {
|
||||
return !!this.queue;
|
||||
}
|
||||
|
||||
constructor() {
|
||||
// Don't initialize in constructor to allow for proper async initialization
|
||||
}
|
||||
async initialize() {
|
||||
if (this.isInitialized) {
|
||||
this.logger.warn('Queue service already initialized');
|
||||
return;
|
||||
}
|
||||
|
||||
this.logger.info('Initializing queue service...');
|
||||
|
||||
try {
|
||||
// Step 1: Register providers
|
||||
await this.registerProviders();
|
||||
|
||||
// Step 2: Setup queue and workers
|
||||
const connection = this.getConnection();
|
||||
const queueName = '{data-service-queue}';
|
||||
this.queue = new Queue(queueName, {
|
||||
connection,
|
||||
defaultJobOptions: {
|
||||
removeOnComplete: 10,
|
||||
removeOnFail: 5,
|
||||
attempts: 3,
|
||||
backoff: { type: 'exponential', delay: 1000 },
|
||||
},
|
||||
});
|
||||
|
||||
this.queueEvents = new QueueEvents(queueName, { connection });
|
||||
|
||||
// Step 3: Create workers
|
||||
const { workerCount, totalConcurrency } = this.createWorkers(queueName, connection);
|
||||
|
||||
// Step 4: Wait for readiness (parallel)
|
||||
await Promise.all([
|
||||
this.queue.waitUntilReady(),
|
||||
this.queueEvents.waitUntilReady(),
|
||||
...this.workers.map(worker => worker.waitUntilReady()),
|
||||
]);
|
||||
|
||||
// Step 5: Setup events and scheduled tasks
|
||||
this.setupQueueEvents();
|
||||
await this.setupScheduledTasks();
|
||||
|
||||
this.logger.info('Queue service initialized successfully', {
|
||||
workers: workerCount,
|
||||
totalConcurrency,
|
||||
});
|
||||
} catch (error) {
|
||||
this.logger.error('Failed to initialize queue service', { error });
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
private getConnection() {
|
||||
return {
|
||||
...this.config.redis,
|
||||
maxRetriesPerRequest: null,
|
||||
retryDelayOnFailover: 100,
|
||||
lazyConnect: false,
|
||||
};
|
||||
}
|
||||
|
||||
private createWorkers(queueName: string, connection: any) {
|
||||
for (let i = 0; i < this.config.workers; i++) {
|
||||
const worker = new Worker(queueName, this.processJob.bind(this), {
|
||||
connection: { ...connection },
|
||||
concurrency: this.config.concurrency,
|
||||
maxStalledCount: 1,
|
||||
stalledInterval: 30000,
|
||||
});
|
||||
|
||||
// Setup events inline
|
||||
worker.on('ready', () => this.logger.info(`Worker ${i + 1} ready`));
|
||||
worker.on('error', error => this.logger.error(`Worker ${i + 1} error`, { error }));
|
||||
|
||||
this.workers.push(worker);
|
||||
}
|
||||
|
||||
return {
|
||||
workerCount: this.config.workers,
|
||||
totalConcurrency: this.config.workers * this.config.concurrency,
|
||||
};
|
||||
}
|
||||
private setupQueueEvents() {
|
||||
// Add comprehensive logging to see job flow
|
||||
this.queueEvents.on('added', job => {
|
||||
this.logger.debug('Job added to queue', {
|
||||
id: job.jobId,
|
||||
});
|
||||
});
|
||||
|
||||
this.queueEvents.on('waiting', job => {
|
||||
this.logger.debug('Job moved to waiting', {
|
||||
id: job.jobId,
|
||||
});
|
||||
});
|
||||
|
||||
this.queueEvents.on('active', job => {
|
||||
this.logger.debug('Job became active', {
|
||||
id: job.jobId,
|
||||
});
|
||||
});
|
||||
|
||||
this.queueEvents.on('delayed', job => {
|
||||
this.logger.debug('Job delayed', {
|
||||
id: job.jobId,
|
||||
delay: job.delay,
|
||||
});
|
||||
});
|
||||
|
||||
this.queueEvents.on('completed', job => {
|
||||
this.logger.debug('Job completed', {
|
||||
id: job.jobId,
|
||||
});
|
||||
});
|
||||
|
||||
this.queueEvents.on('failed', (job, error) => {
|
||||
this.logger.error('Job failed', {
|
||||
id: job.jobId,
|
||||
error: String(error),
|
||||
});
|
||||
});
|
||||
}
|
||||
private async registerProviders() {
|
||||
this.logger.info('Registering providers...');
|
||||
|
||||
try {
|
||||
// Define providers to register
|
||||
const providers = [
|
||||
{ module: '../providers/proxy.provider', export: 'proxyProvider' },
|
||||
{ module: '../providers/quotemedia.provider', export: 'quotemediaProvider' },
|
||||
{ module: '../providers/yahoo.provider', export: 'yahooProvider' },
|
||||
];
|
||||
|
||||
// Import and register all providers
|
||||
for (const { module, export: exportName } of providers) {
|
||||
const providerModule = await import(module);
|
||||
providerRegistry.registerProvider(providerModule[exportName]);
|
||||
}
|
||||
|
||||
this.logger.info('All providers registered successfully');
|
||||
} catch (error) {
|
||||
this.logger.error('Failed to register providers', { error });
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
private async processJob(job: Job) {
|
||||
const { provider, operation, payload }: JobData = job.data;
|
||||
|
||||
this.logger.info('Processing job', {
|
||||
id: job.id,
|
||||
provider,
|
||||
operation,
|
||||
payloadKeys: Object.keys(payload || {}),
|
||||
});
|
||||
try {
|
||||
let result;
|
||||
|
||||
if (operation === 'process-batch-items') {
|
||||
// Special handling for batch processing - requires 2 parameters
|
||||
const { processBatchJob } = await import('../utils/batch-helpers');
|
||||
result = await processBatchJob(payload, this);
|
||||
} else {
|
||||
// Regular handler lookup - requires 1 parameter
|
||||
const handler = providerRegistry.getHandler(provider, operation);
|
||||
|
||||
if (!handler) {
|
||||
throw new Error(`No handler found for ${provider}:${operation}`);
|
||||
}
|
||||
|
||||
result = await handler(payload);
|
||||
}
|
||||
|
||||
this.logger.info('Job completed successfully', {
|
||||
id: job.id,
|
||||
provider,
|
||||
operation,
|
||||
});
|
||||
|
||||
return result;
|
||||
} catch (error) {
|
||||
const errorMessage = error instanceof Error ? error.message : String(error);
|
||||
this.logger.error('Job failed', {
|
||||
id: job.id,
|
||||
provider,
|
||||
operation,
|
||||
error: errorMessage,
|
||||
});
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
async addBulk(jobs: any[]): Promise<any[]> {
|
||||
return await this.queue.addBulk(jobs);
|
||||
}
|
||||
|
||||
private getTotalConcurrency() {
|
||||
return this.workers.reduce((total, worker) => total + (worker.opts.concurrency || 1), 0);
|
||||
}
|
||||
|
||||
private async setupScheduledTasks() {
|
||||
const allScheduledJobs = providerRegistry.getAllScheduledJobs();
|
||||
|
||||
if (allScheduledJobs.length === 0) {
|
||||
this.logger.warn('No scheduled jobs found in providers');
|
||||
return;
|
||||
}
|
||||
|
||||
this.logger.info('Setting up scheduled tasks...', { count: allScheduledJobs.length });
|
||||
|
||||
// Use Promise.allSettled for parallel processing + better error handling
|
||||
const results = await Promise.allSettled(
|
||||
allScheduledJobs.map(async ({ provider, job }) => {
|
||||
await this.addRecurringJob(
|
||||
{
|
||||
type: job.type,
|
||||
provider,
|
||||
operation: job.operation,
|
||||
payload: job.payload,
|
||||
priority: job.priority,
|
||||
immediately: job.immediately || false,
|
||||
},
|
||||
job.cronPattern
|
||||
);
|
||||
|
||||
return { provider, operation: job.operation };
|
||||
})
|
||||
);
|
||||
|
||||
// Log results
|
||||
const successful = results.filter(r => r.status === 'fulfilled');
|
||||
const failed = results.filter(r => r.status === 'rejected');
|
||||
|
||||
if (failed.length > 0) {
|
||||
failed.forEach((result, index) => {
|
||||
const { provider, job } = allScheduledJobs[index];
|
||||
this.logger.error('Failed to register scheduled job', {
|
||||
provider,
|
||||
operation: job.operation,
|
||||
error: result.reason,
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
this.logger.info('Scheduled tasks setup complete', {
|
||||
successful: successful.length,
|
||||
failed: failed.length,
|
||||
});
|
||||
}
|
||||
private async addJobInternal(jobData: JobData, options: any = {}) {
|
||||
if (!this.isInitialized) {
|
||||
throw new Error('Queue service not initialized');
|
||||
}
|
||||
|
||||
const jobType = jobData.type || `${jobData.provider}-${jobData.operation}`;
|
||||
return this.queue.add(jobType, jobData, {
|
||||
priority: jobData.priority || 0,
|
||||
removeOnComplete: 10,
|
||||
removeOnFail: 5,
|
||||
...options,
|
||||
});
|
||||
}
|
||||
|
||||
async addJob(jobData: JobData, options?: any) {
|
||||
return this.addJobInternal(jobData, options);
|
||||
}
|
||||
|
||||
async addRecurringJob(jobData: JobData, cronPattern: string, options?: any) {
|
||||
const jobKey = `recurring-${jobData.provider}-${jobData.operation}`;
|
||||
|
||||
return this.addJobInternal(jobData, {
|
||||
repeat: {
|
||||
pattern: cronPattern,
|
||||
tz: 'UTC',
|
||||
immediately: jobData.immediately || false,
|
||||
},
|
||||
jobId: jobKey,
|
||||
removeOnComplete: 1,
|
||||
removeOnFail: 1,
|
||||
attempts: 2,
|
||||
backoff: {
|
||||
type: 'fixed',
|
||||
delay: 5000,
|
||||
},
|
||||
...options,
|
||||
});
|
||||
}
|
||||
async getJobStats() {
|
||||
if (!this.isInitialized) {
|
||||
throw new Error('Queue service not initialized. Call initialize() first.');
|
||||
}
|
||||
const [waiting, active, completed, failed, delayed] = await Promise.all([
|
||||
this.queue.getWaiting(),
|
||||
this.queue.getActive(),
|
||||
this.queue.getCompleted(),
|
||||
this.queue.getFailed(),
|
||||
this.queue.getDelayed(),
|
||||
]);
|
||||
|
||||
return {
|
||||
waiting: waiting.length,
|
||||
active: active.length,
|
||||
completed: completed.length,
|
||||
failed: failed.length,
|
||||
delayed: delayed.length,
|
||||
};
|
||||
}
|
||||
async drainQueue() {
|
||||
if (this.isInitialized) {
|
||||
await this.queue.drain();
|
||||
}
|
||||
}
|
||||
async getQueueStatus() {
|
||||
if (!this.isInitialized) {
|
||||
throw new Error('Queue service not initialized');
|
||||
}
|
||||
|
||||
const stats = await this.getJobStats();
|
||||
return {
|
||||
...stats,
|
||||
workers: this.workers.length,
|
||||
concurrency: this.getTotalConcurrency(),
|
||||
};
|
||||
}
|
||||
async shutdown() {
|
||||
if (!this.isInitialized) {
|
||||
this.logger.warn('Queue service not initialized, nothing to shutdown');
|
||||
return;
|
||||
}
|
||||
|
||||
this.logger.info('Shutting down queue service gracefully...');
|
||||
|
||||
try {
|
||||
// Step 1: Stop accepting new jobs and wait for current jobs to finish
|
||||
this.logger.debug('Closing workers gracefully...');
|
||||
const workerClosePromises = this.workers.map(async (worker, index) => {
|
||||
this.logger.debug(`Closing worker ${index + 1}/${this.workers.length}`);
|
||||
try {
|
||||
// Wait for current jobs to finish, then close
|
||||
await Promise.race([
|
||||
worker.close(),
|
||||
new Promise((_, reject) =>
|
||||
setTimeout(() => reject(new Error(`Worker ${index + 1} close timeout`)), 5000)
|
||||
),
|
||||
]);
|
||||
this.logger.debug(`Worker ${index + 1} closed successfully`);
|
||||
} catch (error) {
|
||||
this.logger.error(`Failed to close worker ${index + 1}`, { error });
|
||||
// Force close if graceful close fails
|
||||
await worker.close(true);
|
||||
}
|
||||
});
|
||||
|
||||
await Promise.allSettled(workerClosePromises);
|
||||
this.logger.debug('All workers closed');
|
||||
|
||||
// Step 2: Close queue and events with timeout protection
|
||||
this.logger.debug('Closing queue and events...');
|
||||
await Promise.allSettled([
|
||||
Promise.race([
|
||||
this.queue.close(),
|
||||
new Promise((_, reject) =>
|
||||
setTimeout(() => reject(new Error('Queue close timeout')), 3000)
|
||||
),
|
||||
]).catch(error => this.logger.error('Queue close error', { error })),
|
||||
|
||||
Promise.race([
|
||||
this.queueEvents.close(),
|
||||
new Promise((_, reject) =>
|
||||
setTimeout(() => reject(new Error('QueueEvents close timeout')), 3000)
|
||||
),
|
||||
]).catch(error => this.logger.error('QueueEvents close error', { error })),
|
||||
]);
|
||||
|
||||
this.logger.info('Queue service shutdown completed successfully');
|
||||
} catch (error) {
|
||||
this.logger.error('Error during queue service shutdown', { error });
|
||||
// Force close everything as last resort
|
||||
try {
|
||||
await Promise.allSettled([
|
||||
...this.workers.map(worker => worker.close(true)),
|
||||
this.queue.close(),
|
||||
this.queueEvents.close(),
|
||||
]);
|
||||
} catch (forceCloseError) {
|
||||
this.logger.error('Force close also failed', { error: forceCloseError });
|
||||
}
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export const queueManager = new QueueService();
|
||||
|
|
|
|||
|
|
@ -1,5 +1,5 @@
|
|||
import { CacheProvider, createCache } from '@stock-bot/cache';
|
||||
import { getLogger } from '@stock-bot/logger';
|
||||
import { createCache, CacheProvider } from '@stock-bot/cache';
|
||||
import type { QueueService } from '../services/queue.service';
|
||||
|
||||
const logger = getLogger('batch-helpers');
|
||||
|
|
@ -35,7 +35,7 @@ function getCache(): CacheProvider {
|
|||
cacheProvider = createCache({
|
||||
keyPrefix: 'batch:',
|
||||
ttl: 86400, // 24 hours default
|
||||
enableMetrics: true
|
||||
enableMetrics: true,
|
||||
});
|
||||
}
|
||||
return cacheProvider;
|
||||
|
|
@ -62,13 +62,13 @@ export async function processItems<T>(
|
|||
options: ProcessOptions
|
||||
): Promise<BatchResult> {
|
||||
const startTime = Date.now();
|
||||
|
||||
|
||||
if (items.length === 0) {
|
||||
return {
|
||||
jobsCreated: 0,
|
||||
mode: 'direct',
|
||||
totalItems: 0,
|
||||
duration: 0
|
||||
duration: 0,
|
||||
};
|
||||
}
|
||||
|
||||
|
|
@ -76,23 +76,22 @@ export async function processItems<T>(
|
|||
totalItems: items.length,
|
||||
mode: options.useBatching ? 'batch' : 'direct',
|
||||
batchSize: options.batchSize,
|
||||
totalDelayHours: options.totalDelayHours
|
||||
totalDelayHours: options.totalDelayHours,
|
||||
});
|
||||
|
||||
try {
|
||||
const result = options.useBatching
|
||||
const result = options.useBatching
|
||||
? await processBatched(items, processor, queue, options)
|
||||
: await processDirect(items, processor, queue, options);
|
||||
|
||||
const duration = Date.now() - startTime;
|
||||
|
||||
|
||||
logger.info('Batch processing completed', {
|
||||
...result,
|
||||
duration: `${(duration / 1000).toFixed(1)}s`
|
||||
duration: `${(duration / 1000).toFixed(1)}s`,
|
||||
});
|
||||
|
||||
return { ...result, duration };
|
||||
|
||||
} catch (error) {
|
||||
logger.error('Batch processing failed', error);
|
||||
throw error;
|
||||
|
|
@ -108,13 +107,12 @@ async function processDirect<T>(
|
|||
queue: QueueService,
|
||||
options: ProcessOptions
|
||||
): Promise<Omit<BatchResult, 'duration'>> {
|
||||
|
||||
const totalDelayMs = options.totalDelayHours * 60 * 60 * 1000;
|
||||
const delayPerItem = totalDelayMs / items.length;
|
||||
|
||||
|
||||
logger.info('Creating direct jobs', {
|
||||
totalItems: items.length,
|
||||
delayPerItem: `${(delayPerItem / 1000).toFixed(1)}s`
|
||||
delayPerItem: `${(delayPerItem / 1000).toFixed(1)}s`,
|
||||
});
|
||||
|
||||
const jobs = items.map((item, index) => ({
|
||||
|
|
@ -124,23 +122,23 @@ async function processDirect<T>(
|
|||
provider: options.provider || 'generic',
|
||||
operation: options.operation || 'process-item',
|
||||
payload: processor(item, index),
|
||||
priority: options.priority || 1
|
||||
priority: options.priority || 1,
|
||||
},
|
||||
opts: {
|
||||
delay: index * delayPerItem,
|
||||
priority: options.priority || 1,
|
||||
attempts: options.retries || 3,
|
||||
removeOnComplete: options.removeOnComplete || 10,
|
||||
removeOnFail: options.removeOnFail || 5
|
||||
}
|
||||
removeOnFail: options.removeOnFail || 5,
|
||||
},
|
||||
}));
|
||||
|
||||
const createdJobs = await addJobsInChunks(queue, jobs);
|
||||
|
||||
|
||||
return {
|
||||
totalItems: items.length,
|
||||
jobsCreated: createdJobs.length,
|
||||
mode: 'direct'
|
||||
mode: 'direct',
|
||||
};
|
||||
}
|
||||
|
||||
|
|
@ -153,7 +151,6 @@ async function processBatched<T>(
|
|||
queue: QueueService,
|
||||
options: ProcessOptions
|
||||
): Promise<Omit<BatchResult, 'duration'>> {
|
||||
|
||||
const batchSize = options.batchSize || 100;
|
||||
const batches = createBatches(items, batchSize);
|
||||
const totalDelayMs = options.totalDelayHours * 60 * 60 * 1000;
|
||||
|
|
@ -163,13 +160,13 @@ async function processBatched<T>(
|
|||
totalItems: items.length,
|
||||
batchSize,
|
||||
totalBatches: batches.length,
|
||||
delayPerBatch: `${(delayPerBatch / 1000 / 60).toFixed(2)} minutes`
|
||||
delayPerBatch: `${(delayPerBatch / 1000 / 60).toFixed(2)} minutes`,
|
||||
});
|
||||
|
||||
const batchJobs = await Promise.all(
|
||||
batches.map(async (batch, batchIndex) => {
|
||||
const payloadKey = await storePayload(batch, processor, options);
|
||||
|
||||
|
||||
return {
|
||||
name: 'process-batch',
|
||||
data: {
|
||||
|
|
@ -180,17 +177,17 @@ async function processBatched<T>(
|
|||
payloadKey,
|
||||
batchIndex,
|
||||
totalBatches: batches.length,
|
||||
itemCount: batch.length
|
||||
itemCount: batch.length,
|
||||
},
|
||||
priority: options.priority || 2
|
||||
priority: options.priority || 2,
|
||||
},
|
||||
opts: {
|
||||
delay: batchIndex * delayPerBatch,
|
||||
priority: options.priority || 2,
|
||||
attempts: options.retries || 3,
|
||||
removeOnComplete: options.removeOnComplete || 10,
|
||||
removeOnFail: options.removeOnFail || 5
|
||||
}
|
||||
removeOnFail: options.removeOnFail || 5,
|
||||
},
|
||||
};
|
||||
})
|
||||
);
|
||||
|
|
@ -201,7 +198,7 @@ async function processBatched<T>(
|
|||
totalItems: items.length,
|
||||
jobsCreated: createdJobs.length,
|
||||
batchesCreated: batches.length,
|
||||
mode: 'batch'
|
||||
mode: 'batch',
|
||||
};
|
||||
}
|
||||
|
||||
|
|
@ -210,11 +207,11 @@ async function processBatched<T>(
|
|||
*/
|
||||
export async function processBatchJob(jobData: any, queue: QueueService): Promise<any> {
|
||||
const { payloadKey, batchIndex, totalBatches, itemCount } = jobData;
|
||||
|
||||
logger.debug('Processing batch job', {
|
||||
batchIndex,
|
||||
totalBatches,
|
||||
itemCount
|
||||
|
||||
logger.debug('Processing batch job', {
|
||||
batchIndex,
|
||||
totalBatches,
|
||||
itemCount,
|
||||
});
|
||||
|
||||
try {
|
||||
|
|
@ -225,7 +222,7 @@ export async function processBatchJob(jobData: any, queue: QueueService): Promis
|
|||
}
|
||||
|
||||
const { items, processorStr, options } = payload;
|
||||
|
||||
|
||||
// Deserialize the processor function
|
||||
const processor = new Function('return ' + processorStr)();
|
||||
|
||||
|
|
@ -236,26 +233,25 @@ export async function processBatchJob(jobData: any, queue: QueueService): Promis
|
|||
provider: options.provider || 'generic',
|
||||
operation: options.operation || 'generic',
|
||||
payload: processor(item, index),
|
||||
priority: options.priority || 1
|
||||
priority: options.priority || 1,
|
||||
},
|
||||
opts: {
|
||||
delay: index * (options.delayPerItem || 1000),
|
||||
priority: options.priority || 1,
|
||||
attempts: options.retries || 3
|
||||
}
|
||||
attempts: options.retries || 3,
|
||||
},
|
||||
}));
|
||||
|
||||
const createdJobs = await addJobsInChunks(queue, jobs);
|
||||
|
||||
|
||||
// Cleanup payload after successful processing
|
||||
await cleanupPayload(payloadKey);
|
||||
|
||||
return {
|
||||
batchIndex,
|
||||
itemsProcessed: items.length,
|
||||
jobsCreated: createdJobs.length
|
||||
jobsCreated: createdJobs.length,
|
||||
};
|
||||
|
||||
} catch (error) {
|
||||
logger.error('Batch job processing failed', { batchIndex, error });
|
||||
throw error;
|
||||
|
|
@ -273,20 +269,20 @@ function createBatches<T>(items: T[], batchSize: number): T[][] {
|
|||
}
|
||||
|
||||
async function storePayload<T>(
|
||||
items: T[],
|
||||
items: T[],
|
||||
processor: (item: T, index: number) => any,
|
||||
options: ProcessOptions
|
||||
): Promise<string> {
|
||||
const cache = getCache();
|
||||
|
||||
|
||||
// Create more specific key: batch:provider:operation:payload_timestamp_random
|
||||
const timestamp = Date.now();
|
||||
const randomId = Math.random().toString(36).substr(2, 9);
|
||||
const provider = options.provider || 'generic';
|
||||
const operation = options.operation || 'generic';
|
||||
|
||||
|
||||
const key = `${provider}:${operation}:payload_${timestamp}_${randomId}`;
|
||||
|
||||
|
||||
const payload = {
|
||||
items,
|
||||
processorStr: processor.toString(),
|
||||
|
|
@ -296,33 +292,33 @@ async function storePayload<T>(
|
|||
retries: options.retries || 3,
|
||||
// Store routing information for later use
|
||||
provider: options.provider || 'generic',
|
||||
operation: options.operation || 'generic'
|
||||
operation: options.operation || 'generic',
|
||||
},
|
||||
createdAt: Date.now()
|
||||
createdAt: Date.now(),
|
||||
};
|
||||
|
||||
logger.debug('Storing batch payload', {
|
||||
key,
|
||||
itemCount: items.length
|
||||
|
||||
logger.debug('Storing batch payload', {
|
||||
key,
|
||||
itemCount: items.length,
|
||||
});
|
||||
|
||||
|
||||
await cache.set(key, payload, options.ttl || 86400);
|
||||
|
||||
logger.debug('Stored batch payload successfully', {
|
||||
key,
|
||||
itemCount: items.length
|
||||
|
||||
logger.debug('Stored batch payload successfully', {
|
||||
key,
|
||||
itemCount: items.length,
|
||||
});
|
||||
|
||||
|
||||
return key;
|
||||
}
|
||||
|
||||
async function loadPayload(key: string): Promise<any> {
|
||||
const cache = getCache();
|
||||
|
||||
|
||||
logger.debug('Loading batch payload', { key });
|
||||
|
||||
|
||||
const data = await cache.get(key);
|
||||
|
||||
|
||||
if (!data) {
|
||||
logger.error('Payload not found in cache', { key });
|
||||
throw new Error(`Payload not found: ${key}`);
|
||||
|
|
@ -344,27 +340,25 @@ async function cleanupPayload(key: string): Promise<void> {
|
|||
|
||||
async function addJobsInChunks(queue: QueueService, jobs: any[], chunkSize = 100): Promise<any[]> {
|
||||
const allCreatedJobs = [];
|
||||
|
||||
|
||||
for (let i = 0; i < jobs.length; i += chunkSize) {
|
||||
const chunk = jobs.slice(i, i + chunkSize);
|
||||
try {
|
||||
const createdJobs = await queue.addBulk(chunk);
|
||||
allCreatedJobs.push(...createdJobs);
|
||||
|
||||
|
||||
// Small delay between chunks to avoid overwhelming Redis
|
||||
if (i + chunkSize < jobs.length) {
|
||||
await new Promise(resolve => setTimeout(resolve, 100));
|
||||
}
|
||||
} catch (error) {
|
||||
logger.error('Failed to add job chunk', {
|
||||
startIndex: i,
|
||||
chunkSize: chunk.length,
|
||||
error
|
||||
logger.error('Failed to add job chunk', {
|
||||
startIndex: i,
|
||||
chunkSize: chunk.length,
|
||||
error,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
return allCreatedJobs;
|
||||
}
|
||||
|
||||
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue