Start implementing the queue job service
This commit is contained in:
parent
c10a524aa8
commit
8681c34529
9 changed files with 458 additions and 20 deletions
115
apps/data-service/src/api/queue.routes.ts
Normal file
115
apps/data-service/src/api/queue.routes.ts
Normal file
|
|
@ -0,0 +1,115 @@
|
|||
/**
|
||||
* Queue API Endpoints
|
||||
* REST API for monitoring and controlling job queues
|
||||
*/
|
||||
import { Router } from 'express';
|
||||
import { getLogger } from '@stock-bot/logger';
|
||||
import { proxyQueueIntegration } from '../services/proxy-queue-integration';
|
||||
|
||||
const logger = getLogger('queue-api');
|
||||
const router = Router();
|
||||
|
||||
/**
|
||||
* GET /api/queue/stats
|
||||
* Get queue statistics
|
||||
*/
|
||||
router.get('/stats', async (req, res) => {
|
||||
try {
|
||||
const stats = await proxyQueueIntegration.getStats();
|
||||
res.json({ success: true, data: stats });
|
||||
} catch (error) {
|
||||
logger.error('Failed to get queue stats', { error });
|
||||
res.status(500).json({
|
||||
success: false,
|
||||
error: 'Failed to get queue statistics'
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
/**
|
||||
* POST /api/queue/proxy/fetch
|
||||
* Manually trigger proxy fetching
|
||||
*/
|
||||
router.post('/proxy/fetch', async (req, res) => {
|
||||
try {
|
||||
const job = await proxyQueueIntegration.triggerProxyFetch();
|
||||
res.json({
|
||||
success: true,
|
||||
data: {
|
||||
jobId: job.id,
|
||||
message: 'Proxy fetch job queued'
|
||||
}
|
||||
});
|
||||
} catch (error) {
|
||||
logger.error('Failed to trigger proxy fetch', { error });
|
||||
res.status(500).json({
|
||||
success: false,
|
||||
error: 'Failed to queue proxy fetch job'
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
/**
|
||||
* POST /api/queue/proxy/check
|
||||
* Check specific proxies
|
||||
*/
|
||||
router.post('/proxy/check', async (req, res) => {
|
||||
try {
|
||||
const { proxies } = req.body;
|
||||
|
||||
if (!Array.isArray(proxies) || proxies.length === 0) {
|
||||
return res.status(400).json({
|
||||
success: false,
|
||||
error: 'Proxies array is required'
|
||||
});
|
||||
}
|
||||
|
||||
const job = await proxyQueueIntegration.checkSpecificProxies(proxies);
|
||||
res.json({
|
||||
success: true,
|
||||
data: {
|
||||
jobId: job.id,
|
||||
proxiesCount: proxies.length,
|
||||
message: 'Proxy check job queued'
|
||||
}
|
||||
});
|
||||
} catch (error) {
|
||||
logger.error('Failed to queue proxy check', { error });
|
||||
res.status(500).json({
|
||||
success: false,
|
||||
error: 'Failed to queue proxy check job'
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
/**
|
||||
* GET /api/queue/health
|
||||
* Health check for queue service
|
||||
*/
|
||||
router.get('/health', async (req, res) => {
|
||||
try {
|
||||
const stats = await proxyQueueIntegration.getStats();
|
||||
const isHealthy = stats.active >= 0; // Basic health check
|
||||
|
||||
res.status(isHealthy ? 200 : 503).json({
|
||||
success: isHealthy,
|
||||
data: {
|
||||
status: isHealthy ? 'healthy' : 'unhealthy',
|
||||
stats,
|
||||
timestamp: new Date().toISOString()
|
||||
}
|
||||
});
|
||||
} catch (error) {
|
||||
logger.error('Queue health check failed', { error });
|
||||
res.status(503).json({
|
||||
success: false,
|
||||
data: {
|
||||
status: 'unhealthy',
|
||||
error: 'Queue service unavailable',
|
||||
timestamp: new Date().toISOString()
|
||||
}
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
export default router;
|
||||
90
apps/data-service/src/services/proxy-queue-integration.ts
Normal file
90
apps/data-service/src/services/proxy-queue-integration.ts
Normal file
|
|
@ -0,0 +1,90 @@
|
|||
/**
|
||||
* Example: Proxy Service with BullMQ Integration
|
||||
* This shows how to integrate the queue service with your existing proxy service
|
||||
*/
|
||||
import { getLogger } from '@stock-bot/logger';
|
||||
import { queueService } from './queue.service';
|
||||
import type { ProxyInfo } from '@stock-bot/http';
|
||||
|
||||
const logger = getLogger('proxy-queue-integration');
|
||||
|
||||
export class ProxyQueueIntegration {
|
||||
|
||||
constructor() {
|
||||
// Initialize recurring tasks when service starts
|
||||
this.initializeScheduledTasks();
|
||||
}
|
||||
|
||||
private async initializeScheduledTasks() {
|
||||
try {
|
||||
await queueService.scheduleRecurringTasks();
|
||||
logger.info('Proxy scheduling tasks initialized');
|
||||
} catch (error) {
|
||||
logger.error('Failed to initialize scheduled tasks', { error });
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Manually trigger proxy fetching and checking
|
||||
*/
|
||||
async triggerProxyFetch() {
|
||||
try {
|
||||
const job = await queueService.addManualProxyFetch();
|
||||
logger.info('Manual proxy fetch job added', { jobId: job.id });
|
||||
return job;
|
||||
} catch (error) {
|
||||
logger.error('Failed to trigger proxy fetch', { error });
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Check specific proxies immediately
|
||||
*/
|
||||
async checkSpecificProxies(proxies: ProxyInfo[]) {
|
||||
try {
|
||||
const job = await queueService.addImmediateProxyCheck(proxies);
|
||||
logger.info('Specific proxy check job added', {
|
||||
jobId: job.id,
|
||||
proxiesCount: proxies.length
|
||||
});
|
||||
return job;
|
||||
} catch (error) {
|
||||
logger.error('Failed to check specific proxies', { error });
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Get queue statistics
|
||||
*/
|
||||
async getStats() {
|
||||
try {
|
||||
return await queueService.getQueueStats();
|
||||
} catch (error) {
|
||||
logger.error('Failed to get queue stats', { error });
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the queue instance for Bull Board monitoring
|
||||
*/
|
||||
async getQueue() {
|
||||
return await queueService.getQueue();
|
||||
}
|
||||
|
||||
/**
|
||||
* Shutdown queue service gracefully
|
||||
*/
|
||||
async shutdown() {
|
||||
try {
|
||||
await queueService.shutdown();
|
||||
logger.info('Proxy queue integration shut down');
|
||||
} catch (error) {
|
||||
logger.error('Error during shutdown', { error });
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export const proxyQueueIntegration = new ProxyQueueIntegration();
|
||||
|
|
@ -2,15 +2,16 @@ import { Logger } from '@stock-bot/logger';
|
|||
import createCache, { type CacheProvider } from '@stock-bot/cache';
|
||||
import { HttpClient, ProxyInfo } from '@stock-bot/http';
|
||||
import pLimit from 'p-limit';
|
||||
import { queueService } from './queue.service';
|
||||
|
||||
export class ProxyService {
|
||||
private logger = new Logger('proxy-service');
|
||||
private cache: CacheProvider = createCache('hybrid');
|
||||
private httpClient: HttpClient;
|
||||
private readonly concurrencyLimit = pLimit(250);
|
||||
private readonly concurrencyLimit = pLimit(100);
|
||||
private readonly CACHE_KEY = 'proxy';
|
||||
private readonly CACHE_TTL = 86400; // 24 hours
|
||||
private readonly CHECK_TIMEOUT = 5000;
|
||||
private readonly CHECK_TIMEOUT = 7000;
|
||||
private readonly CHECK_IP = '99.246.102.205'
|
||||
private readonly CHECK_URL = 'https://proxy-detection.stare.gg/?api_key=bd406bf53ddc6abe1d9de5907830a955';
|
||||
private readonly PROXY_SOURCES = [
|
||||
|
|
@ -20,13 +21,10 @@ export class ProxyService {
|
|||
{url: 'https://raw.githubusercontent.com/vakhov/fresh-proxy-list/master/http.txt',protocol: 'http', },
|
||||
{url: 'https://raw.githubusercontent.com/sunny9577/proxy-scraper/master/proxies.txt',protocol: 'http', },
|
||||
{url: 'https://raw.githubusercontent.com/officialputuid/KangProxy/refs/heads/KangProxy/http/http.txt',protocol: 'http', },
|
||||
// {url: 'https://github.com/zloi-user/hideip.me/raw/refs/heads/master/http.txt',protocol: 'http', },
|
||||
{url: 'https://raw.githubusercontent.com/gfpcom/free-proxy-list/refs/heads/main/list/http.txt', protocol: 'http' },
|
||||
{url: 'https://raw.githubusercontent.com/dpangestuw/Free-Proxy/refs/heads/main/http_proxies.txt',protocol: 'http', },
|
||||
// {url: 'https://raw.githubusercontent.com/zloi-user/hideip.me/refs/heads/master/http.txt',protocol: 'http', },
|
||||
{url: 'https://raw.githubusercontent.com/gitrecon1455/fresh-proxy-list/refs/heads/main/proxylist.txt',protocol: 'http', },
|
||||
{url: 'https://raw.githubusercontent.com/themiralay/Proxy-List-World/refs/heads/master/data.txt',protocol: 'http', },
|
||||
{url: 'https://raw.githubusercontent.com/dpangestuw/Free-Proxy/refs/heads/main/http_proxies.txt',protocol: 'http', },
|
||||
{url: 'https://raw.githubusercontent.com/vakhov/fresh-proxy-list/refs/heads/master/http.txt',protocol: 'http', },
|
||||
{url: 'https://raw.githubusercontent.com/casa-ls/proxy-list/refs/heads/main/http',protocol: 'http', },
|
||||
{url: 'https://raw.githubusercontent.com/databay-labs/free-proxy-list/refs/heads/master/http.txt',protocol: 'http', },
|
||||
|
|
@ -35,17 +33,15 @@ export class ProxyService {
|
|||
{url: 'https://raw.githubusercontent.com/ErcinDedeoglu/proxies/main/proxies/http.txt',protocol: 'http', },
|
||||
{url: 'https://raw.githubusercontent.com/monosans/proxy-list/main/proxies/http.txt',protocol: 'http', },
|
||||
{url: 'https://raw.githubusercontent.com/TuanMinPay/live-proxy/master/http.txt',protocol: 'http', },
|
||||
{url: 'https://raw.githubusercontent.com/casals-ar/proxy-list/main/http',protocol: 'http', },
|
||||
// {url: 'https://raw.githubusercontent.com/zloi-user/hideip.me/refs/heads/master/http.txt',protocol: 'http', },
|
||||
|
||||
|
||||
{url: 'https://raw.githubusercontent.com/r00tee/Proxy-List/refs/heads/main/Https.txt',protocol: 'https', },
|
||||
{url: 'https://raw.githubusercontent.com/r00tee/Proxy-List/refs/heads/main/Https.txt',protocol: 'https', },
|
||||
{url: 'https://raw.githubusercontent.com/ErcinDedeoglu/proxies/main/proxies/https.txt',protocol: 'https', },
|
||||
{url: 'https://raw.githubusercontent.com/vakhov/fresh-proxy-list/refs/heads/master/https.txt', protocol: 'https' },
|
||||
{url: 'https://raw.githubusercontent.com/databay-labs/free-proxy-list/refs/heads/master/https.txt',protocol: 'https', },
|
||||
{url: 'https://raw.githubusercontent.com/officialputuid/KangProxy/refs/heads/KangProxy/https/https.txt',protocol: 'https', },
|
||||
// {url: 'https://raw.githubusercontent.com/zloi-user/hideip.me/refs/heads/master/https.txt',protocol: 'https', },
|
||||
// {url: 'https://raw.githubusercontent.com/gfpcom/free-proxy-list/refs/heads/main/list/https.txt',protocol: 'https', },
|
||||
{url: 'https://raw.githubusercontent.com/zloi-user/hideip.me/refs/heads/master/https.txt',protocol: 'https', },
|
||||
{url: 'https://raw.githubusercontent.com/gfpcom/free-proxy-list/refs/heads/main/list/https.txt',protocol: 'https', },
|
||||
// {url: 'https://raw.githubusercontent.com/MuRongPIG/Proxy-Master/main/socks4.txt',protocol: 'socks4', },
|
||||
// {url: 'https://raw.githubusercontent.com/TheSpeedX/PROXY-List/master/socks4.txt',protocol: 'socks4', },
|
||||
// {url: 'https://raw.githubusercontent.com/TuanMinPay/live-proxy/master/socks4.txt',protocol: 'socks4', },
|
||||
|
|
@ -68,12 +64,24 @@ export class ProxyService {
|
|||
|
||||
constructor() {
|
||||
this.httpClient = new HttpClient({
|
||||
timeout: this.CHECK_TIMEOUT,
|
||||
timeout: 10000,
|
||||
}, this.logger);
|
||||
|
||||
// Start scheduled tasks
|
||||
this.initializeScheduling();
|
||||
|
||||
this.logger.info('ProxyService initialized');
|
||||
}
|
||||
|
||||
private async initializeScheduling() {
|
||||
try {
|
||||
await queueService.scheduleRecurringTasks();
|
||||
this.logger.info('Proxy scheduling initialized');
|
||||
} catch (error) {
|
||||
this.logger.error('Failed to initialize scheduling', { error });
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
async fetchProxiesFromSources() : Promise<number> {
|
||||
const sources = this.PROXY_SOURCES.map(source =>
|
||||
|
|
|
|||
198
apps/data-service/src/services/queue.service.ts
Normal file
198
apps/data-service/src/services/queue.service.ts
Normal file
|
|
@ -0,0 +1,198 @@
|
|||
/**
|
||||
* BullMQ Queue Service
|
||||
* Handles job scheduling and processing for the data service
|
||||
*/
|
||||
import { Queue, Worker, QueueEvents } from 'bullmq';
|
||||
import { getLogger } from '@stock-bot/logger';
|
||||
import type { ProxyInfo } from '@stock-bot/http';
|
||||
|
||||
const logger = getLogger('queue-service');
|
||||
|
||||
export interface ProxyJobData {
|
||||
type: 'fetch-and-check' | 'check-specific' | 'clear-cache';
|
||||
proxies?: ProxyInfo[];
|
||||
}
|
||||
|
||||
export class QueueService {
|
||||
private queue: Queue;
|
||||
private worker: Worker;
|
||||
private queueEvents: QueueEvents;
|
||||
|
||||
constructor() {
|
||||
const connection = {
|
||||
host: process.env.DRAGONFLY_HOST || 'localhost',
|
||||
port: parseInt(process.env.DRAGONFLY_PORT || '6379'),
|
||||
};
|
||||
|
||||
// Create queue
|
||||
this.queue = new Queue('proxy-tasks', { connection });
|
||||
|
||||
// Create worker
|
||||
this.worker = new Worker('proxy-tasks', this.processJob.bind(this), {
|
||||
connection,
|
||||
concurrency: 3,
|
||||
});
|
||||
|
||||
// Create queue events for monitoring
|
||||
this.queueEvents = new QueueEvents('proxy-tasks', { connection });
|
||||
|
||||
this.setupEventListeners();
|
||||
logger.info('Queue service initialized', { connection });
|
||||
}
|
||||
|
||||
private async processJob(job: any) {
|
||||
const { type, proxies }: ProxyJobData = job.data;
|
||||
logger.info('Processing job', {
|
||||
id: job.id,
|
||||
type,
|
||||
proxiesCount: proxies?.length
|
||||
});
|
||||
|
||||
try {
|
||||
switch (type) {
|
||||
case 'fetch-and-check':
|
||||
// Import proxy service dynamically to avoid circular dependencies
|
||||
const { proxyService } = await import('./proxy.service');
|
||||
return await proxyService.fetchProxiesFromSources();
|
||||
|
||||
case 'check-specific':
|
||||
if (!proxies) throw new Error('Proxies required for check-specific job');
|
||||
const { proxyService: ps } = await import('./proxy.service');
|
||||
return await ps.checkProxies(proxies);
|
||||
|
||||
case 'clear-cache':
|
||||
// Clear proxy cache
|
||||
const { proxyService: pcs } = await import('./proxy.service');
|
||||
// Assuming you have a clearCache method
|
||||
// return await pcs.clearCache();
|
||||
logger.info('Cache clear job processed');
|
||||
return { cleared: true };
|
||||
|
||||
default:
|
||||
throw new Error(`Unknown job type: ${type}`);
|
||||
}
|
||||
} catch (error) {
|
||||
logger.error('Job processing failed', {
|
||||
id: job.id,
|
||||
type,
|
||||
error: error instanceof Error ? error.message : String(error)
|
||||
});
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
private setupEventListeners() {
|
||||
this.worker.on('completed', (job) => {
|
||||
logger.info('Job completed', {
|
||||
id: job.id,
|
||||
type: job.data.type,
|
||||
result: job.returnvalue
|
||||
});
|
||||
});
|
||||
|
||||
this.worker.on('failed', (job, err) => {
|
||||
logger.error('Job failed', {
|
||||
id: job?.id,
|
||||
type: job?.data.type,
|
||||
error: err.message
|
||||
});
|
||||
});
|
||||
|
||||
this.worker.on('progress', (job, progress) => {
|
||||
logger.debug('Job progress', {
|
||||
id: job.id,
|
||||
progress: `${progress}%`
|
||||
});
|
||||
});
|
||||
|
||||
this.queueEvents.on('waiting', ({ jobId }) => {
|
||||
logger.debug('Job waiting', { jobId });
|
||||
});
|
||||
|
||||
this.queueEvents.on('active', ({ jobId }) => {
|
||||
logger.debug('Job active', { jobId });
|
||||
});
|
||||
}
|
||||
|
||||
async scheduleRecurringTasks() {
|
||||
// Fetch and check proxies every 15 minutes
|
||||
await this.queue.add('fetch-and-check',
|
||||
{ type: 'fetch-and-check' },
|
||||
{
|
||||
repeat: { pattern: '*/15 * * * *' },
|
||||
removeOnComplete: 10,
|
||||
removeOnFail: 5,
|
||||
jobId: 'recurring-proxy-fetch', // Use consistent ID to prevent duplicates
|
||||
}
|
||||
);
|
||||
|
||||
// Clear cache daily at midnight
|
||||
await this.queue.add('clear-cache',
|
||||
{ type: 'clear-cache' },
|
||||
{
|
||||
repeat: { pattern: '0 0 * * *' },
|
||||
removeOnComplete: 1,
|
||||
removeOnFail: 1,
|
||||
jobId: 'daily-cache-clear',
|
||||
}
|
||||
);
|
||||
|
||||
logger.info('Recurring tasks scheduled');
|
||||
}
|
||||
|
||||
async addImmediateProxyCheck(proxies: ProxyInfo[]) {
|
||||
return await this.queue.add('check-specific',
|
||||
{ type: 'check-specific', proxies },
|
||||
{
|
||||
priority: 10,
|
||||
removeOnComplete: 5,
|
||||
removeOnFail: 3,
|
||||
}
|
||||
);
|
||||
}
|
||||
|
||||
async addManualProxyFetch() {
|
||||
return await this.queue.add('fetch-and-check',
|
||||
{ type: 'fetch-and-check' },
|
||||
{
|
||||
priority: 5,
|
||||
removeOnComplete: 5,
|
||||
removeOnFail: 3,
|
||||
}
|
||||
);
|
||||
}
|
||||
|
||||
async getQueueStats() {
|
||||
const [waiting, active, completed, failed, delayed] = await Promise.all([
|
||||
this.queue.getWaiting(),
|
||||
this.queue.getActive(),
|
||||
this.queue.getCompleted(),
|
||||
this.queue.getFailed(),
|
||||
this.queue.getDelayed(),
|
||||
]);
|
||||
|
||||
return {
|
||||
waiting: waiting.length,
|
||||
active: active.length,
|
||||
completed: completed.length,
|
||||
failed: failed.length,
|
||||
delayed: delayed.length,
|
||||
};
|
||||
}
|
||||
|
||||
async getQueue() {
|
||||
return this.queue;
|
||||
}
|
||||
|
||||
async shutdown() {
|
||||
logger.info('Shutting down queue service...');
|
||||
|
||||
await this.worker.close();
|
||||
await this.queue.close();
|
||||
await this.queueEvents.close();
|
||||
|
||||
logger.info('Queue service shut down');
|
||||
}
|
||||
}
|
||||
|
||||
export const queueService = new QueueService();
|
||||
Loading…
Add table
Add a link
Reference in a new issue