starting to implement the queue job service
This commit is contained in:
parent
c10a524aa8
commit
8681c34529
9 changed files with 458 additions and 20 deletions
|
|
@ -50,10 +50,12 @@ Once running, access these services:
|
| Service | URL | Credentials |
|---------|-----|-------------|
| **QuestDB Console** | http://localhost:9000 | No login required |
| **Redis Insight** | http://localhost:8001 | No login required |
| **Bull Board** | http://localhost:3001 | No login required |
| **PgAdmin** | http://localhost:8080 | `admin@tradingbot.local` / `admin123` |
| **Mongo Express** | http://localhost:8081 | `admin` / `admin123` |
| **Prometheus** | http://localhost:9090 | No login required |
| **Grafana** | http://localhost:3000 | `admin` / `admin123` |

## 📊 Database Connections
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -21,6 +21,7 @@
|
||||||
"@stock-bot/questdb-client": "*",
|
"@stock-bot/questdb-client": "*",
|
||||||
"@stock-bot/shutdown": "*",
|
"@stock-bot/shutdown": "*",
|
||||||
"@stock-bot/types": "*",
|
"@stock-bot/types": "*",
|
||||||
|
"bullmq": "^5.53.2",
|
||||||
"hono": "^4.0.0",
|
"hono": "^4.0.0",
|
||||||
"p-limit": "^6.2.0",
|
"p-limit": "^6.2.0",
|
||||||
"ws": "^8.0.0"
|
"ws": "^8.0.0"
|
||||||
|
|
|
||||||
115
apps/data-service/src/api/queue.routes.ts
Normal file
115
apps/data-service/src/api/queue.routes.ts
Normal file
|
|
@ -0,0 +1,115 @@
|
||||||
|
/**
|
||||||
|
* Queue API Endpoints
|
||||||
|
* REST API for monitoring and controlling job queues
|
||||||
|
*/
|
||||||
|
import { Router } from 'express';
|
||||||
|
import { getLogger } from '@stock-bot/logger';
|
||||||
|
import { proxyQueueIntegration } from '../services/proxy-queue-integration';
|
||||||
|
|
||||||
|
const logger = getLogger('queue-api');
|
||||||
|
const router = Router();
|
||||||
|
|
||||||
|
/**
|
||||||
|
* GET /api/queue/stats
|
||||||
|
* Get queue statistics
|
||||||
|
*/
|
||||||
|
router.get('/stats', async (req, res) => {
|
||||||
|
try {
|
||||||
|
const stats = await proxyQueueIntegration.getStats();
|
||||||
|
res.json({ success: true, data: stats });
|
||||||
|
} catch (error) {
|
||||||
|
logger.error('Failed to get queue stats', { error });
|
||||||
|
res.status(500).json({
|
||||||
|
success: false,
|
||||||
|
error: 'Failed to get queue statistics'
|
||||||
|
});
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
/**
|
||||||
|
* POST /api/queue/proxy/fetch
|
||||||
|
* Manually trigger proxy fetching
|
||||||
|
*/
|
||||||
|
router.post('/proxy/fetch', async (req, res) => {
|
||||||
|
try {
|
||||||
|
const job = await proxyQueueIntegration.triggerProxyFetch();
|
||||||
|
res.json({
|
||||||
|
success: true,
|
||||||
|
data: {
|
||||||
|
jobId: job.id,
|
||||||
|
message: 'Proxy fetch job queued'
|
||||||
|
}
|
||||||
|
});
|
||||||
|
} catch (error) {
|
||||||
|
logger.error('Failed to trigger proxy fetch', { error });
|
||||||
|
res.status(500).json({
|
||||||
|
success: false,
|
||||||
|
error: 'Failed to queue proxy fetch job'
|
||||||
|
});
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
/**
|
||||||
|
* POST /api/queue/proxy/check
|
||||||
|
* Check specific proxies
|
||||||
|
*/
|
||||||
|
router.post('/proxy/check', async (req, res) => {
|
||||||
|
try {
|
||||||
|
const { proxies } = req.body;
|
||||||
|
|
||||||
|
if (!Array.isArray(proxies) || proxies.length === 0) {
|
||||||
|
return res.status(400).json({
|
||||||
|
success: false,
|
||||||
|
error: 'Proxies array is required'
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
const job = await proxyQueueIntegration.checkSpecificProxies(proxies);
|
||||||
|
res.json({
|
||||||
|
success: true,
|
||||||
|
data: {
|
||||||
|
jobId: job.id,
|
||||||
|
proxiesCount: proxies.length,
|
||||||
|
message: 'Proxy check job queued'
|
||||||
|
}
|
||||||
|
});
|
||||||
|
} catch (error) {
|
||||||
|
logger.error('Failed to queue proxy check', { error });
|
||||||
|
res.status(500).json({
|
||||||
|
success: false,
|
||||||
|
error: 'Failed to queue proxy check job'
|
||||||
|
});
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
/**
|
||||||
|
* GET /api/queue/health
|
||||||
|
* Health check for queue service
|
||||||
|
*/
|
||||||
|
router.get('/health', async (req, res) => {
|
||||||
|
try {
|
||||||
|
const stats = await proxyQueueIntegration.getStats();
|
||||||
|
const isHealthy = stats.active >= 0; // Basic health check
|
||||||
|
|
||||||
|
res.status(isHealthy ? 200 : 503).json({
|
||||||
|
success: isHealthy,
|
||||||
|
data: {
|
||||||
|
status: isHealthy ? 'healthy' : 'unhealthy',
|
||||||
|
stats,
|
||||||
|
timestamp: new Date().toISOString()
|
||||||
|
}
|
||||||
|
});
|
||||||
|
} catch (error) {
|
||||||
|
logger.error('Queue health check failed', { error });
|
||||||
|
res.status(503).json({
|
||||||
|
success: false,
|
||||||
|
data: {
|
||||||
|
status: 'unhealthy',
|
||||||
|
error: 'Queue service unavailable',
|
||||||
|
timestamp: new Date().toISOString()
|
||||||
|
}
|
||||||
|
});
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
export default router;
|
||||||
90
apps/data-service/src/services/proxy-queue-integration.ts
Normal file
90
apps/data-service/src/services/proxy-queue-integration.ts
Normal file
|
|
@ -0,0 +1,90 @@
|
||||||
|
/**
|
||||||
|
* Example: Proxy Service with BullMQ Integration
|
||||||
|
* This shows how to integrate the queue service with your existing proxy service
|
||||||
|
*/
|
||||||
|
import { getLogger } from '@stock-bot/logger';
|
||||||
|
import { queueService } from './queue.service';
|
||||||
|
import type { ProxyInfo } from '@stock-bot/http';
|
||||||
|
|
||||||
|
const logger = getLogger('proxy-queue-integration');
|
||||||
|
|
||||||
|
export class ProxyQueueIntegration {
|
||||||
|
|
||||||
|
constructor() {
|
||||||
|
// Initialize recurring tasks when service starts
|
||||||
|
this.initializeScheduledTasks();
|
||||||
|
}
|
||||||
|
|
||||||
|
private async initializeScheduledTasks() {
|
||||||
|
try {
|
||||||
|
await queueService.scheduleRecurringTasks();
|
||||||
|
logger.info('Proxy scheduling tasks initialized');
|
||||||
|
} catch (error) {
|
||||||
|
logger.error('Failed to initialize scheduled tasks', { error });
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Manually trigger proxy fetching and checking
|
||||||
|
*/
|
||||||
|
async triggerProxyFetch() {
|
||||||
|
try {
|
||||||
|
const job = await queueService.addManualProxyFetch();
|
||||||
|
logger.info('Manual proxy fetch job added', { jobId: job.id });
|
||||||
|
return job;
|
||||||
|
} catch (error) {
|
||||||
|
logger.error('Failed to trigger proxy fetch', { error });
|
||||||
|
throw error;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Check specific proxies immediately
|
||||||
|
*/
|
||||||
|
async checkSpecificProxies(proxies: ProxyInfo[]) {
|
||||||
|
try {
|
||||||
|
const job = await queueService.addImmediateProxyCheck(proxies);
|
||||||
|
logger.info('Specific proxy check job added', {
|
||||||
|
jobId: job.id,
|
||||||
|
proxiesCount: proxies.length
|
||||||
|
});
|
||||||
|
return job;
|
||||||
|
} catch (error) {
|
||||||
|
logger.error('Failed to check specific proxies', { error });
|
||||||
|
throw error;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get queue statistics
|
||||||
|
*/
|
||||||
|
async getStats() {
|
||||||
|
try {
|
||||||
|
return await queueService.getQueueStats();
|
||||||
|
} catch (error) {
|
||||||
|
logger.error('Failed to get queue stats', { error });
|
||||||
|
throw error;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get the queue instance for Bull Board monitoring
|
||||||
|
*/
|
||||||
|
async getQueue() {
|
||||||
|
return await queueService.getQueue();
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Shutdown queue service gracefully
|
||||||
|
*/
|
||||||
|
async shutdown() {
|
||||||
|
try {
|
||||||
|
await queueService.shutdown();
|
||||||
|
logger.info('Proxy queue integration shut down');
|
||||||
|
} catch (error) {
|
||||||
|
logger.error('Error during shutdown', { error });
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export const proxyQueueIntegration = new ProxyQueueIntegration();
|
||||||
|
|
@ -2,15 +2,16 @@ import { Logger } from '@stock-bot/logger';
|
||||||
import createCache, { type CacheProvider } from '@stock-bot/cache';
|
import createCache, { type CacheProvider } from '@stock-bot/cache';
|
||||||
import { HttpClient, ProxyInfo } from '@stock-bot/http';
|
import { HttpClient, ProxyInfo } from '@stock-bot/http';
|
||||||
import pLimit from 'p-limit';
|
import pLimit from 'p-limit';
|
||||||
|
import { queueService } from './queue.service';
|
||||||
|
|
||||||
export class ProxyService {
|
export class ProxyService {
|
||||||
private logger = new Logger('proxy-service');
|
private logger = new Logger('proxy-service');
|
||||||
private cache: CacheProvider = createCache('hybrid');
|
private cache: CacheProvider = createCache('hybrid');
|
||||||
private httpClient: HttpClient;
|
private httpClient: HttpClient;
|
||||||
private readonly concurrencyLimit = pLimit(250);
|
private readonly concurrencyLimit = pLimit(100);
|
||||||
private readonly CACHE_KEY = 'proxy';
|
private readonly CACHE_KEY = 'proxy';
|
||||||
private readonly CACHE_TTL = 86400; // 24 hours
|
private readonly CACHE_TTL = 86400; // 24 hours
|
||||||
private readonly CHECK_TIMEOUT = 5000;
|
private readonly CHECK_TIMEOUT = 7000;
|
||||||
private readonly CHECK_IP = '99.246.102.205'
|
private readonly CHECK_IP = '99.246.102.205'
|
||||||
private readonly CHECK_URL = 'https://proxy-detection.stare.gg/?api_key=bd406bf53ddc6abe1d9de5907830a955';
|
private readonly CHECK_URL = 'https://proxy-detection.stare.gg/?api_key=bd406bf53ddc6abe1d9de5907830a955';
|
||||||
private readonly PROXY_SOURCES = [
|
private readonly PROXY_SOURCES = [
|
||||||
|
|
@ -20,13 +21,10 @@ export class ProxyService {
|
||||||
{url: 'https://raw.githubusercontent.com/vakhov/fresh-proxy-list/master/http.txt',protocol: 'http', },
|
{url: 'https://raw.githubusercontent.com/vakhov/fresh-proxy-list/master/http.txt',protocol: 'http', },
|
||||||
{url: 'https://raw.githubusercontent.com/sunny9577/proxy-scraper/master/proxies.txt',protocol: 'http', },
|
{url: 'https://raw.githubusercontent.com/sunny9577/proxy-scraper/master/proxies.txt',protocol: 'http', },
|
||||||
{url: 'https://raw.githubusercontent.com/officialputuid/KangProxy/refs/heads/KangProxy/http/http.txt',protocol: 'http', },
|
{url: 'https://raw.githubusercontent.com/officialputuid/KangProxy/refs/heads/KangProxy/http/http.txt',protocol: 'http', },
|
||||||
// {url: 'https://github.com/zloi-user/hideip.me/raw/refs/heads/master/http.txt',protocol: 'http', },
|
|
||||||
{url: 'https://raw.githubusercontent.com/gfpcom/free-proxy-list/refs/heads/main/list/http.txt', protocol: 'http' },
|
{url: 'https://raw.githubusercontent.com/gfpcom/free-proxy-list/refs/heads/main/list/http.txt', protocol: 'http' },
|
||||||
{url: 'https://raw.githubusercontent.com/dpangestuw/Free-Proxy/refs/heads/main/http_proxies.txt',protocol: 'http', },
|
{url: 'https://raw.githubusercontent.com/dpangestuw/Free-Proxy/refs/heads/main/http_proxies.txt',protocol: 'http', },
|
||||||
// {url: 'https://raw.githubusercontent.com/zloi-user/hideip.me/refs/heads/master/http.txt',protocol: 'http', },
|
|
||||||
{url: 'https://raw.githubusercontent.com/gitrecon1455/fresh-proxy-list/refs/heads/main/proxylist.txt',protocol: 'http', },
|
{url: 'https://raw.githubusercontent.com/gitrecon1455/fresh-proxy-list/refs/heads/main/proxylist.txt',protocol: 'http', },
|
||||||
{url: 'https://raw.githubusercontent.com/themiralay/Proxy-List-World/refs/heads/master/data.txt',protocol: 'http', },
|
{url: 'https://raw.githubusercontent.com/themiralay/Proxy-List-World/refs/heads/master/data.txt',protocol: 'http', },
|
||||||
{url: 'https://raw.githubusercontent.com/dpangestuw/Free-Proxy/refs/heads/main/http_proxies.txt',protocol: 'http', },
|
|
||||||
{url: 'https://raw.githubusercontent.com/vakhov/fresh-proxy-list/refs/heads/master/http.txt',protocol: 'http', },
|
{url: 'https://raw.githubusercontent.com/vakhov/fresh-proxy-list/refs/heads/master/http.txt',protocol: 'http', },
|
||||||
{url: 'https://raw.githubusercontent.com/casa-ls/proxy-list/refs/heads/main/http',protocol: 'http', },
|
{url: 'https://raw.githubusercontent.com/casa-ls/proxy-list/refs/heads/main/http',protocol: 'http', },
|
||||||
{url: 'https://raw.githubusercontent.com/databay-labs/free-proxy-list/refs/heads/master/http.txt',protocol: 'http', },
|
{url: 'https://raw.githubusercontent.com/databay-labs/free-proxy-list/refs/heads/master/http.txt',protocol: 'http', },
|
||||||
|
|
@ -35,17 +33,15 @@ export class ProxyService {
|
||||||
{url: 'https://raw.githubusercontent.com/ErcinDedeoglu/proxies/main/proxies/http.txt',protocol: 'http', },
|
{url: 'https://raw.githubusercontent.com/ErcinDedeoglu/proxies/main/proxies/http.txt',protocol: 'http', },
|
||||||
{url: 'https://raw.githubusercontent.com/monosans/proxy-list/main/proxies/http.txt',protocol: 'http', },
|
{url: 'https://raw.githubusercontent.com/monosans/proxy-list/main/proxies/http.txt',protocol: 'http', },
|
||||||
{url: 'https://raw.githubusercontent.com/TuanMinPay/live-proxy/master/http.txt',protocol: 'http', },
|
{url: 'https://raw.githubusercontent.com/TuanMinPay/live-proxy/master/http.txt',protocol: 'http', },
|
||||||
{url: 'https://raw.githubusercontent.com/casals-ar/proxy-list/main/http',protocol: 'http', },
|
// {url: 'https://raw.githubusercontent.com/zloi-user/hideip.me/refs/heads/master/http.txt',protocol: 'http', },
|
||||||
|
|
||||||
|
|
||||||
{url: 'https://raw.githubusercontent.com/r00tee/Proxy-List/refs/heads/main/Https.txt',protocol: 'https', },
|
|
||||||
{url: 'https://raw.githubusercontent.com/r00tee/Proxy-List/refs/heads/main/Https.txt',protocol: 'https', },
|
{url: 'https://raw.githubusercontent.com/r00tee/Proxy-List/refs/heads/main/Https.txt',protocol: 'https', },
|
||||||
{url: 'https://raw.githubusercontent.com/ErcinDedeoglu/proxies/main/proxies/https.txt',protocol: 'https', },
|
{url: 'https://raw.githubusercontent.com/ErcinDedeoglu/proxies/main/proxies/https.txt',protocol: 'https', },
|
||||||
{url: 'https://raw.githubusercontent.com/vakhov/fresh-proxy-list/refs/heads/master/https.txt', protocol: 'https' },
|
{url: 'https://raw.githubusercontent.com/vakhov/fresh-proxy-list/refs/heads/master/https.txt', protocol: 'https' },
|
||||||
{url: 'https://raw.githubusercontent.com/databay-labs/free-proxy-list/refs/heads/master/https.txt',protocol: 'https', },
|
{url: 'https://raw.githubusercontent.com/databay-labs/free-proxy-list/refs/heads/master/https.txt',protocol: 'https', },
|
||||||
{url: 'https://raw.githubusercontent.com/officialputuid/KangProxy/refs/heads/KangProxy/https/https.txt',protocol: 'https', },
|
{url: 'https://raw.githubusercontent.com/officialputuid/KangProxy/refs/heads/KangProxy/https/https.txt',protocol: 'https', },
|
||||||
// {url: 'https://raw.githubusercontent.com/zloi-user/hideip.me/refs/heads/master/https.txt',protocol: 'https', },
|
{url: 'https://raw.githubusercontent.com/zloi-user/hideip.me/refs/heads/master/https.txt',protocol: 'https', },
|
||||||
// {url: 'https://raw.githubusercontent.com/gfpcom/free-proxy-list/refs/heads/main/list/https.txt',protocol: 'https', },
|
{url: 'https://raw.githubusercontent.com/gfpcom/free-proxy-list/refs/heads/main/list/https.txt',protocol: 'https', },
|
||||||
// {url: 'https://raw.githubusercontent.com/MuRongPIG/Proxy-Master/main/socks4.txt',protocol: 'socks4', },
|
// {url: 'https://raw.githubusercontent.com/MuRongPIG/Proxy-Master/main/socks4.txt',protocol: 'socks4', },
|
||||||
// {url: 'https://raw.githubusercontent.com/TheSpeedX/PROXY-List/master/socks4.txt',protocol: 'socks4', },
|
// {url: 'https://raw.githubusercontent.com/TheSpeedX/PROXY-List/master/socks4.txt',protocol: 'socks4', },
|
||||||
// {url: 'https://raw.githubusercontent.com/TuanMinPay/live-proxy/master/socks4.txt',protocol: 'socks4', },
|
// {url: 'https://raw.githubusercontent.com/TuanMinPay/live-proxy/master/socks4.txt',protocol: 'socks4', },
|
||||||
|
|
@ -68,12 +64,24 @@ export class ProxyService {
|
||||||
|
|
||||||
constructor() {
|
constructor() {
|
||||||
this.httpClient = new HttpClient({
|
this.httpClient = new HttpClient({
|
||||||
timeout: this.CHECK_TIMEOUT,
|
timeout: 10000,
|
||||||
}, this.logger);
|
}, this.logger);
|
||||||
|
|
||||||
|
// Start scheduled tasks
|
||||||
|
this.initializeScheduling();
|
||||||
|
|
||||||
this.logger.info('ProxyService initialized');
|
this.logger.info('ProxyService initialized');
|
||||||
}
|
}
|
||||||
|
|
||||||
|
private async initializeScheduling() {
|
||||||
|
try {
|
||||||
|
await queueService.scheduleRecurringTasks();
|
||||||
|
this.logger.info('Proxy scheduling initialized');
|
||||||
|
} catch (error) {
|
||||||
|
this.logger.error('Failed to initialize scheduling', { error });
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
async fetchProxiesFromSources() : Promise<number> {
|
async fetchProxiesFromSources() : Promise<number> {
|
||||||
const sources = this.PROXY_SOURCES.map(source =>
|
const sources = this.PROXY_SOURCES.map(source =>
|
||||||
|
|
|
||||||
198
apps/data-service/src/services/queue.service.ts
Normal file
198
apps/data-service/src/services/queue.service.ts
Normal file
|
|
@ -0,0 +1,198 @@
|
||||||
|
/**
|
||||||
|
* BullMQ Queue Service
|
||||||
|
* Handles job scheduling and processing for the data service
|
||||||
|
*/
|
||||||
|
import { Queue, Worker, QueueEvents } from 'bullmq';
|
||||||
|
import { getLogger } from '@stock-bot/logger';
|
||||||
|
import type { ProxyInfo } from '@stock-bot/http';
|
||||||
|
|
||||||
|
const logger = getLogger('queue-service');
|
||||||
|
|
||||||
|
export interface ProxyJobData {
|
||||||
|
type: 'fetch-and-check' | 'check-specific' | 'clear-cache';
|
||||||
|
proxies?: ProxyInfo[];
|
||||||
|
}
|
||||||
|
|
||||||
|
export class QueueService {
|
||||||
|
private queue: Queue;
|
||||||
|
private worker: Worker;
|
||||||
|
private queueEvents: QueueEvents;
|
||||||
|
|
||||||
|
constructor() {
|
||||||
|
const connection = {
|
||||||
|
host: process.env.DRAGONFLY_HOST || 'localhost',
|
||||||
|
port: parseInt(process.env.DRAGONFLY_PORT || '6379'),
|
||||||
|
};
|
||||||
|
|
||||||
|
// Create queue
|
||||||
|
this.queue = new Queue('proxy-tasks', { connection });
|
||||||
|
|
||||||
|
// Create worker
|
||||||
|
this.worker = new Worker('proxy-tasks', this.processJob.bind(this), {
|
||||||
|
connection,
|
||||||
|
concurrency: 3,
|
||||||
|
});
|
||||||
|
|
||||||
|
// Create queue events for monitoring
|
||||||
|
this.queueEvents = new QueueEvents('proxy-tasks', { connection });
|
||||||
|
|
||||||
|
this.setupEventListeners();
|
||||||
|
logger.info('Queue service initialized', { connection });
|
||||||
|
}
|
||||||
|
|
||||||
|
private async processJob(job: any) {
|
||||||
|
const { type, proxies }: ProxyJobData = job.data;
|
||||||
|
logger.info('Processing job', {
|
||||||
|
id: job.id,
|
||||||
|
type,
|
||||||
|
proxiesCount: proxies?.length
|
||||||
|
});
|
||||||
|
|
||||||
|
try {
|
||||||
|
switch (type) {
|
||||||
|
case 'fetch-and-check':
|
||||||
|
// Import proxy service dynamically to avoid circular dependencies
|
||||||
|
const { proxyService } = await import('./proxy.service');
|
||||||
|
return await proxyService.fetchProxiesFromSources();
|
||||||
|
|
||||||
|
case 'check-specific':
|
||||||
|
if (!proxies) throw new Error('Proxies required for check-specific job');
|
||||||
|
const { proxyService: ps } = await import('./proxy.service');
|
||||||
|
return await ps.checkProxies(proxies);
|
||||||
|
|
||||||
|
case 'clear-cache':
|
||||||
|
// Clear proxy cache
|
||||||
|
const { proxyService: pcs } = await import('./proxy.service');
|
||||||
|
// Assuming you have a clearCache method
|
||||||
|
// return await pcs.clearCache();
|
||||||
|
logger.info('Cache clear job processed');
|
||||||
|
return { cleared: true };
|
||||||
|
|
||||||
|
default:
|
||||||
|
throw new Error(`Unknown job type: ${type}`);
|
||||||
|
}
|
||||||
|
} catch (error) {
|
||||||
|
logger.error('Job processing failed', {
|
||||||
|
id: job.id,
|
||||||
|
type,
|
||||||
|
error: error instanceof Error ? error.message : String(error)
|
||||||
|
});
|
||||||
|
throw error;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
private setupEventListeners() {
|
||||||
|
this.worker.on('completed', (job) => {
|
||||||
|
logger.info('Job completed', {
|
||||||
|
id: job.id,
|
||||||
|
type: job.data.type,
|
||||||
|
result: job.returnvalue
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
this.worker.on('failed', (job, err) => {
|
||||||
|
logger.error('Job failed', {
|
||||||
|
id: job?.id,
|
||||||
|
type: job?.data.type,
|
||||||
|
error: err.message
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
this.worker.on('progress', (job, progress) => {
|
||||||
|
logger.debug('Job progress', {
|
||||||
|
id: job.id,
|
||||||
|
progress: `${progress}%`
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
this.queueEvents.on('waiting', ({ jobId }) => {
|
||||||
|
logger.debug('Job waiting', { jobId });
|
||||||
|
});
|
||||||
|
|
||||||
|
this.queueEvents.on('active', ({ jobId }) => {
|
||||||
|
logger.debug('Job active', { jobId });
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
async scheduleRecurringTasks() {
|
||||||
|
// Fetch and check proxies every 15 minutes
|
||||||
|
await this.queue.add('fetch-and-check',
|
||||||
|
{ type: 'fetch-and-check' },
|
||||||
|
{
|
||||||
|
repeat: { pattern: '*/15 * * * *' },
|
||||||
|
removeOnComplete: 10,
|
||||||
|
removeOnFail: 5,
|
||||||
|
jobId: 'recurring-proxy-fetch', // Use consistent ID to prevent duplicates
|
||||||
|
}
|
||||||
|
);
|
||||||
|
|
||||||
|
// Clear cache daily at midnight
|
||||||
|
await this.queue.add('clear-cache',
|
||||||
|
{ type: 'clear-cache' },
|
||||||
|
{
|
||||||
|
repeat: { pattern: '0 0 * * *' },
|
||||||
|
removeOnComplete: 1,
|
||||||
|
removeOnFail: 1,
|
||||||
|
jobId: 'daily-cache-clear',
|
||||||
|
}
|
||||||
|
);
|
||||||
|
|
||||||
|
logger.info('Recurring tasks scheduled');
|
||||||
|
}
|
||||||
|
|
||||||
|
async addImmediateProxyCheck(proxies: ProxyInfo[]) {
|
||||||
|
return await this.queue.add('check-specific',
|
||||||
|
{ type: 'check-specific', proxies },
|
||||||
|
{
|
||||||
|
priority: 10,
|
||||||
|
removeOnComplete: 5,
|
||||||
|
removeOnFail: 3,
|
||||||
|
}
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
async addManualProxyFetch() {
|
||||||
|
return await this.queue.add('fetch-and-check',
|
||||||
|
{ type: 'fetch-and-check' },
|
||||||
|
{
|
||||||
|
priority: 5,
|
||||||
|
removeOnComplete: 5,
|
||||||
|
removeOnFail: 3,
|
||||||
|
}
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
async getQueueStats() {
|
||||||
|
const [waiting, active, completed, failed, delayed] = await Promise.all([
|
||||||
|
this.queue.getWaiting(),
|
||||||
|
this.queue.getActive(),
|
||||||
|
this.queue.getCompleted(),
|
||||||
|
this.queue.getFailed(),
|
||||||
|
this.queue.getDelayed(),
|
||||||
|
]);
|
||||||
|
|
||||||
|
return {
|
||||||
|
waiting: waiting.length,
|
||||||
|
active: active.length,
|
||||||
|
completed: completed.length,
|
||||||
|
failed: failed.length,
|
||||||
|
delayed: delayed.length,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
async getQueue() {
|
||||||
|
return this.queue;
|
||||||
|
}
|
||||||
|
|
||||||
|
async shutdown() {
|
||||||
|
logger.info('Shutting down queue service...');
|
||||||
|
|
||||||
|
await this.worker.close();
|
||||||
|
await this.queue.close();
|
||||||
|
await this.queueEvents.close();
|
||||||
|
|
||||||
|
logger.info('Queue service shut down');
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export const queueService = new QueueService();
|
||||||
14
bun.lock
14
bun.lock
|
|
@ -4,6 +4,7 @@
|
||||||
"": {
|
"": {
|
||||||
"name": "stock-bot",
|
"name": "stock-bot",
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
|
"bullmq": "^5.53.2",
|
||||||
"envalid": "^8.0.0",
|
"envalid": "^8.0.0",
|
||||||
"valibot": "^1.1.0",
|
"valibot": "^1.1.0",
|
||||||
},
|
},
|
||||||
|
|
@ -70,6 +71,7 @@
|
||||||
"@stock-bot/questdb-client": "*",
|
"@stock-bot/questdb-client": "*",
|
||||||
"@stock-bot/shutdown": "*",
|
"@stock-bot/shutdown": "*",
|
||||||
"@stock-bot/types": "*",
|
"@stock-bot/types": "*",
|
||||||
|
"bullmq": "^5.53.2",
|
||||||
"hono": "^4.0.0",
|
"hono": "^4.0.0",
|
||||||
"p-limit": "^6.2.0",
|
"p-limit": "^6.2.0",
|
||||||
"ws": "^8.0.0",
|
"ws": "^8.0.0",
|
||||||
|
|
@ -1002,6 +1004,8 @@
|
||||||
|
|
||||||
"buildcheck": ["buildcheck@0.0.6", "", {}, "sha512-8f9ZJCUXyT1M35Jx7MkBgmBMo3oHTTBIPLiY9xyL0pl3T5RwcPEY8cUHr5LBNfu/fk6c2T4DJZuVM/8ZZT2D2A=="],
|
"buildcheck": ["buildcheck@0.0.6", "", {}, "sha512-8f9ZJCUXyT1M35Jx7MkBgmBMo3oHTTBIPLiY9xyL0pl3T5RwcPEY8cUHr5LBNfu/fk6c2T4DJZuVM/8ZZT2D2A=="],
|
||||||
|
|
||||||
|
"bullmq": ["bullmq@5.53.2", "", { "dependencies": { "cron-parser": "^4.9.0", "ioredis": "^5.4.1", "msgpackr": "^1.11.2", "node-abort-controller": "^3.1.1", "semver": "^7.5.4", "tslib": "^2.0.0", "uuid": "^9.0.0" } }, "sha512-xHgxrP/yNJHD7VCw1h+eRBh+2TCPBCM39uC9gCyksYc6ufcJP+HTZ/A2lzB2x7qMFWrvsX7tM40AT2BmdkYL/Q=="],
|
||||||
|
|
||||||
"bun-types": ["bun-types@1.2.15", "", { "dependencies": { "@types/node": "*" } }, "sha512-NarRIaS+iOaQU1JPfyKhZm4AsUOrwUOqRNHY0XxI8GI8jYxiLXLcdjYMG9UKS+fwWasc1uw1htV9AX24dD+p4w=="],
|
"bun-types": ["bun-types@1.2.15", "", { "dependencies": { "@types/node": "*" } }, "sha512-NarRIaS+iOaQU1JPfyKhZm4AsUOrwUOqRNHY0XxI8GI8jYxiLXLcdjYMG9UKS+fwWasc1uw1htV9AX24dD+p4w=="],
|
||||||
|
|
||||||
"byline": ["byline@5.0.0", "", {}, "sha512-s6webAy+R4SR8XVuJWt2V2rGvhnrhxN+9S15GNuTK3wKPOXFF6RNc+8ug2XhH+2s4f+uudG4kUVYmYOQWL2g0Q=="],
|
"byline": ["byline@5.0.0", "", {}, "sha512-s6webAy+R4SR8XVuJWt2V2rGvhnrhxN+9S15GNuTK3wKPOXFF6RNc+8ug2XhH+2s4f+uudG4kUVYmYOQWL2g0Q=="],
|
||||||
|
|
@ -1084,6 +1088,8 @@
|
||||||
|
|
||||||
"crc32-stream": ["crc32-stream@6.0.0", "", { "dependencies": { "crc-32": "^1.2.0", "readable-stream": "^4.0.0" } }, "sha512-piICUB6ei4IlTv1+653yq5+KoqfBYmj9bw6LqXoOneTMDXk5nM1qt12mFW1caG3LlJXEKW1Bp0WggEmIfQB34g=="],
|
"crc32-stream": ["crc32-stream@6.0.0", "", { "dependencies": { "crc-32": "^1.2.0", "readable-stream": "^4.0.0" } }, "sha512-piICUB6ei4IlTv1+653yq5+KoqfBYmj9bw6LqXoOneTMDXk5nM1qt12mFW1caG3LlJXEKW1Bp0WggEmIfQB34g=="],
|
||||||
|
|
||||||
|
"cron-parser": ["cron-parser@4.9.0", "", { "dependencies": { "luxon": "^3.2.1" } }, "sha512-p0SaNjrHOnQeR8/VnfGbmg9te2kfyYSQ7Sc/j/6DtPL3JQvKxmjO9TSjNFpujqV3vEYYBvNNvXSxzyksBWAx1Q=="],
|
||||||
|
|
||||||
"cross-spawn": ["cross-spawn@7.0.6", "", { "dependencies": { "path-key": "^3.1.0", "shebang-command": "^2.0.0", "which": "^2.0.1" } }, "sha512-uV2QOWP2nWzsy2aMp8aRibhi9dlzF5Hgh5SHaB9OiTGEyDTiJJyx0uy51QXdyWbtAHNua4XJzUKca3OzKUd3vA=="],
|
"cross-spawn": ["cross-spawn@7.0.6", "", { "dependencies": { "path-key": "^3.1.0", "shebang-command": "^2.0.0", "which": "^2.0.1" } }, "sha512-uV2QOWP2nWzsy2aMp8aRibhi9dlzF5Hgh5SHaB9OiTGEyDTiJJyx0uy51QXdyWbtAHNua4XJzUKca3OzKUd3vA=="],
|
||||||
|
|
||||||
"css-select": ["css-select@5.1.0", "", { "dependencies": { "boolbase": "^1.0.0", "css-what": "^6.1.0", "domhandler": "^5.0.2", "domutils": "^3.0.1", "nth-check": "^2.0.1" } }, "sha512-nwoRF1rvRRnnCqqY7updORDsuqKzqYJ28+oSMaJMMgOauh3fvwHqMS7EZpIPqK8GL+g9mKxF1vP/ZjSeNjEVHg=="],
|
"css-select": ["css-select@5.1.0", "", { "dependencies": { "boolbase": "^1.0.0", "css-what": "^6.1.0", "domhandler": "^5.0.2", "domutils": "^3.0.1", "nth-check": "^2.0.1" } }, "sha512-nwoRF1rvRRnnCqqY7updORDsuqKzqYJ28+oSMaJMMgOauh3fvwHqMS7EZpIPqK8GL+g9mKxF1vP/ZjSeNjEVHg=="],
|
||||||
|
|
@ -1524,6 +1530,8 @@
|
||||||
|
|
||||||
"lru-cache": ["lru-cache@6.0.0", "", { "dependencies": { "yallist": "^4.0.0" } }, "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA=="],
|
"lru-cache": ["lru-cache@6.0.0", "", { "dependencies": { "yallist": "^4.0.0" } }, "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA=="],
|
||||||
|
|
||||||
|
"luxon": ["luxon@3.6.1", "", {}, "sha512-tJLxrKJhO2ukZ5z0gyjY1zPh3Rh88Ej9P7jNrZiHMUXHae1yvI2imgOZtL1TO8TW6biMMKfTtAOoEJANgtWBMQ=="],
|
||||||
|
|
||||||
"magic-string": ["magic-string@0.30.17", "", { "dependencies": { "@jridgewell/sourcemap-codec": "^1.5.0" } }, "sha512-sNPKHvyjVf7gyjwS4xGTaW/mCnF8wnjtifKBEhxfZ7E/S8tQ0rssrwGNn6q8JH/ohItJfSQp9mBtQYuTlH5QnA=="],
|
"magic-string": ["magic-string@0.30.17", "", { "dependencies": { "@jridgewell/sourcemap-codec": "^1.5.0" } }, "sha512-sNPKHvyjVf7gyjwS4xGTaW/mCnF8wnjtifKBEhxfZ7E/S8tQ0rssrwGNn6q8JH/ohItJfSQp9mBtQYuTlH5QnA=="],
|
||||||
|
|
||||||
"make-dir": ["make-dir@4.0.0", "", { "dependencies": { "semver": "^7.5.3" } }, "sha512-hXdUTZYIVOt1Ex//jAQi+wTZZpUpwBj/0QsOzqegb3rGMMeJiSEu5xLHnYfBrRV4RH2+OCSOO95Is/7x1WJ4bw=="],
|
"make-dir": ["make-dir@4.0.0", "", { "dependencies": { "semver": "^7.5.3" } }, "sha512-hXdUTZYIVOt1Ex//jAQi+wTZZpUpwBj/0QsOzqegb3rGMMeJiSEu5xLHnYfBrRV4RH2+OCSOO95Is/7x1WJ4bw=="],
|
||||||
|
|
@ -1610,6 +1618,8 @@
|
||||||
|
|
||||||
"new-find-package-json": ["new-find-package-json@2.0.0", "", { "dependencies": { "debug": "^4.3.4" } }, "sha512-lDcBsjBSMlj3LXH2v/FW3txlh2pYTjmbOXPYJD93HI5EwuLzI11tdHSIpUMmfq/IOsldj4Ps8M8flhm+pCK4Ew=="],
|
"new-find-package-json": ["new-find-package-json@2.0.0", "", { "dependencies": { "debug": "^4.3.4" } }, "sha512-lDcBsjBSMlj3LXH2v/FW3txlh2pYTjmbOXPYJD93HI5EwuLzI11tdHSIpUMmfq/IOsldj4Ps8M8flhm+pCK4Ew=="],
|
||||||
|
|
||||||
|
"node-abort-controller": ["node-abort-controller@3.1.1", "", {}, "sha512-AGK2yQKIjRuqnc6VkX2Xj5d+QW8xZ87pa1UK6yA6ouUyuxfHuMP6umE5QK7UmTeOAymo+Zx1Fxiuw9rVx8taHQ=="],
|
||||||
|
|
||||||
"node-addon-api": ["node-addon-api@6.1.0", "", {}, "sha512-+eawOlIgy680F0kBzPUNFhMZGtJ1YmqM6l4+Crf4IkImjYrO/mqPwRMh352g23uIaQKFItcQ64I7KMaJxHgAVA=="],
|
"node-addon-api": ["node-addon-api@6.1.0", "", {}, "sha512-+eawOlIgy680F0kBzPUNFhMZGtJ1YmqM6l4+Crf4IkImjYrO/mqPwRMh352g23uIaQKFItcQ64I7KMaJxHgAVA=="],
|
||||||
|
|
||||||
"node-gyp": ["node-gyp@11.2.0", "", { "dependencies": { "env-paths": "^2.2.0", "exponential-backoff": "^3.1.1", "graceful-fs": "^4.2.6", "make-fetch-happen": "^14.0.3", "nopt": "^8.0.0", "proc-log": "^5.0.0", "semver": "^7.3.5", "tar": "^7.4.3", "tinyglobby": "^0.2.12", "which": "^5.0.0" }, "bin": { "node-gyp": "bin/node-gyp.js" } }, "sha512-T0S1zqskVUSxcsSTkAsLc7xCycrRYmtDHadDinzocrThjyQCn5kMlEBSj6H4qDbgsIOSLmmlRIeb0lZXj+UArA=="],
|
"node-gyp": ["node-gyp@11.2.0", "", { "dependencies": { "env-paths": "^2.2.0", "exponential-backoff": "^3.1.1", "graceful-fs": "^4.2.6", "make-fetch-happen": "^14.0.3", "nopt": "^8.0.0", "proc-log": "^5.0.0", "semver": "^7.3.5", "tar": "^7.4.3", "tinyglobby": "^0.2.12", "which": "^5.0.0" }, "bin": { "node-gyp": "bin/node-gyp.js" } }, "sha512-T0S1zqskVUSxcsSTkAsLc7xCycrRYmtDHadDinzocrThjyQCn5kMlEBSj6H4qDbgsIOSLmmlRIeb0lZXj+UArA=="],
|
||||||
|
|
@ -2042,7 +2052,7 @@
|
||||||
|
|
||||||
"utils-merge": ["utils-merge@1.0.1", "", {}, "sha512-pMZTvIkT1d+TFGvDOqodOclx0QWkkgi6Tdoa8gC8ffGAAqz9pzPTZWAybbsHHoED/ztMtkv/VoYTYyShUn81hA=="],
|
"utils-merge": ["utils-merge@1.0.1", "", {}, "sha512-pMZTvIkT1d+TFGvDOqodOclx0QWkkgi6Tdoa8gC8ffGAAqz9pzPTZWAybbsHHoED/ztMtkv/VoYTYyShUn81hA=="],
|
||||||
|
|
||||||
"uuid": ["uuid@10.0.0", "", { "bin": { "uuid": "dist/bin/uuid" } }, "sha512-8XkAphELsDnEGrDxUOHB3RGvXz6TeuYSGEZBOjtTtPm2lwhGBjLgOzLHB63IUWfBpNucQjND6d3AOudO+H3RWQ=="],
|
"uuid": ["uuid@9.0.1", "", { "bin": { "uuid": "dist/bin/uuid" } }, "sha512-b+1eJOlsR9K8HJpow9Ok3fiWOWSIcIzXodvv0rQjVoOVNpWMpxf1wZNpt4y9h10odCNrqnYp1OBzRktckBe3sA=="],
|
||||||
|
|
||||||
"valibot": ["valibot@1.1.0", "", { "peerDependencies": { "typescript": ">=5" }, "optionalPeers": ["typescript"] }, "sha512-Nk8lX30Qhu+9txPYTwM0cFlWLdPFsFr6LblzqIySfbZph9+BFsAHsNvHOymEviUepeIW6KFHzpX8TKhbptBXXw=="],
|
"valibot": ["valibot@1.1.0", "", { "peerDependencies": { "typescript": ">=5" }, "optionalPeers": ["typescript"] }, "sha512-Nk8lX30Qhu+9txPYTwM0cFlWLdPFsFr6LblzqIySfbZph9+BFsAHsNvHOymEviUepeIW6KFHzpX8TKhbptBXXw=="],
|
||||||
|
|
||||||
|
|
@ -2226,6 +2236,8 @@
|
||||||
|
|
||||||
"dockerode/tar-fs": ["tar-fs@2.1.3", "", { "dependencies": { "chownr": "^1.1.1", "mkdirp-classic": "^0.5.2", "pump": "^3.0.0", "tar-stream": "^2.1.4" } }, "sha512-090nwYJDmlhwFwEW3QQl+vaNnxsO2yVsd45eTKRBzSzu+hlb1w2K9inVq5b0ngXuLVqQ4ApvsUHHnu/zQNkWAg=="],
|
"dockerode/tar-fs": ["tar-fs@2.1.3", "", { "dependencies": { "chownr": "^1.1.1", "mkdirp-classic": "^0.5.2", "pump": "^3.0.0", "tar-stream": "^2.1.4" } }, "sha512-090nwYJDmlhwFwEW3QQl+vaNnxsO2yVsd45eTKRBzSzu+hlb1w2K9inVq5b0ngXuLVqQ4ApvsUHHnu/zQNkWAg=="],
|
||||||
|
|
||||||
|
"dockerode/uuid": ["uuid@10.0.0", "", { "bin": { "uuid": "dist/bin/uuid" } }, "sha512-8XkAphELsDnEGrDxUOHB3RGvXz6TeuYSGEZBOjtTtPm2lwhGBjLgOzLHB63IUWfBpNucQjND6d3AOudO+H3RWQ=="],
|
||||||
|
|
||||||
"dom-serializer/entities": ["entities@4.5.0", "", {}, "sha512-V0hjH4dGPh9Ao5p0MoRY6BVqtwCjhz6vI5LT8AJ55H+4g9/4vbHx1I54fS0XuclLhDHArPQCiMjDxjaL8fPxhw=="],
|
"dom-serializer/entities": ["entities@4.5.0", "", {}, "sha512-V0hjH4dGPh9Ao5p0MoRY6BVqtwCjhz6vI5LT8AJ55H+4g9/4vbHx1I54fS0XuclLhDHArPQCiMjDxjaL8fPxhw=="],
|
||||||
|
|
||||||
"encoding/iconv-lite": ["iconv-lite@0.6.3", "", { "dependencies": { "safer-buffer": ">= 2.1.2 < 3.0.0" } }, "sha512-4fCk79wshMdzMp2rH06qWrJE4iolqLhCUH+OiuIgU++RB0+94NlDL81atO7GX55uUKueo0txHNtvEyI6D7WdMw=="],
|
"encoding/iconv-lite": ["iconv-lite@0.6.3", "", { "dependencies": { "safer-buffer": ">= 2.1.2 < 3.0.0" } }, "sha512-4fCk79wshMdzMp2rH06qWrJE4iolqLhCUH+OiuIgU++RB0+94NlDL81atO7GX55uUKueo0txHNtvEyI6D7WdMw=="],
|
||||||
|
|
|
||||||
|
|
@ -60,11 +60,6 @@ services:
|
||||||
environment:
|
environment:
|
||||||
- QDB_TELEMETRY_ENABLED=false
|
- QDB_TELEMETRY_ENABLED=false
|
||||||
restart: unless-stopped
|
restart: unless-stopped
|
||||||
healthcheck:
|
|
||||||
test: ["CMD", "curl", "-f", "http://localhost:9000/status"]
|
|
||||||
interval: 30s
|
|
||||||
timeout: 10s
|
|
||||||
retries: 3
|
|
||||||
networks:
|
networks:
|
||||||
- trading-bot-network
|
- trading-bot-network
|
||||||
|
|
||||||
|
|
@ -179,7 +174,6 @@ services:
|
||||||
restart: unless-stopped
|
restart: unless-stopped
|
||||||
networks:
|
networks:
|
||||||
- trading-bot-network
|
- trading-bot-network
|
||||||
|
|
||||||
# Grafana - Visualization for logs and metrics
|
# Grafana - Visualization for logs and metrics
|
||||||
grafana:
|
grafana:
|
||||||
image: grafana/grafana:10.2.0
|
image: grafana/grafana:10.2.0
|
||||||
|
|
@ -201,6 +195,24 @@ services:
|
||||||
networks:
|
networks:
|
||||||
- trading-bot-network
|
- trading-bot-network
|
||||||
|
|
||||||
|
# Bull Board - Queue monitoring
|
||||||
|
bull-board:
|
||||||
|
image: deadly0/bull-board
|
||||||
|
container_name: trading-bot-bull-board
|
||||||
|
ports:
|
||||||
|
- "3001:3000"
|
||||||
|
environment:
|
||||||
|
- REDIS_HOST=dragonfly
|
||||||
|
- REDIS_PORT=6379
|
||||||
|
- REDIS_PASSWORD=
|
||||||
|
- REDIS_DB=0
|
||||||
|
- REDIS_URL=redis://dragonfly:6379
|
||||||
|
depends_on:
|
||||||
|
- dragonfly
|
||||||
|
restart: unless-stopped
|
||||||
|
networks:
|
||||||
|
- trading-bot-network
|
||||||
|
|
||||||
volumes:
|
volumes:
|
||||||
postgres_data:
|
postgres_data:
|
||||||
questdb_data:
|
questdb_data:
|
||||||
|
|
|
||||||
|
|
@ -41,7 +41,6 @@
|
||||||
"dev:full": "npm run infra:up && npm run docker:admin && turbo run dev",
|
"dev:full": "npm run infra:up && npm run docker:admin && turbo run dev",
|
||||||
"dev:clean": "npm run infra:reset && npm run dev:full",
|
"dev:clean": "npm run infra:reset && npm run dev:full",
|
||||||
"proxy": "bun run ./apps/data-service/src/proxy-demo.ts"
|
"proxy": "bun run ./apps/data-service/src/proxy-demo.ts"
|
||||||
|
|
||||||
},
|
},
|
||||||
"workspaces": [
|
"workspaces": [
|
||||||
"libs/*",
|
"libs/*",
|
||||||
|
|
@ -67,6 +66,7 @@
|
||||||
"bun": ">=1.1.0"
|
"bun": ">=1.1.0"
|
||||||
},
|
},
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
|
"bullmq": "^5.53.2",
|
||||||
"envalid": "^8.0.0",
|
"envalid": "^8.0.0",
|
||||||
"valibot": "^1.1.0"
|
"valibot": "^1.1.0"
|
||||||
},
|
},
|
||||||
|
|
|
||||||
Loading…
Add table
Add a link
Reference in a new issue