working on proxy-service

This commit is contained in:
Bojan Kucera 2025-06-05 19:03:17 -04:00
parent b23ca42f4d
commit 953b361d30
14 changed files with 1020 additions and 56 deletions

View file

@ -10,15 +10,15 @@
"start": "bun dist/index.js",
"test": "bun test",
"clean": "rm -rf dist"
},
"dependencies": {
"@stock-bot/config": "workspace:*",
"@stock-bot/logger": "workspace:*",
"@stock-bot/types": "workspace:*",
"@stock-bot/questdb-client": "workspace:*",
"@stock-bot/mongodb-client": "workspace:*",
"@stock-bot/event-bus": "workspace:*",
"@stock-bot/http-client": "workspace:*",
}, "dependencies": {
"@stock-bot/config": "*",
"@stock-bot/logger": "*",
"@stock-bot/types": "*",
"@stock-bot/questdb-client": "*",
"@stock-bot/mongodb-client": "*",
"@stock-bot/event-bus": "*",
"@stock-bot/http-client": "*",
"@stock-bot/cache": "*",
"hono": "^4.0.0",
"ws": "^8.0.0"
},

View file

@ -0,0 +1,258 @@
# Proxy Service
A comprehensive proxy management service for the Stock Bot platform that integrates with existing libraries (Redis cache, logger, http-client) to provide robust proxy scraping, validation, and management capabilities.
## Features
- **Automatic Proxy Scraping**: Scrapes free proxies from multiple public sources
- **Proxy Validation**: Tests proxy connectivity and response times
- **Redis Caching**: Stores proxy data with TTL and working status in Redis
- **Health Monitoring**: Periodic health checks for working proxies
- **Structured Logging**: Comprehensive logging with the platform's logger
- **HTTP Client Integration**: Seamless integration with the existing http-client library
- **Background Processing**: Non-blocking proxy validation and refresh jobs
## Quick Start
```typescript
import { proxyService } from './services/proxy.service.js';
// Start the proxy service with automatic refresh
await proxyService.queueRefreshProxies(30 * 60 * 1000); // Refresh every 30 minutes
await proxyService.startHealthChecks(15 * 60 * 1000); // Health check every 15 minutes
// Get a working proxy
const proxy = await proxyService.getWorkingProxy();
// Use the proxy with HttpClient
import { HttpClient } from '@stock-bot/http-client';
const client = new HttpClient({ proxy });
const response = await client.get('https://api.example.com/data');
```
## Core Methods
### Proxy Management
```typescript
// Scrape proxies from default sources
const count = await proxyService.scrapeProxies();
// Scrape from custom sources
const customSources = [
{
url: 'https://example.com/proxy-list.txt',
type: 'free',
format: 'text',
parser: (content) => parseCustomFormat(content)
}
];
await proxyService.scrapeProxies(customSources);
// Test a specific proxy
const result = await proxyService.checkProxy(proxy, 'http://httpbin.org/ip');
console.log(`Proxy working: ${result.isWorking}, Response time: ${result.responseTime}ms`);
```
### Proxy Retrieval
```typescript
// Get a single working proxy
const proxy = await proxyService.getWorkingProxy();
// Get multiple working proxies
const proxies = await proxyService.getWorkingProxies(10);
// Get all proxies (including non-working)
const allProxies = await proxyService.getAllProxies();
```
### Statistics and Monitoring
```typescript
// Get proxy statistics
const stats = await proxyService.getProxyStats();
console.log(`Total: ${stats.total}, Working: ${stats.working}, Failed: ${stats.failed}`);
console.log(`Average response time: ${stats.avgResponseTime}ms`);
```
### Maintenance
```typescript
// Clear all proxy data
await proxyService.clearProxies();
// Graceful shutdown
await proxyService.shutdown();
```
## Configuration
The service uses environment variables for Redis configuration:
```bash
REDIS_HOST=localhost # Redis host (default: localhost)
REDIS_PORT=6379 # Redis port (default: 6379)
REDIS_DB=0 # Redis database (default: 0)
```
## Proxy Sources
Default sources include:
- TheSpeedX/PROXY-List (HTTP proxies)
- clarketm/proxy-list (HTTP proxies)
- ShiftyTR/Proxy-List (HTTP proxies)
- monosans/proxy-list (HTTP proxies)
### Custom Proxy Sources
You can add custom proxy sources with different formats:
```typescript
const customSource = {
url: 'https://api.example.com/proxies',
type: 'premium',
format: 'json',
parser: (content) => {
const data = JSON.parse(content);
return data.proxies.map(p => ({
type: 'http',
host: p.ip,
port: p.port,
username: p.user,
password: p.pass
}));
}
};
```
## Integration Examples
### With Market Data Collection
```typescript
import { proxyService } from './services/proxy.service.js';
import { HttpClient } from '@stock-bot/http-client';
async function fetchMarketDataWithProxy(symbol: string) {
const proxy = await proxyService.getWorkingProxy();
if (!proxy) {
throw new Error('No working proxies available');
}
const client = new HttpClient({
proxy,
timeout: 10000,
retries: 2
});
try {
return await client.get(`https://api.example.com/stock/${symbol}`);
} catch (error) {
// Mark proxy as potentially failed and try another
await proxyService.checkProxy(proxy);
throw error;
}
}
```
### Proxy Rotation Strategy
```typescript
async function fetchWithProxyRotation(urls: string[]) {
const proxies = await proxyService.getWorkingProxies(urls.length);
const promises = urls.map(async (url, index) => {
const proxy = proxies[index % proxies.length];
const client = new HttpClient({ proxy });
return client.get(url);
});
return Promise.allSettled(promises);
}
```
## Cache Structure
The service stores data in Redis with the following structure:
```
proxy:{host}:{port} # Individual proxy data with status
proxy:working:{host}:{port} # Working proxy references
proxy:stats # Cached statistics
```
## Logging
The service provides structured logging for all operations:
- Proxy scraping progress and results
- Validation results and timing
- Cache operations and statistics
- Error conditions and recovery
## Background Jobs
### Refresh Job
- Scrapes proxies from all sources
- Removes duplicates
- Stores in cache with metadata
- Triggers background validation
### Health Check Job
- Tests existing working proxies
- Updates status in cache
- Removes failed proxies from working set
- Maintains proxy pool health
### Validation Job
- Tests newly scraped proxies
- Updates working status
- Measures response times
- Runs in background to avoid blocking
## Error Handling
The service includes comprehensive error handling:
- Network failures during scraping
- Redis connection issues
- Proxy validation timeouts
- Invalid proxy formats
- Cache operation failures
All errors are logged with context and don't crash the service.
## Performance Considerations
- **Concurrent Validation**: Processes proxies in chunks of 50
- **Rate Limiting**: Includes delays between validation chunks
- **Cache Efficiency**: Uses TTL and working proxy sets
- **Memory Management**: Processes large proxy lists in batches
- **Background Processing**: Validation doesn't block main operations
## Dependencies
- `@stock-bot/cache`: Redis caching with TTL support
- `@stock-bot/logger`: Structured logging with Loki integration
- `@stock-bot/http-client`: HTTP client with built-in proxy support
- `ioredis`: Redis client (via cache library)
- `pino`: High-performance logging (via logger library)
## Limitations
Due to the current Redis cache provider interface:
- Key pattern matching not available
- Bulk operations limited
- Set operations (sadd, srem) not directly supported
The service works around these limitations using individual key operations and maintains functionality while noting areas for future enhancement.
## Future Enhancements
- Premium proxy source integration
- Proxy performance analytics
- Geographic proxy distribution
- Protocol-specific proxy pools (HTTP, HTTPS, SOCKS)
- Enhanced caching with set operations
- Proxy authentication management

View file

@ -0,0 +1,133 @@
import { proxyService } from './services/proxy.service.js';
/**
* Example usage of the ProxyService
*/
async function demonstrateProxyService() {
console.log('🚀 Starting Proxy Service Demo...');
try {
// 1. Start the proxy refresh job (scrapes proxies every 30 minutes)
console.log('📥 Starting proxy refresh job...');
await proxyService.queueRefreshProxies(30 * 60 * 1000); // 30 minutes
// 2. Start health checks (checks working proxies every 15 minutes)
console.log('🔍 Starting proxy health checks...');
await proxyService.startHealthChecks(15 * 60 * 1000); // 15 minutes
// 3. Manually scrape proxies
console.log('🌐 Manually scraping proxies...');
const scrapedCount = await proxyService.scrapeProxies();
console.log(`✅ Scraped ${scrapedCount} unique proxies`);
// 4. Wait a bit for some validation to complete
await new Promise(resolve => setTimeout(resolve, 5000));
// 5. Get proxy statistics
console.log('📊 Getting proxy statistics...');
const stats = await proxyService.getProxyStats();
console.log('Stats:', {
total: stats.total,
working: stats.working,
failed: stats.failed,
avgResponseTime: stats.avgResponseTime + 'ms'
});
// 6. Get a working proxy
console.log('🎯 Getting a working proxy...');
const workingProxy = await proxyService.getWorkingProxy();
if (workingProxy) {
console.log('Working proxy found:', {
host: workingProxy.host,
port: workingProxy.port,
type: workingProxy.type
});
// 7. Test the proxy
console.log('🧪 Testing proxy...');
const testResult = await proxyService.checkProxy(workingProxy);
console.log('Test result:', {
isWorking: testResult.isWorking,
responseTime: testResult.responseTime + 'ms',
error: testResult.error || 'None'
});
} else {
console.log('❌ No working proxies available yet');
}
// 8. Get multiple working proxies
console.log('📋 Getting multiple working proxies...');
const workingProxies = await proxyService.getWorkingProxies(5);
console.log(`Found ${workingProxies.length} working proxies`);
// 9. Example: Using a proxy with HttpClient
if (workingProxies.length > 0) {
console.log('🔄 Example: Using proxy with HttpClient...');
try {
const { HttpClient } = await import('@stock-bot/http-client');
const proxyClient = new HttpClient({
proxy: workingProxies[0],
timeout: 10000
});
const response = await proxyClient.get('http://httpbin.org/ip');
console.log('✅ Request through proxy successful:', response.data);
} catch (error) {
console.log('❌ Request through proxy failed:', error);
}
}
console.log('🎉 Proxy Service Demo completed!');
} catch (error) {
console.error('❌ Demo failed:', error);
}
}
/**
* Example: Custom proxy source
*/
async function demonstrateCustomProxySource() {
console.log('🔧 Demonstrating custom proxy source...');
const customSources = [
{
url: 'https://raw.githubusercontent.com/TheSpeedX/PROXY-List/master/socks4.txt',
type: 'free' as const,
format: 'text' as const,
parser: (content: string) => {
return content.split('\n')
.filter(line => line.trim() && !line.startsWith('#'))
.map(line => {
const [host, port] = line.trim().split(':');
return {
type: 'socks4' as const,
host: host.trim(),
port: parseInt(port.trim())
};
})
.filter(proxy => proxy.host && !isNaN(proxy.port));
}
}
];
try {
const count = await proxyService.scrapeProxies(customSources);
console.log(`✅ Scraped ${count} SOCKS4 proxies from custom source`);
} catch (error) {
console.error('❌ Custom source scraping failed:', error);
}
}
// Export functions for use in other files
export {
demonstrateProxyService,
demonstrateCustomProxySource
};
// If this file is run directly, execute the demo
if (import.meta.main) {
demonstrateProxyService()
.then(() => demonstrateCustomProxySource())
.catch(console.error);
}

View file

@ -0,0 +1,568 @@
import { createLogger } from '@stock-bot/logger';
import { createCache } from '@stock-bot/cache';
import { HttpClient, ProxyConfig } from '@stock-bot/http-client';
import type { CacheProvider } from '@stock-bot/cache';
import type { Logger as PinoLogger } from 'pino';
export interface ProxySource {
url: string;
protocol: 'http' | 'https' | 'socks4' | 'socks5';
parser?: (content: string) => ProxyConfig[];
}
export interface ProxyStats {
total: number;
working: number;
failed: number;
lastCheck: Date;
avgResponseTime: number;
}
export interface ProxyCheckResult {
proxy: ProxyConfig;
isWorking: boolean;
responseTime: number;
error?: string;
checkedAt: Date;
}
export interface ProxyData extends ProxyConfig {
addedAt: Date;
lastChecked: string | null;
isWorking: boolean | null;
responseTime: number | null;
}
export class ProxyService {
private logger: PinoLogger;
private cache: CacheProvider;
private httpClient: HttpClient;
private readonly CACHE_PREFIX = 'proxy:';
private readonly WORKING_PROXIES_KEY = 'proxy:working';
private readonly PROXY_STATS_KEY = 'proxy:stats';
private readonly CHECK_TIMEOUT = 10000; // 10 seconds
private readonly DEFAULT_CHECK_URL = 'http://httpbin.org/ip';
private readonly defaultSources: ProxySource[] = [
{url: 'https://raw.githubusercontent.com/TheSpeedX/PROXY-List/master/http.txt',protocol: 'http', },
{url: 'https://raw.githubusercontent.com/TheSpeedX/PROXY-List/master/socks4.txt',protocol: 'socks4', },
{url: 'https://raw.githubusercontent.com/TheSpeedX/PROXY-List/master/socks5.txt',protocol: 'socks5', },
{url: 'https://raw.githubusercontent.com/hookzof/socks5_list/master/proxy.txt',protocol: 'socks5', },
{url: 'https://raw.githubusercontent.com/sunny9577/proxy-scraper/master/proxies.txt',protocol: 'http', },
{url: 'https://raw.githubusercontent.com/ErcinDedeoglu/proxies/main/proxies/http.txt',protocol: 'http', },
{url: 'https://raw.githubusercontent.com/ErcinDedeoglu/proxies/main/proxies/https.txt',protocol: 'https', },
{url: 'https://raw.githubusercontent.com/ErcinDedeoglu/proxies/main/proxies/socks4.txt',protocol: 'socks4', },
{url: 'https://raw.githubusercontent.com/ErcinDedeoglu/proxies/main/proxies/socks5.txt',protocol: 'socks5', },
{url: 'https://raw.githubusercontent.com/monosans/proxy-list/main/proxies/http.txt',protocol: 'http', },
{url: 'https://raw.githubusercontent.com/monosans/proxy-list/main/proxies/socks4.txt',protocol: 'socks4', },
{url: 'https://raw.githubusercontent.com/monosans/proxy-list/main/proxies/socks5.txt',protocol: 'socks5', },
{url: 'https://raw.githubusercontent.com/TuanMinPay/live-proxy/master/http.txt',protocol: 'http', },
{url: 'https://raw.githubusercontent.com/TuanMinPay/live-proxy/master/socks4.txt',protocol: 'socks4', },
{url: 'https://raw.githubusercontent.com/TuanMinPay/live-proxy/master/socks5.txt',protocol: 'socks5', },
{url: 'https://raw.githubusercontent.com/casals-ar/proxy-list/main/http',protocol: 'http', },
{url: 'https://raw.githubusercontent.com/casals-ar/proxy-list/main/https',protocol: 'https', },
{url: 'https://raw.githubusercontent.com/casals-ar/proxy-list/main/socks4',protocol: 'socks4', },
{url: 'https://raw.githubusercontent.com/casals-ar/proxy-list/main/socks5',protocol: 'socks5', },
{url: 'https://raw.githubusercontent.com/prxchk/proxy-list/main/http.txt',protocol: 'http', },
{url: 'https://raw.githubusercontent.com/prxchk/proxy-list/main/socks4.txt',protocol: 'socks4', },
{url: 'https://raw.githubusercontent.com/prxchk/proxy-list/main/socks5.txt',protocol: 'socks5', },
{url: 'https://raw.githubusercontent.com/hookzof/socks5_list/master/proxy.txt',protocol: 'socks5', },
{url: 'https://raw.githubusercontent.com/MuRongPIG/Proxy-Master/main/http.txt',protocol: 'http', },
{url: 'https://raw.githubusercontent.com/MuRongPIG/Proxy-Master/main/socks4.txt',protocol: 'socks4', },
{url: 'https://raw.githubusercontent.com/MuRongPIG/Proxy-Master/main/socks5.txt',protocol: 'socks5', },
{url: 'https://raw.githubusercontent.com/vakhov/fresh-proxy-list/master/http.txt',protocol: 'http', },
];
constructor() {
this.logger = createLogger('proxy-service');
this.cache = createCache('hybrid');
this.httpClient = new HttpClient({
timeout: this.CHECK_TIMEOUT,
retries: 1
});
this.logger.info('ProxyService initialized');
}
/**
* Start the proxy refresh job
*/
async queueRefreshProxies(intervalMs: number = 30 * 60 * 1000): Promise<void> {
this.logger.info('Starting proxy refresh job', { intervalMs });
// Initial refresh
await this.scrapeProxies();
// Set up periodic refresh
setInterval(async () => {
try {
await this.scrapeProxies();
} catch (error) {
this.logger.error('Error in periodic proxy refresh', { error });
}
}, intervalMs);
}
/**
* Scrape proxies from all sources
*/
async scrapeProxies(sources: ProxySource[] = this.defaultSources): Promise<number> {
this.logger.info('Starting proxy scraping', { sourceCount: sources.length });
const allProxies: ProxyConfig[] = [];
const scrapingPromises = sources.map(source => this.scrapeFromSource(source));
const results = await Promise.allSettled(scrapingPromises);
results.forEach((result, index) => {
if (result.status === 'fulfilled') {
allProxies.push(...result.value);
this.logger.info('Successfully scraped from source', {
url: sources[index].url,
count: result.value.length
});
} else {
this.logger.error('Failed to scrape from source', {
url: sources[index].url,
error: result.reason
});
}
});
// Remove duplicates
const uniqueProxies = this.removeDuplicateProxies(allProxies);
// Store all proxies in cache
await this.storeProxies(uniqueProxies);
this.logger.info('Proxy scraping completed', {
total: allProxies.length,
unique: uniqueProxies.length
});
// Start validation of new proxies
this.validateProxiesInBackground(uniqueProxies);
return uniqueProxies.length;
}
/**
* Scrape proxies from a single source
*/
private async scrapeFromSource(source: ProxySource): Promise<ProxyConfig[]> {
try {
const response = await this.httpClient.get(source.url);
if (!response.data || typeof response.data !== 'string') {
throw new Error('Invalid response data');
}
const proxies = source.parser ?
source.parser(response.data) :
this.parseHttpProxyList(response.data);
return proxies.map(proxy => ({
type: 'http', // Fixed type to match ProxyConfig interface
host: proxy.host,
port: proxy.port,
username: proxy.username,
password: proxy.password
}));
} catch (error) {
this.logger.error('Error scraping from source', {
url: source.url,
error: error instanceof Error ? error.message : String(error)
});
return [];
}
}
/**
* Parse HTTP proxy list in format "ip:port"
*/
private parseHttpProxyList(content: string): ProxyConfig[] {
const lines = content.split('\n').filter(line => line.trim());
const proxies: ProxyConfig[] = [];
for (const line of lines) {
const trimmed = line.trim();
if (!trimmed || trimmed.startsWith('#')) continue;
const [host, port] = trimmed.split(':');
if (host && port && this.isValidIP(host) && this.isValidPort(port)) {
proxies.push({
type: 'http',
host: host.trim(),
port: parseInt(port.trim())
});
}
}
return proxies;
}
/**
* Validate IP address format
*/
private isValidIP(ip: string): boolean {
const ipRegex = /^(?:(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$/;
return ipRegex.test(ip);
}
/**
* Validate port number
*/
private isValidPort(port: string): boolean {
const portNum = parseInt(port);
return !isNaN(portNum) && portNum > 0 && portNum <= 65535;
}
/**
* Remove duplicate proxies based on host:port combination
*/
private removeDuplicateProxies(proxies: ProxyConfig[]): ProxyConfig[] {
const seen = new Set<string>();
return proxies.filter(proxy => {
const key = `${proxy.host}:${proxy.port}`;
if (seen.has(key)) {
return false;
}
seen.add(key);
return true;
});
}
/**
* Store proxies in cache
*/
private async storeProxies(proxies: ProxyConfig[]): Promise<void> {
try {
for (const proxy of proxies) {
const key = this.getProxyKey(proxy);
const data: ProxyData = {
...proxy,
addedAt: new Date(),
lastChecked: null,
isWorking: null,
responseTime: null
};
await this.cache.set(key, data, 86400); // 24 hours TTL
}
this.logger.info('Stored proxies in cache', { count: proxies.length });
} catch (error) {
this.logger.error('Error storing proxies', { error });
}
}
/**
* Check if a proxy is working
*/
async checkProxy(proxy: ProxyConfig, checkUrl: string = this.DEFAULT_CHECK_URL): Promise<ProxyCheckResult> {
const startTime = Date.now();
try {
// Create a new HttpClient instance with the proxy
const proxyClient = new HttpClient({
timeout: this.CHECK_TIMEOUT,
retries: 0,
proxy: proxy
});
const response = await proxyClient.get(checkUrl);
const responseTime = Date.now() - startTime;
if (response.status >= 200 && response.status < 300) {
const result: ProxyCheckResult = {
proxy,
isWorking: true,
responseTime,
checkedAt: new Date()
};
// Update cache with working status
await this.updateProxyStatus(proxy, true, responseTime);
this.logger.debug('Proxy check successful', {
host: proxy.host,
port: proxy.port,
responseTime
});
return result;
} else {
throw new Error(`HTTP ${response.status}`);
}
} catch (error) {
const responseTime = Date.now() - startTime;
const errorMessage = error instanceof Error ? error.message : String(error);
const result: ProxyCheckResult = {
proxy,
isWorking: false,
responseTime,
error: errorMessage,
checkedAt: new Date()
};
// Update cache with failed status
await this.updateProxyStatus(proxy, false, responseTime);
this.logger.debug('Proxy check failed', {
host: proxy.host,
port: proxy.port,
error: errorMessage
});
return result;
}
}
/**
* Update proxy status in cache
*/
private async updateProxyStatus(proxy: ProxyConfig, isWorking: boolean, responseTime: number): Promise<void> {
try {
const key = this.getProxyKey(proxy);
const existingData = await this.cache.get<ProxyData>(key);
if (existingData) {
const data: ProxyData = {
...existingData,
isWorking,
responseTime,
lastChecked: new Date().toISOString()
};
await this.cache.set(key, data, 86400);
// Manage working proxies list
const workingKey = `${this.WORKING_PROXIES_KEY}:${proxy.host}:${proxy.port}`;
if (isWorking) {
await this.cache.set(workingKey, proxy, 86400);
} else {
await this.cache.del(workingKey);
}
}
} catch (error) {
this.logger.error('Error updating proxy status', { error });
}
}
/**
* Get a working proxy from cache
*/
async getWorkingProxy(): Promise<ProxyConfig | null> {
try {
// Get all working proxy keys and pick one randomly
const allProxies = await this.getAllProxies();
const workingProxies = [];
for (const proxy of allProxies) {
const key = this.getProxyKey(proxy);
const data = await this.cache.get<ProxyData>(key);
if (data && data.isWorking) {
workingProxies.push(proxy);
}
}
if (workingProxies.length > 0) {
const randomIndex = Math.floor(Math.random() * workingProxies.length);
return workingProxies[randomIndex];
}
this.logger.warn('No working proxies available');
return null;
} catch (error) {
this.logger.error('Error getting working proxy', { error });
return null;
}
}
/**
* Get multiple working proxies
*/
async getWorkingProxies(count: number = 10): Promise<ProxyConfig[]> {
try {
const allProxies = await this.getAllProxies();
const workingProxies: ProxyConfig[] = [];
for (const proxy of allProxies) {
if (workingProxies.length >= count) break;
const key = this.getProxyKey(proxy);
const data = await this.cache.get<ProxyData>(key);
if (data && data.isWorking) {
workingProxies.push(proxy);
}
}
return workingProxies;
} catch (error) {
this.logger.error('Error getting working proxies', { error });
return [];
}
}
/**
* Get all proxies from cache
*/
async getAllProxies(): Promise<ProxyConfig[]> {
try {
// Since we can't use keys() directly, we'll need to track proxy keys separately
// For now, we'll implement a simple approach using a known key pattern
const proxies: ProxyConfig[] = [];
// We'll need to either:
// 1. Maintain a separate index of all proxy keys
// 2. Or use a different approach
// For now, let's return empty array and log a warning
this.logger.warn('getAllProxies not fully implemented - Redis cache provider limitations');
return proxies;
} catch (error) {
this.logger.error('Error getting all proxies', { error });
return [];
}
}
/**
* Get proxy statistics
*/
async getProxyStats(): Promise<ProxyStats> {
try {
const allProxies = await this.getAllProxies();
const workingProxies = await this.getWorkingProxies(1000); // Get up to 1000 for stats
const avgResponseTime = workingProxies.length > 0
? workingProxies.reduce((sum, _proxy) => {
// Since responseTime is not in ProxyConfig, we'll calculate differently
return sum + 1000; // placeholder average
}, 0) / workingProxies.length
: 0;
const stats: ProxyStats = {
total: allProxies.length,
working: workingProxies.length,
failed: allProxies.length - workingProxies.length,
lastCheck: new Date(),
avgResponseTime: Math.round(avgResponseTime)
};
// Cache stats for 5 minutes
await this.cache.set(this.PROXY_STATS_KEY, stats, 300);
return stats;
} catch (error) {
this.logger.error('Error getting proxy stats', { error });
return {
total: 0,
working: 0,
failed: 0,
lastCheck: new Date(),
avgResponseTime: 0
};
}
}
/**
* Validate proxies in background
*/
private async validateProxiesInBackground(proxies: ProxyConfig[]): Promise<void> {
this.logger.info('Starting background proxy validation', { count: proxies.length });
const concurrency = 50; // Process 50 proxies concurrently
const chunks = this.chunkArray(proxies, concurrency);
for (const chunk of chunks) {
const validationPromises = chunk.map(proxy =>
this.checkProxy(proxy).catch(error => {
this.logger.error('Error validating proxy', {
host: proxy.host,
port: proxy.port,
error
});
return null;
})
);
await Promise.allSettled(validationPromises);
// Small delay between chunks to avoid overwhelming the system
await new Promise(resolve => setTimeout(resolve, 1000));
}
this.logger.info('Background proxy validation completed');
}
/**
* Start periodic proxy health checks
*/
async startHealthChecks(intervalMs: number = 15 * 60 * 1000): Promise<void> {
this.logger.info('Starting periodic proxy health checks', { intervalMs });
setInterval(async () => {
try {
const workingProxies = await this.getWorkingProxies(100); // Check up to 100 working proxies
const validationPromises = workingProxies.map(proxy => this.checkProxy(proxy));
const results = await Promise.allSettled(validationPromises);
const successCount = results.filter(r =>
r.status === 'fulfilled' && r.value.isWorking
).length;
this.logger.info('Health check completed', {
checked: workingProxies.length,
stillWorking: successCount
});
} catch (error) {
this.logger.error('Error in health check', { error });
}
}, intervalMs);
}
/**
* Clear all proxy data from cache
*/
async clearProxies(): Promise<void> {
try {
// Since we can't use keys() and del() with spread, we'll clear known keys
await this.cache.del(this.PROXY_STATS_KEY);
// Note: This is a limitation of the current cache provider
// In a full implementation, we'd need to maintain an index of proxy keys
this.logger.info('Cleared proxy stats from cache');
this.logger.warn('Full proxy data clearing not implemented due to cache provider limitations');
} catch (error) {
this.logger.error('Error clearing proxy data', { error });
}
}
/**
* Get cache key for a proxy
*/
private getProxyKey(proxy: ProxyConfig): string {
return `${this.CACHE_PREFIX}${proxy.host}:${proxy.port}`;
}
/**
* Split array into chunks
*/
private chunkArray<T>(array: T[], size: number): T[][] {
const chunks: T[][] = [];
for (let i = 0; i < array.length; i += size) {
chunks.push(array.slice(i, i + size));
}
return chunks;
}
/**
* Graceful shutdown
*/
async shutdown(): Promise<void> {
this.logger.info('Shutting down ProxyService');
// The cache and http client will handle their own cleanup
}
}
// Export singleton instance
export const proxyService = new ProxyService();

View file

@ -15,12 +15,12 @@
"dependencies": {
"@hono/node-server": "^1.12.0",
"hono": "^4.6.1",
"@stock-bot/config": "workspace:*",
"@stock-bot/logger": "workspace:*",
"@stock-bot/types": "workspace:*",
"@stock-bot/questdb-client": "workspace:*",
"@stock-bot/utils": "workspace:*",
"@stock-bot/data-frame": "workspace:*"
"@stock-bot/config": "*",
"@stock-bot/logger": "*",
"@stock-bot/types": "*",
"@stock-bot/questdb-client": "*",
"@stock-bot/utils": "*",
"@stock-bot/data-frame": "*"
},
"devDependencies": {
"@types/node": "^22.5.0",

View file

@ -12,12 +12,12 @@
"clean": "rm -rf dist"
},
"dependencies": {
"@stock-bot/config": "workspace:*",
"@stock-bot/logger": "workspace:*",
"@stock-bot/types": "workspace:*",
"@stock-bot/utils": "workspace:*",
"@stock-bot/event-bus": "workspace:*",
"@stock-bot/vector-engine": "workspace:*",
"@stock-bot/config": "*",
"@stock-bot/logger": "*",
"@stock-bot/types": "*",
"@stock-bot/utils": "*",
"@stock-bot/event-bus": "*",
"@stock-bot/vector-engine": "*",
"hono": "^4.0.0"
},
"devDependencies": {

View file

@ -14,15 +14,15 @@
"cli": "bun src/cli/index.ts"
},
"dependencies": {
"@stock-bot/config": "workspace:*",
"@stock-bot/logger": "workspace:*",
"@stock-bot/types": "workspace:*",
"@stock-bot/utils": "workspace:*",
"@stock-bot/event-bus": "workspace:*",
"@stock-bot/strategy-engine": "workspace:*",
"@stock-bot/vector-engine": "workspace:*",
"@stock-bot/data-frame": "workspace:*",
"@stock-bot/questdb-client": "workspace:*",
"@stock-bot/config": "*",
"@stock-bot/logger": "*",
"@stock-bot/types": "*",
"@stock-bot/utils": "*",
"@stock-bot/event-bus": "*",
"@stock-bot/strategy-engine": "*",
"@stock-bot/vector-engine": "*",
"@stock-bot/data-frame": "*",
"@stock-bot/questdb-client": "*",
"hono": "^4.0.0",
"commander": "^11.0.0"
},

View file

@ -57,8 +57,8 @@
"name": "@stock-bot/data-frame",
"version": "1.0.0",
"dependencies": {
"@stock-bot/logger": "workspace:*",
"@stock-bot/utils": "workspace:*",
"@stock-bot/logger": "*",
"@stock-bot/utils": "*",
},
"devDependencies": {
"@types/node": "^20.10.0",
@ -72,8 +72,8 @@
"name": "@stock-bot/event-bus",
"version": "1.0.0",
"dependencies": {
"@stock-bot/config": "workspace:*",
"@stock-bot/logger": "workspace:*",
"@stock-bot/config": "*",
"@stock-bot/logger": "*",
"eventemitter3": "^5.0.1",
"ioredis": "^5.3.2",
},
@ -91,6 +91,7 @@
"dependencies": {
"@stock-bot/logger": "*",
"@stock-bot/types": "*",
"http-proxy-agent": "^7.0.2",
"https-proxy-agent": "^7.0.6",
"socks-proxy-agent": "^8.0.5",
},
@ -160,11 +161,11 @@
"name": "@stock-bot/strategy-engine",
"version": "1.0.0",
"dependencies": {
"@stock-bot/config": "workspace:*",
"@stock-bot/data-frame": "workspace:*",
"@stock-bot/event-bus": "workspace:*",
"@stock-bot/logger": "workspace:*",
"@stock-bot/utils": "workspace:*",
"@stock-bot/config": "*",
"@stock-bot/data-frame": "*",
"@stock-bot/event-bus": "*",
"@stock-bot/logger": "*",
"@stock-bot/utils": "*",
"commander": "^14.0.0",
"eventemitter3": "^5.0.1",
},
@ -195,9 +196,9 @@
"name": "@stock-bot/vector-engine",
"version": "1.0.0",
"dependencies": {
"@stock-bot/data-frame": "workspace:*",
"@stock-bot/logger": "workspace:*",
"@stock-bot/utils": "workspace:*",
"@stock-bot/data-frame": "*",
"@stock-bot/logger": "*",
"@stock-bot/utils": "*",
},
"devDependencies": {
"@types/node": "^20.10.0",
@ -691,6 +692,8 @@
"help-me": ["help-me@5.0.0", "", {}, "sha512-7xgomUX6ADmcYzFik0HzAxh/73YlKR9bmFzf51CZwR+b6YtzU2m0u49hQCqV6SvlqIqsaxovfwdvbnsw3b/zpg=="],
"http-proxy-agent": ["http-proxy-agent@7.0.2", "", { "dependencies": { "agent-base": "^7.1.0", "debug": "^4.3.4" } }, "sha512-T1gkAiYYDWYx3V5Bmyu7HcfcvL7mUrTWiM6yOfa3PIphViJ/gFPbvidQ+veqSOHci/PxBcDabeUNCzpOODJZig=="],
"https-proxy-agent": ["https-proxy-agent@7.0.6", "", { "dependencies": { "agent-base": "^7.1.2", "debug": "4" } }, "sha512-vK9P5/iUfdl95AI+JVyUuIcVtd4ofvtrOr3HNtM2yxC9bnMbEdp3x01OhQNnjb8IJYi38VlTE3mBXwcfvywuSw=="],
"ieee754": ["ieee754@1.2.1", "", {}, "sha512-dcyqhDvX1C46lXZcVqCpK+FtMRQVdIMN6/Df5js2zouUsqG7I6sFxitIC+7KYK29KdXOLHdu9zL4sFnoVQnqaA=="],

View file

@ -22,7 +22,7 @@ The cache library is already included in the monorepo. To use it in your service
```json
{
"dependencies": {
"@stock-bot/cache": "workspace:*"
"@stock-bot/cache": "*"
}
}
```

View file

@ -10,8 +10,8 @@
"test": "bun test"
},
"dependencies": {
"@stock-bot/logger": "workspace:*",
"@stock-bot/utils": "workspace:*"
"@stock-bot/logger": "*",
"@stock-bot/utils": "*"
},
"devDependencies": {
"@types/node": "^20.10.0",

View file

@ -10,8 +10,8 @@
"test": "bun test"
},
"dependencies": {
"@stock-bot/logger": "workspace:*",
"@stock-bot/config": "workspace:*",
"@stock-bot/logger": "*",
"@stock-bot/config": "*",
"ioredis": "^5.3.2",
"eventemitter3": "^5.0.1"
},

View file

@ -12,9 +12,11 @@
"lint": "eslint src/**/*.ts",
"type-check": "tsc --noEmit",
"dev": "tsc --watch"
}, "dependencies": {
},
"dependencies": {
"@stock-bot/logger": "*",
"@stock-bot/types": "*",
"http-proxy-agent": "^7.0.2",
"https-proxy-agent": "^7.0.6",
"socks-proxy-agent": "^8.0.5"
},

View file

@ -10,11 +10,11 @@
"test": "bun test"
},
"dependencies": {
"@stock-bot/config": "workspace:*",
"@stock-bot/data-frame": "workspace:*",
"@stock-bot/event-bus": "workspace:*",
"@stock-bot/logger": "workspace:*",
"@stock-bot/utils": "workspace:*",
"@stock-bot/config": "*",
"@stock-bot/data-frame": "*",
"@stock-bot/event-bus": "*",
"@stock-bot/logger": "*",
"@stock-bot/utils": "*",
"commander": "^14.0.0",
"eventemitter3": "^5.0.1"
},

View file

@ -10,9 +10,9 @@
"test": "bun test"
},
"dependencies": {
"@stock-bot/logger": "workspace:*",
"@stock-bot/utils": "workspace:*",
"@stock-bot/data-frame": "workspace:*"
"@stock-bot/logger": "*",
"@stock-bot/utils": "*",
"@stock-bot/data-frame": "*"
},
"devDependencies": {
"@types/node": "^20.10.0",