574 lines
17 KiB
TypeScript
574 lines
17 KiB
TypeScript
import { createCache, type CacheProvider } from '@stock-bot/cache';
|
|
import { HttpClient, ProxyInfo } from '@stock-bot/http';
|
|
import { getLogger } from '@stock-bot/logger';
|
|
|
|
// Type definitions

/**
 * A remote proxy-list source plus the rolling health stats tracked for it.
 * The required fields describe the source itself; the optional fields are
 * filled in at runtime by the stats-tracking code (see updateProxyStats).
 */
export interface ProxySource {
  id: string; // Short unique identifier for the source (matches PROXY_CONFIG.PROXY_SOURCES ids)
  url: string; // Raw-list URL; expected to return one "host:port" per line
  protocol: string; // Protocol of the proxies this list contains (e.g. 'http', 'https')
  working?: number; // Optional, used for stats
  total?: number; // Optional, used for stats
  percentWorking?: number; // Optional, used for stats
  lastChecked?: Date; // Optional, used for stats
}
|
|
|
|
// Shared configuration and utilities
const PROXY_CONFIG = {
  CACHE_KEY: 'active', // Cache key prefix for individual active-proxy records
  CACHE_STATS_KEY: 'stats', // Cache key prefix for per-source stats records
  CACHE_TTL: 86400, // 24 hours
  CHECK_TIMEOUT: 7000, // Per-proxy check timeout in ms (see checkProxy)
  // NOTE(review): presumably this host's own egress IP — a check only counts
  // as a success when the detection response does NOT contain it (i.e. the
  // proxy is not transparent). Confirm, and consider making it configurable.
  CHECK_IP: '99.246.102.205',
  // SECURITY(review): API key is hard-coded in source; it should be moved to
  // configuration / an environment variable and rotated.
  CHECK_URL: 'https://proxy-detection.stare.gg/?api_key=bd406bf53ddc6abe1d9de5907830a955',
  // Public GitHub-hosted proxy lists. Each is fetched and parsed as one
  // "host:port" entry per line (see fetchProxiesFromSource).
  PROXY_SOURCES: [
    { id: 'prxchk', url: 'https://raw.githubusercontent.com/prxchk/proxy-list/main/http.txt', protocol: 'http' },
    { id: 'casals', url: 'https://raw.githubusercontent.com/casals-ar/proxy-list/main/http', protocol: 'http' },
    { id: 'sunny9577', url: 'https://raw.githubusercontent.com/sunny9577/proxy-scraper/master/proxies.txt', protocol: 'http' },
    { id: 'themiralay', url: 'https://raw.githubusercontent.com/themiralay/Proxy-List-World/refs/heads/master/data.txt', protocol: 'http' },
    { id: 'casa-ls', url: 'https://raw.githubusercontent.com/casa-ls/proxy-list/refs/heads/main/http', protocol: 'http' },
    { id: 'databay', url: 'https://raw.githubusercontent.com/databay-labs/free-proxy-list/refs/heads/master/http.txt', protocol: 'http' },
    { id: 'speedx', url: 'https://raw.githubusercontent.com/TheSpeedX/PROXY-List/master/http.txt', protocol: 'http' },
    { id: 'monosans', url: 'https://raw.githubusercontent.com/monosans/proxy-list/main/proxies/http.txt', protocol: 'http' },
    { id: 'murong', url: 'https://raw.githubusercontent.com/MuRongPIG/Proxy-Master/main/http.txt', protocol: 'http' },
    { id: 'vakhov-fresh', url: 'https://raw.githubusercontent.com/vakhov/fresh-proxy-list/master/http.txt', protocol: 'http' },
    { id: 'kangproxy', url: 'https://raw.githubusercontent.com/officialputuid/KangProxy/refs/heads/KangProxy/http/http.txt', protocol: 'http' },
    { id: 'gfpcom', url: 'https://raw.githubusercontent.com/gfpcom/free-proxy-list/refs/heads/main/list/http.txt', protocol: 'http' },
    { id: 'dpangestuw', url: 'https://raw.githubusercontent.com/dpangestuw/Free-Proxy/refs/heads/main/http_proxies.txt', protocol: 'http' },
    { id: 'gitrecon', url: 'https://raw.githubusercontent.com/gitrecon1455/fresh-proxy-list/refs/heads/main/proxylist.txt', protocol: 'http' },
    { id: 'vakhov-master', url: 'https://raw.githubusercontent.com/vakhov/fresh-proxy-list/refs/heads/master/http.txt', protocol: 'http' },
    { id: 'breaking-tech', url: 'https://raw.githubusercontent.com/BreakingTechFr/Proxy_Free/refs/heads/main/proxies/http.txt', protocol: 'http' },
    { id: 'ercindedeoglu', url: 'https://raw.githubusercontent.com/ErcinDedeoglu/proxies/main/proxies/http.txt', protocol: 'http' },
    { id: 'tuanminpay', url: 'https://raw.githubusercontent.com/TuanMinPay/live-proxy/master/http.txt', protocol: 'http' },
    // HTTPS lists
    { id: 'r00tee-https', url: 'https://raw.githubusercontent.com/r00tee/Proxy-List/refs/heads/main/Https.txt', protocol: 'https' },
    { id: 'ercindedeoglu-https', url: 'https://raw.githubusercontent.com/ErcinDedeoglu/proxies/main/proxies/https.txt', protocol: 'https' },
    { id: 'vakhov-fresh-https', url: 'https://raw.githubusercontent.com/vakhov/fresh-proxy-list/refs/heads/master/https.txt', protocol: 'https' },
    { id: 'databay-https', url: 'https://raw.githubusercontent.com/databay-labs/free-proxy-list/refs/heads/master/https.txt', protocol: 'https' },
    { id: 'kangproxy-https', url: 'https://raw.githubusercontent.com/officialputuid/KangProxy/refs/heads/KangProxy/https/https.txt', protocol: 'https' },
    { id: 'zloi-user-https', url: 'https://raw.githubusercontent.com/zloi-user/hideip.me/refs/heads/master/https.txt', protocol: 'https' },
    { id: 'gfpcom-https', url: 'https://raw.githubusercontent.com/gfpcom/free-proxy-list/refs/heads/main/list/https.txt', protocol: 'https' },
  ],
};
|
|
|
|
// Shared instances (module-scoped, not global)
let isInitialized = false; // Track if resources are initialized
let logger: ReturnType<typeof getLogger>; // Assigned in initializeProxyResources()
let cache: CacheProvider; // Assigned in initializeProxyResources()
let httpClient: HttpClient; // Assigned in initializeProxyResources()
// Per-source rolling stats, zeroed at module load and again by resetProxyStats().
// NOTE(review): logger/cache/httpClient are undefined until
// initializeProxyResources() runs — callers must initialize first.
let proxyStats: ProxySource[] = PROXY_CONFIG.PROXY_SOURCES.map(source => ({
  id: source.id,
  total: 0,
  working: 0,
  lastChecked: new Date(),
  protocol: source.protocol,
  url: source.url,
}));
|
|
|
|
/**
|
|
* Initialize proxy resources (cache and shared dependencies)
|
|
* This should be called before any proxy operations
|
|
* @param waitForCache - Whether to wait for cache readiness (default: false for fallback mode)
|
|
*/
|
|
export async function initializeProxyResources(waitForCache = false): Promise<void> {
|
|
// Skip if already initialized
|
|
if (isInitialized) {
|
|
return;
|
|
}
|
|
|
|
logger = getLogger('proxy-tasks');
|
|
cache = createCache({
|
|
keyPrefix: 'proxy:',
|
|
ttl: PROXY_CONFIG.CACHE_TTL,
|
|
enableMetrics: true,
|
|
});
|
|
|
|
httpClient = new HttpClient({ timeout: 10000 }, logger);
|
|
|
|
if (waitForCache) {
|
|
logger.info('Initializing proxy cache...');
|
|
await cache.waitForReady(10000);
|
|
logger.info('Proxy cache initialized successfully');
|
|
logger.info('Proxy tasks initialized');
|
|
} else {
|
|
logger.info('Proxy tasks initialized (fallback mode)');
|
|
}
|
|
isInitialized = true;
|
|
}
|
|
|
|
// make a function that takes in source id and a boolean success and updates the proxyStats array
|
|
async function updateProxyStats(sourceId: string, success: boolean) {
|
|
const source = proxyStats.find(s => s.id === sourceId);
|
|
if (source !== undefined) {
|
|
if (typeof source.working !== 'number') {
|
|
source.working = 0;
|
|
}
|
|
if (typeof source.total !== 'number') {
|
|
source.total = 0;
|
|
}
|
|
source.total += 1;
|
|
if (success) {
|
|
source.working += 1;
|
|
}
|
|
source.percentWorking = (source.working / source.total) * 100;
|
|
source.lastChecked = new Date();
|
|
await cache.set(`${PROXY_CONFIG.CACHE_STATS_KEY}:${source.id}`, source, PROXY_CONFIG.CACHE_TTL);
|
|
return source;
|
|
} else {
|
|
logger.warn(`Unknown proxy source: ${sourceId}`);
|
|
}
|
|
}
|
|
|
|
// make a function that resets proxyStats
|
|
async function resetProxyStats(): Promise<void> {
|
|
proxyStats = PROXY_CONFIG.PROXY_SOURCES.map(source => ({
|
|
id: source.id,
|
|
total: 0,
|
|
working: 0,
|
|
lastChecked: new Date(),
|
|
protocol: source.protocol,
|
|
url: source.url,
|
|
}));
|
|
for (const source of proxyStats) {
|
|
await cache.set(`${PROXY_CONFIG.CACHE_STATS_KEY}:${source.id}`, source, PROXY_CONFIG.CACHE_TTL);
|
|
}
|
|
return Promise.resolve();
|
|
}
|
|
|
|
/**
|
|
* Update proxy data in cache with working/total stats and average response time
|
|
* @param proxy - The proxy to update
|
|
* @param isWorking - Whether the proxy is currently working
|
|
*/
|
|
async function updateProxyInCache(proxy: ProxyInfo, isWorking: boolean): Promise<void> {
|
|
const cacheKey = `${PROXY_CONFIG.CACHE_KEY}:${proxy.protocol}://${proxy.host}:${proxy.port}`;
|
|
|
|
try {
|
|
const existing: any = await cache.get(cacheKey);
|
|
|
|
// For failed proxies, only update if they already exist
|
|
if (!isWorking && !existing) {
|
|
logger.debug('Proxy not in cache, skipping failed update', {
|
|
proxy: `${proxy.host}:${proxy.port}`,
|
|
});
|
|
return;
|
|
}
|
|
|
|
// Calculate new average response time if we have a response time
|
|
let newAverageResponseTime = existing?.averageResponseTime;
|
|
if (proxy.responseTime !== undefined) {
|
|
const existingAvg = existing?.averageResponseTime || 0;
|
|
const existingTotal = existing?.total || 0;
|
|
|
|
// Calculate weighted average: (existing_avg * existing_count + new_response) / (existing_count + 1)
|
|
newAverageResponseTime =
|
|
existingTotal > 0
|
|
? (existingAvg * existingTotal + proxy.responseTime) / (existingTotal + 1)
|
|
: proxy.responseTime;
|
|
}
|
|
|
|
// Build updated proxy data
|
|
const updated = {
|
|
...existing,
|
|
...proxy, // Keep latest proxy info
|
|
total: (existing?.total || 0) + 1,
|
|
working: isWorking ? (existing?.working || 0) + 1 : existing?.working || 0,
|
|
isWorking,
|
|
lastChecked: new Date(),
|
|
// Add firstSeen only for new entries
|
|
...(existing ? {} : { firstSeen: new Date() }),
|
|
// Update average response time if we calculated a new one
|
|
...(newAverageResponseTime !== undefined
|
|
? { averageResponseTime: newAverageResponseTime }
|
|
: {}),
|
|
};
|
|
|
|
// Calculate success rate
|
|
updated.successRate = updated.total > 0 ? (updated.working / updated.total) * 100 : 0;
|
|
|
|
// Save to cache: reset TTL for working proxies, keep existing TTL for failed ones
|
|
const cacheOptions = isWorking ? PROXY_CONFIG.CACHE_TTL : undefined;
|
|
await cache.set(cacheKey, updated, cacheOptions);
|
|
|
|
logger.debug(`Updated ${isWorking ? 'working' : 'failed'} proxy in cache`, {
|
|
proxy: `${proxy.host}:${proxy.port}`,
|
|
working: updated.working,
|
|
total: updated.total,
|
|
successRate: updated.successRate.toFixed(1) + '%',
|
|
avgResponseTime: updated.averageResponseTime
|
|
? `${updated.averageResponseTime.toFixed(0)}ms`
|
|
: 'N/A',
|
|
});
|
|
} catch (error) {
|
|
logger.error('Failed to update proxy in cache', {
|
|
proxy: `${proxy.host}:${proxy.port}`,
|
|
error: error instanceof Error ? error.message : String(error),
|
|
});
|
|
}
|
|
}
|
|
|
|
// Individual task functions
|
|
export async function queueProxyFetch(): Promise<string> {
|
|
const { queueManager } = await import('../services/queue.service');
|
|
const job = await queueManager.addJob({
|
|
type: 'proxy-fetch',
|
|
provider: 'proxy-service',
|
|
operation: 'fetch-and-check',
|
|
payload: {},
|
|
priority: 5,
|
|
});
|
|
|
|
const jobId = job.id || 'unknown';
|
|
logger.info('Proxy fetch job queued', { jobId });
|
|
return jobId;
|
|
}
|
|
|
|
export async function queueProxyCheck(proxies: ProxyInfo[]): Promise<string> {
|
|
const { queueManager } = await import('../services/queue.service');
|
|
const job = await queueManager.addJob({
|
|
type: 'proxy-check',
|
|
provider: 'proxy-service',
|
|
operation: 'check-specific',
|
|
payload: { proxies },
|
|
priority: 3,
|
|
});
|
|
|
|
const jobId = job.id || 'unknown';
|
|
logger.info('Proxy check job queued', { jobId, count: proxies.length });
|
|
return jobId;
|
|
}
|
|
|
|
export async function fetchProxiesFromSources(): Promise<ProxyInfo[]> {
|
|
await resetProxyStats();
|
|
const fetchPromises = PROXY_CONFIG.PROXY_SOURCES.map(source => fetchProxiesFromSource(source));
|
|
const results = await Promise.all(fetchPromises);
|
|
let allProxies: ProxyInfo[] = results.flat();
|
|
allProxies = removeDuplicateProxies(allProxies);
|
|
return allProxies;
|
|
}
|
|
|
|
export async function fetchProxiesFromSource(source: ProxySource): Promise<ProxyInfo[]> {
|
|
const allProxies: ProxyInfo[] = [];
|
|
|
|
try {
|
|
logger.info(`Fetching proxies from ${source.url}`);
|
|
|
|
const response = await httpClient.get(source.url, {
|
|
timeout: 10000,
|
|
});
|
|
|
|
if (response.status !== 200) {
|
|
logger.warn(`Failed to fetch from ${source.url}: ${response.status}`);
|
|
return [];
|
|
}
|
|
|
|
const text = response.data;
|
|
const lines = text.split('\n').filter((line: string) => line.trim());
|
|
|
|
for (const line of lines) {
|
|
let trimmed = line.trim();
|
|
trimmed = cleanProxyUrl(trimmed);
|
|
if (!trimmed || trimmed.startsWith('#')) {
|
|
continue;
|
|
}
|
|
|
|
// Parse formats like "host:port" or "host:port:user:pass"
|
|
const parts = trimmed.split(':');
|
|
if (parts.length >= 2) {
|
|
const proxy: ProxyInfo = {
|
|
source: source.id,
|
|
protocol: source.protocol as 'http' | 'https' | 'socks4' | 'socks5',
|
|
host: parts[0],
|
|
port: parseInt(parts[1]),
|
|
};
|
|
|
|
if (!isNaN(proxy.port) && proxy.host) {
|
|
allProxies.push(proxy);
|
|
}
|
|
}
|
|
}
|
|
|
|
logger.info(`Parsed ${allProxies.length} proxies from ${source.url}`);
|
|
} catch (error) {
|
|
logger.error(`Error fetching proxies from ${source.url}`, error);
|
|
return [];
|
|
}
|
|
|
|
return allProxies;
|
|
}
|
|
|
|
/**
|
|
* Check if a proxy is working
|
|
*/
|
|
export async function checkProxy(proxy: ProxyInfo): Promise<ProxyInfo> {
|
|
let success = false;
|
|
logger.debug(`Checking Proxy:`, {
|
|
protocol: proxy.protocol,
|
|
host: proxy.host,
|
|
port: proxy.port,
|
|
});
|
|
|
|
try {
|
|
// Test the proxy
|
|
const response = await httpClient.get(PROXY_CONFIG.CHECK_URL, {
|
|
proxy,
|
|
timeout: PROXY_CONFIG.CHECK_TIMEOUT,
|
|
});
|
|
|
|
const isWorking = response.status >= 200 && response.status < 300;
|
|
const result: ProxyInfo = {
|
|
...proxy,
|
|
isWorking,
|
|
lastChecked: new Date(),
|
|
responseTime: response.responseTime,
|
|
};
|
|
|
|
if (isWorking && !JSON.stringify(response.data).includes(PROXY_CONFIG.CHECK_IP)) {
|
|
success = true;
|
|
await updateProxyInCache(result, true);
|
|
} else {
|
|
await updateProxyInCache(result, false);
|
|
}
|
|
|
|
if (proxy.source) {
|
|
await updateProxyStats(proxy.source, success);
|
|
}
|
|
|
|
logger.debug('Proxy check completed', {
|
|
host: proxy.host,
|
|
port: proxy.port,
|
|
isWorking,
|
|
});
|
|
|
|
return result;
|
|
} catch (error) {
|
|
const errorMessage = error instanceof Error ? error.message : String(error);
|
|
const result: ProxyInfo = {
|
|
...proxy,
|
|
isWorking: false,
|
|
error: errorMessage,
|
|
lastChecked: new Date(),
|
|
};
|
|
|
|
// Update cache for failed proxy (increment total, don't update TTL)
|
|
await updateProxyInCache(result, false);
|
|
|
|
if (proxy.source) {
|
|
await updateProxyStats(proxy.source, success);
|
|
}
|
|
|
|
logger.debug('Proxy check failed', {
|
|
host: proxy.host,
|
|
port: proxy.port,
|
|
error: errorMessage,
|
|
});
|
|
|
|
return result;
|
|
}
|
|
}
|
|
|
|
/**
|
|
* Get a random active proxy from the cache
|
|
* @param protocol - Optional protocol filter ('http' | 'https' | 'socks4' | 'socks5')
|
|
* @param minSuccessRate - Minimum success rate percentage (default: 50)
|
|
* @returns A random working proxy or null if none found
|
|
*/
|
|
export async function getRandomActiveProxy(
|
|
protocol?: 'http' | 'https' | 'socks4' | 'socks5',
|
|
minSuccessRate: number = 50
|
|
): Promise<ProxyInfo | null> {
|
|
try {
|
|
// Get all active proxy keys from cache
|
|
const pattern = protocol
|
|
? `${PROXY_CONFIG.CACHE_KEY}:${protocol}://*`
|
|
: `${PROXY_CONFIG.CACHE_KEY}:*`;
|
|
|
|
const keys = await cache.keys(pattern);
|
|
|
|
if (keys.length === 0) {
|
|
logger.debug('No active proxies found in cache', { pattern });
|
|
return null;
|
|
}
|
|
|
|
// Shuffle the keys for randomness
|
|
const shuffledKeys = keys.sort(() => Math.random() - 0.5);
|
|
|
|
// Find a working proxy that meets the criteria
|
|
for (const key of shuffledKeys) {
|
|
try {
|
|
const proxyData: ProxyInfo | null = await cache.get(key);
|
|
|
|
if (
|
|
proxyData &&
|
|
proxyData.isWorking &&
|
|
(!proxyData.successRate || proxyData.successRate >= minSuccessRate)
|
|
) {
|
|
logger.debug('Random active proxy selected', {
|
|
proxy: `${proxyData.host}:${proxyData.port}`,
|
|
protocol: proxyData.protocol,
|
|
successRate: proxyData.successRate?.toFixed(1) + '%',
|
|
avgResponseTime: proxyData.averageResponseTime
|
|
? `${proxyData.averageResponseTime.toFixed(0)}ms`
|
|
: 'N/A',
|
|
});
|
|
|
|
return proxyData;
|
|
}
|
|
} catch (error) {
|
|
logger.debug('Error reading proxy from cache', { key, error: (error as Error).message });
|
|
continue;
|
|
}
|
|
}
|
|
|
|
logger.debug('No working proxies found meeting criteria', {
|
|
protocol,
|
|
minSuccessRate,
|
|
keysChecked: shuffledKeys.length,
|
|
});
|
|
|
|
return null;
|
|
} catch (error) {
|
|
logger.error('Error getting random active proxy', {
|
|
error: error instanceof Error ? error.message : String(error),
|
|
protocol,
|
|
minSuccessRate,
|
|
});
|
|
return null;
|
|
}
|
|
}
|
|
|
|
// Utility functions
|
|
function cleanProxyUrl(url: string): string {
|
|
return url
|
|
.replace(/^https?:\/\//, '')
|
|
.replace(/^0+/, '')
|
|
.replace(/:0+(\d)/g, ':$1');
|
|
}
|
|
|
|
function removeDuplicateProxies(proxies: ProxyInfo[]): ProxyInfo[] {
|
|
const seen = new Set<string>();
|
|
const unique: ProxyInfo[] = [];
|
|
|
|
for (const proxy of proxies) {
|
|
const key = `${proxy.protocol}://${proxy.host}:${proxy.port}`;
|
|
if (!seen.has(key)) {
|
|
seen.add(key);
|
|
unique.push(proxy);
|
|
}
|
|
}
|
|
|
|
return unique;
|
|
}
|
|
|
|
// Optional: Export a convenience object that groups related tasks
// (lets callers import a single `proxyTasks` namespace instead of the
// individual functions).
export const proxyTasks = {
  queueProxyFetch,
  queueProxyCheck,
  fetchProxiesFromSources,
  fetchProxiesFromSource,
  checkProxy,
};
|
|
|
|
// Export singleton instance for backward compatibility (optional)
// Remove this if you want to fully move to the task-based approach
// (alias of proxyTasks above; the two are the same object).
export const proxyService = proxyTasks;
|