/**
 * Proxy Fetch Operations - Fetching proxies from sources
 */
import { HttpClient, ProxyInfo } from '@stock-bot/http';
import { OperationContext } from '@stock-bot/utils';

import { PROXY_CONFIG } from '../shared/config';
import { ProxyStatsManager } from '../shared/proxy-manager';
import type { ProxySource } from '../shared/types';

// Shared HTTP client
|
|
let httpClient: HttpClient;
|
|
|
|
function getHttpClient(ctx: OperationContext): HttpClient {
|
|
if (!httpClient) {
|
|
httpClient = new HttpClient({ timeout: 10000 }, ctx.logger);
|
|
}
|
|
return httpClient;
|
|
}
export async function fetchProxiesFromSources(): Promise<ProxyInfo[]> {
|
|
const ctx = OperationContext.create('proxy', 'fetch-sources');
|
|
|
|
const statsManager = ProxyStatsManager.getInstance();
|
|
statsManager.resetStats();
|
|
|
|
const fetchPromises = PROXY_CONFIG.PROXY_SOURCES.map(source => fetchProxiesFromSource(source, ctx));
|
|
const results = await Promise.all(fetchPromises);
|
|
let allProxies: ProxyInfo[] = results.flat();
|
|
allProxies = removeDuplicateProxies(allProxies);
|
|
|
|
ctx.logger.info('Fetched proxies from all sources', { total: allProxies.length });
|
|
return allProxies;
|
|
}
export async function fetchProxiesFromSource(source: ProxySource, ctx?: OperationContext): Promise<ProxyInfo[]> {
|
|
if (!ctx) {
|
|
ctx = OperationContext.create('proxy', 'fetch-source');
|
|
}
|
|
|
|
const allProxies: ProxyInfo[] = [];
|
|
|
|
try {
|
|
ctx.logger.info(`Fetching proxies from ${source.url}`);
|
|
|
|
const client = getHttpClient(ctx);
|
|
const response = await client.get(source.url, {
|
|
timeout: 10000,
|
|
});
|
|
|
|
if (response.status !== 200) {
|
|
ctx.logger.warn(`Failed to fetch from ${source.url}: ${response.status}`);
|
|
return [];
|
|
}
|
|
|
|
const text = response.data;
|
|
const lines = text.split('\n').filter((line: string) => line.trim());
|
|
|
|
for (const line of lines) {
|
|
let trimmed = line.trim();
|
|
trimmed = cleanProxyUrl(trimmed);
|
|
if (!trimmed || trimmed.startsWith('#')) {
|
|
continue;
|
|
}
|
|
|
|
// Parse formats like \"host:port\" or \"host:port:user:pass\"
|
|
const parts = trimmed.split(':');
|
|
if (parts.length >= 2) {
|
|
const proxy: ProxyInfo = {
|
|
source: source.id,
|
|
protocol: source.protocol as 'http' | 'https' | 'socks4' | 'socks5',
|
|
host: parts[0],
|
|
port: parseInt(parts[1]),
|
|
};
|
|
|
|
if (!isNaN(proxy.port) && proxy.host) {
|
|
allProxies.push(proxy);
|
|
}
|
|
}
|
|
}
|
|
|
|
ctx.logger.info(`Parsed ${allProxies.length} proxies from ${source.url}`);
|
|
} catch (error) {
|
|
ctx.logger.error(`Error fetching proxies from ${source.url}`, error);
|
|
return [];
|
|
}
|
|
|
|
return allProxies;
|
|
}
// Utility functions

function cleanProxyUrl(url: string): string {
|
|
return url
|
|
.replace(/^https?:\/\//, '')
|
|
.replace(/^0+/, '')
|
|
.replace(/:0+(\d)/g, ':$1');
|
|
}
function removeDuplicateProxies(proxies: ProxyInfo[]): ProxyInfo[] {
|
|
const seen = new Set<string>();
|
|
const unique: ProxyInfo[] = [];
|
|
|
|
for (const proxy of proxies) {
|
|
const key = `${proxy.protocol}://${proxy.host}:${proxy.port}`;
|
|
if (!seen.has(key)) {
|
|
seen.add(key);
|
|
unique.push(proxy);
|
|
}
|
|
}
|
|
|
|
return unique;
|
|
} |