Reorganized providers into handlers and changed folder structure for maintainability

This commit is contained in:
Boki 2025-06-21 09:53:33 -04:00
parent 1bb2380a28
commit 59ab0940ae
11 changed files with 21 additions and 21 deletions

View file

@ -0,0 +1,263 @@
/**
* Exchange Sync Provider - Queue provider for syncing IB exchanges to master records
*/
import { getLogger } from '@stock-bot/logger';
import type { MasterExchange } from '@stock-bot/mongodb-client';
import type { HandlerConfigWithSchedule } from '@stock-bot/queue';
import { createJobHandler, handlerRegistry } from '@stock-bot/queue';
import type { IBExchange } from '../types/exchange.types';
const logger = getLogger('exchange-sync');
/**
 * Register the exchange-sync queue handler and its daily schedule with the
 * shared handler registry. Must be called once during worker startup.
 */
export function initializeExchangeSyncProvider() {
  logger.info('Registering exchange sync provider...');

  // Queue operations exposed by this handler.
  const operations = {
    'sync-ib-exchanges': createJobHandler(async () => {
      logger.info('Syncing IB exchanges to master table');
      return await syncIBExchanges();
    }),
    'get-master-exchange': createJobHandler(async (payload: { masterExchangeId: string }) => {
      logger.debug('Getting master exchange details', payload);
      const exchange = await getMasterExchangeDetails(payload.masterExchangeId);
      return { exchange, ...payload };
    }),
  };

  const exchangeSyncConfig: HandlerConfigWithSchedule = {
    name: 'exchange-sync',
    operations,
    scheduledJobs: [
      {
        type: 'exchange-sync-daily',
        operation: 'sync-ib-exchanges',
        cronPattern: '0 3 * * *', // Daily at 3 AM
        priority: 3,
        description: 'Daily sync of IB exchanges to master table',
        immediately: true, // Run on startup to test
      },
    ],
  };

  handlerRegistry.registerWithSchedule(exchangeSyncConfig);
  logger.info('Exchange sync provider registered successfully');
}
/**
 * Sync IB exchanges from the actual ibExchanges table into 1:1 master records.
 *
 * @returns Counts of successfully synced exchanges and total candidates.
 *   Returns zeros (and logs) instead of throwing when the initial fetch fails.
 */
async function syncIBExchanges(): Promise<{ syncedCount: number; totalExchanges: number }> {
  logger.info('Syncing IB exchanges from database...');
  try {
    const { connectMongoDB, getDatabase } = await import('@stock-bot/mongodb-client');
    // Ensure MongoDB client is connected before touching collections.
    await connectMongoDB();
    const db = getDatabase();

    // Only North American (US/CA) exchanges are mirrored into master records.
    const exchanges = await db
      .collection<IBExchange>('ibExchanges')
      .find({ country_code: { $in: ['US', 'CA'] } })
      .toArray();
    logger.info('Found IB exchanges in database', { count: exchanges.length });

    let syncedCount = 0;
    // Process sequentially; a failure on one exchange is logged and skipped
    // so the rest of the batch still syncs.
    for (const ibExchange of exchanges) {
      try {
        await createOrUpdateMasterExchange(ibExchange);
        syncedCount += 1;
        logger.debug('Synced IB exchange', {
          ibId: ibExchange.id,
          country: ibExchange.country_code,
        });
      } catch (error) {
        logger.error('Failed to sync IB exchange', { exchange: ibExchange.id, error });
      }
    }

    logger.info('IB exchange sync completed', {
      syncedCount,
      totalExchanges: exchanges.length,
    });
    return { syncedCount, totalExchanges: exchanges.length };
  } catch (error) {
    logger.error('Failed to fetch IB exchanges from database', { error });
    return { syncedCount: 0, totalExchanges: 0 };
  }
}
/**
 * Create or update the master exchange record for one IB exchange (1:1 mapping).
 *
 * Uses a single atomic upsert instead of find-then-insert/update, which removes
 * the race where two concurrent syncs both see "not found" and insert duplicates.
 * Creation-only fields (shortName, active, sourceMappings, confidence, verified,
 * source, created_at) go through $setOnInsert; the rest are refreshed on every
 * sync via $set.
 *
 * @param ibExchange - Raw IB exchange document from the ibExchanges collection.
 */
async function createOrUpdateMasterExchange(ibExchange: IBExchange): Promise<void> {
  const { connectMongoDB, getDatabase } = await import('@stock-bot/mongodb-client');
  await connectMongoDB();
  const db = getDatabase();
  const collection = db.collection<MasterExchange>('masterExchanges');
  const masterExchangeId = generateMasterExchangeId(ibExchange);
  const now = new Date();
  const officialName = ibExchange.name || `Exchange ${ibExchange.id}`;
  const result = await collection.updateOne(
    { masterExchangeId },
    {
      // Refreshed on every sync.
      $set: {
        officialName,
        country: ibExchange.country_code || 'UNKNOWN',
        currency: ibExchange.currency || 'USD',
        timezone: inferTimezone(ibExchange),
        updated_at: now,
      },
      // Written only when the document is first created.
      $setOnInsert: {
        shortName: masterExchangeId, // Set shortName to masterExchangeId on creation
        active: false, // Set active to false only on creation
        sourceMappings: {
          ib: {
            id: ibExchange.id || ibExchange._id?.toString() || 'unknown',
            name: officialName,
            code: ibExchange.code || ibExchange.id || '',
            aliases: generateAliases(ibExchange),
            lastUpdated: now,
          },
        },
        confidence: 1.0, // High confidence for direct IB mapping
        verified: true, // Mark as verified since it's direct from IB
        // DocumentBase fields
        source: 'exchange-sync-provider',
        created_at: now,
      },
    },
    { upsert: true }
  );
  if (result.upsertedCount > 0) {
    logger.debug('Created new master exchange', { masterExchangeId });
  } else {
    logger.debug('Updated existing master exchange', { masterExchangeId });
  }
}
/**
 * Look up a single master exchange document by its masterExchangeId.
 *
 * @returns The document, or null when not found or when the lookup fails
 *   (failures are logged, never thrown).
 */
async function getMasterExchangeDetails(masterExchangeId: string): Promise<MasterExchange | null> {
  try {
    const { connectMongoDB, getDatabase } = await import('@stock-bot/mongodb-client');
    await connectMongoDB();
    return await getDatabase()
      .collection<MasterExchange>('masterExchanges')
      .findOne({ masterExchangeId });
  } catch (error) {
    logger.error('Error getting master exchange details', { masterExchangeId, error });
    return null;
  }
}
/**
 * Derive a canonical master exchange ID from an IB exchange record.
 * Preference order: code, then id (both sanitized to A-Z0-9), then the first
 * two words of the name joined with '_'; falls back to 'UNKNOWN_EXCHANGE'.
 */
function generateMasterExchangeId(ibExchange: IBExchange): string {
  const sanitize = (value: string) => value.toUpperCase().replace(/[^A-Z0-9]/g, '');

  if (ibExchange.code) {
    return sanitize(ibExchange.code);
  }
  if (ibExchange.id) {
    return sanitize(ibExchange.id);
  }
  if (ibExchange.name) {
    const firstTwoWords = ibExchange.name.toUpperCase().split(' ').slice(0, 2);
    return firstTwoWords.join('_').replace(/[^A-Z0-9_]/g, '');
  }
  return 'UNKNOWN_EXCHANGE';
}
/**
 * Build alias strings for an exchange: the upper-cased initials of a
 * multi-word name (e.g. "Toronto Stock Exchange" -> "TSE") plus the
 * upper-cased exchange code when present.
 */
function generateAliases(ibExchange: IBExchange): string[] {
  const aliases: string[] = [];
  const { name, code } = ibExchange;

  if (name?.includes(' ')) {
    const initials = name
      .split(' ')
      .map(word => word[0])
      .join('')
      .toUpperCase();
    aliases.push(initials);
  }
  if (code) {
    aliases.push(code.toUpperCase());
  }
  return aliases;
}
/**
 * Infer an IANA timezone from keywords in the exchange name.
 * The first matching keyword wins (table order mirrors the match priority);
 * unknown or missing names default to 'UTC'.
 */
function inferTimezone(ibExchange: IBExchange): string {
  if (!ibExchange.name) {
    return 'UTC';
  }
  // Ordered keyword -> timezone table; earlier entries take precedence.
  const keywordZones: ReadonlyArray<readonly [string, string]> = [
    ['NEW YORK', 'America/New_York'],
    ['NYSE', 'America/New_York'],
    ['NASDAQ', 'America/New_York'],
    ['LONDON', 'Europe/London'],
    ['TOKYO', 'Asia/Tokyo'],
    ['SHANGHAI', 'Asia/Shanghai'],
    ['TORONTO', 'America/Toronto'],
    ['FRANKFURT', 'Europe/Berlin'],
  ];
  const upperName = ibExchange.name.toUpperCase();
  const match = keywordZones.find(([keyword]) => upperName.includes(keyword));
  return match ? match[1] : 'UTC'; // Default
}

View file

@ -0,0 +1,85 @@
/**
* Interactive Brokers Provider for new queue system
*/
import { getLogger } from '@stock-bot/logger';
import {
createJobHandler,
handlerRegistry,
type HandlerConfigWithSchedule,
} from '@stock-bot/queue';
const logger = getLogger('ib-provider');
/**
 * Register the Interactive Brokers ('ib') queue handler and its weekly
 * scheduled refresh job with the shared handler registry.
 */
export function initializeIBProvider() {
  logger.debug('Registering IB provider with scheduled jobs...');

  // Operations lazily import ib.operations so the heavy browser/HTTP code
  // is only loaded when a job actually runs.
  const operations = {
    'fetch-session': createJobHandler(async () => {
      logger.debug('Processing session fetch request');
      const { fetchSession } = await import('./ib.operations');
      return fetchSession();
    }),
    'fetch-exchanges': createJobHandler(async () => {
      logger.debug('Processing exchanges fetch request');
      const { fetchSession, fetchExchanges } = await import('./ib.operations');
      const sessionHeaders = await fetchSession();
      if (!sessionHeaders) {
        throw new Error('Failed to get session headers');
      }
      return fetchExchanges(sessionHeaders);
    }),
    'fetch-symbols': createJobHandler(async () => {
      logger.debug('Processing symbols fetch request');
      const { fetchSession, fetchSymbols } = await import('./ib.operations');
      const sessionHeaders = await fetchSession();
      if (!sessionHeaders) {
        throw new Error('Failed to get session headers');
      }
      return fetchSymbols(sessionHeaders);
    }),
    'ib-exchanges-and-symbols': createJobHandler(async () => {
      // Combined legacy operation used by the weekly schedule below.
      logger.info('Fetching symbol summary from IB');
      const { fetchSession, fetchExchanges, fetchSymbols } = await import('./ib.operations');
      const sessionHeaders = await fetchSession();
      logger.info('Fetched symbol summary from IB');
      if (!sessionHeaders) {
        return null;
      }
      logger.debug('Fetching exchanges from IB');
      const exchanges = await fetchExchanges(sessionHeaders);
      logger.info('Fetched exchanges from IB', { count: exchanges?.length });
      logger.debug('Fetching symbols from IB');
      const symbols = await fetchSymbols(sessionHeaders);
      logger.info('Fetched symbols from IB', { symbols });
      return { exchangesCount: exchanges?.length, symbolsCount: symbols?.length };
    }),
  };

  const ibProviderConfig: HandlerConfigWithSchedule = {
    name: 'ib',
    operations,
    scheduledJobs: [
      {
        type: 'ib-exchanges-and-symbols',
        operation: 'ib-exchanges-and-symbols',
        cronPattern: '0 0 * * 0', // Every Sunday at midnight
        priority: 5,
        description: 'Fetch and update IB exchanges and symbols data',
        // immediately: true, // Don't run immediately during startup to avoid conflicts
      },
    ],
  };

  handlerRegistry.registerWithSchedule(ibProviderConfig);
  logger.debug('IB provider registered successfully with scheduled jobs');
}

View file

@ -0,0 +1,309 @@
import { Browser } from '@stock-bot/browser';
import { getLogger } from '@stock-bot/logger';
import { getMongoDBClient } from '@stock-bot/mongodb-client';
// Shared instances (module-scoped, not global)
let isInitialized = false; // Track if resources are initialized
let logger: ReturnType<typeof getLogger>;

/**
 * Initialize shared resources for the IB operations in this module.
 * Idempotent: only the first call does any work; subsequent calls return
 * immediately.
 */
export async function initializeIBResources(): Promise<void> {
  // Skip if already initialized
  if (isInitialized) {
    return;
  }
  // NOTE(review): logger category 'proxy-tasks' looks copy-pasted from the
  // proxy module — confirm whether this should be an IB-specific category.
  logger = getLogger('proxy-tasks');
  isInitialized = true;
}
/**
 * Open the IB product-exchanges page in a headless browser (via proxy) and
 * capture the request headers of the site's own product-summary XHR. Those
 * headers carry the session tokens needed to call IB's webrest API directly.
 *
 * @returns The captured request headers, or undefined on timeout/failure
 *   (errors are logged, never thrown).
 */
export async function fetchSession(): Promise<Record<string, string> | undefined> {
  try {
    await Browser.initialize({ headless: true, timeout: 10000, blockResources: false });
    logger.info('✅ Browser initialized');
    // NOTE(review): proxy credentials are hardcoded in source — move to config/env.
    const { page } = await Browser.createPageWithProxy(
      'https://www.interactivebrokers.com/en/trading/products-exchanges.php#/',
      'http://doimvbnb-US-rotate:w5fpiwrb9895@p.webshare.io:80'
    );
    logger.info('✅ Page created with proxy');
    const headersPromise = new Promise<Record<string, string> | undefined>(resolve => {
      let resolved = false;
      // Settle exactly once; later network events and the timeout become no-ops.
      // Fixes a bug where `resolved` was never set on the success path, so the
      // timeout warning fired even after headers had been captured.
      const settle = (value: Record<string, string> | undefined) => {
        if (resolved) {
          return;
        }
        resolved = true;
        resolve(value);
      };
      page.onNetworkEvent(event => {
        if (event.url.includes('/webrest/search/product-types/summary')) {
          if (event.type === 'request') {
            try {
              settle(event.headers);
            } catch (e) {
              logger.debug('Raw Summary Response error', { error: (e as Error).message });
              settle(undefined);
            }
          }
        }
      });
      // Timeout fallback
      setTimeout(() => {
        if (!resolved) {
          logger.warn('Timeout waiting for headers');
          settle(undefined);
        }
      }, 30000);
    });
    logger.info('⏳ Waiting for page load...');
    await page.waitForLoadState('domcontentloaded', { timeout: 20000 });
    logger.info('✅ Page loaded');
    // Switching to the Products tab triggers the summary XHR we listen for.
    logger.info('🔍 Looking for Products tab...');
    const productsTab = page.locator('#productSearchTab[role="tab"][href="#products"]');
    await productsTab.waitFor({ timeout: 5000 });
    logger.info('✅ Found Products tab');
    logger.info('🖱️ Clicking Products tab...');
    await productsTab.click();
    logger.info('✅ Products tab clicked');
    // New Products Checkbox
    logger.info('🔍 Looking for "New Products Only" radio button...');
    const radioButton = page.locator('span.checkbox-text:has-text("New Products Only")');
    await radioButton.waitFor({ timeout: 5000 });
    logger.info(`🎯 Found "New Products Only" radio button`);
    await radioButton.first().click();
    logger.info('✅ "New Products Only" radio button clicked');
    // Wait for and return headers immediately when captured
    logger.info('⏳ Waiting for headers to be captured...');
    const headers = await headersPromise;
    page.close();
    if (headers) {
      logger.info('✅ Headers captured successfully');
    } else {
      logger.warn('⚠️ No headers were captured');
    }
    return headers;
  } catch (error) {
    logger.error('Failed to fetch IB symbol summary', { error });
    return;
  }
}
/**
 * Fetch the full exchange list from IB's webrest API using previously captured
 * session headers, and persist it to the `ibExchanges` collection.
 *
 * @param sessionHeaders - Headers captured by fetchSession (session tokens).
 * @returns The exchange objects returned by the API, or null on any failure
 *   (errors are logged, never thrown).
 */
export async function fetchExchanges(sessionHeaders: Record<string, string>): Promise<unknown[] | null> {
  try {
    logger.info('🔍 Fetching exchanges with session headers...');
    // The URL for the exchange data API
    const exchangeUrl = 'https://www.interactivebrokers.com/webrest/exchanges';
    // Configure the proxy
    // NOTE(review): proxy credentials are hardcoded in source — move to config/env.
    const proxyUrl = 'http://doimvbnb-US-rotate:w5fpiwrb9895@p.webshare.io:80';
    // Prepare headers - include all session headers plus any additional ones
    const requestHeaders = {
      ...sessionHeaders,
      Accept: 'application/json, text/plain, */*',
      'Accept-Language': 'en-US,en;q=0.9',
      'Cache-Control': 'no-cache',
      Pragma: 'no-cache',
      'Sec-Fetch-Dest': 'empty',
      'Sec-Fetch-Mode': 'cors',
      'Sec-Fetch-Site': 'same-origin',
      'X-Requested-With': 'XMLHttpRequest',
    };
    logger.info('📤 Making request to exchange API...', {
      url: exchangeUrl,
      headerCount: Object.keys(requestHeaders).length,
    });
    // Use fetch with proxy configuration
    // NOTE(review): the `proxy` request option is a Bun extension to fetch —
    // confirm the runtime is Bun; standard Node fetch ignores/rejects it.
    const response = await fetch(exchangeUrl, {
      method: 'GET',
      headers: requestHeaders,
      proxy: proxyUrl,
    });
    if (!response.ok) {
      logger.error('❌ Exchange API request failed', {
        status: response.status,
        statusText: response.statusText,
      });
      return null;
    }
    // NOTE(review): response payload is used unvalidated — presumably
    // `{ exchanges: [...] }`; verify against the API contract.
    const data = await response.json();
    const exchanges = data?.exchanges || [];
    logger.info('✅ Exchange data fetched successfully');
    logger.info('Saving IB exchanges to MongoDB...');
    const client = getMongoDBClient();
    // Upsert keyed on (id, country_code) so re-runs update rather than duplicate.
    await client.batchUpsert('ibExchanges', exchanges, ['id', 'country_code']);
    logger.info('✅ Exchange IB data saved to MongoDB:', {
      count: exchanges.length,
    });
    return exchanges;
  } catch (error) {
    logger.error('❌ Failed to fetch exchanges', { error });
    return null;
  }
}
/**
 * Fetch all STK symbols for US/CA from IB's product search API and persist
 * them to the `ib_symbols` collection.
 *
 * Flow: POST the summary endpoint to learn the total product count, then
 * fetch every page (500 products each) in parallel and batch-upsert results.
 *
 * @param sessionHeaders - Headers captured by fetchSession (session tokens).
 * @returns The fetched symbol objects, or null on any failure
 *   (errors are logged, never thrown).
 */
export async function fetchSymbols(sessionHeaders: Record<string, string>): Promise<unknown[] | null> {
  try {
    logger.info('🔍 Fetching symbols with session headers...');
    // NOTE(review): proxy credentials are hardcoded in source — move to config/env.
    const proxyUrl = 'http://doimvbnb-US-rotate:w5fpiwrb9895@p.webshare.io:80';
    // Prepare headers - include all session headers plus any additional ones
    const requestHeaders = {
      ...sessionHeaders,
      Accept: 'application/json, text/plain, */*',
      'Accept-Language': 'en-US,en;q=0.9',
      'Cache-Control': 'no-cache',
      Pragma: 'no-cache',
      'Sec-Fetch-Dest': 'empty',
      'Sec-Fetch-Mode': 'cors',
      'Sec-Fetch-Site': 'same-origin',
      'X-Requested-With': 'XMLHttpRequest',
    };
    // Base filter: US/CA stocks, sorted by symbol.
    const baseRequest = {
      domain: 'com',
      newProduct: 'all',
      pageNumber: 1,
      pageSize: 100,
      productCountry: ['CA', 'US'],
      productSymbol: '',
      productType: ['STK'],
      sortDirection: 'asc',
      sortField: 'symbol',
    };
    // Get Summary (total product count)
    // NOTE(review): the `proxy` request option is a Bun extension to fetch.
    const summaryResponse = await fetch(
      'https://www.interactivebrokers.com/webrest/search/product-types/summary',
      {
        method: 'POST',
        headers: requestHeaders,
        proxy: proxyUrl,
        body: JSON.stringify(baseRequest),
      }
    );
    if (!summaryResponse.ok) {
      logger.error('❌ Summary API request failed', {
        status: summaryResponse.status,
        statusText: summaryResponse.statusText,
      });
      return null;
    }
    const summaryData = await summaryResponse.json();
    // Guard against an unexpected/empty summary payload instead of crashing
    // on summaryData[0].totalCount.
    const totalCount = summaryData?.[0]?.totalCount ?? 0;
    logger.info('✅ IB Summary data fetched successfully', {
      totalCount,
    });
    const pageSize = 500;
    const pageCount = Math.ceil(totalCount / pageSize) || 0;
    logger.info('Fetching Symbols for IB', { pageCount });
    // Build one immutable request body per page so the concurrent requests
    // cannot observe a shared, mutated object.
    const symbolPromises: Promise<Response>[] = [];
    for (let page = 1; page <= pageCount; page++) {
      symbolPromises.push(
        fetch('https://www.interactivebrokers.com/webrest/search/products-by-filters', {
          method: 'POST',
          headers: requestHeaders,
          proxy: proxyUrl,
          body: JSON.stringify({ ...baseRequest, pageSize, pageNumber: page }),
        })
      );
    }
    const responses = await Promise.all(symbolPromises);
    const symbols: unknown[] = [];
    for (const response of responses) {
      if (!response.ok) {
        logger.error('❌ Symbols API request failed', {
          status: response.status,
          statusText: response.statusText,
        });
        return null;
      }
      const data = await response.json();
      const pageProducts = data?.products || [];
      if (pageProducts.length > 0) {
        symbols.push(...pageProducts);
      } else {
        logger.warn('⚠️ No symbols found in response');
      }
    }
    if (symbols.length === 0) {
      logger.warn('⚠️ No symbols fetched from IB');
      return null;
    }
    logger.info('✅ IB symbols fetched successfully, saving to DB...', {
      totalSymbols: symbols.length,
    });
    const client = getMongoDBClient();
    // Upsert keyed on (symbol, exchangeId) so re-runs update rather than duplicate.
    await client.batchUpsert('ib_symbols', symbols, ['symbol', 'exchangeId']);
    logger.info('Saved IB symbols to DB', {
      totalSymbols: symbols.length,
    });
    return symbols;
  } catch (error) {
    logger.error('❌ Failed to fetch symbols', { error });
    return null;
  }
}
// Aggregate export so callers can import all IB operations as one object.
export const ibTasks = {
fetchSymbols,
fetchSession,
fetchExchanges,
};

View file

@ -0,0 +1,86 @@
/**
* Proxy Provider for new queue system
*/
import { ProxyInfo } from '@stock-bot/http';
import { getLogger } from '@stock-bot/logger';
import { handlerRegistry, createJobHandler, type HandlerConfigWithSchedule } from '@stock-bot/queue';
const logger = getLogger('proxy-provider');
/**
 * Register the 'proxy' queue handler and its weekly fetch-and-validate
 * schedule with the shared handler registry.
 */
export function initializeProxyProvider() {
  logger.debug('Registering proxy provider with scheduled jobs...');

  const operations = {
    'fetch-from-sources': createJobHandler(async () => {
      // Fetch proxies from all configured sources, then fan the results out
      // as batched 'check-proxy' jobs.
      logger.info('Processing fetch proxies from sources request');
      const { fetchProxiesFromSources } = await import('./proxy.operations');
      const { processItems } = await import('@stock-bot/queue');

      const proxies = await fetchProxiesFromSources();
      logger.info('Fetched proxies from sources', { count: proxies.length });
      if (proxies.length === 0) {
        logger.warn('No proxies fetched from sources');
        return { processed: 0, successful: 0 };
      }

      const batchResult = await processItems(proxies, 'proxy', {
        handler: 'proxy',
        operation: 'check-proxy',
        totalDelayHours: 0.083, // 5 minutes (5/60 hours)
        batchSize: 50, // Process 50 proxies per batch
        priority: 3,
        useBatching: true,
        retries: 1,
        ttl: 30000, // 30 second timeout per proxy check
        removeOnComplete: 5,
        removeOnFail: 3,
      });
      logger.info('Batch proxy validation completed', {
        totalProxies: proxies.length,
        jobsCreated: batchResult.jobsCreated,
        mode: batchResult.mode,
        batchesCreated: batchResult.batchesCreated,
        duration: `${batchResult.duration}ms`,
      });
      return {
        processed: proxies.length,
        jobsCreated: batchResult.jobsCreated,
        batchesCreated: batchResult.batchesCreated,
        mode: batchResult.mode,
      };
    }),
    'check-proxy': createJobHandler(async (payload: ProxyInfo) => {
      // payload is the raw proxy info object for a single proxy.
      logger.debug('Processing proxy check request', {
        proxy: `${payload.host}:${payload.port}`,
      });
      const { checkProxy } = await import('./proxy.operations');
      return checkProxy(payload);
    }),
  };

  const proxyProviderConfig: HandlerConfigWithSchedule = {
    name: 'proxy',
    operations,
    scheduledJobs: [
      {
        type: 'proxy-fetch-and-check',
        operation: 'fetch-from-sources',
        cronPattern: '0 0 * * 0', // Every week at midnight on Sunday
        priority: 0,
        description: 'Fetch and validate proxy list from sources',
        // immediately: true, // Don't run immediately during startup to avoid conflicts
      },
    ],
  };

  handlerRegistry.registerWithSchedule(proxyProviderConfig);
  logger.debug('Proxy provider registered successfully with scheduled jobs');
}

View file

@ -0,0 +1,578 @@
import { createCache, type CacheProvider } from '@stock-bot/cache';
import { getDatabaseConfig } from '@stock-bot/config';
import { HttpClient, ProxyInfo } from '@stock-bot/http';
import { getLogger } from '@stock-bot/logger';
import { QueueManager } from '@stock-bot/queue';
// Type definitions
/**
 * A proxy-list source plus rolling check statistics accumulated for it.
 */
export interface ProxySource {
id: string; // stable identifier, used as the per-source stats cache key
url: string; // raw proxy-list URL (one host:port per line)
protocol: string; // protocol assumed for every proxy from this list
working?: number; // Optional, used for stats
total?: number; // Optional, used for stats
percentWorking?: number; // Optional, used for stats
lastChecked?: Date; // Optional, used for stats
}
// Shared configuration and utilities
// NOTE(review): CHECK_URL embeds an API key and CHECK_IP hardcodes this host's
// public egress IP — both should come from config/env, and the key should be
// rotated since it is committed to source control.
const PROXY_CONFIG = {
CACHE_KEY: 'active', // cache namespace for known proxies
CACHE_STATS_KEY: 'stats', // cache namespace for per-source stats
CACHE_TTL: 86400, // 24 hours
CHECK_TIMEOUT: 7000, // per-proxy check timeout (ms)
CHECK_IP: '99.246.102.205', // our real IP; a working proxy must not leak it
CHECK_URL: 'https://proxy-detection.stare.gg/?api_key=bd406bf53ddc6abe1d9de5907830a955',
// Free proxy lists, fetched and merged by fetchProxiesFromSources.
PROXY_SOURCES: [
{
id: 'prxchk',
url: 'https://raw.githubusercontent.com/prxchk/proxy-list/main/http.txt',
protocol: 'http',
},
{
id: 'casals',
url: 'https://raw.githubusercontent.com/casals-ar/proxy-list/main/http',
protocol: 'http',
},
{
id: 'sunny9577',
url: 'https://raw.githubusercontent.com/sunny9577/proxy-scraper/master/proxies.txt',
protocol: 'http',
},
{
id: 'themiralay',
url: 'https://raw.githubusercontent.com/themiralay/Proxy-List-World/refs/heads/master/data.txt',
protocol: 'http',
},
{
id: 'casa-ls',
url: 'https://raw.githubusercontent.com/casa-ls/proxy-list/refs/heads/main/http',
protocol: 'http',
},
{
id: 'databay',
url: 'https://raw.githubusercontent.com/databay-labs/free-proxy-list/refs/heads/master/http.txt',
protocol: 'http',
},
{
id: 'speedx',
url: 'https://raw.githubusercontent.com/TheSpeedX/PROXY-List/master/http.txt',
protocol: 'http',
},
{
id: 'monosans',
url: 'https://raw.githubusercontent.com/monosans/proxy-list/main/proxies/http.txt',
protocol: 'http',
},
{
id: 'murong',
url: 'https://raw.githubusercontent.com/MuRongPIG/Proxy-Master/main/http.txt',
protocol: 'http',
},
{
id: 'vakhov-fresh',
url: 'https://raw.githubusercontent.com/vakhov/fresh-proxy-list/master/http.txt',
protocol: 'http',
},
{
id: 'kangproxy',
url: 'https://raw.githubusercontent.com/officialputuid/KangProxy/refs/heads/KangProxy/http/http.txt',
protocol: 'http',
},
{
id: 'gfpcom',
url: 'https://raw.githubusercontent.com/gfpcom/free-proxy-list/refs/heads/main/list/http.txt',
protocol: 'http',
},
{
id: 'dpangestuw',
url: 'https://raw.githubusercontent.com/dpangestuw/Free-Proxy/refs/heads/main/http_proxies.txt',
protocol: 'http',
},
{
id: 'gitrecon',
url: 'https://raw.githubusercontent.com/gitrecon1455/fresh-proxy-list/refs/heads/main/proxylist.txt',
protocol: 'http',
},
{
id: 'vakhov-master',
url: 'https://raw.githubusercontent.com/vakhov/fresh-proxy-list/refs/heads/master/http.txt',
protocol: 'http',
},
{
id: 'breaking-tech',
url: 'https://raw.githubusercontent.com/BreakingTechFr/Proxy_Free/refs/heads/main/proxies/http.txt',
protocol: 'http',
},
{
id: 'ercindedeoglu',
url: 'https://raw.githubusercontent.com/ErcinDedeoglu/proxies/main/proxies/http.txt',
protocol: 'http',
},
{
id: 'tuanminpay',
url: 'https://raw.githubusercontent.com/TuanMinPay/live-proxy/master/http.txt',
protocol: 'http',
},
{
id: 'r00tee-https',
url: 'https://raw.githubusercontent.com/r00tee/Proxy-List/refs/heads/main/Https.txt',
protocol: 'https',
},
{
id: 'ercindedeoglu-https',
url: 'https://raw.githubusercontent.com/ErcinDedeoglu/proxies/main/proxies/https.txt',
protocol: 'https',
},
{
id: 'vakhov-fresh-https',
url: 'https://raw.githubusercontent.com/vakhov/fresh-proxy-list/refs/heads/master/https.txt',
protocol: 'https',
},
{
id: 'databay-https',
url: 'https://raw.githubusercontent.com/databay-labs/free-proxy-list/refs/heads/master/https.txt',
protocol: 'https',
},
{
id: 'kangproxy-https',
url: 'https://raw.githubusercontent.com/officialputuid/KangProxy/refs/heads/KangProxy/https/https.txt',
protocol: 'https',
},
{
id: 'zloi-user-https',
url: 'https://raw.githubusercontent.com/zloi-user/hideip.me/refs/heads/master/https.txt',
protocol: 'https',
},
{
id: 'gfpcom-https',
url: 'https://raw.githubusercontent.com/gfpcom/free-proxy-list/refs/heads/main/list/https.txt',
protocol: 'https',
},
],
};
// Shared instances (module-scoped, not global)
let isInitialized = false; // Track if resources are initialized
let logger: ReturnType<typeof getLogger>;
let cache: CacheProvider; // created in initializeProxyResources (Dragonfly-backed)
let httpClient: HttpClient; // created in initializeProxyResources
// In-memory per-source stats; reset at the start of every full fetch cycle.
let proxyStats: ProxySource[] = PROXY_CONFIG.PROXY_SOURCES.map(source => ({
id: source.id,
total: 0,
working: 0,
lastChecked: new Date(),
protocol: source.protocol,
url: source.url,
}));
/**
* Initialize proxy resources (cache and shared dependencies)
* This should be called before any proxy operations
* @param waitForCache - Whether to wait for cache readiness (default: false for fallback mode)
*/
export async function initializeProxyResources(waitForCache = false): Promise<void> {
// Skip if already initialized
if (isInitialized) {
return;
}
logger = getLogger('proxy-tasks');
const databaseConfig = getDatabaseConfig();
cache = createCache({
redisConfig: databaseConfig.dragonfly,
keyPrefix: 'proxy:',
ttl: PROXY_CONFIG.CACHE_TTL,
enableMetrics: true,
});
httpClient = new HttpClient({ timeout: 10000 }, logger);
if (waitForCache) {
logger.info('Initializing proxy cache...');
await cache.waitForReady(10000);
logger.info('Proxy cache initialized successfully');
logger.info('Proxy tasks initialized');
} else {
logger.info('Proxy tasks initialized (fallback mode)');
}
isInitialized = true;
}
/**
 * Record one check outcome against the in-memory stats entry for a source
 * and persist the updated entry to the cache.
 *
 * @param sourceId - The proxy source the checked proxy came from.
 * @param success - Whether the proxy passed the check.
 * @returns The updated stats entry, or undefined for an unknown source.
 */
async function updateProxyStats(sourceId: string, success: boolean) {
  const entry = proxyStats.find(stat => stat.id === sourceId);
  if (entry === undefined) {
    logger.warn(`Unknown proxy source: ${sourceId}`);
    return;
  }
  // Defensive: counters may be missing on a freshly-typed entry.
  entry.working = typeof entry.working === 'number' ? entry.working : 0;
  entry.total = typeof entry.total === 'number' ? entry.total : 0;
  entry.total += 1;
  if (success) {
    entry.working += 1;
  }
  entry.percentWorking = (entry.working / entry.total) * 100;
  entry.lastChecked = new Date();
  await cache.set(`${PROXY_CONFIG.CACHE_STATS_KEY}:${entry.id}`, entry, PROXY_CONFIG.CACHE_TTL);
  return entry;
}
/**
 * Reset in-memory and cached per-source stats to a clean baseline.
 * Called at the start of every full fetch cycle.
 */
async function resetProxyStats(): Promise<void> {
  proxyStats = PROXY_CONFIG.PROXY_SOURCES.map(source => ({
    id: source.id,
    total: 0,
    working: 0,
    lastChecked: new Date(),
    protocol: source.protocol,
    url: source.url,
  }));
  // Persist the zeroed entries; the writes hit independent keys, so run them
  // in parallel instead of awaiting each sequentially. (Also dropped the
  // redundant `return Promise.resolve()` — an async function already
  // returns a promise.)
  await Promise.all(
    proxyStats.map(source =>
      cache.set(`${PROXY_CONFIG.CACHE_STATS_KEY}:${source.id}`, source, PROXY_CONFIG.CACHE_TTL)
    )
  );
}
/**
 * Update proxy data in cache with working/total stats and average response time
 * @param proxy - The proxy to update
 * @param isWorking - Whether the proxy is currently working
 */
async function updateProxyInCache(proxy: ProxyInfo, isWorking: boolean): Promise<void> {
const cacheKey = `${PROXY_CONFIG.CACHE_KEY}:${proxy.protocol}://${proxy.host}:${proxy.port}`;
try {
const existing: ProxyInfo | null = await cache.get(cacheKey);
// For failed proxies, only update if they already exist
// (prevents never-working proxies from ever entering the cache)
if (!isWorking && !existing) {
logger.debug('Proxy not in cache, skipping failed update', {
proxy: `${proxy.host}:${proxy.port}`,
});
return;
}
// Calculate new average response time if we have a response time
let newAverageResponseTime = existing?.averageResponseTime;
if (proxy.responseTime !== undefined) {
const existingAvg = existing?.averageResponseTime || 0;
const existingTotal = existing?.total || 0;
// Calculate weighted average: (existing_avg * existing_count + new_response) / (existing_count + 1)
newAverageResponseTime =
existingTotal > 0
? (existingAvg * existingTotal + proxy.responseTime) / (existingTotal + 1)
: proxy.responseTime;
}
// Build updated proxy data
// Spread order matters: `...proxy` after `...existing` so the fresh probe's
// fields override the cached copy.
const updated = {
...existing,
...proxy, // Keep latest proxy info
total: (existing?.total || 0) + 1,
working: isWorking ? (existing?.working || 0) + 1 : existing?.working || 0,
isWorking,
lastChecked: new Date(),
// Add firstSeen only for new entries
...(existing ? {} : { firstSeen: new Date() }),
// Update average response time if we calculated a new one
...(newAverageResponseTime !== undefined
? { averageResponseTime: newAverageResponseTime }
: {}),
};
// Calculate success rate
updated.successRate = updated.total > 0 ? (updated.working / updated.total) * 100 : 0;
// Save to cache: reset TTL for working proxies, keep existing TTL for failed ones
// NOTE(review): assumes cache.set with an undefined TTL preserves the key's
// existing TTL — confirm against the CacheProvider implementation.
const cacheOptions = isWorking ? PROXY_CONFIG.CACHE_TTL : undefined;
await cache.set(cacheKey, updated, cacheOptions);
logger.debug(`Updated ${isWorking ? 'working' : 'failed'} proxy in cache`, {
proxy: `${proxy.host}:${proxy.port}`,
working: updated.working,
total: updated.total,
successRate: updated.successRate.toFixed(1) + '%',
avgResponseTime: updated.averageResponseTime
? `${updated.averageResponseTime.toFixed(0)}ms`
: 'N/A',
});
} catch (error) {
// Cache failures are logged but never propagated to the caller.
logger.error('Failed to update proxy in cache', {
proxy: `${proxy.host}:${proxy.port}`,
error: error instanceof Error ? error.message : String(error),
});
}
}
// Individual task functions
/**
 * Enqueue a full proxy fetch job on the 'proxy' queue.
 *
 * NOTE(review): this enqueues operation 'fetch-and-check', but the proxy
 * provider in this changeset registers 'fetch-from-sources' / 'check-proxy'
 * — confirm a 'fetch-and-check' handler still exists, otherwise this job
 * will never be processed.
 *
 * @returns The queued job id, or 'unknown' if the queue assigned none.
 */
export async function queueProxyFetch(): Promise<string> {
const queueManager = QueueManager.getInstance();
const queue = queueManager.getQueue('proxy');
const job = await queue.add('proxy-fetch', {
handler: 'proxy',
operation: 'fetch-and-check',
payload: {},
priority: 5,
});
const jobId = job.id || 'unknown';
logger.info('Proxy fetch job queued', { jobId });
return jobId;
}
/**
 * Enqueue a validation job for a specific list of proxies.
 *
 * NOTE(review): this enqueues operation 'check-specific' with a
 * `{ proxies }` payload, while the registered 'check-proxy' handler expects
 * a single ProxyInfo — confirm a 'check-specific' handler exists, otherwise
 * these jobs will never be processed.
 *
 * @param proxies - Proxies to validate.
 * @returns The queued job id, or 'unknown' if the queue assigned none.
 */
export async function queueProxyCheck(proxies: ProxyInfo[]): Promise<string> {
const queueManager = QueueManager.getInstance();
const queue = queueManager.getQueue('proxy');
const job = await queue.add('proxy-check', {
handler: 'proxy',
operation: 'check-specific',
payload: { proxies },
priority: 3,
});
const jobId = job.id || 'unknown';
logger.info('Proxy check job queued', { jobId, count: proxies.length });
return jobId;
}
/**
 * Fetch every configured proxy list in parallel, merge the results, and
 * return the de-duplicated set. Per-source stats are reset first.
 */
export async function fetchProxiesFromSources(): Promise<ProxyInfo[]> {
  await resetProxyStats();
  const perSource = await Promise.all(
    PROXY_CONFIG.PROXY_SOURCES.map(source => fetchProxiesFromSource(source))
  );
  return removeDuplicateProxies(perSource.flat());
}
/**
 * Download and parse one proxy source list.
 * Lines are expected as "host:port" (any extra ":user:pass" fields are
 * ignored); blank lines and '#' comments are skipped.
 *
 * @returns Parsed proxies, or [] on any fetch/parse failure.
 */
export async function fetchProxiesFromSource(source: ProxySource): Promise<ProxyInfo[]> {
  try {
    logger.info(`Fetching proxies from ${source.url}`);
    const response = await httpClient.get(source.url, { timeout: 10000 });
    if (response.status !== 200) {
      logger.warn(`Failed to fetch from ${source.url}: ${response.status}`);
      return [];
    }
    const proxies: ProxyInfo[] = [];
    const candidates = response.data.split('\n').filter((line: string) => line.trim());
    for (const candidate of candidates) {
      const cleaned = cleanProxyUrl(candidate.trim());
      if (!cleaned || cleaned.startsWith('#')) {
        continue;
      }
      // Parse formats like "host:port" or "host:port:user:pass"
      const [host, portText] = cleaned.split(':');
      if (portText === undefined) {
        continue; // not in host:port form
      }
      const port = parseInt(portText);
      if (host && !isNaN(port)) {
        proxies.push({
          source: source.id,
          protocol: source.protocol as 'http' | 'https' | 'socks4' | 'socks5',
          host,
          port,
        });
      }
    }
    logger.info(`Parsed ${proxies.length} proxies from ${source.url}`);
    return proxies;
  } catch (error) {
    logger.error(`Error fetching proxies from ${source.url}`, error);
    return [];
  }
}
/**
 * Check if a proxy is working.
 *
 * A proxy counts as working only when the check request succeeds (2xx) AND
 * the response body does not contain our real egress IP — i.e. the proxy
 * actually anonymizes traffic. The result is folded into the proxy cache and
 * the per-source stats. This function never throws; failures are returned as
 * a non-working result.
 *
 * @param proxy - The proxy candidate to test.
 * @returns The proxy annotated with isWorking / lastChecked / responseTime
 *   (or error message on failure).
 */
export async function checkProxy(proxy: ProxyInfo): Promise<ProxyInfo> {
  let success = false;
  logger.debug(`Checking Proxy:`, {
    protocol: proxy.protocol,
    host: proxy.host,
    port: proxy.port,
  });
  try {
    // Test the proxy
    const response = await httpClient.get(PROXY_CONFIG.CHECK_URL, {
      proxy,
      timeout: PROXY_CONFIG.CHECK_TIMEOUT,
    });
    const statusOk = response.status >= 200 && response.status < 300;
    // Fix: the original returned isWorking=true for proxies that leaked our
    // real IP while simultaneously recording them as failed in the cache and
    // stats. isWorking now matches the cache/stats verdict.
    const isWorking =
      statusOk && !JSON.stringify(response.data).includes(PROXY_CONFIG.CHECK_IP);
    success = isWorking;
    const result: ProxyInfo = {
      ...proxy,
      isWorking,
      lastChecked: new Date(),
      responseTime: response.responseTime,
    };
    await updateProxyInCache(result, isWorking);
    if (proxy.source) {
      await updateProxyStats(proxy.source, success);
    }
    logger.debug('Proxy check completed', {
      host: proxy.host,
      port: proxy.port,
      isWorking,
    });
    return result;
  } catch (error) {
    const errorMessage = error instanceof Error ? error.message : String(error);
    const result: ProxyInfo = {
      ...proxy,
      isWorking: false,
      error: errorMessage,
      lastChecked: new Date(),
    };
    // Update cache for failed proxy (increment total, don't update TTL)
    await updateProxyInCache(result, false);
    if (proxy.source) {
      await updateProxyStats(proxy.source, success);
    }
    logger.debug('Proxy check failed', {
      host: proxy.host,
      port: proxy.port,
      error: errorMessage,
    });
    return result;
  }
}
/**
 * Get a random active proxy from the cache.
 *
 * Keys are shuffled with an unbiased Fisher-Yates pass on a copy; the previous
 * `keys.sort(() => Math.random() - 0.5)` is both statistically biased and
 * mutates the array returned by the cache layer.
 *
 * @param protocol - Optional protocol filter ('http' | 'https' | 'socks4' | 'socks5')
 * @param minSuccessRate - Minimum success rate percentage (default: 50)
 * @returns A random working proxy or null if none found
 */
export async function getRandomActiveProxy(
  protocol?: 'http' | 'https' | 'socks4' | 'socks5',
  minSuccessRate: number = 50
): Promise<ProxyInfo | null> {
  try {
    // Get all active proxy keys from cache
    const pattern = protocol
      ? `${PROXY_CONFIG.CACHE_KEY}:${protocol}://*`
      : `${PROXY_CONFIG.CACHE_KEY}:*`;
    const keys = await cache.keys(pattern);
    if (keys.length === 0) {
      logger.debug('No active proxies found in cache', { pattern });
      return null;
    }
    // Unbiased shuffle (Fisher-Yates) on a copy so `keys` is left untouched.
    const shuffledKeys = [...keys];
    for (let i = shuffledKeys.length - 1; i > 0; i--) {
      const j = Math.floor(Math.random() * (i + 1));
      [shuffledKeys[i], shuffledKeys[j]] = [shuffledKeys[j], shuffledKeys[i]];
    }
    // Find a working proxy that meets the criteria
    for (const key of shuffledKeys) {
      try {
        const proxyData: ProxyInfo | null = await cache.get(key);
        if (
          proxyData &&
          proxyData.isWorking &&
          (!proxyData.successRate || proxyData.successRate >= minSuccessRate)
        ) {
          logger.debug('Random active proxy selected', {
            proxy: `${proxyData.host}:${proxyData.port}`,
            protocol: proxyData.protocol,
            // Fix: previously logged "undefined%" when successRate was absent.
            successRate:
              proxyData.successRate !== undefined
                ? `${proxyData.successRate.toFixed(1)}%`
                : 'N/A',
            avgResponseTime: proxyData.averageResponseTime
              ? `${proxyData.averageResponseTime.toFixed(0)}ms`
              : 'N/A',
          });
          return proxyData;
        }
      } catch (error) {
        logger.debug('Error reading proxy from cache', { key, error: (error as Error).message });
        continue;
      }
    }
    logger.debug('No working proxies found meeting criteria', {
      protocol,
      minSuccessRate,
      keysChecked: shuffledKeys.length,
    });
    return null;
  } catch (error) {
    logger.error('Error getting random active proxy', {
      error: error instanceof Error ? error.message : String(error),
      protocol,
      minSuccessRate,
    });
    return null;
  }
}
// Utility functions
/**
 * Normalize a raw proxy line: strip an http(s):// scheme prefix and remove
 * redundant leading zeros from the host's first segment and from segments
 * after a colon (e.g. "http://08.1.2.3:080" -> "8.1.2.3:80").
 *
 * Fix: the previous `/^0+/` stripped ALL leading zero characters, corrupting
 * inputs such as "0.1.2.3" (-> ".1.2.3") and "0proxy.com" (-> "proxy.com").
 * Zeros are now removed only when another digit follows them.
 */
function cleanProxyUrl(url: string): string {
  return url
    .replace(/^https?:\/\//, '')
    .replace(/^0+(\d)/, '$1')
    .replace(/:0+(\d)/g, ':$1');
}
/**
 * De-duplicate proxies by their protocol://host:port identity, keeping the
 * first occurrence of each and preserving input order.
 */
function removeDuplicateProxies(proxies: ProxyInfo[]): ProxyInfo[] {
  const byIdentity = new Map<string, ProxyInfo>();
  for (const candidate of proxies) {
    const identity = `${candidate.protocol}://${candidate.host}:${candidate.port}`;
    if (!byIdentity.has(identity)) {
      byIdentity.set(identity, candidate);
    }
  }
  return [...byIdentity.values()];
}
// Convenience aggregate of the proxy task functions (queueing, fetching,
// parsing, health-checking) so callers can import one object.
export const proxyTasks = {
  queueProxyFetch,
  queueProxyCheck,
  fetchProxiesFromSources,
  fetchProxiesFromSource,
  checkProxy,
};
// Export singleton instance for backward compatibility (optional)
// Remove this if you want to fully move to the task-based approach
export const proxyService = proxyTasks;

View file

@ -0,0 +1,89 @@
import { getLogger } from '@stock-bot/logger';
import {
createJobHandler,
handlerRegistry,
type HandlerConfigWithSchedule
} from '@stock-bot/queue';
import type { SymbolSpiderJob } from './qm.operations';
const logger = getLogger('qm-provider');
/**
 * Initialize and register the QM provider: its queue job operations plus the
 * recurring schedules that drive session upkeep and symbol discovery.
 */
export function initializeQMProvider() {
  logger.debug('Registering QM provider with scheduled jobs...');
  const qmProviderConfig: HandlerConfigWithSchedule = {
    name: 'qm',
    operations: {
      // Create/refresh the pooled QuoteMedia sessions.
      'create-sessions': createJobHandler(async () => {
        logger.debug('Creating QM sessions...');
        const { createSessions } = await import('./qm.operations');
        await createSessions();
        logger.debug('QM sessions created successfully');
        return { success: true, message: 'QM sessions created successfully' };
      }),
      // Kick off the full spider-based symbol search.
      'search-symbols': createJobHandler(async () => {
        logger.info('Starting QM symbol search...');
        const { fetchSymbols } = await import('./qm.operations');
        const symbols = await fetchSymbols();
        if (symbols && symbols.length > 0) {
          logger.info('QM symbol search completed successfully', { count: symbols.length });
          return {
            success: true,
            message: 'QM symbol search completed successfully',
            count: symbols.length,
            symbols: symbols.slice(0, 10), // Return first 10 symbols as sample
          };
        } else {
          logger.warn('QM symbol search returned no results');
          return {
            success: false,
            message: 'No symbols found',
            count: 0,
          };
        }
      }),
      // Process one spider job: either the root fan-out or a prefix search.
      'spider-symbol-search': createJobHandler(async (payload: SymbolSpiderJob) => {
        logger.debug('Processing spider symbol search job', { payload });
        const { spiderSymbolSearch } = await import('./qm.operations');
        const result = await spiderSymbolSearch(payload);
        logger.debug('Spider search job completed', {
          success: result.success,
          symbolsFound: result.symbolsFound,
        });
        return result;
      }),
    },
    scheduledJobs: [
      {
        type: 'session-management',
        operation: 'create-sessions',
        // Fix: '0 */15 * * *' fires at minute 0 of every 15th hour, not every
        // 15 minutes as the description states.
        cronPattern: '*/15 * * * *', // Every 15 minutes
        priority: 7,
        immediately: true, // Also run once on startup so sessions exist right away
        description: 'Create and maintain QM sessions',
      },
      {
        // NOTE(review): 'qm-maintnance' is misspelled, but renaming the job
        // type may orphan an already-persisted repeatable job — confirm
        // cleanup strategy before fixing the spelling.
        type: 'qm-maintnance',
        operation: 'spider-symbol-search',
        payload: {
          prefix: null, // root job: fans out into A-Z children
          depth: 1,
          source: 'qm',
          maxDepth: 4
        },
        cronPattern: '0 0 * * 0', // Every Sunday at midnight
        priority: 10,
        immediately: true, // Also runs once on startup — heavy; set false to disable
        description: 'Comprehensive symbol search using QM API',
      },
    ],
  };
  handlerRegistry.registerWithSchedule(qmProviderConfig);
  // Fix: previous message said 'IB provider' in the QM module.
  logger.debug('QM provider registered successfully with scheduled jobs');
}

View file

@ -0,0 +1,420 @@
import { getRandomUserAgent } from '@stock-bot/http';
import { getLogger } from '@stock-bot/logger';
import { getMongoDBClient } from '@stock-bot/mongodb-client';
import { QueueManager } from '@stock-bot/queue';
import { isShutdownSignalReceived } from '@stock-bot/shutdown';
import { getRandomProxy } from '@stock-bot/utils';
// Shared instances (module-scoped, not global)
let isInitialized = false; // Set by initializeQMResources(); guards lazy setup
let logger: ReturnType<typeof getLogger>; // Assigned during initialization
// let cache: CacheProvider;
/**
 * One authenticated QuoteMedia session: the proxy it was created through, the
 * request headers to reuse (including the Datatool-Token once acquired), and
 * usage counters used by createSessions() to evict unhealthy sessions.
 */
export interface QMSession {
  proxy: string;
  headers: Record<string, string>;
  successfulCalls: number;
  failedCalls: number;
  lastUsed: Date;
}
/**
 * Payload for one spider-search job. The root job (prefix === null) fans out
 * into per-letter jobs; deeper jobs extend the prefix one letter at a time.
 */
export interface SymbolSpiderJob {
  prefix: string | null; // null = root job (A-Z)
  depth: number; // 1=A, 2=AA, 3=AAA, etc.
  source: string; // 'qm'
  maxDepth?: number; // optional max depth limit
}
// Shape of an exchange record persisted to the qmExchanges collection.
interface Exchange {
  exchange: string;
  exchangeCode: string;
  exchangeShortName: string;
  countryCode: string;
  source: string;
}
function getQmHeaders(): Record<string, string> {
return {
'User-Agent': getRandomUserAgent(),
Accept: '*/*',
'Accept-Language': 'en',
'Sec-Fetch-Mode': 'cors',
Origin: 'https://www.quotemedia.com',
Referer: 'https://www.quotemedia.com/',
};
}
// Session pools keyed by QM datatool SID. Each SID appears to unlock a subset
// of API calls (noted inline); only the `lookup` SID is currently active — the
// commented entries are kept for reference. Pools are filled/pruned by
// createSessions().
const sessionCache: Record<string, QMSession[]> = {
  // '5ad521e05faf5778d567f6d0012ec34d6cdbaeb2462f41568f66558bc7b4ced9': [], //4488d072b
  // cc1cbdaf040f76db8f4c94f7d156b9b9b716e1a7509ec9c74a48a47f6b6b9f87: [], //97ff00cf3 // getQuotes
  // '74963ff42f1db2320d051762b5d3950ff9eab23f9d5c5b592551b4ca0441d086': [], //32ca24e394b // getSplitsBySymbol getBrokerRatingsBySymbol getDividendsBySymbol getEarningsSurprisesBySymbol getEarningsEventsBySymbol
  // '1e1d7cb1de1fd2fe52684abdea41a446919a5fe12776dfab88615ac1ce1ec2f6': [], //fb5721812d2c // getEnhancedQuotes getProfiles
  // a900a06cc6b3e8036afb9eeb1bbf9783f0007698ed8f5cb1e373dc790e7be2e5: [], //cc882cd95f9 // getEnhancedQuotes
  // a863d519e38f80e45d10e280fb1afc729816e23f0218db2f3e8b23005a9ad8dd: [], //05a09a41225 // getCompanyFilings getEnhancedQuotes
  // b3cdb1873f3682c5aeeac097be6181529bfb755945e5a412a24f4b9316291427: [], //6a63f56a6 // getHeadlinesTickerStory
  dc8c9930437f65d30f6597768800957017bac203a0a50342932757c8dfa158d6: [], //fceb3c4bdd // lookup
  // '97b24911d7b034620aafad9441afdb2bc906ee5c992d86933c5903254ca29709': [], //c56424868d // detailed-quotes
  // '8a394f09cb8540c8be8988780660a7ae5b583c331a1f6cb12834f051a0169a8f': [], //2a86d214e50e5 // getGlobalIndustrySectorPeers getKeyRatiosBySymbol getGlobalIndustrySectorCodeList
  // '2f059f75e2a839437095c9e7e4991d2365bafa7bbb086672a87ae0cf8d92eb01': [], // 48fa36d // getNethouseBySymbol
  // d7ae7e0091dd1d7011948c3dc4af09b5ec552285d92bb188be2618968bc78e3f: [], // 63548ee //getRecentTradesBySymbol getQuotes getLevel2Quote getRecentTradesBySymbol
  // d22d1db8f67fe6e420b4028e5129b289ca64862aa6cee8459193747b68c01de3: [], // 84e9e
  // '6e0b22a7cbc02ac3fa07d45e2880b7696aaebeb29574dce81789e570570c9002': [], //
};
/**
 * Lazily set up module-level QM resources (currently just the logger).
 * Idempotent: safe to call repeatedly, only the first call does any work.
 */
export async function initializeQMResources(): Promise<void> {
  if (isInitialized) return; // already set up — nothing to do
  logger = getLogger('qm-tasks');
  isInitialized = true;
}
/**
 * Top up each pool in sessionCache to 10 live QuoteMedia sessions.
 *
 * Sessions with more than 10 failed calls are evicted first; each new session
 * authenticates against the QM dataTool endpoint through a random proxy.
 *
 * Fix: attempts per pool are now capped — the previous `while` + `continue`
 * could spin forever when authentication kept failing.
 *
 * Never throws; errors are logged and the function resolves.
 */
export async function createSessions(): Promise<void> {
  try {
    if (!isInitialized) {
      await initializeQMResources();
    }
    logger.info('Creating QM sessions...');
    for (const [sessionId, sessionArray] of Object.entries(sessionCache)) {
      // Evict sessions with excessive failures before topping up the pool.
      const initialCount = sessionArray.length;
      const filteredArray = sessionArray.filter(session => session.failedCalls <= 10);
      sessionCache[sessionId] = filteredArray;
      const removedCount = initialCount - filteredArray.length;
      if (removedCount > 0) {
        logger.info(
          `Removed ${removedCount} sessions with excessive failures for ${sessionId}. Remaining: ${filteredArray.length}`
        );
      }
      // Cap creation attempts so persistent auth/proxy failures cannot loop forever.
      const maxAttempts = 30;
      let attempts = 0;
      while (sessionCache[sessionId].length < 10 && attempts < maxAttempts) {
        attempts++;
        if (isShutdownSignalReceived()) {
          logger.info('Shutting down, skipping session creation');
          break; // Exit if shutting down
        }
        logger.info(`Creating new session for ${sessionId}`);
        const proxyInfo = await getRandomProxy();
        if (!proxyInfo) {
          logger.error('No proxy available for QM session creation');
          break; // Skip session creation if no proxy is available
        }
        // Convert ProxyInfo to "protocol://user:pass@host:port" string form.
        const auth =
          proxyInfo.username && proxyInfo.password
            ? `${proxyInfo.username}:${proxyInfo.password}@`
            : '';
        const proxy = `${proxyInfo.protocol}://${auth}${proxyInfo.host}:${proxyInfo.port}`;
        const newSession: QMSession = {
          proxy,
          headers: getQmHeaders(),
          successfulCalls: 0,
          failedCalls: 0,
          lastUsed: new Date(),
        };
        // NOTE(review): `proxy` is a non-standard fetch option (Bun runtime
        // extension) — confirm the target runtime supports it.
        const sessionResponse = await fetch(
          `https://app.quotemedia.com/auth/g/authenticate/dataTool/v0/500/${sessionId}`,
          {
            method: 'GET',
            proxy: newSession.proxy,
            headers: newSession.headers,
          }
        );
        logger.debug('Session response received', {
          status: sessionResponse.status,
          sessionId,
        });
        if (!sessionResponse.ok) {
          logger.error('Failed to create QM session', {
            sessionId,
            status: sessionResponse.status,
            statusText: sessionResponse.statusText,
          });
          continue; // Skip this session if creation failed
        }
        const sessionData = await sessionResponse.json();
        logger.info('QM session created successfully', {
          sessionId,
          sessionData,
          proxy: newSession.proxy,
          sessionCount: sessionCache[sessionId].length + 1,
        });
        newSession.headers['Datatool-Token'] = sessionData.token;
        sessionCache[sessionId].push(newSession);
      }
      if (attempts >= maxAttempts) {
        logger.warn(`Gave up creating sessions for ${sessionId} after ${maxAttempts} attempts`);
      }
    }
    return undefined;
  } catch (error) {
    logger.error('❌ Failed to fetch QM session', { error });
    return undefined;
  }
}
// Spider-based symbol search functions
/**
 * Entry point for the recursive ("spider") symbol search.
 *
 * A root payload (empty/null prefix) fans out into 26 per-letter jobs;
 * any other payload searches its prefix and may spawn deeper child jobs.
 *
 * @returns Summary of the step: success flag, symbols found, jobs queued.
 */
export async function spiderSymbolSearch(
  payload: SymbolSpiderJob
): Promise<{ success: boolean; symbolsFound: number; jobsCreated: number }> {
  try {
    if (!isInitialized) {
      await initializeQMResources();
    }
    const { prefix, depth, source = 'qm', maxDepth = 4 } = payload;
    logger.info(`Starting spider search`, { prefix: prefix || 'ROOT', depth, source });
    // Root job: Create A-Z jobs
    if (prefix === null || prefix === undefined || prefix === '') {
      return createAlphabetJobs(source, maxDepth);
    }
    // Leaf job: Search for symbols with this prefix
    return searchAndSpawnJobs(prefix, depth, source, maxDepth);
  } catch (error) {
    logger.error('Spider symbol search failed', { error, payload });
    return { success: false, symbolsFound: 0, jobsCreated: 0 };
  }
}
async function createAlphabetJobs(
source: string,
maxDepth: number
): Promise<{ success: boolean; symbolsFound: number; jobsCreated: number }> {
try {
const queueManager = QueueManager.getInstance();
const queue = queueManager.getQueue('qm');
let jobsCreated = 0;
// Create jobs for A-Z
for (let i = 0; i < 26; i++) {
const letter = String.fromCharCode(65 + i); // A=65, B=66, etc.
const job: SymbolSpiderJob = {
prefix: letter,
depth: 1,
source,
maxDepth,
};
await queue.add(
'spider-symbol-search',
{
handler: 'qm',
operation: 'spider-symbol-search',
payload: job,
},
{
priority: 5,
delay: i * 100, // Stagger jobs by 100ms
attempts: 3,
backoff: { type: 'exponential', delay: 2000 },
}
);
jobsCreated++;
}
logger.info(`Created ${jobsCreated} alphabet jobs (A-Z)`);
return { success: true, symbolsFound: 0, jobsCreated };
} catch (error) {
logger.error('Failed to create alphabet jobs', { error });
return { success: false, symbolsFound: 0, jobsCreated: 0 };
}
}
/**
 * Search one prefix; when the result set looks saturated (>= 50 symbols) and
 * depth allows, fan out 26 child jobs with the prefix extended by one letter.
 * Symbols/exchanges are persisted inside searchQMSymbolsAPI, so the terminal
 * case needs no extra work here.
 */
async function searchAndSpawnJobs(
  prefix: string,
  depth: number,
  source: string,
  maxDepth: number
): Promise<{ success: boolean; symbolsFound: number; jobsCreated: number }> {
  try {
    // Ensure sessions exist
    const sessionId = 'dc8c9930437f65d30f6597768800957017bac203a0a50342932757c8dfa158d6';
    const currentSessions = sessionCache[sessionId] || [];
    if (currentSessions.length === 0) {
      logger.info('No sessions found, creating sessions first...');
      await createSessions();
      // brief pause so freshly created sessions settle before first use
      await new Promise(resolve => setTimeout(resolve, 1000));
    }
    // Search for symbols with this prefix
    const symbols = await searchQMSymbolsAPI(prefix);
    const symbolCount = symbols.length;
    logger.info(`Prefix "${prefix}" returned ${symbolCount} symbols`);
    let jobsCreated = 0;
    // If we have 50+ symbols and haven't reached max depth, spawn sub-jobs
    // (50 is assumed to be the API's per-query result cap — TODO confirm)
    if (symbolCount >= 50 && depth < maxDepth) {
      const queueManager = QueueManager.getInstance();
      const queue = queueManager.getQueue('qm');
      logger.info(`Spawning sub-jobs for prefix "${prefix}" (${symbolCount} >= 50 symbols)`);
      // Create jobs for prefixA, prefixB, prefixC... prefixZ
      for (let i = 0; i < 26; i++) {
        const letter = String.fromCharCode(65 + i);
        const newPrefix = prefix + letter;
        const job: SymbolSpiderJob = {
          prefix: newPrefix,
          depth: depth + 1,
          source,
          maxDepth,
        };
        await queue.add(
          'spider-symbol-search',
          {
            handler: 'qm',
            operation: 'spider-symbol-search',
            payload: job,
          },
          {
            priority: Math.max(1, 6 - depth), // Higher priority for deeper jobs
            delay: i * 50, // Stagger sub-jobs by 50ms
            attempts: 3,
            backoff: { type: 'exponential', delay: 2000 },
          }
        );
        jobsCreated++;
      }
      logger.info(`Created ${jobsCreated} sub-jobs for prefix "${prefix}"`);
    } else {
      // Terminal case: save symbols and exchanges (already done in searchQMSymbolsAPI)
      logger.info(`Terminal case for prefix "${prefix}": ${symbolCount} symbols saved`);
    }
    return { success: true, symbolsFound: symbolCount, jobsCreated };
  } catch (error) {
    logger.error(`Failed to search and spawn jobs for prefix "${prefix}"`, { error, depth });
    return { success: false, symbolsFound: 0, jobsCreated: 0 };
  }
}
/**
 * Search symbols via the QuoteMedia lookup endpoint and persist the results.
 *
 * Side effects: upserts the returned records into `qmSymbols` (keyed by
 * qmSearchCode) and their distinct exchanges into `qmExchanges`; updates the
 * chosen session's success/failure counters.
 *
 * Fix: the previous version fetched a random proxy up front but never used
 * it — the request always goes through the session's own proxy — and threw
 * spuriously when no free proxy was available.
 *
 * NOTE(review): declared Promise<string[]> but actually resolves to the raw
 * record array from the API; in-file callers only use `.length` — confirm
 * before tightening the type.
 */
async function searchQMSymbolsAPI(query: string): Promise<string[]> {
  const sessionId = 'dc8c9930437f65d30f6597768800957017bac203a0a50342932757c8dfa158d6'; // Use the session ID for symbol lookup
  const pool = sessionCache[sessionId] ?? []; // guard against a missing pool
  const session = pool[Math.floor(Math.random() * pool.length)]; // random lookup session
  if (!session) {
    throw new Error(`No active session found for QM API with ID: ${sessionId}`);
  }
  try {
    // QM lookup endpoint for symbol search
    const apiUrl = `https://app.quotemedia.com/datatool/lookup.json?marketType=equity&pathName=%2Fdemo%2Fportal%2Fcompany-summary.php&q=${encodeURIComponent(query)}&qmodTool=SmartSymbolLookup&searchType=symbol&showFree=false&showHisa=false&webmasterId=500`;
    const response = await fetch(apiUrl, {
      method: 'GET',
      headers: session.headers,
      proxy: session.proxy,
    });
    if (!response.ok) {
      throw new Error(`QM API request failed: ${response.status} ${response.statusText}`);
    }
    const symbols = await response.json();
    const mongoClient = getMongoDBClient();
    // Keep the original "SYMBOL:EXCHANGE" code for reference; store the bare symbol.
    const updatedSymbols = symbols.map((symbol: Record<string, unknown>) => {
      return {
        ...symbol,
        qmSearchCode: symbol.symbol, // Store original symbol for reference
        symbol: String(symbol.symbol).split(':')[0], // Extract symbol from "symbol:exchange"
      };
    });
    await mongoClient.batchUpsert('qmSymbols', updatedSymbols, ['qmSearchCode']);
    // Collect each distinct exchange seen in this result set.
    const exchanges: Exchange[] = [];
    for (const symbol of symbols) {
      if (!exchanges.some(ex => ex.exchange === symbol.exchange)) {
        exchanges.push({
          exchange: symbol.exchange,
          exchangeCode: symbol.exchangeCode,
          exchangeShortName: symbol.exchangeShortName,
          countryCode: symbol.countryCode,
          source: 'qm',
        });
      }
    }
    await mongoClient.batchUpsert('qmExchanges', exchanges, ['exchange']);
    session.successfulCalls++;
    session.lastUsed = new Date();
    logger.info(
      `QM API returned ${symbols.length} symbols for query: ${query} with proxy ${session.proxy}`
    );
    return symbols;
  } catch (error) {
    logger.error(`Error searching QM symbols for query "${query}":`, error);
    if (session) {
      session.failedCalls++;
      session.lastUsed = new Date();
    }
    throw error;
  }
}
/**
 * Kick off the spider-based symbol search by submitting the root job (which
 * fans out into per-letter children).
 *
 * @returns A one-element status array on success, or null on failure.
 */
export async function fetchSymbols(): Promise<unknown[] | null> {
  try {
    if (!isInitialized) {
      await initializeQMResources();
    }
    logger.info('🔄 Starting QM spider-based symbol search...');
    const result = await spiderSymbolSearch({
      prefix: null, // Root job creates A-Z jobs
      depth: 0,
      source: 'qm',
      maxDepth: 4,
    });
    if (!result.success) {
      logger.error('Failed to initiate QM spider search');
      return null;
    }
    logger.info(
      `QM spider search initiated successfully. Created ${result.jobsCreated} initial jobs`
    );
    return [`Spider search initiated with ${result.jobsCreated} jobs`];
  } catch (error) {
    logger.error('❌ Failed to start QM spider symbol search', { error });
    return null;
  }
}
/**
 * Fetch exchange metadata from QM. Not implemented yet — currently always
 * resolves to null after logging.
 */
export async function fetchExchanges(): Promise<unknown[] | null> {
  try {
    if (!isInitialized) {
      await initializeQMResources();
    }
    logger.info('🔄 QM exchanges fetch - not implemented yet');
    // TODO: Implement QM exchanges fetching logic
    return null;
  } catch (error) {
    logger.error('❌ Failed to fetch QM exchanges', { error });
    return null;
  }
}
// Grouped export of the QM task functions for handler wiring and tests.
export const qmTasks = {
  createSessions,
  fetchSymbols,
  fetchExchanges,
  spiderSymbolSearch,
};

View file

@ -0,0 +1,81 @@
/**
* WebShare Provider for proxy management with scheduled updates
*/
import { getLogger } from '@stock-bot/logger';
import {
createJobHandler,
handlerRegistry,
type HandlerConfigWithSchedule,
} from '@stock-bot/queue';
import { updateProxies } from '@stock-bot/utils';
const logger = getLogger('webshare-provider');
/**
 * Initialize and register the WebShare provider: a 'fetch-proxies' operation
 * plus a 6-hourly schedule (also run once at startup).
 */
export function initializeWebShareProvider() {
  logger.debug('Registering WebShare provider with scheduled jobs...');
  const webShareProviderConfig: HandlerConfigWithSchedule = {
    name: 'webshare',
    operations: {
      'fetch-proxies': createJobHandler(async () => {
        logger.info('Fetching proxies from WebShare API');
        const { fetchWebShareProxies } = await import('./webshare.operations');
        try {
          const proxies = await fetchWebShareProxies();
          if (proxies.length > 0) {
            // Update the centralized proxy manager
            await updateProxies(proxies);
            // Count once instead of filtering twice (log line + result payload).
            const workingProxies = proxies.filter(p => p.isWorking !== false).length;
            logger.info('Updated proxy manager with WebShare proxies', {
              count: proxies.length,
              workingCount: workingProxies,
            });
            return {
              success: true,
              proxiesUpdated: proxies.length,
              workingProxies,
            };
          } else {
            logger.warn('No proxies fetched from WebShare API');
            return {
              success: false,
              proxiesUpdated: 0,
              error: 'No proxies returned from API',
            };
          }
        } catch (error) {
          logger.error('Failed to fetch and update proxies', { error });
          return {
            success: false,
            proxiesUpdated: 0,
            error: error instanceof Error ? error.message : 'Unknown error',
          };
        }
      }),
    },
    scheduledJobs: [
      {
        type: 'webshare-fetch',
        operation: 'fetch-proxies',
        cronPattern: '0 */6 * * *', // Every 6 hours
        priority: 3,
        description: 'Fetch fresh proxies from WebShare API',
        immediately: true, // Run on startup
      },
    ],
  };
  handlerRegistry.registerWithSchedule(webShareProviderConfig);
  logger.debug('WebShare provider registered successfully');
}
// Object-style facade over the initializer, for callers that prefer
// `webShareProvider.initialize()` over the bare function.
export const webShareProvider = {
  initialize: initializeWebShareProvider,
};

View file

@ -0,0 +1,82 @@
/**
* WebShare Tasks - API integration
*/
import { getLogger } from '@stock-bot/logger';
import { type ProxyInfo } from '@stock-bot/http';
const logger = getLogger('webshare-tasks');
/**
 * Fetch proxies from the WebShare API and convert them to ProxyInfo format.
 *
 * Follows the API's `next` pagination links so accounts with more than one
 * page (100 proxies) are fully retrieved; the previous implementation
 * silently returned only the first page while logging the full `count`.
 *
 * @returns All proxies for the account, or [] on configuration/API errors.
 */
export async function fetchWebShareProxies(): Promise<ProxyInfo[]> {
  try {
    // Get configuration from config system
    const { getConfig } = await import('@stock-bot/config');
    const config = getConfig();
    const apiKey = config.webshare?.apiKey;
    const apiUrl = config.webshare?.apiUrl;
    if (!apiKey || !apiUrl) {
      logger.error('Missing WebShare configuration', {
        hasApiKey: !!apiKey,
        hasApiUrl: !!apiUrl,
      });
      return [];
    }
    logger.info('Fetching proxies from WebShare API', { apiUrl });
    const proxies: ProxyInfo[] = [];
    let total = 0;
    // Safety cap so a malformed `next` chain cannot loop forever.
    const maxPages = 50;
    let nextUrl: string | null = `${apiUrl}proxy/list/?mode=direct&page=1&page_size=100`;
    for (let page = 0; nextUrl && page < maxPages; page++) {
      const response = await fetch(nextUrl, {
        method: 'GET',
        headers: {
          Authorization: `Token ${apiKey}`,
          'Content-Type': 'application/json',
        },
      });
      if (!response.ok) {
        logger.error('WebShare API request failed', {
          status: response.status,
          statusText: response.statusText,
        });
        return [];
      }
      const data = await response.json();
      if (!data.results || !Array.isArray(data.results)) {
        logger.error('Invalid response format from WebShare API', { data });
        return [];
      }
      total = data.count || total;
      // Transform proxy data to ProxyInfo format
      for (const proxy of data.results as Array<{
        username: string;
        password: string;
        proxy_address: string;
        port: number;
      }>) {
        proxies.push({
          source: 'webshare',
          protocol: 'http' as const,
          host: proxy.proxy_address,
          port: proxy.port,
          username: proxy.username,
          password: proxy.password,
          isWorking: true, // WebShare provides working proxies
          firstSeen: new Date(),
          lastChecked: new Date(),
        });
      }
      nextUrl = typeof data.next === 'string' && data.next ? data.next : null;
    }
    logger.info('Successfully fetched proxies from WebShare', {
      count: proxies.length,
      total: total || proxies.length,
    });
    return proxies;
  } catch (error) {
    logger.error('Failed to fetch proxies from WebShare', { error });
    return [];
  }
}