linux fs fixes
This commit is contained in:
parent
ac23b70146
commit
0b7846fe67
292 changed files with 41947 additions and 41947 deletions
|
|
@ -1,140 +1,140 @@
|
|||
import { ProxyInfo } from 'libs/http/src/types';
|
||||
import { ProviderConfig } from '../services/provider-registry.service';
|
||||
import { getLogger } from '@stock-bot/logger';
|
||||
import { BatchProcessor } from '../utils/batch-processor';
|
||||
|
||||
// Create logger for this provider
// Module-scoped so every operation handler shares one logger instance.
const logger = getLogger('proxy-provider');
|
||||
|
||||
// This will run at the same time each day as when the app started
|
||||
const getEvery24HourCron = (): string => {
|
||||
const now = new Date();
|
||||
const hours = now.getHours();
|
||||
const minutes = now.getMinutes();
|
||||
return `${minutes} ${hours} * * *`; // Every day at startup time
|
||||
};
|
||||
|
||||
export const proxyProvider: ProviderConfig = {
|
||||
name: 'proxy-service',
|
||||
service: 'proxy',
|
||||
operations: {
|
||||
'fetch-and-check': async (payload: { sources?: string[] }) => {
|
||||
const { proxyService } = await import('./proxy.tasks');
|
||||
const { queueManager } = await import('../services/queue.service');
|
||||
|
||||
const proxies = await proxyService.fetchProxiesFromSources();
|
||||
|
||||
if (proxies.length === 0) {
|
||||
return { proxiesFetched: 0, jobsCreated: 0 };
|
||||
}
|
||||
|
||||
const batchProcessor = new BatchProcessor(queueManager);
|
||||
|
||||
// Simplified configuration
|
||||
const result = await batchProcessor.processItems({
|
||||
items: proxies,
|
||||
batchSize: parseInt(process.env.PROXY_BATCH_SIZE || '200'),
|
||||
totalDelayMs: parseInt(process.env.PROXY_VALIDATION_HOURS || '4') * 60 * 60 * 1000 ,
|
||||
jobNamePrefix: 'proxy',
|
||||
operation: 'check-proxy',
|
||||
service: 'proxy',
|
||||
provider: 'proxy-service',
|
||||
priority: 2,
|
||||
useBatching: process.env.PROXY_DIRECT_MODE !== 'true', // Simple boolean flag
|
||||
createJobData: (proxy: ProxyInfo) => ({
|
||||
proxy,
|
||||
source: 'fetch-and-check'
|
||||
}),
|
||||
removeOnComplete: 5,
|
||||
removeOnFail: 3
|
||||
});
|
||||
|
||||
return {
|
||||
proxiesFetched: result.totalItems,
|
||||
...result
|
||||
};
|
||||
},
|
||||
|
||||
'process-proxy-batch': async (payload: any) => {
|
||||
// Process a batch of proxies - uses the fetch-and-check JobNamePrefix process-(proxy)-batch
|
||||
const { queueManager } = await import('../services/queue.service');
|
||||
const batchProcessor = new BatchProcessor(queueManager);
|
||||
return await batchProcessor.processBatch(
|
||||
payload,
|
||||
(proxy: ProxyInfo) => ({
|
||||
proxy,
|
||||
source: payload.config?.source || 'batch-processing'
|
||||
})
|
||||
);
|
||||
},
|
||||
|
||||
'check-proxy': async (payload: {
|
||||
proxy: ProxyInfo,
|
||||
source?: string,
|
||||
batchIndex?: number,
|
||||
itemIndex?: number,
|
||||
total?: number
|
||||
}) => {
|
||||
const { checkProxy } = await import('./proxy.tasks');
|
||||
|
||||
try {
|
||||
const result = await checkProxy(payload.proxy);
|
||||
|
||||
logger.debug('Proxy validated', {
|
||||
proxy: `${payload.proxy.host}:${payload.proxy.port}`,
|
||||
isWorking: result.isWorking,
|
||||
responseTime: result.responseTime,
|
||||
batchIndex: payload.batchIndex
|
||||
});
|
||||
|
||||
return {
|
||||
result,
|
||||
proxy: payload.proxy,
|
||||
// Only include batch info if it exists (for batch mode)
|
||||
...(payload.batchIndex !== undefined && {
|
||||
batchInfo: {
|
||||
batchIndex: payload.batchIndex,
|
||||
itemIndex: payload.itemIndex,
|
||||
total: payload.total,
|
||||
source: payload.source
|
||||
}
|
||||
})
|
||||
};
|
||||
} catch (error) {
|
||||
logger.warn('Proxy validation failed', {
|
||||
proxy: `${payload.proxy.host}:${payload.proxy.port}`,
|
||||
error: error instanceof Error ? error.message : String(error),
|
||||
batchIndex: payload.batchIndex
|
||||
});
|
||||
|
||||
return {
|
||||
result: { isWorking: false, error: String(error) },
|
||||
proxy: payload.proxy,
|
||||
// Only include batch info if it exists (for batch mode)
|
||||
...(payload.batchIndex !== undefined && {
|
||||
batchInfo: {
|
||||
batchIndex: payload.batchIndex,
|
||||
itemIndex: payload.itemIndex,
|
||||
total: payload.total,
|
||||
source: payload.source
|
||||
}
|
||||
})
|
||||
};
|
||||
}
|
||||
}
|
||||
},
|
||||
scheduledJobs: [
|
||||
{
|
||||
type: 'proxy-maintenance',
|
||||
operation: 'fetch-and-check',
|
||||
payload: {},
|
||||
// should remove and just run at the same time so app restarts dont keeping adding same jobs
|
||||
cronPattern: getEvery24HourCron(),
|
||||
priority: 5,
|
||||
immediately: true,
|
||||
description: 'Fetch and validate proxy list from sources'
|
||||
}
|
||||
]
|
||||
};
|
||||
|
||||
|
||||
import { ProxyInfo } from 'libs/http/src/types';
|
||||
import { ProviderConfig } from '../services/provider-registry.service';
|
||||
import { getLogger } from '@stock-bot/logger';
|
||||
import { BatchProcessor } from '../utils/batch-processor';
|
||||
|
||||
// Create logger for this provider
// Module-scoped so every operation handler shares one logger instance.
const logger = getLogger('proxy-provider');
|
||||
|
||||
// This will run at the same time each day as when the app started
|
||||
const getEvery24HourCron = (): string => {
|
||||
const now = new Date();
|
||||
const hours = now.getHours();
|
||||
const minutes = now.getMinutes();
|
||||
return `${minutes} ${hours} * * *`; // Every day at startup time
|
||||
};
|
||||
|
||||
export const proxyProvider: ProviderConfig = {
|
||||
name: 'proxy-service',
|
||||
service: 'proxy',
|
||||
operations: {
|
||||
'fetch-and-check': async (payload: { sources?: string[] }) => {
|
||||
const { proxyService } = await import('./proxy.tasks');
|
||||
const { queueManager } = await import('../services/queue.service');
|
||||
|
||||
const proxies = await proxyService.fetchProxiesFromSources();
|
||||
|
||||
if (proxies.length === 0) {
|
||||
return { proxiesFetched: 0, jobsCreated: 0 };
|
||||
}
|
||||
|
||||
const batchProcessor = new BatchProcessor(queueManager);
|
||||
|
||||
// Simplified configuration
|
||||
const result = await batchProcessor.processItems({
|
||||
items: proxies,
|
||||
batchSize: parseInt(process.env.PROXY_BATCH_SIZE || '200'),
|
||||
totalDelayMs: parseInt(process.env.PROXY_VALIDATION_HOURS || '4') * 60 * 60 * 1000 ,
|
||||
jobNamePrefix: 'proxy',
|
||||
operation: 'check-proxy',
|
||||
service: 'proxy',
|
||||
provider: 'proxy-service',
|
||||
priority: 2,
|
||||
useBatching: process.env.PROXY_DIRECT_MODE !== 'true', // Simple boolean flag
|
||||
createJobData: (proxy: ProxyInfo) => ({
|
||||
proxy,
|
||||
source: 'fetch-and-check'
|
||||
}),
|
||||
removeOnComplete: 5,
|
||||
removeOnFail: 3
|
||||
});
|
||||
|
||||
return {
|
||||
proxiesFetched: result.totalItems,
|
||||
...result
|
||||
};
|
||||
},
|
||||
|
||||
'process-proxy-batch': async (payload: any) => {
|
||||
// Process a batch of proxies - uses the fetch-and-check JobNamePrefix process-(proxy)-batch
|
||||
const { queueManager } = await import('../services/queue.service');
|
||||
const batchProcessor = new BatchProcessor(queueManager);
|
||||
return await batchProcessor.processBatch(
|
||||
payload,
|
||||
(proxy: ProxyInfo) => ({
|
||||
proxy,
|
||||
source: payload.config?.source || 'batch-processing'
|
||||
})
|
||||
);
|
||||
},
|
||||
|
||||
'check-proxy': async (payload: {
|
||||
proxy: ProxyInfo,
|
||||
source?: string,
|
||||
batchIndex?: number,
|
||||
itemIndex?: number,
|
||||
total?: number
|
||||
}) => {
|
||||
const { checkProxy } = await import('./proxy.tasks');
|
||||
|
||||
try {
|
||||
const result = await checkProxy(payload.proxy);
|
||||
|
||||
logger.debug('Proxy validated', {
|
||||
proxy: `${payload.proxy.host}:${payload.proxy.port}`,
|
||||
isWorking: result.isWorking,
|
||||
responseTime: result.responseTime,
|
||||
batchIndex: payload.batchIndex
|
||||
});
|
||||
|
||||
return {
|
||||
result,
|
||||
proxy: payload.proxy,
|
||||
// Only include batch info if it exists (for batch mode)
|
||||
...(payload.batchIndex !== undefined && {
|
||||
batchInfo: {
|
||||
batchIndex: payload.batchIndex,
|
||||
itemIndex: payload.itemIndex,
|
||||
total: payload.total,
|
||||
source: payload.source
|
||||
}
|
||||
})
|
||||
};
|
||||
} catch (error) {
|
||||
logger.warn('Proxy validation failed', {
|
||||
proxy: `${payload.proxy.host}:${payload.proxy.port}`,
|
||||
error: error instanceof Error ? error.message : String(error),
|
||||
batchIndex: payload.batchIndex
|
||||
});
|
||||
|
||||
return {
|
||||
result: { isWorking: false, error: String(error) },
|
||||
proxy: payload.proxy,
|
||||
// Only include batch info if it exists (for batch mode)
|
||||
...(payload.batchIndex !== undefined && {
|
||||
batchInfo: {
|
||||
batchIndex: payload.batchIndex,
|
||||
itemIndex: payload.itemIndex,
|
||||
total: payload.total,
|
||||
source: payload.source
|
||||
}
|
||||
})
|
||||
};
|
||||
}
|
||||
}
|
||||
},
|
||||
scheduledJobs: [
|
||||
{
|
||||
type: 'proxy-maintenance',
|
||||
operation: 'fetch-and-check',
|
||||
payload: {},
|
||||
// should remove and just run at the same time so app restarts dont keeping adding same jobs
|
||||
cronPattern: getEvery24HourCron(),
|
||||
priority: 5,
|
||||
immediately: true,
|
||||
description: 'Fetch and validate proxy list from sources'
|
||||
}
|
||||
]
|
||||
};
|
||||
|
||||
|
||||
|
|
|
|||
|
|
@ -1,264 +1,264 @@
|
|||
import { getLogger } from '@stock-bot/logger';
|
||||
import createCache, { type CacheProvider } from '@stock-bot/cache';
|
||||
import { HttpClient, ProxyInfo } from '@stock-bot/http';
|
||||
import pLimit from 'p-limit';
|
||||
|
||||
// Shared configuration and utilities
|
||||
const PROXY_CONFIG = {
|
||||
CACHE_KEY: 'proxy',
|
||||
CACHE_TTL: 86400, // 24 hours
|
||||
CHECK_TIMEOUT: 7000,
|
||||
CHECK_IP: '99.246.102.205',
|
||||
CHECK_URL: 'https://proxy-detection.stare.gg/?api_key=bd406bf53ddc6abe1d9de5907830a955',
|
||||
CONCURRENCY_LIMIT: 100,
|
||||
PROXY_SOURCES: [
|
||||
{url: 'https://raw.githubusercontent.com/prxchk/proxy-list/main/http.txt',protocol: 'http', },
|
||||
{url: 'https://raw.githubusercontent.com/casals-ar/proxy-list/main/http',protocol: 'http', },
|
||||
{url: 'https://raw.githubusercontent.com/MuRongPIG/Proxy-Master/main/http.txt',protocol: 'http', },
|
||||
{url: 'https://raw.githubusercontent.com/vakhov/fresh-proxy-list/master/http.txt',protocol: 'http', },
|
||||
{url: 'https://raw.githubusercontent.com/sunny9577/proxy-scraper/master/proxies.txt',protocol: 'http', },
|
||||
{url: 'https://raw.githubusercontent.com/officialputuid/KangProxy/refs/heads/KangProxy/http/http.txt',protocol: 'http', },
|
||||
{url: 'https://raw.githubusercontent.com/gfpcom/free-proxy-list/refs/heads/main/list/http.txt', protocol: 'http' },
|
||||
{url: 'https://raw.githubusercontent.com/dpangestuw/Free-Proxy/refs/heads/main/http_proxies.txt',protocol: 'http', },
|
||||
{url: 'https://raw.githubusercontent.com/gitrecon1455/fresh-proxy-list/refs/heads/main/proxylist.txt',protocol: 'http', },
|
||||
{url: 'https://raw.githubusercontent.com/themiralay/Proxy-List-World/refs/heads/master/data.txt',protocol: 'http', },
|
||||
{url: 'https://raw.githubusercontent.com/vakhov/fresh-proxy-list/refs/heads/master/http.txt',protocol: 'http', },
|
||||
{url: 'https://raw.githubusercontent.com/casa-ls/proxy-list/refs/heads/main/http',protocol: 'http', },
|
||||
{url: 'https://raw.githubusercontent.com/databay-labs/free-proxy-list/refs/heads/master/http.txt',protocol: 'http', },
|
||||
{url: 'https://raw.githubusercontent.com/BreakingTechFr/Proxy_Free/refs/heads/main/proxies/http.txt', protocol: 'http' },
|
||||
{url: 'https://raw.githubusercontent.com/TheSpeedX/PROXY-List/master/http.txt',protocol: 'http', },
|
||||
{url: 'https://raw.githubusercontent.com/ErcinDedeoglu/proxies/main/proxies/http.txt',protocol: 'http', },
|
||||
{url: 'https://raw.githubusercontent.com/monosans/proxy-list/main/proxies/http.txt',protocol: 'http', },
|
||||
{url: 'https://raw.githubusercontent.com/TuanMinPay/live-proxy/master/http.txt',protocol: 'http', },
|
||||
|
||||
// {url: 'https://raw.githubusercontent.com/r00tee/Proxy-List/refs/heads/main/Https.txt',protocol: 'https', },
|
||||
// {url: 'https://raw.githubusercontent.com/ErcinDedeoglu/proxies/main/proxies/https.txt',protocol: 'https', },
|
||||
// {url: 'https://raw.githubusercontent.com/vakhov/fresh-proxy-list/refs/heads/master/https.txt', protocol: 'https' },
|
||||
// {url: 'https://raw.githubusercontent.com/databay-labs/free-proxy-list/refs/heads/master/https.txt',protocol: 'https', },
|
||||
// {url: 'https://raw.githubusercontent.com/officialputuid/KangProxy/refs/heads/KangProxy/https/https.txt',protocol: 'https', },
|
||||
// {url: 'https://raw.githubusercontent.com/zloi-user/hideip.me/refs/heads/master/https.txt',protocol: 'https', },
|
||||
// {url: 'https://raw.githubusercontent.com/gfpcom/free-proxy-list/refs/heads/main/list/https.txt',protocol: 'https', },
|
||||
]
|
||||
};
|
||||
|
||||
// Shared instances (module-scoped, not global)
|
||||
let logger: ReturnType<typeof getLogger>;
|
||||
let cache: CacheProvider;
|
||||
let httpClient: HttpClient;
|
||||
let concurrencyLimit: ReturnType<typeof pLimit>;
|
||||
|
||||
// Initialize shared resources
|
||||
function initializeSharedResources() {
|
||||
if (!logger) {
|
||||
logger = getLogger('proxy-tasks');
|
||||
cache = createCache('hybrid');
|
||||
httpClient = new HttpClient({ timeout: 10000 }, logger);
|
||||
concurrencyLimit = pLimit(PROXY_CONFIG.CONCURRENCY_LIMIT);
|
||||
logger.info('Proxy tasks initialized');
|
||||
}
|
||||
}
|
||||
|
||||
// Individual task functions
|
||||
export async function queueProxyFetch(): Promise<string> {
|
||||
initializeSharedResources();
|
||||
|
||||
const { queueManager } = await import('../services/queue.service');
|
||||
const job = await queueManager.addJob({
|
||||
type: 'proxy-fetch',
|
||||
service: 'proxy',
|
||||
provider: 'proxy-service',
|
||||
operation: 'fetch-and-check',
|
||||
payload: {},
|
||||
priority: 5
|
||||
});
|
||||
|
||||
const jobId = job.id || 'unknown';
|
||||
logger.info('Proxy fetch job queued', { jobId });
|
||||
return jobId;
|
||||
}
|
||||
|
||||
export async function queueProxyCheck(proxies: ProxyInfo[]): Promise<string> {
|
||||
initializeSharedResources();
|
||||
|
||||
const { queueManager } = await import('../services/queue.service');
|
||||
const job = await queueManager.addJob({
|
||||
type: 'proxy-check',
|
||||
service: 'proxy',
|
||||
provider: 'proxy-service',
|
||||
operation: 'check-specific',
|
||||
payload: { proxies },
|
||||
priority: 3
|
||||
});
|
||||
|
||||
const jobId = job.id || 'unknown';
|
||||
logger.info('Proxy check job queued', { jobId, count: proxies.length });
|
||||
return jobId;
|
||||
}
|
||||
|
||||
export async function fetchProxiesFromSources(): Promise<ProxyInfo[]> {
|
||||
initializeSharedResources();
|
||||
|
||||
const sources = PROXY_CONFIG.PROXY_SOURCES.map(source =>
|
||||
concurrencyLimit(() => fetchProxiesFromSource(source))
|
||||
);
|
||||
const result = await Promise.all(sources);
|
||||
let allProxies: ProxyInfo[] = result.flat();
|
||||
allProxies = removeDuplicateProxies(allProxies);
|
||||
// await checkProxies(allProxies);
|
||||
return allProxies;
|
||||
}
|
||||
|
||||
export async function fetchProxiesFromSource(source: { url: string; protocol: string }): Promise<ProxyInfo[]> {
|
||||
initializeSharedResources();
|
||||
|
||||
const allProxies: ProxyInfo[] = [];
|
||||
|
||||
try {
|
||||
logger.info(`Fetching proxies from ${source.url}`);
|
||||
|
||||
const response = await httpClient.get(source.url, {
|
||||
timeout: 10000
|
||||
});
|
||||
|
||||
if (response.status !== 200) {
|
||||
logger.warn(`Failed to fetch from ${source.url}: ${response.status}`);
|
||||
return [];
|
||||
}
|
||||
|
||||
const text = response.data;
|
||||
const lines = text.split('\n').filter((line: string) => line.trim());
|
||||
|
||||
for (const line of lines) {
|
||||
let trimmed = line.trim();
|
||||
trimmed = cleanProxyUrl(trimmed);
|
||||
if (!trimmed || trimmed.startsWith('#')) continue;
|
||||
|
||||
// Parse formats like "host:port" or "host:port:user:pass"
|
||||
const parts = trimmed.split(':');
|
||||
if (parts.length >= 2) {
|
||||
const proxy: ProxyInfo = {
|
||||
protocol: source.protocol as 'http' | 'https' | 'socks4' | 'socks5',
|
||||
host: parts[0],
|
||||
port: parseInt(parts[1])
|
||||
};
|
||||
|
||||
if (!isNaN(proxy.port) && proxy.host) {
|
||||
allProxies.push(proxy);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
logger.info(`Parsed ${allProxies.length} proxies from ${source.url}`);
|
||||
|
||||
} catch (error) {
|
||||
logger.error(`Error fetching proxies from ${source.url}`, error);
|
||||
return [];
|
||||
}
|
||||
|
||||
return allProxies;
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if a proxy is working
|
||||
*/
|
||||
export async function checkProxy(proxy: ProxyInfo): Promise<ProxyInfo> {
|
||||
initializeSharedResources();
|
||||
|
||||
let success = false;
|
||||
logger.debug(`Checking Proxy:`, {
|
||||
protocol: proxy.protocol,
|
||||
host: proxy.host,
|
||||
port: proxy.port,
|
||||
});
|
||||
|
||||
try {
|
||||
// Test the proxy
|
||||
const response = await httpClient.get(PROXY_CONFIG.CHECK_URL, {
|
||||
proxy,
|
||||
timeout: PROXY_CONFIG.CHECK_TIMEOUT
|
||||
});
|
||||
|
||||
const isWorking = response.status >= 200 && response.status < 300;
|
||||
|
||||
const result: ProxyInfo = {
|
||||
...proxy,
|
||||
isWorking,
|
||||
checkedAt: new Date(),
|
||||
responseTime: response.responseTime,
|
||||
};
|
||||
|
||||
if (isWorking && !JSON.stringify(response.data).includes(PROXY_CONFIG.CHECK_IP)) {
|
||||
success = true;
|
||||
await cache.set(`${PROXY_CONFIG.CACHE_KEY}:${proxy.protocol}://${proxy.host}:${proxy.port}`, result, PROXY_CONFIG.CACHE_TTL);
|
||||
} else {
|
||||
await cache.del(`${PROXY_CONFIG.CACHE_KEY}:${proxy.protocol}://${proxy.host}:${proxy.port}`);
|
||||
}
|
||||
|
||||
logger.debug('Proxy check completed', {
|
||||
host: proxy.host,
|
||||
port: proxy.port,
|
||||
isWorking,
|
||||
});
|
||||
|
||||
return result;
|
||||
|
||||
} catch (error) {
|
||||
const errorMessage = error instanceof Error ? error.message : String(error);
|
||||
|
||||
const result: ProxyInfo = {
|
||||
...proxy,
|
||||
isWorking: false,
|
||||
error: errorMessage,
|
||||
checkedAt: new Date()
|
||||
};
|
||||
|
||||
// If the proxy check failed, remove it from cache - success is here cause i think abort signal fails sometimes
|
||||
if (!success) {
|
||||
await cache.del(`${PROXY_CONFIG.CACHE_KEY}:${proxy.protocol}://${proxy.host}:${proxy.port}`);
|
||||
}
|
||||
|
||||
logger.debug('Proxy check failed', {
|
||||
host: proxy.host,
|
||||
port: proxy.port,
|
||||
error: errorMessage
|
||||
});
|
||||
|
||||
return result;
|
||||
}
|
||||
}
|
||||
|
||||
// Utility functions
|
||||
function cleanProxyUrl(url: string): string {
|
||||
return url
|
||||
.replace(/^https?:\/\//, '')
|
||||
.replace(/^0+/, '')
|
||||
.replace(/:0+(\d)/g, ':$1');
|
||||
}
|
||||
|
||||
function removeDuplicateProxies(proxies: ProxyInfo[]): ProxyInfo[] {
|
||||
const seen = new Set<string>();
|
||||
const unique: ProxyInfo[] = [];
|
||||
|
||||
for (const proxy of proxies) {
|
||||
const key = `${proxy.protocol}://${proxy.host}:${proxy.port}`;
|
||||
if (!seen.has(key)) {
|
||||
seen.add(key);
|
||||
unique.push(proxy);
|
||||
}
|
||||
}
|
||||
|
||||
return unique;
|
||||
}
|
||||
|
||||
// Optional: Export a convenience object that groups related tasks
|
||||
export const proxyTasks = {
|
||||
queueProxyFetch,
|
||||
queueProxyCheck,
|
||||
fetchProxiesFromSources,
|
||||
fetchProxiesFromSource,
|
||||
checkProxy,
|
||||
};
|
||||
|
||||
// Export singleton instance for backward compatibility (optional)
|
||||
// Remove this if you want to fully move to the task-based approach
|
||||
import { getLogger } from '@stock-bot/logger';
|
||||
import createCache, { type CacheProvider } from '@stock-bot/cache';
|
||||
import { HttpClient, ProxyInfo } from '@stock-bot/http';
|
||||
import pLimit from 'p-limit';
|
||||
|
||||
// Shared configuration and utilities
|
||||
const PROXY_CONFIG = {
|
||||
CACHE_KEY: 'proxy',
|
||||
CACHE_TTL: 86400, // 24 hours
|
||||
CHECK_TIMEOUT: 7000,
|
||||
CHECK_IP: '99.246.102.205',
|
||||
CHECK_URL: 'https://proxy-detection.stare.gg/?api_key=bd406bf53ddc6abe1d9de5907830a955',
|
||||
CONCURRENCY_LIMIT: 100,
|
||||
PROXY_SOURCES: [
|
||||
{url: 'https://raw.githubusercontent.com/prxchk/proxy-list/main/http.txt',protocol: 'http', },
|
||||
{url: 'https://raw.githubusercontent.com/casals-ar/proxy-list/main/http',protocol: 'http', },
|
||||
{url: 'https://raw.githubusercontent.com/MuRongPIG/Proxy-Master/main/http.txt',protocol: 'http', },
|
||||
{url: 'https://raw.githubusercontent.com/vakhov/fresh-proxy-list/master/http.txt',protocol: 'http', },
|
||||
{url: 'https://raw.githubusercontent.com/sunny9577/proxy-scraper/master/proxies.txt',protocol: 'http', },
|
||||
{url: 'https://raw.githubusercontent.com/officialputuid/KangProxy/refs/heads/KangProxy/http/http.txt',protocol: 'http', },
|
||||
{url: 'https://raw.githubusercontent.com/gfpcom/free-proxy-list/refs/heads/main/list/http.txt', protocol: 'http' },
|
||||
{url: 'https://raw.githubusercontent.com/dpangestuw/Free-Proxy/refs/heads/main/http_proxies.txt',protocol: 'http', },
|
||||
{url: 'https://raw.githubusercontent.com/gitrecon1455/fresh-proxy-list/refs/heads/main/proxylist.txt',protocol: 'http', },
|
||||
{url: 'https://raw.githubusercontent.com/themiralay/Proxy-List-World/refs/heads/master/data.txt',protocol: 'http', },
|
||||
{url: 'https://raw.githubusercontent.com/vakhov/fresh-proxy-list/refs/heads/master/http.txt',protocol: 'http', },
|
||||
{url: 'https://raw.githubusercontent.com/casa-ls/proxy-list/refs/heads/main/http',protocol: 'http', },
|
||||
{url: 'https://raw.githubusercontent.com/databay-labs/free-proxy-list/refs/heads/master/http.txt',protocol: 'http', },
|
||||
{url: 'https://raw.githubusercontent.com/BreakingTechFr/Proxy_Free/refs/heads/main/proxies/http.txt', protocol: 'http' },
|
||||
{url: 'https://raw.githubusercontent.com/TheSpeedX/PROXY-List/master/http.txt',protocol: 'http', },
|
||||
{url: 'https://raw.githubusercontent.com/ErcinDedeoglu/proxies/main/proxies/http.txt',protocol: 'http', },
|
||||
{url: 'https://raw.githubusercontent.com/monosans/proxy-list/main/proxies/http.txt',protocol: 'http', },
|
||||
{url: 'https://raw.githubusercontent.com/TuanMinPay/live-proxy/master/http.txt',protocol: 'http', },
|
||||
|
||||
// {url: 'https://raw.githubusercontent.com/r00tee/Proxy-List/refs/heads/main/Https.txt',protocol: 'https', },
|
||||
// {url: 'https://raw.githubusercontent.com/ErcinDedeoglu/proxies/main/proxies/https.txt',protocol: 'https', },
|
||||
// {url: 'https://raw.githubusercontent.com/vakhov/fresh-proxy-list/refs/heads/master/https.txt', protocol: 'https' },
|
||||
// {url: 'https://raw.githubusercontent.com/databay-labs/free-proxy-list/refs/heads/master/https.txt',protocol: 'https', },
|
||||
// {url: 'https://raw.githubusercontent.com/officialputuid/KangProxy/refs/heads/KangProxy/https/https.txt',protocol: 'https', },
|
||||
// {url: 'https://raw.githubusercontent.com/zloi-user/hideip.me/refs/heads/master/https.txt',protocol: 'https', },
|
||||
// {url: 'https://raw.githubusercontent.com/gfpcom/free-proxy-list/refs/heads/main/list/https.txt',protocol: 'https', },
|
||||
]
|
||||
};
|
||||
|
||||
// Shared instances (module-scoped, not global)
|
||||
let logger: ReturnType<typeof getLogger>;
|
||||
let cache: CacheProvider;
|
||||
let httpClient: HttpClient;
|
||||
let concurrencyLimit: ReturnType<typeof pLimit>;
|
||||
|
||||
// Initialize shared resources
|
||||
function initializeSharedResources() {
|
||||
if (!logger) {
|
||||
logger = getLogger('proxy-tasks');
|
||||
cache = createCache('hybrid');
|
||||
httpClient = new HttpClient({ timeout: 10000 }, logger);
|
||||
concurrencyLimit = pLimit(PROXY_CONFIG.CONCURRENCY_LIMIT);
|
||||
logger.info('Proxy tasks initialized');
|
||||
}
|
||||
}
|
||||
|
||||
// Individual task functions
|
||||
export async function queueProxyFetch(): Promise<string> {
|
||||
initializeSharedResources();
|
||||
|
||||
const { queueManager } = await import('../services/queue.service');
|
||||
const job = await queueManager.addJob({
|
||||
type: 'proxy-fetch',
|
||||
service: 'proxy',
|
||||
provider: 'proxy-service',
|
||||
operation: 'fetch-and-check',
|
||||
payload: {},
|
||||
priority: 5
|
||||
});
|
||||
|
||||
const jobId = job.id || 'unknown';
|
||||
logger.info('Proxy fetch job queued', { jobId });
|
||||
return jobId;
|
||||
}
|
||||
|
||||
export async function queueProxyCheck(proxies: ProxyInfo[]): Promise<string> {
|
||||
initializeSharedResources();
|
||||
|
||||
const { queueManager } = await import('../services/queue.service');
|
||||
const job = await queueManager.addJob({
|
||||
type: 'proxy-check',
|
||||
service: 'proxy',
|
||||
provider: 'proxy-service',
|
||||
operation: 'check-specific',
|
||||
payload: { proxies },
|
||||
priority: 3
|
||||
});
|
||||
|
||||
const jobId = job.id || 'unknown';
|
||||
logger.info('Proxy check job queued', { jobId, count: proxies.length });
|
||||
return jobId;
|
||||
}
|
||||
|
||||
export async function fetchProxiesFromSources(): Promise<ProxyInfo[]> {
|
||||
initializeSharedResources();
|
||||
|
||||
const sources = PROXY_CONFIG.PROXY_SOURCES.map(source =>
|
||||
concurrencyLimit(() => fetchProxiesFromSource(source))
|
||||
);
|
||||
const result = await Promise.all(sources);
|
||||
let allProxies: ProxyInfo[] = result.flat();
|
||||
allProxies = removeDuplicateProxies(allProxies);
|
||||
// await checkProxies(allProxies);
|
||||
return allProxies;
|
||||
}
|
||||
|
||||
export async function fetchProxiesFromSource(source: { url: string; protocol: string }): Promise<ProxyInfo[]> {
|
||||
initializeSharedResources();
|
||||
|
||||
const allProxies: ProxyInfo[] = [];
|
||||
|
||||
try {
|
||||
logger.info(`Fetching proxies from ${source.url}`);
|
||||
|
||||
const response = await httpClient.get(source.url, {
|
||||
timeout: 10000
|
||||
});
|
||||
|
||||
if (response.status !== 200) {
|
||||
logger.warn(`Failed to fetch from ${source.url}: ${response.status}`);
|
||||
return [];
|
||||
}
|
||||
|
||||
const text = response.data;
|
||||
const lines = text.split('\n').filter((line: string) => line.trim());
|
||||
|
||||
for (const line of lines) {
|
||||
let trimmed = line.trim();
|
||||
trimmed = cleanProxyUrl(trimmed);
|
||||
if (!trimmed || trimmed.startsWith('#')) continue;
|
||||
|
||||
// Parse formats like "host:port" or "host:port:user:pass"
|
||||
const parts = trimmed.split(':');
|
||||
if (parts.length >= 2) {
|
||||
const proxy: ProxyInfo = {
|
||||
protocol: source.protocol as 'http' | 'https' | 'socks4' | 'socks5',
|
||||
host: parts[0],
|
||||
port: parseInt(parts[1])
|
||||
};
|
||||
|
||||
if (!isNaN(proxy.port) && proxy.host) {
|
||||
allProxies.push(proxy);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
logger.info(`Parsed ${allProxies.length} proxies from ${source.url}`);
|
||||
|
||||
} catch (error) {
|
||||
logger.error(`Error fetching proxies from ${source.url}`, error);
|
||||
return [];
|
||||
}
|
||||
|
||||
return allProxies;
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if a proxy is working
|
||||
*/
|
||||
export async function checkProxy(proxy: ProxyInfo): Promise<ProxyInfo> {
|
||||
initializeSharedResources();
|
||||
|
||||
let success = false;
|
||||
logger.debug(`Checking Proxy:`, {
|
||||
protocol: proxy.protocol,
|
||||
host: proxy.host,
|
||||
port: proxy.port,
|
||||
});
|
||||
|
||||
try {
|
||||
// Test the proxy
|
||||
const response = await httpClient.get(PROXY_CONFIG.CHECK_URL, {
|
||||
proxy,
|
||||
timeout: PROXY_CONFIG.CHECK_TIMEOUT
|
||||
});
|
||||
|
||||
const isWorking = response.status >= 200 && response.status < 300;
|
||||
|
||||
const result: ProxyInfo = {
|
||||
...proxy,
|
||||
isWorking,
|
||||
checkedAt: new Date(),
|
||||
responseTime: response.responseTime,
|
||||
};
|
||||
|
||||
if (isWorking && !JSON.stringify(response.data).includes(PROXY_CONFIG.CHECK_IP)) {
|
||||
success = true;
|
||||
await cache.set(`${PROXY_CONFIG.CACHE_KEY}:${proxy.protocol}://${proxy.host}:${proxy.port}`, result, PROXY_CONFIG.CACHE_TTL);
|
||||
} else {
|
||||
await cache.del(`${PROXY_CONFIG.CACHE_KEY}:${proxy.protocol}://${proxy.host}:${proxy.port}`);
|
||||
}
|
||||
|
||||
logger.debug('Proxy check completed', {
|
||||
host: proxy.host,
|
||||
port: proxy.port,
|
||||
isWorking,
|
||||
});
|
||||
|
||||
return result;
|
||||
|
||||
} catch (error) {
|
||||
const errorMessage = error instanceof Error ? error.message : String(error);
|
||||
|
||||
const result: ProxyInfo = {
|
||||
...proxy,
|
||||
isWorking: false,
|
||||
error: errorMessage,
|
||||
checkedAt: new Date()
|
||||
};
|
||||
|
||||
// If the proxy check failed, remove it from cache - success is here cause i think abort signal fails sometimes
|
||||
if (!success) {
|
||||
await cache.del(`${PROXY_CONFIG.CACHE_KEY}:${proxy.protocol}://${proxy.host}:${proxy.port}`);
|
||||
}
|
||||
|
||||
logger.debug('Proxy check failed', {
|
||||
host: proxy.host,
|
||||
port: proxy.port,
|
||||
error: errorMessage
|
||||
});
|
||||
|
||||
return result;
|
||||
}
|
||||
}
|
||||
|
||||
// Utility functions
|
||||
function cleanProxyUrl(url: string): string {
|
||||
return url
|
||||
.replace(/^https?:\/\//, '')
|
||||
.replace(/^0+/, '')
|
||||
.replace(/:0+(\d)/g, ':$1');
|
||||
}
|
||||
|
||||
function removeDuplicateProxies(proxies: ProxyInfo[]): ProxyInfo[] {
|
||||
const seen = new Set<string>();
|
||||
const unique: ProxyInfo[] = [];
|
||||
|
||||
for (const proxy of proxies) {
|
||||
const key = `${proxy.protocol}://${proxy.host}:${proxy.port}`;
|
||||
if (!seen.has(key)) {
|
||||
seen.add(key);
|
||||
unique.push(proxy);
|
||||
}
|
||||
}
|
||||
|
||||
return unique;
|
||||
}
|
||||
|
||||
// Optional: Export a convenience object that groups related tasks
|
||||
export const proxyTasks = {
|
||||
queueProxyFetch,
|
||||
queueProxyCheck,
|
||||
fetchProxiesFromSources,
|
||||
fetchProxiesFromSource,
|
||||
checkProxy,
|
||||
};
|
||||
|
||||
// Export singleton instance for backward compatibility (optional)
|
||||
// Remove this if you want to fully move to the task-based approach
|
||||
// Backward-compatible alias: legacy callers import `proxyService`; new code should use proxyTasks.
export const proxyService = proxyTasks;
|
||||
|
|
@ -1,175 +1,175 @@
|
|||
import { ProviderConfig } from '../services/provider-registry.service';
|
||||
import { getLogger } from '@stock-bot/logger';
|
||||
|
||||
// Module-scoped logger shared by all QuoteMedia operation handlers.
const logger = getLogger('quotemedia-provider');
|
||||
|
||||
export const quotemediaProvider: ProviderConfig = {
|
||||
name: 'quotemedia',
|
||||
service: 'market-data',
|
||||
operations: { 'live-data': async (payload: { symbol: string; fields?: string[] }) => {
|
||||
logger.info('Fetching live data from QuoteMedia', { symbol: payload.symbol });
|
||||
|
||||
// Simulate QuoteMedia API call
|
||||
const mockData = {
|
||||
symbol: payload.symbol,
|
||||
price: Math.random() * 1000 + 100,
|
||||
volume: Math.floor(Math.random() * 1000000),
|
||||
change: (Math.random() - 0.5) * 20,
|
||||
changePercent: (Math.random() - 0.5) * 5,
|
||||
timestamp: new Date().toISOString(),
|
||||
source: 'quotemedia',
|
||||
fields: payload.fields || ['price', 'volume', 'change']
|
||||
};
|
||||
|
||||
// Simulate network delay
|
||||
await new Promise(resolve => setTimeout(resolve, 100 + Math.random() * 200));
|
||||
|
||||
return mockData;
|
||||
},
|
||||
|
||||
'historical-data': async (payload: {
|
||||
symbol: string;
|
||||
from: Date;
|
||||
to: Date;
|
||||
interval?: string;
|
||||
fields?: string[]; }) => {
|
||||
logger.info('Fetching historical data from QuoteMedia', {
|
||||
symbol: payload.symbol,
|
||||
from: payload.from,
|
||||
to: payload.to,
|
||||
interval: payload.interval || '1d'
|
||||
});
|
||||
|
||||
// Generate mock historical data
|
||||
const days = Math.ceil((payload.to.getTime() - payload.from.getTime()) / (1000 * 60 * 60 * 24));
|
||||
const data = [];
|
||||
|
||||
for (let i = 0; i < Math.min(days, 100); i++) {
|
||||
const date = new Date(payload.from.getTime() + i * 24 * 60 * 60 * 1000);
|
||||
data.push({
|
||||
date: date.toISOString().split('T')[0],
|
||||
open: Math.random() * 1000 + 100,
|
||||
high: Math.random() * 1000 + 100,
|
||||
low: Math.random() * 1000 + 100,
|
||||
close: Math.random() * 1000 + 100,
|
||||
volume: Math.floor(Math.random() * 1000000),
|
||||
source: 'quotemedia'
|
||||
});
|
||||
}
|
||||
|
||||
// Simulate network delay
|
||||
await new Promise(resolve => setTimeout(resolve, 200 + Math.random() * 300));
|
||||
|
||||
return {
|
||||
symbol: payload.symbol,
|
||||
interval: payload.interval || '1d',
|
||||
data,
|
||||
source: 'quotemedia',
|
||||
totalRecords: data.length
|
||||
};
|
||||
},
|
||||
'batch-quotes': async (payload: { symbols: string[]; fields?: string[] }) => {
|
||||
logger.info('Fetching batch quotes from QuoteMedia', {
|
||||
symbols: payload.symbols,
|
||||
count: payload.symbols.length
|
||||
});
|
||||
|
||||
const quotes = payload.symbols.map(symbol => ({
|
||||
symbol,
|
||||
price: Math.random() * 1000 + 100,
|
||||
volume: Math.floor(Math.random() * 1000000),
|
||||
change: (Math.random() - 0.5) * 20,
|
||||
timestamp: new Date().toISOString(),
|
||||
source: 'quotemedia'
|
||||
}));
|
||||
|
||||
// Simulate network delay
|
||||
await new Promise(resolve => setTimeout(resolve, 300 + Math.random() * 200));
|
||||
|
||||
return {
|
||||
quotes,
|
||||
source: 'quotemedia',
|
||||
timestamp: new Date().toISOString(),
|
||||
totalSymbols: payload.symbols.length
|
||||
};
|
||||
}, 'company-profile': async (payload: { symbol: string }) => {
|
||||
logger.info('Fetching company profile from QuoteMedia', { symbol: payload.symbol });
|
||||
|
||||
// Simulate company profile data
|
||||
const profile = {
|
||||
symbol: payload.symbol,
|
||||
companyName: `${payload.symbol} Corporation`,
|
||||
sector: 'Technology',
|
||||
industry: 'Software',
|
||||
description: `${payload.symbol} is a leading technology company.`,
|
||||
marketCap: Math.floor(Math.random() * 1000000000000),
|
||||
employees: Math.floor(Math.random() * 100000),
|
||||
website: `https://www.${payload.symbol.toLowerCase()}.com`,
|
||||
source: 'quotemedia'
|
||||
};
|
||||
|
||||
await new Promise(resolve => setTimeout(resolve, 150 + Math.random() * 100));
|
||||
|
||||
return profile;
|
||||
}, 'options-chain': async (payload: { symbol: string; expiration?: string }) => {
|
||||
logger.info('Fetching options chain from QuoteMedia', {
|
||||
symbol: payload.symbol,
|
||||
expiration: payload.expiration
|
||||
});
|
||||
|
||||
// Generate mock options data
|
||||
const strikes = Array.from({ length: 20 }, (_, i) => 100 + i * 5);
|
||||
const calls = strikes.map(strike => ({
|
||||
strike,
|
||||
bid: Math.random() * 10,
|
||||
ask: Math.random() * 10 + 0.5,
|
||||
volume: Math.floor(Math.random() * 1000),
|
||||
openInterest: Math.floor(Math.random() * 5000)
|
||||
}));
|
||||
|
||||
const puts = strikes.map(strike => ({
|
||||
strike,
|
||||
bid: Math.random() * 10,
|
||||
ask: Math.random() * 10 + 0.5,
|
||||
volume: Math.floor(Math.random() * 1000),
|
||||
openInterest: Math.floor(Math.random() * 5000)
|
||||
}));
|
||||
|
||||
await new Promise(resolve => setTimeout(resolve, 400 + Math.random() * 300));
|
||||
return {
|
||||
symbol: payload.symbol,
|
||||
expiration: payload.expiration || new Date(Date.now() + 30 * 24 * 60 * 60 * 1000).toISOString().split('T')[0],
|
||||
calls,
|
||||
puts,
|
||||
source: 'quotemedia'
|
||||
};
|
||||
}
|
||||
},
|
||||
|
||||
scheduledJobs: [
|
||||
// {
|
||||
// type: 'quotemedia-premium-refresh',
|
||||
// operation: 'batch-quotes',
|
||||
// payload: { symbols: ['AAPL', 'GOOGL', 'MSFT'] },
|
||||
// cronPattern: '*/2 * * * *', // Every 2 minutes
|
||||
// priority: 7,
|
||||
// description: 'Refresh premium quotes with detailed market data'
|
||||
// },
|
||||
// {
|
||||
// type: 'quotemedia-options-update',
|
||||
// operation: 'options-chain',
|
||||
// payload: { symbol: 'SPY' },
|
||||
// cronPattern: '*/10 * * * *', // Every 10 minutes
|
||||
// priority: 5,
|
||||
// description: 'Update options chain data for SPY ETF'
|
||||
// },
|
||||
// {
|
||||
// type: 'quotemedia-profiles',
|
||||
// operation: 'company-profile',
|
||||
// payload: { symbol: 'AAPL' },
|
||||
// cronPattern: '0 9 * * 1-5', // Weekdays at 9 AM
|
||||
// priority: 3,
|
||||
// description: 'Update company profile data'
|
||||
// }
|
||||
]
|
||||
};
|
||||
import { ProviderConfig } from '../services/provider-registry.service';
|
||||
import { getLogger } from '@stock-bot/logger';
|
||||
|
||||
const logger = getLogger('quotemedia-provider');
|
||||
|
||||
export const quotemediaProvider: ProviderConfig = {
|
||||
name: 'quotemedia',
|
||||
service: 'market-data',
|
||||
operations: { 'live-data': async (payload: { symbol: string; fields?: string[] }) => {
|
||||
logger.info('Fetching live data from QuoteMedia', { symbol: payload.symbol });
|
||||
|
||||
// Simulate QuoteMedia API call
|
||||
const mockData = {
|
||||
symbol: payload.symbol,
|
||||
price: Math.random() * 1000 + 100,
|
||||
volume: Math.floor(Math.random() * 1000000),
|
||||
change: (Math.random() - 0.5) * 20,
|
||||
changePercent: (Math.random() - 0.5) * 5,
|
||||
timestamp: new Date().toISOString(),
|
||||
source: 'quotemedia',
|
||||
fields: payload.fields || ['price', 'volume', 'change']
|
||||
};
|
||||
|
||||
// Simulate network delay
|
||||
await new Promise(resolve => setTimeout(resolve, 100 + Math.random() * 200));
|
||||
|
||||
return mockData;
|
||||
},
|
||||
|
||||
'historical-data': async (payload: {
|
||||
symbol: string;
|
||||
from: Date;
|
||||
to: Date;
|
||||
interval?: string;
|
||||
fields?: string[]; }) => {
|
||||
logger.info('Fetching historical data from QuoteMedia', {
|
||||
symbol: payload.symbol,
|
||||
from: payload.from,
|
||||
to: payload.to,
|
||||
interval: payload.interval || '1d'
|
||||
});
|
||||
|
||||
// Generate mock historical data
|
||||
const days = Math.ceil((payload.to.getTime() - payload.from.getTime()) / (1000 * 60 * 60 * 24));
|
||||
const data = [];
|
||||
|
||||
for (let i = 0; i < Math.min(days, 100); i++) {
|
||||
const date = new Date(payload.from.getTime() + i * 24 * 60 * 60 * 1000);
|
||||
data.push({
|
||||
date: date.toISOString().split('T')[0],
|
||||
open: Math.random() * 1000 + 100,
|
||||
high: Math.random() * 1000 + 100,
|
||||
low: Math.random() * 1000 + 100,
|
||||
close: Math.random() * 1000 + 100,
|
||||
volume: Math.floor(Math.random() * 1000000),
|
||||
source: 'quotemedia'
|
||||
});
|
||||
}
|
||||
|
||||
// Simulate network delay
|
||||
await new Promise(resolve => setTimeout(resolve, 200 + Math.random() * 300));
|
||||
|
||||
return {
|
||||
symbol: payload.symbol,
|
||||
interval: payload.interval || '1d',
|
||||
data,
|
||||
source: 'quotemedia',
|
||||
totalRecords: data.length
|
||||
};
|
||||
},
|
||||
'batch-quotes': async (payload: { symbols: string[]; fields?: string[] }) => {
|
||||
logger.info('Fetching batch quotes from QuoteMedia', {
|
||||
symbols: payload.symbols,
|
||||
count: payload.symbols.length
|
||||
});
|
||||
|
||||
const quotes = payload.symbols.map(symbol => ({
|
||||
symbol,
|
||||
price: Math.random() * 1000 + 100,
|
||||
volume: Math.floor(Math.random() * 1000000),
|
||||
change: (Math.random() - 0.5) * 20,
|
||||
timestamp: new Date().toISOString(),
|
||||
source: 'quotemedia'
|
||||
}));
|
||||
|
||||
// Simulate network delay
|
||||
await new Promise(resolve => setTimeout(resolve, 300 + Math.random() * 200));
|
||||
|
||||
return {
|
||||
quotes,
|
||||
source: 'quotemedia',
|
||||
timestamp: new Date().toISOString(),
|
||||
totalSymbols: payload.symbols.length
|
||||
};
|
||||
}, 'company-profile': async (payload: { symbol: string }) => {
|
||||
logger.info('Fetching company profile from QuoteMedia', { symbol: payload.symbol });
|
||||
|
||||
// Simulate company profile data
|
||||
const profile = {
|
||||
symbol: payload.symbol,
|
||||
companyName: `${payload.symbol} Corporation`,
|
||||
sector: 'Technology',
|
||||
industry: 'Software',
|
||||
description: `${payload.symbol} is a leading technology company.`,
|
||||
marketCap: Math.floor(Math.random() * 1000000000000),
|
||||
employees: Math.floor(Math.random() * 100000),
|
||||
website: `https://www.${payload.symbol.toLowerCase()}.com`,
|
||||
source: 'quotemedia'
|
||||
};
|
||||
|
||||
await new Promise(resolve => setTimeout(resolve, 150 + Math.random() * 100));
|
||||
|
||||
return profile;
|
||||
}, 'options-chain': async (payload: { symbol: string; expiration?: string }) => {
|
||||
logger.info('Fetching options chain from QuoteMedia', {
|
||||
symbol: payload.symbol,
|
||||
expiration: payload.expiration
|
||||
});
|
||||
|
||||
// Generate mock options data
|
||||
const strikes = Array.from({ length: 20 }, (_, i) => 100 + i * 5);
|
||||
const calls = strikes.map(strike => ({
|
||||
strike,
|
||||
bid: Math.random() * 10,
|
||||
ask: Math.random() * 10 + 0.5,
|
||||
volume: Math.floor(Math.random() * 1000),
|
||||
openInterest: Math.floor(Math.random() * 5000)
|
||||
}));
|
||||
|
||||
const puts = strikes.map(strike => ({
|
||||
strike,
|
||||
bid: Math.random() * 10,
|
||||
ask: Math.random() * 10 + 0.5,
|
||||
volume: Math.floor(Math.random() * 1000),
|
||||
openInterest: Math.floor(Math.random() * 5000)
|
||||
}));
|
||||
|
||||
await new Promise(resolve => setTimeout(resolve, 400 + Math.random() * 300));
|
||||
return {
|
||||
symbol: payload.symbol,
|
||||
expiration: payload.expiration || new Date(Date.now() + 30 * 24 * 60 * 60 * 1000).toISOString().split('T')[0],
|
||||
calls,
|
||||
puts,
|
||||
source: 'quotemedia'
|
||||
};
|
||||
}
|
||||
},
|
||||
|
||||
scheduledJobs: [
|
||||
// {
|
||||
// type: 'quotemedia-premium-refresh',
|
||||
// operation: 'batch-quotes',
|
||||
// payload: { symbols: ['AAPL', 'GOOGL', 'MSFT'] },
|
||||
// cronPattern: '*/2 * * * *', // Every 2 minutes
|
||||
// priority: 7,
|
||||
// description: 'Refresh premium quotes with detailed market data'
|
||||
// },
|
||||
// {
|
||||
// type: 'quotemedia-options-update',
|
||||
// operation: 'options-chain',
|
||||
// payload: { symbol: 'SPY' },
|
||||
// cronPattern: '*/10 * * * *', // Every 10 minutes
|
||||
// priority: 5,
|
||||
// description: 'Update options chain data for SPY ETF'
|
||||
// },
|
||||
// {
|
||||
// type: 'quotemedia-profiles',
|
||||
// operation: 'company-profile',
|
||||
// payload: { symbol: 'AAPL' },
|
||||
// cronPattern: '0 9 * * 1-5', // Weekdays at 9 AM
|
||||
// priority: 3,
|
||||
// description: 'Update company profile data'
|
||||
// }
|
||||
]
|
||||
};
|
||||
|
|
|
|||
|
|
@ -1,249 +1,249 @@
|
|||
import { ProviderConfig } from '../services/provider-registry.service';
|
||||
import { getLogger } from '@stock-bot/logger';
|
||||
|
||||
const logger = getLogger('yahoo-provider');
|
||||
|
||||
export const yahooProvider: ProviderConfig = {
|
||||
name: 'yahoo-finance',
|
||||
service: 'market-data',
|
||||
operations: {
|
||||
'live-data': async (payload: { symbol: string; modules?: string[] }) => {
|
||||
|
||||
|
||||
logger.info('Fetching live data from Yahoo Finance', { symbol: payload.symbol });
|
||||
|
||||
// Simulate Yahoo Finance API call
|
||||
const mockData = {
|
||||
symbol: payload.symbol,
|
||||
regularMarketPrice: Math.random() * 1000 + 100,
|
||||
regularMarketVolume: Math.floor(Math.random() * 1000000),
|
||||
regularMarketChange: (Math.random() - 0.5) * 20,
|
||||
regularMarketChangePercent: (Math.random() - 0.5) * 5,
|
||||
preMarketPrice: Math.random() * 1000 + 100,
|
||||
postMarketPrice: Math.random() * 1000 + 100,
|
||||
marketCap: Math.floor(Math.random() * 1000000000000),
|
||||
peRatio: Math.random() * 50 + 5,
|
||||
dividendYield: Math.random() * 0.1,
|
||||
fiftyTwoWeekHigh: Math.random() * 1200 + 100,
|
||||
fiftyTwoWeekLow: Math.random() * 800 + 50,
|
||||
timestamp: Date.now() / 1000,
|
||||
source: 'yahoo-finance',
|
||||
modules: payload.modules || ['price', 'summaryDetail']
|
||||
};
|
||||
|
||||
// Simulate network delay
|
||||
await new Promise(resolve => setTimeout(resolve, 150 + Math.random() * 250));
|
||||
|
||||
return mockData;
|
||||
},
|
||||
|
||||
'historical-data': async (payload: {
|
||||
symbol: string;
|
||||
period1: number;
|
||||
period2: number;
|
||||
interval?: string;
|
||||
events?: string; }) => {
|
||||
const { getLogger } = await import('@stock-bot/logger');
|
||||
const logger = getLogger('yahoo-provider');
|
||||
|
||||
logger.info('Fetching historical data from Yahoo Finance', {
|
||||
symbol: payload.symbol,
|
||||
period1: payload.period1,
|
||||
period2: payload.period2,
|
||||
interval: payload.interval || '1d'
|
||||
});
|
||||
|
||||
// Generate mock historical data
|
||||
const days = Math.ceil((payload.period2 - payload.period1) / (24 * 60 * 60));
|
||||
const data = [];
|
||||
|
||||
for (let i = 0; i < Math.min(days, 100); i++) {
|
||||
const timestamp = payload.period1 + i * 24 * 60 * 60;
|
||||
data.push({
|
||||
timestamp,
|
||||
date: new Date(timestamp * 1000).toISOString().split('T')[0],
|
||||
open: Math.random() * 1000 + 100,
|
||||
high: Math.random() * 1000 + 100,
|
||||
low: Math.random() * 1000 + 100,
|
||||
close: Math.random() * 1000 + 100,
|
||||
adjClose: Math.random() * 1000 + 100,
|
||||
volume: Math.floor(Math.random() * 1000000),
|
||||
source: 'yahoo-finance'
|
||||
});
|
||||
}
|
||||
|
||||
// Simulate network delay
|
||||
await new Promise(resolve => setTimeout(resolve, 250 + Math.random() * 350));
|
||||
|
||||
return {
|
||||
symbol: payload.symbol,
|
||||
interval: payload.interval || '1d',
|
||||
timestamps: data.map(d => d.timestamp),
|
||||
indicators: {
|
||||
quote: [{
|
||||
open: data.map(d => d.open),
|
||||
high: data.map(d => d.high),
|
||||
low: data.map(d => d.low),
|
||||
close: data.map(d => d.close),
|
||||
volume: data.map(d => d.volume)
|
||||
}],
|
||||
adjclose: [{
|
||||
adjclose: data.map(d => d.adjClose)
|
||||
}]
|
||||
},
|
||||
source: 'yahoo-finance',
|
||||
totalRecords: data.length
|
||||
};
|
||||
},
|
||||
'search': async (payload: { query: string; quotesCount?: number; newsCount?: number }) => {
|
||||
const { getLogger } = await import('@stock-bot/logger');
|
||||
const logger = getLogger('yahoo-provider');
|
||||
|
||||
logger.info('Searching Yahoo Finance', { query: payload.query });
|
||||
|
||||
// Generate mock search results
|
||||
const quotes = Array.from({ length: payload.quotesCount || 5 }, (_, i) => ({
|
||||
symbol: `${payload.query.toUpperCase()}${i}`,
|
||||
shortname: `${payload.query} Company ${i}`,
|
||||
longname: `${payload.query} Corporation ${i}`,
|
||||
exchDisp: 'NASDAQ',
|
||||
typeDisp: 'Equity',
|
||||
source: 'yahoo-finance'
|
||||
}));
|
||||
|
||||
const news = Array.from({ length: payload.newsCount || 3 }, (_, i) => ({
|
||||
uuid: `news-${i}-${Date.now()}`,
|
||||
title: `${payload.query} News Article ${i}`,
|
||||
publisher: 'Financial News',
|
||||
providerPublishTime: Date.now() - i * 3600000,
|
||||
type: 'STORY',
|
||||
source: 'yahoo-finance'
|
||||
}));
|
||||
|
||||
await new Promise(resolve => setTimeout(resolve, 200 + Math.random() * 200));
|
||||
|
||||
return {
|
||||
quotes,
|
||||
news,
|
||||
totalQuotes: quotes.length,
|
||||
totalNews: news.length,
|
||||
source: 'yahoo-finance'
|
||||
};
|
||||
}, 'financials': async (payload: { symbol: string; type?: 'income' | 'balance' | 'cash' }) => {
|
||||
const { getLogger } = await import('@stock-bot/logger');
|
||||
const logger = getLogger('yahoo-provider');
|
||||
|
||||
logger.info('Fetching financials from Yahoo Finance', {
|
||||
symbol: payload.symbol,
|
||||
type: payload.type || 'income'
|
||||
});
|
||||
|
||||
// Generate mock financial data
|
||||
const financials = {
|
||||
symbol: payload.symbol,
|
||||
type: payload.type || 'income',
|
||||
currency: 'USD',
|
||||
annual: Array.from({ length: 4 }, (_, i) => ({
|
||||
fiscalYear: 2024 - i,
|
||||
revenue: Math.floor(Math.random() * 100000000000),
|
||||
netIncome: Math.floor(Math.random() * 10000000000),
|
||||
totalAssets: Math.floor(Math.random() * 500000000000),
|
||||
totalDebt: Math.floor(Math.random() * 50000000000)
|
||||
})),
|
||||
quarterly: Array.from({ length: 4 }, (_, i) => ({
|
||||
fiscalQuarter: `Q${4-i} 2024`,
|
||||
revenue: Math.floor(Math.random() * 25000000000),
|
||||
netIncome: Math.floor(Math.random() * 2500000000)
|
||||
})),
|
||||
source: 'yahoo-finance'
|
||||
};
|
||||
|
||||
await new Promise(resolve => setTimeout(resolve, 300 + Math.random() * 200));
|
||||
|
||||
return financials;
|
||||
}, 'earnings': async (payload: { symbol: string; period?: 'annual' | 'quarterly' }) => {
|
||||
const { getLogger } = await import('@stock-bot/logger');
|
||||
const logger = getLogger('yahoo-provider');
|
||||
|
||||
logger.info('Fetching earnings from Yahoo Finance', {
|
||||
symbol: payload.symbol,
|
||||
period: payload.period || 'quarterly'
|
||||
});
|
||||
|
||||
// Generate mock earnings data
|
||||
const earnings = {
|
||||
symbol: payload.symbol,
|
||||
period: payload.period || 'quarterly',
|
||||
earnings: Array.from({ length: 8 }, (_, i) => ({
|
||||
quarter: `Q${(i % 4) + 1} ${2024 - Math.floor(i/4)}`,
|
||||
epsEstimate: Math.random() * 5,
|
||||
epsActual: Math.random() * 5,
|
||||
revenueEstimate: Math.floor(Math.random() * 50000000000),
|
||||
revenueActual: Math.floor(Math.random() * 50000000000),
|
||||
surprise: (Math.random() - 0.5) * 2
|
||||
})),
|
||||
source: 'yahoo-finance'
|
||||
};
|
||||
|
||||
await new Promise(resolve => setTimeout(resolve, 250 + Math.random() * 150));
|
||||
|
||||
return earnings;
|
||||
}, 'recommendations': async (payload: { symbol: string }) => {
|
||||
const { getLogger } = await import('@stock-bot/logger');
|
||||
const logger = getLogger('yahoo-provider');
|
||||
|
||||
logger.info('Fetching recommendations from Yahoo Finance', { symbol: payload.symbol });
|
||||
|
||||
// Generate mock recommendations
|
||||
const recommendations = {
|
||||
symbol: payload.symbol,
|
||||
current: {
|
||||
strongBuy: Math.floor(Math.random() * 10),
|
||||
buy: Math.floor(Math.random() * 15),
|
||||
hold: Math.floor(Math.random() * 20),
|
||||
sell: Math.floor(Math.random() * 5),
|
||||
strongSell: Math.floor(Math.random() * 3)
|
||||
},
|
||||
trend: Array.from({ length: 4 }, (_, i) => ({
|
||||
period: `${i}m`,
|
||||
strongBuy: Math.floor(Math.random() * 10),
|
||||
buy: Math.floor(Math.random() * 15),
|
||||
hold: Math.floor(Math.random() * 20),
|
||||
sell: Math.floor(Math.random() * 5),
|
||||
strongSell: Math.floor(Math.random() * 3)
|
||||
})),
|
||||
source: 'yahoo-finance'
|
||||
};
|
||||
|
||||
await new Promise(resolve => setTimeout(resolve, 180 + Math.random() * 120));
|
||||
return recommendations;
|
||||
}
|
||||
},
|
||||
|
||||
scheduledJobs: [
|
||||
// {
|
||||
// type: 'yahoo-market-refresh',
|
||||
// operation: 'live-data',
|
||||
// payload: { symbol: 'AAPL' },
|
||||
// cronPattern: '*/1 * * * *', // Every minute
|
||||
// priority: 8,
|
||||
// description: 'Refresh Apple stock price from Yahoo Finance'
|
||||
// },
|
||||
// {
|
||||
// type: 'yahoo-sp500-update',
|
||||
// operation: 'live-data',
|
||||
// payload: { symbol: 'SPY' },
|
||||
// cronPattern: '*/2 * * * *', // Every 2 minutes
|
||||
// priority: 9,
|
||||
// description: 'Update S&P 500 ETF price'
|
||||
// },
|
||||
// {
|
||||
// type: 'yahoo-earnings-check',
|
||||
// operation: 'earnings',
|
||||
// payload: { symbol: 'AAPL' },
|
||||
// cronPattern: '0 16 * * 1-5', // Weekdays at 4 PM (market close)
|
||||
// priority: 6,
|
||||
// description: 'Check earnings data for Apple'
|
||||
// }
|
||||
]
|
||||
};
|
||||
import { ProviderConfig } from '../services/provider-registry.service';
|
||||
import { getLogger } from '@stock-bot/logger';
|
||||
|
||||
const logger = getLogger('yahoo-provider');
|
||||
|
||||
export const yahooProvider: ProviderConfig = {
|
||||
name: 'yahoo-finance',
|
||||
service: 'market-data',
|
||||
operations: {
|
||||
'live-data': async (payload: { symbol: string; modules?: string[] }) => {
|
||||
|
||||
|
||||
logger.info('Fetching live data from Yahoo Finance', { symbol: payload.symbol });
|
||||
|
||||
// Simulate Yahoo Finance API call
|
||||
const mockData = {
|
||||
symbol: payload.symbol,
|
||||
regularMarketPrice: Math.random() * 1000 + 100,
|
||||
regularMarketVolume: Math.floor(Math.random() * 1000000),
|
||||
regularMarketChange: (Math.random() - 0.5) * 20,
|
||||
regularMarketChangePercent: (Math.random() - 0.5) * 5,
|
||||
preMarketPrice: Math.random() * 1000 + 100,
|
||||
postMarketPrice: Math.random() * 1000 + 100,
|
||||
marketCap: Math.floor(Math.random() * 1000000000000),
|
||||
peRatio: Math.random() * 50 + 5,
|
||||
dividendYield: Math.random() * 0.1,
|
||||
fiftyTwoWeekHigh: Math.random() * 1200 + 100,
|
||||
fiftyTwoWeekLow: Math.random() * 800 + 50,
|
||||
timestamp: Date.now() / 1000,
|
||||
source: 'yahoo-finance',
|
||||
modules: payload.modules || ['price', 'summaryDetail']
|
||||
};
|
||||
|
||||
// Simulate network delay
|
||||
await new Promise(resolve => setTimeout(resolve, 150 + Math.random() * 250));
|
||||
|
||||
return mockData;
|
||||
},
|
||||
|
||||
'historical-data': async (payload: {
|
||||
symbol: string;
|
||||
period1: number;
|
||||
period2: number;
|
||||
interval?: string;
|
||||
events?: string; }) => {
|
||||
const { getLogger } = await import('@stock-bot/logger');
|
||||
const logger = getLogger('yahoo-provider');
|
||||
|
||||
logger.info('Fetching historical data from Yahoo Finance', {
|
||||
symbol: payload.symbol,
|
||||
period1: payload.period1,
|
||||
period2: payload.period2,
|
||||
interval: payload.interval || '1d'
|
||||
});
|
||||
|
||||
// Generate mock historical data
|
||||
const days = Math.ceil((payload.period2 - payload.period1) / (24 * 60 * 60));
|
||||
const data = [];
|
||||
|
||||
for (let i = 0; i < Math.min(days, 100); i++) {
|
||||
const timestamp = payload.period1 + i * 24 * 60 * 60;
|
||||
data.push({
|
||||
timestamp,
|
||||
date: new Date(timestamp * 1000).toISOString().split('T')[0],
|
||||
open: Math.random() * 1000 + 100,
|
||||
high: Math.random() * 1000 + 100,
|
||||
low: Math.random() * 1000 + 100,
|
||||
close: Math.random() * 1000 + 100,
|
||||
adjClose: Math.random() * 1000 + 100,
|
||||
volume: Math.floor(Math.random() * 1000000),
|
||||
source: 'yahoo-finance'
|
||||
});
|
||||
}
|
||||
|
||||
// Simulate network delay
|
||||
await new Promise(resolve => setTimeout(resolve, 250 + Math.random() * 350));
|
||||
|
||||
return {
|
||||
symbol: payload.symbol,
|
||||
interval: payload.interval || '1d',
|
||||
timestamps: data.map(d => d.timestamp),
|
||||
indicators: {
|
||||
quote: [{
|
||||
open: data.map(d => d.open),
|
||||
high: data.map(d => d.high),
|
||||
low: data.map(d => d.low),
|
||||
close: data.map(d => d.close),
|
||||
volume: data.map(d => d.volume)
|
||||
}],
|
||||
adjclose: [{
|
||||
adjclose: data.map(d => d.adjClose)
|
||||
}]
|
||||
},
|
||||
source: 'yahoo-finance',
|
||||
totalRecords: data.length
|
||||
};
|
||||
},
|
||||
'search': async (payload: { query: string; quotesCount?: number; newsCount?: number }) => {
|
||||
const { getLogger } = await import('@stock-bot/logger');
|
||||
const logger = getLogger('yahoo-provider');
|
||||
|
||||
logger.info('Searching Yahoo Finance', { query: payload.query });
|
||||
|
||||
// Generate mock search results
|
||||
const quotes = Array.from({ length: payload.quotesCount || 5 }, (_, i) => ({
|
||||
symbol: `${payload.query.toUpperCase()}${i}`,
|
||||
shortname: `${payload.query} Company ${i}`,
|
||||
longname: `${payload.query} Corporation ${i}`,
|
||||
exchDisp: 'NASDAQ',
|
||||
typeDisp: 'Equity',
|
||||
source: 'yahoo-finance'
|
||||
}));
|
||||
|
||||
const news = Array.from({ length: payload.newsCount || 3 }, (_, i) => ({
|
||||
uuid: `news-${i}-${Date.now()}`,
|
||||
title: `${payload.query} News Article ${i}`,
|
||||
publisher: 'Financial News',
|
||||
providerPublishTime: Date.now() - i * 3600000,
|
||||
type: 'STORY',
|
||||
source: 'yahoo-finance'
|
||||
}));
|
||||
|
||||
await new Promise(resolve => setTimeout(resolve, 200 + Math.random() * 200));
|
||||
|
||||
return {
|
||||
quotes,
|
||||
news,
|
||||
totalQuotes: quotes.length,
|
||||
totalNews: news.length,
|
||||
source: 'yahoo-finance'
|
||||
};
|
||||
}, 'financials': async (payload: { symbol: string; type?: 'income' | 'balance' | 'cash' }) => {
|
||||
const { getLogger } = await import('@stock-bot/logger');
|
||||
const logger = getLogger('yahoo-provider');
|
||||
|
||||
logger.info('Fetching financials from Yahoo Finance', {
|
||||
symbol: payload.symbol,
|
||||
type: payload.type || 'income'
|
||||
});
|
||||
|
||||
// Generate mock financial data
|
||||
const financials = {
|
||||
symbol: payload.symbol,
|
||||
type: payload.type || 'income',
|
||||
currency: 'USD',
|
||||
annual: Array.from({ length: 4 }, (_, i) => ({
|
||||
fiscalYear: 2024 - i,
|
||||
revenue: Math.floor(Math.random() * 100000000000),
|
||||
netIncome: Math.floor(Math.random() * 10000000000),
|
||||
totalAssets: Math.floor(Math.random() * 500000000000),
|
||||
totalDebt: Math.floor(Math.random() * 50000000000)
|
||||
})),
|
||||
quarterly: Array.from({ length: 4 }, (_, i) => ({
|
||||
fiscalQuarter: `Q${4-i} 2024`,
|
||||
revenue: Math.floor(Math.random() * 25000000000),
|
||||
netIncome: Math.floor(Math.random() * 2500000000)
|
||||
})),
|
||||
source: 'yahoo-finance'
|
||||
};
|
||||
|
||||
await new Promise(resolve => setTimeout(resolve, 300 + Math.random() * 200));
|
||||
|
||||
return financials;
|
||||
}, 'earnings': async (payload: { symbol: string; period?: 'annual' | 'quarterly' }) => {
|
||||
const { getLogger } = await import('@stock-bot/logger');
|
||||
const logger = getLogger('yahoo-provider');
|
||||
|
||||
logger.info('Fetching earnings from Yahoo Finance', {
|
||||
symbol: payload.symbol,
|
||||
period: payload.period || 'quarterly'
|
||||
});
|
||||
|
||||
// Generate mock earnings data
|
||||
const earnings = {
|
||||
symbol: payload.symbol,
|
||||
period: payload.period || 'quarterly',
|
||||
earnings: Array.from({ length: 8 }, (_, i) => ({
|
||||
quarter: `Q${(i % 4) + 1} ${2024 - Math.floor(i/4)}`,
|
||||
epsEstimate: Math.random() * 5,
|
||||
epsActual: Math.random() * 5,
|
||||
revenueEstimate: Math.floor(Math.random() * 50000000000),
|
||||
revenueActual: Math.floor(Math.random() * 50000000000),
|
||||
surprise: (Math.random() - 0.5) * 2
|
||||
})),
|
||||
source: 'yahoo-finance'
|
||||
};
|
||||
|
||||
await new Promise(resolve => setTimeout(resolve, 250 + Math.random() * 150));
|
||||
|
||||
return earnings;
|
||||
}, 'recommendations': async (payload: { symbol: string }) => {
|
||||
const { getLogger } = await import('@stock-bot/logger');
|
||||
const logger = getLogger('yahoo-provider');
|
||||
|
||||
logger.info('Fetching recommendations from Yahoo Finance', { symbol: payload.symbol });
|
||||
|
||||
// Generate mock recommendations
|
||||
const recommendations = {
|
||||
symbol: payload.symbol,
|
||||
current: {
|
||||
strongBuy: Math.floor(Math.random() * 10),
|
||||
buy: Math.floor(Math.random() * 15),
|
||||
hold: Math.floor(Math.random() * 20),
|
||||
sell: Math.floor(Math.random() * 5),
|
||||
strongSell: Math.floor(Math.random() * 3)
|
||||
},
|
||||
trend: Array.from({ length: 4 }, (_, i) => ({
|
||||
period: `${i}m`,
|
||||
strongBuy: Math.floor(Math.random() * 10),
|
||||
buy: Math.floor(Math.random() * 15),
|
||||
hold: Math.floor(Math.random() * 20),
|
||||
sell: Math.floor(Math.random() * 5),
|
||||
strongSell: Math.floor(Math.random() * 3)
|
||||
})),
|
||||
source: 'yahoo-finance'
|
||||
};
|
||||
|
||||
await new Promise(resolve => setTimeout(resolve, 180 + Math.random() * 120));
|
||||
return recommendations;
|
||||
}
|
||||
},
|
||||
|
||||
scheduledJobs: [
|
||||
// {
|
||||
// type: 'yahoo-market-refresh',
|
||||
// operation: 'live-data',
|
||||
// payload: { symbol: 'AAPL' },
|
||||
// cronPattern: '*/1 * * * *', // Every minute
|
||||
// priority: 8,
|
||||
// description: 'Refresh Apple stock price from Yahoo Finance'
|
||||
// },
|
||||
// {
|
||||
// type: 'yahoo-sp500-update',
|
||||
// operation: 'live-data',
|
||||
// payload: { symbol: 'SPY' },
|
||||
// cronPattern: '*/2 * * * *', // Every 2 minutes
|
||||
// priority: 9,
|
||||
// description: 'Update S&P 500 ETF price'
|
||||
// },
|
||||
// {
|
||||
// type: 'yahoo-earnings-check',
|
||||
// operation: 'earnings',
|
||||
// payload: { symbol: 'AAPL' },
|
||||
// cronPattern: '0 16 * * 1-5', // Weekdays at 4 PM (market close)
|
||||
// priority: 6,
|
||||
// description: 'Check earnings data for Apple'
|
||||
// }
|
||||
]
|
||||
};
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue