Fully cleaned things up; a few more things still to go.

This commit is contained in:
Boki 2025-06-14 15:42:47 -04:00
parent e5170b1c78
commit ad5e353ec3
11 changed files with 180 additions and 362 deletions

View file

@ -2,7 +2,7 @@
* Health check routes
*/
import { Hono } from 'hono';
import { queueManager } from '../services/queue.service';
import { queueManager } from '../services/queue-manager.service';
export const healthRoutes = new Hono();
@ -14,7 +14,7 @@ healthRoutes.get('/health', c => {
timestamp: new Date().toISOString(),
queue: {
status: 'running',
workers: queueManager.getWorkerCount(),
name: queueManager.getQueueName(),
},
});
});

View file

@ -3,6 +3,3 @@
*/
export { healthRoutes } from './health.routes';
export { queueRoutes } from './queue.routes';
export { marketDataRoutes } from './market-data.routes';
export { proxyRoutes } from './proxy.routes';
export { testRoutes } from './test.routes';

View file

@ -3,7 +3,7 @@
*/
import { Hono } from 'hono';
import { getLogger } from '@stock-bot/logger';
import { queueManager } from '../services/queue-manager.service';
import { processItems, queueManager } from '../services/queue-manager.service';
const logger = getLogger('market-data-routes');
@ -16,7 +16,7 @@ marketDataRoutes.get('/api/live/:symbol', async c => {
try {
// Queue job for live data using Yahoo provider
const job = await queueManager.addJob('market-data-live', {
const job = await queueManager.add('market-data-live', {
type: 'market-data-live',
provider: 'yahoo-finance',
operation: 'live-data',
@ -46,7 +46,7 @@ marketDataRoutes.get('/api/historical/:symbol', async c => {
const toDate = to ? new Date(to) : new Date(); // Now
// Queue job for historical data using Yahoo provider
const job = await queueManager.addJob('market-data-historical', {
const job = await queueManager.add('market-data-historical', {
type: 'market-data-historical',
provider: 'yahoo-finance',
operation: 'historical-data',
@ -94,7 +94,7 @@ marketDataRoutes.post('/api/process-symbols', async c => {
useBatching,
});
const result = await queueManager.processSymbols(symbols, {
const result = await processItems(symbols, queueManager, {
totalDelayMs,
useBatching,
batchSize,

View file

@ -1,76 +0,0 @@
/**
* Proxy management routes
*/
import { Hono } from 'hono';
import { getLogger } from '@stock-bot/logger';
import { queueManager } from '../services/queue.service';
const logger = getLogger('proxy-routes');
export const proxyRoutes = new Hono();
// Proxy management endpoints
// POST /api/proxy/fetch — enqueue a fetch-and-check job for the proxy provider.
proxyRoutes.post('/api/proxy/fetch', async c => {
  // Medium-priority job; no payload needed for a blanket fetch.
  const jobRequest = {
    type: 'proxy-fetch',
    provider: 'proxy-provider',
    operation: 'fetch-and-check',
    payload: {},
    priority: 5,
  };
  try {
    const queued = await queueManager.addJob(jobRequest);
    return c.json({
      status: 'success',
      jobId: queued.id,
      message: 'Proxy fetch job queued',
    });
  } catch (err) {
    logger.error('Failed to queue proxy fetch', { error: err });
    return c.json({ status: 'error', message: 'Failed to queue proxy fetch' }, 500);
  }
});
// POST /api/proxy/check — enqueue a validation job for a caller-supplied proxy list.
proxyRoutes.post('/api/proxy/check', async c => {
  try {
    // Body is expected to carry a `proxies` array — TODO confirm against callers.
    const body = await c.req.json();
    const { proxies } = body;
    // Higher priority (8) than the blanket fetch: these are explicit requests.
    const queued = await queueManager.addJob({
      type: 'proxy-check',
      provider: 'proxy-provider',
      operation: 'check-specific',
      payload: { proxies },
      priority: 8,
    });
    return c.json({
      status: 'success',
      jobId: queued.id,
      message: `Proxy check job queued for ${proxies.length} proxies`,
    });
  } catch (err) {
    logger.error('Failed to queue proxy check', { error: err });
    return c.json({ status: 'error', message: 'Failed to queue proxy check' }, 500);
  }
});
// Get proxy stats via queue
// GET /api/proxy/stats — the stats themselves arrive asynchronously via the queued job.
proxyRoutes.get('/api/proxy/stats', async c => {
  try {
    // Low priority (3): stats collection is background work.
    const queued = await queueManager.addJob({
      type: 'proxy-stats',
      provider: 'proxy-provider',
      operation: 'get-stats',
      payload: {},
      priority: 3,
    });
    return c.json({
      status: 'success',
      jobId: queued.id,
      message: 'Proxy stats job queued',
    });
  } catch (err) {
    logger.error('Failed to queue proxy stats', { error: err });
    return c.json({ status: 'error', message: 'Failed to queue proxy stats' }, 500);
  }
});

View file

@ -23,7 +23,7 @@ queueRoutes.get('/api/queue/status', async c => {
queueRoutes.post('/api/queue/job', async c => {
try {
const { name, data, options } = await c.req.json();
const job = await queueManager.addJob(name, data, options);
const job = await queueManager.add(name, data, options);
return c.json({ status: 'success', jobId: job.id });
} catch (error) {
logger.error('Failed to add job', { error });

View file

@ -1,87 +0,0 @@
/**
* Test and development routes for batch processing
*/
import { Hono } from 'hono';
import { getLogger } from '@stock-bot/logger';
import { queueManager } from '../services/queue.service';
const logger = getLogger('test-routes');
export const testRoutes = new Hono();
// Test endpoint for new functional batch processing
// POST /api/test/batch-symbols — fan a symbols array out into delayed queue jobs.
testRoutes.post('/api/test/batch-symbols', async c => {
  try {
    const body = await c.req.json();
    const { symbols, useBatching = false, totalDelayHours = 1 } = body;
    // Lazy-loaded so the helper module is only pulled in when this route is hit.
    const { processItems } = await import('../utils/batch-helpers');
    if (!symbols || !Array.isArray(symbols)) {
      return c.json({ status: 'error', message: 'symbols array is required' }, 400);
    }
    // One payload per symbol, tagged with its position and the enqueue time.
    const toPayload = (symbol, index) => ({
      symbol,
      index,
      timestamp: new Date().toISOString(),
    });
    const outcome = await processItems(symbols, toPayload, queueManager, {
      totalDelayHours,
      useBatching,
      batchSize: 10,
      priority: 1,
      provider: 'test-provider',
      operation: 'live-data',
    });
    return c.json({
      status: 'success',
      message: 'Batch processing started',
      result: outcome,
    });
  } catch (err) {
    logger.error('Failed to start batch symbol processing', { error: err });
    return c.json({ status: 'error', message: 'Failed to start batch processing' }, 500);
  }
});
// POST /api/test/batch-custom — same fan-out as batch-symbols, but for arbitrary items.
testRoutes.post('/api/test/batch-custom', async c => {
  try {
    const body = await c.req.json();
    // Shorter default spread (0.5h) than the symbols endpoint.
    const { items, useBatching = false, totalDelayHours = 0.5 } = body;
    // Lazy-loaded so the helper module is only pulled in when this route is hit.
    const { processItems } = await import('../utils/batch-helpers');
    if (!items || !Array.isArray(items)) {
      return c.json({ status: 'error', message: 'items array is required' }, 400);
    }
    // Wrap each item with its processing index and the enqueue time.
    const toPayload = (item, index) => ({
      originalItem: item,
      processIndex: index,
      timestamp: new Date().toISOString(),
    });
    const outcome = await processItems(items, toPayload, queueManager, {
      totalDelayHours,
      useBatching,
      batchSize: 5,
      priority: 1,
      provider: 'test-provider',
      operation: 'custom-test',
    });
    return c.json({
      status: 'success',
      message: 'Custom batch processing started',
      result: outcome,
    });
  } catch (err) {
    logger.error('Failed to start custom batch processing', { error: err });
    return c.json({ status: 'error', message: 'Failed to start custom batch processing' }, 500);
  }
});