Huge refactor to remove dependency hell and add a type-safe container

This commit is contained in:
Boki 2025-06-24 09:37:51 -04:00
parent 28b9822d55
commit 843a7b9b9b
148 changed files with 3603 additions and 2378 deletions

View file

@ -1,34 +1,34 @@
/**
* Service Container Setup for Web API
* Configures dependency injection for the web API service
*/
import type { IServiceContainer } from '@stock-bot/handlers';
import { getLogger } from '@stock-bot/logger';
import type { AppConfig } from '@stock-bot/config';
const logger = getLogger('web-api-container');
/**
* Configure the service container for web API workloads
*/
/**
 * Configure the service container for web API workloads.
 *
 * @param config - application configuration (only `environment` is read here)
 * @param container - pre-built service container; returned unchanged
 * @returns the same container instance that was passed in
 */
export function setupServiceContainer(
config: AppConfig,
container: IServiceContainer
): IServiceContainer {
logger.info('Configuring web API service container...');
// Web API specific configuration
// This service mainly reads data, so smaller pool sizes are fine
// NOTE(review): poolSizes is computed and logged but never applied to the
// container — presumably the builder already sized the pools; confirm that
// these numbers match what the builder actually uses.
const poolSizes = {
mongodb: config.environment === 'production' ? 20 : 10,
postgres: config.environment === 'production' ? 30 : 15,
cache: config.environment === 'production' ? 20 : 10,
};
logger.info('Web API pool sizes configured', poolSizes);
// The container is already configured with connections
// Just return it with our logging
return container;
}
/**
* Service Container Setup for Web API
* Configures dependency injection for the web API service
*/
import type { AppConfig } from '@stock-bot/config';
import type { IServiceContainer } from '@stock-bot/handlers';
import { getLogger } from '@stock-bot/logger';
const logger = getLogger('web-api-container');
/**
* Configure the service container for web API workloads
*/
/**
 * Configure the service container for web API workloads.
 *
 * The container arrives fully built; this function only emits diagnostic
 * logging (including the pool sizes chosen for this environment) and hands
 * the same instance back to the caller.
 *
 * @param config - application configuration (only `environment` is read)
 * @param container - pre-configured service container, returned as-is
 * @returns the unchanged container instance
 */
export function setupServiceContainer(
config: AppConfig,
container: IServiceContainer
): IServiceContainer {
logger.info('Configuring web API service container...');

// This service is read-heavy, so modest pool sizes are sufficient.
const isProduction = config.environment === 'production';
const poolSizes = {
mongodb: isProduction ? 20 : 10,
postgres: isProduction ? 30 : 15,
cache: isProduction ? 20 : 10,
};
logger.info('Web API pool sizes configured', poolSizes);

// Connections were established by the builder; pass the container through.
return container;
}

View file

@ -3,10 +3,9 @@
* Simplified entry point using ServiceApplication framework
*/
import { initializeStockConfig } from '@stock-bot/stock-config';
import { ServiceApplication } from '@stock-bot/di';
import { getLogger } from '@stock-bot/logger';
import { initializeStockConfig } from '@stock-bot/stock-config';
// Local imports
import { createRoutes } from './routes/create-routes';
@ -49,7 +48,7 @@ const app = new ServiceApplication(
},
{
// Custom lifecycle hooks
onStarted: (_port) => {
onStarted: _port => {
const logger = getLogger('web-api');
logger.info('Web API service startup initiated with ServiceApplication framework');
},
@ -59,7 +58,7 @@ const app = new ServiceApplication(
// Container factory function
async function createContainer(config: any) {
const { ServiceContainerBuilder } = await import('@stock-bot/di');
const container = await new ServiceContainerBuilder()
.withConfig(config)
.withOptions({
@ -72,7 +71,7 @@ async function createContainer(config: any) {
enableProxy: false, // Web API doesn't need proxy
})
.build(); // This automatically initializes services
return container;
}
@ -81,4 +80,4 @@ app.start(createContainer, createRoutes).catch(error => {
const logger = getLogger('web-api');
logger.fatal('Failed to start web API service', { error });
process.exit(1);
});
});

View file

@ -5,8 +5,8 @@
import { Hono } from 'hono';
import type { IServiceContainer } from '@stock-bot/handlers';
import { createHealthRoutes } from './health.routes';
import { createExchangeRoutes } from './exchange.routes';
import { createHealthRoutes } from './health.routes';
import { createMonitoringRoutes } from './monitoring.routes';
import { createPipelineRoutes } from './pipeline.routes';
@ -26,4 +26,4 @@ export function createRoutes(container: IServiceContainer): Hono {
app.route('/api/pipeline', pipelineRoutes);
return app;
}
}

View file

@ -2,8 +2,8 @@
* Exchange management routes - Refactored
*/
import { Hono } from 'hono';
import type { IServiceContainer } from '@stock-bot/types';
import { getLogger } from '@stock-bot/logger';
import type { IServiceContainer } from '@stock-bot/handlers';
import { createExchangeService } from '../services/exchange.service';
import { createSuccessResponse, handleError } from '../utils/error-handler';
import {
@ -259,4 +259,4 @@ export function createExchangeRoutes(container: IServiceContainer) {
});
return exchangeRoutes;
}
}

View file

@ -2,8 +2,8 @@
* Health check routes factory
*/
import { Hono } from 'hono';
import { getLogger } from '@stock-bot/logger';
import type { IServiceContainer } from '@stock-bot/handlers';
import { getLogger } from '@stock-bot/logger';
const logger = getLogger('health-routes');
@ -70,7 +70,10 @@ export function createHealthRoutes(container: IServiceContainer) {
health.checks.postgresql = { status: 'healthy', message: 'Connected and responsive' };
logger.debug('PostgreSQL health check passed');
} else {
health.checks.postgresql = { status: 'unhealthy', message: 'PostgreSQL client not available' };
health.checks.postgresql = {
status: 'unhealthy',
message: 'PostgreSQL client not available',
};
logger.warn('PostgreSQL health check failed - client not available');
}
} catch (error) {
@ -108,4 +111,4 @@ export function createHealthRoutes(container: IServiceContainer) {
}
// Export legacy routes for backward compatibility during migration
export const healthRoutes = createHealthRoutes({} as IServiceContainer);
export const healthRoutes = createHealthRoutes({} as IServiceContainer);

View file

@ -13,167 +13,200 @@ export function createMonitoringRoutes(container: IServiceContainer) {
/**
* Get overall system health
*/
monitoring.get('/', async (c) => {
monitoring.get('/', async c => {
try {
const health = await monitoringService.getSystemHealth();
// Set appropriate status code based on health
const statusCode = health.status === 'healthy' ? 200 :
health.status === 'degraded' ? 503 : 500;
const statusCode =
health.status === 'healthy' ? 200 : health.status === 'degraded' ? 503 : 500;
return c.json(health, statusCode);
} catch (error) {
return c.json({
status: 'error',
message: 'Failed to retrieve system health',
error: error instanceof Error ? error.message : 'Unknown error',
}, 500);
return c.json(
{
status: 'error',
message: 'Failed to retrieve system health',
error: error instanceof Error ? error.message : 'Unknown error',
},
500
);
}
});
/**
* Get cache/Dragonfly statistics
*/
monitoring.get('/cache', async (c) => {
monitoring.get('/cache', async c => {
try {
const stats = await monitoringService.getCacheStats();
return c.json(stats);
} catch (error) {
return c.json({
error: 'Failed to retrieve cache statistics',
message: error instanceof Error ? error.message : 'Unknown error',
}, 500);
return c.json(
{
error: 'Failed to retrieve cache statistics',
message: error instanceof Error ? error.message : 'Unknown error',
},
500
);
}
});
/**
* Get queue statistics
*/
monitoring.get('/queues', async (c) => {
monitoring.get('/queues', async c => {
try {
const stats = await monitoringService.getQueueStats();
return c.json({ queues: stats });
} catch (error) {
return c.json({
error: 'Failed to retrieve queue statistics',
message: error instanceof Error ? error.message : 'Unknown error',
}, 500);
return c.json(
{
error: 'Failed to retrieve queue statistics',
message: error instanceof Error ? error.message : 'Unknown error',
},
500
);
}
});
/**
* Get specific queue statistics
*/
monitoring.get('/queues/:name', async (c) => {
monitoring.get('/queues/:name', async c => {
try {
const queueName = c.req.param('name');
const stats = await monitoringService.getQueueStats();
const queueStats = stats.find(q => q.name === queueName);
if (!queueStats) {
return c.json({
error: 'Queue not found',
message: `Queue '${queueName}' does not exist`,
}, 404);
return c.json(
{
error: 'Queue not found',
message: `Queue '${queueName}' does not exist`,
},
404
);
}
return c.json(queueStats);
} catch (error) {
return c.json({
error: 'Failed to retrieve queue statistics',
message: error instanceof Error ? error.message : 'Unknown error',
}, 500);
return c.json(
{
error: 'Failed to retrieve queue statistics',
message: error instanceof Error ? error.message : 'Unknown error',
},
500
);
}
});
/**
* Get database statistics
*/
monitoring.get('/databases', async (c) => {
monitoring.get('/databases', async c => {
try {
const stats = await monitoringService.getDatabaseStats();
return c.json({ databases: stats });
} catch (error) {
return c.json({
error: 'Failed to retrieve database statistics',
message: error instanceof Error ? error.message : 'Unknown error',
}, 500);
return c.json(
{
error: 'Failed to retrieve database statistics',
message: error instanceof Error ? error.message : 'Unknown error',
},
500
);
}
});
/**
* Get specific database statistics
*/
monitoring.get('/databases/:type', async (c) => {
monitoring.get('/databases/:type', async c => {
try {
const dbType = c.req.param('type') as 'postgres' | 'mongodb' | 'questdb';
const stats = await monitoringService.getDatabaseStats();
const dbStats = stats.find(db => db.type === dbType);
if (!dbStats) {
return c.json({
error: 'Database not found',
message: `Database type '${dbType}' not found or not enabled`,
}, 404);
return c.json(
{
error: 'Database not found',
message: `Database type '${dbType}' not found or not enabled`,
},
404
);
}
return c.json(dbStats);
} catch (error) {
return c.json({
error: 'Failed to retrieve database statistics',
message: error instanceof Error ? error.message : 'Unknown error',
}, 500);
return c.json(
{
error: 'Failed to retrieve database statistics',
message: error instanceof Error ? error.message : 'Unknown error',
},
500
);
}
});
/**
* Get service metrics
*/
monitoring.get('/metrics', async (c) => {
monitoring.get('/metrics', async c => {
try {
const metrics = await monitoringService.getServiceMetrics();
return c.json(metrics);
} catch (error) {
return c.json({
error: 'Failed to retrieve service metrics',
message: error instanceof Error ? error.message : 'Unknown error',
}, 500);
return c.json(
{
error: 'Failed to retrieve service metrics',
message: error instanceof Error ? error.message : 'Unknown error',
},
500
);
}
});
/**
* Get detailed cache info (Redis INFO command output)
*/
monitoring.get('/cache/info', async (c) => {
monitoring.get('/cache/info', async c => {
try {
if (!container.cache) {
return c.json({
error: 'Cache not available',
message: 'Cache service is not enabled',
}, 503);
return c.json(
{
error: 'Cache not available',
message: 'Cache service is not enabled',
},
503
);
}
const info = await container.cache.info();
const stats = await monitoringService.getCacheStats();
return c.json({
parsed: stats,
raw: info,
});
} catch (error) {
return c.json({
error: 'Failed to retrieve cache info',
message: error instanceof Error ? error.message : 'Unknown error',
}, 500);
return c.json(
{
error: 'Failed to retrieve cache info',
message: error instanceof Error ? error.message : 'Unknown error',
},
500
);
}
});
/**
* Health check endpoint for monitoring
*/
monitoring.get('/ping', (c) => {
return c.json({
status: 'ok',
monitoring.get('/ping', c => {
return c.json({
status: 'ok',
timestamp: new Date().toISOString(),
service: 'monitoring',
});
@ -182,78 +215,90 @@ export function createMonitoringRoutes(container: IServiceContainer) {
/**
* Get service status for all microservices
*/
monitoring.get('/services', async (c) => {
monitoring.get('/services', async c => {
try {
const services = await monitoringService.getServiceStatus();
return c.json({ services });
} catch (error) {
return c.json({
error: 'Failed to retrieve service status',
message: error instanceof Error ? error.message : 'Unknown error',
}, 500);
return c.json(
{
error: 'Failed to retrieve service status',
message: error instanceof Error ? error.message : 'Unknown error',
},
500
);
}
});
/**
* Get proxy statistics
*/
monitoring.get('/proxies', async (c) => {
monitoring.get('/proxies', async c => {
try {
const stats = await monitoringService.getProxyStats();
return c.json(stats || { enabled: false });
} catch (error) {
return c.json({
error: 'Failed to retrieve proxy statistics',
message: error instanceof Error ? error.message : 'Unknown error',
}, 500);
return c.json(
{
error: 'Failed to retrieve proxy statistics',
message: error instanceof Error ? error.message : 'Unknown error',
},
500
);
}
});
/**
* Get comprehensive system overview
*/
monitoring.get('/overview', async (c) => {
monitoring.get('/overview', async c => {
try {
const overview = await monitoringService.getSystemOverview();
return c.json(overview);
} catch (error) {
return c.json({
error: 'Failed to retrieve system overview',
message: error instanceof Error ? error.message : 'Unknown error',
}, 500);
return c.json(
{
error: 'Failed to retrieve system overview',
message: error instanceof Error ? error.message : 'Unknown error',
},
500
);
}
});
/**
* Test direct BullMQ queue access
*/
monitoring.get('/test/queue/:name', async (c) => {
monitoring.get('/test/queue/:name', async c => {
const queueName = c.req.param('name');
const { Queue } = await import('bullmq');
const connection = {
host: 'localhost',
port: 6379,
db: 0, // All queues in DB 0
db: 0, // All queues in DB 0
};
const queue = new Queue(queueName, { connection });
try {
const counts = await queue.getJobCounts();
await queue.close();
return c.json({
return c.json({
queueName,
counts
counts,
});
} catch (error: any) {
await queue.close();
return c.json({
queueName,
error: error.message
}, 500);
return c.json(
{
queueName,
error: error.message,
},
500
);
}
});
return monitoring;
}
}

View file

@ -132,4 +132,4 @@ export function createPipelineRoutes(container: IServiceContainer) {
});
return pipeline;
}
}

View file

@ -1,5 +1,5 @@
import type { IServiceContainer } from '@stock-bot/types';
import { getLogger } from '@stock-bot/logger';
import type { IServiceContainer } from '@stock-bot/handlers';
import {
CreateExchangeRequest,
CreateProviderMappingRequest,
@ -380,4 +380,4 @@ export class ExchangeService {
// Export function to create service instance with container
export function createExchangeService(container: IServiceContainer): ExchangeService {
return new ExchangeService(container);
}
}

View file

@ -3,19 +3,19 @@
* Collects health and performance metrics from all system components
*/
import * as os from 'os';
import type { IServiceContainer } from '@stock-bot/handlers';
import { getLogger } from '@stock-bot/logger';
import type {
CacheStats,
QueueStats,
DatabaseStats,
SystemHealth,
import type {
CacheStats,
DatabaseStats,
ProxyStats,
QueueStats,
ServiceMetrics,
ServiceStatus,
ProxyStats,
SystemOverview
SystemHealth,
SystemOverview,
} from '../types/monitoring.types';
import * as os from 'os';
export class MonitoringService {
private readonly logger = getLogger('monitoring-service');
@ -46,7 +46,7 @@ export class MonitoringService {
// Get cache stats from the provider
const cacheStats = this.container.cache.getStats();
// Since we can't access Redis info directly, we'll use what's available
return {
provider: 'dragonfly',
@ -74,7 +74,7 @@ export class MonitoringService {
*/
async getQueueStats(): Promise<QueueStats[]> {
const stats: QueueStats[] = [];
try {
if (!this.container.queue) {
this.logger.warn('No queue manager available');
@ -83,27 +83,27 @@ export class MonitoringService {
// Get all queue names from the SmartQueueManager
const queueManager = this.container.queue as any;
this.logger.debug('Queue manager type:', {
this.logger.debug('Queue manager type:', {
type: queueManager.constructor.name,
hasGetAllQueues: typeof queueManager.getAllQueues === 'function',
hasQueues: !!queueManager.queues,
hasGetQueue: typeof queueManager.getQueue === 'function'
hasGetQueue: typeof queueManager.getQueue === 'function',
});
// Always use the known queue names since web-api doesn't create worker queues
const handlerMapping = {
'proxy': 'data-ingestion',
'qm': 'data-ingestion',
'ib': 'data-ingestion',
'ceo': 'data-ingestion',
'webshare': 'data-ingestion',
'exchanges': 'data-pipeline',
'symbols': 'data-pipeline',
proxy: 'data-ingestion',
qm: 'data-ingestion',
ib: 'data-ingestion',
ceo: 'data-ingestion',
webshare: 'data-ingestion',
exchanges: 'data-pipeline',
symbols: 'data-pipeline',
};
const queueNames = Object.keys(handlerMapping);
this.logger.debug('Using known queue names', { count: queueNames.length, names: queueNames });
// Create BullMQ queues directly with the correct format
for (const handlerName of queueNames) {
try {
@ -114,17 +114,17 @@ export class MonitoringService {
port: 6379,
db: 0, // All queues now in DB 0
};
// Get the service that owns this handler
const serviceName = handlerMapping[handlerName as keyof typeof handlerMapping];
// Create BullMQ queue with the new naming format {service_handler}
const fullQueueName = `{${serviceName}_${handlerName}}`;
const bullQueue = new BullMQQueue(fullQueueName, { connection });
// Get stats directly from BullMQ
const queueStats = await this.getQueueStatsForBullQueue(bullQueue, handlerName);
stats.push({
name: handlerName,
connected: true,
@ -134,7 +134,7 @@ export class MonitoringService {
concurrency: 1,
},
});
// Close the queue connection after getting stats
await bullQueue.close();
} catch (error) {
@ -167,7 +167,7 @@ export class MonitoringService {
try {
// BullMQ provides getJobCounts which returns all counts at once
const counts = await bullQueue.getJobCounts();
return {
waiting: counts.waiting || 0,
active: counts.active || 0,
@ -184,11 +184,11 @@ export class MonitoringService {
try {
const [waiting, active, completed, failed, delayed, paused] = await Promise.all([
bullQueue.getWaitingCount(),
bullQueue.getActiveCount(),
bullQueue.getActiveCount(),
bullQueue.getCompletedCount(),
bullQueue.getFailedCount(),
bullQueue.getDelayedCount(),
bullQueue.getPausedCount ? bullQueue.getPausedCount() : 0
bullQueue.getPausedCount ? bullQueue.getPausedCount() : 0,
]);
return {
@ -222,7 +222,7 @@ export class MonitoringService {
paused: stats.paused || 0,
};
}
// Try individual count methods
const [waiting, active, completed, failed, delayed] = await Promise.all([
this.safeGetCount(queue, 'getWaitingCount', 'getWaiting'),
@ -252,7 +252,7 @@ export class MonitoringService {
if (queue[methodName] && typeof queue[methodName] === 'function') {
try {
const result = await queue[methodName]();
return Array.isArray(result) ? result.length : (result || 0);
return Array.isArray(result) ? result.length : result || 0;
} catch (_e) {
// Continue to next method
}
@ -291,7 +291,7 @@ export class MonitoringService {
concurrency: queue.workers[0]?.concurrency || 1,
};
}
// Check queue manager for worker config
if (queueManager.config?.defaultQueueOptions) {
const options = queueManager.config.defaultQueueOptions;
@ -300,7 +300,7 @@ export class MonitoringService {
concurrency: options.concurrency || 1,
};
}
// Check for getWorkerCount method
if (queue.getWorkerCount && typeof queue.getWorkerCount === 'function') {
const count = queue.getWorkerCount();
@ -312,7 +312,7 @@ export class MonitoringService {
} catch (_e) {
// Ignore
}
return undefined;
}
@ -331,12 +331,14 @@ export class MonitoringService {
// Get pool stats
const pool = (this.container.postgres as any).pool;
const poolStats = pool ? {
size: pool.totalCount || 0,
active: pool.idleCount || 0,
idle: pool.waitingCount || 0,
max: pool.options?.max || 0,
} : undefined;
const poolStats = pool
? {
size: pool.totalCount || 0,
active: pool.idleCount || 0,
idle: pool.waitingCount || 0,
max: pool.options?.max || 0,
}
: undefined;
stats.push({
type: 'postgres',
@ -365,7 +367,7 @@ export class MonitoringService {
const latency = Date.now() - startTime;
const serverStatus = await db.admin().serverStatus();
stats.push({
type: 'mongodb',
name: 'MongoDB',
@ -393,9 +395,11 @@ export class MonitoringService {
try {
const startTime = Date.now();
// QuestDB health check
const response = await fetch(`http://${process.env.QUESTDB_HOST || 'localhost'}:9000/exec?query=SELECT%201`);
const response = await fetch(
`http://${process.env.QUESTDB_HOST || 'localhost'}:9000/exec?query=SELECT%201`
);
const latency = Date.now() - startTime;
stats.push({
type: 'questdb',
name: 'QuestDB',
@ -432,23 +436,22 @@ export class MonitoringService {
// Determine overall health status
const errors: string[] = [];
if (!cacheStats.connected) {
errors.push('Cache service is disconnected');
}
const disconnectedQueues = queueStats.filter(q => !q.connected);
if (disconnectedQueues.length > 0) {
errors.push(`${disconnectedQueues.length} queue(s) are disconnected`);
}
const disconnectedDbs = databaseStats.filter(db => !db.connected);
if (disconnectedDbs.length > 0) {
errors.push(`${disconnectedDbs.length} database(s) are disconnected`);
}
const status = errors.length === 0 ? 'healthy' :
errors.length < 3 ? 'degraded' : 'unhealthy';
const status = errors.length === 0 ? 'healthy' : errors.length < 3 ? 'degraded' : 'unhealthy';
return {
status,
@ -478,7 +481,7 @@ export class MonitoringService {
*/
async getServiceMetrics(): Promise<ServiceMetrics> {
const now = new Date().toISOString();
return {
requestsPerSecond: {
timestamp: now,
@ -517,12 +520,12 @@ export class MonitoringService {
private parseRedisInfo(info: string): Record<string, any> {
const result: Record<string, any> = {};
const sections = info.split('\r\n\r\n');
for (const section of sections) {
const lines = section.split('\r\n');
const sectionName = lines[0]?.replace('# ', '') || 'general';
result[sectionName] = {};
for (let i = 1; i < lines.length; i++) {
const [key, value] = lines[i].split(':');
if (key && value) {
@ -530,7 +533,7 @@ export class MonitoringService {
}
}
}
return result;
}
@ -539,7 +542,7 @@ export class MonitoringService {
*/
async getServiceStatus(): Promise<ServiceStatus[]> {
const services: ServiceStatus[] = [];
// Define service endpoints
const serviceEndpoints = [
{ name: 'data-ingestion', port: 2001, path: '/health' },
@ -562,13 +565,13 @@ export class MonitoringService {
});
continue;
}
const startTime = Date.now();
const response = await fetch(`http://localhost:${service.port}${service.path}`, {
signal: AbortSignal.timeout(5000), // 5 second timeout
});
const _latency = Date.now() - startTime;
if (response.ok) {
const data = await response.json();
services.push({
@ -629,28 +632,28 @@ export class MonitoringService {
// Get proxy data from cache using getRaw method
// The proxy manager uses cache:proxy: prefix, but web-api cache uses cache:api:
const cacheProvider = this.container.cache;
if (cacheProvider.getRaw) {
// Use getRaw to access data with different cache prefix
// The proxy manager now uses a global cache:proxy: prefix
this.logger.debug('Attempting to fetch proxy data from cache');
const [cachedProxies, lastUpdateStr] = await Promise.all([
cacheProvider.getRaw<any[]>('cache:proxy:active'),
cacheProvider.getRaw<string>('cache:proxy:last-update')
cacheProvider.getRaw<string>('cache:proxy:last-update'),
]);
this.logger.debug('Proxy cache data retrieved', {
this.logger.debug('Proxy cache data retrieved', {
hasProxies: !!cachedProxies,
isArray: Array.isArray(cachedProxies),
proxyCount: cachedProxies ? cachedProxies.length : 0,
lastUpdate: lastUpdateStr
lastUpdate: lastUpdateStr,
});
if (cachedProxies && Array.isArray(cachedProxies)) {
const workingCount = cachedProxies.filter((p: any) => p.isWorking !== false).length;
const failedCount = cachedProxies.filter((p: any) => p.isWorking === false).length;
return {
enabled: true,
totalProxies: cachedProxies.length,
@ -662,7 +665,7 @@ export class MonitoringService {
} else {
this.logger.debug('Cache provider does not support getRaw method');
}
// No cached data found - proxies might not be initialized yet
return {
enabled: true,
@ -672,7 +675,7 @@ export class MonitoringService {
};
} catch (cacheError) {
this.logger.debug('Could not retrieve proxy data from cache', { error: cacheError });
// Return basic stats if cache query fails
return {
enabled: true,
@ -727,7 +730,7 @@ export class MonitoringService {
const idle = totalIdle / cpus.length;
const total = totalTick / cpus.length;
const usage = 100 - ~~(100 * idle / total);
const usage = 100 - ~~((100 * idle) / total);
return {
usage,
@ -742,21 +745,21 @@ export class MonitoringService {
private getSystemMemory() {
const totalMem = os.totalmem();
const freeMem = os.freemem();
// On Linux, freeMem includes buffers/cache, but we want "available" memory
// which better represents memory that can be used by applications
let availableMem = freeMem;
// Try to read from /proc/meminfo for more accurate memory stats on Linux
if (os.platform() === 'linux') {
try {
const fs = require('fs');
const meminfo = fs.readFileSync('/proc/meminfo', 'utf8');
const lines = meminfo.split('\n');
let memAvailable = 0;
let _memTotal = 0;
for (const line of lines) {
if (line.startsWith('MemAvailable:')) {
memAvailable = parseInt(line.split(/\s+/)[1], 10) * 1024; // Convert from KB to bytes
@ -764,7 +767,7 @@ export class MonitoringService {
_memTotal = parseInt(line.split(/\s+/)[1], 10) * 1024;
}
}
if (memAvailable > 0) {
availableMem = memAvailable;
}
@ -773,7 +776,7 @@ export class MonitoringService {
this.logger.debug('Could not read /proc/meminfo', { error });
}
}
const usedMem = totalMem - availableMem;
return {
@ -784,4 +787,4 @@ export class MonitoringService {
percentage: (usedMem / totalMem) * 100,
};
}
}
}

View file

@ -332,4 +332,4 @@ export class PipelineService {
};
}
}
}
}

View file

@ -124,4 +124,4 @@ export interface SystemOverview {
architecture: string;
hostname: string;
};
}
}