test: add comprehensive test suites for the DI container (factories, lifecycle, operation context, registrations), the event bus, and the handler registry
This commit is contained in:
parent
42baadae38
commit
54f37f9521
21 changed files with 4577 additions and 215 deletions
46
libs/core/di/test/factories.test.ts
Normal file
46
libs/core/di/test/factories.test.ts
Normal file
|
|
@ -0,0 +1,46 @@
|
|||
import { describe, expect, it } from 'bun:test';
|
||||
import { CacheFactory } from '../src/factories';
|
||||
|
||||
describe('DI Factories', () => {
|
||||
describe('CacheFactory', () => {
|
||||
it('should be exported', () => {
|
||||
expect(CacheFactory).toBeDefined();
|
||||
});
|
||||
|
||||
it('should create cache with configuration', () => {
|
||||
const cacheConfig = {
|
||||
redisConfig: {
|
||||
host: 'localhost',
|
||||
port: 6379,
|
||||
db: 1,
|
||||
},
|
||||
keyPrefix: 'test:',
|
||||
};
|
||||
|
||||
const cache = CacheFactory.create(cacheConfig);
|
||||
expect(cache).toBeDefined();
|
||||
});
|
||||
|
||||
it('should create null cache without config', () => {
|
||||
const cache = CacheFactory.create();
|
||||
expect(cache).toBeDefined();
|
||||
expect(cache.type).toBe('null');
|
||||
});
|
||||
|
||||
it('should create cache with logger', () => {
|
||||
const mockLogger = {
|
||||
info: () => {},
|
||||
error: () => {},
|
||||
warn: () => {},
|
||||
debug: () => {},
|
||||
};
|
||||
|
||||
const cacheConfig = {
|
||||
logger: mockLogger,
|
||||
};
|
||||
|
||||
const cache = CacheFactory.create(cacheConfig);
|
||||
expect(cache).toBeDefined();
|
||||
});
|
||||
});
|
||||
})
|
||||
260
libs/core/di/test/lifecycle.test.ts
Normal file
260
libs/core/di/test/lifecycle.test.ts
Normal file
|
|
@ -0,0 +1,260 @@
|
|||
import { describe, expect, it, mock, beforeEach } from 'bun:test';
|
||||
import { ServiceLifecycleManager } from '../src/utils/lifecycle';
|
||||
import type { AwilixContainer } from 'awilix';
|
||||
|
||||
describe('ServiceLifecycleManager', () => {
|
||||
let manager: ServiceLifecycleManager;
|
||||
|
||||
beforeEach(() => {
|
||||
manager = new ServiceLifecycleManager();
|
||||
});
|
||||
|
||||
describe('initializeServices', () => {
|
||||
it('should initialize services with connect method', async () => {
|
||||
const mockCache = {
|
||||
connect: mock(() => Promise.resolve()),
|
||||
};
|
||||
|
||||
const mockMongoClient = {
|
||||
connect: mock(() => Promise.resolve()),
|
||||
};
|
||||
|
||||
const mockContainer = {
|
||||
cradle: {
|
||||
cache: mockCache,
|
||||
mongoClient: mockMongoClient,
|
||||
postgresClient: null, // Not configured
|
||||
},
|
||||
} as unknown as AwilixContainer;
|
||||
|
||||
await manager.initializeServices(mockContainer);
|
||||
|
||||
expect(mockCache.connect).toHaveBeenCalled();
|
||||
expect(mockMongoClient.connect).toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should initialize services with initialize method', async () => {
|
||||
const mockService = {
|
||||
initialize: mock(() => Promise.resolve()),
|
||||
};
|
||||
|
||||
const mockContainer = {
|
||||
cradle: {
|
||||
cache: mockService,
|
||||
},
|
||||
} as unknown as AwilixContainer;
|
||||
|
||||
await manager.initializeServices(mockContainer);
|
||||
|
||||
expect(mockService.initialize).toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should handle initialization errors', async () => {
|
||||
const mockService = {
|
||||
connect: mock(() => Promise.reject(new Error('Connection failed'))),
|
||||
};
|
||||
|
||||
const mockContainer = {
|
||||
cradle: {
|
||||
cache: mockService,
|
||||
},
|
||||
} as unknown as AwilixContainer;
|
||||
|
||||
await expect(manager.initializeServices(mockContainer)).rejects.toThrow('Connection failed');
|
||||
});
|
||||
|
||||
it('should handle initialization timeout', async () => {
|
||||
const mockService = {
|
||||
connect: mock(() => new Promise(() => {})), // Never resolves
|
||||
};
|
||||
|
||||
const mockContainer = {
|
||||
cradle: {
|
||||
cache: mockService,
|
||||
},
|
||||
} as unknown as AwilixContainer;
|
||||
|
||||
await expect(manager.initializeServices(mockContainer, 100)).rejects.toThrow('cache initialization timed out after 100ms');
|
||||
});
|
||||
});
|
||||
|
||||
describe('shutdownServices', () => {
|
||||
it('should shutdown services with disconnect method', async () => {
|
||||
const mockCache = {
|
||||
disconnect: mock(() => Promise.resolve()),
|
||||
};
|
||||
|
||||
const mockMongoClient = {
|
||||
disconnect: mock(() => Promise.resolve()),
|
||||
};
|
||||
|
||||
const mockContainer = {
|
||||
cradle: {
|
||||
cache: mockCache,
|
||||
mongoClient: mockMongoClient,
|
||||
},
|
||||
} as unknown as AwilixContainer;
|
||||
|
||||
await manager.shutdownServices(mockContainer);
|
||||
|
||||
expect(mockCache.disconnect).toHaveBeenCalled();
|
||||
expect(mockMongoClient.disconnect).toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should shutdown services with close method', async () => {
|
||||
const mockService = {
|
||||
close: mock(() => Promise.resolve()),
|
||||
};
|
||||
|
||||
const mockContainer = {
|
||||
cradle: {
|
||||
queueManager: mockService,
|
||||
},
|
||||
} as unknown as AwilixContainer;
|
||||
|
||||
await manager.shutdownServices(mockContainer);
|
||||
|
||||
expect(mockService.close).toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should shutdown services with shutdown method', async () => {
|
||||
const mockService = {
|
||||
shutdown: mock(() => Promise.resolve()),
|
||||
};
|
||||
|
||||
const mockContainer = {
|
||||
cradle: {
|
||||
cache: mockService,
|
||||
},
|
||||
} as unknown as AwilixContainer;
|
||||
|
||||
await manager.shutdownServices(mockContainer);
|
||||
|
||||
expect(mockService.shutdown).toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should handle shutdown errors gracefully', async () => {
|
||||
const mockService = {
|
||||
disconnect: mock(() => Promise.reject(new Error('Disconnect failed'))),
|
||||
};
|
||||
|
||||
const mockContainer = {
|
||||
cradle: {
|
||||
cache: mockService,
|
||||
},
|
||||
} as unknown as AwilixContainer;
|
||||
|
||||
// Should not throw
|
||||
await manager.shutdownServices(mockContainer);
|
||||
});
|
||||
|
||||
it('should shutdown services in reverse order', async () => {
|
||||
const callOrder: string[] = [];
|
||||
|
||||
const mockCache = {
|
||||
disconnect: mock(() => {
|
||||
callOrder.push('cache');
|
||||
return Promise.resolve();
|
||||
}),
|
||||
};
|
||||
|
||||
const mockQueueManager = {
|
||||
close: mock(() => {
|
||||
callOrder.push('queue');
|
||||
return Promise.resolve();
|
||||
}),
|
||||
};
|
||||
|
||||
const mockContainer = {
|
||||
cradle: {
|
||||
cache: mockCache,
|
||||
queueManager: mockQueueManager,
|
||||
},
|
||||
} as unknown as AwilixContainer;
|
||||
|
||||
await manager.shutdownServices(mockContainer);
|
||||
|
||||
// Queue manager should be shutdown before cache (reverse order)
|
||||
expect(callOrder[0]).toBe('queue');
|
||||
expect(callOrder[1]).toBe('cache');
|
||||
});
|
||||
});
|
||||
|
||||
describe('mixed lifecycle methods', () => {
|
||||
it('should handle services with multiple lifecycle methods', async () => {
|
||||
const mockService = {
|
||||
connect: mock(() => Promise.resolve()),
|
||||
disconnect: mock(() => Promise.resolve()),
|
||||
initialize: mock(() => Promise.resolve()),
|
||||
shutdown: mock(() => Promise.resolve()),
|
||||
};
|
||||
|
||||
const mockContainer = {
|
||||
cradle: {
|
||||
cache: mockService,
|
||||
},
|
||||
} as unknown as AwilixContainer;
|
||||
|
||||
// Initialize should prefer connect over initialize
|
||||
await manager.initializeServices(mockContainer);
|
||||
expect(mockService.connect).toHaveBeenCalled();
|
||||
expect(mockService.initialize).not.toHaveBeenCalled();
|
||||
|
||||
// Shutdown should prefer disconnect over others
|
||||
await manager.shutdownServices(mockContainer);
|
||||
expect(mockService.disconnect).toHaveBeenCalled();
|
||||
expect(mockService.shutdown).not.toHaveBeenCalled();
|
||||
});
|
||||
});
|
||||
|
||||
describe('complete lifecycle flow', () => {
|
||||
it('should handle full initialization and shutdown cycle', async () => {
|
||||
const mockCache = {
|
||||
connect: mock(() => Promise.resolve()),
|
||||
disconnect: mock(() => Promise.resolve()),
|
||||
};
|
||||
|
||||
const mockMongoClient = {
|
||||
connect: mock(() => Promise.resolve()),
|
||||
disconnect: mock(() => Promise.resolve()),
|
||||
};
|
||||
|
||||
const mockPostgresClient = {
|
||||
connect: mock(() => Promise.resolve()),
|
||||
close: mock(() => Promise.resolve()),
|
||||
};
|
||||
|
||||
const mockQuestdbClient = {
|
||||
initialize: mock(() => Promise.resolve()),
|
||||
shutdown: mock(() => Promise.resolve()),
|
||||
};
|
||||
|
||||
const mockContainer = {
|
||||
cradle: {
|
||||
cache: mockCache,
|
||||
mongoClient: mockMongoClient,
|
||||
postgresClient: mockPostgresClient,
|
||||
questdbClient: mockQuestdbClient,
|
||||
proxyManager: null, // Not configured
|
||||
queueManager: null, // Not configured
|
||||
},
|
||||
} as unknown as AwilixContainer;
|
||||
|
||||
// Initialize all services
|
||||
await manager.initializeServices(mockContainer);
|
||||
|
||||
expect(mockCache.connect).toHaveBeenCalled();
|
||||
expect(mockMongoClient.connect).toHaveBeenCalled();
|
||||
expect(mockPostgresClient.connect).toHaveBeenCalled();
|
||||
expect(mockQuestdbClient.initialize).toHaveBeenCalled();
|
||||
|
||||
// Shutdown all services
|
||||
await manager.shutdownServices(mockContainer);
|
||||
|
||||
expect(mockCache.disconnect).toHaveBeenCalled();
|
||||
expect(mockMongoClient.disconnect).toHaveBeenCalled();
|
||||
expect(mockPostgresClient.close).toHaveBeenCalled();
|
||||
expect(mockQuestdbClient.shutdown).toHaveBeenCalled();
|
||||
});
|
||||
});
|
||||
})
|
||||
273
libs/core/di/test/operation-context.test.ts
Normal file
273
libs/core/di/test/operation-context.test.ts
Normal file
|
|
@ -0,0 +1,273 @@
|
|||
// libs/core/di/test/operation-context.test.ts
// Unit tests for OperationContext: construction, static create(), container
// resolution (sync/async, missing-container errors), metadata accumulation,
// execution-time tracking, completion logging, child contexts, dispose, and
// trace-ID uniqueness/format.
import { describe, expect, it, beforeEach, mock } from 'bun:test';
import { OperationContext } from '../src/operation-context';
import type { OperationContextOptions } from '../src/operation-context';

describe('OperationContext', () => {
  const mockLogger = {
    info: mock(() => {}),
    error: mock(() => {}),
    warn: mock(() => {}),
    debug: mock(() => {}),
    trace: mock(() => {}),
    child: mock(() => mockLogger),
  };

  const mockContainer = {
    resolve: mock((name: string) => ({ name })),
    resolveAsync: mock(async (name: string) => ({ name })),
  };

  beforeEach(() => {
    // Reset mocks
    // Replace every function-valued property with a fresh mock so call
    // counts don't leak between tests; 'child' must keep returning the
    // logger itself to preserve the chainable-logger shape.
    Object.keys(mockLogger).forEach(key => {
      if (typeof mockLogger[key as keyof typeof mockLogger] === 'function') {
        (mockLogger as any)[key] = mock(() =>
          key === 'child' ? mockLogger : undefined
        );
      }
    });
    mockContainer.resolve = mock((name: string) => ({ name }));
    mockContainer.resolveAsync = mock(async (name: string) => ({ name }));
  });

  describe('constructor', () => {
    it('should create context with required options', () => {
      const options: OperationContextOptions = {
        handlerName: 'test-handler',
        operationName: 'test-op',
      };

      const context = new OperationContext(options);

      expect(context).toBeDefined();
      expect(context.traceId).toBeDefined();
      expect(context.metadata).toEqual({});
      expect(context.logger).toBeDefined();
    });

    it('should create context with all options', () => {
      const options: OperationContextOptions = {
        handlerName: 'test-handler',
        operationName: 'test-op',
        parentLogger: mockLogger,
        container: mockContainer,
        metadata: { key: 'value' },
        traceId: 'custom-trace-id',
      };

      const context = new OperationContext(options);

      expect(context.traceId).toBe('custom-trace-id');
      expect(context.metadata).toEqual({ key: 'value' });
      expect(context.logger).toBe(mockLogger);
    });
  });

  describe('static create', () => {
    it('should create context using static method', () => {
      const context = OperationContext.create('handler', 'operation', {
        metadata: { foo: 'bar' },
      });

      expect(context).toBeDefined();
      expect(context.metadata).toEqual({ foo: 'bar' });
    });
  });

  describe('service resolution', () => {
    it('should resolve services from container', () => {
      const context = new OperationContext({
        handlerName: 'test',
        operationName: 'test-op',
        container: mockContainer,
      });

      const service = context.resolve('myService');
      expect(service).toEqual({ name: 'myService' });
      expect(mockContainer.resolve).toHaveBeenCalledWith('myService');
    });

    it('should resolve services asynchronously', async () => {
      const context = new OperationContext({
        handlerName: 'test',
        operationName: 'test-op',
        container: mockContainer,
      });

      const service = await context.resolveAsync('myService');
      expect(service).toEqual({ name: 'myService' });
      expect(mockContainer.resolveAsync).toHaveBeenCalledWith('myService');
    });

    it('should throw error when no container available', () => {
      const context = new OperationContext({
        handlerName: 'test',
        operationName: 'test-op',
      });

      expect(() => context.resolve('service')).toThrow('No service container available');
    });

    it('should throw error when no container available for async', async () => {
      const context = new OperationContext({
        handlerName: 'test',
        operationName: 'test-op',
      });

      await expect(context.resolveAsync('service')).rejects.toThrow('No service container available');
    });
  });

  describe('metadata', () => {
    it('should add metadata', () => {
      const context = new OperationContext({
        handlerName: 'test',
        operationName: 'test-op',
      });

      context.addMetadata('userId', '12345');
      context.addMetadata('correlationId', 'corr-456');

      expect(context.metadata.userId).toBe('12345');
      expect(context.metadata.correlationId).toBe('corr-456');
    });

    it('should preserve initial metadata', () => {
      const context = new OperationContext({
        handlerName: 'test',
        operationName: 'test-op',
        metadata: { initial: 'value' },
      });

      context.addMetadata('added', 'new-value');

      expect(context.metadata.initial).toBe('value');
      expect(context.metadata.added).toBe('new-value');
    });
  });

  describe('execution time', () => {
    it('should track execution time', async () => {
      const context = new OperationContext({
        handlerName: 'test',
        operationName: 'test-op',
      });

      // Wait a bit
      await new Promise(resolve => setTimeout(resolve, 50));

      // Bounds are loose (40–100ms for a 50ms sleep) to tolerate timer
      // jitter on loaded CI machines.
      const executionTime = context.getExecutionTime();
      expect(executionTime).toBeGreaterThan(40);
      expect(executionTime).toBeLessThan(100);
    });
  });

  describe('logging', () => {
    it('should log successful completion', () => {
      const context = new OperationContext({
        handlerName: 'test',
        operationName: 'test-op',
        parentLogger: mockLogger,
      });

      context.logCompletion(true);

      expect(mockLogger.info).toHaveBeenCalledWith(
        'Operation completed successfully',
        expect.objectContaining({
          executionTime: expect.any(Number),
          metadata: {},
        })
      );
    });

    it('should log failed completion with error', () => {
      const context = new OperationContext({
        handlerName: 'test',
        operationName: 'test-op',
        parentLogger: mockLogger,
      });

      const error = new Error('Test error');
      context.logCompletion(false, error);

      expect(mockLogger.error).toHaveBeenCalledWith(
        'Operation failed',
        expect.objectContaining({
          executionTime: expect.any(Number),
          error: 'Test error',
          stack: expect.any(String),
          metadata: {},
        })
      );
    });
  });

  describe('child context', () => {
    it('should create child context', () => {
      const parent = new OperationContext({
        handlerName: 'parent',
        operationName: 'parent-op',
        parentLogger: mockLogger,
        container: mockContainer,
        traceId: 'parent-trace',
        metadata: { parentKey: 'parentValue' },
      });

      const child = parent.createChild('child-op', { childKey: 'childValue' });

      expect(child.traceId).toBe('parent-trace'); // Inherits trace ID
      // Child metadata is the parent's metadata merged with the extras.
      expect(child.metadata).toEqual({
        parentKey: 'parentValue',
        childKey: 'childValue',
      });
      expect(child.logger).toBe(mockLogger); // Inherits logger
    });

    it('should create child without additional metadata', () => {
      const parent = new OperationContext({
        handlerName: 'parent',
        operationName: 'parent-op',
        metadata: { key: 'value' },
      });

      const child = parent.createChild('child-op');

      expect(child.metadata).toEqual({ key: 'value' });
    });
  });

  describe('dispose', () => {
    it('should log completion on dispose', async () => {
      const context = new OperationContext({
        handlerName: 'test',
        operationName: 'test-op',
        parentLogger: mockLogger,
      });

      await context.dispose();

      expect(mockLogger.info).toHaveBeenCalledWith(
        'Operation completed successfully',
        expect.any(Object)
      );
    });
  });

  describe('trace ID generation', () => {
    it('should generate unique trace IDs', () => {
      const context1 = new OperationContext({
        handlerName: 'test',
        operationName: 'test-op',
      });

      const context2 = new OperationContext({
        handlerName: 'test',
        operationName: 'test-op',
      });

      expect(context1.traceId).not.toBe(context2.traceId);
      // Expected shape: "<digits>-<base36ish suffix>".
      expect(context1.traceId).toMatch(/^\d+-[a-z0-9]+$/);
    });
  });
});
|
||||
293
libs/core/di/test/registration.test.ts
Normal file
293
libs/core/di/test/registration.test.ts
Normal file
|
|
@ -0,0 +1,293 @@
|
|||
import { describe, expect, it, mock } from 'bun:test';
|
||||
import { createContainer, asClass, asFunction, asValue } from 'awilix';
|
||||
import {
|
||||
registerCacheServices,
|
||||
registerDatabaseServices,
|
||||
registerServiceDependencies,
|
||||
} from '../src/registrations';
|
||||
|
||||
describe('DI Registrations', () => {
|
||||
describe('registerCacheServices', () => {
|
||||
it('should register null cache when no redis config', () => {
|
||||
const container = createContainer();
|
||||
const config = {
|
||||
service: {
|
||||
name: 'test-service',
|
||||
type: 'WORKER' as const,
|
||||
},
|
||||
// No redis config
|
||||
};
|
||||
|
||||
registerCacheServices(container, config);
|
||||
|
||||
const cache = container.resolve('cache');
|
||||
expect(cache).toBeDefined();
|
||||
expect(cache.type).toBe('null'); // NullCache type
|
||||
});
|
||||
|
||||
it('should register redis cache when redis config exists', () => {
|
||||
const container = createContainer();
|
||||
|
||||
// Register logger first as it's a dependency
|
||||
container.register({
|
||||
logger: asValue({
|
||||
info: () => {},
|
||||
error: () => {},
|
||||
warn: () => {},
|
||||
debug: () => {},
|
||||
}),
|
||||
});
|
||||
|
||||
const config = {
|
||||
service: {
|
||||
name: 'test-service',
|
||||
type: 'WORKER' as const,
|
||||
},
|
||||
redis: {
|
||||
host: 'localhost',
|
||||
port: 6379,
|
||||
db: 1,
|
||||
},
|
||||
};
|
||||
|
||||
registerCacheServices(container, config);
|
||||
|
||||
const cache = container.resolve('cache');
|
||||
expect(cache).toBeDefined();
|
||||
});
|
||||
|
||||
it('should register service cache', () => {
|
||||
const container = createContainer();
|
||||
|
||||
// Register dependencies
|
||||
container.register({
|
||||
cache: asValue({ type: 'null' }),
|
||||
config: asValue({
|
||||
service: { name: 'test-service' },
|
||||
redis: { host: 'localhost', port: 6379 },
|
||||
}),
|
||||
logger: asValue({ info: () => {} }),
|
||||
});
|
||||
|
||||
const config = {
|
||||
service: {
|
||||
name: 'test-service',
|
||||
type: 'WORKER' as const,
|
||||
},
|
||||
};
|
||||
|
||||
registerCacheServices(container, config);
|
||||
|
||||
const serviceCache = container.resolve('serviceCache');
|
||||
expect(serviceCache).toBeDefined();
|
||||
});
|
||||
});
|
||||
|
||||
describe('registerDatabaseServices', () => {
|
||||
it('should register MongoDB when config exists', () => {
|
||||
const container = createContainer();
|
||||
const mockLogger = {
|
||||
info: () => {},
|
||||
error: () => {},
|
||||
warn: () => {},
|
||||
debug: () => {},
|
||||
};
|
||||
|
||||
container.register({
|
||||
logger: asValue(mockLogger),
|
||||
});
|
||||
|
||||
const config = {
|
||||
service: {
|
||||
name: 'test-service',
|
||||
type: 'WORKER' as const,
|
||||
},
|
||||
mongodb: {
|
||||
uri: 'mongodb://localhost:27017',
|
||||
database: 'test-db',
|
||||
},
|
||||
};
|
||||
|
||||
registerDatabaseServices(container, config);
|
||||
|
||||
// Check that mongodb is registered
|
||||
const registrations = container.registrations;
|
||||
expect(registrations.mongodb).toBeDefined();
|
||||
});
|
||||
|
||||
it('should register Postgres when config exists', () => {
|
||||
const container = createContainer();
|
||||
const mockLogger = { info: () => {}, error: () => {} };
|
||||
|
||||
container.register({
|
||||
logger: asValue(mockLogger),
|
||||
});
|
||||
|
||||
const config = {
|
||||
service: {
|
||||
name: 'test-service',
|
||||
type: 'WORKER' as const,
|
||||
},
|
||||
postgres: {
|
||||
host: 'localhost',
|
||||
port: 5432,
|
||||
database: 'test-db',
|
||||
username: 'user',
|
||||
password: 'pass',
|
||||
},
|
||||
};
|
||||
|
||||
registerDatabaseServices(container, config);
|
||||
|
||||
const registrations = container.registrations;
|
||||
expect(registrations.postgres).toBeDefined();
|
||||
});
|
||||
|
||||
it('should register QuestDB when config exists', () => {
|
||||
const container = createContainer();
|
||||
const mockLogger = { info: () => {}, error: () => {} };
|
||||
|
||||
container.register({
|
||||
logger: asValue(mockLogger),
|
||||
});
|
||||
|
||||
const config = {
|
||||
service: {
|
||||
name: 'test-service',
|
||||
type: 'WORKER' as const,
|
||||
},
|
||||
questdb: {
|
||||
host: 'localhost',
|
||||
httpPort: 9000,
|
||||
pgPort: 8812,
|
||||
},
|
||||
};
|
||||
|
||||
registerDatabaseServices(container, config);
|
||||
|
||||
const registrations = container.registrations;
|
||||
expect(registrations.questdb).toBeDefined();
|
||||
});
|
||||
|
||||
it('should not register databases without config', () => {
|
||||
const container = createContainer();
|
||||
const config = {
|
||||
service: {
|
||||
name: 'test-service',
|
||||
type: 'WORKER' as const,
|
||||
},
|
||||
// No database configs
|
||||
};
|
||||
|
||||
registerDatabaseServices(container, config);
|
||||
|
||||
const registrations = container.registrations;
|
||||
expect(registrations.mongodb).toBeUndefined();
|
||||
expect(registrations.postgres).toBeUndefined();
|
||||
expect(registrations.questdb).toBeUndefined();
|
||||
});
|
||||
});
|
||||
|
||||
describe('registerServiceDependencies', () => {
|
||||
it('should register browser service when config exists', () => {
|
||||
const container = createContainer();
|
||||
const mockLogger = { info: () => {}, error: () => {} };
|
||||
|
||||
container.register({
|
||||
logger: asValue(mockLogger),
|
||||
config: asValue({
|
||||
browser: { headless: true },
|
||||
}),
|
||||
});
|
||||
|
||||
const config = {
|
||||
service: {
|
||||
name: 'test-service',
|
||||
type: 'WORKER' as const,
|
||||
},
|
||||
browser: {
|
||||
headless: true,
|
||||
timeout: 30000,
|
||||
},
|
||||
};
|
||||
|
||||
registerServiceDependencies(container, config);
|
||||
|
||||
const registrations = container.registrations;
|
||||
expect(registrations.browser).toBeDefined();
|
||||
});
|
||||
|
||||
it('should register proxy service when config exists', () => {
|
||||
const container = createContainer();
|
||||
const mockLogger = { info: () => {}, error: () => {} };
|
||||
|
||||
container.register({
|
||||
logger: asValue(mockLogger),
|
||||
});
|
||||
|
||||
const config = {
|
||||
service: {
|
||||
name: 'test-service',
|
||||
type: 'WORKER' as const,
|
||||
},
|
||||
proxy: {
|
||||
enabled: true,
|
||||
rotateOnError: true,
|
||||
},
|
||||
};
|
||||
|
||||
registerServiceDependencies(container, config);
|
||||
|
||||
const registrations = container.registrations;
|
||||
expect(registrations.proxyManager).toBeDefined();
|
||||
});
|
||||
|
||||
it('should register queue services for worker type', () => {
|
||||
const container = createContainer();
|
||||
const mockLogger = { info: () => {}, error: () => {} };
|
||||
|
||||
container.register({
|
||||
logger: asValue(mockLogger),
|
||||
config: asValue({
|
||||
service: { name: 'test-service', type: 'WORKER' },
|
||||
redis: { host: 'localhost', port: 6379 },
|
||||
}),
|
||||
});
|
||||
|
||||
const config = {
|
||||
service: {
|
||||
name: 'test-service',
|
||||
type: 'WORKER' as const,
|
||||
},
|
||||
redis: {
|
||||
host: 'localhost',
|
||||
port: 6379,
|
||||
},
|
||||
};
|
||||
|
||||
registerServiceDependencies(container, config);
|
||||
|
||||
const registrations = container.registrations;
|
||||
expect(registrations.queueManager).toBeDefined();
|
||||
});
|
||||
|
||||
it('should not register queue for API type', () => {
|
||||
const container = createContainer();
|
||||
const config = {
|
||||
service: {
|
||||
name: 'test-api',
|
||||
type: 'API' as const,
|
||||
},
|
||||
redis: {
|
||||
host: 'localhost',
|
||||
port: 6379,
|
||||
},
|
||||
};
|
||||
|
||||
registerServiceDependencies(container, config);
|
||||
|
||||
const registrations = container.registrations;
|
||||
expect(registrations.queueManager).toBeUndefined();
|
||||
});
|
||||
});
|
||||
})
|
||||
|
|
@ -1,209 +0,0 @@
|
|||
// Unit tests for SimpleEventBus: subscribe/unsubscribe, async and sync
// publish, wildcard pattern matching, handler-error isolation, once(),
// off() (all handlers or a specific one), hasSubscribers(), and clear().
import { beforeEach, describe, expect, it, mock } from 'bun:test';
import { SimpleEventBus } from '../src/simple-event-bus';
import type { EventHandler, EventSubscription, EventBusMessage } from '../src/types';

describe('EventBus', () => {
  let eventBus: SimpleEventBus;

  beforeEach(() => {
    // Fresh bus per test so subscriptions never leak between cases.
    eventBus = new SimpleEventBus();
  });

  describe('subscribe', () => {
    it('should subscribe to events', () => {
      const handler: EventHandler = mock(async () => {});

      const subscription = eventBus.subscribe('test-event', handler);

      expect(subscription).toBeDefined();
      expect(subscription.channel).toBe('test-event');
      expect(subscription.handler).toBe(handler);
    });

    it('should allow multiple subscribers to same event', () => {
      const handler1 = mock(async () => {});
      const handler2 = mock(async () => {});

      const sub1 = eventBus.subscribe('event', handler1);
      const sub2 = eventBus.subscribe('event', handler2);

      expect(sub1.handler).toBe(handler1);
      expect(sub2.handler).toBe(handler2);
    });

    it('should support pattern subscriptions', () => {
      const handler = mock(async () => {});

      const subscription = eventBus.subscribe('user.*', handler);

      expect(subscription.channel).toBe('user.*');
    });
  });

  describe('unsubscribe', () => {
    it('should unsubscribe by subscription object', () => {
      const handler = mock(async () => {});
      const subscription = eventBus.subscribe('event', handler);

      const result = eventBus.unsubscribe(subscription);

      expect(result).toBe(true);
    });

    it('should unsubscribe by id', () => {
      const handler = mock(async () => {});
      eventBus.subscribe('event', handler);

      // We'll use the subscription object method since we don't expose IDs
      // NOTE(review): 'sub-1' assumes the bus assigns sequential ids
      // starting at 1 — confirm against SimpleEventBus's id scheme.
      const result = eventBus.unsubscribe('sub-1');

      expect(result).toBe(true);
    });

    it('should return false for non-existent subscription', () => {
      const result = eventBus.unsubscribe('non-existent');

      expect(result).toBe(false);
    });
  });

  describe('publish', () => {
    it('should publish events to subscribers', async () => {
      const handler = mock(async (message: EventBusMessage) => {});
      eventBus.subscribe('event', handler);

      await eventBus.publish('event', { data: 'test' });

      expect(handler).toHaveBeenCalledTimes(1);
      const message = handler.mock.calls[0][0];
      expect(message.type).toBe('event');
      expect(message.data).toEqual({ data: 'test' });
    });

    it('should publish to multiple subscribers', async () => {
      const handler1 = mock(async () => {});
      const handler2 = mock(async () => {});
      eventBus.subscribe('event', handler1);
      eventBus.subscribe('event', handler2);

      await eventBus.publish('event', { data: 'test' });

      expect(handler1).toHaveBeenCalledTimes(1);
      expect(handler2).toHaveBeenCalledTimes(1);
    });

    it('should match pattern subscriptions', async () => {
      const handler = mock(async () => {});
      eventBus.subscribe('user.*', handler);

      // Only the two 'user.*' events should match; 'order.created' should not.
      await eventBus.publish('user.created', { userId: '123' });
      await eventBus.publish('user.updated', { userId: '123' });
      await eventBus.publish('order.created', { orderId: '456' });

      expect(handler).toHaveBeenCalledTimes(2);
    });

    it('should handle errors in handlers gracefully', async () => {
      const errorHandler = mock(async () => {
        throw new Error('Handler error');
      });
      const successHandler = mock(async () => {});

      eventBus.subscribe('event', errorHandler);
      eventBus.subscribe('event', successHandler);

      // A throwing handler must not prevent the others from running.
      await eventBus.publish('event', { data: 'test' });

      expect(errorHandler).toHaveBeenCalledTimes(1);
      expect(successHandler).toHaveBeenCalledTimes(1);
    });
  });

  describe('publishSync', () => {
    it('should publish synchronously', () => {
      const handler = mock((message: EventBusMessage) => {});
      eventBus.subscribe('event', handler);

      eventBus.publishSync('event', { data: 'test' });

      expect(handler).toHaveBeenCalledTimes(1);
      const message = handler.mock.calls[0][0];
      expect(message.type).toBe('event');
      expect(message.data).toEqual({ data: 'test' });
    });
  });

  describe('once', () => {
    it('should subscribe for single event', async () => {
      const handler = mock(async () => {});
      eventBus.once('event', handler);

      await eventBus.publish('event', { data: 'first' });
      await eventBus.publish('event', { data: 'second' });

      expect(handler).toHaveBeenCalledTimes(1);
    });
  });

  describe('off', () => {
    it('should remove all handlers for event', async () => {
      const handler1 = mock(async () => {});
      const handler2 = mock(async () => {});
      eventBus.subscribe('event', handler1);
      eventBus.subscribe('event', handler2);

      eventBus.off('event');

      await eventBus.publish('event', { data: 'test' });

      expect(handler1).not.toHaveBeenCalled();
      expect(handler2).not.toHaveBeenCalled();
    });

    it('should remove specific handler', async () => {
      const handler1 = mock(async () => {});
      const handler2 = mock(async () => {});
      eventBus.subscribe('event', handler1);
      eventBus.subscribe('event', handler2);

      eventBus.off('event', handler1);

      await eventBus.publish('event', { data: 'test' });

      expect(handler1).not.toHaveBeenCalled();
      expect(handler2).toHaveBeenCalledTimes(1);
    });
  });

  describe('hasSubscribers', () => {
    it('should check for subscribers', () => {
      expect(eventBus.hasSubscribers('event')).toBe(false);

      const handler = mock(async () => {});
      eventBus.subscribe('event', handler);

      expect(eventBus.hasSubscribers('event')).toBe(true);

      eventBus.off('event');

      expect(eventBus.hasSubscribers('event')).toBe(false);
    });
  });

  describe('clear', () => {
    it('should clear all subscriptions', async () => {
      const handler1 = mock(async () => {});
      const handler2 = mock(async () => {});
      eventBus.subscribe('event1', handler1);
      eventBus.subscribe('event2', handler2);

      eventBus.clear();

      await eventBus.publish('event1', {});
      await eventBus.publish('event2', {});

      expect(handler1).not.toHaveBeenCalled();
      expect(handler2).not.toHaveBeenCalled();
    });
  });
});
|
||||
522
libs/core/handler-registry/test/registry-comprehensive.test.ts
Normal file
522
libs/core/handler-registry/test/registry-comprehensive.test.ts
Normal file
|
|
@ -0,0 +1,522 @@
|
|||
import { describe, expect, it, beforeEach } from 'bun:test';
|
||||
import { HandlerRegistry } from '../src/registry';
|
||||
import type {
|
||||
HandlerMetadata,
|
||||
HandlerConfiguration,
|
||||
OperationMetadata,
|
||||
ScheduleMetadata,
|
||||
} from '../src/types';
|
||||
|
||||
describe('HandlerRegistry Comprehensive Tests', () => {
|
||||
let registry: HandlerRegistry;
|
||||
|
||||
beforeEach(() => {
|
||||
registry = new HandlerRegistry();
|
||||
});
|
||||
|
||||
describe('registerMetadata', () => {
|
||||
it('should register handler metadata separately', () => {
|
||||
const metadata: HandlerMetadata = {
|
||||
name: 'TestHandler',
|
||||
service: 'test-service',
|
||||
operations: {
|
||||
processData: {
|
||||
name: 'processData',
|
||||
batch: false,
|
||||
},
|
||||
batchProcess: {
|
||||
name: 'batchProcess',
|
||||
batch: true,
|
||||
batchSize: 10,
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
registry.registerMetadata(metadata);
|
||||
|
||||
const retrieved = registry.getMetadata('TestHandler');
|
||||
expect(retrieved).toEqual(metadata);
|
||||
});
|
||||
|
||||
it('should overwrite existing metadata', () => {
|
||||
const metadata1: HandlerMetadata = {
|
||||
name: 'TestHandler',
|
||||
service: 'service1',
|
||||
operations: {
|
||||
op1: { name: 'op1', batch: false },
|
||||
},
|
||||
};
|
||||
|
||||
const metadata2: HandlerMetadata = {
|
||||
name: 'TestHandler',
|
||||
service: 'service2',
|
||||
operations: {
|
||||
op2: { name: 'op2', batch: false },
|
||||
},
|
||||
};
|
||||
|
||||
registry.registerMetadata(metadata1);
|
||||
registry.registerMetadata(metadata2);
|
||||
|
||||
const retrieved = registry.getMetadata('TestHandler');
|
||||
expect(retrieved).toEqual(metadata2);
|
||||
});
|
||||
});
|
||||
|
||||
describe('registerConfiguration', () => {
|
||||
it('should register handler configuration separately', () => {
|
||||
const config: HandlerConfiguration = {
|
||||
processData: async (data: any) => ({ processed: data }),
|
||||
batchProcess: async (items: any[]) => items.map(i => ({ processed: i })),
|
||||
};
|
||||
|
||||
registry.registerConfiguration('TestHandler', config);
|
||||
|
||||
const retrieved = registry.getConfiguration('TestHandler');
|
||||
expect(retrieved).toEqual(config);
|
||||
});
|
||||
|
||||
it('should handle async operations', async () => {
|
||||
const config: HandlerConfiguration = {
|
||||
asyncOp: async (data: any) => {
|
||||
await new Promise(resolve => setTimeout(resolve, 10));
|
||||
return { result: data };
|
||||
},
|
||||
};
|
||||
|
||||
registry.registerConfiguration('AsyncHandler', config);
|
||||
|
||||
const operation = registry.getOperation('AsyncHandler', 'asyncOp');
|
||||
expect(operation).toBeDefined();
|
||||
|
||||
const result = await operation!({ value: 42 });
|
||||
expect(result).toEqual({ result: { value: 42 } });
|
||||
});
|
||||
});
|
||||
|
||||
describe('getMetadata', () => {
|
||||
it('should return handler metadata', () => {
|
||||
const metadata: HandlerMetadata = {
|
||||
name: 'MetaHandler',
|
||||
service: 'meta-service',
|
||||
operations: {
|
||||
metaOp: { name: 'metaOp', batch: false },
|
||||
},
|
||||
};
|
||||
|
||||
registry.registerMetadata(metadata);
|
||||
|
||||
const retrieved = registry.getMetadata('MetaHandler');
|
||||
expect(retrieved).toEqual(metadata);
|
||||
});
|
||||
|
||||
it('should return undefined for non-existent handler', () => {
|
||||
const metadata = registry.getMetadata('NonExistent');
|
||||
expect(metadata).toBeUndefined();
|
||||
});
|
||||
});
|
||||
|
||||
describe('getServiceHandlers', () => {
|
||||
it('should return handlers for a specific service', () => {
|
||||
registry.register({
|
||||
metadata: {
|
||||
name: 'Handler1',
|
||||
service: 'service-a',
|
||||
operations: {},
|
||||
},
|
||||
configuration: {},
|
||||
});
|
||||
|
||||
registry.register({
|
||||
metadata: {
|
||||
name: 'Handler2',
|
||||
service: 'service-a',
|
||||
operations: {},
|
||||
},
|
||||
configuration: {},
|
||||
});
|
||||
|
||||
registry.register({
|
||||
metadata: {
|
||||
name: 'Handler3',
|
||||
service: 'service-b',
|
||||
operations: {},
|
||||
},
|
||||
configuration: {},
|
||||
});
|
||||
|
||||
const serviceAHandlers = registry.getServiceHandlers('service-a');
|
||||
expect(serviceAHandlers).toHaveLength(2);
|
||||
expect(serviceAHandlers.map(h => h.name)).toContain('Handler1');
|
||||
expect(serviceAHandlers.map(h => h.name)).toContain('Handler2');
|
||||
|
||||
const serviceBHandlers = registry.getServiceHandlers('service-b');
|
||||
expect(serviceBHandlers).toHaveLength(1);
|
||||
expect(serviceBHandlers[0].name).toBe('Handler3');
|
||||
});
|
||||
|
||||
it('should return empty array for non-existent service', () => {
|
||||
const handlers = registry.getServiceHandlers('non-existent-service');
|
||||
expect(handlers).toEqual([]);
|
||||
});
|
||||
});
|
||||
|
||||
describe('setHandlerService and getHandlerService', () => {
|
||||
it('should set and get handler service ownership', () => {
|
||||
registry.register({
|
||||
metadata: {
|
||||
name: 'ServiceHandler',
|
||||
operations: {},
|
||||
},
|
||||
configuration: {},
|
||||
});
|
||||
|
||||
registry.setHandlerService('ServiceHandler', 'my-service');
|
||||
|
||||
const service = registry.getHandlerService('ServiceHandler');
|
||||
expect(service).toBe('my-service');
|
||||
});
|
||||
|
||||
it('should overwrite existing service ownership', () => {
|
||||
registry.register({
|
||||
metadata: {
|
||||
name: 'ServiceHandler',
|
||||
service: 'initial-service',
|
||||
operations: {},
|
||||
},
|
||||
configuration: {},
|
||||
});
|
||||
|
||||
registry.setHandlerService('ServiceHandler', 'new-service');
|
||||
|
||||
const service = registry.getHandlerService('ServiceHandler');
|
||||
expect(service).toBe('new-service');
|
||||
});
|
||||
|
||||
it('should return undefined for non-existent handler', () => {
|
||||
const service = registry.getHandlerService('NonExistent');
|
||||
expect(service).toBeUndefined();
|
||||
});
|
||||
});
|
||||
|
||||
describe('getScheduledJobs', () => {
|
||||
it('should return scheduled jobs for a handler', () => {
|
||||
const schedules: ScheduleMetadata[] = [
|
||||
{
|
||||
operationName: 'dailyJob',
|
||||
schedule: '0 0 * * *',
|
||||
options: { timezone: 'UTC' },
|
||||
},
|
||||
{
|
||||
operationName: 'hourlyJob',
|
||||
schedule: '0 * * * *',
|
||||
},
|
||||
];
|
||||
|
||||
registry.register({
|
||||
metadata: {
|
||||
name: 'ScheduledHandler',
|
||||
operations: {
|
||||
dailyJob: { name: 'dailyJob', batch: false },
|
||||
hourlyJob: { name: 'hourlyJob', batch: false },
|
||||
},
|
||||
schedules,
|
||||
},
|
||||
configuration: {
|
||||
dailyJob: async () => ({ result: 'daily' }),
|
||||
hourlyJob: async () => ({ result: 'hourly' }),
|
||||
},
|
||||
});
|
||||
|
||||
const jobs = registry.getScheduledJobs('ScheduledHandler');
|
||||
expect(jobs).toHaveLength(2);
|
||||
expect(jobs).toEqual(schedules);
|
||||
});
|
||||
|
||||
it('should return empty array for handler without schedules', () => {
|
||||
registry.register({
|
||||
metadata: {
|
||||
name: 'NoScheduleHandler',
|
||||
operations: {},
|
||||
},
|
||||
configuration: {},
|
||||
});
|
||||
|
||||
const jobs = registry.getScheduledJobs('NoScheduleHandler');
|
||||
expect(jobs).toEqual([]);
|
||||
});
|
||||
|
||||
it('should return empty array for non-existent handler', () => {
|
||||
const jobs = registry.getScheduledJobs('NonExistent');
|
||||
expect(jobs).toEqual([]);
|
||||
});
|
||||
});
|
||||
|
||||
describe('getStats', () => {
|
||||
it('should return registry statistics', () => {
|
||||
// Register handlers with various configurations
|
||||
registry.register({
|
||||
metadata: {
|
||||
name: 'Handler1',
|
||||
service: 'service-a',
|
||||
operations: {
|
||||
op1: { name: 'op1', batch: false },
|
||||
op2: { name: 'op2', batch: true, batchSize: 5 },
|
||||
},
|
||||
schedules: [
|
||||
{ operationName: 'op1', schedule: '0 0 * * *' },
|
||||
],
|
||||
},
|
||||
configuration: {
|
||||
op1: async () => ({}),
|
||||
op2: async () => ({}),
|
||||
},
|
||||
});
|
||||
|
||||
registry.register({
|
||||
metadata: {
|
||||
name: 'Handler2',
|
||||
service: 'service-b',
|
||||
operations: {
|
||||
op3: { name: 'op3', batch: false },
|
||||
},
|
||||
},
|
||||
configuration: {
|
||||
op3: async () => ({}),
|
||||
},
|
||||
});
|
||||
|
||||
const stats = registry.getStats();
|
||||
|
||||
expect(stats.totalHandlers).toBe(2);
|
||||
expect(stats.totalOperations).toBe(3);
|
||||
expect(stats.batchOperations).toBe(1);
|
||||
expect(stats.scheduledOperations).toBe(1);
|
||||
expect(stats.handlersByService).toEqual({
|
||||
'service-a': 1,
|
||||
'service-b': 1,
|
||||
});
|
||||
});
|
||||
|
||||
it('should return zero stats for empty registry', () => {
|
||||
const stats = registry.getStats();
|
||||
|
||||
expect(stats.totalHandlers).toBe(0);
|
||||
expect(stats.totalOperations).toBe(0);
|
||||
expect(stats.batchOperations).toBe(0);
|
||||
expect(stats.scheduledOperations).toBe(0);
|
||||
expect(stats.handlersByService).toEqual({});
|
||||
});
|
||||
});
|
||||
|
||||
describe('clear', () => {
|
||||
it('should clear all registrations', () => {
|
||||
registry.register({
|
||||
metadata: {
|
||||
name: 'Handler1',
|
||||
operations: {},
|
||||
},
|
||||
configuration: {},
|
||||
});
|
||||
|
||||
registry.register({
|
||||
metadata: {
|
||||
name: 'Handler2',
|
||||
operations: {},
|
||||
},
|
||||
configuration: {},
|
||||
});
|
||||
|
||||
expect(registry.getHandlerNames()).toHaveLength(2);
|
||||
|
||||
registry.clear();
|
||||
|
||||
expect(registry.getHandlerNames()).toHaveLength(0);
|
||||
expect(registry.getAllMetadata()).toEqual([]);
|
||||
expect(registry.getStats().totalHandlers).toBe(0);
|
||||
});
|
||||
});
|
||||
|
||||
describe('export and import', () => {
|
||||
it('should export and import registry data', () => {
|
||||
// Setup initial registry
|
||||
registry.register({
|
||||
metadata: {
|
||||
name: 'ExportHandler1',
|
||||
service: 'export-service',
|
||||
operations: {
|
||||
exportOp: { name: 'exportOp', batch: false },
|
||||
},
|
||||
schedules: [
|
||||
{ operationName: 'exportOp', schedule: '0 0 * * *' },
|
||||
],
|
||||
},
|
||||
configuration: {
|
||||
exportOp: async () => ({ exported: true }),
|
||||
},
|
||||
});
|
||||
|
||||
registry.register({
|
||||
metadata: {
|
||||
name: 'ExportHandler2',
|
||||
operations: {
|
||||
anotherOp: { name: 'anotherOp', batch: true, batchSize: 10 },
|
||||
},
|
||||
},
|
||||
configuration: {
|
||||
anotherOp: async () => ({ another: true }),
|
||||
},
|
||||
});
|
||||
|
||||
// Export data
|
||||
const exportedData = registry.export();
|
||||
|
||||
expect(exportedData.handlers).toHaveLength(2);
|
||||
expect(exportedData.version).toBe('1.0');
|
||||
expect(exportedData.exportedAt).toBeInstanceOf(Date);
|
||||
|
||||
// Clear and verify empty
|
||||
registry.clear();
|
||||
expect(registry.getHandlerNames()).toHaveLength(0);
|
||||
|
||||
// Import data
|
||||
registry.import(exportedData);
|
||||
|
||||
// Verify restored
|
||||
expect(registry.getHandlerNames()).toHaveLength(2);
|
||||
expect(registry.hasHandler('ExportHandler1')).toBe(true);
|
||||
expect(registry.hasHandler('ExportHandler2')).toBe(true);
|
||||
|
||||
const handler1 = registry.getMetadata('ExportHandler1');
|
||||
expect(handler1?.service).toBe('export-service');
|
||||
expect(handler1?.schedules).toHaveLength(1);
|
||||
|
||||
const handler2 = registry.getMetadata('ExportHandler2');
|
||||
expect(handler2?.operations.anotherOp.batch).toBe(true);
|
||||
expect(handler2?.operations.anotherOp.batchSize).toBe(10);
|
||||
});
|
||||
|
||||
it('should handle import with empty data', () => {
|
||||
const emptyData = {
|
||||
version: '1.0',
|
||||
exportedAt: new Date(),
|
||||
handlers: [],
|
||||
};
|
||||
|
||||
registry.import(emptyData);
|
||||
|
||||
expect(registry.getHandlerNames()).toHaveLength(0);
|
||||
});
|
||||
|
||||
it('should preserve configurations during export/import', async () => {
|
||||
const testData = { value: 42 };
|
||||
|
||||
registry.register({
|
||||
metadata: {
|
||||
name: 'ConfigHandler',
|
||||
operations: {
|
||||
configOp: { name: 'configOp', batch: false },
|
||||
},
|
||||
},
|
||||
configuration: {
|
||||
configOp: async (data: any) => ({ processed: data.value * 2 }),
|
||||
},
|
||||
});
|
||||
|
||||
// Test operation before export
|
||||
const opBefore = registry.getOperation('ConfigHandler', 'configOp');
|
||||
const resultBefore = await opBefore!(testData);
|
||||
expect(resultBefore).toEqual({ processed: 84 });
|
||||
|
||||
// Export and import
|
||||
const exported = registry.export();
|
||||
registry.clear();
|
||||
registry.import(exported);
|
||||
|
||||
// Test operation after import - configurations are lost in export
|
||||
const opAfter = registry.getOperation('ConfigHandler', 'configOp');
|
||||
expect(opAfter).toBeUndefined(); // Configurations don't persist
|
||||
});
|
||||
});
|
||||
|
||||
describe('edge cases', () => {
|
||||
it('should handle empty operations object', () => {
|
||||
registry.register({
|
||||
metadata: {
|
||||
name: 'EmptyHandler',
|
||||
operations: {},
|
||||
},
|
||||
configuration: {},
|
||||
});
|
||||
|
||||
const metadata = registry.getMetadata('EmptyHandler');
|
||||
expect(metadata?.operations).toEqual({});
|
||||
|
||||
const stats = registry.getStats();
|
||||
expect(stats.totalOperations).toBe(0);
|
||||
});
|
||||
|
||||
it('should handle handlers with many operations', () => {
|
||||
const operations: Record<string, OperationMetadata> = {};
|
||||
const configuration: HandlerConfiguration = {};
|
||||
|
||||
// Create 50 operations
|
||||
for (let i = 0; i < 50; i++) {
|
||||
const opName = `operation${i}`;
|
||||
operations[opName] = {
|
||||
name: opName,
|
||||
batch: i % 2 === 0,
|
||||
batchSize: i % 2 === 0 ? i * 2 : undefined,
|
||||
};
|
||||
configuration[opName] = async () => ({ index: i });
|
||||
}
|
||||
|
||||
registry.register({
|
||||
metadata: {
|
||||
name: 'ManyOpsHandler',
|
||||
operations,
|
||||
},
|
||||
configuration,
|
||||
});
|
||||
|
||||
const metadata = registry.getMetadata('ManyOpsHandler');
|
||||
expect(Object.keys(metadata!.operations)).toHaveLength(50);
|
||||
|
||||
const stats = registry.getStats();
|
||||
expect(stats.totalOperations).toBe(50);
|
||||
expect(stats.batchOperations).toBe(25); // Half are batch operations
|
||||
});
|
||||
|
||||
it('should handle concurrent registrations', async () => {
|
||||
const promises = [];
|
||||
|
||||
// Register 10 handlers concurrently
|
||||
for (let i = 0; i < 10; i++) {
|
||||
promises.push(
|
||||
Promise.resolve().then(() => {
|
||||
registry.register({
|
||||
metadata: {
|
||||
name: `ConcurrentHandler${i}`,
|
||||
operations: {
|
||||
op: { name: 'op', batch: false },
|
||||
},
|
||||
},
|
||||
configuration: {
|
||||
op: async () => ({ handler: i }),
|
||||
},
|
||||
});
|
||||
})
|
||||
);
|
||||
}
|
||||
|
||||
await Promise.all(promises);
|
||||
|
||||
expect(registry.getHandlerNames()).toHaveLength(10);
|
||||
|
||||
// Verify all handlers registered correctly
|
||||
for (let i = 0; i < 10; i++) {
|
||||
expect(registry.hasHandler(`ConcurrentHandler${i}`)).toBe(true);
|
||||
}
|
||||
});
|
||||
});
|
||||
});
|
||||
275
libs/core/handlers/test/auto-register.test.ts
Normal file
275
libs/core/handlers/test/auto-register.test.ts
Normal file
|
|
@ -0,0 +1,275 @@
|
|||
import { describe, expect, it, beforeEach, mock } from 'bun:test';
|
||||
import {
|
||||
autoRegisterHandlers,
|
||||
createAutoHandlerRegistry,
|
||||
findHandlerFiles,
|
||||
extractHandlerClasses,
|
||||
} from '../src/registry/auto-register';
|
||||
import type { HandlerRegistry } from '@stock-bot/handler-registry';
|
||||
import { Handler, Operation } from '../src/decorators/decorators';
|
||||
|
||||
describe('Auto Registration', () => {
|
||||
const mockRegistry: HandlerRegistry = {
|
||||
registerHandler: mock(() => {}),
|
||||
getHandler: mock(() => null),
|
||||
hasHandler: mock(() => false),
|
||||
getAllHandlers: mock(() => []),
|
||||
getHandlersByService: mock(() => []),
|
||||
getHandlerOperations: mock(() => []),
|
||||
hasOperation: mock(() => false),
|
||||
executeOperation: mock(async () => ({ result: 'mocked' })),
|
||||
clear: mock(() => {}),
|
||||
};
|
||||
|
||||
const mockLogger = {
|
||||
info: mock(() => {}),
|
||||
error: mock(() => {}),
|
||||
warn: mock(() => {}),
|
||||
debug: mock(() => {}),
|
||||
};
|
||||
|
||||
beforeEach(() => {
|
||||
// Reset all mocks
|
||||
Object.values(mockRegistry).forEach(method => {
|
||||
if (typeof method === 'function' && 'mockClear' in method) {
|
||||
(method as any).mockClear();
|
||||
}
|
||||
});
|
||||
Object.values(mockLogger).forEach(method => {
|
||||
if (typeof method === 'function' && 'mockClear' in method) {
|
||||
(method as any).mockClear();
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
describe('findHandlerFiles', () => {
|
||||
it('should find handler files matching default pattern', async () => {
|
||||
// This test would need actual file system or mocking
|
||||
// For now, we'll test the function exists and returns an array
|
||||
const files = await findHandlerFiles();
|
||||
expect(Array.isArray(files)).toBe(true);
|
||||
});
|
||||
|
||||
it('should find handler files with custom pattern', async () => {
|
||||
const files = await findHandlerFiles('**/*.handler.ts');
|
||||
expect(Array.isArray(files)).toBe(true);
|
||||
});
|
||||
|
||||
it('should find handler files in specific directory', async () => {
|
||||
const files = await findHandlerFiles('*.handler.ts', './src/handlers');
|
||||
expect(Array.isArray(files)).toBe(true);
|
||||
});
|
||||
});
|
||||
|
||||
describe('extractHandlerClasses', () => {
|
||||
it('should extract handler classes from module', () => {
|
||||
@Handler('TestHandler1')
|
||||
class Handler1 {
|
||||
@Operation('op1')
|
||||
async operation1() {}
|
||||
}
|
||||
|
||||
@Handler('TestHandler2')
|
||||
class Handler2 {
|
||||
@Operation('op2')
|
||||
async operation2() {}
|
||||
}
|
||||
|
||||
class NotAHandler {
|
||||
async someMethod() {}
|
||||
}
|
||||
|
||||
const testModule = {
|
||||
Handler1,
|
||||
Handler2,
|
||||
NotAHandler,
|
||||
someFunction: () => {},
|
||||
someValue: 123,
|
||||
};
|
||||
|
||||
const handlers = extractHandlerClasses(testModule);
|
||||
|
||||
expect(handlers).toHaveLength(2);
|
||||
expect(handlers.map(h => h.name)).toContain('Handler1');
|
||||
expect(handlers.map(h => h.name)).toContain('Handler2');
|
||||
});
|
||||
|
||||
it('should handle modules with no handlers', () => {
|
||||
const testModule = {
|
||||
SomeClass: class {},
|
||||
someFunction: () => {},
|
||||
someValue: 'test',
|
||||
};
|
||||
|
||||
const handlers = extractHandlerClasses(testModule);
|
||||
expect(handlers).toHaveLength(0);
|
||||
});
|
||||
|
||||
it('should handle empty modules', () => {
|
||||
const handlers = extractHandlerClasses({});
|
||||
expect(handlers).toHaveLength(0);
|
||||
});
|
||||
|
||||
it('should extract handlers with metadata', () => {
|
||||
@Handler('MetadataHandler')
|
||||
class HandlerWithMetadata {
|
||||
@Operation('process')
|
||||
async process() {}
|
||||
}
|
||||
|
||||
const testModule = { HandlerWithMetadata };
|
||||
const handlers = extractHandlerClasses(testModule);
|
||||
|
||||
expect(handlers).toHaveLength(1);
|
||||
|
||||
const handlerClass = handlers[0];
|
||||
const metadata = Reflect.getMetadata('handler', handlerClass);
|
||||
|
||||
expect(metadata).toEqual({
|
||||
name: 'MetadataHandler',
|
||||
disabled: false,
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('autoRegisterHandlers', () => {
|
||||
it('should auto-register handlers', async () => {
|
||||
// Since this function reads from file system, we'll test its behavior
|
||||
// by mocking the registry calls
|
||||
const options = {
|
||||
pattern: '**/*.handler.ts',
|
||||
directory: './test-handlers',
|
||||
serviceName: 'test-service',
|
||||
};
|
||||
|
||||
// This would normally scan files and register handlers
|
||||
// For unit testing, we'll verify the function executes without errors
|
||||
await expect(
|
||||
autoRegisterHandlers(mockRegistry, options, mockLogger)
|
||||
).resolves.not.toThrow();
|
||||
});
|
||||
|
||||
it('should use default options when not provided', async () => {
|
||||
await expect(
|
||||
autoRegisterHandlers(mockRegistry)
|
||||
).resolves.not.toThrow();
|
||||
});
|
||||
|
||||
it('should handle registration errors gracefully', async () => {
|
||||
mockRegistry.registerHandler = mock(() => {
|
||||
throw new Error('Registration failed');
|
||||
});
|
||||
|
||||
// Should not throw, but log errors
|
||||
await expect(
|
||||
autoRegisterHandlers(mockRegistry, {}, mockLogger)
|
||||
).resolves.not.toThrow();
|
||||
});
|
||||
});
|
||||
|
||||
describe('createAutoHandlerRegistry', () => {
|
||||
it('should create a registry function for a service', () => {
|
||||
const registerFunction = createAutoHandlerRegistry('my-service');
|
||||
|
||||
expect(typeof registerFunction).toBe('function');
|
||||
|
||||
// Test the created function
|
||||
const result = registerFunction(mockRegistry, mockLogger);
|
||||
expect(result).toBeInstanceOf(Promise);
|
||||
});
|
||||
|
||||
it('should pass through custom options', () => {
|
||||
const customOptions = {
|
||||
pattern: '**/*.custom-handler.ts',
|
||||
directory: './custom-handlers',
|
||||
};
|
||||
|
||||
const registerFunction = createAutoHandlerRegistry('my-service', customOptions);
|
||||
|
||||
// The function should be created with merged options
|
||||
expect(typeof registerFunction).toBe('function');
|
||||
});
|
||||
|
||||
it('should use service name in registration', async () => {
|
||||
@Handler('ServiceHandler')
|
||||
class TestServiceHandler {
|
||||
@Operation('serviceOp')
|
||||
async operation() {}
|
||||
}
|
||||
|
||||
// Mock file discovery to return our test handler
|
||||
const mockFindFiles = mock(async () => ['test.handler.ts']);
|
||||
const mockExtract = mock(() => [TestServiceHandler]);
|
||||
|
||||
const registerFunction = createAutoHandlerRegistry('test-service');
|
||||
|
||||
// Execute registration
|
||||
await registerFunction(mockRegistry, mockLogger);
|
||||
|
||||
// Verify service name would be used in actual implementation
|
||||
expect(typeof registerFunction).toBe('function');
|
||||
});
|
||||
});
|
||||
|
||||
describe('integration scenarios', () => {
|
||||
it('should handle complex handler hierarchies', () => {
|
||||
@Handler('BaseHandler')
|
||||
class BaseTestHandler {
|
||||
@Operation('baseOp')
|
||||
async baseOperation() {}
|
||||
}
|
||||
|
||||
@Handler('DerivedHandler')
|
||||
class DerivedTestHandler extends BaseTestHandler {
|
||||
@Operation('derivedOp')
|
||||
async derivedOperation() {}
|
||||
}
|
||||
|
||||
const testModule = {
|
||||
BaseTestHandler,
|
||||
DerivedTestHandler,
|
||||
};
|
||||
|
||||
const handlers = extractHandlerClasses(testModule);
|
||||
|
||||
expect(handlers).toHaveLength(2);
|
||||
|
||||
// Both should be valid handler classes
|
||||
handlers.forEach(handlerClass => {
|
||||
const metadata = Reflect.getMetadata('handler', handlerClass);
|
||||
expect(metadata).toBeDefined();
|
||||
expect(metadata.disabled).toBe(false);
|
||||
});
|
||||
});
|
||||
|
||||
it('should skip disabled handlers if needed', () => {
|
||||
@Handler('EnabledHandler')
|
||||
class EnabledHandler {
|
||||
@Operation('op1')
|
||||
async operation() {}
|
||||
}
|
||||
|
||||
@Handler('DisabledHandler')
|
||||
class DisabledHandler {
|
||||
@Operation('op2')
|
||||
async operation() {}
|
||||
}
|
||||
|
||||
// Mark as disabled
|
||||
Reflect.defineMetadata('handler', { name: 'DisabledHandler', disabled: true }, DisabledHandler);
|
||||
|
||||
const testModule = {
|
||||
EnabledHandler,
|
||||
DisabledHandler,
|
||||
};
|
||||
|
||||
const handlers = extractHandlerClasses(testModule);
|
||||
|
||||
// Should extract both, filtering happens during registration
|
||||
expect(handlers).toHaveLength(2);
|
||||
|
||||
const disabledMetadata = Reflect.getMetadata('handler', DisabledHandler);
|
||||
expect(disabledMetadata.disabled).toBe(true);
|
||||
});
|
||||
});
|
||||
});
|
||||
356
libs/core/handlers/test/base-handler.test.ts
Normal file
356
libs/core/handlers/test/base-handler.test.ts
Normal file
|
|
@ -0,0 +1,356 @@
|
|||
import { describe, expect, it, beforeEach, mock } from 'bun:test';
|
||||
import { BaseHandler, ScheduledHandler } from '../src/base/BaseHandler';
|
||||
import type { IServiceContainer, ExecutionContext } from '@stock-bot/types';
|
||||
|
||||
describe('BaseHandler', () => {
|
||||
let mockServices: IServiceContainer;
|
||||
let mockContext: ExecutionContext;
|
||||
|
||||
beforeEach(() => {
|
||||
const mockQueue = {
|
||||
add: mock(async () => ({ id: 'job-456' })),
|
||||
getName: mock(() => 'test-queue'),
|
||||
};
|
||||
|
||||
mockServices = {
|
||||
cache: {
|
||||
get: mock(async () => null),
|
||||
set: mock(async () => {}),
|
||||
del: mock(async () => {}),
|
||||
clear: mock(async () => {}),
|
||||
has: mock(async () => false),
|
||||
keys: mock(async () => []),
|
||||
ttl: mock(async () => -1),
|
||||
type: 'memory',
|
||||
} as any,
|
||||
globalCache: {
|
||||
get: mock(async () => null),
|
||||
set: mock(async () => {}),
|
||||
del: mock(async () => {}),
|
||||
clear: mock(async () => {}),
|
||||
has: mock(async () => false),
|
||||
keys: mock(async () => []),
|
||||
ttl: mock(async () => -1),
|
||||
type: 'memory',
|
||||
} as any,
|
||||
queueManager: {
|
||||
getQueue: mock(() => mockQueue),
|
||||
createQueue: mock(() => mockQueue),
|
||||
hasQueue: mock(() => true),
|
||||
sendToQueue: mock(async () => 'job-123'),
|
||||
} as any,
|
||||
proxy: {
|
||||
getProxy: mock(() => ({ host: 'proxy.example.com', port: 8080 })),
|
||||
} as any,
|
||||
browser: {
|
||||
scrape: mock(async () => ({ data: 'scraped' })),
|
||||
} as any,
|
||||
mongodb: {
|
||||
db: mock(() => ({
|
||||
collection: mock(() => ({
|
||||
find: mock(() => ({ toArray: mock(async () => []) })),
|
||||
insertOne: mock(async () => ({ insertedId: 'id-123' })),
|
||||
})),
|
||||
})),
|
||||
} as any,
|
||||
postgres: {
|
||||
query: mock(async () => ({ rows: [] })),
|
||||
} as any,
|
||||
questdb: null,
|
||||
logger: {
|
||||
info: mock(() => {}),
|
||||
error: mock(() => {}),
|
||||
warn: mock(() => {}),
|
||||
debug: mock(() => {}),
|
||||
} as any,
|
||||
queue: mockQueue as any,
|
||||
};
|
||||
|
||||
mockContext = {
|
||||
logger: mockServices.logger,
|
||||
traceId: 'trace-123',
|
||||
handlerName: 'TestHandler',
|
||||
operationName: 'testOperation',
|
||||
metadata: {},
|
||||
startTime: new Date(),
|
||||
container: {
|
||||
resolve: mock((name: string) => mockServices[name as keyof IServiceContainer]),
|
||||
resolveAsync: mock(async (name: string) => mockServices[name as keyof IServiceContainer]),
|
||||
},
|
||||
} as any;
|
||||
|
||||
// Reset all mocks
|
||||
Object.values(mockServices).forEach(service => {
|
||||
if (service && typeof service === 'object') {
|
||||
Object.values(service).forEach(method => {
|
||||
if (typeof method === 'function' && 'mockClear' in method) {
|
||||
(method as any).mockClear();
|
||||
}
|
||||
});
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
// Minimal concrete subclass used to exercise BaseHandler's plumbing against
// the shared mock service container.
class TestHandler extends BaseHandler {
  constructor() {
    super(mockServices, 'TestHandler');
  }

  // Trivial operation: wraps its input so calls are easy to assert on.
  async testOperation(data: any) {
    return { processed: data };
  }
}
|
||||
|
||||
describe('service access', () => {
|
||||
it('should provide access to cache service', async () => {
|
||||
const handler = new TestHandler();
|
||||
|
||||
await handler.cache.set('key', 'value');
|
||||
|
||||
expect(mockServices.cache.set).toHaveBeenCalledWith('key', 'value');
|
||||
});
|
||||
|
||||
it('should have logger initialized', () => {
|
||||
const handler = new TestHandler();
|
||||
|
||||
expect(handler.logger).toBeDefined();
|
||||
// Logger is created by getLogger, not from mockServices
|
||||
});
|
||||
|
||||
it('should provide access to queue service', () => {
|
||||
const handler = new TestHandler();
|
||||
|
||||
expect(handler.queue).toBeDefined();
|
||||
expect(handler.queue.getName()).toBe('test-queue');
|
||||
});
|
||||
|
||||
it('should provide access to mongodb', () => {
|
||||
const handler = new TestHandler();
|
||||
|
||||
expect(handler.mongodb).toBe(mockServices.mongodb);
|
||||
});
|
||||
|
||||
it('should provide access to postgres', async () => {
|
||||
const handler = new TestHandler();
|
||||
|
||||
const result = await handler.postgres.query('SELECT 1');
|
||||
|
||||
expect(result.rows).toEqual([]);
|
||||
expect(mockServices.postgres.query).toHaveBeenCalledWith('SELECT 1');
|
||||
});
|
||||
|
||||
it('should provide access to browser', async () => {
|
||||
const handler = new TestHandler();
|
||||
|
||||
const result = await handler.browser.scrape('https://example.com');
|
||||
|
||||
expect(result).toEqual({ data: 'scraped' });
|
||||
expect(mockServices.browser.scrape).toHaveBeenCalledWith('https://example.com');
|
||||
});
|
||||
|
||||
it('should provide access to proxy manager', () => {
|
||||
const handler = new TestHandler();
|
||||
|
||||
const proxy = handler.proxy.getProxy();
|
||||
|
||||
expect(proxy).toEqual({ host: 'proxy.example.com', port: 8080 });
|
||||
});
|
||||
});
|
||||
|
||||
describe('caching utilities', () => {
|
||||
it('should generate handler-specific cache keys', async () => {
|
||||
const handler = new TestHandler();
|
||||
|
||||
const key = await handler.cacheKey(mockContext, 'user', '123');
|
||||
expect(key).toMatch(/TestHandler:user:123$/);
|
||||
});
|
||||
|
||||
it('should cache handler results', async () => {
|
||||
const handler = new TestHandler();
|
||||
const operation = mock(async () => ({ result: 'data' }));
|
||||
|
||||
// First call - executes operation
|
||||
const result1 = await handler.cacheHandler(
|
||||
mockContext,
|
||||
'test-cache',
|
||||
operation,
|
||||
{ ttl: 300 }
|
||||
);
|
||||
|
||||
expect(result1).toEqual({ result: 'data' });
|
||||
expect(operation).toHaveBeenCalledTimes(1);
|
||||
expect(mockServices.cache.set).toHaveBeenCalled();
|
||||
|
||||
// Mock cache hit
|
||||
mockServices.cache.get = mock(async () => ({ result: 'data' }));
|
||||
|
||||
// Second call - returns cached result
|
||||
const result2 = await handler.cacheHandler(
|
||||
mockContext,
|
||||
'test-cache',
|
||||
operation,
|
||||
{ ttl: 300 }
|
||||
);
|
||||
|
||||
expect(result2).toEqual({ result: 'data' });
|
||||
expect(operation).toHaveBeenCalledTimes(1); // Not called again
|
||||
});
|
||||
});
|
||||
|
||||
describe('scheduling', () => {
|
||||
it('should schedule operations', async () => {
|
||||
const handler = new TestHandler();
|
||||
|
||||
const jobId = await handler.scheduleOperation(
|
||||
mockContext,
|
||||
'processData',
|
||||
{ data: 'test' },
|
||||
{ delay: 5000 }
|
||||
);
|
||||
|
||||
expect(jobId).toBe('job-123');
|
||||
expect(mockServices.queueManager.sendToQueue).toHaveBeenCalled();
|
||||
});
|
||||
});
|
||||
|
||||
describe('HTTP client', () => {
|
||||
it('should provide axios instance', () => {
|
||||
const handler = new TestHandler();
|
||||
|
||||
const http = handler.http(mockContext);
|
||||
expect(http).toBeDefined();
|
||||
expect(http.get).toBeDefined();
|
||||
expect(http.post).toBeDefined();
|
||||
});
|
||||
});
|
||||
|
||||
describe('handler metadata', () => {
|
||||
it('should extract handler metadata', () => {
|
||||
const handler = new TestHandler();
|
||||
|
||||
const metadata = handler.getHandlerMetadata();
|
||||
expect(metadata).toBeDefined();
|
||||
expect(metadata.name).toBe('TestHandler');
|
||||
});
|
||||
});
|
||||
|
||||
describe('lifecycle hooks', () => {
|
||||
class LifecycleHandler extends BaseHandler {
|
||||
onInitCalled = false;
|
||||
onStartCalled = false;
|
||||
onStopCalled = false;
|
||||
onDisposeCalled = false;
|
||||
|
||||
constructor() {
|
||||
super(mockServices, 'LifecycleHandler');
|
||||
}
|
||||
|
||||
async onInit() {
|
||||
this.onInitCalled = true;
|
||||
}
|
||||
|
||||
async onStart() {
|
||||
this.onStartCalled = true;
|
||||
}
|
||||
|
||||
async onStop() {
|
||||
this.onStopCalled = true;
|
||||
}
|
||||
|
||||
async onDispose() {
|
||||
this.onDisposeCalled = true;
|
||||
}
|
||||
}
|
||||
|
||||
it('should call lifecycle hooks', async () => {
|
||||
const handler = new LifecycleHandler();
|
||||
|
||||
await handler.onInit();
|
||||
expect(handler.onInitCalled).toBe(true);
|
||||
|
||||
await handler.onStart();
|
||||
expect(handler.onStartCalled).toBe(true);
|
||||
|
||||
await handler.onStop();
|
||||
expect(handler.onStopCalled).toBe(true);
|
||||
|
||||
await handler.onDispose();
|
||||
expect(handler.onDisposeCalled).toBe(true);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('ScheduledHandler', () => {
|
||||
const mockServices: IServiceContainer = {
|
||||
cache: { type: 'memory' } as any,
|
||||
globalCache: { type: 'memory' } as any,
|
||||
queueManager: { getQueue: () => null } as any,
|
||||
proxy: null,
|
||||
browser: null,
|
||||
mongodb: null,
|
||||
postgres: null,
|
||||
questdb: null,
|
||||
logger: null as any,
|
||||
queue: null as any,
|
||||
};
|
||||
|
||||
class TestScheduledHandler extends ScheduledHandler {
|
||||
constructor() {
|
||||
super(mockServices, 'TestScheduledHandler');
|
||||
}
|
||||
|
||||
getScheduledJobs() {
|
||||
return [
|
||||
{
|
||||
name: 'dailyJob',
|
||||
schedule: '0 0 * * *',
|
||||
handler: 'processDailyData',
|
||||
},
|
||||
{
|
||||
name: 'hourlyJob',
|
||||
schedule: '0 * * * *',
|
||||
handler: 'processHourlyData',
|
||||
options: {
|
||||
timezone: 'UTC',
|
||||
},
|
||||
},
|
||||
];
|
||||
}
|
||||
|
||||
async processDailyData() {
|
||||
return { processed: 'daily' };
|
||||
}
|
||||
|
||||
async processHourlyData() {
|
||||
return { processed: 'hourly' };
|
||||
}
|
||||
}
|
||||
|
||||
it('should define scheduled jobs', () => {
|
||||
const handler = new TestScheduledHandler();
|
||||
|
||||
const jobs = handler.getScheduledJobs();
|
||||
|
||||
expect(jobs).toHaveLength(2);
|
||||
expect(jobs[0]).toEqual({
|
||||
name: 'dailyJob',
|
||||
schedule: '0 0 * * *',
|
||||
handler: 'processDailyData',
|
||||
});
|
||||
expect(jobs[1]).toEqual({
|
||||
name: 'hourlyJob',
|
||||
schedule: '0 * * * *',
|
||||
handler: 'processHourlyData',
|
||||
options: {
|
||||
timezone: 'UTC',
|
||||
},
|
||||
});
|
||||
});
|
||||
|
||||
it('should be a BaseHandler', () => {
|
||||
const handler = new TestScheduledHandler();
|
||||
|
||||
expect(handler).toBeInstanceOf(BaseHandler);
|
||||
expect(handler).toBeInstanceOf(ScheduledHandler);
|
||||
});
|
||||
});
|
||||
237
libs/core/handlers/test/create-job-handler.test.ts
Normal file
237
libs/core/handlers/test/create-job-handler.test.ts
Normal file
|
|
@ -0,0 +1,237 @@
|
|||
import { describe, expect, it } from 'bun:test';
|
||||
import { createJobHandler } from '../src/utils/create-job-handler';
|
||||
|
||||
describe('createJobHandler', () => {
|
||||
interface TestPayload {
|
||||
userId: string;
|
||||
action: string;
|
||||
data?: any;
|
||||
}
|
||||
|
||||
interface TestResult {
|
||||
success: boolean;
|
||||
processedBy: string;
|
||||
timestamp: Date;
|
||||
}
|
||||
|
||||
it('should create a type-safe job handler function', () => {
|
||||
const handler = createJobHandler<TestPayload, TestResult>(async (job) => {
|
||||
// Job should have correct payload type
|
||||
const { userId, action, data } = job.data;
|
||||
|
||||
return {
|
||||
success: true,
|
||||
processedBy: userId,
|
||||
timestamp: new Date(),
|
||||
};
|
||||
});
|
||||
|
||||
expect(typeof handler).toBe('function');
|
||||
});
|
||||
|
||||
it('should execute handler with job data', async () => {
|
||||
const testPayload: TestPayload = {
|
||||
userId: 'user-123',
|
||||
action: 'process',
|
||||
data: { value: 42 },
|
||||
};
|
||||
|
||||
const handler = createJobHandler<TestPayload, TestResult>(async (job) => {
|
||||
expect(job.data).toEqual(testPayload);
|
||||
expect(job.id).toBe('job-123');
|
||||
expect(job.name).toBe('test-job');
|
||||
|
||||
return {
|
||||
success: true,
|
||||
processedBy: job.data.userId,
|
||||
timestamp: new Date(),
|
||||
};
|
||||
});
|
||||
|
||||
// Create a mock job
|
||||
const mockJob = {
|
||||
id: 'job-123',
|
||||
name: 'test-job',
|
||||
data: testPayload,
|
||||
opts: {},
|
||||
progress: () => {},
|
||||
log: () => {},
|
||||
updateProgress: async () => {},
|
||||
};
|
||||
|
||||
const result = await handler(mockJob as any);
|
||||
|
||||
expect(result.success).toBe(true);
|
||||
expect(result.processedBy).toBe('user-123');
|
||||
expect(result.timestamp).toBeInstanceOf(Date);
|
||||
});
|
||||
|
||||
it('should handle errors in handler', async () => {
|
||||
const handler = createJobHandler<TestPayload, TestResult>(async (job) => {
|
||||
if (job.data.action === 'fail') {
|
||||
throw new Error('Handler error');
|
||||
}
|
||||
|
||||
return {
|
||||
success: true,
|
||||
processedBy: job.data.userId,
|
||||
timestamp: new Date(),
|
||||
};
|
||||
});
|
||||
|
||||
const mockJob = {
|
||||
id: 'job-456',
|
||||
name: 'test-job',
|
||||
data: {
|
||||
userId: 'user-456',
|
||||
action: 'fail',
|
||||
},
|
||||
opts: {},
|
||||
progress: () => {},
|
||||
log: () => {},
|
||||
updateProgress: async () => {},
|
||||
};
|
||||
|
||||
await expect(handler(mockJob as any)).rejects.toThrow('Handler error');
|
||||
});
|
||||
|
||||
it('should support async operations', async () => {
|
||||
const handler = createJobHandler<TestPayload, TestResult>(async (job) => {
|
||||
// Simulate async operation
|
||||
await new Promise(resolve => setTimeout(resolve, 10));
|
||||
|
||||
return {
|
||||
success: true,
|
||||
processedBy: job.data.userId,
|
||||
timestamp: new Date(),
|
||||
};
|
||||
});
|
||||
|
||||
const mockJob = {
|
||||
id: 'job-789',
|
||||
name: 'async-job',
|
||||
data: {
|
||||
userId: 'user-789',
|
||||
action: 'async-process',
|
||||
},
|
||||
opts: {},
|
||||
progress: () => {},
|
||||
log: () => {},
|
||||
updateProgress: async () => {},
|
||||
};
|
||||
|
||||
const startTime = Date.now();
|
||||
const result = await handler(mockJob as any);
|
||||
const endTime = Date.now();
|
||||
|
||||
expect(result.success).toBe(true);
|
||||
expect(endTime - startTime).toBeGreaterThanOrEqual(10);
|
||||
});
|
||||
|
||||
it('should maintain type safety for complex payloads', () => {
|
||||
interface ComplexPayload {
|
||||
user: {
|
||||
id: string;
|
||||
name: string;
|
||||
roles: string[];
|
||||
};
|
||||
request: {
|
||||
type: 'CREATE' | 'UPDATE' | 'DELETE';
|
||||
resource: string;
|
||||
data: Record<string, any>;
|
||||
};
|
||||
metadata: {
|
||||
timestamp: Date;
|
||||
source: string;
|
||||
version: number;
|
||||
};
|
||||
}
|
||||
|
||||
interface ComplexResult {
|
||||
status: 'success' | 'failure';
|
||||
changes: Array<{
|
||||
field: string;
|
||||
oldValue: any;
|
||||
newValue: any;
|
||||
}>;
|
||||
audit: {
|
||||
performedBy: string;
|
||||
performedAt: Date;
|
||||
duration: number;
|
||||
};
|
||||
}
|
||||
|
||||
const handler = createJobHandler<ComplexPayload, ComplexResult>(async (job) => {
|
||||
const startTime = Date.now();
|
||||
|
||||
// Type-safe access to nested properties
|
||||
const userId = job.data.user.id;
|
||||
const requestType = job.data.request.type;
|
||||
const version = job.data.metadata.version;
|
||||
|
||||
return {
|
||||
status: 'success',
|
||||
changes: [
|
||||
{
|
||||
field: 'resource',
|
||||
oldValue: null,
|
||||
newValue: job.data.request.resource,
|
||||
},
|
||||
],
|
||||
audit: {
|
||||
performedBy: userId,
|
||||
performedAt: new Date(),
|
||||
duration: Date.now() - startTime,
|
||||
},
|
||||
};
|
||||
});
|
||||
|
||||
expect(typeof handler).toBe('function');
|
||||
});
|
||||
|
||||
it('should work with job progress reporting', async () => {
|
||||
let progressValue = 0;
|
||||
|
||||
const handler = createJobHandler<TestPayload, TestResult>(async (job) => {
|
||||
// Report progress
|
||||
await job.updateProgress(25);
|
||||
progressValue = 25;
|
||||
|
||||
await new Promise(resolve => setTimeout(resolve, 10));
|
||||
|
||||
await job.updateProgress(50);
|
||||
progressValue = 50;
|
||||
|
||||
await new Promise(resolve => setTimeout(resolve, 10));
|
||||
|
||||
await job.updateProgress(100);
|
||||
progressValue = 100;
|
||||
|
||||
return {
|
||||
success: true,
|
||||
processedBy: job.data.userId,
|
||||
timestamp: new Date(),
|
||||
};
|
||||
});
|
||||
|
||||
const mockJob = {
|
||||
id: 'job-progress',
|
||||
name: 'progress-job',
|
||||
data: {
|
||||
userId: 'user-progress',
|
||||
action: 'long-process',
|
||||
},
|
||||
opts: {},
|
||||
progress: () => progressValue,
|
||||
log: () => {},
|
||||
updateProgress: async (value: number) => {
|
||||
progressValue = value;
|
||||
},
|
||||
};
|
||||
|
||||
const result = await handler(mockJob as any);
|
||||
|
||||
expect(result.success).toBe(true);
|
||||
expect(progressValue).toBe(100);
|
||||
});
|
||||
});
|
||||
300
libs/core/handlers/test/decorators.test.ts
Normal file
300
libs/core/handlers/test/decorators.test.ts
Normal file
|
|
@ -0,0 +1,300 @@
|
|||
import { describe, expect, it, beforeEach } from 'bun:test';
|
||||
import {
|
||||
Handler,
|
||||
Operation,
|
||||
Disabled,
|
||||
QueueSchedule,
|
||||
ScheduledOperation,
|
||||
} from '../src/decorators/decorators';
|
||||
|
||||
describe('Handler Decorators', () => {
|
||||
beforeEach(() => {
|
||||
// Clear metadata between tests
|
||||
(global as any).__handlerMetadata = undefined;
|
||||
});
|
||||
|
||||
describe('@Handler', () => {
|
||||
it('should mark class as handler with name', () => {
|
||||
@Handler('TestHandler')
|
||||
class MyHandler {}
|
||||
|
||||
const instance = new MyHandler();
|
||||
const metadata = Reflect.getMetadata('handler', instance.constructor);
|
||||
|
||||
expect(metadata).toEqual({
|
||||
name: 'TestHandler',
|
||||
disabled: false,
|
||||
});
|
||||
});
|
||||
|
||||
it('should use class name if no name provided', () => {
|
||||
@Handler()
|
||||
class MyTestHandler {}
|
||||
|
||||
const instance = new MyTestHandler();
|
||||
const metadata = Reflect.getMetadata('handler', instance.constructor);
|
||||
|
||||
expect(metadata).toEqual({
|
||||
name: 'MyTestHandler',
|
||||
disabled: false,
|
||||
});
|
||||
});
|
||||
|
||||
it('should work with inheritance', () => {
|
||||
@Handler('BaseHandler')
|
||||
class BaseTestHandler {}
|
||||
|
||||
@Handler('DerivedHandler')
|
||||
class DerivedTestHandler extends BaseTestHandler {}
|
||||
|
||||
const baseInstance = new BaseTestHandler();
|
||||
const derivedInstance = new DerivedTestHandler();
|
||||
|
||||
const baseMetadata = Reflect.getMetadata('handler', baseInstance.constructor);
|
||||
const derivedMetadata = Reflect.getMetadata('handler', derivedInstance.constructor);
|
||||
|
||||
expect(baseMetadata.name).toBe('BaseHandler');
|
||||
expect(derivedMetadata.name).toBe('DerivedHandler');
|
||||
});
|
||||
});
|
||||
|
||||
describe('@Operation', () => {
|
||||
it('should mark method as operation', () => {
|
||||
class TestHandler {
|
||||
@Operation('processData')
|
||||
async process(data: any) {
|
||||
return data;
|
||||
}
|
||||
}
|
||||
|
||||
const operations = Reflect.getMetadata('operations', TestHandler.prototype) || {};
|
||||
|
||||
expect(operations.process).toEqual({
|
||||
name: 'processData',
|
||||
batch: false,
|
||||
batchSize: undefined,
|
||||
batchDelay: undefined,
|
||||
});
|
||||
});
|
||||
|
||||
it('should use method name if no name provided', () => {
|
||||
class TestHandler {
|
||||
@Operation()
|
||||
async processOrder(data: any) {
|
||||
return data;
|
||||
}
|
||||
}
|
||||
|
||||
const operations = Reflect.getMetadata('operations', TestHandler.prototype) || {};
|
||||
|
||||
expect(operations.processOrder).toEqual({
|
||||
name: 'processOrder',
|
||||
batch: false,
|
||||
batchSize: undefined,
|
||||
batchDelay: undefined,
|
||||
});
|
||||
});
|
||||
|
||||
it('should support batch configuration', () => {
|
||||
class TestHandler {
|
||||
@Operation('batchProcess', { batch: true, batchSize: 10, batchDelay: 1000 })
|
||||
async processBatch(items: any[]) {
|
||||
return items;
|
||||
}
|
||||
}
|
||||
|
||||
const operations = Reflect.getMetadata('operations', TestHandler.prototype) || {};
|
||||
|
||||
expect(operations.processBatch).toEqual({
|
||||
name: 'batchProcess',
|
||||
batch: true,
|
||||
batchSize: 10,
|
||||
batchDelay: 1000,
|
||||
});
|
||||
});
|
||||
|
||||
it('should work with multiple operations', () => {
|
||||
class TestHandler {
|
||||
@Operation('op1')
|
||||
async operation1() {}
|
||||
|
||||
@Operation('op2')
|
||||
async operation2() {}
|
||||
|
||||
@Operation('op3')
|
||||
async operation3() {}
|
||||
}
|
||||
|
||||
const operations = Reflect.getMetadata('operations', TestHandler.prototype) || {};
|
||||
|
||||
expect(Object.keys(operations)).toHaveLength(3);
|
||||
expect(operations.operation1.name).toBe('op1');
|
||||
expect(operations.operation2.name).toBe('op2');
|
||||
expect(operations.operation3.name).toBe('op3');
|
||||
});
|
||||
});
|
||||
|
||||
describe('@Disabled', () => {
|
||||
it('should mark handler as disabled', () => {
|
||||
@Disabled()
|
||||
@Handler('DisabledHandler')
|
||||
class MyDisabledHandler {}
|
||||
|
||||
const instance = new MyDisabledHandler();
|
||||
const metadata = Reflect.getMetadata('handler', instance.constructor);
|
||||
|
||||
expect(metadata).toEqual({
|
||||
name: 'DisabledHandler',
|
||||
disabled: true,
|
||||
});
|
||||
});
|
||||
|
||||
it('should work when applied after Handler decorator', () => {
|
||||
@Handler('TestHandler')
|
||||
@Disabled()
|
||||
class MyHandler {}
|
||||
|
||||
const instance = new MyHandler();
|
||||
const metadata = Reflect.getMetadata('handler', instance.constructor);
|
||||
|
||||
expect(metadata).toEqual({
|
||||
name: 'TestHandler',
|
||||
disabled: true,
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('@QueueSchedule', () => {
|
||||
it('should add queue schedule to operation', () => {
|
||||
class TestHandler {
|
||||
@QueueSchedule('0 0 * * *')
|
||||
@Operation('dailyTask')
|
||||
async runDaily() {}
|
||||
}
|
||||
|
||||
const schedules = Reflect.getMetadata('queueSchedules', TestHandler.prototype) || {};
|
||||
|
||||
expect(schedules.runDaily).toEqual({
|
||||
cron: '0 0 * * *',
|
||||
});
|
||||
});
|
||||
|
||||
it('should work with multiple scheduled operations', () => {
|
||||
class TestHandler {
|
||||
@QueueSchedule('0 * * * *')
|
||||
@Operation('hourlyTask')
|
||||
async runHourly() {}
|
||||
|
||||
@QueueSchedule('0 0 * * *')
|
||||
@Operation('dailyTask')
|
||||
async runDaily() {}
|
||||
}
|
||||
|
||||
const schedules = Reflect.getMetadata('queueSchedules', TestHandler.prototype) || {};
|
||||
|
||||
expect(schedules.runHourly.cron).toBe('0 * * * *');
|
||||
expect(schedules.runDaily.cron).toBe('0 0 * * *');
|
||||
});
|
||||
});
|
||||
|
||||
describe('@ScheduledOperation', () => {
|
||||
it('should mark operation as scheduled with options', () => {
|
||||
class TestHandler {
|
||||
@ScheduledOperation({
|
||||
name: 'syncData',
|
||||
schedule: '*/5 * * * *',
|
||||
timezone: 'UTC',
|
||||
startDate: new Date('2024-01-01'),
|
||||
endDate: new Date('2024-12-31'),
|
||||
})
|
||||
async syncOperation() {}
|
||||
}
|
||||
|
||||
const scheduled = Reflect.getMetadata('scheduledOperations', TestHandler.prototype) || {};
|
||||
|
||||
expect(scheduled.syncOperation).toEqual({
|
||||
name: 'syncData',
|
||||
schedule: '*/5 * * * *',
|
||||
timezone: 'UTC',
|
||||
startDate: new Date('2024-01-01'),
|
||||
endDate: new Date('2024-12-31'),
|
||||
});
|
||||
});
|
||||
|
||||
it('should use method name if not provided in options', () => {
|
||||
class TestHandler {
|
||||
@ScheduledOperation({
|
||||
schedule: '0 0 * * *',
|
||||
})
|
||||
async dailyCleanup() {}
|
||||
}
|
||||
|
||||
const scheduled = Reflect.getMetadata('scheduledOperations', TestHandler.prototype) || {};
|
||||
|
||||
expect(scheduled.dailyCleanup).toEqual({
|
||||
name: 'dailyCleanup',
|
||||
schedule: '0 0 * * *',
|
||||
});
|
||||
});
|
||||
|
||||
it('should handle multiple scheduled operations', () => {
|
||||
class TestHandler {
|
||||
@ScheduledOperation({ schedule: '0 * * * *' })
|
||||
async hourlyCheck() {}
|
||||
|
||||
@ScheduledOperation({ schedule: '0 0 * * *' })
|
||||
async dailyReport() {}
|
||||
|
||||
@ScheduledOperation({ schedule: '0 0 * * 0' })
|
||||
async weeklyAnalysis() {}
|
||||
}
|
||||
|
||||
const scheduled = Reflect.getMetadata('scheduledOperations', TestHandler.prototype) || {};
|
||||
|
||||
expect(Object.keys(scheduled)).toHaveLength(3);
|
||||
expect(scheduled.hourlyCheck.schedule).toBe('0 * * * *');
|
||||
expect(scheduled.dailyReport.schedule).toBe('0 0 * * *');
|
||||
expect(scheduled.weeklyAnalysis.schedule).toBe('0 0 * * 0');
|
||||
});
|
||||
});
|
||||
|
||||
describe('decorator composition', () => {
|
||||
it('should work with all decorators combined', () => {
|
||||
@Handler('ComplexHandler')
|
||||
class MyComplexHandler {
|
||||
@Operation('complexOp', { batch: true, batchSize: 5 })
|
||||
@QueueSchedule('0 */6 * * *')
|
||||
async complexOperation(items: any[]) {
|
||||
return items;
|
||||
}
|
||||
|
||||
@ScheduledOperation({
|
||||
name: 'scheduledTask',
|
||||
schedule: '0 0 * * *',
|
||||
timezone: 'America/New_York',
|
||||
})
|
||||
async scheduledTask() {}
|
||||
}
|
||||
|
||||
const instance = new MyComplexHandler();
|
||||
const handlerMetadata = Reflect.getMetadata('handler', instance.constructor);
|
||||
const operations = Reflect.getMetadata('operations', MyComplexHandler.prototype) || {};
|
||||
const queueSchedules = Reflect.getMetadata('queueSchedules', MyComplexHandler.prototype) || {};
|
||||
const scheduledOps = Reflect.getMetadata('scheduledOperations', MyComplexHandler.prototype) || {};
|
||||
|
||||
expect(handlerMetadata.name).toBe('ComplexHandler');
|
||||
expect(operations.complexOperation).toEqual({
|
||||
name: 'complexOp',
|
||||
batch: true,
|
||||
batchSize: 5,
|
||||
batchDelay: undefined,
|
||||
});
|
||||
expect(queueSchedules.complexOperation.cron).toBe('0 */6 * * *');
|
||||
expect(scheduledOps.scheduledTask).toEqual({
|
||||
name: 'scheduledTask',
|
||||
schedule: '0 0 * * *',
|
||||
timezone: 'America/New_York',
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
|
|
@ -1,5 +1,5 @@
|
|||
import { beforeEach, describe, expect, it } from 'bun:test';
|
||||
import { Logger, getLogger, shutdownLoggers, setLoggerConfig } from '../src/logger';
|
||||
import { Logger, getLogger, setLoggerConfig, shutdownLoggers } from '../src/logger';
|
||||
|
||||
describe('Logger', () => {
|
||||
beforeEach(async () => {
|
||||
|
|
@ -71,9 +71,7 @@ describe('Logger', () => {
|
|||
|
||||
it('should set logger config', () => {
|
||||
setLoggerConfig({
|
||||
level: 'debug',
|
||||
pretty: true,
|
||||
redact: ['password'],
|
||||
logLevel: 'debug',
|
||||
});
|
||||
|
||||
const logger = getLogger('test');
|
||||
|
|
@ -83,7 +81,7 @@ describe('Logger', () => {
|
|||
it('should handle shutdown', async () => {
|
||||
await shutdownLoggers(); // Reset first
|
||||
const logger1 = getLogger('test1');
|
||||
const logger2 = getLogger('test2');
|
||||
const _logger2 = getLogger('test2'); // not used, just to ensure multiple loggers can be created
|
||||
|
||||
// Store references
|
||||
const logger1Ref = logger1;
|
||||
|
|
@ -97,7 +95,7 @@ describe('Logger', () => {
|
|||
|
||||
it('should handle log levels', async () => {
|
||||
await shutdownLoggers(); // Reset first
|
||||
setLoggerConfig({ level: 'warn' });
|
||||
setLoggerConfig({ logLevel: 'warn' });
|
||||
const logger = getLogger('test');
|
||||
|
||||
// Just verify that log methods exist and don't throw
|
||||
|
|
|
|||
171
libs/core/queue/test/batch-processor.test.ts
Normal file
171
libs/core/queue/test/batch-processor.test.ts
Normal file
|
|
@ -0,0 +1,171 @@
|
|||
import { describe, expect, it, mock } from 'bun:test';
|
||||
import { processBatchJob, processItems } from '../src/batch-processor';
|
||||
import type { BatchJobData } from '../src/types';
|
||||
|
||||
describe('Batch Processor', () => {
|
||||
const mockLogger = {
|
||||
info: mock(() => {}),
|
||||
error: mock(() => {}),
|
||||
warn: mock(() => {}),
|
||||
debug: mock(() => {}),
|
||||
trace: mock(() => {}),
|
||||
};
|
||||
|
||||
describe('processBatchJob', () => {
|
||||
it('should process all items successfully', async () => {
|
||||
const batchData: BatchJobData = {
|
||||
items: ['item1', 'item2', 'item3'],
|
||||
options: {
|
||||
batchSize: 2,
|
||||
concurrency: 1,
|
||||
},
|
||||
};
|
||||
|
||||
const processor = mock((item: string) => Promise.resolve({ processed: item }));
|
||||
|
||||
const result = await processBatchJob(batchData, processor, mockLogger);
|
||||
|
||||
expect(result.totalItems).toBe(3);
|
||||
expect(result.successful).toBe(3);
|
||||
expect(result.failed).toBe(0);
|
||||
expect(result.errors).toHaveLength(0);
|
||||
expect(processor).toHaveBeenCalledTimes(3);
|
||||
});
|
||||
|
||||
it('should handle partial failures', async () => {
|
||||
const batchData: BatchJobData = {
|
||||
items: ['item1', 'item2', 'item3'],
|
||||
options: {},
|
||||
};
|
||||
|
||||
const processor = mock((item: string) => {
|
||||
if (item === 'item2') {
|
||||
return Promise.reject(new Error('Processing failed'));
|
||||
}
|
||||
return Promise.resolve({ processed: item });
|
||||
});
|
||||
|
||||
const result = await processBatchJob(batchData, processor, mockLogger);
|
||||
|
||||
expect(result.totalItems).toBe(3);
|
||||
expect(result.successful).toBe(2);
|
||||
expect(result.failed).toBe(1);
|
||||
expect(result.errors).toHaveLength(1);
|
||||
expect(result.errors[0].item).toBe('item2');
|
||||
expect(result.errors[0].error).toBe('Processing failed');
|
||||
});
|
||||
|
||||
it('should handle empty items', async () => {
|
||||
const batchData: BatchJobData = {
|
||||
items: [],
|
||||
options: {},
|
||||
};
|
||||
|
||||
const processor = mock(() => Promise.resolve({}));
|
||||
|
||||
const result = await processBatchJob(batchData, processor, mockLogger);
|
||||
|
||||
expect(result.totalItems).toBe(0);
|
||||
expect(result.successful).toBe(0);
|
||||
expect(result.failed).toBe(0);
|
||||
expect(processor).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should track duration', async () => {
|
||||
const batchData: BatchJobData = {
|
||||
items: ['item1'],
|
||||
options: {},
|
||||
};
|
||||
|
||||
const processor = mock(() =>
|
||||
new Promise(resolve => setTimeout(() => resolve({}), 10))
|
||||
);
|
||||
|
||||
const result = await processBatchJob(batchData, processor, mockLogger);
|
||||
|
||||
expect(result.duration).toBeGreaterThan(0);
|
||||
});
|
||||
});
|
||||
|
||||
describe('processItems', () => {
|
||||
it('should process items with default options', async () => {
|
||||
const items = [1, 2, 3, 4, 5];
|
||||
const processor = mock((item: number) => Promise.resolve(item * 2));
|
||||
|
||||
const results = await processItems(items, processor);
|
||||
|
||||
expect(results).toEqual([2, 4, 6, 8, 10]);
|
||||
expect(processor).toHaveBeenCalledTimes(5);
|
||||
});
|
||||
|
||||
it('should process items in batches', async () => {
|
||||
const items = [1, 2, 3, 4, 5];
|
||||
const processor = mock((item: number) => Promise.resolve(item * 2));
|
||||
|
||||
const results = await processItems(items, processor, {
|
||||
batchSize: 2,
|
||||
concurrency: 1,
|
||||
});
|
||||
|
||||
expect(results).toEqual([2, 4, 6, 8, 10]);
|
||||
expect(processor).toHaveBeenCalledTimes(5);
|
||||
});
|
||||
|
||||
it('should handle concurrent processing', async () => {
|
||||
const items = [1, 2, 3, 4];
|
||||
let activeCount = 0;
|
||||
let maxActiveCount = 0;
|
||||
|
||||
const processor = mock(async (item: number) => {
|
||||
activeCount++;
|
||||
maxActiveCount = Math.max(maxActiveCount, activeCount);
|
||||
await new Promise(resolve => setTimeout(resolve, 10));
|
||||
activeCount--;
|
||||
return item * 2;
|
||||
});
|
||||
|
||||
await processItems(items, processor, {
|
||||
batchSize: 10,
|
||||
concurrency: 2,
|
||||
});
|
||||
|
||||
// With concurrency 2, at most 2 items should be processed at once
|
||||
expect(maxActiveCount).toBeLessThanOrEqual(2);
|
||||
expect(processor).toHaveBeenCalledTimes(4);
|
||||
});
|
||||
|
||||
it('should handle empty array', async () => {
|
||||
const processor = mock(() => Promise.resolve({}));
|
||||
const results = await processItems([], processor);
|
||||
|
||||
expect(results).toEqual([]);
|
||||
expect(processor).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should propagate errors', async () => {
|
||||
const items = [1, 2, 3];
|
||||
const processor = mock((item: number) => {
|
||||
if (item === 2) {
|
||||
return Promise.reject(new Error('Process error'));
|
||||
}
|
||||
return Promise.resolve(item);
|
||||
});
|
||||
|
||||
await expect(processItems(items, processor)).rejects.toThrow('Process error');
|
||||
});
|
||||
|
||||
it('should process large batches efficiently', async () => {
|
||||
const items = Array.from({ length: 100 }, (_, i) => i);
|
||||
const processor = mock((item: number) => Promise.resolve(item + 1));
|
||||
|
||||
const results = await processItems(items, processor, {
|
||||
batchSize: 20,
|
||||
concurrency: 5,
|
||||
});
|
||||
|
||||
expect(results).toHaveLength(100);
|
||||
expect(results[0]).toBe(1);
|
||||
expect(results[99]).toBe(100);
|
||||
});
|
||||
});
|
||||
});
|
||||
253
libs/core/queue/test/dlq-handler.test.ts
Normal file
253
libs/core/queue/test/dlq-handler.test.ts
Normal file
|
|
@ -0,0 +1,253 @@
|
|||
import { beforeEach, describe, expect, it, mock } from 'bun:test';
|
||||
import { DeadLetterQueueHandler } from '../src/dlq-handler';
|
||||
import type { Job, Queue } from 'bullmq';
|
||||
import type { RedisConfig } from '../src/types';
|
||||
|
||||
describe('DeadLetterQueueHandler', () => {
|
||||
const mockLogger = {
|
||||
info: mock(() => {}),
|
||||
error: mock(() => {}),
|
||||
warn: mock(() => {}),
|
||||
debug: mock(() => {}),
|
||||
trace: mock(() => {}),
|
||||
};
|
||||
|
||||
const mockQueue = {
|
||||
name: 'test-queue',
|
||||
add: mock(() => Promise.resolve({})),
|
||||
getCompleted: mock(() => Promise.resolve([])),
|
||||
getFailed: mock(() => Promise.resolve([])),
|
||||
getWaiting: mock(() => Promise.resolve([])),
|
||||
} as unknown as Queue;
|
||||
|
||||
const mockRedisConfig: RedisConfig = {
|
||||
host: 'localhost',
|
||||
port: 6379,
|
||||
};
|
||||
|
||||
let dlqHandler: DeadLetterQueueHandler;
|
||||
|
||||
beforeEach(() => {
|
||||
dlqHandler = new DeadLetterQueueHandler(mockQueue, mockRedisConfig, {}, mockLogger);
|
||||
// Reset mocks
|
||||
mockLogger.info = mock(() => {});
|
||||
mockLogger.error = mock(() => {});
|
||||
mockLogger.warn = mock(() => {});
|
||||
});
|
||||
|
||||
describe('handleFailedJob', () => {
|
||||
it('should log failed job details', async () => {
|
||||
const mockJob = {
|
||||
id: 'job-123',
|
||||
name: 'test-job',
|
||||
queueName: 'test-queue',
|
||||
data: {
|
||||
handler: 'testHandler',
|
||||
operation: 'testOp',
|
||||
payload: { test: true },
|
||||
},
|
||||
attemptsMade: 3,
|
||||
failedReason: 'Test error',
|
||||
finishedOn: Date.now(),
|
||||
processedOn: Date.now() - 5000,
|
||||
} as Job;
|
||||
|
||||
const error = new Error('Job processing failed');
|
||||
|
||||
await dlqHandler.handleFailedJob(mockJob, error);
|
||||
|
||||
expect(mockLogger.error).toHaveBeenCalledWith(
|
||||
'Job moved to DLQ',
|
||||
expect.objectContaining({
|
||||
jobId: 'job-123',
|
||||
jobName: 'test-job',
|
||||
error: 'Job processing failed',
|
||||
})
|
||||
);
|
||||
});
|
||||
|
||||
it('should handle jobs without data gracefully', async () => {
|
||||
const mockJob = {
|
||||
id: 'job-123',
|
||||
name: 'test-job',
|
||||
queueName: 'test-queue',
|
||||
data: null,
|
||||
attemptsMade: 1,
|
||||
} as any;
|
||||
|
||||
const error = new Error('No data');
|
||||
|
||||
await dlqHandler.handleFailedJob(mockJob, error);
|
||||
|
||||
expect(mockLogger.error).toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should check alert threshold', async () => {
|
||||
const mockJob = {
|
||||
id: 'job-123',
|
||||
name: 'critical-job',
|
||||
queueName: 'critical-queue',
|
||||
data: { handler: 'critical', operation: 'process' },
|
||||
attemptsMade: 3,
|
||||
opts: { attempts: 3 },
|
||||
} as Job;
|
||||
|
||||
const error = new Error('Critical failure');
|
||||
|
||||
await dlqHandler.handleFailedJob(mockJob, error);
|
||||
|
||||
expect(mockLogger.warn).toHaveBeenCalled();
|
||||
});
|
||||
});
|
||||
|
||||
describe('getStats', () => {
|
||||
it('should return DLQ statistics', async () => {
|
||||
const mockJobs = [
|
||||
{
|
||||
id: 'job-1',
|
||||
data: { originalJob: { name: 'process' } },
|
||||
timestamp: Date.now(),
|
||||
},
|
||||
{
|
||||
id: 'job-2',
|
||||
data: { originalJob: { name: 'process' } },
|
||||
timestamp: Date.now() - 100000,
|
||||
},
|
||||
{
|
||||
id: 'job-3',
|
||||
data: { originalJob: { name: 'validate' } },
|
||||
timestamp: Date.now() - 200000,
|
||||
},
|
||||
];
|
||||
|
||||
(mockQueue.getCompleted as any) = mock(() => Promise.resolve(mockJobs));
|
||||
(mockQueue.getFailed as any) = mock(() => Promise.resolve([]));
|
||||
(mockQueue.getWaiting as any) = mock(() => Promise.resolve([]));
|
||||
|
||||
const stats = await dlqHandler.getStats();
|
||||
|
||||
expect(stats.total).toBe(3);
|
||||
expect(stats.byJobName['process']).toBe(2);
|
||||
expect(stats.byJobName['validate']).toBe(1);
|
||||
expect(stats.oldestJob).toBeDefined();
|
||||
});
|
||||
|
||||
it('should handle empty DLQ', async () => {
|
||||
const stats = await dlqHandler.getStats();
|
||||
|
||||
expect(stats.total).toBe(0);
|
||||
expect(stats.byJobName).toEqual({});
|
||||
expect(stats.oldestJob).toBeNull();
|
||||
});
|
||||
});
|
||||
|
||||
describe('retryDLQJobs', () => {
|
||||
it('should retry jobs from DLQ', async () => {
|
||||
const mockDLQJobs = [
|
||||
{
|
||||
id: 'dlq-1',
|
||||
data: {
|
||||
originalJob: {
|
||||
id: 'orig-1',
|
||||
name: 'retry-job',
|
||||
data: { test: true },
|
||||
opts: {},
|
||||
},
|
||||
},
|
||||
remove: mock(() => Promise.resolve()),
|
||||
},
|
||||
];
|
||||
|
||||
(dlqHandler as any).dlq = {
|
||||
getCompleted: mock(() => Promise.resolve(mockDLQJobs)),
|
||||
};
|
||||
|
||||
const retriedCount = await dlqHandler.retryDLQJobs(1);
|
||||
|
||||
expect(retriedCount).toBe(1);
|
||||
expect(mockQueue.add).toHaveBeenCalledWith(
|
||||
'retry-job',
|
||||
{ test: true },
|
||||
expect.objectContaining({ delay: expect.any(Number) })
|
||||
);
|
||||
expect(mockDLQJobs[0].remove).toHaveBeenCalled();
|
||||
});
|
||||
});
|
||||
|
||||
describe('cleanup', () => {
|
||||
it('should clean up old DLQ entries', async () => {
|
||||
const oldJob = {
|
||||
id: 'old-1',
|
||||
timestamp: Date.now() - 8 * 24 * 60 * 60 * 1000, // 8 days old
|
||||
remove: mock(() => Promise.resolve()),
|
||||
};
|
||||
|
||||
const newJob = {
|
||||
id: 'new-1',
|
||||
timestamp: Date.now() - 1 * 24 * 60 * 60 * 1000, // 1 day old
|
||||
remove: mock(() => Promise.resolve()),
|
||||
};
|
||||
|
||||
(dlqHandler as any).dlq = {
|
||||
getCompleted: mock(() => Promise.resolve([oldJob, newJob])),
|
||||
};
|
||||
|
||||
const removedCount = await dlqHandler.cleanup();
|
||||
|
||||
expect(removedCount).toBe(1);
|
||||
expect(oldJob.remove).toHaveBeenCalled();
|
||||
expect(newJob.remove).not.toHaveBeenCalled();
|
||||
});
|
||||
});
|
||||
|
||||
describe('inspectFailedJobs', () => {
|
||||
it('should return formatted failed jobs', async () => {
|
||||
const mockDLQJobs = [
|
||||
{
|
||||
data: {
|
||||
originalJob: {
|
||||
id: 'orig-1',
|
||||
name: 'test-job',
|
||||
data: { test: true },
|
||||
attemptsMade: 3,
|
||||
},
|
||||
error: {
|
||||
message: 'Test error',
|
||||
stack: 'Error stack',
|
||||
},
|
||||
movedToDLQAt: '2024-01-01T00:00:00Z',
|
||||
},
|
||||
},
|
||||
];
|
||||
|
||||
(dlqHandler as any).dlq = {
|
||||
getCompleted: mock(() => Promise.resolve(mockDLQJobs)),
|
||||
};
|
||||
|
||||
const inspected = await dlqHandler.inspectFailedJobs(10);
|
||||
|
||||
expect(inspected).toHaveLength(1);
|
||||
expect(inspected[0]).toEqual({
|
||||
id: 'orig-1',
|
||||
name: 'test-job',
|
||||
data: { test: true },
|
||||
error: { message: 'Test error', stack: 'Error stack' },
|
||||
failedAt: '2024-01-01T00:00:00Z',
|
||||
attempts: 3,
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('shutdown', () => {
|
||||
it('should close DLQ and clear state', async () => {
|
||||
const mockClose = mock(() => Promise.resolve());
|
||||
(dlqHandler as any).dlq = {
|
||||
close: mockClose,
|
||||
};
|
||||
|
||||
await dlqHandler.shutdown();
|
||||
|
||||
expect(mockClose).toHaveBeenCalled();
|
||||
});
|
||||
});
|
||||
})
|
||||
125
libs/core/queue/test/queue-class.test.ts
Normal file
125
libs/core/queue/test/queue-class.test.ts
Normal file
|
|
@ -0,0 +1,125 @@
|
|||
import { beforeEach, describe, expect, it, mock } from 'bun:test';
|
||||
import { Queue } from '../src/queue';
|
||||
import type { RedisConfig, JobData, QueueWorkerConfig } from '../src/types';
|
||||
|
||||
describe('Queue Class', () => {
|
||||
const mockRedisConfig: RedisConfig = {
|
||||
host: 'localhost',
|
||||
port: 6379,
|
||||
};
|
||||
|
||||
describe('basic functionality', () => {
|
||||
it('should create queue with minimal config', () => {
|
||||
const queue = new Queue('test-queue', mockRedisConfig);
|
||||
expect(queue).toBeDefined();
|
||||
expect(queue.getName()).toBe('test-queue');
|
||||
});
|
||||
|
||||
it('should create queue with default job options', () => {
|
||||
const defaultJobOptions = {
|
||||
attempts: 5,
|
||||
backoff: { type: 'exponential' as const, delay: 2000 },
|
||||
};
|
||||
|
||||
const queue = new Queue('test-queue', mockRedisConfig, defaultJobOptions);
|
||||
expect(queue).toBeDefined();
|
||||
expect(queue.getName()).toBe('test-queue');
|
||||
});
|
||||
|
||||
it('should create queue with custom logger', () => {
|
||||
const mockLogger = {
|
||||
info: mock(() => {}),
|
||||
error: mock(() => {}),
|
||||
warn: mock(() => {}),
|
||||
debug: mock(() => {}),
|
||||
trace: mock(() => {}),
|
||||
};
|
||||
|
||||
const queue = new Queue('test-queue', mockRedisConfig, {}, {}, mockLogger);
|
||||
expect(queue).toBeDefined();
|
||||
});
|
||||
|
||||
it('should create queue with worker config', () => {
|
||||
const workerConfig: QueueWorkerConfig = {
|
||||
workers: 2,
|
||||
concurrency: 5,
|
||||
startWorker: false, // Don't actually start workers
|
||||
serviceName: 'test-service',
|
||||
};
|
||||
|
||||
const queue = new Queue('test-queue', mockRedisConfig, {}, workerConfig);
|
||||
expect(queue).toBeDefined();
|
||||
});
|
||||
});
|
||||
|
||||
describe('queue naming and utilities', () => {
|
||||
it('should return queue name', () => {
|
||||
const queue = new Queue('my-test-queue', mockRedisConfig);
|
||||
expect(queue.getName()).toBe('my-test-queue');
|
||||
});
|
||||
|
||||
it('should get bull queue instance', () => {
|
||||
const queue = new Queue('test-queue', mockRedisConfig);
|
||||
const bullQueue = queue.getBullQueue();
|
||||
expect(bullQueue).toBeDefined();
|
||||
});
|
||||
|
||||
it('should create child logger with logger that supports child', () => {
|
||||
const mockChildLogger = {
|
||||
info: mock(() => {}),
|
||||
error: mock(() => {}),
|
||||
warn: mock(() => {}),
|
||||
debug: mock(() => {}),
|
||||
trace: mock(() => {}),
|
||||
};
|
||||
|
||||
const mockLogger = {
|
||||
info: mock(() => {}),
|
||||
error: mock(() => {}),
|
||||
warn: mock(() => {}),
|
||||
debug: mock(() => {}),
|
||||
trace: mock(() => {}),
|
||||
child: mock(() => mockChildLogger),
|
||||
};
|
||||
|
||||
const queue = new Queue('test-queue', mockRedisConfig, {}, {}, mockLogger);
|
||||
const childLogger = queue.createChildLogger('batch', { batchId: '123' });
|
||||
|
||||
expect(childLogger).toBe(mockChildLogger);
|
||||
expect(mockLogger.child).toHaveBeenCalledWith('batch', { batchId: '123' });
|
||||
});
|
||||
|
||||
it('should fallback to main logger if child not supported', () => {
|
||||
const mockLogger = {
|
||||
info: mock(() => {}),
|
||||
error: mock(() => {}),
|
||||
warn: mock(() => {}),
|
||||
debug: mock(() => {}),
|
||||
trace: mock(() => {}),
|
||||
};
|
||||
|
||||
const queue = new Queue('test-queue', mockRedisConfig, {}, {}, mockLogger);
|
||||
const childLogger = queue.createChildLogger('batch', { batchId: '123' });
|
||||
|
||||
expect(childLogger).toBe(mockLogger);
|
||||
});
|
||||
});
|
||||
|
||||
describe('worker count methods', () => {
|
||||
it('should get worker count when no workers', () => {
|
||||
const queue = new Queue('test-queue', mockRedisConfig);
|
||||
expect(queue.getWorkerCount()).toBe(0);
|
||||
});
|
||||
|
||||
it('should handle worker count with workers config', () => {
|
||||
const workerConfig: QueueWorkerConfig = {
|
||||
workers: 3,
|
||||
startWorker: false, // Don't actually start
|
||||
};
|
||||
|
||||
const queue = new Queue('test-queue', mockRedisConfig, {}, workerConfig);
|
||||
// Workers aren't actually started with startWorker: false
|
||||
expect(queue.getWorkerCount()).toBe(0);
|
||||
});
|
||||
});
|
||||
});
|
||||
232
libs/core/queue/test/queue-manager.test.ts
Normal file
232
libs/core/queue/test/queue-manager.test.ts
Normal file
|
|
@ -0,0 +1,232 @@
|
|||
import { beforeEach, describe, expect, it, mock } from 'bun:test';
|
||||
import { QueueManager } from '../src/queue-manager';
|
||||
import type { RedisConfig, QueueManagerConfig } from '../src/types';
|
||||
|
||||
describe.skip('QueueManager', () => {
|
||||
// Skipping these tests as they require real Redis connection
|
||||
// TODO: Create mock implementation or use testcontainers
|
||||
|
||||
const mockRedisConfig: RedisConfig = {
|
||||
host: 'localhost',
|
||||
port: 6379,
|
||||
};
|
||||
|
||||
const mockLogger = {
|
||||
info: mock(() => {}),
|
||||
error: mock(() => {}),
|
||||
warn: mock(() => {}),
|
||||
debug: mock(() => {}),
|
||||
trace: mock(() => {}),
|
||||
};
|
||||
|
||||
describe('constructor', () => {
|
||||
it('should create queue manager with default config', () => {
|
||||
const manager = new QueueManager(mockRedisConfig);
|
||||
expect(manager).toBeDefined();
|
||||
});
|
||||
|
||||
it('should create queue manager with custom config', () => {
|
||||
const config: QueueManagerConfig = {
|
||||
defaultJobOptions: {
|
||||
attempts: 5,
|
||||
removeOnComplete: 50,
|
||||
},
|
||||
enableMetrics: true,
|
||||
enableScheduler: true,
|
||||
};
|
||||
|
||||
const manager = new QueueManager(mockRedisConfig, config, mockLogger);
|
||||
expect(manager).toBeDefined();
|
||||
});
|
||||
});
|
||||
|
||||
describe('queue operations', () => {
|
||||
let manager: QueueManager;
|
||||
|
||||
beforeEach(() => {
|
||||
manager = new QueueManager(mockRedisConfig, {}, mockLogger);
|
||||
});
|
||||
|
||||
it('should create or get queue', () => {
|
||||
const queue = manager.createQueue('test-queue');
|
||||
expect(queue).toBeDefined();
|
||||
expect(queue.getName()).toBe('test-queue');
|
||||
});
|
||||
|
||||
it('should return same queue instance', () => {
|
||||
const queue1 = manager.createQueue('test-queue');
|
||||
const queue2 = manager.createQueue('test-queue');
|
||||
expect(queue1).toBe(queue2);
|
||||
});
|
||||
|
||||
it('should create queue with options', () => {
|
||||
const queue = manager.createQueue('test-queue', {
|
||||
concurrency: 5,
|
||||
workers: 2,
|
||||
});
|
||||
expect(queue).toBeDefined();
|
||||
});
|
||||
|
||||
it('should get existing queue', () => {
|
||||
manager.createQueue('test-queue');
|
||||
const queue = manager.getQueue('test-queue');
|
||||
expect(queue).toBeDefined();
|
||||
});
|
||||
|
||||
it('should return undefined for non-existent queue', () => {
|
||||
const queue = manager.getQueue('non-existent');
|
||||
expect(queue).toBeUndefined();
|
||||
});
|
||||
|
||||
it('should list all queues', () => {
|
||||
manager.createQueue('queue1');
|
||||
manager.createQueue('queue2');
|
||||
const queues = manager.getQueues();
|
||||
expect(queues).toHaveLength(2);
|
||||
expect(queues.map(q => q.getName())).toContain('queue1');
|
||||
expect(queues.map(q => q.getName())).toContain('queue2');
|
||||
});
|
||||
|
||||
it('should check if queue exists', () => {
|
||||
manager.createQueue('test-queue');
|
||||
expect(manager.hasQueue('test-queue')).toBe(true);
|
||||
expect(manager.hasQueue('non-existent')).toBe(false);
|
||||
});
|
||||
});
|
||||
|
||||
describe('cache operations', () => {
|
||||
let manager: QueueManager;
|
||||
|
||||
beforeEach(() => {
|
||||
manager = new QueueManager(mockRedisConfig, {}, mockLogger);
|
||||
});
|
||||
|
||||
it('should create cache', () => {
|
||||
const cache = manager.createCache('test-cache');
|
||||
expect(cache).toBeDefined();
|
||||
});
|
||||
|
||||
it('should get existing cache', () => {
|
||||
manager.createCache('test-cache');
|
||||
const cache = manager.getCache('test-cache');
|
||||
expect(cache).toBeDefined();
|
||||
});
|
||||
|
||||
it('should return same cache instance', () => {
|
||||
const cache1 = manager.createCache('test-cache');
|
||||
const cache2 = manager.createCache('test-cache');
|
||||
expect(cache1).toBe(cache2);
|
||||
});
|
||||
});
|
||||
|
||||
describe('service discovery', () => {
|
||||
let manager: QueueManager;
|
||||
|
||||
beforeEach(() => {
|
||||
manager = new QueueManager(mockRedisConfig, {}, mockLogger);
|
||||
});
|
||||
|
||||
it('should configure service name', () => {
|
||||
manager.configureService('test-service');
|
||||
expect((manager as any).serviceName).toBe('test-service');
|
||||
});
|
||||
|
||||
it('should register queue route', () => {
|
||||
manager.configureService('test-service');
|
||||
manager.registerQueueRoute({
|
||||
service: 'remote-service',
|
||||
handler: 'process',
|
||||
queueName: '{remote-service_process}',
|
||||
});
|
||||
|
||||
expect(manager.hasRoute('remote-service', 'process')).toBe(true);
|
||||
});
|
||||
|
||||
it('should send to remote queue', async () => {
|
||||
manager.configureService('test-service');
|
||||
manager.registerQueueRoute({
|
||||
service: 'remote-service',
|
||||
handler: 'process',
|
||||
queueName: '{remote-service_process}',
|
||||
});
|
||||
|
||||
const jobId = await manager.sendToQueue('remote-service', 'process', { data: 'test' });
|
||||
expect(jobId).toBeDefined();
|
||||
});
|
||||
|
||||
it('should send to local queue', async () => {
|
||||
manager.configureService('test-service');
|
||||
manager.createQueue('{test-service_process}');
|
||||
|
||||
const jobId = await manager.sendToQueue('test-service', 'process', { data: 'test' });
|
||||
expect(jobId).toBeDefined();
|
||||
});
|
||||
});
|
||||
|
||||
describe('shutdown', () => {
|
||||
it('should shutdown gracefully', async () => {
|
||||
const manager = new QueueManager(mockRedisConfig, {}, mockLogger);
|
||||
manager.createQueue('test-queue');
|
||||
|
||||
await manager.shutdown();
|
||||
expect((manager as any).isShuttingDown).toBe(true);
|
||||
});
|
||||
|
||||
it('should handle multiple shutdown calls', async () => {
|
||||
const manager = new QueueManager(mockRedisConfig, {}, mockLogger);
|
||||
|
||||
const promise1 = manager.shutdown();
|
||||
const promise2 = manager.shutdown();
|
||||
|
||||
expect(promise1).toBe(promise2);
|
||||
await promise1;
|
||||
});
|
||||
});
|
||||
|
||||
describe('metrics', () => {
|
||||
it('should get global stats', async () => {
|
||||
const manager = new QueueManager(mockRedisConfig, {
|
||||
enableMetrics: true,
|
||||
}, mockLogger);
|
||||
|
||||
manager.createQueue('queue1');
|
||||
manager.createQueue('queue2');
|
||||
|
||||
const stats = await manager.getGlobalStats();
|
||||
expect(stats).toBeDefined();
|
||||
expect(stats.totalQueues).toBe(2);
|
||||
});
|
||||
|
||||
it('should get queue stats', async () => {
|
||||
const manager = new QueueManager(mockRedisConfig, {
|
||||
enableMetrics: true,
|
||||
}, mockLogger);
|
||||
|
||||
const queue = manager.createQueue('test-queue');
|
||||
const stats = await manager.getQueueStats('test-queue');
|
||||
|
||||
expect(stats).toBeDefined();
|
||||
expect(stats.name).toBe('test-queue');
|
||||
});
|
||||
});
|
||||
|
||||
describe('rate limiting', () => {
|
||||
it('should apply rate limit rules', () => {
|
||||
const manager = new QueueManager(mockRedisConfig, {
|
||||
rateLimiter: {
|
||||
rules: [
|
||||
{
|
||||
name: 'api-limit',
|
||||
max: 100,
|
||||
duration: 60000,
|
||||
scope: 'global',
|
||||
},
|
||||
],
|
||||
},
|
||||
}, mockLogger);
|
||||
|
||||
const rateLimiter = (manager as any).rateLimiter;
|
||||
expect(rateLimiter).toBeDefined();
|
||||
});
|
||||
});
|
||||
});
|
||||
185
libs/core/queue/test/queue-metrics.test.ts
Normal file
185
libs/core/queue/test/queue-metrics.test.ts
Normal file
|
|
@ -0,0 +1,185 @@
|
|||
import { beforeEach, describe, expect, it, mock } from 'bun:test';
|
||||
import { QueueMetricsCollector } from '../src/queue-metrics';
|
||||
import type { Queue, QueueEvents } from 'bullmq';
|
||||
|
||||
describe('QueueMetricsCollector', () => {
|
||||
let metrics: QueueMetricsCollector;
|
||||
|
||||
const mockQueue = {
|
||||
name: 'test-queue',
|
||||
getWaitingCount: mock(() => Promise.resolve(0)),
|
||||
getActiveCount: mock(() => Promise.resolve(0)),
|
||||
getCompletedCount: mock(() => Promise.resolve(0)),
|
||||
getFailedCount: mock(() => Promise.resolve(0)),
|
||||
getDelayedCount: mock(() => Promise.resolve(0)),
|
||||
isPaused: mock(() => Promise.resolve(false)),
|
||||
getWaiting: mock(() => Promise.resolve([])),
|
||||
} as unknown as Queue;
|
||||
|
||||
const mockQueueEvents = {
|
||||
on: mock(() => {}),
|
||||
} as unknown as QueueEvents;
|
||||
|
||||
beforeEach(() => {
|
||||
metrics = new QueueMetricsCollector(mockQueue, mockQueueEvents);
|
||||
});
|
||||
|
||||
describe('collect metrics', () => {
|
||||
it('should collect current metrics', async () => {
|
||||
(mockQueue.getWaitingCount as any) = mock(() => Promise.resolve(5));
|
||||
(mockQueue.getActiveCount as any) = mock(() => Promise.resolve(2));
|
||||
(mockQueue.getCompletedCount as any) = mock(() => Promise.resolve(100));
|
||||
(mockQueue.getFailedCount as any) = mock(() => Promise.resolve(3));
|
||||
(mockQueue.getDelayedCount as any) = mock(() => Promise.resolve(1));
|
||||
|
||||
const result = await metrics.collect();
|
||||
|
||||
expect(result.waiting).toBe(5);
|
||||
expect(result.active).toBe(2);
|
||||
expect(result.completed).toBe(100);
|
||||
expect(result.failed).toBe(3);
|
||||
expect(result.delayed).toBe(1);
|
||||
expect(result.isHealthy).toBe(true);
|
||||
});
|
||||
|
||||
it('should detect health issues', async () => {
|
||||
(mockQueue.getWaitingCount as any) = mock(() => Promise.resolve(2000)); // High backlog
|
||||
(mockQueue.getActiveCount as any) = mock(() => Promise.resolve(150)); // High active
|
||||
(mockQueue.getFailedCount as any) = mock(() => Promise.resolve(50));
|
||||
|
||||
const result = await metrics.collect();
|
||||
|
||||
expect(result.isHealthy).toBe(false);
|
||||
expect(result.healthIssues.length).toBeGreaterThan(0);
|
||||
expect(result.healthIssues.some(issue => issue.includes('queue backlog'))).toBe(true);
|
||||
expect(result.healthIssues.some(issue => issue.includes('active jobs'))).toBe(true);
|
||||
});
|
||||
|
||||
it('should handle paused queue', async () => {
|
||||
(mockQueue.getWaitingCount as any) = mock(() => Promise.resolve(10));
|
||||
(mockQueue.isPaused as any) = mock(() => Promise.resolve(true));
|
||||
|
||||
const result = await metrics.collect();
|
||||
|
||||
expect(result.paused).toBe(10);
|
||||
});
|
||||
});
|
||||
|
||||
describe('processing time metrics', () => {
|
||||
it('should calculate processing time metrics', async () => {
|
||||
// Simulate some processing times
|
||||
(metrics as any).processingTimes = [1000, 2000, 3000, 4000, 5000];
|
||||
|
||||
const result = await metrics.collect();
|
||||
|
||||
expect(result.processingTime.avg).toBe(3000);
|
||||
expect(result.processingTime.min).toBe(1000);
|
||||
expect(result.processingTime.max).toBe(5000);
|
||||
expect(result.processingTime.p95).toBe(5000);
|
||||
});
|
||||
|
||||
it('should handle no processing times', async () => {
|
||||
const result = await metrics.collect();
|
||||
|
||||
expect(result.processingTime.avg).toBe(0);
|
||||
expect(result.processingTime.min).toBe(0);
|
||||
expect(result.processingTime.max).toBe(0);
|
||||
});
|
||||
});
|
||||
|
||||
describe('throughput metrics', () => {
|
||||
it('should calculate throughput', async () => {
|
||||
// Simulate completed and failed timestamps
|
||||
const now = Date.now();
|
||||
(metrics as any).completedTimestamps = [
|
||||
now - 30000, // 30 seconds ago
|
||||
now - 20000,
|
||||
now - 10000,
|
||||
];
|
||||
(metrics as any).failedTimestamps = [
|
||||
now - 25000,
|
||||
now - 5000,
|
||||
];
|
||||
|
||||
const result = await metrics.collect();
|
||||
|
||||
expect(result.throughput.completedPerMinute).toBe(3);
|
||||
expect(result.throughput.failedPerMinute).toBe(2);
|
||||
expect(result.throughput.totalPerMinute).toBe(5);
|
||||
});
|
||||
});
|
||||
|
||||
describe('getReport', () => {
|
||||
it('should generate formatted report', async () => {
|
||||
(mockQueue.getWaitingCount as any) = mock(() => Promise.resolve(5));
|
||||
(mockQueue.getActiveCount as any) = mock(() => Promise.resolve(2));
|
||||
(mockQueue.getCompletedCount as any) = mock(() => Promise.resolve(100));
|
||||
(mockQueue.getFailedCount as any) = mock(() => Promise.resolve(3));
|
||||
|
||||
const report = await metrics.getReport();
|
||||
|
||||
expect(report).toContain('Queue Metrics Report');
|
||||
expect(report).toContain('✅ Healthy');
|
||||
expect(report).toContain('Waiting: 5');
|
||||
expect(report).toContain('Active: 2');
|
||||
expect(report).toContain('Completed: 100');
|
||||
expect(report).toContain('Failed: 3');
|
||||
});
|
||||
});
|
||||
|
||||
describe('getPrometheusMetrics', () => {
|
||||
it('should generate Prometheus formatted metrics', async () => {
|
||||
(mockQueue.getWaitingCount as any) = mock(() => Promise.resolve(5));
|
||||
(mockQueue.getActiveCount as any) = mock(() => Promise.resolve(2));
|
||||
(mockQueue.getCompletedCount as any) = mock(() => Promise.resolve(100));
|
||||
(mockQueue.getFailedCount as any) = mock(() => Promise.resolve(3));
|
||||
|
||||
const prometheusMetrics = await metrics.getPrometheusMetrics();
|
||||
|
||||
expect(prometheusMetrics).toContain('# HELP queue_jobs_total');
|
||||
expect(prometheusMetrics).toContain('queue_jobs_total{queue="test-queue",status="waiting"} 5');
|
||||
expect(prometheusMetrics).toContain('queue_jobs_total{queue="test-queue",status="active"} 2');
|
||||
expect(prometheusMetrics).toContain('queue_jobs_total{queue="test-queue",status="completed"} 100');
|
||||
expect(prometheusMetrics).toContain('# HELP queue_processing_time_seconds');
|
||||
expect(prometheusMetrics).toContain('# HELP queue_throughput_per_minute');
|
||||
expect(prometheusMetrics).toContain('# HELP queue_health');
|
||||
});
|
||||
});
|
||||
|
||||
describe('event listeners', () => {
|
||||
it('should setup event listeners on construction', () => {
|
||||
const newMockQueueEvents = {
|
||||
on: mock(() => {}),
|
||||
} as unknown as QueueEvents;
|
||||
|
||||
new QueueMetricsCollector(mockQueue, newMockQueueEvents);
|
||||
|
||||
expect(newMockQueueEvents.on).toHaveBeenCalledWith('completed', expect.any(Function));
|
||||
expect(newMockQueueEvents.on).toHaveBeenCalledWith('failed', expect.any(Function));
|
||||
expect(newMockQueueEvents.on).toHaveBeenCalledWith('active', expect.any(Function));
|
||||
});
|
||||
});
|
||||
|
||||
describe('oldest waiting job', () => {
|
||||
it('should get oldest waiting job date', async () => {
|
||||
const oldJob = {
|
||||
timestamp: Date.now() - 60000, // 1 minute ago
|
||||
};
|
||||
|
||||
(mockQueue.getWaiting as any) = mock(() => Promise.resolve([oldJob]));
|
||||
|
||||
const result = await metrics.collect();
|
||||
|
||||
expect(result.oldestWaitingJob).toBeDefined();
|
||||
expect(result.oldestWaitingJob).toBeInstanceOf(Date);
|
||||
});
|
||||
|
||||
it('should return null when no waiting jobs', async () => {
|
||||
(mockQueue.getWaiting as any) = mock(() => Promise.resolve([]));
|
||||
|
||||
const result = await metrics.collect();
|
||||
|
||||
expect(result.oldestWaitingJob).toBeNull();
|
||||
});
|
||||
});
|
||||
})
|
||||
336
libs/core/queue/test/rate-limiter.test.ts
Normal file
336
libs/core/queue/test/rate-limiter.test.ts
Normal file
|
|
@ -0,0 +1,336 @@
|
|||
import { beforeEach, describe, expect, it, mock } from 'bun:test';
|
||||
import { QueueRateLimiter } from '../src/rate-limiter';
|
||||
import type { RateLimitRule } from '../src/types';
|
||||
|
||||
describe('QueueRateLimiter', () => {
|
||||
const mockRedisClient = {
|
||||
host: 'localhost',
|
||||
port: 6379,
|
||||
};
|
||||
|
||||
const mockLogger = {
|
||||
info: mock(() => {}),
|
||||
error: mock(() => {}),
|
||||
warn: mock(() => {}),
|
||||
debug: mock(() => {}),
|
||||
};
|
||||
|
||||
describe('constructor', () => {
|
||||
it('should create rate limiter', () => {
|
||||
const limiter = new QueueRateLimiter(mockRedisClient, mockLogger);
|
||||
expect(limiter).toBeDefined();
|
||||
});
|
||||
});
|
||||
|
||||
describe('addRule', () => {
|
||||
it('should add a rate limit rule', () => {
|
||||
const limiter = new QueueRateLimiter(mockRedisClient, mockLogger);
|
||||
|
||||
const rule: RateLimitRule = {
|
||||
level: 'queue',
|
||||
queueName: 'test-queue',
|
||||
config: {
|
||||
points: 100,
|
||||
duration: 60,
|
||||
},
|
||||
};
|
||||
|
||||
limiter.addRule(rule);
|
||||
|
||||
expect(mockLogger.info).toHaveBeenCalledWith(
|
||||
'Rate limit rule added',
|
||||
expect.objectContaining({
|
||||
level: 'queue',
|
||||
queueName: 'test-queue',
|
||||
})
|
||||
);
|
||||
});
|
||||
|
||||
it('should add operation-level rule', () => {
|
||||
const limiter = new QueueRateLimiter(mockRedisClient, mockLogger);
|
||||
|
||||
const rule: RateLimitRule = {
|
||||
level: 'operation',
|
||||
queueName: 'test-queue',
|
||||
handler: 'user-service',
|
||||
operation: 'process-user',
|
||||
config: {
|
||||
points: 10,
|
||||
duration: 60,
|
||||
blockDuration: 300,
|
||||
},
|
||||
};
|
||||
|
||||
limiter.addRule(rule);
|
||||
const rules = limiter.getRules();
|
||||
expect(rules).toHaveLength(1);
|
||||
expect(rules[0]).toEqual(rule);
|
||||
});
|
||||
});
|
||||
|
||||
describe('checkLimit', () => {
|
||||
it('should allow when no rules apply', async () => {
|
||||
const limiter = new QueueRateLimiter(mockRedisClient, mockLogger);
|
||||
|
||||
const result = await limiter.checkLimit('test-queue', 'handler', 'operation');
|
||||
expect(result.allowed).toBe(true);
|
||||
expect(result.appliedRule).toBeUndefined();
|
||||
});
|
||||
|
||||
it('should check against global rule', async () => {
|
||||
const limiter = new QueueRateLimiter(mockRedisClient, mockLogger);
|
||||
|
||||
const globalRule: RateLimitRule = {
|
||||
level: 'global',
|
||||
config: { points: 1000, duration: 60 },
|
||||
};
|
||||
|
||||
limiter.addRule(globalRule);
|
||||
|
||||
const result = await limiter.checkLimit('any-queue', 'any-handler', 'any-op');
|
||||
expect(result.appliedRule).toEqual(globalRule);
|
||||
});
|
||||
|
||||
it('should prefer more specific rules', async () => {
|
||||
const limiter = new QueueRateLimiter(mockRedisClient, mockLogger);
|
||||
|
||||
// Add rules from least to most specific
|
||||
const globalRule: RateLimitRule = {
|
||||
level: 'global',
|
||||
config: { points: 1000, duration: 60 },
|
||||
};
|
||||
|
||||
const queueRule: RateLimitRule = {
|
||||
level: 'queue',
|
||||
queueName: 'test-queue',
|
||||
config: { points: 100, duration: 60 },
|
||||
};
|
||||
|
||||
const handlerRule: RateLimitRule = {
|
||||
level: 'handler',
|
||||
queueName: 'test-queue',
|
||||
handler: 'test-handler',
|
||||
config: { points: 50, duration: 60 },
|
||||
};
|
||||
|
||||
const operationRule: RateLimitRule = {
|
||||
level: 'operation',
|
||||
queueName: 'test-queue',
|
||||
handler: 'test-handler',
|
||||
operation: 'test-op',
|
||||
config: { points: 10, duration: 60 },
|
||||
};
|
||||
|
||||
limiter.addRule(globalRule);
|
||||
limiter.addRule(queueRule);
|
||||
limiter.addRule(handlerRule);
|
||||
limiter.addRule(operationRule);
|
||||
|
||||
// Operation level should take precedence
|
||||
const result = await limiter.checkLimit('test-queue', 'test-handler', 'test-op');
|
||||
expect(result.appliedRule?.level).toBe('operation');
|
||||
|
||||
// Handler level for different operation
|
||||
const result2 = await limiter.checkLimit('test-queue', 'test-handler', 'other-op');
|
||||
expect(result2.appliedRule?.level).toBe('handler');
|
||||
|
||||
// Queue level for different handler
|
||||
const result3 = await limiter.checkLimit('test-queue', 'other-handler', 'some-op');
|
||||
expect(result3.appliedRule?.level).toBe('queue');
|
||||
|
||||
// Global for different queue
|
||||
const result4 = await limiter.checkLimit('other-queue', 'handler', 'op');
|
||||
expect(result4.appliedRule?.level).toBe('global');
|
||||
});
|
||||
});
|
||||
|
||||
describe('getStatus', () => {
|
||||
it('should get rate limit status', async () => {
|
||||
const limiter = new QueueRateLimiter(mockRedisClient, mockLogger);
|
||||
|
||||
const rule: RateLimitRule = {
|
||||
level: 'queue',
|
||||
queueName: 'test-queue',
|
||||
config: { points: 100, duration: 60 },
|
||||
};
|
||||
|
||||
limiter.addRule(rule);
|
||||
|
||||
const status = await limiter.getStatus('test-queue', 'handler', 'operation');
|
||||
|
||||
expect(status.queueName).toBe('test-queue');
|
||||
expect(status.handler).toBe('handler');
|
||||
expect(status.operation).toBe('operation');
|
||||
expect(status.appliedRule).toEqual(rule);
|
||||
});
|
||||
|
||||
it('should return status without rule when none apply', async () => {
|
||||
const limiter = new QueueRateLimiter(mockRedisClient, mockLogger);
|
||||
|
||||
const status = await limiter.getStatus('test-queue', 'handler', 'operation');
|
||||
|
||||
expect(status.queueName).toBe('test-queue');
|
||||
expect(status.appliedRule).toBeUndefined();
|
||||
expect(status.limit).toBeUndefined();
|
||||
});
|
||||
});
|
||||
|
||||
describe('reset', () => {
|
||||
it('should reset rate limits for specific operation', async () => {
|
||||
const limiter = new QueueRateLimiter(mockRedisClient, mockLogger);
|
||||
|
||||
const rule: RateLimitRule = {
|
||||
level: 'operation',
|
||||
queueName: 'test-queue',
|
||||
handler: 'test-handler',
|
||||
operation: 'test-op',
|
||||
config: { points: 10, duration: 60 },
|
||||
};
|
||||
|
||||
limiter.addRule(rule);
|
||||
|
||||
await limiter.reset('test-queue', 'test-handler', 'test-op');
|
||||
|
||||
expect(mockLogger.info).toHaveBeenCalledWith(
|
||||
'Rate limits reset',
|
||||
expect.objectContaining({
|
||||
queueName: 'test-queue',
|
||||
handler: 'test-handler',
|
||||
operation: 'test-op',
|
||||
})
|
||||
);
|
||||
});
|
||||
|
||||
it('should warn about broad reset', async () => {
|
||||
const limiter = new QueueRateLimiter(mockRedisClient, mockLogger);
|
||||
|
||||
await limiter.reset('test-queue');
|
||||
|
||||
expect(mockLogger.warn).toHaveBeenCalledWith(
|
||||
'Broad reset not implemented yet',
|
||||
expect.objectContaining({ queueName: 'test-queue' })
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe('removeRule', () => {
|
||||
it('should remove a rule', () => {
|
||||
const limiter = new QueueRateLimiter(mockRedisClient, mockLogger);
|
||||
|
||||
const rule: RateLimitRule = {
|
||||
level: 'queue',
|
||||
queueName: 'test-queue',
|
||||
config: { points: 100, duration: 60 },
|
||||
};
|
||||
|
||||
limiter.addRule(rule);
|
||||
expect(limiter.getRules()).toHaveLength(1);
|
||||
|
||||
const removed = limiter.removeRule('queue', 'test-queue');
|
||||
expect(removed).toBe(true);
|
||||
expect(limiter.getRules()).toHaveLength(0);
|
||||
});
|
||||
|
||||
it('should return false when rule not found', () => {
|
||||
const limiter = new QueueRateLimiter(mockRedisClient, mockLogger);
|
||||
|
||||
const removed = limiter.removeRule('queue', 'non-existent');
|
||||
expect(removed).toBe(false);
|
||||
});
|
||||
});
|
||||
|
||||
describe('getRules', () => {
|
||||
it('should return all configured rules', () => {
|
||||
const limiter = new QueueRateLimiter(mockRedisClient, mockLogger);
|
||||
|
||||
const rule1: RateLimitRule = {
|
||||
level: 'global',
|
||||
config: { points: 1000, duration: 60 },
|
||||
};
|
||||
|
||||
const rule2: RateLimitRule = {
|
||||
level: 'queue',
|
||||
queueName: 'test-queue',
|
||||
config: { points: 100, duration: 60 },
|
||||
};
|
||||
|
||||
limiter.addRule(rule1);
|
||||
limiter.addRule(rule2);
|
||||
|
||||
const rules = limiter.getRules();
|
||||
expect(rules).toHaveLength(2);
|
||||
expect(rules).toContain(rule1);
|
||||
expect(rules).toContain(rule2);
|
||||
});
|
||||
});
|
||||
|
||||
describe('error handling', () => {
  // The limiter is expected to fail OPEN: when the underlying rate-limiter
  // instance for a matching rule is missing, checkLimit allows the operation
  // through instead of blocking it (verified by the assertions below).
  it('should allow on rate limiter error', async () => {
    const limiter = new QueueRateLimiter(mockRedisClient, mockLogger);

    // Add a rule but don't set up the actual limiter to cause an error
    const rule: RateLimitRule = {
      level: 'queue',
      queueName: 'test-queue',
      config: { points: 100, duration: 60 },
    };

    limiter.addRule(rule);

    // Force the limiter map to be empty to simulate error
    // NOTE(review): reaches into a private field via `as any`; brittle if the
    // internal `limiters` map is ever renamed — confirm against the class.
    (limiter as any).limiters.clear();

    const result = await limiter.checkLimit('test-queue', 'handler', 'operation');

    expect(result.allowed).toBe(true); // Should allow on error
    // The fail-open path is expected to log a warning carrying rule context.
    expect(mockLogger.warn).toHaveBeenCalledWith(
      'Rate limiter not found for rule',
      expect.any(Object)
    );
  });
});
|
||||
|
||||
describe('hierarchical rule precedence', () => {
  // Rules can be declared at four levels of increasing specificity:
  //   global -> queue -> handler -> operation
  // For a given (queue, handler, operation) triple the MOST specific matching
  // rule should win, so the operation-level rule (10 points) must be selected
  // even though broader rules also match.
  it('should correctly apply rule hierarchy', () => {
    const limiter = new QueueRateLimiter(mockRedisClient, mockLogger);

    // Add multiple rules at different levels
    const rules: RateLimitRule[] = [
      {
        level: 'global',
        config: { points: 10000, duration: 60 },
      },
      {
        level: 'queue',
        queueName: 'email-queue',
        config: { points: 1000, duration: 60 },
      },
      {
        level: 'handler',
        queueName: 'email-queue',
        handler: 'email-service',
        config: { points: 100, duration: 60 },
      },
      {
        level: 'operation',
        queueName: 'email-queue',
        handler: 'email-service',
        operation: 'send-bulk',
        config: { points: 10, duration: 60 },
      },
    ];

    rules.forEach(rule => limiter.addRule(rule));

    // Test that getMostSpecificRule works correctly
    // NOTE(review): calls a private method via `as any`; brittle against
    // internal renames — consider exposing a test seam instead.
    const specificRule = (limiter as any).getMostSpecificRule(
      'email-queue',
      'email-service',
      'send-bulk'
    );

    // The operation-level rule is the narrowest match.
    expect(specificRule?.level).toBe('operation');
    expect(specificRule?.config.points).toBe(10);
  });
});
|
||||
})
|
||||
57
libs/core/queue/test/service-cache.test.ts
Normal file
57
libs/core/queue/test/service-cache.test.ts
Normal file
|
|
@ -0,0 +1,57 @@
|
|||
import { describe, expect, it } from 'bun:test';
|
||||
import { normalizeServiceName, generateCachePrefix } from '../src/service-utils';
|
||||
|
||||
describe('ServiceCache Integration', () => {
|
||||
// Since ServiceCache depends on external createCache, we'll test the utility functions it uses
|
||||
|
||||
describe('generateCachePrefix usage', () => {
|
||||
it('should generate correct cache prefix for service', () => {
|
||||
const prefix = generateCachePrefix('userService');
|
||||
expect(prefix).toBe('cache:user-service');
|
||||
});
|
||||
|
||||
it('should handle global cache prefix', () => {
|
||||
// This simulates what ServiceCache does for global cache
|
||||
const globalPrefix = 'stock-bot:shared';
|
||||
expect(globalPrefix).toBe('stock-bot:shared');
|
||||
});
|
||||
});
|
||||
|
||||
describe('cache key patterns', () => {
|
||||
it('should follow correct key pattern for job results', () => {
|
||||
const jobId = 'job-123';
|
||||
const expectedKey = `job:result:${jobId}`;
|
||||
expect(expectedKey).toBe('job:result:job-123');
|
||||
});
|
||||
|
||||
it('should follow correct key pattern for operation metrics', () => {
|
||||
const handler = 'test-handler';
|
||||
const operation = 'test-op';
|
||||
const expectedKey = `metrics:${handler}:${operation}`;
|
||||
expect(expectedKey).toBe('metrics:test-handler:test-op');
|
||||
});
|
||||
|
||||
it('should follow correct key pattern for service health', () => {
|
||||
const serviceName = 'test-service';
|
||||
const expectedKey = `health:${serviceName}`;
|
||||
expect(expectedKey).toBe('health:test-service');
|
||||
});
|
||||
|
||||
it('should follow correct key pattern for queue stats', () => {
|
||||
const queueName = 'test-queue';
|
||||
const expectedKey = `queue:stats:${queueName}`;
|
||||
expect(expectedKey).toBe('queue:stats:test-queue');
|
||||
});
|
||||
});
|
||||
|
||||
describe('service name normalization', () => {
|
||||
it('should normalize service names in cache prefix', () => {
|
||||
const serviceName = 'UserService';
|
||||
const normalized = normalizeServiceName(serviceName);
|
||||
expect(normalized).toBe('user-service');
|
||||
|
||||
const prefix = generateCachePrefix(normalized);
|
||||
expect(prefix).toBe('cache:user-service');
|
||||
});
|
||||
});
|
||||
})
|
||||
120
libs/core/queue/test/service-utils.test.ts
Normal file
120
libs/core/queue/test/service-utils.test.ts
Normal file
|
|
@ -0,0 +1,120 @@
|
|||
import { describe, expect, it } from 'bun:test';
|
||||
import {
|
||||
normalizeServiceName,
|
||||
generateCachePrefix,
|
||||
getFullQueueName,
|
||||
parseQueueName,
|
||||
} from '../src/service-utils';
|
||||
|
||||
describe('Service Utils', () => {
|
||||
describe('normalizeServiceName', () => {
|
||||
it('should convert camelCase to kebab-case', () => {
|
||||
expect(normalizeServiceName('myService')).toBe('my-service');
|
||||
expect(normalizeServiceName('userAuthService')).toBe('user-auth-service');
|
||||
expect(normalizeServiceName('APIGateway')).toBe('apigateway');
|
||||
});
|
||||
|
||||
it('should handle already kebab-case names', () => {
|
||||
expect(normalizeServiceName('my-service')).toBe('my-service');
|
||||
expect(normalizeServiceName('user-auth-service')).toBe('user-auth-service');
|
||||
});
|
||||
|
||||
it('should handle single word names', () => {
|
||||
expect(normalizeServiceName('service')).toBe('service');
|
||||
expect(normalizeServiceName('API')).toBe('api');
|
||||
});
|
||||
|
||||
it('should handle empty string', () => {
|
||||
expect(normalizeServiceName('')).toBe('');
|
||||
});
|
||||
});
|
||||
|
||||
describe('getFullQueueName', () => {
|
||||
it('should generate queue name with service and handler', () => {
|
||||
const name = getFullQueueName('userService', 'processUser');
|
||||
expect(name).toBe('{user-service_processUser}');
|
||||
});
|
||||
|
||||
it('should normalize service name but keep handler as-is', () => {
|
||||
const name = getFullQueueName('APIGateway', 'handleRequest');
|
||||
expect(name).toBe('{apigateway_handleRequest}');
|
||||
});
|
||||
|
||||
it('should handle already normalized service name', () => {
|
||||
const name = getFullQueueName('user-service', 'process-user');
|
||||
expect(name).toBe('{user-service_process-user}');
|
||||
});
|
||||
});
|
||||
|
||||
describe('generateCachePrefix', () => {
|
||||
it('should generate cache prefix for service', () => {
|
||||
const prefix = generateCachePrefix('userService');
|
||||
expect(prefix).toBe('cache:user-service');
|
||||
});
|
||||
|
||||
it('should generate cache prefix for service only', () => {
|
||||
const prefix = generateCachePrefix('userService');
|
||||
expect(prefix).toBe('cache:user-service');
|
||||
});
|
||||
|
||||
it('should normalize service names', () => {
|
||||
const prefix = generateCachePrefix('APIGateway');
|
||||
expect(prefix).toBe('cache:apigateway');
|
||||
});
|
||||
});
|
||||
|
||||
describe('parseQueueName', () => {
|
||||
it('should parse valid queue name', () => {
|
||||
const result = parseQueueName('{user-service_process-user}');
|
||||
expect(result).toEqual({
|
||||
service: 'user-service',
|
||||
handler: 'process-user',
|
||||
});
|
||||
});
|
||||
|
||||
it('should return null for invalid format', () => {
|
||||
expect(parseQueueName('invalid-queue-name')).toBeNull();
|
||||
expect(parseQueueName('user-service_process-user')).toBeNull(); // Missing braces
|
||||
expect(parseQueueName('{user-service-process-user}')).toBeNull(); // Missing underscore
|
||||
expect(parseQueueName('{user-service_}')).toBeNull(); // Missing handler
|
||||
expect(parseQueueName('{_process-user}')).toBeNull(); // Missing service
|
||||
});
|
||||
|
||||
it('should handle queue names with multiple underscores', () => {
|
||||
const result = parseQueueName('{user_service_process_user}');
|
||||
expect(result).toEqual({
|
||||
service: 'user',
|
||||
handler: 'service_process_user',
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('complete service utils workflow', () => {
|
||||
it('should handle full queue name generation and parsing', () => {
|
||||
// Generate a queue name
|
||||
const serviceName = 'userService';
|
||||
const handlerName = 'processOrder';
|
||||
const queueName = getFullQueueName(serviceName, handlerName);
|
||||
|
||||
expect(queueName).toBe('{user-service_processOrder}');
|
||||
|
||||
// Parse it back
|
||||
const parsed = parseQueueName(queueName);
|
||||
expect(parsed).toEqual({
|
||||
service: 'user-service',
|
||||
handler: 'processOrder',
|
||||
});
|
||||
});
|
||||
|
||||
it('should handle cache prefix generation', () => {
|
||||
const serviceName = 'orderService';
|
||||
const cachePrefix = generateCachePrefix(serviceName);
|
||||
|
||||
expect(cachePrefix).toBe('cache:order-service');
|
||||
|
||||
// Use it for cache keys
|
||||
const cacheKey = `${cachePrefix}:user:123`;
|
||||
expect(cacheKey).toBe('cache:order-service:user:123');
|
||||
});
|
||||
});
|
||||
})
|
||||
106
libs/core/queue/test/utils.test.ts
Normal file
106
libs/core/queue/test/utils.test.ts
Normal file
|
|
@ -0,0 +1,106 @@
|
|||
import { describe, expect, it, beforeEach, afterEach } from 'bun:test';
|
||||
import { getRedisConnection } from '../src/utils';
|
||||
import type { RedisConfig } from '../src/types';
|
||||
|
||||
describe('Queue Utils', () => {
|
||||
describe('getRedisConnection', () => {
|
||||
const originalEnv = process.env;
|
||||
|
||||
beforeEach(() => {
|
||||
process.env = { ...originalEnv };
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
process.env = originalEnv;
|
||||
});
|
||||
|
||||
it('should return test connection in test environment', () => {
|
||||
process.env.NODE_ENV = 'test';
|
||||
|
||||
const config: RedisConfig = {
|
||||
host: 'production.redis.com',
|
||||
port: 6380,
|
||||
password: 'secret',
|
||||
};
|
||||
|
||||
const connection = getRedisConnection(config);
|
||||
|
||||
expect(connection.host).toBe('localhost');
|
||||
expect(connection.port).toBe(6379);
|
||||
expect(connection.password).toBeUndefined();
|
||||
});
|
||||
|
||||
it('should return test connection when BUNIT is set', () => {
|
||||
process.env.BUNIT = '1';
|
||||
|
||||
const config: RedisConfig = {
|
||||
host: 'production.redis.com',
|
||||
port: 6380,
|
||||
};
|
||||
|
||||
const connection = getRedisConnection(config);
|
||||
|
||||
expect(connection.host).toBe('localhost');
|
||||
expect(connection.port).toBe(6379);
|
||||
});
|
||||
|
||||
it('should return actual config in non-test environment', () => {
|
||||
process.env.NODE_ENV = 'production';
|
||||
delete process.env.BUNIT;
|
||||
|
||||
const config: RedisConfig = {
|
||||
host: 'production.redis.com',
|
||||
port: 6380,
|
||||
password: 'secret',
|
||||
db: 1,
|
||||
username: 'user',
|
||||
};
|
||||
|
||||
const connection = getRedisConnection(config);
|
||||
|
||||
expect(connection).toEqual(config);
|
||||
expect(connection.host).toBe('production.redis.com');
|
||||
expect(connection.port).toBe(6380);
|
||||
expect(connection.password).toBe('secret');
|
||||
expect(connection.db).toBe(1);
|
||||
expect(connection.username).toBe('user');
|
||||
});
|
||||
|
||||
it('should handle minimal config', () => {
|
||||
process.env.NODE_ENV = 'development';
|
||||
|
||||
const config: RedisConfig = {
|
||||
host: 'localhost',
|
||||
port: 6379,
|
||||
};
|
||||
|
||||
const connection = getRedisConnection(config);
|
||||
|
||||
expect(connection.host).toBe('localhost');
|
||||
expect(connection.port).toBe(6379);
|
||||
expect(connection.password).toBeUndefined();
|
||||
expect(connection.db).toBeUndefined();
|
||||
});
|
||||
|
||||
it('should preserve all config properties in non-test mode', () => {
|
||||
delete process.env.NODE_ENV;
|
||||
delete process.env.BUNIT;
|
||||
|
||||
const config: RedisConfig = {
|
||||
host: 'redis.example.com',
|
||||
port: 6379,
|
||||
password: 'pass123',
|
||||
db: 2,
|
||||
username: 'admin',
|
||||
// Additional properties that might be passed
|
||||
maxRetriesPerRequest: 3,
|
||||
enableReadyCheck: true,
|
||||
enableOfflineQueue: false,
|
||||
} as RedisConfig & Record<string, any>;
|
||||
|
||||
const connection = getRedisConnection(config);
|
||||
|
||||
expect(connection).toEqual(config);
|
||||
});
|
||||
});
|
||||
})
|
||||
426
libs/core/shutdown/test/shutdown-comprehensive.test.ts
Normal file
426
libs/core/shutdown/test/shutdown-comprehensive.test.ts
Normal file
|
|
@ -0,0 +1,426 @@
|
|||
import { describe, expect, it, beforeEach, afterEach, mock } from 'bun:test';
|
||||
import {
|
||||
Shutdown,
|
||||
onShutdown,
|
||||
onShutdownHigh,
|
||||
onShutdownMedium,
|
||||
onShutdownLow,
|
||||
setShutdownTimeout,
|
||||
isShuttingDown,
|
||||
isShutdownSignalReceived,
|
||||
getShutdownCallbackCount,
|
||||
initiateShutdown,
|
||||
resetShutdown,
|
||||
} from '../src';
|
||||
import type { ShutdownOptions, ShutdownResult } from '../src/types';
|
||||
|
||||
describe('Shutdown Comprehensive Tests', () => {
|
||||
beforeEach(() => {
|
||||
// Reset before each test
|
||||
resetShutdown();
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
// Clean up after each test
|
||||
resetShutdown();
|
||||
});
|
||||
|
||||
describe('Global Functions', () => {
|
||||
describe('onShutdown', () => {
|
||||
it('should register callback with custom priority', () => {
|
||||
const callback = mock(async () => {});
|
||||
|
||||
onShutdown(callback, 'custom-handler', 25);
|
||||
|
||||
expect(getShutdownCallbackCount()).toBe(1);
|
||||
});
|
||||
|
||||
it('should handle callback without name', () => {
|
||||
const callback = mock(async () => {});
|
||||
|
||||
onShutdown(callback);
|
||||
|
||||
expect(getShutdownCallbackCount()).toBe(1);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Priority convenience functions', () => {
|
||||
it('should register high priority callback', () => {
|
||||
const callback = mock(async () => {});
|
||||
|
||||
onShutdownHigh(callback, 'high-priority');
|
||||
|
||||
expect(getShutdownCallbackCount()).toBe(1);
|
||||
});
|
||||
|
||||
it('should register medium priority callback', () => {
|
||||
const callback = mock(async () => {});
|
||||
|
||||
onShutdownMedium(callback, 'medium-priority');
|
||||
|
||||
expect(getShutdownCallbackCount()).toBe(1);
|
||||
});
|
||||
|
||||
it('should register low priority callback', () => {
|
||||
const callback = mock(async () => {});
|
||||
|
||||
onShutdownLow(callback, 'low-priority');
|
||||
|
||||
expect(getShutdownCallbackCount()).toBe(1);
|
||||
});
|
||||
|
||||
it('should execute callbacks in priority order', async () => {
|
||||
const order: string[] = [];
|
||||
|
||||
const highCallback = mock(async () => {
|
||||
order.push('high');
|
||||
});
|
||||
const mediumCallback = mock(async () => {
|
||||
order.push('medium');
|
||||
});
|
||||
const lowCallback = mock(async () => {
|
||||
order.push('low');
|
||||
});
|
||||
|
||||
onShutdownLow(lowCallback, 'low');
|
||||
onShutdownHigh(highCallback, 'high');
|
||||
onShutdownMedium(mediumCallback, 'medium');
|
||||
|
||||
await initiateShutdown();
|
||||
|
||||
expect(order).toEqual(['high', 'medium', 'low']);
|
||||
});
|
||||
});
|
||||
|
||||
describe('setShutdownTimeout', () => {
  it('should set custom timeout', () => {
    setShutdownTimeout(10000);

    // Timeout is set internally, we can't directly verify it
    // but we can test it works by using a long-running callback
    expect(() => setShutdownTimeout(10000)).not.toThrow();
  });

  it('should handle negative timeout values', () => {
    // Should either throw or use default
    // NOTE(review): the assertion only pins the "does not throw" half of that
    // contract; if negative values should be rejected, tighten this test.
    expect(() => setShutdownTimeout(-1000)).not.toThrow();
  });

  it('should handle zero timeout', () => {
    expect(() => setShutdownTimeout(0)).not.toThrow();
  });
});
|
||||
|
||||
describe('Status functions', () => {
  it('should report shutting down status correctly', async () => {
    expect(isShuttingDown()).toBe(false);

    // The flag flips synchronously as soon as shutdown is initiated, before
    // any callbacks have finished.
    const promise = initiateShutdown();
    expect(isShuttingDown()).toBe(true);

    await promise;
    // Still true after completion
    expect(isShuttingDown()).toBe(true);

    // Only an explicit reset clears the latched flag.
    resetShutdown();
    expect(isShuttingDown()).toBe(false);
  });

  it('should track shutdown signal', () => {
    expect(isShutdownSignalReceived()).toBe(false);

    // Simulate signal by setting global
    // NOTE(review): depends on the implementation reading this exact global
    // key — confirm the flag name against the shutdown module.
    (global as any).__SHUTDOWN_SIGNAL_RECEIVED__ = true;
    expect(isShutdownSignalReceived()).toBe(true);

    // Clean up
    delete (global as any).__SHUTDOWN_SIGNAL_RECEIVED__;
  });

  it('should count callbacks correctly', () => {
    expect(getShutdownCallbackCount()).toBe(0);

    onShutdown(async () => {});
    expect(getShutdownCallbackCount()).toBe(1);

    // Every priority level contributes to the same count.
    onShutdownHigh(async () => {});
    onShutdownMedium(async () => {});
    onShutdownLow(async () => {});
    expect(getShutdownCallbackCount()).toBe(4);
  });
});
|
||||
|
||||
describe('initiateShutdown', () => {
|
||||
it('should execute all callbacks', async () => {
|
||||
const callback1 = mock(async () => {});
|
||||
const callback2 = mock(async () => {});
|
||||
const callback3 = mock(async () => {});
|
||||
|
||||
onShutdown(callback1);
|
||||
onShutdown(callback2);
|
||||
onShutdown(callback3);
|
||||
|
||||
const result = await initiateShutdown();
|
||||
|
||||
expect(callback1).toHaveBeenCalledTimes(1);
|
||||
expect(callback2).toHaveBeenCalledTimes(1);
|
||||
expect(callback3).toHaveBeenCalledTimes(1);
|
||||
expect(result.total).toBe(3);
|
||||
expect(result.successful).toBe(3);
|
||||
expect(result.failed).toBe(0);
|
||||
});
|
||||
|
||||
it('should handle errors in callbacks', async () => {
|
||||
const successCallback = mock(async () => {});
|
||||
const errorCallback = mock(async () => {
|
||||
throw new Error('Callback error');
|
||||
});
|
||||
|
||||
onShutdown(successCallback, 'success-handler');
|
||||
onShutdown(errorCallback, 'error-handler');
|
||||
|
||||
const result = await initiateShutdown();
|
||||
|
||||
expect(result.total).toBe(2);
|
||||
expect(result.successful).toBe(1);
|
||||
expect(result.failed).toBe(1);
|
||||
expect(result.errors).toHaveLength(1);
|
||||
expect(result.errors[0]).toContain('error-handler');
|
||||
});
|
||||
|
||||
it('should only execute once', async () => {
|
||||
const callback = mock(async () => {});
|
||||
onShutdown(callback);
|
||||
|
||||
await initiateShutdown();
|
||||
await initiateShutdown();
|
||||
await initiateShutdown();
|
||||
|
||||
expect(callback).toHaveBeenCalledTimes(1);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('Shutdown Class Direct Usage', () => {
|
||||
it('should create instance with options', () => {
|
||||
const options: ShutdownOptions = {
|
||||
timeout: 5000,
|
||||
autoRegister: false,
|
||||
};
|
||||
|
||||
const shutdown = new Shutdown(options);
|
||||
expect(shutdown).toBeInstanceOf(Shutdown);
|
||||
});
|
||||
|
||||
it('should handle concurrent callback registration', () => {
|
||||
const shutdown = new Shutdown();
|
||||
const callbacks = Array.from({ length: 10 }, (_, i) =>
|
||||
mock(async () => {})
|
||||
);
|
||||
|
||||
// Register callbacks concurrently
|
||||
callbacks.forEach((cb, i) => {
|
||||
shutdown.onShutdown(cb, `handler-${i}`, i * 10);
|
||||
});
|
||||
|
||||
expect(shutdown.getCallbackCount()).toBe(10);
|
||||
});
|
||||
|
||||
it('should handle empty callback list', async () => {
|
||||
const shutdown = new Shutdown();
|
||||
|
||||
const result = await shutdown.shutdown();
|
||||
|
||||
expect(result.total).toBe(0);
|
||||
expect(result.successful).toBe(0);
|
||||
expect(result.failed).toBe(0);
|
||||
expect(result.errors).toHaveLength(0);
|
||||
});
|
||||
|
||||
it('should respect timeout', async () => {
|
||||
const shutdown = new Shutdown({ timeout: 100 });
|
||||
|
||||
const slowCallback = mock(async () => {
|
||||
await new Promise(resolve => setTimeout(resolve, 200));
|
||||
});
|
||||
|
||||
shutdown.onShutdown(slowCallback, 'slow-handler');
|
||||
|
||||
const startTime = Date.now();
|
||||
const result = await shutdown.shutdown();
|
||||
const duration = Date.now() - startTime;
|
||||
|
||||
expect(duration).toBeLessThan(150); // Should timeout before 200ms
|
||||
expect(result.timedOut).toBe(true);
|
||||
});
|
||||
|
||||
it('should handle synchronous callbacks', async () => {
|
||||
const shutdown = new Shutdown();
|
||||
|
||||
const syncCallback = mock(() => {
|
||||
// Synchronous callback
|
||||
return undefined;
|
||||
});
|
||||
|
||||
shutdown.onShutdown(syncCallback as any, 'sync-handler');
|
||||
|
||||
const result = await shutdown.shutdown();
|
||||
|
||||
expect(result.successful).toBe(1);
|
||||
expect(syncCallback).toHaveBeenCalled();
|
||||
});
|
||||
});
|
||||
|
||||
describe('Edge Cases', () => {
|
||||
it('should handle callback that adds more callbacks', async () => {
|
||||
const addingCallback = mock(async () => {
|
||||
// Try to add callback during shutdown
|
||||
onShutdown(async () => {
|
||||
// This should not execute
|
||||
});
|
||||
});
|
||||
|
||||
onShutdown(addingCallback);
|
||||
|
||||
const countBefore = getShutdownCallbackCount();
|
||||
await initiateShutdown();
|
||||
|
||||
// The new callback should not be executed in this shutdown
|
||||
expect(addingCallback).toHaveBeenCalledTimes(1);
|
||||
});
|
||||
|
||||
it('should handle very large number of callbacks', async () => {
|
||||
const callbacks = Array.from({ length: 100 }, (_, i) =>
|
||||
mock(async () => {})
|
||||
);
|
||||
|
||||
callbacks.forEach((cb, i) => {
|
||||
onShutdown(cb, `handler-${i}`, i);
|
||||
});
|
||||
|
||||
expect(getShutdownCallbackCount()).toBe(100);
|
||||
|
||||
const result = await initiateShutdown();
|
||||
|
||||
expect(result.total).toBe(100);
|
||||
expect(result.successful).toBe(100);
|
||||
|
||||
callbacks.forEach(cb => {
|
||||
expect(cb).toHaveBeenCalledTimes(1);
|
||||
});
|
||||
});
|
||||
|
||||
it('should handle callbacks with same priority', async () => {
|
||||
const order: string[] = [];
|
||||
|
||||
const callback1 = mock(async () => { order.push('1'); });
|
||||
const callback2 = mock(async () => { order.push('2'); });
|
||||
const callback3 = mock(async () => { order.push('3'); });
|
||||
|
||||
// All with same priority
|
||||
onShutdown(callback1, 'handler-1', 50);
|
||||
onShutdown(callback2, 'handler-2', 50);
|
||||
onShutdown(callback3, 'handler-3', 50);
|
||||
|
||||
await initiateShutdown();
|
||||
|
||||
// Should execute all, order between same priority is not guaranteed
|
||||
expect(order).toHaveLength(3);
|
||||
expect(order).toContain('1');
|
||||
expect(order).toContain('2');
|
||||
expect(order).toContain('3');
|
||||
});
|
||||
|
||||
it('should handle callback that throws non-Error', async () => {
|
||||
const throwingCallback = mock(async () => {
|
||||
throw 'string error'; // Non-Error thrown
|
||||
});
|
||||
|
||||
onShutdown(throwingCallback, 'throwing-handler');
|
||||
|
||||
const result = await initiateShutdown();
|
||||
|
||||
expect(result.failed).toBe(1);
|
||||
expect(result.errors[0]).toContain('throwing-handler');
|
||||
});
|
||||
|
||||
it('should handle undefined callback name', () => {
|
||||
const callback = mock(async () => {});
|
||||
|
||||
onShutdown(callback, undefined as any);
|
||||
|
||||
expect(getShutdownCallbackCount()).toBe(1);
|
||||
});
|
||||
});
|
||||
|
||||
describe('ShutdownResult Accuracy', () => {
|
||||
it('should provide accurate timing information', async () => {
|
||||
const delays = [10, 20, 30];
|
||||
const callbacks = delays.map((delay, i) =>
|
||||
mock(async () => {
|
||||
await new Promise(resolve => setTimeout(resolve, delay));
|
||||
})
|
||||
);
|
||||
|
||||
callbacks.forEach((cb, i) => {
|
||||
onShutdown(cb, `timer-${i}`);
|
||||
});
|
||||
|
||||
const startTime = Date.now();
|
||||
const result = await initiateShutdown();
|
||||
const totalTime = Date.now() - startTime;
|
||||
|
||||
expect(result.duration).toBeGreaterThan(0);
|
||||
expect(result.duration).toBeLessThanOrEqual(totalTime);
|
||||
expect(result.startTime).toBeInstanceOf(Date);
|
||||
expect(result.endTime).toBeInstanceOf(Date);
|
||||
expect(result.endTime.getTime() - result.startTime.getTime()).toBe(result.duration);
|
||||
});
|
||||
|
||||
it('should track individual callback execution', async () => {
|
||||
const successCount = 3;
|
||||
const errorCount = 2;
|
||||
|
||||
for (let i = 0; i < successCount; i++) {
|
||||
onShutdown(async () => {}, `success-${i}`);
|
||||
}
|
||||
|
||||
for (let i = 0; i < errorCount; i++) {
|
||||
onShutdown(async () => {
|
||||
throw new Error(`Error ${i}`);
|
||||
}, `error-${i}`);
|
||||
}
|
||||
|
||||
const result = await initiateShutdown();
|
||||
|
||||
expect(result.total).toBe(successCount + errorCount);
|
||||
expect(result.successful).toBe(successCount);
|
||||
expect(result.failed).toBe(errorCount);
|
||||
expect(result.errors).toHaveLength(errorCount);
|
||||
expect(result.timedOut).toBe(false);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Global State Management', () => {
|
||||
it('should properly reset global state', () => {
|
||||
// Add some callbacks
|
||||
onShutdown(async () => {});
|
||||
onShutdownHigh(async () => {});
|
||||
onShutdownLow(async () => {});
|
||||
|
||||
expect(getShutdownCallbackCount()).toBe(3);
|
||||
|
||||
resetShutdown();
|
||||
|
||||
expect(getShutdownCallbackCount()).toBe(0);
|
||||
expect(isShuttingDown()).toBe(false);
|
||||
});
|
||||
|
||||
it('should maintain singleton across imports', () => {
|
||||
const instance1 = Shutdown.getInstance();
|
||||
const instance2 = Shutdown.getInstance();
|
||||
|
||||
expect(instance1).toBe(instance2);
|
||||
});
|
||||
});
|
||||
});
|
||||
Loading…
Add table
Add a link
Reference in a new issue