added cli-covarage tool and fixed more tests
This commit is contained in:
parent
b63e58784c
commit
b845a8eade
57 changed files with 11917 additions and 295 deletions
515
libs/core/config/test/config-manager.test.ts
Normal file
515
libs/core/config/test/config-manager.test.ts
Normal file
|
|
@ -0,0 +1,515 @@
|
|||
import { describe, it, expect, beforeEach, mock, spyOn } from 'bun:test';
|
||||
import { z } from 'zod';
|
||||
import { ConfigManager } from '../src/config-manager';
|
||||
import { ConfigError, ConfigValidationError } from '../src/errors';
|
||||
import type { ConfigLoader, Environment } from '../src/types';
|
||||
|
||||
// Mock the logger
|
||||
mock.module('@stock-bot/logger', () => ({
|
||||
getLogger: () => ({
|
||||
info: mock(() => {}),
|
||||
error: mock(() => {}),
|
||||
warn: mock(() => {}),
|
||||
debug: mock(() => {}),
|
||||
})
|
||||
}));
|
||||
|
||||
// Mock loader class
|
||||
class MockLoader implements ConfigLoader {
|
||||
constructor(
|
||||
private data: Record<string, unknown>,
|
||||
public priority: number = 0
|
||||
) {}
|
||||
|
||||
load(): Record<string, unknown> {
|
||||
return this.data;
|
||||
}
|
||||
}
|
||||
|
||||
describe('ConfigManager', () => {
|
||||
let manager: ConfigManager<any>;
|
||||
|
||||
beforeEach(() => {
|
||||
// Reset environment
|
||||
delete process.env.NODE_ENV;
|
||||
});
|
||||
|
||||
describe('constructor', () => {
|
||||
it('should initialize with default loaders', () => {
|
||||
manager = new ConfigManager();
|
||||
expect(manager).toBeDefined();
|
||||
expect(manager.getEnvironment()).toBe('development');
|
||||
});
|
||||
|
||||
it('should detect environment from NODE_ENV', () => {
|
||||
process.env.NODE_ENV = 'production';
|
||||
manager = new ConfigManager();
|
||||
expect(manager.getEnvironment()).toBe('production');
|
||||
});
|
||||
|
||||
it('should handle various environment values', () => {
|
||||
const envMap: Record<string, Environment> = {
|
||||
'production': 'production',
|
||||
'prod': 'production',
|
||||
'test': 'test',
|
||||
'development': 'development',
|
||||
'dev': 'development',
|
||||
'unknown': 'development',
|
||||
};
|
||||
|
||||
for (const [input, expected] of Object.entries(envMap)) {
|
||||
process.env.NODE_ENV = input;
|
||||
manager = new ConfigManager();
|
||||
expect(manager.getEnvironment()).toBe(expected);
|
||||
}
|
||||
});
|
||||
|
||||
it('should use custom loaders when provided', () => {
|
||||
const customLoader = new MockLoader({ custom: 'data' });
|
||||
manager = new ConfigManager({
|
||||
loaders: [customLoader],
|
||||
});
|
||||
|
||||
manager.initialize();
|
||||
expect(manager.get()).toEqual({ custom: 'data', environment: 'development' });
|
||||
});
|
||||
|
||||
it('should use custom environment when provided', () => {
|
||||
manager = new ConfigManager({
|
||||
environment: 'test',
|
||||
});
|
||||
expect(manager.getEnvironment()).toBe('test');
|
||||
});
|
||||
});
|
||||
|
||||
describe('initialize', () => {
|
||||
it('should load and merge configurations', () => {
|
||||
const loader1 = new MockLoader({ a: 1, b: { c: 2 } }, 1);
|
||||
const loader2 = new MockLoader({ b: { d: 3 }, e: 4 }, 2);
|
||||
|
||||
manager = new ConfigManager({
|
||||
loaders: [loader1, loader2],
|
||||
});
|
||||
|
||||
const config = manager.initialize();
|
||||
|
||||
expect(config).toEqual({
|
||||
a: 1,
|
||||
b: { c: 2, d: 3 },
|
||||
e: 4,
|
||||
environment: 'development',
|
||||
});
|
||||
});
|
||||
|
||||
it('should return cached config on subsequent calls', () => {
|
||||
const loader = new MockLoader({ test: 'data' });
|
||||
const loadSpy = spyOn(loader, 'load');
|
||||
|
||||
manager = new ConfigManager({
|
||||
loaders: [loader],
|
||||
});
|
||||
|
||||
const config1 = manager.initialize();
|
||||
const config2 = manager.initialize();
|
||||
|
||||
expect(config1).toBe(config2);
|
||||
expect(loadSpy).toHaveBeenCalledTimes(1);
|
||||
});
|
||||
|
||||
it('should validate config with schema', () => {
|
||||
const schema = z.object({
|
||||
name: z.string(),
|
||||
port: z.number(),
|
||||
environment: z.string(),
|
||||
});
|
||||
|
||||
const loader = new MockLoader({
|
||||
name: 'test-app',
|
||||
port: 3000,
|
||||
});
|
||||
|
||||
manager = new ConfigManager({
|
||||
loaders: [loader],
|
||||
});
|
||||
|
||||
const config = manager.initialize(schema);
|
||||
|
||||
expect(config).toEqual({
|
||||
name: 'test-app',
|
||||
port: 3000,
|
||||
environment: 'development',
|
||||
});
|
||||
});
|
||||
|
||||
it('should throw validation error for invalid config', () => {
|
||||
const schema = z.object({
|
||||
name: z.string(),
|
||||
port: z.number(),
|
||||
});
|
||||
|
||||
const loader = new MockLoader({
|
||||
name: 'test-app',
|
||||
port: 'invalid', // Should be number
|
||||
});
|
||||
|
||||
manager = new ConfigManager({
|
||||
loaders: [loader],
|
||||
});
|
||||
|
||||
expect(() => manager.initialize(schema)).toThrow(ConfigValidationError);
|
||||
});
|
||||
|
||||
it('should handle empty loaders', () => {
|
||||
manager = new ConfigManager({
|
||||
loaders: [],
|
||||
});
|
||||
|
||||
const config = manager.initialize();
|
||||
expect(config).toEqual({ environment: 'development' });
|
||||
});
|
||||
|
||||
it('should ignore loaders that return empty config', () => {
|
||||
const loader1 = new MockLoader({});
|
||||
const loader2 = new MockLoader({ data: 'value' });
|
||||
|
||||
manager = new ConfigManager({
|
||||
loaders: [loader1, loader2],
|
||||
});
|
||||
|
||||
const config = manager.initialize();
|
||||
expect(config).toEqual({ data: 'value', environment: 'development' });
|
||||
});
|
||||
|
||||
it('should respect loader priority order', () => {
|
||||
const loader1 = new MockLoader({ value: 'first' }, 1);
|
||||
const loader2 = new MockLoader({ value: 'second' }, 2);
|
||||
const loader3 = new MockLoader({ value: 'third' }, 0);
|
||||
|
||||
manager = new ConfigManager({
|
||||
loaders: [loader1, loader2, loader3],
|
||||
});
|
||||
|
||||
const config = manager.initialize();
|
||||
// Priority order: 0, 1, 2 (lowest to highest)
|
||||
// So 'second' should win
|
||||
expect(config.value).toBe('second');
|
||||
});
|
||||
|
||||
it('should handle validation errors with detailed error info', () => {
|
||||
const schema = z.object({
|
||||
name: z.string(),
|
||||
port: z.number().min(1).max(65535),
|
||||
features: z.object({
|
||||
enabled: z.boolean(),
|
||||
}),
|
||||
});
|
||||
|
||||
const loader = new MockLoader({
|
||||
name: 123, // Should be string
|
||||
port: 99999, // Out of range
|
||||
features: {
|
||||
enabled: 'yes', // Should be boolean
|
||||
},
|
||||
});
|
||||
|
||||
manager = new ConfigManager({
|
||||
loaders: [loader],
|
||||
});
|
||||
|
||||
try {
|
||||
manager.initialize(schema);
|
||||
expect(true).toBe(false); // Should not reach here
|
||||
} catch (error) {
|
||||
expect(error).toBeInstanceOf(ConfigValidationError);
|
||||
const validationError = error as ConfigValidationError;
|
||||
expect(validationError.errors).toBeDefined();
|
||||
expect(validationError.errors.length).toBeGreaterThan(0);
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
describe('get', () => {
|
||||
it('should return config after initialization', () => {
|
||||
const loader = new MockLoader({ test: 'data' });
|
||||
manager = new ConfigManager({ loaders: [loader] });
|
||||
|
||||
manager.initialize();
|
||||
expect(manager.get()).toEqual({ test: 'data', environment: 'development' });
|
||||
});
|
||||
|
||||
it('should throw error if not initialized', () => {
|
||||
manager = new ConfigManager();
|
||||
|
||||
expect(() => manager.get()).toThrow(ConfigError);
|
||||
expect(() => manager.get()).toThrow('Configuration not initialized');
|
||||
});
|
||||
});
|
||||
|
||||
describe('getValue', () => {
|
||||
beforeEach(() => {
|
||||
const loader = new MockLoader({
|
||||
database: {
|
||||
host: 'localhost',
|
||||
port: 5432,
|
||||
credentials: {
|
||||
username: 'admin',
|
||||
password: 'secret',
|
||||
},
|
||||
},
|
||||
cache: {
|
||||
enabled: true,
|
||||
ttl: 3600,
|
||||
},
|
||||
});
|
||||
|
||||
manager = new ConfigManager({ loaders: [loader] });
|
||||
manager.initialize();
|
||||
});
|
||||
|
||||
it('should get value by path', () => {
|
||||
expect(manager.getValue('database.host')).toBe('localhost');
|
||||
expect(manager.getValue('database.port')).toBe(5432);
|
||||
expect(manager.getValue('cache.enabled')).toBe(true);
|
||||
});
|
||||
|
||||
it('should get nested values', () => {
|
||||
expect(manager.getValue('database.credentials.username')).toBe('admin');
|
||||
expect(manager.getValue('database.credentials.password')).toBe('secret');
|
||||
});
|
||||
|
||||
it('should throw error for non-existent path', () => {
|
||||
expect(() => manager.getValue('nonexistent.path')).toThrow(ConfigError);
|
||||
expect(() => manager.getValue('nonexistent.path')).toThrow('Configuration key not found');
|
||||
});
|
||||
|
||||
it('should handle top-level values', () => {
|
||||
expect(manager.getValue('database')).toEqual({
|
||||
host: 'localhost',
|
||||
port: 5432,
|
||||
credentials: {
|
||||
username: 'admin',
|
||||
password: 'secret',
|
||||
},
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('has', () => {
|
||||
beforeEach(() => {
|
||||
const loader = new MockLoader({
|
||||
database: { host: 'localhost' },
|
||||
cache: { enabled: true },
|
||||
});
|
||||
|
||||
manager = new ConfigManager({ loaders: [loader] });
|
||||
manager.initialize();
|
||||
});
|
||||
|
||||
it('should return true for existing paths', () => {
|
||||
expect(manager.has('database')).toBe(true);
|
||||
expect(manager.has('database.host')).toBe(true);
|
||||
expect(manager.has('cache.enabled')).toBe(true);
|
||||
});
|
||||
|
||||
it('should return false for non-existent paths', () => {
|
||||
expect(manager.has('nonexistent')).toBe(false);
|
||||
expect(manager.has('database.port')).toBe(false);
|
||||
expect(manager.has('cache.ttl')).toBe(false);
|
||||
});
|
||||
});
|
||||
|
||||
describe('set', () => {
|
||||
beforeEach(() => {
|
||||
const loader = new MockLoader({
|
||||
app: { name: 'test', version: '1.0.0' },
|
||||
port: 3000,
|
||||
});
|
||||
|
||||
manager = new ConfigManager({ loaders: [loader] });
|
||||
manager.initialize();
|
||||
});
|
||||
|
||||
it('should update configuration values', () => {
|
||||
manager.set({ port: 4000 });
|
||||
expect(manager.get().port).toBe(4000);
|
||||
|
||||
manager.set({ app: { version: '2.0.0' } });
|
||||
expect(manager.get().app.version).toBe('2.0.0');
|
||||
expect(manager.get().app.name).toBe('test'); // Unchanged
|
||||
});
|
||||
|
||||
it('should validate updates when schema is present', () => {
|
||||
const schema = z.object({
|
||||
app: z.object({
|
||||
name: z.string(),
|
||||
version: z.string(),
|
||||
}),
|
||||
port: z.number().min(1000).max(9999),
|
||||
environment: z.string(),
|
||||
});
|
||||
|
||||
manager = new ConfigManager({ loaders: [new MockLoader({ app: { name: 'test', version: '1.0.0' }, port: 3000 })] });
|
||||
manager.initialize(schema);
|
||||
|
||||
// Valid update
|
||||
manager.set({ port: 4000 });
|
||||
expect(manager.get().port).toBe(4000);
|
||||
|
||||
// Invalid update
|
||||
expect(() => manager.set({ port: 99999 })).toThrow(ConfigValidationError);
|
||||
});
|
||||
|
||||
it('should throw error if not initialized', () => {
|
||||
const newManager = new ConfigManager();
|
||||
expect(() => newManager.set({ test: 'value' })).toThrow(ConfigError);
|
||||
});
|
||||
});
|
||||
|
||||
describe('reset', () => {
|
||||
it('should clear configuration', () => {
|
||||
const loader = new MockLoader({ test: 'data' });
|
||||
manager = new ConfigManager({ loaders: [loader] });
|
||||
|
||||
manager.initialize();
|
||||
expect(manager.get()).toBeDefined();
|
||||
|
||||
manager.reset();
|
||||
expect(() => manager.get()).toThrow(ConfigError);
|
||||
});
|
||||
});
|
||||
|
||||
describe('validate', () => {
|
||||
it('should validate current config against schema', () => {
|
||||
const loader = new MockLoader({
|
||||
name: 'test-app',
|
||||
port: 3000,
|
||||
});
|
||||
|
||||
manager = new ConfigManager({ loaders: [loader] });
|
||||
manager.initialize();
|
||||
|
||||
const schema = z.object({
|
||||
name: z.string(),
|
||||
port: z.number(),
|
||||
environment: z.string(),
|
||||
});
|
||||
|
||||
const validated = manager.validate(schema);
|
||||
expect(validated).toEqual({
|
||||
name: 'test-app',
|
||||
port: 3000,
|
||||
environment: 'development',
|
||||
});
|
||||
});
|
||||
|
||||
it('should throw if validation fails', () => {
|
||||
const loader = new MockLoader({
|
||||
name: 'test-app',
|
||||
port: 'invalid',
|
||||
});
|
||||
|
||||
manager = new ConfigManager({ loaders: [loader] });
|
||||
manager.initialize();
|
||||
|
||||
const schema = z.object({
|
||||
name: z.string(),
|
||||
port: z.number(),
|
||||
});
|
||||
|
||||
expect(() => manager.validate(schema)).toThrow();
|
||||
});
|
||||
});
|
||||
|
||||
describe('createTypedGetter', () => {
|
||||
it('should create a typed getter function', () => {
|
||||
const loader = new MockLoader({
|
||||
database: {
|
||||
host: 'localhost',
|
||||
port: 5432,
|
||||
},
|
||||
});
|
||||
|
||||
manager = new ConfigManager({ loaders: [loader] });
|
||||
manager.initialize();
|
||||
|
||||
const schema = z.object({
|
||||
database: z.object({
|
||||
host: z.string(),
|
||||
port: z.number(),
|
||||
}),
|
||||
environment: z.string(),
|
||||
});
|
||||
|
||||
const getConfig = manager.createTypedGetter(schema);
|
||||
const config = getConfig();
|
||||
|
||||
expect(config.database.host).toBe('localhost');
|
||||
expect(config.database.port).toBe(5432);
|
||||
expect(config.environment).toBe('development');
|
||||
});
|
||||
});
|
||||
|
||||
describe('deepMerge', () => {
|
||||
it('should handle circular references', () => {
|
||||
const obj1: any = { a: 1 };
|
||||
const obj2: any = { b: 2 };
|
||||
obj1.circular = obj1; // Create circular reference
|
||||
obj2.ref = obj1;
|
||||
|
||||
const loader1 = new MockLoader(obj1);
|
||||
const loader2 = new MockLoader(obj2);
|
||||
|
||||
manager = new ConfigManager({ loaders: [loader1, loader2] });
|
||||
|
||||
// Should not throw on circular reference
|
||||
const config = manager.initialize();
|
||||
expect(config.a).toBe(1);
|
||||
expect(config.b).toBe(2);
|
||||
});
|
||||
|
||||
it('should handle null and undefined values', () => {
|
||||
const loader1 = new MockLoader({ a: null, b: 'value' });
|
||||
const loader2 = new MockLoader({ a: 'overridden', c: undefined });
|
||||
|
||||
manager = new ConfigManager({ loaders: [loader1, loader2] });
|
||||
const config = manager.initialize();
|
||||
|
||||
expect(config.a).toBe('overridden');
|
||||
expect(config.b).toBe('value');
|
||||
expect(config.c).toBeUndefined();
|
||||
});
|
||||
|
||||
it('should handle Date and RegExp objects', () => {
|
||||
const date = new Date('2024-01-01');
|
||||
const regex = /test/gi;
|
||||
|
||||
const loader = new MockLoader({
|
||||
date: date,
|
||||
pattern: regex,
|
||||
nested: {
|
||||
date: date,
|
||||
pattern: regex,
|
||||
},
|
||||
});
|
||||
|
||||
manager = new ConfigManager({ loaders: [loader] });
|
||||
const config = manager.initialize();
|
||||
|
||||
expect(config.date).toBe(date);
|
||||
expect(config.pattern).toBe(regex);
|
||||
expect(config.nested.date).toBe(date);
|
||||
expect(config.nested.pattern).toBe(regex);
|
||||
});
|
||||
|
||||
it('should handle arrays without merging', () => {
|
||||
const loader1 = new MockLoader({ items: [1, 2, 3] });
|
||||
const loader2 = new MockLoader({ items: [4, 5, 6] });
|
||||
|
||||
manager = new ConfigManager({ loaders: [loader1, loader2] });
|
||||
const config = manager.initialize();
|
||||
|
||||
// Arrays should be replaced, not merged
|
||||
expect(config.items).toEqual([4, 5, 6]);
|
||||
});
|
||||
});
|
||||
});
|
||||
633
libs/core/config/test/env.loader.test.ts
Normal file
633
libs/core/config/test/env.loader.test.ts
Normal file
|
|
@ -0,0 +1,633 @@
|
|||
import { describe, it, expect, beforeEach, afterEach, mock, spyOn } from 'bun:test';
|
||||
import { readFileSync } from 'fs';
|
||||
import { EnvLoader } from '../src/loaders/env.loader';
|
||||
import { ConfigLoaderError } from '../src/errors';
|
||||
|
||||
// Mock fs module
|
||||
mock.module('fs', () => ({
|
||||
readFileSync: mock(() => '')
|
||||
}));
|
||||
|
||||
describe('EnvLoader', () => {
|
||||
let loader: EnvLoader;
|
||||
const originalEnv = { ...process.env };
|
||||
|
||||
beforeEach(() => {
|
||||
// Clear environment
|
||||
for (const key in process.env) {
|
||||
delete process.env[key];
|
||||
}
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
// Restore original environment
|
||||
for (const key in process.env) {
|
||||
delete process.env[key];
|
||||
}
|
||||
Object.assign(process.env, originalEnv);
|
||||
});
|
||||
|
||||
describe('constructor', () => {
|
||||
it('should have highest priority', () => {
|
||||
loader = new EnvLoader();
|
||||
expect(loader.priority).toBe(100);
|
||||
});
|
||||
|
||||
it('should accept prefix and options', () => {
|
||||
loader = new EnvLoader('APP_', {
|
||||
convertCase: true,
|
||||
parseJson: false,
|
||||
});
|
||||
expect(loader).toBeDefined();
|
||||
});
|
||||
});
|
||||
|
||||
describe('load', () => {
|
||||
it('should load environment variables without prefix', () => {
|
||||
process.env.TEST_VAR = 'test_value';
|
||||
process.env.ANOTHER_VAR = 'another_value';
|
||||
|
||||
loader = new EnvLoader();
|
||||
const config = loader.load();
|
||||
|
||||
// Environment variables with underscores are converted to nested structure
|
||||
interface ExpectedConfig {
|
||||
test?: { var: string };
|
||||
another?: { var: string };
|
||||
}
|
||||
expect((config as ExpectedConfig).test?.var).toBe('test_value');
|
||||
expect((config as ExpectedConfig).another?.var).toBe('another_value');
|
||||
});
|
||||
|
||||
it('should filter by prefix', () => {
|
||||
process.env.APP_NAME = 'myapp';
|
||||
process.env.APP_VERSION = '1.0.0';
|
||||
process.env.OTHER_VAR = 'ignored';
|
||||
|
||||
loader = new EnvLoader('APP_');
|
||||
const config = loader.load();
|
||||
|
||||
expect(config.NAME).toBe('myapp');
|
||||
expect(config.VERSION).toBe('1.0.0');
|
||||
expect(config.OTHER_VAR).toBeUndefined();
|
||||
});
|
||||
|
||||
it('should parse values by default', () => {
|
||||
process.env.BOOL_TRUE = 'true';
|
||||
process.env.BOOL_FALSE = 'false';
|
||||
process.env.NUMBER = '42';
|
||||
process.env.STRING = 'hello';
|
||||
process.env.NULL_VAL = 'null';
|
||||
|
||||
loader = new EnvLoader();
|
||||
const config = loader.load();
|
||||
|
||||
// Values are nested based on underscores
|
||||
expect((config as any).bool?.true).toBe(true);
|
||||
expect((config as any).bool?.false).toBe(false);
|
||||
expect((config as any).NUMBER).toBe(42); // No underscore, keeps original case
|
||||
expect((config as any).STRING).toBe('hello'); // No underscore, keeps original case
|
||||
expect((config as any).null?.val).toBeNull();
|
||||
});
|
||||
|
||||
it('should parse JSON values', () => {
|
||||
process.env.JSON_ARRAY = '["a","b","c"]';
|
||||
process.env.JSON_OBJECT = '{"key":"value","num":123}';
|
||||
|
||||
loader = new EnvLoader();
|
||||
const config = loader.load();
|
||||
|
||||
// JSON values are parsed and nested
|
||||
expect((config as any).json?.array).toEqual(['a', 'b', 'c']);
|
||||
expect((config as any).json?.object).toEqual({ key: 'value', num: 123 });
|
||||
});
|
||||
|
||||
it('should disable parsing when parseValues is false', () => {
|
||||
process.env.VALUE = 'true';
|
||||
|
||||
loader = new EnvLoader('', { parseValues: false, parseJson: false });
|
||||
const config = loader.load();
|
||||
|
||||
expect(config.VALUE).toBe('true'); // String, not boolean
|
||||
});
|
||||
|
||||
it('should convert to camelCase when enabled', () => {
|
||||
process.env.MY_VAR_NAME = 'value';
|
||||
process.env.ANOTHER_TEST_VAR = 'test';
|
||||
|
||||
loader = new EnvLoader('', { convertCase: true });
|
||||
const config = loader.load();
|
||||
|
||||
expect(config.myVarName).toBe('value');
|
||||
expect(config.anotherTestVar).toBe('test');
|
||||
});
|
||||
|
||||
it('should handle nested delimiter', () => {
|
||||
process.env.APP__NAME = 'myapp';
|
||||
process.env.APP__CONFIG__PORT = '3000';
|
||||
|
||||
loader = new EnvLoader('', { nestedDelimiter: '__' });
|
||||
const config = loader.load();
|
||||
|
||||
expect(config).toEqual({
|
||||
APP: {
|
||||
NAME: 'myapp',
|
||||
CONFIG: {
|
||||
PORT: 3000
|
||||
}
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
it('should convert underscores to nested structure by default', () => {
|
||||
process.env.DATABASE_HOST = 'localhost';
|
||||
process.env.DATABASE_PORT = '5432';
|
||||
process.env.DATABASE_CREDENTIALS_USER = 'admin';
|
||||
|
||||
loader = new EnvLoader();
|
||||
const config = loader.load();
|
||||
|
||||
expect(config).toEqual({
|
||||
database: {
|
||||
host: 'localhost',
|
||||
port: 5432,
|
||||
credentials: {
|
||||
user: 'admin'
|
||||
}
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
it('should handle single keys without underscores', () => {
|
||||
process.env.PORT = '3000';
|
||||
process.env.NAME = 'app';
|
||||
|
||||
loader = new EnvLoader();
|
||||
const config = loader.load();
|
||||
|
||||
// Single keys without underscores keep their original case
|
||||
expect((config as any).PORT).toBe(3000);
|
||||
// NAME has a special mapping to 'name'
|
||||
expect((config as any).name).toBe('app');
|
||||
});
|
||||
});
|
||||
|
||||
describe('provider mappings', () => {
|
||||
it('should map WebShare environment variables', () => {
|
||||
process.env.WEBSHARE_API_KEY = 'secret-key';
|
||||
process.env.WEBSHARE_ENABLED = 'true';
|
||||
|
||||
loader = new EnvLoader();
|
||||
const config = loader.load();
|
||||
|
||||
expect(config.webshare).toEqual({
|
||||
apiKey: 'secret-key',
|
||||
enabled: true,
|
||||
});
|
||||
});
|
||||
|
||||
it('should map EOD provider variables', () => {
|
||||
process.env.EOD_API_KEY = 'eod-key';
|
||||
process.env.EOD_BASE_URL = 'https://api.eod.com';
|
||||
process.env.EOD_TIER = 'premium';
|
||||
process.env.EOD_ENABLED = 'true';
|
||||
process.env.EOD_PRIORITY = '1';
|
||||
|
||||
loader = new EnvLoader();
|
||||
const config = loader.load();
|
||||
|
||||
expect(config.providers).toEqual({
|
||||
eod: {
|
||||
apiKey: 'eod-key',
|
||||
baseUrl: 'https://api.eod.com',
|
||||
tier: 'premium',
|
||||
enabled: true,
|
||||
priority: 1,
|
||||
},
|
||||
});
|
||||
});
|
||||
|
||||
it('should map Interactive Brokers variables', () => {
|
||||
process.env.IB_GATEWAY_HOST = 'localhost';
|
||||
process.env.IB_GATEWAY_PORT = '7497';
|
||||
process.env.IB_CLIENT_ID = '1';
|
||||
process.env.IB_ENABLED = 'false';
|
||||
|
||||
loader = new EnvLoader();
|
||||
const config = loader.load();
|
||||
|
||||
expect(config.providers).toEqual({
|
||||
ib: {
|
||||
gateway: {
|
||||
host: 'localhost',
|
||||
port: 7497,
|
||||
clientId: 1,
|
||||
},
|
||||
enabled: false,
|
||||
},
|
||||
});
|
||||
});
|
||||
|
||||
it('should map log configuration', () => {
|
||||
process.env.LOG_LEVEL = 'debug';
|
||||
process.env.LOG_FORMAT = 'json';
|
||||
process.env.LOG_HIDE_OBJECT = 'true';
|
||||
process.env.LOG_LOKI_ENABLED = 'true';
|
||||
process.env.LOG_LOKI_HOST = 'loki.example.com';
|
||||
process.env.LOG_LOKI_PORT = '3100';
|
||||
|
||||
loader = new EnvLoader();
|
||||
const config = loader.load();
|
||||
|
||||
expect(config.log).toEqual({
|
||||
level: 'debug',
|
||||
format: 'json',
|
||||
hideObject: true,
|
||||
loki: {
|
||||
enabled: true,
|
||||
host: 'loki.example.com',
|
||||
port: 3100,
|
||||
},
|
||||
});
|
||||
});
|
||||
|
||||
it('should not apply provider mappings when prefix is set', () => {
|
||||
process.env.APP_WEBSHARE_API_KEY = 'key';
|
||||
|
||||
loader = new EnvLoader('APP_');
|
||||
const config = loader.load();
|
||||
|
||||
// Should not map to webshare.apiKey, but still converts underscores to nested
|
||||
expect((config as any).webshare?.api?.key).toBe('key');
|
||||
expect((config as any).webshare?.apiKey).toBeUndefined();
|
||||
});
|
||||
|
||||
it('should not apply provider mappings when convertCase is true', () => {
|
||||
process.env.WEBSHARE_API_KEY = 'key';
|
||||
|
||||
loader = new EnvLoader('', { convertCase: true });
|
||||
const config = loader.load();
|
||||
|
||||
// Should convert to camelCase instead of mapping
|
||||
expect(config.webshareApiKey).toBe('key');
|
||||
expect(config.webshare).toBeUndefined();
|
||||
});
|
||||
});
|
||||
|
||||
describe('loadEnvFile', () => {
|
||||
it('should load .env file', () => {
|
||||
const envContent = `
|
||||
# Comment line
|
||||
TEST_VAR=value1
|
||||
ANOTHER_VAR="quoted value"
|
||||
NUMBER_VAR=42
|
||||
|
||||
# Another comment
|
||||
BOOL_VAR=true
|
||||
`;
|
||||
|
||||
(readFileSync as any).mockReturnValue(envContent);
|
||||
|
||||
loader = new EnvLoader();
|
||||
const config = loader.load();
|
||||
|
||||
expect(process.env.TEST_VAR).toBe('value1');
|
||||
expect(process.env.ANOTHER_VAR).toBe('quoted value');
|
||||
expect((config as any).test?.var).toBe('value1');
|
||||
expect((config as any).another?.var).toBe('quoted value');
|
||||
expect((config as any).number?.var).toBe(42);
|
||||
expect((config as any).bool?.var).toBe(true);
|
||||
});
|
||||
|
||||
it('should handle single quoted values', () => {
|
||||
const envContent = `VAR='single quoted'`;
|
||||
(readFileSync as any).mockReturnValue(envContent);
|
||||
|
||||
loader = new EnvLoader();
|
||||
loader.load();
|
||||
|
||||
expect(process.env.VAR).toBe('single quoted');
|
||||
});
|
||||
|
||||
it('should skip invalid lines', () => {
|
||||
const envContent = `
|
||||
VALID=value
|
||||
INVALID_LINE_WITHOUT_EQUALS
|
||||
ANOTHER_VALID=value2
|
||||
=NO_KEY
|
||||
KEY_WITHOUT_VALUE=
|
||||
`;
|
||||
|
||||
(readFileSync as any).mockReturnValue(envContent);
|
||||
|
||||
loader = new EnvLoader();
|
||||
const config = loader.load();
|
||||
|
||||
expect((config as any).VALID).toBe('value');
|
||||
expect((config as any).another?.valid).toBe('value2');
|
||||
expect((config as any).key?.without?.value).toBe(''); // Empty string
|
||||
});
|
||||
|
||||
it('should not override existing environment variables', () => {
|
||||
process.env.EXISTING = 'original';
|
||||
|
||||
const envContent = `EXISTING=from_file`;
|
||||
(readFileSync as any).mockReturnValue(envContent);
|
||||
|
||||
loader = new EnvLoader();
|
||||
loader.load();
|
||||
|
||||
expect(process.env.EXISTING).toBe('original');
|
||||
});
|
||||
|
||||
it('should handle file not found gracefully', () => {
|
||||
(readFileSync as any).mockImplementation(() => {
|
||||
const error: any = new Error('File not found');
|
||||
error.code = 'ENOENT';
|
||||
throw error;
|
||||
});
|
||||
|
||||
loader = new EnvLoader();
|
||||
// Should not throw
|
||||
expect(() => loader.load()).not.toThrow();
|
||||
});
|
||||
|
||||
it('should warn on other file errors', () => {
|
||||
const consoleWarnSpy = spyOn(console, 'warn').mockImplementation(() => {});
|
||||
|
||||
(readFileSync as any).mockImplementation(() => {
|
||||
const error: any = new Error('Permission denied');
|
||||
error.code = 'EACCES';
|
||||
throw error;
|
||||
});
|
||||
|
||||
loader = new EnvLoader();
|
||||
loader.load();
|
||||
|
||||
expect(consoleWarnSpy).toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should try multiple env file paths', () => {
|
||||
const readFileSpy = readFileSync as any;
|
||||
readFileSpy.mockImplementation((path: string) => {
|
||||
if (path === '../../.env') {
|
||||
return 'FOUND=true';
|
||||
}
|
||||
const error: any = new Error('Not found');
|
||||
error.code = 'ENOENT';
|
||||
throw error;
|
||||
});
|
||||
|
||||
loader = new EnvLoader();
|
||||
const config = loader.load();
|
||||
|
||||
expect(readFileSpy).toHaveBeenCalledWith('./.env', 'utf-8');
|
||||
expect(readFileSpy).toHaveBeenCalledWith('../.env', 'utf-8');
|
||||
expect(readFileSpy).toHaveBeenCalledWith('../../.env', 'utf-8');
|
||||
expect((config as any).FOUND).toBe(true);
|
||||
});
|
||||
});
|
||||
|
||||
describe('edge cases', () => {
|
||||
it('should handle empty values', () => {
|
||||
process.env.EMPTY = '';
|
||||
|
||||
loader = new EnvLoader();
|
||||
const config = loader.load();
|
||||
|
||||
expect((config as any).EMPTY).toBe('');
|
||||
});
|
||||
|
||||
it('should handle very long values', () => {
|
||||
const longValue = 'a'.repeat(10000);
|
||||
process.env.LONG = longValue;
|
||||
|
||||
loader = new EnvLoader();
|
||||
const config = loader.load();
|
||||
|
||||
expect((config as any).LONG).toBe(longValue);
|
||||
});
|
||||
|
||||
it('should handle special characters in values', () => {
|
||||
process.env.SPECIAL = '!@#$%^&*()_+-=[]{}|;:,.<>?';
|
||||
|
||||
loader = new EnvLoader();
|
||||
const config = loader.load();
|
||||
|
||||
expect((config as any).SPECIAL).toBe('!@#$%^&*()_+-=[]{}|;:,.<>?');
|
||||
});
|
||||
|
||||
it('should handle readonly properties gracefully', () => {
|
||||
// Simulate readonly property scenario
|
||||
const config = { readonly: 'original' };
|
||||
Object.defineProperty(config, 'readonly', {
|
||||
writable: false,
|
||||
configurable: false
|
||||
});
|
||||
|
||||
process.env.READONLY = 'new_value';
|
||||
|
||||
loader = new EnvLoader();
|
||||
// Should not throw when trying to set readonly properties
|
||||
expect(() => loader.load()).not.toThrow();
|
||||
});
|
||||
|
||||
it('should parse undefined string as undefined', () => {
|
||||
process.env.UNDEF = 'undefined';
|
||||
|
||||
loader = new EnvLoader();
|
||||
const config = loader.load();
|
||||
|
||||
expect((config as any).UNDEF).toBeUndefined();
|
||||
});
|
||||
|
||||
it('should handle number-like strings that should remain strings', () => {
|
||||
process.env.ZIP_CODE = '00123'; // Leading zeros
|
||||
process.env.PHONE = '+1234567890';
|
||||
|
||||
loader = new EnvLoader();
|
||||
const config = loader.load();
|
||||
|
||||
expect((config as any).zip?.code).toBe('00123'); // Should remain string
|
||||
expect((config as any).PHONE).toBe('+1234567890'); // Should remain string
|
||||
});
|
||||
|
||||
// --- EnvLoader edge cases ---
// NOTE(review): these tests mutate process.env and reassign the shared
// `loader` declared above this chunk; they appear to rely on a beforeEach
// outside this view to reset that state — confirm.

it('should handle deeply nested structures', () => {
  // Underscore-separated variables expand into nested config objects.
  process.env.A_B_C_D_E_F = 'deep';

  loader = new EnvLoader();
  const config = loader.load();

  expect(config.a).toEqual({
    b: {
      c: {
        d: {
          e: {
            f: 'deep'
          }
        }
      }
    }
  });
});

it('should throw ConfigLoaderError on unexpected error', () => {
  // Mock an error during load
  // Patching the global Object.entries forces load() into its error path.
  const originalEntries = Object.entries;
  Object.entries = () => {
    throw new Error('Unexpected error');
  };

  loader = new EnvLoader();

  try {
    expect(() => loader.load()).toThrow(ConfigLoaderError);
    expect(() => loader.load()).toThrow('Failed to load environment variables');
  } finally {
    // Always restore the global so later tests are unaffected.
    Object.entries = originalEntries;
  }
});

it('should handle empty path in setNestedValue', () => {
  loader = new EnvLoader();
  const config = {};

  // Test private method indirectly by setting an env var with special key
  process.env.EMPTY_PATH_TEST = 'value';

  // Force an empty path scenario through provider mapping
  // (private method reached via an `any` cast).
  const privateLoader = loader as any;
  const result = privateLoader.setNestedValue(config, [], 'value');

  // An empty key path is rejected rather than written.
  expect(result).toBe(false);
});

it('should handle QuoteMedia provider mappings', () => {
  process.env.QM_USERNAME = 'testuser';
  process.env.QM_PASSWORD = 'testpass';
  process.env.QM_BASE_URL = 'https://api.quotemedia.com';
  process.env.QM_WEBMASTER_ID = '12345';
  process.env.QM_ENABLED = 'true';
  process.env.QM_PRIORITY = '5';

  loader = new EnvLoader();
  const config = loader.load();

  // ENABLED is coerced to boolean and PRIORITY to number; the
  // webmaster id stays a string.
  expect(config.providers).toEqual(expect.objectContaining({
    qm: {
      username: 'testuser',
      password: 'testpass',
      baseUrl: 'https://api.quotemedia.com',
      webmasterId: '12345',
      enabled: true,
      priority: 5,
    },
  }));
});

it('should handle Yahoo Finance provider mappings', () => {
  process.env.YAHOO_BASE_URL = 'https://finance.yahoo.com';
  process.env.YAHOO_COOKIE_JAR = '/path/to/cookies';
  process.env.YAHOO_CRUMB = 'abc123';
  process.env.YAHOO_ENABLED = 'false';
  process.env.YAHOO_PRIORITY = '10';

  loader = new EnvLoader();
  const config = loader.load();

  expect(config.providers).toEqual(expect.objectContaining({
    yahoo: {
      baseUrl: 'https://finance.yahoo.com',
      cookieJar: '/path/to/cookies',
      crumb: 'abc123',
      enabled: false,
      priority: 10,
    },
  }));
});

it('should handle additional provider mappings', () => {
  process.env.WEBSHARE_API_URL = 'https://api.webshare.io';
  process.env.IB_ACCOUNT = 'DU123456';
  process.env.IB_MARKET_DATA_TYPE = '1';
  process.env.IB_PRIORITY = '3';
  process.env.VERSION = '1.2.3';
  process.env.DEBUG_MODE = 'true';

  loader = new EnvLoader();
  const config = loader.load();

  expect(config.webshare).toEqual(expect.objectContaining({
    apiUrl: 'https://api.webshare.io',
  }));
  // Note the asymmetry: IB_PRIORITY becomes a number while
  // IB_MARKET_DATA_TYPE remains a string.
  expect(config.providers?.ib).toEqual(expect.objectContaining({
    account: 'DU123456',
    marketDataType: '1',
    priority: 3,
  }));
  expect(config.version).toBe('1.2.3');
  expect(config.debug).toBe(true);
});

it('should handle all .env file paths exhausted', () => {
  // Simulate every candidate .env path being absent.
  const readFileSpy = readFileSync as any;
  readFileSpy.mockImplementation((path: string) => {
    const error: any = new Error('Not found');
    error.code = 'ENOENT';
    throw error;
  });

  loader = new EnvLoader();
  const config = loader.load();

  // Should try all paths
  expect(readFileSpy).toHaveBeenCalledWith('./.env', 'utf-8');
  expect(readFileSpy).toHaveBeenCalledWith('../.env', 'utf-8');
  expect(readFileSpy).toHaveBeenCalledWith('../../.env', 'utf-8');
  expect(readFileSpy).toHaveBeenCalledWith('../../../.env', 'utf-8');

  // Should return empty config when no env files found
  expect(config).toEqual({});
});

it('should handle key without equals in env file', () => {
  const envContent = `KEY_WITHOUT_EQUALS`;
  (readFileSync as any).mockReturnValue(envContent);

  loader = new EnvLoader();
  const config = loader.load();

  // Should skip lines without equals
  expect(Object.keys(config).length).toBe(0);
});

it('should handle nested structure with existing non-object value', () => {
  // A scalar value and a nested key share the CONFIG prefix; the nested
  // assignment wins and CONFIG becomes an object.
  process.env.CONFIG = 'string_value';
  process.env.CONFIG_NESTED = 'nested_value';

  loader = new EnvLoader();
  const config = loader.load();

  // CONFIG should be an object with nested value
  expect((config as any).config).toEqual({
    nested: 'nested_value'
  });
});

it('should skip setNestedValue when path reduction fails', () => {
  // Create a scenario where the reduce operation would fail
  const testConfig: any = {};
  Object.defineProperty(testConfig, 'protected', {
    value: 'immutable',
    writable: false,
    configurable: false
  });

  process.env.PROTECTED_NESTED_VALUE = 'test';

  loader = new EnvLoader();
  // Should not throw, but skip the problematic variable
  expect(() => loader.load()).not.toThrow();
});
|
||||
});
|
||||
});
|
||||
436
libs/core/config/test/file.loader.test.ts
Normal file
436
libs/core/config/test/file.loader.test.ts
Normal file
|
|
@ -0,0 +1,436 @@
|
|||
import { describe, it, expect, beforeEach, afterEach, mock } from 'bun:test';
|
||||
import { existsSync, readFileSync } from 'fs';
|
||||
import { FileLoader } from '../src/loaders/file.loader';
|
||||
import { ConfigLoaderError } from '../src/errors';
|
||||
|
||||
// Mock fs module
|
||||
mock.module('fs', () => ({
|
||||
existsSync: mock(() => false),
|
||||
readFileSync: mock(() => '')
|
||||
}));
|
||||
|
||||
// Unit tests for FileLoader: reads <configPath>/default.json and overlays
// <configPath>/<environment>.json on top via a deep merge. The fs module is
// mocked above, so existsSync/readFileSync are controllable spies.
describe('FileLoader', () => {
  let loader: FileLoader;
  const configPath = '/app/config';
  const environment = 'development';

  beforeEach(() => {
    // Reset mocks
    (existsSync as any).mockReset();
    (readFileSync as any).mockReset();
  });

  describe('constructor', () => {
    it('should have medium priority', () => {
      // File-based configuration has a fixed priority of 50.
      loader = new FileLoader(configPath, environment);
      expect(loader.priority).toBe(50);
    });

    it('should store config path and environment', () => {
      // Construction alone must not touch the filesystem or throw.
      loader = new FileLoader('/custom/path', 'production');
      expect(loader).toBeDefined();
    });
  });

  describe('load', () => {
    it('should load only default.json when environment file does not exist', () => {
      const defaultConfig = {
        name: 'app',
        port: 3000,
        features: ['auth', 'cache'],
      };

      // Only default.json "exists" on disk for this case.
      (existsSync as any).mockImplementation((path: string) => {
        return path.endsWith('default.json');
      });

      (readFileSync as any).mockImplementation((path: string) => {
        if (path.endsWith('default.json')) {
          return JSON.stringify(defaultConfig);
        }
        return '{}';
      });

      loader = new FileLoader(configPath, environment);
      const config = loader.load();

      // Both paths are probed, but only the existing one is read.
      expect(existsSync).toHaveBeenCalledWith('/app/config/default.json');
      expect(existsSync).toHaveBeenCalledWith('/app/config/development.json');
      expect(readFileSync).toHaveBeenCalledWith('/app/config/default.json', 'utf-8');
      expect(config).toEqual(defaultConfig);
    });

    it('should load and merge default and environment configs', () => {
      const defaultConfig = {
        name: 'app',
        port: 3000,
        database: {
          host: 'localhost',
          port: 5432,
        },
      };

      const devConfig = {
        port: 3001,
        database: {
          host: 'dev-db',
        },
        debug: true,
      };

      (existsSync as any).mockReturnValue(true);

      (readFileSync as any).mockImplementation((path: string) => {
        if (path.endsWith('default.json')) {
          return JSON.stringify(defaultConfig);
        }
        if (path.endsWith('development.json')) {
          return JSON.stringify(devConfig);
        }
        return '{}';
      });

      loader = new FileLoader(configPath, environment);
      const config = loader.load();

      // Environment values win; untouched default keys survive the merge.
      expect(config).toEqual({
        name: 'app',
        port: 3001, // Overridden by dev config
        database: {
          host: 'dev-db', // Overridden by dev config
          port: 5432, // Preserved from default
        },
        debug: true, // Added by dev config
      });
    });

    it('should handle production environment', () => {
      const defaultConfig = { name: 'app', debug: true };
      const prodConfig = { debug: false, secure: true };

      (existsSync as any).mockReturnValue(true);

      (readFileSync as any).mockImplementation((path: string) => {
        if (path.endsWith('default.json')) {
          return JSON.stringify(defaultConfig);
        }
        if (path.endsWith('production.json')) {
          return JSON.stringify(prodConfig);
        }
        return '{}';
      });

      loader = new FileLoader(configPath, 'production');
      const config = loader.load();

      // The environment name is used directly to build the overlay filename.
      expect(existsSync).toHaveBeenCalledWith('/app/config/production.json');
      expect(config).toEqual({
        name: 'app',
        debug: false,
        secure: true,
      });
    });

    it('should return empty object when no config files exist', () => {
      (existsSync as any).mockReturnValue(false);

      loader = new FileLoader(configPath, environment);
      const config = loader.load();

      // Missing files are not an error, and nothing is read from disk.
      expect(config).toEqual({});
      expect(readFileSync).not.toHaveBeenCalled();
    });

    it('should throw ConfigLoaderError on JSON parse error', () => {
      (existsSync as any).mockReturnValue(true);
      (readFileSync as any).mockReturnValue('{ invalid json');

      loader = new FileLoader(configPath, environment);

      // Malformed JSON is wrapped in the loader's own error type.
      expect(() => loader.load()).toThrow(ConfigLoaderError);
      expect(() => loader.load()).toThrow('Failed to load configuration files');
    });

    it('should throw ConfigLoaderError on file read error', () => {
      (existsSync as any).mockReturnValue(true);
      (readFileSync as any).mockImplementation(() => {
        throw new Error('Permission denied');
      });

      loader = new FileLoader(configPath, environment);

      // I/O failures are wrapped the same way as parse failures.
      expect(() => loader.load()).toThrow(ConfigLoaderError);
      expect(() => loader.load()).toThrow('Failed to load configuration files');
    });

    it('should handle different config paths', () => {
      const customPath = '/custom/config/dir';
      const config = { custom: true };

      (existsSync as any).mockImplementation((path: string) => {
        return path.startsWith(customPath);
      });

      (readFileSync as any).mockReturnValue(JSON.stringify(config));

      loader = new FileLoader(customPath, environment);
      loader.load();

      // Candidate filenames are resolved relative to the given directory.
      expect(existsSync).toHaveBeenCalledWith(`${customPath}/default.json`);
      expect(existsSync).toHaveBeenCalledWith(`${customPath}/development.json`);
    });
  });

  describe('deepMerge', () => {
    it('should handle null and undefined values', () => {
      const defaultConfig = {
        a: 'value',
        b: null,
        c: 'default',
      };

      const envConfig = {
        a: null,
        b: 'updated',
        // Note: undefined values are not preserved in JSON
      };

      (existsSync as any).mockReturnValue(true);
      (readFileSync as any).mockImplementation((path: string) => {
        if (path.endsWith('default.json')) {
          return JSON.stringify(defaultConfig);
        }
        if (path.endsWith('development.json')) {
          return JSON.stringify(envConfig);
        }
        return '{}';
      });

      loader = new FileLoader(configPath, environment);
      const config = loader.load();

      // An explicit null in the overlay overrides a non-null default.
      expect(config).toEqual({
        a: null,
        b: 'updated',
        c: 'default', // Preserved from default since envConfig doesn't have 'c'
      });
    });

    it('should handle arrays correctly', () => {
      const defaultConfig = {
        items: [1, 2, 3],
        features: ['auth', 'cache'],
      };

      const envConfig = {
        items: [4, 5],
        features: ['auth', 'cache', 'search'],
      };

      (existsSync as any).mockReturnValue(true);
      (readFileSync as any).mockImplementation((path: string) => {
        if (path.endsWith('default.json')) {
          return JSON.stringify(defaultConfig);
        }
        if (path.endsWith('development.json')) {
          return JSON.stringify(envConfig);
        }
        return '{}';
      });

      loader = new FileLoader(configPath, environment);
      const config = loader.load();

      // Arrays should be replaced, not merged
      expect(config).toEqual({
        items: [4, 5],
        features: ['auth', 'cache', 'search'],
      });
    });

    it('should handle deeply nested objects', () => {
      const defaultConfig = {
        level1: {
          level2: {
            level3: {
              a: 1,
              b: 2,
            },
            c: 3,
          },
          d: 4,
        },
      };

      const envConfig = {
        level1: {
          level2: {
            level3: {
              b: 22,
              e: 5,
            },
            f: 6,
          },
        },
      };

      (existsSync as any).mockReturnValue(true);
      (readFileSync as any).mockImplementation((path: string) => {
        if (path.endsWith('default.json')) {
          return JSON.stringify(defaultConfig);
        }
        if (path.endsWith('development.json')) {
          return JSON.stringify(envConfig);
        }
        return '{}';
      });

      loader = new FileLoader(configPath, environment);
      const config = loader.load();

      // The merge recurses at every object level: sibling keys are kept,
      // colliding leaves take the overlay's value.
      expect(config).toEqual({
        level1: {
          level2: {
            level3: {
              a: 1,
              b: 22,
              e: 5,
            },
            c: 3,
            f: 6,
          },
          d: 4,
        },
      });
    });

    it('should handle Date and RegExp objects', () => {
      // Dates and RegExps in JSON are serialized as strings
      const defaultConfig = {
        createdAt: '2023-01-01T00:00:00.000Z',
        pattern: '/test/gi',
      };

      const envConfig = {
        updatedAt: '2023-06-01T00:00:00.000Z',
      };

      (existsSync as any).mockReturnValue(true);
      (readFileSync as any).mockImplementation((path: string) => {
        if (path.endsWith('default.json')) {
          return JSON.stringify(defaultConfig);
        }
        if (path.endsWith('development.json')) {
          return JSON.stringify(envConfig);
        }
        return '{}';
      });

      loader = new FileLoader(configPath, environment);
      const config = loader.load();

      // Such values merge as ordinary strings.
      expect(config).toEqual({
        createdAt: '2023-01-01T00:00:00.000Z',
        pattern: '/test/gi',
        updatedAt: '2023-06-01T00:00:00.000Z',
      });
    });
  });

  describe('edge cases', () => {
    it('should handle empty JSON files', () => {
      (existsSync as any).mockReturnValue(true);
      (readFileSync as any).mockReturnValue('{}');

      loader = new FileLoader(configPath, environment);
      const config = loader.load();

      expect(config).toEqual({});
    });

    it('should handle whitespace in JSON files', () => {
      const config = { test: 'value' };

      (existsSync as any).mockReturnValue(true);
      // Leading/trailing whitespace around the JSON payload must be tolerated.
      (readFileSync as any).mockReturnValue(` \n\t${JSON.stringify(config)}\n `);

      loader = new FileLoader(configPath, environment);
      const result = loader.load();

      expect(result).toEqual(config);
    });

    it('should handle very large config files', () => {
      // 1000 top-level keys, each with a nested object.
      const largeConfig: Record<string, unknown> = {};
      for (let i = 0; i < 1000; i++) {
        largeConfig[`key_${i}`] = {
          value: i,
          nested: { data: `data_${i}` },
        };
      }

      (existsSync as any).mockReturnValue(true);
      (readFileSync as any).mockReturnValue(JSON.stringify(largeConfig));

      loader = new FileLoader(configPath, environment);
      const config = loader.load();

      expect(Object.keys(config)).toHaveLength(1000);
      expect(config.key_500).toEqual({
        value: 500,
        nested: { data: 'data_500' },
      });
    });

    it('should handle unicode in config values', () => {
      const config = {
        emoji: '🚀',
        chinese: '你好',
        arabic: 'مرحبا',
      };

      (existsSync as any).mockReturnValue(true);
      (readFileSync as any).mockReturnValue(JSON.stringify(config));

      loader = new FileLoader(configPath, environment);
      const result = loader.load();

      expect(result).toEqual(config);
    });

    it('should handle config with circular reference patterns', () => {
      // JSON doesn't support circular references, but we can have
      // patterns that look circular
      const config = {
        parent: {
          child: {
            ref: 'parent',
          },
        },
      };

      (existsSync as any).mockReturnValue(true);
      (readFileSync as any).mockReturnValue(JSON.stringify(config));

      loader = new FileLoader(configPath, environment);
      const result = loader.load();

      expect(result).toEqual(config);
    });

    it('should handle numeric string keys', () => {
      const config = {
        '123': 'numeric key',
        '456': { nested: 'value' },
      };

      (existsSync as any).mockReturnValue(true);
      (readFileSync as any).mockReturnValue(JSON.stringify(config));

      loader = new FileLoader(configPath, environment);
      const result = loader.load();

      expect(result).toEqual(config);
    });
  });
});
|
||||
896
libs/core/config/test/schemas.test.ts
Normal file
896
libs/core/config/test/schemas.test.ts
Normal file
|
|
@ -0,0 +1,896 @@
|
|||
import { describe, it, expect } from 'bun:test';
|
||||
import { z } from 'zod';
|
||||
import {
|
||||
baseConfigSchema,
|
||||
environmentSchema,
|
||||
serviceConfigSchema,
|
||||
loggingConfigSchema,
|
||||
queueConfigSchema,
|
||||
httpConfigSchema,
|
||||
webshareConfigSchema,
|
||||
browserConfigSchema,
|
||||
proxyConfigSchema,
|
||||
postgresConfigSchema,
|
||||
questdbConfigSchema,
|
||||
mongodbConfigSchema,
|
||||
dragonflyConfigSchema,
|
||||
databaseConfigSchema,
|
||||
baseProviderConfigSchema,
|
||||
eodProviderConfigSchema,
|
||||
ibProviderConfigSchema,
|
||||
qmProviderConfigSchema,
|
||||
yahooProviderConfigSchema,
|
||||
webshareProviderConfigSchema,
|
||||
providerConfigSchema,
|
||||
} from '../src/schemas';
|
||||
|
||||
describe('Config Schemas', () => {
|
||||
describe('environmentSchema', () => {
|
||||
it('should accept valid environments', () => {
|
||||
expect(environmentSchema.parse('development')).toBe('development');
|
||||
expect(environmentSchema.parse('test')).toBe('test');
|
||||
expect(environmentSchema.parse('production')).toBe('production');
|
||||
});
|
||||
|
||||
it('should reject invalid environments', () => {
|
||||
expect(() => environmentSchema.parse('staging')).toThrow();
|
||||
expect(() => environmentSchema.parse('dev')).toThrow();
|
||||
expect(() => environmentSchema.parse('')).toThrow();
|
||||
});
|
||||
});
|
||||
|
||||
describe('baseConfigSchema', () => {
|
||||
it('should accept minimal valid config', () => {
|
||||
const config = baseConfigSchema.parse({});
|
||||
expect(config).toEqual({
|
||||
debug: false,
|
||||
});
|
||||
});
|
||||
|
||||
it('should accept full valid config', () => {
|
||||
const input = {
|
||||
environment: 'production',
|
||||
name: 'test-app',
|
||||
version: '1.0.0',
|
||||
debug: true,
|
||||
};
|
||||
|
||||
const config = baseConfigSchema.parse(input);
|
||||
expect(config).toEqual(input);
|
||||
});
|
||||
|
||||
it('should apply default values', () => {
|
||||
const config = baseConfigSchema.parse({ name: 'app' });
|
||||
expect(config.debug).toBe(false);
|
||||
});
|
||||
|
||||
it('should reject invalid environment in base config', () => {
|
||||
expect(() => baseConfigSchema.parse({ environment: 'invalid' })).toThrow();
|
||||
});
|
||||
});
|
||||
|
||||
describe('serviceConfigSchema', () => {
|
||||
it('should require name and port', () => {
|
||||
expect(() => serviceConfigSchema.parse({})).toThrow();
|
||||
expect(() => serviceConfigSchema.parse({ name: 'test' })).toThrow();
|
||||
expect(() => serviceConfigSchema.parse({ port: 3000 })).toThrow();
|
||||
});
|
||||
|
||||
it('should accept minimal valid config', () => {
|
||||
const config = serviceConfigSchema.parse({
|
||||
name: 'test-service',
|
||||
port: 3000,
|
||||
});
|
||||
|
||||
expect(config).toEqual({
|
||||
name: 'test-service',
|
||||
port: 3000,
|
||||
host: '0.0.0.0',
|
||||
healthCheckPath: '/health',
|
||||
metricsPath: '/metrics',
|
||||
shutdownTimeout: 30000,
|
||||
cors: {
|
||||
enabled: true,
|
||||
origin: '*',
|
||||
credentials: true,
|
||||
},
|
||||
});
|
||||
});
|
||||
|
||||
it('should accept full config', () => {
|
||||
const input = {
|
||||
name: 'test-service',
|
||||
serviceName: 'test-service',
|
||||
port: 8080,
|
||||
host: 'localhost',
|
||||
healthCheckPath: '/api/health',
|
||||
metricsPath: '/api/metrics',
|
||||
shutdownTimeout: 60000,
|
||||
cors: {
|
||||
enabled: false,
|
||||
origin: ['http://localhost:3000', 'https://example.com'],
|
||||
credentials: false,
|
||||
},
|
||||
};
|
||||
|
||||
const config = serviceConfigSchema.parse(input);
|
||||
expect(config).toEqual(input);
|
||||
});
|
||||
|
||||
it('should validate port range', () => {
|
||||
expect(() => serviceConfigSchema.parse({ name: 'test', port: 0 })).toThrow();
|
||||
expect(() => serviceConfigSchema.parse({ name: 'test', port: 65536 })).toThrow();
|
||||
expect(() => serviceConfigSchema.parse({ name: 'test', port: -1 })).toThrow();
|
||||
|
||||
// Valid ports
|
||||
expect(serviceConfigSchema.parse({ name: 'test', port: 1 }).port).toBe(1);
|
||||
expect(serviceConfigSchema.parse({ name: 'test', port: 65535 }).port).toBe(65535);
|
||||
});
|
||||
|
||||
it('should handle CORS origin as string or array', () => {
|
||||
const stringOrigin = serviceConfigSchema.parse({
|
||||
name: 'test',
|
||||
port: 3000,
|
||||
cors: { origin: 'http://localhost:3000' },
|
||||
});
|
||||
expect(stringOrigin.cors.origin).toBe('http://localhost:3000');
|
||||
|
||||
const arrayOrigin = serviceConfigSchema.parse({
|
||||
name: 'test',
|
||||
port: 3000,
|
||||
cors: { origin: ['http://localhost:3000', 'https://example.com'] },
|
||||
});
|
||||
expect(arrayOrigin.cors.origin).toEqual(['http://localhost:3000', 'https://example.com']);
|
||||
});
|
||||
});
|
||||
|
||||
describe('loggingConfigSchema', () => {
|
||||
it('should accept minimal config with defaults', () => {
|
||||
const config = loggingConfigSchema.parse({});
|
||||
expect(config).toEqual({
|
||||
level: 'info',
|
||||
format: 'json',
|
||||
hideObject: false,
|
||||
});
|
||||
});
|
||||
|
||||
it('should accept all log levels', () => {
|
||||
const levels = ['trace', 'debug', 'info', 'warn', 'error', 'fatal'];
|
||||
for (const level of levels) {
|
||||
const config = loggingConfigSchema.parse({ level });
|
||||
expect(config.level).toBe(level);
|
||||
}
|
||||
});
|
||||
|
||||
it('should reject invalid log levels', () => {
|
||||
expect(() => loggingConfigSchema.parse({ level: 'verbose' })).toThrow();
|
||||
expect(() => loggingConfigSchema.parse({ level: 'warning' })).toThrow();
|
||||
});
|
||||
|
||||
it('should accept loki configuration', () => {
|
||||
const config = loggingConfigSchema.parse({
|
||||
loki: {
|
||||
enabled: true,
|
||||
host: 'loki.example.com',
|
||||
port: 3100,
|
||||
labels: { app: 'test', env: 'prod' },
|
||||
},
|
||||
});
|
||||
|
||||
expect(config.loki).toEqual({
|
||||
enabled: true,
|
||||
host: 'loki.example.com',
|
||||
port: 3100,
|
||||
labels: { app: 'test', env: 'prod' },
|
||||
});
|
||||
});
|
||||
|
||||
it('should apply loki defaults', () => {
|
||||
const config = loggingConfigSchema.parse({
|
||||
loki: { enabled: true },
|
||||
});
|
||||
|
||||
expect(config.loki).toEqual({
|
||||
enabled: true,
|
||||
host: 'localhost',
|
||||
port: 3100,
|
||||
labels: {},
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('queueConfigSchema', () => {
|
||||
it('should accept minimal config with defaults', () => {
|
||||
const config = queueConfigSchema.parse({
|
||||
redis: {}, // redis is required, but its properties have defaults
|
||||
});
|
||||
expect(config).toEqual({
|
||||
enabled: true,
|
||||
redis: {
|
||||
host: 'localhost',
|
||||
port: 6379,
|
||||
db: 1,
|
||||
},
|
||||
workers: 1,
|
||||
concurrency: 1,
|
||||
enableScheduledJobs: true,
|
||||
defaultJobOptions: {
|
||||
attempts: 3,
|
||||
backoff: {
|
||||
type: 'exponential',
|
||||
delay: 1000,
|
||||
},
|
||||
removeOnComplete: 100,
|
||||
removeOnFail: 100,
|
||||
},
|
||||
});
|
||||
});
|
||||
|
||||
it('should accept full config', () => {
|
||||
const input = {
|
||||
enabled: false,
|
||||
redis: {
|
||||
host: 'redis.example.com',
|
||||
port: 6380,
|
||||
password: 'secret',
|
||||
db: 2,
|
||||
},
|
||||
workers: 4,
|
||||
concurrency: 10,
|
||||
enableScheduledJobs: false,
|
||||
defaultJobOptions: {
|
||||
attempts: 5,
|
||||
backoff: {
|
||||
type: 'fixed' as const,
|
||||
delay: 2000,
|
||||
},
|
||||
removeOnComplete: 50,
|
||||
removeOnFail: 200,
|
||||
timeout: 60000,
|
||||
},
|
||||
};
|
||||
|
||||
const config = queueConfigSchema.parse(input);
|
||||
expect(config).toEqual(input);
|
||||
});
|
||||
|
||||
it('should validate backoff type', () => {
|
||||
const exponential = queueConfigSchema.parse({
|
||||
redis: {},
|
||||
defaultJobOptions: { backoff: { type: 'exponential' } },
|
||||
});
|
||||
expect(exponential.defaultJobOptions.backoff.type).toBe('exponential');
|
||||
|
||||
const fixed = queueConfigSchema.parse({
|
||||
redis: {},
|
||||
defaultJobOptions: { backoff: { type: 'fixed' } },
|
||||
});
|
||||
expect(fixed.defaultJobOptions.backoff.type).toBe('fixed');
|
||||
|
||||
expect(() =>
|
||||
queueConfigSchema.parse({
|
||||
redis: {},
|
||||
defaultJobOptions: { backoff: { type: 'linear' } },
|
||||
})
|
||||
).toThrow();
|
||||
});
|
||||
});
|
||||
|
||||
describe('httpConfigSchema', () => {
|
||||
it('should accept minimal config with defaults', () => {
|
||||
const config = httpConfigSchema.parse({});
|
||||
expect(config).toEqual({
|
||||
timeout: 30000,
|
||||
retries: 3,
|
||||
retryDelay: 1000,
|
||||
});
|
||||
});
|
||||
|
||||
it('should accept full config', () => {
|
||||
const input = {
|
||||
timeout: 60000,
|
||||
retries: 5,
|
||||
retryDelay: 2000,
|
||||
userAgent: 'MyApp/1.0',
|
||||
proxy: {
|
||||
enabled: true,
|
||||
url: 'http://proxy.example.com:8080',
|
||||
auth: {
|
||||
username: 'user',
|
||||
password: 'pass',
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
const config = httpConfigSchema.parse(input);
|
||||
expect(config).toEqual(input);
|
||||
});
|
||||
|
||||
it('should validate proxy URL', () => {
|
||||
expect(() =>
|
||||
httpConfigSchema.parse({
|
||||
proxy: { url: 'not-a-url' },
|
||||
})
|
||||
).toThrow();
|
||||
|
||||
const validProxy = httpConfigSchema.parse({
|
||||
proxy: { url: 'http://proxy.example.com' },
|
||||
});
|
||||
expect(validProxy.proxy?.url).toBe('http://proxy.example.com');
|
||||
});
|
||||
});
|
||||
|
||||
describe('webshareConfigSchema', () => {
|
||||
it('should accept minimal config with defaults', () => {
|
||||
const config = webshareConfigSchema.parse({});
|
||||
expect(config).toEqual({
|
||||
apiUrl: 'https://proxy.webshare.io/api/v2/',
|
||||
enabled: true,
|
||||
});
|
||||
});
|
||||
|
||||
it('should accept full config', () => {
|
||||
const input = {
|
||||
apiKey: 'test-api-key',
|
||||
apiUrl: 'https://custom.webshare.io/api/v3/',
|
||||
enabled: false,
|
||||
};
|
||||
|
||||
const config = webshareConfigSchema.parse(input);
|
||||
expect(config).toEqual(input);
|
||||
});
|
||||
});
|
||||
|
||||
describe('browserConfigSchema', () => {
|
||||
it('should accept minimal config with defaults', () => {
|
||||
const config = browserConfigSchema.parse({});
|
||||
expect(config).toEqual({
|
||||
headless: true,
|
||||
timeout: 30000,
|
||||
});
|
||||
});
|
||||
|
||||
it('should accept custom values', () => {
|
||||
const config = browserConfigSchema.parse({
|
||||
headless: false,
|
||||
timeout: 60000,
|
||||
});
|
||||
expect(config).toEqual({
|
||||
headless: false,
|
||||
timeout: 60000,
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('proxyConfigSchema', () => {
|
||||
it('should accept minimal config with defaults', () => {
|
||||
const config = proxyConfigSchema.parse({});
|
||||
expect(config).toEqual({
|
||||
enabled: false,
|
||||
cachePrefix: 'proxy:',
|
||||
ttl: 3600,
|
||||
});
|
||||
});
|
||||
|
||||
it('should accept full config', () => {
|
||||
const input = {
|
||||
enabled: true,
|
||||
cachePrefix: 'custom:proxy:',
|
||||
ttl: 7200,
|
||||
webshare: {
|
||||
apiKey: 'test-key',
|
||||
apiUrl: 'https://api.webshare.io/v2/',
|
||||
},
|
||||
};
|
||||
|
||||
const config = proxyConfigSchema.parse(input);
|
||||
expect(config).toEqual(input);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Schema Composition', () => {
|
||||
it('should be able to compose schemas', () => {
|
||||
const appConfigSchema = z.object({
|
||||
base: baseConfigSchema,
|
||||
service: serviceConfigSchema,
|
||||
logging: loggingConfigSchema,
|
||||
});
|
||||
|
||||
const config = appConfigSchema.parse({
|
||||
base: {
|
||||
name: 'test-app',
|
||||
version: '1.0.0',
|
||||
},
|
||||
service: {
|
||||
name: 'test-service',
|
||||
port: 3000,
|
||||
},
|
||||
logging: {
|
||||
level: 'debug',
|
||||
},
|
||||
});
|
||||
|
||||
expect(config.base.debug).toBe(false);
|
||||
expect(config.service.host).toBe('0.0.0.0');
|
||||
expect(config.logging.format).toBe('json');
|
||||
});
|
||||
});
|
||||
|
||||
describe('Edge Cases', () => {
|
||||
it('should handle empty strings appropriately', () => {
|
||||
// Empty strings are allowed by z.string() unless .min(1) is specified
|
||||
const serviceConfig = serviceConfigSchema.parse({ name: '', port: 3000 });
|
||||
expect(serviceConfig.name).toBe('');
|
||||
|
||||
const baseConfig = baseConfigSchema.parse({ name: '' });
|
||||
expect(baseConfig.name).toBe('');
|
||||
});
|
||||
|
||||
it('should handle null values', () => {
|
||||
expect(() => serviceConfigSchema.parse({ name: null, port: 3000 })).toThrow();
|
||||
expect(() => queueConfigSchema.parse({ redis: {}, workers: null })).toThrow();
|
||||
});
|
||||
|
||||
it('should handle undefined values for optional fields', () => {
|
||||
const config = serviceConfigSchema.parse({
|
||||
name: 'test',
|
||||
port: 3000,
|
||||
serviceName: undefined,
|
||||
});
|
||||
expect(config.serviceName).toBeUndefined();
|
||||
});
|
||||
|
||||
it('should handle numeric strings for number fields', () => {
|
||||
expect(() => serviceConfigSchema.parse({ name: 'test', port: '3000' })).toThrow();
|
||||
expect(() => queueConfigSchema.parse({ redis: {}, workers: '4' })).toThrow();
|
||||
});
|
||||
|
||||
it('should strip unknown properties', () => {
|
||||
const config = baseConfigSchema.parse({
|
||||
name: 'test',
|
||||
unknownProp: 'should be removed',
|
||||
});
|
||||
expect('unknownProp' in config).toBe(false);
|
||||
});
|
||||
});
|
||||
|
||||
describe('postgresConfigSchema', () => {
|
||||
it('should accept minimal config with defaults', () => {
|
||||
const config = postgresConfigSchema.parse({
|
||||
database: 'testdb',
|
||||
user: 'testuser',
|
||||
password: 'testpass',
|
||||
});
|
||||
expect(config).toEqual({
|
||||
enabled: true,
|
||||
host: 'localhost',
|
||||
port: 5432,
|
||||
database: 'testdb',
|
||||
user: 'testuser',
|
||||
password: 'testpass',
|
||||
ssl: false,
|
||||
poolSize: 10,
|
||||
connectionTimeout: 30000,
|
||||
idleTimeout: 10000,
|
||||
});
|
||||
});
|
||||
|
||||
it('should accept full config', () => {
|
||||
const input = {
|
||||
enabled: false,
|
||||
host: 'db.example.com',
|
||||
port: 5433,
|
||||
database: 'proddb',
|
||||
user: 'admin',
|
||||
password: 'secret',
|
||||
ssl: true,
|
||||
poolSize: 20,
|
||||
connectionTimeout: 60000,
|
||||
idleTimeout: 30000,
|
||||
};
|
||||
const config = postgresConfigSchema.parse(input);
|
||||
expect(config).toEqual(input);
|
||||
});
|
||||
|
||||
it('should validate poolSize range', () => {
|
||||
expect(() => postgresConfigSchema.parse({
|
||||
database: 'testdb',
|
||||
user: 'testuser',
|
||||
password: 'testpass',
|
||||
poolSize: 0,
|
||||
})).toThrow();
|
||||
|
||||
expect(() => postgresConfigSchema.parse({
|
||||
database: 'testdb',
|
||||
user: 'testuser',
|
||||
password: 'testpass',
|
||||
poolSize: 101,
|
||||
})).toThrow();
|
||||
});
|
||||
});
|
||||
|
||||
describe('questdbConfigSchema', () => {
|
||||
it('should accept minimal config with defaults', () => {
|
||||
const config = questdbConfigSchema.parse({});
|
||||
expect(config).toEqual({
|
||||
enabled: true,
|
||||
host: 'localhost',
|
||||
ilpPort: 9009,
|
||||
httpPort: 9000,
|
||||
pgPort: 8812,
|
||||
database: 'questdb',
|
||||
bufferSize: 65536,
|
||||
flushInterval: 1000,
|
||||
});
|
||||
});
|
||||
|
||||
it('should accept full config', () => {
|
||||
const input = {
|
||||
enabled: false,
|
||||
host: 'questdb.example.com',
|
||||
ilpPort: 9010,
|
||||
httpPort: 9001,
|
||||
pgPort: 8813,
|
||||
database: 'metrics',
|
||||
user: 'admin',
|
||||
password: 'secret',
|
||||
bufferSize: 131072,
|
||||
flushInterval: 2000,
|
||||
};
|
||||
const config = questdbConfigSchema.parse(input);
|
||||
expect(config).toEqual(input);
|
||||
});
|
||||
});
|
||||
|
||||
describe('mongodbConfigSchema', () => {
|
||||
it('should accept minimal config', () => {
|
||||
const config = mongodbConfigSchema.parse({
|
||||
uri: 'mongodb://localhost:27017',
|
||||
database: 'testdb',
|
||||
});
|
||||
expect(config).toEqual({
|
||||
enabled: true,
|
||||
uri: 'mongodb://localhost:27017',
|
||||
database: 'testdb',
|
||||
poolSize: 10,
|
||||
});
|
||||
});
|
||||
|
||||
it('should accept full config', () => {
|
||||
const input = {
|
||||
enabled: false,
|
||||
uri: 'mongodb://user:pass@cluster.mongodb.net',
|
||||
database: 'proddb',
|
||||
poolSize: 50,
|
||||
host: 'cluster.mongodb.net',
|
||||
port: 27017,
|
||||
user: 'admin',
|
||||
password: 'secret',
|
||||
authSource: 'admin',
|
||||
replicaSet: 'rs0',
|
||||
};
|
||||
const config = mongodbConfigSchema.parse(input);
|
||||
expect(config).toEqual(input);
|
||||
});
|
||||
|
||||
it('should validate URI format', () => {
|
||||
expect(() => mongodbConfigSchema.parse({
|
||||
uri: 'invalid-uri',
|
||||
database: 'testdb',
|
||||
})).toThrow();
|
||||
});
|
||||
|
||||
it('should validate poolSize range', () => {
|
||||
expect(() => mongodbConfigSchema.parse({
|
||||
uri: 'mongodb://localhost',
|
||||
database: 'testdb',
|
||||
poolSize: 0,
|
||||
})).toThrow();
|
||||
|
||||
expect(() => mongodbConfigSchema.parse({
|
||||
uri: 'mongodb://localhost',
|
||||
database: 'testdb',
|
||||
poolSize: 101,
|
||||
})).toThrow();
|
||||
});
|
||||
});
|
||||
|
||||
describe('dragonflyConfigSchema', () => {
|
||||
it('should accept minimal config with defaults', () => {
|
||||
const config = dragonflyConfigSchema.parse({});
|
||||
expect(config).toEqual({
|
||||
enabled: true,
|
||||
host: 'localhost',
|
||||
port: 6379,
|
||||
db: 0,
|
||||
maxRetries: 3,
|
||||
retryDelay: 100,
|
||||
});
|
||||
});
|
||||
|
||||
it('should accept full config', () => {
|
||||
const input = {
|
||||
enabled: false,
|
||||
host: 'cache.example.com',
|
||||
port: 6380,
|
||||
password: 'secret',
|
||||
db: 5,
|
||||
keyPrefix: 'app:',
|
||||
ttl: 3600,
|
||||
maxRetries: 5,
|
||||
retryDelay: 200,
|
||||
};
|
||||
const config = dragonflyConfigSchema.parse(input);
|
||||
expect(config).toEqual(input);
|
||||
});
|
||||
|
||||
it('should validate db range', () => {
|
||||
expect(() => dragonflyConfigSchema.parse({ db: -1 })).toThrow();
|
||||
expect(() => dragonflyConfigSchema.parse({ db: 16 })).toThrow();
|
||||
});
|
||||
});
|
||||
|
||||
describe('databaseConfigSchema', () => {
|
||||
it('should accept complete database configuration', () => {
|
||||
const config = databaseConfigSchema.parse({
|
||||
postgres: {
|
||||
database: 'testdb',
|
||||
user: 'testuser',
|
||||
password: 'testpass',
|
||||
},
|
||||
questdb: {},
|
||||
mongodb: {
|
||||
uri: 'mongodb://localhost',
|
||||
database: 'testdb',
|
||||
},
|
||||
dragonfly: {},
|
||||
});
|
||||
|
||||
expect(config.postgres.host).toBe('localhost');
|
||||
expect(config.questdb.enabled).toBe(true);
|
||||
expect(config.mongodb.poolSize).toBe(10);
|
||||
expect(config.dragonfly.port).toBe(6379);
|
||||
});
|
||||
});
|
||||
|
||||
describe('baseProviderConfigSchema', () => {
|
||||
it('should accept minimal config with defaults', () => {
|
||||
const config = baseProviderConfigSchema.parse({
|
||||
name: 'test-provider',
|
||||
});
|
||||
expect(config).toEqual({
|
||||
name: 'test-provider',
|
||||
enabled: true,
|
||||
priority: 0,
|
||||
timeout: 30000,
|
||||
retries: 3,
|
||||
});
|
||||
});
|
||||
|
||||
it('should accept full config', () => {
|
||||
const input = {
|
||||
name: 'test-provider',
|
||||
enabled: false,
|
||||
priority: 10,
|
||||
rateLimit: {
|
||||
maxRequests: 50,
|
||||
windowMs: 30000,
|
||||
},
|
||||
timeout: 60000,
|
||||
retries: 5,
|
||||
};
|
||||
const config = baseProviderConfigSchema.parse(input);
|
||||
expect(config).toEqual(input);
|
||||
});
|
||||
});
|
||||
|
||||
describe('eodProviderConfigSchema', () => {
|
||||
it('should accept minimal config', () => {
|
||||
const config = eodProviderConfigSchema.parse({
|
||||
name: 'eod',
|
||||
apiKey: 'test-key',
|
||||
});
|
||||
expect(config).toEqual({
|
||||
name: 'eod',
|
||||
apiKey: 'test-key',
|
||||
enabled: true,
|
||||
priority: 0,
|
||||
timeout: 30000,
|
||||
retries: 3,
|
||||
baseUrl: 'https://eodhistoricaldata.com/api',
|
||||
tier: 'free',
|
||||
});
|
||||
});
|
||||
|
||||
it('should validate tier values', () => {
|
||||
expect(() => eodProviderConfigSchema.parse({
|
||||
name: 'eod',
|
||||
apiKey: 'test-key',
|
||||
tier: 'premium',
|
||||
})).toThrow();
|
||||
|
||||
const validTiers = ['free', 'fundamentals', 'all-in-one'];
|
||||
for (const tier of validTiers) {
|
||||
const config = eodProviderConfigSchema.parse({
|
||||
name: 'eod',
|
||||
apiKey: 'test-key',
|
||||
tier,
|
||||
});
|
||||
expect(config.tier).toBe(tier);
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
describe('ibProviderConfigSchema', () => {
|
||||
it('should accept minimal config', () => {
|
||||
const config = ibProviderConfigSchema.parse({
|
||||
name: 'ib',
|
||||
});
|
||||
expect(config).toEqual({
|
||||
name: 'ib',
|
||||
enabled: true,
|
||||
priority: 0,
|
||||
timeout: 30000,
|
||||
retries: 3,
|
||||
gateway: {
|
||||
host: 'localhost',
|
||||
port: 5000,
|
||||
clientId: 1,
|
||||
},
|
||||
marketDataType: 'delayed',
|
||||
});
|
||||
});
|
||||
|
||||
it('should accept full config', () => {
|
||||
const input = {
|
||||
name: 'ib',
|
||||
enabled: false,
|
||||
priority: 5,
|
||||
gateway: {
|
||||
host: 'gateway.example.com',
|
||||
port: 7497,
|
||||
clientId: 99,
|
||||
},
|
||||
account: 'DU123456',
|
||||
marketDataType: 'live' as const,
|
||||
};
|
||||
const config = ibProviderConfigSchema.parse(input);
|
||||
expect(config).toEqual(expect.objectContaining(input));
|
||||
});
|
||||
|
||||
it('should validate marketDataType', () => {
|
||||
expect(() => ibProviderConfigSchema.parse({
|
||||
name: 'ib',
|
||||
marketDataType: 'realtime',
|
||||
})).toThrow();
|
||||
|
||||
const validTypes = ['live', 'delayed', 'frozen'];
|
||||
for (const type of validTypes) {
|
||||
const config = ibProviderConfigSchema.parse({
|
||||
name: 'ib',
|
||||
marketDataType: type,
|
||||
});
|
||||
expect(config.marketDataType).toBe(type);
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
describe('qmProviderConfigSchema', () => {
|
||||
it('should require all credentials', () => {
|
||||
expect(() => qmProviderConfigSchema.parse({
|
||||
name: 'qm',
|
||||
})).toThrow();
|
||||
|
||||
const config = qmProviderConfigSchema.parse({
|
||||
name: 'qm',
|
||||
username: 'testuser',
|
||||
password: 'testpass',
|
||||
webmasterId: '12345',
|
||||
});
|
||||
expect(config.baseUrl).toBe('https://app.quotemedia.com/quotetools');
|
||||
});
|
||||
});
|
||||
|
||||
describe('yahooProviderConfigSchema', () => {
|
||||
it('should accept minimal config', () => {
|
||||
const config = yahooProviderConfigSchema.parse({
|
||||
name: 'yahoo',
|
||||
});
|
||||
expect(config).toEqual({
|
||||
name: 'yahoo',
|
||||
enabled: true,
|
||||
priority: 0,
|
||||
timeout: 30000,
|
||||
retries: 3,
|
||||
baseUrl: 'https://query1.finance.yahoo.com',
|
||||
cookieJar: true,
|
||||
});
|
||||
});
|
||||
|
||||
it('should accept crumb parameter', () => {
|
||||
const config = yahooProviderConfigSchema.parse({
|
||||
name: 'yahoo',
|
||||
crumb: 'abc123xyz',
|
||||
});
|
||||
expect(config.crumb).toBe('abc123xyz');
|
||||
});
|
||||
});
|
||||
|
||||
describe('webshareProviderConfigSchema', () => {
|
||||
it('should not require name like other providers', () => {
|
||||
const config = webshareProviderConfigSchema.parse({});
|
||||
expect(config).toEqual({
|
||||
apiUrl: 'https://proxy.webshare.io/api/v2/',
|
||||
enabled: true,
|
||||
});
|
||||
});
|
||||
|
||||
it('should accept apiKey', () => {
|
||||
const config = webshareProviderConfigSchema.parse({
|
||||
apiKey: 'test-key',
|
||||
enabled: false,
|
||||
});
|
||||
expect(config.apiKey).toBe('test-key');
|
||||
expect(config.enabled).toBe(false);
|
||||
});
|
||||
});
|
||||
|
||||
describe('providerConfigSchema', () => {
|
||||
it('should accept empty config', () => {
|
||||
const config = providerConfigSchema.parse({});
|
||||
expect(config).toEqual({});
|
||||
});
|
||||
|
||||
it('should accept partial provider config', () => {
|
||||
const config = providerConfigSchema.parse({
|
||||
eod: {
|
||||
name: 'eod',
|
||||
apiKey: 'test-key',
|
||||
},
|
||||
yahoo: {
|
||||
name: 'yahoo',
|
||||
},
|
||||
});
|
||||
expect(config.eod?.apiKey).toBe('test-key');
|
||||
expect(config.yahoo?.baseUrl).toBe('https://query1.finance.yahoo.com');
|
||||
expect(config.ib).toBeUndefined();
|
||||
});
|
||||
|
||||
it('should accept full provider config', () => {
|
||||
const config = providerConfigSchema.parse({
|
||||
eod: {
|
||||
name: 'eod',
|
||||
apiKey: 'eod-key',
|
||||
tier: 'all-in-one',
|
||||
},
|
||||
ib: {
|
||||
name: 'ib',
|
||||
gateway: {
|
||||
host: 'gateway.ib.com',
|
||||
port: 7497,
|
||||
clientId: 2,
|
||||
},
|
||||
},
|
||||
qm: {
|
||||
name: 'qm',
|
||||
username: 'user',
|
||||
password: 'pass',
|
||||
webmasterId: '123',
|
||||
},
|
||||
yahoo: {
|
||||
name: 'yahoo',
|
||||
crumb: 'xyz',
|
||||
},
|
||||
webshare: {
|
||||
apiKey: 'ws-key',
|
||||
},
|
||||
});
|
||||
|
||||
expect(config.eod?.tier).toBe('all-in-one');
|
||||
expect(config.ib?.gateway.port).toBe(7497);
|
||||
expect(config.qm?.username).toBe('user');
|
||||
expect(config.yahoo?.crumb).toBe('xyz');
|
||||
expect(config.webshare?.apiKey).toBe('ws-key');
|
||||
});
|
||||
});
|
||||
});
|
||||
519
libs/core/config/test/utils.test.ts
Normal file
519
libs/core/config/test/utils.test.ts
Normal file
|
|
@ -0,0 +1,519 @@
|
|||
import { describe, it, expect, beforeEach, afterEach } from 'bun:test';
|
||||
import { z } from 'zod';
|
||||
import {
|
||||
SecretValue,
|
||||
secret,
|
||||
isSecret,
|
||||
redactSecrets,
|
||||
isSecretEnvVar,
|
||||
wrapSecretEnvVars,
|
||||
secretSchema,
|
||||
secretStringSchema,
|
||||
COMMON_SECRET_PATTERNS,
|
||||
validateConfig,
|
||||
checkRequiredEnvVars,
|
||||
validateCompleteness,
|
||||
formatValidationResult,
|
||||
createStrictSchema,
|
||||
mergeSchemas,
|
||||
type ValidationResult,
|
||||
} from '../src';
|
||||
|
||||
describe('Config Utils', () => {
|
||||
describe('SecretValue', () => {
|
||||
it('should create a secret value', () => {
|
||||
const secret = new SecretValue('my-secret');
|
||||
expect(secret).toBeInstanceOf(SecretValue);
|
||||
expect(secret.toString()).toBe('***');
|
||||
});
|
||||
|
||||
it('should use custom mask', () => {
|
||||
const secret = new SecretValue('my-secret', 'HIDDEN');
|
||||
expect(secret.toString()).toBe('HIDDEN');
|
||||
});
|
||||
|
||||
it('should reveal value with reason', () => {
|
||||
const secret = new SecretValue('my-secret');
|
||||
expect(secret.reveal('testing')).toBe('my-secret');
|
||||
});
|
||||
|
||||
it('should throw when revealing without reason', () => {
|
||||
const secret = new SecretValue('my-secret');
|
||||
expect(() => secret.reveal('')).toThrow('Reason required for revealing secret value');
|
||||
});
|
||||
|
||||
it('should mask value in JSON', () => {
|
||||
const secret = new SecretValue('my-secret');
|
||||
expect(JSON.stringify(secret)).toBe('"***"');
|
||||
expect(secret.toJSON()).toBe('***');
|
||||
});
|
||||
|
||||
it('should compare values without revealing', () => {
|
||||
const secret = new SecretValue('my-secret');
|
||||
expect(secret.equals('my-secret')).toBe(true);
|
||||
expect(secret.equals('other-secret')).toBe(false);
|
||||
});
|
||||
|
||||
it('should map secret values', () => {
|
||||
const secret = new SecretValue('hello');
|
||||
const mapped = secret.map(val => val.toUpperCase(), 'testing transformation');
|
||||
expect(mapped.reveal('checking result')).toBe('HELLO');
|
||||
expect(mapped.toString()).toBe('***');
|
||||
});
|
||||
|
||||
it('should work with non-string types', () => {
|
||||
const numberSecret = new SecretValue(12345, 'XXX');
|
||||
expect(numberSecret.reveal('test')).toBe(12345);
|
||||
expect(numberSecret.toString()).toBe('XXX');
|
||||
|
||||
const objectSecret = new SecretValue({ key: 'value' }, '[OBJECT]');
|
||||
expect(objectSecret.reveal('test')).toEqual({ key: 'value' });
|
||||
expect(objectSecret.toString()).toBe('[OBJECT]');
|
||||
});
|
||||
});
|
||||
|
||||
describe('secret helper function', () => {
|
||||
it('should create secret values', () => {
|
||||
const s = secret('my-secret');
|
||||
expect(s).toBeInstanceOf(SecretValue);
|
||||
expect(s.reveal('test')).toBe('my-secret');
|
||||
});
|
||||
|
||||
it('should accept custom mask', () => {
|
||||
const s = secret('my-secret', 'REDACTED');
|
||||
expect(s.toString()).toBe('REDACTED');
|
||||
});
|
||||
});
|
||||
|
||||
describe('isSecret', () => {
|
||||
it('should identify secret values', () => {
|
||||
expect(isSecret(new SecretValue('test'))).toBe(true);
|
||||
expect(isSecret(secret('test'))).toBe(true);
|
||||
expect(isSecret('test')).toBe(false);
|
||||
expect(isSecret(null)).toBe(false);
|
||||
expect(isSecret(undefined)).toBe(false);
|
||||
expect(isSecret({})).toBe(false);
|
||||
});
|
||||
});
|
||||
|
||||
describe('secretSchema', () => {
|
||||
it('should validate SecretValue instances', () => {
|
||||
const schema = secretSchema(z.string());
|
||||
const secretVal = new SecretValue('test');
|
||||
|
||||
expect(() => schema.parse(secretVal)).not.toThrow();
|
||||
expect(() => schema.parse('test')).toThrow();
|
||||
expect(() => schema.parse(null)).toThrow();
|
||||
});
|
||||
});
|
||||
|
||||
describe('secretStringSchema', () => {
|
||||
it('should transform string to SecretValue', () => {
|
||||
const result = secretStringSchema.parse('my-secret');
|
||||
expect(result).toBeInstanceOf(SecretValue);
|
||||
expect(result.reveal('test')).toBe('my-secret');
|
||||
});
|
||||
|
||||
it('should reject non-strings', () => {
|
||||
expect(() => secretStringSchema.parse(123)).toThrow();
|
||||
expect(() => secretStringSchema.parse(null)).toThrow();
|
||||
});
|
||||
});
|
||||
|
||||
describe('redactSecrets', () => {
|
||||
it('should redact specified paths', () => {
|
||||
const obj = {
|
||||
username: 'admin',
|
||||
password: 'secret123',
|
||||
nested: {
|
||||
apiKey: 'key123',
|
||||
public: 'visible',
|
||||
},
|
||||
};
|
||||
|
||||
const redacted = redactSecrets(obj, ['password', 'nested.apiKey']);
|
||||
|
||||
expect(redacted).toEqual({
|
||||
username: 'admin',
|
||||
password: '***REDACTED***',
|
||||
nested: {
|
||||
apiKey: '***REDACTED***',
|
||||
public: 'visible',
|
||||
},
|
||||
});
|
||||
});
|
||||
|
||||
it('should redact SecretValue instances', () => {
|
||||
const obj = {
|
||||
normal: 'value',
|
||||
secret: new SecretValue('hidden', 'MASKED'),
|
||||
nested: {
|
||||
anotherSecret: secret('also-hidden'),
|
||||
},
|
||||
};
|
||||
|
||||
const redacted = redactSecrets(obj);
|
||||
|
||||
expect(redacted).toEqual({
|
||||
normal: 'value',
|
||||
secret: 'MASKED',
|
||||
nested: {
|
||||
anotherSecret: '***',
|
||||
},
|
||||
});
|
||||
});
|
||||
|
||||
it('should handle arrays', () => {
|
||||
const obj = {
|
||||
items: [
|
||||
{ name: 'item1', secret: new SecretValue('s1') },
|
||||
{ name: 'item2', secret: new SecretValue('s2') },
|
||||
],
|
||||
};
|
||||
|
||||
const redacted = redactSecrets(obj);
|
||||
|
||||
expect(redacted.items).toEqual([
|
||||
{ name: 'item1', secret: '***' },
|
||||
{ name: 'item2', secret: '***' },
|
||||
]);
|
||||
});
|
||||
|
||||
it('should handle null and undefined', () => {
|
||||
const obj = {
|
||||
nullValue: null,
|
||||
undefinedValue: undefined,
|
||||
secret: new SecretValue('test'),
|
||||
};
|
||||
|
||||
const redacted = redactSecrets(obj);
|
||||
|
||||
expect(redacted).toEqual({
|
||||
nullValue: null,
|
||||
undefinedValue: undefined,
|
||||
secret: '***',
|
||||
});
|
||||
});
|
||||
|
||||
it('should handle non-existent paths gracefully', () => {
|
||||
const obj = { a: 'value' };
|
||||
const redacted = redactSecrets(obj, ['b.c.d']);
|
||||
expect(redacted).toEqual({ a: 'value' });
|
||||
});
|
||||
|
||||
it('should not modify original object', () => {
|
||||
const obj = { password: 'secret' };
|
||||
const original = { ...obj };
|
||||
redactSecrets(obj, ['password']);
|
||||
expect(obj).toEqual(original);
|
||||
});
|
||||
});
|
||||
|
||||
describe('isSecretEnvVar', () => {
|
||||
it('should identify common secret patterns', () => {
|
||||
// Positive cases
|
||||
expect(isSecretEnvVar('PASSWORD')).toBe(true);
|
||||
expect(isSecretEnvVar('DB_PASSWORD')).toBe(true);
|
||||
expect(isSecretEnvVar('API_KEY')).toBe(true);
|
||||
expect(isSecretEnvVar('API-KEY')).toBe(true);
|
||||
expect(isSecretEnvVar('SECRET_TOKEN')).toBe(true);
|
||||
expect(isSecretEnvVar('AUTH_TOKEN')).toBe(true);
|
||||
expect(isSecretEnvVar('PRIVATE_KEY')).toBe(true);
|
||||
expect(isSecretEnvVar('CREDENTIAL')).toBe(true);
|
||||
expect(isSecretEnvVar('password')).toBe(true); // Case insensitive
|
||||
|
||||
// Negative cases
|
||||
expect(isSecretEnvVar('USERNAME')).toBe(false);
|
||||
expect(isSecretEnvVar('PORT')).toBe(false);
|
||||
expect(isSecretEnvVar('DEBUG')).toBe(false);
|
||||
expect(isSecretEnvVar('NODE_ENV')).toBe(false);
|
||||
});
|
||||
});
|
||||
|
||||
describe('wrapSecretEnvVars', () => {
|
||||
it('should wrap secret environment variables', () => {
|
||||
const env = {
|
||||
USERNAME: 'admin',
|
||||
PASSWORD: 'secret123',
|
||||
API_KEY: 'key123',
|
||||
PORT: '3000',
|
||||
};
|
||||
|
||||
const wrapped = wrapSecretEnvVars(env);
|
||||
|
||||
expect(wrapped.USERNAME).toBe('admin');
|
||||
expect(wrapped.PORT).toBe('3000');
|
||||
|
||||
expect(isSecret(wrapped.PASSWORD)).toBe(true);
|
||||
expect(isSecret(wrapped.API_KEY)).toBe(true);
|
||||
|
||||
const passwordSecret = wrapped.PASSWORD as SecretValue;
|
||||
expect(passwordSecret.reveal('test')).toBe('secret123');
|
||||
expect(passwordSecret.toString()).toBe('***PASSWORD***');
|
||||
});
|
||||
|
||||
it('should handle undefined values', () => {
|
||||
const env = {
|
||||
PASSWORD: undefined,
|
||||
USERNAME: 'admin',
|
||||
};
|
||||
|
||||
const wrapped = wrapSecretEnvVars(env);
|
||||
|
||||
expect(wrapped.PASSWORD).toBeUndefined();
|
||||
expect(wrapped.USERNAME).toBe('admin');
|
||||
});
|
||||
});
|
||||
|
||||
describe('validateConfig', () => {
|
||||
const schema = z.object({
|
||||
name: z.string(),
|
||||
port: z.number(),
|
||||
optional: z.string().optional(),
|
||||
});
|
||||
|
||||
it('should validate valid config', () => {
|
||||
const result = validateConfig({ name: 'app', port: 3000 }, schema);
|
||||
expect(result.valid).toBe(true);
|
||||
expect(result.errors).toBeUndefined();
|
||||
});
|
||||
|
||||
it('should return errors for invalid config', () => {
|
||||
const result = validateConfig({ name: 'app', port: 'invalid' }, schema);
|
||||
expect(result.valid).toBe(false);
|
||||
expect(result.errors).toBeDefined();
|
||||
expect(result.errors![0].path).toBe('port');
|
||||
expect(result.errors![0].message).toContain('Expected number');
|
||||
});
|
||||
|
||||
it('should handle missing required fields', () => {
|
||||
const result = validateConfig({ port: 3000 }, schema);
|
||||
expect(result.valid).toBe(false);
|
||||
expect(result.errors).toBeDefined();
|
||||
expect(result.errors![0].path).toBe('name');
|
||||
});
|
||||
|
||||
it('should rethrow non-Zod errors', () => {
|
||||
const badSchema = {
|
||||
parse: () => {
|
||||
throw new Error('Not a Zod error');
|
||||
},
|
||||
} as any;
|
||||
|
||||
expect(() => validateConfig({}, badSchema)).toThrow('Not a Zod error');
|
||||
});
|
||||
});
|
||||
|
||||
describe('checkRequiredEnvVars', () => {
|
||||
const originalEnv = { ...process.env };
|
||||
|
||||
beforeEach(() => {
|
||||
// Clear environment
|
||||
for (const key in process.env) {
|
||||
delete process.env[key];
|
||||
}
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
// Restore environment
|
||||
for (const key in process.env) {
|
||||
delete process.env[key];
|
||||
}
|
||||
Object.assign(process.env, originalEnv);
|
||||
});
|
||||
|
||||
it('should pass when all required vars are set', () => {
|
||||
process.env.API_KEY = 'key123';
|
||||
process.env.DATABASE_URL = 'postgres://...';
|
||||
|
||||
const result = checkRequiredEnvVars(['API_KEY', 'DATABASE_URL']);
|
||||
expect(result.valid).toBe(true);
|
||||
expect(result.errors).toBeUndefined();
|
||||
});
|
||||
|
||||
it('should fail when required vars are missing', () => {
|
||||
process.env.API_KEY = 'key123';
|
||||
|
||||
const result = checkRequiredEnvVars(['API_KEY', 'DATABASE_URL', 'MISSING_VAR']);
|
||||
expect(result.valid).toBe(false);
|
||||
expect(result.errors).toHaveLength(2);
|
||||
expect(result.errors![0].path).toBe('env.DATABASE_URL');
|
||||
expect(result.errors![1].path).toBe('env.MISSING_VAR');
|
||||
});
|
||||
|
||||
it('should handle empty required list', () => {
|
||||
const result = checkRequiredEnvVars([]);
|
||||
expect(result.valid).toBe(true);
|
||||
expect(result.errors).toBeUndefined();
|
||||
});
|
||||
});
|
||||
|
||||
describe('validateCompleteness', () => {
|
||||
it('should validate complete config', () => {
|
||||
const config = {
|
||||
database: {
|
||||
host: 'localhost',
|
||||
port: 5432,
|
||||
credentials: {
|
||||
username: 'admin',
|
||||
password: 'secret',
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
const result = validateCompleteness(config, [
|
||||
'database.host',
|
||||
'database.port',
|
||||
'database.credentials.username',
|
||||
]);
|
||||
|
||||
expect(result.valid).toBe(true);
|
||||
expect(result.errors).toBeUndefined();
|
||||
});
|
||||
|
||||
it('should detect missing values', () => {
|
||||
const config = {
|
||||
database: {
|
||||
host: 'localhost',
|
||||
credentials: {},
|
||||
},
|
||||
};
|
||||
|
||||
const result = validateCompleteness(config, [
|
||||
'database.host',
|
||||
'database.port',
|
||||
'database.credentials.username',
|
||||
]);
|
||||
|
||||
expect(result.valid).toBe(false);
|
||||
expect(result.errors).toHaveLength(2);
|
||||
expect(result.errors![0].path).toBe('database.port');
|
||||
expect(result.errors![1].path).toBe('database.credentials.username');
|
||||
});
|
||||
|
||||
it('should handle null and undefined as missing', () => {
|
||||
const config = {
|
||||
a: null,
|
||||
b: undefined,
|
||||
c: 'value',
|
||||
};
|
||||
|
||||
const result = validateCompleteness(config, ['a', 'b', 'c']);
|
||||
expect(result.valid).toBe(false);
|
||||
expect(result.errors).toHaveLength(2);
|
||||
});
|
||||
|
||||
it('should handle non-existent paths', () => {
|
||||
const config = { a: 'value' };
|
||||
const result = validateCompleteness(config, ['b.c.d']);
|
||||
expect(result.valid).toBe(false);
|
||||
expect(result.errors![0].path).toBe('b.c.d');
|
||||
});
|
||||
});
|
||||
|
||||
describe('formatValidationResult', () => {
|
||||
it('should format valid result', () => {
|
||||
const result: ValidationResult = { valid: true };
|
||||
const formatted = formatValidationResult(result);
|
||||
expect(formatted).toBe('✅ Configuration is valid');
|
||||
});
|
||||
|
||||
it('should format errors', () => {
|
||||
const result: ValidationResult = {
|
||||
valid: false,
|
||||
errors: [
|
||||
{ path: 'port', message: 'Expected number' },
|
||||
{
|
||||
path: 'database.host',
|
||||
message: 'Invalid value',
|
||||
expected: 'string',
|
||||
received: 'number',
|
||||
},
|
||||
],
|
||||
};
|
||||
|
||||
const formatted = formatValidationResult(result);
|
||||
expect(formatted).toContain('❌ Configuration validation failed');
|
||||
expect(formatted).toContain('Errors:');
|
||||
expect(formatted).toContain('- port: Expected number');
|
||||
expect(formatted).toContain('- database.host: Invalid value');
|
||||
expect(formatted).toContain('Expected: string, Received: number');
|
||||
});
|
||||
|
||||
it('should format warnings', () => {
|
||||
const result: ValidationResult = {
|
||||
valid: true,
|
||||
warnings: [
|
||||
{ path: 'deprecated.feature', message: 'This feature is deprecated' },
|
||||
],
|
||||
};
|
||||
|
||||
const formatted = formatValidationResult(result);
|
||||
expect(formatted).toContain('✅ Configuration is valid');
|
||||
expect(formatted).toContain('Warnings:');
|
||||
expect(formatted).toContain('- deprecated.feature: This feature is deprecated');
|
||||
});
|
||||
});
|
||||
|
||||
describe('createStrictSchema', () => {
|
||||
it('should create strict schema', () => {
|
||||
const schema = createStrictSchema({
|
||||
name: z.string(),
|
||||
age: z.number(),
|
||||
});
|
||||
|
||||
expect(() => schema.parse({ name: 'John', age: 30 })).not.toThrow();
|
||||
expect(() => schema.parse({ name: 'John', age: 30, extra: 'field' })).toThrow();
|
||||
});
|
||||
});
|
||||
|
||||
describe('mergeSchemas', () => {
|
||||
it('should merge two schemas', () => {
|
||||
const schema1 = z.object({ a: z.string() });
|
||||
const schema2 = z.object({ b: z.number() });
|
||||
|
||||
const merged = mergeSchemas(schema1, schema2);
|
||||
const result = merged.parse({ a: 'test', b: 123 });
|
||||
|
||||
expect(result).toEqual({ a: 'test', b: 123 });
|
||||
});
|
||||
|
||||
it('should merge multiple schemas', () => {
|
||||
const schema1 = z.object({ a: z.string() });
|
||||
const schema2 = z.object({ b: z.number() });
|
||||
const schema3 = z.object({ c: z.boolean() });
|
||||
|
||||
const merged = mergeSchemas(schema1, schema2, schema3);
|
||||
const result = merged.parse({ a: 'test', b: 123, c: true });
|
||||
|
||||
expect(result).toEqual({ a: 'test', b: 123, c: true });
|
||||
});
|
||||
|
||||
it('should throw with less than two schemas', () => {
|
||||
expect(() => mergeSchemas(z.object({}))).toThrow('At least two schemas required');
|
||||
expect(() => mergeSchemas()).toThrow('At least two schemas required');
|
||||
});
|
||||
|
||||
it('should handle overlapping fields', () => {
|
||||
const schema1 = z.object({ a: z.string(), shared: z.string() });
|
||||
const schema2 = z.object({ b: z.number(), shared: z.string() });
|
||||
|
||||
const merged = mergeSchemas(schema1, schema2);
|
||||
|
||||
// Both schemas require 'shared' to be a string
|
||||
expect(() => merged.parse({ a: 'test', b: 123, shared: 'value' })).not.toThrow();
|
||||
expect(() => merged.parse({ a: 'test', b: 123, shared: 123 })).toThrow();
|
||||
});
|
||||
});
|
||||
|
||||
describe('COMMON_SECRET_PATTERNS', () => {
|
||||
it('should be an array of RegExp', () => {
|
||||
expect(Array.isArray(COMMON_SECRET_PATTERNS)).toBe(true);
|
||||
expect(COMMON_SECRET_PATTERNS.length).toBeGreaterThan(0);
|
||||
|
||||
for (const pattern of COMMON_SECRET_PATTERNS) {
|
||||
expect(pattern).toBeInstanceOf(RegExp);
|
||||
}
|
||||
});
|
||||
});
|
||||
});
|
||||
Loading…
Add table
Add a link
Reference in a new issue