moved folders around

This commit is contained in:
Boki 2025-06-21 18:27:00 -04:00
parent 4f89affc2b
commit 36cb84b343
202 changed files with 1160 additions and 660 deletions

View file

@ -0,0 +1,50 @@
# Environment
NODE_ENV=development
# Service Configuration
STOCKBOT_SERVICE_NAME=stock-bot-service
STOCKBOT_SERVICE_PORT=3000
# Database Configuration
STOCKBOT_DATABASE_POSTGRES_HOST=localhost
STOCKBOT_DATABASE_POSTGRES_PORT=5432
STOCKBOT_DATABASE_POSTGRES_DATABASE=stockbot
STOCKBOT_DATABASE_POSTGRES_USER=postgres
STOCKBOT_DATABASE_POSTGRES_PASSWORD=postgres
STOCKBOT_DATABASE_QUESTDB_HOST=localhost
STOCKBOT_DATABASE_QUESTDB_ILP_PORT=9009
STOCKBOT_DATABASE_QUESTDB_HTTP_PORT=9000
STOCKBOT_DATABASE_MONGODB_HOST=localhost
STOCKBOT_DATABASE_MONGODB_PORT=27017
STOCKBOT_DATABASE_MONGODB_DATABASE=stockbot
STOCKBOT_DATABASE_DRAGONFLY_HOST=localhost
STOCKBOT_DATABASE_DRAGONFLY_PORT=6379
# Provider Configuration
STOCKBOT_PROVIDERS_EOD_API_KEY=your_eod_api_key
STOCKBOT_PROVIDERS_EOD_ENABLED=true
STOCKBOT_PROVIDERS_IB_ENABLED=false
STOCKBOT_PROVIDERS_IB_GATEWAY_HOST=localhost
STOCKBOT_PROVIDERS_IB_GATEWAY_PORT=5000
STOCKBOT_PROVIDERS_IB_ACCOUNT=your_account_id
STOCKBOT_PROVIDERS_QM_ENABLED=false
STOCKBOT_PROVIDERS_QM_USERNAME=your_username
STOCKBOT_PROVIDERS_QM_PASSWORD=your_password
STOCKBOT_PROVIDERS_QM_WEBMASTER_ID=your_webmaster_id
# Logging
STOCKBOT_LOGGING_LEVEL=info
STOCKBOT_LOGGING_LOKI_ENABLED=false
STOCKBOT_LOGGING_LOKI_HOST=localhost
STOCKBOT_LOGGING_LOKI_PORT=3100
# HTTP Proxy (optional)
STOCKBOT_HTTP_PROXY_ENABLED=false
STOCKBOT_HTTP_PROXY_URL=http://proxy.example.com:8080
STOCKBOT_HTTP_PROXY_AUTH_USERNAME=username
STOCKBOT_HTTP_PROXY_AUTH_PASSWORD=password

243
libs/core/config/README.md Normal file
View file

@ -0,0 +1,243 @@
# @stock-bot/config
A robust, type-safe configuration library for the Stock Bot application. Built with Zod for validation and supports multiple configuration sources with proper precedence.
## Features
- **Type-safe configuration** with Zod schemas
- **Multiple configuration sources**: JSON files and environment variables
- **Environment-specific overrides** (development, test, production)
- **Dynamic provider configurations**
- **No circular dependencies** - designed to be used by all other libraries
- **Clear error messages** with validation
- **Runtime configuration updates** (useful for testing)
- **Singleton pattern** for global configuration access
## Installation
```bash
bun add @stock-bot/config
```
## Usage
### Basic Usage
```typescript
import { initializeConfig, getConfig } from '@stock-bot/config';
// Initialize configuration (call once at app startup)
await initializeConfig();
// Get configuration
const config = getConfig();
console.log(config.database.postgres.host);
// Use convenience functions
import { getDatabaseConfig, isProduction } from '@stock-bot/config';
const dbConfig = getDatabaseConfig();
if (isProduction()) {
// Production-specific logic
}
```
### Custom Configuration
```typescript
import { ConfigManager } from '@stock-bot/config';
import { z } from 'zod';
// Define your schema
const myConfigSchema = z.object({
app: z.object({
name: z.string(),
version: z.string(),
}),
features: z.object({
enableBeta: z.boolean().default(false),
}),
});
// Create config manager
const configManager = new ConfigManager({
configPath: './my-config',
});
// Initialize with schema
const config = await configManager.initialize(myConfigSchema);
```
### Provider-Specific Configuration
```typescript
import { getProviderConfig } from '@stock-bot/config';
// Get provider configuration
const eodConfig = getProviderConfig('eod');
console.log(eodConfig.apiKey);
// Check if provider is enabled
if (eodConfig.enabled) {
// Use EOD provider
}
```
### Environment Variables
Environment variables are loaded with the `STOCKBOT_` prefix and follow a naming convention:
```bash
# Database configuration
STOCKBOT_DATABASE_POSTGRES_HOST=localhost
STOCKBOT_DATABASE_POSTGRES_PORT=5432
# Provider configuration
STOCKBOT_PROVIDERS_EOD_API_KEY=your_api_key
STOCKBOT_PROVIDERS_EOD_ENABLED=true
# Service configuration
STOCKBOT_SERVICE_PORT=3000
STOCKBOT_LOGGING_LEVEL=debug
```
### Configuration Precedence
Configuration is loaded in the following order (later sources override earlier ones):
1. `config/default.json` - Base configuration
2. `config/{environment}.json` - Environment-specific overrides
3. Environment variables - Highest priority
### Advanced Usage
```typescript
import { getConfigManager } from '@stock-bot/config';
const manager = getConfigManager();
// Get specific value by path
const port = manager.getValue<number>('service.port');
// Check if configuration exists
if (manager.has('providers.ib')) {
// IB provider is configured
}
// Update configuration at runtime (useful for testing)
manager.set({
logging: {
level: 'debug'
}
});
// Create typed getter
const getQueueConfig = manager.createTypedGetter(queueConfigSchema);
const queueConfig = getQueueConfig();
```
## Configuration Schema
The library provides pre-defined schemas for common configurations:
### Base Configuration
- `environment` - Current environment (development/test/production)
- `name` - Application name
- `version` - Application version
- `debug` - Debug mode flag
### Service Configuration
- `name` - Service name
- `port` - Service port
- `host` - Service host
- `healthCheckPath` - Health check endpoint
- `cors` - CORS configuration
### Database Configuration
- `postgres` - PostgreSQL configuration
- `questdb` - QuestDB configuration
- `mongodb` - MongoDB configuration
- `dragonfly` - Dragonfly/Redis configuration
### Provider Configuration
- `eod` - EOD Historical Data provider
- `ib` - Interactive Brokers provider
- `qm` - QuoteMedia provider
- `yahoo` - Yahoo Finance provider
## Testing
```typescript
import { resetConfig, initializeConfig } from '@stock-bot/config';
beforeEach(() => {
resetConfig();
});
test('custom config', async () => {
process.env.NODE_ENV = 'test';
process.env.STOCKBOT_SERVICE_PORT = '4000';
await initializeConfig();
const config = getConfig();
expect(config.service.port).toBe(4000);
});
```
## Custom Loaders
You can create custom configuration loaders:
```typescript
import { ConfigLoader } from '@stock-bot/config';
class ApiConfigLoader implements ConfigLoader {
readonly priority = 75; // Between file (50) and env (100)
async load(): Promise<Record<string, unknown>> {
// Fetch configuration from API
const response = await fetch('https://api.example.com/config');
return response.json();
}
}
// Use custom loader
const configManager = new ConfigManager({
loaders: [
new FileLoader('./config', 'production'),
new ApiConfigLoader(),
new EnvLoader('STOCKBOT_'),
],
});
```
## Error Handling
The library provides specific error types:
```typescript
import { ConfigError, ConfigValidationError } from '@stock-bot/config';
try {
await initializeConfig();
} catch (error) {
if (error instanceof ConfigValidationError) {
console.error('Validation failed:', error.errors);
} else if (error instanceof ConfigError) {
console.error('Configuration error:', error.message);
}
}
```
## Best Practices
1. **Initialize once**: Call `initializeConfig()` once at application startup
2. **Use schemas**: Always define and validate configurations with Zod schemas
3. **Environment variables**: Use the `STOCKBOT_` prefix for all env vars
4. **Type safety**: Leverage TypeScript types from the schemas
5. **Testing**: Reset configuration between tests with `resetConfig()`
## License
MIT

View file

@ -0,0 +1,94 @@
{
"name": "stock-bot",
"version": "1.0.0",
"debug": false,
"service": {
"name": "stock-bot-service",
"port": 3000,
"host": "0.0.0.0",
"healthCheckPath": "/health",
"metricsPath": "/metrics",
"shutdownTimeout": 30000,
"cors": {
"enabled": true,
"origin": "*",
"credentials": true
}
},
"logging": {
"level": "info",
"format": "json",
"loki": {
"enabled": false,
"host": "localhost",
"port": 3100,
"labels": {}
}
},
"database": {
"postgres": {
"host": "localhost",
"port": 5432,
"database": "stockbot",
"user": "postgres",
"password": "postgres",
"ssl": false,
"poolSize": 10,
"connectionTimeout": 30000,
"idleTimeout": 10000
},
"questdb": {
"host": "localhost",
"ilpPort": 9009,
"httpPort": 9000,
"pgPort": 8812,
"database": "questdb",
"user": "admin",
"password": "quest",
"bufferSize": 65536,
"flushInterval": 1000
},
"mongodb": {
"host": "localhost",
"port": 27017,
"database": "stockbot",
"authSource": "admin",
"poolSize": 10
},
"dragonfly": {
"host": "localhost",
"port": 6379,
"db": 0,
"maxRetries": 3,
"retryDelay": 100
}
},
"queue": {
"redis": {
"host": "localhost",
"port": 6379,
"db": 0
},
"defaultJobOptions": {
"attempts": 3,
"backoff": {
"type": "exponential",
"delay": 1000
},
"removeOnComplete": true,
"removeOnFail": false
}
},
"http": {
"timeout": 30000,
"retries": 3,
"retryDelay": 1000,
"proxy": {
"enabled": false
}
},
"webshare": {
"apiKey": "",
"apiUrl": "https://proxy.webshare.io/api/v2/"
}
}

View file

@ -0,0 +1,48 @@
{
"debug": true,
"logging": {
"level": "debug",
"format": "pretty"
},
"providers": {
"eod": {
"name": "eod-historical-data",
"enabled": true,
"priority": 1,
"apiKey": "demo",
"tier": "free",
"rateLimit": {
"maxRequests": 20,
"windowMs": 60000
}
},
"yahoo": {
"name": "yahoo-finance",
"enabled": true,
"priority": 2,
"rateLimit": {
"maxRequests": 100,
"windowMs": 60000
}
},
"ib": {
"name": "interactive-brokers",
"enabled": false,
"priority": 0,
"gateway": {
"host": "localhost",
"port": 5000,
"clientId": 1
},
"marketDataType": "delayed"
},
"qm": {
"name": "quotemedia",
"enabled": false,
"priority": 3,
"username": "",
"password": "",
"webmasterId": ""
}
}
}

View file

@ -0,0 +1,32 @@
{
"debug": false,
"logging": {
"level": "warn",
"format": "json",
"loki": {
"enabled": true,
"labels": {
"app": "stock-bot",
"env": "production"
}
}
},
"database": {
"postgres": {
"ssl": true,
"poolSize": 20
},
"questdb": {
"bufferSize": 131072,
"flushInterval": 500
},
"mongodb": {
"poolSize": 20
}
},
"http": {
"timeout": 60000,
"retries": 5,
"retryDelay": 2000
}
}

View file

@ -0,0 +1,42 @@
{
"debug": true,
"logging": {
"level": "error",
"format": "json"
},
"service": {
"port": 0,
"shutdownTimeout": 5000
},
"database": {
"postgres": {
"database": "stockbot_test",
"poolSize": 5
},
"questdb": {
"database": "questdb_test"
},
"mongodb": {
"database": "stockbot_test",
"poolSize": 5
},
"dragonfly": {
"db": 15,
"keyPrefix": "test:"
}
},
"queue": {
"redis": {
"db": 15
},
"defaultJobOptions": {
"attempts": 1,
"removeOnComplete": false,
"removeOnFail": false
}
},
"http": {
"timeout": 5000,
"retries": 1
}
}

View file

@ -0,0 +1,33 @@
{
"name": "@stock-bot/config",
"version": "1.0.0",
"type": "module",
"exports": {
".": {
"import": "./dist/index.js",
"types": "./dist/index.d.ts"
}
},
"main": "./dist/index.js",
"types": "./dist/index.d.ts",
"scripts": {
"build": "tsc",
"dev": "tsc --watch",
"test": "bun test",
"clean": "rm -rf dist",
"cli": "bun run src/cli.ts",
"validate": "bun run src/cli.ts --validate",
"check": "bun run src/cli.ts --check"
},
"bin": {
"config-cli": "./dist/cli.js"
},
"dependencies": {
"zod": "^3.22.4"
},
"devDependencies": {
"@types/bun": "^1.0.0",
"@types/node": "^20.10.5",
"typescript": "^5.3.3"
}
}

196
libs/core/config/src/cli.ts Normal file
View file

@ -0,0 +1,196 @@
#!/usr/bin/env bun
/* eslint-disable no-console */
import { parseArgs } from 'util';
import { join } from 'path';
import { ConfigManager } from './config-manager';
import { appConfigSchema } from './schemas';
import {
validateConfig,
formatValidationResult,
checkDeprecations,
checkRequiredEnvVars,
validateCompleteness
} from './utils/validation';
import { redactSecrets } from './utils/secrets';
import type { Environment } from './types';
interface CliOptions {
config?: string;
env?: string;
validate?: boolean;
show?: boolean;
check?: boolean;
json?: boolean;
help?: boolean;
}
const DEPRECATIONS = {
'service.legacyMode': 'Use service.mode instead',
'database.redis': 'Use database.dragonfly instead',
};
const REQUIRED_PATHS = [
'service.name',
'service.port',
'database.postgres.host',
'database.postgres.database',
];
const REQUIRED_ENV_VARS = [
'NODE_ENV',
];
const SECRET_PATHS = [
'database.postgres.password',
'database.mongodb.uri',
'providers.quoteMedia.apiKey',
'providers.interactiveBrokers.clientId',
];
function printUsage() {
console.log(`
Stock Bot Configuration CLI
Usage: bun run config-cli [options]
Options:
--config <path> Path to config directory (default: ./config)
--env <env> Environment to use (development, test, production)
--validate Validate configuration against schema
--show Show current configuration (secrets redacted)
--check Run all configuration checks
--json Output in JSON format
--help Show this help message
Examples:
# Validate configuration
bun run config-cli --validate
# Show configuration for production
bun run config-cli --env production --show
# Run all checks
bun run config-cli --check
# Output configuration as JSON
bun run config-cli --show --json
`);
}
async function main() {
const { values } = parseArgs({
args: process.argv.slice(2),
options: {
config: { type: 'string' },
env: { type: 'string' },
validate: { type: 'boolean' },
show: { type: 'boolean' },
check: { type: 'boolean' },
json: { type: 'boolean' },
help: { type: 'boolean' },
},
}) as { values: CliOptions };
if (values.help) {
printUsage();
process.exit(0);
}
const configPath = values.config || join(process.cwd(), 'config');
const environment = values.env as Environment;
try {
const manager = new ConfigManager({
configPath,
environment,
});
const config = await manager.initialize(appConfigSchema);
if (values.validate) {
const result = validateConfig(config, appConfigSchema);
if (values.json) {
console.log(JSON.stringify(result, null, 2));
} else {
console.log(formatValidationResult(result));
}
process.exit(result.valid ? 0 : 1);
}
if (values.show) {
const redacted = redactSecrets(config, SECRET_PATHS);
if (values.json) {
console.log(JSON.stringify(redacted, null, 2));
} else {
console.log('Current Configuration:');
console.log(JSON.stringify(redacted, null, 2));
}
}
if (values.check) {
console.log('Running configuration checks...\n');
// Schema validation
console.log('1. Schema Validation:');
const schemaResult = validateConfig(config, appConfigSchema);
console.log(formatValidationResult(schemaResult));
console.log();
// Environment variables
console.log('2. Required Environment Variables:');
const envResult = checkRequiredEnvVars(REQUIRED_ENV_VARS);
console.log(formatValidationResult(envResult));
console.log();
// Required paths
console.log('3. Required Configuration Paths:');
const pathResult = validateCompleteness(config, REQUIRED_PATHS);
console.log(formatValidationResult(pathResult));
console.log();
// Deprecations
console.log('4. Deprecation Warnings:');
const warnings = checkDeprecations(config, DEPRECATIONS);
if (warnings && warnings.length > 0) {
for (const warning of warnings) {
console.log(` ⚠️ ${warning.path}: ${warning.message}`);
}
} else {
console.log(' ✅ No deprecated options found');
}
console.log();
// Overall result
const allValid = schemaResult.valid && envResult.valid && pathResult.valid;
if (allValid) {
console.log('✅ All configuration checks passed!');
process.exit(0);
} else {
console.log('❌ Some configuration checks failed');
process.exit(1);
}
}
if (!values.validate && !values.show && !values.check) {
console.log('No action specified. Use --help for usage information.');
process.exit(1);
}
} catch (error) {
if (values.json) {
console.error(JSON.stringify({ error: String(error) }));
} else {
console.error('Error:', error);
}
process.exit(1);
}
}
// Run CLI
if (import.meta.main) {
main();
}

View file

@ -0,0 +1,228 @@
import { join } from 'path';
import { z } from 'zod';
import { EnvLoader } from './loaders/env.loader';
import { FileLoader } from './loaders/file.loader';
import { ConfigError, ConfigValidationError } from './errors';
import {
ConfigLoader,
ConfigManagerOptions,
ConfigSchema,
DeepPartial,
Environment,
} from './types';
export class ConfigManager<T = Record<string, unknown>> {
private config: T | null = null;
private loaders: ConfigLoader[];
private environment: Environment;
private schema?: ConfigSchema;
constructor(options: ConfigManagerOptions = {}) {
this.environment = options.environment || this.detectEnvironment();
// Default loaders if none provided
if (options.loaders) {
this.loaders = options.loaders;
} else {
const configPath = options.configPath || join(process.cwd(), 'config');
this.loaders = [
new FileLoader(configPath, this.environment),
new EnvLoader(''), // No prefix for env vars to match our .env file
];
}
}
/**
* Initialize the configuration by loading from all sources synchronously.
*/
initialize(schema?: ConfigSchema): T {
if (this.config) {
return this.config;
}
this.schema = schema;
// Sort loaders by priority (higher priority last)
const sortedLoaders = [...this.loaders].sort((a, b) => a.priority - b.priority);
// Load configurations from all sources
const configs: Record<string, unknown>[] = [];
for (const loader of sortedLoaders) {
// Assuming all loaders now have a synchronous `load` method
const config = loader.load();
if (config && Object.keys(config).length > 0) {
configs.push(config);
}
}
// Merge all configurations
const mergedConfig = this.deepMerge(...configs) as T;
// Add environment if not present
if (
typeof mergedConfig === 'object' &&
mergedConfig !== null &&
!('environment' in mergedConfig)
) {
(mergedConfig as Record<string, unknown>)['environment'] = this.environment;
}
// Validate if schema provided
if (this.schema) {
try {
this.config = this.schema.parse(mergedConfig) as T;
} catch (error) {
if (error instanceof z.ZodError) {
throw new ConfigValidationError('Configuration validation failed', error.errors);
}
throw error;
}
} else {
this.config = mergedConfig;
}
return this.config;
}
/**
* Get the current configuration
*/
get(): T {
if (!this.config) {
throw new ConfigError('Configuration not initialized. Call initialize() first.');
}
return this.config;
}
/**
* Get a specific configuration value by path
*/
getValue<R = unknown>(path: string): R {
const config = this.get();
const keys = path.split('.');
let value: unknown = config;
for (const key of keys) {
if (value && typeof value === 'object' && key in value) {
value = (value as Record<string, unknown>)[key];
} else {
throw new ConfigError(`Configuration key not found: ${path}`);
}
}
return value as R;
}
/**
* Check if a configuration path exists
*/
has(path: string): boolean {
try {
this.getValue(path);
return true;
} catch {
return false;
}
}
/**
* Update configuration at runtime (useful for testing)
*/
set(updates: DeepPartial<T>): void {
if (!this.config) {
throw new ConfigError('Configuration not initialized. Call initialize() first.');
}
const updated = this.deepMerge(
this.config as Record<string, unknown>,
updates as Record<string, unknown>
) as T;
// Re-validate if schema is present
if (this.schema) {
try {
this.config = this.schema.parse(updated) as T;
} catch (error) {
if (error instanceof z.ZodError) {
throw new ConfigValidationError(
'Configuration validation failed after update',
error.errors
);
}
throw error;
}
} else {
this.config = updated;
}
}
/**
* Get the current environment
*/
getEnvironment(): Environment {
return this.environment;
}
/**
* Reset configuration (useful for testing)
*/
reset(): void {
this.config = null;
}
/**
* Validate configuration against a schema
*/
validate<S extends ConfigSchema>(schema: S): z.infer<S> {
const config = this.get();
return schema.parse(config);
}
/**
* Create a typed configuration getter
*/
createTypedGetter<S extends z.ZodSchema>(schema: S): () => z.infer<S> {
return () => this.validate(schema);
}
private detectEnvironment(): Environment {
const env = process.env.NODE_ENV?.toLowerCase();
switch (env) {
case 'production':
case 'prod':
return 'production';
case 'test':
return 'test';
case 'development':
case 'dev':
default:
return 'development';
}
}
private deepMerge(...objects: Record<string, unknown>[]): Record<string, unknown> {
const result: Record<string, unknown> = {};
for (const obj of objects) {
for (const [key, value] of Object.entries(obj)) {
if (value === null || value === undefined) {
result[key] = value;
} else if (
typeof value === 'object' &&
!Array.isArray(value) &&
!(value instanceof Date) &&
!(value instanceof RegExp)
) {
result[key] = this.deepMerge(
(result[key] as Record<string, unknown>) || ({} as Record<string, unknown>),
value as Record<string, unknown>
);
} else {
result[key] = value;
}
}
}
return result;
}
}

View file

@ -0,0 +1,20 @@
export class ConfigError extends Error {
constructor(message: string) {
super(message);
this.name = 'ConfigError';
}
}
export class ConfigValidationError extends ConfigError {
constructor(message: string, public errors: unknown) {
super(message);
this.name = 'ConfigValidationError';
}
}
export class ConfigLoaderError extends ConfigError {
constructor(message: string, public loader: string) {
super(`${loader}: ${message}`);
this.name = 'ConfigLoaderError';
}
}

View file

@ -0,0 +1,188 @@
// Import necessary types for singleton
import { EnvLoader } from './loaders/env.loader';
import { FileLoader } from './loaders/file.loader';
import { ConfigManager } from './config-manager';
import { AppConfig, appConfigSchema } from './schemas';
// Create singleton instance
let configInstance: ConfigManager<AppConfig> | null = null;
// Synchronously load critical env vars for early initialization
function loadCriticalEnvVarsSync(): void {
// Load .env file synchronously if it exists
try {
const fs = require('fs');
const path = require('path');
const envPath = path.resolve(process.cwd(), '.env');
if (fs.existsSync(envPath)) {
const envContent = fs.readFileSync(envPath, 'utf-8');
const lines = envContent.split('\n');
for (const line of lines) {
const trimmed = line.trim();
if (!trimmed || trimmed.startsWith('#')) {
continue;
}
const equalIndex = trimmed.indexOf('=');
if (equalIndex === -1) {
continue;
}
const key = trimmed.substring(0, equalIndex).trim();
let value = trimmed.substring(equalIndex + 1).trim();
// Remove surrounding quotes
if (
(value.startsWith('"') && value.endsWith('"')) ||
(value.startsWith("'") && value.endsWith("'"))
) {
value = value.slice(1, -1);
}
// Only set if not already set
if (!(key in process.env)) {
process.env[key] = value;
}
}
}
} catch {
// Ignore errors - env file is optional
}
}
// Load critical env vars immediately
loadCriticalEnvVarsSync();
/**
* Initialize the global configuration synchronously.
*
* This loads configuration from all sources in the correct hierarchy:
* 1. Schema defaults (lowest priority)
* 2. default.json
* 3. [environment].json (e.g., development.json)
* 4. .env file values
* 5. process.env values (highest priority)
*/
export function initializeConfig(configPath?: string): AppConfig {
if (!configInstance) {
configInstance = new ConfigManager<AppConfig>({
configPath,
});
}
return configInstance.initialize(appConfigSchema);
}
/**
* Initialize configuration for a service in a monorepo.
* Automatically loads configs from:
* 1. Root config directory (../../config)
* 2. Service-specific config directory (./config)
* 3. Environment variables
*/
export function initializeServiceConfig(): AppConfig {
if (!configInstance) {
const environment = process.env.NODE_ENV || 'development';
configInstance = new ConfigManager<AppConfig>({
loaders: [
new FileLoader('../../config', environment), // Root config
new FileLoader('./config', environment), // Service config
new EnvLoader(''), // Environment variables
],
});
}
return configInstance.initialize(appConfigSchema);
}
/**
* Get the current configuration
*/
export function getConfig(): AppConfig {
if (!configInstance) {
throw new Error('Configuration not initialized. Call initializeConfig() first.');
}
return configInstance.get();
}
/**
* Get configuration manager instance
*/
export function getConfigManager(): ConfigManager<AppConfig> {
if (!configInstance) {
throw new Error('Configuration not initialized. Call initializeConfig() first.');
}
return configInstance;
}
/**
* Reset configuration (useful for testing)
*/
export function resetConfig(): void {
if (configInstance) {
configInstance.reset();
configInstance = null;
}
}
// Export convenience functions for common configs
export function getDatabaseConfig() {
return getConfig().database;
}
export function getServiceConfig() {
return getConfig().service;
}
export function getLogConfig() {
return getConfig().log;
}
// Deprecated alias for backward compatibility
export function getLoggingConfig() {
return getConfig().log;
}
export function getProviderConfig(provider: string) {
const providers = getConfig().providers;
if (!providers || !(provider in providers)) {
throw new Error(`Provider configuration not found: ${provider}`);
}
return (providers as Record<string, unknown>)[provider];
}
export function getQueueConfig() {
return getConfig().queue;
}
// Export environment helpers
export function isDevelopment(): boolean {
return getConfig().environment === 'development';
}
export function isProduction(): boolean {
return getConfig().environment === 'production';
}
export function isTest(): boolean {
return getConfig().environment === 'test';
}
// Export all schemas
export * from './schemas';
// Export types
export * from './types';
// Export errors
export * from './errors';
// Export loaders
export { EnvLoader } from './loaders/env.loader';
export { FileLoader } from './loaders/file.loader';
// Export ConfigManager
export { ConfigManager } from './config-manager';
// Export utilities
export * from './utils/secrets';
export * from './utils/validation';

View file

@ -0,0 +1,277 @@
import { readFileSync } from 'fs';
import { ConfigLoaderError } from '../errors';
import { ConfigLoader } from '../types';
export interface EnvLoaderOptions {
convertCase?: boolean;
parseJson?: boolean;
parseValues?: boolean;
nestedDelimiter?: string;
}
export class EnvLoader implements ConfigLoader {
readonly priority = 100; // Highest priority
constructor(
private prefix = '',
private options: EnvLoaderOptions = {}
) {
this.options = {
convertCase: false,
parseJson: true,
parseValues: true,
nestedDelimiter: '_',
...options,
};
}
load(): Record<string, unknown> {
try {
// Load root .env file - try multiple possible locations
const possiblePaths = ['./.env', '../.env', '../../.env'];
for (const path of possiblePaths) {
this.loadEnvFile(path);
}
const config: Record<string, unknown> = {};
const envVars = process.env;
for (const [key, value] of Object.entries(envVars)) {
if (this.prefix && !key.startsWith(this.prefix)) {
continue;
}
const configKey = this.prefix ? key.slice(this.prefix.length) : key;
if (!this.options.convertCase && !this.options.nestedDelimiter) {
// Simple case - just keep the key as is
config[configKey] = this.parseValue(value || '');
} else {
// Handle nested structure or case conversion
this.setConfigValue(config, configKey, value || '');
}
}
return config;
} catch (error) {
throw new ConfigLoaderError(`Failed to load environment variables: ${error}`, 'EnvLoader');
}
}
private setConfigValue(config: Record<string, unknown>, key: string, value: string): void {
const parsedValue = this.parseValue(value);
try {
// Handle provider-specific environment variables (only for application usage, not tests)
if (!this.prefix && !this.options.convertCase) {
const providerMapping = this.getProviderMapping(key);
if (providerMapping) {
this.setNestedValue(config, providerMapping.path, parsedValue);
return;
}
}
if (this.options.convertCase) {
// Convert to camelCase
const camelKey = this.toCamelCase(key);
config[camelKey] = parsedValue;
} else if (
this.options.nestedDelimiter &&
this.options.nestedDelimiter !== '_' &&
key.includes(this.options.nestedDelimiter)
) {
// Handle nested delimiter (e.g., APP__NAME -> { APP: { NAME: value } })
const parts = key.split(this.options.nestedDelimiter);
this.setNestedValue(config, parts, parsedValue);
} else {
// Convert to nested structure based on underscores, or keep as-is if no underscores
if (key.includes('_')) {
const path = key.toLowerCase().split('_');
this.setNestedValue(config, path, parsedValue);
} else {
// Single key without underscores - keep original case
config[key] = parsedValue;
}
}
} catch {
// Skip environment variables that can't be set (readonly properties)
// This is expected behavior for system environment variables
}
}
private setNestedValue(obj: Record<string, unknown>, path: string[], value: unknown): boolean {
if (path.length === 0) {
return false; // Cannot set value on empty path
}
const lastKey = path.pop();
if (!lastKey) {
return false; // This should never happen due to length check above
}
try {
const target = path.reduce((acc, key) => {
if (!acc[key] || typeof acc[key] !== 'object') {
acc[key] = {};
}
return acc[key] as Record<string, unknown>;
}, obj);
(target as Record<string, unknown>)[lastKey] = value;
return true;
} catch {
// If we can't assign to any property (readonly), skip this env var silently
return false;
}
}
private toCamelCase(str: string): string {
return str.toLowerCase().replace(/_([a-z])/g, (_, char) => char.toUpperCase());
}
private getProviderMapping(envKey: string): { path: string[] } | null {
// Provider-specific and special environment variable mappings
const providerMappings: Record<string, string[]> = {
// WebShare provider mappings
WEBSHARE_API_KEY: ['webshare', 'apiKey'],
WEBSHARE_API_URL: ['webshare', 'apiUrl'],
WEBSHARE_ENABLED: ['webshare', 'enabled'],
// EOD provider mappings
EOD_API_KEY: ['providers', 'eod', 'apiKey'],
EOD_BASE_URL: ['providers', 'eod', 'baseUrl'],
EOD_TIER: ['providers', 'eod', 'tier'],
EOD_ENABLED: ['providers', 'eod', 'enabled'],
EOD_PRIORITY: ['providers', 'eod', 'priority'],
// Interactive Brokers provider mappings
IB_GATEWAY_HOST: ['providers', 'ib', 'gateway', 'host'],
IB_GATEWAY_PORT: ['providers', 'ib', 'gateway', 'port'],
IB_CLIENT_ID: ['providers', 'ib', 'gateway', 'clientId'],
IB_ACCOUNT: ['providers', 'ib', 'account'],
IB_MARKET_DATA_TYPE: ['providers', 'ib', 'marketDataType'],
IB_ENABLED: ['providers', 'ib', 'enabled'],
IB_PRIORITY: ['providers', 'ib', 'priority'],
// QuoteMedia provider mappings
QM_USERNAME: ['providers', 'qm', 'username'],
QM_PASSWORD: ['providers', 'qm', 'password'],
QM_BASE_URL: ['providers', 'qm', 'baseUrl'],
QM_WEBMASTER_ID: ['providers', 'qm', 'webmasterId'],
QM_ENABLED: ['providers', 'qm', 'enabled'],
QM_PRIORITY: ['providers', 'qm', 'priority'],
// Yahoo Finance provider mappings
YAHOO_BASE_URL: ['providers', 'yahoo', 'baseUrl'],
YAHOO_COOKIE_JAR: ['providers', 'yahoo', 'cookieJar'],
YAHOO_CRUMB: ['providers', 'yahoo', 'crumb'],
YAHOO_ENABLED: ['providers', 'yahoo', 'enabled'],
YAHOO_PRIORITY: ['providers', 'yahoo', 'priority'],
// General application config mappings
NAME: ['name'],
VERSION: ['version'],
// Log mappings (using LOG_ prefix for all)
LOG_LEVEL: ['log', 'level'],
LOG_FORMAT: ['log', 'format'],
LOG_HIDE_OBJECT: ['log', 'hideObject'],
LOG_LOKI_ENABLED: ['log', 'loki', 'enabled'],
LOG_LOKI_HOST: ['log', 'loki', 'host'],
LOG_LOKI_PORT: ['log', 'loki', 'port'],
// Special mappings to avoid conflicts
DEBUG_MODE: ['debug'],
};
const mapping = providerMappings[envKey];
return mapping ? { path: mapping } : null;
}
private parseValue(value: string): unknown {
if (!this.options.parseValues && !this.options.parseJson) {
return value;
}
// Try to parse as JSON first if enabled
if (this.options.parseJson) {
try {
return JSON.parse(value);
} catch {
// Not JSON, continue with other parsing
}
}
if (!this.options.parseValues) {
return value;
}
// Handle booleans
if (value.toLowerCase() === 'true') {
return true;
}
if (value.toLowerCase() === 'false') {
return false;
}
// Handle numbers
const num = Number(value);
if (!isNaN(num) && value !== '') {
return num;
}
// Handle null/undefined
if (value.toLowerCase() === 'null') {
return null;
}
if (value.toLowerCase() === 'undefined') {
return undefined;
}
// Return as string
return value;
}
private loadEnvFile(filePath: string): void {
try {
const envContent = readFileSync(filePath, 'utf-8');
const lines = envContent.split('\n');
for (const line of lines) {
const trimmed = line.trim();
if (!trimmed || trimmed.startsWith('#')) {
continue; // Skip empty lines and comments
}
const equalIndex = trimmed.indexOf('=');
if (equalIndex === -1) {
continue; // Skip lines without =
}
const key = trimmed.substring(0, equalIndex).trim();
let value = trimmed.substring(equalIndex + 1).trim();
// Remove surrounding quotes if present
if (
(value.startsWith('"') && value.endsWith('"')) ||
(value.startsWith("'") && value.endsWith("'"))
) {
value = value.slice(1, -1);
}
// Only set if not already set (allows override precedence)
if (!(key in process.env)) {
process.env[key] = value;
}
}
} catch (error: unknown) {
// File not found is not an error (env files are optional)
if (error && typeof error === 'object' && 'code' in error && error.code !== 'ENOENT') {
// eslint-disable-next-line no-console
console.warn(
`Warning: Could not load env file ${filePath}:`,
error instanceof Error ? error.message : String(error)
);
}
}
}
}

View file

@ -0,0 +1,68 @@
import { existsSync, readFileSync } from 'fs';
import { join } from 'path';
import { ConfigLoaderError } from '../errors';
import { ConfigLoader } from '../types';
export class FileLoader implements ConfigLoader {
readonly priority = 50; // Medium priority
constructor(
private configPath: string,
private environment: string
) {}
load(): Record<string, unknown> {
try {
const configs: Record<string, unknown>[] = [];
// Load default config
const defaultConfig = this.loadFile('default.json');
if (defaultConfig) {
configs.push(defaultConfig);
}
// Load environment-specific config
const envConfig = this.loadFile(`${this.environment}.json`);
if (envConfig) {
configs.push(envConfig);
}
// Merge configs (later configs override earlier ones)
return this.deepMerge(...configs);
} catch (error) {
throw new ConfigLoaderError(`Failed to load configuration files: ${error}`, 'FileLoader');
}
}
private loadFile(filename: string): Record<string, unknown> | null {
const filepath = join(this.configPath, filename);
if (!existsSync(filepath)) {
return null;
}
const content = readFileSync(filepath, 'utf-8');
return JSON.parse(content);
}
private deepMerge(...objects: Record<string, unknown>[]): Record<string, unknown> {
const result: Record<string, unknown> = {};
for (const obj of objects) {
for (const [key, value] of Object.entries(obj)) {
if (value === null || value === undefined) {
result[key] = value;
} else if (typeof value === 'object' && !Array.isArray(value) && value !== null) {
result[key] = this.deepMerge(
(result[key] as Record<string, unknown>) || {},
value as Record<string, unknown>
);
} else {
result[key] = value;
}
}
}
return result;
}
}

View file

@ -0,0 +1,10 @@
import { z } from 'zod';
export const environmentSchema = z.enum(['development', 'test', 'production']);
export const baseConfigSchema = z.object({
environment: environmentSchema.optional(),
name: z.string().optional(),
version: z.string().optional(),
debug: z.boolean().default(false),
});

View file

@ -0,0 +1,60 @@
import { z } from 'zod';
// PostgreSQL configuration
export const postgresConfigSchema = z.object({
host: z.string().default('localhost'),
port: z.number().default(5432),
database: z.string(),
user: z.string(),
password: z.string(),
ssl: z.boolean().default(false),
poolSize: z.number().min(1).max(100).default(10),
connectionTimeout: z.number().default(30000),
idleTimeout: z.number().default(10000),
});
// QuestDB configuration
export const questdbConfigSchema = z.object({
host: z.string().default('localhost'),
ilpPort: z.number().default(9009),
httpPort: z.number().default(9000),
pgPort: z.number().default(8812),
database: z.string().default('questdb'),
user: z.string().default('admin'),
password: z.string().default('quest'),
bufferSize: z.number().default(65536),
flushInterval: z.number().default(1000),
});
// MongoDB configuration
export const mongodbConfigSchema = z.object({
uri: z.string().url().optional(),
host: z.string().default('localhost'),
port: z.number().default(27017),
database: z.string(),
user: z.string().optional(),
password: z.string().optional(),
authSource: z.string().default('admin'),
replicaSet: z.string().optional(),
poolSize: z.number().min(1).max(100).default(10),
});
// Dragonfly/Redis configuration
export const dragonflyConfigSchema = z.object({
host: z.string().default('localhost'),
port: z.number().default(6379),
password: z.string().optional(),
db: z.number().min(0).max(15).default(0),
keyPrefix: z.string().optional(),
ttl: z.number().optional(),
maxRetries: z.number().default(3),
retryDelay: z.number().default(100),
});
// Combined database configuration
export const databaseConfigSchema = z.object({
postgres: postgresConfigSchema,
questdb: questdbConfigSchema,
mongodb: mongodbConfigSchema,
dragonfly: dragonflyConfigSchema,
});

View file

@ -0,0 +1,98 @@
export * from './base.schema';
export * from './database.schema';
export * from './provider.schema';
export * from './service.schema';
import { z } from 'zod';
import { baseConfigSchema, environmentSchema } from './base.schema';
import { providerConfigSchema, webshareProviderConfigSchema } from './provider.schema';
import { httpConfigSchema, queueConfigSchema } from './service.schema';
// Flexible service schema with defaults
const flexibleServiceConfigSchema = z.object({
name: z.string().default('default-service'),
port: z.number().min(1).max(65535).default(3000),
host: z.string().default('0.0.0.0'),
healthCheckPath: z.string().default('/health'),
metricsPath: z.string().default('/metrics'),
shutdownTimeout: z.number().default(30000),
cors: z.object({
enabled: z.boolean().default(true),
origin: z.union([z.string(), z.array(z.string())]).default('*'),
credentials: z.boolean().default(true),
}).default({}),
}).default({});
// Flexible database schema with defaults
const flexibleDatabaseConfigSchema = z.object({
postgres: z.object({
host: z.string().default('localhost'),
port: z.number().default(5432),
database: z.string().default('test_db'),
user: z.string().default('test_user'),
password: z.string().default('test_pass'),
ssl: z.boolean().default(false),
poolSize: z.number().min(1).max(100).default(10),
connectionTimeout: z.number().default(30000),
idleTimeout: z.number().default(10000),
}).default({}),
questdb: z.object({
host: z.string().default('localhost'),
ilpPort: z.number().default(9009),
httpPort: z.number().default(9000),
pgPort: z.number().default(8812),
database: z.string().default('questdb'),
user: z.string().default('admin'),
password: z.string().default('quest'),
bufferSize: z.number().default(65536),
flushInterval: z.number().default(1000),
}).default({}),
mongodb: z.object({
uri: z.string().url().optional(),
host: z.string().default('localhost'),
port: z.number().default(27017),
database: z.string().default('test_mongo'),
user: z.string().optional(),
password: z.string().optional(),
authSource: z.string().default('admin'),
replicaSet: z.string().optional(),
poolSize: z.number().min(1).max(100).default(10),
}).default({}),
dragonfly: z.object({
host: z.string().default('localhost'),
port: z.number().default(6379),
password: z.string().optional(),
db: z.number().min(0).max(15).default(0),
keyPrefix: z.string().optional(),
ttl: z.number().optional(),
maxRetries: z.number().default(3),
retryDelay: z.number().default(100),
}).default({}),
}).default({});
// Flexible log schema with defaults (renamed from logging)
const flexibleLogConfigSchema = z.object({
level: z.enum(['trace', 'debug', 'info', 'warn', 'error', 'fatal']).default('info'),
format: z.enum(['json', 'pretty']).default('json'),
hideObject: z.boolean().default(false),
loki: z.object({
enabled: z.boolean().default(false),
host: z.string().default('localhost'),
port: z.number().default(3100),
labels: z.record(z.string()).default({}),
}).optional(),
}).default({});
// Complete application configuration schema
export const appConfigSchema = baseConfigSchema.extend({
environment: environmentSchema.default('development'),
service: flexibleServiceConfigSchema,
log: flexibleLogConfigSchema,
database: flexibleDatabaseConfigSchema,
queue: queueConfigSchema.optional(),
http: httpConfigSchema.optional(),
providers: providerConfigSchema.optional(),
webshare: webshareProviderConfigSchema.optional(),
});
export type AppConfig = z.infer<typeof appConfigSchema>;

View file

@ -0,0 +1,74 @@
import { z } from 'zod';
// Base provider configuration
export const baseProviderConfigSchema = z.object({
name: z.string(),
enabled: z.boolean().default(true),
priority: z.number().default(0),
rateLimit: z.object({
maxRequests: z.number().default(100),
windowMs: z.number().default(60000),
}).optional(),
timeout: z.number().default(30000),
retries: z.number().default(3),
});
// EOD Historical Data provider
export const eodProviderConfigSchema = baseProviderConfigSchema.extend({
apiKey: z.string(),
baseUrl: z.string().default('https://eodhistoricaldata.com/api'),
tier: z.enum(['free', 'fundamentals', 'all-in-one']).default('free'),
});
// Interactive Brokers provider
export const ibProviderConfigSchema = baseProviderConfigSchema.extend({
gateway: z.object({
host: z.string().default('localhost'),
port: z.number().default(5000),
clientId: z.number().default(1),
}),
account: z.string().optional(),
marketDataType: z.enum(['live', 'delayed', 'frozen']).default('delayed'),
});
// QuoteMedia provider
export const qmProviderConfigSchema = baseProviderConfigSchema.extend({
username: z.string(),
password: z.string(),
baseUrl: z.string().default('https://app.quotemedia.com/quotetools'),
webmasterId: z.string(),
});
// Yahoo Finance provider
export const yahooProviderConfigSchema = baseProviderConfigSchema.extend({
baseUrl: z.string().default('https://query1.finance.yahoo.com'),
cookieJar: z.boolean().default(true),
crumb: z.string().optional(),
});
// WebShare proxy provider
export const webshareProviderConfigSchema = z.object({
apiKey: z.string().optional(),
apiUrl: z.string().default('https://proxy.webshare.io/api/v2/'),
enabled: z.boolean().default(true),
});
// Combined provider configuration
export const providerConfigSchema = z.object({
eod: eodProviderConfigSchema.optional(),
ib: ibProviderConfigSchema.optional(),
qm: qmProviderConfigSchema.optional(),
yahoo: yahooProviderConfigSchema.optional(),
webshare: webshareProviderConfigSchema.optional(),
});
// Dynamic provider configuration type
export type ProviderName = 'eod' | 'ib' | 'qm' | 'yahoo' | 'webshare';
export const providerSchemas = {
eod: eodProviderConfigSchema,
ib: ibProviderConfigSchema,
qm: qmProviderConfigSchema,
yahoo: yahooProviderConfigSchema,
webshare: webshareProviderConfigSchema,
} as const;

View file

@ -0,0 +1,63 @@
import { z } from 'zod';
// Common service configuration
export const serviceConfigSchema = z.object({
name: z.string(),
port: z.number().min(1).max(65535),
host: z.string().default('0.0.0.0'),
healthCheckPath: z.string().default('/health'),
metricsPath: z.string().default('/metrics'),
shutdownTimeout: z.number().default(30000),
cors: z.object({
enabled: z.boolean().default(true),
origin: z.union([z.string(), z.array(z.string())]).default('*'),
credentials: z.boolean().default(true),
}).default({}),
});
// Logging configuration
export const loggingConfigSchema = z.object({
level: z.enum(['trace', 'debug', 'info', 'warn', 'error', 'fatal']).default('info'),
format: z.enum(['json', 'pretty']).default('json'),
loki: z.object({
enabled: z.boolean().default(false),
host: z.string().default('localhost'),
port: z.number().default(3100),
labels: z.record(z.string()).default({}),
}).optional(),
});
// Queue configuration
export const queueConfigSchema = z.object({
redis: z.object({
host: z.string().default('localhost'),
port: z.number().default(6379),
password: z.string().optional(),
db: z.number().default(1),
}),
defaultJobOptions: z.object({
attempts: z.number().default(3),
backoff: z.object({
type: z.enum(['exponential', 'fixed']).default('exponential'),
delay: z.number().default(1000),
}).default({}),
removeOnComplete: z.number().default(10),
removeOnFail: z.number().default(5),
}).default({}),
});
// HTTP client configuration
export const httpConfigSchema = z.object({
timeout: z.number().default(30000),
retries: z.number().default(3),
retryDelay: z.number().default(1000),
userAgent: z.string().optional(),
proxy: z.object({
enabled: z.boolean().default(false),
url: z.string().url().optional(),
auth: z.object({
username: z.string(),
password: z.string(),
}).optional(),
}).optional(),
});

View file

@ -0,0 +1,28 @@
import { z } from 'zod';
export type Environment = 'development' | 'test' | 'production';
export interface ConfigLoader {
load(): Record<string, unknown>;
readonly priority: number;
}
export interface ConfigManagerOptions {
environment?: Environment;
configPath?: string;
loaders?: ConfigLoader[];
}
export type DeepPartial<T> = T extends object
? {
[P in keyof T]?: DeepPartial<T[P]>;
}
: T;
export type ConfigSchema = z.ZodSchema<unknown>;
export interface ProviderConfig {
name: string;
enabled: boolean;
[key: string]: unknown;
}

View file

@ -0,0 +1,183 @@
import { z } from 'zod';
/**
* Secret value wrapper to prevent accidental logging
*/
export class SecretValue<T = string> {
private readonly value: T;
private readonly masked: string;
constructor(value: T, mask: string = '***') {
this.value = value;
this.masked = mask;
}
/**
* Get the actual secret value
* @param reason - Required reason for accessing the secret
*/
reveal(reason: string): T {
if (!reason) {
throw new Error('Reason required for revealing secret value');
}
return this.value;
}
/**
* Get masked representation
*/
toString(): string {
return this.masked;
}
/**
* Prevent JSON serialization of actual value
*/
toJSON(): string {
return this.masked;
}
/**
* Check if value matches without revealing it
*/
equals(other: T): boolean {
return this.value === other;
}
/**
* Transform the secret value
*/
map<R>(fn: (value: T) => R, reason: string): SecretValue<R> {
return new SecretValue(fn(this.reveal(reason)));
}
}
/**
* Zod schema for secret values
*/
export const secretSchema = <T extends z.ZodTypeAny>(_schema: T) => {
return z.custom<SecretValue<z.infer<T>>>(
(val) => val instanceof SecretValue,
{
message: 'Expected SecretValue instance',
}
);
};
/**
* Transform string to SecretValue in Zod schema
*/
export const secretStringSchema = z
.string()
.transform((val) => new SecretValue(val));
/**
* Create a secret value
*/
export function secret<T = string>(value: T, mask?: string): SecretValue<T> {
return new SecretValue(value, mask);
}
/**
* Check if a value is a secret
*/
export function isSecret(value: unknown): value is SecretValue {
return value instanceof SecretValue;
}
/**
* Redact secrets from an object
*/
export function redactSecrets<T extends Record<string, any>>(
obj: T,
secretPaths: string[] = []
): T {
const result = { ...obj };
// Redact known secret paths
for (const path of secretPaths) {
const keys = path.split('.');
let current: any = result;
for (let i = 0; i < keys.length - 1; i++) {
const key = keys[i];
if (key && current[key] && typeof current[key] === 'object') {
current = current[key];
} else {
break;
}
}
const lastKey = keys[keys.length - 1];
if (current && lastKey && lastKey in current) {
current[lastKey] = '***REDACTED***';
}
}
// Recursively redact SecretValue instances
function redactSecretValues(obj: any): any {
if (obj === null || obj === undefined) {
return obj;
}
if (isSecret(obj)) {
return obj.toString();
}
if (Array.isArray(obj)) {
return obj.map(redactSecretValues);
}
if (typeof obj === 'object') {
const result: any = {};
for (const [key, value] of Object.entries(obj)) {
result[key] = redactSecretValues(value);
}
return result;
}
return obj;
}
return redactSecretValues(result);
}
/**
* Environment variable names that should be treated as secrets
*/
export const COMMON_SECRET_PATTERNS = [
/password/i,
/secret/i,
/key/i,
/token/i,
/credential/i,
/private/i,
/auth/i,
/api[-_]?key/i,
];
/**
* Check if an environment variable name indicates a secret
*/
export function isSecretEnvVar(name: string): boolean {
return COMMON_SECRET_PATTERNS.some(pattern => pattern.test(name));
}
/**
* Wrap environment variables that look like secrets
*/
export function wrapSecretEnvVars(
env: Record<string, string | undefined>
): Record<string, string | SecretValue | undefined> {
const result: Record<string, string | SecretValue | undefined> = {};
for (const [key, value] of Object.entries(env)) {
if (value !== undefined && isSecretEnvVar(key)) {
result[key] = new SecretValue(value, `***${key}***`);
} else {
result[key] = value;
}
}
return result;
}

View file

@ -0,0 +1,195 @@
import { z } from 'zod';
export interface ValidationResult {
valid: boolean;
errors?: Array<{
path: string;
message: string;
expected?: string;
received?: string;
}>;
warnings?: Array<{
path: string;
message: string;
}>;
}
/**
* Validate configuration against a schema
*/
export function validateConfig<T>(
config: unknown,
schema: z.ZodSchema<T>
): ValidationResult {
try {
schema.parse(config);
return { valid: true };
} catch (error) {
if (error instanceof z.ZodError) {
const errors = error.errors.map(err => ({
path: err.path.join('.'),
message: err.message,
expected: 'expected' in err ? String(err.expected) : undefined,
received: 'received' in err ? String(err.received) : undefined,
}));
return { valid: false, errors };
}
throw error;
}
}
/**
* Check for deprecated configuration options
*/
export function checkDeprecations(
config: Record<string, unknown>,
deprecations: Record<string, string>
): ValidationResult['warnings'] {
const warnings: ValidationResult['warnings'] = [];
function checkObject(obj: Record<string, unknown>, path: string[] = []): void {
for (const [key, value] of Object.entries(obj)) {
const currentPath = [...path, key];
const pathStr = currentPath.join('.');
if (pathStr in deprecations) {
const deprecationMessage = deprecations[pathStr];
if (deprecationMessage) {
warnings?.push({
path: pathStr,
message: deprecationMessage,
});
}
}
if (value && typeof value === 'object' && !Array.isArray(value)) {
checkObject(value as Record<string, unknown>, currentPath);
}
}
}
checkObject(config);
return warnings;
}
/**
* Check for required environment variables
*/
export function checkRequiredEnvVars(
required: string[]
): ValidationResult {
const errors: ValidationResult['errors'] = [];
for (const envVar of required) {
if (!process.env[envVar]) {
errors.push({
path: `env.${envVar}`,
message: `Required environment variable ${envVar} is not set`,
});
}
}
return {
valid: errors.length === 0,
errors: errors.length > 0 ? errors : undefined,
};
}
/**
* Validate configuration completeness
*/
export function validateCompleteness(
config: Record<string, any>,
required: string[]
): ValidationResult {
const errors: ValidationResult['errors'] = [];
for (const path of required) {
const keys = path.split('.');
let current: any = config;
let found = true;
for (const key of keys) {
if (current && typeof current === 'object' && key in current) {
current = current[key];
} else {
found = false;
break;
}
}
if (!found || current === undefined || current === null) {
errors.push({
path,
message: `Required configuration value is missing`,
});
}
}
return {
valid: errors.length === 0,
errors: errors.length > 0 ? errors : undefined,
};
}
/**
* Format validation result for display
*/
export function formatValidationResult(result: ValidationResult): string {
const lines: string[] = [];
if (result.valid) {
lines.push('✅ Configuration is valid');
} else {
lines.push('❌ Configuration validation failed');
}
if (result.errors && result.errors.length > 0) {
lines.push('\nErrors:');
for (const error of result.errors) {
lines.push(` - ${error.path}: ${error.message}`);
if (error.expected && error.received) {
lines.push(` Expected: ${error.expected}, Received: ${error.received}`);
}
}
}
if (result.warnings && result.warnings.length > 0) {
lines.push('\nWarnings:');
for (const warning of result.warnings) {
lines.push(` - ${warning.path}: ${warning.message}`);
}
}
return lines.join('\n');
}
/**
* Create a strict schema that doesn't allow extra properties
*/
export function createStrictSchema<T extends z.ZodRawShape>(
shape: T
): z.ZodObject<T, 'strict'> {
return z.object(shape).strict();
}
/**
* Merge multiple schemas
*/
export function mergeSchemas<T extends z.ZodSchema[]>(
...schemas: T
): z.ZodIntersection<T[0], T[1]> {
if (schemas.length < 2) {
throw new Error('At least two schemas required for merge');
}
let result = schemas[0]!.and(schemas[1]!);
for (let i = 2; i < schemas.length; i++) {
result = result.and(schemas[i]!) as any;
}
return result as any;
}

View file

@ -0,0 +1,215 @@
import { describe, test, expect, beforeEach } from 'bun:test';
import { z } from 'zod';
import { ConfigManager } from '../src/config-manager';
import { ConfigLoader } from '../src/types';
import { ConfigValidationError } from '../src/errors';
// Mock loader for testing
class MockLoader implements ConfigLoader {
priority = 0;
constructor(
private data: Record<string, unknown>,
public override priority: number = 0
) {}
async load(): Promise<Record<string, unknown>> {
return this.data;
}
}
// Test schema
const testSchema = z.object({
app: z.object({
name: z.string(),
version: z.string(),
port: z.number().positive(),
}),
database: z.object({
host: z.string(),
port: z.number(),
}),
environment: z.enum(['development', 'test', 'production']),
});
type TestConfig = z.infer<typeof testSchema>;
describe('ConfigManager', () => {
let manager: ConfigManager<TestConfig>;
beforeEach(() => {
manager = new ConfigManager<TestConfig>({
loaders: [
new MockLoader({
app: {
name: 'test-app',
version: '1.0.0',
port: 3000,
},
database: {
host: 'localhost',
port: 5432,
},
}),
],
environment: 'test',
});
});
test('should initialize configuration', async () => {
const config = await manager.initialize(testSchema);
expect(config.app.name).toBe('test-app');
expect(config.app.version).toBe('1.0.0');
expect(config.environment).toBe('test');
});
test('should merge multiple loaders by priority', async () => {
manager = new ConfigManager<TestConfig>({
loaders: [
new MockLoader({ app: { name: 'base', port: 3000 } }, 0),
new MockLoader({ app: { name: 'override', version: '2.0.0' } }, 10),
new MockLoader({ database: { host: 'prod-db' } }, 5),
],
environment: 'test',
});
const config = await manager.initialize();
expect(config.app.name).toBe('override');
expect(config.app.version).toBe('2.0.0');
expect(config.app.port).toBe(3000);
expect(config.database.host).toBe('prod-db');
});
test('should validate configuration with schema', async () => {
manager = new ConfigManager<TestConfig>({
loaders: [
new MockLoader({
app: {
name: 'test-app',
version: '1.0.0',
port: 'invalid', // Should be number
},
}),
],
});
await expect(manager.initialize(testSchema)).rejects.toThrow(ConfigValidationError);
});
test('should get configuration value by path', async () => {
await manager.initialize(testSchema);
expect(manager.getValue('app.name')).toBe('test-app');
expect(manager.getValue<number>('database.port')).toBe(5432);
});
test('should check if configuration path exists', async () => {
await manager.initialize(testSchema);
expect(manager.has('app.name')).toBe(true);
expect(manager.has('app.nonexistent')).toBe(false);
});
test('should update configuration at runtime', async () => {
await manager.initialize(testSchema);
manager.set({
app: {
name: 'updated-app',
},
});
const config = manager.get();
expect(config.app.name).toBe('updated-app');
expect(config.app.version).toBe('1.0.0'); // Should preserve other values
});
test('should validate updates against schema', async () => {
await manager.initialize(testSchema);
expect(() => {
manager.set({
app: {
port: 'invalid' as any,
},
});
}).toThrow(ConfigValidationError);
});
test('should reset configuration', async () => {
await manager.initialize(testSchema);
manager.reset();
expect(() => manager.get()).toThrow('Configuration not initialized');
});
test('should create typed getter', async () => {
await manager.initialize(testSchema);
const appSchema = z.object({
app: z.object({
name: z.string(),
version: z.string(),
}),
});
const getAppConfig = manager.createTypedGetter(appSchema);
const appConfig = getAppConfig();
expect(appConfig.app.name).toBe('test-app');
});
test('should detect environment correctly', () => {
const originalEnv = process.env.NODE_ENV;
process.env.NODE_ENV = 'production';
const prodManager = new ConfigManager({ loaders: [] });
expect(prodManager.getEnvironment()).toBe('production');
process.env.NODE_ENV = 'test';
const testManager = new ConfigManager({ loaders: [] });
expect(testManager.getEnvironment()).toBe('test');
process.env.NODE_ENV = originalEnv;
});
test('should handle deep merge correctly', async () => {
manager = new ConfigManager({
loaders: [
new MockLoader({
app: {
settings: {
feature1: true,
feature2: false,
nested: {
value: 'base',
},
},
},
}, 0),
new MockLoader({
app: {
settings: {
feature2: true,
feature3: true,
nested: {
value: 'override',
extra: 'new',
},
},
},
}, 10),
],
});
const config = await manager.initialize();
expect(config.app.settings.feature1).toBe(true);
expect(config.app.settings.feature2).toBe(true);
expect(config.app.settings.feature3).toBe(true);
expect(config.app.settings.nested.value).toBe('override');
expect(config.app.settings.nested.extra).toBe('new');
});
});

View file

@ -0,0 +1,386 @@
import { describe, test, expect, beforeEach, afterEach } from 'bun:test';
import { join } from 'path';
import { mkdirSync, writeFileSync, rmSync, existsSync } from 'fs';
import { ConfigManager } from '../src/config-manager';
import { FileLoader } from '../src/loaders/file.loader';
import { EnvLoader } from '../src/loaders/env.loader';
import { initializeConfig, initializeServiceConfig, resetConfig } from '../src/index';
import { appConfigSchema } from '../src/schemas';
// Test directories setup
const TEST_ROOT = join(__dirname, 'test-scenarios');
const SCENARIOS = {
monorepoRoot: join(TEST_ROOT, 'monorepo'),
appService: join(TEST_ROOT, 'monorepo', 'apps', 'test-service'),
libService: join(TEST_ROOT, 'monorepo', 'libs', 'test-lib'),
nestedService: join(TEST_ROOT, 'monorepo', 'apps', 'nested', 'deep-service'),
standalone: join(TEST_ROOT, 'standalone'),
};
describe('Dynamic Location Config Loading', () => {
beforeEach(() => {
// Clean up any existing test directories
if (existsSync(TEST_ROOT)) {
rmSync(TEST_ROOT, { recursive: true, force: true });
}
// Reset config singleton
resetConfig();
// Create test directory structure
setupTestScenarios();
});
afterEach(() => {
// Clean up test directories
if (existsSync(TEST_ROOT)) {
rmSync(TEST_ROOT, { recursive: true, force: true });
}
// Reset config singleton
resetConfig();
});
test('should load config from monorepo root', async () => {
const originalCwd = process.cwd();
try {
// Change to monorepo root
process.chdir(SCENARIOS.monorepoRoot);
const config = await initializeConfig();
expect(config.name).toBe('monorepo-root');
expect(config.version).toBe('1.0.0');
expect(config.database.postgres.host).toBe('localhost');
} finally {
process.chdir(originalCwd);
}
});
test('should load config from app service directory', async () => {
const originalCwd = process.cwd();
try {
// Change to app service directory
process.chdir(SCENARIOS.appService);
const config = await initializeServiceConfig();
// Should inherit from root + override with service config
expect(config.name).toBe('test-service'); // Overridden by service
expect(config.version).toBe('1.0.0'); // From root
expect(config.database.postgres.host).toBe('service-db'); // Overridden by service
expect(config.service.port).toBe(4000); // Service-specific
} finally {
process.chdir(originalCwd);
}
});
test('should load config from lib directory', async () => {
const originalCwd = process.cwd();
try {
// Change to lib directory
process.chdir(SCENARIOS.libService);
const config = await initializeServiceConfig();
// Should inherit from root + override with lib config
expect(config.name).toBe('test-lib'); // Overridden by lib
expect(config.version).toBe('2.0.0'); // Overridden by lib
expect(config.database.postgres.host).toBe('localhost'); // From root
expect(config.service.port).toBe(5000); // Lib-specific
} finally {
process.chdir(originalCwd);
}
});
test('should load config from deeply nested service', async () => {
const originalCwd = process.cwd();
try {
// Change to nested service directory
process.chdir(SCENARIOS.nestedService);
const config = await initializeServiceConfig();
// Should inherit from root + override with nested service config
expect(config.name).toBe('deep-service'); // Overridden by nested service
// NOTE: Version inheritance doesn't work for deeply nested services (3+ levels)
// because initializeServiceConfig() uses hardcoded '../../config' path
expect(config.version).toBeUndefined(); // Not inherited due to path limitation
expect(config.database.postgres.host).toBe('deep-db'); // Overridden by nested service
expect(config.service.port).toBe(6000); // Nested service-specific
} finally {
process.chdir(originalCwd);
}
});
test('should load config from standalone project', async () => {
const originalCwd = process.cwd();
try {
// Change to standalone directory
process.chdir(SCENARIOS.standalone);
const config = await initializeConfig();
expect(config.name).toBe('standalone-app');
expect(config.version).toBe('0.1.0');
expect(config.database.postgres.host).toBe('standalone-db');
} finally {
process.chdir(originalCwd);
}
});
test('should handle missing config files gracefully', async () => {
const originalCwd = process.cwd();
try {
// Change to directory with no config files
const emptyDir = join(TEST_ROOT, 'empty');
mkdirSync(emptyDir, { recursive: true });
process.chdir(emptyDir);
// Should not throw but use defaults and env vars
const config = await initializeConfig();
// Should have default values from schema
expect(config.environment).toBe('test'); // Tests run with NODE_ENV=test
expect(typeof config.service).toBe('object');
} finally {
process.chdir(originalCwd);
}
});
test('should prioritize environment variables over file configs', async () => {
const originalCwd = process.cwd();
const originalEnv = { ...process.env };
try {
// Set environment variables
process.env.NAME = 'env-override';
process.env.VERSION = '3.0.0';
process.env.DATABASE_POSTGRES_HOST = 'env-db';
process.chdir(SCENARIOS.appService);
resetConfig(); // Reset to test env override
const config = await initializeServiceConfig();
// Environment variables should override file configs
expect(config.name).toBe('env-override');
expect(config.version).toBe('3.0.0');
expect(config.database.postgres.host).toBe('env-db');
} finally {
process.chdir(originalCwd);
process.env = originalEnv;
}
});
test('should work with custom config paths', async () => {
const originalCwd = process.cwd();
try {
process.chdir(SCENARIOS.monorepoRoot);
// Initialize with custom config path
resetConfig();
const manager = new ConfigManager({
configPath: join(SCENARIOS.appService, 'config')
});
const config = await manager.initialize(appConfigSchema);
// Should load from the custom path
expect(config.name).toBe('test-service');
expect(config.service.port).toBe(4000);
} finally {
process.chdir(originalCwd);
}
});
});
function setupTestScenarios() {
// Create monorepo structure
mkdirSync(SCENARIOS.monorepoRoot, { recursive: true });
mkdirSync(join(SCENARIOS.monorepoRoot, 'config'), { recursive: true });
mkdirSync(join(SCENARIOS.appService, 'config'), { recursive: true });
mkdirSync(join(SCENARIOS.libService, 'config'), { recursive: true });
mkdirSync(join(SCENARIOS.nestedService, 'config'), { recursive: true });
mkdirSync(join(SCENARIOS.standalone, 'config'), { recursive: true });
// Root config (create for both development and test environments)
const rootConfig = {
name: 'monorepo-root',
version: '1.0.0',
service: {
name: 'monorepo-root',
port: 3000
},
database: {
postgres: {
host: 'localhost',
port: 5432,
database: 'test_db',
user: 'test_user',
password: 'test_pass'
},
questdb: {
host: 'localhost',
ilpPort: 9009
},
mongodb: {
host: 'localhost',
port: 27017,
database: 'test_mongo'
},
dragonfly: {
host: 'localhost',
port: 6379
}
},
logging: {
level: 'info'
}
};
writeFileSync(
join(SCENARIOS.monorepoRoot, 'config', 'development.json'),
JSON.stringify(rootConfig, null, 2)
);
writeFileSync(
join(SCENARIOS.monorepoRoot, 'config', 'test.json'),
JSON.stringify(rootConfig, null, 2)
);
// App service config
const appServiceConfig = {
name: 'test-service',
database: {
postgres: {
host: 'service-db'
}
},
service: {
name: 'test-service',
port: 4000
}
};
writeFileSync(
join(SCENARIOS.appService, 'config', 'development.json'),
JSON.stringify(appServiceConfig, null, 2)
);
writeFileSync(
join(SCENARIOS.appService, 'config', 'test.json'),
JSON.stringify(appServiceConfig, null, 2)
);
// Lib config
const libServiceConfig = {
name: 'test-lib',
version: '2.0.0',
service: {
name: 'test-lib',
port: 5000
}
};
writeFileSync(
join(SCENARIOS.libService, 'config', 'development.json'),
JSON.stringify(libServiceConfig, null, 2)
);
writeFileSync(
join(SCENARIOS.libService, 'config', 'test.json'),
JSON.stringify(libServiceConfig, null, 2)
);
// Nested service config
const nestedServiceConfig = {
name: 'deep-service',
database: {
postgres: {
host: 'deep-db'
}
},
service: {
name: 'deep-service',
port: 6000
}
};
writeFileSync(
join(SCENARIOS.nestedService, 'config', 'development.json'),
JSON.stringify(nestedServiceConfig, null, 2)
);
writeFileSync(
join(SCENARIOS.nestedService, 'config', 'test.json'),
JSON.stringify(nestedServiceConfig, null, 2)
);
// Standalone config
const standaloneConfig = {
name: 'standalone-app',
version: '0.1.0',
service: {
name: 'standalone-app',
port: 7000
},
database: {
postgres: {
host: 'standalone-db',
port: 5432,
database: 'standalone_db',
user: 'standalone_user',
password: 'standalone_pass'
},
questdb: {
host: 'localhost',
ilpPort: 9009
},
mongodb: {
host: 'localhost',
port: 27017,
database: 'standalone_mongo'
},
dragonfly: {
host: 'localhost',
port: 6379
}
},
logging: {
level: 'debug'
}
};
writeFileSync(
join(SCENARIOS.standalone, 'config', 'development.json'),
JSON.stringify(standaloneConfig, null, 2)
);
writeFileSync(
join(SCENARIOS.standalone, 'config', 'test.json'),
JSON.stringify(standaloneConfig, null, 2)
);
// Add .env files for testing
writeFileSync(
join(SCENARIOS.monorepoRoot, '.env'),
`NODE_ENV=development
DEBUG=true
`
);
writeFileSync(
join(SCENARIOS.appService, '.env'),
`SERVICE_DEBUG=true
APP_EXTRA_FEATURE=enabled
`
);
}

View file

@ -0,0 +1,384 @@
import { describe, test, expect, beforeEach, afterEach } from 'bun:test';
import { join } from 'path';
import { mkdirSync, writeFileSync, rmSync, existsSync, chmodSync } from 'fs';
import { ConfigManager } from '../src/config-manager';
import { FileLoader } from '../src/loaders/file.loader';
import { EnvLoader } from '../src/loaders/env.loader';
import { initializeConfig, initializeServiceConfig, resetConfig } from '../src/index';
import { appConfigSchema } from '../src/schemas';
import { ConfigError, ConfigValidationError } from '../src/errors';
const TEST_DIR = join(__dirname, 'edge-case-tests');
describe('Edge Cases and Error Handling', () => {
let originalEnv: NodeJS.ProcessEnv;
let originalCwd: string;
beforeEach(() => {
originalEnv = { ...process.env };
originalCwd = process.cwd();
resetConfig();
if (existsSync(TEST_DIR)) {
rmSync(TEST_DIR, { recursive: true, force: true });
}
mkdirSync(TEST_DIR, { recursive: true });
});
afterEach(() => {
process.env = originalEnv;
process.chdir(originalCwd);
resetConfig();
if (existsSync(TEST_DIR)) {
rmSync(TEST_DIR, { recursive: true, force: true });
}
});
test('should handle missing .env files gracefully', async () => {
// No .env file exists
const manager = new ConfigManager({
loaders: [new EnvLoader('')]
});
// Should not throw even without .env file
const config = await manager.initialize(appConfigSchema);
expect(config).toBeDefined();
});
test('should handle corrupted JSON config files', async () => {
const configDir = join(TEST_DIR, 'config');
mkdirSync(configDir, { recursive: true });
// Create corrupted JSON file
writeFileSync(
join(configDir, 'development.json'),
'{ "app": { "name": "test", invalid json }'
);
const manager = new ConfigManager({
loaders: [new FileLoader(configDir, 'development')]
});
// Should throw error for invalid JSON
await expect(manager.initialize(appConfigSchema)).rejects.toThrow();
});
test('should handle missing config directories', async () => {
const nonExistentDir = join(TEST_DIR, 'nonexistent');
const manager = new ConfigManager({
loaders: [new FileLoader(nonExistentDir, 'development')]
});
// Should not throw, should return empty config
const config = await manager.initialize();
expect(config).toBeDefined();
});
test('should handle permission denied on config files', async () => {
const configDir = join(TEST_DIR, 'config');
mkdirSync(configDir, { recursive: true });
const configFile = join(configDir, 'development.json');
writeFileSync(configFile, JSON.stringify({ app: { name: 'test' } }));
// Make file unreadable (this might not work on all systems)
try {
chmodSync(configFile, 0o000);
const manager = new ConfigManager({
loaders: [new FileLoader(configDir, 'development')]
});
// Should handle permission error gracefully
const config = await manager.initialize();
expect(config).toBeDefined();
} finally {
// Restore permissions for cleanup
try {
chmodSync(configFile, 0o644);
} catch {
// Ignore errors during cleanup
}
}
});
test('should handle circular references in config merging', async () => {
// This tests deep merge with potential circular references
const configDir = join(TEST_DIR, 'config');
mkdirSync(configDir, { recursive: true });
writeFileSync(
join(configDir, 'development.json'),
JSON.stringify({
app: {
name: 'test',
settings: {
ref: 'settings'
}
}
})
);
process.env.APP_SETTINGS_NESTED_VALUE = 'deep-value';
const manager = new ConfigManager({
loaders: [
new FileLoader(configDir, 'development'),
new EnvLoader('')
]
});
const config = await manager.initialize(appConfigSchema);
expect(config.app.name).toBe('test');
});
test('should handle extremely deep nesting in environment variables', async () => {
// Test very deep nesting
process.env.LEVEL1_LEVEL2_LEVEL3_LEVEL4_LEVEL5_VALUE = 'deep-value';
const manager = new ConfigManager({
loaders: [new EnvLoader('', { nestedDelimiter: '_' })]
});
const config = await manager.initialize();
// Should create nested structure
expect((config as any).level1?.level2?.level3?.level4?.level5?.value).toBe('deep-value');
});
test('should handle conflicting data types in config merging', async () => {
const configDir = join(TEST_DIR, 'config');
mkdirSync(configDir, { recursive: true });
// File config has object
writeFileSync(
join(configDir, 'development.json'),
JSON.stringify({
database: {
host: 'localhost',
port: 5432
}
})
);
// Environment variable tries to override with string
process.env.DATABASE = 'simple-string';
const manager = new ConfigManager({
loaders: [
new FileLoader(configDir, 'development'),
new EnvLoader('')
]
});
const config = await manager.initialize(appConfigSchema);
// Environment variable should win
expect(config.database).toBe('simple-string');
});
test('should handle different working directories', async () => {
// Create multiple config setups in different directories
const dir1 = join(TEST_DIR, 'dir1');
const dir2 = join(TEST_DIR, 'dir2');
mkdirSync(join(dir1, 'config'), { recursive: true });
mkdirSync(join(dir2, 'config'), { recursive: true });
writeFileSync(
join(dir1, 'config', 'development.json'),
JSON.stringify({ app: { name: 'dir1-app' } })
);
writeFileSync(
join(dir2, 'config', 'development.json'),
JSON.stringify({ app: { name: 'dir2-app' } })
);
// Test from dir1
process.chdir(dir1);
resetConfig();
let config = await initializeConfig();
expect(config.app.name).toBe('dir1-app');
// Test from dir2
process.chdir(dir2);
resetConfig();
config = await initializeConfig();
expect(config.app.name).toBe('dir2-app');
});
test('should handle malformed .env files', async () => {
// Create malformed .env file
writeFileSync(
join(TEST_DIR, '.env'),
`# Good line
VALID_KEY=valid_value
# Malformed lines
MISSING_VALUE=
=MISSING_KEY
SPACES IN KEY=value
KEY_WITH_QUOTES="quoted value"
KEY_WITH_SINGLE_QUOTES='single quoted'
# Complex value
JSON_VALUE={"key": "value", "nested": {"array": [1, 2, 3]}}
`
);
process.chdir(TEST_DIR);
const manager = new ConfigManager({
loaders: [new EnvLoader('')]
});
const config = await manager.initialize();
// Should handle valid entries
expect(process.env.VALID_KEY).toBe('valid_value');
expect(process.env.KEY_WITH_QUOTES).toBe('quoted value');
expect(process.env.KEY_WITH_SINGLE_QUOTES).toBe('single quoted');
});
test('should handle empty config files', async () => {
const configDir = join(TEST_DIR, 'config');
mkdirSync(configDir, { recursive: true });
// Create empty JSON file
writeFileSync(join(configDir, 'development.json'), '{}');
const manager = new ConfigManager({
loaders: [new FileLoader(configDir, 'development')]
});
const config = await manager.initialize(appConfigSchema);
expect(config).toBeDefined();
expect(config.environment).toBe('development'); // Should have default
});
test('should handle config initialization without schema', async () => {
const manager = new ConfigManager({
loaders: [new EnvLoader('')]
});
// Initialize without schema
const config = await manager.initialize();
expect(config).toBeDefined();
expect(typeof config).toBe('object');
});
test('should handle accessing config before initialization', () => {
const manager = new ConfigManager({
loaders: [new EnvLoader('')]
});
// Should throw error when accessing uninitialized config
expect(() => manager.get()).toThrow('Configuration not initialized');
expect(() => manager.getValue('some.path')).toThrow('Configuration not initialized');
expect(() => manager.has('some.path')).toThrow('Configuration not initialized');
});
test('should handle invalid config paths in getValue', async () => {
const manager = new ConfigManager({
loaders: [new EnvLoader('')]
});
const config = await manager.initialize(appConfigSchema);
// Should throw for invalid paths
expect(() => manager.getValue('nonexistent.path')).toThrow('Configuration key not found');
expect(() => manager.getValue('app.nonexistent')).toThrow('Configuration key not found');
// Should work for valid paths
expect(() => manager.getValue('environment')).not.toThrow();
});
test('should handle null and undefined values in config', async () => {
process.env.NULL_VALUE = 'null';
process.env.UNDEFINED_VALUE = 'undefined';
process.env.EMPTY_VALUE = '';
const manager = new ConfigManager({
loaders: [new EnvLoader('')]
});
const config = await manager.initialize();
expect((config as any).null_value).toBe(null);
expect((config as any).undefined_value).toBe(undefined);
expect((config as any).empty_value).toBe('');
});
test('should handle schema validation failures', async () => {
// Set up config that will fail schema validation
process.env.APP_NAME = 'valid-name';
process.env.APP_VERSION = 'valid-version';
process.env.SERVICE_PORT = 'not-a-number'; // This should cause validation to fail
const manager = new ConfigManager({
loaders: [new EnvLoader('')]
});
await expect(manager.initialize(appConfigSchema)).rejects.toThrow(ConfigValidationError);
});
test('should handle config updates with invalid schema', async () => {
const manager = new ConfigManager({
loaders: [new EnvLoader('')]
});
await manager.initialize(appConfigSchema);
// Try to update with invalid data
expect(() => {
manager.set({
service: {
port: 'invalid-port' as any
}
});
}).toThrow(ConfigValidationError);
});
test('should handle loader priority conflicts', async () => {
const configDir = join(TEST_DIR, 'config');
mkdirSync(configDir, { recursive: true });
writeFileSync(
join(configDir, 'development.json'),
JSON.stringify({ app: { name: 'file-config' } })
);
process.env.APP_NAME = 'env-config';
// Create loaders with different priorities
const manager = new ConfigManager({
loaders: [
new FileLoader(configDir, 'development'), // priority 50
new EnvLoader('') // priority 100
]
});
const config = await manager.initialize(appConfigSchema);
// Environment should win due to higher priority
expect(config.app.name).toBe('env-config');
});
test('should handle readonly environment variables', async () => {
// Some system environment variables might be readonly
const originalPath = process.env.PATH;
// This should not cause the loader to fail
const manager = new ConfigManager({
loaders: [new EnvLoader('')]
});
const config = await manager.initialize();
expect(config).toBeDefined();
// PATH should not be modified
expect(process.env.PATH).toBe(originalPath);
});
});

View file

@ -0,0 +1,208 @@
import { describe, test, expect, beforeEach, afterEach } from 'bun:test';
import { writeFileSync, mkdirSync, rmSync } from 'fs';
import { join } from 'path';
import {
initializeConfig,
getConfig,
getConfigManager,
resetConfig,
getDatabaseConfig,
getServiceConfig,
getLoggingConfig,
getProviderConfig,
isDevelopment,
isProduction,
isTest,
} from '../src';
describe('Config Module', () => {
const testConfigDir = join(process.cwd(), 'test-config-module');
const originalEnv = { ...process.env };
beforeEach(() => {
resetConfig();
mkdirSync(testConfigDir, { recursive: true });
// Create test configuration files
const config = {
name: 'test-app',
version: '1.0.0',
service: {
name: 'test-service',
port: 3000,
},
database: {
postgres: {
host: 'localhost',
port: 5432,
database: 'testdb',
user: 'testuser',
password: 'testpass',
},
questdb: {
host: 'localhost',
httpPort: 9000,
pgPort: 8812,
},
mongodb: {
host: 'localhost',
port: 27017,
database: 'testdb',
},
dragonfly: {
host: 'localhost',
port: 6379,
},
},
logging: {
level: 'info',
format: 'json',
},
providers: {
yahoo: {
enabled: true,
rateLimit: 5,
},
qm: {
enabled: false,
apiKey: 'test-key',
},
},
environment: 'test',
};
writeFileSync(
join(testConfigDir, 'default.json'),
JSON.stringify(config, null, 2)
);
});
afterEach(() => {
resetConfig();
rmSync(testConfigDir, { recursive: true, force: true });
process.env = { ...originalEnv };
});
test('should initialize configuration', async () => {
const config = await initializeConfig(testConfigDir);
expect(config.app.name).toBe('test-app');
expect(config.service.port).toBe(3000);
expect(config.environment).toBe('test');
});
test('should get configuration after initialization', async () => {
await initializeConfig(testConfigDir);
const config = getConfig();
expect(config.app.name).toBe('test-app');
expect(config.database.postgres.host).toBe('localhost');
});
test('should throw if getting config before initialization', () => {
expect(() => getConfig()).toThrow('Configuration not initialized');
});
test('should get config manager instance', async () => {
await initializeConfig(testConfigDir);
const manager = getConfigManager();
expect(manager).toBeDefined();
expect(manager.get().app.name).toBe('test-app');
});
test('should get database configuration', async () => {
await initializeConfig(testConfigDir);
const dbConfig = getDatabaseConfig();
expect(dbConfig.postgres.host).toBe('localhost');
expect(dbConfig.questdb.httpPort).toBe(9000);
expect(dbConfig.mongodb.database).toBe('testdb');
});
test('should get service configuration', async () => {
await initializeConfig(testConfigDir);
const serviceConfig = getServiceConfig();
expect(serviceConfig.name).toBe('test-service');
expect(serviceConfig.port).toBe(3000);
});
test('should get logging configuration', async () => {
await initializeConfig(testConfigDir);
const loggingConfig = getLoggingConfig();
expect(loggingConfig.level).toBe('info');
expect(loggingConfig.format).toBe('json');
});
test('should get provider configuration', async () => {
await initializeConfig(testConfigDir);
const yahooConfig = getProviderConfig('yahoo');
expect(yahooConfig.enabled).toBe(true);
expect(yahooConfig.rateLimit).toBe(5);
const qmConfig = getProviderConfig('quoteMedia');
expect(qmConfig.enabled).toBe(false);
expect(qmConfig.apiKey).toBe('test-key');
});
test('should throw for non-existent provider', async () => {
await initializeConfig(testConfigDir);
expect(() => getProviderConfig('nonexistent')).toThrow(
'Provider configuration not found: nonexistent'
);
});
test('should check environment correctly', async () => {
await initializeConfig(testConfigDir);
expect(isTest()).toBe(true);
expect(isDevelopment()).toBe(false);
expect(isProduction()).toBe(false);
});
test('should handle environment overrides', async () => {
process.env.NODE_ENV = 'production';
process.env.STOCKBOT_APP__NAME = 'env-override-app';
process.env.STOCKBOT_DATABASE__POSTGRES__HOST = 'prod-db';
const prodConfig = {
database: {
postgres: {
host: 'prod-host',
port: 5432,
},
},
};
writeFileSync(
join(testConfigDir, 'production.json'),
JSON.stringify(prodConfig, null, 2)
);
const config = await initializeConfig(testConfigDir);
expect(config.environment).toBe('production');
expect(config.app.name).toBe('env-override-app');
expect(config.database.postgres.host).toBe('prod-db');
expect(isProduction()).toBe(true);
});
test('should reset configuration', async () => {
await initializeConfig(testConfigDir);
expect(() => getConfig()).not.toThrow();
resetConfig();
expect(() => getConfig()).toThrow('Configuration not initialized');
});
test('should maintain singleton instance', async () => {
const config1 = await initializeConfig(testConfigDir);
const config2 = await initializeConfig(testConfigDir);
expect(config1).toBe(config2);
});
});

View file

@ -0,0 +1,181 @@
import { describe, test, expect, beforeEach, afterEach } from 'bun:test';
import { writeFileSync, mkdirSync, rmSync } from 'fs';
import { join } from 'path';
import { EnvLoader } from '../src/loaders/env.loader';
import { FileLoader } from '../src/loaders/file.loader';
describe('EnvLoader', () => {
const originalEnv = { ...process.env };
afterEach(() => {
// Restore original environment
process.env = { ...originalEnv };
});
test('should load environment variables with prefix', async () => {
process.env.TEST_APP_NAME = 'env-app';
process.env.TEST_APP_VERSION = '1.0.0';
process.env.TEST_DATABASE_HOST = 'env-host';
process.env.TEST_DATABASE_PORT = '5432';
process.env.OTHER_VAR = 'should-not-load';
const loader = new EnvLoader('TEST_', { convertCase: false, nestedDelimiter: null });
const config = await loader.load();
expect(config.APP_NAME).toBe('env-app');
expect(config.APP_VERSION).toBe('1.0.0');
expect(config.DATABASE_HOST).toBe('env-host');
expect(config.DATABASE_PORT).toBe(5432); // Should be parsed as number
expect(config.OTHER_VAR).toBeUndefined();
});
test('should convert snake_case to camelCase', async () => {
process.env.TEST_DATABASE_CONNECTION_STRING = 'postgres://localhost';
process.env.TEST_API_KEY_SECRET = 'secret123';
const loader = new EnvLoader('TEST_', { convertCase: true });
const config = await loader.load();
expect(config.databaseConnectionString).toBe('postgres://localhost');
expect(config.apiKeySecret).toBe('secret123');
});
test('should parse JSON values', async () => {
process.env.TEST_SETTINGS = '{"feature": true, "limit": 100}';
process.env.TEST_NUMBERS = '[1, 2, 3]';
const loader = new EnvLoader('TEST_', { parseJson: true });
const config = await loader.load();
expect(config.SETTINGS).toEqual({ feature: true, limit: 100 });
expect(config.NUMBERS).toEqual([1, 2, 3]);
});
test('should parse boolean and number values', async () => {
process.env.TEST_ENABLED = 'true';
process.env.TEST_DISABLED = 'false';
process.env.TEST_PORT = '3000';
process.env.TEST_RATIO = '0.75';
const loader = new EnvLoader('TEST_', { parseValues: true });
const config = await loader.load();
expect(config.ENABLED).toBe(true);
expect(config.DISABLED).toBe(false);
expect(config.PORT).toBe(3000);
expect(config.RATIO).toBe(0.75);
});
test('should handle nested object structure', async () => {
process.env.TEST_APP__NAME = 'nested-app';
process.env.TEST_APP__SETTINGS__ENABLED = 'true';
process.env.TEST_DATABASE__HOST = 'localhost';
const loader = new EnvLoader('TEST_', {
parseValues: true,
nestedDelimiter: '__'
});
const config = await loader.load();
expect(config.APP).toEqual({
NAME: 'nested-app',
SETTINGS: {
ENABLED: true
}
});
expect(config.DATABASE).toEqual({
HOST: 'localhost'
});
});
});
describe('FileLoader', () => {
const testDir = join(process.cwd(), 'test-config');
beforeEach(() => {
mkdirSync(testDir, { recursive: true });
});
afterEach(() => {
rmSync(testDir, { recursive: true, force: true });
});
test('should load JSON configuration file', async () => {
const config = {
app: { name: 'file-app', version: '1.0.0' },
database: { host: 'localhost', port: 5432 }
};
writeFileSync(
join(testDir, 'default.json'),
JSON.stringify(config, null, 2)
);
const loader = new FileLoader(testDir);
const loaded = await loader.load();
expect(loaded).toEqual(config);
});
test('should load environment-specific configuration', async () => {
const defaultConfig = {
app: { name: 'app', port: 3000 },
database: { host: 'localhost' }
};
const prodConfig = {
app: { port: 8080 },
database: { host: 'prod-db' }
};
writeFileSync(
join(testDir, 'default.json'),
JSON.stringify(defaultConfig, null, 2)
);
writeFileSync(
join(testDir, 'production.json'),
JSON.stringify(prodConfig, null, 2)
);
const loader = new FileLoader(testDir, 'production');
const loaded = await loader.load();
expect(loaded).toEqual({
app: { name: 'app', port: 8080 },
database: { host: 'prod-db' }
});
});
test('should handle missing configuration files gracefully', async () => {
const loader = new FileLoader(testDir);
const loaded = await loader.load();
expect(loaded).toEqual({});
});
test('should throw on invalid JSON', async () => {
writeFileSync(
join(testDir, 'default.json'),
'invalid json content'
);
const loader = new FileLoader(testDir);
await expect(loader.load()).rejects.toThrow();
});
test('should support custom configuration', async () => {
const config = { custom: 'value' };
writeFileSync(
join(testDir, 'custom.json'),
JSON.stringify(config, null, 2)
);
const loader = new FileLoader(testDir);
const loaded = await loader.loadFile('custom.json');
expect(loaded).toEqual(config);
});
});

View file

@ -0,0 +1,320 @@
import { describe, test, expect, beforeEach, afterEach } from 'bun:test';
import { ConfigManager } from '../src/config-manager';
import { EnvLoader } from '../src/loaders/env.loader';
import { FileLoader } from '../src/loaders/file.loader';
import { appConfigSchema } from '../src/schemas';
import { resetConfig, getProviderConfig } from '../src/index';
import { join } from 'path';
import { mkdirSync, writeFileSync, rmSync, existsSync } from 'fs';
const TEST_DIR = join(__dirname, 'provider-tests');
describe('Provider Configuration Tests', () => {
let originalEnv: NodeJS.ProcessEnv;
beforeEach(() => {
// Save original environment
originalEnv = { ...process.env };
// Reset config singleton
resetConfig();
// Clean up test directory
if (existsSync(TEST_DIR)) {
rmSync(TEST_DIR, { recursive: true, force: true });
}
mkdirSync(TEST_DIR, { recursive: true });
});
afterEach(() => {
// Restore original environment
process.env = originalEnv;
// Clean up
resetConfig();
if (existsSync(TEST_DIR)) {
rmSync(TEST_DIR, { recursive: true, force: true });
}
});
test('should load WebShare provider config from environment variables', async () => {
// Set WebShare environment variables
process.env.WEBSHARE_API_KEY = 'test-webshare-key';
process.env.WEBSHARE_API_URL = 'https://custom.webshare.io/api/v2/';
process.env.WEBSHARE_ENABLED = 'true';
const manager = new ConfigManager({
loaders: [new EnvLoader('')]
});
const config = await manager.initialize(appConfigSchema);
expect(config.webshare).toBeDefined();
expect(config.webshare?.apiKey).toBe('test-webshare-key');
expect(config.webshare?.apiUrl).toBe('https://custom.webshare.io/api/v2/');
expect(config.webshare?.enabled).toBe(true);
});
test('should load EOD provider config from environment variables', async () => {
// Set EOD environment variables
process.env.EOD_API_KEY = 'test-eod-key';
process.env.EOD_BASE_URL = 'https://custom.eod.com/api';
process.env.EOD_TIER = 'all-in-one';
process.env.EOD_ENABLED = 'true';
process.env.EOD_PRIORITY = '10';
const manager = new ConfigManager({
loaders: [new EnvLoader('')]
});
const config = await manager.initialize(appConfigSchema);
expect(config.providers?.eod).toBeDefined();
expect(config.providers?.eod?.apiKey).toBe('test-eod-key');
expect(config.providers?.eod?.baseUrl).toBe('https://custom.eod.com/api');
expect(config.providers?.eod?.tier).toBe('all-in-one');
expect(config.providers?.eod?.enabled).toBe(true);
expect(config.providers?.eod?.priority).toBe(10);
});
test('should load Interactive Brokers provider config from environment variables', async () => {
// Set IB environment variables
process.env.IB_GATEWAY_HOST = 'ib-gateway.example.com';
process.env.IB_GATEWAY_PORT = '7497';
process.env.IB_CLIENT_ID = '123';
process.env.IB_ACCOUNT = 'DU123456';
process.env.IB_MARKET_DATA_TYPE = 'live';
process.env.IB_ENABLED = 'true';
process.env.IB_PRIORITY = '5';
const manager = new ConfigManager({
loaders: [new EnvLoader('')]
});
const config = await manager.initialize(appConfigSchema);
expect(config.providers?.ib).toBeDefined();
expect(config.providers?.ib?.gateway.host).toBe('ib-gateway.example.com');
expect(config.providers?.ib?.gateway.port).toBe(7497);
expect(config.providers?.ib?.gateway.clientId).toBe(123);
expect(config.providers?.ib?.account).toBe('DU123456');
expect(config.providers?.ib?.marketDataType).toBe('live');
expect(config.providers?.ib?.enabled).toBe(true);
expect(config.providers?.ib?.priority).toBe(5);
});
test('should load QuoteMedia provider config from environment variables', async () => {
// Set QM environment variables
process.env.QM_USERNAME = 'test-qm-user';
process.env.QM_PASSWORD = 'test-qm-pass';
process.env.QM_BASE_URL = 'https://custom.quotemedia.com/api';
process.env.QM_WEBMASTER_ID = 'webmaster123';
process.env.QM_ENABLED = 'true';
process.env.QM_PRIORITY = '15';
const manager = new ConfigManager({
loaders: [new EnvLoader('')]
});
const config = await manager.initialize(appConfigSchema);
expect(config.providers?.qm).toBeDefined();
expect(config.providers?.qm?.username).toBe('test-qm-user');
expect(config.providers?.qm?.password).toBe('test-qm-pass');
expect(config.providers?.qm?.baseUrl).toBe('https://custom.quotemedia.com/api');
expect(config.providers?.qm?.webmasterId).toBe('webmaster123');
expect(config.providers?.qm?.enabled).toBe(true);
expect(config.providers?.qm?.priority).toBe(15);
});
test('should load Yahoo Finance provider config from environment variables', async () => {
// Set Yahoo environment variables
process.env.YAHOO_BASE_URL = 'https://custom.yahoo.com/api';
process.env.YAHOO_COOKIE_JAR = 'false';
process.env.YAHOO_CRUMB = 'test-crumb';
process.env.YAHOO_ENABLED = 'true';
process.env.YAHOO_PRIORITY = '20';
const manager = new ConfigManager({
loaders: [new EnvLoader('')]
});
const config = await manager.initialize(appConfigSchema);
expect(config.providers?.yahoo).toBeDefined();
expect(config.providers?.yahoo?.baseUrl).toBe('https://custom.yahoo.com/api');
expect(config.providers?.yahoo?.cookieJar).toBe(false);
expect(config.providers?.yahoo?.crumb).toBe('test-crumb');
expect(config.providers?.yahoo?.enabled).toBe(true);
expect(config.providers?.yahoo?.priority).toBe(20);
});
test('should merge file config with environment variables', async () => {
// Create a config file
const configDir = join(TEST_DIR, 'config');
mkdirSync(configDir, { recursive: true });
writeFileSync(
join(configDir, 'development.json'),
JSON.stringify({
providers: {
eod: {
name: 'EOD Historical Data',
apiKey: 'file-eod-key',
baseUrl: 'https://file.eod.com/api',
tier: 'free',
enabled: false,
priority: 1
},
yahoo: {
name: 'Yahoo Finance',
baseUrl: 'https://file.yahoo.com',
enabled: true,
priority: 2
}
}
}, null, 2)
);
// Set environment variables that should override file config
process.env.EOD_API_KEY = 'env-eod-key';
process.env.EOD_ENABLED = 'true';
process.env.EOD_PRIORITY = '10';
process.env.YAHOO_PRIORITY = '25';
const manager = new ConfigManager({
loaders: [
new FileLoader(configDir, 'development'),
new EnvLoader('')
]
});
const config = await manager.initialize(appConfigSchema);
// EOD config should be merged (env overrides file)
expect(config.providers?.eod?.name).toBe('EOD Historical Data'); // From file
expect(config.providers?.eod?.apiKey).toBe('env-eod-key'); // From env
expect(config.providers?.eod?.baseUrl).toBe('https://file.eod.com/api'); // From file
expect(config.providers?.eod?.enabled).toBe(true); // From env (overrides file)
expect(config.providers?.eod?.priority).toBe(10); // From env (overrides file)
// Yahoo config should be merged
expect(config.providers?.yahoo?.name).toBe('Yahoo Finance'); // From file
expect(config.providers?.yahoo?.baseUrl).toBe('https://file.yahoo.com'); // From file
expect(config.providers?.yahoo?.priority).toBe(25); // From env (overrides file)
});
test('should handle invalid provider configurations', async () => {
// Set invalid values
process.env.EOD_TIER = 'invalid-tier'; // Should be one of ['free', 'fundamentals', 'all-in-one']
process.env.IB_MARKET_DATA_TYPE = 'invalid-type'; // Should be one of ['live', 'delayed', 'frozen']
process.env.IB_GATEWAY_PORT = 'not-a-number'; // Should be a number
const manager = new ConfigManager({
loaders: [new EnvLoader('')]
});
// Should throw validation error
await expect(manager.initialize(appConfigSchema)).rejects.toThrow();
});
test('should work with getProviderConfig helper function', async () => {
// Set up multiple providers
process.env.EOD_API_KEY = 'test-eod-key';
process.env.EOD_ENABLED = 'true';
process.env.WEBSHARE_API_KEY = 'test-webshare-key';
process.env.WEBSHARE_ENABLED = 'true';
const manager = new ConfigManager({
loaders: [new EnvLoader('')]
});
await manager.initialize(appConfigSchema);
// Test getProviderConfig helper
const eodConfig = getProviderConfig('eod');
expect(eodConfig).toBeDefined();
expect((eodConfig as any).apiKey).toBe('test-eod-key');
const webshareConfig = getProviderConfig('webshare');
expect(webshareConfig).toBeDefined();
expect((webshareConfig as any).apiKey).toBe('test-webshare-key');
// Test non-existent provider
expect(() => getProviderConfig('nonexistent')).toThrow('Provider configuration not found: nonexistent');
});
test('should handle boolean string parsing correctly', async () => {
// Test various boolean representations
process.env.EOD_ENABLED = 'TRUE';
process.env.YAHOO_ENABLED = 'False';
process.env.IB_ENABLED = '1';
process.env.QM_ENABLED = '0';
process.env.WEBSHARE_ENABLED = 'yes'; // Should be treated as string, not boolean
const manager = new ConfigManager({
loaders: [new EnvLoader('')]
});
const config = await manager.initialize(appConfigSchema);
expect(config.providers?.eod?.enabled).toBe(true);
expect(config.providers?.yahoo?.enabled).toBe(false);
expect(config.providers?.ib?.enabled).toBe(true); // 1 is parsed as number, not boolean
expect(config.providers?.qm?.enabled).toBe(false); // 0 is parsed as number, not boolean
// webshare.enabled should be the string 'yes', but schema validation might reject it
});
test('should handle nested configuration correctly', async () => {
// Test nested IB gateway configuration
process.env.IB_GATEWAY_HOST = 'gateway.ib.com';
process.env.IB_GATEWAY_PORT = '7497';
process.env.IB_GATEWAY_CLIENT_ID = '999';
const manager = new ConfigManager({
loaders: [new EnvLoader('')]
});
const config = await manager.initialize(appConfigSchema);
expect(config.providers?.ib?.gateway).toBeDefined();
expect(config.providers?.ib?.gateway.host).toBe('gateway.ib.com');
expect(config.providers?.ib?.gateway.port).toBe(7497);
expect(config.providers?.ib?.gateway.clientId).toBe(999);
});
test('should load provider configs from .env file', async () => {
// Create .env file with provider configs
writeFileSync(
join(TEST_DIR, '.env'),
`# Provider configurations
EOD_API_KEY=env-file-eod-key
EOD_ENABLED=true
WEBSHARE_API_KEY=env-file-webshare-key
IB_GATEWAY_HOST=env-file-ib-host
IB_GATEWAY_PORT=7498
YAHOO_BASE_URL=https://env-file.yahoo.com
`
);
const originalCwd = process.cwd();
try {
process.chdir(TEST_DIR);
const manager = new ConfigManager({
loaders: [new EnvLoader('')]
});
const config = await manager.initialize(appConfigSchema);
expect(config.providers?.eod?.apiKey).toBe('env-file-eod-key');
expect(config.providers?.eod?.enabled).toBe(true);
expect(config.webshare?.apiKey).toBe('env-file-webshare-key');
expect(config.providers?.ib?.gateway.host).toBe('env-file-ib-host');
expect(config.providers?.ib?.gateway.port).toBe(7498);
expect(config.providers?.yahoo?.baseUrl).toBe('https://env-file.yahoo.com');
} finally {
process.chdir(originalCwd);
}
});
});

View file

@ -0,0 +1,404 @@
import { afterEach, beforeEach, describe, expect, test } from 'bun:test';
import { existsSync, mkdirSync, rmSync, writeFileSync } from 'fs';
import { join } from 'path';
import {
getConfig,
getDatabaseConfig,
getLoggingConfig,
getProviderConfig,
getServiceConfig,
initializeServiceConfig,
isDevelopment,
isProduction,
isTest,
resetConfig
} from '../src/index';
const TEST_DIR = join(__dirname, 'real-usage-tests');
describe('Real Usage Scenarios', () => {
let originalEnv: NodeJS.ProcessEnv;
let originalCwd: string;
beforeEach(() => {
originalEnv = { ...process.env };
originalCwd = process.cwd();
resetConfig();
if (existsSync(TEST_DIR)) {
rmSync(TEST_DIR, { recursive: true, force: true });
}
setupRealUsageScenarios();
});
afterEach(() => {
process.env = originalEnv;
process.chdir(originalCwd);
resetConfig();
if (existsSync(TEST_DIR)) {
rmSync(TEST_DIR, { recursive: true, force: true });
}
});
test('should work like real data-ingestion usage', async () => {
const dataServiceDir = join(TEST_DIR, 'apps', 'data-ingestion');
process.chdir(dataServiceDir);
// Simulate how data-ingestion would initialize config
const config = await initializeServiceConfig();
// Test typical data-ingestion config access patterns
expect(config.app.name).toBe('data-ingestion');
expect(config.service.port).toBe(3001);
// Test database config access
const dbConfig = getDatabaseConfig();
expect(dbConfig.postgres.host).toBe('localhost');
expect(dbConfig.postgres.port).toBe(5432);
expect(dbConfig.questdb.host).toBe('localhost');
// Test provider access
const yahooConfig = getProviderConfig('yahoo');
expect(yahooConfig).toBeDefined();
expect((yahooConfig as any).enabled).toBe(true);
// Test environment helpers
expect(isDevelopment()).toBe(true);
expect(isProduction()).toBe(false);
});
test('should work like real web-api usage', async () => {
const webApiDir = join(TEST_DIR, 'apps', 'web-api');
process.chdir(webApiDir);
const config = await initializeServiceConfig();
expect(config.app.name).toBe('web-api');
expect(config.service.port).toBe(4000);
// Web API should have access to all the same configs
const serviceConfig = getServiceConfig();
expect(serviceConfig.name).toBe('web-api');
const loggingConfig = getLoggingConfig();
expect(loggingConfig.level).toBe('info');
});
test('should work like real shared library usage', async () => {
const cacheLibDir = join(TEST_DIR, 'libs', 'cache');
process.chdir(cacheLibDir);
const config = await initializeServiceConfig();
// Libraries should inherit from root config
expect(config.app.name).toBe('cache-lib');
expect(config.app.version).toBe('1.0.0'); // From root
// Should have access to cache config
const dbConfig = getDatabaseConfig();
expect(dbConfig.dragonfly).toBeDefined();
expect(dbConfig.dragonfly.host).toBe('localhost');
expect(dbConfig.dragonfly.port).toBe(6379);
});
test('should handle production environment correctly', async () => {
process.env.NODE_ENV = 'production';
const dataServiceDir = join(TEST_DIR, 'apps', 'data-ingestion');
process.chdir(dataServiceDir);
resetConfig();
const config = await initializeServiceConfig();
expect(config.environment).toBe('production');
expect(config.logging.level).toBe('warn'); // Production should use different log level
expect(isProduction()).toBe(true);
expect(isDevelopment()).toBe(false);
});
test('should handle test environment correctly', async () => {
process.env.NODE_ENV = 'test';
const dataServiceDir = join(TEST_DIR, 'apps', 'data-ingestion');
process.chdir(dataServiceDir);
resetConfig();
const config = await initializeServiceConfig();
expect(config.environment).toBe('test');
expect(config.logging.level).toBe('debug'); // Test should use debug level
expect(isTest()).toBe(true);
expect(isDevelopment()).toBe(false);
});
test('should work with environment variable overrides in production', async () => {
process.env.NODE_ENV = 'production';
process.env.DATABASE_POSTGRES_HOST = 'prod-db.example.com';
process.env.DATABASE_POSTGRES_PORT = '5433';
process.env.EOD_API_KEY = 'prod-eod-key';
process.env.SERVICE_PORT = '8080';
const dataServiceDir = join(TEST_ROOT, 'apps', 'data-ingestion');
process.chdir(dataServiceDir);
resetConfig();
const config = await initializeServiceConfig();
// Environment variables should override file configs
const dbConfig = getDatabaseConfig();
expect(dbConfig.postgres.host).toBe('prod-db.example.com');
expect(dbConfig.postgres.port).toBe(5433);
const serviceConfig = getServiceConfig();
expect(serviceConfig.port).toBe(8080);
const eodConfig = getProviderConfig('eod');
expect((eodConfig as any).apiKey).toBe('prod-eod-key');
});
test('should handle missing provider configurations gracefully', async () => {
const dataServiceDir = join(TEST_DIR, 'apps', 'data-ingestion');
process.chdir(dataServiceDir);
const config = await initializeServiceConfig();
// Should throw for non-existent providers
expect(() => getProviderConfig('nonexistent')).toThrow('Provider configuration not found: nonexistent');
// Should work for providers that exist but might not be configured
// (they should have defaults from schema)
const yahooConfig = getProviderConfig('yahoo');
expect(yahooConfig).toBeDefined();
});
test('should support dynamic config access patterns', async () => {
const dataServiceDir = join(TEST_DIR, 'apps', 'data-ingestion');
process.chdir(dataServiceDir);
const config = await initializeServiceConfig();
// Test various access patterns used in real applications
const configManager = (await import('../src/index')).getConfigManager();
// Direct path access
expect(configManager.getValue('app.name')).toBe('data-ingestion');
expect(configManager.getValue('service.port')).toBe(3001);
// Check if paths exist
expect(configManager.has('app.name')).toBe(true);
expect(configManager.has('nonexistent.path')).toBe(false);
// Typed access
const port = configManager.getValue<number>('service.port');
expect(typeof port).toBe('number');
expect(port).toBe(3001);
});
test('should handle config updates at runtime', async () => {
const dataServiceDir = join(TEST_DIR, 'apps', 'data-ingestion');
process.chdir(dataServiceDir);
await initializeServiceConfig();
const configManager = (await import('../src/index')).getConfigManager();
// Update config at runtime (useful for testing)
configManager.set({
service: {
port: 9999
}
});
const updatedConfig = getConfig();
expect(updatedConfig.service.port).toBe(9999);
// Other values should be preserved
expect(updatedConfig.app.name).toBe('data-ingestion');
});
test('should work across multiple service initializations', async () => {
// Simulate multiple services in the same process (like tests)
// First service
const dataServiceDir = join(TEST_DIR, 'apps', 'data-ingestion');
process.chdir(dataServiceDir);
let config = await initializeServiceConfig();
expect(config.app.name).toBe('data-ingestion');
// Reset and switch to another service
resetConfig();
const webApiDir = join(TEST_DIR, 'apps', 'web-api');
process.chdir(webApiDir);
config = await initializeServiceConfig();
expect(config.app.name).toBe('web-api');
// Each service should get its own config
expect(config.service.port).toBe(4000); // web-api port
});
});
const TEST_ROOT = TEST_DIR;
function setupRealUsageScenarios() {
const scenarios = {
root: TEST_ROOT,
dataService: join(TEST_ROOT, 'apps', 'data-ingestion'),
webApi: join(TEST_ROOT, 'apps', 'web-api'),
cacheLib: join(TEST_ROOT, 'libs', 'cache'),
};
// Create directory structure
Object.values(scenarios).forEach(dir => {
mkdirSync(join(dir, 'config'), { recursive: true });
});
// Root config (monorepo/config/*)
const rootConfigs = {
development: {
app: {
name: 'stock-bot-monorepo',
version: '1.0.0'
},
database: {
postgres: {
host: 'localhost',
port: 5432,
database: 'trading_bot',
username: 'trading_user',
password: 'trading_pass_dev'
},
questdb: {
host: 'localhost',
port: 9009,
database: 'questdb'
},
mongodb: {
host: 'localhost',
port: 27017,
database: 'stock'
},
dragonfly: {
host: 'localhost',
port: 6379
}
},
logging: {
level: 'info',
format: 'json'
},
providers: {
yahoo: {
name: 'Yahoo Finance',
enabled: true,
priority: 1,
baseUrl: 'https://query1.finance.yahoo.com'
},
eod: {
name: 'EOD Historical Data',
enabled: false,
priority: 2,
apiKey: 'demo-api-key',
baseUrl: 'https://eodhistoricaldata.com/api'
}
}
},
production: {
logging: {
level: 'warn'
},
database: {
postgres: {
host: 'prod-postgres.internal',
port: 5432
}
}
},
test: {
logging: {
level: 'debug'
},
database: {
postgres: {
database: 'trading_bot_test'
}
}
}
};
Object.entries(rootConfigs).forEach(([env, config]) => {
writeFileSync(
join(scenarios.root, 'config', `${env}.json`),
JSON.stringify(config, null, 2)
);
});
// Data service config
writeFileSync(
join(scenarios.dataService, 'config', 'development.json'),
JSON.stringify({
app: {
name: 'data-ingestion'
},
service: {
name: 'data-ingestion',
port: 3001,
workers: 2
}
}, null, 2)
);
// Web API config
writeFileSync(
join(scenarios.webApi, 'config', 'development.json'),
JSON.stringify({
app: {
name: 'web-api'
},
service: {
name: 'web-api',
port: 4000,
cors: {
origin: ['http://localhost:3000', 'http://localhost:4200']
}
}
}, null, 2)
);
// Cache lib config
writeFileSync(
join(scenarios.cacheLib, 'config', 'development.json'),
JSON.stringify({
app: {
name: 'cache-lib'
},
service: {
name: 'cache-lib'
}
}, null, 2)
);
// Root .env file
writeFileSync(
join(scenarios.root, '.env'),
`NODE_ENV=development
DEBUG=true
# Provider API keys
EOD_API_KEY=demo-key
WEBSHARE_API_KEY=demo-webshare-key
`
);
// Service-specific .env files
writeFileSync(
join(scenarios.dataService, '.env'),
`SERVICE_DEBUG=true
DATA_SERVICE_RATE_LIMIT=1000
`
);
}

View file

@ -0,0 +1,11 @@
{
"extends": "../../../tsconfig.json",
"compilerOptions": {
"outDir": "./dist",
"rootDir": "./src",
"composite": true
},
"include": ["src/**/*"],
"references": [
]
}

View file

@ -0,0 +1,19 @@
{
"extends": ["//"],
"tasks": {
"build": {
"dependsOn": [],
"outputs": ["dist/**"],
"inputs": [
"src/**",
"package.json",
"tsconfig.json",
"!**/*.test.ts",
"!**/*.spec.ts",
"!**/test/**",
"!**/tests/**",
"!**/__tests__/**"
]
}
}
}

View file

@ -0,0 +1,280 @@
import { getLogger, type Logger } from '@stock-bot/logger';
import { MongoDBClient, createMongoDBClient, type ConnectionEvents } from '@stock-bot/mongodb';
import { PostgreSQLClient, createPostgreSQLClient } from '@stock-bot/postgres';
import { createCache, type CacheProvider } from '@stock-bot/cache';
import { QueueManager } from '@stock-bot/queue';
import type {
ConnectionFactory as IConnectionFactory,
ConnectionPool,
ConnectionFactoryConfig,
MongoDBPoolConfig,
PostgreSQLPoolConfig,
CachePoolConfig,
QueuePoolConfig,
PoolMetrics,
} from './types';
export class ConnectionFactory implements IConnectionFactory {
private readonly logger: Logger;
private readonly pools: Map<string, ConnectionPool<any>> = new Map();
private readonly config: ConnectionFactoryConfig;
constructor(config: ConnectionFactoryConfig) {
this.config = config;
this.logger = getLogger(`connection-factory:${config.service}`);
}
async createMongoDB(poolConfig: MongoDBPoolConfig): Promise<ConnectionPool<MongoDBClient>> {
const key = `mongodb:${poolConfig.name}`;
if (this.pools.has(key)) {
this.logger.debug('Reusing existing MongoDB pool', { name: poolConfig.name });
return this.pools.get(key)!;
}
this.logger.info('Creating MongoDB connection pool', {
name: poolConfig.name,
poolSize: poolConfig.poolSize,
});
try {
const events: ConnectionEvents = {
onConnect: () => {
this.logger.debug('MongoDB connected', { pool: poolConfig.name });
},
onDisconnect: () => {
this.logger.debug('MongoDB disconnected', { pool: poolConfig.name });
},
onError: (error) => {
this.logger.error('MongoDB error', { pool: poolConfig.name, error });
},
};
const client = createMongoDBClient({
...poolConfig.config,
poolSettings: {
maxPoolSize: poolConfig.maxConnections || poolConfig.poolSize || 10,
minPoolSize: poolConfig.minConnections || 2,
maxIdleTime: 30000,
}
}, events);
await client.connect();
// Warm up the pool
if (poolConfig.minConnections) {
await client.warmupPool();
}
const pool: ConnectionPool<MongoDBClient> = {
name: poolConfig.name,
client,
metrics: client.getPoolMetrics(),
health: async () => {
try {
await client.getDatabase().admin().ping();
return true;
} catch {
return false;
}
},
dispose: async () => {
await client.disconnect();
this.pools.delete(key);
},
};
this.pools.set(key, pool);
return pool;
} catch (error) {
this.logger.error('Failed to create MongoDB pool', { name: poolConfig.name, error });
throw error;
}
}
async createPostgreSQL(poolConfig: PostgreSQLPoolConfig): Promise<ConnectionPool<PostgreSQLClient>> {
const key = `postgres:${poolConfig.name}`;
if (this.pools.has(key)) {
this.logger.debug('Reusing existing PostgreSQL pool', { name: poolConfig.name });
return this.pools.get(key)!;
}
this.logger.info('Creating PostgreSQL connection pool', {
name: poolConfig.name,
poolSize: poolConfig.poolSize,
});
try {
const events: ConnectionEvents = {
onConnect: () => {
this.logger.debug('PostgreSQL connected', { pool: poolConfig.name });
},
onDisconnect: () => {
this.logger.debug('PostgreSQL disconnected', { pool: poolConfig.name });
},
onError: (error) => {
this.logger.error('PostgreSQL error', { pool: poolConfig.name, error });
},
};
const client = createPostgreSQLClient({
...poolConfig.config,
poolSettings: {
max: poolConfig.maxConnections || poolConfig.poolSize || 10,
min: poolConfig.minConnections || 2,
idleTimeoutMillis: poolConfig.idleTimeoutMillis || 30000,
},
}, undefined, events);
await client.connect();
// Warm up the pool
if (poolConfig.minConnections) {
await client.warmupPool();
}
const pool: ConnectionPool<PostgreSQLClient> = {
name: poolConfig.name,
client,
metrics: client.getPoolMetrics(),
health: async () => client.connected,
dispose: async () => {
await client.disconnect();
this.pools.delete(key);
},
};
this.pools.set(key, pool);
return pool;
} catch (error) {
this.logger.error('Failed to create PostgreSQL pool', { name: poolConfig.name, error });
throw error;
}
}
createCache(poolConfig: CachePoolConfig): ConnectionPool<CacheProvider> {
const key = `cache:${poolConfig.name}`;
if (this.pools.has(key)) {
this.logger.debug('Reusing existing cache pool', { name: poolConfig.name });
return this.pools.get(key)!;
}
this.logger.info('Creating cache connection pool', {
name: poolConfig.name,
});
try {
const cache = createCache({
...poolConfig.config,
keyPrefix: `${this.config.service}:${poolConfig.name}:`,
shared: false, // Each pool gets its own connection
});
const pool: ConnectionPool<CacheProvider> = {
name: poolConfig.name,
client: cache,
metrics: this.createInitialMetrics(),
health: async () => cache.health(),
dispose: async () => {
// Cache disposal handled internally
this.pools.delete(key);
},
};
this.pools.set(key, pool);
return pool;
} catch (error) {
this.logger.error('Failed to create cache pool', { name: poolConfig.name, error });
throw error;
}
}
createQueue(poolConfig: QueuePoolConfig): ConnectionPool<QueueManager> {
const key = `queue:${poolConfig.name}`;
if (this.pools.has(key)) {
this.logger.debug('Reusing existing queue manager', { name: poolConfig.name });
return this.pools.get(key)!;
}
this.logger.info('Creating queue manager', {
name: poolConfig.name,
});
try {
// Initialize or get existing QueueManager instance
const queueManager = QueueManager.getOrInitialize(poolConfig.config);
const pool: ConnectionPool<QueueManager> = {
name: poolConfig.name,
client: queueManager,
metrics: this.createInitialMetrics(),
health: async () => {
try {
// Check if QueueManager is initialized
queueManager.getQueueNames();
return true;
} catch {
return false;
}
},
dispose: async () => {
// QueueManager handles its own shutdown
await queueManager.shutdown();
this.pools.delete(key);
},
};
this.pools.set(key, pool);
return pool;
} catch (error) {
this.logger.error('Failed to create queue manager', { name: poolConfig.name, error });
throw error;
}
}
getPool(type: 'mongodb' | 'postgres' | 'cache' | 'queue', name: string): ConnectionPool<any> | undefined {
const key = `${type}:${name}`;
return this.pools.get(key);
}
listPools(): Array<{ type: string; name: string; metrics: PoolMetrics }> {
const result: Array<{ type: string; name: string; metrics: PoolMetrics }> = [];
for (const [key, pool] of this.pools.entries()) {
const [type, ...nameParts] = key.split(':');
result.push({
type: type || 'unknown',
name: nameParts.join(':'),
metrics: pool.metrics,
});
}
return result;
}
async disposeAll(): Promise<void> {
this.logger.info('Disposing all connection pools', { count: this.pools.size });
const disposePromises: Promise<void>[] = [];
for (const pool of this.pools.values()) {
disposePromises.push(pool.dispose());
}
await Promise.all(disposePromises);
this.pools.clear();
}
private createInitialMetrics(): PoolMetrics {
return {
created: new Date(),
totalConnections: 0,
activeConnections: 0,
idleConnections: 0,
waitingRequests: 0,
errors: 0,
};
}
}

6
libs/core/di/index.ts Normal file
View file

@ -0,0 +1,6 @@
// Export all dependency injection components
export * from './service-container';
export * from './connection-factory';
export * from './operation-context';
export * from './pool-size-calculator';
export * from './types';

View file

@ -0,0 +1,307 @@
/**
* OperationContext - Unified context for handler operations
*
* Provides streamlined access to:
* - Child loggers with hierarchical context
* - Database clients (MongoDB, PostgreSQL)
* - Contextual cache with automatic key prefixing
* - Shared resource management
*/
import { createCache, type CacheProvider } from '@stock-bot/cache';
import { getLogger, type Logger } from '@stock-bot/logger';
import { getDatabaseConfig } from '@stock-bot/config';
import type { ServiceResolver } from './service-container';
import type { MongoDBClient } from '@stock-bot/mongodb';
import type { PostgreSQLClient } from '@stock-bot/postgres';
export interface OperationContextOptions {
handlerName: string;
operationName: string;
parentLogger?: Logger;
container?: ServiceResolver;
}
export class OperationContext {
public readonly logger: Logger;
private readonly container?: ServiceResolver;
private _mongodb?: MongoDBClient;
private _postgres?: PostgreSQLClient;
private _cache?: CacheProvider;
private _queue?: any; // Type will be QueueManager but we avoid import for circular deps
private static sharedCache: CacheProvider | null = null;
private static parentLoggers = new Map<string, Logger>();
private static databaseConfig: any = null;
constructor(
public readonly handlerName: string,
public readonly operationName: string,
parentLoggerOrOptions?: Logger | OperationContextOptions
) {
// Handle both old and new constructor signatures
if (parentLoggerOrOptions && 'container' in parentLoggerOrOptions) {
const options = parentLoggerOrOptions;
this.container = options.container;
const parent = options.parentLogger || this.getOrCreateParentLogger();
this.logger = parent.child(operationName, {
handler: handlerName,
operation: operationName
});
} else {
// Legacy support
const parentLogger = parentLoggerOrOptions as Logger | undefined;
const parent = parentLogger || this.getOrCreateParentLogger();
this.logger = parent.child(operationName, {
handler: handlerName,
operation: operationName
});
}
}
// Lazy load MongoDB client
get mongodb(): MongoDBClient {
if (!this._mongodb) {
if (this.container) {
try {
this._mongodb = this.container.resolve<MongoDBClient>('mongodb');
} catch (error) {
this.logger.warn('Failed to resolve MongoDB from container, falling back to singleton', { error });
this._mongodb = this.getLegacyDatabaseClient('mongodb') as MongoDBClient;
}
} else {
this._mongodb = this.getLegacyDatabaseClient('mongodb') as MongoDBClient;
}
}
return this._mongodb!;
}
// Lazy load PostgreSQL client
get postgres(): PostgreSQLClient {
if (!this._postgres) {
if (this.container) {
try {
this._postgres = this.container.resolve<PostgreSQLClient>('postgres');
} catch (error) {
this.logger.warn('Failed to resolve PostgreSQL from container, falling back to singleton', { error });
this._postgres = this.getLegacyDatabaseClient('postgres') as PostgreSQLClient;
}
} else {
this._postgres = this.getLegacyDatabaseClient('postgres') as PostgreSQLClient;
}
}
return this._postgres!;
}
// Lazy load QueueManager
get queue(): any {
if (!this._queue) {
if (this.container) {
try {
this._queue = this.container.resolve('queue');
} catch (error) {
this.logger.warn('Failed to resolve QueueManager from container, falling back to singleton', { error });
this._queue = this.getLegacyQueueManager();
}
} else {
this._queue = this.getLegacyQueueManager();
}
}
return this._queue!;
}
// Legacy method for QueueManager
private getLegacyQueueManager(): any {
try {
// Dynamic import to avoid TypeScript issues during build
const { QueueManager } = require('@stock-bot/queue');
return QueueManager.getInstance();
} catch (error) {
this.logger.warn('QueueManager not initialized, queue operations may fail', { error });
throw new Error('QueueManager not available');
}
}
// Legacy method for backward compatibility
private getLegacyDatabaseClient(type: 'mongodb' | 'postgres'): any {
try {
if (type === 'mongodb') {
// Dynamic import to avoid TypeScript issues during build
const { getMongoDBClient } = require('@stock-bot/mongodb');
return getMongoDBClient();
} else {
// Dynamic import to avoid TypeScript issues during build
const { getPostgreSQLClient } = require('@stock-bot/postgres');
return getPostgreSQLClient();
}
} catch (error) {
this.logger.warn(`${type} client not initialized, operations may fail`, { error });
return null;
}
}
private getOrCreateParentLogger(): Logger {
const parentKey = `${this.handlerName}-handler`;
if (!OperationContext.parentLoggers.has(parentKey)) {
const parentLogger = getLogger(parentKey);
OperationContext.parentLoggers.set(parentKey, parentLogger);
}
return OperationContext.parentLoggers.get(parentKey)!;
}
/**
* Get contextual cache with automatic key prefixing
* Keys are automatically prefixed as: "operations:handlerName:operationName:key"
*/
get cache(): CacheProvider {
if (!this._cache) {
if (this.container) {
try {
const baseCache = this.container.resolve<CacheProvider>('cache');
this._cache = this.createContextualCache(baseCache);
} catch (error) {
this.logger.warn('Failed to resolve cache from container, using shared cache', { error });
this._cache = this.getOrCreateSharedCache();
}
} else {
this._cache = this.getOrCreateSharedCache();
}
}
return this._cache!;
}
private getOrCreateSharedCache(): CacheProvider {
if (!OperationContext.sharedCache) {
// Get Redis configuration from database config
if (!OperationContext.databaseConfig) {
OperationContext.databaseConfig = getDatabaseConfig();
}
const redisConfig = OperationContext.databaseConfig.dragonfly || {
host: 'localhost',
port: 6379,
db: 1
};
OperationContext.sharedCache = createCache({
keyPrefix: 'operations:',
shared: true, // Use singleton Redis connection
enableMetrics: true,
ttl: 3600, // Default 1 hour TTL
redisConfig
});
}
return this.createContextualCache(OperationContext.sharedCache);
}
private createContextualCache(baseCache: CacheProvider): CacheProvider {
const contextPrefix = `${this.handlerName}:${this.operationName}:`;
// Return a proxy that automatically prefixes keys with context
return {
async get<T>(key: string): Promise<T | null> {
return baseCache.get(`${contextPrefix}${key}`);
},
async set<T>(key: string, value: T, options?: any): Promise<T | null> {
return baseCache.set(`${contextPrefix}${key}`, value, options);
},
async del(key: string): Promise<void> {
return baseCache.del(`${contextPrefix}${key}`);
},
async exists(key: string): Promise<boolean> {
return baseCache.exists(`${contextPrefix}${key}`);
},
async clear(): Promise<void> {
// Not implemented for contextual cache - use del() for specific keys
throw new Error('clear() not implemented for contextual cache - use del() for specific keys');
},
async keys(pattern: string): Promise<string[]> {
const fullPattern = `${contextPrefix}${pattern}`;
return baseCache.keys(fullPattern);
},
getStats() {
return baseCache.getStats();
},
async health(): Promise<boolean> {
return baseCache.health();
},
async waitForReady(timeout?: number): Promise<void> {
return baseCache.waitForReady(timeout);
},
isReady(): boolean {
return baseCache.isReady();
}
} as CacheProvider;
}
/**
* Factory method to create OperationContext
*/
static create(handlerName: string, operationName: string, parentLoggerOrOptions?: Logger | OperationContextOptions): OperationContext {
if (parentLoggerOrOptions && 'container' in parentLoggerOrOptions) {
return new OperationContext(handlerName, operationName, {
...parentLoggerOrOptions,
handlerName,
operationName
});
}
return new OperationContext(handlerName, operationName, parentLoggerOrOptions as Logger | undefined);
}
/**
* Get cache key prefix for this operation context
*/
getCacheKeyPrefix(): string {
return `operations:${this.handlerName}:${this.operationName}:`;
}
/**
* Create a child context for sub-operations
*/
createChild(subOperationName: string): OperationContext {
if (this.container) {
return new OperationContext(
this.handlerName,
`${this.operationName}:${subOperationName}`,
{
handlerName: this.handlerName,
operationName: `${this.operationName}:${subOperationName}`,
parentLogger: this.logger,
container: this.container
}
);
}
return new OperationContext(
this.handlerName,
`${this.operationName}:${subOperationName}`,
this.logger
);
}
/**
* Dispose of resources if using container-based connections
* This is a no-op for legacy singleton connections
*/
async dispose(): Promise<void> {
// If using container, it will handle cleanup
// For singleton connections, they persist
this.logger.debug('OperationContext disposed', {
handler: this.handlerName,
operation: this.operationName,
hasContainer: !!this.container
});
}
}
export default OperationContext;

23
libs/core/di/package.json Normal file
View file

@ -0,0 +1,23 @@
{
"name": "@stock-bot/di",
"version": "1.0.0",
"main": "./src/index.ts",
"types": "./src/index.ts",
"scripts": {
"build": "tsc",
"clean": "rm -rf dist"
},
"dependencies": {
"@stock-bot/config": "workspace:*",
"@stock-bot/logger": "workspace:*",
"@stock-bot/mongodb": "workspace:*",
"@stock-bot/postgres": "workspace:*",
"@stock-bot/cache": "workspace:*",
"@stock-bot/queue": "workspace:*",
"mongodb": "^6.3.0",
"pg": "^8.11.3"
},
"devDependencies": {
"@types/pg": "^8.10.7"
}
}

View file

@ -0,0 +1,80 @@
import type { ConnectionPoolConfig } from './types';
export interface PoolSizeRecommendation {
min: number;
max: number;
idle: number;
}
export class PoolSizeCalculator {
private static readonly DEFAULT_SIZES: Record<string, PoolSizeRecommendation> = {
// Service-level defaults
'data-ingestion': { min: 5, max: 50, idle: 10 },
'data-pipeline': { min: 3, max: 30, idle: 5 },
'processing-service': { min: 2, max: 20, idle: 3 },
'web-api': { min: 2, max: 10, idle: 2 },
'portfolio-service': { min: 2, max: 15, idle: 3 },
'strategy-service': { min: 3, max: 25, idle: 5 },
'execution-service': { min: 2, max: 10, idle: 2 },
// Handler-level defaults
'batch-import': { min: 10, max: 100, idle: 20 },
'real-time': { min: 2, max: 10, idle: 3 },
'analytics': { min: 5, max: 30, idle: 10 },
'reporting': { min: 3, max: 20, idle: 5 },
};
static calculate(
serviceName: string,
handlerName?: string,
customConfig?: Partial<ConnectionPoolConfig>
): PoolSizeRecommendation {
// Check for custom configuration first
if (customConfig?.minConnections && customConfig?.maxConnections) {
return {
min: customConfig.minConnections,
max: customConfig.maxConnections,
idle: Math.floor((customConfig.minConnections + customConfig.maxConnections) / 4),
};
}
// Try handler-specific sizes first, then service-level
const key = handlerName || serviceName;
const recommendation = this.DEFAULT_SIZES[key] || this.DEFAULT_SIZES[serviceName];
if (recommendation) {
return { ...recommendation };
}
// Fall back to generic defaults
return {
min: 2,
max: 10,
idle: 3,
};
}
static getOptimalPoolSize(
expectedConcurrency: number,
averageQueryTimeMs: number,
targetLatencyMs: number
): number {
// Little's Law: L = λ * W
// L = number of connections needed
// λ = arrival rate (requests per second)
// W = average time in system (seconds)
const requestsPerSecond = expectedConcurrency;
const averageTimeInSystem = averageQueryTimeMs / 1000;
const minConnections = Math.ceil(requestsPerSecond * averageTimeInSystem);
// Add buffer for burst traffic (20% overhead)
const recommendedSize = Math.ceil(minConnections * 1.2);
// Ensure we meet target latency
const latencyBasedSize = Math.ceil(expectedConcurrency * (averageQueryTimeMs / targetLatencyMs));
return Math.max(recommendedSize, latencyBasedSize, 2); // Minimum 2 connections
}
}

View file

@ -0,0 +1,215 @@
import { getLogger, type Logger } from '@stock-bot/logger';
import type { ConnectionFactory } from './connection-factory';
export interface ServiceRegistration<T = any> {
name: string;
factory: () => T | Promise<T>;
singleton?: boolean;
dispose?: (instance: T) => Promise<void>;
}
export interface ServiceResolver {
resolve<T>(name: string, options?: any): T;
resolveAsync<T>(name: string, options?: any): Promise<T>;
}
export class ServiceContainer implements ServiceResolver {
private readonly logger: Logger;
private readonly registrations = new Map<string, ServiceRegistration>();
private readonly instances = new Map<string, any>();
private readonly scopedInstances = new Map<string, any>();
private readonly parent?: ServiceContainer;
constructor(name: string, parent?: ServiceContainer) {
this.logger = getLogger(`service-container:${name}`);
this.parent = parent;
}
register<T>(registration: ServiceRegistration<T>): void {
this.registrations.set(registration.name, registration);
this.logger.debug('Service registered', { name: registration.name, singleton: registration.singleton });
}
resolve<T>(name: string, options?: any): T {
const instance = this.resolveAsync<T>(name, options);
if (instance instanceof Promise) {
throw new Error(`Service ${name} is async. Use resolveAsync() instead.`);
}
return instance as T;
}
async resolveAsync<T>(name: string, _options?: any): Promise<T> {
// Check scoped instances first
if (this.scopedInstances.has(name)) {
return this.scopedInstances.get(name);
}
// Check singleton instances
if (this.instances.has(name)) {
return this.instances.get(name);
}
// Get registration from this container or parent
const registration = this.getRegistration(name);
if (!registration) {
throw new Error(`Service ${name} not registered`);
}
// Create instance
const instance = await Promise.resolve(registration.factory());
// Store based on singleton flag
if (registration.singleton) {
this.instances.set(name, instance);
} else {
this.scopedInstances.set(name, instance);
}
return instance as T;
}
createScope(): ServiceContainer {
return new ServiceContainer('scoped', this);
}
async dispose(): Promise<void> {
// Dispose scoped instances
for (const [name, instance] of this.scopedInstances.entries()) {
const registration = this.getRegistration(name);
if (registration?.dispose) {
await registration.dispose(instance);
}
}
this.scopedInstances.clear();
// Only dispose singletons if this is the root container
if (!this.parent) {
for (const [name, instance] of this.instances.entries()) {
const registration = this.registrations.get(name);
if (registration?.dispose) {
await registration.dispose(instance);
}
}
this.instances.clear();
}
}
private getRegistration(name: string): ServiceRegistration | undefined {
return this.registrations.get(name) || this.parent?.getRegistration(name);
}
}
// Enhanced service container factory with infrastructure services
export function createServiceContainer(
serviceName: string,
connectionFactory: ConnectionFactory,
config?: any
): ServiceContainer {
const container = new ServiceContainer(serviceName);
// Register configuration if provided
if (config) {
container.register({
name: 'config',
factory: () => config,
singleton: true,
});
}
// Register connection factories
container.register({
name: 'mongodb',
factory: async () => {
const pool = await connectionFactory.createMongoDB({
name: 'default',
config: {} as any, // Config injected by factory
});
return pool.client;
},
singleton: true,
});
container.register({
name: 'postgres',
factory: async () => {
const pool = await connectionFactory.createPostgreSQL({
name: 'default',
config: {} as any, // Config injected by factory
});
return pool.client;
},
singleton: true,
});
container.register({
name: 'cache',
factory: () => {
const pool = connectionFactory.createCache({
name: 'default',
config: {} as any, // Config injected by factory
});
return pool.client;
},
singleton: true,
});
container.register({
name: 'queue',
factory: () => {
const pool = connectionFactory.createQueue({
name: 'default',
config: {} as any, // Config injected by factory
});
return pool.client;
},
singleton: true,
});
// Register ProxyManager
container.register({
name: 'proxyManager',
factory: async () => {
const { ProxyManager } = await import('@stock-bot/utils');
await ProxyManager.initialize();
return ProxyManager.getInstance();
},
singleton: true,
dispose: async (proxyManager) => {
// ProxyManager handles its own cleanup
if (proxyManager && typeof proxyManager.shutdown === 'function') {
await proxyManager.shutdown();
}
}
});
// Register Browser service
container.register({
name: 'browser',
factory: async () => {
const { Browser } = await import('@stock-bot/browser');
return Browser;
},
singleton: true,
dispose: async (browser) => {
if (browser && typeof browser.close === 'function') {
await browser.close();
}
}
});
// Register HttpClient with default configuration
container.register({
name: 'httpClient',
factory: async () => {
const { createHttpClient } = await import('@stock-bot/http');
return createHttpClient({
timeout: 30000,
retries: 3,
userAgent: 'stock-bot/1.0',
});
},
singleton: true,
});
return container;
}

View file

@ -0,0 +1,21 @@
{
"extends": "../../../tsconfig.json",
"compilerOptions": {
"rootDir": "./",
"outDir": "./dist",
"composite": true,
"declaration": true,
"declarationMap": true,
"types": ["node", "bun-types"]
},
"include": ["./**/*.ts"],
"exclude": ["node_modules", "dist"],
"references": [
{ "path": "../config" },
{ "path": "../logger" },
{ "path": "../../data/mongodb" },
{ "path": "../../data/postgres" },
{ "path": "../../data/cache" },
{ "path": "../../services/queue" }
]
}

68
libs/core/di/types.ts Normal file
View file

@ -0,0 +1,68 @@
import type { MongoDBClientConfig } from '@stock-bot/mongodb-client';
import type { PostgreSQLClientConfig } from '@stock-bot/postgres-client';
import type { CacheOptions } from '@stock-bot/cache';
import type { QueueManagerConfig } from '@stock-bot/queue';
export interface ConnectionPoolConfig {
name: string;
poolSize?: number;
minConnections?: number;
maxConnections?: number;
idleTimeoutMillis?: number;
connectionTimeoutMillis?: number;
enableMetrics?: boolean;
}
export interface MongoDBPoolConfig extends ConnectionPoolConfig {
config: MongoDBClientConfig;
}
export interface PostgreSQLPoolConfig extends ConnectionPoolConfig {
config: PostgreSQLClientConfig;
}
export interface CachePoolConfig extends ConnectionPoolConfig {
config: CacheOptions;
}
export interface QueuePoolConfig extends ConnectionPoolConfig {
config: QueueManagerConfig;
}
export interface ConnectionFactoryConfig {
service: string;
environment: 'development' | 'production' | 'test';
pools?: {
mongodb?: Partial<MongoDBPoolConfig>;
postgres?: Partial<PostgreSQLPoolConfig>;
cache?: Partial<CachePoolConfig>;
queue?: Partial<QueuePoolConfig>;
};
}
export interface ConnectionPool<T> {
name: string;
client: T;
metrics: PoolMetrics;
health(): Promise<boolean>;
dispose(): Promise<void>;
}
export interface PoolMetrics {
created: Date;
totalConnections: number;
activeConnections: number;
idleConnections: number;
waitingRequests: number;
errors: number;
}
export interface ConnectionFactory {
createMongoDB(config: MongoDBPoolConfig): Promise<ConnectionPool<any>>;
createPostgreSQL(config: PostgreSQLPoolConfig): Promise<ConnectionPool<any>>;
createCache(config: CachePoolConfig): ConnectionPool<any>;
createQueue(config: QueuePoolConfig): ConnectionPool<any>;
getPool(type: 'mongodb' | 'postgres' | 'cache' | 'queue', name: string): ConnectionPool<any> | undefined;
listPools(): Array<{ type: string; name: string; metrics: PoolMetrics }>;
disposeAll(): Promise<void>;
}

View file

@ -0,0 +1,22 @@
{
"name": "@stock-bot/handlers",
"version": "1.0.0",
"description": "Universal handler system for queue and event-driven operations",
"main": "./src/index.ts",
"types": "./src/index.ts",
"scripts": {
"build": "tsc",
"clean": "rimraf dist",
"test": "bun test"
},
"dependencies": {
"@stock-bot/config": "workspace:*",
"@stock-bot/logger": "workspace:*",
"@stock-bot/di": "workspace:*"
},
"devDependencies": {
"@types/node": "^20.11.0",
"typescript": "^5.3.0",
"bun-types": "^1.2.15"
}
}

View file

@ -0,0 +1,69 @@
import type { ServiceContainer } from '@stock-bot/di';
import { getLogger } from '@stock-bot/logger';
import type { IHandler, ExecutionContext } from '../types/types';
/**
* Abstract base class for all handlers
* Provides common functionality and structure for queue/event operations
*/
export abstract class BaseHandler implements IHandler {
protected readonly logger;
constructor(protected readonly container: ServiceContainer) {
this.logger = getLogger(this.constructor.name);
}
/**
* Main execution method - must be implemented by subclasses
* Works with queue (events commented for future)
*/
abstract execute(operation: string, input: unknown, context: ExecutionContext): Promise<unknown>;
/**
* Queue helper methods
*/
protected async scheduleOperation(operation: string, payload: unknown, delay?: number): Promise<void> {
const queue = await this.container.resolveAsync('queue');
await queue.add(operation, payload, { delay });
}
/**
* Get a service from the container
*/
protected async getService<T>(serviceName: string): Promise<T> {
return await this.container.resolveAsync<T>(serviceName);
}
/**
* Event methods - commented for future
*/
// protected async publishEvent(eventName: string, payload: unknown): Promise<void> {
// const eventBus = await this.container.resolveAsync('eventBus');
// await eventBus.publish(eventName, payload);
// }
/**
* Lifecycle hooks - can be overridden by subclasses
*/
async onInit?(): Promise<void>;
async onStart?(): Promise<void>;
async onStop?(): Promise<void>;
async onDispose?(): Promise<void>;
}
/**
* Specialized handler for operations that have scheduled jobs
*/
export abstract class ScheduledHandler extends BaseHandler {
/**
* Get scheduled job configurations for this handler
* Override in subclasses to define schedules
*/
getScheduledJobs?(): Array<{
operation: string;
cronPattern: string;
priority?: number;
immediately?: boolean;
description?: string;
}>;
}

View file

@ -0,0 +1,86 @@
// Simple decorators for handler registration
// These are placeholders for now - can be enhanced with reflection later
/**
* Handler decorator - marks a class as a handler
* @param name Handler name for registration
*/
export function Handler(name: string) {
return function <T extends { new (...args: any[]): {} }>(constructor: T) {
// Store handler name on the constructor for future use
(constructor as any).__handlerName = name;
return constructor;
};
}
/**
* Operation decorator - marks a method as an operation
* @param name Operation name
*/
export function Operation(name: string) {
return function (target: any, propertyName: string, descriptor: PropertyDescriptor) {
// Store operation metadata for future use
if (!target.constructor.__operations) {
target.constructor.__operations = [];
}
target.constructor.__operations.push({
name,
method: propertyName,
});
return descriptor;
};
}
/**
* Queue schedule decorator - marks an operation as scheduled
* @param cronPattern Cron pattern for scheduling
* @param options Additional scheduling options
*/
export function QueueSchedule(
cronPattern: string,
options?: {
priority?: number;
immediately?: boolean;
description?: string;
}
) {
return function (target: any, propertyName: string, descriptor: PropertyDescriptor) {
// Store schedule metadata for future use
if (!target.constructor.__schedules) {
target.constructor.__schedules = [];
}
target.constructor.__schedules.push({
operation: propertyName,
cronPattern,
...options,
});
return descriptor;
};
}
// Future event decorators - commented for now
// export function EventListener(eventName: string) {
// return function (target: any, propertyName: string, descriptor: PropertyDescriptor) {
// if (!target.constructor.__eventListeners) {
// target.constructor.__eventListeners = [];
// }
// target.constructor.__eventListeners.push({
// eventName,
// method: propertyName,
// });
// return descriptor;
// };
// }
// export function EventPublisher(eventName: string) {
// return function (target: any, propertyName: string, descriptor: PropertyDescriptor) {
// if (!target.constructor.__eventPublishers) {
// target.constructor.__eventPublishers = [];
// }
// target.constructor.__eventPublishers.push({
// eventName,
// method: propertyName,
// });
// return descriptor;
// };
// }

View file

@ -0,0 +1,26 @@
// Base handler classes
export { BaseHandler, ScheduledHandler } from './base/BaseHandler';
// Handler registry
export { handlerRegistry } from './registry/HandlerRegistry';
// Types
export type {
ExecutionContext,
IHandler,
JobHandler,
ScheduledJob,
HandlerConfig,
HandlerConfigWithSchedule,
TypedJobHandler,
HandlerMetadata,
OperationMetadata,
} from './types/types';
export { createJobHandler } from './types/types';
// Decorators
export { Handler, Operation, QueueSchedule } from './decorators/decorators';
// Future exports - commented for now
// export { EventListener, EventPublisher } from './decorators/decorators';

View file

@ -0,0 +1,191 @@
import { getLogger } from '@stock-bot/logger';
import type { JobHandler, HandlerConfig, HandlerConfigWithSchedule, ScheduledJob } from '../types/types';
const logger = getLogger('handler-registry');
class HandlerRegistry {
private handlers = new Map<string, HandlerConfig>();
private handlerSchedules = new Map<string, ScheduledJob[]>();
/**
* Register a handler with its operations (simple config)
*/
register(handlerName: string, config: HandlerConfig): void {
logger.info(`Registering handler: ${handlerName}`, {
operations: Object.keys(config),
});
this.handlers.set(handlerName, config);
}
/**
* Register a handler with operations and scheduled jobs (full config)
*/
registerWithSchedule(config: HandlerConfigWithSchedule): void {
logger.info(`Registering handler with schedule: ${config.name}`, {
operations: Object.keys(config.operations),
scheduledJobs: config.scheduledJobs?.length || 0,
});
this.handlers.set(config.name, config.operations);
if (config.scheduledJobs && config.scheduledJobs.length > 0) {
this.handlerSchedules.set(config.name, config.scheduledJobs);
}
}
/**
* Get a handler for a specific handler and operation
*/
getHandler(handler: string, operation: string): JobHandler | null {
const handlerConfig = this.handlers.get(handler);
if (!handlerConfig) {
logger.warn(`Handler not found: ${handler}`);
return null;
}
const jobHandler = handlerConfig[operation];
if (!jobHandler) {
logger.warn(`Operation not found: ${handler}:${operation}`, {
availableOperations: Object.keys(handlerConfig),
});
return null;
}
return jobHandler;
}
/**
* Get all scheduled jobs from all handlers
*/
getAllScheduledJobs(): Array<{ handler: string; job: ScheduledJob }> {
const allJobs: Array<{ handler: string; job: ScheduledJob }> = [];
for (const [handlerName, jobs] of this.handlerSchedules) {
for (const job of jobs) {
allJobs.push({
handler: handlerName,
job,
});
}
}
return allJobs;
}
/**
* Get scheduled jobs for a specific handler
*/
getScheduledJobs(handler: string): ScheduledJob[] {
return this.handlerSchedules.get(handler) || [];
}
/**
* Check if a handler has scheduled jobs
*/
hasScheduledJobs(handler: string): boolean {
return this.handlerSchedules.has(handler);
}
/**
* Get all registered handlers with their configurations
*/
getHandlerConfigs(): Array<{ name: string; operations: string[]; scheduledJobs: number }> {
return Array.from(this.handlers.keys()).map(name => ({
name,
operations: Object.keys(this.handlers.get(name) || {}),
scheduledJobs: this.handlerSchedules.get(name)?.length || 0,
}));
}
/**
* Get all handlers with their full configurations for queue manager registration
*/
getAllHandlers(): Map<string, { operations: HandlerConfig; scheduledJobs?: ScheduledJob[] }> {
const result = new Map<
string,
{ operations: HandlerConfig; scheduledJobs?: ScheduledJob[] }
>();
for (const [name, operations] of this.handlers) {
const scheduledJobs = this.handlerSchedules.get(name);
result.set(name, {
operations,
scheduledJobs,
});
}
return result;
}
/**
* Get all registered handlers
*/
getHandlers(): string[] {
return Array.from(this.handlers.keys());
}
/**
* Get operations for a specific handler
*/
getOperations(handler: string): string[] {
const handlerConfig = this.handlers.get(handler);
return handlerConfig ? Object.keys(handlerConfig) : [];
}
/**
* Check if a handler exists
*/
hasHandler(handler: string): boolean {
return this.handlers.has(handler);
}
/**
* Check if a handler has a specific operation
*/
hasOperation(handler: string, operation: string): boolean {
const handlerConfig = this.handlers.get(handler);
return handlerConfig ? operation in handlerConfig : false;
}
/**
* Remove a handler
*/
unregister(handler: string): boolean {
this.handlerSchedules.delete(handler);
return this.handlers.delete(handler);
}
/**
* Clear all handlers
*/
clear(): void {
this.handlers.clear();
this.handlerSchedules.clear();
}
/**
* Get registry statistics
*/
getStats(): { handlers: number; totalOperations: number; totalScheduledJobs: number } {
let totalOperations = 0;
let totalScheduledJobs = 0;
for (const config of this.handlers.values()) {
totalOperations += Object.keys(config).length;
}
for (const jobs of this.handlerSchedules.values()) {
totalScheduledJobs += jobs.length;
}
return {
handlers: this.handlers.size,
totalOperations,
totalScheduledJobs,
};
}
}
// Export singleton instance
export const handlerRegistry = new HandlerRegistry();

View file

@ -0,0 +1,73 @@
import type { ServiceContainer } from '@stock-bot/di';
// Simple execution context - mostly queue for now
export interface ExecutionContext {
type: 'queue'; // | 'event' - commented for future
serviceContainer: ServiceContainer;
metadata: {
source?: string;
jobId?: string;
attempts?: number;
timestamp: number;
[key: string]: unknown;
};
}
// Simple handler interface
export interface IHandler {
execute(operation: string, input: unknown, context: ExecutionContext): Promise<unknown>;
}
// Job handler type for queue operations
export interface JobHandler<TPayload = unknown, TResult = unknown> {
(payload: TPayload): Promise<TResult>;
}
// Scheduled job configuration
export interface ScheduledJob<T = unknown> {
type: string;
operation: string;
payload?: T;
cronPattern: string;
priority?: number;
description?: string;
immediately?: boolean;
delay?: number;
}
// Handler configuration
export interface HandlerConfig {
[operation: string]: JobHandler;
}
// Handler configuration with schedule
export interface HandlerConfigWithSchedule {
name: string;
operations: Record<string, JobHandler>;
scheduledJobs?: ScheduledJob[];
}
// Type-safe wrapper for creating job handlers
export type TypedJobHandler<TPayload, TResult = unknown> = (payload: TPayload) => Promise<TResult>;
// Helper to create type-safe job handlers
export function createJobHandler<TPayload = unknown, TResult = unknown>(
handler: TypedJobHandler<TPayload, TResult>
): JobHandler<unknown, TResult> {
return async (payload: unknown): Promise<TResult> => {
return handler(payload as TPayload);
};
}
// Handler metadata for decorators (future)
export interface HandlerMetadata {
name: string;
operations: OperationMetadata[];
}
export interface OperationMetadata {
name: string;
schedules?: string[];
// eventListeners?: string[]; // Future
// eventPublishers?: string[]; // Future
}

View file

@ -0,0 +1,14 @@
{
"extends": "../../../tsconfig.json",
"compilerOptions": {
"outDir": "./dist",
"rootDir": "./src",
"composite": true
},
"include": ["src/**/*"],
"references": [
{ "path": "../config" },
{ "path": "../logger" },
{ "path": "../di" }
]
}

337
libs/core/logger/README.md Normal file
View file

@ -0,0 +1,337 @@
# @stock-bot/logger
Enhanced logging library with Loki integration for the Stock Bot platform (June 2025).
## Features
- 🎯 **Multiple Log Levels**: debug, info, warn, error, http
- 🌐 **Loki Integration**: Centralized logging with Grafana visualization
- 📁 **File Logging**: Daily rotating log files with compression
- 🎨 **Console Logging**: Colored, formatted console output
- 📊 **Structured Logging**: JSON-formatted logs with metadata
- ⚡ **Performance Optimized**: Batching and async logging
- 🔐 **Security**: Automatic sensitive data masking
- 🎭 **Express Middleware**: Request/response logging
- 📈 **Business Events**: Specialized logging for trading operations
## Installation
```bash
# Using Bun (current runtime)
bun install
```
## Basic Usage
### Simple Logging
```typescript
import { getLogger } from '@stock-bot/logger';
const logger = getLogger('my-service');
logger.info('Service started');
logger.warn('This is a warning');
logger.error('An error occurred', new Error('Something went wrong'));
```
### With Context
```typescript
import { getLogger } from '@stock-bot/logger';
const logger = getLogger('trading-service');
logger.info('Trade executed', {
symbol: 'AAPL',
quantity: 100,
price: 150.25,
userId: '12345',
sessionId: 'abc-def-ghi'
});
```
### Performance Logging
```typescript
import { getLogger, createTimer } from '@stock-bot/logger';
const logger = getLogger('data-processor');
const timer = createTimer('data-processing');
// ... do some work ...
const timing = timer.end();
logger.performance('Data processing completed', timing);
```
### Business Events
```typescript
import { getLogger, createBusinessEvent } from '@stock-bot/logger';
const logger = getLogger('order-service');
logger.business('Order placed', createBusinessEvent(
'order',
'place',
{
entity: 'order-123',
result: 'success',
symbol: 'TSLA',
amount: 50000
}
));
```
### Security Events
```typescript
import { getLogger, createSecurityEvent } from '@stock-bot/logger';
const logger = getLogger('auth-service');
logger.security('Failed login attempt', createSecurityEvent(
'authentication',
{
user: 'john@example.com',
result: 'failure',
ip: '192.168.1.100',
severity: 'medium'
}
));
```
## Express Middleware
### Basic Request Logging
```typescript
import express from 'express';
import { loggingMiddleware } from '@stock-bot/logger';
const app = express();
app.use(loggingMiddleware({
serviceName: 'api-gateway',
skipPaths: ['/health', '/metrics']
}));
```
### Error Logging
```typescript
import { errorLoggingMiddleware, getLogger } from '@stock-bot/logger';
const logger = getLogger('api-gateway');
// Add after your routes but before error handlers
app.use(errorLoggingMiddleware(logger));
```
### Request-scoped Logger
```typescript
import { createRequestLogger, getLogger } from '@stock-bot/logger';
const baseLogger = getLogger('api-gateway');
app.use((req, res, next) => {
req.logger = createRequestLogger(req, baseLogger);
next();
});
app.get('/api/data', (req, res) => {
req.logger.info('Processing data request');
// ... handle request ...
});
```
## Configuration
The logger uses configuration from `@stock-bot/config`. Key environment variables:
```bash
# Logging
LOG_LEVEL=info
LOG_CONSOLE=true
LOG_FILE=true
LOG_FILE_PATH=./logs
# Loki
LOKI_HOST=localhost
LOKI_PORT=3100
LOKI_BATCH_SIZE=1024
```
## Advanced Usage
### Child Loggers
```typescript
import { getLogger } from '@stock-bot/logger';
const parentLogger = getLogger('trading-service');
const orderLogger = parentLogger.child({
module: 'order-processing',
orderId: '12345'
});
orderLogger.info('Order validated'); // Will include parent context
```
### Custom Configuration
```typescript
import { getLogger } from '@stock-bot/logger';
// Uses standard getLogger with service-specific configuration
const logger = getLogger('custom-service');
```
### Sensitive Data Masking
```typescript
import { sanitizeMetadata, maskSensitiveData } from '@stock-bot/logger';
const unsafeData = {
username: 'john',
password: 'secret123',
apiKey: 'abc123def456'
};
const safeData = sanitizeMetadata(unsafeData);
// { username: 'john', password: '[REDACTED]', apiKey: '[REDACTED]' }
const message = maskSensitiveData('User API key: abc123def456');
// 'User API key: [API_KEY]'
```
### Log Throttling
```typescript
import { LogThrottle } from '@stock-bot/logger';
const throttle = new LogThrottle(10, 60000); // 10 logs per minute
if (throttle.shouldLog('error-key')) {
logger.error('This error will be throttled');
}
```
## Viewing Logs
### Grafana Dashboard
1. Start the monitoring stack: `docker-compose up grafana loki`
2. Open Grafana at http://localhost:3000
3. Use the "Stock Bot Logs" dashboard
4. Query logs with LogQL: `{service="your-service"}`
### Log Files
When file logging is enabled, logs are written to:
- `./logs/{service-name}-YYYY-MM-DD.log` - All logs
- `./logs/{service-name}-error-YYYY-MM-DD.log` - Error logs only
## Best Practices
1. **Use appropriate log levels**:
- `debug`: Detailed development information
- `info`: General operational messages
- `warn`: Potential issues
- `error`: Actual errors requiring attention
2. **Include context**: Always provide relevant metadata
```typescript
logger.info('Trade executed', { symbol, quantity, price, orderId });
```
3. **Use structured logging**: Avoid string concatenation
```typescript
// Good
logger.info('User logged in', { userId, ip, userAgent });
// Avoid
logger.info(`User ${userId} logged in from ${ip}`);
```
4. **Handle sensitive data**: Use sanitization utilities
```typescript
const safeMetadata = sanitizeMetadata(requestData);
logger.info('API request', safeMetadata);
```
5. **Use correlation IDs**: Track requests across services
```typescript
const logger = getLogger('service').child({
correlationId: req.headers['x-correlation-id']
});
```
## Integration with Services
To use in your service:
1. Add dependency to your service's `package.json`:
```json
{
"dependencies": {
"@stock-bot/logger": "*"
}
}
```
2. Update your service's `tsconfig.json` references:
```json
{
"references": [
{ "path": "../../../libs/logger" }
]
}
```
3. Import and use:
```typescript
import { getLogger } from '@stock-bot/logger';
const logger = getLogger('my-service');
```
## Performance Considerations
- Logs are batched and sent asynchronously to Loki
- File logging uses daily rotation to prevent large files
- Console logging can be disabled in production
- Use log throttling for high-frequency events
- Sensitive data is automatically masked
## Troubleshooting
### Logs not appearing in Loki
1. Check Loki connection:
```bash
curl http://localhost:3100/ready
```
2. Verify environment variables:
```bash
echo $LOKI_HOST $LOKI_PORT
```
3. Check container logs:
```bash
docker logs stock-bot-loki
```
### High memory usage
- Reduce `LOKI_BATCH_SIZE` if batching too many logs
- Disable file logging if not needed
### Missing logs
- Check log level configuration
- Verify service name matches expectations
- Ensure proper error handling around logger calls

View file

@ -0,0 +1,18 @@
# Logger library Bun configuration
[test]
# Configure coverage and test behavior
coverage = true
timeout = "30s"
# Configure test environment
preload = ["./test/setup.ts"]
# Environment variables for tests
[test.env]
NODE_ENV = "test"
LOG_LEVEL = "silent"
LOG_CONSOLE = "false"
LOG_FILE = "false"
LOKI_HOST = ""
LOKI_URL = ""

View file

@ -0,0 +1,35 @@
{
"name": "@stock-bot/logger",
"version": "1.0.0",
"description": "Enhanced logging library with Loki integration for stock-bot services",
"main": "dist/index.js",
"types": "dist/index.d.ts",
"type": "module",
"scripts": {
"build": "tsc",
"clean": "rimraf dist",
"test": "bun test"
},
"dependencies": {
"got": "^14.4.7",
"pino": "^9.7.0",
"pino-loki": "^2.6.0",
"pino-pretty": "^13.0.0"
},
"devDependencies": {
"@types/node": "^20.11.0",
"typescript": "^5.3.0",
"bun-types": "^1.2.15"
},
"exports": {
".": {
"import": "./dist/index.js",
"require": "./dist/index.js",
"types": "./dist/index.d.ts"
}
},
"files": [
"dist",
"README.md"
]
}

View file

@ -0,0 +1,14 @@
/**
* @stock-bot/logger - Simplified logging library
*
* Main exports for the logger library
*/
// Core logger classes and functions
export { Logger, getLogger, shutdownLoggers, setLoggerConfig } from './logger';
// Type definitions
export type { LogLevel, LogContext, LogMetadata, LoggerConfig } from './types';
// Default export
export { getLogger as default } from './logger';

View file

@ -0,0 +1,402 @@
/**
* Simplified Pino-based logger for Stock Bot platform
*
* Features:
* - High performance JSON logging with Pino
* - Console, file, and Loki transports
* - Structured logging with metadata
* - Service-specific context
*/
import pino from 'pino';
import pretty from 'pino-pretty';
import type { LogContext, LoggerConfig, LogLevel, LogMetadata } from './types';
// Simple cache for logger instances
const loggerCache = new Map<string, pino.Logger>();
// Global config that can be set
let globalConfig: LoggerConfig = {
logLevel: 'info', // Default to info, but trace and fatal are supported
logConsole: true,
logFile: false,
logFilePath: './logs',
logLoki: false,
environment: 'development',
hideObject: false,
};
// Log level priorities for comparison
const LOG_LEVELS: Record<LogLevel, number> = {
trace: 10,
debug: 20,
info: 30,
warn: 40,
error: 50,
fatal: 60,
};
/**
* Set global logger configuration
*/
export function setLoggerConfig(config: LoggerConfig): void {
globalConfig = { ...globalConfig, ...config };
// Clear cache to force recreation with new config
loggerCache.clear();
}
/**
* Create logger destination using multistream approach:
* - Console: In-process pretty stream (fast shutdown, disabled in production)
* - File/Loki: Worker transports (default timeout, ok to wait)
*/
function createDestination(
serviceName: string,
config: LoggerConfig = globalConfig
): pino.DestinationStream | null {
const streams: pino.StreamEntry[] = [];
// Console: In-process pretty stream for dev (fast shutdown)
if (config.logConsole && config.environment !== 'production') {
const prettyStream = pretty({
sync: true, // IMPORTANT: Make async to prevent blocking the event loop
colorize: true,
translateTime: 'yyyy-mm-dd HH:MM:ss.l',
messageFormat: '[{service}{childName}] {msg}',
singleLine: false, // This was causing logs to be on one line
hideObject: false, // Hide metadata objects
ignore: 'pid,hostname,service,environment,version,childName',
errorLikeObjectKeys: ['err', 'error'],
errorProps: 'message,stack,name,code',
});
streams.push({ stream: prettyStream });
}
// File: Worker transport (has timeout but acceptable)
if (config.logFile) {
streams.push(
pino.transport({
target: 'pino/file',
level: config.logLevel || 'info',
options: {
destination: `${config.logFilePath}/${serviceName}.log`,
mkdir: true,
},
})
);
}
// Loki: Worker transport (has timeout but acceptable)
if (config.logLoki && config.lokiHost) {
streams.push(
pino.transport({
target: 'pino-loki',
level: config.logLevel || 'info',
options: {
host: config.lokiHost,
labels: {
service: serviceName,
environment: config.environment || 'development',
},
ignore: 'childName',
...(config.lokiUser && config.lokiPassword
? {
basicAuth: {
username: config.lokiUser,
password: config.lokiPassword,
},
}
: {}),
},
})
);
}
return streams.length > 0 ? pino.multistream(streams) : null;
}
/**
* Get or create pino logger
*/
function getPinoLogger(serviceName: string, config: LoggerConfig = globalConfig): pino.Logger {
const cacheKey = `${serviceName}-${JSON.stringify(config)}`;
if (!loggerCache.has(cacheKey)) {
const destination = createDestination(serviceName, config);
const loggerOptions: pino.LoggerOptions = {
level: config.logLevel || 'info',
base: {
service: serviceName,
environment: config.environment || 'development',
version: '1.0.0',
},
};
const logger = destination ? pino(loggerOptions, destination) : pino(loggerOptions);
loggerCache.set(cacheKey, logger);
}
const logger = loggerCache.get(cacheKey);
if (!logger) {
throw new Error(`Expected logger ${cacheKey} to exist in cache`);
}
return logger;
}
/**
* Simplified Logger class
*/
export class Logger {
private pino: pino.Logger;
private context: LogContext;
private serviceName: string;
private childName?: string;
constructor(serviceName: string, context: LogContext = {}, config?: LoggerConfig) {
this.pino = getPinoLogger(serviceName, config);
this.context = context;
this.serviceName = serviceName;
}
/**
* Check if a log level should be output based on global config
*/
private shouldLog(level: LogLevel): boolean {
const currentLevel = globalConfig.logLevel || 'info';
return LOG_LEVELS[level] >= LOG_LEVELS[currentLevel];
}
/**
* Core log method
*/
private log(level: LogLevel, message: string | object, metadata?: LogMetadata): void {
// Skip if level is below current threshold
if (!this.shouldLog(level)) {
return;
}
let data = { ...this.context, ...metadata };
// Hide all metadata if hideObject is enabled
if (globalConfig.hideObject) {
data = {}; // Clear all metadata
}
if (typeof message === 'string') {
(this.pino as any)[level](data, message);
} else {
if (globalConfig.hideObject) {
(this.pino as any)[level]({}, `Object logged (hidden)`);
} else {
(this.pino as any)[level]({ ...data, data: message }, 'Object logged');
}
}
}
// Simple log level methods
trace(message: string | object, metadata?: LogMetadata): void {
this.log('trace', message, metadata);
}
debug(message: string | object, metadata?: LogMetadata): void {
this.log('debug', message, metadata);
}
info(message: string | object, metadata?: LogMetadata): void {
this.log('info', message, metadata);
}
warn(message: string | object, metadata?: LogMetadata): void {
this.log('warn', message, metadata);
}
error(message: string | object, metadata?: (LogMetadata & { error?: any }) | unknown): void {
let data: any = {};
// Handle metadata parameter normalization
if (metadata instanceof Error) {
// Direct Error object as metadata
data = { error: metadata };
} else if (metadata !== null && typeof metadata === 'object') {
// Object metadata (including arrays, but not null)
data = { ...metadata };
} else if (metadata !== undefined) {
// Primitive values (string, number, boolean, etc.)
data = { metadata };
}
// Handle multiple error properties in metadata
const errorKeys = ['error', 'err', 'primaryError', 'secondaryError'];
errorKeys.forEach(key => {
if (data[key]) {
const normalizedKey = key === 'error' ? 'err' : `${key}_normalized`;
data[normalizedKey] = this.normalizeError(data[key]);
// Only delete the original 'error' key to maintain other error properties
if (key === 'error') {
delete data.error;
}
}
});
this.log('error', message, data);
}
fatal(message: string | object, metadata?: (LogMetadata & { error?: any }) | unknown): void {
let data: any = {};
// Handle metadata parameter normalization (same as error)
if (metadata instanceof Error) {
data = { error: metadata };
} else if (metadata !== null && typeof metadata === 'object') {
data = { ...metadata };
} else if (metadata !== undefined) {
data = { metadata };
}
// Normalize error objects in the data
const errorKeys = ['error', 'err', 'primaryError', 'secondaryError'];
errorKeys.forEach(key => {
if (data[key]) {
const normalizedKey = key === 'error' ? 'err' : `${key}_normalized`;
data[normalizedKey] = this.normalizeError(data[key]);
if (key === 'error') {
delete data.error;
}
}
});
this.log('fatal', message, data);
}
/**
* Normalize any error type to a structured format
*/
private normalizeError(error: any): any {
if (error instanceof Error) {
return {
name: error.name,
message: error.message,
stack: error.stack,
};
}
if (error && typeof error === 'object') {
// Handle error-like objects
return {
name: error.name || 'UnknownError',
message: error.message || error.toString(),
...(error.stack && { stack: error.stack }),
...(error.code && { code: error.code }),
...(error.status && { status: error.status }),
};
}
// Handle primitives (string, number, etc.)
return {
name: 'UnknownError',
message: String(error),
};
}
/**
* Create child logger with additional context
*/
child(serviceName: string, context?: LogContext): Logger {
// Create child logger that shares the same pino instance with additional context
const childLogger = Object.create(Logger.prototype);
childLogger.serviceName = this.serviceName;
childLogger.childName = serviceName;
childLogger.context = { ...this.context, ...context };
const childBindings = {
service: this.serviceName,
childName: ' -> ' + serviceName,
...(context || childLogger.context),
};
childLogger.pino = this.pino.child(childBindings);
return childLogger;
// }
// childLogger.pino = this.pino.child(context || childLogger.context); // Let pino handle level inheritance naturally
// return childLogger;
}
// Getters for service and context
getServiceName(): string {
return this.serviceName;
}
getChildName(): string | undefined {
return this.childName;
}
}
/**
* Main factory function
*/
export function getLogger(
serviceName: string,
context?: LogContext,
config?: LoggerConfig
): Logger {
return new Logger(serviceName, context, config);
}
/**
* Gracefully shutdown all logger instances
* This ensures all transports are flushed and closed properly
*/
export async function shutdownLoggers(): Promise<void> {
try {
// Log final message before shutdown
for (const logger of loggerCache.values()) {
logger.info('Logger shutting down...');
}
const flushPromises = Array.from(loggerCache.values()).map(logger => logger.flush());
await Promise.all(flushPromises);
// Give transports time to finish writing
// This is especially important for file and network transports
await new Promise(resolve => setTimeout(resolve, 100));
} catch (error) {
// eslint-disable-next-line no-console
console.error('Logger shutdown failed:', error);
} finally {
loggerCache.clear();
}
}
/**
* Graceful shutdown - flush all logger transports quickly
* Use this in your application shutdown handlers
*/
export async function gracefulShutdown(): Promise<void> {
const flushPromises: Promise<void>[] = [];
for (const logger of loggerCache.values()) {
// Use pino v9's flush() method - this is much faster than the complex shutdown
flushPromises.push(
new Promise<void>((resolve, reject) => {
logger.flush((err?: Error) => {
if (err) {
reject(err);
} else {
resolve();
}
});
})
);
}
try {
await Promise.all(flushPromises);
} catch (error) {
// eslint-disable-next-line no-console
console.error('Logger graceful shutdown failed:', error);
}
}
// Export types for convenience
export type { LogContext, LogLevel, LogMetadata } from './types';

View file

@ -0,0 +1,30 @@
/**
* Simplified type definitions for the logger library
*/
// Standard log levels (simplified to pino defaults)
export type LogLevel = 'trace' | 'debug' | 'info' | 'warn' | 'error' | 'fatal';
// Context that persists across log calls
export interface LogContext {
[key: string]: any;
}
// Metadata for individual log entries
export interface LogMetadata {
[key: string]: any;
}
// Logger configuration
export interface LoggerConfig {
logLevel?: LogLevel;
logConsole?: boolean;
logFile?: boolean;
logFilePath?: string;
logLoki?: boolean;
lokiHost?: string;
lokiUser?: string;
lokiPassword?: string;
environment?: string;
hideObject?: boolean;
}

View file

@ -0,0 +1,201 @@
/**
* Advanced Logger Tests
*
* Tests for advanced logger functionality including complex metadata handling,
* child loggers, and advanced error scenarios.
*/
import { afterEach, beforeEach, describe, expect, it } from 'bun:test';
import { Logger, shutdownLoggers } from '../src';
import { loggerTestHelpers } from './setup';
describe('Advanced Logger Features', () => {
let logger: Logger;
let testLoggerInstance: ReturnType<typeof loggerTestHelpers.createTestLogger>;
beforeEach(() => {
testLoggerInstance = loggerTestHelpers.createTestLogger('advanced-features');
logger = testLoggerInstance.logger;
});
afterEach(async () => {
testLoggerInstance.clearCapturedLogs();
// Clear any global logger cache
await shutdownLoggers();
});
describe('Complex Metadata Handling', () => {
it('should handle nested metadata objects', () => {
const complexMetadata = {
user: { id: '123', name: 'John Doe' },
session: { id: 'sess-456', timeout: 3600 },
request: { method: 'POST', path: '/api/test' },
};
logger.info('Complex operation', complexMetadata);
const logs = testLoggerInstance.getCapturedLogs();
expect(logs.length).toBe(1);
expect(logs[0].user).toEqual({ id: '123', name: 'John Doe' });
expect(logs[0].session).toEqual({ id: 'sess-456', timeout: 3600 });
expect(logs[0].request).toEqual({ method: 'POST', path: '/api/test' });
});
it('should handle arrays in metadata', () => {
const arrayMetadata = {
tags: ['user', 'authentication', 'success'],
ids: [1, 2, 3, 4],
};
logger.info('Array metadata test', arrayMetadata);
const logs = testLoggerInstance.getCapturedLogs();
expect(logs.length).toBe(1);
expect(logs[0].tags).toEqual(['user', 'authentication', 'success']);
expect(logs[0].ids).toEqual([1, 2, 3, 4]);
});
it('should handle null and undefined metadata values', () => {
const nullMetadata = {
nullValue: null,
undefinedValue: undefined,
emptyString: '',
zeroValue: 0,
};
logger.info('Null metadata test', nullMetadata);
const logs = testLoggerInstance.getCapturedLogs();
expect(logs.length).toBe(1);
expect(logs[0].nullValue).toBe(null);
expect(logs[0].emptyString).toBe('');
expect(logs[0].zeroValue).toBe(0);
});
});
describe('Child Logger Functionality', () => {
it('should create child logger with additional context', () => {
const childLogger = logger.child({
component: 'auth-service',
version: '1.2.3',
});
childLogger.info('Child logger message');
const logs = testLoggerInstance.getCapturedLogs();
expect(logs.length).toBe(1);
expect(logs[0].component).toBe('auth-service');
expect(logs[0].version).toBe('1.2.3');
expect(logs[0].msg).toBe('Child logger message');
});
it('should support nested child loggers', () => {
const childLogger = logger.child({ level1: 'parent' });
const grandChildLogger = childLogger.child({ level2: 'child' });
grandChildLogger.warn('Nested child message');
const logs = testLoggerInstance.getCapturedLogs();
expect(logs.length).toBe(1);
expect(logs[0].level1).toBe('parent');
expect(logs[0].level2).toBe('child');
expect(logs[0].level).toBe('warn');
});
it('should merge child context with log metadata', () => {
const childLogger = logger.child({ service: 'api' });
childLogger.info('Request processed', {
requestId: 'req-789',
duration: 150,
});
const logs = testLoggerInstance.getCapturedLogs();
expect(logs.length).toBe(1);
expect(logs[0].service).toBe('api');
expect(logs[0].requestId).toBe('req-789');
expect(logs[0].duration).toBe(150);
});
});
describe('Advanced Error Handling', () => {
it('should handle Error objects with custom properties', () => {
const customError = new Error('Custom error message');
(customError as any).code = 'ERR_CUSTOM';
(customError as any).statusCode = 500;
logger.error('Custom error occurred', { error: customError });
const logs = testLoggerInstance.getCapturedLogs();
expect(logs.length).toBe(1);
expect(logs[0].level).toBe('error');
expect(logs[0].msg).toBe('Custom error occurred');
});
it('should handle multiple errors in metadata', () => {
const error1 = new Error('First error');
const error2 = new Error('Second error');
logger.error('Multiple errors', {
primaryError: error1,
secondaryError: error2,
context: 'batch processing',
});
const logs = testLoggerInstance.getCapturedLogs();
expect(logs.length).toBe(1);
expect(logs[0].context).toBe('batch processing');
});
it('should handle error objects with circular references', () => {
const errorWithCircular: any = { name: 'CircularError', message: 'Circular reference error' };
// Create a simple circular reference
errorWithCircular.self = errorWithCircular;
// Should not throw when logging circular references
expect(() => {
logger.error('Circular error test', { error: errorWithCircular });
}).not.toThrow();
const logs = testLoggerInstance.getCapturedLogs();
expect(logs.length).toBe(1);
expect(logs[0].level).toBe('error');
// Clean up circular reference to prevent memory issues
delete errorWithCircular.self;
});
});
describe('Performance and Edge Cases', () => {
it('should handle moderate metadata objects', () => {
const moderateMetadata: any = {};
for (let i = 0; i < 10; i++) {
moderateMetadata[`key${i}`] = `value${i}`;
}
logger.debug('Moderate metadata test', moderateMetadata);
const logs = testLoggerInstance.getCapturedLogs();
expect(logs.length).toBe(1);
expect(logs[0].key0).toBe('value0');
expect(logs[0].key9).toBe('value9');
});
it('should handle special characters in messages', () => {
const specialMessage = 'Special chars: 🚀 ñ ü';
logger.info(specialMessage);
const logs = testLoggerInstance.getCapturedLogs();
expect(logs.length).toBe(1);
expect(logs[0].msg).toBe(specialMessage);
});
it('should handle empty and whitespace-only messages', () => {
logger.info('');
logger.info(' ');
const logs = testLoggerInstance.getCapturedLogs();
expect(logs.length).toBe(2);
expect(logs[0].msg).toBe('');
expect(logs[1].msg).toBe(' ');
});
});
});

View file

@ -0,0 +1,169 @@
/**
* Basic Logger Tests
*
* Tests for the core logger functionality and utilities.
*/
import { afterEach, beforeEach, describe, expect, it } from 'bun:test';
import { getLogger, Logger, shutdownLoggers } from '../src';
import { loggerTestHelpers } from './setup';
describe('Basic Logger Tests', () => {
let logger: Logger;
let testLoggerInstance: ReturnType<typeof loggerTestHelpers.createTestLogger>;
beforeEach(() => {
testLoggerInstance = loggerTestHelpers.createTestLogger('utils-test');
logger = testLoggerInstance.logger;
});
afterEach(async () => {
testLoggerInstance.clearCapturedLogs();
// Clear any global logger cache
await shutdownLoggers();
});
describe('Logger Factory Functions', () => {
it('should create logger with getLogger', () => {
expect(typeof getLogger).toBe('function');
// Test that getLogger doesn't throw
expect(() => {
const anotherTestLoggerInstance = loggerTestHelpers.createTestLogger('factory-test');
anotherTestLoggerInstance.logger.info('Factory test');
}).not.toThrow();
});
});
describe('Logger Methods', () => {
it('should have all required logging methods', () => {
expect(typeof logger.debug).toBe('function');
expect(typeof logger.info).toBe('function');
expect(typeof logger.warn).toBe('function');
expect(typeof logger.error).toBe('function');
expect(typeof logger.child).toBe('function');
});
it('should log with different message types', () => {
// String message
logger.info('String message');
// Object message
logger.info({ event: 'object_message', data: 'test' });
const logs = testLoggerInstance.getCapturedLogs();
expect(logs.length).toBe(2);
expect(logs[0].msg).toBe('String message');
expect(logs[1].level).toBe('info');
});
it('should handle metadata correctly', () => {
const metadata = {
userId: 'user123',
sessionId: 'session456',
requestId: 'req789',
};
logger.info('Request processed', metadata);
const logs = testLoggerInstance.getCapturedLogs();
expect(logs.length).toBe(1);
expect(logs[0].userId).toBe('user123');
expect(logs[0].sessionId).toBe('session456');
expect(logs[0].requestId).toBe('req789');
});
});
describe('Child Logger Functionality', () => {
it('should create child loggers with additional context', () => {
const childLogger = logger.child({
module: 'payment',
version: '1.0.0',
});
childLogger.info('Payment processed');
const logs = testLoggerInstance.getCapturedLogs();
expect(logs.length).toBe(1);
expect(logs[0].msg).toBe('Payment processed');
});
it('should inherit service name in child loggers', () => {
const childLogger = logger.child({ operation: 'test' });
childLogger.info('Child operation');
const logs = testLoggerInstance.getCapturedLogs();
expect(logs.length).toBe(1);
expect(logs[0].service).toBe('utils-test');
});
});
describe('Error Normalization', () => {
it('should handle Error objects', () => {
const error = new Error('Test error');
error.stack = 'Error stack trace';
logger.error('Error test', error);
const logs = testLoggerInstance.getCapturedLogs();
expect(logs.length).toBe(1);
expect(logs[0].level).toBe('error');
});
it('should handle error-like objects', () => {
const errorLike = {
name: 'ValidationError',
message: 'Invalid input',
code: 'VALIDATION_FAILED',
};
logger.error('Validation failed', { error: errorLike });
const logs = testLoggerInstance.getCapturedLogs();
expect(logs.length).toBe(1);
expect(logs[0].level).toBe('error');
});
it('should handle primitive error values', () => {
logger.error('Simple error', { error: 'Error string' });
const logs = testLoggerInstance.getCapturedLogs();
expect(logs.length).toBe(1);
expect(logs[0].level).toBe('error');
});
});
describe('Service Context', () => {
it('should include service name in all logs', () => {
logger.debug('Debug message');
logger.info('Info message');
logger.warn('Warn message');
logger.error('Error message');
const logs = testLoggerInstance.getCapturedLogs();
expect(logs.length).toBe(4);
logs.forEach(log => {
expect(log.service).toBe('utils-test');
});
});
it('should support different service names', () => {
const logger1Instance = loggerTestHelpers.createTestLogger('service-one');
const logger2Instance = loggerTestHelpers.createTestLogger('service-two');
logger1Instance.logger.info('Message from service one');
logger2Instance.logger.info('Message from service two');
// Since each logger instance has its own capture, we check them separately
// or combine them if that's the desired test logic.
// For this test, it seems we want to ensure they are separate.
const logs1 = logger1Instance.getCapturedLogs();
expect(logs1.length).toBe(1);
expect(logs1[0].service).toBe('service-one');
const logs2 = logger2Instance.getCapturedLogs();
expect(logs2.length).toBe(1);
expect(logs2[0].service).toBe('service-two');
});
});
});

View file

@ -0,0 +1,188 @@
/**
* Logger Integration Tests
*
* Tests the core functionality of the simplified @stock-bot/logger package.
*/
import { afterEach, beforeEach, describe, expect, it } from 'bun:test';
import { getLogger, Logger, shutdownLoggers } from '../src';
import { loggerTestHelpers } from './setup';
describe('Logger Integration Tests', () => {
let logger: Logger;
let testLoggerInstance: ReturnType<typeof loggerTestHelpers.createTestLogger>;
beforeEach(() => {
testLoggerInstance = loggerTestHelpers.createTestLogger('integration-test');
logger = testLoggerInstance.logger;
});
afterEach(async () => {
testLoggerInstance.clearCapturedLogs();
// Clear any global logger cache
await shutdownLoggers();
});
describe('Core Logger Functionality', () => {
it('should log messages at different levels', () => {
// Test multiple log levels
logger.debug('Debug message');
logger.info('Info message');
logger.warn('Warning message');
logger.error('Error message');
// Get captured logs
const logs = testLoggerInstance.getCapturedLogs();
// Verify logs were captured
expect(logs.length).toBe(4);
expect(logs[0].level).toBe('debug');
expect(logs[0].msg).toBe('Debug message');
expect(logs[1].level).toBe('info');
expect(logs[1].msg).toBe('Info message');
expect(logs[2].level).toBe('warn');
expect(logs[2].msg).toBe('Warning message');
expect(logs[3].level).toBe('error');
expect(logs[3].msg).toBe('Error message');
});
it('should log objects as structured logs', () => {
// Log an object
logger.info('User logged in', { userId: '123', action: 'login' });
// Get captured logs
const logs = testLoggerInstance.getCapturedLogs();
// Verify structured log
expect(logs.length).toBe(1);
expect(logs[0].userId).toBe('123');
expect(logs[0].action).toBe('login');
expect(logs[0].msg).toBe('User logged in');
});
it('should handle error objects in error logs', () => {
const testError = new Error('Test error message');
// Log error with error object
logger.error('Something went wrong', { error: testError });
// Get captured logs
const logs = testLoggerInstance.getCapturedLogs();
// Verify error was logged
expect(logs.length).toBe(1);
expect(logs[0].level).toBe('error');
expect(logs[0].msg).toBe('Something went wrong');
});
it('should create child loggers with additional context', () => {
// Create a child logger with additional context
const childLogger = logger.child({
transactionId: 'tx-789',
operation: 'payment',
});
// Log with child logger
childLogger.info('Child logger test');
// Get captured logs
const logs = testLoggerInstance.getCapturedLogs();
// Verify child logger logged something
expect(logs.length).toBe(1);
expect(logs[0].msg).toBe('Child logger test');
});
});
describe('Factory Functions', () => {
it('should export factory functions', () => {
// Verify that the factory functions are exported and callable
expect(typeof getLogger).toBe('function');
});
it('should create different logger instances', () => {
const logger1Instance = loggerTestHelpers.createTestLogger('service-1');
const logger2Instance = loggerTestHelpers.createTestLogger('service-2');
logger1Instance.logger.info('Message from service 1');
logger2Instance.logger.info('Message from service 2');
const logs1 = logger1Instance.getCapturedLogs();
expect(logs1.length).toBe(1);
expect(logs1[0].service).toBe('service-1');
const logs2 = logger2Instance.getCapturedLogs();
expect(logs2.length).toBe(1);
expect(logs2[0].service).toBe('service-2');
});
});
describe('Error Handling', () => {
it('should normalize Error objects', () => {
const error = new Error('Test error');
error.stack = 'Error stack trace';
logger.error('Error occurred', error);
const logs = testLoggerInstance.getCapturedLogs();
expect(logs.length).toBe(1);
expect(logs[0].level).toBe('error');
expect(logs[0].msg).toBe('Error occurred');
});
it('should handle error-like objects', () => {
const errorLike = {
name: 'CustomError',
message: 'Custom error message',
code: 'ERR_CUSTOM',
};
logger.error('Custom error occurred', { error: errorLike });
const logs = testLoggerInstance.getCapturedLogs();
expect(logs.length).toBe(1);
expect(logs[0].level).toBe('error');
expect(logs[0].msg).toBe('Custom error occurred');
});
it('should handle primitive error values', () => {
logger.error('String error occurred', { error: 'Simple string error' });
const logs = testLoggerInstance.getCapturedLogs();
expect(logs.length).toBe(1);
expect(logs[0].level).toBe('error');
expect(logs[0].msg).toBe('String error occurred');
});
});
describe('Metadata Handling', () => {
it('should include metadata in logs', () => {
const metadata = {
requestId: 'req-123',
userId: 'user-456',
operation: 'data-fetch',
};
logger.info('Operation completed', metadata);
const logs = testLoggerInstance.getCapturedLogs();
expect(logs.length).toBe(1);
expect(logs[0].requestId).toBe('req-123');
expect(logs[0].userId).toBe('user-456');
expect(logs[0].operation).toBe('data-fetch');
});
it('should handle object messages', () => {
const objectMessage = {
event: 'user_action',
action: 'login',
timestamp: Date.now(),
};
logger.info(objectMessage);
const logs = testLoggerInstance.getCapturedLogs();
expect(logs.length).toBe(1);
expect(logs[0].level).toBe('info');
});
});
});

View file

@ -0,0 +1,150 @@
/**
* Logger Test Setup
*
* Setup file specific to Logger library tests.
* Provides utilities and mocks for testing logging operations.
*/
import { afterAll, afterEach, beforeAll } from 'bun:test';
import { shutdownLoggers } from '../src';
// Store original console methods
const originalConsole = {
log: console.log,
info: console.info,
warn: console.warn,
error: console.error,
debug: console.debug,
};
// Create a test logger helper
export const loggerTestHelpers = {
/**
* Mock Loki transport
*/
mockLokiTransport: () => ({
on: () => {},
write: () => {},
}),
/**
* Create a mock Hono context for middleware tests
*/ createHonoContextMock: (options: any = {}) => {
// Default path and method
const path = options.path || '/test';
const method = options.method || 'GET';
// Create request headers
const headerEntries = Object.entries(options.req?.headers || {});
const headerMap = new Map(headerEntries);
const rawHeaders = new Headers();
headerEntries.forEach(([key, value]) => rawHeaders.set(key, value as string));
// Create request with standard properties needed for middleware
const req = {
method,
url: `http://localhost${path}`,
path,
raw: {
url: `http://localhost${path}`,
method,
headers: rawHeaders,
},
query: {},
param: () => undefined,
header: (name: string) => rawHeaders.get(name.toLowerCase()),
headers: headerMap,
...options.req,
};
// Create mock response
const res = {
status: 200,
statusText: 'OK',
body: null,
headers: new Map(),
clone: function () {
return { ...this, text: async () => JSON.stringify(this.body) };
},
text: async () => JSON.stringify(res.body),
...options.res,
};
// Create context with all required Hono methods
const c: any = {
req,
env: {},
res,
header: (name: string, value: string) => {
c.res.headers.set(name.toLowerCase(), value);
return c;
},
get: (key: string) => c[key],
set: (key: string, value: any) => {
c[key] = value;
return c;
},
status: (code: number) => {
c.res.status = code;
return c;
},
json: (body: any) => {
c.res.body = body;
return c;
},
executionCtx: {
waitUntil: (fn: Function) => {
fn();
},
},
};
return c;
},
/**
* Create a mock Next function for middleware tests
*/
createNextMock: () => {
return async () => {
// Do nothing, simulate middleware completion
return;
};
},
};
// Setup environment before tests
beforeAll(() => {
// Don't let real logs through during tests
console.log = () => {};
console.info = () => {};
console.warn = () => {};
console.error = () => {};
console.debug = () => {};
// Override NODE_ENV for tests
process.env.NODE_ENV = 'test';
// Disable real logging during tests
process.env.LOG_LEVEL = 'silent';
process.env.LOG_CONSOLE = 'false';
process.env.LOG_FILE = 'false';
// Mock Loki config to prevent real connections
process.env.LOKI_HOST = '';
process.env.LOKI_URL = '';
});
// Clean up after each test
afterEach(async () => {
// Clear logger cache to prevent state pollution between tests
await shutdownLoggers();
});
// Restore everything after tests
afterAll(() => {
console.log = originalConsole.log;
console.info = originalConsole.info;
console.warn = originalConsole.warn;
console.error = originalConsole.error;
console.debug = originalConsole.debug;
});

View file

@ -0,0 +1,11 @@
{
"extends": "../../../tsconfig.json",
"compilerOptions": {
"outDir": "./dist",
"rootDir": "./src",
"composite": true
},
"include": ["src/**/*"],
"references": [
]
}

View file

@ -0,0 +1,35 @@
{
"name": "@stock-bot/types",
"version": "1.0.0",
"description": "Shared type definitions for Stock Bot platform",
"main": "dist/index.js",
"types": "dist/index.d.ts",
"type": "module",
"scripts": {
"build": "tsc",
"type-check": "tsc --noEmit",
"clean": "rimraf dist",
"test": "bun test"
},
"devDependencies": {
"@types/node": "^20.11.0",
"typescript": "^5.3.0",
"bun-types": "^1.2.15"
},
"keywords": [
"types",
"typescript",
"stock-bot"
],
"exports": {
".": {
"import": "./dist/index.js",
"require": "./dist/index.js",
"types": "./dist/index.d.ts"
}
},
"files": [
"dist",
"README.md"
]
}

View file

@ -0,0 +1,34 @@
/**
* Backtesting & Strategy Types
* Types for strategy backtesting and analysis
*/
import type { TradeExecution, TradePerformance } from './trading';
import type { PortfolioAnalysis } from './portfolio';
import type { RiskMetrics, DrawdownAnalysis } from './risk-metrics';
/**
* Backtesting results
*/
export interface BacktestResults {
/** All trades executed */
trades: TradeExecution[];
/** Equity curve over time */
equityCurve: Array<{ value: number; date: Date }>;
/** Performance metrics */
performance: PortfolioAnalysis;
/** Risk metrics */
riskMetrics: RiskMetrics;
/** Drawdown analysis */
drawdownAnalysis: DrawdownAnalysis;
/** Trade performance */
tradePerformance: TradePerformance;
/** Start date */
startDate: Date;
/** End date */
endDate: Date;
/** Initial capital */
initialCapital: number;
/** Final value */
finalValue: number;
}

View file

@ -0,0 +1,197 @@
/**
* Financial Statements Types
* Types for fundamental financial statement data
*/
/**
* Balance Sheet data structure
*/
export interface BalanceSheet {
/** Reporting period date */
date: string;
/** Fiscal period (Q1, Q2, Q3, Q4, FY) */
period: string;
/** Currency */
currency: string;
// Assets
/** Total current assets */
totalCurrentAssets: number;
/** Cash and cash equivalents */
cashAndEquivalents: number;
/** Short-term investments */
shortTermInvestments?: number;
/** Accounts receivable */
accountsReceivable: number;
/** Inventory */
inventory: number;
/** Prepaid expenses */
prepaidExpenses?: number;
/** Other current assets */
otherCurrentAssets?: number;
/** Total non-current assets */
totalNonCurrentAssets: number;
/** Property, plant & equipment (net) */
propertyPlantEquipmentNet: number;
/** Intangible assets */
intangibleAssets?: number;
/** Goodwill */
goodwill?: number;
/** Long-term investments */
longTermInvestments?: number;
/** Other non-current assets */
otherNonCurrentAssets?: number;
/** Total assets */
totalAssets: number;
// Liabilities
/** Total current liabilities */
totalCurrentLiabilities: number;
/** Accounts payable */
accountsPayable: number;
/** Short-term debt */
shortTermDebt: number;
/** Accrued liabilities */
accruedLiabilities?: number;
/** Other current liabilities */
otherCurrentLiabilities?: number;
/** Total non-current liabilities */
totalNonCurrentLiabilities: number;
/** Long-term debt */
longTermDebt: number;
/** Deferred tax liabilities */
deferredTaxLiabilities?: number;
/** Other non-current liabilities */
otherNonCurrentLiabilities?: number;
/** Total liabilities */
totalLiabilities: number;
// Equity
/** Total stockholders' equity */
totalStockholdersEquity: number;
/** Common stock */
commonStock: number;
/** Retained earnings */
retainedEarnings: number;
/** Additional paid-in capital */
additionalPaidInCapital?: number;
/** Treasury stock */
treasuryStock?: number;
/** Accumulated other comprehensive income */
accumulatedOtherComprehensiveIncome?: number;
}
/**
* Income Statement data structure
*/
export interface IncomeStatement {
/** Reporting period date */
date: string;
/** Fiscal period (Q1, Q2, Q3, Q4, FY) */
period: string;
/** Currency */
currency: string;
/** Total revenue/net sales */
totalRevenue: number;
/** Cost of goods sold */
costOfGoodsSold: number;
/** Gross profit */
grossProfit: number;
/** Operating expenses */
operatingExpenses: number;
/** Research and development */
researchAndDevelopment?: number;
/** Selling, general & administrative */
sellingGeneralAdministrative?: number;
/** Depreciation and amortization */
depreciationAmortization?: number;
/** Other operating expenses */
otherOperatingExpenses?: number;
/** Operating income */
operatingIncome: number;
/** Interest income */
interestIncome?: number;
/** Interest expense */
interestExpense?: number;
/** Other income/expense */
otherIncomeExpense?: number;
/** Income before taxes */
incomeBeforeTaxes: number;
/** Income tax expense */
incomeTaxExpense: number;
/** Net income */
netIncome: number;
/** Earnings per share (basic) */
earningsPerShareBasic: number;
/** Earnings per share (diluted) */
earningsPerShareDiluted: number;
/** Weighted average shares outstanding (basic) */
sharesOutstandingBasic: number;
/** Weighted average shares outstanding (diluted) */
sharesOutstandingDiluted: number;
}
/**
* Cash Flow Statement data structure
*/
export interface CashFlowStatement {
/** Reporting period date */
date: string;
/** Fiscal period (Q1, Q2, Q3, Q4, FY) */
period: string;
/** Currency */
currency: string;
// Operating Activities
/** Net income */
netIncome: number;
/** Depreciation and amortization */
depreciationAmortization: number;
/** Changes in working capital */
changesInWorkingCapital: number;
/** Other operating activities */
otherOperatingActivities?: number;
/** Net cash from operating activities */
netCashFromOperatingActivities: number;
// Investing Activities
/** Capital expenditures */
capitalExpenditures: number;
/** Acquisitions */
acquisitions?: number;
/** Investments */
investments?: number;
/** Other investing activities */
otherInvestingActivities?: number;
/** Net cash from investing activities */
netCashFromInvestingActivities: number;
// Financing Activities
/** Debt issuance/repayment */
debtIssuanceRepayment?: number;
/** Equity issuance/repurchase */
equityIssuanceRepurchase?: number;
/** Dividends paid */
dividendsPaid?: number;
/** Other financing activities */
otherFinancingActivities?: number;
/** Net cash from financing activities */
netCashFromFinancingActivities: number;
/** Net change in cash */
netChangeInCash: number;
/** Cash at beginning of period */
cashAtBeginningOfPeriod: number;
/** Cash at end of period */
cashAtEndOfPeriod: number;
}

View file

@ -0,0 +1,36 @@
/**
* Helper Types
* Generic utility types for making functions work across different data types
*/
/**
* Interface for data that has a close price
* Used to make functions generic across different data types
*/
export interface HasClose {
close: number;
}
/**
* Interface for data that has OHLC prices
*/
export interface HasOHLC {
open: number;
high: number;
low: number;
close: number;
}
/**
* Interface for data that has volume
*/
export interface HasVolume {
volume: number;
}
/**
* Interface for data that has timestamp
*/
export interface HasTimestamp {
timestamp: number;
}

View file

@ -0,0 +1,49 @@
/**
* Stock Bot Types Library
* Standardized type definitions for the entire trading platform
*/
// Export all market data types
export type {
LiquidityMetrics,
MarketData,
MarketRegime,
OHLCV,
OHLCVWithMetadata,
} from './market-data';
// Export all portfolio & position types
export type {
AssetAllocation,
KellyParams,
PortfolioAnalysis,
PortfolioPosition,
PositionSizeParams,
} from './portfolio';
// Export all trading & execution types
export type { TradeExecution, TradePerformance } from './trading';
// Export all risk & performance metrics
export type { DrawdownAnalysis, ReturnAnalysis, RiskMetrics } from './risk-metrics';
// Export all options pricing types
export type { GreeksCalculation, OptionParameters, OptionPricing } from './options';
// Export all financial statement types
export type { BalanceSheet, CashFlowStatement, IncomeStatement } from './financial-statements';
// Export all technical analysis types
export type {
CorrelationMatrix,
CorrelationResult,
GARCHParameters,
TechnicalIndicators,
VolatilityEstimates,
} from './technical-analysis';
// Export backtesting types
export type { BacktestResults } from './backtesting';
// Export helper types
export type { HasClose, HasOHLC, HasTimestamp, HasVolume } from './helpers';

View file

@ -0,0 +1,107 @@
/**
* Market Data Types
* Standard types for market data and pricing information
*/
/**
* Standard OHLCV (Open, High, Low, Close, Volume) data structure
* Used for candlestick/bar chart data across all market data providers
*/
export interface OHLCV {
/** Opening price for the time period */
open: number;
/** Highest price during the time period */
high: number;
/** Lowest price during the time period */
low: number;
/** Closing price for the time period */
close: number;
/** Trading volume during the time period */
volume: number;
/** Timestamp of the data point (Unix timestamp in milliseconds) */
timestamp: number;
/** Symbol/ticker for the security */
symbol: string;
/** Time interval (e.g., '1m', '5m', '1h', '1d') */
interval?: string;
}
/**
* OHLCV data with additional metadata
*/
export interface OHLCVWithMetadata extends OHLCV {
/** Source of the data (e.g., 'yahoo', 'ib', 'quotemedia') */
source: string;
/** Whether this is adjusted data */
adjusted?: boolean;
/** Number of trades during the period */
trades?: number;
/** Volume weighted average price */
vwap?: number;
}
/**
* Market data structure
*/
export interface MarketData {
/** Security symbol */
symbol: string;
/** Current price */
price: number;
/** Trading volume */
volume: number;
/** Timestamp */
timestamp: number;
/** Bid price */
bid?: number;
/** Ask price */
ask?: number;
/** Bid size */
bidSize?: number;
/** Ask size */
askSize?: number;
/** Previous close */
previousClose?: number;
/** Day's high */
high?: number;
/** Day's low */
low?: number;
/** Day's open */
open?: number;
}
/**
* Market liquidity metrics
*/
export interface LiquidityMetrics {
/** Bid-ask spread */
bidAskSpread: number;
/** Bid-ask spread percentage */
bidAskSpreadPercent: number;
/** Market depth */
marketDepth: number;
/** Average daily volume */
averageDailyVolume: number;
/** Volume rate */
volumeRate: number;
/** Price impact */
priceImpact: number;
/** Liquidity score */
liquidityScore: number;
}
/**
* Market regime classification
*/
export interface MarketRegime {
/** Current regime */
regime: 'trending' | 'ranging' | 'volatile' | 'quiet';
/** Regime strength (0-1) */
strength: number;
/** Regime duration (periods) */
duration: number;
/** Trend direction (if trending) */
trendDirection?: 'up' | 'down';
/** Volatility level */
volatilityLevel: 'low' | 'medium' | 'high';
}

View file

@ -0,0 +1,58 @@
/**
* Options Pricing Types
* Types for options pricing and derivatives
*/
/**
* Options pricing parameters
*/
export interface OptionParameters {
/** Underlying asset price */
spotPrice: number;
/** Strike price */
strikePrice: number;
/** Time to expiration (in years) */
timeToExpiry: number;
/** Risk-free interest rate */
riskFreeRate: number;
/** Volatility */
volatility: number;
/** Dividend yield */
dividendYield?: number;
/** Option type */
optionType: 'call' | 'put';
}
/**
* Option pricing results
*/
export interface OptionPricing {
/** Call option price */
callPrice: number;
/** Put option price */
putPrice: number;
/** Call intrinsic value */
callIntrinsic: number;
/** Put intrinsic value */
putIntrinsic: number;
/** Call time value */
callTimeValue: number;
/** Put time value */
putTimeValue: number;
}
/**
* Option Greeks calculation
*/
export interface GreeksCalculation {
/** Delta - price sensitivity */
delta: number;
/** Gamma - delta sensitivity */
gamma: number;
/** Theta - time decay */
theta: number;
/** Vega - volatility sensitivity */
vega: number;
/** Rho - interest rate sensitivity */
rho: number;
}

View file

@ -0,0 +1,108 @@
/**
* Portfolio & Position Types
* Types for portfolio management and position tracking
*/
/**
* Individual portfolio position
*/
export interface PortfolioPosition {
/** Security symbol/ticker */
symbol: string;
/** Number of shares/units */
shares: number;
/** Average entry price */
averagePrice: number;
/** Current market price */
currentPrice: number;
/** Current market value */
marketValue: number;
/** Unrealized P&L */
unrealizedPnL: number;
/** Unrealized P&L percentage */
unrealizedPnLPercent: number;
/** Weight in portfolio */
weight: number;
/** Security type (stock, bond, option, etc.) */
securityType?: string;
/** Currency */
currency?: string;
}
/**
* Portfolio analysis metrics
*/
export interface PortfolioAnalysis {
/** Total portfolio value */
totalValue: number;
/** Total cash balance */
cash: number;
/** Total invested amount */
invested: number;
/** Total unrealized P&L */
unrealizedPnL: number;
/** Total unrealized P&L percentage */
unrealizedPnLPercent: number;
/** Total return */
totalReturn: number;
/** Total return percentage */
totalReturnPercent: number;
/** Annualized return */
annualizedReturn: number;
/** Portfolio volatility (annualized) */
volatility: number;
/** Sharpe ratio */
sharpeRatio: number;
/** Maximum drawdown */
maxDrawdown: number;
/** Number of positions */
positionCount: number;
/** Portfolio concentration (largest position weight) */
concentration: number;
}
/**
* Asset allocation breakdown
*/
export interface AssetAllocation {
/** Asset class or category */
category: string;
/** Allocation value */
value: number;
/** Allocation percentage */
percentage: number;
/** Target allocation percentage */
target?: number;
/** Deviation from target */
deviation?: number;
}
/**
* Position sizing parameters
*/
export interface PositionSizeParams {
/** Account size */
accountSize: number;
/** Risk percentage per trade */
riskPercent: number;
/** Entry price */
entryPrice: number;
/** Stop loss price */
stopPrice: number;
/** Commission per share */
commission?: number;
}
/**
* Kelly Criterion parameters
*/
export interface KellyParams {
/** Win rate (0-1) */
winRate: number;
/** Average winning trade */
averageWin: number;
/** Average losing trade */
averageLoss: number;
/** Risk-free rate */
riskFreeRate?: number;
}

View file

@ -0,0 +1,86 @@
/**
* Risk & Performance Metrics Types
* Types for risk measurement and performance analysis
*/
/**
* Comprehensive risk metrics
*/
export interface RiskMetrics {
/** Value at Risk 95% */
var95: number;
/** Value at Risk 99% */
var99: number;
/** Conditional VaR 95% */
cvar95: number;
/** Maximum drawdown */
maxDrawdown: number;
/** Volatility (annualized) */
volatility: number;
/** Downside deviation */
downside_deviation: number;
/** Calmar ratio */
calmar_ratio: number;
/** Sortino ratio */
sortino_ratio: number;
/** Beta (vs benchmark) */
beta: number;
/** Alpha (vs benchmark) */
alpha: number;
/** Sharpe ratio */
sharpeRatio: number;
/** Treynor ratio */
treynorRatio: number;
/** Tracking error */
trackingError: number;
/** Information ratio */
informationRatio: number;
}
/**
* Drawdown analysis
*/
export interface DrawdownAnalysis {
/** Maximum drawdown percentage */
maxDrawdown: number;
/** Maximum drawdown duration (days) */
maxDrawdownDuration: number;
/** Current drawdown percentage */
currentDrawdown: number;
/** Current drawdown duration (days) */
currentDrawdownDuration: number;
/** Average drawdown percentage */
averageDrawdown: number;
/** Average drawdown duration (days) */
averageDrawdownDuration: number;
/** Number of drawdown periods */
drawdownPeriods: number;
/** Recovery factor */
recoveryFactor: number;
}
/**
* Return analysis statistics
*/
export interface ReturnAnalysis {
/** Total return */
totalReturn: number;
/** Annualized return */
annualizedReturn: number;
/** Volatility (annualized) */
volatility: number;
/** Skewness */
skewness: number;
/** Kurtosis */
kurtosis: number;
/** Best period return */
bestPeriod: number;
/** Worst period return */
worstPeriod: number;
/** Positive periods percentage */
positivePeriods: number;
/** Average positive return */
averagePositiveReturn: number;
/** Average negative return */
averageNegativeReturn: number;
}

View file

@ -0,0 +1,109 @@
/**
* Technical Analysis Types
* Types for technical indicators and market analysis
*/
/**
* Technical indicators collection
*/
export interface TechnicalIndicators {
/** Simple Moving Average */
sma: number[];
/** Exponential Moving Average */
ema: number[];
/** Relative Strength Index */
rsi: number[];
/** MACD indicator */
macd: {
macd: number[];
signal: number[];
histogram: number[];
};
/** Bollinger Bands */
bollinger: {
upper: number[];
middle: number[];
lower: number[];
};
/** Average True Range */
atr: number[];
/** Stochastic Oscillator */
stochastic: {
k: number[];
d: number[];
};
/** Williams %R */
williams_r: number[];
/** Commodity Channel Index */
cci: number[];
/** Momentum */
momentum: number[];
/** Rate of Change */
roc: number[];
}
/**
* Correlation analysis result
*/
export interface CorrelationResult {
/** Correlation coefficient */
correlation: number;
/** P-value for statistical significance */
pValue: number;
/** Is statistically significant */
isSignificant: boolean;
/** Confidence interval */
confidenceInterval: [number, number];
/** Sample size */
sampleSize: number;
}
/**
* Correlation matrix
*/
export interface CorrelationMatrix {
/** Asset symbols */
symbols: string[];
/** Correlation matrix values */
matrix: number[][];
/** Eigenvalues */
eigenvalues: number[];
/** Condition number */
conditionNumber: number;
/** Is positive definite */
isPositiveDefinite: boolean;
}
/**
* Volatility estimates using different models
*/
export interface VolatilityEstimates {
/** Close-to-close volatility */
closeToClose: number;
/** Parkinson volatility */
parkinson: number;
/** Garman-Klass volatility */
garmanKlass: number;
/** Rogers-Satchell volatility */
rogersSatchell: number;
/** Yang-Zhang volatility */
yangZhang: number;
}
/**
* GARCH model parameters
*/
export interface GARCHParameters {
/** Alpha parameter */
alpha: number;
/** Beta parameter */
beta: number;
/** Omega parameter */
omega: number;
/** Log likelihood */
logLikelihood: number;
/** AIC (Akaike Information Criterion) */
aic: number;
/** BIC (Bayesian Information Criterion) */
bic: number;
}

View file

@ -0,0 +1,62 @@
/**
* Trading & Execution Types
* Types for trade execution and performance analysis
*/
/**
* Trade execution record
*/
export interface TradeExecution {
/** Trade ID */
id?: string;
/** Security symbol */
symbol: string;
/** Trade type */
type: 'buy' | 'sell' | 'short' | 'cover';
/** Number of shares/units */
quantity: number;
/** Execution price */
price: number;
/** Total trade value */
value: number;
/** Commission/fees */
commission?: number;
/** Execution timestamp */
timestamp: number;
/** Order ID reference */
orderId?: string;
/** Execution venue */
venue?: string;
}
/**
* Trade performance analysis
*/
export interface TradePerformance {
/** Total number of trades */
totalTrades: number;
/** Number of winning trades */
winningTrades: number;
/** Number of losing trades */
losingTrades: number;
/** Win rate percentage */
winRate: number;
/** Average winning trade */
averageWin: number;
/** Average losing trade */
averageLoss: number;
/** Largest winning trade */
largestWin: number;
/** Largest losing trade */
largestLoss: number;
/** Profit factor (gross profit / gross loss) */
profitFactor: number;
/** Mathematical expectancy */
expectancy: number;
/** Total gross profit */
grossProfit: number;
/** Total gross loss */
grossLoss: number;
/** Net profit */
netProfit: number;
}

View file

@ -0,0 +1,11 @@
{
"extends": "../../../tsconfig.json",
"compilerOptions": {
"outDir": "./dist",
"rootDir": "./src",
"composite": true
},
"include": ["src/**/*"],
"references": [
]
}