huge refactor with a million things to make the code much more manageable and make it easier to create new services #3

Merged
boki merged 70 commits from di-refactor into master 2025-06-24 01:43:57 +00:00
69 changed files with 41 additions and 2956 deletions
Showing only changes of commit be6afef832 - Show all commits

View file

@ -40,7 +40,7 @@ This file provides guidance to Claude Code (claude.ai/code) when working with co
**Microservices Architecture** with shared libraries and multi-database storage:
### Core Services (`apps/`)
- **data-service** - Market data ingestion from multiple providers (Yahoo, QuoteMedia, IB)
- **data-ingestion** - Market data ingestion from multiple providers (Yahoo, QuoteMedia, IB)
- **processing-service** - Data cleaning, validation, and technical indicators
- **strategy-service** - Trading strategies and backtesting (multi-mode: live, event-driven, vectorized, hybrid)
- **execution-service** - Order management and risk controls

View file

@ -1,6 +1,6 @@
{
"service": {
"name": "data-service",
"name": "data-ingestion",
"port": 2001,
"host": "0.0.0.0",
"healthCheckPath": "/health",

View file

@ -1,7 +1,7 @@
{
"name": "@stock-bot/data-service",
"name": "@stock-bot/data-ingestion",
"version": "1.0.0",
"description": "Combined data ingestion and historical data service",
"description": "Market data ingestion from multiple providers with proxy support and rate limiting",
"main": "dist/index.js",
"type": "module",
"scripts": {

View file

@ -29,7 +29,7 @@ if (config.log) {
}
// Create logger AFTER config is set
const logger = getLogger('data-service');
const logger = getLogger('data-ingestion');
const app = new Hono();

View file

@ -6,7 +6,7 @@ const health = new Hono();
health.get('/', c => {
return c.json({
status: 'healthy',
service: 'data-service',
service: 'data-ingestion',
timestamp: new Date().toISOString(),
});
});

View file

@ -1,6 +1,6 @@
{
"service": {
"name": "data-sync-service",
"name": "data-pipeline",
"port": 3005,
"host": "0.0.0.0",
"healthCheckPath": "/health",

View file

@ -1,7 +1,7 @@
{
"name": "@stock-bot/data-sync-service",
"name": "@stock-bot/data-pipeline",
"version": "1.0.0",
"description": "Sync service from MongoDB raw data to PostgreSQL master records",
"description": "Data processing pipeline for syncing and transforming raw data to normalized records",
"main": "dist/index.js",
"type": "module",
"scripts": {

View file

@ -9,7 +9,7 @@ import { connectPostgreSQL } from '@stock-bot/postgres-client';
import { QueueManager, type QueueManagerConfig } from '@stock-bot/queue';
import { Shutdown } from '@stock-bot/shutdown';
// Local imports
import { healthRoutes, enhancedSyncRoutes, statsRoutes, syncRoutes } from './routes';
import { enhancedSyncRoutes, healthRoutes, statsRoutes, syncRoutes } from './routes';
const config = initializeServiceConfig();
console.log('Data Sync Service Configuration:', JSON.stringify(config, null, 2));
@ -28,7 +28,7 @@ if (config.log) {
}
// Create logger AFTER config is set
const logger = getLogger('data-sync-service');
const logger = getLogger('data-pipeline');
const app = new Hono();

View file

@ -6,9 +6,9 @@ const health = new Hono();
health.get('/', c => {
return c.json({
status: 'healthy',
service: 'data-sync-service',
service: 'data-pipeline',
timestamp: new Date().toISOString(),
});
});
export { health as healthRoutes };
export { health as healthRoutes };

2914
bun.lock

File diff suppressed because it is too large Load diff

View file

@ -173,4 +173,4 @@ The functional approach automatically handles cache initialization. No need to m
## Need Help?
Check the examples in `apps/data-service/src/examples/batch-processing-examples.ts` for more detailed usage patterns.
Check the examples in `apps/data-ingestion/src/examples/batch-processing-examples.ts` for more detailed usage patterns.

View file

@ -1,18 +1,17 @@
import { describe, test, expect, beforeEach, afterEach } from 'bun:test';
import { afterEach, beforeEach, describe, expect, test } from 'bun:test';
import { existsSync, mkdirSync, rmSync, writeFileSync } from 'fs';
import { join } from 'path';
import { mkdirSync, writeFileSync, rmSync, existsSync } from 'fs';
import {
initializeConfig,
initializeServiceConfig,
getConfig,
import {
getConfig,
getDatabaseConfig,
getServiceConfig,
getLoggingConfig,
getProviderConfig,
getServiceConfig,
initializeServiceConfig,
isDevelopment,
isProduction,
isTest,
resetConfig
resetConfig
} from '../src/index';
const TEST_DIR = join(__dirname, 'real-usage-tests');
@ -44,15 +43,15 @@ describe('Real Usage Scenarios', () => {
}
});
test('should work like real data-service usage', async () => {
const dataServiceDir = join(TEST_DIR, 'apps', 'data-service');
test('should work like real data-ingestion usage', async () => {
const dataServiceDir = join(TEST_DIR, 'apps', 'data-ingestion');
process.chdir(dataServiceDir);
// Simulate how data-service would initialize config
// Simulate how data-ingestion would initialize config
const config = await initializeServiceConfig();
// Test typical data-service config access patterns
expect(config.app.name).toBe('data-service');
// Test typical data-ingestion config access patterns
expect(config.app.name).toBe('data-ingestion');
expect(config.service.port).toBe(3001);
// Test database config access
@ -108,7 +107,7 @@ describe('Real Usage Scenarios', () => {
test('should handle production environment correctly', async () => {
process.env.NODE_ENV = 'production';
const dataServiceDir = join(TEST_DIR, 'apps', 'data-service');
const dataServiceDir = join(TEST_DIR, 'apps', 'data-ingestion');
process.chdir(dataServiceDir);
resetConfig();
@ -124,7 +123,7 @@ describe('Real Usage Scenarios', () => {
test('should handle test environment correctly', async () => {
process.env.NODE_ENV = 'test';
const dataServiceDir = join(TEST_DIR, 'apps', 'data-service');
const dataServiceDir = join(TEST_DIR, 'apps', 'data-ingestion');
process.chdir(dataServiceDir);
resetConfig();
@ -144,7 +143,7 @@ describe('Real Usage Scenarios', () => {
process.env.EOD_API_KEY = 'prod-eod-key';
process.env.SERVICE_PORT = '8080';
const dataServiceDir = join(TEST_ROOT, 'apps', 'data-service');
const dataServiceDir = join(TEST_ROOT, 'apps', 'data-ingestion');
process.chdir(dataServiceDir);
resetConfig();
@ -163,7 +162,7 @@ describe('Real Usage Scenarios', () => {
});
test('should handle missing provider configurations gracefully', async () => {
const dataServiceDir = join(TEST_DIR, 'apps', 'data-service');
const dataServiceDir = join(TEST_DIR, 'apps', 'data-ingestion');
process.chdir(dataServiceDir);
const config = await initializeServiceConfig();
@ -178,7 +177,7 @@ describe('Real Usage Scenarios', () => {
});
test('should support dynamic config access patterns', async () => {
const dataServiceDir = join(TEST_DIR, 'apps', 'data-service');
const dataServiceDir = join(TEST_DIR, 'apps', 'data-ingestion');
process.chdir(dataServiceDir);
const config = await initializeServiceConfig();
@ -187,7 +186,7 @@ describe('Real Usage Scenarios', () => {
const configManager = (await import('../src/index')).getConfigManager();
// Direct path access
expect(configManager.getValue('app.name')).toBe('data-service');
expect(configManager.getValue('app.name')).toBe('data-ingestion');
expect(configManager.getValue('service.port')).toBe(3001);
// Check if paths exist
@ -201,7 +200,7 @@ describe('Real Usage Scenarios', () => {
});
test('should handle config updates at runtime', async () => {
const dataServiceDir = join(TEST_DIR, 'apps', 'data-service');
const dataServiceDir = join(TEST_DIR, 'apps', 'data-ingestion');
process.chdir(dataServiceDir);
await initializeServiceConfig();
@ -218,18 +217,18 @@ describe('Real Usage Scenarios', () => {
expect(updatedConfig.service.port).toBe(9999);
// Other values should be preserved
expect(updatedConfig.app.name).toBe('data-service');
expect(updatedConfig.app.name).toBe('data-ingestion');
});
test('should work across multiple service initializations', async () => {
// Simulate multiple services in the same process (like tests)
// First service
const dataServiceDir = join(TEST_DIR, 'apps', 'data-service');
const dataServiceDir = join(TEST_DIR, 'apps', 'data-ingestion');
process.chdir(dataServiceDir);
let config = await initializeServiceConfig();
expect(config.app.name).toBe('data-service');
expect(config.app.name).toBe('data-ingestion');
// Reset and switch to another service
resetConfig();
@ -249,7 +248,7 @@ const TEST_ROOT = TEST_DIR;
function setupRealUsageScenarios() {
const scenarios = {
root: TEST_ROOT,
dataService: join(TEST_ROOT, 'apps', 'data-service'),
dataService: join(TEST_ROOT, 'apps', 'data-ingestion'),
webApi: join(TEST_ROOT, 'apps', 'web-api'),
cacheLib: join(TEST_ROOT, 'libs', 'cache'),
};
@ -344,10 +343,10 @@ function setupRealUsageScenarios() {
join(scenarios.dataService, 'config', 'development.json'),
JSON.stringify({
app: {
name: 'data-service'
name: 'data-ingestion'
},
service: {
name: 'data-service',
name: 'data-ingestion',
port: 3001,
workers: 2
}

View file

@ -28,7 +28,7 @@ bun add @stock-bot/event-bus
import { createEventBus, TradingEventType } from '@stock-bot/event-bus';
const eventBus = createEventBus({
serviceName: 'data-service',
serviceName: 'data-ingestion',
redisConfig: {
host: 'localhost',
port: 6379,

View file

@ -52,7 +52,7 @@
"infra:reset": "docker-compose down -v && docker-compose up -d dragonfly postgres questdb mongodb",
"dev:full": "npm run infra:up && npm run docker:admin && turbo run dev",
"dev:clean": "npm run infra:reset && npm run dev:full",
"proxy": "bun run ./apps/data-service/src/proxy-demo.ts"
"proxy": "bun run ./apps/data-ingestion/src/proxy-demo.ts"
},
"workspaces": [
"libs/*",