adding backtest table / pages
This commit is contained in:
parent
38a6e73ad5
commit
a876f3c35b
19 changed files with 1058 additions and 69 deletions
70
apps/stock/web-api/src/migrations/migration-runner.ts
Normal file
70
apps/stock/web-api/src/migrations/migration-runner.ts
Normal file
|
|
@ -0,0 +1,70 @@
|
|||
import { readFileSync, readdirSync } from 'fs';
|
||||
import { join } from 'path';
|
||||
import type { IServiceContainer } from '@stock-bot/handlers';
|
||||
import { getLogger } from '@stock-bot/logger';
|
||||
|
||||
// Module-scoped logger; all migration progress/errors are tagged 'migration-runner'.
const logger = getLogger('migration-runner');
|
||||
|
||||
export async function runMigrations(container: IServiceContainer): Promise<void> {
|
||||
logger.info('Migration runner called');
|
||||
logger.info('Container type:', typeof container);
|
||||
logger.info('Container postgres available:', !!container.postgres);
|
||||
|
||||
if (!container.postgres) {
|
||||
logger.warn('PostgreSQL not available, skipping migrations');
|
||||
logger.info('Container keys:', Object.keys(container));
|
||||
return;
|
||||
}
|
||||
|
||||
try {
|
||||
// Create migrations table if it doesn't exist
|
||||
await container.postgres.query(`
|
||||
CREATE TABLE IF NOT EXISTS migrations (
|
||||
id SERIAL PRIMARY KEY,
|
||||
filename VARCHAR(255) NOT NULL UNIQUE,
|
||||
executed_at TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT NOW()
|
||||
)
|
||||
`);
|
||||
|
||||
// Get list of migration files from database/postgres/init
|
||||
const migrationsDir = join(process.cwd(), 'database', 'postgres', 'init');
|
||||
logger.info('Looking for migrations in:', migrationsDir);
|
||||
|
||||
const files = readdirSync(migrationsDir)
|
||||
.filter(f => f.endsWith('.sql') && f.startsWith('001_')) // Only run our backtest migration for now
|
||||
.sort();
|
||||
|
||||
logger.info('Found migration files:', files);
|
||||
|
||||
for (const file of files) {
|
||||
// Check if migration has already been run
|
||||
const result = await container.postgres.query(
|
||||
'SELECT 1 FROM migrations WHERE filename = $1',
|
||||
[file]
|
||||
);
|
||||
|
||||
if (result.rows.length === 0) {
|
||||
logger.info(`Running migration: ${file}`);
|
||||
|
||||
// Read and execute migration
|
||||
const sql = readFileSync(join(migrationsDir, file), 'utf8');
|
||||
await container.postgres.query(sql);
|
||||
|
||||
// Record migration as executed
|
||||
await container.postgres.query(
|
||||
'INSERT INTO migrations (filename) VALUES ($1)',
|
||||
[file]
|
||||
);
|
||||
|
||||
logger.info(`Migration completed: ${file}`);
|
||||
} else {
|
||||
logger.debug(`Migration already executed: ${file}`);
|
||||
}
|
||||
}
|
||||
|
||||
logger.info('All migrations completed successfully');
|
||||
} catch (error) {
|
||||
logger.error('Migration failed', { error });
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
Loading…
Add table
Add a link
Reference in a new issue