test
This commit is contained in:
parent
167a586257
commit
1f5dac28c7
4 changed files with 198 additions and 1 deletion
|
|
@@ -14,6 +14,7 @@ import { WebShareHandler } from './webshare/webshare.handler';
|
||||||
import { TradingViewHandler } from './tradingview/tradingview.handler';
|
import { TradingViewHandler } from './tradingview/tradingview.handler';
|
||||||
import { TeHandler } from './te/te.handler';
|
import { TeHandler } from './te/te.handler';
|
||||||
import { EodHandler } from './eod/eod.handler';
|
import { EodHandler } from './eod/eod.handler';
|
||||||
|
import { TtHandler } from './tt/tt.handler';
|
||||||
import { createEODOperationRegistry } from './eod/shared';
|
import { createEODOperationRegistry } from './eod/shared';
|
||||||
import { createQMOperationRegistry } from './qm/shared/operation-provider';
|
import { createQMOperationRegistry } from './qm/shared/operation-provider';
|
||||||
|
|
||||||
|
|
@@ -62,7 +63,7 @@ export async function initializeAllHandlers(serviceContainer: IServiceContainer)
|
||||||
// The HandlerScanner in the DI container will handle the actual registration
|
// The HandlerScanner in the DI container will handle the actual registration
|
||||||
// We just need to ensure handlers are imported so their decorators run
|
// We just need to ensure handlers are imported so their decorators run
|
||||||
|
|
||||||
const handlers = [CeoHandler, IbHandler, QMHandler, WebShareHandler, TradingViewHandler, TeHandler, EodHandler];
|
const handlers = [CeoHandler, IbHandler, QMHandler, WebShareHandler, TradingViewHandler, TeHandler, EodHandler, TtHandler];
|
||||||
|
|
||||||
logger.info('Handler imports loaded', {
|
logger.info('Handler imports loaded', {
|
||||||
count: handlers.length,
|
count: handlers.length,
|
||||||
|
|
|
||||||
|
|
@@ -0,0 +1,147 @@
|
||||||
|
import * as cheerio from 'cheerio';
|
||||||
|
import { getRandomUserAgent } from '@stock-bot/utils';
|
||||||
|
import { existsSync, mkdirSync, rmSync, readdirSync, readFileSync } from 'fs';
|
||||||
|
import { execSync } from 'child_process';
|
||||||
|
import { join } from 'path';
|
||||||
|
import type { TtHandler } from '../tt.handler';
|
||||||
|
|
||||||
|
const BASE_URL = 'https://www.turtletrader.com';
|
||||||
|
const TMP_DIR = '/tmp/tt';
|
||||||
|
|
||||||
|
const MONTH_MAP: Record<string, string> = {
|
||||||
|
F: '01', G: '02', H: '03', J: '04', K: '05', M: '06',
|
||||||
|
N: '07', Q: '08', U: '09', V: '10', X: '11', Z: '12',
|
||||||
|
};
|
||||||
|
|
||||||
|
function parseDate(yymmdd: string): string {
|
||||||
|
const yy = parseInt(yymmdd.slice(0, 2));
|
||||||
|
const mm = yymmdd.slice(2, 4);
|
||||||
|
const dd = yymmdd.slice(4, 6);
|
||||||
|
const year = yy >= 50 ? 1900 + yy : 2000 + yy;
|
||||||
|
return `${year}-${mm}-${dd}`;
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function importData(this: TtHandler): Promise<{ totalRecords: number }> {
|
||||||
|
const { logger, mongodb, http } = this;
|
||||||
|
|
||||||
|
// 1. Fetch page and extract zip links
|
||||||
|
const response = await http.get(`${BASE_URL}/hpd/`, {
|
||||||
|
headers: { 'User-Agent': getRandomUserAgent() },
|
||||||
|
});
|
||||||
|
|
||||||
|
if (!response.ok) {
|
||||||
|
throw new Error(`Failed to fetch TurtleTrader page: ${response.status}`);
|
||||||
|
}
|
||||||
|
|
||||||
|
const html = await response.text();
|
||||||
|
const $ = cheerio.load(html);
|
||||||
|
|
||||||
|
const zipLinks: Array<{ href: string; name: string }> = [];
|
||||||
|
$('a[href$=".zip"]').each((_, el) => {
|
||||||
|
const href = $(el).attr('href') || '';
|
||||||
|
const name = $(el).text().trim();
|
||||||
|
if (href) zipLinks.push({ href, name });
|
||||||
|
});
|
||||||
|
|
||||||
|
if (!zipLinks.length) {
|
||||||
|
throw new Error('No zip links found on page');
|
||||||
|
}
|
||||||
|
|
||||||
|
logger.info(`Found ${zipLinks.length} zip files to download`);
|
||||||
|
|
||||||
|
// 2. Setup temp dir
|
||||||
|
if (existsSync(TMP_DIR)) {
|
||||||
|
rmSync(TMP_DIR, { recursive: true });
|
||||||
|
}
|
||||||
|
mkdirSync(TMP_DIR, { recursive: true });
|
||||||
|
|
||||||
|
let totalRecords = 0;
|
||||||
|
|
||||||
|
try {
|
||||||
|
for (const { href, name } of zipLinks) {
|
||||||
|
const zipUrl = href.startsWith('http') ? href : `${BASE_URL}${href}`;
|
||||||
|
const symbol = href.split('/').pop()!.replace('.zip', '').toUpperCase();
|
||||||
|
const zipPath = join(TMP_DIR, `${symbol}.zip`);
|
||||||
|
const extractDir = join(TMP_DIR, symbol);
|
||||||
|
|
||||||
|
// Download
|
||||||
|
logger.info(`Downloading ${name} (${symbol})`);
|
||||||
|
const zipResp = await http.get(zipUrl, {
|
||||||
|
headers: { 'User-Agent': getRandomUserAgent() },
|
||||||
|
});
|
||||||
|
|
||||||
|
if (!zipResp.ok) {
|
||||||
|
logger.error(`Failed to download ${zipUrl}: ${zipResp.status}`);
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
const buf = Buffer.from(await zipResp.arrayBuffer());
|
||||||
|
require('fs').writeFileSync(zipPath, buf);
|
||||||
|
|
||||||
|
// Extract
|
||||||
|
mkdirSync(extractDir, { recursive: true });
|
||||||
|
execSync(`unzip -o "${zipPath}" -d "${extractDir}"`, { stdio: 'pipe' });
|
||||||
|
|
||||||
|
// Parse each txt file
|
||||||
|
const files = readdirSync(extractDir).filter(f => f.endsWith('.txt'));
|
||||||
|
const records: any[] = [];
|
||||||
|
|
||||||
|
for (const file of files) {
|
||||||
|
const contract = file.replace('.txt', '');
|
||||||
|
// Parse contract code: e.g. CL83M → symbol=CL, year=83, month=M
|
||||||
|
const match = contract.match(/^([A-Z]+)(\d{2})([A-Z])$/);
|
||||||
|
if (!match) {
|
||||||
|
logger.warn(`Skipping unrecognized contract format: ${contract}`);
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
const [, sym, year, month] = match;
|
||||||
|
const filePath = join(extractDir, file);
|
||||||
|
const content = readFileSync(filePath, 'utf-8');
|
||||||
|
|
||||||
|
for (const line of content.split('\n')) {
|
||||||
|
const trimmed = line.trim();
|
||||||
|
if (!trimmed) continue;
|
||||||
|
|
||||||
|
const parts = trimmed.split(',');
|
||||||
|
if (parts.length < 7) continue;
|
||||||
|
|
||||||
|
const [dateStr, o, h, l, c, v, oi] = parts;
|
||||||
|
|
||||||
|
records.push({
|
||||||
|
contract,
|
||||||
|
symbol: sym,
|
||||||
|
name,
|
||||||
|
month,
|
||||||
|
year,
|
||||||
|
date: parseDate(dateStr),
|
||||||
|
o: parseFloat(o),
|
||||||
|
h: parseFloat(h),
|
||||||
|
l: parseFloat(l),
|
||||||
|
c: parseFloat(c),
|
||||||
|
v: parseInt(v),
|
||||||
|
oi: parseInt(oi),
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (records.length) {
|
||||||
|
await mongodb?.batchUpsert('ttFutures', records, ['contract', 'date']);
|
||||||
|
totalRecords += records.length;
|
||||||
|
logger.info(`Imported ${records.length} records for ${name} (${symbol}, ${files.length} contracts)`);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Clean up this zip
|
||||||
|
rmSync(zipPath, { force: true });
|
||||||
|
rmSync(extractDir, { recursive: true, force: true });
|
||||||
|
}
|
||||||
|
} finally {
|
||||||
|
// Clean up temp dir
|
||||||
|
if (existsSync(TMP_DIR)) {
|
||||||
|
rmSync(TMP_DIR, { recursive: true, force: true });
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
logger.info(`TurtleTrader import complete: ${totalRecords} total records`);
|
||||||
|
return { totalRecords };
|
||||||
|
}
|
||||||
|
|
@@ -0,0 +1 @@
|
||||||
|
export * from './import.action';
|
||||||
48
apps/stock/data-ingestion/src/handlers/tt/tt.handler.ts
Normal file
48
apps/stock/data-ingestion/src/handlers/tt/tt.handler.ts
Normal file
|
|
@@ -0,0 +1,48 @@
|
||||||
|
import {
|
||||||
|
BaseHandler,
|
||||||
|
Handler,
|
||||||
|
ScheduledOperation,
|
||||||
|
} from '@stock-bot/handlers';
|
||||||
|
import type { DataIngestionServices } from '../../types';
|
||||||
|
import { importData } from './actions';
|
||||||
|
|
||||||
|
@Handler('tt')
|
||||||
|
export class TtHandler extends BaseHandler<DataIngestionServices> {
|
||||||
|
constructor(services: any) {
|
||||||
|
super(services);
|
||||||
|
}
|
||||||
|
|
||||||
|
async onInit(): Promise<void> {
|
||||||
|
if (!this.mongodb) return;
|
||||||
|
|
||||||
|
const indexes = [
|
||||||
|
{
|
||||||
|
indexSpec: { contract: 1, date: 1 },
|
||||||
|
options: { name: 'contract_date_unique_idx', unique: true, background: true },
|
||||||
|
},
|
||||||
|
{
|
||||||
|
indexSpec: { symbol: 1, date: 1 },
|
||||||
|
options: { name: 'symbol_date_idx', background: true },
|
||||||
|
},
|
||||||
|
{
|
||||||
|
indexSpec: { date: 1 },
|
||||||
|
options: { name: 'date_idx', background: true },
|
||||||
|
},
|
||||||
|
];
|
||||||
|
|
||||||
|
for (const index of indexes) {
|
||||||
|
try {
|
||||||
|
await this.mongodb.createIndex('ttFutures', index.indexSpec, index.options);
|
||||||
|
} catch (error) {
|
||||||
|
this.logger.debug(`ttFutures index ${index.options.name} may already exist:`, error);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
@ScheduledOperation('tt-import', '0 0 * * 0', {
|
||||||
|
priority: 5,
|
||||||
|
description: 'Import TurtleTrader futures data',
|
||||||
|
immediately: true,
|
||||||
|
})
|
||||||
|
importData = importData;
|
||||||
|
}
|
||||||
Loading…
Add table
Add a link
Reference in a new issue