adding data-services
This commit is contained in:
parent
e3bfd05b90
commit
405b818c86
139 changed files with 55943 additions and 416 deletions
|
|
@ -0,0 +1,220 @@
|
|||
import { Context } from 'hono';
|
||||
import { FeatureComputationService } from '../services/FeatureComputationService';
|
||||
import { Logger } from '@stock-bot/utils';
|
||||
import {
|
||||
ComputationJob,
|
||||
CreateComputationJobRequest,
|
||||
UpdateComputationJobRequest
|
||||
} from '../types/FeatureStore';
|
||||
|
||||
export class ComputationController {
|
||||
constructor(
|
||||
private computationService: FeatureComputationService,
|
||||
private logger: Logger
|
||||
) {}
|
||||
|
||||
async createComputationJob(c: Context) {
|
||||
try {
|
||||
const request: CreateComputationJobRequest = await c.req.json();
|
||||
|
||||
const job = await this.computationService.createComputationJob(request);
|
||||
|
||||
this.logger.info('Computation job created', { jobId: job.id });
|
||||
|
||||
return c.json({
|
||||
success: true,
|
||||
data: job
|
||||
}, 201);
|
||||
} catch (error) {
|
||||
this.logger.error('Failed to create computation job', { error });
|
||||
return c.json({
|
||||
success: false,
|
||||
error: error instanceof Error ? error.message : 'Unknown error'
|
||||
}, 500);
|
||||
}
|
||||
}
|
||||
|
||||
async getComputationJob(c: Context) {
|
||||
try {
|
||||
const jobId = c.req.param('id');
|
||||
|
||||
const job = await this.computationService.getComputationJob(jobId);
|
||||
|
||||
if (!job) {
|
||||
return c.json({
|
||||
success: false,
|
||||
error: 'Computation job not found'
|
||||
}, 404);
|
||||
}
|
||||
|
||||
return c.json({
|
||||
success: true,
|
||||
data: job
|
||||
});
|
||||
} catch (error) {
|
||||
this.logger.error('Failed to get computation job', { error });
|
||||
return c.json({
|
||||
success: false,
|
||||
error: error instanceof Error ? error.message : 'Unknown error'
|
||||
}, 500);
|
||||
}
|
||||
}
|
||||
|
||||
async updateComputationJob(c: Context) {
|
||||
try {
|
||||
const jobId = c.req.param('id');
|
||||
const request: UpdateComputationJobRequest = await c.req.json();
|
||||
|
||||
const job = await this.computationService.updateComputationJob(jobId, request);
|
||||
|
||||
if (!job) {
|
||||
return c.json({
|
||||
success: false,
|
||||
error: 'Computation job not found'
|
||||
}, 404);
|
||||
}
|
||||
|
||||
this.logger.info('Computation job updated', { jobId });
|
||||
|
||||
return c.json({
|
||||
success: true,
|
||||
data: job
|
||||
});
|
||||
} catch (error) {
|
||||
this.logger.error('Failed to update computation job', { error });
|
||||
return c.json({
|
||||
success: false,
|
||||
error: error instanceof Error ? error.message : 'Unknown error'
|
||||
}, 500);
|
||||
}
|
||||
}
|
||||
|
||||
async deleteComputationJob(c: Context) {
|
||||
try {
|
||||
const jobId = c.req.param('id');
|
||||
|
||||
await this.computationService.deleteComputationJob(jobId);
|
||||
|
||||
this.logger.info('Computation job deleted', { jobId });
|
||||
|
||||
return c.json({
|
||||
success: true,
|
||||
message: 'Computation job deleted successfully'
|
||||
});
|
||||
} catch (error) {
|
||||
this.logger.error('Failed to delete computation job', { error });
|
||||
return c.json({
|
||||
success: false,
|
||||
error: error instanceof Error ? error.message : 'Unknown error'
|
||||
}, 500);
|
||||
}
|
||||
}
|
||||
|
||||
async listComputationJobs(c: Context) {
|
||||
try {
|
||||
const featureGroupId = c.req.query('featureGroupId');
|
||||
const status = c.req.query('status');
|
||||
|
||||
const jobs = await this.computationService.listComputationJobs({
|
||||
featureGroupId,
|
||||
status: status as any
|
||||
});
|
||||
|
||||
return c.json({
|
||||
success: true,
|
||||
data: jobs
|
||||
});
|
||||
} catch (error) {
|
||||
this.logger.error('Failed to list computation jobs', { error });
|
||||
return c.json({
|
||||
success: false,
|
||||
error: error instanceof Error ? error.message : 'Unknown error'
|
||||
}, 500);
|
||||
}
|
||||
}
|
||||
|
||||
async executeComputationJob(c: Context) {
|
||||
try {
|
||||
const jobId = c.req.param('id');
|
||||
|
||||
const result = await this.computationService.executeComputationJob(jobId);
|
||||
|
||||
this.logger.info('Computation job executed', { jobId, result });
|
||||
|
||||
return c.json({
|
||||
success: true,
|
||||
data: result
|
||||
});
|
||||
} catch (error) {
|
||||
this.logger.error('Failed to execute computation job', { error });
|
||||
return c.json({
|
||||
success: false,
|
||||
error: error instanceof Error ? error.message : 'Unknown error'
|
||||
}, 500);
|
||||
}
|
||||
}
|
||||
|
||||
async scheduleComputationJob(c: Context) {
|
||||
try {
|
||||
const jobId = c.req.param('id');
|
||||
const { schedule } = await c.req.json();
|
||||
|
||||
await this.computationService.scheduleComputationJob(jobId, schedule);
|
||||
|
||||
this.logger.info('Computation job scheduled', { jobId, schedule });
|
||||
|
||||
return c.json({
|
||||
success: true,
|
||||
message: 'Computation job scheduled successfully'
|
||||
});
|
||||
} catch (error) {
|
||||
this.logger.error('Failed to schedule computation job', { error });
|
||||
return c.json({
|
||||
success: false,
|
||||
error: error instanceof Error ? error.message : 'Unknown error'
|
||||
}, 500);
|
||||
}
|
||||
}
|
||||
|
||||
async unscheduleComputationJob(c: Context) {
|
||||
try {
|
||||
const jobId = c.req.param('id');
|
||||
|
||||
await this.computationService.unscheduleComputationJob(jobId);
|
||||
|
||||
this.logger.info('Computation job unscheduled', { jobId });
|
||||
|
||||
return c.json({
|
||||
success: true,
|
||||
message: 'Computation job unscheduled successfully'
|
||||
});
|
||||
} catch (error) {
|
||||
this.logger.error('Failed to unschedule computation job', { error });
|
||||
return c.json({
|
||||
success: false,
|
||||
error: error instanceof Error ? error.message : 'Unknown error'
|
||||
}, 500);
|
||||
}
|
||||
}
|
||||
|
||||
async getComputationJobHistory(c: Context) {
|
||||
try {
|
||||
const jobId = c.req.param('id');
|
||||
const limit = parseInt(c.req.query('limit') || '10');
|
||||
const offset = parseInt(c.req.query('offset') || '0');
|
||||
|
||||
const history = await this.computationService.getComputationJobHistory(jobId, limit, offset);
|
||||
|
||||
return c.json({
|
||||
success: true,
|
||||
data: history
|
||||
});
|
||||
} catch (error) {
|
||||
this.logger.error('Failed to get computation job history', { error });
|
||||
return c.json({
|
||||
success: false,
|
||||
error: error instanceof Error ? error.message : 'Unknown error'
|
||||
}, 500);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
@ -0,0 +1,226 @@
|
|||
import { Context } from 'hono';
|
||||
import { FeatureStoreService } from '../services/FeatureStoreService';
|
||||
import { Logger } from '@stock-bot/utils';
|
||||
import {
|
||||
FeatureGroup,
|
||||
CreateFeatureGroupRequest,
|
||||
UpdateFeatureGroupRequest,
|
||||
FeatureValue,
|
||||
GetFeaturesRequest
|
||||
} from '../types/FeatureStore';
|
||||
|
||||
export class FeatureController {
|
||||
constructor(
|
||||
private featureStoreService: FeatureStoreService,
|
||||
private logger: Logger
|
||||
) {}
|
||||
|
||||
async createFeatureGroup(c: Context) {
|
||||
try {
|
||||
const request: CreateFeatureGroupRequest = await c.req.json();
|
||||
|
||||
const featureGroup = await this.featureStoreService.createFeatureGroup(request);
|
||||
|
||||
this.logger.info('Feature group created', { featureGroupId: featureGroup.id });
|
||||
|
||||
return c.json({
|
||||
success: true,
|
||||
data: featureGroup
|
||||
}, 201);
|
||||
} catch (error) {
|
||||
this.logger.error('Failed to create feature group', { error });
|
||||
return c.json({
|
||||
success: false,
|
||||
error: error instanceof Error ? error.message : 'Unknown error'
|
||||
}, 500);
|
||||
}
|
||||
}
|
||||
|
||||
async getFeatureGroup(c: Context) {
|
||||
try {
|
||||
const featureGroupId = c.req.param('id');
|
||||
|
||||
const featureGroup = await this.featureStoreService.getFeatureGroup(featureGroupId);
|
||||
|
||||
if (!featureGroup) {
|
||||
return c.json({
|
||||
success: false,
|
||||
error: 'Feature group not found'
|
||||
}, 404);
|
||||
}
|
||||
|
||||
return c.json({
|
||||
success: true,
|
||||
data: featureGroup
|
||||
});
|
||||
} catch (error) {
|
||||
this.logger.error('Failed to get feature group', { error });
|
||||
return c.json({
|
||||
success: false,
|
||||
error: error instanceof Error ? error.message : 'Unknown error'
|
||||
}, 500);
|
||||
}
|
||||
}
|
||||
|
||||
async updateFeatureGroup(c: Context) {
|
||||
try {
|
||||
const featureGroupId = c.req.param('id');
|
||||
const request: UpdateFeatureGroupRequest = await c.req.json();
|
||||
|
||||
const featureGroup = await this.featureStoreService.updateFeatureGroup(featureGroupId, request);
|
||||
|
||||
if (!featureGroup) {
|
||||
return c.json({
|
||||
success: false,
|
||||
error: 'Feature group not found'
|
||||
}, 404);
|
||||
}
|
||||
|
||||
this.logger.info('Feature group updated', { featureGroupId });
|
||||
|
||||
return c.json({
|
||||
success: true,
|
||||
data: featureGroup
|
||||
});
|
||||
} catch (error) {
|
||||
this.logger.error('Failed to update feature group', { error });
|
||||
return c.json({
|
||||
success: false,
|
||||
error: error instanceof Error ? error.message : 'Unknown error'
|
||||
}, 500);
|
||||
}
|
||||
}
|
||||
|
||||
async deleteFeatureGroup(c: Context) {
|
||||
try {
|
||||
const featureGroupId = c.req.param('id');
|
||||
|
||||
await this.featureStoreService.deleteFeatureGroup(featureGroupId);
|
||||
|
||||
this.logger.info('Feature group deleted', { featureGroupId });
|
||||
|
||||
return c.json({
|
||||
success: true,
|
||||
message: 'Feature group deleted successfully'
|
||||
});
|
||||
} catch (error) {
|
||||
this.logger.error('Failed to delete feature group', { error });
|
||||
return c.json({
|
||||
success: false,
|
||||
error: error instanceof Error ? error.message : 'Unknown error'
|
||||
}, 500);
|
||||
}
|
||||
}
|
||||
|
||||
async listFeatureGroups(c: Context) {
|
||||
try {
|
||||
const featureGroups = await this.featureStoreService.listFeatureGroups();
|
||||
|
||||
return c.json({
|
||||
success: true,
|
||||
data: featureGroups
|
||||
});
|
||||
} catch (error) {
|
||||
this.logger.error('Failed to list feature groups', { error });
|
||||
return c.json({
|
||||
success: false,
|
||||
error: error instanceof Error ? error.message : 'Unknown error'
|
||||
}, 500);
|
||||
}
|
||||
}
|
||||
|
||||
async getFeatures(c: Context) {
|
||||
try {
|
||||
const featureGroupId = c.req.param('id');
|
||||
const entityId = c.req.query('entityId');
|
||||
const timestamp = c.req.query('timestamp');
|
||||
|
||||
if (!entityId) {
|
||||
return c.json({
|
||||
success: false,
|
||||
error: 'entityId query parameter is required'
|
||||
}, 400);
|
||||
}
|
||||
|
||||
const request: GetFeaturesRequest = {
|
||||
featureGroupId,
|
||||
entityId,
|
||||
timestamp: timestamp ? new Date(timestamp) : undefined
|
||||
};
|
||||
|
||||
const features = await this.featureStoreService.getFeatures(request);
|
||||
|
||||
return c.json({
|
||||
success: true,
|
||||
data: features
|
||||
});
|
||||
} catch (error) {
|
||||
this.logger.error('Failed to get features', { error });
|
||||
return c.json({
|
||||
success: false,
|
||||
error: error instanceof Error ? error.message : 'Unknown error'
|
||||
}, 500);
|
||||
}
|
||||
}
|
||||
|
||||
async storeFeatures(c: Context) {
|
||||
try {
|
||||
const featureGroupId = c.req.param('id');
|
||||
const features: FeatureValue[] = await c.req.json();
|
||||
|
||||
await this.featureStoreService.storeFeatures(featureGroupId, features);
|
||||
|
||||
this.logger.info('Features stored', {
|
||||
featureGroupId,
|
||||
featureCount: features.length
|
||||
});
|
||||
|
||||
return c.json({
|
||||
success: true,
|
||||
message: 'Features stored successfully'
|
||||
});
|
||||
} catch (error) {
|
||||
this.logger.error('Failed to store features', { error });
|
||||
return c.json({
|
||||
success: false,
|
||||
error: error instanceof Error ? error.message : 'Unknown error'
|
||||
}, 500);
|
||||
}
|
||||
}
|
||||
|
||||
async getFeatureHistory(c: Context) {
|
||||
try {
|
||||
const featureGroupId = c.req.param('id');
|
||||
const featureName = c.req.param('featureName');
|
||||
const entityId = c.req.query('entityId');
|
||||
const startTime = c.req.query('startTime');
|
||||
const endTime = c.req.query('endTime');
|
||||
|
||||
if (!entityId) {
|
||||
return c.json({
|
||||
success: false,
|
||||
error: 'entityId query parameter is required'
|
||||
}, 400);
|
||||
}
|
||||
|
||||
const history = await this.featureStoreService.getFeatureHistory(
|
||||
featureGroupId,
|
||||
featureName,
|
||||
entityId,
|
||||
startTime ? new Date(startTime) : undefined,
|
||||
endTime ? new Date(endTime) : undefined
|
||||
);
|
||||
|
||||
return c.json({
|
||||
success: true,
|
||||
data: history
|
||||
});
|
||||
} catch (error) {
|
||||
this.logger.error('Failed to get feature history', { error });
|
||||
return c.json({
|
||||
success: false,
|
||||
error: error instanceof Error ? error.message : 'Unknown error'
|
||||
}, 500);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
@ -0,0 +1,166 @@
|
|||
import { Context } from 'hono';
|
||||
import { Logger } from '@stock-bot/utils';
|
||||
|
||||
/**
 * Kubernetes-style health endpoints for the feature-store service:
 * full health report, readiness, and liveness.
 * NOTE(review): every private dependency/readiness check below is a TODO
 * stub that always succeeds; wire in real probes before trusting these
 * signals in production.
 */
export class HealthController {
  constructor(private logger: Logger) {}

  /**
   * Full health report: process stats plus dependency statuses.
   * Always responds 200 with status 'healthy' unless building the report
   * itself throws (then 500 / 'unhealthy') — dependency failures do NOT
   * currently flip the top-level status.
   */
  async getHealth(c: Context) {
    try {
      const health = {
        status: 'healthy',
        timestamp: new Date().toISOString(),
        service: 'feature-store',
        version: '1.0.0',
        uptime: process.uptime(),
        memory: {
          // Heap figures in MiB, rounded to 2 decimal places.
          // NOTE(review): process.memoryUsage() is called twice; could be hoisted.
          used: Math.round((process.memoryUsage().heapUsed / 1024 / 1024) * 100) / 100,
          total: Math.round((process.memoryUsage().heapTotal / 1024 / 1024) * 100) / 100
        },
        dependencies: {
          redis: await this.checkRedisHealth(),
          database: await this.checkDatabaseHealth(),
          eventBus: await this.checkEventBusHealth()
        }
      };

      return c.json(health);
    } catch (error) {
      this.logger.error('Health check failed', { error });
      return c.json({
        status: 'unhealthy',
        timestamp: new Date().toISOString(),
        service: 'feature-store',
        error: error instanceof Error ? error.message : 'Unknown error'
      }, 500);
    }
  }

  /**
   * Readiness probe: runs all four store/engine checks and responds 503 when
   * any of them is not 'ready'.
   * NOTE(review): the top-level `status` field stays 'ready' even when the
   * response code is 503 — only the HTTP status reflects the failure.
   */
  async getReadiness(c: Context) {
    try {
      const readiness = {
        status: 'ready',
        timestamp: new Date().toISOString(),
        service: 'feature-store',
        checks: {
          onlineStore: await this.checkOnlineStoreReadiness(),
          offlineStore: await this.checkOfflineStoreReadiness(),
          metadataStore: await this.checkMetadataStoreReadiness(),
          computationEngine: await this.checkComputationEngineReadiness()
        }
      };

      // Ready only if every individual check reports 'ready'.
      const isReady = Object.values(readiness.checks).every(check => check.status === 'ready');

      return c.json(readiness, isReady ? 200 : 503);
    } catch (error) {
      this.logger.error('Readiness check failed', { error });
      return c.json({
        status: 'not_ready',
        timestamp: new Date().toISOString(),
        service: 'feature-store',
        error: error instanceof Error ? error.message : 'Unknown error'
      }, 503);
    }
  }

  /** Liveness probe: reports process id and uptime; failure only if serialization throws. */
  async getLiveness(c: Context) {
    try {
      const liveness = {
        status: 'alive',
        timestamp: new Date().toISOString(),
        service: 'feature-store',
        pid: process.pid,
        uptime: process.uptime()
      };

      return c.json(liveness);
    } catch (error) {
      this.logger.error('Liveness check failed', { error });
      return c.json({
        status: 'dead',
        timestamp: new Date().toISOString(),
        service: 'feature-store',
        error: error instanceof Error ? error.message : 'Unknown error'
      }, 500);
    }
  }

  /** Redis ping with latency measurement. Currently a stub that always succeeds. */
  private async checkRedisHealth(): Promise<{ status: string; latency?: number }> {
    try {
      const start = Date.now();
      // TODO: Implement actual Redis health check
      const latency = Date.now() - start;
      return { status: 'healthy', latency };
    } catch (error) {
      return { status: 'unhealthy' };
    }
  }

  /** Database ping with latency measurement. Currently a stub that always succeeds. */
  private async checkDatabaseHealth(): Promise<{ status: string; latency?: number }> {
    try {
      const start = Date.now();
      // TODO: Implement actual database health check
      const latency = Date.now() - start;
      return { status: 'healthy', latency };
    } catch (error) {
      return { status: 'unhealthy' };
    }
  }

  /** Event bus connectivity check. Currently a stub that always succeeds. */
  private async checkEventBusHealth(): Promise<{ status: string }> {
    try {
      // TODO: Implement actual event bus health check
      return { status: 'healthy' };
    } catch (error) {
      return { status: 'unhealthy' };
    }
  }

  /** Online store readiness. Currently a stub that always reports 'ready'. */
  private async checkOnlineStoreReadiness(): Promise<{ status: string; message?: string }> {
    try {
      // TODO: Implement actual online store readiness check
      return { status: 'ready' };
    } catch (error) {
      return {
        status: 'not_ready',
        message: error instanceof Error ? error.message : 'Unknown error'
      };
    }
  }

  /** Offline store readiness. Currently a stub that always reports 'ready'. */
  private async checkOfflineStoreReadiness(): Promise<{ status: string; message?: string }> {
    try {
      // TODO: Implement actual offline store readiness check
      return { status: 'ready' };
    } catch (error) {
      return {
        status: 'not_ready',
        message: error instanceof Error ? error.message : 'Unknown error'
      };
    }
  }

  /** Metadata store readiness. Currently a stub that always reports 'ready'. */
  private async checkMetadataStoreReadiness(): Promise<{ status: string; message?: string }> {
    try {
      // TODO: Implement actual metadata store readiness check
      return { status: 'ready' };
    } catch (error) {
      return {
        status: 'not_ready',
        message: error instanceof Error ? error.message : 'Unknown error'
      };
    }
  }

  /** Computation engine readiness. Currently a stub that always reports 'ready'. */
  private async checkComputationEngineReadiness(): Promise<{ status: string; message?: string }> {
    try {
      // TODO: Implement actual computation engine readiness check
      return { status: 'ready' };
    } catch (error) {
      return {
        status: 'not_ready',
        message: error instanceof Error ? error.message : 'Unknown error'
      };
    }
  }
}
|
||||
|
|
@ -0,0 +1,123 @@
|
|||
import { Context } from 'hono';
|
||||
import { FeatureMonitoringService } from '../services/FeatureMonitoringService';
|
||||
import { Logger } from '@stock-bot/utils';
|
||||
import {
|
||||
FeatureMonitoringConfig,
|
||||
FeatureValue
|
||||
} from '../types/FeatureStore';
|
||||
|
||||
/**
 * HTTP controller for feature-drift monitoring: start/stop per-group
 * monitoring, on-demand drift detection, metrics retrieval, and config
 * updates. All handlers return the `{ success, data?/message?/error? }`
 * envelope and map failures to HTTP 500.
 */
export class MonitoringController {
  constructor(
    private monitoringService: FeatureMonitoringService,
    private logger: Logger
  ) {}

  /** Begin monitoring the feature group in the `id` route param with the config from the JSON body. */
  async startMonitoring(c: Context) {
    try {
      const featureGroupId = c.req.param('id');
      const config: FeatureMonitoringConfig = await c.req.json();

      await this.monitoringService.startMonitoring(featureGroupId, config);

      this.logger.info('Monitoring started', { featureGroupId });

      return c.json({
        success: true,
        message: 'Monitoring started successfully'
      });
    } catch (error) {
      this.logger.error('Failed to start monitoring', { error });
      return c.json({
        success: false,
        error: error instanceof Error ? error.message : 'Unknown error'
      }, 500);
    }
  }

  /** Stop monitoring the feature group in the `id` route param. */
  async stopMonitoring(c: Context) {
    try {
      const featureGroupId = c.req.param('id');

      await this.monitoringService.stopMonitoring(featureGroupId);

      this.logger.info('Monitoring stopped', { featureGroupId });

      return c.json({
        success: true,
        message: 'Monitoring stopped successfully'
      });
    } catch (error) {
      this.logger.error('Failed to stop monitoring', { error });
      return c.json({
        success: false,
        error: error instanceof Error ? error.message : 'Unknown error'
      }, 500);
    }
  }

  /**
   * Run drift detection over the feature values supplied in the JSON body
   * and return any resulting alerts.
   * NOTE(review): the body is used as FeatureValue[] without validation —
   * confirm upstream guarantees or add a schema check.
   */
  async detectDrift(c: Context) {
    try {
      const featureGroupId = c.req.param('id');
      const recentData: FeatureValue[] = await c.req.json();

      const alerts = await this.monitoringService.detectDrift(featureGroupId, recentData);

      this.logger.info('Drift detection completed', {
        featureGroupId,
        alertsCount: alerts.length
      });

      return c.json({
        success: true,
        data: alerts
      });
    } catch (error) {
      this.logger.error('Failed to detect drift', { error });
      return c.json({
        success: false,
        error: error instanceof Error ? error.message : 'Unknown error'
      }, 500);
    }
  }

  /** Fetch current monitoring metrics for the feature group. */
  async getMonitoringMetrics(c: Context) {
    try {
      const featureGroupId = c.req.param('id');

      const metrics = await this.monitoringService.getMonitoringMetrics(featureGroupId);

      return c.json({
        success: true,
        data: metrics
      });
    } catch (error) {
      this.logger.error('Failed to get monitoring metrics', { error });
      return c.json({
        success: false,
        error: error instanceof Error ? error.message : 'Unknown error'
      }, 500);
    }
  }

  /** Replace the monitoring configuration with the one in the JSON body. */
  async updateMonitoringConfig(c: Context) {
    try {
      const featureGroupId = c.req.param('id');
      const config: FeatureMonitoringConfig = await c.req.json();

      await this.monitoringService.updateMonitoringConfig(featureGroupId, config);

      this.logger.info('Monitoring config updated', { featureGroupId });

      return c.json({
        success: true,
        message: 'Monitoring configuration updated successfully'
      });
    } catch (error) {
      this.logger.error('Failed to update monitoring config', { error });
      return c.json({
        success: false,
        error: error instanceof Error ? error.message : 'Unknown error'
      }, 500);
    }
  }
}
|
||||
41
apps/data-services/feature-store/src/index.ts
Normal file
41
apps/data-services/feature-store/src/index.ts
Normal file
|
|
@ -0,0 +1,41 @@
|
|||
// Entry point for the feature-store HTTP service (Hono app).
// Wires middleware, the health controller, and placeholder API routes, then
// exports the { port, fetch } shape consumed by the runtime.
import { Hono } from 'hono';
import { cors } from 'hono/cors';
import { logger } from 'hono/logger';

// Controllers
import { HealthController } from './controllers/HealthController';

const app = new Hono();

// Middleware: CORS + request logging on every route.
app.use('*', cors());
app.use('*', logger());

// Initialize logger for services
// NOTE(review): console-backed structural stand-in for the @stock-bot/utils
// Logger — confirm it satisfies the interface HealthController expects.
const appLogger = { info: console.log, error: console.error, warn: console.warn, debug: console.log };

// Controllers
const healthController = new HealthController(appLogger);

// Health endpoints (bind keeps `this` pointing at the controller instance).
app.get('/health', healthController.getHealth.bind(healthController));
app.get('/health/readiness', healthController.getReadiness.bind(healthController));
app.get('/health/liveness', healthController.getLiveness.bind(healthController));

// API endpoints will be implemented as services are completed
app.get('/api/v1/feature-groups', async (c) => {
  return c.json({ message: 'Feature groups endpoint - not implemented yet' });
});

app.post('/api/v1/feature-groups', async (c) => {
  return c.json({ message: 'Create feature group endpoint - not implemented yet' });
});

// Port from the environment with a 3003 fallback.
// NOTE(review): process.env.PORT is a string when set; confirm the runtime
// accepts string | number here.
const port = process.env.PORT || 3003;

console.log(`Feature Store service running on port ${port}`);

// Export shape expected by the host runtime (port + fetch handler).
export default {
  port,
  fetch: app.fetch,
};
|
||||
|
|
@ -0,0 +1,167 @@
|
|||
import { logger } from '@stock-bot/utils';
|
||||
import {
|
||||
FeatureComputation,
|
||||
ComputationStatus,
|
||||
ComputationError
|
||||
} from '../types/FeatureStore';
|
||||
|
||||
export class FeatureComputationService {
|
||||
private computations: Map<string, FeatureComputation> = new Map();
|
||||
private runningComputations: Set<string> = new Set();
|
||||
|
||||
async initialize(): Promise<void> {
|
||||
logger.info('🔄 Initializing Feature Computation Service...');
|
||||
|
||||
this.computations.clear();
|
||||
this.runningComputations.clear();
|
||||
|
||||
logger.info('✅ Feature Computation Service initialized');
|
||||
}
|
||||
|
||||
async startComputation(
|
||||
featureGroupId: string,
|
||||
parameters: Record<string, any>
|
||||
): Promise<FeatureComputation> {
|
||||
const computation: FeatureComputation = {
|
||||
id: this.generateComputationId(),
|
||||
featureGroupId,
|
||||
status: ComputationStatus.PENDING,
|
||||
startTime: new Date(),
|
||||
recordsProcessed: 0,
|
||||
recordsGenerated: 0,
|
||||
errors: [],
|
||||
metadata: parameters,
|
||||
};
|
||||
|
||||
this.computations.set(computation.id, computation);
|
||||
|
||||
// Start computation asynchronously
|
||||
this.executeComputation(computation);
|
||||
|
||||
logger.info(`⚙️ Started feature computation: ${computation.id} for group: ${featureGroupId}`);
|
||||
return computation;
|
||||
}
|
||||
|
||||
async getComputation(id: string): Promise<FeatureComputation | null> {
|
||||
return this.computations.get(id) || null;
|
||||
}
|
||||
|
||||
async listComputations(featureGroupId?: string): Promise<FeatureComputation[]> {
|
||||
const computations = Array.from(this.computations.values());
|
||||
return featureGroupId ?
|
||||
computations.filter(c => c.featureGroupId === featureGroupId) :
|
||||
computations;
|
||||
}
|
||||
|
||||
async cancelComputation(id: string): Promise<boolean> {
|
||||
const computation = this.computations.get(id);
|
||||
if (!computation) {
|
||||
return false;
|
||||
}
|
||||
|
||||
if (computation.status === ComputationStatus.RUNNING) {
|
||||
computation.status = ComputationStatus.CANCELLED;
|
||||
computation.endTime = new Date();
|
||||
this.runningComputations.delete(id);
|
||||
|
||||
logger.info(`❌ Cancelled computation: ${id}`);
|
||||
return true;
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
private async executeComputation(computation: FeatureComputation): Promise<void> {
|
||||
try {
|
||||
computation.status = ComputationStatus.RUNNING;
|
||||
this.runningComputations.add(computation.id);
|
||||
|
||||
logger.info(`⚙️ Executing computation: ${computation.id}`);
|
||||
|
||||
// Simulate computation work
|
||||
const totalRecords = 1000; // Mock value
|
||||
const batchSize = 100;
|
||||
|
||||
for (let processed = 0; processed < totalRecords; processed += batchSize) {
|
||||
// Check if computation was cancelled
|
||||
if (computation.status === ComputationStatus.CANCELLED) {
|
||||
return;
|
||||
}
|
||||
|
||||
// Simulate processing time
|
||||
await new Promise(resolve => setTimeout(resolve, 100));
|
||||
|
||||
const currentBatch = Math.min(batchSize, totalRecords - processed);
|
||||
computation.recordsProcessed += currentBatch;
|
||||
computation.recordsGenerated += currentBatch; // Assume 1:1 for simplicity
|
||||
|
||||
// Simulate some errors
|
||||
if (Math.random() < 0.05) { // 5% error rate
|
||||
const error: ComputationError = {
|
||||
entityId: `entity_${processed}`,
|
||||
error: 'Simulated processing error',
|
||||
timestamp: new Date(),
|
||||
};
|
||||
computation.errors.push(error);
|
||||
}
|
||||
}
|
||||
|
||||
computation.status = ComputationStatus.COMPLETED;
|
||||
computation.endTime = new Date();
|
||||
this.runningComputations.delete(computation.id);
|
||||
|
||||
logger.info(`✅ Completed computation: ${computation.id}`);
|
||||
|
||||
} catch (error) {
|
||||
computation.status = ComputationStatus.FAILED;
|
||||
computation.endTime = new Date();
|
||||
this.runningComputations.delete(computation.id);
|
||||
|
||||
const computationError: ComputationError = {
|
||||
entityId: 'unknown',
|
||||
error: error instanceof Error ? error.message : 'Unknown error',
|
||||
timestamp: new Date(),
|
||||
};
|
||||
computation.errors.push(computationError);
|
||||
|
||||
logger.error(`❌ Computation failed: ${computation.id}`, error);
|
||||
}
|
||||
}
|
||||
|
||||
private generateComputationId(): string {
|
||||
return `comp_${Date.now()}_${Math.random().toString(36).substr(2, 9)}`;
|
||||
}
|
||||
|
||||
async getComputationStats(): Promise<any> {
|
||||
const computations = Array.from(this.computations.values());
|
||||
|
||||
return {
|
||||
total: computations.length,
|
||||
running: this.runningComputations.size,
|
||||
byStatus: {
|
||||
pending: computations.filter(c => c.status === ComputationStatus.PENDING).length,
|
||||
running: computations.filter(c => c.status === ComputationStatus.RUNNING).length,
|
||||
completed: computations.filter(c => c.status === ComputationStatus.COMPLETED).length,
|
||||
failed: computations.filter(c => c.status === ComputationStatus.FAILED).length,
|
||||
cancelled: computations.filter(c => c.status === ComputationStatus.CANCELLED).length,
|
||||
},
|
||||
totalRecordsProcessed: computations.reduce((sum, c) => sum + c.recordsProcessed, 0),
|
||||
totalRecordsGenerated: computations.reduce((sum, c) => sum + c.recordsGenerated, 0),
|
||||
totalErrors: computations.reduce((sum, c) => sum + c.errors.length, 0),
|
||||
};
|
||||
}
|
||||
|
||||
async shutdown(): Promise<void> {
|
||||
logger.info('🔄 Shutting down Feature Computation Service...');
|
||||
|
||||
// Cancel all running computations
|
||||
for (const computationId of this.runningComputations) {
|
||||
await this.cancelComputation(computationId);
|
||||
}
|
||||
|
||||
this.computations.clear();
|
||||
this.runningComputations.clear();
|
||||
|
||||
logger.info('✅ Feature Computation Service shutdown complete');
|
||||
}
|
||||
}
|
||||
|
|
@ -0,0 +1,246 @@
|
|||
import { EventBus } from '@stock-bot/event-bus';
|
||||
import { Logger } from '@stock-bot/utils';
|
||||
import {
|
||||
FeatureGroup,
|
||||
FeatureDriftAlert,
|
||||
FeatureMonitoringConfig,
|
||||
FeatureMonitoringMetrics,
|
||||
FeatureValue,
|
||||
DriftDetectionMethod
|
||||
} from '../types/FeatureStore';
|
||||
|
||||
/**
 * Contract for the feature monitoring service: lifecycle of per-group
 * monitoring jobs, on-demand drift detection, metrics, and config updates.
 */
export interface FeatureMonitoringService {
  /** Start periodic monitoring of a feature group with the given config. */
  startMonitoring(featureGroupId: string, config: FeatureMonitoringConfig): Promise<void>;
  /** Stop monitoring for a feature group. */
  stopMonitoring(featureGroupId: string): Promise<void>;
  /** Compare recent values against a baseline and return any drift alerts. */
  detectDrift(featureGroupId: string, recentData: FeatureValue[]): Promise<FeatureDriftAlert[]>;
  /** Fetch current monitoring metrics for a feature group. */
  getMonitoringMetrics(featureGroupId: string): Promise<FeatureMonitoringMetrics>;
  /** Replace the monitoring configuration for a feature group. */
  updateMonitoringConfig(featureGroupId: string, config: FeatureMonitoringConfig): Promise<void>;
}
|
||||
|
||||
export class FeatureMonitoringServiceImpl implements FeatureMonitoringService {
|
||||
private monitoringJobs: Map<string, NodeJS.Timeout> = new Map();
|
||||
private baselineStats: Map<string, any> = new Map();
|
||||
|
||||
constructor(
|
||||
private eventBus: EventBus,
|
||||
private logger: Logger
|
||||
) {}
|
||||
|
||||
async startMonitoring(featureGroupId: string, config: FeatureMonitoringConfig): Promise<void> {
|
||||
try {
|
||||
// Stop existing monitoring if running
|
||||
await this.stopMonitoring(featureGroupId);
|
||||
|
||||
// Start new monitoring job
|
||||
const interval = setInterval(async () => {
|
||||
await this.runMonitoringCheck(featureGroupId, config);
|
||||
}, config.checkInterval * 1000);
|
||||
|
||||
this.monitoringJobs.set(featureGroupId, interval);
|
||||
|
||||
this.logger.info(`Started monitoring for feature group: ${featureGroupId}`);
|
||||
|
||||
await this.eventBus.emit('feature.monitoring.started', {
|
||||
featureGroupId,
|
||||
config,
|
||||
timestamp: new Date()
|
||||
});
|
||||
} catch (error) {
|
||||
this.logger.error('Failed to start feature monitoring', { featureGroupId, error });
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
async stopMonitoring(featureGroupId: string): Promise<void> {
|
||||
try {
|
||||
const job = this.monitoringJobs.get(featureGroupId);
|
||||
if (job) {
|
||||
clearInterval(job);
|
||||
this.monitoringJobs.delete(featureGroupId);
|
||||
|
||||
this.logger.info(`Stopped monitoring for feature group: ${featureGroupId}`);
|
||||
|
||||
await this.eventBus.emit('feature.monitoring.stopped', {
|
||||
featureGroupId,
|
||||
timestamp: new Date()
|
||||
});
|
||||
}
|
||||
} catch (error) {
|
||||
this.logger.error('Failed to stop feature monitoring', { featureGroupId, error });
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
async detectDrift(featureGroupId: string, recentData: FeatureValue[]): Promise<FeatureDriftAlert[]> {
|
||||
try {
|
||||
const alerts: FeatureDriftAlert[] = [];
|
||||
const baseline = this.baselineStats.get(featureGroupId);
|
||||
|
||||
if (!baseline) {
|
||||
// No baseline available, collect current data as baseline
|
||||
await this.updateBaseline(featureGroupId, recentData);
|
||||
return alerts;
|
||||
}
|
||||
|
||||
// Group data by feature name
|
||||
const featureData = this.groupByFeature(recentData);
|
||||
|
||||
for (const [featureName, values] of featureData) {
|
||||
const currentStats = this.calculateStatistics(values);
|
||||
const baselineFeatureStats = baseline[featureName];
|
||||
|
||||
if (!baselineFeatureStats) continue;
|
||||
|
||||
// Detect drift using various methods
|
||||
const driftScore = await this.calculateDriftScore(
|
||||
baselineFeatureStats,
|
||||
currentStats,
|
||||
DriftDetectionMethod.KOLMOGOROV_SMIRNOV
|
||||
);
|
||||
|
||||
if (driftScore > 0.1) { // Threshold for drift detection
|
||||
alerts.push({
|
||||
id: `drift_${featureGroupId}_${featureName}_${Date.now()}`,
|
||||
featureGroupId,
|
||||
featureName,
|
||||
driftScore,
|
||||
severity: driftScore > 0.3 ? 'high' : driftScore > 0.2 ? 'medium' : 'low',
|
||||
detectionMethod: DriftDetectionMethod.KOLMOGOROV_SMIRNOV,
|
||||
baselineStats: baselineFeatureStats,
|
||||
currentStats,
|
||||
detectedAt: new Date(),
|
||||
message: `Feature drift detected for ${featureName} with score ${driftScore.toFixed(3)}`
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
if (alerts.length > 0) {
|
||||
await this.eventBus.emit('feature.drift.detected', {
|
||||
featureGroupId,
|
||||
alerts,
|
||||
timestamp: new Date()
|
||||
});
|
||||
}
|
||||
|
||||
return alerts;
|
||||
} catch (error) {
|
||||
this.logger.error('Failed to detect drift', { featureGroupId, error });
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
async getMonitoringMetrics(featureGroupId: string): Promise<FeatureMonitoringMetrics> {
|
||||
try {
|
||||
const isActive = this.monitoringJobs.has(featureGroupId);
|
||||
const baseline = this.baselineStats.get(featureGroupId);
|
||||
|
||||
return {
|
||||
featureGroupId,
|
||||
isActive,
|
||||
lastCheckTime: new Date(),
|
||||
totalChecks: 0, // Would be stored in persistent storage
|
||||
driftAlertsCount: 0, // Would be queried from alert storage
|
||||
averageDriftScore: 0,
|
||||
featuresMonitored: baseline ? Object.keys(baseline).length : 0,
|
||||
uptime: isActive ? Date.now() : 0 // Would calculate actual uptime
|
||||
};
|
||||
} catch (error) {
|
||||
this.logger.error('Failed to get monitoring metrics', { featureGroupId, error });
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
async updateMonitoringConfig(featureGroupId: string, config: FeatureMonitoringConfig): Promise<void> {
|
||||
try {
|
||||
// Restart monitoring with new config
|
||||
if (this.monitoringJobs.has(featureGroupId)) {
|
||||
await this.stopMonitoring(featureGroupId);
|
||||
await this.startMonitoring(featureGroupId, config);
|
||||
}
|
||||
|
||||
this.logger.info(`Updated monitoring config for feature group: ${featureGroupId}`);
|
||||
} catch (error) {
|
||||
this.logger.error('Failed to update monitoring config', { featureGroupId, error });
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
private async runMonitoringCheck(featureGroupId: string, config: FeatureMonitoringConfig): Promise<void> {
|
||||
try {
|
||||
// In a real implementation, this would fetch recent data from the feature store
|
||||
const recentData: FeatureValue[] = []; // Placeholder
|
||||
|
||||
await this.detectDrift(featureGroupId, recentData);
|
||||
} catch (error) {
|
||||
this.logger.error('Monitoring check failed', { featureGroupId, error });
|
||||
}
|
||||
}
|
||||
|
||||
private async updateBaseline(featureGroupId: string, data: FeatureValue[]): Promise<void> {
|
||||
const featureData = this.groupByFeature(data);
|
||||
const baseline: Record<string, any> = {};
|
||||
|
||||
for (const [featureName, values] of featureData) {
|
||||
baseline[featureName] = this.calculateStatistics(values);
|
||||
}
|
||||
|
||||
this.baselineStats.set(featureGroupId, baseline);
|
||||
}
|
||||
|
||||
private groupByFeature(data: FeatureValue[]): Map<string, number[]> {
|
||||
const grouped = new Map<string, number[]>();
|
||||
|
||||
for (const item of data) {
|
||||
if (!grouped.has(item.featureName)) {
|
||||
grouped.set(item.featureName, []);
|
||||
}
|
||||
grouped.get(item.featureName)!.push(item.value as number);
|
||||
}
|
||||
|
||||
return grouped;
|
||||
}
|
||||
|
||||
private calculateStatistics(values: number[]): any {
|
||||
const sorted = values.sort((a, b) => a - b);
|
||||
const n = values.length;
|
||||
const mean = values.reduce((sum, val) => sum + val, 0) / n;
|
||||
const variance = values.reduce((sum, val) => sum + Math.pow(val - mean, 2), 0) / n;
|
||||
const stdDev = Math.sqrt(variance);
|
||||
|
||||
return {
|
||||
count: n,
|
||||
mean,
|
||||
stdDev,
|
||||
min: sorted[0],
|
||||
max: sorted[n - 1],
|
||||
median: n % 2 === 0 ? (sorted[n/2 - 1] + sorted[n/2]) / 2 : sorted[Math.floor(n/2)],
|
||||
q25: sorted[Math.floor(n * 0.25)],
|
||||
q75: sorted[Math.floor(n * 0.75)]
|
||||
};
|
||||
}
|
||||
|
||||
private async calculateDriftScore(
|
||||
baseline: any,
|
||||
current: any,
|
||||
method: DriftDetectionMethod
|
||||
): Promise<number> {
|
||||
switch (method) {
|
||||
case DriftDetectionMethod.KOLMOGOROV_SMIRNOV:
|
||||
// Simplified KS test approximation
|
||||
return Math.abs(baseline.mean - current.mean) / (baseline.stdDev + current.stdDev + 1e-8);
|
||||
|
||||
case DriftDetectionMethod.POPULATION_STABILITY_INDEX:
|
||||
// Simplified PSI calculation
|
||||
const expectedRatio = baseline.mean / (baseline.mean + current.mean + 1e-8);
|
||||
const actualRatio = current.mean / (baseline.mean + current.mean + 1e-8);
|
||||
return Math.abs(expectedRatio - actualRatio);
|
||||
|
||||
case DriftDetectionMethod.JENSEN_SHANNON_DIVERGENCE:
|
||||
// Simplified JS divergence approximation
|
||||
return Math.min(1.0, Math.abs(baseline.mean - current.mean) / Math.max(baseline.stdDev, current.stdDev, 1e-8));
|
||||
|
||||
default:
|
||||
return 0;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
@ -0,0 +1,195 @@
|
|||
import { logger } from '@stock-bot/utils';
|
||||
import { FeatureStatistics, HistogramBucket, ValueCount } from '../types/FeatureStore';
|
||||
|
||||
export class FeatureStatisticsService {
|
||||
private statistics: Map<string, FeatureStatistics> = new Map();
|
||||
|
||||
async initialize(): Promise<void> {
|
||||
logger.info('🔄 Initializing Feature Statistics Service...');
|
||||
|
||||
this.statistics.clear();
|
||||
|
||||
logger.info('✅ Feature Statistics Service initialized');
|
||||
}
|
||||
|
||||
async computeStatistics(
|
||||
featureGroupId: string,
|
||||
featureName: string,
|
||||
data: any[]
|
||||
): Promise<FeatureStatistics> {
|
||||
const values = data.map(item => item[featureName]).filter(v => v !== null && v !== undefined);
|
||||
|
||||
const statistics: FeatureStatistics = {
|
||||
featureGroupId,
|
||||
featureName,
|
||||
statistics: {
|
||||
count: data.length,
|
||||
nullCount: data.length - values.length,
|
||||
distinctCount: new Set(values).size,
|
||||
},
|
||||
computedAt: new Date(),
|
||||
};
|
||||
|
||||
// Compute numerical statistics if applicable
|
||||
const numericalValues = values.filter(v => typeof v === 'number');
|
||||
if (numericalValues.length > 0) {
|
||||
const sorted = numericalValues.sort((a, b) => a - b);
|
||||
const sum = numericalValues.reduce((acc, val) => acc + val, 0);
|
||||
const mean = sum / numericalValues.length;
|
||||
|
||||
statistics.statistics.min = sorted[0];
|
||||
statistics.statistics.max = sorted[sorted.length - 1];
|
||||
statistics.statistics.mean = mean;
|
||||
statistics.statistics.median = this.calculateMedian(sorted);
|
||||
statistics.statistics.stdDev = this.calculateStandardDeviation(numericalValues, mean);
|
||||
statistics.statistics.percentiles = this.calculatePercentiles(sorted);
|
||||
statistics.statistics.histogram = this.calculateHistogram(numericalValues);
|
||||
}
|
||||
|
||||
// Compute top values for categorical data
|
||||
statistics.statistics.topValues = this.calculateTopValues(values);
|
||||
|
||||
const key = `${featureGroupId}.${featureName}`;
|
||||
this.statistics.set(key, statistics);
|
||||
|
||||
logger.info(`📊 Computed statistics for feature: ${featureGroupId}.${featureName}`);
|
||||
return statistics;
|
||||
}
|
||||
|
||||
async getStatistics(featureGroupId: string, featureName: string): Promise<FeatureStatistics | null> {
|
||||
const key = `${featureGroupId}.${featureName}`;
|
||||
return this.statistics.get(key) || null;
|
||||
}
|
||||
|
||||
async getFeatureGroupStatistics(featureGroupId: string): Promise<FeatureStatistics[]> {
|
||||
const groupStats: FeatureStatistics[] = [];
|
||||
|
||||
for (const [key, stats] of this.statistics.entries()) {
|
||||
if (stats.featureGroupId === featureGroupId) {
|
||||
groupStats.push(stats);
|
||||
}
|
||||
}
|
||||
|
||||
return groupStats;
|
||||
}
|
||||
|
||||
async getAllStatistics(): Promise<FeatureStatistics[]> {
|
||||
return Array.from(this.statistics.values());
|
||||
}
|
||||
|
||||
async deleteStatistics(featureGroupId: string, featureName?: string): Promise<void> {
|
||||
if (featureName) {
|
||||
const key = `${featureGroupId}.${featureName}`;
|
||||
this.statistics.delete(key);
|
||||
} else {
|
||||
// Delete all statistics for the feature group
|
||||
const keysToDelete: string[] = [];
|
||||
for (const [key, stats] of this.statistics.entries()) {
|
||||
if (stats.featureGroupId === featureGroupId) {
|
||||
keysToDelete.push(key);
|
||||
}
|
||||
}
|
||||
|
||||
for (const key of keysToDelete) {
|
||||
this.statistics.delete(key);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private calculateMedian(sortedValues: number[]): number {
|
||||
const length = sortedValues.length;
|
||||
if (length % 2 === 0) {
|
||||
return (sortedValues[length / 2 - 1] + sortedValues[length / 2]) / 2;
|
||||
} else {
|
||||
return sortedValues[Math.floor(length / 2)];
|
||||
}
|
||||
}
|
||||
|
||||
private calculateStandardDeviation(values: number[], mean: number): number {
|
||||
const squaredDifferences = values.map(value => Math.pow(value - mean, 2));
|
||||
const avgSquaredDiff = squaredDifferences.reduce((acc, val) => acc + val, 0) / values.length;
|
||||
return Math.sqrt(avgSquaredDiff);
|
||||
}
|
||||
|
||||
private calculatePercentiles(sortedValues: number[]): Record<string, number> {
|
||||
const percentiles = [5, 10, 25, 50, 75, 90, 95];
|
||||
const result: Record<string, number> = {};
|
||||
|
||||
for (const p of percentiles) {
|
||||
const index = (p / 100) * (sortedValues.length - 1);
|
||||
if (Number.isInteger(index)) {
|
||||
result[`p${p}`] = sortedValues[index];
|
||||
} else {
|
||||
const lower = Math.floor(index);
|
||||
const upper = Math.ceil(index);
|
||||
const weight = index - lower;
|
||||
result[`p${p}`] = sortedValues[lower] * (1 - weight) + sortedValues[upper] * weight;
|
||||
}
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
private calculateHistogram(values: number[], buckets: number = 10): HistogramBucket[] {
|
||||
const min = Math.min(...values);
|
||||
const max = Math.max(...values);
|
||||
const bucketSize = (max - min) / buckets;
|
||||
|
||||
const histogram: HistogramBucket[] = [];
|
||||
|
||||
for (let i = 0; i < buckets; i++) {
|
||||
const bucketMin = min + i * bucketSize;
|
||||
const bucketMax = i === buckets - 1 ? max : min + (i + 1) * bucketSize;
|
||||
|
||||
const count = values.filter(v => v >= bucketMin && v < bucketMax).length;
|
||||
|
||||
histogram.push({
|
||||
min: bucketMin,
|
||||
max: bucketMax,
|
||||
count,
|
||||
});
|
||||
}
|
||||
|
||||
return histogram;
|
||||
}
|
||||
|
||||
private calculateTopValues(values: any[], limit: number = 10): ValueCount[] {
|
||||
const valueCounts = new Map<any, number>();
|
||||
|
||||
for (const value of values) {
|
||||
valueCounts.set(value, (valueCounts.get(value) || 0) + 1);
|
||||
}
|
||||
|
||||
const sortedCounts = Array.from(valueCounts.entries())
|
||||
.map(([value, count]) => ({
|
||||
value,
|
||||
count,
|
||||
percentage: (count / values.length) * 100,
|
||||
}))
|
||||
.sort((a, b) => b.count - a.count)
|
||||
.slice(0, limit);
|
||||
|
||||
return sortedCounts;
|
||||
}
|
||||
|
||||
async getStatisticsSummary(): Promise<any> {
|
||||
const allStats = Array.from(this.statistics.values());
|
||||
|
||||
return {
|
||||
totalFeatures: allStats.length,
|
||||
totalRecords: allStats.reduce((sum, s) => sum + s.statistics.count, 0),
|
||||
totalNullValues: allStats.reduce((sum, s) => sum + s.statistics.nullCount, 0),
|
||||
featureGroups: new Set(allStats.map(s => s.featureGroupId)).size,
|
||||
lastComputed: allStats.length > 0 ?
|
||||
Math.max(...allStats.map(s => s.computedAt.getTime())) : null,
|
||||
};
|
||||
}
|
||||
|
||||
async shutdown(): Promise<void> {
|
||||
logger.info('🔄 Shutting down Feature Statistics Service...');
|
||||
|
||||
this.statistics.clear();
|
||||
|
||||
logger.info('✅ Feature Statistics Service shutdown complete');
|
||||
}
|
||||
}
|
||||
|
|
@ -0,0 +1,313 @@
|
|||
import { EventBus } from '@stock-bot/event-bus';
|
||||
import { logger } from '@stock-bot/utils';
|
||||
import {
|
||||
FeatureGroup,
|
||||
FeatureGroupStatus,
|
||||
FeatureVector,
|
||||
FeatureRequest,
|
||||
FeatureResponse,
|
||||
FeatureStorageConfig,
|
||||
FeatureRegistry
|
||||
} from '../types/FeatureStore';
|
||||
import { OnlineStore } from './storage/OnlineStore';
|
||||
import { OfflineStore } from './storage/OfflineStore';
|
||||
import { MetadataStore } from './storage/MetadataStore';
|
||||
|
||||
export class FeatureStoreService {
|
||||
private eventBus: EventBus;
|
||||
private onlineStore: OnlineStore;
|
||||
private offlineStore: OfflineStore;
|
||||
private metadataStore: MetadataStore;
|
||||
private registry: FeatureRegistry;
|
||||
|
||||
constructor() {
|
||||
this.eventBus = new EventBus();
|
||||
this.onlineStore = new OnlineStore();
|
||||
this.offlineStore = new OfflineStore();
|
||||
this.metadataStore = new MetadataStore();
|
||||
this.registry = {
|
||||
featureGroups: new Map(),
|
||||
features: new Map(),
|
||||
dependencies: new Map(),
|
||||
lineage: new Map()
|
||||
};
|
||||
}
|
||||
|
||||
async initialize(): Promise<void> {
|
||||
logger.info('🔄 Initializing Feature Store Service...');
|
||||
|
||||
await this.eventBus.initialize();
|
||||
await this.onlineStore.initialize();
|
||||
await this.offlineStore.initialize();
|
||||
await this.metadataStore.initialize();
|
||||
|
||||
// Load existing feature groups from metadata store
|
||||
await this.loadFeatureGroups();
|
||||
|
||||
// Subscribe to feature events
|
||||
await this.eventBus.subscribe('feature.*', this.handleFeatureEvent.bind(this));
|
||||
|
||||
logger.info('✅ Feature Store Service initialized');
|
||||
}
|
||||
|
||||
async createFeatureGroup(featureGroup: Omit<FeatureGroup, 'id' | 'createdAt' | 'updatedAt'>): Promise<FeatureGroup> {
|
||||
const featureGroupWithId: FeatureGroup = {
|
||||
...featureGroup,
|
||||
id: this.generateFeatureGroupId(),
|
||||
status: FeatureGroupStatus.DRAFT,
|
||||
createdAt: new Date(),
|
||||
updatedAt: new Date(),
|
||||
};
|
||||
|
||||
// Store in metadata store
|
||||
await this.metadataStore.saveFeatureGroup(featureGroupWithId);
|
||||
|
||||
// Update registry
|
||||
this.registry.featureGroups.set(featureGroupWithId.id, featureGroupWithId);
|
||||
|
||||
// Register individual features
|
||||
for (const feature of featureGroupWithId.features) {
|
||||
const featureKey = `${featureGroupWithId.id}.${feature.name}`;
|
||||
this.registry.features.set(featureKey, feature);
|
||||
}
|
||||
|
||||
await this.eventBus.publish('feature.group.created', {
|
||||
featureGroupId: featureGroupWithId.id,
|
||||
featureGroup: featureGroupWithId,
|
||||
});
|
||||
|
||||
logger.info(`📋 Created feature group: ${featureGroupWithId.name} (${featureGroupWithId.id})`);
|
||||
return featureGroupWithId;
|
||||
}
|
||||
|
||||
async updateFeatureGroup(id: string, updates: Partial<FeatureGroup>): Promise<FeatureGroup> {
|
||||
const existingGroup = this.registry.featureGroups.get(id);
|
||||
if (!existingGroup) {
|
||||
throw new Error(`Feature group not found: ${id}`);
|
||||
}
|
||||
|
||||
const updatedGroup: FeatureGroup = {
|
||||
...existingGroup,
|
||||
...updates,
|
||||
id, // Ensure ID doesn't change
|
||||
updatedAt: new Date(),
|
||||
};
|
||||
|
||||
// Store in metadata store
|
||||
await this.metadataStore.saveFeatureGroup(updatedGroup);
|
||||
|
||||
// Update registry
|
||||
this.registry.featureGroups.set(id, updatedGroup);
|
||||
|
||||
await this.eventBus.publish('feature.group.updated', {
|
||||
featureGroupId: id,
|
||||
featureGroup: updatedGroup,
|
||||
});
|
||||
|
||||
logger.info(`📝 Updated feature group: ${updatedGroup.name} (${id})`);
|
||||
return updatedGroup;
|
||||
}
|
||||
|
||||
async deleteFeatureGroup(id: string): Promise<void> {
|
||||
const featureGroup = this.registry.featureGroups.get(id);
|
||||
if (!featureGroup) {
|
||||
throw new Error(`Feature group not found: ${id}`);
|
||||
}
|
||||
|
||||
// Remove from stores
|
||||
await this.metadataStore.deleteFeatureGroup(id);
|
||||
await this.onlineStore.deleteFeatureGroup(id);
|
||||
await this.offlineStore.deleteFeatureGroup(id);
|
||||
|
||||
// Update registry
|
||||
this.registry.featureGroups.delete(id);
|
||||
|
||||
// Remove features from registry
|
||||
for (const feature of featureGroup.features) {
|
||||
const featureKey = `${id}.${feature.name}`;
|
||||
this.registry.features.delete(featureKey);
|
||||
}
|
||||
|
||||
await this.eventBus.publish('feature.group.deleted', {
|
||||
featureGroupId: id,
|
||||
featureGroup,
|
||||
});
|
||||
|
||||
logger.info(`🗑️ Deleted feature group: ${featureGroup.name} (${id})`);
|
||||
}
|
||||
|
||||
async getFeatureGroup(id: string): Promise<FeatureGroup | null> {
|
||||
return this.registry.featureGroups.get(id) || null;
|
||||
}
|
||||
|
||||
async listFeatureGroups(status?: FeatureGroupStatus): Promise<FeatureGroup[]> {
|
||||
const groups = Array.from(this.registry.featureGroups.values());
|
||||
return status ? groups.filter(group => group.status === status) : groups;
|
||||
}
|
||||
|
||||
async getOnlineFeatures(request: FeatureRequest): Promise<FeatureResponse[]> {
|
||||
logger.info(`🔍 Getting online features for ${request.entityIds.length} entities`);
|
||||
|
||||
const responses: FeatureResponse[] = [];
|
||||
|
||||
for (const entityId of request.entityIds) {
|
||||
const features: Record<string, any> = {};
|
||||
const metadata: Record<string, any> = {};
|
||||
|
||||
for (const featureGroupId of request.featureGroups) {
|
||||
const featureGroup = this.registry.featureGroups.get(featureGroupId);
|
||||
if (!featureGroup) {
|
||||
logger.warn(`Feature group not found: ${featureGroupId}`);
|
||||
continue;
|
||||
}
|
||||
|
||||
const featureVector = await this.onlineStore.getFeatures(
|
||||
entityId,
|
||||
request.entityType,
|
||||
featureGroupId,
|
||||
request.asOfTime
|
||||
);
|
||||
|
||||
if (featureVector) {
|
||||
Object.assign(features, featureVector.values);
|
||||
if (request.includeMetadata) {
|
||||
metadata[featureGroupId] = featureVector.metadata;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
responses.push({
|
||||
entityId,
|
||||
entityType: request.entityType,
|
||||
features,
|
||||
metadata,
|
||||
timestamp: request.asOfTime || new Date(),
|
||||
});
|
||||
}
|
||||
|
||||
return responses;
|
||||
}
|
||||
|
||||
async getHistoricalFeatures(request: FeatureRequest): Promise<FeatureResponse[]> {
|
||||
logger.info(`📊 Getting historical features for ${request.entityIds.length} entities`);
|
||||
|
||||
return await this.offlineStore.getHistoricalFeatures(request);
|
||||
}
|
||||
|
||||
async getBatchFeatures(request: FeatureRequest): Promise<FeatureResponse[]> {
|
||||
logger.info(`📦 Getting batch features for ${request.entityIds.length} entities`);
|
||||
|
||||
// For batch requests, use offline store for efficiency
|
||||
return await this.offlineStore.getBatchFeatures(request);
|
||||
}
|
||||
|
||||
async ingestFeatures(featureVectors: FeatureVector[]): Promise<void> {
|
||||
logger.info(`📥 Ingesting ${featureVectors.length} feature vectors`);
|
||||
|
||||
// Store in both online and offline stores
|
||||
await Promise.all([
|
||||
this.onlineStore.writeFeatures(featureVectors),
|
||||
this.offlineStore.writeFeatures(featureVectors)
|
||||
]);
|
||||
|
||||
await this.eventBus.publish('feature.ingested', {
|
||||
vectorCount: featureVectors.length,
|
||||
timestamp: new Date(),
|
||||
});
|
||||
}
|
||||
|
||||
async searchFeatures(query: string, filters?: Record<string, any>): Promise<any[]> {
|
||||
const results: any[] = [];
|
||||
|
||||
for (const [groupId, group] of this.registry.featureGroups) {
|
||||
for (const feature of group.features) {
|
||||
const featureInfo = {
|
||||
featureGroupId: groupId,
|
||||
featureGroupName: group.name,
|
||||
featureName: feature.name,
|
||||
description: feature.description,
|
||||
type: feature.type,
|
||||
valueType: feature.valueType,
|
||||
tags: feature.tags,
|
||||
};
|
||||
|
||||
// Simple text search
|
||||
const searchText = `${group.name} ${feature.name} ${feature.description || ''} ${feature.tags.join(' ')}`.toLowerCase();
|
||||
if (searchText.includes(query.toLowerCase())) {
|
||||
// Apply filters if provided
|
||||
if (filters) {
|
||||
let matches = true;
|
||||
for (const [key, value] of Object.entries(filters)) {
|
||||
if (featureInfo[key as keyof typeof featureInfo] !== value) {
|
||||
matches = false;
|
||||
break;
|
||||
}
|
||||
}
|
||||
if (matches) {
|
||||
results.push(featureInfo);
|
||||
}
|
||||
} else {
|
||||
results.push(featureInfo);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return results;
|
||||
}
|
||||
|
||||
async getFeatureLineage(featureGroupId: string, featureName: string): Promise<any> {
|
||||
const lineageKey = `${featureGroupId}.${featureName}`;
|
||||
return this.registry.lineage.get(lineageKey) || null;
|
||||
}
|
||||
|
||||
async getFeatureUsage(featureGroupId: string, featureName: string): Promise<any> {
|
||||
// In a real implementation, this would track feature usage across models and applications
|
||||
return {
|
||||
featureGroupId,
|
||||
featureName,
|
||||
usageCount: 0,
|
||||
lastUsed: null,
|
||||
consumers: [],
|
||||
models: []
|
||||
};
|
||||
}
|
||||
|
||||
private async loadFeatureGroups(): Promise<void> {
|
||||
logger.info('📂 Loading existing feature groups...');
|
||||
|
||||
const featureGroups = await this.metadataStore.getAllFeatureGroups();
|
||||
|
||||
for (const group of featureGroups) {
|
||||
this.registry.featureGroups.set(group.id, group);
|
||||
|
||||
// Register individual features
|
||||
for (const feature of group.features) {
|
||||
const featureKey = `${group.id}.${feature.name}`;
|
||||
this.registry.features.set(featureKey, feature);
|
||||
}
|
||||
}
|
||||
|
||||
logger.info(`📂 Loaded ${featureGroups.length} feature groups`);
|
||||
}
|
||||
|
||||
private async handleFeatureEvent(event: any): Promise<void> {
|
||||
logger.debug('📨 Received feature event:', event);
|
||||
// Handle feature-level events
|
||||
}
|
||||
|
||||
private generateFeatureGroupId(): string {
|
||||
return `fg_${Date.now()}_${Math.random().toString(36).substr(2, 9)}`;
|
||||
}
|
||||
|
||||
async shutdown(): Promise<void> {
|
||||
logger.info('🔄 Shutting down Feature Store Service...');
|
||||
|
||||
await this.onlineStore.shutdown();
|
||||
await this.offlineStore.shutdown();
|
||||
await this.metadataStore.shutdown();
|
||||
await this.eventBus.disconnect();
|
||||
|
||||
logger.info('✅ Feature Store Service shutdown complete');
|
||||
}
|
||||
}
|
||||
|
|
@ -0,0 +1,52 @@
|
|||
import { logger } from '@stock-bot/utils';
|
||||
import { FeatureGroup } from '../../types/FeatureStore';
|
||||
|
||||
export class MetadataStore {
|
||||
private featureGroups: Map<string, FeatureGroup> = new Map();
|
||||
|
||||
async initialize(): Promise<void> {
|
||||
logger.info('🔄 Initializing Metadata Store...');
|
||||
|
||||
// In a real implementation, connect to PostgreSQL or other metadata store
|
||||
this.featureGroups.clear();
|
||||
|
||||
logger.info('✅ Metadata Store initialized');
|
||||
}
|
||||
|
||||
async saveFeatureGroup(featureGroup: FeatureGroup): Promise<void> {
|
||||
this.featureGroups.set(featureGroup.id, { ...featureGroup });
|
||||
logger.debug(`💾 Saved feature group metadata: ${featureGroup.id}`);
|
||||
}
|
||||
|
||||
async getFeatureGroup(id: string): Promise<FeatureGroup | null> {
|
||||
return this.featureGroups.get(id) || null;
|
||||
}
|
||||
|
||||
async getAllFeatureGroups(): Promise<FeatureGroup[]> {
|
||||
return Array.from(this.featureGroups.values());
|
||||
}
|
||||
|
||||
async deleteFeatureGroup(id: string): Promise<void> {
|
||||
this.featureGroups.delete(id);
|
||||
logger.debug(`🗑️ Deleted feature group metadata: ${id}`);
|
||||
}
|
||||
|
||||
async findFeatureGroups(criteria: Partial<FeatureGroup>): Promise<FeatureGroup[]> {
|
||||
const groups = Array.from(this.featureGroups.values());
|
||||
|
||||
return groups.filter(group => {
|
||||
for (const [key, value] of Object.entries(criteria)) {
|
||||
if (group[key as keyof FeatureGroup] !== value) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
return true;
|
||||
});
|
||||
}
|
||||
|
||||
async shutdown(): Promise<void> {
|
||||
logger.info('🔄 Shutting down Metadata Store...');
|
||||
this.featureGroups.clear();
|
||||
logger.info('✅ Metadata Store shutdown complete');
|
||||
}
|
||||
}
|
||||
|
|
@ -0,0 +1,121 @@
|
|||
import { logger } from '@stock-bot/utils';
|
||||
import { FeatureVector, FeatureRequest, FeatureResponse } from '../../types/FeatureStore';
|
||||
|
||||
export class OfflineStore {
|
||||
private store: Map<string, FeatureVector[]> = new Map();
|
||||
|
||||
async initialize(): Promise<void> {
|
||||
logger.info('🔄 Initializing Offline Store...');
|
||||
|
||||
// In a real implementation, connect to data warehouse, S3, etc.
|
||||
this.store.clear();
|
||||
|
||||
logger.info('✅ Offline Store initialized');
|
||||
}
|
||||
|
||||
async writeFeatures(featureVectors: FeatureVector[]): Promise<void> {
|
||||
for (const vector of featureVectors) {
|
||||
const partitionKey = this.buildPartitionKey(vector.entityType, vector.featureGroupId);
|
||||
|
||||
if (!this.store.has(partitionKey)) {
|
||||
this.store.set(partitionKey, []);
|
||||
}
|
||||
|
||||
this.store.get(partitionKey)!.push(vector);
|
||||
}
|
||||
|
||||
logger.debug(`💾 Stored ${featureVectors.length} feature vectors in offline store`);
|
||||
}
|
||||
|
||||
async getHistoricalFeatures(request: FeatureRequest): Promise<FeatureResponse[]> {
|
||||
const responses: FeatureResponse[] = [];
|
||||
|
||||
for (const entityId of request.entityIds) {
|
||||
const features: Record<string, any> = {};
|
||||
const metadata: Record<string, any> = {};
|
||||
|
||||
for (const featureGroupId of request.featureGroups) {
|
||||
const partitionKey = this.buildPartitionKey(request.entityType, featureGroupId);
|
||||
const vectors = this.store.get(partitionKey) || [];
|
||||
|
||||
// Find the most recent vector for this entity before asOfTime
|
||||
const relevantVectors = vectors
|
||||
.filter(v => v.entityId === entityId)
|
||||
.filter(v => !request.asOfTime || v.timestamp <= request.asOfTime)
|
||||
.sort((a, b) => b.timestamp.getTime() - a.timestamp.getTime());
|
||||
|
||||
if (relevantVectors.length > 0) {
|
||||
const latestVector = relevantVectors[0];
|
||||
Object.assign(features, latestVector.values);
|
||||
|
||||
if (request.includeMetadata) {
|
||||
metadata[featureGroupId] = latestVector.metadata;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
responses.push({
|
||||
entityId,
|
||||
entityType: request.entityType,
|
||||
features,
|
||||
metadata,
|
||||
timestamp: request.asOfTime || new Date(),
|
||||
});
|
||||
}
|
||||
|
||||
return responses;
|
||||
}
|
||||
|
||||
async getBatchFeatures(request: FeatureRequest): Promise<FeatureResponse[]> {
|
||||
// For simplicity, use the same logic as historical features
|
||||
// In a real implementation, this would use optimized batch processing
|
||||
return await this.getHistoricalFeatures(request);
|
||||
}
|
||||
|
||||
async getFeatureData(
|
||||
featureGroupId: string,
|
||||
entityType: string,
|
||||
startTime?: Date,
|
||||
endTime?: Date
|
||||
): Promise<FeatureVector[]> {
|
||||
const partitionKey = this.buildPartitionKey(entityType, featureGroupId);
|
||||
let vectors = this.store.get(partitionKey) || [];
|
||||
|
||||
// Apply time filters
|
||||
if (startTime) {
|
||||
vectors = vectors.filter(v => v.timestamp >= startTime);
|
||||
}
|
||||
|
||||
if (endTime) {
|
||||
vectors = vectors.filter(v => v.timestamp <= endTime);
|
||||
}
|
||||
|
||||
return vectors;
|
||||
}
|
||||
|
||||
async deleteFeatureGroup(featureGroupId: string): Promise<void> {
|
||||
const keysToDelete: string[] = [];
|
||||
|
||||
for (const key of this.store.keys()) {
|
||||
if (key.includes(`:${featureGroupId}`)) {
|
||||
keysToDelete.push(key);
|
||||
}
|
||||
}
|
||||
|
||||
for (const key of keysToDelete) {
|
||||
this.store.delete(key);
|
||||
}
|
||||
|
||||
logger.debug(`🗑️ Deleted ${keysToDelete.length} partitions for feature group: ${featureGroupId}`);
|
||||
}
|
||||
|
||||
private buildPartitionKey(entityType: string, featureGroupId: string): string {
|
||||
return `${entityType}:${featureGroupId}`;
|
||||
}
|
||||
|
||||
async shutdown(): Promise<void> {
|
||||
logger.info('🔄 Shutting down Offline Store...');
|
||||
this.store.clear();
|
||||
logger.info('✅ Offline Store shutdown complete');
|
||||
}
|
||||
}
|
||||
|
|
@ -0,0 +1,75 @@
|
|||
import { logger } from '@stock-bot/utils';
|
||||
import { FeatureVector, FeatureRequest, FeatureResponse } from '../../types/FeatureStore';
|
||||
|
||||
export class OnlineStore {
|
||||
private store: Map<string, any> = new Map();
|
||||
|
||||
async initialize(): Promise<void> {
|
||||
logger.info('🔄 Initializing Online Store...');
|
||||
|
||||
// In a real implementation, connect to Redis or other online store
|
||||
this.store.clear();
|
||||
|
||||
logger.info('✅ Online Store initialized');
|
||||
}
|
||||
|
||||
async writeFeatures(featureVectors: FeatureVector[]): Promise<void> {
|
||||
for (const vector of featureVectors) {
|
||||
const key = this.buildKey(vector.entityId, vector.entityType, vector.featureGroupId);
|
||||
|
||||
this.store.set(key, {
|
||||
...vector,
|
||||
timestamp: vector.timestamp,
|
||||
});
|
||||
}
|
||||
|
||||
logger.debug(`💾 Stored ${featureVectors.length} feature vectors in online store`);
|
||||
}
|
||||
|
||||
async getFeatures(
|
||||
entityId: string,
|
||||
entityType: string,
|
||||
featureGroupId: string,
|
||||
asOfTime?: Date
|
||||
): Promise<FeatureVector | null> {
|
||||
const key = this.buildKey(entityId, entityType, featureGroupId);
|
||||
const storedVector = this.store.get(key);
|
||||
|
||||
if (!storedVector) {
|
||||
return null;
|
||||
}
|
||||
|
||||
// If asOfTime is specified, check if the stored vector is valid at that time
|
||||
if (asOfTime && storedVector.timestamp > asOfTime) {
|
||||
return null;
|
||||
}
|
||||
|
||||
return storedVector;
|
||||
}
|
||||
|
||||
async deleteFeatureGroup(featureGroupId: string): Promise<void> {
|
||||
const keysToDelete: string[] = [];
|
||||
|
||||
for (const key of this.store.keys()) {
|
||||
if (key.includes(`:${featureGroupId}`)) {
|
||||
keysToDelete.push(key);
|
||||
}
|
||||
}
|
||||
|
||||
for (const key of keysToDelete) {
|
||||
this.store.delete(key);
|
||||
}
|
||||
|
||||
logger.debug(`🗑️ Deleted ${keysToDelete.length} records for feature group: ${featureGroupId}`);
|
||||
}
|
||||
|
||||
private buildKey(entityId: string, entityType: string, featureGroupId: string): string {
|
||||
return `${entityType}:${entityId}:${featureGroupId}`;
|
||||
}
|
||||
|
||||
async shutdown(): Promise<void> {
|
||||
logger.info('🔄 Shutting down Online Store...');
|
||||
this.store.clear();
|
||||
logger.info('✅ Online Store shutdown complete');
|
||||
}
|
||||
}
|
||||
243
apps/data-services/feature-store/src/types/FeatureStore.ts
Normal file
243
apps/data-services/feature-store/src/types/FeatureStore.ts
Normal file
|
|
@ -0,0 +1,243 @@
|
|||
// Feature Store Types

/**
 * A versioned, named collection of features that are computed and served
 * together from a single source.
 */
export interface FeatureGroup {
  id: string;
  name: string;
  description?: string;
  /** Version label for this group definition (format not enforced here). */
  version: string;
  /** The features materialized by this group. */
  features: Feature[];
  /** Where and how the raw data for this group is obtained. */
  source: FeatureSource;
  /** Optional recurring computation schedule; absent means on-demand only. */
  schedule?: FeatureSchedule;
  metadata: Record<string, any>;
  createdAt: Date;
  updatedAt: Date;
  status: FeatureGroupStatus;
}

/** Lifecycle state of a feature group. */
export enum FeatureGroupStatus {
  DRAFT = 'draft',
  ACTIVE = 'active',
  DEPRECATED = 'deprecated',
  ARCHIVED = 'archived',
}

/** Definition of a single feature within a {@link FeatureGroup}. */
export interface Feature {
  name: string;
  /** Semantic category (numerical, categorical, ...). */
  type: FeatureType;
  description?: string;
  /** Runtime value representation, independent of the semantic {@link type}. */
  valueType: 'number' | 'string' | 'boolean' | 'array' | 'object';
  nullable: boolean;
  defaultValue?: any;
  validation?: FeatureValidation;
  transformation?: FeatureTransformation;
  tags: string[];
}

/** Semantic categories a feature can belong to. */
export enum FeatureType {
  NUMERICAL = 'numerical',
  CATEGORICAL = 'categorical',
  BOOLEAN = 'boolean',
  TEXT = 'text',
  TIMESTAMP = 'timestamp',
  // Computed from other features rather than read directly from a source.
  DERIVED = 'derived',
}

/** Describes where a feature group's data comes from. */
export interface FeatureSource {
  type: 'batch' | 'stream' | 'sql' | 'api' | 'file';
  /** Connection parameters; shape depends on {@link type}. */
  connection: Record<string, any>;
  /** Extraction query, for query-based sources. */
  query?: string;
  /** Optional transformation applied after extraction. */
  transformation?: string;
  /** Refresh cadence — presumably milliseconds; TODO confirm units against the scheduler. */
  refreshInterval?: number;
}

/** Recurring computation schedule for a feature group. */
export interface FeatureSchedule {
  cronExpression: string;
  enabled: boolean;
  lastRun: Date | null;
  nextRun: Date | null;
}

/** Declarative validation rules applied to a feature's values. */
export interface FeatureValidation {
  required: boolean;
  minValue?: number;
  maxValue?: number;
  allowedValues?: any[];
  /** Regular expression a string value must match. */
  pattern?: string;
  /** Custom validator reference — presumably a registered function name; verify against the validation code. */
  customValidator?: string;
}

/** Transformation applied to raw values before storage/serving. */
export interface FeatureTransformation {
  type: 'normalize' | 'standardize' | 'encode' | 'custom';
  /** Transformation-specific settings. */
  parameters: Record<string, any>;
}
|
||||
|
||||
// Feature Value Types

/**
 * A concrete set of feature values for one entity at one point in time.
 * This is the unit written to and read from the online/offline stores.
 */
export interface FeatureVector {
  entityId: string;
  entityType: string;
  featureGroupId: string;
  /** When these values were computed/valid. */
  timestamp: Date;
  /** Feature name → value. */
  values: Record<string, any>;
  metadata?: Record<string, any>;
}

/** Batch lookup request for feature values. */
export interface FeatureRequest {
  entityIds: string[];
  entityType: string;
  /** IDs of the feature groups to fetch. */
  featureGroups: string[];
  /** Only return values valid at or before this time. */
  asOfTime?: Date;
  /** Request point-in-time-correct retrieval — TODO confirm exact semantics in the serving code. */
  pointInTime?: boolean;
  includeMetadata?: boolean;
}

/** Per-entity result of a {@link FeatureRequest}. */
export interface FeatureResponse {
  entityId: string;
  entityType: string;
  /** Feature name → value. */
  features: Record<string, any>;
  metadata: Record<string, any>;
  timestamp: Date;
}
|
||||
|
||||
// Feature Store Operations

/** Record of one execution of a feature group's computation. */
export interface FeatureComputation {
  id: string;
  featureGroupId: string;
  status: ComputationStatus;
  startTime: Date;
  /** Unset while the computation is still running. */
  endTime?: Date;
  recordsProcessed: number;
  recordsGenerated: number;
  /** Per-entity failures collected during the run. */
  errors: ComputationError[];
  metadata: Record<string, any>;
}

/** Lifecycle states of a computation run. */
export enum ComputationStatus {
  PENDING = 'pending',
  RUNNING = 'running',
  COMPLETED = 'completed',
  FAILED = 'failed',
  CANCELLED = 'cancelled',
}

/** A failure affecting a single entity during a computation run. */
export interface ComputationError {
  entityId: string;
  error: string;
  timestamp: Date;
}
|
||||
|
||||
// Feature Statistics

/** Summary statistics computed for one feature within a group. */
export interface FeatureStatistics {
  featureGroupId: string;
  featureName: string;
  statistics: {
    count: number;
    nullCount: number;
    distinctCount: number;
    // The numeric summaries below are optional: only meaningful for
    // numerical features.
    min?: number;
    max?: number;
    mean?: number;
    median?: number;
    stdDev?: number;
    /** Percentile label → value — presumably keys like "p95"; TODO confirm against the producer. */
    percentiles?: Record<string, number>;
    histogram?: HistogramBucket[];
    /** Most frequent values (useful for categorical features). */
    topValues?: ValueCount[];
  };
  computedAt: Date;
}

/** One bucket of a value histogram (bound inclusivity not specified here). */
export interface HistogramBucket {
  min: number;
  max: number;
  count: number;
}

/** Frequency of a single value within a feature's distribution. */
export interface ValueCount {
  value: any;
  count: number;
  /** Share of the total — presumably 0–100; verify against the producer. */
  percentage: number;
}
|
||||
|
||||
// Feature Registry

/** Index of registered feature definitions and their relationships. */
export interface FeatureRegistry {
  /** Feature group ID → definition. */
  featureGroups: Map<string, FeatureGroup>;
  /** Feature key → definition — presumably keyed by group + feature name; TODO confirm key format. */
  features: Map<string, Feature>;
  /** Dependency edges between entries — TODO confirm key/value direction against the registry code. */
  dependencies: Map<string, string[]>;
  lineage: Map<string, FeatureLineage>;
}

/** Upstream/downstream provenance of a single feature. */
export interface FeatureLineage {
  featureGroupId: string;
  featureName: string;
  /** Features this one is derived from. */
  upstream: FeatureDependency[];
  /** Features derived from this one. */
  downstream: FeatureDependency[];
  /** Names of transformations applied along the way. */
  transformations: string[];
}

/** A reference to another feature plus how it is related. */
export interface FeatureDependency {
  featureGroupId: string;
  featureName: string;
  dependencyType: 'direct' | 'derived' | 'aggregated';
}
|
||||
|
||||
// Storage Types

/** Top-level configuration for the feature store's three backends. */
export interface FeatureStorageConfig {
  /** Low-latency serving store. */
  online: OnlineStoreConfig;
  /** Historical/batch store. */
  offline: OfflineStoreConfig;
  /** Store for feature definitions and registry state. */
  metadata: MetadataStoreConfig;
}

/** Configuration of the low-latency online store backend. */
export interface OnlineStoreConfig {
  type: 'redis' | 'dynamodb' | 'cassandra';
  /** Backend-specific connection parameters. */
  connection: Record<string, any>;
  /** Record time-to-live — presumably seconds; TODO confirm units. */
  ttl?: number;
  /** Template for building record keys. */
  keyFormat?: string;
}

/** Configuration of the historical offline store backend. */
export interface OfflineStoreConfig {
  type: 'parquet' | 'delta' | 'postgresql' | 's3';
  connection: Record<string, any>;
  partitioning?: PartitioningConfig;
}

/** Configuration of the metadata database. */
export interface MetadataStoreConfig {
  type: 'postgresql' | 'mysql' | 'sqlite';
  connection: Record<string, any>;
}

/** How offline data is partitioned. */
export interface PartitioningConfig {
  /** Columns the partitioning is keyed on. */
  columns: string[];
  strategy: 'time' | 'hash' | 'range';
  /** Bucket size for time-based partitioning. */
  granularity?: 'hour' | 'day' | 'month';
}
|
||||
|
||||
// Monitoring and Alerting

/** Monitoring configuration and current alerts for one feature. */
export interface FeatureMonitoring {
  featureGroupId: string;
  featureName: string;
  monitors: FeatureMonitor[];
  alerts: FeatureAlert[];
}

/** A single health check applied to a feature. */
export interface FeatureMonitor {
  name: string;
  type: 'drift' | 'freshness' | 'availability' | 'quality';
  /** Value at which the monitor fires — interpretation depends on {@link type}. */
  threshold: number;
  enabled: boolean;
  /** Monitor-specific settings. */
  configuration: Record<string, any>;
}

/** An alert raised by a {@link FeatureMonitor}. */
export interface FeatureAlert {
  id: string;
  monitorName: string;
  level: 'warning' | 'error' | 'critical';
  message: string;
  timestamp: Date;
  resolved: boolean;
  resolvedAt?: Date;
}
|
||||
Loading…
Add table
Add a link
Reference in a new issue