import cron from 'node-cron';
import { config } from './config/config.js';
import { createKafkaConsumers } from './kafka/consumer.js';
import { createRedisClient } from './redis/redisClient.js';
import { RedisIntegration } from './redis/redisIntegration.js';
import { HeartbeatDbManager } from './db/heartbeatDbManager.js';
import { HeartbeatBuffer } from './buffer/heartbeatBuffer.js';
import { parseHeartbeat } from './processor/heartbeatParser.js';
import { MetricCollector } from './utils/metricCollector.js';
import { logger } from './utils/logger.js';

/**
 * Boot sequence for the bls-oldrcu-heartbeat-backend service:
 * metrics collector -> Redis (+ liveness heartbeat) -> PostgreSQL (G5) ->
 * heartbeat buffer -> per-minute metrics cron -> Kafka consumers,
 * plus graceful-shutdown wiring on SIGTERM/SIGINT.
 */
const bootstrap = async () => {
  // 1. Metric Collector — counters reported once per minute by the cron below.
  const metricCollector = new MetricCollector();

  // 2. Redis
  const redisClient = await createRedisClient(config.redis);
  const redisIntegration = new RedisIntegration(
    redisClient,
    config.redis.projectName,
    config.redis.apiBaseUrl
  );
  redisIntegration.startHeartbeat();
  logger.info('Redis connected & heartbeat started');

  // 3. Database (G5)
  const dbManager = new HeartbeatDbManager(config.db);
  const dbOk = await dbManager.testConnection();
  if (!dbOk) {
    // Deliberately non-fatal: the service still starts and the buffer's
    // later upserts surface as db_failed metrics rather than a crash.
    logger.error('PostgreSQL G5 connection test failed');
  } else {
    logger.info('PostgreSQL G5 connected', {
      host: config.db.host,
      port: config.db.port,
      database: config.db.database,
      schema: config.db.schema,
      table: config.db.table
    });
  }

  // 4. Heartbeat Buffer (5-second flush, deduplicated by hotel_id:room_id)
  const heartbeatBuffer = new HeartbeatBuffer(dbManager, {
    flushInterval: config.heartbeatBuffer.flushInterval,
    maxBufferSize: config.heartbeatBuffer.maxBufferSize,
    redisIntegration,
    metricCollector
  });

  // 5. Minute Metrics Cron — keep the task handle so shutdown can stop it.
  const metricsTask = cron.schedule('* * * * *', async () => {
    const metrics = metricCollector.getAndReset();
    const report = `[Minute Metrics] Pulled: ${metrics.kafka_pulled}, Parse Error: ${metrics.parse_error}, Upserted: ${metrics.db_upserted}, Failed: ${metrics.db_failed}`;
    console.log(report);
    logger.info(report, metrics);
    try {
      await redisIntegration.info('Minute Metrics', metrics);
    } catch (err) {
      // Metrics reporting is best-effort; never let it kill the cron tick.
      logger.error('Failed to report metrics to Redis', { error: err?.message });
    }
  });

  // 6. Kafka message handler
  const handleMessage = async (message) => {
    metricCollector.increment('kafka_pulled');
    heartbeatBuffer.notePulled();
    try {
      const raw = Buffer.isBuffer(message.value)
        ? message.value.toString('utf8')
        : message.value;
      const record = parseHeartbeat(raw);
      if (!record) {
        // parseHeartbeat returns a falsy value for malformed/irrelevant payloads.
        metricCollector.increment('parse_error');
        return;
      }
      heartbeatBuffer.noteEligible();
      heartbeatBuffer.add(record);
    } catch (error) {
      metricCollector.increment('parse_error');
      logger.error('Message processing error', { error: error?.message });
    }
  };

  // 7. Start Kafka consumers
  const consumers = await createKafkaConsumers({
    kafkaConfig: config.kafka,
    onMessage: handleMessage,
    onError: (error) => {
      logger.error('Kafka consumer error', { error: error?.message });
    }
  });
  logger.info(`Started ${consumers.length} Kafka consumer(s) on topic: ${config.kafka.topic}`);

  // 8. Graceful shutdown — idempotent, so receiving both SIGINT and SIGTERM
  // (or a repeated signal) cannot run the teardown twice against closed handles.
  let shuttingDown = false;
  const shutdown = async () => {
    if (shuttingDown) {
      return;
    }
    shuttingDown = true;
    logger.info('Shutting down...');

    // Stop the metrics cron so it doesn't fire mid-teardown.
    metricsTask.stop();

    // Stop consuming FIRST so no new messages land in the buffer while we
    // flush it and close the database underneath it.
    // NOTE(review): assumes kafkajs-style consumers exposing disconnect() —
    // confirm against ./kafka/consumer.js; optional chaining keeps this safe.
    for (const consumer of consumers) {
      try {
        await consumer.disconnect?.();
      } catch (err) {
        logger.warn('Kafka consumer disconnect failed', { error: err?.message });
      }
    }

    try {
      await heartbeatBuffer.flush();
    } catch (err) {
      // best effort — but leave a trace instead of swallowing silently
      logger.warn('Final buffer flush failed', { error: err?.message });
    }
    try {
      await dbManager.close();
    } catch (err) {
      // best effort
      logger.warn('Database close failed', { error: err?.message });
    }
    try {
      // Stop the Redis liveness heartbeat (if the integration exposes it)
      // before quitting the client, so no timer fires on a closed connection.
      redisIntegration.stopHeartbeat?.();
      await redisClient.quit();
    } catch (err) {
      // best effort
      logger.warn('Redis quit failed', { error: err?.message });
    }
    process.exit(0);
  };
  process.on('SIGTERM', shutdown);
  process.on('SIGINT', shutdown);

  logger.info('bls-oldrcu-heartbeat-backend started');
};

bootstrap().catch((err) => {
  logger.error('Bootstrap failed', { error: err?.message, stack: err?.stack });
  process.exit(1);
});