Files
Web_BLS_OldRcu_Heartbeat_Se…/bls-oldrcu-heartbeat-backend/src/index.js
XuJiacheng e45d14b720 feat: 实现心跳消息处理模块
- 新增 HeartbeatBuffer 类,用于收集和去重 Kafka 心跳消息,并定期将数据刷新到数据库。
- 新增 HeartbeatDbManager 类,负责与 PostgreSQL 数据库的交互,支持批量 upsert 操作。
- 新增配置文件 config.js,支持从环境变量加载配置。
- 新增 Kafka 消费者模块,支持从 Kafka 中消费心跳消息。
- 新增 Redis 集成模块,支持将日志和心跳信息推送到 Redis。
- 新增心跳消息解析器,负责解析 Kafka 消息并提取心跳字段。
- 新增日志记录工具,支持不同级别的日志输出。
- 新增指标收集器,跟踪 Kafka 消息处理和数据库操作的指标。
- 新增单元测试,覆盖 HeartbeatBuffer 和 HeartbeatDbManager 的主要功能。
- 新增数据库表结构 SQL 文件,定义 room_status_moment_g5 表的结构。
- 配置 Vite 构建工具,支持 Node.js 环境的构建。
2026-03-12 14:11:02 +08:00

128 lines
3.8 KiB
JavaScript

import cron from 'node-cron';
import { config } from './config/config.js';
import { createKafkaConsumers } from './kafka/consumer.js';
import { createRedisClient } from './redis/redisClient.js';
import { RedisIntegration } from './redis/redisIntegration.js';
import { HeartbeatDbManager } from './db/heartbeatDbManager.js';
import { HeartbeatBuffer } from './buffer/heartbeatBuffer.js';
import { parseHeartbeat } from './processor/heartbeatParser.js';
import { MetricCollector } from './utils/metricCollector.js';
import { logger } from './utils/logger.js';
/**
 * Boot the heartbeat backend: wire up the metric collector, Redis
 * integration, PostgreSQL (G5) manager, the deduplicating heartbeat
 * buffer, a per-minute metrics cron, the Kafka consumers, and graceful
 * shutdown handlers.
 *
 * @returns {Promise<void>} resolves once all components are started
 */
const bootstrap = async () => {
  // 1. Metric Collector — accumulates per-minute counters
  //    (kafka_pulled / parse_error / db_upserted / db_failed).
  const metricCollector = new MetricCollector();

  // 2. Redis — connection plus the project's liveness heartbeat.
  const redisClient = await createRedisClient(config.redis);
  const redisIntegration = new RedisIntegration(
    redisClient,
    config.redis.projectName,
    config.redis.apiBaseUrl
  );
  redisIntegration.startHeartbeat();
  logger.info('Redis connected & heartbeat started');

  // 3. Database (G5). A failed connection test is logged but not fatal:
  //    the service still starts and the buffer's later upserts decide
  //    whether data actually lands.
  const dbManager = new HeartbeatDbManager(config.db);
  const dbOk = await dbManager.testConnection();
  if (!dbOk) {
    logger.error('PostgreSQL G5 connection test failed');
  } else {
    logger.info('PostgreSQL G5 connected', {
      host: config.db.host,
      port: config.db.port,
      database: config.db.database,
      schema: config.db.schema,
      table: config.db.table
    });
  }

  // 4. Heartbeat Buffer (periodic flush, deduped by hotel_id:room_id).
  const heartbeatBuffer = new HeartbeatBuffer(dbManager, {
    flushInterval: config.heartbeatBuffer.flushInterval,
    maxBufferSize: config.heartbeatBuffer.maxBufferSize,
    redisIntegration,
    metricCollector
  });

  // 5. Minute Metrics Cron. Keep the task handle so shutdown can stop it;
  //    the original discarded it and the job kept firing during teardown.
  const metricsTask = cron.schedule('* * * * *', async () => {
    const metrics = metricCollector.getAndReset();
    const report = `[Minute Metrics] Pulled: ${metrics.kafka_pulled}, Parse Error: ${metrics.parse_error}, Upserted: ${metrics.db_upserted}, Failed: ${metrics.db_failed}`;
    console.log(report);
    logger.info(report, metrics);
    try {
      await redisIntegration.info('Minute Metrics', metrics);
    } catch (err) {
      logger.error('Failed to report metrics to Redis', { error: err?.message });
    }
  });

  // 6. Kafka message handler: decode -> parse -> buffer. Parse failures are
  //    counted, never thrown, so one bad message cannot stall the consumer.
  const handleMessage = async (message) => {
    metricCollector.increment('kafka_pulled');
    heartbeatBuffer.notePulled();
    try {
      const raw = Buffer.isBuffer(message.value)
        ? message.value.toString('utf8')
        : message.value;
      const record = parseHeartbeat(raw);
      if (!record) {
        metricCollector.increment('parse_error');
        return;
      }
      heartbeatBuffer.noteEligible();
      heartbeatBuffer.add(record);
    } catch (error) {
      metricCollector.increment('parse_error');
      logger.error('Message processing error', { error: error?.message });
    }
  };

  // 7. Start Kafka consumers.
  const consumers = await createKafkaConsumers({
    kafkaConfig: config.kafka,
    onMessage: handleMessage,
    onError: (error) => {
      logger.error('Kafka consumer error', { error: error?.message });
    }
  });
  logger.info(`Started ${consumers.length} Kafka consumer(s) on topic: ${config.kafka.topic}`);

  // 8. Graceful shutdown. Guarded against double invocation (e.g. SIGINT
  //    followed by SIGTERM) and torn down in dependency order: stop the
  //    sources of new work (consumers, cron) BEFORE the final flush, so
  //    nothing lands in the buffer after it has been drained and the DB
  //    connection closed.
  let shuttingDown = false;
  const shutdown = async () => {
    if (shuttingDown) {
      return;
    }
    shuttingDown = true;
    logger.info('Shutting down...');
    metricsTask.stop();
    for (const consumer of consumers) {
      try {
        // NOTE(review): assumes the consumer exposes disconnect() (kafkajs
        // style) — optional chaining keeps this a no-op otherwise; confirm
        // against createKafkaConsumers.
        await consumer.disconnect?.();
      } catch {
        // best effort
      }
    }
    try {
      await heartbeatBuffer.flush();
    } catch {
      // best effort
    }
    try {
      await dbManager.close();
    } catch {
      // best effort
    }
    try {
      await redisClient.quit();
    } catch {
      // best effort
    }
    process.exit(0);
  };
  process.on('SIGTERM', shutdown);
  process.on('SIGINT', shutdown);
  logger.info('bls-oldrcu-heartbeat-backend started');
};
// Entry point: run the bootstrap sequence; any startup failure is fatal.
void (async () => {
  try {
    await bootstrap();
  } catch (err) {
    logger.error('Bootstrap failed', { error: err?.message, stack: err?.stack });
    process.exit(1);
  }
})();