feat: implement Redis integration and Kafka message processing optimizations
- Add a Redis integration module with heartbeat writes and a console log queue
- Improve the Kafka consumer with multi-instance support and automatic reconnection
- Rework the message processor to support batch processing and multi-layer decoding
- Update the database schema, adjusting column types and constraints
- Add configuration options and environment-variable support for Redis and Kafka
- Add test cases and documentation
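The Kafka consumer changes (multiple consumer instances plus automatic reconnection) are not part of the hunks shown below. As a rough orientation only, here is a minimal sketch of that pattern, assuming kafkajs as the client library; the client id, group id, topic name, and KAFKA_BROKERS variable are illustrative placeholders rather than the project's actual configuration:

import { Kafka } from 'kafkajs';

const kafka = new Kafka({
  clientId: 'heartbeat-consumer', // placeholder client id
  brokers: (process.env.KAFKA_BROKERS ?? 'localhost:9092').split(','),
  // kafkajs retries broker connections on its own; a large retry budget
  // effectively gives the "auto-reconnect" behaviour described above.
  retry: { initialRetryTime: 300, retries: Number.MAX_SAFE_INTEGER },
});

export async function startConsumers(instanceCount, handleBatch) {
  const consumers = [];
  for (let i = 0; i < instanceCount; i++) {
    // All instances join the same group, so Kafka spreads partitions across them.
    const consumer = kafka.consumer({ groupId: 'heartbeat-workers' }); // placeholder group id
    await consumer.connect();
    await consumer.subscribe({ topic: 'heartbeat-events', fromBeginning: false }); // placeholder topic
    await consumer.run({
      eachBatch: async ({ batch }) => {
        // Hand the raw message values to the processor's batch path.
        await handleBatch(batch.messages.map((m) => m.value));
      },
    });
    consumers.push(consumer);
  }
  return consumers;
}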
@@ -13,9 +13,9 @@ CREATE TABLE IF NOT EXISTS heartbeat.heartbeat_events (
   ts_ms bigint NOT NULL,
   hotel_id int2 NOT NULL,
-  room_id int4 NOT NULL,
+  room_id varchar(50) NOT NULL,
   device_id varchar(64) NOT NULL,
-  ip inet NOT NULL,
+  ip varchar(21) NOT NULL,
   power_state int2 NOT NULL,
   guest_type int2 NOT NULL,
   cardless_state int2 NOT NULL,
@@ -23,7 +23,7 @@ CREATE TABLE IF NOT EXISTS heartbeat.heartbeat_events (
   pms_state int2 NOT NULL,
   carbon_state int2 NOT NULL,
   device_count int2 NOT NULL,
-  comm_seq int2 NOT NULL,
+  comm_seq int4 NOT NULL,
 
   -- Flexible fields: power metrics / air-conditioning data, etc. (may be split into structured columns later; kept in extra for now)
   extra jsonb,
@@ -33,14 +33,14 @@ CREATE TABLE IF NOT EXISTS heartbeat.heartbeat_events (
   -- CHECK constraints: start with "non-negative + upper bound" checks (so future enum extensions do not break inserts)
   CONSTRAINT chk_ts_ms_positive CHECK (ts_ms > 0),
   CONSTRAINT chk_hotel_id_range CHECK (hotel_id >= 0 AND hotel_id <= 32767),
-  CONSTRAINT chk_room_id_range CHECK (room_id >= 0),
+  CONSTRAINT chk_room_id_len CHECK (char_length(room_id) > 0 AND char_length(room_id) <= 50),
   CONSTRAINT chk_power_state_range CHECK (power_state >= 0 AND power_state <= 32767),
   CONSTRAINT chk_guest_type_range CHECK (guest_type >= 0 AND guest_type <= 32767),
   CONSTRAINT chk_cardless_state_range CHECK (cardless_state >= 0 AND cardless_state <= 32767),
   CONSTRAINT chk_pms_state_range CHECK (pms_state >= 0 AND pms_state <= 32767),
   CONSTRAINT chk_carbon_state_range CHECK (carbon_state >= 0 AND carbon_state <= 32767),
   CONSTRAINT chk_device_count_range CHECK (device_count >= 0 AND device_count <= 32767),
-  CONSTRAINT chk_comm_seq_range CHECK (comm_seq >= 0 AND comm_seq <= 32767)
+  CONSTRAINT chk_comm_seq_range CHECK (comm_seq >= 0)
 )
 PARTITION BY RANGE (ts_ms);
 
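The parent table is partitioned by range on ts_ms (epoch milliseconds), and the inserts further below rely on heartbeat.ensure_partitions to pre-create the current day's partition. That helper lives in 020_partitioning_auto_daily.sql, which is not included in this diff; as a sketch only, this is roughly what one day's partition boundary amounts to (the partition name and date are illustrative, and the real helper may name and size partitions differently):

import pg from 'pg';

const { Client } = pg;
const client = new Client(); // connection settings as produced by buildClientConfig()
await client.connect();

// One daily partition: [start of 2024-01-01 UTC, start of 2024-01-02 UTC) in epoch ms.
const dayStart = Date.UTC(2024, 0, 1);          // 1704067200000
const nextDay = dayStart + 24 * 60 * 60 * 1000; // exclusive upper bound

await client.query(`
  CREATE TABLE IF NOT EXISTS heartbeat.heartbeat_events_20240101
  PARTITION OF heartbeat.heartbeat_events
  FOR VALUES FROM (${dayStart}) TO (${nextDay})
`);

await client.end();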
@@ -15,10 +15,10 @@ function getEnv(name, fallback) {
 function buildClientConfig(database) {
   const db = config.db;
   return {
-    host: getEnv('PGHOST', db.host),
-    port: Number(getEnv('PGPORT', db.port)),
-    user: getEnv('PGUSER', db.user),
-    password: getEnv('PGPASSWORD', db.password),
+    host: getEnv('POSTGRES_HOST', getEnv('PGHOST', db.host)),
+    port: Number(getEnv('POSTGRES_PORT', getEnv('PGPORT', db.port))),
+    user: getEnv('POSTGRES_USER', getEnv('PGUSER', db.user)),
+    password: getEnv('POSTGRES_PASSWORD', getEnv('PGPASSWORD', db.password)),
     database,
   };
 }
@@ -36,7 +36,7 @@ async function main() {
   const schemaFile = path.join(scriptsDir, '010_heartbeat_schema.sql');
   const partitionFile = path.join(scriptsDir, '020_partitioning_auto_daily.sql');
 
-  const targetDb = getEnv('PGTARGETDB', config.db.database);
+  const targetDb = getEnv('POSTGRES_DATABASE', getEnv('PGTARGETDB', config.db.database));
 
   console.log(`[db] Connecting to target db: ${targetDb}`);
   const targetClient = new Client(buildClientConfig(targetDb));
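Both hunks above follow the same precedence: the newer POSTGRES_* variables win, the older PG* names stay as a fallback, and the config file supplies the final default. getEnv itself is outside these hunks; a plausible minimal shape, assuming it simply falls back when the variable is unset or empty (the real helper may differ in details):

function getEnv(name, fallback) {
  const value = process.env[name];
  return value === undefined || value === '' ? fallback : value;
}

// Precedence example: with POSTGRES_HOST=db.internal and PGHOST=10.0.0.5 both set,
// buildClientConfig() resolves host to 'db.internal'; unset POSTGRES_HOST and it falls
// back to '10.0.0.5', and finally to config.db.host when neither variable is set.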
@@ -15,6 +15,29 @@ async function main() {
   // Pre-create today's partition so an INSERT does not fail outright when no partition exists yet
   await client.query('SELECT heartbeat.ensure_partitions(current_date, current_date)');
 
+  const ipType = await client.query(
+    `
+    SELECT format_type(a.atttypid, a.atttypmod) AS type
+    FROM pg_attribute a
+    JOIN pg_class c ON c.oid = a.attrelid
+    JOIN pg_namespace n ON n.oid = c.relnamespace
+    WHERE n.nspname = 'heartbeat'
+      AND c.relname = 'heartbeat_events'
+      AND a.attname = 'ip'
+      AND a.attnum > 0
+      AND NOT a.attisdropped
+    `
+  );
+
+  const type = String(ipType?.rows?.[0]?.type ?? '').toLowerCase();
+  if (type.startsWith('inet')) {
+    await client.query(
+      `ALTER TABLE heartbeat.heartbeat_events
+       ALTER COLUMN ip TYPE varchar(21)
+       USING ip::text`
+    );
+  }
+
   const ts = Date.now();
   await client.query(
     `INSERT INTO heartbeat.heartbeat_events (
@@ -27,7 +50,7 @@ async function main() {
       1,
       101,
       'dev-1',
-      '192.168.0.1',
+      '192.168.0.1:12345',
       1,
       0,
       0,
@@ -51,11 +74,33 @@ async function main() {
     ORDER BY c.relname`
   );
 
+  const parent = await client.query(
+    `
+    SELECT c.relkind AS kind
+    FROM pg_class c
+    JOIN pg_namespace n ON n.oid = c.relnamespace
+    WHERE n.nspname = 'heartbeat'
+      AND c.relname = 'heartbeat_events'
+    `
+  );
+
+  const parentIndexes = await client.query(
+    `
+    SELECT indexname
+    FROM pg_indexes
+    WHERE schemaname = 'heartbeat'
+      AND tablename = 'heartbeat_events'
+    ORDER BY indexname
+    `
+  );
+
   const cnt = await client.query(
     'SELECT count(*)::int AS n FROM heartbeat.heartbeat_events'
   );
 
+  console.log('parentKind:', parent.rows?.[0]?.kind);
   console.log('partitions:', partitions.rows.map((r) => r.partition));
+  console.log('parentIndexes:', parentIndexes.rows.map((r) => r.indexname));
   console.log('rows:', cnt.rows[0].n);
 
   await client.end();
scripts/kafka/decodeMessage.js (new file, 66 lines)
@@ -0,0 +1,66 @@
import fs from 'node:fs';
import path from 'node:path';
import process from 'node:process';

import { HeartbeatProcessor } from '../../src/processor/heartbeatProcessor.js';

function usageAndExit(code = 1) {
  console.log(`\nUsage:\n  node scripts/kafka/decodeMessage.js --base64 <str>\n  node scripts/kafka/decodeMessage.js --hex <str>\n  node scripts/kafka/decodeMessage.js --file <path> [--encoding base64|hex|raw]\n\nNotes:\n- Verifies the reverse decoding of a Kafka message.value (the peer sends JSON as UTF-8 bytes)\n- Tries: UTF-8 JSON / base64 -> (gzip|deflate|raw deflate|brotli) loop decompression (kept for compatibility; the peer does not currently use it)\n`);
  process.exit(code);
}
function parseArgs(argv) {
  const args = {};
  for (let i = 2; i < argv.length; i++) {
    const a = argv[i];
    if (a === '--base64') args.base64 = argv[++i];
    else if (a === '--hex') args.hex = argv[++i];
    else if (a === '--file') args.file = argv[++i];
    else if (a === '--encoding') args.encoding = argv[++i];
    else if (a === '--help' || a === '-h') args.help = true;
    else args._ = [...(args._ ?? []), a];
  }
  return args;
}

const args = parseArgs(process.argv);
if (args.help) usageAndExit(0);

const processor = new HeartbeatProcessor(
  { batchSize: 9999, batchTimeout: 1000 },
  { insertHeartbeatEvents: async () => {} }
);

let buf;
if (args.base64) {
  buf = Buffer.from(String(args.base64).trim(), 'base64');
} else if (args.hex) {
  buf = Buffer.from(String(args.hex).trim().replace(/\s+/g, ''), 'hex');
} else if (args.file) {
  const p = path.resolve(process.cwd(), args.file);
  const raw = fs.readFileSync(p);
  const enc = (args.encoding ?? 'raw').toLowerCase();
  if (enc === 'raw') buf = raw;
  else if (enc === 'base64') buf = Buffer.from(raw.toString('utf8').trim(), 'base64');
  else if (enc === 'hex') buf = Buffer.from(raw.toString('utf8').trim().replace(/\s+/g, ''), 'hex');
  else {
    console.error('Unknown encoding:', enc);
    usageAndExit(1);
  }
} else {
  usageAndExit(1);
}

try {
  const obj = processor.decodeToObject(buf);
  const items = Array.isArray(obj) ? obj : [obj];

  console.log('[decode] ok; items:', items.length);
  console.log(JSON.stringify(obj, null, 2));

  const normalized = items.map((x) => processor.normalizeHeartbeat(processor.unwrapPayload(x)));
  const validCount = normalized.filter((x) => processor.validateData(x)).length;
  console.log('[normalize] valid (required fields present):', validCount, '/', items.length);
} catch (err) {
  console.error('[decode] failed:', err);
  process.exitCode = 1;
}
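HeartbeatProcessor.decodeToObject is not part of this diff; the usage text above only states the order of attempts. A minimal sketch of that kind of multi-layer decoding, plain UTF-8 JSON first, then base64 plus a loop over gzip / deflate / raw deflate / brotli, using node:zlib; this illustrates the technique and is not the processor's actual implementation:

import zlib from 'node:zlib';

// Tolerant decoder sketch: try the cheap interpretation first, then peel
// base64 and common compression formats until JSON.parse succeeds.
function decodeToObjectSketch(buf) {
  const tryJson = (b) => {
    try { return JSON.parse(b.toString('utf8')); } catch { return undefined; }
  };

  const direct = tryJson(buf);
  if (direct !== undefined) return direct;

  // Treat the payload as base64 text wrapping a (possibly compressed) body.
  let body = Buffer.from(buf.toString('utf8').trim(), 'base64');
  const decompressors = [
    zlib.gunzipSync,           // gzip
    zlib.inflateSync,          // zlib/deflate
    zlib.inflateRawSync,       // raw deflate
    zlib.brotliDecompressSync, // brotli
  ];

  for (let depth = 0; depth < 4; depth++) {
    const parsed = tryJson(body);
    if (parsed !== undefined) return parsed;
    let next;
    for (const fn of decompressors) {
      try { next = fn(body); break; } catch { /* try the next format */ }
    }
    if (!next) break;
    body = next;
  }
  throw new Error('unable to decode message value');
}

For the real script, an invocation looks like: node scripts/kafka/decodeMessage.js --base64 '<payload>'.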
scripts/redis/smokeTest.js (new file, 17 lines)
@@ -0,0 +1,17 @@
import config from '../../src/config/config.js';
import { RedisIntegration } from '../../src/redis/redisIntegration.js';

async function main() {
  const redis = new RedisIntegration(config.redis);
  await redis.connect();

  await redis.writeHeartbeat();
  await redis.info('redis smoke test: ok', { module: 'redis' });

  await redis.disconnect();
}

main().catch((err) => {
  console.error('redis smoke test failed:', err);
  process.exit(1);
});
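The RedisIntegration module this smoke test exercises is added elsewhere in the commit and is not shown here. A minimal sketch of the surface it appears to expose (connect / writeHeartbeat / info / disconnect), assuming the node-redis client; the option shape, key name, and list name are placeholders, not the module's real ones:

import { createClient } from 'redis';

// Sketch only: mirrors the methods used by scripts/redis/smokeTest.js.
export class RedisIntegrationSketch {
  constructor(options = {}) {
    this.client = createClient({ url: options.url }); // assumed option shape
  }

  async connect() {
    await this.client.connect();
  }

  // Heartbeat write: record a "service is alive" timestamp under a key.
  async writeHeartbeat() {
    await this.client.set('service:heartbeat', String(Date.now())); // placeholder key
  }

  // Console log queue: push a structured log entry onto a Redis list.
  async info(message, meta = {}) {
    const entry = JSON.stringify({ level: 'info', message, ...meta, ts: Date.now() });
    await this.client.rPush('service:console-logs', entry); // placeholder list name
  }

  async disconnect() {
    await this.client.quit();
  }
}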