feat: 添加 Kafka 消费者和消息处理功能

- 新增 Kafka 消费者实现,支持消息处理和错误处理。
- 实现 OffsetTracker 类,用于跟踪消息偏移量。
- 新增消息解析和数据库插入逻辑,支持从 Kafka 消息构建数据库行。
- 实现 UDP 数据包解析功能,支持不同类型的 UDP 消息。
- 新增 Redis 错误队列处理,支持错误重试机制。
- 实现 Redis 客户端和集成类,支持日志记录和心跳机制。
- 添加 Zod 验证模式,确保 Kafka 消息有效性。
- 新增日志记录和指标收集工具,支持系统监控。
- 添加 UUID 生成工具,支持唯一标识符生成。
- 编写处理器逻辑的单元测试,确保功能正确性。
- 配置 Vite 构建工具,支持 Node.js 环境下的构建。
This commit is contained in:
2026-03-14 17:33:19 +08:00
parent d62f83b4a4
commit 677dda80b9
101 changed files with 14904 additions and 0 deletions

View File

@@ -0,0 +1,41 @@
import fs from 'fs';
import path from 'path';
import { fileURLToPath } from 'url';
import { spawnSync } from 'child_process';
// Recreate CommonJS-style __filename/__dirname in this ES module.
const __filename = fileURLToPath(import.meta.url);
const __dirname = path.dirname(__filename);
// Project root is one directory above this script (presumably scripts/ — TODO confirm).
const projectRoot = path.resolve(__dirname, '..');
// Directories (relative to the project root) scanned for JS files to syntax-check.
const targets = ['src', 'tests'];
/**
 * Recursively collect files under `dir` whose path ends with `ext`.
 *
 * Generalized: the extension is now a parameter defaulting to '.js',
 * so the same helper can gather other file types without changing callers.
 *
 * @param {string} dir - Directory to scan; missing directories yield [].
 * @param {string} [ext='.js'] - File suffix filter.
 * @returns {string[]} Absolute/joined paths of all matching files.
 */
const collectFiles = (dir, ext = '.js') => {
  if (!fs.existsSync(dir)) {
    return [];
  }
  const entries = fs.readdirSync(dir, { withFileTypes: true });
  return entries.flatMap((entry) => {
    const fullPath = path.join(dir, entry.name);
    if (entry.isDirectory()) {
      return collectFiles(fullPath, ext);
    }
    if (entry.isFile() && fullPath.endsWith(ext)) {
      return [fullPath];
    }
    // Symlinks and other special entries are intentionally skipped.
    return [];
  });
};
// Gather every candidate file from all target directories.
const files = targets.flatMap((target) => collectFiles(path.join(projectRoot, target)));

// A file fails when `node --check` exits non-zero; node's own output is
// streamed through (stdio: 'inherit') so the syntax error is visible.
const failures = files.filter((file) => {
  const { status } = spawnSync(process.execPath, ['--check', file], { stdio: 'inherit' });
  return status !== 0;
});

if (failures.length > 0) {
  process.exit(1);
}

View File

@@ -0,0 +1,66 @@
import dotenv from 'dotenv';
import kafka from 'kafka-node';
dotenv.config();

const env = process.env;

// Probe parameters come from argv, with deterministic fallbacks.
const probeTs = Number(process.argv[2] || Date.now());
const probeRoom = process.argv[3] || `PROBE-${probeTs}`;
const probeHotelId = Number(process.argv[4] || 1085);

// Topic and broker list are environment-driven; brokers are normalized
// (trimmed, empties dropped) back into a comma-separated host string.
const topic = env.KAFKA_TOPIC || env.KAFKA_TOPICS || 'blwlog4Nodejs-rcu-register-topic';
const kafkaHost = (env.KAFKA_BROKERS || '')
  .split(',')
  .map((broker) => broker.trim())
  .filter(Boolean)
  .join(',');
const saslEnabled = env.KAFKA_SASL_ENABLED === 'true';
const sslEnabled = env.KAFKA_SSL_ENABLED === 'true';

const kafkaClientOptions = {
  kafkaHost,
  clientId: env.KAFKA_CLIENT_ID || 'bls-register-producer'
};
// Attach SASL only when enabled AND both credentials are present.
if (saslEnabled && env.KAFKA_SASL_USERNAME && env.KAFKA_SASL_PASSWORD) {
  kafkaClientOptions.sasl = {
    mechanism: env.KAFKA_SASL_MECHANISM || 'plain',
    username: env.KAFKA_SASL_USERNAME,
    password: env.KAFKA_SASL_PASSWORD
  };
}
if (sslEnabled) {
  // NOTE(review): certificate verification is disabled — acceptable for a
  // probe script, but do not copy this into production code.
  kafkaClientOptions.sslOptions = { rejectUnauthorized: false };
}
// Probe payload mimicking a register message. Several fields deliberately
// embed a raw NUL character (\u0000) to exercise downstream NUL handling.
const payload = {
  ts_ms: probeTs,
  upgrade_ts_ms: probeTs + 1234,
  // Fall back to the default hotel when the CLI argument was not numeric.
  hotel_id: Number.isFinite(probeHotelId) ? probeHotelId : 1085,
  room_id: probeRoom,
  device_id: `probe-device-${probeTs}`,
  is_send: 0,
  udp_raw: 'abc\u0000def', // contains an actual NUL byte
  extra: { source: 'probe', note: 'with\\u0000nul' }, // literal "\u0000" text, NOT a NUL byte
  app_version: 'v1\u0000.2', // NUL embedded mid-version-string
  launcher_version: 'launcher-1',
  config_version: 'cfg-1'
};
// Wire up the kafka-node client/producer and publish one probe message.
const client = new kafka.KafkaClient(kafkaClientOptions);
const producer = new kafka.Producer(client);
producer.on('ready', () => {
  producer.send(
    [{ topic, messages: JSON.stringify(payload) }],
    (err) => {
      if (err) {
        console.error(`[probe] publish failed: ${err.message}`);
        process.exit(1);
      }
      console.log(`[probe] published topic=${topic} ts_ms=${probeTs} hotel_id=${payload.hotel_id} room_id=${probeRoom}`);
      // Close producer then client cleanly before exiting with success.
      producer.close(() => client.close(() => process.exit(0)));
    }
  );
});
// Connection-level failures (e.g. unreachable broker) surface here.
producer.on('error', (err) => {
  console.error(`[probe] producer error: ${err.message}`);
  process.exit(1);
});

View File

@@ -0,0 +1,7 @@
$ErrorActionPreference = "Stop"
Remove-Item -Force out.log, err.log -ErrorAction SilentlyContinue
# BUG FIX: Start-Process throws when -RedirectStandardOutput and
# -RedirectStandardError point to the SAME file, so capture stderr in a
# separate file and merge it into out.log after the process is stopped.
$p = Start-Process -FilePath node -ArgumentList 'src/index.js' -WorkingDirectory (Get-Location).Path -RedirectStandardOutput 'out.log' -RedirectStandardError 'err.log' -PassThru
Start-Sleep -Seconds 30
Stop-Process -Id $p.Id -Force
Start-Sleep -Seconds 1
if (Test-Path err.log) {
    Add-Content -Path out.log -Value (Get-Content err.log)
    Remove-Item -Force err.log -ErrorAction SilentlyContinue
}
Add-Content -Path out.log -Value "[runner] stopped after 30s"

View File

@@ -0,0 +1,67 @@
import dotenv from 'dotenv';
import pg from 'pg';
dotenv.config();

// CLI: node scripts/verifyProbeInDb.js <probeTs> <probeRoom> <probeHotelId>
const probeTs = Number(process.argv[2]);
const probeRoom = process.argv[3];
const probeHotelId = Number(process.argv[4]);

const argsAreValid =
  Number.isFinite(probeTs) && Boolean(probeRoom) && Number.isFinite(probeHotelId);
if (!argsAreValid) {
  console.error('Usage: node scripts/verifyProbeInDb.js <probeTs> <probeRoom> <probeHotelId>');
  process.exit(1);
}

// Single-connection pool against the G5 Postgres instance.
const pool = new pg.Pool({
  host: process.env.POSTGRES_HOST_G5,
  port: Number(process.env.POSTGRES_PORT_G5 || 5434),
  user: process.env.POSTGRES_USER_G5,
  password: process.env.POSTGRES_PASSWORD_G5,
  database: process.env.POSTGRES_DATABASE_G5,
  max: 1
});
// Verify the probe message landed in both target tables and log a summary.
const main = async () => {
  // Most recent event row for this (ts_ms, room_id) pair.
  const eventResult = await pool.query(
    `SELECT ts_ms, room_id, app_version, udp_raw
     FROM rcu_info.rcu_info_events_g5
     WHERE ts_ms = $1 AND room_id = $2
     ORDER BY write_ts_ms DESC
     LIMIT 1`,
    [probeTs, probeRoom]
  );
  // Current room-status snapshot for this hotel/room key.
  const statusResult = await pool.query(
    `SELECT hotel_id, room_id, app_version, launcher_version, config_version, upgrade_ts_ms, register_ts_ms
     FROM room_status.room_status_moment_g5
     WHERE hotel_id = $1 AND room_id = $2
     LIMIT 1`,
    [probeHotelId, probeRoom]
  );

  console.log(`[probe-db] event_rows=${eventResult.rowCount}`);
  if (eventResult.rowCount > 0) {
    const [event] = eventResult.rows;
    console.log(`[probe-db] event.ts_ms=${event.ts_ms} room_id=${event.room_id} app_version=${event.app_version}`);
    console.log(`[probe-db] event.udp_raw=${event.udp_raw}`);
  }

  console.log(`[probe-db] room_status_rows=${statusResult.rowCount}`);
  if (statusResult.rowCount > 0) {
    const [status] = statusResult.rows;
    console.log(`[probe-db] room_status.room_id=${status.room_id} register_ts_ms=${status.register_ts_ms} upgrade_ts_ms=${status.upgrade_ts_ms}`);
  } else {
    console.log('[probe-db] room_status row not found (expected behavior when key does not exist)');
  }
};
// Run the verification, always release the pool, and map the outcome to
// an explicit exit code (0 = verified/ran, 1 = verification error).
(async () => {
  try {
    await main();
    await pool.end();
    process.exit(0);
  } catch (err) {
    console.error(`[probe-db] verify failed: ${err.message}`);
    await pool.end();
    process.exit(1);
  }
})();

View File

@@ -0,0 +1,36 @@
import { config } from '../src/config/config.js';
import dbManager from '../src/db/databaseManager.js';
import { logger } from '../src/utils/logger.js';
/**
 * Print a quick verification summary of the configured target table:
 * total row count plus the five most recent rows ordered by ts_ms.
 *
 * BUG FIX: the original logged query errors but still let the process
 * exit with code 0, so CI/scripts could not detect a failed verification.
 * We now set process.exitCode = 1 on error (without killing the process
 * early, so the `finally` cleanup still runs).
 */
const verifyData = async () => {
  const client = await dbManager.pool.connect();
  try {
    console.log('Verifying data in database...');
    // Count total rows. NOTE: schema/table are interpolated from trusted
    // application config, not user input, so this is not an injection risk.
    const countSql = `SELECT count(*) FROM ${config.db.schema}.${config.db.table}`;
    const countRes = await client.query(countSql);
    console.log(`Total rows in ${config.db.schema}.${config.db.table}: ${countRes.rows[0].count}`);
    // Show the most recent rows for a quick sanity check.
    const recentSql = `
      SELECT * FROM ${config.db.schema}.${config.db.table}
      ORDER BY ts_ms DESC
      LIMIT 5
    `;
    const recentRes = await client.query(recentSql);
    console.log('Recent 5 rows:');
    recentRes.rows.forEach((row) => {
      console.log(JSON.stringify(row));
    });
  } catch (err) {
    console.error('Error verifying data:', err);
    // Surface the failure to callers/CI instead of silently exiting 0.
    process.exitCode = 1;
  } finally {
    client.release();
    await dbManager.pool.end();
  }
};

verifyData();