feat: switch the primary key to GUID and add a service_mask expression index

- Change the primary key from an auto-increment id to GUID format and add format validation (see the sketch after this list)
- Add an expression index on service_mask to speed up first-character queries (also sketched below)
- Update the documentation with the migration plan and verification steps
- Add a stats module that records database writes and Kafka consumption
- Rework the Redis heartbeat protocol to store project status in a LIST
- Fix inconsistent service names in the deployment scripts
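For the first two bullets, a minimal sketch of the intended shape, assuming a canonical 8-4-4-4-12 GUID layout and PostgreSQL-style functional-index syntax; the table and column names (heartbeat, id) are placeholders, not the project's actual schema:

// Hypothetical GUID format check run before inserts (assumes the canonical
// 8-4-4-4-12 hex form; adjust if the project uses a different GUID layout).
const GUID_PATTERN = /^[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}$/i;

export function assertValidGuid(id) {
  if (typeof id !== 'string' || !GUID_PATTERN.test(id)) {
    throw new Error(`invalid GUID primary key: ${id}`);
  }
}

// Hypothetical expression index DDL for queries on the first character of
// service_mask (PostgreSQL syntax; table and index names are illustrative).
export const SERVICE_MASK_FIRST_CHAR_INDEX_SQL =
  'CREATE INDEX IF NOT EXISTS idx_heartbeat_service_mask_first ' +
  'ON heartbeat ((substr(service_mask, 1, 1)));';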
@@ -1,5 +1,6 @@
import assert from 'node:assert/strict';
import { HeartbeatProcessor } from '../src/processor/heartbeatProcessor.js';
import { RedisIntegration } from '../src/redis/redisIntegration.js';

describe('HeartbeatProcessor smoke', () => {
  it('decodes JSON buffer into object', () => {
@@ -25,3 +26,108 @@ describe('HeartbeatProcessor smoke', () => {
  });
});

describe('RedisIntegration protocol', () => {
  it('writes heartbeat to 项目心跳 LIST', async () => {
    const redis = new RedisIntegration({
      enabled: true,
      projectName: 'BLS主机心跳日志',
      apiBaseUrl: 'http://127.0.0.1:3000',
    });

    const calls = { rPush: [], lTrim: [] };
    redis.client = {
      isReady: true,
      rPush: async (key, value) => {
        calls.rPush.push({ key, value });
      },
      lTrim: async (key, start, stop) => {
        calls.lTrim.push({ key, start, stop });
      },
    };

    const before = Date.now();
    await redis.writeHeartbeat();
    const after = Date.now();

    assert.equal(calls.rPush.length, 1);
    assert.equal(calls.rPush[0].key, '项目心跳');
    const payload = JSON.parse(calls.rPush[0].value);
    assert.equal(payload.projectName, 'BLS主机心跳日志');
    assert.equal(payload.apiBaseUrl, 'http://127.0.0.1:3000');
    assert.equal(typeof payload.lastActiveAt, 'number');
    assert.ok(payload.lastActiveAt >= before && payload.lastActiveAt <= after);

    assert.equal(calls.lTrim.length, 1);
    assert.deepEqual(calls.lTrim[0], { key: '项目心跳', start: -2000, stop: -1 });
  });

  it('caches heartbeat when redis is not ready and flushes later', async () => {
    const redis = new RedisIntegration({
      enabled: true,
      projectName: 'BLS主机心跳日志',
      apiBaseUrl: 'http://127.0.0.1:3000',
    });

    const calls = { rPush: [], lTrim: [] };
    redis.client = {
      isReady: false,
      connect: async () => {},
      rPush: async (key, value) => {
        calls.rPush.push({ key, value });
      },
      lTrim: async (key, start, stop) => {
        calls.lTrim.push({ key, start, stop });
      },
    };

    await redis.writeHeartbeat();
    assert.ok(redis._pendingHeartbeat);

    redis.client.isReady = true;
    await redis.flushPendingHeartbeat();

    assert.equal(redis._pendingHeartbeat, null);
    assert.equal(calls.rPush.length, 1);
    assert.equal(calls.rPush[0].key, '项目心跳');
    const payload = JSON.parse(calls.rPush[0].value);
    assert.equal(payload.projectName, 'BLS主机心跳日志');
    assert.equal(payload.apiBaseUrl, 'http://127.0.0.1:3000');
    assert.equal(typeof payload.lastActiveAt, 'number');
    assert.equal(calls.lTrim.length, 1);
  });

  it('buffers console logs when redis is not ready', async () => {
    const redis = new RedisIntegration({
      enabled: true,
      projectName: 'BLS主机心跳日志',
      apiBaseUrl: 'http://127.0.0.1:3000',
    });

    const calls = { rPush: [], lTrim: [] };
    redis.client = {
      isReady: false,
      connect: async () => {},
      rPush: async (key, ...values) => {
        calls.rPush.push({ key, values });
      },
      lTrim: async (key, start, stop) => {
        calls.lTrim.push({ key, start, stop });
      },
    };

    await redis.info('hello', { module: 'test' });
    assert.equal(redis._pendingConsoleLogs.length, 1);

    redis.client.isReady = true;
    await redis.flushPendingConsoleLogs();

    assert.equal(redis._pendingConsoleLogs.length, 0);
    assert.equal(calls.rPush.length, 1);
    assert.equal(calls.rPush[0].key, 'BLS主机心跳日志_项目控制台');
    assert.equal(calls.rPush[0].values.length, 1);
    const entry = JSON.parse(calls.rPush[0].values[0]);
    assert.equal(entry.level, 'info');
    assert.equal(entry.message, 'hello');
    assert.equal(entry.metadata.module, 'test');
  });
});
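The tests above pin down the heartbeat and console-log protocol. Below is a minimal sketch of a client that would satisfy them, assuming the key names and the -2000..-1 trim window asserted above; connection handling, error paths, and the extra timestamp field are simplifications and may differ from the real src/redis/redisIntegration.js.

// Sketch only: the shape implied by the tests, not the actual implementation.
export class RedisIntegrationSketch {
  constructor({ enabled, projectName, apiBaseUrl }) {
    this.enabled = enabled;
    this.projectName = projectName;
    this.apiBaseUrl = apiBaseUrl;
    this.client = null;              // injected in the tests, created elsewhere in production
    this._pendingHeartbeat = null;
    this._pendingConsoleLogs = [];
  }

  async writeHeartbeat() {
    const payload = {
      projectName: this.projectName,
      apiBaseUrl: this.apiBaseUrl,
      lastActiveAt: Date.now(),
    };
    if (!this.client?.isReady) {
      this._pendingHeartbeat = payload;              // cache until the client is ready
      return;
    }
    await this.client.rPush('项目心跳', JSON.stringify(payload));
    await this.client.lTrim('项目心跳', -2000, -1);  // keep only the newest 2000 entries
  }

  async flushPendingHeartbeat() {
    if (!this._pendingHeartbeat || !this.client?.isReady) return;
    await this.client.rPush('项目心跳', JSON.stringify(this._pendingHeartbeat));
    await this.client.lTrim('项目心跳', -2000, -1);
    this._pendingHeartbeat = null;
  }

  async info(message, metadata = {}) {
    const entry = { level: 'info', message, metadata, timestamp: Date.now() };
    if (!this.client?.isReady) {
      this._pendingConsoleLogs.push(entry);          // buffer until the client is ready
      return;
    }
    await this.client.rPush(`${this.projectName}_项目控制台`, JSON.stringify(entry));
  }

  async flushPendingConsoleLogs() {
    if (this._pendingConsoleLogs.length === 0 || !this.client?.isReady) return;
    const values = this._pendingConsoleLogs.map((entry) => JSON.stringify(entry));
    await this.client.rPush(`${this.projectName}_项目控制台`, ...values);
    this._pendingConsoleLogs = [];
  }
}

Trimming to the last 2000 entries after every push keeps the 项目心跳 LIST behaving like a fixed-size ring buffer, which is exactly what the first test's lTrim assertion checks.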
test/stats.test.js (new file, 70 lines)
@@ -0,0 +1,70 @@
import assert from 'node:assert/strict';
import { StatsCounters, StatsReporter } from '../src/stats/statsManager.js';
import { HeartbeatProcessor } from '../src/processor/heartbeatProcessor.js';

describe('StatsCounters', () => {
  it('snapshots and resets minute counters atomically', () => {
    const stats = new StatsCounters();
    stats.incDbWritten(3);
    stats.incFiltered(2);
    stats.incKafkaPulled(5);

    const first = stats.snapshotAndResetMinute();
    assert.equal(first.dbWritten, 3n);
    assert.equal(first.filtered, 2n);
    assert.equal(first.kafkaPulled, 5n);

    const second = stats.snapshotAndResetMinute();
    assert.equal(second.dbWritten, 0n);
    assert.equal(second.filtered, 0n);
    assert.equal(second.kafkaPulled, 0n);
  });
});

describe('StatsReporter', () => {
  it('writes three [STATS] info logs to redis console', () => {
    const stats = new StatsCounters();
    stats.incDbWritten(7);
    stats.incFiltered(8);
    stats.incKafkaPulled(9);

    const calls = { push: [] };
    const redis = {
      isEnabled: () => true,
      pushConsoleLog: ({ level, message, metadata }) => {
        calls.push.push({ level, message, metadata });
      },
    };

    const reporter = new StatsReporter({ redis, stats });
    reporter.flushOnce();

    assert.equal(calls.push.length, 3);
    assert.equal(calls.push[0].level, 'info');
    assert.equal(calls.push[1].level, 'info');
    assert.equal(calls.push[2].level, 'info');
    assert.match(calls.push[0].message, /^\[STATS\] \d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2}\.\d{3} 数据库写入量: 7条$/);
    assert.match(calls.push[1].message, /^\[STATS\] \d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2}\.\d{3} 数据过滤量: 8条$/);
    assert.match(calls.push[2].message, /^\[STATS\] \d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2}\.\d{3} Kafka拉取量: 9条$/);
  });
});

describe('HeartbeatProcessor db write error logging', () => {
  it('emits [ERROR] warn log with raw data', () => {
    const calls = { warn: [] };
    const redis = {
      isEnabled: () => true,
      pushConsoleLog: ({ level, message }) => {
        if (level === 'warn') calls.warn.push(message);
      },
    };

    const processor = new HeartbeatProcessor({ batchSize: 1, batchTimeout: 10 }, {}, { redis });
    processor._emitDbWriteError(new Error('boom'), [{ a: 1 }]);

    assert.equal(calls.warn.length >= 1, true);
    assert.match(calls.warn[0], /^\[ERROR\] \d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2}\.\d{3} db_write_failed: /);
    assert.match(calls.warn[0], /"errorId":"db_write_failed"/);
    assert.match(calls.warn[0], /"rawData":\{"a":1\}/);
  });
});
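For reference, a minimal sketch of StatsCounters and StatsReporter in the shape these tests assume. The method names, the BigInt totals, and the [STATS] message layout come from the assertions above; the timestamp helper and the metadata.module value are illustrative and may differ from the real src/stats/statsManager.js.

// Sketch only: per-minute counters backed by BigInt so totals cannot overflow.
export class StatsCountersSketch {
  constructor() {
    this.dbWritten = 0n;
    this.filtered = 0n;
    this.kafkaPulled = 0n;
  }

  incDbWritten(n = 1) { this.dbWritten += BigInt(n); }
  incFiltered(n = 1) { this.filtered += BigInt(n); }
  incKafkaPulled(n = 1) { this.kafkaPulled += BigInt(n); }

  // Return the current minute's totals and reset them in one step, so an
  // increment can never be counted twice or lost between two calls.
  snapshotAndResetMinute() {
    const snapshot = {
      dbWritten: this.dbWritten,
      filtered: this.filtered,
      kafkaPulled: this.kafkaPulled,
    };
    this.dbWritten = 0n;
    this.filtered = 0n;
    this.kafkaPulled = 0n;
    return snapshot;
  }
}

// Format a local timestamp as "YYYY-MM-DD HH:mm:ss.SSS", matching the regexes above.
function formatTs(d = new Date()) {
  const p = (n, w = 2) => String(n).padStart(w, '0');
  return `${d.getFullYear()}-${p(d.getMonth() + 1)}-${p(d.getDate())} ` +
    `${p(d.getHours())}:${p(d.getMinutes())}:${p(d.getSeconds())}.${p(d.getMilliseconds(), 3)}`;
}

// Sketch only: pushes one [STATS] line per counter to the Redis console log.
export class StatsReporterSketch {
  constructor({ redis, stats }) {
    this.redis = redis;
    this.stats = stats;
  }

  flushOnce() {
    if (!this.redis.isEnabled()) return;
    const { dbWritten, filtered, kafkaPulled } = this.stats.snapshotAndResetMinute();
    const ts = formatTs();
    const lines = [
      `数据库写入量: ${dbWritten}条`,
      `数据过滤量: ${filtered}条`,
      `Kafka拉取量: ${kafkaPulled}条`,
    ];
    for (const line of lines) {
      this.redis.pushConsoleLog({
        level: 'info',
        message: `[STATS] ${ts} ${line}`,
        metadata: { module: 'stats' },   // illustrative; the tests do not assert metadata
      });
    }
  }
}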