feat: 实现GUID主键与service_mask索引改造
- 将主键从自增id改为GUID格式并添加格式校验
- 为service_mask添加表达式索引优化首位查询性能
- 更新相关文档说明改造方案与验证步骤
- 添加统计模块记录数据库写入与Kafka消费量
- 重构Redis心跳协议改用LIST类型存储项目状态
- 修复部署脚本中的服务名称不一致问题
This commit is contained in:
@@ -76,10 +76,12 @@ class DatabaseManager {
|
||||
const v2SchemaQuery = `
|
||||
BEGIN;
|
||||
|
||||
CREATE EXTENSION IF NOT EXISTS pgcrypto;
|
||||
|
||||
CREATE SCHEMA IF NOT EXISTS heartbeat;
|
||||
|
||||
CREATE TABLE IF NOT EXISTS heartbeat.heartbeat_events (
|
||||
id bigserial,
|
||||
guid varchar(32) NOT NULL DEFAULT replace(gen_random_uuid()::text, '-', ''),
|
||||
|
||||
ts_ms bigint NOT NULL,
|
||||
hotel_id int2 NOT NULL,
|
||||
@@ -112,7 +114,8 @@ class DatabaseManager {
|
||||
|
||||
extra jsonb,
|
||||
|
||||
CONSTRAINT heartbeat_events_pk PRIMARY KEY (ts_ms, id),
|
||||
CONSTRAINT heartbeat_events_pk PRIMARY KEY (ts_ms, guid),
|
||||
CONSTRAINT chk_guid_32_hex CHECK (guid ~ '^[0-9a-f]{32}$'),
|
||||
|
||||
CONSTRAINT chk_ts_ms_positive CHECK (ts_ms > 0),
|
||||
CONSTRAINT chk_hotel_id_range CHECK (hotel_id >= 0 AND hotel_id <= 32767),
|
||||
@@ -147,6 +150,10 @@ class DatabaseManager {
|
||||
CREATE INDEX IF NOT EXISTS idx_heartbeat_events_guest_type ON heartbeat.heartbeat_events (guest_type);
|
||||
CREATE INDEX IF NOT EXISTS idx_heartbeat_events_device_id ON heartbeat.heartbeat_events (device_id);
|
||||
CREATE INDEX IF NOT EXISTS idx_heartbeat_events_service_mask_brin ON heartbeat.heartbeat_events USING BRIN (service_mask);
|
||||
|
||||
CREATE INDEX IF NOT EXISTS idx_service_mask_first_bit
|
||||
ON heartbeat.heartbeat_events ((service_mask & 1));
|
||||
|
||||
CREATE INDEX IF NOT EXISTS idx_heartbeat_events_hotel_ts ON heartbeat.heartbeat_events (hotel_id, ts_ms);
|
||||
CREATE INDEX IF NOT EXISTS idx_heartbeat_events_elec_address_gin ON heartbeat.heartbeat_events USING GIN (elec_address);
|
||||
CREATE INDEX IF NOT EXISTS idx_heartbeat_events_air_address_gin ON heartbeat.heartbeat_events USING GIN (air_address);
|
||||
@@ -200,6 +207,7 @@ class DatabaseManager {
|
||||
EXECUTE format('CREATE INDEX IF NOT EXISTS %I ON heartbeat.%I (guest_type);', 'idx_'||part_name||'_guest_type', part_name);
|
||||
EXECUTE format('CREATE INDEX IF NOT EXISTS %I ON heartbeat.%I (device_id);', 'idx_'||part_name||'_device_id', part_name);
|
||||
EXECUTE format('CREATE INDEX IF NOT EXISTS %I ON heartbeat.%I USING BRIN (service_mask);', 'idx_'||part_name||'_service_mask_brin', part_name);
|
||||
EXECUTE format('CREATE INDEX IF NOT EXISTS %I ON heartbeat.%I ((service_mask & 1));', 'idx_'||part_name||'_service_mask_first_bit', part_name);
|
||||
EXECUTE format('CREATE INDEX IF NOT EXISTS %I ON heartbeat.%I (hotel_id, ts_ms);', 'idx_'||part_name||'_hotel_ts', part_name);
|
||||
END;
|
||||
$$;
|
||||
|
||||
17
src/index.js
17
src/index.js
@@ -4,6 +4,7 @@ import { KafkaConsumer } from './kafka/consumer.js';
|
||||
import { HeartbeatProcessor } from './processor/heartbeatProcessor.js';
|
||||
import { DatabaseManager } from './db/databaseManager.js';
|
||||
import { RedisIntegration } from './redis/redisIntegration.js';
|
||||
import { StatsCounters, StatsReporter } from './stats/statsManager.js';
|
||||
|
||||
class WebBLSHeartbeatServer {
|
||||
constructor() {
|
||||
@@ -13,6 +14,8 @@ class WebBLSHeartbeatServer {
|
||||
this.databaseManager = null;
|
||||
this.redis = null;
|
||||
this.consumers = null;
|
||||
this.stats = new StatsCounters();
|
||||
this.statsReporter = null;
|
||||
}
|
||||
|
||||
async start() {
|
||||
@@ -21,6 +24,8 @@ class WebBLSHeartbeatServer {
|
||||
this.redis = new RedisIntegration(this.config.redis);
|
||||
await this.redis.connect();
|
||||
this.redis.startHeartbeat();
|
||||
this.statsReporter = new StatsReporter({ redis: this.redis, stats: this.stats });
|
||||
this.statsReporter.start();
|
||||
|
||||
// 初始化数据库连接
|
||||
this.databaseManager = new DatabaseManager({ ...this.config.db, maxConnections: 1 });
|
||||
@@ -36,11 +41,14 @@ class WebBLSHeartbeatServer {
|
||||
groupId: this.config.kafka?.groupId,
|
||||
fromOffset: this.config.kafka?.fromOffset ?? 'latest',
|
||||
ssl: !!this.config.kafka?.sslEnabled,
|
||||
sasl: !!this.config.kafka?.saslEnabled ? `enabled (mechanism: ${this.config.kafka?.saslMechanism})` : 'disabled'
|
||||
sasl: this.config.kafka?.saslEnabled ? `enabled (mechanism: ${this.config.kafka?.saslMechanism})` : 'disabled'
|
||||
});
|
||||
|
||||
// 初始化处理器(共享批处理队列)
|
||||
this.heartbeatProcessor = new HeartbeatProcessor(this.config.processor, this.databaseManager);
|
||||
this.heartbeatProcessor = new HeartbeatProcessor(this.config.processor, this.databaseManager, {
|
||||
redis: this.redis,
|
||||
stats: this.stats,
|
||||
});
|
||||
|
||||
// 在单进程内启动 N 个消费者实例(与分区数匹配)
|
||||
const instances = Math.max(1, Number(this.config.kafka?.consumerInstances ?? 1));
|
||||
@@ -69,6 +77,11 @@ class WebBLSHeartbeatServer {
|
||||
|
||||
async stop() {
|
||||
try {
|
||||
if (this.statsReporter) {
|
||||
this.statsReporter.stop();
|
||||
this.statsReporter = null;
|
||||
}
|
||||
|
||||
if (this.consumers && Array.isArray(this.consumers)) {
|
||||
for (const { consumer } of this.consumers) {
|
||||
await consumer.stopConsuming();
|
||||
|
||||
@@ -1,10 +1,13 @@
|
||||
// 心跳处理器模块
|
||||
import { brotliDecompressSync, gunzipSync, inflateRawSync, inflateSync } from 'node:zlib';
|
||||
import { formatTimestamp } from '../stats/statsManager.js';
|
||||
|
||||
class HeartbeatProcessor {
|
||||
constructor(config, databaseManager) {
|
||||
constructor(config, databaseManager, deps = {}) {
|
||||
this.config = config;
|
||||
this.databaseManager = databaseManager;
|
||||
this.redis = deps?.redis ?? null;
|
||||
this.stats = deps?.stats ?? null;
|
||||
this.batchQueue = [];
|
||||
this.batchMessageQueue = [];
|
||||
this.batchTimer = null;
|
||||
@@ -13,9 +16,22 @@ class HeartbeatProcessor {
|
||||
|
||||
async processMessage(message) {
|
||||
const deferred = this.createDeferred();
|
||||
this.stats?.incKafkaPulled?.(1);
|
||||
|
||||
// 解包心跳消息
|
||||
const unpackedData = this.unpackMessage(message);
|
||||
let unpackedData = null;
|
||||
try {
|
||||
unpackedData = this.unpackMessage(message);
|
||||
} catch (err) {
|
||||
this.stats?.incFiltered?.(1);
|
||||
this._emitRejectedRecord({
|
||||
errorId: 'decode_failed',
|
||||
error: err,
|
||||
rawData: this._extractRawKafkaValue(message),
|
||||
});
|
||||
deferred.resolve({ insertedCount: 0 });
|
||||
return deferred.promise;
|
||||
}
|
||||
|
||||
// 支持批量上报:message.value 可能是 JSON 数组
|
||||
const items = Array.isArray(unpackedData) ? unpackedData : [unpackedData];
|
||||
@@ -27,12 +43,29 @@ class HeartbeatProcessor {
|
||||
// 验证心跳数据
|
||||
const isValid = this.validateData(effective);
|
||||
if (!isValid) {
|
||||
this.stats?.incFiltered?.(1);
|
||||
this._emitRejectedRecord({
|
||||
errorId: 'validation_failed',
|
||||
rawData: { item, effective },
|
||||
});
|
||||
console.error('无效的心跳数据:', effective);
|
||||
continue;
|
||||
}
|
||||
|
||||
// 转换数据格式
|
||||
const transformedData = this.transformData(effective);
|
||||
let transformedData = null;
|
||||
try {
|
||||
transformedData = this.transformData(effective);
|
||||
} catch (err) {
|
||||
this.stats?.incFiltered?.(1);
|
||||
this._emitRejectedRecord({
|
||||
errorId: 'transform_failed',
|
||||
error: err,
|
||||
rawData: { item, effective },
|
||||
});
|
||||
console.error('转换心跳数据失败:', err);
|
||||
continue;
|
||||
}
|
||||
|
||||
// 添加到批量队列
|
||||
this.batchQueue.push(transformedData);
|
||||
@@ -163,10 +196,11 @@ class HeartbeatProcessor {
|
||||
|
||||
this._batchInFlight = true;
|
||||
let hasMore = false;
|
||||
let batchData = null;
|
||||
|
||||
try {
|
||||
const { batchEventCount, batchMessageCount } = this.computeNextBatchWindow();
|
||||
const batchData = this.batchQueue.slice(0, batchEventCount);
|
||||
batchData = this.batchQueue.slice(0, batchEventCount);
|
||||
const batchMessages = this.batchMessageQueue.slice(0, batchMessageCount);
|
||||
|
||||
let insertedCount = 0;
|
||||
@@ -189,10 +223,12 @@ class HeartbeatProcessor {
|
||||
entry.deferred.resolve({ insertedCount: entry.eventCount });
|
||||
}
|
||||
|
||||
this.stats?.incDbWritten?.(batchData.length);
|
||||
console.log(`成功处理批次数据,共 ${batchData.length} 条`);
|
||||
hasMore = this.batchQueue.length > 0;
|
||||
} catch (error) {
|
||||
console.error('批量处理失败:', error);
|
||||
this._emitDbWriteError(error, batchData);
|
||||
if (!this.batchTimer) {
|
||||
const retryDelay = Math.max(250, Number(this.config.batchTimeout ?? 1000));
|
||||
this.batchTimer = setTimeout(() => this.processBatch(), retryDelay);
|
||||
@@ -207,6 +243,70 @@ class HeartbeatProcessor {
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * Fan a failed database write out as one rejection event per record.
 * Accepts a single record, an array of records, or a nullish value
 * (which emits nothing). No-op when the Redis integration is disabled.
 */
_emitDbWriteError(error, rawData) {
  if (!this.redis?.isEnabled?.()) return;
  let records;
  if (Array.isArray(rawData)) {
    records = rawData;
  } else if (rawData) {
    records = [rawData];
  } else {
    records = [];
  }
  records.forEach((record) => {
    this._emitRejectedRecord({
      errorId: 'db_write_failed',
      error,
      rawData: record,
    });
  });
}
|
||||
|
||||
/**
 * Serialize a rejected record (error id + error text + raw data) and push
 * it onto the Redis console-log stream as a warn-level entry. Payloads
 * longer than 50k characters are split into numbered "(part i/n)" chunks
 * so no single log entry becomes unmanageably large.
 * No-op when the Redis integration is disabled.
 */
_emitRejectedRecord({ errorId, error, rawData }) {
  if (!this.redis?.isEnabled?.()) return;
  const ts = formatTimestamp(new Date());
  const errMsg = error ? String(error?.stack ?? error?.message ?? error) : undefined;
  const payload = this._safeStringify({
    errorId,
    error: errMsg,
    rawData,
  });
  const base = `[ERROR] ${ts} ${errorId}: `;
  const maxChunkChars = 50_000;
  const push = (message) => {
    this.redis.pushConsoleLog?.({ level: 'warn', message, metadata: { module: 'processor' } });
  };
  if (payload.length <= maxChunkChars) {
    push(`${base}${payload}`);
    return;
  }
  const parts = Math.ceil(payload.length / maxChunkChars);
  for (let part = 0; part < parts; part += 1) {
    const offset = part * maxChunkChars;
    const chunk = payload.slice(offset, offset + maxChunkChars);
    push(`${base}(part ${part + 1}/${parts}) ${chunk}`);
  }
}
|
||||
|
||||
_extractRawKafkaValue(message) {
|
||||
try {
|
||||
const raw = message?.value;
|
||||
if (Buffer.isBuffer(raw)) {
|
||||
return { type: 'buffer', bytes: raw.length, base64: raw.toString('base64') };
|
||||
}
|
||||
if (typeof raw === 'string') {
|
||||
return { type: 'string', chars: raw.length, value: raw };
|
||||
}
|
||||
return { type: typeof raw, value: raw };
|
||||
} catch (err) {
|
||||
return { type: 'unknown', error: String(err?.message ?? err) };
|
||||
}
|
||||
}
|
||||
|
||||
_safeStringify(obj) {
|
||||
try {
|
||||
return JSON.stringify(obj);
|
||||
} catch (err) {
|
||||
return JSON.stringify({
|
||||
stringifyError: String(err?.message ?? err),
|
||||
type: typeof obj,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
shouldFlushNow() {
|
||||
const max = Math.max(1, Number(this.config.batchSize ?? 1));
|
||||
return this.batchQueue.length >= max;
|
||||
|
||||
@@ -8,6 +8,9 @@ class RedisIntegration {
|
||||
|
||||
this._connectPromise = null;
|
||||
this._lastErrorLogAt = 0;
|
||||
this._pendingConsoleLogs = [];
|
||||
this._flushingConsoleLogs = false;
|
||||
this._pendingHeartbeat = null;
|
||||
}
|
||||
|
||||
isEnabled() {
|
||||
@@ -23,7 +26,7 @@ class RedisIntegration {
|
||||
}
|
||||
|
||||
getHeartbeatKey() {
|
||||
return `${this.getProjectName()}_项目心跳`;
|
||||
return '项目心跳';
|
||||
}
|
||||
|
||||
getConsoleKey() {
|
||||
@@ -50,6 +53,14 @@ class RedisIntegration {
|
||||
return Number.isFinite(n) && n > 0 ? Math.floor(n) : null;
|
||||
}
|
||||
|
||||
getHeartbeatMaxLen() {
|
||||
const v = this.config?.heartbeatMaxLen;
|
||||
if (v === undefined) return 2000;
|
||||
if (v === null) return null;
|
||||
const n = Number(v);
|
||||
return Number.isFinite(n) && n > 0 ? Math.floor(n) : null;
|
||||
}
|
||||
|
||||
getConsoleMaxLen() {
|
||||
const v = this.config?.consoleMaxLen;
|
||||
if (v === undefined || v === null) return null;
|
||||
@@ -137,6 +148,7 @@ class RedisIntegration {
|
||||
.connect()
|
||||
.then(() => {
|
||||
console.log('[redis] connected');
|
||||
return Promise.all([this.flushPendingHeartbeat(), this.flushPendingConsoleLogs()]);
|
||||
})
|
||||
.catch((err) => {
|
||||
// connect 失败不抛出到上层;依赖 redis 内建重连策略或下次调用再触发
|
||||
@@ -163,15 +175,32 @@ class RedisIntegration {
|
||||
} finally {
|
||||
this.client = null;
|
||||
this._connectPromise = null;
|
||||
this._pendingConsoleLogs = [];
|
||||
this._pendingHeartbeat = null;
|
||||
console.log('[redis] disconnected');
|
||||
}
|
||||
}
|
||||
|
||||
async flushPendingHeartbeat() {
|
||||
if (!this.isEnabled()) return;
|
||||
if (!this.client || !this.client.isReady) return;
|
||||
if (!this._pendingHeartbeat) return;
|
||||
|
||||
const key = this.getHeartbeatKey();
|
||||
const value = this._pendingHeartbeat;
|
||||
this._pendingHeartbeat = null;
|
||||
|
||||
await this.client.rPush(key, value);
|
||||
const maxLen = this.getHeartbeatMaxLen();
|
||||
if (maxLen) {
|
||||
await this.client.lTrim(key, -maxLen, -1);
|
||||
}
|
||||
}
|
||||
|
||||
async writeHeartbeat() {
|
||||
if (!this.isEnabled()) return;
|
||||
if (!this.client || !this.client.isReady) return;
|
||||
|
||||
const payload = {
|
||||
projectName: this.getProjectName(),
|
||||
apiBaseUrl: this.getApiBaseUrl(),
|
||||
lastActiveAt: Date.now(),
|
||||
};
|
||||
@@ -179,12 +208,34 @@ class RedisIntegration {
|
||||
const key = this.getHeartbeatKey();
|
||||
const value = JSON.stringify(payload);
|
||||
|
||||
const ttl = this.getHeartbeatTtlSeconds();
|
||||
if (ttl) {
|
||||
await this.client.set(key, value, { EX: ttl });
|
||||
} else {
|
||||
await this.client.set(key, value);
|
||||
if (!this.client || !this.client.isReady) {
|
||||
this._pendingHeartbeat = value;
|
||||
this.ensureConnectedInBackground();
|
||||
return;
|
||||
}
|
||||
|
||||
let lastError = null;
|
||||
for (let attempt = 1; attempt <= 2; attempt += 1) {
|
||||
try {
|
||||
await this.client.rPush(key, value);
|
||||
const maxLen = this.getHeartbeatMaxLen();
|
||||
if (maxLen) {
|
||||
await this.client.lTrim(key, -maxLen, -1);
|
||||
}
|
||||
this._pendingHeartbeat = null;
|
||||
return;
|
||||
} catch (err) {
|
||||
lastError = err;
|
||||
this.ensureConnectedInBackground();
|
||||
if (attempt < 2) {
|
||||
await new Promise((r) => setTimeout(r, 250));
|
||||
if (!this.client?.isReady) return;
|
||||
continue;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
throw lastError;
|
||||
}
|
||||
|
||||
startHeartbeat() {
|
||||
@@ -214,13 +265,52 @@ class RedisIntegration {
|
||||
}
|
||||
}
|
||||
|
||||
async pushConsoleLog({ level, message, metadata }) {
|
||||
async flushPendingConsoleLogs() {
|
||||
if (!this.isEnabled()) return;
|
||||
if (!this.client || !this.client.isReady) return;
|
||||
if (this._flushingConsoleLogs) return;
|
||||
if (!this._pendingConsoleLogs.length) return;
|
||||
|
||||
this._flushingConsoleLogs = true;
|
||||
try {
|
||||
const key = this.getConsoleKey();
|
||||
while (this._pendingConsoleLogs.length) {
|
||||
const batch = this._pendingConsoleLogs.splice(0, 200);
|
||||
await this.client.rPush(key, ...batch);
|
||||
|
||||
const maxLen = this.getConsoleMaxLen();
|
||||
if (maxLen) {
|
||||
await this.client.lTrim(key, -maxLen, -1);
|
||||
}
|
||||
}
|
||||
} finally {
|
||||
this._flushingConsoleLogs = false;
|
||||
}
|
||||
}
|
||||
|
||||
async pushConsoleLog({ level, message, metadata }) {
|
||||
if (!this.isEnabled()) return;
|
||||
const normalizedLevel = String(level ?? '').toLowerCase();
|
||||
if (!this.client || !this.client.isReady) {
|
||||
if (this._pendingConsoleLogs.length < 5000) {
|
||||
const entry = {
|
||||
timestamp: new Date().toISOString(),
|
||||
level: normalizedLevel,
|
||||
message,
|
||||
metadata: metadata ?? undefined,
|
||||
};
|
||||
const value = JSON.stringify(entry);
|
||||
if (Buffer.byteLength(value, 'utf8') <= 64 * 1024) {
|
||||
this._pendingConsoleLogs.push(value);
|
||||
}
|
||||
}
|
||||
this.ensureConnectedInBackground();
|
||||
return;
|
||||
}
|
||||
|
||||
const entry = {
|
||||
timestamp: new Date().toISOString(),
|
||||
level,
|
||||
level: normalizedLevel,
|
||||
message,
|
||||
metadata: metadata ?? undefined,
|
||||
};
|
||||
|
||||
102
src/stats/statsManager.js
Normal file
102
src/stats/statsManager.js
Normal file
@@ -0,0 +1,102 @@
|
||||
/**
 * Lock-free per-minute counters backed by a SharedArrayBuffer, so the same
 * counter block could be shared across worker threads and updated with
 * Atomics. Slot layout: 0 = db rows written, 1 = records filtered out,
 * 2 = kafka messages pulled.
 */
class StatsCounters {
  constructor() {
    this._minuteBuf = new SharedArrayBuffer(BigInt64Array.BYTES_PER_ELEMENT * 3);
    this._minute = new BigInt64Array(this._minuteBuf);
  }

  // Normalize an arbitrary increment to a non-negative integer BigInt.
  // BigInt() throws RangeError on fractional numbers, so truncate first;
  // NaN, Infinity and negative inputs count as 0.
  _toCount(n) {
    const num = Number(n);
    if (!Number.isFinite(num) || num <= 0) return 0n;
    return BigInt(Math.trunc(num));
  }

  // Atomically add a sanitized increment to the given slot.
  _add(slot, n) {
    const v = this._toCount(n);
    if (v === 0n) return;
    Atomics.add(this._minute, slot, v);
  }

  /** Count rows successfully written to the database. */
  incDbWritten(n = 1) {
    this._add(0, n);
  }

  /** Count records rejected/filtered before insertion. */
  incFiltered(n = 1) {
    this._add(1, n);
  }

  /** Count messages pulled from Kafka. */
  incKafkaPulled(n = 1) {
    this._add(2, n);
  }

  /**
   * Atomically read and zero all three counters.
   * @returns {{dbWritten: bigint, filtered: bigint, kafkaPulled: bigint}}
   */
  snapshotAndResetMinute() {
    const dbWritten = Atomics.exchange(this._minute, 0, 0n);
    const filtered = Atomics.exchange(this._minute, 1, 0n);
    const kafkaPulled = Atomics.exchange(this._minute, 2, 0n);
    return { dbWritten, filtered, kafkaPulled };
  }
}
|
||||
|
||||
// Zero-pad helpers for timestamp fields.
const pad2 = (n) => String(n).padStart(2, '0');
const pad3 = (n) => String(n).padStart(3, '0');

/**
 * Render a Date in local time as 'YYYY-MM-DD HH:MM:SS.mmm'.
 * @param {Date} d
 * @returns {string}
 */
const formatTimestamp = (d) => {
  const datePart = [d.getFullYear(), pad2(d.getMonth() + 1), pad2(d.getDate())].join('-');
  const timePart = [pad2(d.getHours()), pad2(d.getMinutes()), pad2(d.getSeconds())].join(':');
  return `${datePart} ${timePart}.${pad3(d.getMilliseconds())}`;
};
|
||||
|
||||
/**
 * Minute-aligned stats reporter: on every wall-clock minute boundary it
 * snapshots (and resets) the shared StatsCounters and pushes three
 * human-readable stat lines to Redis via pushConsoleLog.
 * start()/stop() are idempotent; the timer chain reschedules itself.
 */
class StatsReporter {
  constructor({ redis, stats }) {
    this.redis = redis;
    this.stats = stats;
    this._timer = null;
    this._running = false;
  }

  /** Begin periodic reporting; no-op if already running. */
  start() {
    if (this._running) return;
    this._running = true;
    this._scheduleNext();
  }

  /** Stop reporting and cancel any pending timer. */
  stop() {
    this._running = false;
    if (this._timer === null) return;
    clearTimeout(this._timer);
    this._timer = null;
  }

  /** Snapshot-and-reset the counters and emit one log line per metric. */
  flushOnce() {
    if (!this.redis?.isEnabled?.()) return;
    const { dbWritten, filtered, kafkaPulled } = this.stats.snapshotAndResetMinute();
    const ts = formatTimestamp(new Date());
    const lines = [
      `[STATS] ${ts} 数据库写入量: ${dbWritten}条`,
      `[STATS] ${ts} 数据过滤量: ${filtered}条`,
      `[STATS] ${ts} Kafka拉取量: ${kafkaPulled}条`,
    ];
    for (const message of lines) {
      this.redis.pushConsoleLog?.({ level: 'info', message, metadata: { module: 'stats' } });
    }
  }

  // Arm a timer for the next minute boundary; after firing it clears the
  // handle, flushes (errors are reported to Redis, never thrown), and
  // re-arms itself while still running.
  _scheduleNext() {
    if (!this._running) return;
    if (this._timer) return;

    const delay = 60_000 - (Date.now() % 60_000);
    this._timer = setTimeout(() => {
      this._timer = null;
      try {
        this.flushOnce();
      } catch (err) {
        this.redis?.pushConsoleLog?.({
          level: 'warn',
          message: `[ERROR] ${formatTimestamp(new Date())} 统计任务异常: ${String(err?.message ?? err)}`,
          metadata: { module: 'stats' },
        });
      } finally {
        this._scheduleNext();
      }
    }, delay);
  }
}
|
||||
|
||||
export { StatsCounters, StatsReporter, formatTimestamp };
|
||||
|
||||
Reference in New Issue
Block a user