feat(processor): implement batch-insert fault tolerance with failure statistics

Add handling for batch database-write failures: when a batch insert fails, automatically fall back to per-row inserts.
Record the failed rows and count the failures; update the related tests and statistics module accordingly.
Implements the newly added batch-write fault-tolerance requirement.
2026-01-20 08:22:55 +08:00
parent 41301f9ce5
commit b90faf4aa4
5 changed files with 111 additions and 44 deletions
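
The change below follows a common batch-insert-with-per-row-fallback pattern. As a point of reference, here is a minimal standalone sketch of that pattern, assuming a node-postgres style `pool.query(sql, values)` API; the function name, table, and columns are hypothetical and are not the code in this diff.

// Minimal sketch: multi-row INSERT, falling back to per-row inserts on failure.
// Assumes a pg-style pool; table/column names are illustrative only.
async function insertBatchWithFallback(pool, rows) {
  if (rows.length === 0) {
    return { insertedCount: 0, failedRecords: [], batchError: null };
  }
  const columns = ['ts_ms', 'device_id', 'payload'];
  const toRowValues = (r) => [r.ts_ms, r.device_id, r.payload ?? null];

  // Build one multi-row INSERT: ($1, $2, $3), ($4, $5, $6), ...
  const values = [];
  const placeholders = rows
    .map((r, rowIndex) => {
      const base = rowIndex * columns.length;
      values.push(...toRowValues(r));
      return `(${columns.map((_, i) => `$${base + i + 1}`).join(', ')})`;
    })
    .join(', ');
  const batchSql = `INSERT INTO events (${columns.join(', ')}) VALUES ${placeholders}`;
  const singleSql = `INSERT INTO events (${columns.join(', ')}) VALUES (${columns
    .map((_, i) => `$${i + 1}`)
    .join(', ')})`;

  try {
    await pool.query(batchSql, values);
    return { insertedCount: rows.length, failedRecords: [], batchError: null };
  } catch (batchError) {
    // Batch failed: retry row by row so one bad record does not drop the whole batch.
    const failedRecords = [];
    let insertedCount = 0;
    for (const row of rows) {
      try {
        await pool.query(singleSql, toRowValues(row));
        insertedCount += 1;
      } catch (error) {
        failedRecords.push({ error, record: row });
      }
    }
    // If nothing could be written at all, surface the original batch error.
    if (insertedCount === 0 && failedRecords.length === rows.length) {
      throw batchError;
    }
    return { insertedCount, failedRecords, batchError };
  }
}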


@@ -425,46 +425,51 @@ class DatabaseManager {
'extra',
];
const toRowValues = (e) => [
e.ts_ms,
e.hotel_id,
e.room_id,
e.device_id,
e.ip,
e.power_state,
e.guest_type,
e.cardless_state,
e.service_mask,
e.pms_state,
e.carbon_state,
e.device_count,
e.comm_seq,
Array.isArray(e.elec_address) ? e.elec_address : null,
Array.isArray(e.air_address) ? e.air_address : null,
Array.isArray(e.voltage) ? e.voltage : null,
Array.isArray(e.ampere) ? e.ampere : null,
Array.isArray(e.power) ? e.power : null,
Array.isArray(e.phase) ? e.phase : null,
Array.isArray(e.energy) ? e.energy : null,
Array.isArray(e.sum_energy) ? e.sum_energy : null,
Array.isArray(e.state) ? e.state : null,
Array.isArray(e.model) ? e.model : null,
Array.isArray(e.speed) ? e.speed : null,
Array.isArray(e.set_temp) ? e.set_temp : null,
Array.isArray(e.now_temp) ? e.now_temp : null,
Array.isArray(e.solenoid_valve) ? e.solenoid_valve : null,
e.extra ?? null,
];
const values = [];
const placeholders = events
.map((e, rowIndex) => {
const base = rowIndex * columns.length;
values.push(
e.ts_ms,
e.hotel_id,
e.room_id,
e.device_id,
e.ip,
e.power_state,
e.guest_type,
e.cardless_state,
e.service_mask,
e.pms_state,
e.carbon_state,
e.device_count,
e.comm_seq,
Array.isArray(e.elec_address) ? e.elec_address : null,
Array.isArray(e.air_address) ? e.air_address : null,
Array.isArray(e.voltage) ? e.voltage : null,
Array.isArray(e.ampere) ? e.ampere : null,
Array.isArray(e.power) ? e.power : null,
Array.isArray(e.phase) ? e.phase : null,
Array.isArray(e.energy) ? e.energy : null,
Array.isArray(e.sum_energy) ? e.sum_energy : null,
Array.isArray(e.state) ? e.state : null,
Array.isArray(e.model) ? e.model : null,
Array.isArray(e.speed) ? e.speed : null,
Array.isArray(e.set_temp) ? e.set_temp : null,
Array.isArray(e.now_temp) ? e.now_temp : null,
Array.isArray(e.solenoid_valve) ? e.solenoid_valve : null,
e.extra ?? null
);
values.push(...toRowValues(e));
const row = columns.map((_, colIndex) => `$${base + colIndex + 1}`).join(', ');
return `(${row})`;
})
.join(', ');
const sql = `INSERT INTO heartbeat.heartbeat_events (${columns.join(', ')}) VALUES ${placeholders}`;
const singleSql = `INSERT INTO heartbeat.heartbeat_events (${columns.join(', ')}) VALUES (${columns
.map((_, i) => `$${i + 1}`)
.join(', ')})`;
const runInsertOnce = async () => {
const tsValues = events.map((e) => Number(e.ts_ms)).filter((n) => Number.isFinite(n));
@@ -518,7 +523,24 @@ class DatabaseManager {
}
}
throw lastError;
const failedRecords = [];
let insertedCount = 0;
console.error('[db] batch insert failed, falling back to per-row inserts:', lastError);
for (const event of events) {
try {
await this.pool.query(singleSql, toRowValues(event));
insertedCount += 1;
} catch (error) {
failedRecords.push({ error, record: event });
}
}
if (insertedCount === 0 && failedRecords.length === events.length) {
throw lastError;
}
return { insertedCount, failedRecords, batchError: lastError };
}
async insertHeartbeatData(data) {
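
The commit message also mentions counting failures in a statistics module, which is not shown in this hunk. A hedged sketch of how a caller might consume the `{ insertedCount, failedRecords, batchError }` result follows; the `stats` object, its field names, and the `insertBatch` wrapper are hypothetical.

// Hypothetical caller-side accounting for the fallback result; not code from this diff.
const stats = { inserted: 0, failed: 0, batchFallbacks: 0 };

async function writeHeartbeatBatch(db, events) {
  const result = await db.insertBatch(events); // assumed wrapper around the batch method above
  stats.inserted += result.insertedCount;
  stats.failed += result.failedRecords.length;
  if (result.batchError) {
    stats.batchFallbacks += 1;
  }
  for (const { error, record } of result.failedRecords) {
    console.error('[db] row insert failed', { device_id: record.device_id, error: error.message });
  }
  return result;
}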