From c98b83635d284e0d64060df4a75c190791a48d37 Mon Sep 17 00:00:00 2001
From: XuJiacheng
Date: Mon, 2 Mar 2026 11:49:02 +0800
Subject: [PATCH] fix: improve metrics report format, simplify log output
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 bls-onoffline-backend/dist/index.js | 4 ++--
 bls-onoffline-backend/src/index.js  | 4 ++--
 2 files changed, 4 insertions(+), 4 deletions(-)

diff --git a/bls-onoffline-backend/dist/index.js b/bls-onoffline-backend/dist/index.js
index 0911b58..dc94eeb 100644
--- a/bls-onoffline-backend/dist/index.js
+++ b/bls-onoffline-backend/dist/index.js
@@ -875,9 +875,9 @@ const bootstrap = async () => {
     const metrics = metricCollector.getAndReset();
     const flushAvgMs = metrics.batch_flush_count > 0 ? (metrics.batch_flush_ms_sum / metrics.batch_flush_count).toFixed(1) : "0.0";
     const dbAvgMs = metrics.db_insert_count > 0 ? (metrics.db_insert_ms_sum / metrics.db_insert_count).toFixed(1) : "0.0";
-    const report = `[Minute Metrics] Pulled: ${metrics.kafka_pulled}, Parse Error: ${metrics.parse_error}, Inserted: ${metrics.db_inserted}, Failed: ${metrics.db_failed}, FlushAvgMs: ${flushAvgMs}, DbAvgMs: ${dbAvgMs}, PulledByPartition: ${JSON.stringify(metrics.keyed?.kafka_pulled_by_partition || {})}, InsertedByPartition: ${JSON.stringify(metrics.keyed?.db_inserted_by_partition || {})}, FailedByPartition: ${JSON.stringify(metrics.keyed?.db_failed_by_partition || {})}, InsertedByDay: ${JSON.stringify(metrics.keyed?.db_inserted_by_day || {})}, DbMsByDay: ${JSON.stringify(metrics.keyed?.db_insert_ms_sum_by_day || {})}`;
+    const report = `[Metrics] Pulled:${metrics.kafka_pulled} ParseErr:${metrics.parse_error} Inserted:${metrics.db_inserted} Failed:${metrics.db_failed} FlushAvg:${flushAvgMs}ms DbAvg:${dbAvgMs}ms`;
     console.log(report);
-    logger.info(report, metrics);
+    logger.info(report);
     try {
       await redisIntegration.info("Minute Metrics", metrics);
     } catch (err) {

diff --git a/bls-onoffline-backend/src/index.js b/bls-onoffline-backend/src/index.js
index 0964284..0451cba 100644
--- a/bls-onoffline-backend/src/index.js
+++ b/bls-onoffline-backend/src/index.js
@@ -78,9 +78,9 @@ const bootstrap = async () => {
     const metrics = metricCollector.getAndReset();
     const flushAvgMs = metrics.batch_flush_count > 0 ? (metrics.batch_flush_ms_sum / metrics.batch_flush_count).toFixed(1) : '0.0';
     const dbAvgMs = metrics.db_insert_count > 0 ? (metrics.db_insert_ms_sum / metrics.db_insert_count).toFixed(1) : '0.0';
-    const report = `[Minute Metrics] Pulled: ${metrics.kafka_pulled}, Parse Error: ${metrics.parse_error}, Inserted: ${metrics.db_inserted}, Failed: ${metrics.db_failed}, FlushAvgMs: ${flushAvgMs}, DbAvgMs: ${dbAvgMs}, PulledByPartition: ${JSON.stringify(metrics.keyed?.kafka_pulled_by_partition || {})}, InsertedByPartition: ${JSON.stringify(metrics.keyed?.db_inserted_by_partition || {})}, FailedByPartition: ${JSON.stringify(metrics.keyed?.db_failed_by_partition || {})}, InsertedByDay: ${JSON.stringify(metrics.keyed?.db_inserted_by_day || {})}, DbMsByDay: ${JSON.stringify(metrics.keyed?.db_insert_ms_sum_by_day || {})}`;
+    const report = `[Metrics] Pulled:${metrics.kafka_pulled} ParseErr:${metrics.parse_error} Inserted:${metrics.db_inserted} Failed:${metrics.db_failed} FlushAvg:${flushAvgMs}ms DbAvg:${dbAvgMs}ms`;
     console.log(report);
-    logger.info(report, metrics);
+    logger.info(report);
     try {
       await redisIntegration.info('Minute Metrics', metrics);
     } catch (err) {
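
Note: the per-partition and per-day keyed breakdowns are dropped from the
console/logger line only; the full metrics object is still forwarded via
redisIntegration.info('Minute Metrics', metrics), so the detailed data
remains available downstream.

For reference, a standalone, runnable sketch of the new one-line report
format. The metric values below are hypothetical samples invented for
illustration; in the service they come from metricCollector.getAndReset().

    // Sample metrics snapshot -- all values are hypothetical.
    const metrics = {
      kafka_pulled: 1200,      // messages pulled from Kafka this interval
      parse_error: 0,          // messages that failed to parse
      db_inserted: 1198,       // rows successfully inserted
      db_failed: 2,            // rows that failed to insert
      batch_flush_count: 24,   // number of batch flushes
      batch_flush_ms_sum: 312, // total flush time in ms
      db_insert_count: 24,     // number of insert operations
      db_insert_ms_sum: 1080,  // total insert time in ms
    };

    // Guard against division by zero before computing the averages.
    const flushAvgMs = metrics.batch_flush_count > 0
      ? (metrics.batch_flush_ms_sum / metrics.batch_flush_count).toFixed(1)
      : '0.0';
    const dbAvgMs = metrics.db_insert_count > 0
      ? (metrics.db_insert_ms_sum / metrics.db_insert_count).toFixed(1)
      : '0.0';

    const report = `[Metrics] Pulled:${metrics.kafka_pulled} ParseErr:${metrics.parse_error} Inserted:${metrics.db_inserted} Failed:${metrics.db_failed} FlushAvg:${flushAvgMs}ms DbAvg:${dbAvgMs}ms`;
    console.log(report);
    // -> [Metrics] Pulled:1200 ParseErr:0 Inserted:1198 Failed:2 FlushAvg:13.0ms DbAvg:45.0ms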