feat: 添加 G5 数据库支持,更新配置和文档
This commit is contained in:
@@ -1,6 +1,7 @@
|
||||
import cron from 'node-cron';
|
||||
import { config } from './config/config.js';
|
||||
import dbManager from './db/databaseManager.js';
|
||||
import g5DbManager from './db/g5DatabaseManager.js';
|
||||
import { createKafkaConsumers } from './kafka/consumer.js';
|
||||
import { parseMessageToRows } from './processor/index.js';
|
||||
import { createRedisClient } from './redis/redisClient.js';
|
||||
@@ -53,7 +54,7 @@ const bootstrap = async () => {
|
||||
const report = `[Metrics] Pulled:${metrics.kafka_pulled} ParseErr:${metrics.parse_error} Inserted:${metrics.db_inserted} Failed:${metrics.db_failed} FlushAvg:${flushAvgMs}ms DbAvg:${dbAvgMs}ms`;
|
||||
console.log(report);
|
||||
logger.info(report);
|
||||
|
||||
|
||||
try {
|
||||
await redisIntegration.info('Minute Metrics', metrics);
|
||||
} catch (err) {
|
||||
@@ -106,7 +107,7 @@ const bootstrap = async () => {
|
||||
const BATCH_SIZE = Math.max(10, Math.min(configuredBatchSize, configuredMaxInFlight));
|
||||
const BATCH_TIMEOUT_MS = Math.max(1, configuredBatchTimeoutMs);
|
||||
const commitOnAttempt = config.kafka.commitOnAttempt === true;
|
||||
|
||||
|
||||
const batchStates = new Map();
|
||||
|
||||
const partitionKeyFromMessage = (message) => {
|
||||
@@ -175,7 +176,16 @@ const bootstrap = async () => {
|
||||
const startedAt = Date.now();
|
||||
while (true) {
|
||||
try {
|
||||
await dbManager.insertRows({ schema: config.db.schema, table: config.db.table, rows });
|
||||
const promises = [
|
||||
dbManager.insertRows({ schema: config.db.schema, table: config.db.table, rows })
|
||||
];
|
||||
if (config.g5db.enabled) {
|
||||
promises.push(g5DbManager.insertRows({ schema: config.g5db.schema, table: config.g5db.table, rows }).catch(e => {
|
||||
logger.error('G5 Database insert failed but non-blocking', { error: e.message });
|
||||
}));
|
||||
}
|
||||
await Promise.all(promises);
|
||||
|
||||
metricCollector.increment('db_insert_count', 1);
|
||||
metricCollector.increment('db_insert_ms_sum', Date.now() - startedAt);
|
||||
return;
|
||||
@@ -196,7 +206,15 @@ const bootstrap = async () => {
|
||||
|
||||
/**
 * Insert a batch of rows exactly once (no retry loop).
 *
 * Writes the rows to the primary database and, when `config.g5db.enabled`
 * is true, mirrors the same rows to the G5 database in parallel. The G5
 * write is best-effort: its failure is logged but swallowed, so a G5
 * outage can never reject this function or block primary ingestion.
 *
 * Note: the flattened diff showed a leftover pre-change line that inserted
 * into the primary DB a second time; this is the post-change version with
 * exactly one primary insert.
 *
 * @param {Array<object>} rows - Parsed rows ready for insertion.
 * @returns {Promise<void>} Resolves once the primary insert (and any G5
 *   attempt) has settled; rejects only if the primary insert fails.
 */
const insertRowsOnce = async (rows) => {
  const startedAt = Date.now();
  const promises = [
    dbManager.insertRows({ schema: config.db.schema, table: config.db.table, rows })
  ];
  if (config.g5db.enabled) {
    // .catch keeps the mirror write non-blocking: a G5 failure must not
    // fail the batch or trigger the caller's error path.
    promises.push(g5DbManager.insertRows({ schema: config.g5db.schema, table: config.g5db.table, rows }).catch(e => {
      logger.error('G5 Database insert failed in insertOnce', { error: e.message });
    }));
  }
  await Promise.all(promises);
  metricCollector.increment('db_insert_count', 1);
  metricCollector.increment('db_insert_ms_sum', Date.now() - startedAt);
};
|
||||
@@ -329,7 +347,7 @@ const bootstrap = async () => {
|
||||
for (const item of unresolvedItems) {
|
||||
try {
|
||||
await handleError(err, item.message);
|
||||
} catch {}
|
||||
} catch { }
|
||||
item.resolve();
|
||||
}
|
||||
}
|
||||
@@ -355,7 +373,7 @@ const bootstrap = async () => {
|
||||
metricCollector.increment('kafka_pulled');
|
||||
metricCollector.incrementKeyed('kafka_pulled_by_partition', `${message.topic}-${message.partition}`, 1);
|
||||
}
|
||||
|
||||
|
||||
// const messageValue = Buffer.isBuffer(message.value)
|
||||
// ? message.value.toString('utf8')
|
||||
// : message.value;
|
||||
@@ -371,7 +389,7 @@ const bootstrap = async () => {
|
||||
// value: config.kafka.logMessages ? messageValue : undefined,
|
||||
// valueLength: !config.kafka.logMessages && typeof messageValue === 'string' ? messageValue.length : null
|
||||
// };
|
||||
|
||||
|
||||
// logger.info('Kafka message received', logDetails);
|
||||
|
||||
const partitionKey = partitionKeyFromMessage(message);
|
||||
@@ -414,7 +432,7 @@ const bootstrap = async () => {
|
||||
// Graceful Shutdown Logic
|
||||
const shutdown = async (signal) => {
|
||||
logger.info(`Received ${signal}, shutting down...`);
|
||||
|
||||
|
||||
try {
|
||||
// 1. Close Kafka Consumer
|
||||
if (consumers && consumers.length > 0) {
|
||||
@@ -429,8 +447,9 @@ const bootstrap = async () => {
|
||||
await redisClient.quit();
|
||||
logger.info('Redis client closed');
|
||||
|
||||
// 4. Close Database Pool
|
||||
// 4. Close Database Pools
|
||||
await dbManager.close();
|
||||
await g5DbManager.close();
|
||||
logger.info('Database connection closed');
|
||||
|
||||
process.exit(0);
|
||||
|
||||
Reference in New Issue
Block a user