2026-01-30 11:05:00 +08:00
|
|
|
import kafka from 'kafka-node';
|
|
|
|
|
import { logger } from '../utils/logger.js';
|
|
|
|
|
|
|
|
|
|
// kafka-node is a CommonJS package; pull ConsumerGroup off the default import.
const { ConsumerGroup } = kafka;
|
|
|
|
|
|
2026-01-30 20:09:46 +08:00
|
|
|
/**
 * Create a single ConsumerGroup instance with in-flight backpressure.
 *
 * @param {Object} args
 * @param {Object} args.kafkaConfig - brokers, groupId, clientId, topic, fetch/commit
 *   tuning, optional `maxInFlight` (default 50) and optional `sasl` settings.
 * @param {Function} args.onMessage - handler invoked per message; may return a Promise.
 * @param {Function} [args.onError] - optional hook called as (error, message) on handler
 *   failure, or (error) on consumer-level errors.
 * @param {number} args.instanceIndex - 0-based index used to suffix the clientId.
 * @returns {ConsumerGroup} the live consumer (caller owns shutdown).
 */
const createOneConsumer = ({ kafkaConfig, onMessage, onError, instanceIndex }) => {
  const kafkaHost = kafkaConfig.brokers.join(',');

  // Instance 0 keeps the bare clientId (stable for anything keyed on it);
  // extra instances get a numeric suffix so they are distinguishable.
  const clientId = instanceIndex === 0 ? kafkaConfig.clientId : `${kafkaConfig.clientId}-${instanceIndex}`;

  // Unique group-member id per process/start so restarts don't collide.
  const id = `${clientId}-${process.pid}-${Date.now()}`;

  // Backpressure threshold: pause fetching once this many messages are being processed.
  const maxInFlight = Number.isFinite(kafkaConfig.maxInFlight) ? kafkaConfig.maxInFlight : 50;

  let inFlight = 0;

  const consumer = new ConsumerGroup(
    {
      kafkaHost,
      groupId: kafkaConfig.groupId,
      clientId,
      id,
      fromOffset: 'earliest',
      protocol: ['roundrobin'],
      outOfRangeOffset: 'latest',
      autoCommit: true,
      autoCommitIntervalMs: kafkaConfig.autoCommitIntervalMs,
      fetchMaxBytes: kafkaConfig.fetchMaxBytes,
      fetchMinBytes: kafkaConfig.fetchMinBytes,
      fetchMaxWaitMs: kafkaConfig.fetchMaxWaitMs,
      sasl: kafkaConfig.sasl
    },
    kafkaConfig.topic
  );

  // Resume fetching once we're back under the limit. Calling resume() on a
  // consumer that isn't paused is a no-op, so no paused-state tracking needed.
  const tryResume = () => {
    if (inFlight < maxInFlight) {
      consumer.resume();
    }
  };

  consumer.on('message', (message) => {
    inFlight += 1;
    if (inFlight >= maxInFlight) {
      consumer.pause();
    }

    // BUG FIX: the previous `Promise.resolve(onMessage(message))` invoked the
    // handler BEFORE any promise machinery existed, so a synchronous throw
    // bypassed .catch/.finally, leaked the inFlight slot, and could leave the
    // consumer permanently paused. Deferring the call into .then() captures
    // synchronous and asynchronous failures alike.
    Promise.resolve()
      .then(() => onMessage(message))
      .catch((error) => {
        logger.error('Kafka message handling failed', { error: error?.message });
        if (onError) {
          try {
            onError(error, message);
          } catch (hookError) {
            // A failing error hook must not become an unhandled rejection
            // or prevent the slot from being released.
            logger.error('Kafka message handling failed', { error: hookError?.message });
          }
        }
      })
      .finally(() => {
        inFlight -= 1;
        tryResume();
      });
  });

  consumer.on('error', (error) => {
    logger.error('Kafka consumer error', { error: error?.message });
    if (onError) {
      onError(error);
    }
  });

  return consumer;
};
|
2026-01-30 20:09:46 +08:00
|
|
|
|
|
|
|
|
/**
 * Create one or more ConsumerGroup instances for the configured topic.
 *
 * Honors `kafkaConfig.consumerInstances` when it is a finite number,
 * otherwise defaults to a single instance; never creates fewer than one.
 *
 * @param {Object} args
 * @param {Object} args.kafkaConfig - shared Kafka configuration.
 * @param {Function} args.onMessage - per-message handler.
 * @param {Function} [args.onError] - optional error hook.
 * @returns {Array} the created consumers, in instance-index order.
 */
export const createKafkaConsumers = ({ kafkaConfig, onMessage, onError }) => {
  const requested = kafkaConfig.consumerInstances;
  const total = Math.max(1, Number.isFinite(requested) ? requested : 1);

  const consumers = [];
  for (let index = 0; index < total; index += 1) {
    consumers.push(createOneConsumer({ kafkaConfig, onMessage, onError, instanceIndex: index }));
  }
  return consumers;
};
|
|
|
|
|
|
|
|
|
|
/**
 * Convenience wrapper: create the consumer fleet and return only the
 * first instance (instanceIndex 0), for callers that want exactly one.
 *
 * @param {Object} args - same shape as createKafkaConsumers.
 * @returns {ConsumerGroup} the first created consumer.
 */
export const createKafkaConsumer = ({ kafkaConfig, onMessage, onError }) => {
  const [first] = createKafkaConsumers({ kafkaConfig, onMessage, onError });
  return first;
};
|