Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

add threadsafe support for consumer/producer creation and support mul… #59

Open
wants to merge 1 commit into
base: master
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion Dockerfile
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,7 @@ RUN apk --no-cache add \
cyrus-sasl-dev \
openssl-dev \
make \
python \
python3 \
git

RUN apk add --no-cache --virtual .build-deps gcc zlib-dev libc-dev bsd-compat-headers py-setuptools bash
Expand Down
3 changes: 2 additions & 1 deletion package.json
Original file line number Diff line number Diff line change
Expand Up @@ -52,7 +52,8 @@
"graphql-subscriptions": "^1.1.0",
"iterall": "^1.2.2",
"node-rdkafka": "^2.8.1",
"uuid": "^7.0.3"
"uuid": "^7.0.3",
"async-mutex": "^0.5.0"
},
"devDependencies": {
"@types/jest": "^25.2.1",
Expand Down
52 changes: 31 additions & 21 deletions src/kafka-pubsub.ts
Original file line number Diff line number Diff line change
Expand Up @@ -4,9 +4,10 @@ import * as Logger from 'bunyan';
import { createChildLogger } from './child-logger';
import { v4 as uuidv4 } from 'uuid';
import { EventEmitter } from 'events';
import { Mutex } from 'async-mutex';

export interface IKafkaOptions {
topic: string
topics: string[]
host: string
port: string
logger?: Logger,
Expand All @@ -32,6 +33,7 @@ export class KafkaPubSub extends PubSubEngine {
private ee: EventEmitter;
private subscriptions: { [key: string]: [string, (...args: any[]) => void] }
private subIdCounter: number;
private mutex = new Mutex(); // Create a mutex instance

protected logger: Logger

Expand All @@ -47,9 +49,13 @@ export class KafkaPubSub extends PubSubEngine {

public async publish(channel: string, payload: any): Promise<void> {
// only create producer if we actually publish something
this.producer = this.producer || await this.createProducer()
if (!this.producer) {
const release = await this.mutex.acquire();
this.producer = this.producer || await this.createProducer()
release()
}

let kafkaPayload = payload
let { _targetTopic, ...kafkaPayload } = payload;
if (!this.options.useHeaders) {
kafkaPayload = {
channel: channel,
Expand All @@ -63,25 +69,29 @@ export class KafkaPubSub extends PubSubEngine {

return new Promise((resolve, reject) => {
this.producer.produce(
this.options.topic,
_targetTopic || channel,
null,
this.serialiseMessage(kafkaPayload),
this.options.keyFun ? this.options.keyFun(kafkaPayload) : null,
Date.now(),
this.options.useHeaders ? [ {channel: Buffer.from(channel)} ] : null,
(err) => {
if (err) {
reject(err)
reject(err)
} else {
resolve()
resolve()
}
})
})
}

public async subscribe(channel: string, onMessage: (...args: any[]) => void, options?: Object): Promise<number> {
this.logger.info("Subscribing to %s", channel)
this.consumer = this.consumer || await this.createConsumer(this.options.topic)
if (!this.consumer) {
const release = await this.mutex.acquire();
this.consumer = this.consumer || await this.createConsumer(this.options.topics)
release()
}
const internalMessage = (...args: any[]) => {
if (this.options.useHeaders) {
onMessage(this.deserialiseMessage(args[0]))
Expand Down Expand Up @@ -160,11 +170,11 @@ export class KafkaPubSub extends PubSubEngine {
this.logger.error(err)
})
return new Promise((resolve, reject) => {
producer.on('ready', (data, metadata) => {
let topics = metadata.topics.map(topic => topic.name);
this.logger.info('Connected, found topics: %s', topics);
producer.on('ready', (_data, metadata) => {
let kafkaTopics = metadata.topics.map(topic => topic.name);
this.logger.info('Connected, found topics: %s', kafkaTopics);

if (topics.includes(this.options.topic)) {
if (this.options.topics.every(topic => kafkaTopics.includes(topic))) {
resolve(producer);
} else {
this.logger.error('Could not find requested topic %s', this.options.topic);
Expand All @@ -178,7 +188,7 @@ export class KafkaPubSub extends PubSubEngine {
})
}

private async createConsumer(topic: string): Promise<Kafka.KafkaConsumer> {
private async createConsumer(topics: string[]): Promise<Kafka.KafkaConsumer> {
// Create a group for each instance. The consumer will receive all messages from the topic
const groupId = this.options.groupId || uuidv4()

Expand All @@ -197,7 +207,7 @@ export class KafkaPubSub extends PubSubEngine {
this.options.topicConfig
));

consumer.on('data', (message) => {
consumer.on('data', (message: Kafka.Message) => {
if (this.logger.debug) {
this.logger.debug("Received %s", message.value.toString())
}
Expand All @@ -217,7 +227,7 @@ export class KafkaPubSub extends PubSubEngine {
this.ee.emit(parsedMessage.channel, parsedMessage.payload)
} else {
// No channel abstraction, publish over the whole topic
this.ee.emit(topic, parsedMessage)
this.ee.emit(message.topic, parsedMessage)
}
}

Expand All @@ -228,17 +238,17 @@ export class KafkaPubSub extends PubSubEngine {
});

return new Promise((resolve, reject) => {
consumer.on('ready', (data, metadata) => {
let topics = metadata.topics.map(topic => topic.name);
this.logger.info('Connected, found topics: %s', topics);
consumer.on('ready', (_data, metadata) => {
let kafkaTopics = metadata.topics.map(topic => topic.name);
this.logger.info('Connected, found topics: %s', kafkaTopics);

if (topics.includes(topic)) {
this.logger.info("Subscribing to %s", topic)
consumer.subscribe([topic]);
if (topics.every(topic => kafkaTopics.includes(topic))) {
this.logger.info("Subscribing to %s", topics.join(','))
consumer.subscribe(topics);
consumer.consume();
resolve(consumer);
} else {
this.logger.error('Could not find requested topic %s', topic);
this.logger.error('Could not find requested topic %s', topics.join(','));
consumer.disconnect()
reject('Could not find requested topic %s')
}
Expand Down
23 changes: 22 additions & 1 deletion src/test/kafka-pubsub-headers.spec.ts
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,7 @@ let pubsub: KafkaPubSub
beforeAll(() => {
jest.setTimeout(60000);
pubsub = new KafkaPubSub({
topic: process.env.KAFKA_TOPIC || 'test',
topics: process.env.KAFKA_TOPIC.split(',') || ['test1', 'test2'],
host: process.env.KAFKA_HOST || 'localhost',
port: process.env.KAFKA_PORT || '9092',
logger: Logger.createLogger({
Expand Down Expand Up @@ -69,4 +69,25 @@ describe('KafkaPubSub Basic Tests', () => {
await iter.return();
})

// NOTE(review): the original mixed `async` with a `done` callback, which modern
// Jest rejects ("Test functions cannot both take a 'done' callback and return
// something"). Rewritten to resolve/reject a promise from the message handler
// and await it, so a dropped message fails via the Jest timeout instead of
// silently never calling done. The unused `subscription` local was removed.
test('should subscribe and publish messages correctly with _targetTopic', async () => {

  const inputChannel = 'test-subscribe'
  const inputPayload = {
    _targetTopic: 'test1',
    id: 'subscribe-value',
  }

  // Settled by onMessage once the published payload has been validated.
  let settle!: { resolve: () => void; reject: (e: unknown) => void }
  const received = new Promise<void>((resolve, reject) => {
    settle = { resolve, reject }
  })

  // NOTE(review): publish() destructures `_targetTopic` OUT of the payload
  // before producing (see kafka-pubsub.ts), so the delivered message may not
  // contain `_targetTopic` and this strict equality could fail — confirm
  // against the serialisation/deserialisation path before merging.
  function onMessage(payload) {
    try {
      expect(payload).toStrictEqual(inputPayload);
      settle.resolve();
    } catch (error) {
      settle.reject(error);
    }
  }

  await pubsub.subscribe(inputChannel, onMessage)
  // Give the freshly created consumer time to join the group before producing.
  await new Promise(r => setTimeout(r, 5000));
  await pubsub.publish(inputChannel, inputPayload)
  await received
})
})
2 changes: 1 addition & 1 deletion src/test/kafka-pubsub.spec.ts
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,7 @@ let pubsub: KafkaPubSub
beforeAll(() => {
jest.setTimeout(60000);
pubsub = new KafkaPubSub({
topic: process.env.KAFKA_TOPIC || 'test',
topics: process.env.KAFKA_TOPIC.split(',') || ['test', 'test1'],
host: process.env.KAFKA_HOST || 'localhost',
port: process.env.KAFKA_PORT || '9092',
logger: Logger.createLogger({
Expand Down