-
Notifications
You must be signed in to change notification settings - Fork 1
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
refactor: make consumer the only public interface for live event listener (#91)
- Loading branch information
Kishan Sairam Adapa
authored
Dec 28, 2023
1 parent
7b78923
commit 12462bd
Showing
6 changed files
with
143 additions
and
96 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
54 changes: 54 additions & 0 deletions
54
...t-listener/src/main/java/org/hypertrace/core/kafka/event/listener/KafkaConsumerUtils.java
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,54 @@ | ||
package org.hypertrace.core.kafka.event.listener; | ||
|
||
import com.typesafe.config.Config; | ||
import com.typesafe.config.ConfigFactory; | ||
import com.typesafe.config.ConfigValue; | ||
import java.util.HashMap; | ||
import java.util.Map; | ||
import java.util.Properties; | ||
import java.util.Set; | ||
import org.apache.kafka.clients.consumer.Consumer; | ||
import org.apache.kafka.clients.consumer.ConsumerConfig; | ||
import org.apache.kafka.clients.consumer.KafkaConsumer; | ||
import org.apache.kafka.common.serialization.ByteArrayDeserializer; | ||
import org.apache.kafka.common.serialization.Deserializer; | ||
|
||
public class KafkaConsumerUtils { | ||
public static final String TOPIC_NAME = "topic.name"; // required key in kafkaConfig | ||
public static final String POLL_TIMEOUT = "poll.timeout"; // defaults to 30s if not provided | ||
|
||
/** | ||
* Returns a kafka consumer for provided config and key value deserializers. Only one instance of | ||
* consumer should be required per pod, ensure singleton. | ||
*/ | ||
public static <K, V> Consumer<K, V> getKafkaConsumer( | ||
Config kafkaConfig, Deserializer<K> keyDeserializer, Deserializer<V> valueDeserializer) { | ||
return new KafkaConsumer<>( | ||
getKafkaConsumerConfigs(kafkaConfig.withFallback(getDefaultKafkaConsumerConfigs())), | ||
keyDeserializer, | ||
valueDeserializer); | ||
} | ||
|
||
private static Properties getKafkaConsumerConfigs(Config configs) { | ||
Map<String, String> configMap = new HashMap<>(); | ||
Set<Map.Entry<String, ConfigValue>> entries = configs.entrySet(); | ||
for (Map.Entry<String, ConfigValue> entry : entries) { | ||
String key = entry.getKey(); | ||
configMap.put(key, configs.getString(key)); | ||
} | ||
Properties props = new Properties(); | ||
props.putAll(configMap); | ||
return props; | ||
} | ||
|
||
private static Config getDefaultKafkaConsumerConfigs() { | ||
Map<String, String> defaultKafkaConsumerConfigMap = new HashMap<>(); | ||
defaultKafkaConsumerConfigMap.put( | ||
ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, ByteArrayDeserializer.class.getName()); | ||
defaultKafkaConsumerConfigMap.put( | ||
ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, ByteArrayDeserializer.class.getName()); | ||
defaultKafkaConsumerConfigMap.put(ConsumerConfig.MAX_POLL_RECORDS_CONFIG, "1000"); | ||
defaultKafkaConsumerConfigMap.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, "false"); | ||
return ConfigFactory.parseMap(defaultKafkaConsumerConfigMap); | ||
} | ||
} |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
71 changes: 71 additions & 0 deletions
71
...testFixtures/java/org/hypertrace/core/kafka/event/listener/KafkaMockConsumerTestUtil.java
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,71 @@ | ||
package org.hypertrace.core.kafka.event.listener; | ||
|
||
import java.util.List; | ||
import java.util.Map; | ||
import java.util.Objects; | ||
import java.util.function.Function; | ||
import java.util.stream.Collectors; | ||
import java.util.stream.IntStream; | ||
import org.apache.kafka.clients.consumer.ConsumerRecord; | ||
import org.apache.kafka.clients.consumer.MockConsumer; | ||
import org.apache.kafka.clients.consumer.OffsetResetStrategy; | ||
import org.apache.kafka.common.Node; | ||
import org.apache.kafka.common.PartitionInfo; | ||
import org.apache.kafka.common.TopicPartition; | ||
|
||
public class KafkaMockConsumerTestUtil<K, V> { | ||
private final String topicName; | ||
private final Map<TopicPartition, Long> currentOffsets; | ||
|
||
private final MockConsumer<K, V> mockConsumer; | ||
|
||
public KafkaMockConsumerTestUtil(String topicName, int numPartitions) { | ||
this.topicName = topicName; | ||
mockConsumer = new MockConsumer<>(OffsetResetStrategy.LATEST); | ||
List<PartitionInfo> partitionInfos = | ||
IntStream.range(0, numPartitions) | ||
.mapToObj(i -> getPartitionInfo(topicName, i)) | ||
.collect(Collectors.toUnmodifiableList()); | ||
mockConsumer.updatePartitions(topicName, partitionInfos); | ||
currentOffsets = | ||
IntStream.range(0, numPartitions) | ||
.mapToObj(i -> getTopicPartition(topicName, i)) | ||
.collect(Collectors.toMap(Function.identity(), k -> 1L)); | ||
mockConsumer.updateEndOffsets(currentOffsets); | ||
} | ||
|
||
/** creates 1 partition by default */ | ||
public KafkaMockConsumerTestUtil(String topicName) { | ||
this(topicName, 1); | ||
} | ||
|
||
public MockConsumer<K, V> getMockConsumer() { | ||
return mockConsumer; | ||
} | ||
|
||
/** adds to 0th partition by default */ | ||
public void addRecord(K key, V value) { | ||
addRecordToPartition(0, key, value); | ||
} | ||
|
||
public void addRecordToPartition(int partition, K key, V value) { | ||
Long latestOffset = | ||
currentOffsets.computeIfPresent(getTopicPartition(topicName, partition), (k, v) -> v + 1); | ||
if (Objects.isNull(latestOffset)) { | ||
throw new UnsupportedOperationException( | ||
"cannot add to partition " | ||
+ partition | ||
+ ", total partitions is " | ||
+ currentOffsets.size()); | ||
} | ||
mockConsumer.addRecord(new ConsumerRecord<>(topicName, partition, latestOffset, key, value)); | ||
} | ||
|
||
private static PartitionInfo getPartitionInfo(String topic, int partition) { | ||
return new PartitionInfo(topic, partition, Node.noNode(), new Node[0], new Node[0]); | ||
} | ||
|
||
private static TopicPartition getTopicPartition(String topic, int partition) { | ||
return new TopicPartition(topic, partition); | ||
} | ||
} |