ProducerDemoKeys.java
package kafka.tutorial1;

import org.apache.kafka.clients.producer.*;
import org.apache.kafka.common.serialization.StringSerializer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.Properties;
import java.util.concurrent.ExecutionException;

public class ProducerDemoKeys {

    public static void main(String[] args) throws ExecutionException, InterruptedException {

        final Logger logger = LoggerFactory.getLogger(ProducerDemoKeys.class);

        String bootstrapServers = "127.0.0.1:9092";

        // create Producer properties
        Properties properties = new Properties();
        properties.setProperty(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers);
        properties.setProperty(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
        properties.setProperty(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());

        // create the producer
        KafkaProducer<String, String> producer = new KafkaProducer<String, String>(properties);

        for (int i = 0; i < 10; i++) {

            // create a producer record
            String topic = "first_topic";
            String value = "hello world " + Integer.toString(i);
            String key = "id_" + Integer.toString(i);

            ProducerRecord<String, String> record =
                    new ProducerRecord<String, String>(topic, key, value);

            logger.info("Key: " + key); // log the key
            // id_0 is going to partition 1
            // id_1 partition 0
            // id_2 partition 2
            // id_3 partition 0
            // id_4 partition 2
            // id_5 partition 2
            // id_6 partition 0
            // id_7 partition 2
            // id_8 partition 1
            // id_9 partition 2

            // send data - asynchronous
            producer.send(record, new Callback() {
                public void onCompletion(RecordMetadata recordMetadata, Exception e) {
                    // executes every time a record is successfully sent or an exception is thrown
                    if (e == null) {
                        // the record was successfully sent
                        logger.info("Received new metadata. \n" +
                                "Topic:" + recordMetadata.topic() + "\n" +
                                "Partition: " + recordMetadata.partition() + "\n" +
                                "Offset: " + recordMetadata.offset() + "\n" +
                                "Timestamp: " + recordMetadata.timestamp());
                    } else {
                        logger.error("Error while producing", e);
                    }
                }
            }).get(); // block the .send() to make it synchronous - don't do this in production!
        }

        // flush data
        producer.flush();
        // flush and close producer
        producer.close();
    }
}
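The partition assignments noted in the comments (id_0 to partition 1, id_1 to partition 0, and so on) are stable because Kafka's default partitioner derives the partition from a murmur2 hash of the serialized key, modulo the topic's partition count, so the same key always lands on the same partition. The sketch below (not part of the original file; the class name KeyPartitionSketch is hypothetical) reproduces that calculation, assuming first_topic has 3 partitions as the comments suggest.

package kafka.tutorial1;

import org.apache.kafka.common.serialization.StringSerializer;
import org.apache.kafka.common.utils.Utils;

public class KeyPartitionSketch {

    public static void main(String[] args) {
        // assumption: first_topic has 3 partitions
        int numPartitions = 3;
        StringSerializer serializer = new StringSerializer();

        for (int i = 0; i < 10; i++) {
            String key = "id_" + i;
            // serialize the key the same way the producer does
            byte[] keyBytes = serializer.serialize("first_topic", key);
            // same formula the default partitioner applies to keyed records
            int partition = Utils.toPositive(Utils.murmur2(keyBytes)) % numPartitions;
            System.out.println(key + " -> partition " + partition);
        }

        serializer.close();
    }
}

Because murmur2 is deterministic, running this against the same partition count should print the same key-to-partition mapping seen in the comments above; changing the number of partitions would reshuffle which keys map to which partitions.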