Committed Changes Of Producer and Consumer Demos

parent ceb4a654
ConsumerDemo.java
package com.github.nagarjun.kafka.demo1;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.time.Duration;
import java.util.Arrays;
import java.util.Properties;
public class ConsumerDemo {

    public static void main(String[] args) {
        Logger logger = LoggerFactory.getLogger(ConsumerDemo.class);

        String groupId = "my_consumer_application_group";

        // Creating Consumer Config Properties
        Properties properties = new Properties();
        properties.setProperty(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "127.0.0.1:9092");
        properties.setProperty(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
        properties.setProperty(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
        properties.setProperty(ConsumerConfig.GROUP_ID_CONFIG, groupId);
        properties.setProperty(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");

        // Create Consumer
        KafkaConsumer<String, String> kafkaConsumer = new KafkaConsumer<>(properties);

        // Subscribe Consumer to our topic
        kafkaConsumer.subscribe(Arrays.asList("my_topic1"));

        // Poll for New Data
        while (true) {
            ConsumerRecords<String, String> records = kafkaConsumer.poll(Duration.ofMillis(100));
            for (ConsumerRecord<String, String> record : records) {
                logger.info("Key: " + record.key() + ", Value: " + record.value());
                logger.info("Partition: " + record.partition() + ", Offset: " + record.offset());
            }
        }
    }
}
ConsumerDemoAssignAndSeek.java
package com.github.nagarjun.kafka.demo1;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.common.TopicPartition;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.time.Duration;
import java.util.Arrays;
import java.util.Properties;
public class ConsumerDemoAssignAndSeek {

    public static void main(String[] args) {
        Logger logger = LoggerFactory.getLogger(ConsumerDemoAssignAndSeek.class);

        // Creating Consumer Config Properties
        Properties properties = new Properties();
        properties.setProperty(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "127.0.0.1:9092");
        properties.setProperty(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
        properties.setProperty(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
        properties.setProperty(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");

        // Create Consumer
        KafkaConsumer<String, String> kafkaConsumer = new KafkaConsumer<>(properties);

        // Assign and seek are mostly used to replay data or fetch a specific message
        // assign
        TopicPartition topicPartition = new TopicPartition("my_topic1", 0);
        kafkaConsumer.assign(Arrays.asList(topicPartition));

        // seek
        long offsetToReadFrom = 15L;
        kafkaConsumer.seek(topicPartition, offsetToReadFrom);

        int noOfMessagesToRead = 5;
        int noOfMessagesReadSoFar = 0;
        boolean keepOnReading = true;

        // Poll for New Data
        while (keepOnReading) {
            ConsumerRecords<String, String> records = kafkaConsumer.poll(Duration.ofMillis(100));
            for (ConsumerRecord<String, String> record : records) {
                noOfMessagesReadSoFar += 1;
                logger.info("Key: " + record.key() + ", Value: " + record.value());
                logger.info("Partition: " + record.partition() + ", Offset: " + record.offset());
                if (noOfMessagesReadSoFar >= noOfMessagesToRead) {
                    keepOnReading = false;
                    break;
                }
            }
        }
        kafkaConsumer.close();
        logger.info("Exiting the application");
    }
}
ConsumerDemoGroups.java
package com.github.nagarjun.kafka.demo1;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.time.Duration;
import java.util.Arrays;
import java.util.Properties;
public class ConsumerDemoGroups {

    public static void main(String[] args) {
        Logger logger = LoggerFactory.getLogger(ConsumerDemoGroups.class);

        String groupId = "my_consumer_application_group1";

        // Creating Consumer Config Properties
        Properties properties = new Properties();
        properties.setProperty(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "127.0.0.1:9092");
        properties.setProperty(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
        properties.setProperty(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
        properties.setProperty(ConsumerConfig.GROUP_ID_CONFIG, groupId);
        properties.setProperty(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");

        // Create Consumer
        KafkaConsumer<String, String> kafkaConsumer = new KafkaConsumer<>(properties);

        // Subscribe Consumer to our topic
        kafkaConsumer.subscribe(Arrays.asList("my_topic1"));

        // Poll for New Data
        while (true) {
            ConsumerRecords<String, String> records = kafkaConsumer.poll(Duration.ofMillis(100));
            for (ConsumerRecord<String, String> record : records) {
                logger.info("Key: " + record.key() + ", Value: " + record.value());
                logger.info("Partition: " + record.partition() + ", Offset: " + record.offset());
            }
        }
    }
}
ProducerDemoKeys.java renamed to ProducerDemoWithKeys.java

@@ -9,12 +9,12 @@ import org.slf4j.LoggerFactory;
 import java.util.Properties;
 import java.util.concurrent.ExecutionException;
-public class ProducerDemoKeys {
+public class ProducerDemoWithKeys {
     public static void main(String[] args) throws ExecutionException, InterruptedException {
-        final Logger logger = LoggerFactory.getLogger(ProducerDemoKeys.class);
+        final Logger logger = LoggerFactory.getLogger(ProducerDemoWithKeys.class);
         //Create Producer Properties
         Properties properties = new Properties();
         properties.setProperty(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "127.0.0.1:9092");
...
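The hunk above only shows the class rename and the bootstrap-server config; the rest of the file is collapsed. For reference, a minimal sketch of what a keyed producer demo along these lines could look like. The topic name, record contents, loop count, and the synchronous .get() are illustrative assumptions, not the elided code.

package com.github.nagarjun.kafka.demo1;

import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.serialization.StringSerializer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.Properties;
import java.util.concurrent.ExecutionException;

// Hypothetical sketch: only the class name and bootstrap config come from the diff above
public class ProducerDemoWithKeys {

    public static void main(String[] args) throws ExecutionException, InterruptedException {
        final Logger logger = LoggerFactory.getLogger(ProducerDemoWithKeys.class);

        // Create Producer Properties
        Properties properties = new Properties();
        properties.setProperty(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "127.0.0.1:9092");
        properties.setProperty(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
        properties.setProperty(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());

        KafkaProducer<String, String> producer = new KafkaProducer<>(properties);

        for (int i = 0; i < 10; i++) {
            // Records with the same key always land on the same partition
            String key = "id_" + i;
            ProducerRecord<String, String> record = new ProducerRecord<>("my_topic1", key, "hello world " + i);
            // .get() blocks until the send completes (fine for a demo, not for production)
            producer.send(record).get();
            logger.info("Sent record with key " + key);
        }

        producer.flush();
        producer.close();
    }
}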
ProducerDemoWithCallback.java renamed to ProducerWithCallbackDemo.java

@@ -8,12 +8,12 @@ import org.slf4j.LoggerFactory;
 import java.util.Properties;
-public class ProducerDemoWithCallback {
+public class ProducerWithCallbackDemo {
     public static void main(String[] args) {
-        final Logger logger = LoggerFactory.getLogger(ProducerDemoWithCallback.class);
+        final Logger logger = LoggerFactory.getLogger(ProducerWithCallbackDemo.class);
         //Create Producer Properties
         Properties properties = new Properties();
         properties.setProperty(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "127.0.0.1:9092");
...
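The same applies to the callback demo: only the rename is visible in the hunk. A minimal sketch of a producer that logs record metadata from a send callback, under the same assumptions (topic name and payload are illustrative, not the elided code).

package com.github.nagarjun.kafka.demo1;

import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.serialization.StringSerializer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.Properties;

// Hypothetical sketch: only the class name and bootstrap config come from the diff above
public class ProducerWithCallbackDemo {

    public static void main(String[] args) {
        final Logger logger = LoggerFactory.getLogger(ProducerWithCallbackDemo.class);

        // Create Producer Properties
        Properties properties = new Properties();
        properties.setProperty(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "127.0.0.1:9092");
        properties.setProperty(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
        properties.setProperty(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());

        KafkaProducer<String, String> producer = new KafkaProducer<>(properties);

        ProducerRecord<String, String> record = new ProducerRecord<>("my_topic1", "hello world");

        // The callback fires once the broker acknowledges the record (or the send fails)
        producer.send(record, (metadata, exception) -> {
            if (exception == null) {
                logger.info("Received metadata: topic=" + metadata.topic()
                        + ", partition=" + metadata.partition()
                        + ", offset=" + metadata.offset());
            } else {
                logger.error("Error while producing", exception);
            }
        });

        producer.flush();
        producer.close();
    }
}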