【Kafka】Connecting to Kafka with Java to Send and Read Data
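Both examples below assume a broker at 192.168.134.150:9092 and a topic named kafkademo01. If that topic does not exist yet (and auto-creation is disabled on the broker), you can create it from Java with the AdminClient that ships in the same kafka-clients dependency. A minimal sketch; the class name and the partition/replication counts (3 partitions, replication factor 1 for a single broker) are placeholder choices, not from the original post:

import org.apache.kafka.clients.admin.AdminClient;
import org.apache.kafka.clients.admin.AdminClientConfig;
import org.apache.kafka.clients.admin.NewTopic;

import java.util.Collections;
import java.util.Properties;

public class CreateTopic {
    public static void main(String[] args) throws Exception {
        Properties prop = new Properties();
        prop.put(AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG, "192.168.134.150:9092");
        // try-with-resources closes the admin client when done
        try (AdminClient admin = AdminClient.create(prop)) {
            // 3 partitions, replication factor 1 -- assumed values for a single-broker setup
            NewTopic topic = new NewTopic("kafkademo01", 3, (short) 1);
            // Block until the broker confirms topic creation
            admin.createTopics(Collections.singleton(topic)).all().get();
        }
    }
}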
Producer
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.serialization.StringSerializer;

import java.util.Properties;

public class MyProducer {
    public static void main(String[] args) {
        Properties prop = new Properties();
        prop.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "192.168.134.150:9092");
        prop.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
        prop.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
        // acks=-1 (same as "all"): wait for all in-sync replicas to acknowledge
        prop.put(ProducerConfig.ACKS_CONFIG, "-1");

        KafkaProducer<String, String> producer = new KafkaProducer<>(prop);
        for (int i = 0; i < 200; i++) {
            // No key is set, so the default partitioner chooses the partition
            ProducerRecord<String, String> producerRecord =
                    new ProducerRecord<>("kafkademo01", "hello world " + i);
            producer.send(producerRecord);
            try {
                Thread.sleep(100);
            } catch (InterruptedException e) {
                e.printStackTrace();
            }
        }
        // Flush any buffered records and release resources
        producer.close();
        System.out.println("Game Over!");
    }
}
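send() above is fire-and-forget: a delivery failure would go unnoticed by the program. The client also accepts a callback that runs once the broker responds, which is handy for confirming delivery or logging errors. A minimal sketch under the same broker/topic assumptions; the class name is hypothetical:

import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.serialization.StringSerializer;

import java.util.Properties;

public class MyProducerWithCallback {
    public static void main(String[] args) {
        Properties prop = new Properties();
        prop.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "192.168.134.150:9092");
        prop.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
        prop.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
        prop.put(ProducerConfig.ACKS_CONFIG, "-1");

        // try-with-resources flushes and closes the producer on exit
        try (KafkaProducer<String, String> producer = new KafkaProducer<>(prop)) {
            ProducerRecord<String, String> record =
                    new ProducerRecord<>("kafkademo01", "hello with callback");
            // The callback runs on the producer's I/O thread once the broker responds
            producer.send(record, (metadata, exception) -> {
                if (exception != null) {
                    exception.printStackTrace();
                } else {
                    System.out.println("sent to partition " + metadata.partition()
                            + " at offset " + metadata.offset());
                }
            });
        }
    }
}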
Consumer
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.common.serialization.StringDeserializer;

import java.time.Duration;
import java.util.Collections;
import java.util.Properties;

public class MyConsumer {
    public static void main(String[] args) {
        Properties prop = new Properties();
        prop.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "192.168.134.150:9092");
        prop.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
        prop.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
        prop.put(ConsumerConfig.SESSION_TIMEOUT_MS_CONFIG, "30000");
        // Auto-commit is disabled, so the interval below has no effect and
        // offsets are never committed in this demo
        prop.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, "false");
        prop.put(ConsumerConfig.AUTO_COMMIT_INTERVAL_MS_CONFIG, "1000");
        // earliest / latest / none ("none" throws if the group has no committed offset)
        prop.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "latest");

        // A single consumer in group G1:
        // prop.put(ConsumerConfig.GROUP_ID_CONFIG, "G1");
        // KafkaConsumer<String, String> consumer = new KafkaConsumer<>(prop);
        // consumer.subscribe(Collections.singleton("kafkademo02"));
        // while (true) {
        //     ConsumerRecords<String, String> records = consumer.poll(Duration.ofMillis(100));
        //     for (ConsumerRecord<String, String> record : records) {
        //         System.out.println(record.offset() + "\t" + record.key() + "\t" + record.value());
        //     }
        // }

        // Simulate several consumers in the same group G2; Kafka assigns each
        // consumer a subset of the topic's partitions
        prop.put(ConsumerConfig.GROUP_ID_CONFIG, "G2");
        for (int i = 0; i < 4; i++) {
            new Thread(() -> {
                KafkaConsumer<String, String> consumer = new KafkaConsumer<>(prop);
                consumer.subscribe(Collections.singleton("kafkademo01"));
                while (true) {
                    ConsumerRecords<String, String> records = consumer.poll(Duration.ofMillis(100));
                    for (ConsumerRecord<String, String> record : records) {
                        System.out.println(Thread.currentThread().getName() + "\t"
                                + record.offset() + "\t" + record.key() + "\t" + record.value());
                    }
                }
            }).start();
        }
    }
}
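Because enable.auto.commit is false and the demo never commits, every restart falls back to auto.offset.reset. If you want consumed positions to survive restarts, commit manually after processing each batch. A minimal sketch; the class name and the group id G3 are placeholders, not from the original post:

import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.common.serialization.StringDeserializer;

import java.time.Duration;
import java.util.Collections;
import java.util.Properties;

public class MyCommittingConsumer {
    public static void main(String[] args) {
        Properties prop = new Properties();
        prop.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "192.168.134.150:9092");
        prop.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
        prop.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
        prop.put(ConsumerConfig.GROUP_ID_CONFIG, "G3"); // placeholder group for this sketch
        prop.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, "false");
        prop.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");

        try (KafkaConsumer<String, String> consumer = new KafkaConsumer<>(prop)) {
            consumer.subscribe(Collections.singleton("kafkademo01"));
            while (true) {
                ConsumerRecords<String, String> records = consumer.poll(Duration.ofMillis(100));
                for (ConsumerRecord<String, String> record : records) {
                    System.out.println(record.offset() + "\t" + record.value());
                }
                // Synchronously commit the offsets returned by the last poll,
                // so a restarted consumer resumes where this one left off
                if (!records.isEmpty()) {
                    consumer.commitSync();
                }
            }
        }
    }
}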