小结:创建两个或多个数据文本文件,利用多线程同时发送及接收

Producer

// 生产者:两个线程分别逐行读取两个文本文件,发送到同一主题的 0 号 / 1 号分区。
Map<String, Object> config = new HashMap<String, Object>();

        config.put("bootstrap.servers","Kafka集群所在IP:9092");
        config.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
        config.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");

        // 使用与序列化器一致的泛型 <String, String>,避免原始类型(raw type)和 unchecked 警告
        final KafkaProducer<String, String> producer = new KafkaProducer<String, String>(config);

        // 线程 1:读取 文本1.txt,每行追加标记后发送到分区 0
        new Thread(){
            @Override
            public void run() {
                // try-with-resources 保证 BufferedReader 一定被关闭,避免文件句柄泄漏
                try (BufferedReader in = new BufferedReader(new FileReader("文本1.txt"))) {
                    String line;
                    while ((line = in.readLine()) != null) {
                        line += "......文本1";
                        // key 为 null:分区已显式指定为 0,不依赖 key 做分区
                        ProducerRecord<String, String> record =
                                new ProducerRecord<String, String>("主题名称", 0, null, line);
                        producer.send(record);
                        System.out.println(record.value());
                        Thread.sleep(500); // 放慢发送节奏,便于观察消费端输出
                    }
                } catch (InterruptedException e) {
                    Thread.currentThread().interrupt(); // 恢复中断标志,不吞掉中断
                } catch (Exception e) {
                    e.printStackTrace();
                }
            }
        }.start();

        // 线程 2:读取 文本2.txt,每行追加标记后发送到分区 1
        new Thread(){
            @Override
            public void run() {
                try (BufferedReader in = new BufferedReader(new FileReader("文本2.txt"))) {
                    String line;
                    while ((line = in.readLine()) != null) {
                        line += "......文本2";
                        ProducerRecord<String, String> record =
                                new ProducerRecord<String, String>("主题名称", 1, null, line);
                        producer.send(record);
                        // 原代码此处为 println(+record.value()):String 上的一元 "+" 无法编译,已去掉
                        System.out.println(record.value());
                        Thread.sleep(500);
                    }
                } catch (InterruptedException e) {
                    Thread.currentThread().interrupt();
                } catch (Exception e) {
                    e.printStackTrace();
                }
            }
        }.start();
Consumer
// 消费者:同一消费者组内启动两个消费者,组内会将主题的两个分区分摊给它们。
final Map<String, Object> config = new HashMap<String, Object>();
        config.put("bootstrap.servers","Kafka集群所在IP:9092");
        config.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        config.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        // group.id 是消费者组 ID(此处沿用原示例的取值);同组消费者共同分摊分区
        config.put("group.id", "主题名称");
        final List<String> topics = new ArrayList<String>();
        // 订阅的主题必须与生产者发送的主题一致(原代码误写为 "school",导致收不到数据)
        topics.add("主题名称");

        //两个消费者
        // NOTE(review):分区到消费者的分配由组协调器决定,"消费者1 收到文本1" 这种对应关系
        // 并无保证,输出中的 "(文本1)"/"(文本2)" 标签仅为演示,不代表固定绑定。
        new Thread(){
            @Override
            public void run() {
                // 泛型与上面配置的 StringDeserializer 保持一致:<String, String>
                KafkaConsumer<String, String> consumer = new KafkaConsumer<String, String>(config);
                consumer.subscribe(topics);
                while (true){
                    // 最多阻塞 1000ms 拉取一批消息
                    ConsumerRecords<String, String> records = consumer.poll(1000);
                    Iterator<ConsumerRecord<String, String>> it = records.iterator();
                    while (it.hasNext()){
                        System.out.println("第一个消费者:(文本1)"+it.next().value());
                    }
                }
            }
        }.start();

        new Thread(){
            @Override
            public void run() {
                KafkaConsumer<String, String> consumer = new KafkaConsumer<String, String>(config);
                consumer.subscribe(topics);
                while (true){
                    ConsumerRecords<String, String> records = consumer.poll(1000);
                    Iterator<ConsumerRecord<String, String>> it = records.iterator();
                    while (it.hasNext()){
                        System.out.println("第二个消费者:(文本2)"+it.next().value());
                    }
                }
            }
        }.start();


Logo

Kafka开源项目指南提供详尽教程,助开发者掌握其架构、配置和使用,实现高效数据流管理和实时处理。它高性能、可扩展,适合日志收集和实时数据处理,通过持久化保障数据安全,是企业大数据生态系统的核心。

更多推荐