Kafka SASL consumer example

Posted by huaweitman




package cn.cuiot.dmp.rocketmq;

import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.common.config.SaslConfigs;
import org.apache.kafka.common.config.SslConfigs;

import javax.crypto.Mac;
import javax.crypto.spec.SecretKeySpec;
import java.util.Arrays;
import java.util.Properties;


public class AclConsumerMain extends AbstractConsumer {
    //consumer
    private static org.apache.kafka.clients.consumer.KafkaConsumer<String, String> consumer;


        public static void main(String[] args) {
            String kafkaBootstrapServers = "kafka.cuiot.cn:9093,kafka.cuiot.cn:9193,kafka.cuiot.cn:9293";
            String kafkaSaslConfDir = "C:\\data\\consumer.conf";
            String groupIdConfig = "";
            String topic = "";
            if (args.length == 4) {
                kafkaBootstrapServers = args[0];
                kafkaSaslConfDir = args[1];
                groupIdConfig = args[2];
                topic = args[3];
            } else {
                System.out.println("enter args quantity error, e.g. " +
                        "java -cp <jar> <main class> <kafka bootstrap servers> <sasl config file path> " +
                        "<groupId> <topic>\n");
            }
            System.setProperty("java.security.auth.login.config", kafkaSaslConfDir);
            Properties properties = new Properties();
            properties.setProperty("bootstrap.servers", kafkaBootstrapServers);
            properties.setProperty("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
            properties.setProperty("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
            properties.setProperty("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
            properties.setProperty("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");

            properties.setProperty("security.protocol", "SASL_PLAINTEXT");
            properties.setProperty(SaslConfigs.SASL_MECHANISM, "SCRAM-SHA-256");

            //specify the consumer group
            properties.put(ConsumerConfig.GROUP_ID_CONFIG, groupIdConfig);
            properties.put(ConsumerConfig.SESSION_TIMEOUT_MS_CONFIG, 30000);
            //set how much is fetched per request; this has a significant impact when accessing over the public network
            properties.put(ConsumerConfig.MAX_PARTITION_FETCH_BYTES_CONFIG, 32000);
            properties.put(ConsumerConfig.FETCH_MAX_BYTES_CONFIG, 32000);
            //maximum number of records returned per poll
            //do not set this too high: if a poll returns more data than can be consumed before the next poll, a rebalance is triggered and consumption stalls
            properties.put(ConsumerConfig.MAX_POLL_RECORDS_CONFIG, 30);
            //disable hostname verification by setting the endpoint identification algorithm to empty
            properties.put(SslConfigs.SSL_ENDPOINT_IDENTIFICATION_ALGORITHM_CONFIG, "");
            //auto-commit offsets
//        properties.setProperty(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, ConfigUtils.getInstance().getString("enable.auto.commit"));
            //offset commit interval
//        properties.put(ConsumerConfig.AUTO_COMMIT_INTERVAL_MS_CONFIG, ConfigUtils.getInstance().getString("auto.commit.interval.ms"));
//        properties.put(ConsumerConfig.MAX_POLL_RECORDS_CONFIG, ConfigUtils.getInstance().getString("max.poll.records"));
//        properties.put(ConsumerConfig.SESSION_TIMEOUT_MS_CONFIG, ConfigUtils.getInstance().getString("session.timeout.ms"));
//        properties.put("transaction.timeout.ms", 1000 * 60 * 5 + "");
            consumer = new org.apache.kafka.clients.consumer.KafkaConsumer<>(properties);
            consumer.subscribe(Arrays.asList(topic));

            System.out.println("---------开始Sasl消费---------");

            exeConsumer(consumer);
        }

        public static String sha256_mac(String message, String key) {
            String outPut = null;
            try {
                Mac sha256Hmac = Mac.getInstance("HmacSHA256");
                SecretKeySpec secretKey = new SecretKeySpec(key.getBytes(), "HmacSHA256");
                sha256Hmac.init(secretKey);
                byte[] bytes = sha256Hmac.doFinal(message.getBytes());
                outPut = byteArrayToHexString(bytes);
            } catch (Exception e) {
                System.out.println("Error HmacSHA256========" + e.getMessage());
            }
            return outPut;
        }

        private static String byteArrayToHexString(byte[] b) {
            StringBuilder sb = new StringBuilder();
            String stmp;
            for (int n = 0; b != null && n < b.length; n++) {
                stmp = Integer.toHexString(b[n] & 0XFF);
                if (stmp.length() == 1) {
                    sb.append('0');
                }
                sb.append(stmp);
            }
            return sb.toString().toLowerCase();
        }

//        public static void main(String[] strs) {
//            String consumerGroupId = "";
//            String secretKey = "";
//            System.out.println(sha256_mac(consumerGroupId, secretKey));
//        }
}

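The exeConsumer method comes from the AbstractConsumer base class, which is not included in the post. Purely as an assumption about what such a base class might do, a minimal poll-and-commit loop could look like the sketch below (not the original implementation):

    protected static void exeConsumer(org.apache.kafka.clients.consumer.KafkaConsumer<String, String> consumer) {
        try {
            while (true) {
                // poll blocks up to 1 second and returns at most max.poll.records records
                org.apache.kafka.clients.consumer.ConsumerRecords<String, String> records =
                        consumer.poll(java.time.Duration.ofMillis(1000));
                for (org.apache.kafka.clients.consumer.ConsumerRecord<String, String> record : records) {
                    System.out.println("partition=" + record.partition()
                            + ", offset=" + record.offset()
                            + ", value=" + record.value());
                }
                // commit the offsets of the records that were just processed
                consumer.commitSync();
            }
        } finally {
            consumer.close();
        }
    }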

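Once the class is packaged into a jar and the consumer.conf file shown below is saved locally, the program is launched as described by the usage string in main, for example: java -cp <jar> cn.cuiot.dmp.rocketmq.AclConsumerMain kafka.cuiot.cn:9093,kafka.cuiot.cn:9193,kafka.cuiot.cn:9293 C:\data\consumer.conf <groupId> <topic> (the group id and topic are left as placeholders here, as in the original).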
consumer.conf

KafkaClient {
    org.apache.kafka.common.security.scram.ScramLoginModule required
    username="918593438985748480"
    password="6a4b5dbc795aa5eb3ad0be248f308c7bba4b230cdcce9aee4a5f62b810abadb0";
};
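The main method wires this file in through the java.security.auth.login.config system property. As an alternative that is not part of the original code, recent kafka-clients versions also accept the JAAS entry inline via the sasl.jaas.config consumer property; a sketch using the same credentials:

            // inline alternative to the external consumer.conf JAAS file
            properties.put(SaslConfigs.SASL_JAAS_CONFIG,
                    "org.apache.kafka.common.security.scram.ScramLoginModule required "
                            + "username=\"918593438985748480\" "
                            + "password=\"6a4b5dbc795aa5eb3ad0be248f308c7bba4b230cdcce9aee4a5f62b810abadb0\";");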
