The Kafka version here is 0.10 (the pom below pulls in 0.10.1.0); it is a bit dated, but the approach is much the same on newer versions.
Once Kerberos authentication is enabled on Kafka, how do you produce and consume data through the Java API? It really just comes down to adding the authentication-related configuration (jaas.conf, keytab, and so on) to the producer and consumer code. Let's go straight to the code:
1. Connecting to a Kerberized cluster is actually quite simple; you need the following three files:
1) The Kerberos server configuration file krb5.conf, which tells the program which KDC to authenticate against:
[libdefaults]
  udp_preference_limit = 1
  renew_lifetime = 3650d
  forwardable = true
  default_realm = CHINAUNICOM
  ticket_lifetime = 3650d
  dns_lookup_realm = false
  dns_lookup_kdc = false
  default_ccache_name = /tmp/krb5cc_%{uid}
  #default_tgs_enctypes = aes des3-cbc-sha1 rc4 des-cbc-md5
  #default_tkt_enctypes = aes des3-cbc-sha1 rc4 des-cbc-md5

[domain_realm]
  .CHINAUNICOM = CHINAUNICOM

[logging]
  default = FILE:/var/log/krb5kdc.log
  admin_server = FILE:/var/log/kadmind.log
  kdc = FILE:/var/log/krb5kdc.log

[realms]
  CHINAUNICOM = {
    admin_server = master98.hadoop.ljs
    kdc = master98.hadoop.ljs
  }
2) Authentication needs a login mechanism, specified in a jaas.conf file; there is usually one under the cluster's conf directory:
KafkaClient {
  com.sun.security.auth.module.Krb5LoginModule required
  useKeyTab=true
  keyTab="D:\\kafkaSSL\\kafka.service.keytab"
  storeKey=true
  useTicketCache=false
  principal="kafka/salver32.hadoop.unicom@CHINAUNICOM"
  serviceName=kafka;
};
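One detail worth noting: serviceName=kafka in this file and the sasl.kerberos.service.name property set in the code below both name the Kerberos principal the brokers run as; if both are specified they must agree, otherwise the client rejects the configuration at startup.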
3) The user's login ticket and keytab file; the ticket and the keytab itself are not pasted here.
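Before touching Kafka at all, it can help to confirm that these three pieces actually authenticate. The following is a minimal sketch, not part of the original walkthrough, that uses only the standard JDK JAAS API; the file paths are the same assumed D:\kafkaSSL locations used in the examples below:
package com.hadoop.ljs.kafka010.security;
import javax.security.auth.login.LoginContext;
import javax.security.auth.login.LoginException;
public class KerberosLoginCheck {
    public static void main(String[] args) {
        //Same assumed paths as the producer/consumer examples below
        System.setProperty("java.security.krb5.conf", "D:\\kafkaSSL\\krb5.conf");
        System.setProperty("java.security.auth.login.config", "D:\\kafkaSSL\\kafka_client_jaas.conf");
        try {
            //"KafkaClient" must match the entry name in jaas.conf
            LoginContext lc = new LoginContext("KafkaClient");
            lc.login();//performs the keytab login; throws LoginException on failure
            System.out.println("Kerberos login OK: " + lc.getSubject().getPrincipals());
            lc.logout();
        } catch (LoginException e) {
            e.printStackTrace();
        }
    }
}
If this logs in cleanly, any remaining failures in the producer or consumer are Kafka configuration problems rather than Kerberos ones.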
2. Dependencies in pom.xml; any you don't need can be removed:
<properties>
    <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
    <maven.compiler.source>1.8</maven.compiler.source>
    <maven.compiler.target>1.8</maven.compiler.target>
    <log4j.version>1.2.17</log4j.version>
    <slf4j.version>1.7.22</slf4j.version>
</properties>
<dependencies>
    <dependency>
        <groupId>org.apache.kafka</groupId>
        <artifactId>kafka_2.11</artifactId>
        <version>0.10.1.0</version>
    </dependency>
    <dependency>
        <groupId>net.sf.json-lib</groupId>
        <artifactId>json-lib</artifactId>
        <version>2.4</version>
        <classifier>jdk15</classifier>
    </dependency>
</dependencies>
3. Java producer sending messages, code example:
package com.hadoop.ljs.kafka010.security;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerRecord;
import java.util.Date;
import java.util.Properties;
/**
* @author: Created By lujisen
* @company ChinaUnicom Software JiNan
* @date: 2020-02-28 15:47
* @version: v1.0
* @description: com.hadoop.ljs.kafka010.security
*/
public class KafkaKerberosProducer3 {
public static final String krb5Conf="D:\\kafkaSSL\\krb5.conf";
public static final String kafkaJaasConf="D:\\kafkaSSL\\kafka_client_jaas.conf";
public static final String bootstrapServers="salver31.hadoop.ljs:6667,salver32.hadoop.ljs:6667";
public static final String topic="topic1";
private static long count =5;
public static void main(String[] args) {
//Required for Kerberos authentication
System.setProperty("java.security.krb5.conf", krb5Conf);
System.setProperty("java.security.auth.login.config", kafkaJaasConf);
Properties props = new Properties();
props.put("bootstrap.servers", bootstrapServers);
props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
props.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");
//The following three lines are required for Kerberos authentication
props.put("security.protocol", "SASL_PLAINTEXT");
props.put("sasl.kerberos.service.name", "kafka");
props.put("sasl.mechanism", "GSSAPI");
KafkaProducer<String,String> producer = new KafkaProducer<String, String>(props);
int i=1;
while (true){
String message = "{\"id\":" + i + ",\"ip\":\"192.168.0." + i + "\",\"date\":\"" + new Date().toString() + "\"}";
System.out.println(message);
producer.send(new ProducerRecord<String, String>(topic, message));
try {
Thread.sleep(200);
} catch (InterruptedException e) {
e.printStackTrace();
}
if(i++>count){
break;
}
}
//Close the producer so records buffered by the async send are flushed before exit
producer.close();
}
}
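The send() above is fire-and-forget: if the broker rejects the producer (for example, an authorization failure on the topic), nothing appears on the console. A small variation, sketched here rather than taken from the original code, attaches a Callback so every send reports either the assigned offset or the exact exception:
import org.apache.kafka.clients.producer.Callback;
import org.apache.kafka.clients.producer.RecordMetadata;
//Inside the loop, replace the fire-and-forget send with:
producer.send(new ProducerRecord<String, String>(topic, message), new Callback() {
    @Override
    public void onCompletion(RecordMetadata metadata, Exception exception) {
        if (exception != null) {
            //Authentication and authorization failures surface here
            exception.printStackTrace();
        } else {
            System.out.println("Sent to " + metadata.topic() + "-" + metadata.partition()
                    + " at offset " + metadata.offset());
        }
    }
});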
4. Java consumer receiving messages, code example:
package com.hadoop.ljs.kafka010.security;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import java.util.Arrays;
import java.util.Properties;
/**
* @author: Created By lujisen
* @company ChinaUnicom Software JiNan
* @date: 2020-02-28 15:04
* @version: v1.0
* @description: com.hadoop.ljs.kafka010.security
*/
public class KafkaKerberosConsumer {
public static final String krb5Conf="D:\\kafkaSSL\\krb5.conf";
public static final String kafkaJaasConf="D:\\kafkaSSL\\kafka_client_jaas.conf";
public static final String bootstrapServers="salver31.hadoop.ljs:6667,salver32.hadoop.ljs:6667";
public static final String topic="topic1";
public static final String consumerGroup="group_topic1";
public static void main(String[] args) {
/*The following two lines are required for Kerberos authentication*/
System.setProperty("java.security.krb5.conf", krb5Conf);
System.setProperty("java.security.auth.login.config", kafkaJaasConf);
Properties props = new Properties();
props.put("bootstrap.servers", bootstrapServers);
props.put("group.id", comsumerGroup);
props.put("enable.auto.commit", "false");
props.put("auto.commit.interval.ms", "1000");
props.put("auto.offset.reset", "earliest");
props.put("session.timeout.ms", "30000");
props.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
props.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
/*The following three properties are required for Kerberos authentication*/
props.put("security.protocol", "SASL_PLAINTEXT");
props.put("sasl.mechanism", "GSSAPI");
props.put("sasl.kerberos.service.name", "kafka");
KafkaConsumer<String, String> kafkaConsumer = new KafkaConsumer<String, String>(props);
kafkaConsumer.subscribe(Arrays.asList(topic));
while (true) {
//Block for up to 1000 ms waiting for records; poll(1) would busy-spin the CPU
ConsumerRecords<String, String> records = kafkaConsumer.poll(1000);
for (ConsumerRecord<String, String> record : records)
System.out.println("Partition: " + record.partition() + " Offset: " + record.offset() + " Value: " + record.value() + " ThreadID: " + Thread.currentThread().getId());
//enable.auto.commit is false, so commit the consumed offsets explicitly
kafkaConsumer.commitSync();
}
}
}
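As written, the consumer can only be killed, which skips the final commit and leaves the group waiting for session.timeout.ms before rebalancing. Here is a hedged sketch, not part of the original article, of the standard shutdown pattern for this client version: wakeup() is the one consumer method that is safe to call from another thread, and it makes a blocked poll() throw WakeupException:
import org.apache.kafka.common.errors.WakeupException;
//Sketch only: assumes the kafkaConsumer built in the example above
final Thread mainThread = Thread.currentThread();
Runtime.getRuntime().addShutdownHook(new Thread() {
    @Override
    public void run() {
        kafkaConsumer.wakeup();//interrupts the blocked poll()
        try {
            mainThread.join();//wait for the poll loop to commit and close
        } catch (InterruptedException e) {
            e.printStackTrace();
        }
    }
});
try {
    while (true) {
        ConsumerRecords<String, String> records = kafkaConsumer.poll(1000);
        for (ConsumerRecord<String, String> record : records)
            System.out.println("Offset: " + record.offset() + " Value: " + record.value());
        kafkaConsumer.commitSync();
    }
} catch (WakeupException e) {
    //Expected during shutdown; fall through to close
} finally {
    kafkaConsumer.close();//leaves the group cleanly so the rebalance is immediate
}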