export KAFKA_CLIENT_KERBEROS_PARAMS="-Djava.security.auth.login.config=/usr/hdp/current/kafka-broker/config/kafka_client_jaas.conf"
Use this jaas conf file
cat /usr/hdp/current/kafka-broker/config/kafka_client_jaas.conf
KafkaClient {
com.sun.security.auth.module.Krb5LoginModule required
useTicketCache=true
renewTicket=true
debug=true
serviceName="kafka";
};
Create a topic testacl
./kafka-topics.sh --create --zookeeper hdp265secure3.mycluster:2181 --replication-factor 2 --partitions 3 --topic testacl
Start Kafka Console Consumer
./kafka-console-consumer.sh --bootstrap-server hdp265secure3.mycluster:6667 --topic testacl --from-beginning --security-protocol PLAINTEXTSASL
Start Kafka Console producer and produce some messages
./kafka-console-producer.sh --broker-list hdp265secure3.mycluster:6667 --topic testacl --security-protocol PLAINTEXTSASL
You should now see the new messages appear in the Kafka console consumer
cat /usr/hdp/current/kafka-broker/config/kafka_client_jaas.conf
KafkaClient {
com.sun.security.auth.module.Krb5LoginModule required
doNotPrompt=true
useTicketCache=false
principal="kafka/[email protected]"
useKeyTab=true
serviceName="kafka"
keyTab="/etc/security/keytabs/kafka.service.keytab"
client=true;
};
Start Kafka Console Consumer
./kafka-console-consumer.sh --bootstrap-server hdp265secure3.mycluster:6667 --topic testacl --from-beginning --security-protocol PLAINTEXTSASL
Start Kafka Console producer and produce some messages
./kafka-console-producer.sh --broker-list hdp265secure3.mycluster:6667 --topic testacl --security-protocol PLAINTEXTSASL
You should now see the new messages appear in the console consumer
Login to KDC node as root user and run below command to create a new principal producer-user
kadmin.local -q "addprinc producer-user"
Create producer-user.keytab
and transfer this file to where you are going to run console producer
kadmin.local -q "ktadd -k /etc/security/keytabs/producer-user.keytab producer-user"
Login to the node, where you are going to run console producer and kinit as user kafka
Change kafka_client_jaas.conf
as shown below
KafkaClient {
com.sun.security.auth.module.Krb5LoginModule required
useTicketCache=true
renewTicket=true
debug=true
serviceName="kafka";
};
Give Kafka produce ACL rights to producer-user
./kafka-acls.sh --authorizer kafka.security.auth.SimpleAclAuthorizer --authorizer-properties zookeeper.connect=hdp265secure3.mycluster:2181 --add --allow-principal User:producer-user --producer --topic testacl
kinit as producer-user: kinit producer-user
Start Kafka producer
./kafka-console-producer.sh --broker-list hdp265secure3.mycluster:6667 --topic testacl --security-protocol PLAINTEXTSASL
Login to KDC node as root user and run below command to create a new principal consumer-user
kadmin.local -q "addprinc consumer-user"
Create consumer-user.keytab
kadmin.local -q "ktadd -k /etc/security/keytabs/consumer-user.keytab consumer-user"
kinit as the kafka user
Change kafka_client_jaas.conf
as shown below
KafkaClient {
com.sun.security.auth.module.Krb5LoginModule required
useTicketCache=true
renewTicket=true
debug=true
serviceName="kafka";
};
Give Kafka consumer ACL rights to consumer-user
./kafka-acls.sh --authorizer kafka.security.auth.SimpleAclAuthorizer --authorizer-properties zookeeper.connect=hdp265secure3.mycluster:2181 --add --allow-principal User:consumer-user --consumer --topic testacl --group '*'
kinit as consumer-user: kinit consumer-user
Start Kafka Console Consumer
./kafka-console-consumer.sh --bootstrap-server hdp265secure3.mycluster:6667 --topic testacl --from-beginning --security-protocol PLAINTEXTSASL
KafkaClient {
com.sun.security.auth.module.Krb5LoginModule required
doNotPrompt=true
useTicketCache=false
principal="[email protected]"
useKeyTab=true
serviceName="kafka"
keyTab="/etc/security/keytabs/client-keytabs/producer-user.keytab"
client=true;
};
./kafka-console-producer.sh --broker-list hdp265secure3.mycluster:6667 --topic testacl --security-protocol PLAINTEXTSASL
KafkaClient {
com.sun.security.auth.module.Krb5LoginModule required
doNotPrompt=true
useTicketCache=false
principal="[email protected]"
useKeyTab=true
serviceName="kafka"
keyTab="/etc/security/keytabs/client-keytabs/consumer-user.keytab"
client=true;
};
Start Consumer
./kafka-console-consumer.sh --bootstrap-server hdp265secure3.mycluster:6667 --topic testacl --from-beginning --security-protocol PLAINTEXTSASL
Refer : https://github.com/vinodkc/KafkaExperiments/tree/master/KafkaSecureClientDemo
Copy the keytab files of producer-user and consumer-user, the krb5.conf, and the client JAAS file from the cluster onto a node outside the cluster
Try Producer and Consumer test
java -Djava.security.auth.login.config=./kafka_client_jaas.conf -Djava.security.krb5.conf=./krb5.conf -cp ./target/KafkaSecureClientDemo-1.0-SNAPSHOT-jar-with-dependencies.jar com.vkc.SecureProducer testacl hdp265secure3.mycluster:6667 1000
java -Djava.security.auth.login.config=./kafka_consumer_jaas.conf -Djava.security.krb5.conf=./krb5.conf -cp ./target/KafkaSecureClientDemo-1.0-SNAPSHOT-jar-with-dependencies.jar com.vkc.SecureConsumer testacl hdp265secure3.mycluster:6667
Reference
-
https://henning.kropponline.de/2014/10/05/kerberized-hadoop-cluster-sandbox-example/
-
https://henning.kropponline.de/2015/11/15/kafka-security-with-kerberos/
Generate SSL key and certificate for each Kafka broker in 3 nodes cluster
login to node 1
keytool -keystore server.keystore.jks -alias localhost -validity 365 -genkey -keyalg RSA
verify content, if required
keytool -list -v -keystore server.keystore.jks
login to node 2
keytool -keystore server.keystore.jks -alias localhost -validity 365 -genkey -keyalg RSA
login to node 3
keytool -keystore server.keystore.jks -alias localhost -validity 365 -genkey -keyalg RSA
from any node/(or in your laptop) create CA certificate to sign above ssl certificates
openssl req -new -x509 -keyout ca-key -out ca-cert -days 365
Now we have a private key, ca-key, and a certificate, ca-cert, which together identify the CA
Create 2 trust store files client.truststore.jks
& server.truststore.jks
and import Root CA
cert
keytool -keystore client.truststore.jks -alias CARoot -import -file ca-cert
keytool -keystore server.truststore.jks -alias CARoot -import -file ca-cert
copy ca-cert , ca-key
and server.truststore.jks
into all 3 nodes
Sign the certificate
login to node 1
export the certificate from the keystore to sign it using CA certificate (eg: ca-cert
)
keytool -keystore server.keystore.jks -alias localhost -certreq -file cert-file
Then sign exported cert-file
with the CA
openssl x509 -req -CA ca-cert -CAkey ca-key -in cert-file -out cert-signed -days {validity} -CAcreateserial -passin pass:{ca-password}
Note: fill {validity}
and {ca-password}
List of files
-rw-r--r--. 1 root root 1273 Jun 22 07:17 ca-cert
-rw-r--r--. 1 root root 17 Jun 22 07:32 ca-cert.srl
-rw-r--r--. 1 root root 1858 Jun 22 07:26 ca-key
-rw-r--r--. 1 root root 1105 Jun 22 07:31 cert-file
-rw-r--r--. 1 root root 1249 Jun 22 07:32 cert-signed
-rw-r--r--. 1 root root 4078 Jun 22 07:36 server.keystore.jks
-rw-r--r--. 1 root root 963 Jun 22 07:46 server.truststore.jks
Import the certificate of the CA
and the signed certificate
into the keystore
keytool -keystore server.keystore.jks -alias CARoot -import -file ca-cert
keytool -keystore server.keystore.jks -alias localhost -import -file cert-signed
The definitions of the parameters are the following:
keystore: the location of the keystore
ca-cert: the certificate of the CA
ca-key: the private key of the CA
ca-password: the passphrase of the CA
cert-file: the exported, unsigned certificate of the server
cert-signed: the signed certificate of the server
Move server.keystore.jks
& server.truststore.jks
into /var/private/ssl/
mkdir -p /var/private/ssl/ && mv server.*store.jks /var/private/ssl
login to node 2
export the certificate from the keystore to sign it using CA
certificate (eg: ca-cert
)
keytool -keystore server.keystore.jks -alias localhost -certreq -file cert-file
Then sign exported cert-file
with the CA
openssl x509 -req -CA ca-cert -CAkey ca-key -in cert-file -out cert-signed -days {validity} -CAcreateserial -passin pass:{ca-password}
Import both the certificate of the CA
and the signed certificate into the keystore
keytool -keystore server.keystore.jks -alias CARoot -import -file ca-cert
keytool -keystore server.keystore.jks -alias localhost -import -file cert-signed
Move server.keystore.jks
& server.truststore.jks
into /var/private/ssl/
mkdir -p /var/private/ssl/ && mv server.*store.jks /var/private/ssl
login to node 3
export the certificate from the keystore to sign it using CA certificate (eg: ca-cert
)
keytool -keystore server.keystore.jks -alias localhost -certreq -file cert-file
Then sign exported cert-file
with the CA
openssl x509 -req -CA ca-cert -CAkey ca-key -in cert-file -out cert-signed -days {validity} -CAcreateserial -passin pass:{ca-password}
Import both the certificate of the CA and the signed certificate into the keystore
keytool -keystore server.keystore.jks -alias CARoot -import -file ca-cert
keytool -keystore server.keystore.jks -alias localhost -import -file cert-signed
Move server.keystore.jks & server.truststore.jks into /var/private/ssl/
mkdir -p /var/private/ssl/ && mv server.*store.jks /var/private/ssl
SSL setup in Kafka
In the Kafka config in Ambari, replace the value of listeners with PLAINTEXT://localhost:6667,SASL_SSL://localhost:6668
In Custom kafka-broker section, add
ssl.keystore.location=/var/private/ssl/server.keystore.jks
ssl.keystore.password=test1234
ssl.key.password=test1234
ssl.truststore.location=/var/private/ssl/server.truststore.jks
ssl.truststore.password=test1234
Save and restart kafka
test ssl setup using
openssl s_client -debug -connect hdp265secure3.mycluster:6668 -tls1
output:
Certificate chain
.....
Server certificate
-----BEGIN CERTIFICATE-----
....
-----END CERTIFICATE-----
If the certificate does not show up or if there are any other error messages then your keystore is not setup properly.
Optional setting,If you want to enable SSL for inter-broker communication
security.inter.broker.protocol=SASL_SSL
cat client-ssl.properties
security.protocol=SASL_SSL
ssl.truststore.location=/var/private/ssl/client.truststore.jks
ssl.truststore.password=test1234
Start Consumer
./kafka-console-consumer.sh --bootstrap-server hdp265secure2.mycluster:6668 --topic testacl --consumer.config /tmp/ssl/client-ssl.properties
Start Producer
./kafka-console-producer.sh --broker-list hdp265secure3.mycluster:6668 --topic testacl --producer.config /tmp/ssl/client-ssl.properties --security-protocol SASL_SSL
Note: Due to a bug in kafka-console-producer.sh, need to pass --security-protocol SASL_SSL
even though security.protocol=SASL_SSL
exists in client-ssl.properties
Build : https://github.com/vinodkc/KafkaExperiments/tree/master/KafkaSecureClientDemo
Start SASL_SSL Producer
java -Djava.security.auth.login.config=./kafka_client_jaas.conf -Djava.security.krb5.conf=./krb5.conf -cp ./target/KafkaSecureClientDemo-1.0-SNAPSHOT-jar-with-dependencies.jar com.vkc.SecureSSLProducer testacl hdp265secure3.mycluster:6668 10 ./client.truststore.jks test1234
Start SASL_SSL Consumer
java -Djava.security.auth.login.config=./kafka_consumer_jaas.conf -Djava.security.krb5.conf=./krb5.conf -cp ./target/KafkaSecureClientDemo-1.0-SNAPSHOT-jar-with-dependencies.jar com.vkc.SecureSSLConsumer testacl hdp265secure3.mycluster:6668 ./client.truststore.jks test1234
Reference