Sunday, 14 August 2016

Kerberos Installation and Configuration to Access Hadoop (CDH 5.x)



Install Kerberos:

1) Install the Kerberos packages.

On client nodes:

# yum install krb5-libs krb5-auth-dialog krb5-workstation

On the KDC server node:

# yum install krb5-server krb5-libs krb5-auth-dialog krb5-workstation

2) On all nodes, edit /etc/krb5.conf

[root@cdh084 krb5kdc]# cat /etc/krb5.conf
[logging]
 default = FILE:/var/log/krb5libs.log
 kdc = FILE:/var/log/krb5kdc.log
 admin_server = FILE:/var/log/kadmind.log

[libdefaults]
 default_realm = TUXHUB.COM
 dns_lookup_realm = false
 dns_lookup_kdc = false
 ticket_lifetime = 24h
 renew_lifetime = 7d
 forwardable = true

[realms]
 TUXHUB.COM = {
  kdc = cdh084.tuxhub.com
  admin_server = cdh084.tuxhub.com
 }

[domain_realm]
 .tuxhub.com = TUXHUB.COM
 tuxhub.com = TUXHUB.COM
[root@cdh084 krb5kdc]#

3) Edit the Kerberos ACL file. The entry below grants full admin privileges to any */admin principal in the realm.

[root@cdh084 krb5kdc]# cat  /var/kerberos/krb5kdc/kadm5.acl
*/admin@TUXHUB.COM      *
[root@cdh084 krb5kdc]#

4) Kerberos database (KDC) configuration

[root@cdh084 krb5kdc]# cat  /var/kerberos/krb5kdc/kdc.conf
[kdcdefaults]
 kdc_ports = 88
 kdc_tcp_ports = 88

[realms]
 TUXHUB.COM = {
  #master_key_type = aes256-cts
  acl_file = /var/kerberos/krb5kdc/kadm5.acl
  dict_file = /usr/share/dict/words
  admin_keytab = /var/kerberos/krb5kdc/kadm5.keytab
  supported_enctypes = aes256-cts:normal aes128-cts:normal des3-hmac-sha1:normal arcfour-hmac:normal des-hmac-sha1:normal des-cbc-md5:normal des-cbc-crc:normal
 }
[root@cdh084 krb5kdc]#

5) Create the KDC database (the -s flag stashes the master key so the KDC can start without prompting for it)

[root@cdh084 ~]# kdb5_util create -r TUXHUB.COM -s
Loading random data
Initializing database '/var/kerberos/krb5kdc/principal' for realm 'TUXHUB.COM',
master key name 'K/M@TUXHUB.COM'
You will be prompted for the database Master Password.
It is important that you NOT FORGET this password.
Enter KDC database master key: master
Re-enter KDC database master key to verify: master

[root@cdh084 ~]#

6) Create the admin principal. This first user principal must be created on the KDC server itself, while logged in as root:

[root@cdh084 ~]# kadmin.local
Authenticating as principal root/admin@TUXHUB.COM with password.
kadmin.local:  addprinc root/admin
WARNING: no policy specified for root/admin@TUXHUB.COM; defaulting to no policy
Enter password for principal "root/admin@TUXHUB.COM": admin
Re-enter password for principal "root/admin@TUXHUB.COM": admin
Principal "root/admin@TUXHUB.COM" created.

7) Start services

[root@cdh084 krb5kdc]# service kadmin start
Starting Kerberos 5 Admin Server:                          [  OK  ]
[root@cdh084 krb5kdc]# service krb5kdc start
Starting Kerberos 5 KDC:                                   [  OK  ]
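
With the KDC and kadmin services running, a quick sanity check (a minimal sketch, using the root/admin principal created in step 6) is to obtain and then discard a ticket:

# kinit root/admin     # prompts for the admin password set above
# klist                # should list a krbtgt/TUXHUB.COM@TUXHUB.COM ticket
# kdestroy             # discard the test ticket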

8) Create service principals for every host (change the host name accordingly; a looping sketch for all hosts follows the commands below).

[root@cdh084 ~]# kadmin
kadmin:  add_principal -randkey hdfs/cdh084.tuxhub.com@TUXHUB.COM
kadmin:  add_principal -randkey mapred/cdh084.tuxhub.com@TUXHUB.COM
kadmin:  add_principal -randkey HTTP/cdh084.tuxhub.com@TUXHUB.COM
kadmin:  add_principal -randkey yarn/cdh084.tuxhub.com@TUXHUB.COM
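
For a multi-node cluster, the same four principals are needed for every host. A minimal sketch run on the KDC, assuming hypothetical host names (replace the list with your cluster's nodes):

# run as root on the KDC; kadmin.local needs no password
for host in cdh081.tuxhub.com cdh084.tuxhub.com cdh085.tuxhub.com; do
  for svc in hdfs mapred yarn HTTP; do
    kadmin.local -q "add_principal -randkey ${svc}/${host}@TUXHUB.COM"
  done
done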

9) Create keytab files, adding every host's principals

[root@cdh084 ~]# kadmin
kadmin:  xst -k /tmp/hdfs.keytab hdfs/cdh085.tuxhub.com@TUXHUB.COM HTTP/cdh085.tuxhub.com@TUXHUB.COM    (add the principals for all hosts)
kadmin:  xst -k /tmp/mapred.keytab mapred/cdh085.tuxhub.com@TUXHUB.COM HTTP/cdh085.tuxhub.com@TUXHUB.COM    (add the principals for all hosts)
kadmin:  xst -k /tmp/yarn.keytab yarn/cdh085.tuxhub.com@TUXHUB.COM HTTP/cdh085.tuxhub.com@TUXHUB.COM    (add the principals for all hosts)
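
Before distributing the keytabs, confirm they contain the expected principals:

[root@cdh084 ~]# klist -kt /tmp/hdfs.keytab
[root@cdh084 ~]# klist -kt /tmp/mapred.keytab
[root@cdh084 ~]# klist -kt /tmp/yarn.keytab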


10) Permissions
[root@cdh084 keytab]# cp -pav /tmp/*.keytab /etc/hadoop/conf
[root@cdh084 keytab]# chown hdfs:hadoop /etc/hadoop/conf/hdfs.keytab
[root@cdh084 keytab]# chown mapred:hadoop /etc/hadoop/conf/mapred.keytab
[root@cdh084 keytab]# chown yarn:hadoop /etc/hadoop/conf/yarn.keytab
[root@cdh084 keytab]# chmod 400 /etc/hadoop/conf/hdfs.keytab /etc/hadoop/conf/mapred.keytab /etc/hadoop/conf/yarn.keytab
[root@cdh084 keytab]#
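
The keytabs, with the same ownership and permissions, are needed on every node that runs the corresponding daemons. A minimal sketch, assuming hypothetical host names and root SSH access (repeat the chown/chmod from this step on each target host):

# replace with your actual cluster nodes
for host in cdh081.tuxhub.com cdh085.tuxhub.com; do
  scp -p /etc/hadoop/conf/*.keytab ${host}:/etc/hadoop/conf/
done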

11) Edit core-site.xml

<configuration>

<property>
 <name>fs.defaultFS</name>
 <value>hdfs://cdh081.tuxhub.com:8020</value>
</property>


<property>
 <name>hadoop.proxyuser.mapred.groups</name>
 <value>*</value>
</property>

<property>
 <name>hadoop.proxyuser.mapred.hosts</name>
 <value>*</value>
</property>

<property>
<name>hadoop.proxyuser.httpfs.hosts</name>
<value>*</value>
</property>
<property>
<name>hadoop.proxyuser.httpfs.groups</name>
<value>*</value>
</property>

<property>
  <name>hadoop.security.authentication</name>
    <value>kerberos</value>
</property>

<property>
  <name>hadoop.security.authorization</name>
    <value>true</value>
</property>


</configuration>

12) Edit hdfs-site.xml

<configuration>
  <property>
     <name>dfs.namenode.name.dir</name>
     <value>file:///var/lib/hadoop-hdfs/cache/hdfs/dfs/name</value>
  </property>

  <property>
     <name>dfs.permissions.superusergroup</name>
     <value>hadoop</value>
  </property>


  <property>
     <name>dfs.datanode.data.dir</name>
     <value>file:///home/data/1/dfs/dn</value>
  </property>

 <property>
   <name>dfs.webhdfs.enabled</name>
    <value>true</value>
 </property>


<!-- SECURITY -->

<!-- General HDFS security config -->
<property>
  <name>dfs.block.access.token.enable</name>
  <value>true</value>
</property>

<!-- NameNode security config -->
<property>
  <name>dfs.namenode.keytab.file</name>
  <value>/etc/hadoop/conf/hdfs.keytab</value> <!-- path to the HDFS keytab -->
</property>
<property>
  <name>dfs.namenode.kerberos.principal</name>
  <value>hdfs/_HOST@TUXHUB.COM</value>
</property>
<property>
  <name>dfs.namenode.kerberos.internal.spnego.principal</name>
  <value>HTTP/_HOST@TUXHUB.COM</value>
</property>

<!-- DataNode security config -->
<property>
  <name>dfs.datanode.data.dir.perm</name>
  <value>700</value>
</property>
<property>
  <name>dfs.datanode.address</name>
  <value>cdh081.tuxhub.com:1004</value>
</property>
<property>
  <name>dfs.datanode.http.address</name>
  <value>cdh081.tuxhub.com:1006</value>
</property>
<property>
  <name>dfs.datanode.keytab.file</name>
  <value>/etc/hadoop/conf/hdfs.keytab</value> <!-- path to the HDFS keytab -->
</property>
<property>
  <name>dfs.datanode.kerberos.principal</name>
  <value>hdfs/_HOST@TUXHUB.COM</value>
</property>

<!-- Web Authentication config -->
<property>
  <name>dfs.web.authentication.kerberos.principal</name>
  <value>HTTP/_HOST@TUXHUB.COM</value>
 </property>

<property>
<name>dfs.http.policy</name>
<value>HTTPS_ONLY</value>
</property>

</configuration>
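
Note that _HOST in the principal values is expanded by Hadoop at runtime to each node's fully qualified hostname, so the same hdfs-site.xml can be distributed to every node. Because dfs.http.policy is HTTPS_ONLY, the DataNode web endpoint is served over HTTPS; if you want to set its bind address explicitly, something like the following can be added (a sketch; the port shown is the usual default):

<property>
  <name>dfs.datanode.https.address</name>
  <value>0.0.0.0:50475</value>
</property>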

13) Secure DataNodes

vim /etc/default/hadoop-hdfs-datanode

export HADOOP_SECURE_DN_USER=hdfs
export HADOOP_SECURE_DN_PID_DIR=/var/lib/hadoop-hdfs
export HADOOP_SECURE_DN_LOG_DIR=/var/log/hadoop-hdfs
export JSVC_HOME=/usr/lib/bigtop-utils/
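
These settings make the DataNode start through jsvc: it binds the privileged ports (1004/1006 above) as root and then drops privileges to the hdfs user. If jsvc is not already installed, it is provided by the CDH bigtop packages (the exact package name and path may differ from the sketch below):

# yum install bigtop-jsvc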

14) Create the SSL keystore and truststore

Note: the certificate CN should be the node's fully qualified hostname, and the store/key passwords must match the values configured in ssl-server.xml and ssl-client.xml below (here "keystore").

 keytool -genkey -alias replserver -keyalg RSA -keystore keystore.jks -dname "cn=$(hostname -f), ou=IT, o=TUXHUB, c=US" -storepass keystore -keypass keystore
 keytool -export -alias replserver -file client.cer -keystore keystore.jks -storepass keystore
 keytool -import -v -trustcacerts -noprompt -alias replserver -file client.cer -keystore truststore.ts -storepass keystore

 Copy the SSL files

 cp -pav keystore.jks truststore.ts /etc/hadoop/conf/
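
To confirm the stores were created and can be opened with the configured password:

 keytool -list -keystore /etc/hadoop/conf/keystore.jks -storepass keystore
 keytool -list -keystore /etc/hadoop/conf/truststore.ts -storepass keystore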

15) Hadoop SSL config

vim /etc/hadoop/conf/ssl-server.xml

<configuration>

<property>
  <name>ssl.server.truststore.location</name>
  <value>/etc/hadoop/conf/truststore.ts</value>
</property>

<property>
  <name>ssl.server.truststore.password</name>
  <value>keystore</value>
</property>

<property>
  <name>ssl.server.truststore.type</name>
  <value>jks</value>
</property>

<property>
  <name>ssl.server.truststore.reload.interval</name>
  <value>10000</value>
  <description>Truststore reload interval in milliseconds; the default is 10000 (10 seconds).</description>
</property>

<property>
  <name>ssl.server.keystore.location</name>
  <value>/etc/hadoop/conf/keystore.jks</value>
</property>

<property>
  <name>ssl.server.keystore.password</name>
  <value>keystore</value>
</property>

<property>
  <name>ssl.server.keystore.keypassword</name>
  <value>keystore</value>
</property>

<property>
  <name>ssl.server.keystore.type</name>
  <value>jks</value>
</property>

<property>
  <name>ssl.server.exclude.cipher.list</name>
  <value>TLS_ECDHE_RSA_WITH_RC4_128_SHA,SSL_DHE_RSA_EXPORT_WITH_DES40_CBC_SHA,
  SSL_RSA_WITH_DES_CBC_SHA,SSL_DHE_RSA_WITH_DES_CBC_SHA,
  SSL_RSA_EXPORT_WITH_RC4_40_MD5,SSL_RSA_EXPORT_WITH_DES40_CBC_SHA,
  SSL_RSA_WITH_RC4_128_MD5</value>
</property>

</configuration>

# /etc/hadoop/conf/ssl-client.xml

<configuration>

<property>
  <name>ssl.client.truststore.location</name>
  <value>/etc/hadoop/conf/truststore.ts</value>
</property>

<property>
  <name>ssl.client.truststore.password</name>
  <value>keystore</value>
</property>

<property>
  <name>ssl.client.truststore.type</name>
  <value>jks</value>
</property>

<property>
  <name>ssl.client.truststore.reload.interval</name>
  <value>10000</value>
  <description>Truststore reload interval in milliseconds; the default is 10000 (10 seconds).</description>
</property>

<property>
  <name>ssl.client.keystore.location</name>
  <value>/etc/hadoop/conf/keystore.jks</value>
</property>

<property>
  <name>ssl.client.keystore.password</name>
  <value>keystore</value>
</property>

<property>
  <name>ssl.client.keystore.keypassword</name>
  <value>keystore</value>
</property>

<property>
  <name>ssl.client.keystore.type</name>
  <value>jks</value>
</property>

</configuration>
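
Since ssl-server.xml and the keystore contain passwords and private key material, it is worth restricting them (a sketch; assumes the standard hadoop group so the daemons can still read the files):

 chown root:hadoop /etc/hadoop/conf/ssl-server.xml /etc/hadoop/conf/keystore.jks
 chmod 440 /etc/hadoop/conf/ssl-server.xml /etc/hadoop/conf/keystore.jks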

16) Start the NameNode

# service hadoop-hdfs-namenode restart

17) Start the DataNode

# service hadoop-hdfs-datanode restart

18) On any node, obtain a ticket from the keytab (run as the hdfs user, since the keytab is readable only by hdfs)

kinit -kt /etc/hadoop/conf/hdfs.keytab hdfs/cdh084.tuxhub.com@TUXHUB.COM

19) Verify the ticket with klist

-bash-4.1$ klist
Ticket cache: FILE:/tmp/krb5cc_496
Default principal: hdfs/cdh084.tuxhub.com@TUXHUB.COM

Valid starting     Expires            Service principal
08/14/16 21:05:43  08/15/16 21:05:43  krbtgt/TUXHUB.COM@TUXHUB.COM
        renew until 08/14/16 21:05:43
-bash-4.1$

20) Run a Hadoop command to confirm access

-bash-4.1$ hadoop fs -ls /

21) Edit yarn-site.xml so YARN also uses Kerberos; a minimal sketch follows.
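
A sketch of the Kerberos-related yarn-site.xml properties, assuming the yarn keytab and principals created above (the JobHistory Server needs the equivalent mapreduce.jobhistory.keytab/principal settings in mapred-site.xml):

<property>
  <name>yarn.resourcemanager.keytab</name>
  <value>/etc/hadoop/conf/yarn.keytab</value>
</property>
<property>
  <name>yarn.resourcemanager.principal</name>
  <value>yarn/_HOST@TUXHUB.COM</value>
</property>
<property>
  <name>yarn.nodemanager.keytab</name>
  <value>/etc/hadoop/conf/yarn.keytab</value>
</property>
<property>
  <name>yarn.nodemanager.principal</name>
  <value>yarn/_HOST@TUXHUB.COM</value>
</property>
<property>
  <name>yarn.nodemanager.container-executor.class</name>
  <value>org.apache.hadoop.yarn.server.nodemanager.LinuxContainerExecutor</value>
</property>
<property>
  <name>yarn.nodemanager.linux-container-executor.group</name>
  <value>yarn</value>
</property>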

Note: All of the files referenced above are available at
https://drive.google.com/file/d/0BxAxRcNkM4a0aWZxdGdOMWRtNDQ/view?usp=sharing
