Java/Scala 訪問啟用kerberos驗證的Hadoop叢集
阿新 • 發佈:2019-01-31
通過keytab的方式訪問HDFS
// Writes `inputStream` to `hdfsFile` on a (possibly Kerberos-secured) HDFS cluster.
// When Kerberos is enabled, logs in from a keytab via UserGroupInformation before
// creating the FileSystem handle.
// NOTE(review): `isKerberosEnable`, `kerberosLoginUser`, `hdfsUri`, `hdfsFile` and
// `inputStream` are assumed to be defined by the surrounding class — confirm.
Configuration config = new Configuration();
FileSystem hdfs = null;
try {
    if (isKerberosEnable) {
        // The JVM must know where krb5.conf lives before any Kerberos call is made.
        System.setProperty("java.security.krb5.conf", "./config/kerberos/krb5.conf");
        config.set("hadoop.security.authentication", "kerberos");
        config.addResource("./config/cluster/hadoop/core-site.xml");
        config.addResource("./config/cluster/hadoop/hdfs-site.xml");
        // UGI must see the Kerberos-enabled configuration before the keytab login.
        UserGroupInformation.setConfiguration(config);
        UserGroupInformation.loginUserFromKeytab(kerberosLoginUser, "./config/kerberos/hdfs.keytab");
    }
    hdfs = FileSystem.get(URI.create(hdfsUri), config);
    // Overwrite any existing file at the target path.
    FSDataOutputStream outputStream = hdfs.create(new Path(hdfsFile), true);
    // copyBytes with closeStreams=true closes both inputStream and outputStream.
    IOUtils.copyBytes(inputStream, outputStream, 4096, true);
} catch (IOException e) {
    // Preserve the cause; do not swallow the failure silently.
    throw new UncheckedIOException("Failed to write " + hdfsFile + " to HDFS", e);
} finally {
    if (hdfs != null) {
        try {
            hdfs.close();
        } catch (IOException ignored) {
            // Best-effort close of the FileSystem handle; the write already succeeded or threw.
        }
    }
}
通過keytab方式訪問HBASE
// Populates `conf` with the ZooKeeper and Kerberos settings required to reach a
// Kerberos-secured HBase cluster, then performs a keytab-based login via UGI.
// Principals, keytab path and login user fall back to values in `Contants` when
// the corresponding SparkConf keys are absent.
// NOTE(review): the ZooKeeper client port (2181) and znode parent ("/hbase") are
// hard-coded here — confirm they match the target cluster.
def setKerberosConf(conf: Configuration, sparkConf: SparkConf): Unit = {
  // ZooKeeper quorum used by the HBase client to locate the cluster.
  conf.set("hbase.zookeeper.quorum", s"${zkaddress}")
  conf.setInt("hbase.zookeeper.property.clientPort", 2181)
  conf.set("zookeeper.znode.parent", "/hbase")
  // The JVM must know where krb5.conf lives before any Kerberos call is made.
  System.setProperty("java.security.krb5.conf", "conf/kerberos/krb5.conf")
  conf.set("hadoop.security.authentication", "kerberos")
  conf.set("hbase.security.authentication", "kerberos")
  conf.set("keytab.file", Contants.HBASE_KERBEROS_KEYTAB_FILE)
  // Client principal, overridable through SparkConf.
  conf.set("kerberos.principal", sparkConf.get("hbase.kerberos.principal", Contants.HBASE_KERBEROS_PRINCIPAL))
  // Server-side principals the client expects from the HBase master and region servers.
  conf.set("hbase.master.kerberos.principal", sparkConf.get("hbase.master.kerberos.principal", Contants.HBASE_MASTER_KERBEROS_PRINCIPAL))
  conf.set("hbase.regionserver.kerberos.principal", sparkConf.get("hbase.regionserver.kerberos.principal", Contants.HBASE_REGIONSERVER_KERBEROS_PRINCIPAL))
  // UGI must be given the Kerberos-enabled configuration before the keytab login.
  UserGroupInformation.setConfiguration(conf)
  UserGroupInformation.loginUserFromKeytab(
    sparkConf.get("hbase.kerberos.user", Contants.HBASE_KERBEROS_USER),
    Contants.HBASE_KERBEROS_KEYTAB_FILE)
}