Connecting to Impala from Java over JDBC on Windows (with Kerberos authentication)
I looked around online for examples of connecting to Impala over JDBC with Kerberos authentication, and most of them failed with errors. Below are two ways I connect to Impala over JDBC: one through Impala's own JDBC driver, and one through the Hive JDBC driver.
A common mistake: many examples set the Kerberos system property (java.security.krb5.conf) too late, after the Hadoop security setup or even after opening the connection. That ordering is wrong and causes authentication errors. Set the system property first, then configure UserGroupInformation and log in from the keytab, and only then open the JDBC connection.
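A minimal sketch of the correct ordering (the host names, realm, principal, and file paths below are placeholders, not values from a real cluster):

import java.sql.Connection;
import java.sql.DriverManager;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.security.UserGroupInformation;

public class KerberosOrder {
    public static void main(String[] args) throws Exception {
        // 1. Point the JVM at krb5.conf BEFORE any Hadoop/JDBC security code runs.
        System.setProperty("java.security.krb5.conf", "C:/kerberos/krb5.conf");

        // 2. Switch the Hadoop security layer to Kerberos and log in from the keytab.
        Configuration conf = new Configuration();
        conf.set("hadoop.security.authentication", "Kerberos");
        UserGroupInformation.setConfiguration(conf);
        UserGroupInformation.loginUserFromKeytab(
                "user/host.example.com@EXAMPLE.COM", "C:/kerberos/user.keytab");

        // 3. Only now open the JDBC connection.
        Class.forName("org.apache.hive.jdbc.HiveDriver");
        Connection conn = DriverManager.getConnection(
                "jdbc:hive2://impalad-host:21050/default;principal=impala/host.example.com@EXAMPLE.COM");
        System.out.println("connected: " + conn);
        conn.close();
    }
}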
Method 1: connect with Impala's own JDBC driver (this example is based on the Cloudera Impala JDBC demo, modified to add Kerberos authentication)
The pom file:
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
  xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
  <modelVersion>4.0.0</modelVersion>
  <groupId>com.xxxx.tlq</groupId>
  <artifactId>tlq-impala-demo</artifactId>
  <version>0.0.1-SNAPSHOT</version>
  <packaging>jar</packaging>
  <description>Cloudera Impala JDBC Example for CDH 5.5.2</description>
  <properties>
    <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
    <impala.jdbc.version>2.5.30</impala.jdbc.version>
    <uber.jar.name>cloudera-impala-jdbc-example-uber.jar</uber.jar.name>
    <uber.jar.main.class>com.cloudera.example.ClouderaImpalaJdbcExample</uber.jar.main.class>
  </properties>
  <dependencies>
    <!-- These dependencies are provided by your local repository -->
    <dependency>
      <groupId>com.cloudera.impala.jdbc</groupId>
      <artifactId>hive_metastore.jar</artifactId>
      <version>${impala.jdbc.version}</version>
    </dependency>
    <dependency>
      <groupId>com.cloudera.impala.jdbc</groupId>
      <artifactId>hive_service.jar</artifactId>
      <version>${impala.jdbc.version}</version>
    </dependency>
    <dependency>
      <groupId>com.cloudera.impala.jdbc</groupId>
      <artifactId>ImpalaJDBC41.jar</artifactId>
      <version>${impala.jdbc.version}</version>
    </dependency>
    <dependency>
      <groupId>com.cloudera.impala.jdbc</groupId>
      <artifactId>ql.jar</artifactId>
      <version>${impala.jdbc.version}</version>
    </dependency>
    <dependency>
      <groupId>com.cloudera.impala.jdbc</groupId>
      <artifactId>TCLIServiceClient.jar</artifactId>
      <version>${impala.jdbc.version}</version>
    </dependency>
    <dependency>
      <groupId>org.apache.thrift</groupId>
      <artifactId>libfb303</artifactId>
      <version>0.9.0</version>
    </dependency>
    <dependency>
      <groupId>org.apache.thrift</groupId>
      <artifactId>libthrift</artifactId>
      <version>0.9.0</version>
    </dependency>
    <dependency>
      <groupId>log4j</groupId>
      <artifactId>log4j</artifactId>
      <version>1.2.14</version>
    </dependency>
    <dependency>
      <groupId>org.slf4j</groupId>
      <artifactId>slf4j-api</artifactId>
      <version>1.5.11</version>
    </dependency>
    <dependency>
      <groupId>org.slf4j</groupId>
      <artifactId>slf4j-log4j12</artifactId>
      <version>1.5.11</version>
    </dependency>
    <dependency>
      <groupId>org.apache.zookeeper</groupId>
      <artifactId>zookeeper</artifactId>
      <version>3.4.6</version>
    </dependency>
    <dependency>
      <groupId>org.apache.hadoop</groupId>
      <artifactId>hadoop-common</artifactId>
      <version>2.8.1</version>
    </dependency>
  </dependencies>
  <build>
    <pluginManagement>
      <plugins>
        <plugin>
          <groupId>org.codehaus.mojo</groupId>
          <artifactId>exec-maven-plugin</artifactId>
          <version>1.2.1</version>
        </plugin>
      </plugins>
    </pluginManagement>
    <plugins>
      <plugin>
        <groupId>org.apache.maven.plugins</groupId>
        <artifactId>maven-compiler-plugin</artifactId>
        <version>2.3.2</version>
        <configuration>
          <source>1.6</source>
          <target>1.6</target>
        </configuration>
      </plugin>
      <plugin>
        <groupId>org.apache.maven.plugins</groupId>
        <artifactId>maven-jar-plugin</artifactId>
        <version>2.4</version>
        <configuration>
          <outputDirectory>${basedir}</outputDirectory>
        </configuration>
      </plugin>
      <plugin>
        <artifactId>maven-clean-plugin</artifactId>
        <version>2.6.1</version>
        <configuration>
          <filesets>
            <fileset>
              <directory>.</directory>
              <includes>
                <include>*.jar</include>
              </includes>
              <followSymlinks>false</followSymlinks>
            </fileset>
          </filesets>
        </configuration>
      </plugin>
      <plugin>
        <groupId>org.apache.maven.plugins</groupId>
        <artifactId>maven-shade-plugin</artifactId>
        <version>2.2</version>
        <configuration>
          <shadedArtifactAttached>false</shadedArtifactAttached>
          <outputFile>target/${uber.jar.name}</outputFile>
          <artifactSet>
            <includes>
              <include>*:*</include>
            </includes>
          </artifactSet>
          <filters>
            <filter>
              <artifact>*:*</artifact>
              <excludes>
                <exclude>META-INF/*.SF</exclude>
                <exclude>META-INF/*.DSA</exclude>
                <exclude>META-INF/*.RSA</exclude>
              </excludes>
            </filter>
          </filters>
        </configuration>
        <executions>
          <execution>
            <phase>package</phase>
            <goals>
              <goal>shade</goal>
            </goals>
            <configuration>
              <transformers>
                <transformer implementation="org.apache.maven.plugins.shade.resource.ServicesResourceTransformer" />
                <transformer implementation="org.apache.maven.plugins.shade.resource.AppendingTransformer">
                  <resource>reference.conf</resource>
                </transformer>
                <transformer implementation="org.apache.maven.plugins.shade.resource.ManifestResourceTransformer">
                  <mainClass>${uber.jar.main.class}</mainClass>
                </transformer>
              </transformers>
            </configuration>
          </execution>
        </executions>
      </plugin>
    </plugins>
  </build>
  <repositories>
    <repository>
      <id>fonova-snapshot</id>
      <name>nexus distribution snapshot repository</name>
      <url>http://xxxxxxxxx:8081/content/repositories</url>
      <snapshots>
        <enabled>false</enabled>
      </snapshots>
    </repository>
    <repository>
      <id>cdh.repo</id>
      <url>https://repository.cloudera.com/artifactory/cloudera-repos</url>
      <name>Cloudera Repositories</name>
      <snapshots>
        <enabled>false</enabled>
      </snapshots>
    </repository>
    <repository>
      <id>central</id>
      <url>http://repo1.maven.org/maven2/</url>
      <releases>
        <enabled>true</enabled>
      </releases>
      <snapshots>
        <enabled>false</enabled>
      </snapshots>
    </repository>
  </repositories>
</project>

The Impala-related jars (hive_metastore.jar, hive_service.jar, ImpalaJDBC41.jar, ql.jar, TCLIServiceClient.jar) have to be downloaded separately and installed into your own Maven repository; they are not available in Maven Central.
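One way to get those jars into a local repository is Maven's install-file goal, run once per jar. This is only a sketch; the exact jar file names depend on the Cloudera Impala JDBC driver package you downloaded, and the coordinates must match the ones used in the pom above. For example:

mvn install:install-file -Dfile=ImpalaJDBC41.jar -DgroupId=com.cloudera.impala.jdbc -DartifactId=ImpalaJDBC41.jar -Dversion=2.5.30 -Dpackaging=jar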
The project code:

package com.cloudera.example;

import java.io.IOException;
import java.io.InputStream;
import java.net.URLDecoder;
import java.security.PrivilegedAction;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.Properties;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.security.UserGroupInformation;

public class ClouderaImpalaJdbcExample {

    private static final String CONNECTION_URL_PROPERTY = "connection.url";
    private static final String JDBC_DRIVER_NAME_PROPERTY = "jdbc.driver.class.name";

    private static String connectionUrl;
    private static String jdbcDriverName;

    // Load connection.url and jdbc.driver.class.name from ClouderaImpalaJdbcExample.conf on the classpath.
    private static void loadConfiguration() throws IOException {
        InputStream input = null;
        try {
            String filename = ClouderaImpalaJdbcExample.class.getSimpleName() + ".conf";
            input = ClouderaImpalaJdbcExample.class.getClassLoader().getResourceAsStream(filename);
            Properties prop = new Properties();
            prop.load(input);
            connectionUrl = prop.getProperty(CONNECTION_URL_PROPERTY);
            jdbcDriverName = prop.getProperty(JDBC_DRIVER_NAME_PROPERTY);
        } finally {
            try {
                if (input != null)
                    input.close();
            } catch (IOException e) {
                // nothing to do
            }
        }
    }

    public static void main(String[] args) throws IOException {
        final String sqlStatement = "show databases";

        loadConfiguration();

        System.out.println("\n=============================================");
        System.out.println("Cloudera Impala JDBC Example");
        System.out.println("Using Connection URL: " + connectionUrl);
        System.out.println("Running Query: " + sqlStatement);

        try {
            // Set the Kerberos configuration for the JVM; this must happen before UserGroupInformation is used.
            String krb5Path = URLDecoder.decode(ClassLoader.getSystemResource("krb5.conf").getPath(), "utf-8");
            System.out.println("krb5Path : " + krb5Path);
            System.setProperty("java.security.krb5.conf", krb5Path);

            // Switch the Hadoop security layer to Kerberos and log in from the keytab on the classpath.
            Configuration conf = new Configuration();
            conf.set("hadoop.security.authentication", "Kerberos");
            UserGroupInformation.setConfiguration(conf);

            String keyTabPath = URLDecoder.decode(
                    ClassLoader.getSystemResource("tenglq_fonova-openstack1.fx01.keytab").getPath(), "utf-8");
            System.out.println("keyTabPath : " + keyTabPath);
            UserGroupInformation.loginUserFromKeytab("tenglq/fonova-openstack1.fx01@ABC.COM", keyTabPath);

            UserGroupInformation loginUser = UserGroupInformation.getLoginUser();
            // Run the JDBC work as the logged-in Kerberos user.
            loginUser.doAs(new PrivilegedAction<Void>() {
                @Override
                public Void run() {
                    Connection con = null;
                    try {
                        Class.forName(jdbcDriverName);
                        con = DriverManager.getConnection(connectionUrl);
                        System.out.println(con);
                        Statement stmt = con.createStatement();
                        ResultSet rs = stmt.executeQuery(sqlStatement);

                        System.out.println("\n== Begin Query Results ======================");
                        // The example query returns one String column per row.
                        while (rs.next()) {
                            System.out.println(rs.getString(1));
                        }
                        System.out.println("== End Query Results =======================\n\n");
                    } catch (ClassNotFoundException e) {
                        e.printStackTrace();
                    } catch (SQLException e) {
                        e.printStackTrace();
                    } finally {
                        try {
                            if (con != null)
                                con.close();
                        } catch (SQLException e) {
                            // swallow
                        }
                    }
                    return null;
                }
            });
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
}

Again: the java.security.krb5.conf system property must be set before UserGroupInformation is configured and the keytab login runs; setting it later causes authentication errors.
The configuration file (loaded from the classpath as ClouderaImpalaJdbcExample.conf, matching the class name):
connection.url = jdbc:impala://xxx.xxx.xxx.xxx:21050/;AuthMech=1;KrbRealm=ABC.COM;KrbHostFQDN=abc-openstack1.fx01;KrbServiceName=impala
jdbc.driver.class.name = com.cloudera.impala.jdbc41.Driver

#connection.url = jdbc:hive2://xxx.xxx.xxx.xxx:21050/;principal=tenglq/fonova-openstack1.fx01@ABC.COM
#jdbc.driver.class.name = org.apache.hive.jdbc.HiveDriver

Here AuthMech=1 selects Kerberos authentication in the Cloudera Impala JDBC driver, KrbRealm and KrbHostFQDN identify the Kerberos realm and the impalad host, and KrbServiceName is the service name of the Impala principal. The commented-out lines show the equivalent Hive-driver settings.
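Both methods also point java.security.krb5.conf at a krb5.conf file. As a rough sketch, a minimal krb5.conf looks like the following; the realm ABC.COM and the KDC host name are placeholders and must match your own KDC:

[libdefaults]
  default_realm = ABC.COM
  dns_lookup_kdc = false
  dns_lookup_realm = false

[realms]
  ABC.COM = {
    kdc = kdc-host.example.com
    admin_server = kdc-host.example.com
  }

[domain_realm]
  .example.com = ABC.COM
  example.com = ABC.COM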
Method 2: connect to Impala with the Hive JDBC driver. The project pom file:
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
  xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
  <modelVersion>4.0.0</modelVersion>
  <groupId>com.abc.tlq</groupId>
  <artifactId>impala-jdbc-demo</artifactId>
  <version>0.0.1-SNAPSHOT</version>
  <dependencies>
    <dependency>
      <groupId>org.apache.hadoop</groupId>
      <artifactId>hadoop-common</artifactId>
      <version>2.8.1</version>
    </dependency>
    <dependency>
      <groupId>org.apache.hive</groupId>
      <artifactId>hive-jdbc</artifactId>
      <version>0.14.0.2.2.0.0-2041</version>
    </dependency>
  </dependencies>
</project>

The project code:

package com.dmp.demo;

import java.io.IOException;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.Statement;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.security.UserGroupInformation;

public class TestImpala {

    // Uses hive-jdbc-0.14.0.2.2.0.0-2041.jar
    private static String driverName = "org.apache.hive.jdbc.HiveDriver";

    public static void main(String[] args) {
        // The principal in the URL is the Impala service principal registered with CDH
        // (e.g. impala/sjzx5.test.com@TEST.COM); it can be looked up on the Kerberos
        // configuration page of the CDH admin console. The host is the impalad server.
        String url = "jdbc:hive2://xxx.xxx.xxx.xxx:21050/default;principal=impala/fonova-openstack1.fx01@ABC.COM";

        // Set the Kerberos configuration for the JVM first.
        System.setProperty("java.security.krb5.conf",
                "D:/workspace/work1_1/jetcloud-impala-demo/src/main/resources/krb5.conf");

        // Switch authentication to Kerberos.
        Configuration conf = new Configuration();
        conf.set("hadoop.security.authentication", "Kerberos");
        try {
            UserGroupInformation.setConfiguration(conf);
            // The user principal and its keytab file.
            UserGroupInformation.loginUserFromKeytab("tenglq/fonova-openstack1.fx01@ABC.COM",
                    "D:/workspace/work1_1/jetcloud-impala-demo/src/main/resources/tenglq_fonova-openstack1.fx01.keytab");
        } catch (IOException e) {
            e.printStackTrace();
        }

        try {
            Class.forName(driverName);
            Connection conn = DriverManager.getConnection(url);
            Statement stmt = conn.createStatement();
            String sql = "show databases";
            ResultSet rs = stmt.executeQuery(sql);
            while (rs.next()) {
                System.out.println(rs.getString(1));
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
}
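If this Hive-driver variant still fails with GSS/SASL errors even though the keytab login succeeds, one variation worth trying (my assumption, not something this article verified) is to wrap the JDBC calls in the logged-in user's doAs block, exactly as Method 1 does. A sketch, assuming the login code above has already run, java.security.PrivilegedAction is imported, and url is declared final so the anonymous class can see it:

UserGroupInformation loginUser = UserGroupInformation.getLoginUser();
loginUser.doAs(new PrivilegedAction<Void>() {
    @Override
    public Void run() {
        try {
            // Same JDBC work as above, now executed as the Kerberos login user.
            Class.forName(driverName);
            Connection conn = DriverManager.getConnection(url);
            Statement stmt = conn.createStatement();
            ResultSet rs = stmt.executeQuery("show databases");
            while (rs.next()) {
                System.out.println(rs.getString(1));
            }
            conn.close();
        } catch (Exception e) {
            e.printStackTrace();
        }
        return null;
    }
});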