1. Versions
Hive 2.1.1-cdh6.3.2 requires matching versions of the Hive JDBC and Impala JDBC drivers, as listed below.
Note: if you use the Hive JDBC driver on its own, use version 3.1.0. The Impala JDBC driver must be paired with Hive JDBC 1.1.0, otherwise it fails; paired with Hive JDBC 3.1.0 it throws:
java.lang.NoClassDefFoundError: org/apache/hive/service/cli/thrift/TCLIService$Client
```xml
<dependency>
    <groupId>org.apache.hive</groupId>
    <artifactId>hive-exec</artifactId>
    <version>3.1.0</version>
</dependency>
<dependency>
    <groupId>org.apache.hadoop</groupId>
    <artifactId>hadoop-common</artifactId>
    <version>3.1.0</version>
</dependency>
<dependency>
    <groupId>org.apache.hadoop</groupId>
    <artifactId>hadoop-auth</artifactId>
    <version>3.1.0</version>
</dependency>
<dependency>
    <groupId>com.cloudera.impala</groupId>
    <artifactId>impala-jdbc41</artifactId>
    <version>2.5.28</version>
</dependency>
<dependency>
    <groupId>org.apache.hive</groupId>
    <artifactId>hive-jdbc</artifactId>
    <version>1.1.0</version>
</dependency>
```
1.1 Installing the JAR into the local Maven repository manually
If the required JDBC driver is not available in your Maven repository, install the JAR manually, as follows:
```
mvn install:install-file -Dfile=C:\Users\mx\Desktop\impala-jdbc41-0.0.1.jar -DgroupId=com.cloudera.impala -DartifactId=impala-jdbc42 -Dversion=2.6.33 -Dpackaging=jar
```
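Note that the groupId, artifactId, and version passed to install-file become the coordinates under which the JAR is stored in the local repository, so the dependency declared in the POM must reference exactly the same coordinates.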
2. Hostname mapping
Because the Kerberos SPN configuration requires connecting by hostname, configure the hosts file on both Linux and Windows in the format "IP address hostname", then verify the mapping with telnet once configured.
Note: if Hive or Impala is accessed from a Docker container, the hosts entries must also be configured inside the container, so that telnet succeeds from both the host and the container.
telnet hostname port
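As an illustration (the IP and hostname below are placeholders), the entry in /etc/hosts on Linux, or in C:\Windows\System32\drivers\etc\hosts on Windows, would look like:
```
192.168.1.10 cdh-node1
```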
2.1 Ports
Open the corresponding Hive and Impala ports; for local debugging, add the IPs and ports to the whitelist as needed.
- Hive TCP port: jdbc:hive2://hostname:10000
- Impala TCP port: jdbc:impala://hostname:21050
- HDFS TCP port: hdfs://hostname:8020
- Kerberos UDP port: 88
2.2 Connectivity check
Use telnet hostname port against each of the hostnames and ports above to confirm network connectivity.
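For example, using the placeholder host cdh-node1 from the hosts entry above (telnet only verifies the TCP ports; it cannot check the UDP Kerberos port 88):
```
telnet cdh-node1 10000   # HiveServer2
telnet cdh-node1 21050   # Impala
telnet cdh-node1 8020    # HDFS NameNode
```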
3. Connecting to a Kerberos-authenticated Hive cluster with Hive JDBC
```java
package com.test;

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.Statement;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.security.UserGroupInformation;

public class HiveKerberosConnectionExample {

    private static String krb5 = "src/main/resources/krb5.conf";
    private static String keytab = "src/main/resources/hive.keytab";
    private static String principal = "hive@EXAMPLE.COM";
    private static String hiveUrl = "jdbc:hive2://host:10000/test;principal=hive/_HOST@EXAMPLE.COM";

    public static void main(String[] args) throws Exception {
        // Point the JVM at the Kerberos configuration file
        System.setProperty("java.security.krb5.conf", krb5);
        System.setProperty("sun.security.krb5.debug", "true"); // optional: traces the Kerberos authentication process

        // Initialize the Hadoop configuration
        Configuration conf = new Configuration();
        conf.set("hadoop.security.authentication", "kerberos");

        // Authenticate against Kerberos using the keytab
        UserGroupInformation.setConfiguration(conf);
        UserGroupInformation.loginUserFromKeytab(principal, keytab);

        // Open the Hive connection
        Class.forName("org.apache.hive.jdbc.HiveDriver");
        Connection connection = DriverManager.getConnection(hiveUrl, "", "");

        // Run the query
        Statement stmt = connection.createStatement();
        ResultSet res = stmt.executeQuery("select * from test.tblname");

        // Process the results (a, b, c, ds are columns of tblname)
        while (res.next()) {
            String data = res.getString("a") + "," + res.getString("b") + "," + res.getString("c") + "," + res.getString("ds");
            System.out.println("************** tblname output **************");
            System.out.println(data);
        }

        // Release resources
        res.close();
        stmt.close();
        connection.close();
    }
}
```
4. Connecting to a Kerberos-authenticated Impala cluster with Hive JDBC
```java
package com.test;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.security.UserGroupInformation;

import java.security.PrivilegedAction;
import java.sql.*;

public class ImpalaKerberosConnectionExample {

    private static String krb5 = "src/main/resources/krb5.conf";
    private static String keytab = "src/main/resources/impala.keytab";
    private static String principal = "impala@EXAMPLE.COM";
    private static String impalaUrl = "jdbc:hive2://host:21050/test;principal=impala/_HOST@EXAMPLE.COM";

    public static void main(String[] args) throws Exception {
        // Point the JVM at the Kerberos configuration file
        System.setProperty("java.security.krb5.conf", krb5);
        System.setProperty("sun.security.krb5.debug", "true"); // optional: traces the Kerberos authentication process

        // Initialize the Hadoop configuration
        Configuration conf = new Configuration();
        conf.set("hadoop.security.authentication", "kerberos");

        // Authenticate against Kerberos using the keytab
        UserGroupInformation.setConfiguration(conf);
        UserGroupInformation.loginUserFromKeytab(principal, keytab);
        UserGroupInformation loginUser = UserGroupInformation.getLoginUser();

        // Connect to Impala through the Hive JDBC driver
        Class.forName("org.apache.hive.jdbc.HiveDriver");

        // Query inside doAs (works with both the Impala JDBC and the Hive JDBC driver)
        // loginUser.doAs((PrivilegedAction<Void>) () -> {
        //     try {
        //         try (Connection connection = DriverManager.getConnection(impalaUrl, "", "")) {
        //             try (Statement statement = connection.createStatement()) {
        //                 ResultSet resultSet = statement.executeQuery("SELECT * FROM test.tblname LIMIT 10");
        //                 while (resultSet.next()) {
        //                     String data = resultSet.getString("a") + "," + resultSet.getString("b") + "," + resultSet.getString("c") + "," + resultSet.getString("ds");
        //                     System.out.println("************** tblname output **************");
        //                     System.out.println(data);
        //                 }
        //                 resultSet.close();
        //             }
        //         }
        //     } catch (SQLException e) {
        //         e.printStackTrace();
        //     }
        //     return null;
        // });

        // Plain Hive JDBC query; the Impala JDBC driver fails here with Kerberos authentication error code 1312
        Connection connection = DriverManager.getConnection(impalaUrl, "", "");
        Statement stmt = connection.createStatement();
        ResultSet res = stmt.executeQuery("SELECT * FROM test.tblname LIMIT 10");

        // Process the results
        while (res.next()) {
            String data = res.getString("a") + "," + res.getString("b") + "," + res.getString("c") + "," + res.getString("ds");
            System.out.println("************** Impala tblname output **************");
            System.out.println(data);
        }

        // Release resources
        res.close();
        stmt.close();
        connection.close();
    }
}
```
5. Connecting to a Kerberos-authenticated Impala cluster with Impala JDBC
```java
package com.test;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.security.UserGroupInformation;

import java.security.PrivilegedAction;
import java.sql.*;

public class ImpalaKerberosConnectionExample {

    private static String krb5 = "src/main/resources/krb5.conf";
    private static String keytab = "src/main/resources/impala.keytab";
    private static String principal = "impala@EXAMPLE.COM";
    private static String impalaUrl = "jdbc:impala://host:21050/test;AuthMech=1;KrbRealm=EXAMPLE.COM;KrbHostFQDN=host;KrbServiceName=impala";

    public static void main(String[] args) throws Exception {
        // Point the JVM at the Kerberos configuration file
        System.setProperty("java.security.krb5.conf", krb5);
        System.setProperty("sun.security.krb5.debug", "true"); // optional: traces the Kerberos authentication process

        // Initialize the Hadoop configuration
        Configuration conf = new Configuration();
        conf.set("hadoop.security.authentication", "kerberos");

        // Authenticate against Kerberos using the keytab
        UserGroupInformation.setConfiguration(conf);
        UserGroupInformation.loginUserFromKeytab(principal, keytab);
        UserGroupInformation loginUser = UserGroupInformation.getLoginUser();

        // Connect to Impala through the Impala JDBC driver
        Class.forName("com.cloudera.impala.jdbc41.Driver");
        // With Hive JDBC 3.1.0 on the classpath this throws: java.lang.NoClassDefFoundError: org/apache/hive/service/cli/thrift/TCLIService$Client

        // Alternatively, connect to Impala through the Hive JDBC driver
        // Class.forName("org.apache.hive.jdbc.HiveDriver");

        // Run the Impala query as the authenticated user
        loginUser.doAs((PrivilegedAction<Void>) () -> {
            try {
                try (Connection connection = DriverManager.getConnection(impalaUrl, "", "")) {
                    try (Statement statement = connection.createStatement()) {
                        ResultSet resultSet = statement.executeQuery("SELECT * FROM test.tblname LIMIT 10");
                        while (resultSet.next()) {
                            String data = resultSet.getString("a") + "," + resultSet.getString("b") + "," + resultSet.getString("c") + "," + resultSet.getString("ds");
                            System.out.println("************** Impala tblname output **************");
                            System.out.println(data);
                        }
                        resultSet.close();
                    }
                }
            } catch (SQLException e) {
                e.printStackTrace();
            }
            return null;
        });
    }
}
```
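In the Impala JDBC URL above, AuthMech=1 selects Kerberos authentication, KrbRealm and KrbHostFQDN identify the Kerberos realm and the fully qualified hostname of the Impala daemon, and KrbServiceName is the service part of the Impala principal (typically impala).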
Summary
The above is based on my own experience; I hope it serves as a useful reference, and thank you for supporting IT俱乐部.