Step 1: Put the Hive and Hadoop JARs on the classpath

$hive/lib/*.jar
$hadoop/lib/*.jar

(For Maven builds, the org.apache.hive:hive-jdbc artifact together with a matching org.apache.hadoop:hadoop-common typically covers the same dependencies.)
Step 2: Load the HiveServer2 JDBC driver
Class.forName("org.apache.hive.jdbc.HiveDriver");
Step 3: Connect to Hive

Connection cnct = DriverManager.getConnection("jdbc:hive2://<host>:<port>", "<user>", "<password>");

The URL may also name a default database, e.g. jdbc:hive2://<host>:<port>/<db>, as the utility class below does.
Step 4: Create a Statement and execute SQL

Statement stmt = cnct.createStatement();
ResultSet rset = stmt.executeQuery("SELECT foo FROM bar");
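
Putting the steps together, here is a minimal self-contained sketch that also iterates the result set and releases resources via try-with-resources; the host, credentials, and the table name bar are placeholders, not part of any real setup:

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.Statement;

public class HiveJdbcQuickStart {
    public static void main(String[] args) throws Exception {
        // Step 2: register the HiveServer2 driver
        Class.forName("org.apache.hive.jdbc.HiveDriver");
        // Steps 3 and 4: connect, query, and close everything automatically
        try (Connection cnct = DriverManager.getConnection(
                     "jdbc:hive2://<host>:<port>", "<user>", "<password>");
             Statement stmt = cnct.createStatement();
             ResultSet rset = stmt.executeQuery("SELECT foo FROM bar")) {
            while (rset.next()) {
                System.out.println(rset.getString(1));
            }
        }
    }
}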
Hive JDBC utility class implementation:
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

/**
 * Hive JDBC utility class
 * @author imzhangjie.cn
 */
public class HiveJDBCUtil {

    public static String driverName = "org.apache.hive.jdbc.HiveDriver";
    public static String connectURL = "jdbc:hive2://10.20.112.30:10000/dmp";
    public static String connectUSER = "xxxxx";
    public static String connectPWD = "xxxxxx";

    // Override the defaults from the project's configuration reader.
    static {
        driverName = ProfileManager.getValue("hive.jdbc.driver", "");
        connectURL = ProfileManager.getValue("hive.jdbc.connectURL", "");
        connectUSER = ProfileManager.getValue("hive.jdbc.connectUSER", "");
        connectPWD = ProfileManager.getValue("hive.jdbc.connectPWD", "");
    }

    /**
     * Get a Hive connection.
     */
    public static Connection getConn() throws SQLException, ClassNotFoundException {
        Class.forName(driverName);
        return DriverManager.getConnection(connectURL, connectUSER, connectPWD);
    }

    /**
     * Run a query and return the rows as a list of column-name/value maps.
     * @param commands setup statements executed before the query
     * @param sql the query, with ? placeholders
     * @param params values bound to the placeholders
     */
    public static List<Map<String, Object>> queryForValueList(String[] commands, String sql, Object... params) throws Exception {
        ResultSet rs = null;
        PreparedStatement ps = null;
        List<Map<String, Object>> rlist = new ArrayList<Map<String, Object>>();
        Connection con = null;
        try {
            con = getConn();
            if (commands != null) {
                // Run setup commands first, e.g. registering temporary functions.
                for (String command : commands) {
                    Statement st = con.createStatement();
                    try {
                        st.execute(command);
                    } finally {
                        st.close();
                    }
                }
            }
            ps = con.prepareStatement(sql);
            if (params != null && params.length > 0) {
                int i = 1;
                for (Object o : params) {
                    ps.setObject(i, o);
                    i++;
                }
            }
            rs = ps.executeQuery();
            ResultSetMetaData rsmd = rs.getMetaData();
            int size = rsmd.getColumnCount();
            while (rs.next()) {
                Map<String, Object> map = new HashMap<String, Object>();
                for (int i = 1; i <= size; i++) {
                    map.put(rsmd.getColumnName(i), rs.getObject(i));
                }
                rlist.add(map);
            }
        } catch (Exception e) {
            // Preserve the underlying cause instead of swallowing it.
            throw new RuntimeException("JDBC query failed", e);
        } finally {
            if (rs != null) rs.close();
            if (ps != null) ps.close();
            if (con != null) con.close();
        }
        return rlist;
    }

    public static void main(String[] args) throws Exception {
        String[] commands = new String[] {
            "add jar /data0/custom/lib/hiveUDF.jar",
            "create temporary function group_concat as 'org.apache.hadoop.hive.udf.GroupConcatInSetUdf'"
        };
        String sql = "select group_concat(event_id) as event_id, group_concat(activity_name) as activity_name from pad_log where log_date = ?";
        List<Map<String, Object>> list = queryForValueList(commands, sql, "20170127");
        System.out.println(list);
    }
}
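
The commands hook exists because add jar and create temporary function are session-scoped in HiveServer2, so UDFs have to be re-registered on every new connection before the query runs. For a plain query that needs no UDFs, pass null; a hypothetical call (the table and column names here are illustrative only):

// Hypothetical caller: table "bar" and column "log_date" are illustrative.
List<Map<String, Object>> rows = HiveJDBCUtil.queryForValueList(
        null, "select foo from bar where log_date = ?", "20170127");
System.out.println(rows);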