JDBC连接Hive2,注解式生成对象集合【解决】

    技术2023-05-27  25

    JDBC连接Hive2,注解式生成对象集合【解决】

    由于我无法通过mybatis连接到Hive2,所以选择了古老的JDBC连接大法。

    1. 引依赖

    <!-- Hive JDBC driver plus the Hadoop/Hive runtime it needs on the classpath.
         Versions are omitted here — they are expected to be managed by a parent POM / BOM. -->
    <dependency>
        <groupId>org.apache.hive</groupId>
        <artifactId>hive-jdbc</artifactId>
    </dependency>
    <dependency>
        <groupId>org.apache.hadoop</groupId>
        <artifactId>hadoop-common</artifactId>
    </dependency>
    <dependency>
        <groupId>org.apache.hadoop</groupId>
        <artifactId>hadoop-client</artifactId>
    </dependency>
    <dependency>
        <groupId>org.apache.hive</groupId>
        <artifactId>hive-metastore</artifactId>
    </dependency>
    <dependency>
        <groupId>org.apache.hive</groupId>
        <artifactId>hive-exec</artifactId>
    </dependency>

    2. 注解

    import java.lang.annotation.*;

    /**
     * Maps an entity field to a result-set column so that rows returned by a
     * Hive2/Impala JDBC query can be materialized into objects reflectively.
     *
     * <p>Retained at runtime and restricted to fields; {@code name} must match
     * the column name in the result set exactly.
     *
     * @author gyh
     * @csdn https://blog.csdn.net/qq_40788718
     * @date 2020/7/2 23:43
     */
    @Documented
    @Target(ElementType.FIELD)
    @Inherited
    @Retention(RetentionPolicy.RUNTIME)
    public @interface ImpalaColumn {

        /** Database column name this field is populated from; empty means unmapped. */
        String name() default "";
    }

    注解的作用是将JDBC查出的数据的列和属性一一对应,注意name中的字段名要和数据库中的字段名相同。目的是为了将ResultSet中的数据解析成对象。

    3. 实体类

    /**
     * Row object for the traffic-map UV query.
     *
     * <p>Fields are populated reflectively by {@code HiveJDBC} through the
     * {@code @ImpalaColumn} mapping (field access with {@code setAccessible(true)}),
     * so no setters are required for loading. The original class exposed no
     * accessors at all, leaving the loaded data unreadable by callers; getters
     * and a {@code toString()} are added here (backward compatible).
     */
    public class TrafficMapUv {

        @ImpalaColumn(name = "uv")
        private Integer uv;

        @ImpalaColumn(name = "step")
        private Integer step;

        @ImpalaColumn(name = "pv")
        private Integer pv;

        @ImpalaColumn(name = "url")
        private String url;

        @ImpalaColumn(name = "refer_url")
        private String referUrl;

        public Integer getUv() {
            return uv;
        }

        public Integer getStep() {
            return step;
        }

        public Integer getPv() {
            return pv;
        }

        public String getUrl() {
            return url;
        }

        public String getReferUrl() {
            return referUrl;
        }

        @Override
        public String toString() {
            return "TrafficMapUv{uv=" + uv + ", step=" + step + ", pv=" + pv
                    + ", url=" + url + ", referUrl=" + referUrl + "}";
        }
    }

    4. JDBC工具类

    我使用Hive2时只需要查询数据,因此这个工具类只实现了查询功能。

    import org.apache.hive.jdbc.HiveStatement; import java.lang.reflect.Field; import java.sql.*; import java.util.*; /** * @author gyh * @csdn https://blog.csdn.net/qq_40788718 * @date 2020/7/2 21:01 */ public class HiveJDBC { private static final String driverName = "org.apache.hive.jdbc.HiveDriver" ; private static final String url = "jdbc:hive2://10.0.3.84:21050/pwd;auth=noSasl" ; private static final String username = "dev" ; private static final String password = "dev" ; private static Connection con = null ; private static Statement state = null ; private static HiveStatement hiveStatement = null ; private static ResultSet resultSet = null ; private static PreparedStatement pdst = null ; static{ try { Class.forName(driverName) ; } catch (ClassNotFoundException e) { e.printStackTrace(); } } public static <T> List<T> selectSql(String sql , Class<T> clazz) throws Exception { // 创建链接 Properties properties = new Properties() ; properties.setProperty("user" , "dev") ; properties.setProperty("password" , "dev") ; con = (Connection) DriverManager.getConnection(url, properties); //执行查询 hiveStatement = (HiveStatement) con.createStatement(); resultSet = hiveStatement.executeQuery(sql); List<T> list = processRS(resultSet , clazz); if (con != null) { con.close();//关闭连接 } return list ; } private static <T> List<T> processRS(ResultSet resultSet , Class<T> clazz) throws SQLException, IllegalAccessException, InstantiationException { Map<String,Field> colToField = getColToFieldMap(clazz) ; String[] columnNames = getColumnNames(resultSet) ; List<T> list = new ArrayList<>() ; String key ; Field value ; int size = columnNames.length ; while(resultSet.next()){ T t = clazz.newInstance() ; for (int i=0 ; i<size ; i++){ value = colToField.get(key = columnNames[i]) ; if (value == null){ continue ; } if (value.getType() == Integer.class){ value.set(t , resultSet.getInt(key)); }else if (value.getType() == String.class){ value.set(t , resultSet.getString(key)); }else if (value.getType() 
== Long.class){ value.set(t , resultSet.getLong(key)); }else if (value.getType() == Float.class){ value.set(t , resultSet.getFloat(key)); } } list.add(t) ; } return list ; } private static String[] getColumnNames(ResultSet resultSet) throws SQLException { ResultSetMetaData rsmd = resultSet.getMetaData() ; int count = rsmd.getColumnCount() ; String[] columnNames = new String[count] ; for (int i=0 ; i<count ; i++){ columnNames[i] = rsmd.getColumnName(i+1) ; } return columnNames ; } private static <T> Map<String,Field> getColToFieldMap(Class<T> clazz){ Field[] fields = clazz.getDeclaredFields() ; //列名 -> 属性对象 Map<String , Field> colToField = new HashMap<>() ; for (int i=0 ; i<fields.length ; i++){ if (fields[i].isAnnotationPresent(ImpalaColumn.class)){ ImpalaColumn ic = fields[i].getAnnotation(ImpalaColumn.class) ; fields[i].setAccessible(true); colToField.put(ic.name() , fields[i]) ; } } return colToField ; } }

    5. SQL语句

    /**
     * SQL string builders for the demo queries.
     *
     * <p>Bug fix: the original concatenated an undeclared variable {@code param}
     * (a compile error) — the intended parameter is {@code param1}. The stray
     * {@code package impalajdbc;} declaration is also dropped for consistency
     * with the other snippets in this article, which are all package-less and
     * reference each other without imports.
     */
    public class HiveSQL {

        /**
         * Builds the traffic-map UV query for one id over an inclusive date range.
         *
         * <p>WARNING: parameters are concatenated directly into the SQL text,
         * which is vulnerable to SQL injection if the values come from untrusted
         * input — prefer a PreparedStatement with {@code ?} placeholders.
         *
         * @param param1 row id to filter on
         * @param param2 start date (inclusive), e.g. "2020-01-01"
         * @param param3 end date (inclusive)
         * @return the assembled SELECT statement
         */
        public static String selectAllTrafficMapUvs(int param1, String param2, String param3) {
            String sql = "select * from table where id = " + param1
                    + " and date >= '" + param2 + "' and date <= '" + param3 + "' ;";
            return sql;
        }
    }

    这个只是做个示范。

    Processed: 0.016, SQL: 8