From 9bd537eb9e2a416ffe46c4d93ef8719f33d89aa7 Mon Sep 17 00:00:00 2001
From: louzin <294098546@qq.com>
Date: Sat, 13 May 2023 09:11:18 +0800
Subject: [PATCH] HiveJDBCUpgrade

---
 .idea/dataSources.xml                                | 14 +++-
 .../main/java/com/louzin/hdfsapi/util/hdfsapi.java   | 60 ++++++++++++++++
 .../java/com/louzin/hiveapi/HiveConnecter.java       | 82 ++++++++++++++++++++++
 .../src/main/java/com/louzin/hiveapi/test.java       | 18 +++++
 .../com/louzin/hiveapi/util/jdbcHiveConnect.java     |  4 +-
 .../com/louzin/webserver/WebServerApplication.java   |  8 +++
 .../louzin/webserver/controller/mycontroller.java    | 24 +++++++
 .../java/com/louzin/webserver/dao/Daotest.java       |  9 +++
 .../java/com/louzin/webserver/dao/hiveDao.java       | 31 ++++++++
 9 files changed, 245 insertions(+), 5 deletions(-)
 create mode 100644 louzin_hdfsapi/src/main/java/com/louzin/hdfsapi/util/hdfsapi.java
 create mode 100644 louzin_hdfsapi/src/main/java/com/louzin/hiveapi/HiveConnecter.java
 create mode 100644 louzin_hdfsapi/src/main/java/com/louzin/hiveapi/test.java
 create mode 100644 webServer/src/main/java/com/louzin/webserver/dao/Daotest.java
 create mode 100644 webServer/src/main/java/com/louzin/webserver/dao/hiveDao.java

diff --git a/.idea/dataSources.xml b/.idea/dataSources.xml
index c7e6f1c..d462c98 100644
--- a/.idea/dataSources.xml
+++ b/.idea/dataSources.xml
@@ -1,13 +1,21 @@
 <?xml version="1.0" encoding="UTF-8"?>
 <project version="4">
-  <component name="DataSourceManagerImpl" format="xml" multifile-model="true">
+  <component name="DataSourceManagerImpl" format="xml" multifile-model="true">
+    <data-source source="LOCAL" name="hive">
+      <synchronize>true</synchronize>
+      <jdbc-driver>org.apache.hive.jdbc.HiveDriver</jdbc-driver>
+      <jdbc-url>jdbc:hive2://local1:10000/datacenter</jdbc-url>
+      <working-dir>$ProjectFileDir$</working-dir>
+    </data-source>
     <data-source source="LOCAL" name="Hive Metastore" uuid="d9b9cd9b-21ff-42d8-88e4-caad8ce26dbb">
       <synchronize>true</synchronize>
       <comment>Wrapper For Hive Metastore from Big Data Tools plugin</comment>
       <jdbc-driver>com.dbschema.ZeppelinSparkJdbcDriver</jdbc-driver>
-      <jdbc-url>Hive Metastore@HiveMetastoreConnections@7622557893670318544</jdbc-url>
+      <jdbc-url>Hive Metastore@HiveMetastoreConnections@2804852028619532268</jdbc-url>
       <working-dir>$ProjectFileDir$</working-dir>
     </data-source>
   </component>
 </project>
diff --git a/louzin_hdfsapi/src/main/java/com/louzin/hdfsapi/util/hdfsapi.java b/louzin_hdfsapi/src/main/java/com/louzin/hdfsapi/util/hdfsapi.java
new file mode 100644
index 0000000..f15fdde
--- /dev/null
+++ b/louzin_hdfsapi/src/main/java/com/louzin/hdfsapi/util/hdfsapi.java
@@ -0,0 +1,60 @@
+package com.louzin.hdfsapi.util;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.*;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+
+public class hdfsapi {
+    static FileSystem hdfs=null;
+    static{
+        // Build a configuration object and set the URI of the HDFS to access
+        Configuration conf = new Configuration();
+        // Use HDFS as the default file system
+        conf.set("fs.defaultFS", "hdfs://local1:8020");
+        // Set the client identity (root is the VM user name; any user of a Hadoop cluster node works)
+        System.setProperty("HADOOP_USER_NAME", "root");
+        // Obtain the HDFS client object via the static FileSystem.get() method
+        try {
+            hdfs = FileSystem.get(conf);
+        } catch (IOException e) {
+            System.out.println("Failed to instantiate the FileSystem!");
+        }
+    }
+
+    @Before
+    public void init() throws IOException {
+        // Build a configuration object and set the URI of the HDFS to access
+        Configuration conf = new Configuration();
+        // Use HDFS as the default file system
+        conf.set("fs.defaultFS", "hdfs://local1:8020");
+        // Set the client identity (root is the VM user name; any user of a Hadoop cluster node works)
+        System.setProperty("HADOOP_USER_NAME", "root");
+        // Obtain the HDFS client object via the static FileSystem.get() method
+        hdfs = FileSystem.get(conf);
+    }
+    // Scan the paths where the collected logs are stored
+    @Test
+    public List scan() throws IOException {
+        List lists=new ArrayList();
+        Path path=new Path("/flume/events/");
+        FileStatus[] fileStatuses=hdfs.listStatus(path);// daily folders under events
+        for (FileStatus fs:fileStatuses) {
+            String[] str=String.valueOf(fs.getPath()).split("8020");
+            lists.add(str[1]);
+        }
+        close();
+        return lists;
+    }
+    @After
+    public void close() throws IOException {
+        hdfs.close();
+    }
+}
+
+
diff --git a/louzin_hdfsapi/src/main/java/com/louzin/hiveapi/HiveConnecter.java b/louzin_hdfsapi/src/main/java/com/louzin/hiveapi/HiveConnecter.java
new file mode 100644
index 0000000..0f4cbeb
--- /dev/null
+++ b/louzin_hdfsapi/src/main/java/com/louzin/hiveapi/HiveConnecter.java
@@ -0,0 +1,82 @@
+package com.louzin.hiveapi;
+
+import java.sql.*;
+
+public class HiveConnecter {
+    private static String driverName = "org.apache.hive.jdbc.HiveDriver";
+    private static String url = "jdbc:hive2://local1:10000/datacenter";
+    private static String user = "root";
+    private static String password = "";
+    private static Connection conn = null;
+    private static Statement stmt = null;
+    private static ResultSet rs = null;
+    private static HiveConnecter instance;
+    // Private constructor
+    private HiveConnecter() throws ClassNotFoundException {
+        Class.forName(driverName);
+
+        System.out.println("HiveConnecter Driver Register Success");
+    }
+    // Build the database access object as a singleton
+    public static HiveConnecter getInstance() throws SQLException, ClassNotFoundException {
+        if(instance==null){
+            instance=new HiveConnecter();
+        }
+        return instance;
+    }
+    // Obtain a new connection
+    public Connection getConn() throws SQLException {
+        conn = DriverManager.getConnection(url,user,password);
+        return conn;
+    }
+    /**
+     * Executes a static SQL statement.
+     * @param sql
+     * @return the number of rows affected in the database
+     * @throws SQLException
+     */
+    public int executeSQLByStatement(String sql) throws SQLException{
+        stmt=conn.createStatement();
+        return stmt.executeUpdate(sql);
+    }
+    /**
+     * Executes a DML statement such as DELETE, INSERT or UPDATE,
+     * e.g. insert into student()
+     * @param sql
+     * @param columnName column names
+     * @return
+     * @throws SQLException
+     */
+    public int executeSQLBYStatement(String sql,String[] columnName)throws SQLException{
+        stmt =conn.createStatement();
+        return stmt.executeUpdate(sql,columnName);
+    }
+    /**
+     * Runs a query; the result may contain one or more rows.
+     * Underlying call:
+     *     ResultSet executeQuery(String sql)
+     * @param sql
+     * @return
+     * @throws SQLException
+     */
+    public ResultSet executeQueryBYStatement(String sql)throws SQLException{
+        stmt =conn.createStatement();
+        return stmt.executeQuery(sql);
+    }
+    /**
+     * Closes the resources in one place to reduce duplicated code.
+     */
+    public void closeAll(){
+        try {
+            if(stmt!=null) {
+                stmt.close();
+            }
+            if(conn!=null){
+                conn.close();
+            }
+            System.out.println("HiveConnecter Close");
+        } catch (SQLException e) {
+            e.printStackTrace();
+        }
+    }
+}
diff --git a/louzin_hdfsapi/src/main/java/com/louzin/hiveapi/test.java b/louzin_hdfsapi/src/main/java/com/louzin/hiveapi/test.java
new file mode 100644
index 0000000..6760989
--- /dev/null
+++ b/louzin_hdfsapi/src/main/java/com/louzin/hiveapi/test.java
@@ -0,0 +1,18 @@
+package com.louzin.hiveapi;
+
+import java.sql.ResultSet;
+import java.sql.SQLException;
+
+public class test {
+    private static HiveConnecter hiveConnecter;
+    public static void main(String[] args) throws SQLException, ClassNotFoundException {
+        hiveConnecter=HiveConnecter.getInstance();
+        hiveConnecter.getConn();
+        String sql="show tables";
+        ResultSet rs = hiveConnecter.executeQueryBYStatement(sql);
+        while (rs.next()){
+            System.out.println(rs.getString(1));
+        }
+        hiveConnecter.closeAll();
+    }
+}
diff --git a/louzin_hdfsapi/src/main/java/com/louzin/hiveapi/util/jdbcHiveConnect.java b/louzin_hdfsapi/src/main/java/com/louzin/hiveapi/util/jdbcHiveConnect.java
index f353054..3513848 100644
--- a/louzin_hdfsapi/src/main/java/com/louzin/hiveapi/util/jdbcHiveConnect.java
+++ b/louzin_hdfsapi/src/main/java/com/louzin/hiveapi/util/jdbcHiveConnect.java
@@ -26,7 +26,7 @@ public class jdbcHiveConnect {
         stmt = conn.createStatement();
     }
     @Test
-    public void showAllTable() throws SQLException, ClassNotFoundException {
+    public List showAllTable() throws SQLException, ClassNotFoundException {
         Class.forName(driverName);
         conn = DriverManager.getConnection(url,user,password);
         stmt = conn.createStatement();
@@ -46,7 +46,7 @@ public class jdbcHiveConnect {
         if (conn != null) {
             conn.close();
         }
-//        return lists;
+        return lists;
     }
     @Test
     public void selectData_all_true() throws SQLException {
diff --git a/webServer/src/main/java/com/louzin/webserver/WebServerApplication.java b/webServer/src/main/java/com/louzin/webserver/WebServerApplication.java
index 69a0456..23144b3 100644
--- a/webServer/src/main/java/com/louzin/webserver/WebServerApplication.java
+++ b/webServer/src/main/java/com/louzin/webserver/WebServerApplication.java
@@ -1,5 +1,6 @@
 package com.louzin.webserver;
 
+import com.louzin.webserver.dao.hiveDao;
 import org.springframework.boot.SpringApplication;
 import org.springframework.boot.autoconfigure.SpringBootApplication;
 import org.springframework.stereotype.Controller;
@@ -8,6 +9,7 @@ import org.springframework.web.bind.annotation.PostMapping;
 import org.springframework.web.bind.annotation.ResponseBody;
 import com.louzin.hiveapi.util.jdbcHiveConnect;
 
+import java.sql.SQLException;
 import java.util.List;
 
 @SpringBootApplication
@@ -28,4 +30,10 @@ public class WebServerApplication {
     public List sauql() throws Exception{
         return new jdbcHiveConnect().selectData_day_pv();
     }
+    @PostMapping("/getalltb")
+    @ResponseBody
+    public List getAlltb() throws SQLException, ClassNotFoundException {
+        return new hiveDao().getAllTable();
+    }
+
 }
diff --git a/webServer/src/main/java/com/louzin/webserver/controller/mycontroller.java b/webServer/src/main/java/com/louzin/webserver/controller/mycontroller.java
index ac23276..67827d5 100644
--- a/webServer/src/main/java/com/louzin/webserver/controller/mycontroller.java
+++ b/webServer/src/main/java/com/louzin/webserver/controller/mycontroller.java
@@ -1,3 +1,27 @@
 package com.louzin.webserver.controller;
+
+import org.springframework.stereotype.Controller;
+import org.springframework.web.bind.annotation.*;
+import com.louzin.hiveapi.util.jdbcHiveConnect;
+import com.louzin.hdfsapi.util.hdfsapi;
+import java.sql.SQLException;
+import java.util.List;
+
+@Controller
+@ResponseBody
+@RequestMapping("/dev_api")
+@CrossOrigin
 public class mycontroller {
+    @GetMapping("/test")
+    public String testMethod(){
+        return "local:/dev_api/test";
+    }
+    @PostMapping("/getAllTable")
+    public List saupl() throws SQLException, ClassNotFoundException {
+        return new jdbcHiveConnect().showAllTable();
+    }
+    @PostMapping("/scan")
+    public List scanFolder() throws Exception{
+        return new hdfsapi().scan();
+    }
 }
diff --git a/webServer/src/main/java/com/louzin/webserver/dao/Daotest.java b/webServer/src/main/java/com/louzin/webserver/dao/Daotest.java
new file mode 100644
index 0000000..2445d70
--- /dev/null
+++ b/webServer/src/main/java/com/louzin/webserver/dao/Daotest.java
@@ -0,0 +1,9 @@
+package com.louzin.webserver.dao;
+
+import java.sql.SQLException;
+
+public class Daotest {
+    public static void main(String[] args) throws SQLException, ClassNotFoundException {
+        System.out.println(new hiveDao().getAllTable());
+    }
+}
diff --git a/webServer/src/main/java/com/louzin/webserver/dao/hiveDao.java b/webServer/src/main/java/com/louzin/webserver/dao/hiveDao.java
new file mode 100644
index 0000000..e40c1aa
--- /dev/null
+++ b/webServer/src/main/java/com/louzin/webserver/dao/hiveDao.java
@@ -0,0 +1,31 @@
+package com.louzin.webserver.dao;
+import com.louzin.hiveapi.HiveConnecter;
+
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.util.ArrayList;
+import java.util.List;
+
+public class hiveDao {
+    private HiveConnecter hiveConnecter;
+    public hiveDao() throws SQLException, ClassNotFoundException {
+        hiveConnecter=HiveConnecter.getInstance();
+    }
+    public List getAllTable(){
+        String sql="show tables";
+        List tblist=new ArrayList();
+        try {
+            hiveConnecter.getConn();
+            ResultSet rs = hiveConnecter.executeQueryBYStatement(sql);
+            while (rs.next()){
+                tblist.add(rs.getString(1));
+            }
+        }catch (Exception e){
+            System.out.println(e);
+        }
+        finally {
+            hiveConnecter.closeAll();
+        }
+        return tblist;
+    }
+}
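
Usage sketch: a minimal example of how the Hive JDBC classes added above are meant to be driven end to end, assuming HiveServer2 is reachable at jdbc:hive2://local1:10000/datacenter with user root and an empty password, exactly as hard-coded in HiveConnecter. The class name QueryTablesSketch is hypothetical and exists only for illustration; it mirrors test.java and Daotest but closes resources in a finally block.

package com.louzin.hiveapi;

import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;

// Hypothetical example class, not part of the patch.
public class QueryTablesSketch {
    public static void main(String[] args) throws SQLException, ClassNotFoundException {
        // getInstance() registers org.apache.hive.jdbc.HiveDriver once (singleton).
        HiveConnecter connecter = HiveConnecter.getInstance();
        // Open a connection to the datacenter database (assumed reachable).
        connecter.getConn();
        List<String> tables = new ArrayList<String>();
        try {
            // "show tables" lists the tables visible in the datacenter database.
            ResultSet rs = connecter.executeQueryBYStatement("show tables");
            while (rs.next()) {
                tables.add(rs.getString(1));
            }
        } finally {
            // Close the shared Statement and Connection held by the singleton.
            connecter.closeAll();
        }
        System.out.println(tables);
    }
}

Once the webServer module is running, the same table list is exposed over HTTP via POST /getalltb (WebServerApplication, backed by hiveDao) and POST /dev_api/getAllTable (mycontroller, backed by jdbcHiveConnect).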