Browse Source

HiveJDBCUpgrade

louzin 1 year ago
parent
commit
9bd537eb9e
  1. 14
      .idea/dataSources.xml
  2. 60
      louzin_hdfsapi/src/main/java/com/louzin/hdfsapi/util/hdfsapi.java
  3. 82
      louzin_hdfsapi/src/main/java/com/louzin/hiveapi/HiveConnecter.java
  4. 18
      louzin_hdfsapi/src/main/java/com/louzin/hiveapi/test.java
  5. 4
      louzin_hdfsapi/src/main/java/com/louzin/hiveapi/util/jdbcHiveConnect.java
  6. 8
      webServer/src/main/java/com/louzin/webserver/WebServerApplication.java
  7. 24
      webServer/src/main/java/com/louzin/webserver/controller/mycontroller.java
  8. 9
      webServer/src/main/java/com/louzin/webserver/dao/Daotest.java
  9. 31
      webServer/src/main/java/com/louzin/webserver/dao/hiveDao.java

14
.idea/dataSources.xml

@ -1,13 +1,21 @@
<?xml version="1.0" encoding="UTF-8"?> <?xml version="1.0" encoding="UTF-8"?>
<project version="4"> <project version="4">
<component name="DataSourceManagerImpl" format="xml" multifile-model="true"> <component name="DataSourceManagerImpl" format="xml" multifile-model="true">
<data-source source="LOCAL" name="Hive Metastore" read-only="true" uuid="90afea9b-2faf-43db-99e1-96c529675a48"> <data-source source="LOCAL" name="datacenter@local1" uuid="b63f6c6b-9ddc-414b-acb8-6e03cfd211ac">
<driver-ref>hive</driver-ref>
<synchronize>true</synchronize>
<jdbc-driver>org.apache.hive.jdbc.HiveDriver</jdbc-driver>
<jdbc-url>jdbc:hive2://local1:10000/datacenter</jdbc-url>
<working-dir>$ProjectFileDir$</working-dir>
</data-source>
<data-source source="LOCAL" name="Hive Metastore" read-only="true" uuid="22211736-eec1-4b8f-92a6-cd9fac0d376f">
<driver-ref>d9b9cd9b-21ff-42d8-88e4-caad8ce26dbb</driver-ref>
<synchronize>true</synchronize> <synchronize>true</synchronize>
<remarks>Wrapper For Hive Metastore from Big Data Tools plugin</remarks> <remarks>Wrapper For Hive Metastore from Big Data Tools plugin</remarks>
<jdbc-driver>com.dbschema.ZeppelinSparkJdbcDriver</jdbc-driver> <jdbc-driver>com.dbschema.ZeppelinSparkJdbcDriver</jdbc-driver>
<jdbc-url>Hive Metastore@HiveMetastoreConnections@7622557893670318544</jdbc-url> <jdbc-url>Hive Metastore@HiveMetastoreConnections@2804852028619532268</jdbc-url>
<jdbc-additional-properties> <jdbc-additional-properties>
<property name="connectionId" value="Hive Metastore@HiveMetastoreConnections@7622557893670318544" /> <property name="connectionId" value="Hive Metastore@HiveMetastoreConnections@2804852028619532268" />
</jdbc-additional-properties> </jdbc-additional-properties>
<working-dir>$ProjectFileDir$</working-dir> <working-dir>$ProjectFileDir$</working-dir>
</data-source> </data-source>

60
louzin_hdfsapi/src/main/java/com/louzin/hdfsapi/util/hdfsapi.java

@ -0,0 +1,60 @@
package com.louzin.hdfsapi.util;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.*;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
public class hdfsapi {
    // Shared HDFS client handle. Initialized once by the static block so that
    // non-JUnit callers (e.g. the web controller doing `new hdfsapi().scan()`)
    // get a working connection without the @Before lifecycle running.
    static FileSystem hdfs = null;

    static {
        // Configuration object carrying the URI of the HDFS NameNode to contact.
        Configuration conf = new Configuration();
        // Access the cluster through the HDFS protocol.
        conf.set("fs.defaultFS", "hdfs://local1:8020");
        // Act as user "root" (any user valid on the Hadoop cluster nodes works).
        System.setProperty("HADOOP_USER_NAME", "root");
        // Obtain the HDFS client object via FileSystem's static get() method.
        try {
            hdfs = FileSystem.get(conf);
        } catch (IOException e) {
            System.out.println("实例化失败!");
        }
    }

    /**
     * JUnit lifecycle hook: (re)creates the client before each test so tests
     * still work after a previous test's @After hook closed the connection.
     */
    @Before
    public void init() throws IOException {
        Configuration conf = new Configuration();
        conf.set("fs.defaultFS", "hdfs://local1:8020");
        System.setProperty("HADOOP_USER_NAME", "root");
        hdfs = FileSystem.get(conf);
    }

    /**
     * Scans the Flume collection root and returns the HDFS path of every
     * per-day directory under {@code /flume/events/}.
     *
     * Fixes over the previous version:
     * <ul>
     *   <li>removed the invalid JUnit {@code @Test} annotation — JUnit 4 test
     *       methods must return {@code void}, and this method is called as a
     *       regular API method by the web layer;</li>
     *   <li>no longer closes the shared static {@link FileSystem}, so repeated
     *       calls (e.g. successive HTTP requests) keep working;</li>
     *   <li>derives the path via {@code Path#toUri().getPath()} instead of
     *       splitting the URI string on the port number "8020", which broke
     *       for any other port or a path containing that digit sequence.</li>
     * </ul>
     *
     * @return absolute HDFS paths such as {@code /flume/events/2023-01-01}
     * @throws IOException if listing the directory fails
     */
    public List<String> scan() throws IOException {
        List<String> lists = new ArrayList<>();
        Path path = new Path("/flume/events/");
        FileStatus[] fileStatuses = hdfs.listStatus(path); // one entry per daily folder under events/
        for (FileStatus fs : fileStatuses) {
            lists.add(fs.getPath().toUri().getPath());
        }
        return lists;
    }

    /** JUnit lifecycle hook: releases the client after each test. */
    @After
    public void close() throws IOException {
        hdfs.close();
    }
}

82
louzin_hdfsapi/src/main/java/com/louzin/hiveapi/HiveConnecter.java

@ -0,0 +1,82 @@
package com.louzin.hiveapi;
import java.sql.*;
public class HiveConnecter {
    private static String driverName = "org.apache.hive.jdbc.HiveDriver";
    private static String url = "jdbc:hive2://local1:10000/datacenter";
    private static String user = "root";
    private static String password = "";
    private static Connection conn = null;
    private static Statement stmt = null;
    private static ResultSet rs = null;
    private static HiveConnecter instance;

    // Constructor is private (singleton); registers the Hive JDBC driver once.
    private HiveConnecter() throws ClassNotFoundException {
        Class.forName(driverName);
        System.out.println("HiveConnecter Driver Register Success");
    }

    /**
     * Returns the singleton database-access object.
     * Synchronized to fix the previous unsafe lazy initialization: two
     * concurrent web requests could each have created an instance.
     *
     * @throws ClassNotFoundException if the Hive JDBC driver is not on the classpath
     */
    public static synchronized HiveConnecter getInstance() throws SQLException, ClassNotFoundException {
        if (instance == null) {
            instance = new HiveConnecter();
        }
        return instance;
    }

    /**
     * Opens (and caches) a new connection to the Hive server.
     *
     * @return the freshly opened {@link Connection}
     * @throws SQLException if the connection cannot be established
     */
    public Connection getConn() throws SQLException {
        conn = DriverManager.getConnection(url, user, password);
        return conn;
    }

    /**
     * Executes a static DML/DDL SQL statement.
     *
     * @param sql the statement to run
     * @return the number of rows affected
     * @throws SQLException on execution failure
     */
    public int executeSQLByStatement(String sql) throws SQLException {
        replaceStatement(); // avoid leaking a previously opened Statement
        return stmt.executeUpdate(sql);
    }

    /**
     * Executes a DML statement (insert/update/delete) returning generated keys
     * for the given columns.
     *
     * @param sql        the statement to run
     * @param columnName columns whose auto-generated values should be returned
     * @return the number of rows affected
     * @throws SQLException on execution failure
     */
    public int executeSQLBYStatement(String sql, String[] columnName) throws SQLException {
        replaceStatement();
        return stmt.executeUpdate(sql, columnName);
    }

    /**
     * Executes a query that may return one or more rows.
     *
     * @param sql the query to run
     * @return the query's {@link ResultSet} (caller should iterate it before
     *         invoking {@link #closeAll()})
     * @throws SQLException on execution failure
     */
    public ResultSet executeQueryBYStatement(String sql) throws SQLException {
        replaceStatement();
        return stmt.executeQuery(sql);
    }

    // Closes any previously cached Statement before creating a new one, so
    // repeated execute* calls do not leak server-side resources.
    private void replaceStatement() throws SQLException {
        if (stmt != null) {
            try {
                stmt.close();
            } catch (SQLException ignored) {
                // best-effort cleanup; a failed close must not block the new statement
            }
        }
        stmt = conn.createStatement();
    }

    /**
     * Closes the cached ResultSet, Statement and Connection, swallowing close
     * errors, to cut down on repeated cleanup code at call sites.
     */
    public void closeAll() {
        try {
            if (rs != null) {
                rs.close();
            }
            if (stmt != null) {
                stmt.close();
            }
            if (conn != null) {
                conn.close();
            }
            System.out.println("HiveConnecter Close");
        } catch (SQLException e) {
            e.printStackTrace();
        }
    }
}

18
louzin_hdfsapi/src/main/java/com/louzin/hiveapi/test.java

@ -0,0 +1,18 @@
package com.louzin.hiveapi;
import java.sql.ResultSet;
import java.sql.SQLException;
public class test {
    private static HiveConnecter hiveConnecter;

    /** Manual smoke test: connects to Hive and prints every table name. */
    public static void main(String[] args) throws SQLException, ClassNotFoundException {
        hiveConnecter = HiveConnecter.getInstance();
        hiveConnecter.getConn();
        ResultSet tables = hiveConnecter.executeQueryBYStatement("show tables");
        while (tables.next()) {
            System.out.println(tables.getString(1));
        }
        hiveConnecter.closeAll();
    }
}

4
louzin_hdfsapi/src/main/java/com/louzin/hiveapi/util/jdbcHiveConnect.java

@ -26,7 +26,7 @@ public class jdbcHiveConnect {
stmt = conn.createStatement(); stmt = conn.createStatement();
} }
@Test @Test
public void showAllTable() throws SQLException, ClassNotFoundException { public List<String> showAllTable() throws SQLException, ClassNotFoundException {
Class.forName(driverName); Class.forName(driverName);
conn = DriverManager.getConnection(url,user,password); conn = DriverManager.getConnection(url,user,password);
stmt = conn.createStatement(); stmt = conn.createStatement();
@ -46,7 +46,7 @@ public class jdbcHiveConnect {
if (conn != null) { if (conn != null) {
conn.close(); conn.close();
} }
// return lists; return lists;
} }
@Test @Test
public void selectData_all_true() throws SQLException { public void selectData_all_true() throws SQLException {

8
webServer/src/main/java/com/louzin/webserver/WebServerApplication.java

@ -1,5 +1,6 @@
package com.louzin.webserver; package com.louzin.webserver;
import com.louzin.webserver.dao.hiveDao;
import org.springframework.boot.SpringApplication; import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication; import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.stereotype.Controller; import org.springframework.stereotype.Controller;
@ -8,6 +9,7 @@ import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.ResponseBody; import org.springframework.web.bind.annotation.ResponseBody;
import com.louzin.hiveapi.util.jdbcHiveConnect; import com.louzin.hiveapi.util.jdbcHiveConnect;
import java.sql.SQLException;
import java.util.List; import java.util.List;
@SpringBootApplication @SpringBootApplication
@ -28,4 +30,10 @@ public class WebServerApplication {
public List sauql() throws Exception{ public List sauql() throws Exception{
return new jdbcHiveConnect().selectData_day_pv(); return new jdbcHiveConnect().selectData_day_pv();
} }
@PostMapping("/getalltb")
@ResponseBody
public List<String> getAlltb() throws SQLException, ClassNotFoundException {
return new hiveDao().getAllTable();
}
} }

24
webServer/src/main/java/com/louzin/webserver/controller/mycontroller.java

@ -1,3 +1,27 @@
package com.louzin.webserver.controller; package com.louzin.webserver.controller;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.*;
import com.louzin.hiveapi.util.jdbcHiveConnect;
import com.louzin.hdfsapi.util.hdfsapi;
import java.sql.SQLException;
import java.util.List;
@Controller
@ResponseBody
@RequestMapping("/dev_api")
@CrossOrigin
public class mycontroller { public class mycontroller {
@GetMapping("/test")
public String testMethod(){
return "local:/dev_api/test";
}
@PostMapping("/getAllTable")
public List<String> saupl() throws SQLException, ClassNotFoundException {
return new jdbcHiveConnect().showAllTable();
}
@PostMapping("/scan")
public List<String> scanFolder() throws Exception{
return new hdfsapi().scan();
}
} }

9
webServer/src/main/java/com/louzin/webserver/dao/Daotest.java

@ -0,0 +1,9 @@
package com.louzin.webserver.dao;
import java.sql.SQLException;
public class Daotest {
    /** Manual check: prints the list of all Hive tables via the DAO. */
    public static void main(String[] args) throws SQLException, ClassNotFoundException {
        hiveDao dao = new hiveDao();
        System.out.println(dao.getAllTable());
    }
}

31
webServer/src/main/java/com/louzin/webserver/dao/hiveDao.java

@ -0,0 +1,31 @@
package com.louzin.webserver.dao;
import com.louzin.hiveapi.HiveConnecter;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;
public class hiveDao {
    private HiveConnecter hiveConnecter;

    // Obtains the shared connector singleton; may fail if the Hive JDBC
    // driver is missing from the classpath.
    public hiveDao() throws SQLException, ClassNotFoundException {
        hiveConnecter = HiveConnecter.getInstance();
    }

    /**
     * Returns the names of all tables in the current Hive database.
     *
     * Best-effort: on any failure the error is reported (full stack trace,
     * matching HiveConnecter's error style, instead of the previous
     * {@code System.out.println(e)} which lost the trace) and whatever was
     * gathered so far — possibly an empty list — is returned. The ResultSet
     * is now closed explicitly; previously it leaked.
     *
     * @return table names, never {@code null}
     */
    public List<String> getAllTable() {
        String sql = "show tables";
        List<String> tblist = new ArrayList<>();
        ResultSet rs = null;
        try {
            hiveConnecter.getConn();
            rs = hiveConnecter.executeQueryBYStatement(sql);
            while (rs.next()) {
                tblist.add(rs.getString(1));
            }
        } catch (Exception e) {
            e.printStackTrace();
        } finally {
            if (rs != null) {
                try {
                    rs.close();
                } catch (SQLException ignored) {
                    // best-effort cleanup; closeAll below still runs
                }
            }
            hiveConnecter.closeAll();
        }
        return tblist;
    }
}
Loading…
Cancel
Save