diff --git a/openhdh/src/main/java/com/louzin/openhdh/OpenhdhApplication.java b/openhdh/src/main/java/com/louzin/openhdh/OpenhdhApplication.java
index 27386c6..6ac82f0 100644
--- a/openhdh/src/main/java/com/louzin/openhdh/OpenhdhApplication.java
+++ b/openhdh/src/main/java/com/louzin/openhdh/OpenhdhApplication.java
@@ -26,5 +26,11 @@ public class OpenhdhApplication {
         List lists=hapi.scan();
         return lists;
     }
-
+    @GetMapping("/getresulthivepath")
+    @ResponseBody
+    public List getresultHivePath() throws IOException {
+        hdfsapi hapi = new hdfsapi();
+        List lists=hapi.scanresult();
+        return lists;
+    }
 }
diff --git a/openhdh/src/main/java/com/louzin/openhdh/controller/ParserController.java b/openhdh/src/main/java/com/louzin/openhdh/controller/ParserController.java
new file mode 100644
index 0000000..40e4156
--- /dev/null
+++ b/openhdh/src/main/java/com/louzin/openhdh/controller/ParserController.java
@@ -0,0 +1,316 @@
+package com.louzin.openhdh.controller;
+
+import com.louzin.openhdhapi.utils.HiveConnecter;
+import org.springframework.stereotype.Controller;
+import org.springframework.web.bind.annotation.*;
+import com.louzin.openhdhapi.utils.WeblogPreProcess;
+import com.louzin.openhdhapi.utils.hdfsapi;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.util.*;
+
+@Controller
+@ResponseBody
+@CrossOrigin
+public class ParserController {
+    private static HiveConnecter hiveConnecter;
+    // Collect and analyze the log files under the given path; results are written to the /result folder
+    @PostMapping("/parser")
+    public int parserWebLog(@RequestParam("parserpath") String parserpath) {
+        hdfsapi hdfs = new hdfsapi();
+        if (hdfs.ifexistsFolder("/result"+parserpath)){
+            if(hdfs.deleteFolder("/result"+parserpath)){// if the delete succeeds, continue
+                WeblogPreProcess weblogPreProcess=new WeblogPreProcess();
+                try {
+                    weblogPreProcess.runjob(parserpath);
+                    return 0;
+                } catch (Exception e) {
+                    System.out.println("webLogPreProcess runjob Error!");
+                    return 1;
+                }
+            }else{
+                System.out.println("HDFS file permission error!");
+                return 2;
+            }
+        }
+        else {
+            System.out.println("Data has already been filtered!");
+            return 3;
+        }
+    }
+    // Load the analyzed data into the Hive origin table
+    @GetMapping("/loaddatatohive")
+    public boolean loaddatatohive(@RequestParam String loaddatapath){
+        String loadsql="load data inpath '"+loaddatapath+"' into table ods_weblog_origin";
+        try {
+            hiveConnecter=HiveConnecter.getInstance();
+            hiveConnecter.getConn();
+            hiveConnecter.executeSQLByStatement(loadsql);
+            System.out.println("load success!");
+            new hdfsapi().deleteFolder(loaddatapath);
+            return true;
+        } catch (Exception e) {
+            System.out.println(e);
+            return false;
+        }
+    }
+    @GetMapping("/deletefolder")
+    public boolean deletefolder(@RequestParam String deletepath){
+        System.out.println("About to delete "+deletepath);
+        return new hdfsapi().deleteFolder(deletepath);
+    }
+    // Rebuild t_ods_tmp_referurl and t_ods_tmp_detail from the origin table, then overwrite ods_weblog_detail
+    @PostMapping("/hivedbupdate")
+    public boolean hivedbupdate(){
+        // load data into Hive
+        try {
+            hiveConnecter= HiveConnecter.getInstance();
+            hiveConnecter.getConn();
+            String sql1="set hive.exec.dynamic.partition=true";
+            String sql2="set hive.exec.dynamic.partition.mode=nonstrict";
+            String dropt_ods_tmp_referurl="drop table t_ods_tmp_referurl";
+            String dropt_ods_tmp_detail="drop table t_ods_tmp_detail";
+            hiveConnecter.executeSQLByStatement(sql1);
+            hiveConnecter.executeSQLByStatement(sql2);
+            // check whether the two staging tables exist
+            ResultSet rs = hiveConnecter.executeQueryBYStatement("show tables in datacenter like 't_ods_tmp_referurl'");
+            if(rs.next()){
+                System.out.println("Table t_ods_tmp_referurl exists!");
+                hiveConnecter.executeSQLByStatement(dropt_ods_tmp_referurl);
+                System.out.println("referurl dropped successfully");
+            }
+            rs=hiveConnecter.executeQueryBYStatement("show tables in datacenter like 't_ods_tmp_detail'");
+            if(rs.next()){
+                System.out.println("Table t_ods_tmp_detail exists!");
+                hiveConnecter.executeSQLByStatement(dropt_ods_tmp_detail);
+                System.out.println("detail dropped successfully");
+            }
+            // create the intermediate tables
+            String createreferurl="create table t_ods_tmp_referurl as SELECT a.*,b.*\n" +
+                    " FROM ods_weblog_origin a LATERAL VIEW \n" +
+                    " parse_url_tuple(regexp_replace(http_referer, \"\\\"\", \"\"),\n" +
+                    " 'HOST', 'PATH','QUERY', 'QUERY:id') b as host, path, query, query_id";
+            hiveConnecter.executeSQLByStatement(createreferurl);
+            String createdetail="create table t_ods_tmp_detail as select b.*,substring(time_local,0,10) as daystr,\n" +
+                    " substring(time_local,12) as tmstr,\n" +
+                    " substring(time_local,6,2) as month,\n" +
+                    " substring(time_local,9,2) as day,\n" +
+                    " substring(time_local,11,3) as hour\n" +
+                    " from t_ods_tmp_referurl b";
+            hiveConnecter.executeSQLByStatement(createdetail);
+            String insertOverWritedetail="insert overwrite table ods_weblog_detail partition(datestr)\n" +
+                    " select distinct otd.valid,otd.remote_addr,otd.remote_user,\n" +
+                    " otd.time_local,otd.daystr,otd.tmstr,otd.month,otd.day,otd.hour,\n" +
+                    " otr.request,otr.status,otr.body_bytes_sent,\n" +
+                    " otr.http_referer,otr.host,otr.path,\n" +
+                    " otr.query,otr.query_id,otr.http_user_agent,otd.daystr\n" +
+                    " from t_ods_tmp_detail as otd,t_ods_tmp_referurl as otr \n" +
+                    " where otd.remote_addr=otr.remote_addr \n" +
+                    " and otd.time_local=otr.time_local \n" +
+                    " and otd.body_bytes_sent=otr.body_bytes_sent \n" +
+                    " and otd.request=otr.request";
+            hiveConnecter.executeSQLByStatement(insertOverWritedetail);
+            System.out.println("Data loaded into the warehouse, starting analysis");
+            // browser analysis
+            String truncateBrowser="truncate table dw_use_browser";
+            String updateBrowser0="insert into table dw_use_browser select 'Firefox' as name,COUNT(*) as count from ods_weblog_detail WHERE http_user_agent like '%Firefox%'";
+            String updateBrowser1="insert into table dw_use_browser select 'Chrome' as name,COUNT(*) as count from ods_weblog_detail WHERE http_user_agent like '%Chrome%'";
+            String updateBrowser2="insert into table dw_use_browser select 'MobileSafari' as name,COUNT(*) as count from ods_weblog_detail WHERE http_user_agent like '%MobileSafari%'";
+            String updateBrowser3="insert into table dw_use_browser select 'Edge/IE' as name,COUNT(*) as count from ods_weblog_detail WHERE http_user_agent like '%Trident%'";
+            String updateBrowser4="insert into table dw_use_browser select 'Safari' as name,COUNT(*) as count from ods_weblog_detail WHERE http_user_agent like '%Safari%'";
+            hiveConnecter.executeSQLByStatement(truncateBrowser);
+            hiveConnecter.executeSQLByStatement(updateBrowser0);
+            hiveConnecter.executeSQLByStatement(updateBrowser1);
+            hiveConnecter.executeSQLByStatement(updateBrowser2);
+            hiveConnecter.executeSQLByStatement(updateBrowser3);
+            hiveConnecter.executeSQLByStatement(updateBrowser4);
+            // weekly pvs analysis
+            String truncateWeekpvs="truncate table dw_pvs_week";
+            String updateWeekpvs="insert into table dw_pvs_week select datestr,COUNT(*) as count from ods_weblog_detail group by datestr";
+            hiveConnecter.executeSQLByStatement(truncateWeekpvs);
+            hiveConnecter.executeSQLByStatement(updateWeekpvs);
+            // total pvs, top 10 requests
+            String truncateAllpvs="truncate table dw_pvs_all";
+            String updateAllpvs="insert into table dw_pvs_all select request,COUNT(*) as pvs from ods_weblog_detail owd where request !='/' group by request ORDER by pvs desc limit 10";
+            hiveConnecter.executeSQLByStatement(truncateAllpvs);
+            hiveConnecter.executeSQLByStatement(updateAllpvs);
+            // top 5 users by pvs
+            String truncatePvsuser="truncate table dw_pvs_user";
+            String updatePvsuser="insert into table dw_pvs_user select remote_addr ,count(remote_addr) as userpvs from ods_weblog_detail owd group by remote_addr order by userpvs desc limit 5";
+            hiveConnecter.executeSQLByStatement(truncatePvsuser);
+            hiveConnecter.executeSQLByStatement(updatePvsuser);
+            // header counters update
+            String truncateHeader="truncate table ods_weblog_count";
+            String update1="insert into table ods_weblog_count select 'ct1',count(*) from ods_weblog_origin";
+            String update2="insert into table ods_weblog_count select 'ct2',count(*) from ods_weblog_origin where valid='true'";
+            String update3="insert into table ods_weblog_count select 'ct3',count(*) from ods_weblog_origin where valid='false'";
+            String update4="insert into table ods_weblog_count select 'ct4',count(*) from ods_weblog_detail where valid='false'";
+            hiveConnecter.executeSQLByStatement(truncateHeader);
+            hiveConnecter.executeSQLByStatement(update1);
+            hiveConnecter.executeSQLByStatement(update2);
+            hiveConnecter.executeSQLByStatement(update3);
+            hiveConnecter.executeSQLByStatement(update4);
+            return true;
+        } catch (Exception e) {
+            hiveConnecter.closeAll();
+            System.out.println(e);
+            return false;
+        }
+    }
+    @GetMapping("/truncateall")
+    public boolean truncateall(){
+        String truncate1="truncate table dw_pvs_all";
+        String truncate2="truncate table t_ods_tmp_referurl";
+        String truncate3="truncate table t_ods_tmp_detail";
+        String truncate4="truncate table ods_weblog_detail";
+        String truncate5="truncate table dw_use_browser";
+        String truncate6="truncate table dw_pvs_user";
+        String truncate7="truncate table dw_pvs_week";
+        String truncate8="truncate table ods_weblog_count";
+        String truncate9="truncate table ods_weblog_origin";
+        System.out.println("Clearing all data!");
+        try{
+            hiveConnecter= HiveConnecter.getInstance();
+            hiveConnecter.getConn();
+            hiveConnecter.executeSQLByStatement(truncate1);
+            hiveConnecter.executeSQLByStatement(truncate2);
+            hiveConnecter.executeSQLByStatement(truncate3);
+            hiveConnecter.executeSQLByStatement(truncate4);
+            hiveConnecter.executeSQLByStatement(truncate5);
+            hiveConnecter.executeSQLByStatement(truncate6);
+            hiveConnecter.executeSQLByStatement(truncate7);
+            hiveConnecter.executeSQLByStatement(truncate8);
+            hiveConnecter.executeSQLByStatement(truncate9);
+            return true;
+        }
+        catch (Exception e){
+            return false;
+        }
+    }
+    @GetMapping("/dbcount")
+    public List dbcount() throws SQLException, ClassNotFoundException {
+        hiveConnecter=HiveConnecter.getInstance();
+        hiveConnecter.getConn();
+        List lists=new ArrayList();
+        ResultSet rs = hiveConnecter.executeQueryBYStatement("select * from ods_weblog_count");
+        while (rs.next()){
+            lists.add(rs.getString(2));
+        }
+        lists.forEach(item->{
+            System.out.println(item);
+        });
+        rs.close();
+        return lists;
+    }
+    // combined response for all charts
+    @PostMapping("/alldata")
+    public List alldata(){
+        List<Map<String,Object>> listmap=new ArrayList<>();
+        Map<String,Object> map=new HashMap<>();
+        map.put("weekcount",weekcount());
+        map.put("allpvcount",allpvcount());
+        map.put("browsercount",browsercount());
+        map.put("top5count",top5count());
+        listmap.add(map);
+        return listmap;
+    }
+    // top 5 visitors
+    @PostMapping("/top5count")
+    public List top5count(){
+        List<Map<String,Object>> listmap = new ArrayList<Map<String,Object>>();
+        String sql ="select * from dw_pvs_user";
+        try {
+            hiveConnecter=HiveConnecter.getInstance();
+            hiveConnecter.getConn();
+            ResultSet rs = hiveConnecter.executeQueryBYStatement(sql);
+            while (rs.next()){
+                Map<String,Object> map = new HashMap<>();
+                map.put("name",rs.getString(1));
+                map.put("count",rs.getString(2));
+                listmap.add(map);
+            }
+            rs.close();
+            System.out.println(listmap);
+            return listmap;
+        } catch (Exception e) {
+            System.out.println(e);
+            return null;
+        }
+    }
+    // line chart of daily visit counts
+    @PostMapping("/weekcount")
+    public List weekcount(){
+        List<Map<String,Object>> listmap = new ArrayList<Map<String,Object>>();
+        String sql ="select * from dw_pvs_week";
+        try {
+            hiveConnecter=HiveConnecter.getInstance();
+            hiveConnecter.getConn();
+            ResultSet rs = hiveConnecter.executeQueryBYStatement(sql);
+            while (rs.next()){
+                Map<String,Object> map = new HashMap<>();
+                map.put("logdate",rs.getString(1));
+                map.put("count",rs.getString(2));
+                listmap.add(map);
+            }
+            rs.close();
+            Collections.sort(listmap, new Comparator<Map<String,Object>>() {
+                @Override
+                public int compare(Map<String,Object> o1, Map<String,Object> o2) {
+                    String date1 = (String)o1.get("logdate");
+                    String date2 = (String)o2.get("logdate");
+                    // sort ascending by date string
+                    return date1.compareTo(date2);
+                }
+            });
+            System.out.println(listmap);
+            return listmap;
+        } catch (Exception e) {
+            System.out.println(e);
+            return null;
+        }
+    }
+    @PostMapping("/allpvcount")
+    public List allpvcount(){
+        List<Map<String,Object>> listmap = new ArrayList<Map<String,Object>>();
+        String sql ="select * from dw_pvs_all";
+        try {
+            hiveConnecter=HiveConnecter.getInstance();
+            hiveConnecter.getConn();
+            ResultSet rs = hiveConnecter.executeQueryBYStatement(sql);
+            while (rs.next()){
+                Map<String,Object> map = new HashMap<>();
+                map.put("path",rs.getString(1));
+                map.put("count",rs.getString(2));
+                listmap.add(map);
+            }
+            rs.close();
+            System.out.println(listmap);
+            return listmap;
+        } catch (Exception e) {
+            System.out.println(e);
+            return null;
+        }
+    }
+    @PostMapping("/browsercount")
+    public List browsercount() {
+        List<Map<String,Object>> listmap = new ArrayList<Map<String,Object>>();
+        String Firefoxsql = "select * from dw_use_browser";
+        try {
+            hiveConnecter = HiveConnecter.getInstance();
+            hiveConnecter.getConn();
+            ResultSet rs = hiveConnecter.executeQueryBYStatement(Firefoxsql);
+            while (rs.next()) {
+                Map<String,Object> map = new HashMap<>();
+                map.put("name", rs.getString(1));
+                map.put("value", rs.getString(2));
+                listmap.add(map);
+            }
+            rs.close();
+            System.out.println(listmap);
+            return listmap;
+        } catch (Exception e) {
+            System.out.println(e);
+            return null;
+        }
+    }
+}
diff --git a/openhdh_api/pom.xml b/openhdh_api/pom.xml
index 8aeb0b3..8d3b706 100644
--- a/openhdh_api/pom.xml
+++ b/openhdh_api/pom.xml
@@ -61,5 +61,15 @@
     <build>
         <finalName>openhdh_api</finalName>
+        <plugins>
+            <plugin>
+                <groupId>org.apache.maven.plugins</groupId>
+                <artifactId>maven-compiler-plugin</artifactId>
+                <configuration>
+                    <source>9</source>
+                    <target>9</target>
+                </configuration>
+            </plugin>
+        </plugins>
     </build>
 
 </project>
diff --git a/openhdh_api/src/main/java/com/louzin/openhdhapi/test/helloworld.java b/openhdh_api/src/main/java/com/louzin/openhdhapi/test/helloworld.java
index 9f60b43..a50f447 100644
--- a/openhdh_api/src/main/java/com/louzin/openhdhapi/test/helloworld.java
+++ b/openhdh_api/src/main/java/com/louzin/openhdhapi/test/helloworld.java
@@ -1,7 +1,142 @@
 package com.louzin.openhdhapi.test;
+import com.louzin.openhdhapi.utils.HiveConnecter;
+import com.louzin.openhdhapi.utils.WeblogPreProcess;
+import com.louzin.openhdhapi.utils.hdfsapi;
+import org.junit.Test;
+import java.io.IOException;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.util.*;
 
 public class helloworld {
-    public void helloworldtest(){
-        System.out.println("Hello S!");
+    private static HiveConnecter hiveConnecter;
+    @Test
+    public void helloworldtest() throws Exception {
+        // load data into Hive
+        hiveConnecter=HiveConnecter.getInstance();
+        hiveConnecter.getConn();
+        String sql1="set hive.exec.dynamic.partition=true";
+        String sql2="set hive.exec.dynamic.partition.mode=nonstrict";
+        String dropt_ods_tmp_referurl="drop table t_ods_tmp_referurl";
+        String dropt_ods_tmp_detail="drop table t_ods_tmp_detail";
+        hiveConnecter.executeSQLByStatement(sql1);
+        hiveConnecter.executeSQLByStatement(sql2);
+        // check whether the two staging tables exist
+        ResultSet rs = hiveConnecter.executeQueryBYStatement("show tables in datacenter like 't_ods_tmp_referurl'");
+        if(rs.next()){
+            System.out.println("Table t_ods_tmp_referurl exists!");
+            hiveConnecter.executeSQLByStatement(dropt_ods_tmp_referurl);
+            System.out.println("referurl dropped successfully");
+        }
+        rs=hiveConnecter.executeQueryBYStatement("show tables in datacenter like 't_ods_tmp_detail'");
+        if(rs.next()){
+            System.out.println("Table t_ods_tmp_detail exists!");
+            hiveConnecter.executeSQLByStatement(dropt_ods_tmp_detail);
+            System.out.println("detail dropped successfully");
+        }
+        // create the intermediate tables
+        String createreferurl="create table t_ods_tmp_referurl as SELECT a.*,b.*\n" +
+                " FROM ods_weblog_origin a LATERAL VIEW \n" +
+                " parse_url_tuple(regexp_replace(http_referer, \"\\\"\", \"\"),\n" +
+                " 'HOST', 'PATH','QUERY', 'QUERY:id') b as host, path, query, query_id";
+        hiveConnecter.executeSQLByStatement(createreferurl);
+        String createdetail="create table t_ods_tmp_detail as select b.*,substring(time_local,0,10) as daystr,\n" +
+                " substring(time_local,12) as tmstr,\n" +
+                " substring(time_local,6,2) as month,\n" +
+                " substring(time_local,9,2) as day,\n" +
+                " substring(time_local,11,3) as hour\n" +
+                " from t_ods_tmp_referurl b";
+        hiveConnecter.executeSQLByStatement(createdetail);
+        String insertOverWritedetail="insert overwrite table ods_weblog_detail partition(datestr)\n" +
+                " select distinct otd.valid,otd.remote_addr,otd.remote_user,\n" +
+                " otd.time_local,otd.daystr,otd.tmstr,otd.month,otd.day,otd.hour,\n" +
+                " otr.request,otr.status,otr.body_bytes_sent,\n" +
+                " otr.http_referer,otr.host,otr.path,\n" +
+                " otr.query,otr.query_id,otr.http_user_agent,otd.daystr\n" +
+                " from t_ods_tmp_detail as otd,t_ods_tmp_referurl as otr \n" +
+                " where otd.remote_addr=otr.remote_addr \n" +
+                " and otd.time_local=otr.time_local \n" +
+                " and otd.body_bytes_sent=otr.body_bytes_sent \n" +
+                " and otd.request=otr.request";
+        hiveConnecter.executeSQLByStatement(insertOverWritedetail);
+        System.out.println("Data load complete");
+    }
+    @Test
+    public void loaddata(){
+        String loadatapath="/result/flume/events/23-05-17/1440/";
+        String loadsql="load data inpath '"+loadatapath+"' into table ods_weblog_origin";
+        try {
+            hiveConnecter=HiveConnecter.getInstance();
+            hiveConnecter.getConn();
+            hiveConnecter.executeSQLByStatement(loadsql);
+            System.out.println("load success!");
+            new hdfsapi().deleteFolder(loadatapath);
+        } catch (Exception e) {
+            System.out.println(e);
+        }
+    }
+    @Test
+    public void weekcount(){
+//        Map map=new HashMap<>();
+        List<Map<String,Object>> listmap = new ArrayList<Map<String,Object>>();
+        String Firefoxsql="select COUNT(*) as pvs from ods_weblog_detail WHERE http_user_agent like '%Firefox%'";
+        String Chromesql="select COUNT(*) as pvs from ods_weblog_detail WHERE http_user_agent like '%Chrome%'";
+        String MobileSafarisql="select COUNT(*) as pvs from ods_weblog_detail WHERE http_user_agent like '%MobileSafari%'";
+        String Tridentsql="select COUNT(*) as pvs from ods_weblog_detail WHERE http_user_agent like '%Trident%'";
+        String Safarisql="select COUNT(*) as pvs from ods_weblog_detail WHERE http_user_agent like '%Safari%'";
+        try {
+            hiveConnecter=HiveConnecter.getInstance();
+            hiveConnecter.getConn();
+            ResultSet rs = hiveConnecter.executeQueryBYStatement(Firefoxsql);
+            while (rs.next()){
+                Map<String,Object> map = new HashMap<>();
+                map.put("browser","firefox");
+                map.put("count",rs.getString(1));
+                listmap.add(map);
+            }
+            rs = hiveConnecter.executeQueryBYStatement(Chromesql);
+            while (rs.next()){
+                Map<String,Object> map = new HashMap<>();
+                map.put("browser","Chrome");
+                map.put("count",rs.getString(1));
+                listmap.add(map);
+            }
+            rs = hiveConnecter.executeQueryBYStatement(MobileSafarisql);
+            while (rs.next()){
+                Map<String,Object> map = new HashMap<>();
+                map.put("browser","MobileSafari");
+                map.put("count",rs.getString(1));
+                listmap.add(map);
+            }
+            rs = hiveConnecter.executeQueryBYStatement(Tridentsql);
+            while (rs.next()){
+                Map<String,Object> map = new HashMap<>();
+                map.put("browser","IE/Edge");
+                map.put("count",rs.getString(1));
+                listmap.add(map);
+            }
+            rs = hiveConnecter.executeQueryBYStatement(Safarisql);
+            while (rs.next()){
+                Map<String,Object> map = new HashMap<>();
+                map.put("browser","Safari");
+                map.put("count",rs.getString(1));
+                listmap.add(map);
+            }
+//            Collections.sort(listmap, new Comparator<Map<String,Object>>() {
+//                @Override
+//                public int compare(Map<String,Object> o1, Map<String,Object> o2) {
+//                    String date1 = (String)o1.get("logdate");
+//                    String date2 = (String)o2.get("logdate");
+//                    // sort ascending by date string
+//                    return date1.compareTo(date2);
+//                }
+//            });
+            System.out.println(listmap);
+//            map.forEach((key,value)->{
+//                System.out.println(key+" "+value);
+//            });
+        } catch (Exception e) {
+            System.out.println(e);
+        }
     }
 }
diff --git a/openhdh_api/src/main/java/com/louzin/openhdhapi/utils/HiveConnecter.java b/openhdh_api/src/main/java/com/louzin/openhdhapi/utils/HiveConnecter.java
new file mode 100644
index 0000000..51a62e8
--- /dev/null
+++ b/openhdh_api/src/main/java/com/louzin/openhdhapi/utils/HiveConnecter.java
@@ -0,0 +1,82 @@
+package com.louzin.openhdhapi.utils;
+
+import java.sql.*;
+
+public class HiveConnecter {
+    private static String driverName = "org.apache.hive.jdbc.HiveDriver";
+    private static String url = "jdbc:hive2://local1:10000/datacenter";
+    private static String user = "root";
+    private static String password = "";
+    private static Connection conn = null;
+    private static Statement stmt = null;
+    private static ResultSet rs = null;
+    private static HiveConnecter instance;
+    // private constructor
+    private HiveConnecter() throws ClassNotFoundException {
+        Class.forName(driverName);
+
+        System.out.println("HiveConnecter Driver Register Success");
+    }
+    // singleton accessor for the database connection object
+    public static HiveConnecter getInstance() throws SQLException, ClassNotFoundException {
+        if(instance==null){
+            instance=new HiveConnecter();
+        }
+        return instance;
+    }
+    // obtain a new connection
+    public Connection getConn() throws SQLException {
+        conn = DriverManager.getConnection(url,user,password);
+        return conn;
+    }
+    /**
+     * Execute a static SQL statement
+     * @param sql
+     * @return the number of rows affected in the database
+     * @throws SQLException
+     */
+    public int executeSQLByStatement(String sql) throws SQLException{
+        stmt=conn.createStatement();
+        return stmt.executeUpdate(sql);
+    }
+    /**
+     * Execute a DML statement such as delete, insert, or update,
+     * e.g. insert into student()
+     * @param sql
+     * @param columnName column names
+     * @return
+     * @throws SQLException
+     */
+    public int executeSQLBYStatement(String sql,String[] columnName)throws SQLException{
+        stmt =conn.createStatement();
+        return stmt.executeUpdate(sql,columnName);
+    }
+    /**
+     * Run a query; the result may contain one or more rows
+     * (cf. int executeUpdate(String sql, int[] columnIndexes))
+     * @param sql
+     * @return
+     * @throws SQLException
+     */
+    public ResultSet executeQueryBYStatement(String sql)throws SQLException{
+        stmt =conn.createStatement();
+        return stmt.executeQuery(sql);
+    }
+    /**
+     * Close the statement and connection in one place to reduce duplicated code
+     */
+    public void closeAll(){
+        try {
+            if(stmt!=null) {
+                stmt.close();
+            }
+            if(conn!=null){
+                conn.close();
+            }
+            System.out.println("HiveConnecter Close");
+        } catch (SQLException e) {
+            e.printStackTrace();
+        }
+    }
+}
diff --git a/openhdh_api/src/main/java/com/louzin/openhdhapi/utils/WeblogPreProcess.java b/openhdh_api/src/main/java/com/louzin/openhdhapi/utils/WeblogPreProcess.java
index 381d3c4..32e398b 100644
--- a/openhdh_api/src/main/java/com/louzin/openhdhapi/utils/WeblogPreProcess.java
+++ b/openhdh_api/src/main/java/com/louzin/openhdhapi/utils/WeblogPreProcess.java
@@ -19,8 +19,8 @@ import java.util.Set;
  * Process raw logs: filter out real request records, convert the time format, fill missing fields with defaults, and mark each record valid or invalid
  */
 public class WeblogPreProcess {
-
-    public static void main(String[] args) throws Exception {
+    public WeblogPreProcess(){}
+    public int runjob(String path) throws Exception {
         Configuration conf = new Configuration();
         conf.set("mapreduce.framework.name","local");
         Job job = Job.getInstance(conf);
@@ -28,11 +28,11 @@ public class WeblogPreProcess {
         job.setMapperClass(WeblogPreProcessMapper.class);
         job.setOutputKeyClass(Text.class);
         job.setOutputValueClass(NullWritable.class);
-        FileInputFormat.setInputPaths(job, new Path("hdfs://local1:8020/flume/events/23-05-17/1440"));
-        FileOutputFormat.setOutputPath(job, new Path("hdfs://local1:8020/result/23-05-17-1440"));
+        FileInputFormat.setInputPaths(job, new Path("hdfs://local1:8020"+path));
+        FileOutputFormat.setOutputPath(job, new Path("hdfs://local1:8020/result/"+path));
         job.setNumReduceTasks(1);
         boolean res = job.waitForCompletion(true);
-        System.exit(res ? 0 : 1);
+        return res ? 0 : 1;
     }
 
     public static class WeblogPreProcessMapper extends Mapper {
diff --git a/openhdh_api/src/main/java/com/louzin/openhdhapi/utils/hdfsapi.java b/openhdh_api/src/main/java/com/louzin/openhdhapi/utils/hdfsapi.java
index 3135f45..c0d3787 100644
--- a/openhdh_api/src/main/java/com/louzin/openhdhapi/utils/hdfsapi.java
+++ b/openhdh_api/src/main/java/com/louzin/openhdhapi/utils/hdfsapi.java
@@ -51,6 +51,42 @@ public class hdfsapi {
         }
         return lists;
     }
+    @Test
+    public List scanresult() throws IOException {
+        List lists=new ArrayList();
+        Path path=new Path("/result/flume/events/23-05-17");
+        FileStatus[] fileStatuses=hdfs.listStatus(path);// per-day folders under events
+        for (FileStatus fs:fileStatuses) {
+            String[] str=String.valueOf(fs.getPath()).split("8020");
+            lists.add(str[1]);
+        }
+        return lists;
+    }
+    @Test
+    public boolean ifexistsFolder(String pathx) {
+        try {
+            if(hdfs.exists(new Path(pathx))){
+                System.out.println(pathx + " exists, skipping");
+                return false;
+            }else{
+                System.out.println(pathx + " does not exist, proceeding");
+                return true;
+            }
+        } catch (IOException e) {
+            System.out.println("Operation error! +deleteFolder()");
+            return true;
+        }
+    }
+    public boolean deleteFolder(String pathx){
+        try {
+            hdfs.delete(new Path(pathx),true);
+            System.out.println("Deleted successfully!");
+            return true;
+        } catch (IOException e) {
+            System.out.println("Delete failed!");
+            return false;
+        }
+    }
     @After
     public void close() throws IOException {
         hdfs.close();
diff --git a/openhdh_api/src/main/java/com/louzin/openhdhapi/utils/jdbcHiveConnect.java b/openhdh_api/src/main/java/com/louzin/openhdhapi/utils/jdbcHiveConnect.java
new file mode 100644
index 0000000..09c3b8e
--- /dev/null
+++ b/openhdh_api/src/main/java/com/louzin/openhdhapi/utils/jdbcHiveConnect.java
@@ -0,0 +1,104 @@
+package com.louzin.openhdhapi.utils;
+
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+
+import java.sql.*;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
+
+public class jdbcHiveConnect {
+    public jdbcHiveConnect(){}
+    private static String driverName = "org.apache.hive.jdbc.HiveDriver";
+    private static String url = "jdbc:hive2://local1:10000/datacenter";
+    private static String user = "root";
+    private static String password = "";
+    private static Connection conn = null;
+    private static Statement stmt = null;
+    private static ResultSet rs = null;
+    // load the driver and create the connection
+    @Before
+    public void init() throws Exception {
+        Class.forName(driverName);
+        conn = DriverManager.getConnection(url,user,password);
+        stmt = conn.createStatement();
+    }
+    @Test
+    public List showAllTable() throws SQLException, ClassNotFoundException {
+        Class.forName(driverName);
+        conn = DriverManager.getConnection(url,user,password);
+        stmt = conn.createStatement();
+        String sql="show tables";
+        rs=stmt.executeQuery(sql);
+        List lists=new ArrayList<>();
+        while (rs.next()){
+            System.out.println(rs.getString(1));
+            lists.add(rs.getString(1));
+        }
+        if ( rs != null) {
+            rs.close();
+        }
+        if (stmt != null) {
+            stmt.close();
+        }
+        if (conn != null) {
+            conn.close();
+        }
+        return lists;
+    }
+    @Test
+    public void selectData_all_true() throws SQLException {
+        String sql="select * from ods_weblog_detail owd where valid ='true'";
+        rs=stmt.executeQuery(sql);
+        List lists=new ArrayList<>();
+        while (rs.next()){
+            List copylist=new ArrayList<>();
+            copylist.add(rs.getString("remote_addr"));
+            copylist.add(rs.getString("time_local"));
+            copylist.add(rs.getString("daystr"));
+            copylist.add(rs.getString("request"));
+            lists.add(copylist);
+        }
+        lists.forEach(item-> System.out.println(item.toString()));
+    }
+    @Test
+    public List selectData_day_pv() throws SQLException, ClassNotFoundException {
+        Class.forName(driverName);
+        conn = DriverManager.getConnection(url,user,password);
+        stmt = conn.createStatement();
+        String sql="select remote_addr,COUNT(remote_addr)as pv" +
+                " from ods_weblog_detail" +
+                " group by remote_addr order by pv desc limit 10";
+        rs=stmt.executeQuery(sql);
+        List lists=new ArrayList<>();
+        while (rs.next()){
+            Map copymap=Map.of("value",rs.getString(2),"name",rs.getString(1));
+            lists.add(copymap);
+        }
+        if ( rs != null) {
+            rs.close();
+        }
+        if (stmt != null) {
+            stmt.close();
+        }
+        if (conn != null) {
+            conn.close();
+        }
+//        return lists;
+        return lists;
+    }
+    @After
+    public void destory() throws Exception {
+        if ( rs != null) {
+            rs.close();
+        }
+        if (stmt != null) {
+            stmt.close();
+        }
+        if (conn != null) {
+            conn.close();
+        }
+    }
+}
diff --git a/ruoyi-ui/src/views/echarts/myterminal/index.vue b/ruoyi-ui/src/views/echarts/myterminal/index.vue
index 1cc1848..ff9f1d0 100644
--- a/ruoyi-ui/src/views/echarts/myterminal/index.vue
+++ b/ruoyi-ui/src/views/echarts/myterminal/index.vue
@@ -1,70 +1,136 @@
diff --git a/ruoyi-ui/src/views/echarts/myvue/index.vue b/ruoyi-ui/src/views/echarts/myvue/index.vue
index fb18f7c..fdfb391 100644
--- a/ruoyi-ui/src/views/echarts/myvue/index.vue
+++ b/ruoyi-ui/src/views/echarts/myvue/index.vue
@@ -1,383 +1,476 @@
diff --git a/ruoyi-ui/src/views/login.vue b/ruoyi-ui/src/views/login.vue
index 4848bd8..7635319 100644
--- a/ruoyi-ui/src/views/login.vue
+++ b/ruoyi-ui/src/views/login.vue
@@ -72,8 +72,8 @@ export default {
     return {
       codeUrl: "",
       loginForm: {
-        username: "admin",
-        password: "admin123",
+        username: "",
+        password: "",
         rememberMe: false,
         code: "",
         uuid: ""
@@ -156,12 +156,6 @@ export default {