louzin
1 year ago
11 changed files with 1420 additions and 467 deletions
@ -0,0 +1,316 @@ |
|||||||
|
package com.louzin.openhdh.controller; |
||||||
|
|
||||||
|
import com.louzin.openhdhapi.utils.HiveConnecter; |
||||||
|
import org.springframework.stereotype.Controller; |
||||||
|
import org.springframework.web.bind.annotation.*; |
||||||
|
import com.louzin.openhdhapi.utils.WeblogPreProcess; |
||||||
|
import com.louzin.openhdhapi.utils.hdfsapi; |
||||||
|
import java.sql.ResultSet; |
||||||
|
import java.sql.SQLException; |
||||||
|
import java.util.*; |
||||||
|
|
||||||
|
@Controller |
||||||
|
@ResponseBody |
||||||
|
@CrossOrigin |
||||||
|
public class ParserController { |
||||||
|
private static HiveConnecter hiveConnecter; |
||||||
|
//对指定路径下的文件进行采集分析,结果生成至result文件夹下
|
||||||
|
@PostMapping("/parser") |
||||||
|
public int parserWebLog(@RequestParam("parserpath") String parserpath) { |
||||||
|
hdfsapi hdfs = new hdfsapi(); |
||||||
|
if (hdfs.ifexistsFolder("/result"+parserpath)){ |
||||||
|
if(hdfs.deleteFolder("/result"+parserpath)){//删除成功则进行下一步
|
||||||
|
WeblogPreProcess weblogPreProcess=new WeblogPreProcess(); |
||||||
|
try { |
||||||
|
weblogPreProcess.runjob(parserpath); |
||||||
|
return 0; |
||||||
|
} catch (Exception e) { |
||||||
|
System.out.println("webLogPreProcess runjob Error!"); |
||||||
|
return 1; |
||||||
|
} |
||||||
|
}else{ |
||||||
|
System.out.println("hdfs文件权限有误!!!"); |
||||||
|
return 2; |
||||||
|
} |
||||||
|
} |
||||||
|
else { |
||||||
|
System.out.println("数据已过滤!!!!"); |
||||||
|
return 3; |
||||||
|
} |
||||||
|
} |
||||||
|
//将分析好的数据加载到Hive的origin表中
|
||||||
|
@GetMapping("/loaddatatohive") |
||||||
|
public boolean loaddatatohive(@RequestParam String loaddatapath){ |
||||||
|
String loadsql="load data inpath '"+loaddatapath+"' into table ods_weblog_origin"; |
||||||
|
try { |
||||||
|
hiveConnecter=HiveConnecter.getInstance(); |
||||||
|
hiveConnecter.getConn(); |
||||||
|
hiveConnecter.executeSQLByStatement(loadsql); |
||||||
|
System.out.println("load success!"); |
||||||
|
new hdfsapi().deleteFolder(loaddatapath); |
||||||
|
return true; |
||||||
|
} catch (Exception e) { |
||||||
|
System.out.println(e); |
||||||
|
return false; |
||||||
|
} |
||||||
|
} |
||||||
|
@GetMapping("/deletefolder") |
||||||
|
public boolean deletefolder(@RequestParam String deletepath){ |
||||||
|
System.out.println("将要删除 "+deletepath); |
||||||
|
return new hdfsapi().deleteFolder(deletepath); |
||||||
|
} |
||||||
|
//从origin中更新t_ods_tmp_referurl t_ods_tmp_detail并对ods_weblog_detail进行复写
|
||||||
|
@PostMapping("/hivedbupdate") |
||||||
|
public boolean hivedbupdate(){ |
||||||
|
//加载数据到hive
|
||||||
|
try { |
||||||
|
hiveConnecter= HiveConnecter.getInstance(); |
||||||
|
hiveConnecter.getConn(); |
||||||
|
String sql1="set hive.exec.dynamic.partition=true"; |
||||||
|
String sql2="set hive.exec.dynamic.partition.mode=nonstrict"; |
||||||
|
String dropt_ods_tmp_referurl="drop table t_ods_tmp_referurl"; |
||||||
|
String dropt_ods_tmp_detail="drop table t_ods_tmp_detail"; |
||||||
|
hiveConnecter.executeSQLByStatement(sql1); |
||||||
|
hiveConnecter.executeSQLByStatement(sql2); |
||||||
|
//判断双表是否存在
|
||||||
|
ResultSet rs = hiveConnecter.executeQueryBYStatement("show tables in datacenter like 't_ods_tmp_referurl'"); |
||||||
|
if(rs.next()){ |
||||||
|
System.out.println("t_ods_tmp_referurl表存在!"); |
||||||
|
hiveConnecter.executeSQLByStatement(dropt_ods_tmp_referurl); |
||||||
|
System.out.println("referurl删除成功"); |
||||||
|
} |
||||||
|
rs=hiveConnecter.executeQueryBYStatement("show tables in datacenter like 't_ods_tmp_detail'"); |
||||||
|
if(rs.next()){ |
||||||
|
System.out.println("t_ods_tmp_detail表存在!"); |
||||||
|
hiveConnecter.executeSQLByStatement(dropt_ods_tmp_detail); |
||||||
|
System.out.println("detail删除成功"); |
||||||
|
} |
||||||
|
//创建中间表
|
||||||
|
String createreferurl="create table t_ods_tmp_referurl as SELECT a.*,b.*\n" + |
||||||
|
" FROM ods_weblog_origin a LATERAL VIEW \n" + |
||||||
|
" parse_url_tuple(regexp_replace(http_referer, \"\\\"\", \"\"),\n" + |
||||||
|
" 'HOST', 'PATH','QUERY', 'QUERY:id') b as host, path, query, query_id"; |
||||||
|
hiveConnecter.executeSQLByStatement(createreferurl); |
||||||
|
String createdetail="create table t_ods_tmp_detail as select b.*,substring(time_local,0,10) as daystr,\n" + |
||||||
|
" substring(time_local,12) as tmstr,\n" + |
||||||
|
" substring(time_local,6,2) as month,\n" + |
||||||
|
" substring(time_local,9,2) as day,\n" + |
||||||
|
" substring(time_local,11,3) as hour\n" + |
||||||
|
" from t_ods_tmp_referurl b"; |
||||||
|
hiveConnecter.executeSQLByStatement(createdetail); |
||||||
|
String insertOverWritedetail="insert overwrite table ods_weblog_detail partition(datestr)\n" + |
||||||
|
" select distinct otd.valid,otd.remote_addr,otd.remote_user,\n" + |
||||||
|
" otd.time_local,otd.daystr,otd.tmstr,otd.month,otd.day,otd.hour,\n" + |
||||||
|
" otr.request,otr.status,otr.body_bytes_sent,\n" + |
||||||
|
" otr.http_referer,otr.host,otr.path,\n" + |
||||||
|
" otr.query,otr.query_id,otr.http_user_agent,otd.daystr\n" + |
||||||
|
" from t_ods_tmp_detail as otd,t_ods_tmp_referurl as otr \n" + |
||||||
|
" where otd.remote_addr=otr.remote_addr \n" + |
||||||
|
" and otd.time_local=otr.time_local \n" + |
||||||
|
" and otd.body_bytes_sent=otr.body_bytes_sent \n" + |
||||||
|
" and otd.request=otr.request"; |
||||||
|
hiveConnecter.executeSQLByStatement(insertOverWritedetail); |
||||||
|
System.out.println("数据入库完成,开始分析"); |
||||||
|
//browser分析
|
||||||
|
String truncateBrowser="truncate table dw_use_browser"; |
||||||
|
String updateBrowser0="insert into table dw_use_browser select 'Firefox' as name,COUNT(*) as count from ods_weblog_detail WHERE http_user_agent like '%Firefox%'"; |
||||||
|
String updateBrowser1="insert into table dw_use_browser select 'Chrome' as name,COUNT(*) as count from ods_weblog_detail WHERE http_user_agent like '%Chrome%'"; |
||||||
|
String updateBrowser2="insert into table dw_use_browser select 'MobileSafari' as name,COUNT(*) as count from ods_weblog_detail WHERE http_user_agent like '%MobileSafari%'"; |
||||||
|
String updateBrowser3="insert into table dw_use_browser select 'Edge/IE' as name,COUNT(*) as count from ods_weblog_detail WHERE http_user_agent like '%Trident%'"; |
||||||
|
String updateBrowser4="insert into table dw_use_browser select 'Safari' as name,COUNT(*) as count from ods_weblog_detail WHERE http_user_agent like '%Safari%'"; |
||||||
|
hiveConnecter.executeSQLByStatement(truncateBrowser); |
||||||
|
hiveConnecter.executeSQLByStatement(updateBrowser0); |
||||||
|
hiveConnecter.executeSQLByStatement(updateBrowser1); |
||||||
|
hiveConnecter.executeSQLByStatement(updateBrowser2); |
||||||
|
hiveConnecter.executeSQLByStatement(updateBrowser3); |
||||||
|
hiveConnecter.executeSQLByStatement(updateBrowser4); |
||||||
|
//weekpvs分析
|
||||||
|
String truncateWeekpvs="truncate table dw_pvs_week"; |
||||||
|
String updateWeekpvs="insert into table dw_pvs_week select datestr,COUNT(*) as count from ods_weblog_detail group by datestr"; |
||||||
|
hiveConnecter.executeSQLByStatement(truncateWeekpvs); |
||||||
|
hiveConnecter.executeSQLByStatement(updateWeekpvs); |
||||||
|
//allpvs
|
||||||
|
String truncateAllpvs="truncate table dw_pvs_all"; |
||||||
|
String updateAllpvs="insert into table dw_pvs_all select request,COUNT(*) as pvs from ods_weblog_detail owd where request !='/' group by request ORDER by pvs desc limit 10"; |
||||||
|
hiveConnecter.executeSQLByStatement(truncateAllpvs); |
||||||
|
hiveConnecter.executeSQLByStatement(updateAllpvs); |
||||||
|
//pvsusertop5
|
||||||
|
String truncatePvsuser="truncate table dw_pvs_user"; |
||||||
|
String updatePvsuser="insert into table dw_pvs_user select remote_addr ,count(remote_addr) as userpvs from ods_weblog_detail owd group by remote_addr order by userpvs desc limit 5"; |
||||||
|
hiveConnecter.executeSQLByStatement(truncatePvsuser); |
||||||
|
hiveConnecter.executeSQLByStatement(updatePvsuser); |
||||||
|
//headerupdate
|
||||||
|
String truncateHeader="truncate table ods_weblog_count"; |
||||||
|
String update1="insert into table ods_weblog_count select 'ct1',count(*) from ods_weblog_origin"; |
||||||
|
String update2="insert into table ods_weblog_count select 'ct2',count(*) from ods_weblog_origin where valid='true'"; |
||||||
|
String update3="insert into table ods_weblog_count select 'ct3',count(*) from ods_weblog_origin where valid='false'"; |
||||||
|
String update4="insert into table ods_weblog_count select 'ct4',count(*) from ods_weblog_detail where valid='false'"; |
||||||
|
hiveConnecter.executeSQLByStatement(truncateHeader); |
||||||
|
hiveConnecter.executeSQLByStatement(update1); |
||||||
|
hiveConnecter.executeSQLByStatement(update2); |
||||||
|
hiveConnecter.executeSQLByStatement(update3); |
||||||
|
hiveConnecter.executeSQLByStatement(update4); |
||||||
|
return true; |
||||||
|
} catch (Exception e) { |
||||||
|
hiveConnecter.closeAll(); |
||||||
|
System.out.println(e); |
||||||
|
return false; |
||||||
|
} |
||||||
|
} |
||||||
|
@GetMapping("/truncateall") |
||||||
|
public boolean truncateall(){ |
||||||
|
String truncate1="truncate table dw_pvs_all"; |
||||||
|
String truncate2="truncate table t_ods_tmp_referurl"; |
||||||
|
String truncate3="truncate table t_ods_tmp_detail"; |
||||||
|
String truncate4="truncate table ods_weblog_detail"; |
||||||
|
String truncate5="truncate table dw_use_browser"; |
||||||
|
String truncate6="truncate table dw_pvs_user"; |
||||||
|
String truncate7="truncate table dw_pvs_week"; |
||||||
|
String truncate8="truncate table ods_weblog_count"; |
||||||
|
String truncate9="truncate table ods_weblog_origin"; |
||||||
|
System.out.println("清除所有数据!"); |
||||||
|
try{ |
||||||
|
hiveConnecter= HiveConnecter.getInstance(); |
||||||
|
hiveConnecter.getConn(); |
||||||
|
hiveConnecter.executeSQLByStatement(truncate1); |
||||||
|
hiveConnecter.executeSQLByStatement(truncate2); |
||||||
|
hiveConnecter.executeSQLByStatement(truncate3); |
||||||
|
hiveConnecter.executeSQLByStatement(truncate4); |
||||||
|
hiveConnecter.executeSQLByStatement(truncate5); |
||||||
|
hiveConnecter.executeSQLByStatement(truncate6); |
||||||
|
hiveConnecter.executeSQLByStatement(truncate7); |
||||||
|
hiveConnecter.executeSQLByStatement(truncate8); |
||||||
|
hiveConnecter.executeSQLByStatement(truncate9); |
||||||
|
return true; |
||||||
|
} |
||||||
|
catch (Exception e){ |
||||||
|
return false; |
||||||
|
} |
||||||
|
} |
||||||
|
@GetMapping("/dbcount") |
||||||
|
public List dbcount() throws SQLException, ClassNotFoundException { |
||||||
|
hiveConnecter=HiveConnecter.getInstance(); |
||||||
|
hiveConnecter.getConn(); |
||||||
|
List<String> lists=new ArrayList(); |
||||||
|
ResultSet rs = hiveConnecter.executeQueryBYStatement("select * from ods_weblog_count"); |
||||||
|
while (rs.next()){ |
||||||
|
lists.add(rs.getString(2)); |
||||||
|
} |
||||||
|
lists.forEach(item->{ |
||||||
|
System.out.println(item); |
||||||
|
}); |
||||||
|
rs.close(); |
||||||
|
return lists; |
||||||
|
} |
||||||
|
//总体响应
|
||||||
|
@PostMapping("/alldata") |
||||||
|
public List alldata(){ |
||||||
|
List<Map<String,List>> listmap=new ArrayList<>(); |
||||||
|
Map<String,List> map=new HashMap<>(); |
||||||
|
map.put("weekcount",weekcount()); |
||||||
|
map.put("allpvcount",allpvcount()); |
||||||
|
map.put("browsercount",browsercount()); |
||||||
|
map.put("top5count",top5count()); |
||||||
|
listmap.add(map); |
||||||
|
return listmap; |
||||||
|
} |
||||||
|
//top5
|
||||||
|
@PostMapping("/top5count") |
||||||
|
public List top5count(){ |
||||||
|
List<Map<String, Object>> listmap = new ArrayList<Map<String, Object>>(); |
||||||
|
String sql ="select * from dw_pvs_user"; |
||||||
|
try { |
||||||
|
hiveConnecter=HiveConnecter.getInstance(); |
||||||
|
hiveConnecter.getConn(); |
||||||
|
ResultSet rs = hiveConnecter.executeQueryBYStatement(sql); |
||||||
|
while (rs.next()){ |
||||||
|
Map<String, Object> map = new HashMap<>(); |
||||||
|
map.put("name",rs.getString(1)); |
||||||
|
map.put("count",rs.getString(2)); |
||||||
|
listmap.add(map); |
||||||
|
} |
||||||
|
rs.close(); |
||||||
|
System.out.println(listmap); |
||||||
|
return listmap; |
||||||
|
} catch (Exception e) { |
||||||
|
System.out.println(e); |
||||||
|
return null; |
||||||
|
} |
||||||
|
} |
||||||
|
//每日访问记录折线图
|
||||||
|
@PostMapping("/weekcount") |
||||||
|
public List weekcount(){ |
||||||
|
List<Map<String, Object>> listmap = new ArrayList<Map<String, Object>>(); |
||||||
|
String sql ="select * from dw_pvs_week"; |
||||||
|
try { |
||||||
|
hiveConnecter=HiveConnecter.getInstance(); |
||||||
|
hiveConnecter.getConn(); |
||||||
|
ResultSet rs = hiveConnecter.executeQueryBYStatement(sql); |
||||||
|
while (rs.next()){ |
||||||
|
Map<String, Object> map = new HashMap<>(); |
||||||
|
map.put("logdate",rs.getString(1)); |
||||||
|
map.put("count",rs.getString(2)); |
||||||
|
listmap.add(map); |
||||||
|
} |
||||||
|
rs.close(); |
||||||
|
Collections.sort(listmap, new Comparator<Map<String, Object>>() { |
||||||
|
@Override |
||||||
|
public int compare(Map<String, Object> o1, Map<String, Object> o2) { |
||||||
|
String date1 = (String)o1.get("logdate"); |
||||||
|
String date2 = (String)o2.get("logdate"); |
||||||
|
//降序
|
||||||
|
return date1.compareTo(date2); |
||||||
|
} |
||||||
|
}); |
||||||
|
System.out.println(listmap); |
||||||
|
return listmap; |
||||||
|
} catch (Exception e) { |
||||||
|
System.out.println(e); |
||||||
|
return null; |
||||||
|
} |
||||||
|
} |
||||||
|
@PostMapping("/allpvcount") |
||||||
|
public List allpvcount(){ |
||||||
|
List<Map<String, Object>> listmap = new ArrayList<Map<String, Object>>(); |
||||||
|
String sql ="select * from dw_pvs_all"; |
||||||
|
try { |
||||||
|
hiveConnecter=HiveConnecter.getInstance(); |
||||||
|
hiveConnecter.getConn(); |
||||||
|
ResultSet rs = hiveConnecter.executeQueryBYStatement(sql); |
||||||
|
while (rs.next()){ |
||||||
|
Map<String, Object> map = new HashMap<>(); |
||||||
|
map.put("path",rs.getString(1)); |
||||||
|
map.put("count",rs.getString(2)); |
||||||
|
listmap.add(map); |
||||||
|
} |
||||||
|
rs.close(); |
||||||
|
System.out.println(listmap); |
||||||
|
return listmap; |
||||||
|
} catch (Exception e) { |
||||||
|
System.out.println(e); |
||||||
|
return null; |
||||||
|
} |
||||||
|
} |
||||||
|
@PostMapping("/browsercount") |
||||||
|
public List browsercount() { |
||||||
|
List<Map<String, Object>> listmap = new ArrayList<Map<String, Object>>(); |
||||||
|
String Firefoxsql = "select * from dw_use_browser"; |
||||||
|
try { |
||||||
|
hiveConnecter = HiveConnecter.getInstance(); |
||||||
|
hiveConnecter.getConn(); |
||||||
|
ResultSet rs = hiveConnecter.executeQueryBYStatement(Firefoxsql); |
||||||
|
while (rs.next()) { |
||||||
|
Map<String, Object> map = new HashMap<>(); |
||||||
|
map.put("name", rs.getString(1)); |
||||||
|
map.put("value", rs.getString(2)); |
||||||
|
listmap.add(map); |
||||||
|
} |
||||||
|
rs.close(); |
||||||
|
System.out.println(listmap); |
||||||
|
return listmap; |
||||||
|
} catch (Exception e) { |
||||||
|
System.out.println(e); |
||||||
|
return null; |
||||||
|
} |
||||||
|
} |
||||||
|
} |
@ -1,7 +1,142 @@ |
|||||||
package com.louzin.openhdhapi.test; |
package com.louzin.openhdhapi.test; |
||||||
|
import com.louzin.openhdhapi.utils.HiveConnecter; |
||||||
|
import com.louzin.openhdhapi.utils.WeblogPreProcess; |
||||||
|
import com.louzin.openhdhapi.utils.hdfsapi; |
||||||
|
import org.junit.Test; |
||||||
|
import java.io.IOException; |
||||||
|
import java.sql.ResultSet; |
||||||
|
import java.sql.SQLException; |
||||||
|
import java.util.*; |
||||||
|
|
||||||
public class helloworld { |
public class helloworld { |
||||||
public void helloworldtest(){ |
private static HiveConnecter hiveConnecter; |
||||||
System.out.println("Hello S!"); |
@Test |
||||||
|
public void helloworldtest() throws Exception { |
||||||
|
//加载数据到hive
|
||||||
|
hiveConnecter=HiveConnecter.getInstance(); |
||||||
|
hiveConnecter.getConn(); |
||||||
|
String sql1="set hive.exec.dynamic.partition=true"; |
||||||
|
String sql2="set hive.exec.dynamic.partition.mode=nonstrict"; |
||||||
|
String dropt_ods_tmp_referurl="drop table t_ods_tmp_referurl"; |
||||||
|
String dropt_ods_tmp_detail="drop table t_ods_tmp_detail"; |
||||||
|
hiveConnecter.executeSQLByStatement(sql1); |
||||||
|
hiveConnecter.executeSQLByStatement(sql2); |
||||||
|
//判断双表是否存在
|
||||||
|
ResultSet rs = hiveConnecter.executeQueryBYStatement("show tables in datacenter like 't_ods_tmp_referurl'"); |
||||||
|
if(rs.next()){ |
||||||
|
System.out.println("t_ods_tmp_referurl表存在!"); |
||||||
|
hiveConnecter.executeSQLByStatement(dropt_ods_tmp_referurl); |
||||||
|
System.out.println("referurl删除成功"); |
||||||
|
} |
||||||
|
rs=hiveConnecter.executeQueryBYStatement("show tables in datacenter like 't_ods_tmp_detail'"); |
||||||
|
if(rs.next()){ |
||||||
|
System.out.println("t_ods_tmp_detail表存在!"); |
||||||
|
hiveConnecter.executeSQLByStatement(dropt_ods_tmp_detail); |
||||||
|
System.out.println("detail删除成功"); |
||||||
|
} |
||||||
|
//创建中间表
|
||||||
|
String createreferurl="create table t_ods_tmp_referurl as SELECT a.*,b.*\n" + |
||||||
|
" FROM ods_weblog_origin a LATERAL VIEW \n" + |
||||||
|
" parse_url_tuple(regexp_replace(http_referer, \"\\\"\", \"\"),\n" + |
||||||
|
" 'HOST', 'PATH','QUERY', 'QUERY:id') b as host, path, query, query_id"; |
||||||
|
hiveConnecter.executeSQLByStatement(createreferurl); |
||||||
|
String createdetail="create table t_ods_tmp_detail as select b.*,substring(time_local,0,10) as daystr,\n" + |
||||||
|
" substring(time_local,12) as tmstr,\n" + |
||||||
|
" substring(time_local,6,2) as month,\n" + |
||||||
|
" substring(time_local,9,2) as day,\n" + |
||||||
|
" substring(time_local,11,3) as hour\n" + |
||||||
|
" from t_ods_tmp_referurl b"; |
||||||
|
hiveConnecter.executeSQLByStatement(createdetail); |
||||||
|
String insertOverWritedetail="insert overwrite table ods_weblog_detail partition(datestr)\n" + |
||||||
|
" select distinct otd.valid,otd.remote_addr,otd.remote_user,\n" + |
||||||
|
" otd.time_local,otd.daystr,otd.tmstr,otd.month,otd.day,otd.hour,\n" + |
||||||
|
" otr.request,otr.status,otr.body_bytes_sent,\n" + |
||||||
|
" otr.http_referer,otr.host,otr.path,\n" + |
||||||
|
" otr.query,otr.query_id,otr.http_user_agent,otd.daystr\n" + |
||||||
|
" from t_ods_tmp_detail as otd,t_ods_tmp_referurl as otr \n" + |
||||||
|
" where otd.remote_addr=otr.remote_addr \n" + |
||||||
|
" and otd.time_local=otr.time_local \n" + |
||||||
|
" and otd.body_bytes_sent=otr.body_bytes_sent \n" + |
||||||
|
" and otd.request=otr.request"; |
||||||
|
hiveConnecter.executeSQLByStatement(insertOverWritedetail); |
||||||
|
System.out.println("数据入库完成"); |
||||||
|
} |
||||||
|
@Test |
||||||
|
public void loaddata(){ |
||||||
|
String loadatapath="/result/flume/events/23-05-17/1440/"; |
||||||
|
String loadsql="load data inpath '"+loadatapath+"' into table ods_weblog_origin"; |
||||||
|
try { |
||||||
|
hiveConnecter=HiveConnecter.getInstance(); |
||||||
|
hiveConnecter.getConn(); |
||||||
|
hiveConnecter.executeSQLByStatement(loadsql); |
||||||
|
System.out.println("load success!"); |
||||||
|
new hdfsapi().deleteFolder(loadatapath); |
||||||
|
} catch (Exception e) { |
||||||
|
System.out.println(e); |
||||||
|
} |
||||||
|
} |
||||||
|
@Test |
||||||
|
public void weekcount(){ |
||||||
|
// Map<String,String> map=new HashMap<>();
|
||||||
|
List<Map<String, Object>> listmap = new ArrayList<Map<String, Object>>(); |
||||||
|
String Firefoxsql="select COUNT(*) as pvs from ods_weblog_detail WHERE http_user_agent like '%Firefox%'"; |
||||||
|
String Chromesql="select COUNT(*) as pvs from ods_weblog_detail WHERE http_user_agent like '%Chrome%'"; |
||||||
|
String MobileSafarisql="select COUNT(*) as pvs from ods_weblog_detail WHERE http_user_agent like '%MobileSafari%'"; |
||||||
|
String Tridentsql="select COUNT(*) as pvs from ods_weblog_detail WHERE http_user_agent like '%Trident%'"; |
||||||
|
String Safarisql="select COUNT(*) as pvs from ods_weblog_detail WHERE http_user_agent like '%Safari%'"; |
||||||
|
try { |
||||||
|
hiveConnecter=HiveConnecter.getInstance(); |
||||||
|
hiveConnecter.getConn(); |
||||||
|
ResultSet rs = hiveConnecter.executeQueryBYStatement(Firefoxsql); |
||||||
|
while (rs.next()){ |
||||||
|
Map<String, Object> map = new HashMap<>(); |
||||||
|
map.put("browser","firefox"); |
||||||
|
map.put("count",rs.getString(1)); |
||||||
|
listmap.add(map); |
||||||
|
} |
||||||
|
rs = hiveConnecter.executeQueryBYStatement(Chromesql); |
||||||
|
while (rs.next()){ |
||||||
|
Map<String, Object> map = new HashMap<>(); |
||||||
|
map.put("browser","Chrome"); |
||||||
|
map.put("count",rs.getString(1)); |
||||||
|
listmap.add(map); |
||||||
|
} |
||||||
|
rs = hiveConnecter.executeQueryBYStatement(MobileSafarisql); |
||||||
|
while (rs.next()){ |
||||||
|
Map<String, Object> map = new HashMap<>(); |
||||||
|
map.put("browser","MobileSafari"); |
||||||
|
map.put("count",rs.getString(1)); |
||||||
|
listmap.add(map); |
||||||
|
} |
||||||
|
rs = hiveConnecter.executeQueryBYStatement(Tridentsql); |
||||||
|
while (rs.next()){ |
||||||
|
Map<String, Object> map = new HashMap<>(); |
||||||
|
map.put("browser","IE/Edge"); |
||||||
|
map.put("count",rs.getString(1)); |
||||||
|
listmap.add(map); |
||||||
|
} |
||||||
|
rs = hiveConnecter.executeQueryBYStatement(Safarisql); |
||||||
|
while (rs.next()){ |
||||||
|
Map<String, Object> map = new HashMap<>(); |
||||||
|
map.put("browser","Safari"); |
||||||
|
map.put("count",rs.getString(1)); |
||||||
|
listmap.add(map); |
||||||
|
} |
||||||
|
// Collections.sort(listmap, new Comparator<Map<String, Object>>() {
|
||||||
|
// @Override
|
||||||
|
// public int compare(Map<String, Object> o1, Map<String, Object> o2) {
|
||||||
|
// String date1 = (String)o1.get("logdate");
|
||||||
|
// String date2 = (String)o2.get("logdate");
|
||||||
|
// //降序
|
||||||
|
// return date1.compareTo(date2);
|
||||||
|
// }
|
||||||
|
// });
|
||||||
|
System.out.println(listmap); |
||||||
|
// map.forEach((key,value)->{
|
||||||
|
// System.out.println(key+" "+value);
|
||||||
|
// });
|
||||||
|
} catch (Exception e) { |
||||||
|
System.out.println(e); |
||||||
|
} |
||||||
} |
} |
||||||
} |
} |
||||||
|
@ -0,0 +1,104 @@ |
|||||||
|
package com.louzin.openhdhapi.utils; |
||||||
|
|
||||||
|
import org.junit.After; |
||||||
|
import org.junit.Before; |
||||||
|
import org.junit.Test; |
||||||
|
|
||||||
|
import java.sql.*; |
||||||
|
import java.util.ArrayList; |
||||||
|
import java.util.List; |
||||||
|
import java.util.Map; |
||||||
|
|
||||||
|
public class jdbcHiveConnect { |
||||||
|
public jdbcHiveConnect(){} |
||||||
|
private static String driverName = "org.apache.hive.jdbc.HiveDriver"; |
||||||
|
private static String url = "jdbc:hive2://local1:10000/datacenter"; |
||||||
|
private static String user = "root"; |
||||||
|
private static String password = ""; |
||||||
|
private static Connection conn = null; |
||||||
|
private static Statement stmt = null; |
||||||
|
private static ResultSet rs = null; |
||||||
|
// 加载驱动、创建连接
|
||||||
|
    /**
     * Runs before each test: loads the Hive JDBC driver, opens a connection
     * to the datacenter database and creates the shared Statement.
     *
     * @throws Exception when the driver is missing or the connection fails
     */
    @Before
    public void init() throws Exception {
        Class.forName(driverName);
        conn = DriverManager.getConnection(url,user,password);
        stmt = conn.createStatement();
    }
||||||
|
@Test |
||||||
|
public List<String> showAllTable() throws SQLException, ClassNotFoundException { |
||||||
|
Class.forName(driverName); |
||||||
|
conn = DriverManager.getConnection(url,user,password); |
||||||
|
stmt = conn.createStatement(); |
||||||
|
String sql="show tables"; |
||||||
|
rs=stmt.executeQuery(sql); |
||||||
|
List<String> lists=new ArrayList<>(); |
||||||
|
while (rs.next()){ |
||||||
|
System.out.println(rs.getString(1)); |
||||||
|
lists.add(rs.getString(1)); |
||||||
|
} |
||||||
|
if ( rs != null) { |
||||||
|
rs.close(); |
||||||
|
} |
||||||
|
if (stmt != null) { |
||||||
|
stmt.close(); |
||||||
|
} |
||||||
|
if (conn != null) { |
||||||
|
conn.close(); |
||||||
|
} |
||||||
|
return lists; |
||||||
|
} |
||||||
|
    /**
     * Prints remote_addr / time_local / daystr / request for every valid
     * ('true') row in ods_weblog_detail. Uses the Statement created by
     * init() (@Before); cleanup happens in destory() (@After).
     *
     * @throws SQLException when the query or a row read fails
     */
    @Test
    public void selectData_all_true() throws SQLException {
        String sql="select * from ods_weblog_detail owd where valid ='true'";
        rs=stmt.executeQuery(sql);
        List lists=new ArrayList<>();
        while (rs.next()){
            // One row becomes one [addr, time, day, request] list.
            List copylist=new ArrayList<>();
            copylist.add(rs.getString("remote_addr"));
            copylist.add(rs.getString("time_local"));
            copylist.add(rs.getString("daystr"));
            copylist.add(rs.getString("request"));
            lists.add(copylist);
        }
        lists.forEach(item-> System.out.println(item.toString()));
    }
||||||
|
@Test |
||||||
|
public List selectData_day_pv() throws SQLException, ClassNotFoundException { |
||||||
|
Class.forName(driverName); |
||||||
|
conn = DriverManager.getConnection(url,user,password); |
||||||
|
stmt = conn.createStatement(); |
||||||
|
String sql="select remote_addr,COUNT(remote_addr)as pv" + |
||||||
|
" from ods_weblog_detail" + |
||||||
|
" group by remote_addr order by pv desc limit 10"; |
||||||
|
rs=stmt.executeQuery(sql); |
||||||
|
List lists=new ArrayList<>(); |
||||||
|
while (rs.next()){ |
||||||
|
Map<String,String> copymap=Map.of("value",rs.getString(2),"name",rs.getString(1)); |
||||||
|
lists.add(copymap); |
||||||
|
} |
||||||
|
if ( rs != null) { |
||||||
|
rs.close(); |
||||||
|
} |
||||||
|
if (stmt != null) { |
||||||
|
stmt.close(); |
||||||
|
} |
||||||
|
if (conn != null) { |
||||||
|
conn.close(); |
||||||
|
} |
||||||
|
// return lists;
|
||||||
|
return lists; |
||||||
|
} |
||||||
|
    /**
     * Runs after each test: closes the ResultSet, Statement and Connection
     * (in that order), tolerating any that were never opened.
     *
     * @throws Exception when a close fails
     */
    @After
    public void destory() throws Exception {
        if ( rs != null) {
            rs.close();
        }
        if (stmt != null) {
            stmt.close();
        }
        if (conn != null) {
            conn.close();
        }
    }
||||||
|
} |
Loading…
Reference in new issue