Spark SQL 读取数据库和 ES 数据进行处理代码详解

读取数据库数据和ElasticSearch数据进行连接处理

import java.util.HashMap; 
import java.util.List; 
import java.util.Map; 
import org.apache.log4j.Level; 
import org.apache.log4j.Logger; 
import org.apache.spark.SparkConf; 
import org.apache.spark.api.java.JavaSparkContext; 
import org.apache.spark.api.java.function.Function; 
import org.apache.spark.sql.DataFrame; 
import org.apache.spark.sql.Row; 
import org.apache.spark.sql.SQLContext; 
import com.dinpay.bdp.rcp.domain.FlowMain; 
import com.dinpay.bdp.rcp.util.CodisUtil; 
import com.dinpay.bdp.rcp.util.Constant; 
import com.google.gson.Gson; 
import redis.clients.jedis.Jedis; 
/** 
* 首页的数据,定时Job定时刷新 
*/ 
public class MainFlowProcedure{ 
private static Logger logger = Logger.getLogger(MainFlowProcedure.class.getSimpleName()); 
public static void main(String[] args) { 
//屏蔽日志 
Logger.getLogger("org.apache.spark").setLevel(Level.WARN); 
//配置SparkConf 
SparkConf conf = new SparkConf().setAppName("MainFlowProcedure").setMaster("local[2]"); 
JavaSparkContext sc =new JavaSparkContext(conf); 
SQLContext sqlContext = new SQLContext(sc); 
 
registerTable(sqlContext,"t_sys_attention_library"); 
registerTable(sqlContext,"t_sys_big_order"); 
registerTable(sqlContext,"t_sys_doubtful_order"); 
registerTable(sqlContext,"t_rc_event"); 
registerESTable(sqlContext, "t_order"); 
 
sqlContext.sql("select merchant_id,count(order_id) as txcnt ,sum(tx_money) as txamount from t_order group by merchant_id") 
.registerTempTable("t_order_merchant"); 
 
sqlContext.sql("select t2.merchant_id,count(t1.order_id) as bigcnt from t_sys_big_order t1 join t_order t2 on t1.order_id = t2.order_id group by t2.merchant_id") 
.registerTempTable("t_big_merchant");

sqlContext.sql("select t2.merchant_id,count(t1.order_id) as dbtcnt from t_sys_doubtful_order t1 join t_order t2 on t1.order_id = t2.order_id group by t2.merchant_id") .registerTempTable("t_doubt_merchant"); sqlContext.sql("select merchant_id,count(*) as rccnt from t_rc_event group by merchant_id") .registerTempTable("t_rc_merchant"); sqlContext.sql("select t1.merchant_id,t2.txcnt,t3.dbtcnt,t4.bigcnt,t2.txamount,t5.rccnt from t_sys_attention_library t1 left join t_order_merchant t2 on t1.merchant_id = t2.merchant_id left join t_doubt_merchant t3 on t1.merchant_id = t3.merchant_id left join t_big_merchant t4 on t1.merchant_id = t4.merchant_id left join t_rc_merchant t5 on t1.merchant_id = t5.merchant_id") .registerTempTable("t_attention"); //生成结果集 DataFrame resultDF =sqlContext.sql("select t.merchant_id,t.txcnt,t.dbtcnt,t.bigcnt,t.txamount,t.rccnt from t_attention t"); List<FlowMain> flowMains = resultDF.javaRDD().map(new Function<Row,FlowMain>(){public FlowMain call(Row row){
         FlowMain flowMain = new FlowMain(); flowMain.setMerchantId(row.getString(
0)); flowMain.setTxCnt(row.isNullAt(1)?0:row.getInt(1)); flowMain.setSuspectCnt(row.isNullAt(2)?0:row.getInt(2)); flowMain.setBigAmountCnt(row.isNullAt(3)?0:row.getInt(3)); flowMain.setTxAmount(row.isNullAt(4)?0.0:row.getDouble(4)); flowMain.setTxRate("偏高"); flowMain.setRcEventCnt(row.isNullAt(5)?0:row.getInt(5)); return flowMain; } }).collect(); Gson gson = new Gson(); String res = gson.toJson(flowMains); //连接codis进行操作,每次将新生成的数据,放到对应的key中 Jedis jedis = CodisUtil.getJedis() ; jedis.set("mainFlow", res); logger.info("插入到Codis数据完成!!!"); sc.stop(); } //获取数据库的表注册为临时表 private static void registerTable(SQLContext sqlContext,String dbtable){ Map<String,String> mcOptions =new HashMap<String, String>(); mcOptions.put("url", Constant.URL); mcOptions.put("driver", Constant.DRIVER); mcOptions.put("dbtable", dbtable); mcOptions.put("user", Constant.USER); mcOptions.put("password", Constant.PASSWD); DataFrame jdbcDF = sqlContext.read().format("jdbc").options(mcOptions).load(); jdbcDF.registerTempTable(dbtable); } //获取ElasticSearch中的索引注册为表 private static void registerESTable(SQLContext sqlContext,String index){ Map<String,String> esOptions =new HashMap<String, String>(); esOptions.put("es.nodes", Constant.ESNODES); esOptions.put("es.port", Constant.ESPORT); esOptions.put("es.index.auto.create", "true"); DataFrame OrderDF = sqlContext.read().format("org.elasticsearch.spark.sql") .options(esOptions) .load(index+"/"+index); OrderDF.registerTempTable(index); } }

 

原创文章,作者:ItWorker,如若转载,请注明出处:https://blog.ytso.com/9053.html

(0)
上一篇 2021年7月19日
下一篇 2021年7月19日

相关推荐

发表回复

登录后才能评论