Commit 7de8101e by guos

会员标签4.0

parent f00fdf9b
......@@ -7,7 +7,7 @@ import java.io.Serializable;
* @author: wangxk
* @date: 2020/5/7
*/
public class TagConsumeBean implements Serializable {
public class TrdEcuSalesLabelBean implements Serializable {
private int ent_id; //企业id
private int card_region_id;//卡域id
......
......@@ -7,7 +7,7 @@ import java.io.Serializable;
* @author: wangxk
* @date: 2020/8/3
*/
public class TagConsumeRecordBean implements Serializable {
public class TrdVirtualOrderBean implements Serializable {
private int ent_id; //企业id
private int area_id;//会员卡域ID
private int order_channel_code;//订单来源渠道(线下1,达摩微商城2,微盟微商城3,线上天猫4)
......@@ -16,6 +16,7 @@ public class TagConsumeRecordBean implements Serializable {
private String store_info_id;//门店id
private int is_eff_order;//是否有效订单(0:否,1是)
private int is_mbr_order;//是否会员订单(0:否,1是)
private String order_id;
private String order_no;
private String oorder_no;
private long goods_num;//订单商品数量
......@@ -24,6 +25,7 @@ public class TagConsumeRecordBean implements Serializable {
private double total_amt;//吊牌价总额
private String receipts_time;//单据日期
private String shop_id;//店铺id
private String virtual_id;//虚拟订单id
public int getEnt_id() {
return ent_id;
......@@ -152,4 +154,20 @@ public class TagConsumeRecordBean implements Serializable {
public void setShop_id(String shop_id) {
this.shop_id = shop_id;
}
public String getOrder_id() {
return order_id;
}
public void setOrder_id(String order_id) {
this.order_id = order_id;
}
public String getVirtual_id() {
return virtual_id;
}
public void setVirtual_id(String virtual_id) {
this.virtual_id = virtual_id;
}
}
package com.gic.spark.entity.bean;
/**
* @description:
* @author: wangxk
* @date: 2020/8/12
*/
/**
 * Line-item bean for a virtual order (Hive table DWD_GIC_TRD_VIRTUAL_ORDER_ITEM_D).
 * Field names intentionally use snake_case to match the Hive column names so
 * that dataset-to-POJO mapping works by name.
 *
 * Implements {@link java.io.Serializable} because instances travel inside Spark
 * RDD partitions (see the commodity filters), same as the sibling beans
 * TrdEcuSalesLabelBean and TrdVirtualOrderBean; without it Spark would throw
 * NotSerializableException at shuffle/serialization time.
 */
public class TrdVirtualOrderItemBean implements java.io.Serializable {

    private static final long serialVersionUID = 1L;

    private String ent_id;           // enterprise id
    private String ecu_id;           // member (ecu) id
    private String order_id;         // real (physical) order id
    private String receipts_time;    // receipt/document time
    private String sku_code;         // SKU code of the purchased commodity
    private String virtual_order_id; // virtual order id (join key to TrdVirtualOrderBean.virtual_id)

    public String getEnt_id() {
        return ent_id;
    }

    public void setEnt_id(String ent_id) {
        this.ent_id = ent_id;
    }

    public String getEcu_id() {
        return ecu_id;
    }

    public void setEcu_id(String ecu_id) {
        this.ecu_id = ecu_id;
    }

    public String getOrder_id() {
        return order_id;
    }

    public void setOrder_id(String order_id) {
        this.order_id = order_id;
    }

    public String getReceipts_time() {
        return receipts_time;
    }

    public void setReceipts_time(String receipts_time) {
        this.receipts_time = receipts_time;
    }

    public String getSku_code() {
        return sku_code;
    }

    public void setSku_code(String sku_code) {
        this.sku_code = sku_code;
    }

    public String getVirtual_order_id() {
        return virtual_order_id;
    }

    public void setVirtual_order_id(String virtual_order_id) {
        this.virtual_order_id = virtual_order_id;
    }
}
package com.gic.spark.entity.request;
import java.util.List;
/**
* @description:
* @author: wangxk
* @date: 2020/8/12
*/
/**
 * Filter request for commodity-based consume tags: carries the SKU codes that
 * a member's order items must match. Time-window fields are inherited from
 * {@code AbstractFilterRequestTime}.
 */
public class TagConsumeCommodityRequest extends AbstractFilterRequestTime {

    /** SKU codes to match against order-item sku_code values. */
    private List<String> skuCodeList;

    public List<String> getSkuCodeList() {
        return this.skuCodeList;
    }

    public void setSkuCodeList(List<String> skuCodeList) {
        this.skuCodeList = skuCodeList;
    }
}
package com.gic.spark.filter;
import com.gic.spark.datasource.entity.DataSourceHive;
import com.gic.spark.entity.bean.TagConsumeBean;
import com.gic.spark.entity.bean.TrdEcuSalesLabelBean;
import com.gic.spark.entity.request.TagConsumeRequest;
import com.gic.spark.util.ConstantUtil;
import org.apache.spark.api.java.JavaRDD;
......@@ -17,11 +17,11 @@ import java.util.List;
public abstract class AbstractTagConsumFilter implements TagFilter {
DataSourceHive dataSourceHive = new DataSourceHive(ConstantUtil.ADS_GIC_TRD_ECU_SALES_LABEL_D);
protected static JavaRDD<TagConsumeBean> statisticsTypeHandle(JavaRDD<TagConsumeBean> consumeRDD, TagConsumeRequest consumeRequest){
protected static JavaRDD<TrdEcuSalesLabelBean> statisticsTypeHandle(JavaRDD<TrdEcuSalesLabelBean> consumeRDD, TagConsumeRequest consumeRequest){
consumeRDD=consumeRDD.mapPartitions(data->{
List<TagConsumeBean> result=new ArrayList();
List<TrdEcuSalesLabelBean> result=new ArrayList();
while (data.hasNext()){
TagConsumeBean consumeBean=data.next();
TrdEcuSalesLabelBean consumeBean=data.next();
switch (consumeRequest.getStatisticsType()){
case COMMODITYBRAND:
if(consumeRequest.getStatisticsValList().contains(consumeBean.getEnt_id())){
......
package com.gic.spark.filter;
import com.gic.spark.datasource.entity.DataSourceHive;
import com.gic.spark.entity.bean.TagConsumeRecordBean;
import com.gic.spark.entity.bean.TrdVirtualOrderBean;
import com.gic.spark.entity.request.AbstractFilterRequest;
import com.gic.spark.entity.request.AbstractFilterRequestTime;
import com.gic.spark.util.ConstantUtil;
......@@ -23,11 +23,11 @@ public abstract class AbstractTagConsumRecordFilter implements TagFilter{
DataSourceHive dataSourceHive = new DataSourceHive(ConstantUtil.DWD_GIC_TRD_VIRTUAL_WDORDER_D);
protected static JavaRDD<TagConsumeRecordBean> statisticsTypeHandle(JavaRDD<TagConsumeRecordBean> consumeRecordRDD, AbstractFilterRequest request){
protected static JavaRDD<TrdVirtualOrderBean> statisticsTypeHandle(JavaRDD<TrdVirtualOrderBean> consumeRecordRDD, AbstractFilterRequest request){
consumeRecordRDD=consumeRecordRDD.mapPartitions(data->{
List<TagConsumeRecordBean> result=new ArrayList();
List<TrdVirtualOrderBean> result=new ArrayList();
while (data.hasNext()){
TagConsumeRecordBean consumeRecordBean=data.next();
TrdVirtualOrderBean consumeRecordBean=data.next();
switch (request.getStatisticsType()){
case COMMODITYBRAND:
if(request.getStatisticsValList().contains(consumeRecordBean.getEnt_id())){
......
package com.gic.spark.filter;
import com.gic.spark.datasource.entity.DataSourceEntity;
import com.gic.spark.datasource.entity.DataSourceHive;
import com.gic.spark.datasource.mysql.MysqlRddManager;
import com.gic.spark.entity.bean.TagConsumeBean;
import com.gic.spark.entity.bean.TrdEcuSalesLabelBean;
import com.gic.spark.entity.request.AbstractFilterRequest;
import com.gic.spark.entity.request.TagConsumeRequest;
import org.apache.spark.api.java.JavaRDD;
......@@ -38,16 +37,16 @@ public class TagAssociatedPurchaseRateFilter extends AbstractTagConsumFilter{
@Override
public JavaRDD<Integer> filterValidMember(Integer enterpriseId, AbstractFilterRequest request) {
TagConsumeRequest consumeRequest=(TagConsumeRequest)request;
JavaRDD<TagConsumeBean> consumeRDD= MysqlRddManager.getPojoFromDataset(dataSourceHive.getDatasetByEnterpriseId(enterpriseId), TagConsumeBean.class).javaRDD();
JavaRDD<TrdEcuSalesLabelBean> consumeRDD= MysqlRddManager.getPojoFromDataset(dataSourceHive.getDatasetByEnterpriseId(enterpriseId), TrdEcuSalesLabelBean.class).javaRDD();
consumeRDD=statisticsTypeHandle(consumeRDD,consumeRequest);
JavaRDD<Integer>ecuRdd=consumeRDD.mapToPair(data-> Tuple2.apply(data.getEcu_id(),data)).groupByKey()
.mapPartitions(data->{
List<Integer> result=new ArrayList();
while (data.hasNext()){
Tuple2<Integer,Iterable<TagConsumeBean>> tp2=data.next();
Tuple2<Integer,Iterable<TrdEcuSalesLabelBean>> tp2=data.next();
long totalGodsNum=0;
long totalOrderTimes=0;
for(TagConsumeBean consumeBean:tp2._2()){
for(TrdEcuSalesLabelBean consumeBean:tp2._2()){
totalGodsNum+=consumeBean.getGoods_num();
totalOrderTimes+=consumeBean.getOrder_times();
}
......
......@@ -2,7 +2,7 @@ package com.gic.spark.filter;
import com.gic.spark.datasource.entity.DataSourceEntity;
import com.gic.spark.datasource.mysql.MysqlRddManager;
import com.gic.spark.entity.bean.TagConsumeBean;
import com.gic.spark.entity.bean.TrdEcuSalesLabelBean;
import com.gic.spark.entity.request.AbstractFilterRequest;
import com.gic.spark.entity.request.TagConsumeRequest;
import org.apache.spark.api.java.JavaRDD;
......@@ -39,7 +39,7 @@ public class TagAverageDiscountFactorFilter extends AbstractTagConsumFilter {
@Override
public JavaRDD<Integer> filterValidMember(Integer enterpriseId, AbstractFilterRequest request) {
TagConsumeRequest consumeRequest=(TagConsumeRequest)request;
JavaRDD<TagConsumeBean> consumeRDD= MysqlRddManager.getPojoFromDataset(dataSourceHive.getDatasetByEnterpriseId(enterpriseId), TagConsumeBean.class).javaRDD();
JavaRDD<TrdEcuSalesLabelBean> consumeRDD= MysqlRddManager.getPojoFromDataset(dataSourceHive.getDatasetByEnterpriseId(enterpriseId), TrdEcuSalesLabelBean.class).javaRDD();
consumeRDD=statisticsTypeHandle(consumeRDD,consumeRequest);
JavaRDD<Integer>ecuRdd=consumeRDD.mapToPair(data-> Tuple2.apply(data.getEcu_id(),data)).groupByKey()
.mapPartitions(data->{
......@@ -47,8 +47,8 @@ public class TagAverageDiscountFactorFilter extends AbstractTagConsumFilter {
while (data.hasNext()){
double payAmt=0;
double totalAmt=0;
Tuple2<Integer,Iterable<TagConsumeBean>> tp2=data.next();
for(TagConsumeBean consumeBean:tp2._2()){
Tuple2<Integer,Iterable<TrdEcuSalesLabelBean>> tp2=data.next();
for(TrdEcuSalesLabelBean consumeBean:tp2._2()){
payAmt=consumeBean.getPay_amt();
totalAmt=consumeBean.getTotal_amt();
}
......
package com.gic.spark.filter;
import com.gic.spark.datasource.entity.DataSourceEntity;
import com.gic.spark.datasource.entity.DataSourceHive;
import com.gic.spark.datasource.mysql.MysqlRddManager;
import com.gic.spark.entity.bean.TrdVirtualOrderBean;
import com.gic.spark.entity.bean.TrdVirtualOrderItemBean;
import com.gic.spark.entity.request.AbstractFilterRequest;
import com.gic.spark.entity.request.TagConsumeCommodityRequest;
import com.gic.spark.util.ConstantUtil;
import com.gic.spark.util.DateUtil;
import org.apache.commons.lang.StringUtils;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaRDD;
import scala.Tuple2;
import java.util.ArrayList;
import java.util.List;
/**
* @description:
* @author: wangxk
* @date: 2020/8/12
*/
/**
 * Tag filter: selects members (ecu ids) who, within the request's time window,
 * placed a virtual order containing at least one of the requested SKU codes.
 *
 * Joins the virtual-order header source (inherited {@code dataSourceHive})
 * against the virtual-order item source on the virtual order id.
 */
public class TagConsumeCommodityFilter extends AbstractTagConsumRecordFilter {

    // Item-level Hive source; the header-level source comes from the superclass.
    DataSourceHive dataSourceHiveOrderItem = new DataSourceHive(ConstantUtil.DWD_GIC_TRD_VIRTUAL_ORDER_ITEM_D);

    private static TagConsumeCommodityFilter instance;

    /**
     * Returns the shared instance. Synchronized so that concurrent first calls
     * cannot race and create more than one instance.
     */
    public static synchronized TagConsumeCommodityFilter getInstance() {
        if (null == instance) {
            instance = new TagConsumeCommodityFilter();
        }
        return instance;
    }

    /** Both the order-header and order-item sources are required. */
    @Override
    public List<DataSourceEntity> necessarySourceList() {
        List<DataSourceEntity> result = new ArrayList<>();
        result.add(dataSourceHive);          // virtual order headers (inherited)
        result.add(dataSourceHiveOrderItem); // virtual order items
        return result;
    }

    /**
     * @param enterpriseId enterprise whose data partitions are loaded
     * @param request      expected to be a {@link TagConsumeCommodityRequest}
     * @return distinct ecu ids whose in-window orders contain a requested SKU
     */
    @Override
    public JavaRDD<Integer> filterValidMember(Integer enterpriseId, AbstractFilterRequest request) {
        TagConsumeCommodityRequest commodityRequest = (TagConsumeCommodityRequest) request;
        JavaRDD<TrdVirtualOrderBean> consumeRecordRDD = MysqlRddManager
                .getPojoFromDataset(dataSourceHive.getDatasetByEnterpriseId(enterpriseId), TrdVirtualOrderBean.class)
                .javaRDD();
        consumeRecordRDD = statisticsTypeHandle(consumeRecordRDD, commodityRequest);

        // (virtual order id -> member id), restricted to orders with a receipt
        // time that falls inside the request's window.
        JavaPairRDD<String, Integer> orderRdd = consumeRecordRDD
                .filter(data -> StringUtils.isNotEmpty(data.getReceipts_time()))
                .filter(data -> checkTime(commodityRequest,
                        DateUtil.strToDate(data.getReceipts_time(), DateUtil.FORMAT_DATETIME_19).getTime()))
                .mapToPair(data -> Tuple2.apply(data.getVirtual_id(), data.getEcu_id()));

        // Distinct virtual order ids containing at least one requested SKU
        // (reduceByKey dedupes orders with several matching items).
        JavaPairRDD<String, String> orderItemRDD = MysqlRddManager
                .getPojoFromDataset(dataSourceHiveOrderItem.getDatasetByEnterpriseId(enterpriseId), TrdVirtualOrderItemBean.class)
                .javaRDD()
                .filter(data -> StringUtils.isNotEmpty(data.getSku_code())
                        && commodityRequest.getSkuCodeList().contains(data.getSku_code()))
                .mapToPair(data -> Tuple2.apply(data.getVirtual_order_id(), data.getVirtual_order_id()))
                .reduceByKey((x, y) -> x);

        // Keep only orders that matched an item, then emit distinct member ids.
        JavaRDD<Integer> ecuRdd = orderRdd.leftOuterJoin(orderItemRDD)
                .filter(data -> data._2()._2().isPresent())
                .map(data -> data._2()._1())
                .distinct();
        return ecuRdd;
    }
}
......@@ -2,7 +2,7 @@ package com.gic.spark.filter;
import com.gic.spark.datasource.entity.DataSourceEntity;
import com.gic.spark.datasource.mysql.MysqlRddManager;
import com.gic.spark.entity.bean.TagConsumeRecordBean;
import com.gic.spark.entity.bean.TrdVirtualOrderBean;
import com.gic.spark.entity.request.AbstractFilterRequest;
import com.gic.spark.entity.request.TagConsumeTimeRequest;
import com.gic.spark.util.DateUtil;
......@@ -42,8 +42,8 @@ public class TagConsumeTimeFilter extends AbstractTagConsumRecordFilter{
@Override
public JavaRDD<Integer> filterValidMember(Integer enterpriseId, AbstractFilterRequest request) {
TagConsumeTimeRequest consumeTimeRequest=(TagConsumeTimeRequest)request;
JavaRDD<TagConsumeRecordBean>consumeRecordRDD= MysqlRddManager.getPojoFromDataset(dataSourceHive.getDatasetByEnterpriseId(enterpriseId)
.filter(new Column("is_eff_order").equalTo(1)), TagConsumeRecordBean.class).javaRDD();
JavaRDD<TrdVirtualOrderBean>consumeRecordRDD= MysqlRddManager.getPojoFromDataset(dataSourceHive.getDatasetByEnterpriseId(enterpriseId)
.filter(new Column("is_eff_order").equalTo(1)), TrdVirtualOrderBean.class).javaRDD();
consumeRecordRDD=statisticsTypeHandle(consumeRecordRDD,consumeTimeRequest);
JavaRDD<Integer>ecuRdd=consumeRecordRDD.filter(data-> StringUtils.isNotEmpty(data.getReceipts_time()))
.mapToPair(data-> Tuple2.apply(data.getEcu_id(), DateUtil.strToDate(data.getReceipts_time(),DateUtil.FORMAT_DATETIME_19)))
......
......@@ -2,7 +2,7 @@ package com.gic.spark.filter;
import com.gic.spark.datasource.entity.DataSourceEntity;
import com.gic.spark.datasource.mysql.MysqlRddManager;
import com.gic.spark.entity.bean.TagConsumeRecordBean;
import com.gic.spark.entity.bean.TrdVirtualOrderBean;
import com.gic.spark.entity.request.AbstractFilterRequest;
import com.gic.spark.entity.request.TagConsumeAmountRequest;
import com.gic.spark.util.CommonUtil;
......@@ -41,7 +41,7 @@ public class TagConsumeTotalFilter extends AbstractTagConsumRecordFilter {
@Override
public JavaRDD<Integer> filterValidMember(Integer enterpriseId, AbstractFilterRequest request) {
TagConsumeAmountRequest consumeAmountRequest=(TagConsumeAmountRequest)request;
JavaRDD<TagConsumeRecordBean>consumeRecordRDD=MysqlRddManager.getPojoFromDataset(dataSourceHive.getDatasetByEnterpriseId(enterpriseId), TagConsumeRecordBean.class).javaRDD();
JavaRDD<TrdVirtualOrderBean>consumeRecordRDD=MysqlRddManager.getPojoFromDataset(dataSourceHive.getDatasetByEnterpriseId(enterpriseId), TrdVirtualOrderBean.class).javaRDD();
consumeRecordRDD=statisticsTypeHandle(consumeRecordRDD,consumeAmountRequest);
int configStatus= CommonUtil.getConfigStatus(enterpriseId);
......
......@@ -2,7 +2,7 @@ package com.gic.spark.filter;
import com.gic.spark.datasource.entity.DataSourceEntity;
import com.gic.spark.datasource.mysql.MysqlRddManager;
import com.gic.spark.entity.bean.TagConsumeBean;
import com.gic.spark.entity.bean.TrdEcuSalesLabelBean;
import com.gic.spark.entity.request.AbstractFilterRequest;
import com.gic.spark.entity.request.TagConsumeRequest;
import org.apache.spark.api.java.JavaRDD;
......@@ -38,7 +38,7 @@ public class TagConsumptionSleepDaysFilter extends AbstractTagConsumFilter{
@Override
public JavaRDD<Integer> filterValidMember(Integer enterpriseId, AbstractFilterRequest request) {
TagConsumeRequest consumeRequest=(TagConsumeRequest)request;
JavaRDD<TagConsumeBean> consumeRDD= MysqlRddManager.getPojoFromDataset(dataSourceHive.getDatasetByEnterpriseId(enterpriseId), TagConsumeBean.class).javaRDD();
JavaRDD<TrdEcuSalesLabelBean> consumeRDD= MysqlRddManager.getPojoFromDataset(dataSourceHive.getDatasetByEnterpriseId(enterpriseId), TrdEcuSalesLabelBean.class).javaRDD();
consumeRDD=statisticsTypeHandle(consumeRDD,consumeRequest);
JavaRDD<Integer>ecuRdd=consumeRDD.mapToPair(data-> Tuple2.apply(data.getEcu_id(),data.getSleep_days()))
.reduceByKey((x,y)->x>y?y:x)
......
package com.gic.spark.filter;
import com.gic.spark.datasource.entity.DataSourceEntity;
import com.gic.spark.datasource.entity.DataSourceHive;
import com.gic.spark.datasource.mysql.MysqlRddManager;
import com.gic.spark.entity.bean.TagConsumeBean;
import com.gic.spark.entity.bean.TrdEcuSalesLabelBean;
import com.gic.spark.entity.request.AbstractFilterRequest;
import com.gic.spark.entity.request.TagConsumeRequest;
import org.apache.spark.api.java.JavaRDD;
......@@ -37,7 +36,7 @@ public class TagConsumptionTimeFilter extends AbstractTagConsumFilter{
@Override
public JavaRDD<Integer> filterValidMember(Integer enterpriseId, AbstractFilterRequest request) {
TagConsumeRequest consumeRequest=(TagConsumeRequest)request;
JavaRDD<TagConsumeBean> consumeRDD=MysqlRddManager.getPojoFromDataset(dataSourceHive.getDatasetByEnterpriseId(enterpriseId), TagConsumeBean.class).javaRDD();
JavaRDD<TrdEcuSalesLabelBean> consumeRDD=MysqlRddManager.getPojoFromDataset(dataSourceHive.getDatasetByEnterpriseId(enterpriseId), TrdEcuSalesLabelBean.class).javaRDD();
consumeRDD=statisticsTypeHandle(consumeRDD,consumeRequest);
JavaRDD<Integer>ecuRdd=consumeRDD.mapToPair(data-> Tuple2.apply(data.getEcu_id(),data.getOrder_times())).reduceByKey((x,y)->x+y)
.mapPartitions(data->{
......
package com.gic.spark.filter;
import com.gic.spark.datasource.entity.DataSourceEntity;
import com.gic.spark.datasource.entity.DataSourceHive;
import com.gic.spark.datasource.mysql.MysqlRddManager;
import com.gic.spark.entity.bean.TrdVirtualOrderBean;
import com.gic.spark.entity.bean.TrdVirtualOrderItemBean;
import com.gic.spark.entity.request.AbstractFilterRequest;
import com.gic.spark.entity.request.TagConsumeCommodityRequest;
import com.gic.spark.util.ConstantUtil;
import com.gic.spark.util.DateUtil;
import org.apache.commons.lang.StringUtils;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaRDD;
import scala.Tuple2;
import java.util.ArrayList;
import java.util.List;
/**
* @description:
* @author: wangxk
* @date: 2020/8/12
*/
/**
 * Tag filter: selects members (ecu ids) whose FIRST virtual order (earliest
 * receipt time) contained at least one of the requested SKU codes.
 *
 * Reduces each member's orders to the earliest one, then joins that order's
 * virtual id against the order-item source filtered by SKU.
 */
public class TagFirstConsumeCommodityFilter extends AbstractTagConsumRecordFilter {

    // Item-level Hive source; the header-level source comes from the superclass.
    DataSourceHive dataSourceHiveOrderItem = new DataSourceHive(ConstantUtil.DWD_GIC_TRD_VIRTUAL_ORDER_ITEM_D);

    private static TagFirstConsumeCommodityFilter instance;

    /**
     * Returns the shared instance. Synchronized so that concurrent first calls
     * cannot race and create more than one instance.
     */
    public static synchronized TagFirstConsumeCommodityFilter getInstance() {
        if (null == instance) {
            instance = new TagFirstConsumeCommodityFilter();
        }
        return instance;
    }

    /** Both the order-header and order-item sources are required. */
    @Override
    public List<DataSourceEntity> necessarySourceList() {
        List<DataSourceEntity> result = new ArrayList<>();
        result.add(dataSourceHive);          // virtual order headers (inherited)
        result.add(dataSourceHiveOrderItem); // virtual order items
        return result;
    }

    /**
     * @param enterpriseId enterprise whose data partitions are loaded
     * @param request      expected to be a {@link TagConsumeCommodityRequest}
     * @return distinct ecu ids whose earliest order contains a requested SKU
     */
    @Override
    public JavaRDD<Integer> filterValidMember(Integer enterpriseId, AbstractFilterRequest request) {
        TagConsumeCommodityRequest commodityRequest = (TagConsumeCommodityRequest) request;
        JavaRDD<TrdVirtualOrderBean> consumeRecordRDD = MysqlRddManager
                .getPojoFromDataset(dataSourceHive.getDatasetByEnterpriseId(enterpriseId), TrdVirtualOrderBean.class)
                .javaRDD();
        consumeRecordRDD = statisticsTypeHandle(consumeRecordRDD, commodityRequest);

        // Per member, keep only the order with the earliest receipt time, then
        // key by that order's virtual id: (virtual order id -> member id).
        JavaPairRDD<String, Integer> orderRdd = consumeRecordRDD
                .filter(data -> StringUtils.isNotEmpty(data.getReceipts_time()))
                .mapToPair(data -> Tuple2.apply(data.getEcu_id(), data))
                .reduceByKey((x, y) -> {
                    if (DateUtil.strToDate(x.getReceipts_time(), DateUtil.FORMAT_DATETIME_19).getTime()
                            < DateUtil.strToDate(y.getReceipts_time(), DateUtil.FORMAT_DATETIME_19).getTime()) {
                        return x;
                    } else {
                        return y;
                    }
                })
                .mapToPair(data -> Tuple2.apply(data._2().getVirtual_id(), data._1()));

        // Distinct virtual order ids containing at least one requested SKU.
        JavaPairRDD<String, String> orderItemRDD = MysqlRddManager
                .getPojoFromDataset(dataSourceHiveOrderItem.getDatasetByEnterpriseId(enterpriseId), TrdVirtualOrderItemBean.class)
                .javaRDD()
                .filter(data -> StringUtils.isNotEmpty(data.getSku_code())
                        && commodityRequest.getSkuCodeList().contains(data.getSku_code()))
                .mapToPair(data -> Tuple2.apply(data.getVirtual_order_id(), data.getVirtual_order_id()))
                .reduceByKey((x, y) -> x);

        // Keep only first-orders that matched an item; emit distinct member ids.
        JavaRDD<Integer> ecuRdd = orderRdd.leftOuterJoin(orderItemRDD)
                .filter(data -> data._2()._2().isPresent())
                .map(data -> data._2()._1())
                .distinct();
        return ecuRdd;
    }
}
......@@ -2,7 +2,7 @@ package com.gic.spark.filter;
import com.gic.spark.datasource.entity.DataSourceEntity;
import com.gic.spark.datasource.mysql.MysqlRddManager;
import com.gic.spark.entity.bean.TagConsumeRecordBean;
import com.gic.spark.entity.bean.TrdVirtualOrderBean;
import com.gic.spark.entity.request.AbstractFilterRequest;
import com.gic.spark.entity.request.TagConsumeTimeRequest;
import com.gic.spark.util.DateUtil;
......@@ -12,7 +12,6 @@ import org.apache.spark.sql.Column;
import scala.Tuple2;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
/**
......@@ -42,8 +41,8 @@ public class TagFirstConsumeTimeFilter extends AbstractTagConsumRecordFilter {
@Override
public JavaRDD<Integer> filterValidMember(Integer enterpriseId, AbstractFilterRequest request) {
TagConsumeTimeRequest consumeTimeRequest=(TagConsumeTimeRequest)request;
JavaRDD<TagConsumeRecordBean>consumeRecordRDD= MysqlRddManager.getPojoFromDataset(dataSourceHive.getDatasetByEnterpriseId(enterpriseId)
.filter(new Column("is_eff_order").equalTo(1)), TagConsumeRecordBean.class).javaRDD();
JavaRDD<TrdVirtualOrderBean>consumeRecordRDD= MysqlRddManager.getPojoFromDataset(dataSourceHive.getDatasetByEnterpriseId(enterpriseId)
.filter(new Column("is_eff_order").equalTo(1)), TrdVirtualOrderBean.class).javaRDD();
consumeRecordRDD=statisticsTypeHandle(consumeRecordRDD,consumeTimeRequest);
JavaRDD<Integer>ecuRdd=consumeRecordRDD.filter(data->StringUtils.isNotEmpty(data.getReceipts_time()))
......
......@@ -2,7 +2,7 @@ package com.gic.spark.filter;
import com.gic.spark.datasource.entity.DataSourceEntity;
import com.gic.spark.datasource.mysql.MysqlRddManager;
import com.gic.spark.entity.bean.TagConsumeRecordBean;
import com.gic.spark.entity.bean.TrdVirtualOrderBean;
import com.gic.spark.entity.request.AbstractFilterRequest;
import com.gic.spark.entity.request.TagConsumeChannelRequest;
import com.gic.spark.util.DateUtil;
......@@ -37,7 +37,7 @@ public class TagFirstConsumptionChannelFilter extends AbstractTagConsumRecordFil
@Override
public JavaRDD<Integer> filterValidMember(Integer enterpriseId, AbstractFilterRequest request) {
TagConsumeChannelRequest channelRequest=(TagConsumeChannelRequest)request;
JavaRDD<TagConsumeRecordBean>consumeRecordRDD= MysqlRddManager.getPojoFromDataset(dataSourceHive.getDatasetByEnterpriseId(enterpriseId),TagConsumeRecordBean.class).javaRDD();
JavaRDD<TrdVirtualOrderBean>consumeRecordRDD= MysqlRddManager.getPojoFromDataset(dataSourceHive.getDatasetByEnterpriseId(enterpriseId),TrdVirtualOrderBean.class).javaRDD();
consumeRecordRDD=statisticsTypeHandle(consumeRecordRDD,channelRequest);
......
......@@ -2,7 +2,7 @@ package com.gic.spark.filter;
import com.gic.spark.datasource.entity.DataSourceEntity;
import com.gic.spark.datasource.mysql.MysqlRddManager;
import com.gic.spark.entity.bean.TagConsumeRecordBean;
import com.gic.spark.entity.bean.TrdVirtualOrderBean;
import com.gic.spark.entity.request.AbstractFilterRequest;
import com.gic.spark.entity.request.TagConsumeAmountRequest;
import com.gic.spark.util.CommonUtil;
......@@ -39,7 +39,7 @@ public class TagFirstConsumptionMoneyFilter extends AbstractTagConsumRecordFilte
@Override
public JavaRDD<Integer> filterValidMember(Integer enterpriseId, AbstractFilterRequest request) {
TagConsumeAmountRequest consumeAmountRequest=(TagConsumeAmountRequest)request;
JavaRDD<TagConsumeRecordBean>consumeRecordRDD= MysqlRddManager.getPojoFromDataset(dataSourceHive.getDatasetByEnterpriseId(enterpriseId), TagConsumeRecordBean.class).javaRDD();
JavaRDD<TrdVirtualOrderBean>consumeRecordRDD= MysqlRddManager.getPojoFromDataset(dataSourceHive.getDatasetByEnterpriseId(enterpriseId), TrdVirtualOrderBean.class).javaRDD();
consumeRecordRDD=statisticsTypeHandle(consumeRecordRDD,consumeAmountRequest);
int configStatus= CommonUtil.getConfigStatus(enterpriseId);
......@@ -49,11 +49,11 @@ public class TagFirstConsumptionMoneyFilter extends AbstractTagConsumRecordFilte
List<Integer>result=new ArrayList();
while (data.hasNext()){
Tuple2<Integer, Iterable<TagConsumeRecordBean>> tp2=data.next();
Tuple2<Integer, Iterable<TrdVirtualOrderBean>> tp2=data.next();
double firstConsumAmount=0;
TagConsumeRecordBean firstConsumeAmountBean=null;
Map<String,TagConsumeRecordBean>effectiveOrderMap=new HashMap();
List<TagConsumeRecordBean>noEffectiveOrderList=new ArrayList();
TrdVirtualOrderBean firstConsumeAmountBean=null;
Map<String,TrdVirtualOrderBean>effectiveOrderMap=new HashMap();
List<TrdVirtualOrderBean>noEffectiveOrderList=new ArrayList();
tp2._2().forEach(element->{
if(element.getPay_amt()>0){
effectiveOrderMap.put(element.getOorder_no(),element);
......@@ -62,13 +62,13 @@ public class TagFirstConsumptionMoneyFilter extends AbstractTagConsumRecordFilte
}
});
noEffectiveOrderList.forEach(noEffectiveOrder->{
TagConsumeRecordBean effectiveOrder=effectiveOrderMap.get(noEffectiveOrder.getOorder_no());
TrdVirtualOrderBean effectiveOrder=effectiveOrderMap.get(noEffectiveOrder.getOorder_no());
if(noEffectiveOrder.getReceipts_time().substring(0,10).equals(effectiveOrder.getReceipts_time().substring(0,10))){
effectiveOrder.setPaid_amt(effectiveOrder.getPaid_amt()+noEffectiveOrder.getPaid_amt());
effectiveOrder.setPay_amt(effectiveOrder.getPay_amt()+noEffectiveOrder.getPay_amt());
}
});
for(TagConsumeRecordBean amountBean:effectiveOrderMap.values()){
for(TrdVirtualOrderBean amountBean:effectiveOrderMap.values()){
if(null==firstConsumeAmountBean){
firstConsumeAmountBean=amountBean;
}else{
......
package com.gic.spark.filter;
import com.gic.mysql.util.StringUtil;
import com.gic.spark.datasource.entity.DataSourceEntity;
import com.gic.spark.datasource.mysql.MysqlRddManager;
import com.gic.spark.entity.bean.TagConsumeRecordBean;
import com.gic.spark.entity.bean.TrdVirtualOrderBean;
import com.gic.spark.entity.request.AbstractFilterRequest;
import com.gic.spark.entity.request.TagConsumeStoreRequest;
import com.gic.spark.util.DateUtil;
......@@ -43,7 +42,7 @@ public class TagFirstOfflineConsumptionStoreFilter extends AbstractTagConsumReco
@Override
public JavaRDD<Integer> filterValidMember(Integer enterpriseId, AbstractFilterRequest request) {
TagConsumeStoreRequest storeRequest=(TagConsumeStoreRequest)request;
JavaRDD<TagConsumeRecordBean>consumeRecordRDD= MysqlRddManager.getPojoFromDataset(dataSourceHive.getDatasetByEnterpriseId(enterpriseId),TagConsumeRecordBean.class).javaRDD();
JavaRDD<TrdVirtualOrderBean>consumeRecordRDD= MysqlRddManager.getPojoFromDataset(dataSourceHive.getDatasetByEnterpriseId(enterpriseId),TrdVirtualOrderBean.class).javaRDD();
consumeRecordRDD=statisticsTypeHandle(consumeRecordRDD,storeRequest);
JavaRDD<Integer>ecuRdd=consumeRecordRDD.filter(data->data.getOrder_channel_code()==1
......
......@@ -2,8 +2,7 @@ package com.gic.spark.filter;
import com.gic.spark.datasource.entity.DataSourceEntity;
import com.gic.spark.datasource.mysql.MysqlRddManager;
import com.gic.spark.entity.bean.TagConsumeRecordBean;
import com.gic.spark.entity.enumtype.YearMonthDayType;
import com.gic.spark.entity.bean.TrdVirtualOrderBean;
import com.gic.spark.entity.request.AbstractFilterRequest;
import com.gic.spark.entity.request.TagConsumeStoreRequest;
import com.gic.spark.util.DateUtil;
......@@ -42,7 +41,7 @@ public class TagFirstOnlineConsumptionStoreFilter extends AbstractTagConsumRecor
@Override
public JavaRDD<Integer> filterValidMember(Integer enterpriseId, AbstractFilterRequest request) {
TagConsumeStoreRequest storeRequest=(TagConsumeStoreRequest)request;
JavaRDD<TagConsumeRecordBean>consumeRecordRDD= MysqlRddManager.getPojoFromDataset(dataSourceHive.getDatasetByEnterpriseId(enterpriseId),TagConsumeRecordBean.class).javaRDD();
JavaRDD<TrdVirtualOrderBean>consumeRecordRDD= MysqlRddManager.getPojoFromDataset(dataSourceHive.getDatasetByEnterpriseId(enterpriseId),TrdVirtualOrderBean.class).javaRDD();
consumeRecordRDD=statisticsTypeHandle(consumeRecordRDD,storeRequest);
JavaRDD<Integer>ecuRdd=consumeRecordRDD.filter(data->data.getOrder_channel_code()!=1
......
package com.gic.spark.filter;
import com.gic.spark.datasource.entity.DataSourceEntity;
import com.gic.spark.datasource.entity.DataSourceHive;
import com.gic.spark.datasource.mysql.MysqlRddManager;
import com.gic.spark.entity.bean.TrdVirtualOrderBean;
import com.gic.spark.entity.bean.TrdVirtualOrderItemBean;
import com.gic.spark.entity.request.AbstractFilterRequest;
import com.gic.spark.entity.request.TagConsumeCommodityRequest;
import com.gic.spark.util.ConstantUtil;
import com.gic.spark.util.DateUtil;
import org.apache.commons.lang.StringUtils;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaRDD;
import scala.Tuple2;
import java.util.ArrayList;
import java.util.List;
/**
* @description:
* @author: wangxk
* @date: 2020/8/12
*/
/**
 * Tag filter: selects members (ecu ids) who have EVER placed a virtual order
 * containing at least one of the requested SKU codes (no time-window check,
 * unlike TagConsumeCommodityFilter).
 */
public class TagHistoryConsumeCommodityFilter extends AbstractTagConsumRecordFilter {

    // Item-level Hive source; the header-level source comes from the superclass.
    DataSourceHive dataSourceHiveOrderItem = new DataSourceHive(ConstantUtil.DWD_GIC_TRD_VIRTUAL_ORDER_ITEM_D);

    private static TagHistoryConsumeCommodityFilter instance;

    /**
     * Returns the shared instance. Synchronized so that concurrent first calls
     * cannot race and create more than one instance.
     */
    public static synchronized TagHistoryConsumeCommodityFilter getInstance() {
        if (null == instance) {
            instance = new TagHistoryConsumeCommodityFilter();
        }
        return instance;
    }

    /** Both the order-header and order-item sources are required. */
    @Override
    public List<DataSourceEntity> necessarySourceList() {
        List<DataSourceEntity> result = new ArrayList<>();
        result.add(dataSourceHive);          // virtual order headers (inherited)
        result.add(dataSourceHiveOrderItem); // virtual order items
        return result;
    }

    /**
     * @param enterpriseId enterprise whose data partitions are loaded
     * @param request      expected to be a {@link TagConsumeCommodityRequest}
     * @return distinct ecu ids with any order containing a requested SKU
     */
    @Override
    public JavaRDD<Integer> filterValidMember(Integer enterpriseId, AbstractFilterRequest request) {
        TagConsumeCommodityRequest commodityRequest = (TagConsumeCommodityRequest) request;
        JavaRDD<TrdVirtualOrderBean> consumeRecordRDD = MysqlRddManager
                .getPojoFromDataset(dataSourceHive.getDatasetByEnterpriseId(enterpriseId), TrdVirtualOrderBean.class)
                .javaRDD();
        consumeRecordRDD = statisticsTypeHandle(consumeRecordRDD, commodityRequest);

        JavaRDD<TrdVirtualOrderItemBean> orderItemRDD = MysqlRddManager
                .getPojoFromDataset(dataSourceHiveOrderItem.getDatasetByEnterpriseId(enterpriseId), TrdVirtualOrderItemBean.class)
                .javaRDD();

        // (virtual order id -> member id) left-joined against items whose SKU
        // is in the requested list; keep matched orders, emit distinct members.
        JavaRDD<Integer> ecuRdd = consumeRecordRDD
                .mapToPair(data -> Tuple2.apply(data.getVirtual_id(), data.getEcu_id()))
                .leftOuterJoin(orderItemRDD
                        .mapToPair(data -> Tuple2.apply(data.getVirtual_order_id(), data.getSku_code()))
                        .filter(data -> StringUtils.isNotEmpty(data._2())
                                && commodityRequest.getSkuCodeList().contains(data._2())))
                .filter(data -> data._2()._2().isPresent())
                .map(data -> data._2()._1())
                .distinct();
        return ecuRdd;
    }
}
......@@ -2,7 +2,7 @@ package com.gic.spark.filter;
import com.gic.spark.datasource.entity.DataSourceEntity;
import com.gic.spark.datasource.mysql.MysqlRddManager;
import com.gic.spark.entity.bean.TagConsumeRecordBean;
import com.gic.spark.entity.bean.TrdVirtualOrderBean;
import com.gic.spark.entity.request.AbstractFilterRequest;
import com.gic.spark.entity.request.TagConsumeAmountRequest;
import com.gic.spark.util.CommonUtil;
......@@ -40,7 +40,7 @@ public class TagHistoryConsumeTotalFilter extends AbstractTagConsumRecordFilter{
@Override
public JavaRDD<Integer> filterValidMember(Integer enterpriseId, AbstractFilterRequest request) {
TagConsumeAmountRequest consumeAmountRequest=(TagConsumeAmountRequest)request;
JavaRDD<TagConsumeRecordBean>consumeRecordRDD= MysqlRddManager.getPojoFromDataset(dataSourceHive.getDatasetByEnterpriseId(enterpriseId), TagConsumeRecordBean.class).javaRDD();
JavaRDD<TrdVirtualOrderBean>consumeRecordRDD= MysqlRddManager.getPojoFromDataset(dataSourceHive.getDatasetByEnterpriseId(enterpriseId), TrdVirtualOrderBean.class).javaRDD();
consumeRecordRDD=statisticsTypeHandle(consumeRecordRDD,consumeAmountRequest);
int configStatus= CommonUtil.getConfigStatus(enterpriseId);
......
......@@ -2,7 +2,7 @@ package com.gic.spark.filter;
import com.gic.spark.datasource.entity.DataSourceEntity;
import com.gic.spark.datasource.mysql.MysqlRddManager;
import com.gic.spark.entity.bean.TagConsumeRecordBean;
import com.gic.spark.entity.bean.TrdVirtualOrderBean;
import com.gic.spark.entity.request.AbstractFilterRequest;
import com.gic.spark.entity.request.TagConsumeStoreRequest;
import org.apache.commons.lang.StringUtils;
......@@ -40,7 +40,7 @@ public class TagHistoryOfflineConsumptionStoreFilter extends AbstractTagConsumRe
@Override
public JavaRDD<Integer> filterValidMember(Integer enterpriseId, AbstractFilterRequest request) {
TagConsumeStoreRequest storeRequest=(TagConsumeStoreRequest)request;
JavaRDD<TagConsumeRecordBean>consumeRecordRDD= MysqlRddManager.getPojoFromDataset(dataSourceHive.getDatasetByEnterpriseId(enterpriseId),TagConsumeRecordBean.class).javaRDD();
JavaRDD<TrdVirtualOrderBean>consumeRecordRDD= MysqlRddManager.getPojoFromDataset(dataSourceHive.getDatasetByEnterpriseId(enterpriseId),TrdVirtualOrderBean.class).javaRDD();
consumeRecordRDD=statisticsTypeHandle(consumeRecordRDD,storeRequest);
JavaRDD<Integer>ecuRdd=consumeRecordRDD.filter(data->data.getOrder_channel_code()==1
......
......@@ -2,10 +2,9 @@ package com.gic.spark.filter;
import com.gic.spark.datasource.entity.DataSourceEntity;
import com.gic.spark.datasource.mysql.MysqlRddManager;
import com.gic.spark.entity.bean.TagConsumeRecordBean;
import com.gic.spark.entity.bean.TrdVirtualOrderBean;
import com.gic.spark.entity.request.AbstractFilterRequest;
import com.gic.spark.entity.request.TagConsumeStoreRequest;
import com.gic.spark.util.DateUtil;
import org.apache.commons.lang.StringUtils;
import org.apache.spark.api.java.JavaRDD;
import scala.Tuple2;
......@@ -41,7 +40,7 @@ public class TagHistoryOnlineConsumptionStoreFilter extends AbstractTagConsumRec
@Override
public JavaRDD<Integer> filterValidMember(Integer enterpriseId, AbstractFilterRequest request) {
TagConsumeStoreRequest storeRequest=(TagConsumeStoreRequest)request;
JavaRDD<TagConsumeRecordBean>consumeRecordRDD= MysqlRddManager.getPojoFromDataset(dataSourceHive.getDatasetByEnterpriseId(enterpriseId),TagConsumeRecordBean.class).javaRDD();
JavaRDD<TrdVirtualOrderBean>consumeRecordRDD= MysqlRddManager.getPojoFromDataset(dataSourceHive.getDatasetByEnterpriseId(enterpriseId),TrdVirtualOrderBean.class).javaRDD();
consumeRecordRDD=statisticsTypeHandle(consumeRecordRDD,storeRequest);
JavaRDD<Integer>ecuRdd=consumeRecordRDD.filter(data->data.getOrder_channel_code()!=1
......
package com.gic.spark.filter;
import com.gic.spark.datasource.entity.DataSourceEntity;
import com.gic.spark.datasource.entity.DataSourceHive;
import com.gic.spark.datasource.mysql.MysqlRddManager;
import com.gic.spark.entity.bean.TrdVirtualOrderBean;
import com.gic.spark.entity.bean.TrdVirtualOrderItemBean;
import com.gic.spark.entity.request.AbstractFilterRequest;
import com.gic.spark.entity.request.TagConsumeCommodityRequest;
import com.gic.spark.util.ConstantUtil;
import com.gic.spark.util.DateUtil;
import org.apache.commons.lang.StringUtils;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaRDD;
import scala.Tuple2;
import java.util.ArrayList;
import java.util.List;
/**
* @description:
* @author: wangxk
* @date: 2020/8/12
*/
public class TagLatelyConsumeCommodityFilter extends AbstractTagConsumRecordFilter {

    /** Hive source for virtual-order line items (sku level). */
    DataSourceHive dataSourceHiveOrderItem = new DataSourceHive(ConstantUtil.DWD_GIC_TRD_VIRTUAL_ORDER_ITEM_D);

    private static TagLatelyConsumeCommodityFilter instance;

    /**
     * Lazily created singleton.
     * NOTE(review): not thread-safe; this matches the pattern used by the other
     * filters in this package — confirm filters are only created from a single
     * driver thread.
     */
    public static TagLatelyConsumeCommodityFilter getInstance() {
        if (null == instance) {
            instance = new TagLatelyConsumeCommodityFilter();
        }
        return instance;
    }

    @Override
    public List<DataSourceEntity> necessarySourceList() {
        // Needs both the virtual-order header source (inherited dataSourceHive)
        // and the order-item source declared above.
        List<DataSourceEntity> result = new ArrayList<>(); // was raw ArrayList
        result.add(dataSourceHive);
        result.add(dataSourceHiveOrderItem);
        return result;
    }

    /**
     * Selects members whose most recent virtual order (by receipts_time)
     * contains at least one of the requested SKUs.
     *
     * @param enterpriseId enterprise whose data set is loaded
     * @param request      expected to be a {@link TagConsumeCommodityRequest}
     * @return distinct ecu ids matching the tag condition
     */
    @Override
    public JavaRDD<Integer> filterValidMember(Integer enterpriseId, AbstractFilterRequest request) {
        TagConsumeCommodityRequest commodityRequest = (TagConsumeCommodityRequest) request;
        JavaRDD<TrdVirtualOrderBean> consumeRecordRDD = MysqlRddManager
                .getPojoFromDataset(dataSourceHive.getDatasetByEnterpriseId(enterpriseId), TrdVirtualOrderBean.class)
                .javaRDD();
        consumeRecordRDD = statisticsTypeHandle(consumeRecordRDD, commodityRequest);

        // Per member, keep only the latest order (max receipts_time), then key by virtual order id.
        JavaPairRDD<String, Integer> orderRdd = consumeRecordRDD
                .filter(data -> StringUtils.isNotEmpty(data.getReceipts_time()))
                .mapToPair(data -> Tuple2.apply(data.getEcu_id(), data))
                .reduceByKey((x, y) ->
                        DateUtil.strToDate(x.getReceipts_time(), DateUtil.FORMAT_DATETIME_19).getTime()
                                > DateUtil.strToDate(y.getReceipts_time(), DateUtil.FORMAT_DATETIME_19).getTime()
                                ? x : y)
                .mapToPair(data -> Tuple2.apply(data._2().getVirtual_id(), data._1()));

        // Virtual-order ids containing at least one requested sku, deduplicated.
        JavaPairRDD<String, String> orderItemRDD = MysqlRddManager
                .getPojoFromDataset(dataSourceHiveOrderItem.getDatasetByEnterpriseId(enterpriseId), TrdVirtualOrderItemBean.class)
                .javaRDD()
                .filter(data -> StringUtils.isNotEmpty(data.getSku_code())
                        && commodityRequest.getSkuCodeList().contains(data.getSku_code()))
                .mapToPair(data -> Tuple2.apply(data.getVirtual_order_id(), data.getVirtual_order_id()))
                .reduceByKey((x, y) -> x);

        // leftOuterJoin(...).filter(isPresent) is just an inner join; join avoids the Optional wrapping.
        return orderRdd.join(orderItemRDD)
                .map(data -> data._2()._1())
                .distinct();
    }
}
......@@ -2,7 +2,7 @@ package com.gic.spark.filter;
import com.gic.spark.datasource.entity.DataSourceEntity;
import com.gic.spark.datasource.mysql.MysqlRddManager;
import com.gic.spark.entity.bean.TagConsumeRecordBean;
import com.gic.spark.entity.bean.TrdVirtualOrderBean;
import com.gic.spark.entity.request.AbstractFilterRequest;
import com.gic.spark.entity.request.TagConsumeTimeRequest;
import com.gic.spark.util.DateUtil;
......@@ -40,8 +40,8 @@ public class TagLatelyConsumeTimeFilter extends AbstractTagConsumRecordFilter{
@Override
public JavaRDD<Integer> filterValidMember(Integer enterpriseId, AbstractFilterRequest request) {
TagConsumeTimeRequest consumeTimeRequest=(TagConsumeTimeRequest)request;
JavaRDD<TagConsumeRecordBean>consumeRecordRDD= MysqlRddManager.getPojoFromDataset(dataSourceHive.getDatasetByEnterpriseId(enterpriseId)
.filter(new Column("is_eff_order").equalTo(1)), TagConsumeRecordBean.class).javaRDD();
JavaRDD<TrdVirtualOrderBean>consumeRecordRDD= MysqlRddManager.getPojoFromDataset(dataSourceHive.getDatasetByEnterpriseId(enterpriseId)
.filter(new Column("is_eff_order").equalTo(1)), TrdVirtualOrderBean.class).javaRDD();
consumeRecordRDD=statisticsTypeHandle(consumeRecordRDD,consumeTimeRequest);
JavaRDD<Integer>ecuRdd=consumeRecordRDD.filter(data-> StringUtils.isNotEmpty(data.getReceipts_time()))
......
......@@ -2,7 +2,7 @@ package com.gic.spark.filter;
import com.gic.spark.datasource.entity.DataSourceEntity;
import com.gic.spark.datasource.mysql.MysqlRddManager;
import com.gic.spark.entity.bean.TagConsumeRecordBean;
import com.gic.spark.entity.bean.TrdVirtualOrderBean;
import com.gic.spark.entity.request.AbstractFilterRequest;
import com.gic.spark.entity.request.TagConsumeChannelRequest;
import com.gic.spark.util.DateUtil;
......@@ -40,7 +40,7 @@ public class TagLatelyConsumptionChannelFilter extends AbstractTagConsumRecordFi
@Override
public JavaRDD<Integer> filterValidMember(Integer enterpriseId, AbstractFilterRequest request) {
TagConsumeChannelRequest channelRequest=(TagConsumeChannelRequest)request;
JavaRDD<TagConsumeRecordBean>consumeRecordRDD= MysqlRddManager.getPojoFromDataset(dataSourceHive.getDatasetByEnterpriseId(enterpriseId),TagConsumeRecordBean.class).javaRDD();
JavaRDD<TrdVirtualOrderBean>consumeRecordRDD= MysqlRddManager.getPojoFromDataset(dataSourceHive.getDatasetByEnterpriseId(enterpriseId),TrdVirtualOrderBean.class).javaRDD();
consumeRecordRDD=statisticsTypeHandle(consumeRecordRDD,channelRequest);
......
......@@ -2,7 +2,7 @@ package com.gic.spark.filter;
import com.gic.spark.datasource.entity.DataSourceEntity;
import com.gic.spark.datasource.mysql.MysqlRddManager;
import com.gic.spark.entity.bean.TagConsumeRecordBean;
import com.gic.spark.entity.bean.TrdVirtualOrderBean;
import com.gic.spark.entity.request.AbstractFilterRequest;
import com.gic.spark.entity.request.TagConsumeAmountRequest;
import com.gic.spark.util.CommonUtil;
......@@ -42,7 +42,7 @@ public class TagLatelyConsumptionMoneyFilter extends AbstractTagConsumRecordFil
@Override
public JavaRDD<Integer> filterValidMember(Integer enterpriseId, AbstractFilterRequest request) {
TagConsumeAmountRequest consumeAmountRequest=(TagConsumeAmountRequest)request;
JavaRDD<TagConsumeRecordBean>consumeRecordRDD= MysqlRddManager.getPojoFromDataset(dataSourceHive.getDatasetByEnterpriseId(enterpriseId), TagConsumeRecordBean.class).javaRDD();
JavaRDD<TrdVirtualOrderBean>consumeRecordRDD= MysqlRddManager.getPojoFromDataset(dataSourceHive.getDatasetByEnterpriseId(enterpriseId), TrdVirtualOrderBean.class).javaRDD();
consumeRecordRDD=statisticsTypeHandle(consumeRecordRDD,consumeAmountRequest);
int configStatus= CommonUtil.getConfigStatus(enterpriseId);
......@@ -52,11 +52,11 @@ public class TagLatelyConsumptionMoneyFilter extends AbstractTagConsumRecordFil
List<Integer>result=new ArrayList();
while (data.hasNext()){
Tuple2<Integer, Iterable<TagConsumeRecordBean>> tp2=data.next();
Tuple2<Integer, Iterable<TrdVirtualOrderBean>> tp2=data.next();
double latelyConsumAmount=0;
TagConsumeRecordBean latelyConsumeAmountBean=null;
Map<String,TagConsumeRecordBean> effectiveOrderMap=new HashMap();
List<TagConsumeRecordBean>noEffectiveOrderList=new ArrayList();
TrdVirtualOrderBean latelyConsumeAmountBean=null;
Map<String,TrdVirtualOrderBean> effectiveOrderMap=new HashMap();
List<TrdVirtualOrderBean>noEffectiveOrderList=new ArrayList();
tp2._2().forEach(element->{
if(element.getPay_amt()>0){
effectiveOrderMap.put(element.getOorder_no(),element);
......@@ -65,13 +65,13 @@ public class TagLatelyConsumptionMoneyFilter extends AbstractTagConsumRecordFil
}
});
noEffectiveOrderList.forEach(noEffectiveOrder->{
TagConsumeRecordBean effectiveOrder=effectiveOrderMap.get(noEffectiveOrder.getOorder_no());
TrdVirtualOrderBean effectiveOrder=effectiveOrderMap.get(noEffectiveOrder.getOorder_no());
if(noEffectiveOrder.getReceipts_time().substring(0,10).equals(effectiveOrder.getReceipts_time().substring(0,10))){
effectiveOrder.setPaid_amt(effectiveOrder.getPaid_amt()+noEffectiveOrder.getPaid_amt());
effectiveOrder.setPay_amt(effectiveOrder.getPay_amt()+noEffectiveOrder.getPay_amt());
}
});
for(TagConsumeRecordBean amountBean:effectiveOrderMap.values()){
for(TrdVirtualOrderBean amountBean:effectiveOrderMap.values()){
if(null==latelyConsumeAmountBean){
latelyConsumeAmountBean=amountBean;
}else{
......
......@@ -2,7 +2,7 @@ package com.gic.spark.filter;
import com.gic.spark.datasource.entity.DataSourceEntity;
import com.gic.spark.datasource.mysql.MysqlRddManager;
import com.gic.spark.entity.bean.TagConsumeRecordBean;
import com.gic.spark.entity.bean.TrdVirtualOrderBean;
import com.gic.spark.entity.request.AbstractFilterRequest;
import com.gic.spark.entity.request.TagConsumeStoreRequest;
import com.gic.spark.util.DateUtil;
......@@ -41,7 +41,7 @@ public class TagLatelyOnlineConsumptionStoreFilter extends AbstractTagConsumReco
@Override
public JavaRDD<Integer> filterValidMember(Integer enterpriseId, AbstractFilterRequest request) {
TagConsumeStoreRequest storeRequest=(TagConsumeStoreRequest)request;
JavaRDD<TagConsumeRecordBean>consumeRecordRDD= MysqlRddManager.getPojoFromDataset(dataSourceHive.getDatasetByEnterpriseId(enterpriseId),TagConsumeRecordBean.class).javaRDD();
JavaRDD<TrdVirtualOrderBean>consumeRecordRDD= MysqlRddManager.getPojoFromDataset(dataSourceHive.getDatasetByEnterpriseId(enterpriseId),TrdVirtualOrderBean.class).javaRDD();
consumeRecordRDD=statisticsTypeHandle(consumeRecordRDD,storeRequest);
JavaRDD<Integer>ecuRdd=consumeRecordRDD.filter(data->data.getOrder_channel_code()!=1
......
......@@ -2,7 +2,7 @@ package com.gic.spark.filter;
import com.gic.spark.datasource.entity.DataSourceEntity;
import com.gic.spark.datasource.mysql.MysqlRddManager;
import com.gic.spark.entity.bean.TagConsumeRecordBean;
import com.gic.spark.entity.bean.TrdVirtualOrderBean;
import com.gic.spark.entity.request.AbstractFilterRequest;
import com.gic.spark.entity.request.TagConsumeAmountRequest;
import com.gic.spark.util.CommonUtil;
......@@ -40,7 +40,7 @@ public class TagLowestSingleConsumptionMoneyFilter extends AbstractTagConsumReco
@Override
public JavaRDD<Integer> filterValidMember(Integer enterpriseId, AbstractFilterRequest request) {
TagConsumeAmountRequest consumeAmountRequest=(TagConsumeAmountRequest)request;
JavaRDD<TagConsumeRecordBean>consumeRecordRDD= MysqlRddManager.getPojoFromDataset(dataSourceHive.getDatasetByEnterpriseId(enterpriseId), TagConsumeRecordBean.class).javaRDD();
JavaRDD<TrdVirtualOrderBean>consumeRecordRDD= MysqlRddManager.getPojoFromDataset(dataSourceHive.getDatasetByEnterpriseId(enterpriseId), TrdVirtualOrderBean.class).javaRDD();
consumeRecordRDD=statisticsTypeHandle(consumeRecordRDD,consumeAmountRequest);
int configStatus= CommonUtil.getConfigStatus(enterpriseId);
......@@ -69,11 +69,11 @@ public class TagLowestSingleConsumptionMoneyFilter extends AbstractTagConsumReco
List<Integer>result=new ArrayList();
while (data.hasNext()){
Tuple2<Integer, Iterable<TagConsumeRecordBean>> tp2=data.next();
Tuple2<Integer, Iterable<TrdVirtualOrderBean>> tp2=data.next();
double topConsumAmount=0;
TagConsumeRecordBean topConsumeAmountBean=null;
Map<String,TagConsumeRecordBean> effectiveOrderMap=new HashMap();
List<TagConsumeRecordBean>noEffectiveOrderList=new ArrayList();
TrdVirtualOrderBean topConsumeAmountBean=null;
Map<String,TrdVirtualOrderBean> effectiveOrderMap=new HashMap();
List<TrdVirtualOrderBean>noEffectiveOrderList=new ArrayList();
tp2._2().forEach(element->{
if(element.getPay_amt()>0){
effectiveOrderMap.put(element.getOorder_no(),element);
......@@ -82,13 +82,13 @@ public class TagLowestSingleConsumptionMoneyFilter extends AbstractTagConsumReco
}
});
noEffectiveOrderList.forEach(noEffectiveOrder->{
TagConsumeRecordBean effectiveOrder=effectiveOrderMap.get(noEffectiveOrder.getOorder_no());
TrdVirtualOrderBean effectiveOrder=effectiveOrderMap.get(noEffectiveOrder.getOorder_no());
if(noEffectiveOrder.getReceipts_time().substring(0,10).equals(effectiveOrder.getReceipts_time().substring(0,10))){
effectiveOrder.setPaid_amt(effectiveOrder.getPaid_amt()+noEffectiveOrder.getPaid_amt());
effectiveOrder.setPay_amt(effectiveOrder.getPay_amt()+noEffectiveOrder.getPay_amt());
}
});
for(TagConsumeRecordBean amountBean:effectiveOrderMap.values()){
for(TrdVirtualOrderBean amountBean:effectiveOrderMap.values()){
topConsumAmount=configStatus==1?
(topConsumeAmountBean.getPaid_amt()<topConsumAmount?topConsumeAmountBean.getPaid_amt():topConsumAmount):
(topConsumeAmountBean.getPay_amt()<topConsumAmount?topConsumeAmountBean.getPay_amt():topConsumAmount);
......
......@@ -2,7 +2,7 @@ package com.gic.spark.filter;
import com.gic.spark.datasource.entity.DataSourceEntity;
import com.gic.spark.datasource.mysql.MysqlRddManager;
import com.gic.spark.entity.bean.TagConsumeRecordBean;
import com.gic.spark.entity.bean.TrdVirtualOrderBean;
import com.gic.spark.entity.enumtype.YearMonthDayType;
import com.gic.spark.entity.request.AbstractFilterRequest;
import com.gic.spark.entity.request.TagConsumeStoreRequest;
......@@ -43,7 +43,7 @@ public class TagOfflineConsumptionStoreFilter extends AbstractTagConsumRecordFil
public JavaRDD<Integer> filterValidMember(Integer enterpriseId, AbstractFilterRequest request) {
TagConsumeStoreRequest storeRequest=(TagConsumeStoreRequest)request;
storeRequest.setYearMonthDayType(YearMonthDayType.DAY);
JavaRDD<TagConsumeRecordBean>consumeRecordRDD= MysqlRddManager.getPojoFromDataset(dataSourceHive.getDatasetByEnterpriseId(enterpriseId),TagConsumeRecordBean.class).javaRDD();
JavaRDD<TrdVirtualOrderBean>consumeRecordRDD= MysqlRddManager.getPojoFromDataset(dataSourceHive.getDatasetByEnterpriseId(enterpriseId),TrdVirtualOrderBean.class).javaRDD();
consumeRecordRDD=statisticsTypeHandle(consumeRecordRDD,storeRequest);
JavaRDD<Integer>ecuRdd=consumeRecordRDD.filter(data->data.getOrder_channel_code()==1
......
......@@ -2,7 +2,7 @@ package com.gic.spark.filter;
import com.gic.spark.datasource.entity.DataSourceEntity;
import com.gic.spark.datasource.mysql.MysqlRddManager;
import com.gic.spark.entity.bean.TagConsumeRecordBean;
import com.gic.spark.entity.bean.TrdVirtualOrderBean;
import com.gic.spark.entity.enumtype.YearMonthDayType;
import com.gic.spark.entity.request.AbstractFilterRequest;
import com.gic.spark.entity.request.TagConsumeStoreRequest;
......@@ -38,7 +38,7 @@ public class TagOnlineConsumptionStoreFilter extends AbstractTagConsumRecordFilt
public JavaRDD<Integer> filterValidMember(Integer enterpriseId, AbstractFilterRequest request) {
TagConsumeStoreRequest storeRequest=(TagConsumeStoreRequest)request;
storeRequest.setYearMonthDayType(YearMonthDayType.DAY);
JavaRDD<TagConsumeRecordBean>consumeRecordRDD= MysqlRddManager.getPojoFromDataset(dataSourceHive.getDatasetByEnterpriseId(enterpriseId),TagConsumeRecordBean.class).javaRDD();
JavaRDD<TrdVirtualOrderBean>consumeRecordRDD= MysqlRddManager.getPojoFromDataset(dataSourceHive.getDatasetByEnterpriseId(enterpriseId),TrdVirtualOrderBean.class).javaRDD();
consumeRecordRDD=statisticsTypeHandle(consumeRecordRDD,storeRequest);
JavaRDD<Integer>ecuRdd=consumeRecordRDD.filter(data->data.getOrder_channel_code()!=1
......
package com.gic.spark.filter;
import com.gic.spark.datasource.entity.DataSourceEntity;
import com.gic.spark.datasource.entity.DataSourceHive;
import com.gic.spark.datasource.mysql.MysqlRddManager;
import com.gic.spark.entity.bean.TagConsumeBean;
import com.gic.spark.entity.bean.TrdEcuSalesLabelBean;
import com.gic.spark.entity.request.AbstractFilterRequest;
import com.gic.spark.entity.request.TagConsumeRequest;
import org.apache.spark.api.java.JavaRDD;
......@@ -38,16 +37,16 @@ public class TagPerCustomerTransactionFilter extends AbstractTagConsumFilter{
@Override
public JavaRDD<Integer> filterValidMember(Integer enterpriseId, AbstractFilterRequest request) {
TagConsumeRequest consumeRequest=(TagConsumeRequest)request;
JavaRDD<TagConsumeBean> consumeRDD= MysqlRddManager.getPojoFromDataset(dataSourceHive.getDatasetByEnterpriseId(enterpriseId), TagConsumeBean.class).javaRDD();
JavaRDD<TrdEcuSalesLabelBean> consumeRDD= MysqlRddManager.getPojoFromDataset(dataSourceHive.getDatasetByEnterpriseId(enterpriseId), TrdEcuSalesLabelBean.class).javaRDD();
consumeRDD=statisticsTypeHandle(consumeRDD,consumeRequest);
JavaRDD<Integer>ecuRdd=consumeRDD.mapToPair(data-> Tuple2.apply(data.getEcu_id(),data)).groupByKey()
.mapPartitions(data->{
List<Integer> result=new ArrayList();
while (data.hasNext()){
Tuple2<Integer,Iterable<TagConsumeBean>> tp2=data.next();
Tuple2<Integer,Iterable<TrdEcuSalesLabelBean>> tp2=data.next();
int consumeTimes=0;
double payAmt=0;
for(TagConsumeBean consumeBean:tp2._2()){
for(TrdEcuSalesLabelBean consumeBean:tp2._2()){
consumeTimes+=consumeBean.getOrder_times();
payAmt+=consumeBean.getPay_amt();
}
......
......@@ -2,7 +2,7 @@ package com.gic.spark.filter;
import com.gic.spark.datasource.entity.DataSourceEntity;
import com.gic.spark.datasource.mysql.MysqlRddManager;
import com.gic.spark.entity.bean.TagConsumeRecordBean;
import com.gic.spark.entity.bean.TrdVirtualOrderBean;
import com.gic.spark.entity.request.AbstractFilterRequest;
import com.gic.spark.entity.request.TagConsumeStoreRequest;
import com.gic.spark.util.DateUtil;
......@@ -40,7 +40,7 @@ public class TagRecentlyOfflineConsumptionStoreFilter extends AbstractTagConsumR
public JavaRDD<Integer> filterValidMember(Integer enterpriseId, AbstractFilterRequest request) {
TagConsumeStoreRequest storeRequest=(TagConsumeStoreRequest)request;
JavaRDD<TagConsumeRecordBean>consumeRecordRDD= MysqlRddManager.getPojoFromDataset(dataSourceHive.getDatasetByEnterpriseId(enterpriseId),TagConsumeRecordBean.class).javaRDD();
JavaRDD<TrdVirtualOrderBean>consumeRecordRDD= MysqlRddManager.getPojoFromDataset(dataSourceHive.getDatasetByEnterpriseId(enterpriseId),TrdVirtualOrderBean.class).javaRDD();
consumeRecordRDD=statisticsTypeHandle(consumeRecordRDD,storeRequest);
JavaRDD<Integer>ecuRdd=consumeRecordRDD.filter(data-> data.getOrder_channel_code()==1
......
......@@ -2,7 +2,7 @@ package com.gic.spark.filter;
import com.gic.spark.datasource.entity.DataSourceEntity;
import com.gic.spark.datasource.mysql.MysqlRddManager;
import com.gic.spark.entity.bean.TagConsumeRecordBean;
import com.gic.spark.entity.bean.TrdVirtualOrderBean;
import com.gic.spark.entity.request.AbstractFilterRequest;
import com.gic.spark.entity.request.TagConsumeAmountRequest;
import com.gic.spark.util.CommonUtil;
......@@ -39,7 +39,7 @@ public class TagTopSingleConsumptionMoneyFilter extends AbstractTagConsumRecord
@Override
public JavaRDD<Integer> filterValidMember(Integer enterpriseId, AbstractFilterRequest request) {
TagConsumeAmountRequest consumeAmountRequest=(TagConsumeAmountRequest)request;
JavaRDD<TagConsumeRecordBean>consumeRecordRDD= MysqlRddManager.getPojoFromDataset(dataSourceHive.getDatasetByEnterpriseId(enterpriseId), TagConsumeRecordBean.class).javaRDD();
JavaRDD<TrdVirtualOrderBean>consumeRecordRDD= MysqlRddManager.getPojoFromDataset(dataSourceHive.getDatasetByEnterpriseId(enterpriseId), TrdVirtualOrderBean.class).javaRDD();
consumeRecordRDD=statisticsTypeHandle(consumeRecordRDD,consumeAmountRequest);
int configStatus= CommonUtil.getConfigStatus(enterpriseId);
......@@ -68,11 +68,11 @@ public class TagTopSingleConsumptionMoneyFilter extends AbstractTagConsumRecord
List<Integer>result=new ArrayList();
while (data.hasNext()){
Tuple2<Integer, Iterable<TagConsumeRecordBean>> tp2=data.next();
Tuple2<Integer, Iterable<TrdVirtualOrderBean>> tp2=data.next();
double topConsumAmount=0;
TagConsumeRecordBean topConsumeAmountBean=null;
Map<String,TagConsumeRecordBean> effectiveOrderMap=new HashMap();
List<TagConsumeRecordBean>noEffectiveOrderList=new ArrayList();
TrdVirtualOrderBean topConsumeAmountBean=null;
Map<String,TrdVirtualOrderBean> effectiveOrderMap=new HashMap();
List<TrdVirtualOrderBean>noEffectiveOrderList=new ArrayList();
tp2._2().forEach(element->{
if(element.getPay_amt()>0){
effectiveOrderMap.put(element.getOorder_no(),element);
......@@ -81,13 +81,13 @@ public class TagTopSingleConsumptionMoneyFilter extends AbstractTagConsumRecord
}
});
noEffectiveOrderList.forEach(noEffectiveOrder->{
TagConsumeRecordBean effectiveOrder=effectiveOrderMap.get(noEffectiveOrder.getOorder_no());
TrdVirtualOrderBean effectiveOrder=effectiveOrderMap.get(noEffectiveOrder.getOorder_no());
if(noEffectiveOrder.getReceipts_time().substring(0,10).equals(effectiveOrder.getReceipts_time().substring(0,10))){
effectiveOrder.setPaid_amt(effectiveOrder.getPaid_amt()+noEffectiveOrder.getPaid_amt());
effectiveOrder.setPay_amt(effectiveOrder.getPay_amt()+noEffectiveOrder.getPay_amt());
}
});
for(TagConsumeRecordBean amountBean:effectiveOrderMap.values()){
for(TrdVirtualOrderBean amountBean:effectiveOrderMap.values()){
topConsumAmount=configStatus==1?
(topConsumeAmountBean.getPaid_amt()>topConsumAmount?topConsumeAmountBean.getPaid_amt():topConsumAmount):
(topConsumeAmountBean.getPay_amt()>topConsumAmount?topConsumeAmountBean.getPay_amt():topConsumAmount);
......
......@@ -56,6 +56,12 @@ public class TagConstant {
public static final String TAG_TIME_TYPE_ALL_TIME = "alltime";//目前为止
*/
// Consumed-commodity tag codes
public static final String TAG_CODE_FIRST_CONSUME_COMMODITY="firstConsumeCommodity";// first consumed commodity
public static final String TAG_CODE_LATELY_CONSUME_COMMODITY="latelyConsumeCommodity";// most recently consumed commodity
public static final String TAG_CODE_HISTORY_CONSUME_COMMODITY="historyConsumeCommodity";// historically consumed commodity
public static final String TAG_CODE_CONSUME_COMMODITY="consumeCommodity";// consumed commodity
//消费渠道
public static final String TAG_CODE_FIRST_CONSUMPTION_CHANNEL="firstConsumptionChannel";//首次消费渠道
public static final String TAG_CODE_LATELY_CONSUMPTION_CHANNEL="latelyConsumptionChannel";//最近消费渠道
......
......@@ -146,11 +146,60 @@ public class TagValueParser {
case TagConstant.TAG_CODE_ONLINE_CONSUMPTION_STORE:
request=getOnlineConsumeStoreRequest(conditionDTO.getTagTemplateElInfo());
break;
//消费商品
case TagConstant.TAG_CODE_FIRST_CONSUME_COMMODITY:
request=getConsumeCommodityRequest(conditionDTO.getTagTemplateElInfo());
break;
case TagConstant.TAG_CODE_LATELY_CONSUME_COMMODITY:
request=getConsumeCommodityRequest(conditionDTO.getTagTemplateElInfo());
break;
case TagConstant.TAG_CODE_HISTORY_CONSUME_COMMODITY:
request=getConsumeCommodityRequest(conditionDTO.getTagTemplateElInfo());
break;
case TagConstant.TAG_CODE_CONSUME_COMMODITY:
request=getConsumeCommodityRequest(conditionDTO.getTagTemplateElInfo());
break;
default:break;
}
return request;
}
/**
 * Builds a {@link TagConsumeCommodityRequest} from the tag condition values:
 * statistics-type flags, the sku-code component, and the time-range clause.
 *
 * @param conditionValDTOList raw key/value pairs from the tag template
 * @return populated request (skuCodeList is still pending a service call — see TODO)
 */
private static AbstractFilterRequest getConsumeCommodityRequest(List<TagConditionValDTO> conditionValDTOList) {
    TagConsumeCommodityRequest request = new TagConsumeCommodityRequest();
    for (TagConditionValDTO conditionValDTO : conditionValDTOList) {
        String key = conditionValDTO.getKey();
        if (key.contains("flag")) { // equivalent to Pattern.compile("flag").matcher(key).find(), without recompiling
            setStatisticsTypeHandle(request, key, conditionValDTO.getVal());
        } else if (key.equals(TagValKeyEnum.COMPONENT_COMMODITY.getKey())) {
            List<String> skuCodeList = null; // TODO: 需要调用接口 — populate via the commodity service
            request.setSkuCodeList(skuCodeList);
        } else if (key.contains("timeRange")) {
            // BUG FIX: String.split takes a regex. split(".") matches every character and
            // returns an empty array, so keys[1] always threw ArrayIndexOutOfBoundsException.
            // The dot must be escaped to split on the literal separator.
            String[] keys = key.split("\\.");
            if (keys.length < 2) {
                continue; // malformed key — nothing to dispatch on
            }
            switch (keys[1]) {
                case TagConstant.TAG_TIME_TYPE_FIXATION:
                    // Fixed window: value is "beginDate,endDate".
                    String[] vals = conditionValDTO.getVal().split(",");
                    request.setBeginTime(DateUtil.stringToDate(vals[0]));
                    request.setEndTime(DateUtil.stringToDate(vals[1]));
                    request.setTimeRangeType(TimeRangeType.FIXATION);
                    break;
                case TagConstant.TAG_TIME_TYPE_LATELY:
                    // Rolling window: last N days.
                    request.setTimeNum(Integer.parseInt(conditionValDTO.getVal()));
                    request.setTimeRangeType(TimeRangeType.LATELY);
                    request.setYearMonthDayType(YearMonthDayType.DAY);
                    break;
                case TagConstant.TAG_TIME_TYPE_LATER:
                    // Forward window: next N days.
                    request.setTimeNum(Integer.parseInt(conditionValDTO.getVal()));
                    request.setTimeRangeType(TimeRangeType.LATER);
                    request.setYearMonthDayType(YearMonthDayType.DAY);
                    break;
                default:
                    break;
            }
        }
    }
    return request;
}
private static AbstractFilterRequest getOnlineConsumeStoreRequest(List<TagConditionValDTO> conditionValDTOList) {
TagConsumeStoreRequest request=new TagConsumeStoreRequest();
List<String>storeList=new ArrayList();
......
......@@ -12,4 +12,5 @@ public class ConstantUtil {
public static final String TAB_INTEGRAL_CU_CHANGE_LOG="tab_integral_cu_change_log";// points (integral) change-log table
public static final String ADS_GIC_TRD_ECU_SALES_LABEL_D="demoads.ads_gic_trd_ecu_sales_label_d";// Hive table: per-member sales label aggregate (TrdEcuSalesLabelBean)
public static final String DWD_GIC_TRD_VIRTUAL_WDORDER_D="democdm.dwd_gic_trd_virtual_wdorder_d";// Hive table: virtual order header (TrdVirtualOrderBean)
public static final String DWD_GIC_TRD_VIRTUAL_ORDER_ITEM_D="democdm.dwd_gic_trd_virtual_order_item_d";// Hive table: virtual order item, sku level (TrdVirtualOrderItemBean)
}
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment